repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
MostafaGazar/tensorflow
|
refs/heads/master
|
tensorflow/python/summary/event_file_inspector.py
|
62
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Logic for TensorBoard inspector to help humans investigate event files.
Example usages:
tensorboard --inspect --event_file=myevents.out
tensorboard --inspect --event_file=myevents.out --tag=loss
tensorboard --inspect --logdir=mylogdir
tensorboard --inspect --logdir=mylogdir --tag=loss
This script runs over a logdir and creates an InspectionUnit for every
subdirectory with event files. If running over an event file, it creates only
one InspectionUnit. One block of output is printed to console for each
InspectionUnit.
The primary content of an InspectionUnit is the dict field_to_obs that maps
fields (e.g. "scalar", "histogram", "session_log:start", etc.) to a list of
Observations for the field. Observations correspond one-to-one with Events in an
event file but contain less information because they only store what is
necessary to generate the final console output.
The final output is rendered to console by applying some aggregating function
to the lists of Observations. Different functions are applied depending on the
type of field. For instance, for "scalar" fields, the inspector shows aggregate
statistics. For other fields like "session_log:start", all observed steps are
printed in order to aid debugging.
[1] Query a logdir or an event file for its logged tags and summary statistics
using --logdir or --event_file.
[[event_file]] contains these tags:
histograms
binary/Sign/Activations
binary/nn_tanh/act/Activations
binary/nn_tanh/biases
binary/nn_tanh/biases:gradient
binary/nn_tanh/weights
binary/nn_tanh/weights:gradient
images
input_images/image/0
input_images/image/1
input_images/image/2
scalars
Learning Rate
Total Cost
Total Cost (raw)
Debug output aggregated over all tags:
graph
first_step 0
last_step 0
max_step 0
min_step 0
num_steps 1
outoforder_steps []
histograms
first_step 491
last_step 659823
max_step 659823
min_step 491
num_steps 993
outoforder_steps []
images -
scalars
first_step 0
last_step 659823
max_step 659823
min_step 0
num_steps 1985
outoforder_steps []
sessionlog:checkpoint
first_step 7129
last_step 657167
max_step 657167
min_step 7129
num_steps 99
outoforder_steps []
sessionlog:start
outoforder_steps []
steps [0L]
sessionlog:stop -
[2] Drill down into a particular tag using --tag.
Debug output for binary/Sign/Activations:
histograms
first_step 491
last_step 659823
max_step 659823
min_step 491
num_steps 993
outoforder_steps []
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import itertools
import os
from tensorflow.core.util.event_pb2 import SessionLog
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from tensorflow.python.platform import gfile
from tensorflow.python.summary import event_accumulator
from tensorflow.python.summary import event_multiplexer
from tensorflow.python.summary.impl import event_file_loader
FLAGS = flags.FLAGS

# Map of field names within summary.proto to the user-facing names that this
# script outputs.
SUMMARY_TYPE_TO_FIELD = {'simple_value': 'scalars',
                         'histo': 'histograms',
                         'image': 'images',
                         'audio': 'audio'}
# Any summary type known to the accumulator but missing from the map above is
# displayed under its raw proto field name.
for summary_type in event_accumulator.SUMMARY_TYPES:
  if summary_type not in SUMMARY_TYPE_TO_FIELD:
    SUMMARY_TYPE_TO_FIELD[summary_type] = summary_type

# Types of summaries that we may want to query for by tag.
TAG_FIELDS = list(SUMMARY_TYPE_TO_FIELD.values())
# Summaries that we want to see every instance of.
LONG_FIELDS = ['sessionlog:start', 'sessionlog:stop']
# Summaries that we only want an abridged digest of, since they would
# take too much screen real estate otherwise.
SHORT_FIELDS = ['graph', 'sessionlog:checkpoint'] + TAG_FIELDS
# All summary types that we can inspect.
TRACKED_FIELDS = SHORT_FIELDS + LONG_FIELDS

# An `Observation` contains the data within each Event file that the inspector
# cares about. The inspector accumulates Observations as it processes events.
Observation = collections.namedtuple('Observation', ['step', 'wall_time',
                                                     'tag'])

# An InspectionUnit is created for each organizational structure in the event
# files visible in the final terminal output. For instance, one InspectionUnit
# is created for each subdirectory in logdir. When asked to inspect a single
# event file, there may only be one InspectionUnit.
# The InspectionUnit contains the `name` of the organizational unit that will be
# printed to console, a `generator` that yields `Event` protos, and a mapping
# from string fields to `Observations` that the inspector creates.
InspectionUnit = collections.namedtuple('InspectionUnit', ['name', 'generator',
                                                           'field_to_obs'])

# Horizontal rule used to separate console output sections.
PRINT_SEPARATOR = '=' * 70 + '\n'
def get_field_to_observations_map(generator, query_for_tag=''):
  """Return a field to `Observations` dict for the event generator.

  Args:
    generator: A generator over event protos.
    query_for_tag: A string that if specified, only create observations for
      events with this tag name.

  Returns:
    A dict mapping keys in `TRACKED_FIELDS` to an `Observation` list.
  """
  field_to_obs = {field: [] for field in TRACKED_FIELDS}

  def record(field, event, tag=''):
    # Keep only what the console output needs from each event.
    assert field in TRACKED_FIELDS
    obs = Observation(step=event.step, wall_time=event.wall_time, tag=tag)
    field_to_obs[field].append(obs._asdict())

  # Map each session_log status enum value to its output field name.
  status_to_field = {SessionLog.START: 'sessionlog:start',
                     SessionLog.STOP: 'sessionlog:stop',
                     SessionLog.CHECKPOINT: 'sessionlog:checkpoint'}

  for event in generator:
    if not query_for_tag and event.HasField('graph_def'):
      record('graph', event)
    if not query_for_tag and event.HasField('session_log'):
      field = status_to_field.get(event.session_log.status)
      if field is not None:
        record(field, event)
    elif event.HasField('summary'):
      for value in event.summary.value:
        # When querying for a tag, skip summary values with other tags.
        if query_for_tag and value.tag != query_for_tag:
          continue
        for proto_name, display_name in SUMMARY_TYPE_TO_FIELD.items():
          if value.HasField(proto_name):
            record(display_name, event, value.tag)
  return field_to_obs
def get_unique_tags(field_to_obs):
  """Returns a dictionary of tags that a user could query over.

  Args:
    field_to_obs: Dict that maps string field to `Observation` list.

  Returns:
    A dict that maps keys in `TAG_FIELDS` to a list of string tags present in
    the event files. If the dict does not have any observations of the type,
    maps to an empty list so that we can render this to console.
  """
  result = {}
  for field, observations in field_to_obs.items():
    if field not in TAG_FIELDS:
      continue
    unique = {obs.get('tag', '') for obs in observations}
    result[field] = sorted(unique)
  return result
def print_dict(d, show_missing=True):
  """Prints a shallow dict to console.

  Args:
    d: Dict to print.
    show_missing: Whether to show keys with empty values.
  """
  for key, value in sorted(d.items()):
    if (not value) and show_missing:
      # No instances of the key, so print missing symbol.
      print('{} -'.format(key))
    elif isinstance(value, list):
      # Value is a list, so print each item of the list.
      print(key)
      for element in value:
        print(' {}'.format(element))
    elif isinstance(value, dict):
      # Value is a dict, so print each (key, value) pair of the dict.
      print(key)
      for inner_key, inner_value in sorted(value.items()):
        print(' {:<20} {}'.format(inner_key, inner_value))
def get_dict_to_print(field_to_obs):
  """Transform the field-to-obs mapping into a printable dictionary.

  Args:
    field_to_obs: Dict that maps string field to `Observation` list.

  Returns:
    A dict with the keys and values to print to console.
  """

  def _digest(steps):
    # Abridged view: aggregate statistics only.
    return {'num_steps': len(set(steps)),
            'min_step': min(steps),
            'max_step': max(steps),
            'last_step': steps[-1],
            'first_step': steps[0],
            'outoforder_steps': get_out_of_order(steps)}

  def _verbatim(steps):
    # Long view: every observed step.
    return {'steps': steps, 'outoforder_steps': get_out_of_order(steps)}

  output = {}
  for field, observations in field_to_obs.items():
    if not observations:
      # Render missing fields as None so print_dict shows the '-' marker.
      output[field] = None
      continue
    steps = [obs['step'] for obs in observations]
    if field in SHORT_FIELDS:
      output[field] = _digest(steps)
    if field in LONG_FIELDS:
      output[field] = _verbatim(steps)
  return output
def get_out_of_order(list_of_numbers):
  """Returns elements that break the monotonically non-decreasing trend.

  This is used to find instances of global step values that are "out-of-order",
  which may trigger TensorBoard event discarding logic.

  Args:
    list_of_numbers: A list of numbers.

  Returns:
    A list of tuples of adjacent elements (previous, current) in which the
    second element is lower than the first.
  """
  # TODO(cassandrax): Consider changing this to only check for out-of-order
  # steps within a particular tag.
  # Pair each element with its successor instead of indexing; this avoids the
  # manual `range(len(...))` loop and the i == 0 special case.
  return [(prev, cur)
          for prev, cur in zip(list_of_numbers, list_of_numbers[1:])
          if cur < prev]
def generators_from_logdir(logdir):
  """Returns a list of event generators for subdirectories with event files.

  The number of generators returned should equal the number of directories
  within logdir that contain event files. If only logdir contains event files,
  returns a list of length one.

  Args:
    logdir: A log directory that contains event files.

  Returns:
    List of event generators for each subdirectory with event files.
  """
  generators = []
  for subdir in event_multiplexer.GetLogdirSubdirectories(logdir):
    # Chain the events of every TensorFlow events file in this subdirectory
    # into a single generator.
    event_files = [os.path.join(subdir, name)
                   for name in gfile.ListDirectory(subdir)]
    per_file = [generator_from_event_file(path)
                for path in event_files
                if event_accumulator.IsTensorFlowEventsFile(path)]
    generators.append(itertools.chain(*per_file))
  return generators
def generator_from_event_file(event_file):
  """Returns a generator that yields events from an event file."""
  loader = event_file_loader.EventFileLoader(event_file)
  return loader.Load()
def get_inspection_units(logdir='', event_file='', tag=''):
  """Returns a list of InspectionUnit objects given either logdir or event_file.

  If logdir is given, the number of InspectionUnits should equal the
  number of directories or subdirectories that contain event files.
  If event_file is given, the number of InspectionUnits should be 1.

  Args:
    logdir: A log directory that contains event files.
    event_file: Or, a particular event file path.
    tag: An optional tag name to query for.

  Returns:
    A list of InspectionUnit objects.
  """
  if logdir:
    subdirs = event_multiplexer.GetLogdirSubdirectories(logdir)
    inspection_units = []
    for subdir in subdirs:
      # Chain the events of every TensorFlow events file in this subdirectory.
      generator = itertools.chain(*[
          generator_from_event_file(os.path.join(subdir, f))
          for f in gfile.ListDirectory(subdir)
          if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, f))
      ])
      # NOTE(review): get_field_to_observations_map consumes `generator`, so
      # the generator stored on the InspectionUnit is exhausted by the time
      # the unit is returned — confirm callers rely only on field_to_obs.
      inspection_units.append(InspectionUnit(
          name=subdir,
          generator=generator,
          field_to_obs=get_field_to_observations_map(generator, tag)))
    if inspection_units:
      print('Found event files in:\n{}\n'.format('\n'.join(
          [u.name for u in inspection_units])))
    elif event_accumulator.IsTensorFlowEventsFile(logdir):
      # Common mistake: passing an event file path via --logdir.
      print(
          'It seems that {} may be an event file instead of a logdir. If this '
          'is the case, use --event_file instead of --logdir to pass '
          'it in.'.format(logdir))
    else:
      print('No event files found within logdir {}'.format(logdir))
    return inspection_units
  elif event_file:
    generator = generator_from_event_file(event_file)
    return [InspectionUnit(
        name=event_file,
        generator=generator,
        field_to_obs=get_field_to_observations_map(generator, tag))]
  # Implicitly returns None when neither argument is given; inspect()
  # validates its inputs before calling this function.
def inspect(logdir='', event_file='', tag=''):
  """Main function for inspector that prints out a digest of event files.

  Args:
    logdir: A log directory that contains event files.
    event_file: Or, a particular event file path.
    tag: An optional tag name to query for.

  Raises:
    ValueError: If neither logdir and event_file are given, or both are given.
  """
  # Exactly one of logdir / event_file must be supplied.
  if logdir and event_file:
    raise ValueError(
        'Must specify either --logdir or --event_file, but not both.')
  if not (logdir or event_file):
    raise ValueError('Must specify either --logdir or --event_file.')

  print(PRINT_SEPARATOR +
        'Processing event files... (this can take a few minutes)\n' +
        PRINT_SEPARATOR)

  for unit in get_inspection_units(logdir, event_file, tag):
    if not tag:
      # If the user is not inspecting a particular tag, also print the list of
      # all available tags that they can query.
      print('These tags are in {}:'.format(unit.name))
      print_dict(get_unique_tags(unit.field_to_obs))
      print(PRINT_SEPARATOR)
      print('Event statistics for {}:'.format(unit.name))
    else:
      print('Event statistics for tag {} in {}:'.format(tag, unit.name))
    print_dict(get_dict_to_print(unit.field_to_obs), show_missing=(not tag))
    print(PRINT_SEPARATOR)
if __name__ == '__main__':
  # Delegate flag parsing and program dispatch to the TensorFlow app runner.
  app.run()
|
be-cloud-be/horizon-addons
|
refs/heads/9.0
|
web/web_translate_dialog/__init__.py
|
14224
|
# -*- coding: utf-8 -*-
|
DBrianKimmel/PyHouse
|
refs/heads/develop
|
Project/src/Modules/_test/test_Drivers.py
|
1
|
"""
@name: C:/Users/briank/workspace/PyHouse/src/Modules/_test/test_Drivers.py
@author: D. Brian Kimmel
@contact: D.BrianKimmel@gmail.com
@copyright: (c) 2015-2018 by D. Brian Kimmel
@license: MIT License
@note: Created on Jul 30, 2015
@Summary:
Passed all tests - DBK - 2018-02-13
"""
__updated__ = '2018-02-13'
# Import system type stuff
from twisted.trial import unittest, reporter, runner
# Import PyMh files and modules.
from Modules.Drivers import test as I_test
class Z_Suite(unittest.TestCase):
    """Runs the entire Modules.Drivers test package as one trial test case."""

    def setUp(self):
        # NOTE(review): this loader is never used; test_Drivers builds its
        # own TestLoader below — confirm whether this attribute is needed.
        self.m_test = runner.TestLoader()

    def test_Drivers(self):
        # Load every test from the Modules.Drivers test package and run it,
        # collecting the outcome in a twisted Reporter.
        l_package = runner.TestLoader().loadPackage(I_test)
        l_ret = reporter.Reporter()
        l_package.run(l_ret)
        l_ret.done()
        #
        # Print the reporter so the aggregated result is visible in output.
        print('\n====================\n*** test_Drivers ***\n{}\n'.format(l_ret))
# ## END DBK
|
Chandra-MARX/marxs
|
refs/heads/hamogu-patch-1
|
marxs/base/__init__.py
|
2
|
# Licensed under GPL version 3 - see LICENSE.rst
from .base import (GeometryError,
DocMeta,
MarxsElement, SimulationSequenceElement,
_parse_position_keywords
)
|
uwcirg/true_nth_usa_portal
|
refs/heads/develop
|
portal/views/crossdomain.py
|
1
|
"""Cross Domain Decorators"""
from datetime import timedelta
from functools import update_wrapper
from flask import current_app, make_response, request
from ..models.client import validate_origin
def crossdomain(
        origin=None,
        methods=None,
        headers=(
            'Authorization',
            'X-Requested-With',
            'X-CSRFToken',
            'Content-Type'
        ),
        max_age=21600, automatic_options=True):
    """Decorator to add specified crossdomain headers to response

    :param origin: '*' to allow all origins, otherwise a string with
        a single origin or a list of origins that might
        access the resource. If no origin is provided, use
        request.headers['Origin'], but ONLY if it validates. If
        no origin is provided and the request doesn't include an
        **Origin** header, no CORS headers will be added.
    :param methods: Optionally a list of methods that are allowed
        for this view. If not provided it will allow
        all methods that are implemented.
    :param headers: Optionally a list of headers that are allowed
        for this request.
    :param max_age: The number of seconds as integer or timedelta
        object for which the preflighted request is valid.
    :param automatic_options: If enabled the decorator will use the
        default Flask OPTIONS response and attach the headers there,
        otherwise the view function will be called to generate an
        appropriate response.
    :raises :py:exc:`werkzeug.exceptions.Unauthorized`:
        if no origin is provided and the one in
        request.headers['Origin'] doesn't validate as one we know.

    """
    def get_headers():
        # Join a header sequence into a single comma-separated string;
        # a plain string (or None) is passed through unchanged.
        if headers is not None and not isinstance(headers, str):
            return ', '.join(x.upper() for x in headers)
        return headers

    def get_methods():
        if methods is not None:
            return ', '.join(sorted(x.upper() for x in methods))
        # Fall back to the methods Flask reports via a default OPTIONS
        # response for this endpoint.
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def get_origin():
        """Given origin used blind, request.origin requires validation"""
        if origin:
            # Explicit origin(s) from the decorator are trusted as-is.
            if not isinstance(origin, str):
                return ', '.join(origin)
            return origin
        use_origin = None
        if 'Origin' in request.headers:
            use_origin = request.headers['Origin']
        if use_origin:
            # validate_origin raises if the request Origin is unknown.
            validate_origin(use_origin)
        return use_origin

    def get_max_age():
        # Normalize max_age (int or timedelta) to a string of seconds.
        if isinstance(max_age, timedelta):
            return str(max_age.total_seconds())
        return str(max_age)

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                # Use Flask's default OPTIONS response instead of the view.
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            # Local name shadows the decorator's `origin` parameter inside
            # this function only; get_origin() still reads the parameter.
            origin = get_origin()
            if origin:
                # Only attach CORS headers when an origin was resolved.
                h = resp.headers
                h['Access-Control-Allow-Credentials'] = 'true'
                h['Access-Control-Allow-Origin'] = origin
                h['Access-Control-Allow-Methods'] = get_methods()
                h['Access-Control-Max-Age'] = get_max_age()
                h['Access-Control-Allow-Headers'] = get_headers()
                h['Access-Control-Expose-Headers'] = 'content-length'
            return resp
        # Disable Flask's automatic OPTIONS handling so this wrapper sees
        # OPTIONS requests, and make sure OPTIONS is a required method.
        f.provide_automatic_options = False
        f.required_methods = getattr(f, 'required_methods', set())
        f.required_methods.add('OPTIONS')
        return update_wrapper(wrapped_function, f)
    return decorator
|
epitron/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/apa.py
|
1
|
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
determine_ext,
js_to_json,
)
class APAIE(InfoExtractor):
    """Extractor for APA (apa.at) video embed pages.

    Either delegates to the JWPlatform extractor when the page exposes a
    media id, or parses the page's JS ``sources`` array directly.
    """
    _VALID_URL = r'https?://[^/]+\.apa\.at/embed/(?P<id>[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})'
    _TESTS = [{
        'url': 'http://uvp.apa.at/embed/293f6d17-692a-44e3-9fd5-7b178f3a1029',
        'md5': '2b12292faeb0a7d930c778c7a5b4759b',
        'info_dict': {
            'id': 'jjv85FdZ',
            'ext': 'mp4',
            'title': '"Blau ist mysteriös": Die Blue Man Group im Interview',
            'description': 'md5:d41d8cd98f00b204e9800998ecf8427e',
            'thumbnail': r're:^https?://.*\.jpg$',
            'duration': 254,
            'timestamp': 1519211149,
            'upload_date': '20180221',
        },
    }, {
        'url': 'https://uvp-apapublisher.sf.apa.at/embed/2f94e9e6-d945-4db2-9548-f9a41ebf7b78',
        'only_matching': True,
    }, {
        'url': 'http://uvp-rma.sf.apa.at/embed/70404cca-2f47-4855-bbb8-20b1fae58f76',
        'only_matching': True,
    }, {
        'url': 'http://uvp-kleinezeitung.sf.apa.at/embed/f1c44979-dba2-4ebf-b021-e4cf2cac3c81',
        'only_matching': True,
    }]

    @staticmethod
    def _extract_urls(webpage):
        # Collect every APA embed iframe src in the page; \1 backreferences
        # the quote character that opened the src attribute.
        return [
            mobj.group('url')
            for mobj in re.finditer(
                r'<iframe[^>]+\bsrc=(["\'])(?P<url>(?:https?:)?//[^/]+\.apa\.at/embed/[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}.*?)\1',
                webpage)]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # Prefer delegating to JWPlatform when the page exposes a media id
        # (matches both 'mediaId' and 'mediaid').
        jwplatform_id = self._search_regex(
            r'media[iI]d\s*:\s*["\'](?P<id>[a-zA-Z0-9]{8})', webpage,
            'jwplatform id', default=None)
        if jwplatform_id:
            return self.url_result(
                'jwplatform:' + jwplatform_id, ie='JWPlatform',
                video_id=video_id)
        # Otherwise parse the page's `sources = [...]` JS array.
        sources = self._parse_json(
            self._search_regex(
                r'sources\s*=\s*(\[.+?\])\s*;', webpage, 'sources'),
            video_id, transform_source=js_to_json)
        formats = []
        for source in sources:
            # Skip malformed entries defensively.
            if not isinstance(source, dict):
                continue
            source_url = source.get('file')
            if not source_url or not isinstance(source_url, compat_str):
                continue
            ext = determine_ext(source_url)
            if ext == 'm3u8':
                # HLS manifest: expand into individual variant formats.
                formats.extend(self._extract_m3u8_formats(
                    source_url, video_id, 'mp4', entry_protocol='m3u8_native',
                    m3u8_id='hls', fatal=False))
            else:
                formats.append({
                    'url': source_url,
                })
        self._sort_formats(formats)
        thumbnail = self._search_regex(
            r'image\s*:\s*(["\'])(?P<url>(?:(?!\1).)+)\1', webpage,
            'thumbnail', fatal=False, group='url')
        return {
            'id': video_id,
            # The embed page is not parsed for a title; the id is used as a
            # fallback title.
            'title': video_id,
            'thumbnail': thumbnail,
            'formats': formats,
        }
|
chStaiger/ELIXIR-gridftp-PID
|
refs/heads/master
|
gridftp.py
|
1
|
#System import subprocess
import subprocess
import getopt
import sys
# PID imports
from b2handle.clientcredentials import PIDClientCredentials
from b2handle.handleclient import EUDATHandleClient
import uuid
import hashlib
import os, shutil
RED = "\033[31m"
GREEN = "\033[92m"
BLUE = "\033[34m"
DEFAULT = "\033[0m"
#Upload dataset to gridFTP server
def gridftp_upload(dataset, server, protocol, destination):
    # Acquire/refresh a grid proxy certificate; 0 indicates success.
    exit_code = subprocess.call(["grid-proxy-init"])
    if exit_code == 0:
        print GREEN, "DEBUG", DEFAULT, \
            "Uplopading", dataset, "to", protocol+"://"+server+destination
        # Recursive upload, creating destination directories (-cd) as needed.
        exit_code = subprocess.call(["globus-url-copy", "-cd", "-r",
                                     dataset,
                                     protocol+"://"+server+destination])
        print GREEN, "DEBUG"
        # List the remote destination to show what arrived.
        exit_code = subprocess.call(["globus-url-copy", "-list",
                                     protocol+"://"+server+destination])
        print DEFAULT
    # Returns the exit code of the last subprocess call that ran.
    return exit_code
#Connect to handle server --> return client instance
def conn_handle(credentials='cred_21.T12995.json'):
    # Load handle-server credentials from a JSON file.
    cred = PIDClientCredentials.load_from_JSON(credentials)
    print GREEN, "DEBUG"
    print('PID prefix ' + cred.get_prefix())
    print('Server ' + cred.get_server_URL())
    print DEFAULT
    # Return both the client and credentials; callers need the PID prefix.
    ec = EUDATHandleClient.instantiate_with_credentials(cred)
    return ec, cred
#Create a PID for the root collection
def register_dataset(ec, cred, dataset, protocol, server):
    #reverse lookup
    # Only create a new PID when none exists yet for this dataset URL.
    rev_args = dict([('URL', dataset)])
    if ec.search_handle(**rev_args)==[]:
        #Register dataset (folder) on gridftp server
        uid = uuid.uuid1()
        Handle = ec.register_handle(cred.get_prefix() + '/' + str(uid), dataset)
        print GREEN, "DEBUG", DEFAULT, \
            "Creating handle", Handle, "for", dataset
        #Add information types
        args = dict([('TYPE', 'Folder'), ('PROTOCOL', protocol),
                     ('SITE', server)])
        ec.modify_handle_value(Handle, ttl=None,
                               add_if_not_exist=True, **args)
    else:
        # Dataset already registered: reuse the first existing handle.
        print RED, "WARNING", DEFAULT, dataset, \
            "already has handles", ec.search_handle(**rev_args)
        Handle = ec.search_handle(**rev_args)[0]
    return Handle
# Returns a list of children or empty list
def get_children(pid, ec):
    entry = ec.get_value_from_handle(pid, 'CHILDREN')
    if entry == None:
        return []
    else:
        # CHILDREN stores a python-repr-like string of handle ids (as written
        # by register_files); strip unicode markers, quotes and brackets,
        # then split on ', ' to recover the list.
        return entry.replace("u'", "").replace("'", "").strip("]").strip("[").split(', ')
#Given the root collection, registers folders and files
#Data needs to reside on a gridFTP server
def register_files(ec, cred, dataset, protocol, server):
    #Create PID for each file and subcollection in the dataset
    args = dict([('TYPE', 'Folder'), ('PROTOCOL', protocol), ('SITE', server)])
    parent_args = dict()
    collection = [dataset] # root collection
    # Breadth-first walk: process the head of `collection`, appending any
    # sub-folders encountered so they are processed in later iterations.
    while len(collection) > 0:
        children = []
        coll = collection[0]
        # PID of the current folder; assumes it is already registered
        # (the root via register_dataset, sub-folders by earlier iterations).
        coll_pid = ec.search_handle(**dict([('URL', coll)]))[0]
        p = subprocess.Popen(["globus-url-copy -list "+ protocol+"://"+server+coll],
                             shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        for line in p.stdout.readlines()[1:]: #get files and folders
            if line.strip() == "":
                continue
            else:
                #Check if folder or file
                if line.strip().endswith('/'):
                    args["TYPE"] = "Folder"
                    collection.append(coll+line.strip())
                else:
                    args["TYPE"] = "File"
                # NOTE(review): in this reconstruction PARENT is set for files
                # and folders alike — confirm against original indentation.
                args["PARENT"] = coll_pid
                #reverse lookup for field URL, do not regenerate PIDs for same paths
                rev_args = dict([('URL', coll+line.strip())])
                if ec.search_handle(**rev_args)==[]:
                    uid = uuid.uuid1()
                    h = ec.register_handle(cred.get_prefix() + '/' + str(uid),
                                           coll+line.strip())
                    children.append(h)
                    ec.modify_handle_value(h, ttl=None,
                                           add_if_not_exist=True, **args)
                    print GREEN, "DEBUG", DEFAULT, "Created handle", h, \
                        "for", coll+line.strip()
                else:
                    # Entry already registered: record existing handle(s).
                    children.extend(ec.search_handle(**rev_args))
                    print RED, "WARNING", DEFAULT, coll+line.strip(), \
                        "already has handles", ec.search_handle(**rev_args)
        # Update collection with all PIDs to children
        parent_args['CHILDREN'] = ', '.join(children)
        print GREEN, "DEBUG", DEFAULT, "Update ", coll_pid
        #print GREEN, "DEBUG", DEFAULT, "CHILDREN ", children
        ec.modify_handle_value(coll_pid, ttl=None,
                               add_if_not_exist=True, **parent_args)
        collection.remove(coll)
def sync_dataset(pid, local_data, ec, cred):
    # Synchronises a local dataset to an existing registered dataset on a grdiFTP server.
    # Get location of dataset
    assert ec.get_value_from_handle(pid, 'TYPE') == 'Folder'
    server = ec.get_value_from_handle(pid, 'SITE').strip('/')
    protocol = ec.get_value_from_handle(pid, 'PROTOCOL')
    dest_coll = ec.get_value_from_handle(pid, 'URL')
    # By default only files that are not present will be transfered
    # -sync -sync-level 0
    exit_code = subprocess.call(["grid-proxy-init"])
    if exit_code == 0:
        print GREEN, "DEBUG", DEFAULT, \
            "Uplopading", local_data, "to", protocol+"://"+server+dest_coll
        exit_code = subprocess.call(["globus-url-copy", "-cd", "-r",
                                     "-sync", "-sync-level", "0", local_data,
                                     protocol+"://"+server+dest_coll])
        print GREEN, "DEBUG", exit_code
        exit_code = subprocess.call(["globus-url-copy", "-list",
                                     protocol+"://"+server+dest_coll])
        print GREEN, "DEBUG", exit_code
        print DEFAULT
    # Update PID entries for a whole dataset, e.g. after adding more data
    # Add children and link to parent
    # Assumes that data cannot be deleted from dataset.
    # NOTE(review): placed at function level in this reconstruction, so PIDs
    # are (re)registered even if grid-proxy-init failed — confirm intent.
    register_files(ec, cred, dest_coll, protocol, server)
def download_dataset(pid, destination):
    #Instantiate client for reading --> credentials necessary
    ec = EUDATHandleClient.instantiate_for_read_access('https://hdl.handle.net')
    record = ec.retrieve_handle_record(pid)
    # The record must exist and carry the fields written at registration time.
    assert record != None
    assert 'URL' in record
    assert 'PROTOCOL' in record
    assert 'SITE' in record
    protocol = record['PROTOCOL']
    site = record['SITE']
    source = record['URL']
    print GREEN, "DEBUG", DEFAULT, \
        "PID", pid, "resolves to", protocol+"://"+site+source
    exit_code = subprocess.call(["grid-proxy-init"])
    print GREEN, "DEBUG downloading:"
    # Show the remote listing before downloading.
    exit_code = subprocess.call(["globus-url-copy", "-list",
                                 protocol+"://"+site+source])
    print "Destination", destination
    print DEFAULT
    # Recursive download, creating local directories (-cd) as needed.
    exit_code = subprocess.call(["globus-url-copy", "-cd", "-r",
                                 protocol+"://"+site+source, destination])
    return exit_code
def update_url(pid, new_location, ec):
    # After moving a dataset to another location all the URL fields need to be updated.
    # new_location needs a trailing '/'!!
    # Check whether we have the dataset pid:
    # No PARENT entry
    record = ec.retrieve_handle_record(pid)
    assert record != None
    assert 'PARENT' not in record
    url = record['URL'] # root of the dataset
    # Check if new location is really new
    assert url != new_location
    print GREEN, "DEBUG", DEFAULT, "Update URL of collection", pid
    print GREEN, "DEBUG", DEFAULT, "Old location", url
    print GREEN, "DEBUG", DEFAULT, "New location", new_location
    print
    # Update all URL fields in the directory tree
    collection = [pid] # root collection
    args = {}
    # Walk the tree via the CHILDREN field, rewriting each handle's URL
    # prefix from the old root to the new location.
    while len(collection) > 0:
        # get all children
        collection.extend(get_children(collection[0], ec))
        #print GREEN, "DEBUG", DEFAULT, collection
        r = ec.retrieve_handle_record(collection[0])
        cur_url = r['URL']
        args['URL'] = cur_url.replace(url, new_location)
        ec.modify_handle_value(collection[0], ttl=None, **args)
        collection.remove(collection[0])
    print GREEN, "DEBUG", DEFAULT, "URL Update DONE."
def main():
    """
    Usage:
    Upload to gridFTP server
    python gridftp.py -u <upload collection> -g <destination>
    Synchronise existing dataset
    python gridftp.py -u <upload collection> -p <PID>
    Update URL of a dataset
    python gridftp.py -p <PID> -n <new URL to dataset>
    Download with PID
    pythoe gridftp.py -d <local download destination> -p <PID>
    """
    # parse command line options
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hu:d:p:g:e:n:s:", ["help"])
    except getopt.error, msg:
        print msg
        print "for help use --help"
        sys.exit(2)
    # Positional arguments are not accepted; everything must be an option.
    if args != []:
        print "for help use --help"
        sys.exit(2)
    # process options
    #for upload
    dataset_up = ""
    protocol = "gsiftp"
    server = "nlnode.elixirgridftp-sara.surf-hosted.nl/"
    destination_ftp = ""
    #for download
    # NOTE(review): pid has a hard-coded default, so pid-based branches below
    # can trigger even when the user never passed -p — confirm intent.
    pid = "21.T12995/A866A7A8-E947-11E6-A26B-040091643BEA"
    destination = ""
    #for update
    new_loc = ""
    for o, a in opts:
        print o, a
        if o in ("-h", "--help"):
            print "Help"
            # NOTE(review): prints the module docstring, not main's usage
            # string above — the module has no docstring, so this prints None.
            print "Help"
            sys.exit(0)
        elif o == "--sync":
            # NOTE(review): "--sync" is not declared in the getopt long-option
            # list above, so this branch is unreachable as written.
            sync = True
        elif o == "-u":
            dataset_up = a
        elif o == "-d":
            destination = a
        elif o == "-p":
            pid = a
        elif o == "-g":
            destination_ftp = a
        elif o == "-e":
            protocol = a
        elif o == "-s":
            server = a
        elif o == "-n":
            new_loc = a
        else:
            print "option unknown"
            sys.exit(2)
    if not (protocol and server):
        print "%sDefine server and protocol. For help use --help%s" %(RED, DEFAULT)
        return 0
    # Dispatch on which option combination was supplied.
    if dataset_up and destination_ftp:
        print "Uploading data to gridFTP server"
        gridftp_upload(dataset_up, server, protocol, destination_ftp)
        print "Registering PIDs"
        ec, cred = conn_handle(credentials='cred_21.T12995.json')
        pid = register_dataset(ec, cred, destination_ftp, protocol, server)
        print "Dataset PID:", pid
        register_files(ec, cred, destination_ftp, protocol, server)
    elif dataset_up and pid:
        ec, cred = conn_handle(credentials='cred_21.T12995.json')
        sync_dataset(pid, dataset_up, ec, cred)
    elif pid and destination:
        print "Downloading data fom gridFTP server"
        download_dataset(pid, destination)
    elif pid and new_loc:
        ec, cred = conn_handle(credentials='cred_21.T12995.json')
        update_url(pid, new_loc, ec)
    else:
        print "%sNot a valid option. For help use --help%s" %(RED, DEFAULT)
    return 0

if __name__ == "__main__":
    main()
|
skyddv/neutron
|
refs/heads/master
|
neutron/db/migration/alembic_migrations/versions/liberty/expand/52c5312f6baf_address_scopes.py
|
40
|
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Initial operations in support of address scopes
"""
# revision identifiers, used by Alembic.
revision = '52c5312f6baf'
down_revision = '599c6a226151'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the address_scopes table for the Liberty expand migration."""
    op.create_table(
        'address_scopes',
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        # tenant_id is indexed to speed up per-tenant lookups.
        sa.Column('tenant_id', sa.String(length=255), nullable=True,
                  index=True),
        sa.Column('shared', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'))
|
FrankBian/kuma
|
refs/heads/master
|
vendor/packages/html5lib/src/html5lib/treebuilders/_base.py
|
26
|
from html5lib.constants import scopingElements, tableInsertModeElements, namespaces
try:
frozenset
except NameError:
# Import from the sets module for python 2.3
from sets import Set as set
from sets import ImmutableSet as frozenset
# The scope markers are inserted when entering buttons, object elements,
# marquees, table cells, and table captions, and are used to prevent formatting
# from "leaking" into tables, buttons, object elements, and marquees.
Marker = None
class Node(object):
    """Abstract base class for tree nodes; concrete treebuilders subclass it."""

    def __init__(self, name):
        """Node representing an item in the tree.
        name - The tag name associated with the node
        parent - The parent of the current node (or None for the document node)
        value - The value of the current node (applies to text nodes and
        comments
        attributes - a dict holding name, value pairs for attributes of the node
        childNodes - a list of child nodes of the current node. This must
        include all elements but not necessarily other node types
        _flags - A list of miscellaneous flags that can be set on the node
        """
        self.name = name
        self.parent = None
        self.value = None
        self.attributes = {}
        self.childNodes = []
        self._flags = []

    def __unicode__(self):
        # Render the node as a start-tag-like string including attributes.
        attributesStr = " ".join(["%s=\"%s\""%(name, value)
                                  for name, value in
                                  self.attributes.iteritems()])
        if attributesStr:
            return "<%s %s>"%(self.name,attributesStr)
        else:
            return "<%s>"%(self.name)

    def __repr__(self):
        return "<%s>" % (self.name)

    def appendChild(self, node):
        """Insert node as a child of the current node
        """
        raise NotImplementedError

    def insertText(self, data, insertBefore=None):
        """Insert data as text in the current node, positioned before the
        start of node insertBefore or to the end of the node's text.
        """
        raise NotImplementedError

    def insertBefore(self, node, refNode):
        """Insert node as a child of the current node, before refNode in the
        list of child nodes. Raises ValueError if refNode is not a child of
        the current node"""
        raise NotImplementedError

    def removeChild(self, node):
        """Remove node from the children of the current node
        """
        raise NotImplementedError

    def reparentChildren(self, newParent):
        """Move all the children of the current node to newParent.
        This is needed so that trees that don't store text as nodes move the
        text in the correct way
        """
        #XXX - should this method be made more general?
        for child in self.childNodes:
            newParent.appendChild(child)
        self.childNodes = []

    def cloneNode(self):
        """Return a shallow copy of the current node i.e. a node with the same
        name and attributes but with no parent or child nodes
        """
        raise NotImplementedError

    def hasContent(self):
        """Return true if the node has children or text, false otherwise
        """
        raise NotImplementedError
class TreeBuilder(object):
    """Base treebuilder implementation
    documentClass - the class to use for the bottommost node of a document
    elementClass - the class to use for HTML Elements
    commentClass - the class to use for comments
    doctypeClass - the class to use for doctypes
    """
    #Document class
    documentClass = None
    #The class to use for creating a node
    elementClass = None
    #The class to use for creating comments
    commentClass = None
    #The class to use for creating doctypes
    doctypeClass = None
    #Fragment class
    fragmentClass = None
    def __init__(self, namespaceHTMLElements):
        """Create a tree builder.
        namespaceHTMLElements - if true, elements default to the XHTML
        namespace; otherwise their default namespace is None.
        """
        if namespaceHTMLElements:
            self.defaultNamespace = "http://www.w3.org/1999/xhtml"
        else:
            self.defaultNamespace = None
        self.reset()
    def reset(self):
        """Reset all builder state so the instance can parse a new document."""
        self.openElements = []
        self.activeFormattingElements = []
        #XXX - rename these to headElement, formElement
        self.headPointer = None
        self.formPointer = None
        self.insertFromTable = False
        self.document = self.documentClass()
    def elementInScope(self, target, variant=None):
        """Return True if an element named target is in scope.
        variant selects the scope definition: None (default scope),
        "list" (adds ol/ul) or "table" (html/table only).
        NOTE(review): matching is by tag name only; namespaces are not
        compared against target here.
        """
        # Exit early when possible.
        listElementsMap = {
            None:scopingElements,
            "list":scopingElements | set([(namespaces["html"], "ol"),
                                          (namespaces["html"], "ul")]),
            "table":set([(namespaces["html"], "html"),
                         (namespaces["html"], "table")])
        }
        listElements = listElementsMap[variant]
        for node in reversed(self.openElements):
            if node.name == target:
                return True
            elif node.nameTuple in listElements:
                return False
        assert False # We should never reach this point
    def reconstructActiveFormattingElements(self):
        """Reopen (clones of) formatting elements that were implicitly
        closed, following the HTML5 "reconstruct the active formatting
        elements" algorithm; the step comments below refer to that spec.
        """
        # Within this algorithm the order of steps described in the
        # specification is not quite the same as the order of steps in the
        # code. It should still do the same though.
        # Step 1: stop the algorithm when there's nothing to do.
        if not self.activeFormattingElements:
            return
        # Step 2 and step 3: we start with the last element. So i is -1.
        i = len(self.activeFormattingElements) - 1
        entry = self.activeFormattingElements[i]
        if entry == Marker or entry in self.openElements:
            return
        # Step 6
        while entry != Marker and entry not in self.openElements:
            if i == 0:
                #This will be reset to 0 below
                i = -1
                break
            i -= 1
            # Step 5: let entry be one earlier in the list.
            entry = self.activeFormattingElements[i]
        while True:
            # Step 7
            i += 1
            # Step 8
            entry = self.activeFormattingElements[i]
            clone = entry.cloneNode() #Mainly to get a new copy of the attributes
            # Step 9
            element = self.insertElement({"type":"StartTag",
                                          "name":clone.name,
                                          "namespace":clone.namespace,
                                          "data":clone.attributes})
            # Step 10
            self.activeFormattingElements[i] = element
            # Step 11
            if element == self.activeFormattingElements[-1]:
                break
    def clearActiveFormattingElements(self):
        """Pop active formatting elements up to and including the most
        recent Marker."""
        entry = self.activeFormattingElements.pop()
        while self.activeFormattingElements and entry != Marker:
            entry = self.activeFormattingElements.pop()
    def elementInActiveFormattingElements(self, name):
        """Check if an element exists between the end of the active
        formatting elements and the last marker. If it does, return it, else
        return false"""
        for item in self.activeFormattingElements[::-1]:
            # Check for Marker first because if it's a Marker it doesn't have a
            # name attribute.
            if item == Marker:
                break
            elif item.name == name:
                return item
        return False
    def insertRoot(self, token):
        """Create the root element from token, push it on the open-elements
        stack and attach it to the document."""
        element = self.createElement(token)
        self.openElements.append(element)
        self.document.appendChild(element)
    def insertDoctype(self, token):
        """Append a doctype node built from token to the document."""
        name = token["name"]
        publicId = token["publicId"]
        systemId = token["systemId"]
        doctype = self.doctypeClass(name, publicId, systemId)
        self.document.appendChild(doctype)
    def insertComment(self, token, parent=None):
        """Append a comment node; parent defaults to the current open
        element."""
        if parent is None:
            parent = self.openElements[-1]
        parent.appendChild(self.commentClass(token["data"]))
    def createElement(self, token):
        """Create an element but don't insert it anywhere"""
        name = token["name"]
        namespace = token.get("namespace", self.defaultNamespace)
        element = self.elementClass(name, namespace)
        element.attributes = token["data"]
        return element
    def _getInsertFromTable(self):
        # Getter half of the insertFromTable property below.
        return self._insertFromTable
    def _setInsertFromTable(self, value):
        """Switch the function used to insert an element from the
        normal one to the misnested table one and back again"""
        self._insertFromTable = value
        if value:
            self.insertElement = self.insertElementTable
        else:
            self.insertElement = self.insertElementNormal
    insertFromTable = property(_getInsertFromTable, _setInsertFromTable)
    def insertElementNormal(self, token):
        """Create an element from token, append it to the current open
        element and push it on the open-elements stack."""
        name = token["name"]
        namespace = token.get("namespace", self.defaultNamespace)
        element = self.elementClass(name, namespace)
        element.attributes = token["data"]
        self.openElements[-1].appendChild(element)
        self.openElements.append(element)
        return element
    def insertElementTable(self, token):
        """Create an element and insert it into the tree"""
        element = self.createElement(token)
        if self.openElements[-1].name not in tableInsertModeElements:
            return self.insertElementNormal(token)
        else:
            #We should be in the InTable mode. This means we want to do
            #special magic element rearranging
            parent, insertBefore = self.getTableMisnestedNodePosition()
            if insertBefore is None:
                parent.appendChild(element)
            else:
                parent.insertBefore(element, insertBefore)
            self.openElements.append(element)
        return element
    def insertText(self, data, parent=None):
        """Insert text data."""
        if parent is None:
            parent = self.openElements[-1]
        if (not self.insertFromTable or (self.insertFromTable and
                                         self.openElements[-1].name
                                         not in tableInsertModeElements)):
            parent.insertText(data)
        else:
            # We should be in the InTable mode. This means we want to do
            # special magic element rearranging
            parent, insertBefore = self.getTableMisnestedNodePosition()
            parent.insertText(data, insertBefore)
    def getTableMisnestedNodePosition(self):
        """Get the foster parent element, and sibling to insert before
        (or None) when inserting a misnested table node"""
        # The foster parent element is the one which comes before the most
        # recently opened table element
        # XXX - this is really inelegant
        lastTable=None
        fosterParent = None
        insertBefore = None
        for elm in self.openElements[::-1]:
            if elm.name == "table":
                lastTable = elm
                break
        if lastTable:
            # XXX - we should really check that this parent is actually a
            # node here
            if lastTable.parent:
                fosterParent = lastTable.parent
                insertBefore = lastTable
            else:
                fosterParent = self.openElements[
                    self.openElements.index(lastTable) - 1]
        else:
            fosterParent = self.openElements[0]
        return fosterParent, insertBefore
    def generateImpliedEndTags(self, exclude=None):
        """Pop open elements whose end tags may be implied (dd, dt, li, p,
        td, th, tr), recursing until the current node is not one of them;
        exclude names a tag that must not be popped."""
        name = self.openElements[-1].name
        # XXX td, th and tr are not actually needed
        if (name in frozenset(("dd", "dt", "li", "p", "td", "th", "tr"))
            and name != exclude):
            self.openElements.pop()
            # XXX This is not entirely what the specification says. We should
            # investigate it more closely.
            self.generateImpliedEndTags(exclude)
    def getDocument(self):
        "Return the final tree"
        return self.document
    def getFragment(self):
        "Return the final fragment"
        #assert self.innerHTML
        fragment = self.fragmentClass()
        self.openElements[0].reparentChildren(fragment)
        return fragment
    def testSerializer(self, node):
        """Serialize the subtree of node in the format required by unit tests
        node - the node from which to start serializing"""
        raise NotImplementedError
|
isshe/Language
|
refs/heads/master
|
Python/20161126/2_test_object.py
|
1
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
class Student(object):
    """A student record holding a name and a score."""
    def __init__(self, name, score):
        """Store the student's name and score."""
        self.name, self.score = name, score
    def print_score(self):
        """Print the record in "name: score" form."""
        print('%s: %s' % (self.name, self.score))
# Demo entry point: construct one Student and print its record.
if __name__=='__main__':
    isshe = Student('isshe', 99)
    isshe.print_score()
|
persandstrom/home-assistant
|
refs/heads/master
|
homeassistant/components/sensor/ios.py
|
5
|
"""
Support for Home Assistant iOS app sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/ecosystem/ios/
"""
from homeassistant.components import ios
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
DEPENDENCIES = ['ios']
SENSOR_TYPES = {
'level': ['Battery Level', '%'],
'state': ['Battery State', None]
}
DEFAULT_ICON_LEVEL = 'mdi:battery'
DEFAULT_ICON_STATE = 'mdi:power-plug'
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the iOS sensor.

    Intentionally a no-op: kept so that a stray ``platform: ios`` entry
    in the YAML configuration does not raise an error.
    """
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up iOS from a config entry."""
    # One 'level' and one 'state' sensor per known device, added with
    # update_before_add=True so each entity fetches state immediately.
    entities = [
        IOSSensor(sensor_type, device_name, device)
        for device_name, device in ios.devices(hass).items()
        for sensor_type in ('level', 'state')
    ]
    async_add_entities(entities, True)
class IOSSensor(Entity):
    """Battery sensor reported by the Home Assistant iOS app."""
    def __init__(self, sensor_type, device_name, device):
        """Initialize the sensor."""
        self._device_name = device_name
        self._device = device
        self.type = sensor_type
        self._state = None
        # Display name and unit both come from the SENSOR_TYPES table.
        self._name = "{} {}".format(device_name, SENSOR_TYPES[sensor_type][0])
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
    @property
    def device_info(self):
        """Return information about the device."""
        device = self._device[ios.ATTR_DEVICE]
        return {
            'identifiers': {
                (ios.DOMAIN, device[ios.ATTR_DEVICE_PERMANENT_ID]),
            },
            'name': device[ios.ATTR_DEVICE_NAME],
            'manufacturer': 'Apple',
            'model': device[ios.ATTR_DEVICE_TYPE],
            'sw_version': device[ios.ATTR_DEVICE_SYSTEM_VERSION],
        }
    @property
    def name(self):
        """Return the name of the iOS sensor."""
        return "{} {}".format(
            self._device[ios.ATTR_DEVICE][ios.ATTR_DEVICE_NAME],
            SENSOR_TYPES[self.type][0])
    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state
    @property
    def unique_id(self):
        """Return the unique ID of this sensor."""
        return "{}_{}".format(self.type, self._device[ios.ATTR_DEVICE_ID])
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement this sensor expresses itself in."""
        return self._unit_of_measurement
    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        device = self._device[ios.ATTR_DEVICE]
        battery = self._device[ios.ATTR_BATTERY]
        return {
            "Battery State": battery[ios.ATTR_BATTERY_STATE],
            "Battery Level": battery[ios.ATTR_BATTERY_LEVEL],
            "Device Type": device[ios.ATTR_DEVICE_TYPE],
            "Device Name": device[ios.ATTR_DEVICE_NAME],
            "Device Version": device[ios.ATTR_DEVICE_SYSTEM_VERSION],
        }
    @property
    def icon(self):
        """Return the icon to use in the frontend, if any."""
        battery = self._device[ios.ATTR_BATTERY]
        battery_state = battery[ios.ATTR_BATTERY_STATE]
        battery_level = battery[ios.ATTR_BATTERY_LEVEL]
        if battery_state in (ios.ATTR_BATTERY_STATE_FULL,
                             ios.ATTR_BATTERY_STATE_UNPLUGGED):
            charging = False
            icon_state = "{}-off".format(DEFAULT_ICON_STATE)
        elif battery_state == ios.ATTR_BATTERY_STATE_UNKNOWN:
            battery_level = None
            charging = False
            icon_state = "{}-unknown".format(DEFAULT_ICON_LEVEL)
        else:
            charging = True
            icon_state = DEFAULT_ICON_STATE
        if self.type == "state":
            return icon_state
        return icon_for_battery_level(battery_level=battery_level,
                                      charging=charging)
    def update(self):
        """Get the latest state of the sensor."""
        self._device = ios.devices(self.hass).get(self._device_name)
        self._state = self._device[ios.ATTR_BATTERY][self.type]
|
demvher/pythondotorg
|
refs/heads/master
|
docs/source/conf.py
|
2
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Python.org Website documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 21 10:03:44 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import time
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Python.org Website'
copyright = '%s, Python Software Foundation' % time.strftime('%Y')
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Prefer the Read the Docs theme when the sphinx_rtd_theme package is
# installed; otherwise fall back to Sphinx's built-in 'default' theme.
try:
    import sphinx_rtd_theme
except ImportError:
    html_theme = 'default'
else:
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'PythonorgWebsitedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'PythonorgWebsite.tex', 'Python.org Website Documentation',
'Python Software Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pythonorgwebsite', 'Python.org Website Documentation',
['Python Software Foundation'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'PythonorgWebsite', 'Python.org Website Documentation',
'Python Software Foundation', 'PythonorgWebsite', '',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
muraliselva10/cloudkitty
|
refs/heads/master
|
cloudkitty/api/v1/types.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2014 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Stéphane Albert
#
from oslo_utils import uuidutils
from wsme import types as wtypes
from cloudkitty.i18n import _LE
class UuidType(wtypes.UuidType):
    """A simple UUID type."""
    basetype = wtypes.text
    name = 'uuid'
    @staticmethod
    def validate(value):
        """Return value unchanged if it looks like a UUID, else raise
        ValueError."""
        if uuidutils.is_uuid_like(value):
            return value
        raise ValueError(_LE("Invalid UUID, got '%s'") % value)
# Code taken from ironic types
class MultiType(wtypes.UserType):
    """A complex type that represents one or more types.
    Used for validating that a value is an instance of one of the types.
    :param *types: Variable-length list of types.
    """
    def __init__(self, *types):
        # Remember every acceptable type, in priority order.
        self.types = types
    def __str__(self):
        return ' | '.join(str(t) for t in self.types)
    def validate(self, value):
        """Return value coerced to the first matching type, else raise
        ValueError."""
        for candidate in self.types:
            # Transparently decode bytes when text is acceptable.
            if candidate is wtypes.text and isinstance(value, wtypes.bytes):
                value = value.decode()
            if isinstance(value, candidate):
                return value
        raise ValueError(
            _LE("Wrong type. Expected '%(type)s', got '%(value)s'")
            % {'type': self.types, 'value': type(value)})
|
knittledan/netflixXBMC
|
refs/heads/master
|
resources/mechanize/_headersutil.py
|
133
|
"""Utility functions for HTTP header value parsing and construction.
Copyright 1997-1998, Gisle Aas
Copyright 2002-2006, John J. Lee
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
import os, re
from types import StringType
from types import UnicodeType
STRING_TYPES = StringType, UnicodeType
from _util import http2time
import _rfc3986
def is_html_file_extension(url, allow_xhtml):
    """Return True if the URL's path ends in an HTML-ish file extension."""
    extension = os.path.splitext(_rfc3986.urlsplit(url)[2])[1]
    recognised = [".htm", ".html"]
    if allow_xhtml:
        recognised.append(".xhtml")
    return extension in recognised
def is_html(ct_headers, url, allow_xhtml=False):
    """
    ct_headers: Sequence of Content-Type headers
    url: Response URL
    """
    # With no usable Content-Type information, fall back to the extension.
    if not ct_headers:
        return is_html_file_extension(url, allow_xhtml)
    parsed = split_header_words(ct_headers)
    if not parsed:
        return is_html_file_extension(url, allow_xhtml)
    # The media type is the first token of the first header.
    ct = parsed[0][0][0]
    html_types = ["text/html"]
    if allow_xhtml:
        html_types.extend([
            "text/xhtml", "text/xml",
            "application/xml", "application/xhtml+xml",
        ])
    return ct in html_types
def unmatched(match):
    """Return unmatched part of re.Match object."""
    start, end = match.span(0)
    subject = match.string
    return subject[:start] + subject[end:]
# Anchored patterns used by split_header_words on the remaining text:
# a bare token, a quoted '="..."' value, an unquoted '=value', and a
# backslash escape inside a quoted value.
token_re = re.compile(r"^\s*([^=\s;,]+)")
quoted_value_re = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"")
value_re = re.compile(r"^\s*=\s*([^\s;,]*)")
escape_re = re.compile(r"\\(.)")
def split_header_words(header_values):
    r"""Parse header values into a list of lists containing key,value pairs.
    The function knows how to deal with ",", ";" and "=" as well as quoted
    values after "=". A list of space separated tokens are parsed as if they
    were separated by ";".
    If the header_values passed as argument contains multiple values, then they
    are treated as if they were a single value separated by comma ",".
    This means that this function is useful for parsing header fields that
    follow this syntax (BNF as from the HTTP/1.1 specification, but we relax
    the requirement for tokens).
      headers       = #header
      header        = (token | parameter) *( [";"] (token | parameter))
      token         = 1*<any CHAR except CTLs or separators>
      separators    = "(" | ")" | "<" | ">" | "@"
                    | "," | ";" | ":" | "\" | <">
                    | "/" | "[" | "]" | "?" | "="
                    | "{" | "}" | SP | HT
      quoted-string = ( <"> *(qdtext | quoted-pair ) <"> )
      qdtext        = <any TEXT except <">>
      quoted-pair   = "\" CHAR
      parameter     = attribute "=" value
      attribute     = token
      value         = token | quoted-string
    Each header is represented by a list of key/value pairs. The value for a
    simple token (not part of a parameter) is None. Syntactically incorrect
    headers will not necessarily be parsed as you would want.
    This is easier to describe with some examples:
    >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz'])
    [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]]
    >>> split_header_words(['text/html; charset="iso-8859-1"'])
    [[('text/html', None), ('charset', 'iso-8859-1')]]
    >>> split_header_words([r'Basic realm="\"foo\bar\""'])
    [[('Basic', None), ('realm', '"foobar"')]]
    """
    assert type(header_values) not in STRING_TYPES
    result = []
    for text in header_values:
        orig_text = text
        pairs = []
        while text:
            m = token_re.search(text)
            if m:
                text = unmatched(m)
                name = m.group(1)
                m = quoted_value_re.search(text)
                if m: # quoted value
                    text = unmatched(m)
                    value = m.group(1)
                    value = escape_re.sub(r"\1", value)
                else:
                    m = value_re.search(text)
                    if m: # unquoted value
                        text = unmatched(m)
                        value = m.group(1)
                        value = value.rstrip()
                    else:
                        # no value, a lone token
                        value = None
                pairs.append((name, value))
            elif text.lstrip().startswith(","):
                # concatenated headers, as per RFC 2616 section 4.2
                text = text.lstrip()[1:]
                if pairs: result.append(pairs)
                pairs = []
            else:
                # skip junk
                # Raw string fix: in a non-raw literal "\s" is an invalid
                # escape sequence (DeprecationWarning since Python 3.6,
                # slated to become an error); the matched pattern is
                # unchanged.
                non_junk, nr_junk_chars = re.subn(r"^[=\s;]*", "", text)
                assert nr_junk_chars > 0, (
                    "split_header_words bug: '%s', '%s', %s" %
                    (orig_text, text, pairs))
                text = non_junk
        if pairs: result.append(pairs)
    return result
# Characters that must be backslash-escaped inside a quoted value.
join_escape_re = re.compile(r"([\"\\])")
def join_header_words(lists):
    """Do the inverse of the conversion done by split_header_words.
    Takes a list of lists of (key, value) pairs and produces a single header
    value.  Attribute values are quoted if needed.
    >>> join_header_words([[("text/plain", None), ("charset", "iso-8859/1")]])
    'text/plain; charset="iso-8859/1"'
    >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859/1")]])
    'text/plain, charset="iso-8859/1"'
    """
    headers = []
    for pairs in lists:
        attributes = []
        for key, value in pairs:
            if value is not None:
                if not re.search(r"^\w+$", value):
                    # Quote values with non-word characters, escaping
                    # embedded quotes and backslashes.
                    value = '"%s"' % join_escape_re.sub(r"\\\1", value)
                if key is None: # Netscape cookies may have no name
                    key = value
                else:
                    key = "%s=%s" % (key, value)
            attributes.append(key)
        if attributes:
            headers.append("; ".join(attributes))
    return ", ".join(headers)
def strip_quotes(text):
    """Strip at most one leading and one trailing double quote."""
    if text.startswith('"'):
        text = text[1:]
    return text[:-1] if text.endswith('"') else text
def parse_ns_headers(ns_headers):
    """Ad-hoc parser for Netscape protocol cookie-attributes.
    The old Netscape cookie format for Set-Cookie can for instance contain
    an unquoted "," in the expires field, so we have to use this ad-hoc
    parser instead of split_header_words.
    XXX This may not make the best possible effort to parse all the crap
    that Netscape Cookie headers contain. Ronald Tschalar's HTTPClient
    parser is probably better, so could do worse than following that if
    this ever gives any trouble.
    Currently, this is also used for parsing RFC 2109 cookies.
    """
    known_attrs = ("expires", "domain", "path", "secure",
                   # RFC 2109 attrs (may turn up in Netscape cookies, too)
                   "version", "port", "max-age")
    result = []
    for ns_header in ns_headers:
        pairs = []
        version_set = False
        # Attributes are separated by ';'; the first one is the cookie's
        # own name=value pair.
        params = re.split(r";\s*", ns_header)
        for ii in range(len(params)):
            param = params[ii]
            param = param.rstrip()
            if param == "": continue
            if "=" not in param:
                # Boolean attribute such as "secure".
                k, v = param, None
            else:
                k, v = re.split(r"\s*=\s*", param, 1)
                k = k.lstrip()
            if ii != 0:
                # Only normalise attribute names, never the cookie name.
                lc = k.lower()
                if lc in known_attrs:
                    k = lc
                if k == "version":
                    # This is an RFC 2109 cookie.
                    v = strip_quotes(v)
                    version_set = True
                if k == "expires":
                    # convert expires date to seconds since epoch
                    v = http2time(strip_quotes(v)) # None if invalid
            pairs.append((k, v))
        if pairs:
            if not version_set:
                # Default to the Netscape protocol version.
                pairs.append(("version", "0"))
            result.append(pairs)
    return result
def _test():
    """Run this module's doctests (the examples embedded in docstrings)."""
    import doctest, _headersutil
    return doctest.testmod(_headersutil)
# Running the module directly executes the doctests.
if __name__ == "__main__":
    _test()
|
geminy/aidear
|
refs/heads/master
|
oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/third_party/bintrees/bintrees/__init__.py
|
156
|
#!/usr/bin/env python
#coding:utf-8
# Author: mozman
# Purpose: binary trees package
# Created: 03.05.2010
# Copyright (c) 2010-2013 by Manfred Moitzi
# License: MIT License
from __future__ import absolute_import
__doc__ = """
Binary Tree Package
===================
Python Trees
------------
Balanced and unbalance binary trees written in pure Python with a dict-like API.
Classes
~~~~~~~
* BinaryTree -- unbalanced binary tree
* AVLTree -- balanced AVL-Tree
* RBTree -- balanced Red-Black-Tree
Cython Trees
------------
Basic tree functions written in Cython, merged with TreeMixin to provide the
full API of the Python Trees.
Classes
~~~~~~~
* FastBinaryTree -- unbalanced binary tree
* FastAVLTree -- balanced AVLTree
* FastRBTree -- balanced Red-Black-Tree
Overview of API for all Classes
===============================
* TreeClass ([compare]) -> new empty tree.
* TreeClass(mapping, [compare]) -> new tree initialized from a mapping
* TreeClass(seq, [compare]) -> new tree initialized from seq [(k1, v1), (k2, v2), ... (kn, vn)]
Methods
-------
* __contains__(k) -> True if T has a key k, else False, O(log(n))
* __delitem__(y) <==> del T[y], O(log(n))
* __getitem__(y) <==> T[y], O(log(n))
* __iter__() <==> iter(T)
* __len__() <==> len(T), O(1)
* __max__() <==> max(T), get max item (k,v) of T, O(log(n))
* __min__() <==> min(T), get min item (k,v) of T, O(log(n))
* __and__(other) <==> T & other, intersection
* __or__(other) <==> T | other, union
* __sub__(other) <==> T - other, difference
* __xor__(other) <==> T ^ other, symmetric_difference
* __repr__() <==> repr(T)
* __setitem__(k, v) <==> T[k] = v, O(log(n))
* clear() -> None, Remove all items from T, , O(n)
* copy() -> a shallow copy of T, O(n*log(n))
* discard(k) -> None, remove k from T, if k is present, O(log(n))
* get(k[,d]) -> T[k] if k in T, else d, O(log(n))
* is_empty() -> True if len(T) == 0, O(1)
* items([reverse]) -> list of T's (k, v) pairs, as 2-tuples, O(n)
* keys([reverse]) -> list of T's keys, O(n)
* pop(k[,d]) -> v, remove specified key and return the corresponding value, O(log(n))
* popitem() -> (k, v), remove and return some (key, value) pair as a 2-tuple, O(log(n))
* setdefault(k[,d]) -> T.get(k, d), also set T[k]=d if k not in T, O(log(n))
* update(E) -> None. Update T from dict/iterable E, O(E*log(n))
* values([reverse]) -> list of T's values, O(n)
walk forward/backward, O(log(n))
* prev_item(key) -> get (k, v) pair, where k is predecessor to key, O(log(n))
* prev_key(key) -> k, get the predecessor of key, O(log(n))
* succ_item(key) -> get (k,v) pair as a 2-tuple, where k is successor to key, O(log(n))
* succ_key(key) -> k, get the successor of key, O(log(n))
slicing by keys
* itemslice(s, e) -> generator for (k, v) items of T for s <= key < e, O(n)
* keyslice(s, e) -> generator for keys of T for s <= key < e, O(n)
* valueslice(s, e) -> generator for values of T for s <= key < e, O(n)
* T[s:e] -> TreeSlice object, with keys in range s <= key < e, O(n)
* del T[s:e] -> remove items by key slicing, for s <= key < e, O(n)
if 's' is None or T[:e] TreeSlice/iterator starts with value of min_key()
if 'e' is None or T[s:] TreeSlice/iterator ends with value of max_key()
T[:] is a TreeSlice which represents the whole tree.
TreeSlice is a tree wrapper with range check, and contains no references
to objects, deleting objects in the associated tree also deletes the object
in the TreeSlice.
* TreeSlice[k] -> get value for key k, raises KeyError if k not exists in range s:e
* TreeSlice[s1:e1] -> TreeSlice object, with keys in range s1 <= key < e1
* new lower bound is max(s, s1)
* new upper bound is min(e, e1)
TreeSlice methods:
* items() -> generator for (k, v) items of T, O(n)
* keys() -> generator for keys of T, O(n)
* values() -> generator for values of T, O(n)
* __iter__ <==> keys()
* __repr__ <==> repr(T)
* __contains__(key)-> True if TreeSlice has a key k, else False, O(log(n))
Heap methods
* max_item() -> get biggest (key, value) pair of T, O(log(n))
* max_key() -> get biggest key of T, O(log(n))
* min_item() -> get smallest (key, value) pair of T, O(log(n))
* min_key() -> get smallest key of T, O(log(n))
* pop_min() -> (k, v), remove item with minimum key, O(log(n))
* pop_max() -> (k, v), remove item with maximum key, O(log(n))
* nlargest(i[,pop]) -> get list of i largest items (k, v), O(i*log(n))
* nsmallest(i[,pop]) -> get list of i smallest items (k, v), O(i*log(n))
Set methods (using frozenset)
* intersection(t1, t2, ...) -> Tree with keys *common* to all trees
* union(t1, t2, ...) -> Tree with keys from *either* trees
* difference(t1, t2, ...) -> Tree with keys in T but not any of t1, t2, ...
* symmetric_difference(t1) -> Tree with keys in either T and t1 but not both
* issubset(S) -> True if every element in T is in S
* issuperset(S) -> True if every element in S is in T
* isdisjoint(S) -> True if T has a null intersection with S
Classmethods
* fromkeys(S[,v]) -> New tree with keys from S and values equal to v.
"""
# Public API: pure-Python tree classes plus Fast* aliases that prefer the
# Cython/C implementations when those extension modules are importable.
__all__ = [
    'FastBinaryTree',
    'FastAVLTree',
    'FastRBTree',
    'BinaryTree',
    'AVLTree',
    'RBTree'
]
from .treemixin import TreeMixin
from .bintree import BinaryTree
from .avltree import AVLTree
from .rbtree import RBTree
# Each Fast* class wraps the compiled Cython core with the shared TreeMixin
# API; if the extension is missing (or rejected, e.g. on PyPy) the name is
# aliased to the pure-Python implementation so callers never notice.
try:
    from .qbintree import cBinaryTree
    class FastBinaryTree(cBinaryTree, TreeMixin):
        """ Faster unbalanced binary tree written in Cython with C-Code. """
except ImportError: # fall back to pure Python version
    FastBinaryTree = BinaryTree
except ValueError: # for pypy
    FastBinaryTree = BinaryTree
try:
    from .qavltree import cAVLTree
    class FastAVLTree(cAVLTree, TreeMixin):
        """ Faster balanced AVL-Tree written in Cython with C-Code. """
except ImportError: # fall back to pure Python version
    FastAVLTree = AVLTree
except ValueError: # for pypy
    FastAVLTree = AVLTree
try:
    from .qrbtree import cRBTree
    class FastRBTree(cRBTree, TreeMixin):
        """ Faster balanced Red-Black-Tree written in Cython with C-Code. """
except ImportError: # fall back to pure Python version
    FastRBTree = RBTree
except ValueError: # for pypy
    FastRBTree = RBTree
|
cysp/gyp
|
refs/heads/master
|
test/rules-rebuild/src/make-sources.py
|
337
|
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
# Expect exactly three arguments: input file, output .c file, output .h file.
assert len(sys.argv) == 4, sys.argv
(in_file, c_file, h_file) = sys.argv[1:]

def write_file(filename, contents):
    """Write *contents* to *filename*, closing the handle deterministically."""
    # The original left the handle to be closed by GC; use a context manager.
    with open(filename, 'wb') as out:
        out.write(contents)

# Copy the input file verbatim as the generated .c source.
with open(in_file, 'rb') as src:
    write_file(c_file, src.read())
# Emit a header recording the input file's name.
write_file(h_file, '#define NAME "%s"\n' % in_file)
sys.exit(0)
|
nelsonvarela/mollie-api-python
|
refs/heads/master
|
examples/app.py
|
2
|
import os
import flask
app = flask.Flask(__name__)
# Names of the runnable example modules; each corresponds to <name>.py next
# to this file and is exposed as a URL path by run_example() below.
examples = [
    '1-new-payment',
    '2-webhook-verification',
    '3-return-page',
    '4-ideal-payment',
    '5-payments-history',
    '6-list-activated-methods'
]
@app.route('/')
def show_list():
    """Render an index page with one link per runnable example."""
    # Single join instead of quadratic string concatenation in a loop;
    # output is byte-identical to the original.
    return ''.join(
        '<a href="/%s">%s</a><br>' % (example, example) for example in examples
    )
@app.route('/<example>', methods=['GET', 'POST'])
def run_example(example=None):
    """Import the requested example module and run its main()."""
    # Whitelist check: only modules listed in `examples` may be imported,
    # so the dynamic __import__ below cannot load arbitrary modules.
    if example not in examples:
        flask.abort(404, 'Example does not exist')
    return __import__(example).main()
if __name__ == "__main__":
app.debug = True
app.run()
#
# NOTE: This example uses a plain txt file as a "database". Please use a real database like MySQL in production.
#
def database_write(order_nr, status):
    """Persist *status* for *order_nr* in the one-file-per-order txt "db"."""
    order_nr = int(order_nr)
    path = os.path.dirname(__file__) + "/orders/order-%s.txt" % order_nr
    # Context manager flushes and closes promptly; the original leaked the
    # open handle until garbage collection.
    with open(path, 'w') as database:
        database.write(status)
def database_read(order_nr):
    """Return the stored status string for *order_nr*.

    Raises IOError/FileNotFoundError when no order file exists.
    """
    order_nr = int(order_nr)
    path = os.path.dirname(__file__) + "/orders/order-%s.txt" % order_nr
    # Close the handle deterministically (original leaked it).
    with open(path, 'r') as database:
        return database.read()
|
anryko/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/amazon/aws_config_rule.py
|
10
|
#!/usr/bin/python
# Copyright: (c) 2018, Aaron Smith <ajsmith10381@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aws_config_rule
short_description: Manage AWS Config resources
description:
- Module manages AWS Config rules
version_added: "2.6"
requirements: [ 'botocore', 'boto3' ]
author:
- "Aaron Smith (@slapula)"
options:
name:
description:
- The name of the AWS Config resource.
required: true
type: str
state:
description:
- Whether the Config rule should be present or absent.
default: present
choices: ['present', 'absent']
type: str
description:
description:
- The description that you provide for the AWS Config rule.
type: str
scope:
description:
- Defines which resources can trigger an evaluation for the rule.
suboptions:
compliance_types:
description:
- The resource types of only those AWS resources that you want to trigger an evaluation for the rule.
You can only specify one type if you also specify a resource ID for I(compliance_id).
compliance_id:
description:
- The ID of the only AWS resource that you want to trigger an evaluation for the rule. If you specify a resource ID,
you must specify one resource type for I(compliance_types).
tag_key:
description:
- The tag key that is applied to only those AWS resources that you want to trigger an evaluation for the rule.
tag_value:
description:
- The tag value applied to only those AWS resources that you want to trigger an evaluation for the rule.
If you specify a value for I(tag_value), you must also specify a value for I(tag_key).
type: dict
source:
description:
- Provides the rule owner (AWS or customer), the rule identifier, and the notifications that cause the function to
evaluate your AWS resources.
suboptions:
owner:
description:
- The resource types of only those AWS resources that you want to trigger an evaluation for the rule.
You can only specify one type if you also specify a resource ID for I(compliance_id).
identifier:
description:
- The ID of the only AWS resource that you want to trigger an evaluation for the rule.
If you specify a resource ID, you must specify one resource type for I(compliance_types).
details:
description:
- Provides the source and type of the event that causes AWS Config to evaluate your AWS resources.
- This parameter expects a list of dictionaries. Each dictionary expects the following key/value pairs.
- Key `EventSource` The source of the event, such as an AWS service, that triggers AWS Config to evaluate your AWS resources.
- Key `MessageType` The type of notification that triggers AWS Config to run an evaluation for a rule.
- Key `MaximumExecutionFrequency` The frequency at which you want AWS Config to run evaluations for a custom rule with a periodic trigger.
type: dict
required: true
input_parameters:
description:
- A string, in JSON format, that is passed to the AWS Config rule Lambda function.
type: str
execution_frequency:
description:
- The maximum frequency with which AWS Config runs evaluations for a rule.
choices: ['One_Hour', 'Three_Hours', 'Six_Hours', 'Twelve_Hours', 'TwentyFour_Hours']
type: str
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- name: Create Config Rule for AWS Config
aws_config_rule:
name: test_config_rule
state: present
description: 'This AWS Config rule checks for public write access on S3 buckets'
scope:
compliance_types:
- 'AWS::S3::Bucket'
source:
owner: AWS
identifier: 'S3_BUCKET_PUBLIC_WRITE_PROHIBITED'
'''
RETURN = '''#'''
try:
import botocore
from botocore.exceptions import BotoCoreError, ClientError
except ImportError:
pass # handled by AnsibleAWSModule
from ansible.module_utils.aws.core import AnsibleAWSModule, is_boto3_error_code
from ansible.module_utils.ec2 import AWSRetry, camel_dict_to_snake_dict
def rule_exists(client, module, params):
    """Return the existing Config rule dict for params['ConfigRuleName'].

    Returns None when AWS reports NoSuchConfigRuleException; any other
    AWS error fails the module.
    """
    try:
        rule = client.describe_config_rules(
            ConfigRuleNames=[params['ConfigRuleName']],
            aws_retry=True,
        )
        return rule['ConfigRules'][0]
    except is_boto3_error_code('NoSuchConfigRuleException'):
        # Rule does not exist yet.
        return
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e)
def create_resource(client, module, params, result):
    """Create a new AWS Config rule from *params*; flag the result changed."""
    try:
        client.put_config_rule(ConfigRule=params)
        result['changed'] = True
        return result
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't create AWS Config rule")
def update_resource(client, module, params, result):
    """Push *params* to AWS when they differ from the current remote rule.

    Returns the updated result dict when a change was made; returns None
    implicitly when the remote rule already matches.
    """
    current_params = client.describe_config_rules(
        ConfigRuleNames=[params['ConfigRuleName']],
        aws_retry=True,
    )
    # ARN and ID are server-assigned, never part of the desired state.
    del current_params['ConfigRules'][0]['ConfigRuleArn']
    del current_params['ConfigRules'][0]['ConfigRuleId']
    if params != current_params['ConfigRules'][0]:
        try:
            client.put_config_rule(
                ConfigRule=params
            )
            result['changed'] = True
            result['rule'] = camel_dict_to_snake_dict(rule_exists(client, module, params))
            return result
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            # Bug fix: the message previously said "create" on the update path.
            module.fail_json_aws(e, msg="Couldn't update AWS Config rule")
def delete_resource(client, module, params, result):
    """Delete the Config rule named in *params* and clear result['rule']."""
    try:
        # Return value of delete_config_rule is not needed; the original
        # bound it to an unused local.
        client.delete_config_rule(
            ConfigRuleName=params['ConfigRuleName'],
            aws_retry=True,
        )
        result['changed'] = True
        result['rule'] = {}
        return result
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        module.fail_json_aws(e, msg="Couldn't delete AWS Config rule")
def main():
    """Ansible entry point: build the CamelCase PutConfigRule payload from
    module params and converge the rule to the requested state."""
    module = AnsibleAWSModule(
        argument_spec={
            'name': dict(type='str', required=True),
            'state': dict(type='str', choices=['present', 'absent'], default='present'),
            'description': dict(type='str'),
            'scope': dict(type='dict'),
            'source': dict(type='dict', required=True),
            'input_parameters': dict(type='str'),
            'execution_frequency': dict(
                type='str',
                choices=[
                    'One_Hour',
                    'Three_Hours',
                    'Six_Hours',
                    'Twelve_Hours',
                    'TwentyFour_Hours'
                ]
            ),
        },
        supports_check_mode=False,
    )
    result = {
        'changed': False
    }
    name = module.params.get('name')
    # NOTE(review): 'resource_type' is not in argument_spec, so this is
    # always None and never read — looks like dead code; confirm and remove.
    resource_type = module.params.get('resource_type')
    state = module.params.get('state')
    params = {}
    # Translate snake_case module options into the CamelCase keys the AWS
    # Config API expects; only set keys the user actually supplied.
    if name:
        params['ConfigRuleName'] = name
    if module.params.get('description'):
        params['Description'] = module.params.get('description')
    if module.params.get('scope'):
        params['Scope'] = {}
        if module.params.get('scope').get('compliance_types'):
            params['Scope'].update({
                'ComplianceResourceTypes': module.params.get('scope').get('compliance_types')
            })
        if module.params.get('scope').get('tag_key'):
            params['Scope'].update({
                'TagKey': module.params.get('scope').get('tag_key')
            })
        if module.params.get('scope').get('tag_value'):
            params['Scope'].update({
                'TagValue': module.params.get('scope').get('tag_value')
            })
        if module.params.get('scope').get('compliance_id'):
            params['Scope'].update({
                'ComplianceResourceId': module.params.get('scope').get('compliance_id')
            })
    if module.params.get('source'):
        params['Source'] = {}
        if module.params.get('source').get('owner'):
            params['Source'].update({
                'Owner': module.params.get('source').get('owner')
            })
        if module.params.get('source').get('identifier'):
            params['Source'].update({
                'SourceIdentifier': module.params.get('source').get('identifier')
            })
        if module.params.get('source').get('details'):
            params['Source'].update({
                'SourceDetails': module.params.get('source').get('details')
            })
    if module.params.get('input_parameters'):
        params['InputParameters'] = module.params.get('input_parameters')
    if module.params.get('execution_frequency'):
        params['MaximumExecutionFrequency'] = module.params.get('execution_frequency')
    # Rules managed by this module are always active.
    params['ConfigRuleState'] = 'ACTIVE'
    client = module.client('config', retry_decorator=AWSRetry.jittered_backoff())
    existing_rule = rule_exists(client, module, params)
    if state == 'present':
        if not existing_rule:
            create_resource(client, module, params, result)
        else:
            update_resource(client, module, params, result)
    if state == 'absent':
        if existing_rule:
            delete_resource(client, module, params, result)
    module.exit_json(**result)
|
rpsingh21/resultanalysis
|
refs/heads/master
|
resultAnalysis/result/api/serializer.py
|
1
|
from rest_framework.serializers import ModelSerializer,ValidationError
from result.models import (
ContactUs,
ReportBug,
ReportError,
)
class ContactUsSerializer(ModelSerializer):
    """Serializer for ContactUs submissions.

    Email addresses are restricted to a whitelist of well-known mail
    provider domains.
    """

    # Whitelisted email provider domains (set for O(1) membership tests).
    ALLOWED_DOMAINS = frozenset([
        "aol.com", "att.net", "comcast.net", "facebook.com", "gmail.com", "gmx.com", "googlemail.com",
        "google.com", "hotmail.com", "hotmail.co.uk", "mac.com", "me.com", "mail.com", "msn.com",
        "live.com", "sbcglobal.net", "verizon.net", "yahoo.com", "yahoo.co.uk",
        "email.com", "games.com", "gmx.net", "hush.com", "hushmail.com", "icloud.com", "inbox.com",
        "lavabit.com", "love.com", "outlook.com", "pobox.com", "rocketmail.com",
        "safe-mail.net", "wow.com", "ygm.com", "ymail.com", "zoho.com", "fastmail.fm",
        "yandex.com", "iname.com"])

    class Meta:
        model = ContactUs
        fields = [
            'name',
            'mobNo',
            'email',
            'comment',
        ]

    def validate_email(self, email):
        """Accept only addresses whose domain is whitelisted.

        Raises ValidationError for unknown domains and for malformed
        addresses with no "@" (the original crashed with IndexError there).
        """
        parts = email.split("@")
        if len(parts) < 2 or parts[1] not in self.ALLOWED_DOMAINS:
            raise ValidationError('Invalid email address')
        return email
class ReportBugSerializer(ModelSerializer):
    """Serializer for bug reports; email domains restricted to a whitelist."""

    # Whitelisted email provider domains (set for O(1) membership tests).
    ALLOWED_DOMAINS = frozenset([
        "aol.com", "att.net", "comcast.net", "facebook.com", "gmail.com", "gmx.com", "googlemail.com",
        "google.com", "hotmail.com", "hotmail.co.uk", "mac.com", "me.com", "mail.com", "msn.com",
        "live.com", "sbcglobal.net", "verizon.net", "yahoo.com", "yahoo.co.uk",
        "email.com", "games.com", "gmx.net", "hush.com", "hushmail.com", "icloud.com", "inbox.com",
        "lavabit.com", "love.com", "outlook.com", "pobox.com", "rocketmail.com",
        "safe-mail.net", "wow.com", "ygm.com", "ymail.com", "zoho.com", "fastmail.fm",
        "yandex.com", "iname.com"])

    class Meta:
        model = ReportBug
        fields = [
            'email',
            'description',
        ]

    def validate_email(self, email):
        """Accept only addresses whose domain is whitelisted.

        Raises ValidationError for unknown domains and for malformed
        addresses with no "@" (the original crashed with IndexError there).
        """
        parts = email.split("@")
        if len(parts) < 2 or parts[1] not in self.ALLOWED_DOMAINS:
            raise ValidationError('Invalid email address')
        return email
class ReportErrorSerializer(ModelSerializer):
    """Serializer for user-submitted result-error reports."""
    class Meta:
        # Only the roll number and the offending URL are accepted.
        model = ReportError
        fields = [
            'rollNo',
            'url',
        ]
|
surajssd/kuma
|
refs/heads/master
|
vendor/packages/logilab/common/date.py
|
89
|
# copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Date manipulation helper functions."""
from __future__ import division
__docformat__ = "restructuredtext en"
import math
import re
import sys
from locale import getlocale, LC_TIME
from datetime import date, time, datetime, timedelta
from time import strptime as time_strptime
from calendar import monthrange, timegm
from six.moves import range
try:
from mx.DateTime import RelativeDateTime, Date, DateTimeType
except ImportError:
endOfMonth = None
DateTimeType = datetime
else:
endOfMonth = RelativeDateTime(months=1, day=-1)
# NOTE: should we implement a compatibility layer between date representations
# as we have in lgc.db ?
# French public holidays with a fixed calendar date; values are '%s-MM-DD'
# templates instantiated with a concrete year in get_national_holidays().
FRENCH_FIXED_HOLIDAYS = {
    'jour_an': '%s-01-01',
    'fete_travail': '%s-05-01',
    'armistice1945': '%s-05-08',
    'fete_nat': '%s-07-14',
    'assomption': '%s-08-15',
    'toussaint': '%s-11-01',
    'armistice1918': '%s-11-11',
    'noel': '%s-12-25',
}
# Easter-relative holidays move every year, so they are enumerated
# explicitly per year (coverage stops at 2012).
FRENCH_MOBILE_HOLIDAYS = {
    'paques2004': '2004-04-12',
    'ascension2004': '2004-05-20',
    'pentecote2004': '2004-05-31',
    'paques2005': '2005-03-28',
    'ascension2005': '2005-05-05',
    'pentecote2005': '2005-05-16',
    'paques2006': '2006-04-17',
    'ascension2006': '2006-05-25',
    'pentecote2006': '2006-06-05',
    'paques2007': '2007-04-09',
    'ascension2007': '2007-05-17',
    'pentecote2007': '2007-05-28',
    'paques2008': '2008-03-24',
    'ascension2008': '2008-05-01',
    'pentecote2008': '2008-05-12',
    'paques2009': '2009-04-13',
    'ascension2009': '2009-05-21',
    'pentecote2009': '2009-06-01',
    'paques2010': '2010-04-05',
    'ascension2010': '2010-05-13',
    'pentecote2010': '2010-05-24',
    'paques2011': '2011-04-25',
    'ascension2011': '2011-06-02',
    'pentecote2011': '2011-06-13',
    'paques2012': '2012-04-09',
    'ascension2012': '2012-05-17',
    'pentecote2012': '2012-05-28',
}
# XXX this implementation cries for multimethod dispatching
def get_step(dateobj, nbdays=1):
    """Return the iteration stride covering *nbdays* for *dateobj*'s type."""
    # stdlib dates step by timedelta; mx.DateTime accepts plain integers
    if isinstance(dateobj, date):
        return nbdays * ONEDAY
    return nbdays
def datefactory(year, month, day, sampledate):
    """Build a date-like object of the same type as *sampledate*."""
    # datetime must be tested first: it is a subclass of date
    if isinstance(sampledate, datetime):
        return datetime(year, month, day)
    elif isinstance(sampledate, date):
        return date(year, month, day)
    # otherwise assume an mx.DateTime sample
    return Date(year, month, day)
def weekday(dateobj):
    """Day of week, Monday == 0 ... Sunday == 6, for stdlib and mx dates."""
    return dateobj.weekday() if isinstance(dateobj, date) else dateobj.day_of_week
def str2date(datestr, sampledate):
    """Parse a 'YYYY-MM-DD' string into *sampledate*'s date type."""
    # NOTE: datetime.strptime is not an option until we drop py2.4 compat
    year, month, day = (int(chunk) for chunk in datestr.split('-'))
    return datefactory(year, month, day, sampledate)
def days_between(start, end):
    """Number of days from *start* to *end*; partial trailing days count."""
    if isinstance(start, date):
        delta = end - start
        # timedelta.days is floored, so a non-zero clock remainder adds one
        return delta.days + 1 if delta.seconds else delta.days
    # mx.DateTime deltas expose fractional .days
    return int(math.ceil((end - start).days))
def get_national_holidays(begin, end):
    """Return french national days off in the half-open range [begin, end)."""
    begin = datefactory(begin.year, begin.month, begin.day, begin)
    end = datefactory(end.year, end.month, end.day, end)
    holidays = [str2date(datestr, begin)
                for datestr in FRENCH_MOBILE_HOLIDAYS.values()]
    for year in range(begin.year, end.year + 1):
        for datestr in FRENCH_FIXED_HOLIDAYS.values():
            # renamed from `date`: the original shadowed the imported
            # datetime.date class inside this loop
            holiday = str2date(datestr % year, begin)
            if holiday not in holidays:
                holidays.append(holiday)
    return [day for day in holidays if begin <= day < end]
def add_days_worked(start, days):
    """adds date but try to only take days worked into account"""
    step = get_step(start)
    # 5 worked days span a full 7-day calendar week
    weeks, plus = divmod(days, 5)
    end = start + ((weeks * 7) + plus) * step
    if weekday(end) >= 5: # saturday or sunday
        end += (2 * step)
    # push the end further for every holiday falling on a week day in range
    end += len([x for x in get_national_holidays(start, end + step)
                if weekday(x) < 5]) * step
    if weekday(end) >= 5: # saturday or sunday
        end += (2 * step)
    return end
def nb_open_days(start, end):
    """Count business days between *start* and *end*, excluding week-ends
    and French national holidays falling on a week day."""
    assert start <= end
    step = get_step(start)
    days = days_between(start, end)
    # every full week contributes exactly 5 open days ...
    weeks, plus = divmod(days, 7)
    # ... and the remainder needs a week-end correction depending on where
    # it falls relative to the week boundaries
    if weekday(start) > weekday(end):
        plus -= 2
    elif weekday(end) == 6:
        plus -= 1
    open_days = weeks * 5 + plus
    nb_week_holidays = len([x for x in get_national_holidays(start, end+step)
                            if weekday(x) < 5 and x < end])
    open_days -= nb_week_holidays
    if open_days < 0:
        return 0
    return open_days
def date_range(begin, end, incday=None, incmonth=None):
    """yields each date between begin and end

    :param begin: the start date
    :param end: the end date (exclusive)
    :param incday: number of days between two yielded dates (default: 1);
      mutually exclusive with incmonth
    :param incmonth: if set, iterate month-wise by this many months
    When using mx datetime, you should *NOT* use incmonth argument, use instead
    oneDay, oneHour, oneMinute, oneSecond, oneWeek or endOfMonth (to enumerate
    months) as `incday` argument
    """
    assert not (incday and incmonth)
    begin = todate(begin)
    end = todate(end)
    if incmonth:
        while begin < end:
            yield begin
            begin = next_month(begin, incmonth)
    else:
        incr = get_step(begin, incday or 1)
        while begin < end:
            yield begin
            begin += incr
# makes py datetime usable #####################################################
# Convenience deltas for stdlib date arithmetic.
ONEDAY = timedelta(days=1)
ONEWEEK = timedelta(days=7)
try:
    strptime = datetime.strptime
except AttributeError: # py < 2.5
    from time import strptime as time_strptime
    def strptime(value, format):
        # rebuild a datetime from the first six struct_time fields
        return datetime(*time_strptime(value, format)[:6])
def strptime_time(value, format='%H:%M'):
    """Parse *value* according to *format* and return a datetime.time."""
    hour, minute, second = time_strptime(value, format)[3:6]
    return time(hour, minute, second)
def todate(somedate):
    """Return a plain date: datetimes are truncated, dates pass through."""
    if isinstance(somedate, datetime):
        year, month, day = somedate.year, somedate.month, somedate.day
        return date(year, month, day)
    assert isinstance(somedate, (date, DateTimeType)), repr(somedate)
    return somedate
def totime(somedate):
    """Return a time: times pass through, date-likes keep their clock part."""
    # XXX mx compat
    if isinstance(somedate, time):
        return somedate
    return time(somedate.hour, somedate.minute, somedate.second)
def todatetime(somedate):
    """Promote *somedate* to a datetime (midnight for plain dates)."""
    # take care, datetime is a subclass of date — test it first
    if isinstance(somedate, datetime):
        return somedate
    assert isinstance(somedate, (date, DateTimeType)), repr(somedate)
    y, m, d = somedate.year, somedate.month, somedate.day
    return datetime(y, m, d)
def datetime2ticks(somedate):
    """Milliseconds since the UNIX epoch, treating *somedate* as UTC."""
    seconds = timegm(somedate.timetuple())
    return 1000 * seconds
def ticks2datetime(ticks):
miliseconds, microseconds = divmod(ticks, 1000)
try:
return datetime.fromtimestamp(miliseconds)
except (ValueError, OverflowError):
epoch = datetime.fromtimestamp(0)
nb_days, seconds = divmod(int(miliseconds), 86400)
delta = timedelta(nb_days, seconds=seconds, microseconds=microseconds)
try:
return epoch + delta
except (ValueError, OverflowError):
raise
def days_in_month(somedate):
    """Number of days in *somedate*'s month."""
    _, last = monthrange(somedate.year, somedate.month)
    return last
def days_in_year(somedate):
    """366 for leap years, otherwise 365."""
    # February's length decides leapness (inlines days_in_month)
    return 366 if monthrange(somedate.year, 2)[1] == 29 else 365
def previous_month(somedate, nbmonth=1):
    """Step back *nbmonth* months, landing on the last day of that month."""
    while nbmonth:
        # day before the 1st == last day of the previous month
        somedate = date(somedate.year, somedate.month, 1) - timedelta(days=1)
        nbmonth -= 1
    return somedate
def next_month(somedate, nbmonth=1):
    """Step forward *nbmonth* months, landing on the 1st of that month."""
    while nbmonth:
        # day after the last day of this month == 1st of the next month
        month_end = date(somedate.year, somedate.month,
                         monthrange(somedate.year, somedate.month)[1])
        somedate = month_end + timedelta(days=1)
        nbmonth -= 1
    return somedate
def first_day(somedate):
    """First day of *somedate*'s month, always as a plain date."""
    year, month = somedate.year, somedate.month
    return date(year, month, 1)
def last_day(somedate):
    """Last day of *somedate*'s month, always as a plain date."""
    n_days = monthrange(somedate.year, somedate.month)[1]
    return date(somedate.year, somedate.month, n_days)
def ustrftime(somedate, fmt='%Y-%m-%d'):
    """like strftime, but returns a unicode string instead of an encoded
    string which may be problematic with localized date.
    """
    if sys.version_info >= (3, 3):
        # datetime.date.strftime() supports dates since year 1 in Python >=3.3.
        return somedate.strftime(fmt)
    else:
        try:
            if sys.version_info < (3, 0):
                # Python 2: strftime returns bytes; decode with the locale's
                # LC_TIME encoding (fall back to ascii when undetermined).
                encoding = getlocale(LC_TIME)[1] or 'ascii'
                return unicode(somedate.strftime(str(fmt)), encoding)
            else:
                return somedate.strftime(fmt)
        except ValueError:
            if somedate.year >= 1900:
                raise
            # datetime is not happy with dates before 1900
            # we try to work around this, assuming a simple
            # format string
            fields = {'Y': somedate.year,
                      'm': somedate.month,
                      'd': somedate.day,
                      }
            if isinstance(somedate, datetime):
                fields.update({'H': somedate.hour,
                               'M': somedate.minute,
                               'S': somedate.second})
            # rewrite e.g. '%Y' as '%(Y)02d' and fill from the dict above
            fmt = re.sub('%([YmdHMS])', r'%(\1)02d', fmt)
            return unicode(fmt) % fields
def utcdatetime(dt):
    """Convert an aware datetime to naive UTC; naive values pass through."""
    if dt.tzinfo is None:
        return dt
    offset = dt.utcoffset()
    return dt.replace(tzinfo=None) - offset
def utctime(dt):
    """Shift an aware datetime by its UTC offset and DST, dropping tzinfo.

    Naive values are returned unchanged. NOTE(review): this *adds* the
    offset (mx-era semantics) rather than subtracting it — kept as-is.
    """
    if dt.tzinfo is None:
        return dt
    shifted = dt + dt.utcoffset() + dt.dst()
    return shifted.replace(tzinfo=None)
def datetime_to_seconds(date):
    """Seconds elapsed since the beginning of the day for *date*."""
    return 3600 * date.hour + 60 * date.minute + date.second
def timedelta_to_days(delta):
    """Return *delta* as a (possibly fractional) number of days.

    The original docstring wrongly said "seconds" (it was swapped with
    timedelta_to_seconds); the code returns days. Microseconds are ignored.
    """
    return delta.days + delta.seconds / (3600*24)
def timedelta_to_seconds(delta):
    """Return *delta* as a whole number of seconds.

    The original docstring wrongly said "fraction of days" (it was swapped
    with timedelta_to_days); the code returns seconds, ignoring microseconds.
    """
    return delta.days*(3600*24) + delta.seconds
|
bhcopeland/ansible-modules-extras
|
refs/heads/devel
|
network/illumos/dladm_etherstub.py
|
29
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Adam Števko <adam.stevko@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: dladm_etherstub
short_description: Manage etherstubs on Solaris/illumos systems.
description:
- Create or delete etherstubs on Solaris/illumos systems.
version_added: "2.2"
author: Adam Števko (@xen0l)
options:
name:
description:
- Etherstub name.
required: true
temporary:
description:
- Specifies that the etherstub is temporary. Temporary etherstubs
do not persist across reboots.
required: false
default: false
choices: [ "true", "false" ]
state:
description:
- Create or delete Solaris/illumos etherstub.
required: false
default: "present"
choices: [ "present", "absent" ]
'''
EXAMPLES = '''
# Create 'stub0' etherstub
dladm_etherstub: name=stub0 state=present
# Remove 'stub0 etherstub
dladm_etherstub: name=stub0 state=absent
'''
RETURN = '''
name:
description: etherstub name
returned: always
type: string
sample: "switch0"
state:
description: state of the target
returned: always
type: string
sample: "present"
temporary:
description: etherstub's persistence
returned: always
type: boolean
sample: "True"
'''
class Etherstub(object):
    """Thin wrapper around the illumos dladm(1M) etherstub subcommands."""

    def __init__(self, module):
        self.module = module
        self.name = module.params['name']
        self.temporary = module.params['temporary']
        self.state = module.params['state']

    def _stub_cmd(self, subcommand):
        """Assemble a dladm command line for *subcommand* on this stub."""
        cmd = [self.module.get_bin_path('dladm', True), subcommand]
        if self.temporary:
            cmd.append('-t')
        cmd.append(self.name)
        return cmd

    def etherstub_exists(self):
        """True when `dladm show-etherstub <name>` exits successfully."""
        dladm = self.module.get_bin_path('dladm', True)
        rc = self.module.run_command([dladm, 'show-etherstub', self.name])[0]
        return rc == 0

    def create_etherstub(self):
        """Create the etherstub, optionally as a temporary (-t) link."""
        return self.module.run_command(self._stub_cmd('create-etherstub'))

    def delete_etherstub(self):
        """Delete the etherstub, honouring the temporary (-t) flag."""
        return self.module.run_command(self._stub_cmd('delete-etherstub'))
def main():
    """Ansible entry point: converge the etherstub to the requested state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            temporary=dict(default=False, type='bool'),
            state=dict(default='present', choices=['absent', 'present']),
        ),
        supports_check_mode=True
    )
    etherstub = Etherstub(module)
    # rc stays None when no dladm command had to run (idempotent no-op).
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = etherstub.name
    result['state'] = etherstub.state
    result['temporary'] = etherstub.temporary
    if etherstub.state == 'absent':
        if etherstub.etherstub_exists():
            # check mode reports the would-be change without running dladm
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = etherstub.delete_etherstub()
            if rc != 0:
                module.fail_json(name=etherstub.name, msg=err, rc=rc)
    elif etherstub.state == 'present':
        if not etherstub.etherstub_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = etherstub.create_etherstub()
        if rc is not None and rc != 0:
            module.fail_json(name=etherstub.name, msg=err, rc=rc)
    # changed iff a dladm command was actually executed
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True
    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err
    module.exit_json(**result)
from ansible.module_utils.basic import *
main()
|
birdonwheels5/p2pool-myrScrypt
|
refs/heads/master
|
p2pool/test/util/test_datachunker.py
|
287
|
import random
import unittest
from p2pool.util import datachunker
def random_bytes(length):
    # Python 2 code: build a byte string of `length` uniformly random octets.
    return ''.join(chr(random.randrange(2**8)) for i in xrange(length))
class Test(unittest.TestCase):
    def test_stringbuffer(self):
        # Round-trip test: feed random data into the StringBuffer in
        # random-sized chunks, then drain it in random-sized chunks and
        # check the bytes come back unchanged and in order.
        for i in xrange(100):
            sb = datachunker.StringBuffer()
            r = random_bytes(random.randrange(1000))
            amount_inserted = 0
            while amount_inserted < len(r):
                x = random.randrange(10)
                sb.add(r[amount_inserted:amount_inserted+x])
                amount_inserted += x
            amount_removed = 0
            while amount_removed < len(r):
                # never request more than what remains (+1 so x may be 0)
                x = random.randrange(min(10, len(r) - amount_removed) + 1)
                this = sb.get(x)
                assert r[amount_removed:amount_removed+x] == this
                amount_removed += x
|
bitcrystal/bitcrystal_v20
|
refs/heads/master
|
contrib/testgen/base58.py
|
2139
|
'''
Bitcoin base58 encoding and decoding.
Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
# for compatibility with following code...
class SHA256:
    # Thin namespace mimicking the old PyCrypto SHA256.new() interface.
    new = hashlib.sha256
if str != bytes:
    # Python 3.x: bytes iterate as ints and are built from int tuples, so
    # shadow ord/chr to keep the Python-2-oriented code below unchanged.
    def ord(c):
        return c
    def chr(n):
        return bytes( (n,) )
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars

def b58encode(v):
    """ encode v, which is a string of bytes, to base58.
    """
    # interpret v as a big-endian integer (Horner's scheme)
    long_value = 0
    for c in v:
        long_value = long_value * 256 + ord(c)
    # peel off base-58 digits, least significant first
    digits = []
    while long_value >= __b58base:
        long_value, mod = divmod(long_value, __b58base)
        digits.append(__b58chars[mod])
    digits.append(__b58chars[long_value])
    result = ''.join(reversed(digits))
    # Bitcoin does a little leading-zero-compression:
    # leading 0-bytes in the input become leading-1s
    nPad = 0
    for c in v:
        if c != '\0':
            break
        nPad += 1
    return (__b58chars[0]*nPad) + result
def b58decode(v, length = None):
    """ decode v into a string of len bytes
    """
    # accumulate the base-58 value most-significant digit first
    long_value = 0
    for c in v:
        long_value = long_value * __b58base + __b58chars.find(c)
    # peel off bytes, least significant first
    chunks = []
    while long_value >= 256:
        long_value, mod = divmod(long_value, 256)
        chunks.append(chr(mod))
    chunks.append(chr(long_value))
    result = bytes().join(reversed(chunks))
    # leading '1' characters decode to leading zero bytes
    nPad = 0
    for c in v:
        if c != __b58chars[0]:
            break
        nPad += 1
    result = chr(0)*nPad + result
    if length is not None and len(result) != length:
        return None
    return result
def checksum(v):
    """Return 32-bit checksum based on SHA256"""
    inner = SHA256.new(v).digest()
    return SHA256.new(inner).digest()[:4]
def b58encode_chk(v):
    """b58encode a string, with 32-bit checksum"""
    # checksum is appended to the payload before encoding
    return b58encode(v + checksum(v))
def b58decode_chk(v):
    """decode a base58 string, check and remove checksum

    Returns the payload without its trailing 4-byte checksum, or None when
    decoding fails or the checksum does not match.
    """
    result = b58decode(v)
    if result is None:
        return None
    # compute the payload checksum once (the original computed it twice and
    # left the first result in an unused local)
    if result[-4:] == checksum(result[:-4]):
        return result[:-4]
    return None
def get_bcaddress_version(strAddress):
    """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """
    addr = b58decode_chk(strAddress)
    # a valid payload is exactly 21 bytes: 1 version byte + 20-byte hash
    if addr is None or len(addr) != 21:
        return None
    return ord(addr[0])
if __name__ == '__main__':
    # Test case (from http://gitorious.org/bitcoin/python-base58.git)
    # Bug fix: compare ints with == rather than `is` — identity of small
    # ints is a CPython caching detail (SyntaxWarning on Python >= 3.8).
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
    _ohai = 'o hai'.encode('ascii')
    _tmp = b58encode(_ohai)
    assert _tmp == 'DYB3oMS'
    assert b58decode(_tmp, 5) == _ohai
    print("Tests passed")
|
sballesteros/node-gyp
|
refs/heads/master
|
gyp/test/hello/gyptest-target.py
|
351
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simplest-possible build of a "Hello, world!" program
using an explicit build target of 'hello'.
"""
import TestGyp
test = TestGyp.TestGyp(workdir='workarea_target')
test.run_gyp('hello.gyp')
# Build only the explicit 'hello' target rather than the default "all".
test.build('hello.gyp', 'hello')
test.run_built_executable('hello', stdout="Hello, world!\n")
# An immediate rebuild must be a no-op.
test.up_to_date('hello.gyp', 'hello')
test.pass_test()
|
BeATz-UnKNoWN/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Tools/pynche/DetailsViewer.py
|
48
|
"""DetailsViewer class.
This class implements a pure input window which allows you to meticulously
edit the current color. You have both mouse control of the color (via the
buttons along the bottom row), and there are keyboard bindings for each of the
increment/decrement buttons.
The top three check buttons allow you to specify which of the three color
variations are tied together when incrementing and decrementing. Red, green,
and blue are self evident. By tying together red and green, you can modify
the yellow level of the color. By tying together red and blue, you can modify
the magenta level of the color. By tying together green and blue, you can
modify the cyan level, and by tying all three together, you can modify the
grey level.
The behavior at the boundaries (0 and 255) are defined by the `At boundary'
option menu:
Stop
When the increment or decrement would send any of the tied variations
out of bounds, the entire delta is discarded.
Wrap Around
When the increment or decrement would send any of the tied variations
out of bounds, the out of bounds variation is wrapped around to the
other side. Thus if red were at 238 and 25 were added to it, red
would have the value 7.
Preseve Distance
When the increment or decrement would send any of the tied variations
out of bounds, all tied variations are wrapped as one, so as to
preserve the distance between them. Thus if green and blue were tied,
and green was at 238 while blue was at 223, and an increment of 25
were applied, green would be at 15 and blue would be at 0.
Squash
When the increment or decrement would send any of the tied variations
out of bounds, the out of bounds variation is set to the ceiling of
255 or floor of 0, as appropriate. In this way, all tied variations
are squashed to one edge or the other.
The following key bindings can be used as accelerators. Note that Pynche can
fall behind if you hold the key down as a key repeat:
Left arrow == -1
Right arrow == +1
Control + Left == -10
Control + Right == 10
Shift + Left == -25
Shift + Right == +25
"""
from tkinter import *
STOP = 'Stop'
WRAP = 'Wrap Around'
RATIO = 'Preserve Distance'
GRAV = 'Squash'
ADDTOVIEW = 'Details Window...'
class DetailsViewer:
def __init__(self, switchboard, master=None):
self.__sb = switchboard
optiondb = switchboard.optiondb()
self.__red, self.__green, self.__blue = switchboard.current_rgb()
# GUI
root = self.__root = Toplevel(master, class_='Pynche')
root.protocol('WM_DELETE_WINDOW', self.withdraw)
root.title('Pynche Details Window')
root.iconname('Pynche Details Window')
root.bind('<Alt-q>', self.__quit)
root.bind('<Alt-Q>', self.__quit)
root.bind('<Alt-w>', self.withdraw)
root.bind('<Alt-W>', self.withdraw)
# accelerators
root.bind('<KeyPress-Left>', self.__minus1)
root.bind('<KeyPress-Right>', self.__plus1)
root.bind('<Control-KeyPress-Left>', self.__minus10)
root.bind('<Control-KeyPress-Right>', self.__plus10)
root.bind('<Shift-KeyPress-Left>', self.__minus25)
root.bind('<Shift-KeyPress-Right>', self.__plus25)
#
# color ties
frame = self.__frame = Frame(root)
frame.pack(expand=YES, fill=X)
self.__l1 = Label(frame, text='Move Sliders:')
self.__l1.grid(row=1, column=0, sticky=E)
self.__rvar = IntVar()
self.__rvar.set(optiondb.get('RSLIDER', 4))
self.__radio1 = Checkbutton(frame, text='Red',
variable=self.__rvar,
command=self.__effect,
onvalue=4, offvalue=0)
self.__radio1.grid(row=1, column=1, sticky=W)
self.__gvar = IntVar()
self.__gvar.set(optiondb.get('GSLIDER', 2))
self.__radio2 = Checkbutton(frame, text='Green',
variable=self.__gvar,
command=self.__effect,
onvalue=2, offvalue=0)
self.__radio2.grid(row=2, column=1, sticky=W)
self.__bvar = IntVar()
self.__bvar.set(optiondb.get('BSLIDER', 1))
self.__radio3 = Checkbutton(frame, text='Blue',
variable=self.__bvar,
command=self.__effect,
onvalue=1, offvalue=0)
self.__radio3.grid(row=3, column=1, sticky=W)
self.__l2 = Label(frame)
self.__l2.grid(row=4, column=1, sticky=W)
self.__effect()
#
# Boundary behavior
self.__l3 = Label(frame, text='At boundary:')
self.__l3.grid(row=5, column=0, sticky=E)
self.__boundvar = StringVar()
self.__boundvar.set(optiondb.get('ATBOUND', STOP))
self.__omenu = OptionMenu(frame, self.__boundvar,
STOP, WRAP, RATIO, GRAV)
self.__omenu.grid(row=5, column=1, sticky=W)
self.__omenu.configure(width=17)
#
# Buttons
frame = self.__btnframe = Frame(frame)
frame.grid(row=0, column=0, columnspan=2, sticky='EW')
self.__down25 = Button(frame, text='-25',
command=self.__minus25)
self.__down10 = Button(frame, text='-10',
command=self.__minus10)
self.__down1 = Button(frame, text='-1',
command=self.__minus1)
self.__up1 = Button(frame, text='+1',
command=self.__plus1)
self.__up10 = Button(frame, text='+10',
command=self.__plus10)
self.__up25 = Button(frame, text='+25',
command=self.__plus25)
self.__down25.pack(expand=YES, fill=X, side=LEFT)
self.__down10.pack(expand=YES, fill=X, side=LEFT)
self.__down1.pack(expand=YES, fill=X, side=LEFT)
self.__up1.pack(expand=YES, fill=X, side=LEFT)
self.__up10.pack(expand=YES, fill=X, side=LEFT)
self.__up25.pack(expand=YES, fill=X, side=LEFT)
def __effect(self, event=None):
tie = self.__rvar.get() + self.__gvar.get() + self.__bvar.get()
if tie in (0, 1, 2, 4):
text = ''
else:
text = '(= %s Level)' % {3: 'Cyan',
5: 'Magenta',
6: 'Yellow',
7: 'Grey'}[tie]
self.__l2.configure(text=text)
def __quit(self, event=None):
self.__root.quit()
def withdraw(self, event=None):
self.__root.withdraw()
def deiconify(self, event=None):
self.__root.deiconify()
def __minus25(self, event=None):
self.__delta(-25)
def __minus10(self, event=None):
self.__delta(-10)
def __minus1(self, event=None):
self.__delta(-1)
def __plus1(self, event=None):
self.__delta(1)
def __plus10(self, event=None):
self.__delta(10)
def __plus25(self, event=None):
self.__delta(25)
def __delta(self, delta):
tie = []
if self.__rvar.get():
red = self.__red + delta
tie.append(red)
else:
red = self.__red
if self.__gvar.get():
green = self.__green + delta
tie.append(green)
else:
green = self.__green
if self.__bvar.get():
blue = self.__blue + delta
tie.append(blue)
else:
blue = self.__blue
# now apply at boundary behavior
atbound = self.__boundvar.get()
if atbound == STOP:
if red < 0 or green < 0 or blue < 0 or \
red > 255 or green > 255 or blue > 255:
# then
red, green, blue = self.__red, self.__green, self.__blue
elif atbound == WRAP or (atbound == RATIO and len(tie) < 2):
if red < 0:
red += 256
if green < 0:
green += 256
if blue < 0:
blue += 256
if red > 255:
red -= 256
if green > 255:
green -= 256
if blue > 255:
blue -= 256
elif atbound == RATIO:
# for when 2 or 3 colors are tied together
dir = 0
for c in tie:
if c < 0:
dir = -1
elif c > 255:
dir = 1
if dir == -1:
delta = max(tie)
if self.__rvar.get():
red = red + 255 - delta
if self.__gvar.get():
green = green + 255 - delta
if self.__bvar.get():
blue = blue + 255 - delta
elif dir == 1:
delta = min(tie)
if self.__rvar.get():
red = red - delta
if self.__gvar.get():
green = green - delta
if self.__bvar.get():
blue = blue - delta
elif atbound == GRAV:
if red < 0:
red = 0
if green < 0:
green = 0
if blue < 0:
blue = 0
if red > 255:
red = 255
if green > 255:
green = 255
if blue > 255:
blue = 255
self.__sb.update_views(red, green, blue)
self.__root.update_idletasks()
def update_yourself(self, red, green, blue):
self.__red = red
self.__green = green
self.__blue = blue
def save_options(self, optiondb):
optiondb['RSLIDER'] = self.__rvar.get()
optiondb['GSLIDER'] = self.__gvar.get()
optiondb['BSLIDER'] = self.__bvar.get()
optiondb['ATBOUND'] = self.__boundvar.get()
|
sounak98/coala-bears
|
refs/heads/master
|
bears/c_languages/CSecurityBear.py
|
2
|
from coalib.bearlib.abstractions.Linter import linter
from coalib.bears.requirements.DistributionRequirement import (
DistributionRequirement)
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
@linter(executable='flawfinder',
output_format='regex',
output_regex=r'.+:(?P<line>\d+):(?P<column>\d+):\s*'
r'\[(?P<severity>\d)\]\s*'
r'\((?P<origin>.+)\) (?P<message>.+)',
severity_map={'1': RESULT_SEVERITY.INFO,
'2': RESULT_SEVERITY.INFO,
'3': RESULT_SEVERITY.NORMAL,
'4': RESULT_SEVERITY.NORMAL,
'5': RESULT_SEVERITY.MAJOR},
prerequisite_check_command=('flawfinder',),
prerequisite_check_fail_message=('Flawfinder needs to be run with '
'python2.'))
class CSecurityBear:
"""
Report possible security weaknesses for C/C++.
For more information, consult <http://www.dwheeler.com/flawfinder/>.
"""
LANGUAGES = {'C', 'C++'}
REQUIREMENTS = {DistributionRequirement(apt_get='flawfinder')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
ASCIINEMA_URL = 'https://asciinema.org/a/7z8ol9mpsgtuo1096c6jk8hi6'
CAN_DETECT = {'Security', 'Memory Leak', 'Code Simplification'}
@staticmethod
def create_arguments(filename, file, config_file):
return '--columns', '--dataonly', '--quiet', '--singleline', filename
|
liangjiaxing/sympy
|
refs/heads/master
|
sympy/combinatorics/tests/test_prufer.py
|
102
|
from sympy.combinatorics.prufer import Prufer
from sympy.utilities.pytest import raises
def test_prufer():
# number of nodes is optional
assert Prufer([[0, 1], [0, 2], [0, 3], [0, 4]], 5).nodes == 5
assert Prufer([[0, 1], [0, 2], [0, 3], [0, 4]]).nodes == 5
a = Prufer([[0, 1], [0, 2], [0, 3], [0, 4]])
assert a.rank == 0
assert a.nodes == 5
assert a.prufer_repr == [0, 0, 0]
a = Prufer([[2, 4], [1, 4], [1, 3], [0, 5], [0, 4]])
assert a.rank == 924
assert a.nodes == 6
assert a.tree_repr == [[2, 4], [1, 4], [1, 3], [0, 5], [0, 4]]
assert a.prufer_repr == [4, 1, 4, 0]
assert Prufer.edges([0, 1, 2, 3], [1, 4, 5], [1, 4, 6]) == \
([[0, 1], [1, 2], [1, 4], [2, 3], [4, 5], [4, 6]], 7)
assert Prufer([0]*4).size == Prufer([6]*4).size == 1296
# accept iterables but convert to list of lists
tree = [(0, 1), (1, 5), (0, 3), (0, 2), (2, 6), (4, 7), (2, 4)]
tree_lists = [list(t) for t in tree]
assert Prufer(tree).tree_repr == tree_lists
assert sorted(Prufer(set(tree)).tree_repr) == sorted(tree_lists)
raises(ValueError, lambda: Prufer([[1, 2], [3, 4]])) # 0 is missing
assert Prufer(*Prufer.edges([1, 2], [3, 4])).prufer_repr == [1, 3]
raises(ValueError, lambda: Prufer.edges(
[1, 3], [3, 4])) # a broken tree but edges doesn't care
raises(ValueError, lambda: Prufer.edges([1, 2], [5, 6]))
def test_round_trip():
def doit(t, b):
e, n = Prufer.edges(*t)
t = Prufer(e, n)
a = sorted(t.tree_repr)
b = [i - 1 for i in b]
assert t.prufer_repr == b
assert sorted(Prufer(b).tree_repr) == a
assert Prufer.unrank(t.rank, n).prufer_repr == b
doit([[1, 2]], [])
doit([[2, 1, 3]], [1])
doit([[1, 3, 2]], [3])
doit([[1, 2, 3]], [2])
doit([[2, 1, 4], [1, 3]], [1, 1])
doit([[3, 2, 1, 4]], [2, 1])
doit([[3, 2, 1], [2, 4]], [2, 2])
doit([[1, 3, 2, 4]], [3, 2])
doit([[1, 4, 2, 3]], [4, 2])
doit([[3, 1, 4, 2]], [4, 1])
doit([[4, 2, 1, 3]], [1, 2])
doit([[1, 2, 4, 3]], [2, 4])
doit([[1, 3, 4, 2]], [3, 4])
doit([[2, 4, 1], [4, 3]], [4, 4])
doit([[1, 2, 3, 4]], [2, 3])
doit([[2, 3, 1], [3, 4]], [3, 3])
doit([[1, 4, 3, 2]], [4, 3])
doit([[2, 1, 4, 3]], [1, 4])
doit([[2, 1, 3, 4]], [1, 3])
doit([[6, 2, 1, 4], [1, 3, 5, 8], [3, 7]], [1, 2, 1, 3, 3, 5])
|
stshine/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/old-tests/webdriver/timeouts/implicit_waits_tests.py
|
142
|
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
from selenium.common import exceptions
class ImplicitWaitsTests(base_test.WebDriverBaseTest):
def setUp(self):
self.driver.get(self.webserver.where_is('timeouts/res/implicit_waits_tests.html'))
def test_find_element_by_id(self):
add = self.driver.find_element_by_css_selector("#adder")
self.driver.implicitly_wait(3)
add.click()
self.driver.find_element_by_css_selector("#box0") # All is well if this doesn't throw.
def test_should_still_fail_to_find_an_element_when_implicit_waits_are_enabled(self):
self.driver.implicitly_wait(0.5)
try:
self.driver.find_element_by_css_selector("#box0")
self.fail("Expected NoSuchElementException to have been thrown")
except exceptions.NoSuchElementException as e:
pass
except Exception as e:
self.fail("Expected NoSuchElementException but got " + str(e))
def test_should_return_after_first_attempt_to_find_one_after_disabling_implicit_waits(self):
self.driver.implicitly_wait(3)
self.driver.implicitly_wait(0)
try:
self.driver.find_element_by_css_selector("#box0")
self.fail("Expected NoSuchElementException to have been thrown")
except exceptions.NoSuchElementException as e:
pass
except Exception as e:
self.fail("Expected NoSuchElementException but got " + str(e))
def test_should_implicitly_wait_until_at_least_one_element_is_found_when_searching_for_many(self):
add = self.driver.find_element_by_css_selector("#adder")
self.driver.implicitly_wait(2)
add.click()
add.click()
elements = self.driver.find_elements_by_css_selector(".redbox")
self.assertTrue(len(elements) >= 1)
def test_should_still_fail_to_find_an_element_by_class_when_implicit_waits_are_enabled(self):
self.driver.implicitly_wait(0.5)
elements = self.driver.find_elements_by_css_selector(".redbox")
self.assertEqual(0, len(elements))
def test_should_return_after_first_attempt_to_find_many_after_disabling_implicit_waits(self):
add = self.driver.find_element_by_css_selector("#adder")
self.driver.implicitly_wait(1.1)
self.driver.implicitly_wait(0)
add.click()
elements = self.driver.find_elements_by_css_selector(".redbox")
self.assertEqual(0, len(elements))
if __name__ == "__main__":
unittest.main()
|
enthought/etsproxy
|
refs/heads/master
|
enthought/chaco/svg_graphics_context.py
|
1
|
# proxy module
from __future__ import absolute_import
from chaco.svg_graphics_context import *
|
Azure/azure-sdk-for-python
|
refs/heads/sync-eng/common-js-nightly-docs-2-1768-ForTestPipeline
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_12_01/operations/_virtual_networks_operations.py
|
1
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworksOperations(object):
"""VirtualNetworksOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetwork"
"""Gets the specified virtual network by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetwork, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.VirtualNetwork
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
parameters, # type: "_models.VirtualNetwork"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetwork"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetwork')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
parameters, # type: "_models.VirtualNetwork"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetwork"]
"""Creates or updates a virtual network in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param parameters: Parameters supplied to the create or update virtual network operation.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.VirtualNetwork
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetwork or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.VirtualNetwork]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def update_tags(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetwork"
"""Updates a virtual network tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param parameters: Parameters supplied to update virtual network tags.
:type parameters: ~azure.mgmt.network.v2019_12_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetwork, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_12_01.models.VirtualNetwork
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkListResult"]
"""Gets all virtual networks in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.VirtualNetworkListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualNetworks'} # type: ignore
def list(
    self,
    resource_group_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.VirtualNetworkListResult"]
    """Gets all virtual networks in a resource group.
    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either VirtualNetworkListResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.VirtualNetworkListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkListResult"]
    # Map ARM error status codes to exception types; callers may extend/override
    # this via an 'error_map' keyword argument.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request for one page: the first page uses the operation's
        # templated URL; follow-up pages use the service-supplied nextLink verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            # nextLink already contains its own query string.
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand (next_link, element iterator) to ItemPaged.
        deserialized = self._deserialize('VirtualNetworkListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            # Caller-supplied hook may transform each page's element list.
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page; raise HttpResponseError for any non-200 status.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks'}  # type: ignore
def check_ip_address_availability(
    self,
    resource_group_name,  # type: str
    virtual_network_name,  # type: str
    ip_address,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> "_models.IPAddressAvailabilityResult"
    """Checks whether a private IP address is available for use.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_network_name: The name of the virtual network.
    :type virtual_network_name: str
    :param ip_address: The private IP address to be verified.
    :type ip_address: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IPAddressAvailabilityResult, or the result of cls(response)
    :rtype: ~azure.mgmt.network.v2019_12_01.models.IPAddressAvailabilityResult
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.IPAddressAvailabilityResult"]
    # Status-code -> exception mapping; callers may override via 'error_map'.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    accept = "application/json"

    # Expand the operation's URL template with the request-scoped path values.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(self.check_ip_address_availability.metadata['url'], **path_format_arguments)  # type: ignore

    # Query string: the address under test, then the service API version.
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['ipAddress'] = self._serialize.query("ip_address", ip_address, 'str')
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('IPAddressAvailabilityResult', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
check_ip_address_availability.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/CheckIPAddressAvailability'}  # type: ignore
def list_usage(
    self,
    resource_group_name,  # type: str
    virtual_network_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["_models.VirtualNetworkListUsageResult"]
    """Lists usage stats.
    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_network_name: The name of the virtual network.
    :type virtual_network_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either VirtualNetworkListUsageResult or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.VirtualNetworkListUsageResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkListUsageResult"]
    # Status-code -> exception mapping; callers may extend via 'error_map'.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-12-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page: templated URL + api-version. Later pages: nextLink verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_usage.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page into (next_link, iterator of usage entries).
        deserialized = self._deserialize('VirtualNetworkListUsageResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            # Caller-supplied hook may transform each page's element list.
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page; any non-200 status becomes an HttpResponseError.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_usage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/usages'}  # type: ignore
|
teamotrinidad/plugin.video.chicovara
|
refs/heads/master
|
servers/shareflare.py
|
41
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for shareflare
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    """Resolve direct media URLs for *page_url*.

    Resolution is not implemented for this server, so the result is always
    an empty list; the call is logged for traceability.
    """
    logger.info("[shareflare.py] get_video_url(page_url='%s')" % page_url)
    return []
# Find this server's videos in the given text.
def find_videos(data):
    """Scan *data* for shareflare download links.

    Returns a list of ``[title, url, server_id]`` triples, one per distinct
    link found; duplicate URLs are logged and skipped.
    """
    encontrados = set()
    devuelve = []
    # Example: http://shareflare.net/download/99094.9feafdcc1fa511c89ea775cd862f/Emergo.dvdrip.avi.html
    # Fix: escape the literal dots in the domain/suffix so '.' cannot match
    # arbitrary characters (the old pattern also matched e.g. "shareflareXnet").
    patronvideos = r'(shareflare\.net/download/[a-zA-Z0-9\.\/]+\.html)'
    logger.info("[shareflare.py] find_videos #"+patronvideos+"#")
    matches = re.compile(patronvideos,re.DOTALL).findall(data)
    for match in matches:
        titulo = "[shareflare]"
        url = "http://"+match
        if url not in encontrados:
            logger.info(" url="+url)
            devuelve.append( [ titulo , url , 'shareflare' ] )
            encontrados.add(url)
        else:
            logger.info(" url duplicada="+url)
    return devuelve
|
speccy88/Mote
|
refs/heads/master
|
MoteServer/client.py
|
1
|
import zmq
class MoteClient:
    # Thin ZeroMQ REQ client for a Mote server listening on TCP port 5006.
    # (Python 2 code: print statements and raw_input below.)

    def __init__(self, serverAddress):
        # One context and one REQ socket per client; REQ enforces strict
        # send/recv alternation, which SendMSG/GetMSG below rely on.
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.REQ)
        self.socket.connect("tcp://"+serverAddress+":5006")

    def Close(self):
        # Close the socket before terminating the context; term() blocks
        # while sockets remain open.
        self.socket.close()
        self.context.term()

    def Fetch(self, addr, msg):
        # '@' prefix marks a read request; the reply carries the fetched value.
        self.SendMSG(addr,"@"+msg)
        return self.GetMSG()

    def Store(self, addr, msg):
        # '!' prefix marks a write request; the reply is read and discarded
        # to keep the REQ/REP state machine in sync.
        self.SendMSG(addr,"!"+msg)
        self.GetMSG()

    def SendMSG(self, addr, msg):
        # Wire format: node address string immediately followed by the payload.
        self.socket.send(addr+msg)

    def GetMSG(self):
        # Blocking receive of the server's reply.
        return self.socket.recv()

    def Ask(self):
        # Interactive console menu. ADDRESS is a module-level global set in
        # the __main__ block; any unlisted choice closes the client and exits.
        while(True):
            print ""
            print """\
(1)Store = LED/ON
(2)Store = LED/OFF
(3)Store next poll = TIME
(4)Store = MAIL
(5)Fetch = COUNTER """
            choice = raw_input("?")
            if choice == "0":
                # Undocumented option: send an empty store (server ping).
                self.Store(ADDRESS,"")
            elif choice == "1":
                self.Store(ADDRESS,"LED/ON")
            elif choice == "2":
                self.Store(ADDRESS,"LED/OFF")
            elif choice == "3":
                self.Store(ADDRESS,"TIME")
            elif choice == "4":
                self.Store(ADDRESS,"MAIL")
            elif choice == "5":
                print self.Fetch(ADDRESS,"COUNTER")
            else:
                self.Close()
                quit()
if __name__=="__main__":
    # Connect to the Mote server at a hard-coded LAN address and run the menu.
    c = MoteClient("192.168.0.198")
    ADDRESS = "B"  # target node address, prepended to every message by SendMSG
    c.Ask()
|
PhonologicalCorpusTools/CorpusTools
|
refs/heads/master
|
corpustools/corpus/io/text_ilg.py
|
1
|
import os
import re
from collections import Counter, defaultdict
from corpustools.corpus.classes import SpontaneousSpeechCorpus
from corpustools.corpus.classes import Corpus, Word, Discourse, WordToken, Attribute
from corpustools.corpus.io.binary import load_binary
from corpustools.exceptions import (DelimiterError, ILGError, ILGLinesMismatchError,
ILGWordMismatchError)
from .helper import (compile_digraphs, parse_transcription,
DiscourseData, AnnotationType, data_to_discourse, data_to_discourse2,
Annotation, BaseAnnotation)
import corpustools.gui.modernize as modernize
def calculate_lines_per_gloss(lines):
    """Guess how many consecutive text lines make up one gloss.

    *lines* is a list of ``(line_number, tokens)`` pairs. The guess is based
    on runs of lines sharing the same token count; when every line has the
    same count, the largest divisor of the line total in 2..5 is used.
    """
    counts = [len(entry[1]) for entry in lines]
    # same_as_prev[i] is True when line i has the same token count as line i-1;
    # index 0 has no predecessor and is always False.
    same_as_prev = [False]
    for idx in range(1, len(counts)):
        same_as_prev.append(counts[idx] == counts[idx - 1])
    if all(same_as_prev[1:]):
        # Uniform token counts: fall back to the largest divisor in 2..5.
        best = 1
        for candidate in range(2, 6):
            if len(lines) % candidate == 0:
                best = candidate
    else:
        # Measure the length of each run that ends where the count changes.
        intervals = []
        run_start = 0
        idx = 0
        for idx, same in enumerate(same_as_prev):
            if idx == 0:
                continue
            if not same:
                intervals.append(idx - run_start)
                run_start = idx
        intervals.append(idx + 1 - run_start)
        tally = Counter(intervals)
        # Most common run length; ties broken toward the larger value.
        best = max(tally.keys(), key=lambda v: (tally[v], v))
        if best > 10:
            # Implausibly long gloss: step down through shorter run lengths.
            rejected = set([best])
            while best > 10:
                rejected.add(best)
                best = max(v for v in intervals if v not in rejected)
    return best
def most_frequent_value(dictionary):
    """Return the value that occurs most often in *dictionary*."""
    tally = Counter(dictionary.values())
    return max(tally, key=tally.get)
def inspect_discourse_ilg(path, number = None):
    """
    Generate a list of AnnotationTypes for a specified text file for parsing
    it as an interlinear gloss text file

    Parameters
    ----------
    path : str
        Full path to text file
    number : int, optional
        Number of lines per gloss, if not supplied, it is auto-detected

    Returns
    -------
    list of AnnotationTypes
        Autodetected AnnotationTypes for the text file
    """
    trans_delimiters = ['.', ';', ',']
    lines = {}
    if os.path.isdir(path):
        # Directory: detect lines-per-gloss per .txt file, then use the modal value.
        numbers = {}
        for root, subdirs, files in os.walk(path):
            for filename in files:
                if not filename.lower().endswith('.txt'):
                    continue
                p = os.path.join(root, filename)
                lines[p] = text_to_lines(p)
                numbers[p] = calculate_lines_per_gloss(lines[p])
        # NOTE(review): `number` parameter is ignored here — auto-detection always
        # overwrites it. Also, `p` leaks from the walk loop: below, the "template"
        # file is whichever .txt the walk happened to visit last; confirm intended.
        number = most_frequent_value(numbers)
    else:
        lines[path] = text_to_lines(path)
        number = calculate_lines_per_gloss(lines[path])
        p = path
    annotation_types = []
    for i in range(number):
        name = 'Line {}'.format(i+1)
        if i == 0:
            # First line of each gloss is the orthographic anchor (spelling).
            att = Attribute('spelling','spelling','Spelling')
            a = AnnotationType(name, None, None, anchor = True, token = False, attribute = att)
        else:
            # Non-anchor lines: guess the attribute category from their labels.
            labels = lines[p][i][1]
            cat = Attribute.guess_type(labels, trans_delimiters)
            att = Attribute(Attribute.sanitize_name(name), cat, name)
            a = AnnotationType(name, None, annotation_types[0].name, token = False, attribute = att)
            if cat == 'tier' and a.trans_delimiter is None:
                # Use the first candidate delimiter that actually occurs in a label.
                for l in labels:
                    for delim in trans_delimiters:
                        if delim in l:
                            a.trans_delimiter = delim
                            break
                    if a.trans_delimiter is not None:
                        break
        # Seed the annotation type with the template file's labels for this line.
        a.add(lines[p][i][1], save = False)
        annotation_types.append(a)
    # Feed the remaining files' labels into the already-built annotation types.
    for k,v in lines.items():
        if k == p:
            continue
        for i in range(number):
            labels = lines[k][i][1]
            annotation_types[i].add(labels, save = False)
    return annotation_types
def text_to_lines(path, delimiter=None):
    """Read a text file and split it into non-blank token lines.

    Parameters
    ----------
    path : str
        Full path to the text file (UTF-8; a BOM is tolerated).
    delimiter : str, optional
        Token delimiter. The default ``None`` splits on any whitespace,
        which matches the previous hard-coded behavior.

    Returns
    -------
    list of (int, list of str)
        ``(original_line_number, tokens)`` for every non-blank line.

    Raises
    ------
    DelimiterError
        If an explicit delimiter does not occur anywhere in the file.
    """
    with open(path, encoding='utf-8-sig', mode='r') as f:
        text = f.read()
    # Fail fast when an explicit delimiter cannot split anything.
    # (Previously `delimiter` was hard-coded to None, making this check dead;
    # exposing it as a parameter makes the check live and the function reusable.)
    if delimiter is not None and delimiter not in text:
        e = DelimiterError('The delimiter specified does not create multiple words. Please specify another delimiter.')
        raise(e)
    lines = enumerate(text.splitlines())
    lines = [(x[0], x[1].strip().split(delimiter)) for x in lines if x[1].strip() != '']
    return lines
def ilg_to_data(corpus_name, path, annotation_types,
                stop_check = None, call_back = None):
    """Parse an interlinear-gloss text file into a DiscourseData object.

    Lines are consumed in groups of ``len(annotation_types)``. Each group is
    checked for per-line token-count agreement; any mismatching groups are
    collected and reported together at the end via ILGWordMismatchError.
    Returns None early if *stop_check* fires; raises ILGLinesMismatchError
    when the file's line count is not a multiple of the gloss size.
    """
    #if 'spelling' not in line_names:
    #    raise(PCTError('Spelling required for parsing interlinear gloss files.'))
    lines = text_to_lines(path)
    if len(lines) % len(annotation_types) != 0:
        raise(ILGLinesMismatchError(lines))
    if call_back is not None:
        call_back('Processing file...')
        call_back(0,len(lines))
    cur = 0
    index = 0
    name = corpus_name
    # Clear any state left over from a previous parse of these annotation types.
    for a in annotation_types:
        a.reset()
    data = DiscourseData(name, annotation_types)
    mismatching_lines = list()
    while index < len(lines):
        if stop_check is not None and stop_check():
            return
        if call_back is not None:
            cur += 1
            call_back(cur)
        cur_line = {}
        mismatch = False
        for line_ind, annotation_type in enumerate(annotation_types):
            if annotation_type.name == 'ignore':
                continue
            actual_line_ind, line = lines[index+line_ind]
            # Every line within one gloss must have the same number of tokens
            # as the previously collected line.
            if len(cur_line.values()) != 0 and len(list(cur_line.values())[-1]) != len(line):
                mismatch = True
            if annotation_type.delimited:
                # Delimited tiers are split into segment-level annotations.
                line = [parse_transcription(x, annotation_type) for x in line]
            cur_line[annotation_type.attribute.name] = line
        if mismatch:
            start_line = lines[index][0]
            end_line = start_line + len(annotation_types)
            mismatching_lines.append(((start_line, end_line), cur_line))
        # Once any mismatch has been seen, skip annotation building and only
        # keep scanning for further mismatches to report together.
        if len(mismatching_lines) > 0:
            index += len(annotation_types)
            continue
        for word_name in data.word_levels:
            if stop_check is not None and stop_check():
                return
            if call_back is not None:
                cur += 1
                call_back(cur)
            for i, s in enumerate(cur_line[word_name]):
                annotations = {}
                word = Annotation(s)
                for n in data.base_levels:
                    # Link the word to its span in each base (segment) level.
                    tier_elements = cur_line[n][i]
                    level_count = data.level_length(n)
                    word.references.append(n)
                    word.begins.append(level_count)
                    word.ends.append(level_count + len(tier_elements))
                    tier_elements[0].begin = level_count
                    tier_elements[-1].end = level_count + len(tier_elements)
                    annotations[n] = tier_elements
                for line_type in cur_line.keys():
                    if data[line_type].ignored:
                        continue
                    if data[line_type].base:
                        continue
                    if data[line_type].anchor:
                        continue
                    # Remaining line types attach to the word as token-level
                    # or additional (type-level) properties.
                    if data[line_type].token:
                        word.token[line_type] = cur_line[line_type][i]
                    else:
                        word.additional[line_type] = cur_line[line_type][i]
                annotations[word_name] = [word]
                data.add_annotations(**annotations)
        index += len(annotation_types)
    if len(mismatching_lines) > 0:
        raise(ILGWordMismatchError(mismatching_lines))
    return data
def load_discourse_ilg(corpus_name, path, annotation_types,
                    lexicon = None,
                    feature_system_path = None,
                    stop_check = None, call_back = None):
    """
    Load a discourse from a text file containing interlinear glosses

    Parameters
    ----------
    corpus_name : str
        Informative identifier to refer to corpus
    path : str
        Full path to text file
    annotation_types : list of AnnotationType
        List of AnnotationType specifying how to parse the glosses.
        Can be generated through ``inspect_discourse_ilg``.
    lexicon : Corpus, optional
        Corpus to store Discourse word information
    feature_system_path : str
        Full path to pickled FeatureMatrix to use with the Corpus
    stop_check : callable or None
        Optional function to check whether to gracefully terminate early
    call_back : callable or None
        Optional function to supply progress information during the loading

    Returns
    -------
    Discourse
        Discourse object generated from the text file
    """
    data = ilg_to_data(corpus_name, path, annotation_types,stop_check, call_back)
    # NOTE(review): `data` is parsed (which validates the file and can raise),
    # but it is not passed to data_to_discourse2 below — presumably
    # data_to_discourse2 relies on the annotation_types populated by
    # ilg_to_data; confirm this is intentional.
    #discourse = data_to_discourse(data, lexicon, call_back=call_back, stop_check=stop_check)
    discourse = data_to_discourse2(corpus_name=corpus_name, annotation_types=annotation_types,
                                   stop_check=stop_check, call_back=call_back)
    # data_to_discourse2 returns None when cancelled via stop_check.
    if discourse is None:
        return
    if feature_system_path is not None:
        feature_matrix = load_binary(feature_system_path)
        discourse.lexicon.set_feature_matrix(feature_matrix)
        # Upgrade pickles written by older versions of the software.
        discourse.lexicon.specifier = modernize.modernize_specifier(discourse.lexicon.specifier)
    return discourse
def load_directory_ilg(corpus_name, path, annotation_types,
                    feature_system_path = None,
                    stop_check = None, call_back = None):
    """
    Loads a directory of interlinear gloss text files

    Parameters
    ----------
    corpus_name : str
        Name of corpus
    path : str
        Path to directory of text files
    annotation_types : list of AnnotationType
        List of AnnotationType specifying how to parse the glosses.
        Can be generated through ``inspect_discourse_ilg``.
    feature_system_path : str, optional
        File path of FeatureMatrix binary to specify segments
    stop_check : callable or None
        Optional function to check whether to gracefully terminate early
    call_back : callable or None
        Optional function to supply progress information during the loading

    Returns
    -------
    SpontaneousSpeechCorpus
        Corpus containing Discourses corresponding to the text files
    """
    if call_back is not None:
        call_back('Finding files...')
        call_back(0, 0)
    # Collect all .txt files first so progress can be reported out of the total.
    file_tuples = []
    for root, subdirs, files in os.walk(path):
        for filename in files:
            if not filename.lower().endswith('.txt'):
                continue
            file_tuples.append((root, filename))
    if call_back is not None:
        call_back('Parsing files...')
        call_back(0,len(file_tuples))
    cur = 0
    corpus = SpontaneousSpeechCorpus(corpus_name, path)
    for i, t in enumerate(file_tuples):
        if stop_check is not None and stop_check():
            return
        if call_back is not None:
            call_back('Parsing file {} of {}...'.format(i+1,len(file_tuples)))
            call_back(i)
        root, filename = t
        # Each file becomes a Discourse named after its stem.
        name = os.path.splitext(filename)[0]
        d = load_discourse_ilg(name, os.path.join(root,filename),
                               annotation_types, corpus.lexicon,
                               None,
                               stop_check, call_back)
        corpus.add_discourse(d)
    # Apply the feature system once, after all discourses share the lexicon.
    if feature_system_path is not None:
        feature_matrix = load_binary(feature_system_path)
        corpus.lexicon.set_feature_matrix(feature_matrix)
        corpus.lexicon.specifier = modernize.modernize_specifier(corpus.lexicon.specifier)
    return corpus
def export_discourse_ilg(discourse, path, trans_delim = '.'):
    """
    Export a discourse to an interlinear gloss text file, with a maximal
    line size of 10 words

    Parameters
    ----------
    discourse : Discourse
        Discourse object to export
    path : str
        Path to export to
    trans_delim : str, optional
        Delimiter for segments, defaults to ``.``
    """
    with open(path, encoding='utf-8-sig', mode='w') as f:
        spellings = list()
        transcriptions = list()
        for wt in discourse:
            spellings.append(wt.spelling)
            transcriptions.append(trans_delim.join(wt.transcription))
            # Flush a spelling/transcription line pair once 10 words are
            # buffered. (The previous `> 10` test flushed at 11 words,
            # contradicting the documented 10-word maximum.)
            if len(spellings) >= 10:
                f.write(' '.join(spellings))
                f.write('\n')
                f.write(' '.join(transcriptions))
                f.write('\n')
                spellings = list()
                transcriptions = list()
        # Write out any words left over after the final full line.
        if spellings:
            f.write(' '.join(spellings))
            f.write('\n')
            f.write(' '.join(transcriptions))
            f.write('\n')
|
MikeLing/treeherder
|
refs/heads/master
|
tests/webapp/api/test_failureline.py
|
3
|
from django.core.urlresolvers import reverse
from rest_framework.test import APIClient
from tests.autoclassify.utils import (create_failure_lines,
create_text_log_errors,
test_line)
from treeherder.autoclassify.detectors import ManualDetector
from treeherder.model.models import (BugJobMap,
FailureLine,
Job,
JobNote,
Matcher,
MatcherManager,
TextLogError,
TextLogErrorMetadata)
from treeherder.model.search import TestFailureLine as _TestFailureLine
def test_get_failure_line(webapp, failure_lines):
    """
    test getting a single failure line
    """
    resp = webapp.get(
        reverse("failure-line-detail", kwargs={"pk": failure_lines[0].id}))
    assert resp.status_int == 200
    failure_line = resp.json
    assert isinstance(failure_line, object)
    # Serializer contract: exactly this set of fields, no more and no fewer.
    exp_failure_keys = ["id", "job_guid", "repository", "job_log",
                        "action", "line", "test", "subtest", "status", "expected", "message",
                        "signature", "level", "created", "modified", "matches",
                        "best_classification", "best_is_verified", "classified_failures",
                        "unstructured_bugs"]
    assert set(failure_line.keys()) == set(exp_failure_keys)
def test_update_failure_line_verify(test_repository,
                                    text_log_errors_failure_lines,
                                    classified_failures,
                                    test_user):
    """
    PUTting a failure line's existing best classification marks both the
    FailureLine and its TextLogErrorMetadata as verified and syncs the
    change to the Elasticsearch document.
    """
    text_log_errors, failure_lines = text_log_errors_failure_lines
    client = APIClient()
    client.force_authenticate(user=test_user)
    failure_line = failure_lines[0]
    error_line = text_log_errors[0]
    # Precondition: autoclassified, not yet human-verified.
    assert failure_line.best_classification == classified_failures[0]
    assert failure_line.best_is_verified is False
    assert error_line.metadata.failure_line == failure_line
    assert error_line.metadata.best_classification == classified_failures[0]
    assert error_line.metadata.best_is_verified is False
    body = {"project": test_repository.name,
            "best_classification": classified_failures[0].id}
    resp = client.put(
        reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
        body, format="json")
    assert resp.status_code == 200
    failure_line.refresh_from_db()
    error_line.metadata.refresh_from_db()
    error_line.refresh_from_db()
    # Same classification as before, now flagged verified on both records.
    assert failure_line.best_classification == classified_failures[0]
    assert failure_line.best_is_verified
    assert error_line.metadata.best_classification == classified_failures[0]
    assert error_line.metadata.best_is_verified
    # The Elasticsearch mirror of the failure line must reflect the update.
    es_line = _TestFailureLine.get(failure_line.id, routing=failure_line.test)
    assert es_line.best_classification == classified_failures[0].id
    assert es_line.best_is_verified
def test_update_failure_line_replace(test_repository,
                                     text_log_errors_failure_lines,
                                     classified_failures,
                                     test_user):
    """
    PUTting a different classification than the autoclassified one replaces
    the best classification, marks it verified, and records a ManualDetector
    match on both the FailureLine and the TextLogError.
    """
    MatcherManager.register_detector(ManualDetector)
    client = APIClient()
    client.force_authenticate(user=test_user)
    text_log_errors, failure_lines = text_log_errors_failure_lines
    failure_line = failure_lines[0]
    error_line = text_log_errors[0]
    # Precondition: autoclassified as classified_failures[0], unverified.
    assert failure_line.best_classification == classified_failures[0]
    assert failure_line.best_is_verified is False
    assert error_line.metadata.failure_line == failure_line
    assert error_line.metadata.best_classification == classified_failures[0]
    assert error_line.metadata.best_is_verified is False
    body = {"project": test_repository.name,
            "best_classification": classified_failures[1].id}
    resp = client.put(
        reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
        body, format="json")
    assert resp.status_code == 200
    failure_line.refresh_from_db()
    error_line.metadata.refresh_from_db()
    error_line.refresh_from_db()
    assert failure_line.best_classification == classified_failures[1]
    assert failure_line.best_is_verified
    # The original match is retained alongside the manually chosen one.
    assert len(failure_line.classified_failures.all()) == 2
    assert error_line.metadata.failure_line == failure_line
    assert error_line.metadata.best_classification == classified_failures[1]
    assert error_line.metadata.best_is_verified
    # The replacement match must be attributed to the ManualDetector matcher.
    expected_matcher = Matcher.objects.get(name="ManualDetector")
    assert failure_line.matches.get(classified_failure_id=classified_failures[1].id).matcher == expected_matcher
    assert error_line.matches.get(classified_failure_id=classified_failures[1].id).matcher == expected_matcher
def test_update_failure_line_mark_job(test_repository, test_job,
                                      text_log_errors_failure_lines,
                                      classified_failures,
                                      test_user):
    """
    Verifying every failure line of a job fully verifies the job, creates a
    single JobNote attributed to the user, and maps the classification's bug
    to the job.
    """
    text_log_errors, failure_lines = text_log_errors_failure_lines
    MatcherManager.register_detector(ManualDetector)
    client = APIClient()
    client.force_authenticate(user=test_user)
    # Give the chosen classification a bug so a BugJobMap should be created.
    classified_failures[1].bug_number = 1234
    classified_failures[1].save()
    for text_log_error, failure_line in zip(text_log_errors, failure_lines):
        assert failure_line.best_is_verified is False
        assert text_log_error.metadata.best_is_verified is False
        body = {"best_classification": classified_failures[1].id}
        resp = client.put(reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
                          body, format="json")
        assert resp.status_code == 200
        failure_line.refresh_from_db()
        text_log_error.refresh_from_db()
        text_log_error.metadata.refresh_from_db()
        assert failure_line.best_classification == classified_failures[1]
        assert failure_line.best_is_verified
        assert text_log_error.metadata.best_classification == classified_failures[1]
        assert text_log_error.metadata.best_is_verified
    assert test_job.is_fully_verified()
    # should only be one, will assert if that isn't the case
    note = JobNote.objects.get(job=test_job)
    # NOTE(review): 4 is presumably the fixture id of the "intermittent"
    # failure classification — confirm against the fixtures.
    assert note.failure_classification.id == 4
    assert note.user == test_user
    job_bugs = BugJobMap.objects.filter(job=test_job)
    assert job_bugs.count() == 1
    assert job_bugs[0].bug_id == 1234
def test_update_failure_line_mark_job_with_human_note(test_job,
                                                      text_log_errors_failure_lines,
                                                      classified_failures, test_user):
    """
    If a human-authored JobNote already exists, verifying all failure lines
    does not create a duplicate note for the job.
    """
    text_log_errors, failure_lines = text_log_errors_failure_lines
    MatcherManager.register_detector(ManualDetector)
    client = APIClient()
    client.force_authenticate(user=test_user)
    # Pre-existing human note with a user attached.
    JobNote.objects.create(job=test_job,
                           failure_classification_id=4,
                           user=test_user,
                           text="note")
    for failure_line in failure_lines:
        body = {"best_classification": classified_failures[1].id}
        resp = client.put(reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
                          body, format="json")
        assert resp.status_code == 200
    assert test_job.is_fully_verified()
    # should only be one, will assert if that isn't the case
    note = JobNote.objects.get(job=test_job)
    assert note.failure_classification.id == 4
    assert note.user == test_user
def test_update_failure_line_mark_job_with_auto_note(test_job,
                                                     mock_autoclassify_jobs_true,
                                                     test_repository,
                                                     text_log_errors_failure_lines,
                                                     classified_failures,
                                                     test_user):
    """
    If only an automatic (userless) JobNote exists, verifying all failure
    lines adds a second, user-attributed note while keeping the original.
    """
    text_log_errors, failure_lines = text_log_errors_failure_lines
    MatcherManager.register_detector(ManualDetector)
    client = APIClient()
    client.force_authenticate(user=test_user)
    # Pre-existing automatic note: no user, classification id 7.
    JobNote.objects.create(job=test_job,
                           failure_classification_id=7,
                           text="note")
    for failure_line in failure_lines:
        body = {"best_classification": classified_failures[1].id}
        resp = client.put(reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
                          body, format="json")
        assert resp.status_code == 200
    assert test_job.is_fully_verified()
    # Newest note first: the human-verification note, then the automatic one.
    notes = JobNote.objects.filter(job=test_job).order_by('-created')
    assert notes.count() == 2
    assert notes[0].failure_classification.id == 4
    assert notes[0].user == test_user
    assert notes[0].text == ''
    assert notes[1].failure_classification.id == 7
    assert not notes[1].user
    assert notes[1].text == "note"
def test_update_failure_lines(mock_autoclassify_jobs_true,
                              test_repository,
                              text_log_errors_failure_lines,
                              classified_failures,
                              eleven_jobs_stored,
                              test_user):
    """
    Bulk-PUTting a list of failure lines (spanning two jobs) verifies all of
    them and creates one JobNote per fully verified job.
    """
    jobs = (Job.objects.get(id=1), Job.objects.get(id=2))
    MatcherManager.register_detector(ManualDetector)
    client = APIClient()
    client.force_authenticate(user=test_user)
    # Create additional failure lines/errors on the second job so the bulk
    # update covers more than one job.
    lines = [(test_line, {}),
             (test_line, {"subtest": "subtest2"})]
    new_failure_lines = create_failure_lines(jobs[1], lines)
    new_text_log_errors = create_text_log_errors(jobs[1], lines)
    for text_log_error, failure_line in zip(new_text_log_errors,
                                            new_failure_lines):
        TextLogErrorMetadata.objects.create(text_log_error=text_log_error,
                                            failure_line=failure_line)
    failure_lines = FailureLine.objects.filter(
        job_guid__in=[job.guid for job in jobs]).all()
    text_log_errors = TextLogError.objects.filter(
        step__job__in=jobs).all()
    for text_log_error, failure_line in zip(text_log_errors, failure_lines):
        assert text_log_error.metadata.best_is_verified is False
        assert failure_line.best_is_verified is False
    # One PUT to the list endpoint with every line's id and classification.
    body = [{"id": failure_line.id,
             "best_classification": classified_failures[1].id}
            for failure_line in failure_lines]
    resp = client.put(reverse("failure-line-list"), body, format="json")
    assert resp.status_code == 200
    for text_log_error, failure_line in zip(text_log_errors, failure_lines):
        text_log_error.refresh_from_db()
        text_log_error.metadata.refresh_from_db()
        failure_line.refresh_from_db()
        assert failure_line.best_classification == classified_failures[1]
        assert failure_line.best_is_verified
        assert text_log_error.metadata.best_classification == classified_failures[1]
        assert text_log_error.metadata.best_is_verified
    for job in jobs:
        assert job.is_fully_verified()
        # will assert if we don't have exactly one job, which is what we want
        note = JobNote.objects.get(job=job)
        assert note.failure_classification.id == 4
        assert note.user == test_user
def test_update_failure_line_ignore(test_job,
                                    test_repository,
                                    text_log_errors_failure_lines,
                                    classified_failures, test_user):
    """
    PUTting best_classification=None marks the line as deliberately ignored:
    the classification is cleared but the line still counts as verified.
    """
    text_log_errors, failure_lines = text_log_errors_failure_lines
    client = APIClient()
    client.force_authenticate(user=test_user)
    MatcherManager.register_detector(ManualDetector)
    text_log_error = text_log_errors[0]
    failure_line = failure_lines[0]
    # Precondition: autoclassified, unverified.
    assert text_log_error.metadata.best_classification == classified_failures[0]
    assert text_log_error.metadata.best_is_verified is False
    assert failure_line.best_classification == classified_failures[0]
    assert failure_line.best_is_verified is False
    body = {"project": test_job.repository.name,
            "best_classification": None}
    resp = client.put(
        reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
        body, format="json")
    assert resp.status_code == 200
    failure_line.refresh_from_db()
    text_log_error.refresh_from_db()
    text_log_error.metadata.refresh_from_db()
    # Cleared classification, but verified on both records.
    assert failure_line.best_classification is None
    assert failure_line.best_is_verified
    assert text_log_error.metadata.best_classification is None
    assert text_log_error.metadata.best_is_verified
def test_update_failure_line_all_ignore_mark_job(test_job,
                                                 mock_autoclassify_jobs_true,
                                                 text_log_errors_failure_lines,
                                                 classified_failures,
                                                 test_user):
    """
    Ignoring (classification=None) every failure line of a job still fully
    verifies the job and creates exactly one JobNote.
    """
    text_log_errors, failure_lines = text_log_errors_failure_lines
    MatcherManager.register_detector(ManualDetector)
    client = APIClient()
    client.force_authenticate(user=test_user)
    job_failure_lines = [line for line in failure_lines if
                         line.job_guid == test_job.guid]
    job_text_log_errors = [error for error in text_log_errors if
                           error.step.job == test_job]
    # Reset any autoclassification state so every line starts unclassified.
    # NOTE(review): these in-memory resets are not save()d — presumably the
    # fixtures already persist this state; confirm.
    for error_line, failure_line in zip(job_text_log_errors, job_failure_lines):
        error_line.metadata.best_is_verified = False
        error_line.metadata.best_classification = None
        failure_line.best_is_verified = False
        failure_line.best_classification = None
    assert JobNote.objects.count() == 0
    for error_line, failure_line in zip(job_text_log_errors, job_failure_lines):
        assert error_line.metadata.best_is_verified is False
        assert failure_line.best_is_verified is False
        body = {"best_classification": None}
        resp = client.put(reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
                          body, format="json")
        assert resp.status_code == 200
        error_line.refresh_from_db()
        error_line.metadata.refresh_from_db()
        failure_line.refresh_from_db()
        assert failure_line.best_classification is None
        assert failure_line.best_is_verified
        assert error_line.metadata.best_classification is None
        assert error_line.metadata.best_is_verified
    assert test_job.is_fully_verified()
    assert JobNote.objects.count() == 1
def test_update_failure_line_partial_ignore_mark_job(test_job,
                                                     mock_autoclassify_jobs_true,
                                                     text_log_errors_failure_lines,
                                                     classified_failures,
                                                     test_user):
    """
    Verifying a mix of ignored and classified failure lines should still
    fully verify the job and create exactly one JobNote for it.
    """
    text_log_errors, failure_lines = text_log_errors_failure_lines
    MatcherManager.register_detector(ManualDetector)
    client = APIClient()
    client.force_authenticate(user=test_user)
    for i, (error_line, failure_line) in enumerate(zip(text_log_errors, failure_lines)):
        assert error_line.metadata.best_is_verified is False
        assert failure_line.best_is_verified is False
        # Ignore the first line; classify every other one.
        body = {"best_classification": None if i == 0 else classified_failures[0].id}
        resp = client.put(reverse("failure-line-detail", kwargs={"pk": failure_line.id}),
                          body, format="json")
        assert resp.status_code == 200
        error_line.refresh_from_db()
        error_line.metadata.refresh_from_db()
        failure_line.refresh_from_db()
        if i == 0:
            assert failure_line.best_classification is None
            assert error_line.metadata.best_classification is None
        else:
            assert failure_line.best_classification == classified_failures[0]
            assert error_line.metadata.best_classification == classified_failures[0]
        assert failure_line.best_is_verified
    assert test_job.is_fully_verified()
    # will assert if we don't have exactly one note for this job, which is
    # what we want
    note = JobNote.objects.get(job=test_job)
    # NOTE(review): 4 looks like a specific FailureClassification id from the
    # fixtures (presumably "intermittent") — confirm against the fixture data.
    assert note.failure_classification.id == 4
    assert note.user == test_user
|
Stanford-Online/edx-platform
|
refs/heads/master
|
common/test/acceptance/pages/lms/completion.py
|
19
|
# -*- coding: utf-8 -*-
"""
Mixins for completion.
"""
class CompletionOnViewMixin(object):
    """
    Page-object helpers for verifying that xblock components are marked
    complete when they are viewed.
    """
    def xblock_components_mark_completed_on_view_value(self):
        """
        Return the 'data-mark-completed-on-view-after-delay' attribute values
        of the xblock components on the page.
        """
        components = self.q(css=self.xblock_component_selector)
        return components.attrs('data-mark-completed-on-view-after-delay')
    def wait_for_xblock_component_to_be_marked_completed_on_view(self, index=0):
        """
        Block until the xblock component at ``index`` has been marked
        completed on view (its delay attribute reaches '0').
        Arguments
            index (int): index of block to wait on. (default is 0)
        """
        def component_is_marked():
            values = self.xblock_components_mark_completed_on_view_value()
            return values[index] == '0'
        self.wait_for(component_is_marked,
                      'Waiting for xblock to be marked completed on view.')
|
jerli/sympy
|
refs/heads/master
|
sympy/unify/rewrite.py
|
93
|
""" Functions to support rewriting of SymPy expressions """
from __future__ import print_function, division
from sympy.unify.usympy import unify
from sympy.unify.usympy import rebuild
from sympy.strategies.tools import subs
from sympy import Expr
from sympy.assumptions import ask
def rewriterule(source, target, variables=(), condition=None, assume=None):
    """ Rewrite rule
    Transform expressions that match source into expressions that match target
    treating all `variables` as wilds.
    >>> from sympy.abc import w, x, y, z
    >>> from sympy.unify.rewrite import rewriterule
    >>> from sympy.utilities import default_sort_key
    >>> rl = rewriterule(x + y, x**y, [x, y])
    >>> sorted(rl(z + 3), key=default_sort_key)
    [3**z, z**3]
    Use ``condition`` to specify additional requirements.  Inputs are taken in
    the same order as is found in variables.
    >>> rl = rewriterule(x + y, x**y, [x, y], lambda x, y: x.is_integer)
    >>> list(rl(z + 3))
    [3**z]
    Use ``assume`` to specify additional requirements using new assumptions.
    >>> from sympy.assumptions import Q
    >>> rl = rewriterule(x + y, x**y, [x, y], assume=Q.integer(x))
    >>> list(rl(z + 3))
    [3**z]
    Assumptions for the local context are provided at rule runtime
    >>> list(rl(w + z, Q.integer(z)))
    [z**w]
    """
    def rewrite_rl(expr, assumptions=True):
        # Each `match` is a dict mapping wild variables to matched subtrees;
        # one result is yielded per successful unification.
        for match in unify(source, expr, {}, variables=variables):
            # Optional Python-level predicate over the matched values
            # (unmatched variables fall back to themselves).
            if (condition and
                not condition(*[match.get(var, var) for var in variables])):
                continue
            # Optional new-style assumptions check via sympy.assumptions.ask.
            if (assume and not ask(assume.xreplace(match), assumptions)):
                continue
            expr2 = subs(match)(target)
            # Substitution can leave a non-canonical tree; rebuild Exprs.
            if isinstance(expr2, Expr):
                expr2 = rebuild(expr2)
            yield expr2
    return rewrite_rl
|
hkawasaki/kawasaki-aio8-2
|
refs/heads/gacco2/0701_kim_again
|
lms/djangoapps/courseware/features/word_cloud.py
|
46
|
# pylint: disable=C0111
from time import sleep
from lettuce import world, step
from lettuce.django import django_url
from common import i_am_registered_for_the_course, section_location, visit_scenario_item
@step('I view the word cloud and it has rendered')
def word_cloud_is_rendered(_step):
    # The component renders into an element with the .word_cloud class.
    assert world.is_css_present('.word_cloud')
@step('the course has a Word Cloud component')
def view_word_cloud(_step):
    """Register for the test course, add a Word Cloud block, and open it."""
    coursenum = 'test_course'
    i_am_registered_for_the_course(_step, coursenum)
    add_word_cloud_to_course(coursenum)
    visit_scenario_item('SECTION')
@step('I press the Save button')
def press_the_save_button(_step):
    # Save button inside the word cloud input section.
    button_css = '.input_cloud_section input.save'
    world.css_click(button_css)
@step('I see the empty result')
def see_empty_result(_step):
    # Before any words are saved the results area is empty.
    assert (world.css_text('.your_words', 0) == '')
@step('I fill inputs')
def fill_inputs(_step):
    """Fill the word cloud inputs: 'text1' first, 'text2' in the other three."""
    # Reuse one selector for every input instead of repeating the literal
    # (the second occurrence previously duplicated the string inline).
    input_css = '.input_cloud_section .input-cloud'
    world.css_fill(input_css, 'text1', 0)
    for index in range(1, 4):
        world.css_fill(input_css, 'text2', index)
@step('I see the result with words count')
def see_result(_step):
    # Both entered words should appear in the results, order-independent.
    strong_css = '.your_words strong'
    target_text = set([world.css_text(strong_css, i) for i in range(2)])
    assert set(['text1', 'text2']) == target_text
def add_word_cloud_to_course(course):
    """Add a Word Cloud xblock to the given course's test section."""
    category = 'word_cloud'
    world.ItemFactory.create(parent_location=section_location(course),
                             category=category,
                             display_name='Word Cloud')
|
ymap/aioredis
|
refs/heads/master
|
examples/iscan.py
|
1
|
import asyncio
import aioredis
async def main():
    # Connect to a local Redis server.
    redis = await aioredis.create_redis(
        'redis://localhost')
    # Start from a clean slate for the container keys used below.
    await redis.delete('something:hash',
                       'something:set',
                       'something:zset')
    # Populate one example of each scannable container type.
    await redis.mset('something', 'value',
                     'something:else', 'else')
    await redis.hmset('something:hash',
                      'something:1', 'value:1',
                      'something:2', 'value:2')
    await redis.sadd('something:set', 'something:1',
                     'something:2', 'something:else')
    await redis.zadd('something:zset', 1, 'something:1',
                     2, 'something:2', 3, 'something:else')
    await go(redis)
    # Close the connection and wait until it is fully torn down.
    redis.close()
    await redis.wait_closed()
async def go(redis):
    """Demonstrate the incremental SCAN-family async iterators."""
    # SCAN: iterate matching top-level keys.
    async for key in redis.iscan(match='something*'):
        print('Matched:', key)
    key = 'something:hash'
    # HSCAN: iterate field/value pairs of a hash.
    async for name, val in redis.ihscan(key, match='something*'):
        print('Matched:', name, '->', val)
    key = 'something:set'
    # SSCAN: iterate members of a set.
    async for val in redis.isscan(key, match='something*'):
        print('Matched:', val)
    key = 'something:zset'
    # ZSCAN: iterate member/score pairs of a sorted set.
    async for val, score in redis.izscan(key, match='something*'):
        print('Matched:', val, ':', score)
if __name__ == '__main__':
    import os
    # The SCAN family of commands is unavailable on Redis 2.6; the test
    # harness exposes the server version via the REDIS_VERSION env var.
    if 'redis_version:2.6' not in os.environ.get('REDIS_VERSION', ''):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(main())
|
dneg/cortex
|
refs/heads/master
|
test/IECoreHoudini/procedurals/sphereProcedural/sphereProcedural-2.py
|
12
|
##########################################################################
#
# Copyright 2010 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios),
# its affiliates and/or its licensors.
#
# Copyright (c) 2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import IECore
# renders a sphere
class sphereProcedural( IECore.ParameterisedProcedural ) :
	"""Procedural that renders a sphere with a configurable radius and sweep angle."""
	def __init__( self ) :
		IECore.ParameterisedProcedural.__init__( self, "Renders a sphere." )
		# Sphere radius; the UI "update" userData requests a refresh on edit.
		rad_param = IECore.FloatParameter(
			name = "radius",
			description = "Sphere radius.",
			defaultValue = 1,
			minValue = 0.01,
			maxValue = 100.0,
			userData = { 'UI': { "update" : IECore.BoolData( True ) } }
		)
		# Sweep angle in degrees (360 gives a complete sphere).
		theta_param = IECore.FloatParameter(
			name = "theta",
			description = "Sphere theta.",
			defaultValue = 360,
			minValue = 1,
			maxValue = 360,
			userData = { 'UI': { "update" : IECore.BoolData( True ) } }
		)
		# Extra flag parameter; not referenced by doBound/doRender.
		extra_parm = IECore.BoolParameter( "extra", "Extra", False )
		self.parameters().addParameters( [rad_param, theta_param, extra_parm] )
	def doBound( self, args ) :
		# Axis-aligned box tightly bounding a sphere of the current radius.
		rad = args["radius"].value
		return IECore.Box3f( IECore.V3f(-rad,-rad,-rad), IECore.V3f(rad,rad,rad) )
	def doRenderState( self, renderer, args ) :
		# No additional render state is needed for this procedural.
		pass
	def doRender( self, renderer, args ) :
		rad = args["radius"].value
		theta = args["theta"].value
		# z range -1..1 renders the full sphere; theta limits the sweep.
		with IECore.AttributeBlock( renderer ):
			renderer.sphere( rad, -1, 1, theta, {} )
# Register the class with Cortex's RunTimeTyped system so instances get a
# proper type id.
IECore.registerRunTimeTyped( sphereProcedural )
|
Microsoft/hummingbird
|
refs/heads/master
|
hummingbird/ml/_utils.py
|
1
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
Collection of utility functions used throughout Hummingbird.
"""
from distutils.version import LooseVersion
import warnings
from .exceptions import ConstantError
def torch_installed():
    """
    Checks that *PyTorch* is available.
    """
    try:
        import torch  # noqa: F401
    except ImportError:
        return False
    return True
def onnx_ml_tools_installed():
    """
    Checks that *ONNXMLTools* is available.
    """
    try:
        import onnxmltools  # noqa: F401
    except ImportError:
        # Point the user at install instructions rather than failing hard.
        print("ONNXMLTOOLS not installed. Please check https://github.com/onnx/onnxmltools for instructions.")
        return False
    return True
def onnx_runtime_installed():
    """
    Checks that *ONNX Runtime* is available.
    """
    try:
        import onnxruntime  # noqa: F401
    except ImportError:
        return False
    return True
def sklearn_installed():
    """
    Checks that *Sklearn* is available.
    """
    try:
        import sklearn  # noqa: F401
    except ImportError:
        return False
    return True
def lightgbm_installed():
    """
    Checks that *LightGBM* is available.
    """
    try:
        import lightgbm  # noqa: F401
    except ImportError:
        return False
    return True
def xgboost_installed():
    """
    Checks that *XGBoost* is available.

    Returns False when xgboost is missing or too old to expose
    ``XGBoosterDumpModelEx``; emits a warning (but still returns True) for
    versions older than 0.90.
    """
    try:
        import xgboost
    except ImportError:
        return False
    from xgboost.core import _LIB
    try:
        # Presence of this native symbol distinguishes a recent-enough build.
        _LIB.XGBoosterDumpModelEx
    except AttributeError:
        # The version is not recent enough even though it is version 0.6.
        # You need to install xgboost from github and not from pypi.
        return False
    from xgboost import __version__
    vers = LooseVersion(__version__)
    allowed_min = LooseVersion("0.90")
    if vers < allowed_min:
        warnings.warn("The converter works for xgboost >= 0.9. Different versions might not.")
    return True
def pandas_installed():
    """
    Checks that *Pandas* is available.
    """
    try:
        import pandas  # noqa: F401
        return True
    except ImportError:
        return False
class _Constants(object):
"""
Class enabling the proper definition of constants.
"""
def __init__(self, constants, other_constants=None):
for constant in dir(constants):
if constant.isupper():
setattr(self, constant, getattr(constants, constant))
for constant in dir(other_constants):
if constant.isupper():
setattr(self, constant, getattr(other_constants, constant))
def __setattr__(self, name, value):
if name in self.__dict__:
raise ConstantError("Overwriting a constant is not allowed {}".format(name))
self.__dict__[name] = value
|
flh/odoo
|
refs/heads/master
|
addons/point_of_sale/report/pos_payment_report.py
|
380
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
class pos_payment_report(report_sxw.rml_parse):
    """RML parser for the point-of-sale payment report.

    Exposes ``pos_payment`` (today's paid/invoiced order-line rows) and
    ``pos_payment_total`` (their running total) to the report template.
    """
    def __init__(self, cr, uid, name, context):
        super(pos_payment_report, self).__init__(cr, uid, name, context=context)
        # Running total accumulated by _pos_payment, read by _pos_payment_total.
        self.total = 0.0
        self.localcontext.update({
            'time': time,
            'pos_payment': self._pos_payment,
            'pos_payment_total':self._pos_payment_total,
        })
    def _pos_payment(self, obj):
        """Return today's paid/invoiced order-line rows.

        If ``obj.id`` is an existing pos_order, only that order's lines are
        returned; otherwise the lines of all of today's orders are.
        Also accumulates ``self.total`` as a side effect.
        """
        self.total = 0
        data = {}
        # Pass values as query parameters instead of interpolating them into
        # the SQL string, so they are always escaped by the driver.
        self.cr.execute("select id from pos_order where id = %s", (obj.id,))
        # Shared query text for both branches (the per-order branch only
        # appends an extra id filter).
        lines_sql = (
            "select pt.name,pp.default_code as code,pol.qty,pu.name as uom,pol.discount,pol.price_unit, "
            "(pol.price_unit * pol.qty * (1 - (pol.discount) / 100.0)) as total "
            "from pos_order as po,pos_order_line as pol,product_product as pp,product_template as pt, product_uom as pu "
            "where pt.id=pp.product_tmpl_id and pp.id=pol.product_id and po.id = pol.order_id and pu.id=pt.uom_id "
            "and po.state IN ('paid','invoiced') and to_char(date_trunc('day',po.date_order),'YYYY-MM-DD')::date = current_date"
        )
        if self.cr.fetchone():
            self.cr.execute(lines_sql + " and po.id=%s", (obj.id,))
        else:
            self.cr.execute(lines_sql)
        data = self.cr.dictfetchall()
        # NOTE(review): the running total ignores the discount column even
        # though the per-row 'total' applies it — confirm this is intended.
        for d in data:
            self.total += d['price_unit'] * d['qty']
        return data
    def _pos_payment_total(self, o):
        """Return the total accumulated by the last _pos_payment call."""
        return self.total
class report_pos_payment(osv.AbstractModel):
    """Report wrapper exposing pos_payment_report as the
    point_of_sale.report_payment QWeb report."""
    _name = 'report.point_of_sale.report_payment'
    _inherit = 'report.abstract_report'
    _template = 'point_of_sale.report_payment'
    _wrapped_report_class = pos_payment_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
zahanm/foodpedia
|
refs/heads/master
|
django/contrib/gis/admin/__init__.py
|
637
|
# Getting the normal admin routines, classes, and `site` instance.
from django.contrib.admin import autodiscover, site, AdminSite, ModelAdmin, StackedInline, TabularInline, HORIZONTAL, VERTICAL
# Geographic admin options classes and widgets.
from django.contrib.gis.admin.options import GeoModelAdmin
from django.contrib.gis.admin.widgets import OpenLayersWidget
# OSMGeoAdmin depends on optional GDAL support in the GIS build; expose a
# flag so callers can feature-detect instead of importing it themselves.
try:
    from django.contrib.gis.admin.options import OSMGeoAdmin
    HAS_OSM = True
except ImportError:
    HAS_OSM = False
|
nacc/autotest
|
refs/heads/master
|
client/tests/fsstress/fsstress.py
|
2
|
import os
from autotest.client import test, utils
class fsstress(test.test):
    """Autotest wrapper around the LTP fsstress filesystem stress tool."""
    version = 1
    def initialize(self):
        # fsstress is built from source, so a compiler must be present.
        self.job.require_gcc()
    # http://www.zip.com.au/~akpm/linux/patches/stuff/ext3-tools.tar.gz
    def setup(self, tarball = 'ext3-tools.tar.gz'):
        # Fetch/unpack the ext3-tools tarball, apply the LTP and makefile
        # patches, and build the fsstress binary.
        self.tarball = utils.unmap_url(self.bindir, tarball, self.tmpdir)
        utils.extract_tarball_to_dir(self.tarball, self.srcdir)
        os.chdir(self.srcdir)
        utils.system('patch -p1 < ../fsstress-ltp.patch')
        utils.system('patch -p1 < ../makefile.patch')
        utils.make('fsstress')
    def run_once(self, testdir = None, extra_args = '', nproc = '1000', nops = '1000'):
        # Default to the per-test temporary directory when none is given.
        if not testdir:
            testdir = self.tmpdir
        # -d: target dir, -p: number of processes, -n: ops per process.
        args = '-d %s -p %s -n %s %s' % (testdir, nproc, nops, extra_args)
        cmd = self.srcdir + '/fsstress ' + args
        utils.system(cmd)
|
dbrattli/RxPY
|
refs/heads/master
|
rx/testing/reactivetest.py
|
1
|
import math
import types
from rx.notification import OnNext, OnError, OnCompleted
from .recorded import Recorded
from .subscription import Subscription
def is_prime(i):
    """Tests if number is prime or not"""
    if i <= 1:
        return False
    # Only divisors up to floor(sqrt(i)) need to be checked.
    limit = int(math.floor(math.sqrt(i)))
    return all(i % divisor for divisor in range(2, limit + 1))
# New predicate tests
class OnNextPredicate(object):
    """Equality helper matching an OnNext ('N') notification whose value
    satisfies ``predicate``."""
    def __init__(self, predicate):
        self.predicate = predicate
    def __eq__(self, other):
        # The identity check must use ``is``: ``other == self`` re-enters
        # this __eq__ (directly, or via the reflected comparison when the
        # other operand returns NotImplemented) and recurses forever.
        if other is self:
            return True
        if other is None:
            return False
        if other.kind != 'N':
            return False
        return self.predicate(other.value)
class OnErrorPredicate(object):
    """Equality helper matching an OnError ('E') notification whose
    exception satisfies ``predicate``."""
    def __init__(self, predicate):
        self.predicate = predicate
    def __eq__(self, other):
        # The identity check must use ``is``: ``other == self`` re-enters
        # this __eq__ (directly, or via the reflected comparison when the
        # other operand returns NotImplemented) and recurses forever.
        if other is self:
            return True
        if other is None:
            return False
        if other.kind != 'E':
            return False
        return self.predicate(other.exception)
class ReactiveTest(object):
    """Factory helpers for building recorded notifications and subscriptions
    in virtual-time scheduler tests."""
    # Default virtual times used by the test scheduler.
    created = 100
    subscribed = 200
    disposed = 1000
    @classmethod
    def on_next(cls, ticks, value):
        # A plain function is treated as a predicate over the emitted value.
        if isinstance(value, types.FunctionType):
            return Recorded(ticks, OnNextPredicate(value))
        return Recorded(ticks, OnNext(value))
    @classmethod
    def on_error(cls, ticks, exception):
        # A plain function is treated as a predicate over the raised exception.
        if isinstance(exception, types.FunctionType):
            return Recorded(ticks, OnErrorPredicate(exception))
        return Recorded(ticks, OnError(exception))
    @classmethod
    def on_completed(cls, ticks):
        return Recorded(ticks, OnCompleted())
    @classmethod
    def subscribe(cls, start, end):
        return Subscription(start, end)
|
ntuecon/server
|
refs/heads/master
|
pyenv/Lib/site-packages/twisted/persisted/test/test_styles.py
|
13
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.persisted.styles}.
"""
import pickle
from twisted.trial import unittest
from twisted.persisted.styles import unpickleMethod, _UniversalPicklingError
class Foo:
    """
    Helper class: provides a bound method for the unpickleMethod tests.
    """
    def method(self):
        """
        Helper method.
        """
class Bar:
    """
    Helper class without a ``method`` attribute, used to exercise the
    fallback path of unpickleMethod.
    """
def sampleFunction():
    """
    A sample function for pickling.
    """
# Lambdas have no importable qualified name, so pickling one must fail.
lambdaExample = lambda x: x
class UniversalPicklingErrorTests(unittest.TestCase):
    """
    Tests the L{_UniversalPicklingError} exception.
    """
    def raise_UniversalPicklingError(self):
        """
        Raise L{UniversalPicklingError}.
        """
        raise _UniversalPicklingError
    def test_handledByPickleModule(self):
        """
        Handling L{pickle.PicklingError} handles
        L{_UniversalPicklingError}.
        """
        self.assertRaises(pickle.PicklingError,
                          self.raise_UniversalPicklingError)
    def test_handledBycPickleModule(self):
        """
        Handling L{cPickle.PicklingError} handles
        L{_UniversalPicklingError}.
        """
        # cPickle only exists on Python 2; skip everywhere else.
        try:
            import cPickle
        except ImportError:
            raise unittest.SkipTest("cPickle not available.")
        else:
            self.assertRaises(cPickle.PicklingError,
                              self.raise_UniversalPicklingError)
class UnpickleMethodTests(unittest.TestCase):
    """
    Tests for the unpickleMethod function.
    """
    def test_instanceBuildingNamePresent(self):
        """
        L{unpickleMethod} returns an instance method bound to the
        instance passed to it.
        """
        foo = Foo()
        m = unpickleMethod('method', foo, Foo)
        # Equal to the naturally-bound method, but a distinct object.
        self.assertEqual(m, foo.method)
        self.assertIsNot(m, foo.method)
    def test_instanceBuildingNameNotPresent(self):
        """
        If the named method is not present in the class,
        L{unpickleMethod} finds a method on the class of the instance
        and returns a bound method from there.
        """
        foo = Foo()
        # Bar has no 'method'; the lookup falls back to type(foo) == Foo.
        m = unpickleMethod('method', foo, Bar)
        self.assertEqual(m, foo.method)
        self.assertIsNot(m, foo.method)
    def test_primeDirective(self):
        """
        We do not contaminate normal function pickling with concerns from
        Twisted.
        """
        # Expected protocol-0 pickle: a global reference "c<module>\n<name>"
        # followed by a put opcode whose memo index differs between the
        # pickle (0) and cPickle (1) implementations.
        def expected(n):
            return "\n".join([
                "c" + __name__,
                sampleFunction.__name__, "p" + n, "."
            ]).encode("ascii")
        self.assertEqual(pickle.dumps(sampleFunction, protocol=0),
                         expected("0"))
        try:
            import cPickle
        except:
            pass
        else:
            self.assertEqual(
                cPickle.dumps(sampleFunction, protocol=0),
                expected("1")
            )
    def test_lambdaRaisesPicklingError(self):
        """
        Pickling a C{lambda} function ought to raise a L{pickle.PicklingError}.
        """
        self.assertRaises(pickle.PicklingError, pickle.dumps, lambdaExample)
        try:
            import cPickle
        except:
            pass
        else:
            self.assertRaises(cPickle.PicklingError, cPickle.dumps,
                              lambdaExample)
|
baylee-d/osf.io
|
refs/heads/develop
|
osf/migrations/0080_add_abstractprovider.py
|
6
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-15 19:48
from __future__ import unicode_literals
import dirtyfields.dirtyfields
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import osf.models.base
import osf.utils.datetime_aware_jsonfield
import osf.utils.fields
class Migration(migrations.Migration):
    # Replaces the concrete PreprintProvider model with a concrete
    # AbstractProvider base table plus a PreprintProvider proxy, migrating
    # existing rows across with raw (reversible) SQL.
    dependencies = [
        ('osf', '0079_merge_20180207_1545'),
    ]
    operations = [
        # 1. Create the new concrete AbstractProvider table.
        migrations.CreateModel(
            name='AbstractProvider',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
                ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
                ('_id', models.CharField(db_index=True, default=osf.models.base.generate_object_id, max_length=24, unique=True)),
                ('reviews_workflow', models.CharField(blank=True, choices=[(None, 'None'), ('pre-moderation', 'Pre-Moderation'), ('post-moderation', 'Post-Moderation')], max_length=15, null=True)),
                ('reviews_comments_private', models.NullBooleanField()),
                ('reviews_comments_anonymous', models.NullBooleanField()),
                ('type', models.CharField(choices=[('osf.preprintprovider', 'preprint provider')], db_index=True, max_length=255)),
                ('name', models.CharField(max_length=128)),
                ('advisory_board', models.TextField(blank=True, default='')),
                ('description', models.TextField(blank=True, default='')),
                ('domain', models.URLField(blank=True, default='')),
                ('domain_redirect_enabled', models.BooleanField(default=False)),
                ('external_url', models.URLField(blank=True, null=True)),
                ('email_contact', models.CharField(blank=True, max_length=200, null=True)),
                ('email_support', models.CharField(blank=True, max_length=200, null=True)),
                ('social_twitter', models.CharField(blank=True, max_length=200, null=True)),
                ('social_facebook', models.CharField(blank=True, max_length=200, null=True)),
                ('social_instagram', models.CharField(blank=True, max_length=200, null=True)),
                ('footer_links', models.TextField(blank=True, default='')),
                ('facebook_app_id', models.BigIntegerField(blank=True, null=True)),
                ('example', models.CharField(blank=True, max_length=20, null=True)),
                ('allow_submissions', models.BooleanField(default=True)),
                ('share_publish_type', models.CharField(choices=[('Preprint', 'Preprint'), ('Thesis', 'Thesis')], default='Preprint', help_text='This SHARE type will be used when pushing publications to SHARE', max_length=32, null=True)),
                ('share_source', models.CharField(blank=True, max_length=200, null=True)),
                ('share_title', models.TextField(blank=True, default='', null=True)),
                ('additional_providers', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), blank=True, default=list, null=True, size=None)),
                ('access_token', osf.utils.fields.EncryptedTextField(blank=True, null=True)),
                ('preprint_word', models.CharField(choices=[('preprint', 'Preprint'), ('paper', 'Paper'), ('thesis', 'Thesis'), ('none', 'None')], default='preprint', max_length=10, null=True)),
                ('subjects_acceptable', osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONField(blank=True, default=list, encoder=osf.utils.datetime_aware_jsonfield.DateTimeAwareJSONEncoder, null=True)),
                ('default_license', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='default_license', to='osf.NodeLicense')),
                ('licenses_acceptable', models.ManyToManyField(blank=True, related_name='licenses_acceptable', to='osf.NodeLicense')),
            ],
            options={
                'abstract': False,
            },
            bases=(dirtyfields.dirtyfields.DirtyFieldsMixin, models.Model),
        ),
        # 2. Copy provider rows (and license M2M rows) into the new tables;
        #    the reverse SQL restores the M2M rows on rollback.
        migrations.RunSQL(
            [
                """
                INSERT INTO osf_abstractprovider (id, created, modified, _id,
                  reviews_workflow, reviews_comments_private, reviews_comments_anonymous, name, advisory_board, description,
                  domain, domain_redirect_enabled, external_url, email_contact, email_support, social_twitter, social_facebook, social_instagram,
                  footer_links, facebook_app_id, example, allow_submissions, share_publish_type, share_source, share_title, additional_providers,
                  access_token, preprint_word, subjects_acceptable, default_license_id, type)
                SELECT id, created, modified, _id,
                  reviews_workflow, reviews_comments_private, reviews_comments_anonymous, name, advisory_board, description,
                  domain, domain_redirect_enabled, external_url, email_contact, email_support, social_twitter, social_facebook, social_instagram,
                  footer_links, facebook_app_id, example, allow_submissions, share_publish_type, share_source, share_title, additional_providers,
                  access_token, preprint_word, subjects_acceptable, default_license_id, 'osf.preprintprovider' as type
                FROM osf_preprintprovider;
                INSERT INTO osf_abstractprovider_licenses_acceptable (id, abstractprovider_id, nodelicense_id)
                  SELECT id, preprintprovider_id, nodelicense_id
                  FROM osf_preprintprovider_licenses_acceptable
                """
            ], [
                """
                INSERT INTO osf_preprintprovider_licenses_acceptable (id, preprintprovider_id, nodelicense_id)
                  SELECT id, abstractprovider_id, nodelicense_id
                  FROM osf_abstractprovider_licenses_acceptable
                """
            ]
        ),
        # 3. Repoint subjects at the new base table.
        migrations.AlterField(
            model_name='subject',
            name='provider',
            field=models.ForeignKey(on_delete=models.deletion.CASCADE, related_name='subjects', to='osf.AbstractProvider'),
        ),
        # 4. Reverse-only SQL: restore rows into osf_preprintprovider when
        #    this migration is rolled back.
        migrations.RunSQL(
            migrations.RunSQL.noop,
            [
                """
                INSERT INTO osf_preprintprovider (id, created, modified, _id,
                  reviews_workflow, reviews_comments_private, reviews_comments_anonymous, name, advisory_board, description,
                  domain, domain_redirect_enabled, external_url, email_contact, email_support, social_twitter, social_facebook, social_instagram,
                  footer_links, facebook_app_id, example, allow_submissions, share_publish_type, share_source, share_title, additional_providers,
                  access_token, preprint_word, subjects_acceptable, default_license_id)
                SELECT id, created, modified, _id,
                  reviews_workflow, reviews_comments_private, reviews_comments_anonymous, name, advisory_board, description,
                  domain, domain_redirect_enabled, external_url, email_contact, email_support, social_twitter, social_facebook, social_instagram,
                  footer_links, facebook_app_id, example, allow_submissions, share_publish_type, share_source, share_title, additional_providers,
                  access_token, preprint_word, subjects_acceptable, default_license_id
                FROM osf_abstractprovider
                """
            ]
        ),
        # 5. Drop the old concrete model and recreate PreprintProvider as a
        #    proxy of AbstractProvider carrying the moderation permissions.
        migrations.RemoveField(
            model_name='preprintprovider',
            name='default_license',
        ),
        migrations.RemoveField(
            model_name='preprintprovider',
            name='licenses_acceptable',
        ),
        migrations.DeleteModel(
            name='PreprintProvider',
        ),
        migrations.CreateModel(
            name='PreprintProvider',
            fields=[
            ],
            options={
                'indexes': [],
                'proxy': True,
                'permissions': (('view_submissions', 'Can view all submissions to this provider'), ('add_moderator', 'Can add other users as moderators for this provider'), ('view_actions', 'Can view actions on submissions to this provider'), ('add_reviewer', 'Can add other users as reviewers for this provider'), ('review_assigned_submissions', 'Can submit reviews for submissions to this provider which have been assigned to this user'), ('assign_reviewer', 'Can assign reviewers to review specific submissions to this provider'), ('set_up_moderation', 'Can set up moderation for this provider'), ('view_assigned_submissions', 'Can view submissions to this provider which have been assigned to this user'), ('edit_reviews_settings', 'Can edit reviews settings for this provider'), ('accept_submissions', 'Can accept submissions to this provider'), ('reject_submissions', 'Can reject submissions to this provider'), ('edit_review_comments', 'Can edit comments on actions for this provider'), ('view_preprintprovider', 'Can view preprint provider details')),
            },
            bases=('osf.abstractprovider',),
        ),
    ]
|
godfather1103/WeiboRobot
|
refs/heads/master
|
python27/1.0/lib/test/test_linuxaudiodev.py
|
134
|
from test import test_support
test_support.requires('audio')
from test.test_support import findfile, run_unittest
import errno
import sys
import audioop
import unittest
linuxaudiodev = test_support.import_module('linuxaudiodev', deprecated=True)
sunaudio = test_support.import_module('sunaudio', deprecated=True)
SND_FORMAT_MULAW_8 = 1
class LinuxAudioDevTests(unittest.TestCase):
    """Smoke tests for the (deprecated) Python 2 linuxaudiodev module."""
    def setUp(self):
        # Open the audio device for writing; closed again in tearDown.
        self.dev = linuxaudiodev.open('w')
    def tearDown(self):
        self.dev.close()
    def test_methods(self):
        # at least check that these methods can be invoked
        self.dev.bufsize()
        self.dev.obufcount()
        self.dev.obuffree()
        self.dev.getptr()
        self.dev.fileno()
    def test_play_sound_file(self):
        # Decode the bundled .au test file and play it through the device.
        path = findfile("audiotest.au")
        fp = open(path, 'r')
        size, enc, rate, nchannels, extra = sunaudio.gethdr(fp)
        data = fp.read()
        fp.close()
        if enc != SND_FORMAT_MULAW_8:
            self.fail("Expect .au file with 8-bit mu-law samples")
        # convert the data to 16-bit signed
        data = audioop.ulaw2lin(data, 2)
        # set the data format
        if sys.byteorder == 'little':
            fmt = linuxaudiodev.AFMT_S16_LE
        else:
            fmt = linuxaudiodev.AFMT_S16_BE
        # set parameters based on .au file headers
        self.dev.setparameters(rate, 16, nchannels, fmt)
        self.dev.write(data)
        self.dev.flush()
    def test_errors(self):
        # Each invalid setparameters call should raise ValueError with a
        # specific, documented message.
        size = 8
        fmt = linuxaudiodev.AFMT_U8
        rate = 8000
        nchannels = 1
        try:
            self.dev.setparameters(-1, size, nchannels, fmt)
        except ValueError, err:
            self.assertEqual(err.args[0], "expected rate >= 0, not -1")
        try:
            self.dev.setparameters(rate, -2, nchannels, fmt)
        except ValueError, err:
            self.assertEqual(err.args[0], "expected sample size >= 0, not -2")
        try:
            self.dev.setparameters(rate, size, 3, fmt)
        except ValueError, err:
            self.assertEqual(err.args[0], "nchannels must be 1 or 2, not 3")
        try:
            self.dev.setparameters(rate, size, nchannels, 177)
        except ValueError, err:
            self.assertEqual(err.args[0], "unknown audio encoding: 177")
        try:
            self.dev.setparameters(rate, size, nchannels, linuxaudiodev.AFMT_U16_LE)
        except ValueError, err:
            self.assertEqual(err.args[0], "for linear unsigned 16-bit little-endian "
                             "audio, expected sample size 16, not 8")
        try:
            self.dev.setparameters(rate, 16, nchannels, fmt)
        except ValueError, err:
            self.assertEqual(err.args[0], "for linear unsigned 8-bit audio, expected "
                             "sample size 8, not 16")
def test_main():
    # Probe the audio device first so the suite is skipped (rather than
    # erroring) on machines without a usable audio device.
    try:
        dsp = linuxaudiodev.open('w')
    except linuxaudiodev.error, msg:
        if msg.args[0] in (errno.EACCES, errno.ENOENT, errno.ENODEV, errno.EBUSY):
            raise unittest.SkipTest(msg)
        raise
    dsp.close()
    run_unittest(LinuxAudioDevTests)
# Allow running this test file directly.
if __name__ == '__main__':
    test_main()
|
kjagoo/wger_stark
|
refs/heads/master
|
wger/weight/__init__.py
|
15
|
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from wger import get_version

# Application version string, exposed at package level so other modules
# can read it via ``from wger.weight import VERSION``.
VERSION = get_version()
|
garthylou/Libreosteo
|
refs/heads/master
|
libreosteoweb/api/renderers.py
|
1
|
# This file is part of LibreOsteo.
#
# LibreOsteo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LibreOsteo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LibreOsteo. If not, see <http://www.gnu.org/licenses/>.
from rest_framework_csv import renderers
class PatientCSVRenderer(renderers.CSVRenderer):
    """CSV renderer for patient exports.

    ``header`` fixes both the set of exported fields and the column
    order of the generated CSV file.
    """
    header = [
        'family_name',
        'original_name',
        'first_name',
        'birth_date',
        'sex',
        'address_street',
        'address_complement',
        'address_zipcode',
        'address_city',
        'email',
        'phone',
        'mobile_phone',
        'job',
        'hobbies',
        'smoker',
        'laterality',
        'important_info',
        'current_treatment',
        'surgical_history',
        'medical_history',
        'family_history',
        'trauma_history',
        'medical_reports',
    ]
class ExaminationCSVRenderer(renderers.CSVRenderer):
    """CSV renderer for examination exports.

    Dotted names (e.g. ``patient_detail.first_name``) select nested
    fields of the serialized examination; ``header`` also fixes the
    column order.
    """
    header = [
        'patient_detail.first_name',
        'patient_detail.family_name',
        'patient_detail.birth_date',
        'date',
        'reason',
        'reason_description',
        'orl',
        'visceral',
        'pulmo',
        'uro_gyneco',
        'periphery',
        'general_state',
        'medical_examination',
        'diagnosis',
        'treatments',
        'conclusion',
        'therapeut_detail.first_name',
        'therapeut_detail.last_name',
    ]
class InvoiceCSVRenderer(renderers.CSVRenderer):
    """CSV renderer for invoice exports; uses the default CSVRenderer
    behaviour (no fixed header)."""
|
sasukeh/cinder
|
refs/heads/master
|
cinder/tests/unit/api/contrib/test_volume_encryption_metadata.py
|
23
|
# Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import webob
from cinder.api.contrib import volume_encryption_metadata
from cinder import context
from cinder import db
from cinder import test
from cinder.tests.unit.api import fakes
def return_volume_type_encryption_metadata(context, volume_type_id):
    """Stub replacement for ``db.sqlalchemy.api.volume_type_encryption_get``.

    Ignores both arguments and always returns the canned metadata from
    ``stub_volume_type_encryption()``.
    """
    return stub_volume_type_encryption()
def stub_volume_type_encryption():
    """Return the canned volume-type encryption metadata used by the tests."""
    return {
        'cipher': 'cipher',
        'key_size': 256,
        'provider': 'nova.volume.encryptors.base.VolumeEncryptor',
        'volume_type_id': 'volume_type',
        'control_location': 'front-end',
    }
class VolumeEncryptionMetadataTest(test.TestCase):
    """Tests for the os-volume-encryption-metadata API extension.

    The volume-type encryption DB lookup is stubbed so the tests can
    drive the /v2/<tenant>/volumes/<volume_id>/encryption endpoints
    against a volume row created directly in the test database.
    """

    @staticmethod
    def _create_volume(context,
                       display_name='test_volume',
                       display_description='this is a test volume',
                       status='creating',
                       availability_zone='fake_az',
                       host='fake_host',
                       size=1,
                       encryption_key_id='fake_key'):
        """Create a volume object."""
        volume = {
            'size': size,
            'user_id': 'fake',
            'project_id': 'fake',
            'status': status,
            'display_name': display_name,
            'display_description': display_description,
            'attach_status': 'detached',
            'availability_zone': availability_zone,
            'host': host,
            'encryption_key_id': encryption_key_id,
        }
        return db.volume_create(context, volume)['id']

    def setUp(self):
        """Stub the encryption lookup and create one encrypted test volume."""
        super(VolumeEncryptionMetadataTest, self).setUp()
        self.controller = (volume_encryption_metadata.
                           VolumeEncryptionMetadataController())
        # Every volume-type encryption lookup returns the canned metadata.
        self.stubs.Set(db.sqlalchemy.api, 'volume_type_encryption_get',
                       return_volume_type_encryption_metadata)
        self.ctxt = context.RequestContext('fake', 'fake')
        self.volume_id = self._create_volume(self.ctxt)
        self.addCleanup(db.volume_destroy, self.ctxt.elevated(),
                        self.volume_id)

    def test_index(self):
        """GET .../encryption returns the full encryption metadata dict."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption'
                                  % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        res_dict = json.loads(res.body)
        expected = {
            "encryption_key_id": "fake_key",
            "control_location": "front-end",
            "cipher": "cipher",
            "provider": "nova.volume.encryptors.base.VolumeEncryptor",
            "key_size": 256,
        }
        self.assertEqual(expected, res_dict)

    def test_index_bad_tenant_id(self):
        """A tenant id not matching the context yields 400 Bad Request."""
        req = webob.Request.blank('/v2/%s/volumes/%s/encryption'
                                  % ('bad-tenant-id', self.volume_id))
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(400, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'badRequest': {'code': 400,
                                   'message': 'Malformed request url'}}
        self.assertEqual(expected, res_dict)

    def test_index_bad_volume_id(self):
        """An unknown volume id yields 404 with a VolumeNotFound message."""
        bad_volume_id = 'bad_volume_id'
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption'
                                  % bad_volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(404, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'itemNotFound': {'code': 404,
                                     'message': 'VolumeNotFound: Volume '
                                                '%s could not be found.'
                                                % bad_volume_id}}
        self.assertEqual(expected, res_dict)

    def test_show_key(self):
        """GET .../encryption/encryption_key_id returns the raw key id."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('fake_key', res.body)

    def test_show_control(self):
        """GET .../encryption/control_location returns the raw value."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'control_location' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('front-end', res.body)

    def test_show_provider(self):
        """GET .../encryption/provider returns the raw provider string."""
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'provider' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('nova.volume.encryptors.base.VolumeEncryptor',
                         res.body)

    def test_show_bad_tenant_id(self):
        """Showing a single field with a bad tenant id yields 400."""
        req = webob.Request.blank('/v2/%s/volumes/%s/encryption/'
                                  'encryption_key_id' % ('bad-tenant-id',
                                                         self.volume_id))
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(400, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'badRequest': {'code': 400,
                                   'message': 'Malformed request url'}}
        self.assertEqual(expected, res_dict)

    def test_show_bad_volume_id(self):
        """Showing a single field for an unknown volume yields 404."""
        bad_volume_id = 'bad_volume_id'
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % bad_volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(404, res.status_code)
        res_dict = json.loads(res.body)
        expected = {'itemNotFound': {'code': 404,
                                     'message': 'VolumeNotFound: Volume '
                                                '%s could not be found.'
                                                % bad_volume_id}}
        self.assertEqual(expected, res_dict)

    def test_retrieve_key_admin(self):
        """An admin context can also read the encryption key id."""
        ctxt = context.RequestContext('fake', 'fake', is_admin=True)
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % self.volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual('fake_key', res.body)

    def test_show_volume_not_encrypted_type(self):
        """A field lookup on an unencrypted volume returns an empty body."""
        self.stubs.Set(db.sqlalchemy.api, 'volume_type_encryption_get',
                       lambda *args, **kwargs: None)
        volume_id = self._create_volume(self.ctxt, encryption_key_id=None)
        self.addCleanup(db.volume_destroy, self.ctxt.elevated(), volume_id)
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption/'
                                  'encryption_key_id' % volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        self.assertEqual(0, len(res.body))

    def test_index_volume_not_encrypted_type(self):
        """Index on an unencrypted volume returns only a null key id."""
        self.stubs.Set(db.sqlalchemy.api, 'volume_type_encryption_get',
                       lambda *args, **kwargs: None)
        volume_id = self._create_volume(self.ctxt, encryption_key_id=None)
        self.addCleanup(db.volume_destroy, self.ctxt.elevated(), volume_id)
        req = webob.Request.blank('/v2/fake/volumes/%s/encryption'
                                  % volume_id)
        res = req.get_response(fakes.wsgi_app(fake_auth_context=self.ctxt))
        self.assertEqual(200, res.status_code)
        res_dict = json.loads(res.body)
        expected = {
            'encryption_key_id': None
        }
        self.assertEqual(expected, res_dict)
|
EvanK/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/cloudstack/cs_network_offering.py
|
14
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, David Passante (@dpassante)
# Copyright (c) 2017, René Moser <mail@renemoser.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_network_offering
short_description: Manages network offerings on Apache CloudStack based clouds.
description:
- Create, update, enable, disable and remove network offerings.
version_added: '2.5'
author: "David Passante (@dpassante)"
options:
state:
description:
- State of the network offering.
choices: [ enabled, present, disabled, absent]
default: present
display_text:
description:
- Display text of the network offerings.
guest_ip_type:
description:
- Guest type of the network offering.
choices: [ Shared, Isolated ]
name:
description:
- The name of the network offering.
required: true
supported_services:
description:
- Services supported by the network offering.
- One or more of the choices.
choices: [ Dns, PortForwarding, Dhcp, SourceNat, UserData, Firewall, StaticNat, Vpn, Lb ]
aliases: [ supported_service ]
traffic_type:
description:
- The traffic type for the network offering.
default: Guest
availability:
description:
- The availability of network offering. Default value is Optional
conserve_mode:
description:
- Whether the network offering has IP conserve mode enabled.
type: bool
details:
description:
- Network offering details in key/value pairs.
- with service provider as a value
choices: [ internallbprovider, publiclbprovider ]
egress_default_policy:
description:
- Whether the default egress policy is allow or to deny.
choices: [ allow, deny ]
persistent:
description:
- True if network offering supports persistent networks
- defaulted to false if not specified
type: bool
keepalive_enabled:
description:
- If true keepalive will be turned on in the loadbalancer.
- At the time of writing this has only an effect on haproxy.
- the mode http and httpclose options are unset in the haproxy conf file.
type: bool
max_connections:
description:
- Maximum number of concurrent connections supported by the network offering.
network_rate:
description:
- Data transfer rate in megabits per second allowed.
service_capabilities:
description:
- Desired service capabilities as part of network offering.
aliases: [ service_capability ]
service_offering:
description:
- The service offering name or ID used by virtual router provider.
service_provider:
description:
- Provider to service mapping.
- If not specified, the provider for the service will be mapped to the default provider on the physical network.
aliases: [service_provider]
specify_ip_ranges:
description:
- Wheter the network offering supports specifying IP ranges.
- Defaulted to C(no) by the API if not specified.
type: bool
specify_vlan:
description:
- Whether the network offering supports vlans or not.
type: bool
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Create a network offering and enable it
local_action:
module: cs_network_offering
name: my_network_offering
display_text: network offering description
state: enabled
guest_ip_type: Isolated
supported_services: [ Dns, PortForwarding, Dhcp, SourceNat, UserData, Firewall, StaticNat, Vpn, Lb ]
service_providers:
- { service: 'dns', provider: 'virtualrouter' }
- { service: 'dhcp', provider: 'virtualrouter' }
- name: Remove a network offering
local_action:
module: cs_network_offering
name: my_network_offering
state: absent
'''
RETURN = '''
---
id:
description: UUID of the network offering.
returned: success
type: str
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
name:
description: The name of the network offering.
returned: success
type: str
sample: MyCustomNetworkOffering
display_text:
description: The display text of the network offering.
returned: success
type: str
sample: My network offering
state:
description: The state of the network offering.
returned: success
type: str
sample: Enabled
guest_ip_type:
description: Guest type of the network offering.
returned: success
type: str
sample: Isolated
availability:
description: The availability of network offering.
returned: success
type: str
sample: Optional
service_offering_id:
description: The service offering ID.
returned: success
type: str
sample: c5f7a5fc-43f8-11e5-a151-feff819cdc9f
max_connections:
description: The maximum number of concurrents connections to be handled by LB.
returned: success
type: int
sample: 300
network_rate:
description: The network traffic transfer ate in Mbit/s.
returned: success
type: int
sample: 200
traffic_type:
description: The traffic type.
returned: success
type: str
sample: Guest
egress_default_policy:
description: Default egress policy.
returned: success
type: str
sample: allow
is_persistent:
description: Whether persistent networks are supported or not.
returned: success
type: bool
sample: false
is_default:
description: Whether network offering is the default offering or not.
returned: success
type: bool
sample: false
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackNetworkOffering(AnsibleCloudStack):
    """Manage CloudStack network offerings.

    Wraps the listNetworkOfferings / createNetworkOffering /
    updateNetworkOffering / deleteNetworkOffering API calls used by
    ``main()`` to implement the module's present/absent/enabled/disabled
    states.
    """

    def __init__(self, module):
        super(AnsibleCloudStackNetworkOffering, self).__init__(module)
        # Map CloudStack API result keys to this module's return keys.
        self.returns = {
            'guestiptype': 'guest_ip_type',
            'availability': 'availability',
            'serviceofferingid': 'service_offering_id',
            'networkrate': 'network_rate',
            'maxconnections': 'max_connections',
            'traffictype': 'traffic_type',
            'isdefault': 'is_default',
            'ispersistent': 'is_persistent',
        }
        # Cache for the looked-up offering (see get_network_offering).
        self.network_offering = None

    def get_service_offering_id(self):
        """Resolve the ``service_offering`` param (name or ID) to an ID.

        Returns None when the parameter is unset; fails the module when
        no matching system service offering exists.
        """
        service_offering = self.module.params.get('service_offering')
        if not service_offering:
            return None
        args = {
            'issystem': True
        }
        service_offerings = self.query_api('listServiceOfferings', **args)
        if service_offerings:
            for s in service_offerings['serviceoffering']:
                if service_offering in [s['name'], s['id']]:
                    return s['id']
        self.fail_json(msg="Service offering '%s' not found" % service_offering)

    def get_network_offering(self):
        """Fetch and cache the offering matching name and guest IP type."""
        if self.network_offering:
            return self.network_offering
        args = {
            'name': self.module.params.get('name'),
            # Bug fix: the module parameter is named 'guest_ip_type'.
            # The previous code read the non-existent key 'guest_type',
            # which always evaluated to None and so never filtered the
            # listNetworkOfferings call by guest IP type.
            'guestiptype': self.module.params.get('guest_ip_type'),
        }
        no = self.query_api('listNetworkOfferings', **args)
        if no:
            self.network_offering = no['networkoffering'][0]
        return self.network_offering

    def create_or_update(self):
        """Create the offering if missing, then apply any pending updates."""
        network_offering = self.get_network_offering()
        if not network_offering:
            network_offering = self.create_network_offering()
        return self.update_network_offering(network_offering=network_offering)

    def create_network_offering(self):
        """Create a new network offering from the module parameters."""
        network_offering = None
        self.result['changed'] = True
        args = {
            'state': self.module.params.get('state'),
            'displaytext': self.module.params.get('display_text'),
            'guestiptype': self.module.params.get('guest_ip_type'),
            'name': self.module.params.get('name'),
            'supportedservices': self.module.params.get('supported_services'),
            'traffictype': self.module.params.get('traffic_type'),
            'availability': self.module.params.get('availability'),
            'conservemode': self.module.params.get('conserve_mode'),
            'details': self.module.params.get('details'),
            'egressdefaultpolicy': self.module.params.get('egress_default_policy') == 'allow',
            'ispersistent': self.module.params.get('persistent'),
            'keepaliveenabled': self.module.params.get('keepalive_enabled'),
            'maxconnections': self.module.params.get('max_connections'),
            'networkrate': self.module.params.get('network_rate'),
            'servicecapabilitylist': self.module.params.get('service_capabilities'),
            'serviceofferingid': self.get_service_offering_id(),
            'serviceproviderlist': self.module.params.get('service_providers'),
            'specifyipranges': self.module.params.get('specify_ip_ranges'),
            'specifyvlan': self.module.params.get('specify_vlan'),
        }
        # These params are not flagged required in the argument spec
        # because they are only mandatory when the offering has to be
        # created (not for update or delete).
        required_params = [
            'display_text',
            'guest_ip_type',
            'supported_services',
            'service_providers',
        ]
        self.module.fail_on_missing_params(required_params=required_params)
        if not self.module.check_mode:
            res = self.query_api('createNetworkOffering', **args)
            network_offering = res['networkoffering']
        return network_offering

    def delete_network_offering(self):
        """Delete the offering if it exists; return its pre-delete data."""
        network_offering = self.get_network_offering()
        if network_offering:
            self.result['changed'] = True
            if not self.module.check_mode:
                self.query_api('deleteNetworkOffering', id=network_offering['id'])
        return network_offering

    def update_network_offering(self, network_offering):
        """Update mutable fields and the enabled/disabled state."""
        if not network_offering:
            return network_offering
        args = {
            'id': network_offering['id'],
            'state': self.module.params.get('state'),
            'displaytext': self.module.params.get('display_text'),
            'name': self.module.params.get('name'),
            'availability': self.module.params.get('availability'),
            'maxconnections': self.module.params.get('max_connections'),
        }
        # The API expects 'Enabled'/'Disabled'; 'present'/'absent' are
        # module-level states and must not be sent to the API.
        if args['state'] in ['enabled', 'disabled']:
            args['state'] = args['state'].title()
        else:
            del args['state']
        if self.has_changed(args, network_offering):
            self.result['changed'] = True
            if not self.module.check_mode:
                res = self.query_api('updateNetworkOffering', **args)
                network_offering = res['networkoffering']
        return network_offering

    def get_result(self, network_offering):
        """Augment the common result dict with the egress default policy."""
        super(AnsibleCloudStackNetworkOffering, self).get_result(network_offering)
        if network_offering:
            self.result['egress_default_policy'] = 'allow' if network_offering.get('egressdefaultpolicy') else 'deny'
        return self.result
def main():
    """Module entry point: build the argument spec, dispatch on state,
    and exit with the offering facts."""
    argument_spec = cs_argument_spec()
    argument_spec.update(dict(
        state=dict(choices=['enabled', 'present', 'disabled', 'absent'], default='present'),
        display_text=dict(),
        guest_ip_type=dict(choices=['Shared', 'Isolated']),
        name=dict(required=True),
        supported_services=dict(type='list', aliases=['supported_service']),
        traffic_type=dict(default='Guest'),
        availability=dict(),
        conserve_mode=dict(type='bool'),
        details=dict(type='list'),
        egress_default_policy=dict(choices=['allow', 'deny']),
        persistent=dict(type='bool'),
        keepalive_enabled=dict(type='bool'),
        max_connections=dict(type='int'),
        network_rate=dict(type='int'),
        service_capabilities=dict(type='list', aliases=['service_capability']),
        service_offering=dict(),
        service_providers=dict(type='list', aliases=['service_provider']),
        specify_ip_ranges=dict(type='bool'),
        specify_vlan=dict(type='bool'),
    ))
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=cs_required_together(),
        supports_check_mode=True
    )
    acs_network_offering = AnsibleCloudStackNetworkOffering(module)
    state = module.params.get('state')
    # 'absent' removes the offering; every other state creates/updates it
    # (enabled/disabled are applied by update_network_offering).
    if state in ['absent']:
        network_offering = acs_network_offering.delete_network_offering()
    else:
        network_offering = acs_network_offering.create_or_update()
    result = acs_network_offering.get_result(network_offering)
    module.exit_json(**result)

if __name__ == '__main__':
    main()
|
AntouanK/rethinkdb
|
refs/heads/next
|
test/rql_test/connections/http_support/werkzeug/testsuite/wsgi.py
|
146
|
# -*- coding: utf-8 -*-
"""
werkzeug.testsuite.wsgi
~~~~~~~~~~~~~~~~~~~~~~~
Tests the WSGI utilities.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import unittest
from os import path
from contextlib import closing
from werkzeug.testsuite import WerkzeugTestCase, get_temporary_directory
from werkzeug.wrappers import BaseResponse
from werkzeug.exceptions import BadRequest, ClientDisconnected
from werkzeug.test import Client, create_environ, run_wsgi_app
from werkzeug import wsgi
from werkzeug._compat import StringIO, BytesIO, NativeStringIO, to_native
class WSGIUtilsTestCase(WerkzeugTestCase):
    """Tests for the helpers in ``werkzeug.wsgi``: host/path extraction,
    SharedDataMiddleware, LimitedStream, and the line/chunk iterators."""

    def test_shareddatamiddleware_get_file_loader(self):
        app = wsgi.SharedDataMiddleware(None, {})
        assert callable(app.get_file_loader('foo'))

    def test_shared_data_middleware(self):
        # Fallback app used for paths the middleware does not serve.
        def null_application(environ, start_response):
            start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
            yield b'NOT FOUND'
        test_dir = get_temporary_directory()
        # Non-ASCII filename exercises the unicode path handling.
        with open(path.join(test_dir, to_native(u'äöü', 'utf-8')), 'w') as test_file:
            test_file.write(u'FOUND')
        app = wsgi.SharedDataMiddleware(null_application, {
            '/': path.join(path.dirname(__file__), 'res'),
            '/sources': path.join(path.dirname(__file__), 'res'),
            '/pkg': ('werkzeug.debug', 'shared'),
            '/foo': test_dir
        })
        for p in '/test.txt', '/sources/test.txt', '/foo/äöü':
            app_iter, status, headers = run_wsgi_app(app, create_environ(p))
            self.assert_equal(status, '200 OK')
            with closing(app_iter) as app_iter:
                data = b''.join(app_iter).strip()
            self.assert_equal(data, b'FOUND')
        # Package-based export ('/pkg') serves files from the module dir.
        app_iter, status, headers = run_wsgi_app(
            app, create_environ('/pkg/debugger.js'))
        with closing(app_iter) as app_iter:
            contents = b''.join(app_iter)
        self.assert_in(b'$(function() {', contents)
        # Unknown paths fall through to the null application.
        app_iter, status, headers = run_wsgi_app(
            app, create_environ('/missing'))
        self.assert_equal(status, '404 NOT FOUND')
        self.assert_equal(b''.join(app_iter).strip(), b'NOT FOUND')

    def test_get_host(self):
        # X-Forwarded-Host wins over SERVER_NAME.
        env = {'HTTP_X_FORWARDED_HOST': 'example.org',
               'SERVER_NAME': 'bullshit', 'HOST_NAME': 'ignore me dammit'}
        self.assert_equal(wsgi.get_host(env), 'example.org')
        self.assert_equal(
            wsgi.get_host(create_environ('/', 'http://example.org')),
            'example.org')

    def test_get_host_multiple_forwarded(self):
        # Only the first entry of a comma-separated forwarded list is used.
        env = {'HTTP_X_FORWARDED_HOST': 'example.com, example.org',
               'SERVER_NAME': 'bullshit', 'HOST_NAME': 'ignore me dammit'}
        self.assert_equal(wsgi.get_host(env), 'example.com')
        self.assert_equal(
            wsgi.get_host(create_environ('/', 'http://example.com')),
            'example.com')

    def test_get_host_validation(self):
        env = {'HTTP_X_FORWARDED_HOST': 'example.org',
               'SERVER_NAME': 'bullshit', 'HOST_NAME': 'ignore me dammit'}
        self.assert_equal(wsgi.get_host(env, trusted_hosts=['.example.org']),
                          'example.org')
        # A host outside trusted_hosts raises BadRequest.
        self.assert_raises(BadRequest, wsgi.get_host, env,
                           trusted_hosts=['example.com'])

    def test_responder(self):
        # wsgi.responder turns a response-returning callable into a WSGI app.
        def foo(environ, start_response):
            return BaseResponse(b'Test')
        client = Client(wsgi.responder(foo), BaseResponse)
        response = client.get('/')
        self.assert_equal(response.status_code, 200)
        self.assert_equal(response.data, b'Test')

    def test_pop_path_info(self):
        original_env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b///c'}

        # regular path info popping
        def assert_tuple(script_name, path_info):
            self.assert_equal(env.get('SCRIPT_NAME'), script_name)
            self.assert_equal(env.get('PATH_INFO'), path_info)
        env = original_env.copy()
        pop = lambda: wsgi.pop_path_info(env)
        assert_tuple('/foo', '/a/b///c')
        self.assert_equal(pop(), 'a')
        assert_tuple('/foo/a', '/b///c')
        self.assert_equal(pop(), 'b')
        # Repeated slashes are carried along with the popped segment.
        assert_tuple('/foo/a/b', '///c')
        self.assert_equal(pop(), 'c')
        assert_tuple('/foo/a/b///c', '')
        self.assert_is_none(pop())

    def test_peek_path_info(self):
        env = {
            'SCRIPT_NAME': '/foo',
            'PATH_INFO': '/aaa/b///c'
        }
        # peek does not modify the environ, so repeated calls agree.
        self.assert_equal(wsgi.peek_path_info(env), 'aaa')
        self.assert_equal(wsgi.peek_path_info(env), 'aaa')
        # charset=None returns bytes instead of text.
        self.assert_equal(wsgi.peek_path_info(env, charset=None), b'aaa')
        self.assert_equal(wsgi.peek_path_info(env, charset=None), b'aaa')

    def test_path_info_and_script_name_fetching(self):
        env = create_environ(u'/\N{SNOWMAN}', u'http://example.com/\N{COMET}/')
        self.assert_equal(wsgi.get_path_info(env), u'/\N{SNOWMAN}')
        self.assert_equal(wsgi.get_path_info(env, charset=None), u'/\N{SNOWMAN}'.encode('utf-8'))
        self.assert_equal(wsgi.get_script_name(env), u'/\N{COMET}')
        self.assert_equal(wsgi.get_script_name(env, charset=None), u'/\N{COMET}'.encode('utf-8'))

    def test_query_string_fetching(self):
        env = create_environ(u'/?\N{SNOWMAN}=\N{COMET}')
        qs = wsgi.get_query_string(env)
        self.assert_strict_equal(qs, '%E2%98%83=%E2%98%84')

    def test_limited_stream(self):
        # Subclass hook: raise instead of silently returning b'' at EOF.
        class RaisingLimitedStream(wsgi.LimitedStream):
            def on_exhausted(self):
                raise BadRequest('input stream exhausted')
        io = BytesIO(b'123456')
        stream = RaisingLimitedStream(io, 3)
        self.assert_strict_equal(stream.read(), b'123')
        self.assert_raises(BadRequest, stream.read)
        io = BytesIO(b'123456')
        stream = RaisingLimitedStream(io, 3)
        self.assert_strict_equal(stream.tell(), 0)
        self.assert_strict_equal(stream.read(1), b'1')
        self.assert_strict_equal(stream.tell(), 1)
        self.assert_strict_equal(stream.read(1), b'2')
        self.assert_strict_equal(stream.tell(), 2)
        self.assert_strict_equal(stream.read(1), b'3')
        self.assert_strict_equal(stream.tell(), 3)
        self.assert_raises(BadRequest, stream.read)
        # readline/readlines stop at the byte limit, mid-line if needed.
        io = BytesIO(b'123456\nabcdefg')
        stream = wsgi.LimitedStream(io, 9)
        self.assert_strict_equal(stream.readline(), b'123456\n')
        self.assert_strict_equal(stream.readline(), b'ab')
        io = BytesIO(b'123456\nabcdefg')
        stream = wsgi.LimitedStream(io, 9)
        self.assert_strict_equal(stream.readlines(), [b'123456\n', b'ab'])
        io = BytesIO(b'123456\nabcdefg')
        stream = wsgi.LimitedStream(io, 9)
        self.assert_strict_equal(stream.readlines(2), [b'12'])
        self.assert_strict_equal(stream.readlines(2), [b'34'])
        self.assert_strict_equal(stream.readlines(), [b'56\n', b'ab'])
        io = BytesIO(b'123456\nabcdefg')
        stream = wsgi.LimitedStream(io, 9)
        self.assert_strict_equal(stream.readline(100), b'123456\n')
        io = BytesIO(b'123456\nabcdefg')
        stream = wsgi.LimitedStream(io, 9)
        self.assert_strict_equal(stream.readlines(100), [b'123456\n', b'ab'])
        io = BytesIO(b'123456')
        stream = wsgi.LimitedStream(io, 3)
        self.assert_strict_equal(stream.read(1), b'1')
        self.assert_strict_equal(stream.read(1), b'2')
        self.assert_strict_equal(stream.read(), b'3')
        self.assert_strict_equal(stream.read(), b'')
        # read(-1) means "read everything up to the limit".
        io = BytesIO(b'123456')
        stream = wsgi.LimitedStream(io, 3)
        self.assert_strict_equal(stream.read(-1), b'123')
        io = BytesIO(b'123456')
        stream = wsgi.LimitedStream(io, 0)
        self.assert_strict_equal(stream.read(-1), b'')
        # Text-mode streams are supported too.
        io = StringIO(u'123456')
        stream = wsgi.LimitedStream(io, 0)
        self.assert_strict_equal(stream.read(-1), u'')
        io = StringIO(u'123\n456\n')
        stream = wsgi.LimitedStream(io, 8)
        self.assert_strict_equal(list(stream), [u'123\n', u'456\n'])

    def test_limited_stream_disconnection(self):
        io = BytesIO(b'A bit of content')
        # disconnect detection on out of bytes
        stream = wsgi.LimitedStream(io, 255)
        with self.assert_raises(ClientDisconnected):
            stream.read()
        # disconnect detection because file close
        io = BytesIO(b'x' * 255)
        io.close()
        stream = wsgi.LimitedStream(io, 255)
        with self.assert_raises(ClientDisconnected):
            stream.read()

    def test_path_info_extraction(self):
        x = wsgi.extract_path_info('http://example.com/app', '/app/hello')
        self.assert_equal(x, u'/hello')
        # http vs https is collapsed by default.
        x = wsgi.extract_path_info('http://example.com/app',
                                   'https://example.com/app/hello')
        self.assert_equal(x, u'/hello')
        x = wsgi.extract_path_info('http://example.com/app/',
                                   'https://example.com/app/hello')
        self.assert_equal(x, u'/hello')
        x = wsgi.extract_path_info('http://example.com/app/',
                                   'https://example.com/app')
        self.assert_equal(x, u'/')
        # IDNA / non-ASCII hosts and paths.
        x = wsgi.extract_path_info(u'http://☃.net/', u'/fööbär')
        self.assert_equal(x, u'/fööbär')
        x = wsgi.extract_path_info(u'http://☃.net/x', u'http://☃.net/x/fööbär')
        self.assert_equal(x, u'/fööbär')
        env = create_environ(u'/fööbär', u'http://☃.net/x/')
        x = wsgi.extract_path_info(env, u'http://☃.net/x/fööbär')
        self.assert_equal(x, u'/fööbär')
        # Mismatched base path yields None.
        x = wsgi.extract_path_info('http://example.com/app/',
                                   'https://example.com/a/hello')
        self.assert_is_none(x)
        # With scheme collapsing disabled, http vs https is a mismatch.
        x = wsgi.extract_path_info('http://example.com/app/',
                                   'https://example.com/app/hello',
                                   collapse_http_schemes=False)
        self.assert_is_none(x)

    def test_get_host_fallback(self):
        # Without Host headers the SERVER_NAME/SERVER_PORT pair is used;
        # the default port for the scheme is omitted.
        self.assert_equal(wsgi.get_host({
            'SERVER_NAME': 'foobar.example.com',
            'wsgi.url_scheme': 'http',
            'SERVER_PORT': '80'
        }), 'foobar.example.com')
        self.assert_equal(wsgi.get_host({
            'SERVER_NAME': 'foobar.example.com',
            'wsgi.url_scheme': 'http',
            'SERVER_PORT': '81'
        }), 'foobar.example.com:81')

    def test_get_current_url_unicode(self):
        # Invalid UTF-8 in the query string is replaced, not raised.
        env = create_environ()
        env['QUERY_STRING'] = 'foo=bar&baz=blah&meh=\xcf'
        rv = wsgi.get_current_url(env)
        self.assert_strict_equal(rv,
                                 u'http://localhost/?foo=bar&baz=blah&meh=\ufffd')

    def test_multi_part_line_breaks(self):
        data = 'abcdef\r\nghijkl\r\nmnopqrstuvwxyz\r\nABCDEFGHIJK'
        test_stream = NativeStringIO(data)
        lines = list(wsgi.make_line_iter(test_stream, limit=len(data),
                                         buffer_size=16))
        self.assert_equal(lines, ['abcdef\r\n', 'ghijkl\r\n',
                                  'mnopqrstuvwxyz\r\n', 'ABCDEFGHIJK'])
        data = 'abc\r\nThis line is broken by the buffer length.' \
            '\r\nFoo bar baz'
        test_stream = NativeStringIO(data)
        lines = list(wsgi.make_line_iter(test_stream, limit=len(data),
                                         buffer_size=24))
        self.assert_equal(lines, ['abc\r\n', 'This line is broken by the '
                                  'buffer length.\r\n', 'Foo bar baz'])

    def test_multi_part_line_breaks_bytes(self):
        data = b'abcdef\r\nghijkl\r\nmnopqrstuvwxyz\r\nABCDEFGHIJK'
        test_stream = BytesIO(data)
        lines = list(wsgi.make_line_iter(test_stream, limit=len(data),
                                         buffer_size=16))
        self.assert_equal(lines, [b'abcdef\r\n', b'ghijkl\r\n',
                                  b'mnopqrstuvwxyz\r\n', b'ABCDEFGHIJK'])
        data = b'abc\r\nThis line is broken by the buffer length.' \
            b'\r\nFoo bar baz'
        test_stream = BytesIO(data)
        lines = list(wsgi.make_line_iter(test_stream, limit=len(data),
                                         buffer_size=24))
        self.assert_equal(lines, [b'abc\r\n', b'This line is broken by the '
                                  b'buffer length.\r\n', b'Foo bar baz'])

    def test_multi_part_line_breaks_problematic(self):
        data = 'abc\rdef\r\nghi'
        # NOTE(review): the loop variable ``x`` is unused and buffer_size
        # stays fixed at 4, so every iteration is identical — presumably
        # buffer_size=x was intended; confirm upstream intent.
        for x in range(1, 10):
            test_stream = NativeStringIO(data)
            lines = list(wsgi.make_line_iter(test_stream, limit=len(data),
                                             buffer_size=4))
            self.assert_equal(lines, ['abc\r', 'def\r\n', 'ghi'])

    def test_iter_functions_support_iterators(self):
        data = ['abcdef\r\nghi', 'jkl\r\nmnopqrstuvwxyz\r', '\nABCDEFGHIJK']
        lines = list(wsgi.make_line_iter(data))
        self.assert_equal(lines, ['abcdef\r\n', 'ghijkl\r\n',
                                  'mnopqrstuvwxyz\r\n', 'ABCDEFGHIJK'])

    def test_make_chunk_iter(self):
        data = [u'abcdefXghi', u'jklXmnopqrstuvwxyzX', u'ABCDEFGHIJK']
        rv = list(wsgi.make_chunk_iter(data, 'X'))
        self.assert_equal(rv, [u'abcdef', u'ghijkl', u'mnopqrstuvwxyz',
                               u'ABCDEFGHIJK'])
        data = u'abcdefXghijklXmnopqrstuvwxyzXABCDEFGHIJK'
        test_stream = StringIO(data)
        rv = list(wsgi.make_chunk_iter(test_stream, 'X', limit=len(data),
                                       buffer_size=4))
        self.assert_equal(rv, [u'abcdef', u'ghijkl', u'mnopqrstuvwxyz',
                               u'ABCDEFGHIJK'])

    def test_make_chunk_iter_bytes(self):
        data = [b'abcdefXghi', b'jklXmnopqrstuvwxyzX', b'ABCDEFGHIJK']
        rv = list(wsgi.make_chunk_iter(data, 'X'))
        self.assert_equal(rv, [b'abcdef', b'ghijkl', b'mnopqrstuvwxyz',
                               b'ABCDEFGHIJK'])
        data = b'abcdefXghijklXmnopqrstuvwxyzXABCDEFGHIJK'
        test_stream = BytesIO(data)
        rv = list(wsgi.make_chunk_iter(test_stream, 'X', limit=len(data),
                                       buffer_size=4))
        self.assert_equal(rv, [b'abcdef', b'ghijkl', b'mnopqrstuvwxyz',
                               b'ABCDEFGHIJK'])

    def test_lines_longer_buffer_size(self):
        data = '1234567890\n1234567890\n'
        # NOTE(review): ``bufsize`` is unused — buffer_size is fixed at 4,
        # so the loop repeats one case; presumably buffer_size=bufsize
        # was intended. Confirm before "fixing": smaller sizes may change
        # behaviour.
        for bufsize in range(1, 15):
            lines = list(wsgi.make_line_iter(NativeStringIO(data), limit=len(data),
                                             buffer_size=4))
            self.assert_equal(lines, ['1234567890\n', '1234567890\n'])
def suite():
    """Collect the WSGI utility tests into a unittest suite."""
    wsgi_suite = unittest.TestSuite()
    wsgi_suite.addTest(unittest.makeSuite(WSGIUtilsTestCase))
    return wsgi_suite
|
mamchecker/mamchecker
|
refs/heads/master
|
mamchecker/r/bl/__init__.py
|
1
|
# -*- coding: utf-8 -*-
from random import randrange, sample
from mamchecker.hlp import Struct
def given():
    """Generate random data for an interest-rate comparison exercise.

    Draws an effective annual rate ``i1`` and a slightly different rate
    ``i2``, picks two distinct compounding frequencies, and converts
    each effective rate to the equivalent per-period rate (percent,
    formatted to two decimals).  Returns a Struct consumed by calc().
    """
    i1 = randrange(15, 90) / 10.0
    di = (randrange(1, 10) - 5) / 50.0
    if di == 0:
        # make sure the two effective rates actually differ
        di = 0.05
    i2 = i1 + di
    # candidate compounding frequencies per year (quarterly, monthly, yearly)
    i_c = [4, 12, 1]
    # sample() already returns a list — the previous wrapping list
    # comprehension was redundant and has been removed.
    i = sample(i_c, 2)
    i_n = ['i_' + str(ii) for ii in i]

    def to_period_rate(ai, ii):
        # per-period rate (percent) equivalent to effective annual rate
        # ``ai`` compounded ``ii`` times a year
        return '{:.2f}'.format(
            100 * ((ai / 100.0 + 1.0) ** (1.0 / ii) - 1))

    i_v = [to_period_rate(i1, i[0]), to_period_rate(i2, i[1])]
    g = Struct(
        i1=i[0],
        i2=i[1],
        i1n=i_n[0],
        i2n=i_n[1],
        i1v=i_v[0],
        i2v=i_v[1])
    return g
def calc(g):
    """Return [2] when the second rate's effective annual yield beats the
    first's, otherwise [1].

    ``g.i1v``/``g.i2v`` are per-period rates in percent (strings) and
    ``g.i1``/``g.i2`` the number of compounding periods per year.
    """
    # Effective annual yield of each offer: (1 + rate)**periods - 1.
    yield_one = (1 + float(g.i1v) / 100.0) ** g.i1 - 1
    yield_two = (1 + float(g.i2v) / 100.0) ** g.i2 - 1
    answer = 2 if yield_one < yield_two else 1
    return [answer]
|
vladmm/intellij-community
|
refs/heads/master
|
python/testData/quickFixes/PyAddSpecifierToFormatQuickFixTest/int.py
|
80
|
a = <warning descr="Format specifier character missing">"test<caret> %"</warning> % 1
|
rchacon/sms-service
|
refs/heads/master
|
scraper.py
|
1
|
import os
import sys
import re
import BeautifulSoup
from googlevoice import Voice
from pymongo import MongoClient
def extractsms(htmlsms):
    """
    extractsms -- extract SMS messages from BeautifulSoup tree of Google Voice SMS HTML.

    Output is a list of dictionaries, one per message, each carrying the
    conversation id, the phone number, the span fields of the message row,
    and (on the last message of each conversation) the conversation's
    most recent timestamp under 'datetime'.
    """
    msgitems = []
    # extract all conversations by searching for a DIV with an ID at top level.
    tree = BeautifulSoup.BeautifulSoup(htmlsms)
    conversations = tree.findAll('div', attrs={'id': True}, recursive=False)
    for conversation in conversations:
        # Get phone number
        phone_text = conversation.findAll(attrs={'class': 'gc-quickcall-calldesc-phone'})[0]
        phone = extractphone(phone_text.text)
        # Get date time of most recent message in conversation
        datetime_ = conversation.findAll(attrs={'class': 'gc-message-time-row'})[0].text
        # for each conversation, extract each row, which is one SMS message.
        rows = conversation.findAll(attrs={'class': 'gc-message-sms-row'})
        for row in rows:
            # for each row, which is one message, extract all the fields.
            msgitem = {
                u'conversation_id': conversation['id'],
                u'phone': phone
            }
            spans = row.findAll('span', attrs={'class': True}, recursive=False)
            for span in spans:
                cl = span['class'].replace('gc-message-sms-', '')
                # put text in dict
                msgitem[cl] = (' '.join(span.findAll(text=True))).strip()
            # add msg dictionary to list
            msgitems.append(msgitem)
        # Bug fix: this assignment previously ran unconditionally. For a
        # conversation with no SMS rows it either raised IndexError (empty
        # msgitems) or stamped the datetime onto a *previous* conversation's
        # last message. Only tag the last message this conversation added.
        if rows:
            msgitems[-1]['datetime'] = datetime_
    return msgitems
def extractphone(text):
    """
    Given 'Google will call your phone and connect you to(510) 315-1225.'
    return (510) 315-1225

    Returns None when no US-style phone number is found in *text*.
    """
    match = re.search(r'(\([0-9]{3}\)\s[0-9]{3}-[0-9]{4})', text)
    # Explicit None check instead of catching AttributeError from a
    # failed match; group(1) is the single capturing group.
    if match is None:
        return None
    return match.group(1)
def filterNewMessages(all_messages, fetched_messages):
    """Drop from ``fetched_messages`` every message already in ``all_messages``.

    Mutates and returns ``fetched_messages``; iterates indices in reverse so
    pop() never skips an element.

    Bug fix: the membership test previously read the module-level global
    ``db_messages`` instead of the ``all_messages`` parameter, so the
    function only worked when called from this script's ``__main__`` block
    (and raised NameError anywhere else).
    """
    for e in range(len(fetched_messages) - 1, -1, -1):
        if fetched_messages[e] in all_messages:
            fetched_messages.pop(e)
    return fetched_messages
# Script entry point: pull the SMS inbox from Google Voice and mirror any
# messages not already stored into a MongoDB 'messages' collection.
# Pure I/O — requires Google Voice credentials and a reachable MongoDB.
if __name__ == '__main__':
    # Fetch the SMS inbox HTML (login credentials come from googlevoice's
    # own config — presumably ~/.gvoice; TODO confirm).
    voice = Voice()
    voice.login()
    voice.sms()
    # Connect to mongo; MONGO_URI overrides the localhost default.
    mongo_uri = os.getenv('MONGO_URI', 'mongodb://localhost:27017/sms')
    client = MongoClient(mongo_uri)
    db = client.get_default_database()
    # Parse the message dictionaries out of the inbox HTML.
    messages = extractsms(voice.sms.html)
    # Existing messages, with _id stripped so dict equality works against
    # the freshly parsed (id-less) messages.
    db_messages = list(db.messages.find({},{'_id': False}))
    # Only insert messages not already present.
    new_messages = filterNewMessages(db_messages,messages)
    if len(new_messages) > 0:
        coll_result = db.messages.insert_many(new_messages)
        print('Records inserted: %s' % len(coll_result.inserted_ids))
    else:
        print('Records inserted: %s' % 0)
    client.close()
|
ahmedaljazzar/edx-platform
|
refs/heads/master
|
common/djangoapps/enrollment/tests/test_views.py
|
1
|
"""
Tests for user enrollment.
"""
import datetime
import itertools
import json
import unittest
import ddt
import httpretty
import pytz
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.core.handlers.wsgi import WSGIRequest
from django.urls import reverse
from django.test import Client
from django.test.utils import override_settings
from mock import patch
from rest_framework import status
from rest_framework.test import APITestCase
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, check_mongo_calls_range
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from enrollment import api
from enrollment.errors import CourseEnrollmentError
from enrollment.views import EnrollmentUserThrottle
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.embargo.models import Country, CountryAccessRule, RestrictedCourse
from openedx.core.djangoapps.embargo.test_utils import restrict_course
from openedx.core.djangoapps.course_groups import cohorts
from openedx.core.djangoapps.user_api.models import (
RetirementState,
UserRetirementStatus,
UserOrgTag
)
from openedx.core.lib.django_test_client_utils import get_absolute_url
from openedx.core.lib.token_utils import JwtBuilder
from openedx.features.enterprise_support.tests import FAKE_ENTERPRISE_CUSTOMER
from openedx.features.enterprise_support.tests.mixins.enterprise import EnterpriseServiceMockMixin
from student.models import (
CourseEnrollment,
get_retired_username_by_username,
get_retired_email_by_email,
)
from student.roles import CourseStaffRole
from student.tests.factories import AdminFactory, UserFactory, SuperuserFactory
from util.models import RateLimitConfiguration
from util.testing import UrlResetMixin
class EnrollmentTestMixin(object):
    """ Mixin with methods useful for testing enrollments. """
    # Server-to-server API key; must match the EDX_API_KEY override used
    # by the test classes mixing this in.
    API_KEY = "i am a key"

    def assert_enrollment_status(
            self,
            course_id=None,
            username=None,
            expected_status=status.HTTP_200_OK,
            email_opt_in=None,
            as_server=False,
            mode=CourseMode.DEFAULT_MODE_SLUG,
            is_active=None,
            enrollment_attributes=None,
            min_mongo_calls=0,
            max_mongo_calls=0,
            linked_enterprise_customer=None,
            cohort=None,
    ):
        """
        Enroll in the course and verify the response's status code. If the expected status is 200, also validates
        the response content.

        course_id/username default to the fixture course and user set up by
        the test class. ``as_server`` sends the server API key header;
        ``min_mongo_calls``/``max_mongo_calls`` bound the number of
        modulestore queries the request is allowed to make.

        Returns
            Response
        """
        course_id = course_id or unicode(self.course.id)
        username = username or self.user.username

        data = {
            'mode': mode,
            'course_details': {
                'course_id': course_id
            },
            'user': username,
            'enrollment_attributes': enrollment_attributes
        }
        # Optional request fields are only included when explicitly set,
        # so their absence can also be exercised by callers.
        if is_active is not None:
            data['is_active'] = is_active

        if email_opt_in is not None:
            data['email_opt_in'] = email_opt_in

        if linked_enterprise_customer is not None:
            data['linked_enterprise_customer'] = linked_enterprise_customer

        if cohort is not None:
            data['cohort'] = cohort

        extra = {}
        if as_server:
            extra['HTTP_X_EDX_API_KEY'] = self.API_KEY

        # Verify that the modulestore is queried as expected.
        with check_mongo_calls_range(min_finds=min_mongo_calls, max_finds=max_mongo_calls):
            with patch('enrollment.views.audit_log') as mock_audit_log:
                url = reverse('courseenrollments')
                response = self.client.post(url, json.dumps(data), content_type='application/json', **extra)
                self.assertEqual(response.status_code, expected_status)

                if expected_status == status.HTTP_200_OK:
                    data = json.loads(response.content)
                    self.assertEqual(course_id, data['course_details']['course_id'])

                    if mode is not None:
                        self.assertEqual(mode, data['mode'])

                    if is_active is not None:
                        self.assertEqual(is_active, data['is_active'])
                    else:
                        self.assertTrue(data['is_active'])

                    if as_server:
                        # Verify that an audit message was logged.
                        self.assertTrue(mock_audit_log.called)

                        # If multiple enrollment calls are made in the scope of a
                        # single test, we want to validate that audit messages are
                        # logged for each call.
                        mock_audit_log.reset_mock()

        return response

    def assert_enrollment_activation(self, expected_activation, expected_mode):
        """Change an enrollment's activation and verify its activation and mode are as expected."""
        self.assert_enrollment_status(
            as_server=True,
            mode=expected_mode,
            is_active=expected_activation,
            expected_status=status.HTTP_200_OK
        )
        actual_mode, actual_activation = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        self.assertEqual(actual_activation, expected_activation)
        self.assertEqual(actual_mode, expected_mode)

    def _get_enrollments(self):
        """Retrieve the enrollment list for the current user. """
        resp = self.client.get(reverse("courseenrollments"))
        return json.loads(resp.content)
@override_settings(EDX_API_KEY="i am a key")
@ddt.ddt
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class EnrollmentTest(EnrollmentTestMixin, ModuleStoreTestCase, APITestCase, EnterpriseServiceMockMixin):
"""
Test user enrollment, especially with different course modes.
"""
shard = 3
USERNAME = "Bob"
EMAIL = "bob@example.com"
PASSWORD = "edx"
OTHER_USERNAME = "Jane"
OTHER_EMAIL = "jane@example.com"
ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
ENABLED_SIGNALS = ['course_published']
def setUp(self):
    """ Create a course and user, then log in. """
    super(EnrollmentTest, self).setUp()

    # Disable rate limiting by default; throttle tests re-enable it.
    self.rate_limit_config = RateLimitConfiguration.current()
    self.rate_limit_config.enabled = False
    self.rate_limit_config.save()

    throttle = EnrollmentUserThrottle()
    # parse_rate yields (number of requests, duration); only the count is needed.
    self.rate_limit, __ = throttle.parse_rate(throttle.rate)

    # Pass emit_signals when creating the course so it would be cached
    # as a CourseOverview. Enrollments require a cached CourseOverview.
    self.course = CourseFactory.create(emit_signals=True)

    self.user = UserFactory.create(
        username=self.USERNAME,
        email=self.EMAIL,
        password=self.PASSWORD,
    )
    self.other_user = UserFactory.create(
        username=self.OTHER_USERNAME,
        email=self.OTHER_EMAIL,
        password=self.PASSWORD,
    )
    self.client.login(username=self.USERNAME, password=self.PASSWORD)
@ddt.data(
    # Default (no course modes in the database)
    # Expect that users are automatically enrolled as the default
    ([], CourseMode.DEFAULT_MODE_SLUG),

    # Audit / Verified
    # We should always go to the "choose your course" page.
    # We should also be enrolled as the default.
    ([CourseMode.VERIFIED, CourseMode.AUDIT], CourseMode.DEFAULT_MODE_SLUG),
)
@ddt.unpack
def test_enroll(self, course_modes, enrollment_mode):
    """Enrolling creates an active enrollment in the expected mode."""
    # Create the course modes (if any) required for this test case
    for mode_slug in course_modes:
        CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug=mode_slug,
            mode_display_name=mode_slug,
        )

    # Create an enrollment
    resp = self.assert_enrollment_status()

    # Verify that the response contains the correct course_name
    data = json.loads(resp.content)
    self.assertEqual(self.course.display_name_with_default, data['course_details']['course_name'])

    # Verify that the enrollment was created correctly
    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, enrollment_mode)
def test_check_enrollment(self):
    """GET on the single-enrollment endpoint returns course, mode and active flag."""
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.DEFAULT_MODE_SLUG,
        mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
    )
    # Create an enrollment
    self.assert_enrollment_status()
    resp = self.client.get(
        reverse('courseenrollment', kwargs={'username': self.user.username, "course_id": unicode(self.course.id)})
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    data = json.loads(resp.content)
    self.assertEqual(unicode(self.course.id), data['course_details']['course_id'])
    self.assertEqual(self.course.display_name_with_default, data['course_details']['course_name'])
    self.assertEqual(CourseMode.DEFAULT_MODE_SLUG, data['mode'])
    self.assertTrue(data['is_active'])
@ddt.data(
    # (opt_in value sent to the API, expected stored preference value)
    (True, u"True"),
    (False, u"False"),
    (None, None)
)
@ddt.unpack
def test_email_opt_in_true(self, opt_in, pref_value):
    """
    Verify that the email_opt_in parameter sets the underlying flag.
    And that if the argument is not present, then it does not affect the flag
    """
    def _assert_no_opt_in_set():
        """ Check the opt-in tag doesn't exist. """
        with self.assertRaises(UserOrgTag.DoesNotExist):
            UserOrgTag.objects.get(user=self.user, org=self.course.id.org, key="email-optin")

    _assert_no_opt_in_set()
    self.assert_enrollment_status(email_opt_in=opt_in)
    if opt_in is None:
        # Omitting the parameter must leave the preference untouched.
        _assert_no_opt_in_set()
    else:
        preference = UserOrgTag.objects.get(user=self.user, org=self.course.id.org, key="email-optin")
        self.assertEquals(preference.value, pref_value)
def test_enroll_prof_ed(self):
    """Enrolling with the default mode fails when only a professional mode exists."""
    # Create the prof ed mode.
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug='professional',
        mode_display_name='Professional Education',
    )

    # Enroll in the course, this will fail if the mode is not explicitly professional.
    resp = self.assert_enrollment_status(expected_status=status.HTTP_400_BAD_REQUEST)

    # Even though the enrollment request is invalid, the response content
    # should still list all the valid enrollment modes.
    data = json.loads(resp.content)
    self.assertEqual(unicode(self.course.id), data['course_details']['course_id'])
    self.assertEqual(1, len(data['course_details']['course_modes']))
    self.assertEqual('professional', data['course_details']['course_modes'][0]['slug'])
def test_user_not_specified(self):
    """Omitting the username from the URL defaults to the requesting user."""
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.DEFAULT_MODE_SLUG,
        mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
    )
    # Create an enrollment
    self.assert_enrollment_status()
    resp = self.client.get(
        reverse('courseenrollment', kwargs={"course_id": unicode(self.course.id)})
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)
    data = json.loads(resp.content)
    self.assertEqual(unicode(self.course.id), data['course_details']['course_id'])
    self.assertEqual(CourseMode.DEFAULT_MODE_SLUG, data['mode'])
    self.assertTrue(data['is_active'])
def test_user_not_authenticated(self):
    """Anonymous enrollment attempts are rejected with 401."""
    # Log out, so we're no longer authenticated
    self.client.logout()

    # Try to enroll, this should fail.
    self.assert_enrollment_status(expected_status=status.HTTP_401_UNAUTHORIZED)
def test_user_not_activated(self):
    """A logged-in but deactivated account can still enroll."""
    # Log out the default user, Bob.
    self.client.logout()

    # Create a user account
    self.user = UserFactory.create(
        username="inactive",
        email="inactive@example.com",
        password=self.PASSWORD,
        is_active=True
    )

    # Log in with the unactivated account
    self.client.login(username="inactive", password=self.PASSWORD)

    # Deactivate the user. Has to be done after login to get the user into the
    # request and properly logged in.
    self.user.is_active = False
    self.user.save()

    # Enrollment should succeed, even though the account is deactivated.
    self.assert_enrollment_status()
def test_user_does_not_match_url(self):
    """Enrolling another user is 404 for regular users but allowed for the server."""
    # Try to enroll a user that is not the authenticated user.
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.DEFAULT_MODE_SLUG,
        mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
    )
    self.assert_enrollment_status(username=self.other_user.username, expected_status=status.HTTP_404_NOT_FOUND)
    # Verify that the server still has access to this endpoint.
    self.client.logout()
    self.assert_enrollment_status(username=self.other_user.username, as_server=True)
def _assert_enrollments_visible_in_list(self, courses, use_server_key=False):
    """
    Check that the list of enrollments of self.user returned for the currently logged in user
    matches the list of courses passed in in 'courses'.

    ``use_server_key`` sends the server-to-server API key header instead of
    relying on the session user's permissions.
    """
    kwargs = {}
    if use_server_key:
        kwargs.update(HTTP_X_EDX_API_KEY=self.API_KEY)
    response = self.client.get(reverse('courseenrollments'), {'user': self.user.username}, **kwargs)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    data = json.loads(response.content)
    # Order-insensitive comparison of (course_id, course_name) pairs.
    self.assertItemsEqual(
        [(datum['course_details']['course_id'], datum['course_details']['course_name']) for datum in data],
        [(unicode(course.id), course.display_name_with_default) for course in courses]
    )
def test_enrollment_list_permissions(self):
    """
    Test that the correct list of enrollments is returned, depending on the permissions of the
    requesting user.
    """
    # Create another course, and enroll self.user in both courses.
    other_course = CourseFactory.create(emit_signals=True)
    for course in self.course, other_course:
        CourseModeFactory.create(
            course_id=unicode(course.id),
            mode_slug=CourseMode.DEFAULT_MODE_SLUG,
            mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
        )
        self.assert_enrollment_status(
            course_id=unicode(course.id),
            max_mongo_calls=0,
        )
    # Verify the user himself can see both of his enrollments.
    self._assert_enrollments_visible_in_list([self.course, other_course])
    # Verify that self.other_user can't see any of the enrollments.
    self.client.login(username=self.OTHER_USERNAME, password=self.PASSWORD)
    self._assert_enrollments_visible_in_list([])
    # Create a staff user for self.course (but not for other_course) and log her in.
    staff_user = UserFactory.create(username='staff', email='staff@example.com', password=self.PASSWORD)
    CourseStaffRole(self.course.id).add_users(staff_user)
    self.client.login(username='staff', password=self.PASSWORD)
    # Verify that she can see only the enrollment in the course she has staff privileges for.
    self._assert_enrollments_visible_in_list([self.course])
    # Create a global staff user, and verify she can see all enrollments.
    AdminFactory(username='global_staff', email='global_staff@example.com', password=self.PASSWORD)
    self.client.login(username='global_staff', password=self.PASSWORD)
    self._assert_enrollments_visible_in_list([self.course, other_course])
    # Verify the server can see all enrollments.
    self.client.logout()
    self._assert_enrollments_visible_in_list([self.course, other_course], use_server_key=True)
def test_user_does_not_match_param(self):
    """
    The view should return status 404 if the enrollment username does not match the username of the user
    making the request, unless the request is made by a staff user or with a server API key.
    """
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.HONOR,
        mode_display_name=CourseMode.HONOR,
    )
    url = reverse('courseenrollment',
                  kwargs={'username': self.other_user.username, "course_id": unicode(self.course.id)})

    response = self.client.get(url)
    self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    # Verify that the server still has access to this endpoint.
    self.client.logout()
    response = self.client.get(url, **{'HTTP_X_EDX_API_KEY': self.API_KEY})
    self.assertEqual(response.status_code, status.HTTP_200_OK)

    # Verify staff have access to this endpoint
    staff_user = UserFactory.create(password=self.PASSWORD, is_staff=True)
    self.client.login(username=staff_user.username, password=self.PASSWORD)
    response = self.client.get(url)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_get_course_details(self):
    """The course-details endpoint exposes mode slug, name and SKUs."""
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.HONOR,
        mode_display_name=CourseMode.HONOR,
        sku='123',
        bulk_sku="BULK123"
    )
    resp = self.client.get(
        reverse('courseenrollmentdetails', kwargs={"course_id": unicode(self.course.id)})
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)

    data = json.loads(resp.content)
    self.assertEqual(unicode(self.course.id), data['course_id'])
    self.assertEqual(self.course.display_name_with_default, data['course_name'])
    mode = data['course_modes'][0]
    self.assertEqual(mode['slug'], CourseMode.HONOR)
    self.assertEqual(mode['sku'], '123')
    self.assertEqual(mode['bulk_sku'], 'BULK123')
    self.assertEqual(mode['name'], CourseMode.HONOR)
def test_get_course_details_with_credit_course(self):
    """The course-details endpoint also lists credit modes."""
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.CREDIT_MODE,
        mode_display_name=CourseMode.CREDIT_MODE,
    )
    resp = self.client.get(
        reverse('courseenrollmentdetails', kwargs={"course_id": unicode(self.course.id)})
    )
    self.assertEqual(resp.status_code, status.HTTP_200_OK)

    data = json.loads(resp.content)
    self.assertEqual(unicode(self.course.id), data['course_id'])
    mode = data['course_modes'][0]
    self.assertEqual(mode['slug'], CourseMode.CREDIT_MODE)
    self.assertEqual(mode['name'], CourseMode.CREDIT_MODE)
@ddt.data(
    # NOTE: Studio requires a start date, but this is not
    # enforced at the data layer, so we need to handle the case
    # in which no dates are specified.
    # (start datetime, end datetime, expected start string, expected end string)
    (None, None, None, None),
    (datetime.datetime(2015, 1, 2, 3, 4, 5, tzinfo=pytz.UTC), None, "2015-01-02T03:04:05Z", None),
    (None, datetime.datetime(2015, 1, 2, 3, 4, 5, tzinfo=pytz.UTC), None, "2015-01-02T03:04:05Z"),
    (datetime.datetime(2014, 6, 7, 8, 9, 10, tzinfo=pytz.UTC), datetime.datetime(2015, 1, 2, 3, 4, 5, tzinfo=pytz.UTC), "2014-06-07T08:09:10Z", "2015-01-02T03:04:05Z"),
)
@ddt.unpack
def test_get_course_details_course_dates(self, start_datetime, end_datetime, expected_start, expected_end):
    """Course start/end dates are serialized consistently across all enrollment endpoints."""
    course = CourseFactory.create(start=start_datetime, end=end_datetime)
    # Load a CourseOverview. This initial load should result in a cache
    # miss; the modulestore is queried and course metadata is cached.
    __ = CourseOverview.get_from_id(course.id)

    self.assert_enrollment_status(course_id=unicode(course.id))

    # Check course details
    url = reverse('courseenrollmentdetails', kwargs={"course_id": unicode(course.id)})
    resp = self.client.get(url)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)

    data = json.loads(resp.content)
    self.assertEqual(data['course_start'], expected_start)
    self.assertEqual(data['course_end'], expected_end)

    # Check enrollment course details
    url = reverse('courseenrollment', kwargs={"course_id": unicode(course.id)})
    resp = self.client.get(url)
    self.assertEqual(resp.status_code, status.HTTP_200_OK)

    data = json.loads(resp.content)
    self.assertEqual(data['course_details']['course_start'], expected_start)
    self.assertEqual(data['course_details']['course_end'], expected_end)

    # Check enrollment list course details
    resp = self.client.get(reverse('courseenrollments'))
    self.assertEqual(resp.status_code, status.HTTP_200_OK)

    data = json.loads(resp.content)
    self.assertEqual(data[0]['course_details']['course_start'], expected_start)
    self.assertEqual(data[0]['course_details']['course_end'], expected_end)
def test_with_invalid_course_id(self):
    """Enrolling in a nonexistent course returns 400 after a bounded modulestore lookup."""
    self.assert_enrollment_status(
        course_id='entirely/fake/course',
        expected_status=status.HTTP_400_BAD_REQUEST,
        min_mongo_calls=3,
        max_mongo_calls=4
    )
def test_get_enrollment_details_bad_course(self):
    """Course-details lookups for a nonexistent course return 400."""
    resp = self.client.get(
        reverse('courseenrollmentdetails', kwargs={"course_id": "some/fake/course"})
    )
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
@patch.object(api, "get_enrollment")
def test_get_enrollment_internal_error(self, mock_get_enrollment):
    """An internal CourseEnrollmentError surfaces as a 400 response."""
    mock_get_enrollment.side_effect = CourseEnrollmentError("Something bad happened.")
    resp = self.client.get(
        reverse('courseenrollment', kwargs={'username': self.user.username, "course_id": unicode(self.course.id)})
    )
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
def test_enrollment_already_enrolled(self):
    """Re-enrolling is idempotent: the repeat request returns the same payload."""
    response = self.assert_enrollment_status()
    repeat_response = self.assert_enrollment_status(expected_status=status.HTTP_200_OK)
    self.assertEqual(json.loads(response.content), json.loads(repeat_response.content))
def test_get_enrollment_with_invalid_key(self):
    """Posting an unparseable course id returns 400 with an explanatory message."""
    resp = self.client.post(
        reverse('courseenrollments'),
        {
            'course_details': {
                'course_id': 'invalidcourse'
            },
            'user': self.user.username
        },
        format='json'
    )
    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertIn("No course ", resp.content)
def test_enrollment_throttle_for_user(self):
    """Make sure a user requests do not exceed the maximum number of requests"""
    self.rate_limit_config.enabled = True
    self.rate_limit_config.save()
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.DEFAULT_MODE_SLUG,
        mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
    )

    # Requests at or beyond the limit must be throttled with 429.
    for attempt in xrange(self.rate_limit + 2):
        expected_status = status.HTTP_429_TOO_MANY_REQUESTS if attempt >= self.rate_limit else status.HTTP_200_OK
        self.assert_enrollment_status(expected_status=expected_status)
@ddt.data('staff', 'user')
def test_enrollment_throttle_is_set_correctly(self, user_scope):
    """ Make sure throttle rate is set correctly for different user scopes. """
    self.rate_limit_config.enabled = True
    self.rate_limit_config.save()

    throttle = EnrollmentUserThrottle()
    throttle.scope = user_scope
    try:
        throttle.parse_rate(throttle.get_rate())
    except ImproperlyConfigured:
        # parse_rate raises ImproperlyConfigured when no rate is configured
        # for the scope.
        self.fail("No throttle rate set for {}".format(user_scope))
def test_create_enrollment_with_cohort(self):
    """Enroll in the course, and also add to a cohort."""
    # Create a cohort
    cohort_name = 'masters'
    cohorts.set_course_cohorted(self.course.id, True)
    cohorts.add_cohort(self.course.id, cohort_name, 'test')
    # Create an enrollment
    self.assert_enrollment_status(cohort=cohort_name)
    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    # assign=False: the user must already be in the cohort, not auto-assigned.
    self.assertEqual(cohorts.get_cohort(self.user, self.course.id, assign=False).name, cohort_name)
def test_create_enrollment_with_wrong_cohort(self):
    """Enrolling with a nonexistent cohort name fails with 400."""
    # Create a cohort
    cohorts.set_course_cohorted(self.course.id, True)
    cohorts.add_cohort(self.course.id, 'masters', 'test')
    # Create an enrollment
    self.assert_enrollment_status(cohort='missing', expected_status=status.HTTP_400_BAD_REQUEST)
def test_create_enrollment_with_mode(self):
    """With the right API key, create a new enrollment with a mode set other than the default."""
    # Create a professional ed course mode.
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug='professional',
        mode_display_name='professional',
    )

    # Create an enrollment
    self.assert_enrollment_status(as_server=True, mode='professional')

    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, 'professional')
def test_enrollment_includes_expired_verified(self):
    """With the right API key, request that expired course verifications are still returned. """
    # Create a honor mode for a course.
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.HONOR,
        mode_display_name=CourseMode.HONOR,
    )

    # Create a verified mode for a course that expired in the past.
    CourseModeFactory.create(
        course_id=self.course.id,
        mode_slug=CourseMode.VERIFIED,
        mode_display_name=CourseMode.VERIFIED,
        expiration_datetime='1970-01-01 05:00:00Z'
    )

    # Passes the include_expired parameter to the API call
    v_response = self.client.get(
        reverse('courseenrollmentdetails', kwargs={"course_id": unicode(self.course.id)}), {'include_expired': True}
    )
    v_data = json.loads(v_response.content)

    # Ensure that both course modes are returned
    self.assertEqual(len(v_data['course_modes']), 2)

    # Omits the include_expired parameter from the API call
    h_response = self.client.get(reverse('courseenrollmentdetails', kwargs={"course_id": unicode(self.course.id)}))
    h_data = json.loads(h_response.content)

    # Ensure that only one course mode is returned and that it is honor
    self.assertEqual(len(h_data['course_modes']), 1)
    self.assertEqual(h_data['course_modes'][0]['slug'], CourseMode.HONOR)
def test_update_enrollment_with_mode(self):
    """With the right API key, update an existing enrollment with a new mode. """
    # Create an honor and verified mode for a course. This allows an update.
    for mode in [CourseMode.DEFAULT_MODE_SLUG, CourseMode.VERIFIED]:
        CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug=mode,
            mode_display_name=mode,
        )

    # Create an enrollment
    self.assert_enrollment_status(as_server=True)

    # Check that the enrollment is default.
    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)

    # Check that the enrollment upgraded to verified.
    self.assert_enrollment_status(as_server=True, mode=CourseMode.VERIFIED, expected_status=status.HTTP_200_OK)
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.VERIFIED)
def test_enrollment_with_credit_mode(self):
    """With the right API key, update an existing enrollment with credit
    mode and set enrollment attributes.
    """
    for mode in [CourseMode.DEFAULT_MODE_SLUG, CourseMode.CREDIT_MODE]:
        CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug=mode,
            mode_display_name=mode,
        )

    # Create an enrollment
    self.assert_enrollment_status(as_server=True)

    # Check that the enrollment is the default.
    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)

    # Check that the enrollment upgraded to credit, carrying the required
    # credit provider attribute.
    enrollment_attributes = [{
        "namespace": "credit",
        "name": "provider_id",
        "value": "hogwarts",
    }]
    self.assert_enrollment_status(
        as_server=True,
        mode=CourseMode.CREDIT_MODE,
        expected_status=status.HTTP_200_OK,
        enrollment_attributes=enrollment_attributes
    )
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.CREDIT_MODE)
def test_enrollment_with_invalid_attr(self):
    """Check response status is bad request when invalid enrollment
    attributes are passed
    """
    for mode in [CourseMode.DEFAULT_MODE_SLUG, CourseMode.CREDIT_MODE]:
        CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug=mode,
            mode_display_name=mode,
        )

    # Create an enrollment
    self.assert_enrollment_status(as_server=True)

    # Check that the enrollment is the default.
    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)

    # Attempt the upgrade to credit with an invalid attribute name;
    # the request must fail and the mode must remain the default.
    enrollment_attributes = [{
        "namespace": "credit",
        "name": "invalid",
        "value": "hogwarts",
    }]
    self.assert_enrollment_status(
        as_server=True,
        mode=CourseMode.CREDIT_MODE,
        expected_status=status.HTTP_400_BAD_REQUEST,
        enrollment_attributes=enrollment_attributes
    )
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)
def test_downgrade_enrollment_with_mode(self):
    """With the right API key, downgrade an existing enrollment with a new mode. """
    # Create an honor and verified mode for a course. This allows an update.
    for mode in [CourseMode.DEFAULT_MODE_SLUG, CourseMode.VERIFIED]:
        CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug=mode,
            mode_display_name=mode,
        )

    # Create a 'verified' enrollment
    self.assert_enrollment_status(as_server=True, mode=CourseMode.VERIFIED)

    # Check that the enrollment is verified.
    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.VERIFIED)

    # Check that the enrollment was downgraded to the default mode.
    self.assert_enrollment_status(
        as_server=True,
        mode=CourseMode.DEFAULT_MODE_SLUG,
        expected_status=status.HTTP_200_OK
    )
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)
@ddt.data(
    # (modes configured for the course, mode the enrollment is created in)
    ((CourseMode.DEFAULT_MODE_SLUG, ), CourseMode.DEFAULT_MODE_SLUG),
    ((CourseMode.DEFAULT_MODE_SLUG, CourseMode.VERIFIED), CourseMode.DEFAULT_MODE_SLUG),
    ((CourseMode.DEFAULT_MODE_SLUG, CourseMode.VERIFIED), CourseMode.VERIFIED),
    ((CourseMode.PROFESSIONAL, ), CourseMode.PROFESSIONAL),
    ((CourseMode.NO_ID_PROFESSIONAL_MODE, ), CourseMode.NO_ID_PROFESSIONAL_MODE),
    ((CourseMode.VERIFIED, CourseMode.CREDIT_MODE), CourseMode.VERIFIED),
    ((CourseMode.VERIFIED, CourseMode.CREDIT_MODE), CourseMode.CREDIT_MODE),
)
@ddt.unpack
def test_deactivate_enrollment(self, configured_modes, selected_mode):
    """With the right API key, deactivate (i.e., unenroll from) an existing enrollment."""
    # Configure a set of modes for the course.
    for mode in configured_modes:
        CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug=mode,
            mode_display_name=mode,
        )

    # Create an enrollment with the selected mode.
    self.assert_enrollment_status(as_server=True, mode=selected_mode)

    # Check that the enrollment has the correct mode and is active.
    self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
    course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
    self.assertTrue(is_active)
    self.assertEqual(course_mode, selected_mode)

    # Verify that a non-Boolean enrollment status is treated as invalid.
    self.assert_enrollment_status(
        as_server=True,
        mode=None,
        is_active='foo',
        expected_status=status.HTTP_400_BAD_REQUEST
    )

    # Verify that the enrollment has been deactivated, and that the mode is unchanged.
    self.assert_enrollment_activation(False, selected_mode)

    # Verify that enrollment deactivation is idempotent.
    self.assert_enrollment_activation(False, selected_mode)

    # Verify that omitting the mode returns 400 for course configurations
    # in which the default mode doesn't exist.
    expected_status = (
        status.HTTP_200_OK
        if CourseMode.DEFAULT_MODE_SLUG in configured_modes
        else status.HTTP_400_BAD_REQUEST
    )
    self.assert_enrollment_status(
        as_server=True,
        is_active=False,
        expected_status=expected_status,
    )
    def test_deactivate_enrollment_expired_mode(self):
        """Verify that an enrollment in an expired mode can be deactivated."""
        for mode in (CourseMode.HONOR, CourseMode.VERIFIED):
            CourseModeFactory.create(
                course_id=self.course.id,
                mode_slug=mode,
                mode_display_name=mode,
            )
        # Create verified enrollment.
        self.assert_enrollment_status(as_server=True, mode=CourseMode.VERIFIED)
        # Expire the verified mode by back-dating its expiration to 1970.
        mode = CourseMode.objects.get(course_id=self.course.id, mode_slug=CourseMode.VERIFIED)
        mode.expiration_datetime = datetime.datetime(year=1970, month=1, day=1, tzinfo=pytz.utc)
        mode.save()
        # Deactivate the enrollment; expiration must not block unenrollment.
        self.assert_enrollment_activation(False, CourseMode.VERIFIED)
    def test_change_mode_from_user(self):
        """Users should not be able to alter the enrollment mode on an enrollment. """
        # Create a default and a verified mode for a course. This allows an update.
        for mode in [CourseMode.DEFAULT_MODE_SLUG, CourseMode.VERIFIED]:
            CourseModeFactory.create(
                course_id=self.course.id,
                mode_slug=mode,
                mode_display_name=mode,
            )
        # Create an enrollment as a regular (non-server) user.
        self.assert_enrollment_status()
        # Check that the enrollment is in the default mode.
        self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
        course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        self.assertTrue(is_active)
        self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)
        # Get a 403 response when trying to upgrade yourself.
        self.assert_enrollment_status(mode=CourseMode.VERIFIED, expected_status=status.HTTP_403_FORBIDDEN)
        # The mode must be unchanged after the rejected upgrade attempt.
        course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        self.assertTrue(is_active)
        self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)
    # All combinations of (old_mode, new_mode, old_is_active, new_is_active).
    @ddt.data(*itertools.product(
        (CourseMode.HONOR, CourseMode.VERIFIED),
        (CourseMode.HONOR, CourseMode.VERIFIED),
        (True, False),
        (True, False),
    ))
    @ddt.unpack
    def test_change_mode_from_server(self, old_mode, new_mode, old_is_active, new_is_active):
        """
        Server-to-server calls should be allowed to change the mode of any
        enrollment, as long as the enrollment is not being deactivated during
        the same call (this is assumed to be an error on the client's side).
        """
        for mode in [CourseMode.HONOR, CourseMode.VERIFIED]:
            CourseModeFactory.create(
                course_id=self.course.id,
                mode_slug=mode,
                mode_display_name=mode,
            )
        # Set up the initial enrollment
        self.assert_enrollment_status(as_server=True, mode=old_mode, is_active=old_is_active)
        course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        self.assertEqual(is_active, old_is_active)
        self.assertEqual(course_mode, old_mode)
        # 400 is expected only when the call both changes the mode AND flips the
        # enrollment from active to inactive in the same request.
        expected_status = status.HTTP_400_BAD_REQUEST if (
            old_mode != new_mode and
            old_is_active != new_is_active and
            not new_is_active
        ) else status.HTTP_200_OK
        # simulate the server-server api call under test
        response = self.assert_enrollment_status(
            as_server=True,
            mode=new_mode,
            is_active=new_is_active,
            expected_status=expected_status,
        )
        course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        if expected_status == status.HTTP_400_BAD_REQUEST:
            # nothing should have changed
            self.assertEqual(is_active, old_is_active)
            self.assertEqual(course_mode, old_mode)
            # error message should contain specific text. Otto checks for this text in the message.
            self.assertRegexpMatches(json.loads(response.content)['message'], 'Enrollment mode mismatch')
        else:
            # call should have succeeded
            self.assertEqual(is_active, new_is_active)
            self.assertEqual(course_mode, new_mode)
    def test_change_mode_invalid_user(self):
        """
        Attempts to change an enrollment for a non-existent user should result in an HTTP 404 for non-server users,
        and HTTP 406 for server users.
        """
        # Non-server callers get a plain "not found".
        self.assert_enrollment_status(username='fake-user', expected_status=status.HTTP_404_NOT_FOUND, as_server=False)
        # Server-to-server callers get 406 (Not Acceptable) instead.
        self.assert_enrollment_status(username='fake-user', expected_status=status.HTTP_406_NOT_ACCEPTABLE,
                                      as_server=True)
    # Each case: (whether the request uses the server API key, the mode the
    # enrollment is expected to end up in).
    @ddt.data(
        (True, CourseMode.VERIFIED),
        (False, CourseMode.DEFAULT_MODE_SLUG)
    )
    @ddt.unpack
    def test_update_enrollment_with_expired_mode(self, using_api_key, updated_mode):
        """Verify that an enrollment cannot be upgraded to an expired verified mode,
        except through a server-to-server (API key) request.
        """
        for mode in [CourseMode.DEFAULT_MODE_SLUG, CourseMode.VERIFIED]:
            CourseModeFactory.create(
                course_id=self.course.id,
                mode_slug=mode,
                mode_display_name=mode,
            )
        # Create an enrollment
        self.assert_enrollment_status(as_server=True)
        # Check that the enrollment is the default.
        self.assertTrue(CourseEnrollment.is_enrolled(self.user, self.course.id))
        course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        self.assertTrue(is_active)
        self.assertEqual(course_mode, CourseMode.DEFAULT_MODE_SLUG)
        # Expire the verified mode by back-dating its expiration to 1970.
        mode = CourseMode.objects.get(course_id=self.course.id, mode_slug=CourseMode.VERIFIED)
        mode.expiration_datetime = datetime.datetime(year=1970, month=1, day=1, tzinfo=pytz.utc)
        mode.save()
        # Servers may still upgrade to the expired mode (200); users may not (403).
        self.assert_enrollment_status(
            as_server=using_api_key,
            mode=CourseMode.VERIFIED,
            expected_status=status.HTTP_200_OK if using_api_key else status.HTTP_403_FORBIDDEN
        )
        course_mode, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, self.course.id)
        self.assertTrue(is_active)
        self.assertEqual(course_mode, updated_mode)
    @httpretty.activate
    @override_settings(ENTERPRISE_SERVICE_WORKER_USERNAME='enterprise_worker',
                       FEATURES=dict(ENABLE_ENTERPRISE_INTEGRATION=True))
    @patch('openedx.features.enterprise_support.api.enterprise_customer_from_api')
    def test_enterprise_course_enrollment_with_ec_uuid(self, mock_enterprise_customer_from_api):
        """Verify that the enrollment completes when the EnterpriseCourseEnrollment creation succeeds. """
        # The enterprise service worker account that makes the enrollment request.
        UserFactory.create(
            username='enterprise_worker',
            email=self.EMAIL,
            password=self.PASSWORD,
        )
        CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug=CourseMode.DEFAULT_MODE_SLUG,
            mode_display_name=CourseMode.DEFAULT_MODE_SLUG,
        )
        consent_kwargs = {
            'username': self.user.username,
            'course_id': unicode(self.course.id),
            'ec_uuid': 'this-is-a-real-uuid'
        }
        # Stub out the enterprise customer lookup and the consent service:
        # consent is initially missing, then the POST granting it succeeds.
        mock_enterprise_customer_from_api.return_value = FAKE_ENTERPRISE_CUSTOMER
        self.mock_enterprise_course_enrollment_post_api()
        self.mock_consent_missing(**consent_kwargs)
        self.mock_consent_post(**consent_kwargs)
        self.assert_enrollment_status(
            expected_status=status.HTTP_200_OK,
            as_server=True,
            username='enterprise_worker',
            linked_enterprise_customer='this-is-a-real-uuid',
        )
        # The final outbound HTTP call must be the consent-granting POST.
        self.assertEqual(
            httpretty.last_request().path,
            '/consent/api/v1/data_sharing_consent',
        )
        self.assertEqual(
            httpretty.last_request().method,
            httpretty.POST
        )
    def test_enrollment_attributes_always_written(self):
        """ Enrollment attributes should always be written, regardless of whether
        the enrollment is being created or updated.
        """
        course_key = self.course.id
        for mode in [CourseMode.DEFAULT_MODE_SLUG, CourseMode.VERIFIED]:
            CourseModeFactory.create(
                course_id=course_key,
                mode_slug=mode,
                mode_display_name=mode,
            )
        # Creating a new enrollment should write attributes
        order_number = 'EDX-1000'
        enrollment_attributes = [{
            'namespace': 'order',
            'name': 'order_number',
            'value': order_number,
        }]
        mode = CourseMode.VERIFIED
        self.assert_enrollment_status(
            as_server=True,
            is_active=True,
            mode=mode,
            enrollment_attributes=enrollment_attributes
        )
        enrollment = CourseEnrollment.objects.get(user=self.user, course_id=course_key)
        self.assertTrue(enrollment.is_active)
        self.assertEqual(enrollment.mode, CourseMode.VERIFIED)
        self.assertEqual(enrollment.attributes.get(namespace='order', name='order_number').value, order_number)
        # Updating an enrollment should update attributes
        order_number = 'EDX-2000'
        enrollment_attributes = [{
            'namespace': 'order',
            'name': 'order_number',
            'value': order_number,
        }]
        mode = CourseMode.DEFAULT_MODE_SLUG
        self.assert_enrollment_status(
            as_server=True,
            mode=mode,
            enrollment_attributes=enrollment_attributes
        )
        enrollment.refresh_from_db()
        self.assertTrue(enrollment.is_active)
        self.assertEqual(enrollment.mode, mode)
        self.assertEqual(enrollment.attributes.get(namespace='order', name='order_number').value, order_number)
        # Updating an enrollment should update attributes (for audit mode enrollments also)
        order_number = 'EDX-3000'
        enrollment_attributes = [{
            'namespace': 'order',
            'name': 'order_number',
            'value': order_number,
        }]
        self.assert_enrollment_status(
            as_server=True,
            mode='audit',
            enrollment_attributes=enrollment_attributes
        )
        enrollment.refresh_from_db()
        self.assertTrue(enrollment.is_active)
        # NOTE(review): `mode` is still DEFAULT_MODE_SLUG here; this assertion
        # assumes DEFAULT_MODE_SLUG == 'audit' — confirm against CourseMode.
        self.assertEqual(enrollment.mode, mode)
        self.assertEqual(enrollment.attributes.get(namespace='order', name='order_number').value, order_number)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class EnrollmentEmbargoTest(EnrollmentTestMixin, UrlResetMixin, ModuleStoreTestCase):
    """Test that enrollment is blocked from embargoed countries. """
    USERNAME = "Bob"
    EMAIL = "bob@example.com"
    PASSWORD = "edx"
    URLCONF_MODULES = ['openedx.core.djangoapps.embargo']
    @patch.dict(settings.FEATURES, {'EMBARGO': True})
    def setUp(self):
        """ Create a course and user, then log in. """
        super(EnrollmentEmbargoTest, self).setUp()
        self.course = CourseFactory.create()
        # Load a CourseOverview. This initial load should result in a cache
        # miss; the modulestore is queried and course metadata is cached.
        __ = CourseOverview.get_from_id(self.course.id)
        self.user = UserFactory.create(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)
        self.client.login(username=self.USERNAME, password=self.PASSWORD)
        self.url = reverse('courseenrollments')
    def _generate_data(self):
        """Return the JSON request body for enrolling self.user in self.course."""
        return json.dumps({
            'course_details': {
                'course_id': unicode(self.course.id)
            },
            'user': self.user.username
        })
    def assert_access_denied(self, user_message_path):
        """
        Verify that the view returns HTTP status 403 and includes a URL in the response, and no enrollment is created.
        """
        data = self._generate_data()
        response = self.client.post(self.url, data, content_type='application/json')
        # Expect an error response
        self.assertEqual(response.status_code, 403)
        # Expect that the redirect URL is included in the response
        resp_data = json.loads(response.content)
        user_message_url = get_absolute_url(user_message_path)
        self.assertEqual(resp_data['user_message_url'], user_message_url)
        # Verify that we were not enrolled
        self.assertEqual(self._get_enrollments(), [])
    @patch.dict(settings.FEATURES, {'EMBARGO': True})
    def test_embargo_change_enrollment_restrict_geoip(self):
        """ Validates that enrollment changes are blocked if the request originates from an embargoed country. """
        # Use the helper to setup the embargo and simulate a request from a blocked IP address.
        with restrict_course(self.course.id) as redirect_path:
            self.assert_access_denied(redirect_path)
    def _setup_embargo(self):
        """Blacklist the US for this course; return (unrestricted, restricted) countries."""
        restricted_course = RestrictedCourse.objects.create(course_key=self.course.id)
        restricted_country = Country.objects.create(country='US')
        unrestricted_country = Country.objects.create(country='CA')
        CountryAccessRule.objects.create(
            rule_type=CountryAccessRule.BLACKLIST_RULE,
            restricted_course=restricted_course,
            country=restricted_country
        )
        # Clear the cache to remove the effects of previous embargo tests
        cache.clear()
        return unrestricted_country, restricted_country
    @override_settings(EDX_API_KEY=EnrollmentTestMixin.API_KEY)
    @patch.dict(settings.FEATURES, {'EMBARGO': True})
    def test_embargo_change_enrollment_restrict_user_profile(self):
        """ Validates that enrollment changes are blocked if the user's profile is linked to an embargoed country. """
        __, restricted_country = self._setup_embargo()
        # Update the user's profile, linking the user to the embargoed country.
        self.user.profile.country = restricted_country.country
        self.user.profile.save()
        path = reverse('embargo:blocked_message', kwargs={'access_point': 'enrollment', 'message_key': 'default'})
        self.assert_access_denied(path)
    @override_settings(EDX_API_KEY=EnrollmentTestMixin.API_KEY)
    @patch.dict(settings.FEATURES, {'EMBARGO': True})
    def test_embargo_change_enrollment_allow_user_profile(self):
        """
        Validates that enrollment changes are allowed if the user's profile is NOT linked to an embargoed country.
        """
        # Setup the embargo
        unrestricted_country, __ = self._setup_embargo()
        # Verify that users without black-listed country codes *can* be enrolled
        self.user.profile.country = unrestricted_country.country
        self.user.profile.save()
        self.assert_enrollment_status()
    @patch.dict(settings.FEATURES, {'EMBARGO': True})
    def test_embargo_change_enrollment_allow(self):
        """With no embargo rules configured, enrollment should succeed."""
        self.assert_enrollment_status()
        # Verify that we were enrolled
        self.assertEqual(len(self._get_enrollments()), 1)
def cross_domain_config(func):
    """Decorator that configures a test for cross-domain (CORS/CSRF) requests.

    Wraps *func* so that it runs with CORS headers enabled, the cross-domain
    CSRF cookie configured for the edx.org domain, and every request reported
    as secure (HTTPS).
    """
    # Innermost-to-outermost order matches the original nested application:
    # is_secure patch inside, settings override next, feature flags outside.
    decorators = (
        patch.dict(settings.FEATURES, {
            'ENABLE_CORS_HEADERS': True,
            'ENABLE_CROSS_DOMAIN_CSRF_COOKIE': True
        }),
        override_settings(
            CORS_ORIGIN_WHITELIST=["www.edx.org"],
            CROSS_DOMAIN_CSRF_COOKIE_NAME="prod-edx-csrftoken",
            CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=".edx.org"
        ),
        patch.object(WSGIRequest, 'is_secure', return_value=True),
    )
    wrapped = func
    for decorator in reversed(decorators):
        wrapped = decorator(wrapped)
    return wrapped
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class EnrollmentCrossDomainTest(ModuleStoreTestCase):
    """Test cross-domain calls to the enrollment end-points. """
    USERNAME = "Bob"
    EMAIL = "bob@example.com"
    PASSWORD = "edx"
    REFERER = "https://www.edx.org"
    def setUp(self):
        """ Create a course and user, then log in. """
        super(EnrollmentCrossDomainTest, self).setUp()
        self.course = CourseFactory.create()
        self.user = UserFactory.create(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)
        # CSRF checks must be enforced for these tests to be meaningful.
        self.client = Client(enforce_csrf_checks=True)
        self.client.login(username=self.USERNAME, password=self.PASSWORD)
    @cross_domain_config
    def test_cross_domain_change_enrollment(self, *args):  # pylint: disable=unused-argument
        csrf_cookie = self._get_csrf_cookie()
        resp = self._cross_domain_post(csrf_cookie)
        # Expect that the request gets through successfully,
        # passing the CSRF checks (including the referer check).
        self.assertEqual(resp.status_code, 200)
    @cross_domain_config
    def test_cross_domain_missing_csrf(self, *args):  # pylint: disable=unused-argument
        # A bogus CSRF token must be rejected with 403.
        resp = self._cross_domain_post('invalid_csrf_token')
        self.assertEqual(resp.status_code, 403)
    def _get_csrf_cookie(self):
        """Retrieve the cross-domain CSRF cookie. """
        url = reverse('courseenrollment', kwargs={
            'course_id': unicode(self.course.id)
        })
        resp = self.client.get(url, HTTP_REFERER=self.REFERER)
        self.assertEqual(resp.status_code, 200)
        self.assertIn('prod-edx-csrftoken', resp.cookies)  # pylint: disable=no-member
        return resp.cookies['prod-edx-csrftoken'].value  # pylint: disable=no-member
    def _cross_domain_post(self, csrf_cookie):
        """Perform a cross-domain POST request. """
        url = reverse('courseenrollments')
        params = json.dumps({
            'course_details': {
                'course_id': unicode(self.course.id),
            },
            'user': self.user.username
        })
        return self.client.post(
            url, params, content_type='application/json',
            HTTP_REFERER=self.REFERER,
            HTTP_X_CSRFTOKEN=csrf_cookie
        )
@ddt.ddt
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class UnenrollmentTest(EnrollmentTestMixin, ModuleStoreTestCase):
    """
    Tests unenrollment functionality. The API being tested is intended to
    unenroll a learner from all of their courses.
    """
    USERNAME = "Bob"
    EMAIL = "bob@example.com"
    PASSWORD = "edx"
    ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
    ENABLED_SIGNALS = ['course_published']
    def setUp(self):
        """ Create a course and user, then log in. """
        super(UnenrollmentTest, self).setUp()
        self.superuser = SuperuserFactory()
        # Pass emit_signals when creating the course so it would be cached
        # as a CourseOverview. Enrollments require a cached CourseOverview.
        self.first_org_course = CourseFactory.create(emit_signals=True, org="org", course="course", run="run")
        self.other_first_org_course = CourseFactory.create(emit_signals=True, org="org", course="course2", run="run2")
        self.second_org_course = CourseFactory.create(emit_signals=True, org="org2", course="course3", run="run3")
        self.third_org_course = CourseFactory.create(emit_signals=True, org="org3", course="course4", run="run4")
        self.courses = [
            self.first_org_course, self.other_first_org_course, self.second_org_course, self.third_org_course
        ]
        # The distinct orgs across self.courses; the unenrollment endpoint is
        # expected to report these back (see test_deactivate_enrollments).
        self.orgs = {"org", "org2", "org3"}
        for course in self.courses:
            CourseModeFactory.create(
                course_id=str(course.id),
                mode_slug=CourseMode.DEFAULT_MODE_SLUG,
                mode_display_name=CourseMode.DEFAULT_MODE,
            )
        self.user = UserFactory.create(
            username=self.USERNAME,
            email=self.EMAIL,
            password=self.PASSWORD,
        )
        self.client.login(username=self.USERNAME, password=self.PASSWORD)
        # Actively enroll the user in every course.
        for course in self.courses:
            self.assert_enrollment_status(course_id=str(course.id), username=self.USERNAME, is_active=True)
    def _create_test_retirement(self, user=None):
        """
        Helper method to create a RetirementStatus with useful defaults
        """
        pending_state = RetirementState.objects.create(
            state_name='PENDING',
            state_execution_order=1,
            is_dead_end_state=False,
            required=False
        )
        if user is None:
            user = UserFactory()
        return UserRetirementStatus.create_retirement(user)
    def build_jwt_headers(self, user):
        """
        Helper function for creating headers for the JWT authentication.
        """
        token = JwtBuilder(user).build_token([])
        headers = {'HTTP_AUTHORIZATION': 'JWT ' + token}
        return headers
    def test_deactivate_enrollments(self):
        self._assert_active()
        self._create_test_retirement(self.user)
        response = self._submit_unenroll(self.superuser, self.user.username)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = json.loads(response.content)
        # order doesn't matter so compare sets
        self.assertEqual(set(data), self.orgs)
        self._assert_inactive()
    def test_deactivate_enrollments_no_retirement_status(self):
        # Without a pending retirement, the endpoint must refuse (404).
        self._assert_active()
        response = self._submit_unenroll(self.superuser, self.user.username)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
    def test_deactivate_enrollments_unauthorized(self):
        # A non-superuser may not unenroll anyone, including themselves.
        self._assert_active()
        response = self._submit_unenroll(self.user, self.user.username)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self._assert_active()
    def test_deactivate_enrollments_no_username(self):
        self._assert_active()
        response = self._submit_unenroll(self.superuser, None)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        data = json.loads(response.content)
        self.assertEqual(data, u"Username not specified.")
        self._assert_active()
    def test_deactivate_enrollments_empty_username(self):
        self._assert_active()
        self._create_test_retirement(self.user)
        response = self._submit_unenroll(self.superuser, "")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self._assert_active()
    def test_deactivate_enrollments_invalid_username(self):
        self._assert_active()
        self._create_test_retirement(self.user)
        response = self._submit_unenroll(self.superuser, "a made up username")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self._assert_active()
    def test_deactivate_enrollments_called_twice(self):
        # The second call is a no-op and returns 204 with an empty body.
        self._assert_active()
        self._create_test_retirement(self.user)
        response = self._submit_unenroll(self.superuser, self.user.username)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        response = self._submit_unenroll(self.superuser, self.user.username)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(response.content, "")
        self._assert_inactive()
    def _assert_active(self):
        """Assert the user is actively enrolled in every test course."""
        for course in self.courses:
            self.assertTrue(CourseEnrollment.is_enrolled(self.user, course.id))
            _, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, course.id)
            self.assertTrue(is_active)
    def _assert_inactive(self):
        """Assert every enrollment for the user has been deactivated."""
        for course in self.courses:
            _, is_active = CourseEnrollment.enrollment_mode_for_user(self.user, course.id)
            self.assertFalse(is_active)
    def _submit_unenroll(self, submitting_user, unenrolling_username):
        """POST to the unenrollment endpoint as *submitting_user*.

        Omits the 'username' key entirely when *unenrolling_username* is None.
        """
        data = {}
        if unenrolling_username is not None:
            data['username'] = unenrolling_username
        url = reverse('unenrollment')
        headers = self.build_jwt_headers(submitting_user)
        return self.client.post(url, json.dumps(data), content_type='application/json', **headers)
|
google/pigweed
|
refs/heads/main
|
pw_rpc/py/tests/client_test.py
|
1
|
#!/usr/bin/env python3
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Tests creating pw_rpc client."""
import unittest
from typing import Optional
from pw_protobuf_compiler import python_protos
from pw_status import Status
from pw_rpc import callback_client, client, packets
import pw_rpc.ids
from pw_rpc.internal.packet_pb2 import PacketType, RpcPacket
TEST_PROTO_1 = """\
syntax = "proto3";
package pw.test1;
message SomeMessage {
uint32 magic_number = 1;
}
message AnotherMessage {
enum Result {
FAILED = 0;
FAILED_MISERABLY = 1;
I_DONT_WANT_TO_TALK_ABOUT_IT = 2;
}
Result result = 1;
string payload = 2;
}
service PublicService {
rpc SomeUnary(SomeMessage) returns (AnotherMessage) {}
rpc SomeServerStreaming(SomeMessage) returns (stream AnotherMessage) {}
rpc SomeClientStreaming(stream SomeMessage) returns (AnotherMessage) {}
rpc SomeBidiStreaming(stream SomeMessage) returns (stream AnotherMessage) {}
}
"""
TEST_PROTO_2 = """\
syntax = "proto2";
package pw.test2;
message Request {
optional float magic_number = 1;
}
message Response {
}
service Alpha {
rpc Unary(Request) returns (Response) {}
}
service Bravo {
rpc BidiStreaming(stream Request) returns (stream Response) {}
}
"""
def _test_setup(output=None):
    """Compile the test protos and build a two-channel client.

    Channel 1 writes outgoing packets to *output* (may be None); channel 2
    discards everything. Returns (protos, client).
    """
    protos = python_protos.Library.from_strings([TEST_PROTO_1, TEST_PROTO_2])
    channels = [
        client.Channel(1, output),
        client.Channel(2, lambda _: None),
    ]
    rpc_client = client.Client.from_modules(
        callback_client.Impl(), channels, protos.modules())
    return protos, rpc_client
class ChannelClientTest(unittest.TestCase):
    """Tests the ChannelClient."""
    def setUp(self) -> None:
        # Only the ChannelClient for channel 1 is needed by these tests.
        self._channel_client = _test_setup()[1].channel(1)
    def test_access_service_client_as_attribute_or_index(self) -> None:
        # Attribute access, name indexing, and hashed-ID indexing must all
        # resolve to the same service client object.
        self.assertIs(self._channel_client.rpcs.pw.test1.PublicService,
                      self._channel_client.rpcs['pw.test1.PublicService'])
        self.assertIs(
            self._channel_client.rpcs.pw.test1.PublicService,
            self._channel_client.rpcs[pw_rpc.ids.calculate(
                'pw.test1.PublicService')])
    def test_access_method_client_as_attribute_or_index(self) -> None:
        # Same identity guarantee for method clients.
        self.assertIs(self._channel_client.rpcs.pw.test2.Alpha.Unary,
                      self._channel_client.rpcs['pw.test2.Alpha']['Unary'])
        self.assertIs(
            self._channel_client.rpcs.pw.test2.Alpha.Unary,
            self._channel_client.rpcs['pw.test2.Alpha'][pw_rpc.ids.calculate(
                'Unary')])
    def test_service_name(self) -> None:
        self.assertEqual(
            self._channel_client.rpcs.pw.test2.Alpha.Unary.service.name,
            'Alpha')
        self.assertEqual(
            self._channel_client.rpcs.pw.test2.Alpha.Unary.service.full_name,
            'pw.test2.Alpha')
    def test_method_name(self) -> None:
        self.assertEqual(
            self._channel_client.rpcs.pw.test2.Alpha.Unary.method.name,
            'Unary')
        self.assertEqual(
            self._channel_client.rpcs.pw.test2.Alpha.Unary.method.full_name,
            'pw.test2.Alpha.Unary')
    def test_iterate_over_all_methods(self) -> None:
        # methods() must yield every method from both test protos, once each.
        channel_client = self._channel_client
        all_methods = {
            channel_client.rpcs.pw.test1.PublicService.SomeUnary,
            channel_client.rpcs.pw.test1.PublicService.SomeServerStreaming,
            channel_client.rpcs.pw.test1.PublicService.SomeClientStreaming,
            channel_client.rpcs.pw.test1.PublicService.SomeBidiStreaming,
            channel_client.rpcs.pw.test2.Alpha.Unary,
            channel_client.rpcs.pw.test2.Bravo.BidiStreaming,
        }
        self.assertEqual(set(channel_client.methods()), all_methods)
    def test_check_for_presence_of_services(self) -> None:
        # `in` works with both the full name and the calculated ID.
        self.assertIn('pw.test1.PublicService', self._channel_client.rpcs)
        self.assertIn(pw_rpc.ids.calculate('pw.test1.PublicService'),
                      self._channel_client.rpcs)
    def test_check_for_presence_of_missing_services(self) -> None:
        # Partial names and unknown IDs must not match.
        self.assertNotIn('PublicService', self._channel_client.rpcs)
        self.assertNotIn('NotAService', self._channel_client.rpcs)
        self.assertNotIn(-1213, self._channel_client.rpcs)
    def test_check_for_presence_of_methods(self) -> None:
        service = self._channel_client.rpcs.pw.test1.PublicService
        self.assertIn('SomeUnary', service)
        self.assertIn(pw_rpc.ids.calculate('SomeUnary'), service)
    def test_check_for_presence_of_missing_methods(self) -> None:
        service = self._channel_client.rpcs.pw.test1.PublicService
        self.assertNotIn('Some', service)
        self.assertNotIn('Unary', service)
        self.assertNotIn(12345, service)
    def test_method_fully_qualified_name(self) -> None:
        # Both 'package.Service/Method' and 'package.Service.Method' resolve.
        self.assertIs(self._channel_client.method('pw.test2.Alpha/Unary'),
                      self._channel_client.rpcs.pw.test2.Alpha.Unary)
        self.assertIs(self._channel_client.method('pw.test2.Alpha.Unary'),
                      self._channel_client.rpcs.pw.test2.Alpha.Unary)
class ClientTest(unittest.TestCase):
    """Tests the pw_rpc Client independently of the ClientImpl."""
    def setUp(self) -> None:
        # Channel 1's output callback records the last packet the client sent.
        self._last_packet_sent_bytes: Optional[bytes] = None
        self._protos, self._client = _test_setup(self._save_packet)
    def _save_packet(self, packet) -> None:
        """Channel output callback: remember the serialized packet."""
        self._last_packet_sent_bytes = packet
    def _last_packet_sent(self) -> RpcPacket:
        """Decode and return the most recently sent packet."""
        packet = RpcPacket()
        assert self._last_packet_sent_bytes is not None
        packet.MergeFromString(self._last_packet_sent_bytes)
        return packet
    def test_channel(self) -> None:
        self.assertEqual(self._client.channel(1).channel.id, 1)
        self.assertEqual(self._client.channel(2).channel.id, 2)
    def test_channel_default_is_first_listed(self) -> None:
        # channel() with no argument returns the first configured channel.
        self.assertEqual(self._client.channel().channel.id, 1)
    def test_channel_invalid(self) -> None:
        with self.assertRaises(KeyError):
            self._client.channel(404)
    def test_all_methods(self) -> None:
        services = self._client.services
        all_methods = {
            services['pw.test1.PublicService'].methods['SomeUnary'],
            services['pw.test1.PublicService'].methods['SomeServerStreaming'],
            services['pw.test1.PublicService'].methods['SomeClientStreaming'],
            services['pw.test1.PublicService'].methods['SomeBidiStreaming'],
            services['pw.test2.Alpha'].methods['Unary'],
            services['pw.test2.Bravo'].methods['BidiStreaming'],
        }
        self.assertEqual(set(self._client.methods()), all_methods)
    def test_method_present(self) -> None:
        # Both '.' and '/' separators between service and method are accepted.
        self.assertIs(
            self._client.method('pw.test1.PublicService.SomeUnary'), self.
            _client.services['pw.test1.PublicService'].methods['SomeUnary'])
        self.assertIs(
            self._client.method('pw.test1.PublicService/SomeUnary'), self.
            _client.services['pw.test1.PublicService'].methods['SomeUnary'])
    def test_method_invalid_format(self) -> None:
        # A bare method name without its service is malformed.
        with self.assertRaises(ValueError):
            self._client.method('SomeUnary')
    def test_method_not_present(self) -> None:
        with self.assertRaises(KeyError):
            self._client.method('pw.test1.PublicService/ThisIsNotGood')
        with self.assertRaises(KeyError):
            self._client.method('nothing.Good')
    def test_process_packet_invalid_proto_data(self) -> None:
        # Undecodable bytes are reported as DATA_LOSS.
        self.assertIs(self._client.process_packet(b'NOT a packet!'),
                      Status.DATA_LOSS)
    def test_process_packet_not_for_client(self) -> None:
        # A REQUEST packet is server-bound, so the client rejects it.
        self.assertIs(
            self._client.process_packet(
                RpcPacket(type=PacketType.REQUEST).SerializeToString()),
            Status.INVALID_ARGUMENT)
    def test_process_packet_unrecognized_channel(self) -> None:
        # Channel 123 is not configured (only 1 and 2 are).
        self.assertIs(
            self._client.process_packet(
                packets.encode_response(
                    (123, 456, 789),
                    self._protos.packages.pw.test2.Request())),
            Status.NOT_FOUND)
    def test_process_packet_unrecognized_service(self) -> None:
        # Unknown service: packet is consumed (OK) but a CLIENT_ERROR with
        # NOT_FOUND is sent back on the same channel.
        self.assertIs(
            self._client.process_packet(
                packets.encode_response(
                    (1, 456, 789), self._protos.packages.pw.test2.Request())),
            Status.OK)
        self.assertEqual(
            self._last_packet_sent(),
            RpcPacket(type=PacketType.CLIENT_ERROR,
                      channel_id=1,
                      service_id=456,
                      method_id=789,
                      status=Status.NOT_FOUND.value))
    def test_process_packet_unrecognized_method(self) -> None:
        # Known service, unknown method ID: same CLIENT_ERROR / NOT_FOUND reply.
        service = next(iter(self._client.services))
        self.assertIs(
            self._client.process_packet(
                packets.encode_response(
                    (1, service.id, 789),
                    self._protos.packages.pw.test2.Request())), Status.OK)
        self.assertEqual(
            self._last_packet_sent(),
            RpcPacket(type=PacketType.CLIENT_ERROR,
                      channel_id=1,
                      service_id=service.id,
                      method_id=789,
                      status=Status.NOT_FOUND.value))
    def test_process_packet_non_pending_method(self) -> None:
        # Valid method but no RPC in flight: CLIENT_ERROR / FAILED_PRECONDITION.
        service = next(iter(self._client.services))
        method = next(iter(service.methods))
        self.assertIs(
            self._client.process_packet(
                packets.encode_response(
                    (1, service.id, method.id),
                    self._protos.packages.pw.test2.Request())), Status.OK)
        self.assertEqual(
            self._last_packet_sent(),
            RpcPacket(type=PacketType.CLIENT_ERROR,
                      channel_id=1,
                      service_id=service.id,
                      method_id=method.id,
                      status=Status.FAILED_PRECONDITION.value))
if __name__ == '__main__':
unittest.main()
|
feer56/Kitsune2
|
refs/heads/master
|
kitsune/gallery/__init__.py
|
24
|
# The number of gallery items shown per page in paginated listings.
ITEMS_PER_PAGE = 24
|
pwoodworth/intellij-community
|
refs/heads/master
|
python/testData/resolve/ClassPrivateOutsideClass.py
|
83
|
class A(object):
__X = 1
A._<ref>_X
|
lmazuel/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-subscription/azure/mgmt/subscription/models/subscription_definition_paged.py
|
2
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class SubscriptionDefinitionPaged(Paged):
    """
    A paging container for iterating over a list of :class:`SubscriptionDefinition <azure.mgmt.subscription.models.SubscriptionDefinition>` objects.
    """
    # msrest serialization map: 'nextLink' holds the continuation URL,
    # 'value' holds the current page of SubscriptionDefinition items.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[SubscriptionDefinition]'}
    }
    def __init__(self, *args, **kwargs):
        super(SubscriptionDefinitionPaged, self).__init__(*args, **kwargs)
|
sinotradition/meridian
|
refs/heads/master
|
meridian/tst/acupoints/test_shaohai43.py
|
1
|
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
import unittest
from meridian.acupoints import shaohai43
class TestShaohai43Functions(unittest.TestCase):
    """Placeholder test case for the shaohai43 acupoint module."""

    def setUp(self):
        # No fixtures are required yet.
        pass

    def test_xxx(self):
        # TODO: add real assertions against shaohai43.
        pass
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
patilsangram/erpnext
|
refs/heads/develop
|
erpnext/hr/doctype/employee/test_employee.py
|
3
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import unittest
import frappe.utils
test_records = frappe.get_test_records('Employee')
class TestEmployee(unittest.TestCase):
    def test_birthday_reminders(self):
        """An employee born today receives a birthday-reminder e-mail.

        Mutates the first Employee in the test database so its birthday
        falls on today's month/day, then checks that
        get_employees_who_are_born_today() picks the employee up and that
        send_birthday_reminders() enqueues an e-mail whose subject is
        "Birthday Reminder".
        """
        employee = frappe.get_doc("Employee", frappe.db.sql_list("select name from tabEmployee limit 1")[0])
        # Keep the birth year (1992) but move the month/day to today.
        employee.date_of_birth = "1992" + frappe.utils.nowdate()[4:]
        employee.company_email = "test@example.com"
        employee.save()
        from erpnext.hr.doctype.employee.employee import get_employees_who_are_born_today, send_birthday_reminders
        self.assertTrue(employee.name in [e.name for e in get_employees_who_are_born_today()])
        # Start from an empty mail queue so the final assertion only sees
        # the reminder generated by this test.
        frappe.db.sql("delete from `tabEmail Queue`")
        hr_settings = frappe.get_doc("HR Settings", "HR Settings")
        hr_settings.stop_birthday_reminders = 0  # make sure reminders are enabled
        hr_settings.save()
        send_birthday_reminders()
        email_queue = frappe.db.sql("""select * from `tabEmail Queue`""", as_dict=True)
        self.assertTrue("Subject: Birthday Reminder" in email_queue[0].message)
def make_employee(user):
    """Create (if needed) and return the name of an Employee linked to *user*.

    Ensures a User record with the given e-mail exists, then ensures an
    Employee whose ``user_id`` is that e-mail exists, creating both with
    test defaults as necessary.

    Args:
        user: e-mail address used as User.email and Employee.user_id.

    Returns:
        The ``name`` of the (new or existing) Employee document.
    """
    if not frappe.db.get_value("User", user):
        frappe.get_doc({
            "doctype": "User",
            "email": user,
            "first_name": user,
            "new_password": "password",
            "roles": [{"doctype": "Has Role", "role": "Employee"}]
        }).insert()
    if not frappe.db.get_value("Employee", {"user_id": user}):
        employee = frappe.get_doc({
            "doctype": "Employee",
            "naming_series": "EMP-",
            "first_name": user,
            "company": erpnext.get_default_company(),
            "user_id": user,
            "date_of_birth": "1990-05-08",
            "date_of_joining": "2013-01-01",
            "department": frappe.get_all("Department", fields="name")[0].name,
            "gender": "Female",
            "company_email": user,
            "prefered_contact_email": "Company Email",
            "prefered_email": user,
            "status": "Active",
            "employment_type": "Intern"
        }).insert()
        return employee.name
    else:
        # BUG FIX: the existence check above matches on user_id, but the
        # lookup here filtered on employee_name, which only coincidentally
        # equals the user's e-mail. Query by user_id for consistency.
        return frappe.get_value("Employee", {"user_id": user}, "name")
|
connoranderson/Speechables
|
refs/heads/master
|
mechanize-0.2.5/mechanize/_firefox3cookiejar.py
|
134
|
"""Firefox 3 "cookies.sqlite" cookie persistence.
Copyright 2008 John J Lee <jjl@pobox.com>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
import logging
import time
from _clientcookie import CookieJar, Cookie, MappingIterator
from _util import isstringlike, experimental
# Module-level shortcut to the mechanize cookie logger's debug() function,
# used by the cookie-matching code below.
debug = logging.getLogger("mechanize.cookies").debug
class Firefox3CookieJar(CookieJar):

    """Firefox 3 cookie jar.

    The cookies are stored in Firefox 3's "cookies.sqlite" format.

    Constructor arguments:

    filename: filename of cookies.sqlite (typically found at the top level
     of a firefox profile directory)
    autoconnect: as a convenience, connect to the SQLite cookies database at
     Firefox3CookieJar construction time (default True)
    policy: an object satisfying the mechanize.CookiePolicy interface

    Note that this is NOT a FileCookieJar, and there are no .load(),
    .save() or .restore() methods.  The database is in sync with the
    cookiejar object's state after each public method call.

    Following Firefox's own behaviour, session cookies are never saved to
    the database.

    The file is created, and an sqlite database written to it, if it does
    not already exist. The moz_cookies database table is created if it does
    not already exist.
    """

    # XXX
    # handle DatabaseError exceptions
    # add a FileCookieJar (explicit .save() / .revert() / .load() methods)

    def __init__(self, filename, autoconnect=True, policy=None):
        experimental("Firefox3CookieJar is experimental code")
        CookieJar.__init__(self, policy)
        if filename is not None and not isstringlike(filename):
            raise ValueError("filename must be string-like")
        self.filename = filename
        # Connection is lazy unless autoconnect is requested.
        self._conn = None
        if autoconnect:
            self.connect()

    def connect(self):
        # Open (and create if necessary) the cookies.sqlite database.
        import sqlite3  # not available in Python 2.4 stdlib
        self._conn = sqlite3.connect(self.filename)
        self._conn.isolation_level = "DEFERRED"
        self._create_table_if_necessary()

    def close(self):
        self._conn.close()

    def _transaction(self, func):
        # Run func(cursor) inside a transaction: commit on success,
        # roll back and re-raise on any exception.
        try:
            cur = self._conn.cursor()
            try:
                result = func(cur)
            finally:
                cur.close()
        except:
            self._conn.rollback()
            raise
        else:
            self._conn.commit()
        return result

    def _execute(self, query, params=()):
        # Single write statement wrapped in a transaction.
        return self._transaction(lambda cur: cur.execute(query, params))

    def _query(self, query, params=()):
        # XXX should we bother with a transaction?
        cur = self._conn.cursor()
        try:
            cur.execute(query, params)
            return cur.fetchall()
        finally:
            cur.close()

    def _create_table_if_necessary(self):
        self._execute("""\
CREATE TABLE IF NOT EXISTS moz_cookies (id INTEGER PRIMARY KEY, name TEXT,
    value TEXT, host TEXT, path TEXT,expiry INTEGER,
    lastAccessed INTEGER, isSecure INTEGER, isHttpOnly INTEGER)""")

    def _cookie_from_row(self, row):
        # Translate one moz_cookies row into a mechanize Cookie.
        (pk, name, value, domain, path, expires,
         last_accessed, secure, http_only) = row

        version = 0
        domain = domain.encode("ascii", "ignore")
        path = path.encode("ascii", "ignore")
        name = name.encode("ascii", "ignore")
        value = value.encode("ascii", "ignore")
        secure = bool(secure)

        # last_accessed isn't a cookie attribute, so isn't added to rest
        rest = {}
        if http_only:
            rest["HttpOnly"] = None

        # Firefox stores valueless cookies with the value in the name
        # column; mechanize uses name=None for those (see _row_from_cookie
        # for the inverse mapping).
        if name == "":
            name = value
            value = None

        initial_dot = domain.startswith(".")
        domain_specified = initial_dot

        discard = False
        # An empty expiry marks a session cookie (written as "" by
        # _row_from_cookie for discardable cookies).
        if expires == "":
            expires = None
            discard = True

        return Cookie(version, name, value,
                      None, False,
                      domain, domain_specified, initial_dot,
                      path, False,
                      secure,
                      expires,
                      discard,
                      None,
                      None,
                      rest)

    def clear(self, domain=None, path=None, name=None):
        # Clear matching in-memory (session) cookies first, then delete
        # matching rows from the database in one transaction.
        CookieJar.clear(self, domain, path, name)
        where_parts = []
        sql_params = []
        if domain is not None:
            where_parts.append("host = ?")
            sql_params.append(domain)
            if path is not None:
                where_parts.append("path = ?")
                sql_params.append(path)
                if name is not None:
                    where_parts.append("name = ?")
                    sql_params.append(name)
        where = " AND ".join(where_parts)
        if where:
            where = " WHERE " + where
        def clear(cur):
            cur.execute("DELETE FROM moz_cookies%s" % where,
                        tuple(sql_params))
        self._transaction(clear)

    def _row_from_cookie(self, cookie, cur):
        # Translate a mechanize Cookie into a moz_cookies row, allocating
        # a fresh primary key from the current MAX(id).
        expires = cookie.expires
        if cookie.discard:
            # Session cookies are marked with an empty expiry string.
            expires = ""

        domain = unicode(cookie.domain)
        path = unicode(cookie.path)
        name = unicode(cookie.name)
        value = unicode(cookie.value)
        secure = bool(int(cookie.secure))

        # Inverse of the valueless-cookie mapping in _cookie_from_row.
        if value is None:
            value = name
            name = ""

        last_accessed = int(time.time())
        http_only = cookie.has_nonstandard_attr("HttpOnly")

        query = cur.execute("""SELECT MAX(id) + 1 from moz_cookies""")
        pk = query.fetchone()[0]
        if pk is None:
            pk = 1

        return (pk, name, value, domain, path, expires,
                last_accessed, secure, http_only)

    def set_cookie(self, cookie):
        # Session cookies stay in memory only (Firefox behaviour);
        # persistent cookies replace any same-keyed row in the database.
        if cookie.discard:
            CookieJar.set_cookie(self, cookie)
            return

        # NOTE: this inner function deliberately shadows the method name;
        # it is only passed to _transaction below.
        def set_cookie(cur):
            # XXX
            # is this RFC 2965-correct?
            # could this do an UPDATE instead?
            row = self._row_from_cookie(cookie, cur)
            name, unused, domain, path = row[1:5]
            cur.execute("""\
DELETE FROM moz_cookies WHERE host = ? AND path = ? AND name = ?""",
                        (domain, path, name))
            cur.execute("""\
INSERT INTO moz_cookies VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
""", row)
        self._transaction(set_cookie)

    def __iter__(self):
        # session (non-persistent) cookies
        for cookie in MappingIterator(self._cookies):
            yield cookie
        # persistent cookies
        for row in self._query("""\
SELECT * FROM moz_cookies ORDER BY name, path, host"""):
            yield self._cookie_from_row(row)

    def _cookies_for_request(self, request):
        # Combine in-memory session cookies with matching persistent
        # cookies from the database.
        session_cookies = CookieJar._cookies_for_request(self, request)
        def get_cookies(cur):
            query = cur.execute("SELECT host from moz_cookies")
            domains = [row[0] for row in query.fetchall()]
            cookies = []
            for domain in domains:
                cookies += self._persistent_cookies_for_domain(domain,
                                                              request, cur)
            return cookies
        persistent_coookies = self._transaction(get_cookies)
        return session_cookies + persistent_coookies

    def _persistent_cookies_for_domain(self, domain, request, cur):
        # Filter the stored cookies for one domain through the policy.
        cookies = []
        if not self._policy.domain_return_ok(domain, request):
            return []
        debug("Checking %s for cookies to return", domain)
        query = cur.execute("""\
SELECT * from moz_cookies WHERE host = ? ORDER BY path""",
                            (domain,))
        cookies = [self._cookie_from_row(row) for row in query.fetchall()]
        last_path = None
        r = []
        for cookie in cookies:
            # Cookies are ordered by path, so the policy check per distinct
            # path is only done once.
            if (cookie.path != last_path and
                not self._policy.path_return_ok(cookie.path, request)):
                last_path = cookie.path
                continue
            if not self._policy.return_ok(cookie, request):
                debug("   not returning cookie")
                continue
            debug("   it's a match")
            r.append(cookie)
        return r
|
rosenvladimirov/addons
|
refs/heads/8.0
|
hw_datecs_bg/icl/icl.py
|
1
|
# -*- coding: utf-8 -*-
#
# Copyright 2013 Rosen Vladimirov <vladimirov.rosen@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
from __future__ import unicode_literals
import serial
import time
import datetime
from .conf import *
from .protocol import *
from .utils import *
# ASCII control bytes used by the device wire protocol.
#ENQ = chr(0x05) # Enquire: request an acknowledgement (classic framing; unused here).
#STX = chr(0x02) # Start of Text (classic framing; replaced by 0x01 below).
STX = chr(0x01)             # frame start (this protocol uses 0x01, not 0x02)
ARGS_SEPARATOR = chr(0x04)  # separates data from the status field in replies
ARGS_END = chr(0x05)        # terminates the argument section
END = chr(0x3)              # frame end (ETX)
ACK = chr(0x06)  # Acknowledgement: frame accepted.
NAK = chr(0x15)  # Negative acknowledgement: frame rejected.
SYN = chr(0x16)  # Device needs more time to finish the command.
class ICLError(Exception):
    """Base error for this fiscal-device driver.

    May be constructed either with an integer device error code (looked
    up in the star-imported BUGS table) or with a ready message string.
    """
    def __init__(self, value):
        # Integer codes are translated through BUGS into "source: message";
        # anything else is used verbatim as the message.
        if isinstance(value, int):
            self.value = value
            self.source, self.message = BUGS[value]
            msg = '%s: %s' % (self.source, self.message)
        else:
            msg = value
        # On Python 2, encode the (possibly unicode) message to UTF-8 so
        # printing it cannot raise UnicodeEncodeError.
        if PY2:
            try:
                msg = msg.encode('utf-8')
            except UnicodeError:
                pass
        super(ICLError, self).__init__(msg)
class ConnectionError(ICLError):
    """Raised when the serial link to the fiscal device cannot be used."""
    pass
class BaseICL(object):
    """Low-level serial transport for talking to the fiscal device.

    The host is the master and the device the slave, over RS-232 with no
    hardware handshaking. Messages are framed as:

        <STX(0x01)><LEN><SEQ><CMD><DATA><0x04><STATUS><0x05><BCC><0x03>

    where LEN covers everything between it and the checksum, and BCC is
    the XOR of all bytes after STX. Every accepted frame is answered with
    a single ACK/NAK byte; SYN means the device needs more time. Default
    port settings: 8N1, no parity, speed ``DEFAULT_BOD``.
    """
    # Last error text (informational only; not read by this class).
    error = ''
    port = DEFAULT_PORT
    # Operator / administrator passwords, pre-processed into wire format.
    password = password_prapare(DEFAULT_PASSWORD)
    admin_password = password_prapare(DEFAULT_ADMIN_PASSWORD)
    bod = DEFAULT_BOD
    parity = serial.PARITY_NONE
    stopbits = serial.STOPBITS_ONE
    timeout = 0.7
    writeTimeout = 0.7
    # Frame sequence number sent in the SEQ byte of every request.
    frameSeqNumber = 0x20

    def __init__(self, **kwargs):
        """Accept passwords as hex-value sequences or plain 4-character
        ASCII strings; any remaining keyword arguments override the class
        attributes above (port, bod, timeout, ...).
        """
        if 'password' in kwargs:
            self.password = password_prapare(kwargs.pop('password'))
        if 'admin_password' in kwargs:
            self.admin_password = password_prapare(kwargs.pop('admin_password'))
        # NOTE(review): list comprehension used purely for its side effect.
        [ setattr(self, k, v) for k,v in kwargs.items() ]

    @property
    def is_connected(self):
        """True when a serial connection object exists."""
        return bool(self._conn)

    @property
    def conn(self):
        """Return the serial connection, connecting on first use."""
        if hasattr(self, '_conn') and self._conn is not None:
            return self._conn
        self.connect()
        return self._conn

    def connect(self):
        """Open the serial port and verify it is usable."""
        try:
            self._conn = serial.Serial(
                self.port, self.bod,
                parity=self.parity,
                stopbits=self.stopbits,
                timeout=self.timeout,
                writeTimeout=self.writeTimeout
            )
        except serial.SerialException:
            raise ConnectionError('Невозможно соединиться с FP (порт=%s)' % self.port)
        return self.check_port()

    def disconnect(self):
        """Close the serial connection (no-op safe)."""
        if self.conn:
            self._conn.close()
            self._conn = None
        return True

    def check_port(self):
        """Raise ConnectionError unless the serial port is open."""
        if not self.conn.isOpen():
            raise ConnectionError('Последовательный порт закрыт')
        return True

    def check_state(self):
        """Read one status byte from the device (SYN/ACK family).

        Returns the byte when it is SYN or ASK; raises ConnectionError if
        nothing arrives after a retry.
        """
        self.check_port()
        #self._write(ENQ)
        answer = self._read(1)
        if not answer:
            time.sleep(MIN_TIMEOUT)
            answer = self._read(1)
        # NOTE(review): ``ASK`` is not defined in this module — it must
        # come from one of the star imports (.conf/.protocol/.utils) or is
        # a typo for ACK. Confirm before relying on this branch.
        if answer in (SYN, ASK):
            return answer
        elif not answer:
            raise ConnectionError('Нет связи с устройством')

    def check_STX(self):
        """Wait for the frame-start byte (STX)."""
        answer = self._read(1)
        # To reliably catch the reply, wait with exponential backoff:
        # from MIN_TIMEOUT (0.05 s) up to ~12.9 s total over MAX_ATTEMPT
        # (default 12) tries.
        n = 0
        timeout = MIN_TIMEOUT
        while not answer and n < MAX_ATTEMPT:
            time.sleep(timeout)
            answer = self._read(1)
            n += 1
            timeout *= 1.5
        if answer == STX:
            return True
        else:
            raise ConnectionError('Нет связи с устройством')

    def check_SYN(self):
        """True when the device reports it is still busy (SYN)."""
        answer = self.check_state()
        if answer == SYN:
            return True
        return False

    def check_NAK(self):
        """True when the device rejected the last frame (NAK)."""
        answer = self.check_state()
        if answer == NAK:
            return True
        return False

    def check_ASK(self):
        """True when the device signals it is preparing an answer."""
        answer = self.check_state()
        if answer == ASK:
            return True
        return False

    def _read(self, read=None):
        """Low-level read from the serial connection."""
        return self.conn.read(read)

    def _write(self, write):
        """Low-level write to the serial connection."""
        return self.conn.write(write)

    def _flush(self):
        """Flush buffered output to the device."""
        return self.conn.flush()

    def clear(self):
        """Drain a stale answer if one is dangling in the device."""
        # NOTE(review): ``ENQ`` is commented out at the top of this module;
        # it presumably comes from a star import — verify, otherwise this
        # raises NameError.
        def one_round():
            self._write(ENQ)
            answer = self._read(1)
            if answer == NAK or not answer:
                return True
            time.sleep(MIN_TIMEOUT*10)
            return False
        n = 0
        while n < MAX_ATTEMPT and not one_round():
            n += 1
        if n >= MAX_ATTEMPT:
            return False
        return True

    def read(self):
        """Read and validate one full answer frame from the device.

        Returns a dict with 'command', 'error' and 'data' keys; NAKs the
        frame and raises ICLError on length/checksum mismatch.
        """
        answer = self.check_state()
        if answer == SYN or answer == ACK:
            i = 0
            # NOTE(review): check_ACK() is not defined on this class (only
            # check_ASK exists) — this loop will raise AttributeError
            # unless a subclass/mixin provides it. Confirm intent.
            while i < MAX_ATTEMPT and not self.check_ACK():
                i += 1
            if i >= MAX_ATTEMPT:
                self.disconnect()
                raise ConnectionError('Нет связи с устройством')
        elif not answer:
            self.disconnect()
            raise ConnectionError('Нет связи с устройством')
        j = 0
        while j < MAX_ATTEMPT and not self.check_STX():
            j += 1
        if j >= MAX_ATTEMPT:
            self.disconnect()
            raise ConnectionError('Нет связи с устройством')
        #<01><LEN><SEQ><CMD><DATA><04><STATUS><05><BCC><03>
        length = ord(self._read(1))
        sqn = self._read(1)
        command = self._read(1)
        data = self._read(length-4-32)
        args_sp = self._read(1)
        error = self._read(6)
        args_en = self._read(1)
        if length-4-32 != len(data):
            self._write(NAK)
            self.disconnect()
            msg = 'Длина ответа (%i) не равна длине полученных данных (%i)' % (length, len(data))
            raise ICLError(msg)
        # NOTE(review): the next two lines look broken: _read(1) returns a
        # byte string, so "+4+32" is a str+int TypeError, and chr(sqn) is
        # applied to a string that was already read. Likewise ord(error)
        # below is called on a 6-byte string. Verify against a real device
        # trace before changing.
        control_read = self._read(1)+4+32
        control_summ = get_control_summ(STX + chr(length) + chr(sqn) + command + data + args_sp + error + args_en)
        if control_read != control_summ:
            self._write(NAK)
            self.disconnect()
            msg = "Контрольная сумма %i должна быть равна %i " % (ord(control_summ), ord(control_read))
            raise ICLError(msg)
        self._write(ACK)
        self._flush()
        #~ time.sleep(MIN_TIMEOUT*2)
        return {
            'command': command,
            'error': ord(error),
            'data': data
        }

    def send(self, command, params, quick=False):
        """Frame and transmit one command to the device."""
        #~ self.clear()
        if not quick:
            self._flush()
        data = ''
        # 4 = <Preamble> + <LEN> + <SEQ> + <CMD>; 32 = 0x20 offset.
        length = 4+32
        if not params is None:
            data = params
            length += len(params)
        #<01><LEN><SEQ><CMD><DATA><05><BCC><03>
        content = STX
        content += "%s%s%s%s" % (chr(length),chr(self.frameSeqNumber),chr(command),data)
        content += ARGS_END
        control_summ = get_control_summ(content)
        self._write(content + control_summ + END)
        self._flush()
        return True

    def ask(self, command, params=None, sleep=0, pre_clear=True,\
                    without_password=False, disconnect=True, quick=False):
        """Send a command and return its answer as (data, error, command).

        When ``params`` is omitted the operator password is sent; ``quick``
        skips pre-clearing, sleeping and disconnecting. Raises ICLError
        when the device reports a non-zero error code.
        """
        #~ raise ICLError('Тест ошибки')
        if quick:
            pre_clear = False
            disconnect = False
            sleep = 0
        if params is None and not without_password:
            params = self.password
        #~ if pre_clear:
            #~ self.clear()
        self.send(command, params, quick=quick)
        if sleep:
            time.sleep(sleep)
        a = self.read()
        answer, error, command = (a['data'], a['error'], a['command'])
        if disconnect:
            self.disconnect()
        if error:
            raise ICLError(error)
        return answer, error, command
class ICL(BaseICL):
""" Класс с командами, исполняемыми согласно протокола """
## Implemented
    def x01(self, code):
        """Request a dump (command 01H).

        Message: 01H, 6 bytes — service-centre password (or the system
        administrator password when no service password is set, 4 bytes)
        plus a device code (1 byte):
            01 - FP storage 1, 02 - FP storage 2, 03 - clock,
            04 - non-volatile memory, 05 - FP processor,
            06 - device program memory, 07 - device RAM.
        Answer: 01H, 4 bytes — error code (1 byte) and the number of data
        blocks (2 bytes).
        """
        command = 0x01
        params = self.admin_password + chr(code)
        data, error, command = self.ask(command, params)
        return data
## Implemented
    def x02(self, code):
        """Request dump data (command 02H).

        Message: 02H, 5 bytes — service-centre (or admin) password
        (4 bytes).
        Answer: 02H, 37 bytes — error code (1 byte), the device code used
        in the dump request (1 byte, same encoding as :meth:`x01`),
        data-block number (2 bytes) and the data block itself (32 bytes).
        """
        command = 0x02
        params = self.admin_password + chr(code)
        data, error, command = self.ask(command, params)
        return data
## Implemented
    def x03(self):
        """Interrupt data output (command 03H).

        Message: 03H, 5 bytes — system administrator password (4 bytes).
        Answer: 03H, 2 bytes — error code (1 byte).
        """
        command = 0x03
        params = self.admin_password
        data, error, command = self.ask(command, params)
        return error
def x0D(self, old_password, new_password, rnm, inn):
""" Фискализация (перерегистрация) с длинным РНМ
Команда: 0DH. Длина сообщения: 22 байта.
Пароль старый (4 байта)
Пароль новый (4 байта)
РНМ (7 байт) 00000000000000...99999999999999
ИНН (6 байт) 000000000000...999999999999
Ответ: 0DH. Длина сообщения: 9 байт.
Код ошибки (1 байт)
Номер фискализации (перерегистрации) (1 байт) 1...16
Количество оставшихся перерегистраций (1 байт) 0...15
Номер последней закрытой смены (2 байта) 0000...2100
Дата фискализации (перерегистрации) (3 байта) ДД-ММ-ГГ
"""
raise NotImplemented
def x0E(self):
""" Ввод длинного заводского номера
Команда: 0EH. Длина сообщения: 12 байт.
Пароль (4 байта) (пароль «0»)
Заводской номер (7 байт) 00000000000000...99999999999999
Ответ: 0EH. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
def x0F(self):
""" Запрос длинного заводского номера и длинного РНМ
Команда: 0FH. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 0FH. Длина сообщения: 16 байт.
Код ошибки (1 байт)
Заводской номер (7 байт) 00000000000000...99999999999999
РНМ (7 байт) 00000000000000...99999999999999
"""
raise NotImplemented
## Implemented
    def x10(self):
        """Short status request (command 10H).

        Message: 10H, 5 bytes — operator password (4 bytes).
        Answer: 10H, 16 bytes — error code, operator number (1..30),
        device flags (2 bytes), mode, submode, operations-in-receipt
        counter (split low/high byte), backup-battery and supply voltages,
        FP and EKLZ error codes, and 3 reserved bytes.

        Returns the decoded fields as a dict.
        """
        command = 0x10
        data, error, command = self.ask(command)
        # Device flags: high byte then low byte, expanded to named flags.
        kkt_flags = string2bits(data[2] + data[1]) # high byte, low byte
        kkt_flags = [ KKT_FLAGS[i] for i, x in enumerate(kkt_flags) if x ]
        # Operations-in-receipt counter: high byte at offset 10, low at 5.
        operations = int2.unpack(data[10]+data[5]) # high byte, low byte
        result = {
            'error': error,
            'operator': ord(data[0]),
            'kkt_flags': kkt_flags,
            'kkt_mode': ord(data[3]),
            'kkt_submode': ord(data[4]),
            'voltage_battery': ord(data[6]),
            'voltage_power': ord(data[7]),
            'fp_error': ord(data[8]),
            'eklz_error': ord(data[9]),
            'operations': operations,
            'reserve': data[11:],
        }
        return result
## Implemented
    def x11(self):
        """Full status request (command 11H).

        Message: 11H, 5 bytes — operator password (4 bytes).
        Answer: 11H, 48 bytes covering: operator number, device firmware
        version/build/date, hall number, through document number, device
        flags/mode/submode/port, fiscal-module (FP) firmware
        version/build/date, current date and time, FP flags, factory
        serial number, last closed shift, free FP records,
        (re)registration counters and the INN.

        Returns the decoded fields as a dict.
        """
        command = 0x11
        data, error, command = self.ask(command)
        # Device firmware date; two-digit years >90 are 19xx, else 20xx.
        day = ord(data[5])
        month = ord(data[6])
        year = ord(data[7])
        if year > 90:
            kkt_date = datetime.date(1900+year, month, day)
        else:
            kkt_date = datetime.date(2000+year, month, day)
        # Device flags: high byte then low byte, expanded to named flags.
        kkt_flags = string2bits(data[12] + data[11]) # high byte, low byte
        kkt_flags = [ KKT_FLAGS[i] for i, x in enumerate(kkt_flags) if x ]
        # Fiscal-module firmware date, same year convention as above.
        day = ord(data[20])
        month = ord(data[21])
        year = ord(data[22])
        if year > 90:
            fp_date = datetime.date(1900+year, month, day)
        else:
            fp_date = datetime.date(2000+year, month, day)
        # Current device date and time.
        # NOTE(review): the local name ``time`` shadows the module-level
        # ``time`` import for the rest of this method.
        date = datetime.date(2000+ord(data[25]), ord(data[24]), ord(data[23]))
        time = datetime.time(ord(data[26]), ord(data[27]), ord(data[28]))
        # Fiscal-module flags.
        fp_flags = string2bits(data[29])
        fp_flags = [ FP_FLAGS[i][x] for i, x in enumerate(fp_flags) ]
        result = {
            'error': error,
            'operator': ord(data[0]),
            'kkt_version': '%s.%s' % (data[1], data[2]),
            'kkt_build': int2.unpack(data[3] + data[4]),
            'kkt_date': kkt_date,
            'hall': ord(data[8]),
            'document': int2.unpack(data[9] + data[10]),
            'kkt_flags': kkt_flags,
            'kkt_mode': ord(data[13]),
            'kkt_submode': ord(data[14]),
            'kkt_port': ord(data[15]),
            'fp_version': '%s.%s' % (data[16], data[17]),
            'fp_build': int2.unpack(data[18] + data[19]),
            'fp_date': fp_date,
            'date': date,
            'time': time,
            'fp_flags': fp_flags,
            'serial_number': int4.unpack(data[30] + data[31] \
                + data[32] + data[33]),
            'last_closed_session': int2.unpack(data[34] + data[35]),
            'fp_free_records': int2.unpack(data[36] + data[37]),
            'registration_count': ord(data[38]),
            'registration_left': ord(data[39]),
            'inn': int6.unpack(data[40] + data[41] + data[42]\
                + data[43] + data[44] + data[45])
        }
        return result
## Implemented multistring for x12
def x12_loop(self, text='', control_tape=False):
""" Печать жирной строки без ограничения на 20 символов """
last_result = None
while len(text) > 0:
last_result = self.x12(text=text[:20], control_tape=control_tape)
text = text[20:]
return last_result
## Implemented
def x12(self, text='', control_tape=False):
""" Печать жирной строки
Команда: 12H. Длина сообщения: 26 байт.
Пароль оператора (4 байта)
Флаги (1 байт) Бит 0 – контрольная лента, Бит 1 –
чековая лента.
Печатаемые символы (20 байт)
Ответ: 12H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x12
flags = 2 # по умолчанию bin(2) == '0b00000010'
if control_tape:
flags = 1 # bin(1) == '0b00000001'
if len(text) > 20:
raise KktError('Длина строки должна быть меньше или равна 20 символов')
text = text.encode(CODE_PAGE).ljust(20, chr(0x0))
params = self.password + chr(flags) + text
data, error, command = self.ask(command, params, quick=True)
operator = ord(data[0])
return operator
## Implemented
    def x13(self):
        """Beep (command 13H).

        Message: 13H, 5 bytes — operator password (4 bytes).
        Answer: 13H, 3 bytes — error code and operator number (1..30).

        Returns the error code (ask() already raises on non-zero errors,
        so in practice this returns 0).
        """
        params = self.password
        data, error, command = self.ask(0x13, params)
        return error
def x14(self):
""" Установка параметров обмена
Команда: 14H. Длина сообщения: 8 байт.
Пароль системного администратора (4 байта)
Номер порта (1 байт) 0...255
Код скорости обмена (1 байт) 0...6
Тайм аут приема байта (1 байт) 0...255
Ответ: 14H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание:
ККТ поддерживает обмен со скоростями 2400, 4800, 9600,
19200, 38400, 57600, 115200 для порта 0, чему
соответствуют коды от 0 до 6. Для остальных портов
диапазон скоростей может быть сужен, и в этом случае,
если порт не поддерживает выбранную скорость, будет
выдано сообщение об ошибке. Тайм-аут приема байта
нелинейный. Диапазон допустимых значений [0...255]
распадается на три диапазона:
1. В диапазоне [0...150] каждая единица
соответствует 1 мс, т.е. данным диапазоном
задаются значения тайм-аута от 0 до 150 мс;
2. В диапазоне [151...249] каждая единица
соответствует 150 мс, т.е. данным диапазоном
задаются значения тайм-аута от 300 мс до 15 сек;
3. В диапазоне [250...255] каждая единица
соответствует 15 сек, т.е. данным диапазоном
задаются значения тайм-аута от 30 сек до 105 сек.
По умолчанию все порты настроены на параметры:
скорость 4800 бод с тайм-аутом 100 мс. Если
устанавливается порт, по которому ведется обмен, то
подтверждение на прием команды и ответное сообщение
выдаются ККТ со старой скоростью обмена.
"""
raise NotImplemented
def x15(self):
""" Чтение параметров обмена
Команда: 15H. Длина сообщения: 6 байт.
Пароль системного администратора (4 байта)
Номер порта (1 байт) 0...255
Ответ: 15H. Длина сообщения: 4 байта.
Код ошибки (1 байт)
Код скорости обмена (1 байт) 0...6
Тайм аут приема байта (1 байт) 0...255
"""
raise NotImplemented
def x16(self):
""" Технологическое обнуление
Команда: 16H. Длина сообщения: 1 байт.
Ответ: 16H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание:
Технологическое обнуление доступно только после
вскрытия пломбы на кожухе ККТ и выполнения
последовательности действий, описанных в ремонтной
документации на ККТ.
"""
raise NotImplemented
## Implemented multistring for x17
def x17_loop(self, text='', control_tape=False):
""" Печать строки без ограничения на 36 символов
В документации указано 40, но 4 символа выходят за область
печати на ФРК.
"""
last_result = None
while len(text) > 0:
last_result = self.x17(text=text[:36], control_tape=control_tape)
text = text[36:]
return last_result
## Implemented
def x17(self, text='', control_tape=False):
""" Печать строки
Команда: 17H. Длина сообщения: 46 байт.
Пароль оператора (4 байта)
Флаги (1 байт) Бит 0 – контрольная лента, Бит 1 –
чековая лента.
Печатаемые символы (40 байт)
Ответ: 17H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание:
Печатаемые символы – символы в кодовой странице
WIN1251. Символы с кодами 0..31 не отображаются.
"""
command = 0x17
flags = 2 # по умолчанию bin(2) == '0b00000010'
if control_tape:
flags = 1 # bin(1) == '0b00000001'
if len(text) > 40:
raise KktError('Длина строки должна быть меньше или равна 40 символов')
text = text.encode(CODE_PAGE).ljust(40, chr(0x0))
params = self.password + chr(flags) + text
data, error, command = self.ask(command, params, quick=True)
operator = ord(data[0])
return operator
## Implemented
def x18(self, text, number=1):
""" Печать заголовка документа
Команда: 18H. Длина сообщения: 37 байт.
Пароль оператора (4 байта)
Наименование документа (30 байт)
Номер документа (2 байта)
Ответ: 18H. Длина сообщения: 5 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сквозной номер документа (2 байта)
Примечание:
Печатаемые символы – символы в кодовой странице
WIN1251. Символы с кодами 0..31 не отображаются.
"""
command = 0x18
if len(text) > 30:
raise 'Длина строки должна быть меньше или равна 30 символов'
text = text.encode(CODE_PAGE).ljust(30, chr(0x0))
params = self.password + text + chr(flags)
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
def x19(self):
""" Тестовый прогон
Команда: 19H. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Период вывода в минутах (1 байт) 1...99
Ответ: 19H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
## Implemented
def x1A(self):
""" Запрос денежного регистра
Команда: 1AH. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Номер регистра (1 байт) 0... 255
Ответ: 1AH. Длина сообщения: 9 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Содержимое регистра (6 байт)
Пример запроса:
integer2money(int6.unpack(kkt.ask(0x1A, kkt.password + chr(121))[0][1:]))
"""
command = 0x1A
params = self.password + chr(number)
data, error, command = self.ask(command, params)
return integer2money(int6.unpack(data[1:]))
## Implemented
def x1B(self):
""" Запрос операционного регистра
Команда: 1BH. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Номер регистра (1 байт) 0... 255
Ответ: 1BH. Длина сообщения: 5 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Содержимое регистра (2 байта)
"""
command = 0x1B
params = self.password + chr(number)
data, error, command = self.ask(command, params)
return int2.unpack(data[1:])
def x1C(self):
""" Запись лицензии
Команда: 1CH. Длина сообщения: 10 байт.
Пароль системного администратора (4 байта)
Лицензия (5 байт) 0000000000...9999999999
Ответ: 1CH. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
def x1D(self):
""" Чтение лицензии
Команда: 1DH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: 1DH. Длина сообщения: 7 байт.
Код ошибки (1 байт)
Лицензия (5 байт) 0000000000...9999999999
"""
raise NotImplemented
## Implemented
def x1E(self, table, row, field, value):
""" Запись таблицы
Команда: 1EH. Длина сообщения: (9+X) байт.
Пароль системного администратора (4 байта)
Таблица (1 байт)
Ряд (2 байта)
Поле (1 байт)
Значение (X байт) до 40 байт
Ответ: 1EH. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: поля бывают бинарные и строковые, поэтому value
делаем в исходном виде.
"""
command = 0x1E
table = chr(table)
row = int2.pack(row)
field = chr(field)
params = self.admin_password + table + row + field + value
data, error, command = self.ask(command, params)
return error
def x1F(self):
""" Чтение таблицы
Команда: 1FH. Длина сообщения: 9 байт.
Пароль системного администратора (4 байта)
Таблица (1 байт)
Ряд (2 байта)
Поле (1 байт)
Ответ: 1FH. Длина сообщения: (2+X) байт.
Код ошибки (1 байт)
Значение (X байт) до 40 байт
"""
raise NotImplemented
def x20(self):
""" Запись положения десятичной точки
Команда: 20H. Длина сообщения: 6 байт.
Пароль системного администратора (4 байта)
Положение десятичной точки (1 байт) «0»– 0 разряд, «1»– 2 разряд
Ответ: 20H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
## Implemented
def x21(self, hour, minute, second):
""" Программирование времени
Команда: 21H. Длина сообщения: 8 байт.
Пароль системного администратора (4 байта)
Время (3 байта) ЧЧ-ММ-СС
Ответ: 21H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
command = 0x21
hour = chr(hour)
minute = chr(minute)
second = chr(second)
params = self.admin_password + hour + minute + second
data, error, command = self.ask(command, params)
return error
## Implemented
def x22(self, year, month, day):
""" Программирование даты
Команда: 22H. Длина сообщения: 8 байт.
Пароль системного администратора (4 байта)
Дата (3 байта) ДД-ММ-ГГ
Ответ: 22H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
command = 0x22
if year >= 2000:
year = year - 2000
year = chr(year)
month = chr(month)
day = chr(day)
params = self.admin_password + day + month + year
data, error, command = self.ask(command, params)
return error
## Implemented
def x23(self, year, month, day):
""" Подтверждение программирования даты
Команда: 23H. Длина сообщения: 8 байт.
Пароль системного администратора (4 байта)
Дата (3 байта) ДД-ММ-ГГ
Ответ: 23H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
command = 0x23
if year >= 2000:
year = year - 2000
year = chr(year)
month = chr(month)
day = chr(day)
params = self.admin_password + day + month + year
data, error, command = self.ask(command, params)
return error
def x24(self):
""" Инициализация таблиц начальными значениями
Команда: 24H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: 24H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
## Implemented
def x25(self, fullcut=True):
""" Отрезка чека
Команда: 25H. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Тип отрезки (1 байт) «0» – полная, «1» – неполная
Ответ: 25H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x25
cut = int(not bool(fullcut)) # 0 по умолчанию
params = self.password + chr(cut)
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
def x26(self):
""" Прочитать параметры шрифта
Команда: 26H. Длина сообщения: 6 байт.
Пароль системного администратора (4 байта)
Номер шрифта (1 байт)
Ответ: 26H. Длина сообщения: 7 байт.
Код ошибки (1 байт)
Ширина области печати в точках (2 байта)
Ширина символа с учетом межсимвольного интервала в точках (1 байт)
Высота символа с учетом межстрочного интервала в точках (1 байт)
Количество шрифтов в ККТ (1 байт)
"""
raise NotImplemented
def x27(self):
""" Общее гашение
Команда: 27H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: 27H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
def x28(self):
""" Открыть денежный ящик
Команда: 28H. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Номер денежного ящика (1 байт) 0, 1
Ответ: 28H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
## Implemented
def x29(self, receipt_tape=False, control_tape=False, row_count=1):
""" Протяжка
Команда: 29H. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Флаги (1 байт) Бит 0 – контрольная лента, Бит 1 –
чековая лента, Бит 2 – подкладной документ.
Количество строк (1 байт) 1...255 – максимальное
количество строк ограничивается размером буфера
печати, но не превышает 255
Ответ: 29H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x29
flags = 4 # по умолчанию bin(4) == '0b00000100'
if receipt_tape:
tape = 2 # bin(2) == '0b00000010'
if control_tape:
tape = 1 # bin(1) == '0b00000001'
if row_count < 1 or row_count > 255:
raise KktError("Количество строк должно быть в диапазоне между 1 и 255")
params = self.password + chr(flags) + chr(row_count)
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
def x2A(self):
""" Выброс подкладного документа
Команда: 2AH. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Направление выброса подкладного документа (1 байт) «0» – вниз, «1» – вверх
Ответ: 2AH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x2B(self):
""" Прерывание тестового прогона
Команда: 2BH. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 2BH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x2C(self):
""" Снятие показаний операционных регистров
Команда: 2СH. Длина сообщения: 5 байт.
Пароль администратора или системного администратора (4 байта)
Ответ: 2СH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 29, 30
"""
raise NotImplemented
def x2D(self):
""" Запрос структуры таблицы
Команда: 2DH. Длина сообщения: 6 байт.
Пароль системного администратора (4 байта)
Номер таблицы (1 байт)
Ответ: 2DH. Длина сообщения: 45 байт.
Код ошибки (1 байт)
Название таблицы (40 байт)
Количество рядов (2 байта)
Количество полей (1 байт)
"""
raise NotImplemented
def x2E(self):
""" Запрос структуры поля
Команда: 2EH. Длина сообщения: 7 байт.
Пароль системного администратора (4 байта)
Номер таблицы (1 байт)
Номер поля (1 байт)
Ответ: 2EH. Длина сообщения: (44+X+X) байт.
Код ошибки (1 байт)
Название поля (40 байт)
Тип поля (1 байт) «0» – BIN, «1» – CHAR
Количество байт – X (1 байт)
Минимальное значение поля – для полей типа BIN (X байт)
Максимальное значение поля – для полей типа BIN (X байт)
"""
raise NotImplemented
def x2F(self):
""" Печать строки данным шрифтом
Команда: 2FH. Длина сообщения: 47 байт.
Пароль оператора (4 байта)
Флаги (1 байт) Бит 0 – контрольная лента, Бит 1 –
чековая лента.
Номер шрифта (1 байт) 0...255
Печатаемые символы (40 байт)
Ответ: 2FH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание:
Печатаемые символы – символы в кодовой странице
WIN1251. Символы с кодами 0...31 не отображаются.
"""
raise NotImplemented
## Implemented
def x40(self):
""" Суточный отчет без гашения
Команда: 40H. Длина сообщения: 5 байт.
Пароль администратора или системного администратора (4 байта)
Ответ: 40H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 29, 30
"""
command = 0x40
params = self.admin_password
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
## Implemented
def x41(self):
""" Суточный отчет с гашением
Команда: 41H. Длина сообщения: 5 байт.
Пароль администратора или системного администратора (4 байта)
Ответ: 41H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 29, 30
"""
command = 0x41
params = self.admin_password
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
def x42(self):
""" Отчѐт по секциям
Команда: 42H. Длина сообщения: 5 байт.
Пароль администратора или системного администратора (4 байта)
Ответ: 42H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 29, 30
"""
raise NotImplemented
def x43(self):
""" Отчѐт по налогам
Команда: 43H. Длина сообщения: 5 байт.
Пароль администратора или системного администратора (4 байта)
Ответ: 43H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 29, 30
"""
raise NotImplemented
## Implemented
def x50(self, summa):
""" Внесение
Команда: 50H. Длина сообщения: 10 байт.
Пароль оператора (4 байта)
Сумма (5 байт)
Ответ: 50H. Длина сообщения: 5 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сквозной номер документа (2 байта)
"""
command = 0x50
summa = money2integer(summa)
summa = int5.pack(summa)
params = self.password + summa
data, error, command = self.ask(command, params)
operator = ord(data[0])
document = int4.unpack(data[1:3])
result = {
'operator': operator,
'document': document,
}
return result
## Implemented
def x51(self, summa):
""" Выплата
Команда: 51H. Длина сообщения: 10 байт.
Пароль оператора (4 байта)
Сумма (5 байт)
Ответ: 51H. Длина сообщения: 5 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сквозной номер документа (2 байта)
"""
command = 0x51
summa = money2integer(summa)
summa = int5.pack(summa)
params = self.password + summa
data, error, command = self.ask(command, params)
operator = ord(data[0])
document = int4.unpack(data[1:3])
result = {
'operator': operator,
'document': document,
}
return result
## Implemented
def x52(self):
""" Печать клише
Команда: 52H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 52H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x52
params = self.password
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
def x53(self):
""" Конец Документа
Команда: 53H. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Параметр (1 байт)
0- без рекламного текста
1 - с рекламным тестом
Ответ: 53H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x54(self):
""" Печать рекламного текста
Команда: 54H. Длина сообщения:5 байт.
Пароль оператора (4 байта)
Ответ: 54H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x60(self):
""" Ввод заводского номера
Команда: 60H. Длина сообщения: 9 байт.
Пароль (4 байта) (пароль «0»)
Заводской номер (4 байта) 00000000...99999999
Ответ: 60H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
def x61(self):
""" Инициализация ФП
Команда: 61H. Длина сообщения: 1 байт.
Ответ: 61H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание:
Команда доступна только в случае установки в ФП
процессора с программным обеспечением для
инициализации и используется в технологических целях
при производстве ККМ на заводе-изготовителе.
"""
raise NotImplemented
## Implemented
    def x62(self, after=False):
        """Query totals recorded in the fiscal memory (command 62H).

        Request (6 bytes): administrator or system administrator
        password (4), query type (1): "0" -- sum of all records,
        "1" -- sum of records since the last re-registration.
        Reply (29 bytes): error code (1), operator ordinal (1; 29 or
        30), sum of shift sale totals (8), sum of shift purchase
        totals (6), sum of shift sale refunds (6), sum of shift
        purchase refunds (6). When fiscal memory 2 is absent the
        three 6-byte fields are reported as FFh FFh FFh FFh FFh FFh.

        Returns:
            dict with money values under the keys 'operator', 'sale',
            'purchase', 'refuse_sale', 'refuse_purchase'.
        """
        command = 0x62
        params = self.admin_password + chr(1 if after else 0)
        data, error, command = self.ask(command, params)
        result = {
            'operator': ord(data[0]),
            'sale': integer2money(int8.unpack(data[1:9])),
            'purchase': integer2money(int6.unpack(data[9:15])),
            'refuse_sale': integer2money(int6.unpack(data[15:21])),
            'refuse_purchase': integer2money(int6.unpack(data[21:])),
        }
        # 0xFFFFFFFFFFFF converts to 2814749767106.55; the device uses
        # it as an "unavailable" sentinel, so normalise it to zero.
        # NOTE(review): the original comment said the sentinel appears
        # "when FP 2 is installed", while the docstring above says the
        # opposite -- confirm against real hardware.
        for key in ('purchase', 'refuse_sale', 'refuse_purchase'):
            if result[key] == 2814749767106.55:
                result[key] = 0
        return result
def x63(self):
""" Запрос даты последней записи в ФП
Команда: 63H. Длина сообщения: 5 байт.
Пароль администратора или системного администратора
(4 байта)
Ответ: 63H. Длина сообщения: 7 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 29, 30
Тип последней записи (1 байт) «0» – фискализация
(перерегистрация), «1» – сменный итог
Дата (3 байта) ДД-ММ-ГГ
"""
raise NotImplemented
def x64(self):
""" Запрос диапазона дат и смен
Команда: 64H. Длина сообщения: 5 байт.
Пароль налогового инспектора (4 байта)
Ответ: 64H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
"""
raise NotImplemented
def x65(self):
""" Фискализация (перерегистрация)
Команда: 65H. Длина сообщения: 20 байт.
Пароль старый (4 байта)
Пароль новый (4 байта)
РНМ (5 байт) 0000000000...9999999999
ИНН (6 байт) 000000000000...999999999999
Ответ: 65H. Длина сообщения: 9 байт.
Код ошибки (1 байт)
Номер фискализации (перерегистрации) (1 байт) 1...16
Количество оставшихся перерегистраций (1 байт) 0...15
Номер последней закрытой смены (2 байта) 0000...2100
Дата фискализации (перерегистрации) (3 байта) ДД-ММ-ГГ
"""
raise NotImplemented
def x66(self):
""" Фискальный отчет по диапазону дат
Команда: 66H. Длина сообщения: 12 байт.
Пароль налогового инспектора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Ответ: 66H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
"""
raise NotImplemented
def x67(self):
""" Фискальный отчет по диапазону смен
Команда: 67H. Длина сообщения: 10 байт.
Пароль налогового инспектора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
Ответ: 67H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
"""
raise NotImplemented
def x68(self):
""" Прерывание полного отчета
Команда: 68H. Длина сообщения: 5 байт.
Пароль налогового инспектора (4 байта)
Ответ: 68H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
def x69(self):
""" Чтение параметров фискализации (перерегистрации)
Команда: 69H. Длина сообщения: 6 байт.
Пароль налогового инспектора, при котором была проведена
данная фискализация (4 байта)
Номер фискализации (перерегистрации) (1 байт) 1...16
Ответ: 69H. Длина сообщения: 22 байта.
Код ошибки (1 байт)
Пароль (4 байта)
РНМ (5 байт) 0000000000...9999999999
ИНН (6 байт) 000000000000...999999999999
Номер смены перед фискализацией (перерегистрацией)
(2 байта) 0000...2100
Дата фискализации (перерегистрации) (3 байта) ДД-ММ-ГГ
"""
raise NotImplemented
def x70(self):
""" Открыть фискальный подкладной документ
Команда: 70H. Длина сообщения: 26 байт.
Пароль оператора (4 байта)
Тип документа (1 байт) «0» – продажа, «1» – покупка,
«2» – возврат продажи, «3» – возврат покупки
Дублирование печати (извещение, квитанция) (1 байт) «0»
– колонки, «1» – блоки строк
Количество дублей (1 байт) 0...5
Смещение между оригиналом и 1-ым дублем печати (1 байт) *
Смещение между 1-ым и 2-ым дублями печати (1 байт) *
Смещение между 2-ым и 3-им дублями печати (1 байт) *
Смещение между 3-им и 4-ым дублями печати (1 байт) *
Смещение между 4-ым и 5-ым дублями печати (1 байт) *
Номер шрифта клише (1 байт)
Номер шрифта заголовка документа (1 байт)
Номер шрифта номера ЭКЛЗ (1 байт)
Номер шрифта значения КПК и номера КПК (1 байт)
Номер строки клише (1 байт)
Номер строки заголовка документа (1 байт)
Номер строки номера ЭКЛЗ (1 байт)
Номер строки признака повтора документа (1 байт)
Смещение клише в строке (1 байт)
Смещение заголовка документа в строке (1 байт)
Смещение номера ЭКЛЗ в строке (1 байт)
Смещение КПК и номера КПК в строке (1 байт)
Смещение признака повтора документа в строке (1 байт)
Ответ: 70H. Длина сообщения: 5 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сквозной номер документа (2 байта)
*– Для колонок величина смещения задаѐтся в символах, для
блоков строк – в строках.
"""
raise NotImplemented
def x71(self):
""" Открыть стандартный фискальный подкладной документ
Команда: 71H. Длина сообщения: 13 байт.
Пароль оператора (4 байта)
Тип документа (1 байт) «0» – продажа, «1» – покупка, «2» – возврат
продажи, «3» – возврат покупки
Дублирование печати (извещение, квитанция) (1 байт) «0» – колонки,
«1» – блоки строк
Количество дублей (1 байт) 0...5
Смещение между оригиналом и 1-ым дублем печати (1 байт) *
Смещение между 1-ым и 2-ым дублями печати (1 байт) *
Смещение между 2-ым и 3-им дублями печати (1 байт) *
Смещение между 3-им и 4-ым дублями печати (1 байт) *
Смещение между 4-ым и 5-ым дублями печати (1 байт) *
Ответ: 71H. Длина сообщения: 5 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сквозной номер документа (2 байта)
"""
raise NotImplemented
def x72(self):
""" Формирование операции на подкладном документе
Команда: 72H. Длина сообщения: 82 байта.
Пароль оператора (4 байта)
Формат целого количества (1 байт) «0» – без цифр после запятой, «1» – с цифрами
после запятой
Количество строк в операции (1 байт) 1...3
Номер текстовой строки в операции (1 байт) 0...3, «0» – не печатать
Номер строки произведения количества на цену в операции (1 байт) 0...3, «0» – не
печатать
Номер строки суммы в операции (1 байт) 1...3
Номер строки отдела в операции (1 байт) 1...3
Номер шрифта текстовой строки (1 байт)
Номер шрифта количества (1 байт)
Номер шрифта знака умножения количества на цену (1 байт)
Номер шрифта цены (1 байт)
Номер шрифта суммы (1 байт)
Номер шрифта отдела (1 байт)
Количество символов поля текстовой строки (1 байт)
Количество символов поля количества (1 байт)
Количество символов поля цены (1 байт)
Количество символов поля суммы (1 байт)
Количество символов поля отдела (1 байт)
Смещение поля текстовой строки в строке (1 байт)
Смещение поля произведения количества на цену в строке (1 байт)
Смещение поля суммы в строке (1 байт)
Смещение поля отдела в строке (1 байт)
Номер строки ПД с первой строкой блока операции (1 байт)
Количество (5 байт)
Цена (5 байт)
Отдел (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 72H. Длина сообщения: 3 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x73(self):
""" Формирование стандартной операции на подкладном
документе
Команда: 73H. Длина сообщения: 61 байт.
Пароль оператора (4 байта)
Номер строки ПД с первой строкой блока операции (1 байт)
Количество (5 байт)
Цена (5 байт)
Отдел (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 73H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x74(self):
""" Формирование скидки/надбавки на подкладном документе
Команда: 74H. Длина сообщения: 68 байт.
Пароль оператора (4 байта)
Количество строк в операции (1 байт) 1...2
Номер текстовой строки в операции (1 байт) 0...2, «0» – не печатать
Номер строки названия операции в операции (1 байт) 1...2
Номер строки суммы в операции (1 байт) 1...2
Номер шрифта текстовой строки (1 байт)
Номер шрифта названия операции (1 байт)
Номер шрифта суммы (1 байт)
Количество символов поля текстовой строки (1 байт)
Количество символов поля суммы (1 байт)
Смещение поля текстовой строки в строке (1 байт)
Смещение поля названия операции в строке (1 байт)
Смещение поля суммы в строке (1 байт)
Тип операции (1 байт) «0» – скидка, «1» – надбавка
Номер строки ПД с первой строкой блока скидки/надбавки (1 байт)
Сумма (5 байт)
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 74H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x75(self):
""" Формирование стандартной скидки/надбавки на
подкладном документе
Команда: 75H. Длина сообщения: 56 байт.
Пароль оператора (4 байта)
Тип операции (1 байт) «0» – скидка, «1» – надбавка
Номер строки ПД с первой строкой блока скидки/надбавки
(1 байт)
Сумма (5 байт)
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 75H. Длина сообщения: 3 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x76(self):
""" Формирование закрытия чека на подкладном документе
Команда: 76H. Длина сообщения: 182 байта.
Пароль оператора (4 байта)
Количество строк в операции (1 байт) 1...17
Номер строки итога в операции (1 байт) 1...17
Номер текстовой строки в операции (1 байт) 0...17,
«0» – не печатать
Номер строки наличных в операции (1 байт) 0...17,
«0» – не печатать
Номер строки типа оплаты 2 в операции (1 байт) 0...17,
«0» – не печатать
Номер строки типа оплаты 3 в операции (1 байт) 0...17,
«0» – не печатать
Номер строки типа оплаты 4 в операции (1 байт) 0...17,
«0» – не печатать
Номер строки сдачи в операции (1 байт) 0...17, «0» – не
печатать
Номер строки оборота по налогу А в операции (1 байт)
0...17, «0» – не печатать
Номер строки оборота по налогу Б в операции (1 байт)
0...17, «0» – не печатать
Номер строки оборота по налогу В в операции (1 байт)
0...17, «0» – не печатать
Номер строки оборота по налогу Г в операции (1 байт)
0...17, «0» – не печатать
Номер строки суммы по налогу А в операции (1 байт)
0...17, «0» – не печатать
Номер строки суммы по налогу Б в операции (1 байт)
0...17, «0» – не печатать
Номер строки суммы по налогу В в операции (1 байт)
0...17, «0» – не печатать
Номер строки суммы по налогу Г в операции (1 байт)
0...17, «0» – не печатать
Номер строки суммы до начисления скидки в операции
(1 байт) 0...17, «0» – не
печатать
Номер строки суммы скидки в операции (1 байт) 0...17,
«0» – не печатать
Номер шрифта текстовой строки (1 байт)
Номер шрифта «ИТОГ» (1 байт)
Номер шрифта суммы итога (1 байт)
Номер шрифта «НАЛИЧНЫМИ» (1 байт)
Номер шрифта суммы наличных (1 байт)
Номер шрифта названия типа оплаты 2 (1 байт)
Номер шрифта суммы типа оплаты 2 (1 байт)
Номер шрифта названия типа оплаты 3 (1 байт)
Номер шрифта суммы типа оплаты 3 (1 байт)
Номер шрифта названия типа оплаты 4 (1 байт)
Номер шрифта суммы типа оплаты 4 (1 байт)
Номер шрифта «СДАЧА» (1 байт)
Номер шрифта суммы сдачи (1 байт)
Номер шрифта названия налога А (1 байт)
Номер шрифта оборота налога А (1 байт)
Номер шрифта ставки налога А (1 байт)
Номер шрифта суммы налога А (1 байт)
Номер шрифта названия налога Б (1 байт)
Номер шрифта оборота налога Б (1 байт)
Номер шрифта ставки налога Б (1 байт)
Номер шрифта суммы налога Б (1 байт)
Номер шрифта названия налога В (1 байт)
Номер шрифта оборота налога В (1 байт)
Номер шрифта ставки налога В (1 байт)
Номер шрифта суммы налога В (1 байт)
Номер шрифта названия налога Г (1 байт)
Номер шрифта оборота налога Г (1 байт)
Номер шрифта ставки налога Г (1 байт)
Номер шрифта суммы налога Г (1 байт)
Номер шрифта «ВСЕГО» (1 байт)
Номер шрифта суммы до начисления скидки (1 байт)
Номер шрифта «СКИДКА ХХ.ХХ %» (1 байт)
Номер шрифта суммы скидки на чек (1 байт)
Количество символов поля текстовой строки (1 байт)
Количество символов поля суммы итога (1 байт)
Количество символов поля суммы наличных (1 байт)
Количество символов поля суммы типа оплаты 2 (1 байт)
Количество символов поля суммы типа оплаты 3 (1 байт)
Количество символов поля суммы типа оплаты 4 (1 байт)
Количество символов поля суммы сдачи (1 байт)
Количество символов поля названия налога А (1 байт)
Количество символов поля оборота налога А (1 байт)
Количество символов поля ставки налога А (1 байт)
Количество символов поля суммы налога А (1 байт)
Количество символов поля названия налога Б (1 байт)
Количество символов поля оборота налога Б (1 байт)
Количество символов поля ставки налога Б (1 байт)
Количество символов поля суммы налога Б (1 байт)
Количество символов поля названия налога В (1 байт)
Количество символов поля оборота налога В (1 байт)
Количество символов поля ставки налога В (1 байт)
Количество символов поля суммы налога В (1 байт)
Количество символов поля названия налога Г (1 байт)
Количество символов поля оборота налога Г (1 байт)
Количество символов поля ставки налога Г (1 байт)
Количество символов поля суммы налога Г (1 байт)
Количество символов поля суммы до начисления скидки
(1 байт)
Количество символов поля процентной скидки на чек
(1 байт)
Количество символов поля суммы скидки на чек (1 байт)
Смещение поля текстовой строки в строке (1 байт)
Смещение поля «ИТОГ» в строке (1 байт)
Смещение поля суммы итога в строке (1 байт)
Смещение поля «НАЛИЧНЫМИ» в строке (1 байт)
Смещение поля суммы наличных в строке (1 байт)
Смещение поля названия типа оплаты 2 в строке (1 байт)
Смещение поля суммы типа оплаты 2 в строке (1 байт)
Смещение поля названия типа оплаты 3 в строке (1 байт)
Смещение поля суммы типа оплаты 3 в строке (1 байт)
Смещение поля названия типа оплаты 4 в строке (1 байт)
Смещение поля суммы типа оплаты 4 в строке (1 байт)
Смещение поля «СДАЧА» в строке (1 байт)
Смещение поля суммы сдачи в строке (1 байт)
Смещение поля названия налога А в строке (1 байт)
Смещение поля оборота налога А в строке (1 байт)
Смещение поля ставки налога А в строке (1 байт)
Смещение поля суммы налога А в строке (1 байт)
Смещение поля названия налога Б в строке (1 байт)
Смещение поля оборота налога Б в строке (1 байт)
Смещение поля ставки налога Б в строке (1 байт)
Смещение поля суммы налога Б в строке (1 байт)
Смещение поля названия налога В в строке (1 байт)
Смещение поля оборота налога В в строке (1 байт)
Смещение поля ставки налога В в строке (1 байт)
Смещение поля суммы налога В в строке (1 байт)
Смещение поля названия налога Г в строке (1 байт)
Смещение поля оборота налога Г в строке (1 байт)
Смещение поля ставки налога Г в строке (1 байт)
Смещение поля суммы налога Г в строке (1 байт)
Смещение поля «ВСЕГО» в строке (1 байт)
Смещение поля суммы до начисления скидки в строке
(1 байт)
Смещение поля «СКИДКА ХХ.ХХ %» в строке (1 байт)
Смещение поля суммы скидки в строке (1 байт)
Номер строки ПД с первой строкой блока операции (1 байт)
Сумма наличных (5 байт)
Сумма типа оплаты 2 (5 байт)
Сумма типа оплаты 3 (5 байт)
Сумма типа оплаты 4 (5 байт)
Скидка в % на чек от 0 до 99,99 % (2 байта) 0000...9999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 76H. Длина сообщения: 8 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сдача (5 байт) 0000000000...9999999999
"""
raise NotImplemented
## Implemented
def x77(self, cash=0, payment2=0, payment3=0, payment4=0, discount=0,
text='', taxes=[0,0,0,0]):
""" Формирование стандартного закрытия чека на подкладном
документе
Команда: 77H. Длина сообщения: 72 байта.
Пароль оператора (4 байта)
Номер строки ПД с первой строкой блока операции (1 байт)
Сумма наличных (5 байт)
Сумма типа оплаты 2 (5 байт)
Сумма типа оплаты 3 (5 байт)
Сумма типа оплаты 4 (5 байт)
Скидка в % на чек от 0 до 99,99 % (2 байта) 0000...9999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 77H. Длина сообщения: 8 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сдача (5 байт) 0000000000...9999999999
"""
command = 0x77
cash = money2integer(cash)
payment2 = money2integer(payment2)
payment3 = money2integer(payment3)
payment4 = money2integer(payment4)
discount = money2integer(discount)
if cash < 0 or cash > 9999999999:
raise KktError("Наличные должны быть в диапазоне между 0 и 9999999999")
if payment2 < 0 or payment2 > 9999999999:
raise KktError("Оплата 2 должна быть в диапазоне между 0 и 9999999999")
if payment3 < 0 or payment3 > 9999999999:
raise KktError("Оплата 3 должна быть в диапазоне между 0 и 0..9999999999")
if payment4 < 0 or payment4 > 9999999999:
raise KktError("Оплата 4 должна быть в диапазоне между 0 и 9999999999")
if discount < -9999 or discount > 9999:
raise KktError("Скидка должна быть в диапазоне между -9999 и 9999")
if len(text) > 40:
raise KktError("Текст должнен быть менее или равен 40 символам")
if len(taxes) != 4:
raise KktError("Количество налогов должно равняться 4")
if not isinstance(taxes, (list, tuple)):
raise KktError("Перечень налогов должен быть типом list или tuple")
for t in taxes:
if t not in range(0, 5):
raise KktError("Налоги должны быть равны 0,1,2,3 или 4")
cash = int5.pack(cash)
payment2 = int5.pack(payment2)
payment3 = int5.pack(payment3)
payment4 = int5.pack(payment4)
discount = int2.pack(discount)
taxes = digits2string(taxes)
text = text.encode(CODE_PAGE).ljust(40, chr(0x0))
params = self.password + cash + payment2 + payment3 + payment4\
+ discount + taxes + text
data, error, command = self.ask(command, params, quick=True)
operator = ord(data[0])
odd = int5.unpack(data[1:6])
result = {
'operator': operator,
'odd': integer2money(odd),
}
return result
def x78(self):
""" Конфигурация подкладного документа
Команда: 78H. Длина сообщения: 209 байт.
Пароль оператора (4 байта)
Ширина подкладного документа в шагах (2 байта)*
Длина подкладного документа в шагах (2 байта)*
Ориентация печати – поворот в градусах по часовой
стрелке (1 байт) «0» – 0o, «1» – 90o, «2» – 180o,
«3» – 270o
Межстрочный интервал между 1-ой и 2-ой строками в шагах
(1 байт)*
Межстрочный интервал между 2-ой и 3-ей строками в шагах
(1 байт)*
Аналогично для строк 3...199 в шагах (1 байт)*
Межстрочный интервал между 199-ой и 200-ой строками в
шагах (1 байт)*
Ответ: 78H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
* - размер шага зависит от печатающего механизма
конкретного фискального регистратора. Шаг по горизонтали
не равен шагу по вертикали: эти параметры печатающего
механизма указываются в инструкции по эксплуатации на ККТ.
"""
raise NotImplemented
def x79(self):
""" Установка стандартной конфигурации подкладного
документа
Команда: 79H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 79H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x7A(self):
""" Заполнение буфера подкладного документа нефискальной
информацией
Команда: 7AH. Длина сообщения: (6 + X) байт.
Пароль оператора (4 байта)
Номер строки (1 байт) 1...200
Печатаемая информация (X байт) символ с кодом 27 и
следующий за ним символ не помещаются в буфер
подкладного документа, а задают тип шрифта
следующих символов; не более 250 байт
Ответ: 7AH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x7B(self):
""" Очистка строки буфера подкладного документа от
нефискальной информации
Команда: 7BH. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Номер строки (1 байт) 1...200
Ответ: 7BH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x7C(self):
""" Очистка всего буфера подк ладного документа от
нефискальной информации
Команда: 7CH. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 7CH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x7D(self):
""" Печать подкладного документа
Команда: 7DH. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Очистка нефискальной информации (1 байт) «0» – есть,
«1» – нет
Тип печатаемой информации (1 байт) «0» – только
нефискальная информация, «1» – только фискальная
информация, «2» – вся информация
Ответ: 7DH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x7E(self):
""" Общая конфигурация подкладного документа
Команда: 7EH. Длина сообщения: 11 байт.
Пароль оператора (4 байта)
Ширина подкладного документа в шагах (2 байта)*
Длина подкладного документа в шагах (2 байта)*
Ориентация печати (1 байт) «0» – 0o; «1» – 90o; «2» –
180o; «3» – 270o
Межстрочный интервал между строками в шагах (1 байт)*
Ответ: 7EH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
* - размер шага зависит от печатающего механизма
конкретного фискального регистратора. Шаг по горизонтали
не равен шагу по вертикали: эти параметры печатающего
механизма указываются в инструкции по эксплуатации на ККТ.
"""
raise NotImplemented
## Implemented
def _x8count(self, command, count, price, text='', department=0, taxes=[0,0,0,0]):
""" Общий метод для продаж, покупок, возвратов и сторно
Команда: 80H. Длина сообщения: 60 байт.
Пароль оператора (4 байта)
Количество (5 байт) 0000000000...9999999999
Цена (5 байт) 0000000000...9999999999
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 80H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = command
count = count2integer(count)
price = money2integer(price)
if count < 0 or count > 9999999999:
raise KktError("Количество должно быть в диапазоне между 0 и 9999999999")
if price < 0 or price > 9999999999:
raise KktError("Цена должна быть в диапазоне между 0 и 9999999999")
if not department in range(17):
raise KktError("Номер отдела должен быть в диапазоне между 0 и 16")
if len(text) > 40:
raise KktError("Текст должнен быть менее или равен 40 символам")
if len(taxes) != 4:
raise KktError("Количество налогов должно равняться 4")
if not isinstance(taxes, (list, tuple)):
raise KktError("Перечень налогов должен быть типом list или tuple")
for t in taxes:
if t not in range(0, 5):
raise KktError("Налоги должны быть равны 0,1,2,3 или 4")
count = int5.pack(count)
price = int5.pack(price)
department = chr(department)
taxes = digits2string(taxes)
text = text.encode(CODE_PAGE).ljust(40, chr(0x0))
params = self.password + count + price + department + taxes + text
data, error, command = self.ask(command, params, quick=True)
operator = ord(data[0])
return operator
## Implemented
def x80(self, count, price, text='', department=0, taxes=[0,0,0,0]):
""" Продажа
Команда: 80H. Длина сообщения: 60 байт.
Пароль оператора (4 байта)
Количество (5 байт) 0000000000...9999999999
Цена (5 байт) 0000000000...9999999999
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 80H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x80
return self._x8count(command=command, count=count, price=price,
text=text, department=department, taxes=taxes)
## Implemented
def x81(self, count, price, text='', department=0, taxes=[0,0,0,0]):
""" Покупка
Команда: 81H. Длина сообщения: 60 байт.
Пароль оператора (4 байта)
Количество (5 байт) 0000000000...9999999999
Цена (5 байт) 0000000000...9999999999
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 81H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x81
return self._x8count(command=command, count=count, price=price,
text=text, department=department, taxes=taxes)
## Implemented
def x82(self, count, price, text='', department=0, taxes=[0,0,0,0]):
""" Возврат продажи
Команда: 82H. Длина сообщения: 60 байт.
Пароль оператора (4 байта)
Количество (5 байт) 0000000000...9999999999
Цена (5 байт) 0000000000...9999999999
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 82H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x82
return self._x8count(command=command, count=count, price=price,
text=text, department=department, taxes=taxes)
## Implemented
def x83(self, count, price, text='', department=0, taxes=[0,0,0,0]):
""" Возврат покупки
Команда: 83H. Длина сообщения: 60 байт.
Пароль оператора (4 байта)
Количество (5 байт) 0000000000...9999999999
Цена (5 байт) 0000000000...9999999999
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 83H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x83
return self._x8count(command=command, count=count, price=price,
text=text, department=department, taxes=taxes)
## Implemented
def x84(self, count, price, text='', department=0, taxes=[0,0,0,0]):
""" Сторно
Команда: 84H. Длина сообщения: 60 байт.
Пароль оператора (4 байта)
Количество (5 байт) 0000000000...9999999999
Цена (5 байт) 0000000000...9999999999
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 84H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x84
return self._x8count(command=command, count=count, price=price,
text=text, department=department, taxes=taxes)
## Implemented
def x85(self, cash=0, summs=[0,0,0,0], discount=0, taxes=[0,0,0,0], text=''):
""" Закрытие чека
Команда: 85H. Длина сообщения: 71 байт.
Пароль оператора (4 байта)
Сумма наличных (5 байт) 0000000000...9999999999
Сумма типа оплаты 2 (5 байт) 0000000000...9999999999
Сумма типа оплаты 3 (5 байт) 0000000000...9999999999
Сумма типа оплаты 4 (5 байт) 0000000000...9999999999
Скидка/Надбавка(в случае отрицательного значения) в % на
чек от 0 до 99,99 % (2 байта со знаком) -9999...9999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 85H. Длина сообщения: 8 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Сдача (5 байт) 0000000000...9999999999
"""
command = 0x85
summa1 = money2integer(summs[0] or cash)
summa2 = money2integer(summs[1])
summa3 = money2integer(summs[2])
summa4 = money2integer(summs[3])
discount = money2integer(discount)
for i,s in enumerate([summa1, summa2, summa3, summa4]):
if s < 0 or s > 9999999999:
raise KktError("Переменная `summa%d` должна быть в диапазоне между 0 и 9999999999" % i+1)
if discount < -9999 or discount > 9999:
raise KktError("Скидка должна быть в диапазоне между -9999 и 9999")
if len(text) > 40:
raise KktError("Текст должнен быть менее или равен 40 символам")
if len(taxes) != 4:
raise KktError("Количество налогов должно равняться 4")
if not isinstance(taxes, (list, tuple)):
raise KktError("Перечень налогов должен быть типом list или tuple")
for t in taxes:
if t not in range(0, 5):
raise KktError("Налоги должны быть равны 0,1,2,3 или 4")
summa1 = int5.pack(summa1)
summa2 = int5.pack(summa2)
summa3 = int5.pack(summa3)
summa4 = int5.pack(summa4)
discount = int2.pack(discount)
taxes = digits2string(taxes)
text = text.encode(CODE_PAGE).ljust(40, chr(0x0))
params = self.password + summa1 + summa2 + summa3 + summa4 \
+ discount + taxes + text
data, error, command = self.ask(command, params)
operator = ord(data[0])
odd = int5.unpack(data[1:6])
result = {
'operator': operator,
'odd': integer2money(odd),
}
return result
## Implemented
def _x8summa(self, command, summa, text='', taxes=[0,0,0,0]):
""" Общий метод для скидок,
Команда: 86H. Длина сообщения: 54 байт.
Пароль оператора (4 байта)
Сумма (5 байт) 0000000000...9999999999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 86H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = command
summa = money2integer(summa)
if summa < 0 or summa > 9999999999:
raise KktError("Сумма должна быть в диапазоне между 0 и 9999999999")
if len(text) > 40:
raise KktError("Текст должнен быть менее или равен 40 символам")
if len(taxes) != 4:
raise KktError("Количество налогов должно равняться 4")
if not isinstance(taxes, (list, tuple)):
raise KktError("Перечень налогов должен быть типом list или tuple")
for t in taxes:
if t not in range(0, 5):
raise KktError("Налоги должны быть равны 0,1,2,3 или 4")
summa = int5.pack(summa)
taxes = digits2string(taxes)
text = text.encode(CODE_PAGE).ljust(40, chr(0x0))
params = self.password + summa + taxes + text
data, error, command = self.ask(command, params, quick=True)
operator = ord(data[0])
return operator
## Implemented
def x86(self, summa, text='', taxes=[0,0,0,0]):
""" Скидка
Команда: 86H. Длина сообщения: 54 байт.
Пароль оператора (4 байта)
Сумма (5 байт) 0000000000...9999999999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 86H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x86
return self._x8summa(command=command, summa=summa,
text=text, taxes=taxes)
## Implemented
def x87(self, summa, text='', taxes=[0,0,0,0]):
""" Надбавка
Команда: 87H. Длина сообщения: 54 байт.
Пароль оператора (4 байта)
Сумма (5 байт) 0000000000...9999999999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 87H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x87
return self._x8summa(command=command, summa=summa,
text=text, taxes=taxes)
## Implemented
def x88(self):
""" Аннулирование чека
Команда: 88H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 88H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x88
data, error, command = self.ask(command)
operator = ord(data[0])
return operator
## Implemented
def x89(self):
""" Подытог чека
Команда: 89H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 89H. Длина сообщения: 8 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Подытог чека (5 байт) 0000000000...9999999999
"""
command = 0x89
data, error, command = self.ask(command)
operator = ord(data[0])
return operator
## Implemented
def x8A(self, summa, text='', taxes=[0,0,0,0]):
""" Сторно скидки
Команда: 8AH. Длина сообщения: 54 байта.
Пароль оператора (4 байта)
Сумма (5 байт) 0000000000...9999999999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 8AH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x8A
return self._x8summa(command=command, summa=summa,
text=text, taxes=taxes)
## Implemented
def x8B(self, summa, text='', taxes=[0,0,0,0]):
""" Сторно надбавки
Команда: 8BH. Длина сообщения: 54 байта.
Пароль оператора (4 байта)
Сумма (5 байт) 0000000000...9999999999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 8BH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x8B
return self._x8summa(command=command, summa=summa,
text=text, taxes=taxes)
## Implemented
def x8C(self):
""" Повтор документа
Команда: 8CH. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 8CH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание:
Команда выводит на печать копию последнего закрытого
документа продажи, покупки, возврата продажи и
возврата покупки.
"""
command = 0x8C
data, error, command = self.ask(command)
operator = ord(data[0])
return operator
## Implemented
def x8D(self, document_type):
""" Открыть чек
Команда: 8DH. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Тип документа (1 байт):
0 – продажа;
1 – покупка;
2 – возврат продажи;
3 – возврат покупки
Ответ: 8DH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0x8D
if not document_type in range(4):
raise KktError("Тип документа должен быть значением 0,1,2 или 3")
params = self.password + chr(document_type)
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
def x90(self):
""" Формирование чека отпуска нефтепродуктов в режиме
предоплаты заданной дозы
Команда: 90H. Длина сообщения: 61 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Доза в миллилитрах (4 байта)
Номер отдела (1 байт) 0...16
Сумма наличных (5 байт) 0000000000...9999999999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 90H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Доза в миллилитрах (4 байта) 00000000...99999999
Доза в денежных единицах (5 байт) 0000000000...9999999999
"""
raise NotImplemented
def x91(self):
""" Формирование чека отпуска нефтепродуктов в режиме
предоплаты на заданную сумму
Команда: 91H. Длина сообщения: 57 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Номер отдела (1 байт) 0...16
Сумма наличных (5 байт) 0000000000...9999999999
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 91H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Доза в миллилитрах (4 байта) 00000000...99999999
Доза в денежных единицах (5 байт) 0000000000...9999999999
"""
raise NotImplemented
def x92(self):
""" Формирование чека коррекции при неполном отпуске
нефтепродуктов
Команда: 92H. Длина сообщения: 52 байта.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 92H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Недолитая доза в миллилитрах (4 байта) 00000000...99999999
Возвращаемая сумма (5 байт) 0000000000...9999999999
"""
raise NotImplemented
def x93(self):
""" Задание дозы РК в миллилитрах
Команда: 93H. Длина сообщения: 11 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Доза в миллилитрах (4 байта), если доза FFh FFh FFh FFh, то производится
заправка до полного бака: 00000000...99999999
Ответ: 93H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Доза в миллилитрах (4 байта) 00000000...99999999
Доза в денежных единицах (5 байт) 0000000000...9999999999
"""
raise NotImplemented
def x94(self):
""" Задание дозы РК в денежных единицах
Команда: 94H. Длина сообщения: 12 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Сумма наличных (5 байт) 0000000000...9999999999
Ответ: 94H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Доза в миллилитрах (4 байта) 00000000...99999999
Доза в денежных единицах (5 байт) 0000000000...9999999999
"""
raise NotImplemented
def x95(self):
""" Продажа нефтепродуктов
Команда: 95H. Длина сообщения: 52 байта.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Номер отдела (1 байт) 0...16
Налог 1 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 2 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 3 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Налог 4 (1 байт) «0» – нет, «1»...«4» – налоговая группа
Текст (40 байт)
Ответ: 95H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x96(self):
""" Останов РК
Команда: 96H. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Ответ: 96H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x97(self):
""" Пуск РК
Команда: 97H. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Номер ТРК 1...31 (1 байт)
Номер РК в ТРК 1...8 (1 байт)
Ответ: 97H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x98(self):
""" Сброс РК
Команда: 98H. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Ответ: 98H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x99(self):
""" Сброс всех ТРК
Команда: 99H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: 99H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x9A(self):
""" Задание параметров РК
Команда: 9AH. Длина сообщения: 13 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Замедление в миллилитрах (3 байта) 000000...999999
Цена (3 байта) 000000...999999
Ответ: 9AH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def x9B(self):
""" Считать литровый суммарный счетчик
Команда: 9BH. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Ответ: 9BH. Длина сообщения: 7 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Суммарный счетчик в миллилитрах (4 байта) 00000000...99999999
"""
raise NotImplemented
def x9E(self):
""" Запрос текущей дозы РК
Команда: 9EH. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Ответ: 9EH. Длина сообщения: 7 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Текущая доза в миллилитрах (4 байта) 00000000...99999999
"""
raise NotImplemented
def x9F(self):
""" Запрос состояния РК
Команда: 9FH. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Номер ТРК (1 байт) 1...31
Номер РК в ТРК (1 байт) 1...8
Ответ: 9FH. Длина сообщения: 30 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Текущая доза в миллилитрах (4 байта) 00000000...99999999
Заданная доза в миллилитрах (4 байта) 00000000...99999999
Текущая доза в денежных единицах (5 байт) 0000000000...9999999999
Заданная доза в денежных единицах (5 байт) 0000000000...9999999999
Замедление в миллилитрах (3 байта) 000000...999999
Цена (3 байта) 000000...999999
Статус РК (1 байт):
00 ТРК в сервисном режиме
01 готовность, доза не задана
02 готовность, доза задана
03 пуск, ожидание снятия пистолета
04 пуск, ожидание возврата пистолета
05 пуск, ожидание снятия пистолета, после возврата пистолета
06 пуск, тест индикатора
07 заправка на полной производительности
08 заправка с замедлением
09 остановка по исчерпанию дозы
0A остановка при отсутствии импульсов с датчика (по тайм-ауту)
0B остановка по команде оператора
0С остановка по возврату пистолета
0D остановка по ошибке
Флаги РК (1 байт)
0 бит – «0» – мотор выключен, «1» – включен
1 бит – «0» – грубый клапан выключен, «1» - включен
2 бит – «0» – замедляющий клапан выключен, «1» - включен
3 бит – «0» – пистолет повешен, «1» – пистолет снят
4 бит – «0» – чек оформлен, «1» – чек не оформлен
5 бит – «0» – чек закрыт, «1» – чек не закрыт
Код ошибки при аварийной остановке (1 байт)
00 – аварийной остановки нет
01 – внутренняя ошибка контроллера
02 – обратное вращение датчика
03 – обрыв фаз датчика объема SIN
04 – обрыв цепи управления пускателя
05 – обрыв цепи управления основным клапаном
06 – обрыв цепи управления клапаном снижения
07 – переполнение
08 – перелив
09 – обрыв фаз датчика объѐма COS
FF – неисправность оборудования
"""
raise NotImplemented
def xA0(self):
""" Отчет ЭКЛЗ по отделам в заданном диапазоне дат
Команда: A0H. Длина сообщения: 13 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Номер отдела (1 байт) 1...16
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Ответ: A0H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: Время выполнения команды – до 150 секунд.
"""
raise NotImplemented
def xA1(self):
""" Отчет ЭКЛЗ по отделам в заданном диапазоне номеров
смен
Команда: A1H. Длина сообщения: 11 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Номер отдела (1 байт) 1...16
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
Ответ: A1H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: Время выполнения команды – до 150 секунд.
"""
raise NotImplemented
def xA2(self):
""" Отчет ЭКЛЗ по закрытиям смен в заданном диапазоне дат
Команда: A2H. Длина сообщения: 12 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Ответ: A2H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: Время выполнения команды – до 100 секунд.
"""
raise NotImplemented
def xA3(self):
""" Отчет ЭКЛЗ по закрытиям смен в заданном диапазоне
номеров смен
Команда: A3H. Длина сообщения: 10 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
Ответ: A3H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: Время выполнения команды – до 100 секунд.
"""
raise NotImplemented
## Implemented
def xA4(self, number):
""" Итоги смены по номеру смены ЭКЛЗ
Команда: A4H. Длина сообщения: 7 байт.
Пароль системного администратора (4 байта)
Номер смены (2 байта) 0000...2100
Ответ: A4H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: Время выполнения команды – до 40 секунд.
"""
command = 0xBA
params = self.admin_password + int2.pack(int(number))
data, error, command = self.ask(command, params)
return True
def xA5(self):
""" Платежный документ из ЭКЛЗ по номеру КПК
Команда: A5H. Длина сообщения: 9 байт.
Пароль системного администратора (4 байта)
Номер КПК (4 байта) 00000000...99999999
Ответ: A5H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: Время выполнения команды – до 40 секунд.
"""
raise NotImplemented
def xA6(self):
""" Контрольная лента из ЭКЛЗ по номеру смены
Команда: A6H. Длина сообщения: 7 байт.
Пароль системного администратора (4 байта)
Номер смены (2 байта) 0000...2100
Ответ: A6H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание: Время выполнения команды – до 40 секунд.
"""
raise NotImplemented
## Implemented
def xA7(self):
""" Прерывание полного отчета ЭКЛЗ или контрольной ленты
ЭКЛЗ или печати платежного документа ЭКЛЗ
Команда: A7H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: A7H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
command = 0xA7
params = self.admin_password
data, error, command = self.ask(command, params)
return error
def xA8(self):
""" Итог активизации ЭКЛЗ
Команда: A8H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: A8H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
#~ command = 0xA8
#~ params = self.admin_password
#~ data, error, command = self.ask(command, params)
#~ return error
def xA9(self):
""" Активизация ЭКЛЗ
Команда: A9H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: A9H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
#~ command = 0xA9
#~ params = self.admin_password
#~ data, error, command = self.ask(command, params)
#~ return error
def xAA(self):
""" Закрытие архива ЭКЛЗ
Команда: AAH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: AAH. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
#~ command = 0xAA
#~ params = self.admin_password
#~ data, error, command = self.ask(command, params)
#~ return error
## Implemented
def xAB(self):
""" Запрос регистрационного номера ЭКЛЗ
Команда: ABH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: ABH. Длина сообщения: 7 байт.
Код ошибки (1 байт)
Номер ЭКЛЗ (5 байт) 0000000000...9999999999
"""
command = 0xAB
params = self.admin_password
data, error, command = self.ask(command, params)
return int5.unpack(data[:5])
def xAC(self):
""" Прекращение ЭКЛЗ
Команда: ACH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: ACH. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
def xAD(self):
""" Запрос состояния по коду 1 ЭКЛЗ
Команда: ADH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: ADH. Длина сообщения: 22 байта.
Код ошибки (1 байт)
Итог документа последнего КПК (5 байт) 0000000000...9999999999
Дата последнего КПК (3 байта) ДД-ММ-ГГ
Время последнего КПК (2 байта) ЧЧ-ММ
Номер последнего КПК (4 байта) 00000000...99999999
Номер ЭКЛЗ (5 байт) 0000000000...9999999999
Флаги ЭКЛЗ (см. описание ЭКЛЗ) (1 байт)
Примечание:
Флаги, используемые ЭКЛЗ, описаны в документе
«Драйвер ККТ: руководство программиста» версии А4.3 и
выше.
"""
raise NotImplemented
command = 0xAD
params = self.admin_password
data, error, command = self.ask(command, params)
def xAE(self):
""" Запрос состояния по коду 2 ЭКЛЗ
Команда: AEH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: AEH. Длина сообщения: 28 байт.
Код ошибки (1 байт)
Номер смены (2 байта) 0000...2100
Итог продаж (6 байт) 000000000000...999999999999
Итог покупок (6 байт) 000000000000...999999999999
Итог возвратов продаж (6 байт) 000000000000...999999999999
Итог возвратов покупок (6 байт) 000000000000...999999999999
"""
raise NotImplemented
command = 0xAE
params = self.admin_password
data, error, command = self.ask(command, params)
## Implemented
def xAF(self):
""" Тест целостности архива ЭКЛЗ
Команда: AFH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: AFH. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
command = 0xAF
params = self.admin_password
data, error, command = self.ask(command, params)
return error
## Implemented
def xB0(self, admin_password=None):
""" Продолжение печати
Команда: B0H. Длина сообщения: 5 байт.
Пароль оператора, администратора или системного
администратора (4 байта)
Ответ: B0H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0xB0
params = self.admin_password
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
## Implemented
def xB1(self):
""" Запрос версии ЭКЛЗ
Команда: B1H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: B1H. Длина сообщения: 20 байт.
Код ошибки (1 байт)
Строка символов в кодировке WIN1251 (18 байт)
"""
command = 0xB1
params = self.admin_password
data, error, command = self.ask(command, params)
version = data[:18].decode(CODE_PAGE)
return version
## Implemented
def xB2(self):
""" Инициализация архива ЭКЛЗ
Команда: B2H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: B2H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание:
Команда работает только с отладочным комплектом ЭКЛЗ.
Время выполнения команды – до 20 секунд.
"""
command = 0xB2
params = self.admin_password
data, error, command = self.ask(command, params)
return error
## Implemented
def xB3(self):
""" Запрос данных отчѐта ЭКЛЗ
Команда: B3H. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: B3H. Длина сообщения: (2+Х) байт.
Код ошибки (1 байт)
Строка или фрагмент отчета (см. спецификацию ЭКЛЗ) (X байт)
"""
command = 0xE1
params = self.admin_password
data, error, command = self.ask(command, params)
return data.decode(CODE_PAGE)
def xB4(self):
""" Запрос контрольной ленты ЭКЛЗ
Команда: B4H. Длина сообщения: 7 байт.
Пароль системного администратора (4 байта)
Номер смены (2 байта) 0000...2100
Ответ: B4H. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
"""
raise NotImplemented
def xB5(self):
""" Запрос документа ЭКЛЗ
Команда: B5H. Длина сообщения: 9 байт.
Пароль системного администратора (4 байта)
Номер КПК (4 байта) 00000000...99999999
Ответ: B5H. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
Примечание: Время выполнения команды – до 40 секунд.
"""
raise NotImplemented
def xB6(self):
""" Запрос отчѐта ЭКЛЗ по отделам в заданном диапазоне дат
Команда: B6H. Длина сообщения: 13 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Номер отдела (1 байт) 1...16
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Ответ: B6H. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
Примечание: Время выполнения команды – до 150 секунд.
"""
raise NotImplemented
def xB7(self):
""" Запрос отчѐта ЭКЛЗ по отделам в заданном диапазоне
номеров смен
Команда: B7H. Длина сообщения: 11 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Номер отдела (1 байт) 1...16
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
Ответ: B7H. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
Примечание: Время выполнения команды – до 150 секунд.
"""
raise NotImplemented
def xB8(self):
""" Запрос отчѐта ЭКЛЗ по закрытиям смен в заданном
диапазоне дат
Команда: B8H. Длина сообщения: 12 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Дата первой смены (3 байта) ДД-ММ-ГГ
Дата последней смены (3 байта) ДД-ММ-ГГ
Ответ: B8H. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
Примечание: Время выполнения команды – до 100 секунд.
"""
raise NotImplemented
def xB9(self):
""" Запрос отчѐта ЭКЛЗ по закрытиям смен в заданном диапазоне
номеров смен.
Команда: B9H. Длина сообщения: 10 байт.
Пароль системного администратора (4 байта)
Тип отчета (1 байт) «0» – короткий, «1» – полный
Номер первой смены (2 байта) 0000...2100
Номер последней смены (2 байта) 0000...2100
Ответ: B9H. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
Примечание: Время выполнения команды – до 100 секунд.
"""
raise NotImplemented
## Implemented
def xBA(self, number):
""" Запрос в ЭКЛЗ итогов смены по номеру смены
Команда: BAH. Длина сообщения: 7 байт.
Пароль системного администратора (4 байта)
Номер смены (2 байта) 0000...2100
Ответ: BAH. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
Примечание: Время выполнения команды – до 40 секунд.
"""
command = 0xBA
params = self.admin_password + int2.pack(int(number))
data, error, command = self.ask(command, params)
kkm = data.decode(CODE_PAGE)
return kkm
def xBB(self):
""" Запрос итога активизации ЭКЛЗ
Команда: BBH. Длина сообщения: 5 байт.
Пароль системного администратора (4 байта)
Ответ: BBH. Длина сообщения: 18 байт.
Код ошибки (1 байт)
Тип ККМ – строка символов в кодировке WIN1251 (16 байт)
"""
raise NotImplemented
def xBC(self):
""" Вернуть ошибку ЭКЛЗ
Команда: BCH. Длина сообщения: 6 байт.
Пароль системного администратора (4 байта)
Код ошибки (1 байт)
Ответ: BCH. Длина сообщения: 2 байта.
Код ошибки (1 байт)
Примечание:
Команда работает только с отладочным комплектом ЭКЛЗ.
"""
raise NotImplemented
def xC0(self):
""" Загрузка графики
Команда: C0H. Длина сообщения: 46 байт.
Пароль оператора (4 байта)
Номер линии (1 байт) 0...199
Графическая информация (40 байт)
Ответ: C0H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xC1(self):
""" Печать графики
Команда: C1H. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Начальная линия (1 байт) 1...200
Конечная линия (1 байт) 1...200
Ответ: С1H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xC2(self,barcode):
""" Печать штрих-кода
Команда: C2H. Длина сообщения: 10 байт.
Пароль оператора (4 байта)
Штрих-код (5 байт) 000000000000...999999999999
Ответ: С2H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
command = 0xC2
barcode = int5.pack(barcode)
params = self.password + barcode
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
def xC3(self):
""" Печать расширенной графики
Команда: C3H. Длина сообщения: 9 байт.
Пароль оператора (4 байта)
Начальная линия (2 байта) 1...1200
Конечная линия (2 байта) 1...1200
Ответ: C3H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xC4(self):
""" Загрузка расширенной графики
Команда: C4H. Длина сообщения: 47 байт.
Пароль оператора (4 байта)
Номер линии (2 байта) 0...1199
Графическая информация (40 байт)
Ответ: С4H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xC5(self):
""" Печать линии
Команда: C5H. Длина сообщения: X + 7 байт.
Пароль оператора (4 байта)
Количество повторов (2 байта)
Графическая информация (X байт)
Ответ: C5H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xC6(self):
""" Суточный отчет с гашением в буфер
Команда: C6H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: C6H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xC7(self):
""" Распечатать отчет из буфера
Команда: C7H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: C7H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xC8(self):
""" Запрос количества строк в буфере печати
Команда: C8H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: C8H. Длина сообщения: 6 байт.
Код ошибки (1 байт)
Количество строк в буфере печати(2 байта)
Количество напечатанных строк (2 байта)
"""
raise NotImplemented
def xC9(self):
""" Получить строку буфера печати
Команда: C9H. Длина сообщения: 7 байт.
Пароль оператора (4 байта)
Номер строки (2 байта)
Ответ: C9H. Длина сообщения: 2 + n байт
Код ошибки (1 байт)
Данные строки (n байт)
"""
raise NotImplemented
## Implemented
def xCA(self):
""" Очистить буфер печати
Команда: CAH. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: CAH. Длина сообщения: 2 байта
Код ошибки (1 байт)
"""
command = 0xCA
data, error, command = self.ask(command)
return error
def xD0(self):
""" Запрос состояния ФР IBM длинный
Команда: D0H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: D0H. Длина сообщения: 44 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Текущая дата (3 байта) ДД-ММ-ГГ
Текущее время (3 байта) ЧЧ-ММ-СС
Номер последней закрытой смены (2 байта)
Сквозной номер последнего закрытого документа (4 байта)
Количество чеков продаж в текущей смене (2 байта)
Количество чеков покупок текущей смене (2 байта)
Количество чеков возврата продаж в текущей смене
(2 байта)
Количество чеков чека возврата покупок продаж в текущей
смене (2 байта)
Дата начала открытой смены (3 байта) ДД-ММ-ГГ
Время начала открытой смены (3 байта) ЧЧ-ММ-СС
Наличные в кассе (6 байт)
Состояние принтера (8 байт)
Флаги (1 байт)
Битовое поле (назначение бит):
0 – Сериализована (0 –нет, 1 – есть)
1 – Фискализирована (0 –нет, 1 – есть)
2 – Активизирована ЭКЛЗ (0 – нет, 1 – да)
3 – Смена открыта (0 – нет, 1 – есть)
4 – Смена открыта 24 часа закончились (0 – нет,
1 – есть)
"""
raise NotImplemented
def xD1(self):
""" Запрос состояния ФР IBM короткий
Команда: D1H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: D1H. Длина сообщения: 12 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Состояние принтера (8 байт)
Флаги (1 байт)
Битовое поле (назначение бит):
0 – Буфер печати ККТ пуст (0 –нет, 1 – есть)
"""
raise NotImplemented
def xDD(self):
""" Загрузка данных
Команда: DDH. Длина сообщения: 71 байт.
Пароль (4 байта)
Тип данных (1 байт) 0 – данные для двумерного штрих-кода
Порядковый номер блока данных (1 байт)
Данные (64 байта)
Ответ: DDH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xDE(self):
""" Печать многомерного штрих -кода
Команда: DEH. Длина сообщения: 15 байт.
Пароль (4 байта)
Тип штрих-кода (1 байт)
Длина данных штрих-кода (2 байта)
Номер начального блока данных (1байт)
Параметр 1 (1 байт)
Параметр 2 (1 байт)
Параметр 3 (1 байт)
Параметр 4 (1 байт)
Параметр 5 (1 байт)
Выравнивание (1 байт)
Ответ: DEH. Длина сообщения: 3 байт.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание: тип штрих-кода смотрите в документации
"""
raise NotImplemented
## Implemented
def xE0(self):
""" Открыть смену
Команда: E0H. Длина сообщения: 5байт.
Пароль оператора (4 байта)
Ответ: E0H. Длина сообщения: 2 байта.
Порядковый номер оператора (1 байт) 1...30
Примечание:
Команда открывает смену в ФП и переводит ККТ в режим
«Открытой смены».
"""
command = 0xE0
data, error, command = self.ask(command)
operator = ord(data[0])
return operator
## Implemented
def xE1(self):
""" Допечатать ПД
Команда: E1H. Длина сообщения: 5байт.
Пароль оператора (4 байта)
Ответ: E1H. Длина сообщения: 2 байта.
Порядковый номер оператора (1 байт) 1...30
Примечание:
Команда допечатывает ПД после нештатных ситуаций
(обрыв бумаги, отключение питания и т.д.). Печать
возобновляется с той же строки, на которой произошел
останов печати в случае отключения питания или обрыва
бумаги.
"""
command = 0xE1
data, error, command = self.ask(command)
operator = ord(data[0])
return operator
## Implemented
def xE2(self):
""" Открыть нефискальный документ
Команда: E2H. Длина сообщения: 5байт.
Пароль оператора (4 байта)
Ответ: E2H. Длина сообщения: 3 байта.
Код ошибки(1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание:
Команда переводит ККТ в режим, позволяющий печатать
произвольные текстовые строки.
"""
command = 0xE2
data, error, command = self.ask(command)
operator = ord(data[0])
return operator
## Implemented
def xE3(self):
""" Закрыть нефискальный документ
Команда: E3H. Длина сообщения: 5байт.
Пароль оператора (4 байта)
Ответ: E3H. Длина сообщения: 3 байта.
Код ошибки(1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание:
Команда выводит ККТ в режим, позволяющий печатать
произвольные текстовые строки.
"""
command = 0xE3
data, error, command = self.ask(command)
operator = ord(data[0])
return operator
def xE4(self):
""" Печать Реквизита
Команда: E4H. Длина сообщения: 7-206 байт.
Пароль оператора (4 байта)
Номер реквизита (1 байт)
Значение реквизита (1-200 байт)
Ответ: E4H. Длина сообщения: 3 байта.
Код ошибки(1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание:
Команда печатает реквизит в открытом фискальном
документе. Поле «значение реквизита» содержит
текстовую информацию в кодировке win1251 с
разделителем строк 0х0А. Может быть напечатано не
более 4-х строк.
"""
raise NotImplemented
def xE5(self):
""" Запрос состояния купюроприемника
Команда: E5H. Длина сообщения: 5 байт.
Пароль оператора (4 байта)
Ответ: E5H. Длина сообщения: 6 байт.
Код ошибки(1 байт)
Порядковый номер оператора (1 байт) 1...30
Режим опроса купюроприемника (1 байт) 0 – не ведется,
1 – ведется
Poll 1 (1 байт)
Poll 2 (1 байт) – Байты, которые вернул купюроприемник
на последнюю команду
Poll (подробности в описании протокола CCNet)
"""
raise NotImplemented
def xE6(self):
""" Запрос регистров купюроприемника
Команда: E6H. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Номер набора регистров (1 байт) 0 – количество купюр в
текущем чеке, 1 – количество купюр в текущей
смене, 2 – Общее количество принятых купюр.
Ответ: E6H. Длина сообщения: 100 байт.
Код ошибки(1 байт)
Порядковый номер оператора (1 байт) 1...30
Номер набора регистров (1 байт)
Количество купюр типа 0.23(4*24=96 байт) 24 4-х байтный
целых числа.
"""
raise NotImplemented
## Implemented
def xE7(self):
""" Отчет по купюроприемнику
Команда: E7H. Длина сообщения: 5 байт.
Пароль администратора или системного администратора (4 байта)
Ответ: E7H. Длина сообщения: 3 байта.
Код ошибки(1 байт)
Порядковый номер оператора (1 байт) 29, 30
"""
command = 0xE7
params = self.admin_password
data, error, command = self.ask(command, params)
operator = ord(data[0])
return operator
## Implemented
def xE8(self, tax_password):
""" Оперативный отчет НИ
Команда: E8H. Длина сообщения: 5 байт.
Пароль НИ (4 байта)
Ответ: E8H. Длина сообщения: 2 байта.
Код ошибки(1 байт)
"""
command = 0xE8
params = tax_password
data, error, command = self.ask(command, params)
return error
def xF0(self):
""" Управление заслонкой
Команда: F0H. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Положение (1 байт) «1» – открыта; «0» – закрыта
Ответ: F0H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xF1(self):
""" Выдать чек
Команда: F1H. Длина сообщения: 6 байт.
Пароль оператора (4 байта)
Тип выдачи (1 байт)
1 - до срабатывания датчика на выходе из презентера
(захватить чек)
0 - не учитывать датчик (выброс чека)
Ответ: F1H. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
"""
raise NotImplemented
def xF3(self):
""" Установить пароль ЦТО
Команда: F3H. Длина сообщения: 9 байт.
Пароль ЦТО (4 байта)
Новый пароль ЦТО (4 байта)
Ответ: F3H. Длина сообщения: 2 байта.
Код ошибки (1 байт)
"""
raise NotImplemented
## Implemented
def xFC(self):
""" Получить тип устройства
Команда: FCH. Длина сообщения: 1 байт.
Ответ: FCH. Длина сообщения: (8+X) байт.
Код ошибки (1 байт)
Тип устройства (1 байт) 0...255
Подтип устройства (1 байт) 0...255
Версия протокола для данного устройства (1 байт) 0...255
Подверсия протокола для данного устройства (1 байт)
0...255
Модель устройства (1 байт) 0...255
Язык устройства (1 байт) 0...255 русский – 0;
английский – 1;
Название устройства – строка символов в кодировке
WIN1251. Количество байт, отводимое под название
устройства, определяется в каждом конкретном
случае самостоятельно разработчиками устройства
(X байт)
Примечание:
Команда предназначена для идентификации устройств.
"""
command = 0xFC
data, error, command = self.ask(command, without_password=True)
result = {
'device_type': ord(data[0]),
'device_subtype': ord(data[1]),
'protocol_version': ord(data[2]),
'protocol_subversion': ord(data[3]),
'device_model': ord(data[4]),
'device_language': ord(data[5]),
'device_name': data[6:].decode(CODE_PAGE),
}
return result
def xFD(self):
""" Управление портом дополнительного внешнего устройства
Команда: FDH. Длина сообщения: (6+X) байт.
Пароль оператора (4 байта)
Номер порта (1 байт) 0...255
Строка команд, которые будут посланы в порт
дополнительного внешнего устройства (X байт).
Ответ: FDH. Длина сообщения: 3 байта.
Код ошибки (1 байт)
Порядковый номер оператора (1 байт) 1...30
Примечание:
Дополнительное внешнее устройство – устройство, для
функционирования которого не требуется формирования
ответного сообщения.
"""
raise NotImplemented
|
t3dev/odoo
|
refs/heads/master
|
addons/purchase_stock/tests/test_onchange_product.py
|
14
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from odoo import fields
from odoo.tests.common import TransactionCase
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT
class TestOnchangeProductId(TransactionCase):
    """Test that when an included tax is mapped by a fiscal position, the included tax must be
    subtracted to the price of the product.
    """
    def setUp(self):
        """Cache the model registries the test exercises."""
        super(TestOnchangeProductId, self).setUp()
        self.fiscal_position_model = self.env['account.fiscal.position']
        self.fiscal_position_tax_model = self.env['account.fiscal.position.tax']
        self.tax_model = self.env['account.tax']
        self.po_model = self.env['purchase.order']
        self.po_line_model = self.env['purchase.order.line']
        self.res_partner_model = self.env['res.partner']
        self.product_tmpl_model = self.env['product.template']
        self.product_model = self.env['product.product']
        self.product_uom_model = self.env['uom.uom']
        self.supplierinfo_model = self.env["product.supplierinfo"]

    def test_onchange_product_id(self):
        """Check PO line pricing through onchanges: tax-included price
        remapped by a fiscal position, minimum-quantity pricing, UoM
        conversion, and the no-vendor fallback to a zero price."""
        uom_id = self.product_uom_model.search([('name', '=', 'Unit(s)')])[0]
        partner_id = self.res_partner_model.create(dict(name="George"))
        # 21% purchase tax *included* in the price; the fiscal position
        # below maps it to a 0% excluded tax.
        tax_include_id = self.tax_model.create(dict(name="Include tax",
                                                    amount='21.00',
                                                    price_include=True,
                                                    type_tax_use='purchase'))
        tax_exclude_id = self.tax_model.create(dict(name="Exclude tax",
                                                    amount='0.00',
                                                    type_tax_use='purchase'))
        supplierinfo_vals = {
            'name': partner_id.id,
            'price': 121.0,
        }
        supplierinfo = self.supplierinfo_model.create(supplierinfo_vals)
        product_tmpl_id = self.product_tmpl_model.create(dict(name="Voiture",
                                                              list_price=121,
                                                              seller_ids=[(6, 0, [supplierinfo.id])],
                                                              supplier_taxes_id=[(6, 0, [tax_include_id.id])]))
        product_id = self.product_model.create(dict(product_tmpl_id=product_tmpl_id.id))
        fp_id = self.fiscal_position_model.create(dict(name="fiscal position", sequence=1))
        fp_tax_id = self.fiscal_position_tax_model.create(dict(position_id=fp_id.id,
                                                               tax_src_id=tax_include_id.id,
                                                               tax_dest_id=tax_exclude_id.id))
        po_vals = {
            'partner_id': partner_id.id,
            'fiscal_position_id': fp_id.id,
            'order_line': [
                (0, 0, {
                    'name': product_id.name,
                    'product_id': product_id.id,
                    'product_qty': 1.0,
                    'product_uom': uom_id.id,
                    'price_unit': 121.0,
                    'date_planned': datetime.today().strftime(DEFAULT_SERVER_DATETIME_FORMAT),
                })],
        }
        po = self.po_model.create(po_vals)
        po_line = po.order_line[0]
        po_line.onchange_product_id()
        # `assertEquals` is a deprecated unittest alias (removed in
        # Python 3.12); use the canonical `assertEqual`.
        self.assertEqual(100, po_line.price_unit, "The included tax must be subtracted to the price")
        supplierinfo.write({'min_qty': 24})
        po_line.write({'product_qty': 20})
        po_line._onchange_quantity()
        self.assertEqual(0, po_line.price_unit, "Unit price should be reset to 0 since the supplier supplies minimum of 24 quantities")
        # 3 dozen = 36 units, above the 24-unit minimum again.
        po_line.write({'product_qty': 3, 'product_uom': self.ref("uom.product_uom_dozen")})
        po_line._onchange_quantity()
        self.assertEqual(1200, po_line.price_unit, "Unit price should be 1200 for one Dozen")
        product_ipad = self.env.ref('product.product_product_4')
        po_line2 = self.po_line_model.create({
            'name': product_ipad.name,
            'product_id': product_ipad.id,
            'order_id': po.id,
            'product_qty': 5,
            'product_uom': uom_id.id,
            'price_unit': 100.0,
            'date_planned': fields.Date().today()
        })
        po_line2.onchange_product_id()
        self.assertEqual(0, po_line2.price_unit, "No vendor supplies this product, hence unit price should be set to 0")
|
cntnboys/410Lab6
|
refs/heads/master
|
v1/lib/python2.7/site-packages/django/db/backends/oracle/creation.py
|
34
|
import sys
import time
from django.conf import settings
from django.db.backends.creation import BaseDatabaseCreation
from django.db.utils import DatabaseError
from django.utils.six.moves import input
# Prefix prepended to the production name to derive test DB/user/tablespace names.
TEST_DATABASE_PREFIX = 'test_'
# Default password for the auto-created Oracle test user.
PASSWORD = 'Im_a_lumberjack'
class DatabaseCreation(BaseDatabaseCreation):
    """Oracle-specific test-database creation and teardown.

    Unlike other backends, this does not create a separate database:
    it creates a dedicated tablespace pair and a test *user* inside the
    existing Oracle instance, then repoints the connection settings at
    that user.
    """
    # This dictionary maps Field objects to their associated Oracle column
    # types, as strings. Column-type strings can contain format strings; they'll
    # be interpolated against the values of Field.__dict__ before being output.
    # If a column type is set to None, it won't be included in the output.
    #
    # Any format strings starting with "qn_" are quoted before being used in the
    # output (the "qn_" prefix is stripped before the lookup is performed.
    data_types = {
        'AutoField': 'NUMBER(11)',
        'BinaryField': 'BLOB',
        'BooleanField': 'NUMBER(1)',
        'CharField': 'NVARCHAR2(%(max_length)s)',
        'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)',
        'DateField': 'DATE',
        'DateTimeField': 'TIMESTAMP',
        'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
        'FileField': 'NVARCHAR2(%(max_length)s)',
        'FilePathField': 'NVARCHAR2(%(max_length)s)',
        'FloatField': 'DOUBLE PRECISION',
        'IntegerField': 'NUMBER(11)',
        'BigIntegerField': 'NUMBER(19)',
        'IPAddressField': 'VARCHAR2(15)',
        'GenericIPAddressField': 'VARCHAR2(39)',
        'NullBooleanField': 'NUMBER(1)',
        'OneToOneField': 'NUMBER(11)',
        'PositiveIntegerField': 'NUMBER(11)',
        'PositiveSmallIntegerField': 'NUMBER(11)',
        'SlugField': 'NVARCHAR2(%(max_length)s)',
        'SmallIntegerField': 'NUMBER(11)',
        'TextField': 'NCLOB',
        'TimeField': 'TIMESTAMP',
        'URLField': 'VARCHAR2(%(max_length)s)',
    }
    # CHECK constraints enforcing value ranges Oracle cannot express in
    # the column type itself (booleans and unsigned integers).
    data_type_check_constraints = {
        'BooleanField': '%(qn_column)s IN (0,1)',
        'NullBooleanField': '(%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL)',
        'PositiveIntegerField': '%(qn_column)s >= 0',
        'PositiveSmallIntegerField': '%(qn_column)s >= 0',
    }
    def __init__(self, connection):
        """Delegate to the base class; no Oracle-specific state."""
        super(DatabaseCreation, self).__init__(connection)
    def _create_test_db(self, verbosity=1, autoclobber=False):
        """Create the test tablespaces and user, then switch this
        connection's settings over to the test user.

        With ``autoclobber`` any pre-existing test database/user is
        destroyed without prompting. Returns the value used as the test
        database NAME.
        """
        TEST_NAME = self._test_database_name()
        TEST_USER = self._test_database_user()
        TEST_PASSWD = self._test_database_passwd()
        TEST_TBLSPACE = self._test_database_tblspace()
        TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
        parameters = {
            'dbname': TEST_NAME,
            'user': TEST_USER,
            'password': TEST_PASSWD,
            'tblspace': TEST_TBLSPACE,
            'tblspace_temp': TEST_TBLSPACE_TMP,
        }
        cursor = self.connection.cursor()
        if self._test_database_create():
            try:
                self._execute_test_db_creation(cursor, parameters, verbosity)
            except Exception as e:
                sys.stderr.write("Got an error creating the test database: %s\n" % e)
                # When autoclobber is set, `confirm` is never assigned; the
                # `or` below short-circuits before reading it.
                if not autoclobber:
                    confirm = input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_NAME)
                if autoclobber or confirm == 'yes':
                    if verbosity >= 1:
                        print("Destroying old test database '%s'..." % self.connection.alias)
                    try:
                        self._execute_test_db_destruction(cursor, parameters, verbosity)
                    except DatabaseError as e:
                        # ORA-29857: tablespace contains domain-indexed
                        # objects; they must be removed (or the owning test
                        # user dropped) before the tablespace can go.
                        if 'ORA-29857' in str(e):
                            self._handle_objects_preventing_db_destruction(cursor, parameters,
                                                                           verbosity, autoclobber)
                        else:
                            # Ran into a database error that isn't about leftover objects in the tablespace
                            sys.stderr.write("Got an error destroying the old test database: %s\n" % e)
                            sys.exit(2)
                    except Exception as e:
                        sys.stderr.write("Got an error destroying the old test database: %s\n" % e)
                        sys.exit(2)
                    try:
                        self._execute_test_db_creation(cursor, parameters, verbosity)
                    except Exception as e:
                        sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                        sys.exit(2)
                else:
                    print("Tests cancelled.")
                    sys.exit(1)
        if self._test_user_create():
            if verbosity >= 1:
                print("Creating test user...")
            try:
                self._create_test_user(cursor, parameters, verbosity)
            except Exception as e:
                sys.stderr.write("Got an error creating the test user: %s\n" % e)
                if not autoclobber:
                    confirm = input("It appears the test user, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_USER)
                if autoclobber or confirm == 'yes':
                    try:
                        if verbosity >= 1:
                            print("Destroying old test user...")
                        self._destroy_test_user(cursor, parameters, verbosity)
                        if verbosity >= 1:
                            print("Creating test user...")
                        self._create_test_user(cursor, parameters, verbosity)
                    except Exception as e:
                        sys.stderr.write("Got an error recreating the test user: %s\n" % e)
                        sys.exit(2)
                else:
                    print("Tests cancelled.")
                    sys.exit(1)
        self.connection.close()  # done with main user -- test user and tablespaces created
        # Remember the production credentials so _destroy_test_db can
        # restore them, then switch both the live settings dict and
        # settings.DATABASES over to the test user.
        real_settings = settings.DATABASES[self.connection.alias]
        real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = self.connection.settings_dict['USER']
        real_settings['SAVED_PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] = self.connection.settings_dict['PASSWORD']
        real_test_settings = real_settings['TEST']
        test_settings = self.connection.settings_dict['TEST']
        real_test_settings['USER'] = real_settings['USER'] = test_settings['USER'] = self.connection.settings_dict['USER'] = TEST_USER
        real_settings['PASSWORD'] = self.connection.settings_dict['PASSWORD'] = TEST_PASSWD
        return self.connection.settings_dict['NAME']
    def _handle_objects_preventing_db_destruction(self, cursor, parameters, verbosity, autoclobber):
        """Recover from ORA-29857 by dropping the test user (if we own
        it) and retrying the tablespace destruction; otherwise abort."""
        # There are objects in the test tablespace which prevent dropping it
        # The easy fix is to drop the test user -- but are we allowed to do so?
        print("There are objects in the old test database which prevent its destruction.")
        print("If they belong to the test user, deleting the user will allow the test "
              "database to be recreated.")
        print("Otherwise, you will need to find and remove each of these objects, "
              "or use a different tablespace.\n")
        if self._test_user_create():
            if not autoclobber:
                confirm = input("Type 'yes' to delete user %s: " % parameters['user'])
            if autoclobber or confirm == 'yes':
                try:
                    if verbosity >= 1:
                        print("Destroying old test user...")
                    self._destroy_test_user(cursor, parameters, verbosity)
                except Exception as e:
                    sys.stderr.write("Got an error destroying the test user: %s\n" % e)
                    sys.exit(2)
                try:
                    if verbosity >= 1:
                        print("Destroying old test database '%s'..." % self.connection.alias)
                    self._execute_test_db_destruction(cursor, parameters, verbosity)
                except Exception as e:
                    sys.stderr.write("Got an error destroying the test database: %s\n" % e)
                    sys.exit(2)
            else:
                print("Tests cancelled -- test database cannot be recreated.")
                sys.exit(1)
        else:
            print("Django is configured to use pre-existing test user '%s',"
                  " and will not attempt to delete it.\n" % parameters['user'])
            print("Tests cancelled -- test database cannot be recreated.")
            sys.exit(1)
    def _destroy_test_db(self, test_database_name, verbosity=1):
        """
        Destroy a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        TEST_NAME = self._test_database_name()
        TEST_USER = self._test_database_user()
        TEST_PASSWD = self._test_database_passwd()
        TEST_TBLSPACE = self._test_database_tblspace()
        TEST_TBLSPACE_TMP = self._test_database_tblspace_tmp()
        # Restore the production credentials saved by _create_test_db so
        # we reconnect as a user allowed to drop the test objects.
        self.connection.settings_dict['USER'] = self.connection.settings_dict['SAVED_USER']
        self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD']
        parameters = {
            'dbname': TEST_NAME,
            'user': TEST_USER,
            'password': TEST_PASSWD,
            'tblspace': TEST_TBLSPACE,
            'tblspace_temp': TEST_TBLSPACE_TMP,
        }
        cursor = self.connection.cursor()
        time.sleep(1)  # To avoid "database is being accessed by other users" errors.
        if self._test_user_create():
            if verbosity >= 1:
                print('Destroying test user...')
            self._destroy_test_user(cursor, parameters, verbosity)
        if self._test_database_create():
            if verbosity >= 1:
                print('Destroying test database tables...')
            self._execute_test_db_destruction(cursor, parameters, verbosity)
        self.connection.close()
    def _execute_test_db_creation(self, cursor, parameters, verbosity):
        """Create the permanent and temporary test tablespaces."""
        if verbosity >= 2:
            print("_create_test_db(): dbname = %s" % parameters['dbname'])
        statements = [
            """CREATE TABLESPACE %(tblspace)s
            DATAFILE '%(tblspace)s.dbf' SIZE 20M
            REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 300M
            """,
            """CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
            TEMPFILE '%(tblspace_temp)s.dbf' SIZE 20M
            REUSE AUTOEXTEND ON NEXT 10M MAXSIZE 150M
            """,
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)
    def _create_test_user(self, cursor, parameters, verbosity):
        """Create the test user with unlimited quota on the test tablespace."""
        if verbosity >= 2:
            print("_create_test_user(): username = %s" % parameters['user'])
        statements = [
            """CREATE USER %(user)s
            IDENTIFIED BY %(password)s
            DEFAULT TABLESPACE %(tblspace)s
            TEMPORARY TABLESPACE %(tblspace_temp)s
            QUOTA UNLIMITED ON %(tblspace)s
            """,
            """GRANT CONNECT, RESOURCE TO %(user)s""",
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)
    def _execute_test_db_destruction(self, cursor, parameters, verbosity):
        """Drop both test tablespaces, including their datafiles."""
        if verbosity >= 2:
            print("_execute_test_db_destruction(): dbname=%s" % parameters['dbname'])
        statements = [
            'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
            'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)
    def _destroy_test_user(self, cursor, parameters, verbosity):
        """Drop the test user and, via CASCADE, everything it owns."""
        if verbosity >= 2:
            print("_destroy_test_user(): user=%s" % parameters['user'])
            print("Be patient. This can take some time...")
        statements = [
            'DROP USER %(user)s CASCADE',
        ]
        self._execute_statements(cursor, statements, parameters, verbosity)
    def _execute_statements(self, cursor, statements, parameters, verbosity):
        """Interpolate `parameters` into each statement and execute it,
        reporting and re-raising any failure."""
        for template in statements:
            stmt = template % parameters
            if verbosity >= 2:
                print(stmt)
            try:
                cursor.execute(stmt)
            except Exception as err:
                sys.stderr.write("Failed (%s)\n" % (err))
                raise
    def _test_settings_get(self, key, default=None, prefixed=None):
        """
        Return a value from the test settings dict,
        or a given default,
        or a prefixed entry from the main settings dict
        """
        settings_dict = self.connection.settings_dict
        val = settings_dict['TEST'].get(key, default)
        if val is None:
            val = TEST_DATABASE_PREFIX + settings_dict[prefixed]
        return val
    def _test_database_name(self):
        """Name of the test database (TEST['NAME'] or 'test_' + NAME)."""
        return self._test_settings_get('NAME', prefixed='NAME')
    def _test_database_create(self):
        """Whether to create the test tablespaces (TEST['CREATE_DB'])."""
        return self._test_settings_get('CREATE_DB', default=True)
    def _test_user_create(self):
        """Whether to create the test user (TEST['CREATE_USER'])."""
        return self._test_settings_get('CREATE_USER', default=True)
    def _test_database_user(self):
        """Test user name (TEST['USER'] or 'test_' + USER)."""
        return self._test_settings_get('USER', prefixed='USER')
    def _test_database_passwd(self):
        """Test user password (TEST['PASSWORD'] or the module default)."""
        return self._test_settings_get('PASSWORD', default=PASSWORD)
    def _test_database_tblspace(self):
        """Permanent test tablespace name (TEST['TBLSPACE'] or 'test_' + NAME)."""
        return self._test_settings_get('TBLSPACE', prefixed='NAME')
    def _test_database_tblspace_tmp(self):
        """Temporary test tablespace name (TEST['TBLSPACE_TMP'] or 'test_' + NAME + '_temp')."""
        settings_dict = self.connection.settings_dict
        return settings_dict['TEST'].get('TBLSPACE_TMP',
                                         TEST_DATABASE_PREFIX + settings_dict['NAME'] + '_temp')
    def _get_test_db_name(self):
        """
        We need to return the 'production' DB name to get the test DB creation
        machinery to work. This isn't a great deal in this case because DB
        names as handled by Django haven't real counterparts in Oracle.
        """
        return self.connection.settings_dict['NAME']
    def test_db_signature(self):
        """Tuple identifying this test database so equivalent aliases can
        share one test DB."""
        settings_dict = self.connection.settings_dict
        return (
            settings_dict['HOST'],
            settings_dict['PORT'],
            settings_dict['ENGINE'],
            settings_dict['NAME'],
            self._test_database_user(),
        )
|
auferack08/edx-platform
|
refs/heads/master
|
docs/en_us/developers/source/conf.py
|
30
|
# -*- coding: utf-8 -*-
# pylint: disable=C0103
# pylint: disable=W0622
# pylint: disable=W0212
# pylint: disable=W0613
import sys, os
from path import path
# True when building on ReadTheDocs; theming and module mocking differ there.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# Make the repository root importable so docs.shared.conf resolves.
sys.path.append('../../../../')
from docs.shared.conf import *
# Add any paths that contain templates here, relative to this directory.
templates_path.append('source/_templates')
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path.append('source/_static')
if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
root = path('../../../..').abspath()
sys.path.insert(0, root)
# Expose every edx-platform app/lib package to autodoc.
sys.path.append(root / "common/djangoapps")
sys.path.append(root / "common/lib")
sys.path.append(root / "common/lib/capa")
sys.path.append(root / "common/lib/chem")
sys.path.append(root / "common/lib/sandbox-packages")
sys.path.append(root / "common/lib/xmodule")
sys.path.append(root / "common/lib/opaque_keys")
sys.path.append(root / "lms/djangoapps")
sys.path.append(root / "lms/lib")
sys.path.append(root / "cms/djangoapps")
sys.path.append(root / "cms/lib")
sys.path.insert(0, os.path.abspath(os.path.normpath(os.path.dirname(__file__)
                                                    + '/../../../')))
sys.path.append('.')
# django configuration - careful here
if on_rtd:
    os.environ['DJANGO_SETTINGS_MODULE'] = 'lms'
else:
    os.environ['DJANGO_SETTINGS_MODULE'] = 'lms.envs.test'
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
    'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.pngmath',
    'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['build']
# Output file base name for HTML help builder.
htmlhelp_basename = 'edXDocs'
project = u'edX Platform Developer Documentation'
copyright = u'2014, edX'
# --- Mock modules ------------------------------------------------------------
# Mock all the modules that the readthedocs build can't import
class Mock(object):
    """Stand-in for modules that the readthedocs build cannot import.

    Attribute access always yields something usable: the special
    ``__file__``/``__path__`` names resolve to ``'/dev/null'``, capitalised
    names produce a throwaway class (so mocked classes can be subclassed),
    and anything else yields another ``Mock``.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __call__(self, *args, **kwargs):
        return Mock()

    @classmethod
    def __getattr__(cls, name):
        if name in ('__file__', '__path__'):
            # Import machinery probes these module attributes directly.
            return '/dev/null'
        first = name[0]
        if first == first.upper():
            # Capitalised attributes are assumed to be classes: fabricate an
            # empty type so `class X(mocked.Base)` keeps working.
            stub = type(name, (), {})
            stub.__module__ = __name__
            return stub
        return Mock()
# The list of modules and submodules that we know give RTD trouble.
# Make sure you've tried including the relevant package in
# docs/share/requirements.txt before adding to this list.
MOCK_MODULES = [
    'bson',
    'bson.errors',
    'bson.objectid',
    'dateutil',
    'dateutil.parser',
    'fs',
    'fs.errors',
    'fs.osfs',
    'lazy',
    'mako',
    'mako.template',
    'matplotlib',
    'matplotlib.pyplot',
    'mock',
    'numpy',
    'oauthlib',
    'oauthlib.oauth1',
    'oauthlib.oauth1.rfc5849',
    'PIL',
    'pymongo',
    'pyparsing',
    'pysrt',
    'requests',
    'scipy.interpolate',
    'scipy.constants',
    'scipy.optimize',
    'yaml',
    'webob',
    'webob.multidict',
]
# On readthedocs, pre-seed sys.modules with Mock instances so 'import foo'
# for each troublesome package succeeds without the real dependency.
if on_rtd:
    for mod_name in MOCK_MODULES:
        sys.modules[mod_name] = Mock()
# -----------------------------------------------------------------------------
# from http://djangosnippets.org/snippets/2533/
# autogenerate models definitions
import inspect
import types
from HTMLParser import HTMLParser
def force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):
    """Coerce *s* to a unicode string (Python 2 semantics).

    Similar to smart_unicode, except that lazy instances are resolved to
    strings rather than kept as lazy objects.  When ``strings_only`` is
    true, ``None`` and ints are passed through untouched.
    """
    if strings_only and isinstance(s, (types.NoneType, int)):
        # Caller asked to leave non-string primitives alone.
        return s
    if isinstance(s, unicode):
        return s
    if isinstance(s, basestring):
        # A plain byte string: decode it with the requested codec.
        return unicode(s, encoding, errors)
    if hasattr(s, '__unicode__'):
        # Objects (including lazy translations) that know how to become
        # unicode directly.
        return unicode(s)
    return unicode(str(s), encoding, errors)
class MLStripper(HTMLParser):
    """HTMLParser subclass that drops markup and accumulates only text data.

    Feed it HTML via ``feed()`` and collect the plain text via ``get_data()``.
    """

    def __init__(self):
        self.reset()
        # Text chunks collected by handle_data(), joined on demand.
        self.fed = []

    def handle_data(self, d):
        self.fed.append(d)

    def get_data(self):
        return ''.join(self.fed)
def strip_tags(html):
    """Return *html* with all markup removed, leaving only the text data."""
    stripper = MLStripper()
    stripper.feed(html)
    return stripper.get_data()
def process_docstring(app, what, name, obj, options, lines):
    """Autodoc django models

    Sphinx ``autodoc-process-docstring`` hook: when *obj* is a Django model
    class, append a ``:param:``/``:type:`` pair for every model field to the
    docstring *lines*, using the field's help_text (or, failing that, its
    capitalised verbose_name) as the description.
    """
    # This causes import errors if left outside the function
    from django.db import models
    # If you want extract docs from django forms:
    # from django import forms
    # from django.forms.models import BaseInlineFormSet
    # Only look at objects that inherit from Django's base MODEL class
    if inspect.isclass(obj) and issubclass(obj, models.Model):
        # Grab the field list from the meta class
        # NOTE(review): _meta._fields() is a private Django API — confirm it
        # still exists in the Django version this documentation builds against.
        fields = obj._meta._fields()
        for field in fields:
            # Decode and strip any html out of the field's help text
            help_text = strip_tags(force_unicode(field.help_text))
            # Decode and capitalize the verbose name, for use if there isn't
            # any help text
            verbose_name = force_unicode(field.verbose_name).capitalize()
            if help_text:
                # Add the model field to the end of the docstring as a param
                # using the help text as the description
                lines.append(u':param %s: %s' % (field.attname, help_text))
            else:
                # Add the model field to the end of the docstring as a param
                # using the verbose name as the description
                lines.append(u':param %s: %s' % (field.attname, verbose_name))
            # Add the field's type to the docstring
            lines.append(u':type %s: %s' % (field.attname, type(field).__name__))
    return lines
def setup(app):
    """Setup docsting processors

    Sphinx extension entry point: wire our docstring hooks into the build.
    """
    # Run process_docstring over every autodoc-generated docstring so Django
    # model fields get documented automatically.
    app.connect('autodoc-process-docstring', process_docstring)
|
hevel/goflow
|
refs/heads/master
|
sampleproject/manage.py
|
79
|
#!/usr/bin/env python
# Legacy (Django <= 1.3 style) management script: delegates every command to
# django.core.management.execute_manager with the project settings module.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    # Fail fast with a helpful hint when the settings module cannot be found
    # (or itself raises ImportError).
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
|
nadley/Sick-Beard
|
refs/heads/development
|
lib/hachoir_core/log.py
|
90
|
import os, sys, time
import lib.hachoir_core.config as config
from lib.hachoir_core.i18n import _
class Log:
    """Central hachoir logger.

    Supports optional echo to stderr (use_print), in-memory buffering per
    level (use_buffer), UTF-8 log-file output (setFilename) and a
    user-supplied callback (on_new_message).
    """
    # Severity levels, ordered by increasing importance.
    LOG_INFO = 0
    LOG_WARN = 1
    LOG_ERROR = 2
    # Human-readable prefix written before each message.
    level_name = {
        LOG_WARN: "[warn]",
        LOG_ERROR: "[err!]",
        LOG_INFO: "[info]"
    }
    def __init__(self):
        self.__buffer = {}       # level -> list of buffered message strings
        self.__file = None       # codecs file object, set by setFilename()
        self.use_print = True    # echo messages to stderr
        self.use_buffer = False  # keep messages in self.__buffer
        self.on_new_message = None # Prototype: def func(level, prefix, text, context)
    def shutdown(self):
        # Write a final marker into the log file, if one is open.
        if self.__file:
            self._writeIntoFile(_("Stop Hachoir"))
    def setFilename(self, filename, append=True):
        """
        Use a file to store all messages. The
        UTF-8 encoding will be used. Write an informative
        message if the file can't be created.

        @param filename: C{L{string}}
        """
        # Look if file already exists or not
        filename = os.path.expanduser(filename)
        filename = os.path.realpath(filename)
        # NOTE(review): the caller's 'append' argument is unconditionally
        # overwritten here — the file is appended to iff it already exists.
        append = os.access(filename, os.F_OK)
        # Create log file (or open it in append mode, if it already exists)
        try:
            import codecs
            if append:
                self.__file = codecs.open(filename, "a", "utf-8")
            else:
                self.__file = codecs.open(filename, "w", "utf-8")
            self._writeIntoFile(_("Starting Hachoir"))
        except IOError, err:
            if err.errno == 2:
                # errno 2 (ENOENT): parent directory does not exist; disable
                # file logging instead of crashing.
                self.__file = None
                self.info(_("[Log] setFilename(%s) fails: no such file") % filename)
            else:
                raise
    def _writeIntoFile(self, message):
        # Timestamped write; flush immediately so messages survive a crash.
        timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
        self.__file.write(u"%s - %s\n" % (timestamp, message))
        self.__file.flush()
    def newMessage(self, level, text, ctxt=None):
        """
        Write a new message : append it in the buffer,
        display it to the screen (if needed), and write
        it in the log file (if needed).

        @param level: Message level.
        @type level: C{int}
        @param text: Message content.
        @type text: C{str}
        @param ctxt: The caller instance.
        """
        # Drop messages below the configured verbosity thresholds: quiet mode
        # only lets errors through, and info needs verbose mode.
        if level < self.LOG_ERROR and config.quiet or \
           level <= self.LOG_INFO and not config.verbose:
            return
        if config.debug:
            # Debug mode: attach a backtrace to every message.
            from lib.hachoir_core.error import getBacktrace
            backtrace = getBacktrace(None)
            if backtrace:
                text += "\n\n" + backtrace
        _text = text  # keep the unprefixed copy for the callback below
        if hasattr(ctxt, "_logger"):
            # Prefix the message with the caller's context string (see Logger).
            _ctxt = ctxt._logger()
            if _ctxt is not None:
                text = "[%s] %s" % (_ctxt, text)
        # Add message to log buffer
        if self.use_buffer:
            if not self.__buffer.has_key(level):
                self.__buffer[level] = [text]
            else:
                self.__buffer[level].append(text)
        # Add prefix
        prefix = self.level_name.get(level, "[info]")
        # Display on stdout (if used)
        if self.use_print:
            # Flush stdout first so interleaved output keeps its order.
            sys.stdout.flush()
            sys.stderr.write("%s %s\n" % (prefix, text))
            sys.stderr.flush()
        # Write into outfile (if used)
        if self.__file:
            self._writeIntoFile("%s %s" % (prefix, text))
        # Use callback (if used)
        if self.on_new_message:
            self.on_new_message (level, prefix, _text, ctxt)
    def info(self, text):
        """
        New informative message.

        @type text: C{str}
        """
        self.newMessage(Log.LOG_INFO, text)
    def warning(self, text):
        """
        New warning message.

        @type text: C{str}
        """
        self.newMessage(Log.LOG_WARN, text)
    def error(self, text):
        """
        New error message.

        @type text: C{str}
        """
        self.newMessage(Log.LOG_ERROR, text)
log = Log()
class Logger(object):
    """Mixin giving subclasses info()/warning()/error() methods that report
    through the module-level ``log`` singleton, tagged with the class name."""

    def _logger(self):
        # Context label that Log.newMessage() prepends to each message.
        return "<%s>" % self.__class__.__name__

    def _send(self, level, text):
        # Route every message through the shared Log instance, passing
        # ourselves as context so _logger() is consulted for the prefix.
        log.newMessage(level, text, self)

    def info(self, text):
        self._send(Log.LOG_INFO, text)

    def warning(self, text):
        self._send(Log.LOG_WARN, text)

    def error(self, text):
        self._send(Log.LOG_ERROR, text)
|
Venturi/cms
|
refs/heads/master
|
env/lib/python2.7/site-packages/django/contrib/flatpages/views.py
|
475
|
from django.conf import settings
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.shortcuts import get_current_site
from django.http import Http404, HttpResponse, HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.template import loader
from django.utils.safestring import mark_safe
from django.views.decorators.csrf import csrf_protect
DEFAULT_TEMPLATE = 'flatpages/default.html'
# This view is called from FlatpageFallbackMiddleware.process_response
# when a 404 is raised, which often means CsrfViewMiddleware.process_view
# has not been called even if CsrfViewMiddleware is installed. So we need
# to use @csrf_protect, in case the template needs {% csrf_token %}.
# However, we can't just wrap this view; if no matching flatpage exists,
# or a redirect is required for authentication, the 404 needs to be returned
# without any CSRF checks. Therefore, we only
# CSRF protect the internal implementation.
def flatpage(request, url):
    """
    Public interface to the flat page view.

    Models: `flatpages.flatpages`
    Templates: Uses the template defined by the ``template_name`` field,
        or :template:`flatpages/default.html` if template_name is not defined.
    Context:
        flatpage
            `flatpages.flatpages` object
    """
    # Flatpage URLs are stored with a leading slash; normalise the input.
    if not url.startswith('/'):
        url = '/' + url
    site_id = get_current_site(request).id
    try:
        f = get_object_or_404(FlatPage,
            url=url, sites=site_id)
    except Http404:
        # No exact match: if APPEND_SLASH is enabled, check whether the page
        # exists with a trailing slash and, if so, permanently redirect to it
        # (the retry re-raises Http404 when that page is missing too).
        if not url.endswith('/') and settings.APPEND_SLASH:
            url += '/'
            f = get_object_or_404(FlatPage,
                url=url, sites=site_id)
            return HttpResponsePermanentRedirect('%s/' % request.path)
        else:
            raise
    return render_flatpage(request, f)
@csrf_protect
def render_flatpage(request, f):
    """
    Internal interface to the flat page view.

    CSRF-protected so flatpage templates may use {% csrf_token %}; the
    public ``flatpage`` view delegates here once the page is resolved.
    """
    # If registration is required for accessing this page, and the user isn't
    # logged in, redirect to the login page.
    if f.registration_required and not request.user.is_authenticated():
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(request.path)
    # Use the page's own template when one is configured, falling back to
    # the app-wide default template.
    if f.template_name:
        template = loader.select_template((f.template_name, DEFAULT_TEMPLATE))
    else:
        template = loader.get_template(DEFAULT_TEMPLATE)
    # To avoid having to always use the "|safe" filter in flatpage templates,
    # mark the title and content as already safe (since they are raw HTML
    # content in the first place).
    f.title = mark_safe(f.title)
    f.content = mark_safe(f.content)
    response = HttpResponse(template.render({'flatpage': f}, request))
    return response
|
40223119/2015cda
|
refs/heads/master
|
static/Brython3.1.0-20150301-090019/Lib/site-packages/pygame/color.py
|
603
|
## pygame - Python Game Library
## Copyright (C) 2000-2003 Pete Shinners
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Library General Public
## License as published by the Free Software Foundation; either
## version 2 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Library General Public License for more details.
##
## You should have received a copy of the GNU Library General Public
## License along with this library; if not, write to the Free
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Pete Shinners
## pete@shinners.org
"""Manipulate colors"""
try:
from colordict import THECOLORS
except ImportError:
#the colordict module isn't available
THECOLORS = {}
def Color(colorname):
    """pygame.color.Color(colorname) -> RGBA

    Resolve a colour specification to an (r, g, b, a) tuple.

    Accepts either a web-style hex string ("0xRRGGBB" or "#RRGGBB",
    optionally followed by an AA alpha pair) or an English colour name
    from the standard X11 'rgb' table.  Names are case-insensitive and
    embedded whitespace is ignored.  Raises ValueError for anything
    that cannot be parsed.
    """
    if colorname[:2] == '0x' or colorname[0] == '#':
        # Web-style spec: strip the prefix, leaving only hex digits.
        digits = colorname[2:] if colorname[:2] == '0x' else colorname[1:]
        alpha = 255
        try:
            red = int('0x' + digits[0:2], 16)
            green = int('0x' + digits[2:4], 16)
            blue = int('0x' + digits[4:6], 16)
            if len(digits) > 6:
                # Optional trailing AA alpha component.
                alpha = int('0x' + digits[6:8], 16)
        except ValueError:
            raise ValueError("Illegal hex color")
        return red, green, blue, alpha
    # Named colour: normalise by dropping spaces and lowercasing before
    # consulting the X11 colour table.
    key = colorname.replace(' ', '').lower()
    try:
        return THECOLORS[key]
    except KeyError:
        raise ValueError("Illegal color name, " + key)
def _splitcolor(color, defaultalpha=255):
try:
second = int(color)
r = g = b = color
a = defaultalpha
except TypeError:
if len(color) == 4:
r, g, b, a = color
elif len(color) == 3:
r, g, b = color
a = defaultalpha
return r, g, b, a
def add(color1, color2):
    """pygame.color.add(color1, color2) -> RGBA

    Componentwise sum of two colours, with each channel clamped to 255.
    A single numeric argument is broadcast across the RGB components of
    the other colour.
    """
    first = _splitcolor(color1)
    second = _splitcolor(color2)
    # Sum channel pairs and clamp each result at the 255 ceiling.
    return tuple(min(int(x + y), 255) for x, y in zip(first, second))
def subtract(color1, color2):
    """pygame.color.subtract(color1, color2) -> RGBA

    Componentwise difference of two colours, with each channel clamped
    at 0.  A single numeric argument is broadcast across the RGB
    components; note the subtrahend's default alpha is 0, so alpha is
    preserved unless explicitly given.
    """
    first = _splitcolor(color1)
    second = _splitcolor(color2, 0)
    # Subtract channel pairs and clamp each result at the 0 floor.
    return tuple(max(int(x - y), 0) for x, y in zip(first, second))
def multiply(color1, color2):
    """pygame.color.multiply(color1, color2) -> RGBA

    multiply two colors

    Multiply the RGB values of two colors together. If one of the
    colors is only a single numeric value, it is applied to the
    RGB components of the first color.
    """
    r1, g1, b1, a1 = _splitcolor(color1)
    r2, g2, b2, a2 = _splitcolor(color2)
    m, i = min, int
    # NOTE(review): written for Python 2, where '/' on ints is integer
    # division; under Python 3 these channels would become floats.
    return m(i(r1*r2)/255, 255), m(i(g1*g2)/255, 255), m(i(b1*b2)/255, 255), m(i(a1*a2)/255, 255)
|
cjh1/AutobahnPython
|
refs/heads/master
|
autobahn/autobahn/resource.py
|
9
|
###############################################################################
##
## Copyright 2012 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
__all__ = ("WebSocketResource","HTTPChannelHixie76Aware",)
from zope.interface import implements
from twisted.python import log
from twisted.protocols.policies import ProtocolWrapper
try:
from twisted.web.error import NoResource
except:
## starting from Twisted 12.2, NoResource has moved
from twisted.web.resource import NoResource
from twisted.web.error import UnsupportedMethod
from twisted.web.resource import IResource, Resource
from twisted.web.server import NOT_DONE_YET
from twisted.web.http import HTTPChannel
from websocket import WebSocketServerFactory, WebSocketServerProtocol
class HTTPChannelHixie76Aware(HTTPChannel):
    """
    Hixie-76 is deadly broken. It includes 8 bytes of body, but then does not
    set content-length header. This hacked HTTPChannel injects the missing
    HTTP header upon detecting Hixie-76. We need this since otherwise
    Twisted Web will silently ignore the body.

    To use this, set twisted.web.server.Site.protocol = HTTPChannelHixie76Aware
    """
    def headerReceived(self, line):
        # Detect the Hixie-76 handshake by its Sec-WebSocket-Key1 header and,
        # unless a body decoder was already set up, inject the Content-Length
        # header the client should have sent before forwarding the real line.
        header = line.split(':')[0].lower()
        if header == "sec-websocket-key1" and not self._transferDecoder:
            HTTPChannel.headerReceived(self, "Content-Length: 8")
        HTTPChannel.headerReceived(self, line)
class WSGIRootResource(Resource):
    """
    Root resource when you want a WSGI resource be the default serving
    resource for a Site, but have subpaths served by different resources.

    This is a hack needed since WSGIResource does not provide putChild().

    See also:
       http://blog.vrplumber.com/index.php?/archives/2426-Making-your-Twisted-resources-a-url-sub-tree-of-your-WSGI-resource....html
    """
    def __init__(self, wsgiResource, children):
        # :param wsgiResource: resource serving every path not in 'children'
        # :param children: dict mapping path segment -> dedicated resource
        Resource.__init__(self)
        self._wsgiResource = wsgiResource
        self.children = children
    def getChild(self, path, request):
        # Undo Twisted's path segmentation so the WSGI resource sees the
        # original full path, then delegate the request to it.
        request.prepath.pop()
        request.postpath.insert(0, path)
        return self._wsgiResource
class WebSocketResource(object):
    """
    A Twisted Web resource for WebSocket. This resource needs to be instantiated
    with a factory derived from WebSocketServerFactory.
    """
    implements(IResource)
    # Leaf resource: every sub-path is routed here rather than to children.
    isLeaf = True
    def __init__(self, factory):
        """
        Ctor.

        :param factory: An instance of WebSocketServerFactory.
        :type factory: obj
        """
        self._factory = factory
    def getChildWithDefault(self, name, request):
        """
        This resource cannot have children, hence this will always fail.
        """
        return NoResource("No such child resource.")
    def putChild(self, path, child):
        """
        This resource cannot have children, hence this is always ignored.
        """
        pass
    def render(self, request):
        """
        Render the resource. This will takeover the transport underlying
        the request, create a WebSocketServerProtocol and let that do
        any subsequent communication.
        """
        ## Create Autobahn WebSocket protocol.
        ##
        protocol = self._factory.buildProtocol(request.transport.getPeer())
        if not protocol:
            ## If protocol creation fails, we signal "internal server error"
            request.setResponseCode(500)
            return ""
        ## Take over the transport from Twisted Web
        ##
        transport, request.transport = request.transport, None
        ## Connect the transport to our protocol. Once #3204 is fixed, there
        ## may be a cleaner way of doing this.
        ## http://twistedmatrix.com/trac/ticket/3204
        ##
        if isinstance(transport, ProtocolWrapper):
            ## i.e. TLS is a wrapping protocol
            transport.wrappedProtocol = protocol
        else:
            transport.protocol = protocol
        protocol.makeConnection(transport)
        ## We recreate the request and forward the raw data. This is somewhat
        ## silly (since Twisted Web already did the HTTP request parsing
        ## which we will do a 2nd time), but it's totally non-invasive to our
        ## code. Maybe improve this.
        ##
        data = "%s %s HTTP/1.1\x0d\x0a" % (request.method, request.path)
        for h in request.requestHeaders.getAllRawHeaders():
            data += "%s: %s\x0d\x0a" % (h[0], ",".join(h[1]))
        data += "\x0d\x0a"
        data += request.content.read() # we need this for Hixie-76
        ## Replay the reconstructed handshake into the WebSocket protocol.
        protocol.dataReceived(data)
        return NOT_DONE_YET
|
grilo/ansible-1
|
refs/heads/devel
|
lib/ansible/modules/network/f5/bigip_gtm_facts.py
|
16
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2016 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigip_gtm_facts
short_description: Collect facts from F5 BIG-IP GTM devices.
description:
- Collect facts from F5 BIG-IP GTM devices.
version_added: "2.3"
options:
include:
description:
- Fact category to collect
required: true
choices:
- pool
- wide_ip
- virtual_server
filter:
description:
- Perform regex filter of response. Filtering is done on the name of
the resource. Valid filters are anything that can be provided to
Python's C(re) module.
required: false
default: None
notes:
- Requires the f5-sdk Python package on the host. This is as easy as
pip install f5-sdk
extends_documentation_fragment: f5
requirements:
- f5-sdk
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Get pool facts
bigip_gtm_facts:
server: "lb.mydomain.com"
user: "admin"
password: "secret"
include: "pool"
filter: "my_pool"
delegate_to: localhost
'''
RETURN = '''
wide_ip:
description:
Contains the lb method for the wide ip and the pools
that are within the wide ip.
returned: changed
type: dict
sample:
wide_ip:
- enabled: "True"
failure_rcode: "noerror"
failure_rcode_response: "disabled"
failure_rcode_ttl: "0"
full_path: "/Common/foo.ok.com"
last_resort_pool: ""
minimal_response: "enabled"
name: "foo.ok.com"
partition: "Common"
persist_cidr_ipv4: "32"
persist_cidr_ipv6: "128"
persistence: "disabled"
pool_lb_mode: "round-robin"
pools:
- name: "d3qw"
order: "0"
partition: "Common"
ratio: "1"
ttl_persistence: "3600"
type: "naptr"
pool:
description: Contains the pool object status and enabled status.
returned: changed
type: dict
sample:
pool:
- alternate_mode: "round-robin"
dynamic_ratio: "disabled"
enabled: "True"
fallback_mode: "return-to-dns"
full_path: "/Common/d3qw"
load_balancing_mode: "round-robin"
manual_resume: "disabled"
max_answers_returned: "1"
members:
- disabled: "True"
flags: "a"
full_path: "ok3.com"
member_order: "0"
name: "ok3.com"
order: "10"
preference: "10"
ratio: "1"
service: "80"
name: "d3qw"
partition: "Common"
qos_hit_ratio: "5"
qos_hops: "0"
qos_kilobytes_second: "3"
qos_lcs: "30"
qos_packet_rate: "1"
qos_rtt: "50"
qos_topology: "0"
qos_vs_capacity: "0"
qos_vs_score: "0"
ttl: "30"
type: "naptr"
verify_member_availability: "disabled"
virtual_server:
description:
Contains the virtual server enabled and availability
status, and address
returned: changed
type: dict
sample:
virtual_server:
- addresses:
- device_name: "/Common/qweqwe"
name: "10.10.10.10"
translation: "none"
datacenter: "/Common/xfxgh"
enabled: "True"
expose_route_domains: "no"
full_path: "/Common/qweqwe"
iq_allow_path: "yes"
iq_allow_service_check: "yes"
iq_allow_snmp: "yes"
limit_cpu_usage: "0"
limit_cpu_usage_status: "disabled"
limit_max_bps: "0"
limit_max_bps_status: "disabled"
limit_max_connections: "0"
limit_max_connections_status: "disabled"
limit_max_pps: "0"
limit_max_pps_status: "disabled"
limit_mem_avail: "0"
limit_mem_avail_status: "disabled"
link_discovery: "disabled"
monitor: "/Common/bigip "
name: "qweqwe"
partition: "Common"
product: "single-bigip"
virtual_server_discovery: "disabled"
virtual_servers:
- destination: "10.10.10.10:0"
enabled: "True"
full_path: "jsdfhsd"
limit_max_bps: "0"
limit_max_bps_status: "disabled"
limit_max_connections: "0"
limit_max_connections_status: "disabled"
limit_max_pps: "0"
limit_max_pps_status: "disabled"
name: "jsdfhsd"
translation_address: "none"
translation_port: "0"
'''
try:
from distutils.version import LooseVersion
from f5.bigip.contexts import TransactionContextManager
from f5.bigip import ManagementRoot
from icontrol.session import iControlUnexpectedHTTPError
HAS_F5SDK = True
except ImportError:
HAS_F5SDK = False
import re
class BigIpGtmFactsCommon(object):
    """Shared helpers for the GTM fact collectors (pools, wide IPs,
    virtual servers).

    Subclasses are expected to set ``self.params`` (the module parameters)
    and to provide a ``format_facts(item, collection_type)`` method.
    """
    def __init__(self):
        self.api = None  # ManagementRoot connection, established by subclasses
        # REST bookkeeping attributes that should not appear in the facts.
        self.attributes_to_remove = [
            'kind', 'generation', 'selfLink', '_meta_data',
            'membersReference', 'datacenterReference',
            'virtualServersReference', 'nameReference'
        ]
        # Typed collection attribute name -> fact 'type' value (TMOS >= 12).
        self.gtm_types = dict(
            a_s='a',
            aaaas='aaaa',
            cnames='cname',
            mxs='mx',
            naptrs='naptr',
            srvs='srv'
        )
        # Ask the REST API to inline subcollections (members, etc.).
        self.request_params = dict(
            params='expandSubcollections=true'
        )
    def is_version_less_than_12(self):
        # TMOS < 12 exposes untyped pool/wideip collections; newer versions
        # split them by DNS record type.
        version = self.api.tmos_version
        if LooseVersion(version) < LooseVersion('12.0.0'):
            return True
        else:
            return False
    def format_string_facts(self, parameters):
        # Drop bookkeeping keys, then stringify every remaining value.
        result = dict()
        for attribute in self.attributes_to_remove:
            parameters.pop(attribute, None)
        for key, val in parameters.items():
            result[key] = str(val)
        return result
    def filter_matches_name(self, name):
        # An empty/None 'filter' parameter matches everything; otherwise the
        # user-supplied regex is matched against the resource name.
        if not self.params['filter']:
            return True
        matches = re.match(self.params['filter'], str(name))
        if matches:
            return True
        else:
            return False
    def get_facts_from_collection(self, collection, collection_type=None):
        # Format every item whose name passes the filter; format_facts() is
        # supplied by the concrete subclass.
        results = []
        for item in collection:
            if not self.filter_matches_name(item.name):
                continue
            facts = self.format_facts(item, collection_type)
            results.append(facts)
        return results
    def connect_to_bigip(self, **kwargs):
        # Open an iControl REST session against the BIG-IP device.
        return ManagementRoot(kwargs['server'],
                              kwargs['user'],
                              kwargs['password'],
                              port=kwargs['server_port'])
class BigIpGtmFactsPools(BigIpGtmFactsCommon):
    """Collects GTM pool facts, handling both pre- and post-12.0 TMOS."""
    def __init__(self, *args, **kwargs):
        super(BigIpGtmFactsPools, self).__init__()
        self.params = kwargs  # module parameters (server, user, filter, ...)
    def get_facts(self):
        self.api = self.connect_to_bigip(**self.params)
        return self.get_facts_from_device()
    def get_facts_from_device(self):
        # TMOS < 12 has a single untyped pool collection; newer versions
        # split pools by DNS record type (a, aaaa, cname, ...).
        try:
            if self.is_version_less_than_12():
                return self.get_facts_without_types()
            else:
                return self.get_facts_with_types()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
    def get_facts_with_types(self):
        # Gather facts for every typed collection, skipping empty ones.
        result = []
        for key, type in self.gtm_types.items():
            facts = self.get_all_facts_by_type(key, type)
            if facts:
                result.append(facts)
        return result
    def get_facts_without_types(self):
        pools = self.api.tm.gtm.pools.get_collection(**self.request_params)
        return self.get_facts_from_collection(pools)
    def get_all_facts_by_type(self, key, type):
        collection = getattr(self.api.tm.gtm.pools, key)
        pools = collection.get_collection(**self.request_params)
        return self.get_facts_from_collection(pools, type)
    def format_facts(self, pool, collection_type):
        # Merge the pool's scalar attributes with its member list, then
        # convert camelCase keys to snake_case for Ansible consumers.
        result = dict()
        pool_dict = pool.to_dict()
        result.update(self.format_string_facts(pool_dict))
        result.update(self.format_member_facts(pool))
        if collection_type:
            result['type'] = collection_type
        return camel_dict_to_snake_dict(result)
    def format_member_facts(self, pool):
        # Members arrive inlined via expandSubcollections; absent 'items'
        # means the pool has no members.
        result = []
        if not 'items' in pool.membersReference:
            return dict(members=[])
        for member in pool.membersReference['items']:
            member_facts = self.format_string_facts(member)
            result.append(member_facts)
        return dict(members=result)
class BigIpGtmFactsWideIps(BigIpGtmFactsCommon):
    """Collects GTM wide-IP facts, handling both pre- and post-12.0 TMOS."""
    def __init__(self, *args, **kwargs):
        super(BigIpGtmFactsWideIps, self).__init__()
        self.params = kwargs  # module parameters (server, user, filter, ...)
    def get_facts(self):
        self.api = self.connect_to_bigip(**self.params)
        return self.get_facts_from_device()
    def get_facts_from_device(self):
        # TMOS < 12 has a single untyped wideip collection; newer versions
        # split wide IPs by DNS record type.
        try:
            if self.is_version_less_than_12():
                return self.get_facts_without_types()
            else:
                return self.get_facts_with_types()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
    def get_facts_with_types(self):
        # Gather facts for every typed collection, skipping empty ones.
        result = []
        for key, type in self.gtm_types.items():
            facts = self.get_all_facts_by_type(key, type)
            if facts:
                result.append(facts)
        return result
    def get_facts_without_types(self):
        wideips = self.api.tm.gtm.wideips.get_collection(
            **self.request_params
        )
        return self.get_facts_from_collection(wideips)
    def get_all_facts_by_type(self, key, type):
        collection = getattr(self.api.tm.gtm.wideips, key)
        wideips = collection.get_collection(**self.request_params)
        return self.get_facts_from_collection(wideips, type)
    def format_facts(self, wideip, collection_type):
        # Merge the wide IP's scalar attributes with its pool list, then
        # convert camelCase keys to snake_case for Ansible consumers.
        result = dict()
        wideip_dict = wideip.to_dict()
        result.update(self.format_string_facts(wideip_dict))
        result.update(self.format_pool_facts(wideip))
        if collection_type:
            result['type'] = collection_type
        return camel_dict_to_snake_dict(result)
    def format_pool_facts(self, wideip):
        # A wide IP with no attached pools simply lacks the attribute.
        result = []
        if not hasattr(wideip, 'pools'):
            return dict(pools=[])
        for pool in wideip.pools:
            pool_facts = self.format_string_facts(pool)
            result.append(pool_facts)
        return dict(pools=result)
class BigIpGtmFactsVirtualServers(BigIpGtmFactsCommon):
    """Collects facts for GTM servers, their addresses and virtual servers."""
    def __init__(self, *args, **kwargs):
        super(BigIpGtmFactsVirtualServers, self).__init__()
        self.params = kwargs  # module parameters (server, user, filter, ...)
    def get_facts(self):
        try:
            self.api = self.connect_to_bigip(**self.params)
            return self.get_facts_from_device()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
    def get_facts_from_device(self):
        servers = self.api.tm.gtm.servers.get_collection(
            **self.request_params
        )
        return self.get_facts_from_collection(servers)
    def format_facts(self, server, collection_type=None):
        # Merge scalar attributes with the address and virtual-server lists;
        # keys are converted to snake_case for Ansible consumers.
        result = dict()
        server_dict = server.to_dict()
        result.update(self.format_string_facts(server_dict))
        result.update(self.format_address_facts(server))
        result.update(self.format_virtual_server_facts(server))
        return camel_dict_to_snake_dict(result)
    def format_address_facts(self, server):
        result = []
        if not hasattr(server, 'addresses'):
            return dict(addresses=[])
        for address in server.addresses:
            address_facts = self.format_string_facts(address)
            result.append(address_facts)
        return dict(addresses=result)
    def format_virtual_server_facts(self, server):
        # Virtual servers arrive inlined via expandSubcollections.
        result = []
        if not 'items' in server.virtualServersReference:
            return dict(virtual_servers=[])
        # NOTE(review): the loop variable shadows the 'server' parameter;
        # harmless here because the parameter is not used afterwards.
        for server in server.virtualServersReference['items']:
            server_facts = self.format_string_facts(server)
            result.append(server_facts)
        return dict(virtual_servers=result)
class BigIpGtmFactsManager(object):
    """Dispatches fact collection to the per-resource collector classes
    according to the module's 'include' parameter."""

    def __init__(self, *args, **kwargs):
        self.params = kwargs  # module parameters, forwarded to collectors
        self.api = None

    def get_facts(self):
        """Collect every requested fact category and flag the run changed."""
        requested = self.params['include']
        facts = {}
        if 'pool' in requested:
            facts['pool'] = self.get_pool_facts()
        if 'wide_ip' in requested:
            facts['wide_ip'] = self.get_wide_ip_facts()
        if 'virtual_server' in requested:
            facts['virtual_server'] = self.get_virtual_server_facts()
        result = dict(facts)
        # Fact modules in this style always report changed=True.
        result['changed'] = True
        return result

    def get_pool_facts(self):
        return BigIpGtmFactsPools(**self.params).get_facts()

    def get_wide_ip_facts(self):
        return BigIpGtmFactsWideIps(**self.params).get_facts()

    def get_virtual_server_facts(self):
        return BigIpGtmFactsVirtualServers(**self.params).get_facts()
class BigIpGtmFactsModuleConfig(object):
    """Assembles the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.argument_spec = dict()
        self.meta_args = dict()
        self.supports_check_mode = False
        # Fact categories a user may request via the 'include' option.
        self.valid_includes = ['pool', 'wide_ip', 'virtual_server']
        self.initialize_meta_args()
        self.initialize_argument_spec()

    def initialize_meta_args(self):
        # Module-specific options layered on top of the common F5 spec.
        self.meta_args = dict(
            include=dict(type='list', required=True),
            filter=dict(type='str', required=False),
        )

    def initialize_argument_spec(self):
        spec = f5_argument_spec()
        spec.update(self.meta_args)
        self.argument_spec = spec

    def create(self):
        """Instantiate the AnsibleModule from the assembled spec."""
        return AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=self.supports_check_mode
        )
def main():
    """Module entry point: build the AnsibleModule, collect the requested
    GTM facts and exit with the result (or a failure message)."""
    if not HAS_F5SDK:
        raise F5ModuleError("The python f5-sdk module is required")
    config = BigIpGtmFactsModuleConfig()
    module = config.create()
    try:
        obj = BigIpGtmFactsManager(
            check_mode=module.check_mode, **module.params
        )
        result = obj.get_facts()
        module.exit_json(**result)
    except F5ModuleError as e:
        module.fail_json(msg=str(e))
# Imports live at the bottom per the older Ansible module convention; the
# wildcard imports supply AnsibleModule, F5ModuleError and f5_argument_spec.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import camel_dict_to_snake_dict
from ansible.module_utils.f5_utils import *
if __name__ == '__main__':
    main()
|
open-synergy/account-payment
|
refs/heads/8.0
|
account_payment_return_import/wizard/__init__.py
|
4
|
# -*- coding: utf-8 -*-
from . import payment_return_import
|
wxgeo/geophar
|
refs/heads/master
|
wxgeometrie/sympy/core/symbol.py
|
4
|
from __future__ import print_function, division
from sympy.core.assumptions import StdFactKB
from sympy.core.compatibility import (string_types, range, is_sequence,
ordered)
from .basic import Basic
from .sympify import sympify
from .singleton import S
from .expr import Expr, AtomicExpr
from .cache import cacheit
from .function import FunctionClass
from sympy.core.logic import fuzzy_bool
from sympy.logic.boolalg import Boolean
from sympy.utilities.iterables import cartes
from sympy.core.containers import Tuple
import string
import re as _re
import random
def _symbol(s, matching_symbol=None, **assumptions):
    """Return s if s is a Symbol, else if s is a string, return either
    the matching_symbol if the names are the same or else a new symbol
    with the same assumptions as the matching symbol (or the
    assumptions as provided).

    Examples
    ========

    >>> from sympy import Symbol, Dummy
    >>> from sympy.core.symbol import _symbol
    >>> _symbol('y')
    y
    >>> _.is_real is None
    True
    >>> _symbol('y', real=True).is_real
    True

    >>> x = Symbol('x')
    >>> _symbol(x, real=True)
    x
    >>> _.is_real is None  # ignore attribute if s is a Symbol
    True

    Below, the variable sym has the name 'foo':

    >>> sym = Symbol('foo', real=True)

    Since 'x' is not the same as sym's name, a new symbol is created:

    >>> _symbol('x', sym).name
    'x'

    It will acquire any assumptions give:

    >>> _symbol('x', sym, real=False).is_real
    False

    Since 'foo' is the same as sym's name, sym is returned

    >>> _symbol('foo', sym)
    foo

    Any assumptions given are ignored:

    >>> _symbol('foo', sym, real=False).is_real
    True

    NB: the symbol here may not be the same as a symbol with the same
    name defined elsewhere as a result of different assumptions.

    See Also
    ========

    sympy.core.symbol.Symbol

    """
    # A Symbol passes through untouched; any provided assumptions are ignored.
    if isinstance(s, Symbol):
        return s
    if not isinstance(s, string_types):
        raise ValueError('symbol must be string for symbol name or Symbol')
    # Reuse the matching symbol when the names agree; otherwise build a new
    # Symbol carrying the caller-supplied assumptions.
    if matching_symbol and matching_symbol.name == s:
        return matching_symbol
    return Symbol(s, **assumptions)
def _uniquely_named_symbol(xname, exprs=(), compare=str, modify=None, **assumptions):
    """Return a symbol which, when printed, will have a name unique
    from any other already in the expressions given. The name is made
    unique by prepending underscores (default) but this can be
    customized with the keyword 'modify'.

    Parameters
    ==========

    xname : a string or a Symbol (when symbol xname <- str(xname))

    compare : a single arg function that takes a symbol and returns
        a string to be compared with xname (the default is the str
        function which indicates how the name will look when it
        is printed, e.g. this includes underscores that appear on
        Dummy symbols)

    modify : a single arg function that changes its string argument
        in some way (the default is to preppend underscores)

    Examples
    ========

    >>> from sympy.core.symbol import _uniquely_named_symbol as usym, Dummy
    >>> from sympy.abc import x
    >>> usym('x', x)
    _x
    """
    if modify is None:
        modify = lambda s: '_' + s
    default = None
    # A (name, default_symbol) pair may be passed instead of a bare name.
    if is_sequence(xname):
        xname, default = xname
    candidate = str(xname)
    if not exprs:
        return _symbol(candidate, default, **assumptions)
    if not is_sequence(exprs):
        exprs = [exprs]
    # All symbols already in play; keep modifying the candidate until its
    # printed form collides with none of them.
    taken = set().union(*[e.free_symbols for e in exprs])
    while any(candidate == compare(s) for s in taken):
        candidate = modify(candidate)
    return _symbol(candidate, default, **assumptions)
class Symbol(AtomicExpr, Boolean):
    """
    Assumptions:
       commutative = True

    You can override the default assumptions in the constructor:

    >>> from sympy import symbols
    >>> A,B = symbols('A,B', commutative = False)
    >>> bool(A*B != B*A)
    True
    >>> bool(A*B*2 == 2*A*B) == True # multiplication by scalars is commutative
    True

    """

    is_comparable = False

    __slots__ = ['name']

    is_Symbol = True
    is_symbol = True

    @property
    def _diff_wrt(self):
        """Allow derivatives wrt Symbols.

        Examples
        ========

        >>> from sympy import Symbol
        >>> x = Symbol('x')
        >>> x._diff_wrt
        True
        """
        return True

    @staticmethod
    def _sanitize(assumptions, obj=None):
        """Remove None, convert values to bool, check commutativity *in place*.
        """
        # be strict about commutativity: cannot be None
        is_commutative = fuzzy_bool(assumptions.get('commutative', True))
        if is_commutative is None:
            whose = '%s ' % obj.__name__ if obj else ''
            raise ValueError(
                '%scommutativity must be True or False.' % whose)

        # Deprecated assumption names and their replacements. Previously the
        # import and a defaultdict were re-executed on EVERY loop iteration;
        # they are invariant, so hoist them out of the loop.
        from sympy.utilities.exceptions import SymPyDeprecationWarning
        deprecated = {'bounded': 'finite', 'unbounded': 'infinite',
                      'infinitesimal': 'zero'}

        # sanitize other assumptions so 1 -> True and 0 -> False
        for key in list(assumptions.keys()):
            newkey = deprecated.get(key)
            if newkey:
                SymPyDeprecationWarning(
                    feature="%s assumption" % key,
                    useinstead="%s" % newkey,
                    issue=8071,
                    deprecated_since_version="0.7.6").warn()
                assumptions[newkey] = assumptions[key]
                assumptions.pop(key)
                key = newkey
            v = assumptions[key]
            if v is None:
                assumptions.pop(key)
                continue
            assumptions[key] = bool(v)

    def __new__(cls, name, **assumptions):
        """Symbols are identified by name and assumptions::

        >>> from sympy import Symbol
        >>> Symbol("x") == Symbol("x")
        True
        >>> Symbol("x", real=True) == Symbol("x", real=False)
        False

        """
        cls._sanitize(assumptions, cls)
        return Symbol.__xnew_cached_(cls, name, **assumptions)

    def __new_stage2__(cls, name, **assumptions):
        if not isinstance(name, string_types):
            raise TypeError("name should be a string, not %s" % repr(type(name)))

        obj = Expr.__new__(cls)
        obj.name = name

        # TODO: Issue #8873: Forcing the commutative assumption here means
        # later code such as ``srepr()`` cannot tell whether the user
        # specified ``commutative=True`` or omitted it.  To workaround this,
        # we keep a copy of the assumptions dict, then create the StdFactKB,
        # and finally overwrite its ``._generator`` with the dict copy.  This
        # is a bit of a hack because we assume StdFactKB merely copies the
        # given dict as ``._generator``, but future modification might, e.g.,
        # compute a minimal equivalent assumption set.
        tmp_asm_copy = assumptions.copy()

        # be strict about commutativity
        is_commutative = fuzzy_bool(assumptions.get('commutative', True))
        assumptions['commutative'] = is_commutative
        obj._assumptions = StdFactKB(assumptions)
        obj._assumptions._generator = tmp_asm_copy  # Issue #8873
        return obj

    # Two construction paths share one implementation: Dummy must bypass the
    # cache (each Dummy is unique), ordinary Symbols are always cached.
    __xnew__ = staticmethod(
        __new_stage2__)            # never cached (e.g. dummy)
    __xnew_cached_ = staticmethod(
        cacheit(__new_stage2__))   # symbols are always cached

    def __getnewargs__(self):
        return (self.name,)

    def __getstate__(self):
        return {'_assumptions': self._assumptions}

    def _hashable_content(self):
        # Note: user-specified assumptions not hashed, just derived ones
        return (self.name,) + tuple(sorted(self.assumptions0.items()))

    @property
    def assumptions0(self):
        # Only assumptions with a definite (non-None) value participate in
        # identity/hashing.
        return dict((key, value) for key, value
                    in self._assumptions.items() if value is not None)

    @cacheit
    def sort_key(self, order=None):
        return self.class_key(), (1, (str(self),)), S.One.sort_key(), S.One

    def as_dummy(self):
        """Return a Dummy having the same name and same assumptions as self."""
        return Dummy(self.name, **self._assumptions.generator)

    def __call__(self, *args):
        # x(1) creates an undefined function named after the symbol.
        from .function import Function
        return Function(self.name)(*args)

    def as_real_imag(self, deep=True, **hints):
        from sympy import im, re
        if hints.get('ignore') == self:
            return None
        else:
            return (re(self), im(self))

    def _sage_(self):
        import sage.all as sage
        return sage.var(self.name)

    def is_constant(self, *wrt, **flags):
        # A symbol is constant only relative to other symbols.
        if not wrt:
            return False
        return self not in wrt

    @property
    def free_symbols(self):
        return {self}

    binary_symbols = free_symbols  # in this case, not always

    def as_set(self):
        return S.UniversalSet
class Dummy(Symbol):
    """Dummy symbols are each unique, even if they have the same name:

    >>> from sympy import Dummy
    >>> Dummy("x") == Dummy("x")
    False

    If a name is not supplied then a string value of an internal count will be
    used. This is useful when a temporary variable is needed and the name
    of the variable used in the expression is not important.

    >>> Dummy() #doctest: +SKIP
    _Dummy_10
    """

    # In the rare event that a Dummy object needs to be recreated, both the
    # `name` and `dummy_index` should be passed. This is used by `srepr` for
    # example:
    # >>> d1 = Dummy()
    # >>> d2 = eval(srepr(d1))
    # >>> d2 == d1
    # True
    #
    # If a new session is started between `srepr` and `eval`, there is a very
    # small chance that `d2` will be equal to a previously-created Dummy.

    # Per-session counter of auto-indexed Dummies.
    _count = 0
    _prng = random.Random()
    # Random per-session base offset so indices from two different sessions
    # are unlikely to collide (see the srepr/eval note above).
    _base_dummy_index = _prng.randint(10**6, 9*10**6)

    __slots__ = ['dummy_index']

    is_Dummy = True

    def __new__(cls, name=None, dummy_index=None, **assumptions):
        if dummy_index is not None:
            assert name is not None, "If you specify a dummy_index, you must also provide a name"

        if name is None:
            name = "Dummy_" + str(Dummy._count)

        if dummy_index is None:
            dummy_index = Dummy._base_dummy_index + Dummy._count
            Dummy._count += 1

        cls._sanitize(assumptions, cls)
        # __xnew__ is the *uncached* constructor: every Dummy is distinct.
        obj = Symbol.__xnew__(cls, name, **assumptions)
        obj.dummy_index = dummy_index
        return obj

    def __getstate__(self):
        # Persist the index as well so a pickled Dummy round-trips to an
        # equal object (the name travels via Symbol.__getnewargs__).
        return {'_assumptions': self._assumptions, 'dummy_index': self.dummy_index}

    @cacheit
    def sort_key(self, order=None):
        return self.class_key(), (
            2, (str(self), self.dummy_index)), S.One.sort_key(), S.One

    def _hashable_content(self):
        # Include dummy_index so same-named Dummies hash/compare as distinct.
        return Symbol._hashable_content(self) + (self.dummy_index,)
class Wild(Symbol):
    """
    A Wild symbol matches anything, or anything
    without whatever is explicitly excluded.

    Parameters
    ==========

    name : str
        Name of the Wild instance.
    exclude : iterable, optional
        Instances in ``exclude`` will not be matched.
    properties : iterable of functions, optional
        Functions, each taking an expressions as input
        and returns a ``bool``. All functions in ``properties``
        need to return ``True`` in order for the Wild instance
        to match the expression.

    Examples
    ========

    >>> from sympy import Wild, WildFunction, cos, pi
    >>> from sympy.abc import x, y, z
    >>> a = Wild('a')
    >>> x.match(a)
    {a_: x}
    >>> pi.match(a)
    {a_: pi}
    >>> (3*x**2).match(a*x)
    {a_: 3*x}
    >>> cos(x).match(a)
    {a_: cos(x)}
    >>> b = Wild('b', exclude=[x])
    >>> (3*x**2).match(b*x)
    >>> b.match(a)
    {a_: b_}
    >>> A = WildFunction('A')
    >>> A.match(a)
    {a_: A_}

    Tips
    ====

    When using Wild, be sure to use the exclude
    keyword to make the pattern more precise.
    Without the exclude pattern, you may get matches
    that are technically correct, but not what you
    wanted. For example, using the above without
    exclude:

    >>> from sympy import symbols
    >>> a, b = symbols('a b', cls=Wild)
    >>> (2 + 3*y).match(a*x + b*y)
    {a_: 2/x, b_: 3}

    This is technically correct, because
    (2/x)*x + 3*y == 2 + 3*y, but you probably
    wanted it to not match at all. The issue is that
    you really didn't want a and b to include x and y,
    and the exclude parameter lets you specify exactly
    this.  With the exclude parameter, the pattern will
    not match.

    >>> a = Wild('a', exclude=[x, y])
    >>> b = Wild('b', exclude=[x, y])
    >>> (2 + 3*y).match(a*x + b*y)

    Exclude also helps remove ambiguity from matches.

    >>> E = 2*x**3*y*z
    >>> a, b = symbols('a b', cls=Wild)
    >>> E.match(a*b)
    {a_: 2*y*z, b_: x**3}
    >>> a = Wild('a', exclude=[x, y])
    >>> E.match(a*b)
    {a_: z, b_: 2*x**3*y}
    >>> a = Wild('a', exclude=[x, y, z])
    >>> E.match(a*b)
    {a_: 2, b_: x**3*y*z}

    Wild also accepts a ``properties`` parameter:

    >>> a = Wild('a', properties=[lambda k: k.is_Integer])
    >>> E.match(a*b)
    {a_: 2, b_: x**3*y*z}

    """
    is_Wild = True

    __slots__ = ['exclude', 'properties']

    def __new__(cls, name, exclude=(), properties=(), **assumptions):
        # Normalize to tuples so instances are hashable and comparable.
        exclude = tuple([sympify(x) for x in exclude])
        properties = tuple(properties)
        cls._sanitize(assumptions, cls)
        return Wild.__xnew__(cls, name, exclude, properties, **assumptions)

    def __getnewargs__(self):
        return (self.name, self.exclude, self.properties)

    @staticmethod
    @cacheit
    def __xnew__(cls, name, exclude, properties, **assumptions):
        obj = Symbol.__xnew__(cls, name, **assumptions)
        obj.exclude = exclude
        obj.properties = properties
        return obj

    def _hashable_content(self):
        # exclude/properties distinguish otherwise same-named Wilds.
        return super(Wild, self)._hashable_content() + (self.exclude, self.properties)

    # TODO add check against another Wild
    def matches(self, expr, repl_dict={}, old=False):
        # Reject expr if it contains any excluded expression or fails any
        # property predicate; otherwise record the binding in a *copy* of
        # repl_dict. NOTE(review): the mutable default is never mutated here
        # (it is copied first) — this matches the matches() convention used
        # throughout sympy.
        if any(expr.has(x) for x in self.exclude):
            return None
        if any(not f(expr) for f in self.properties):
            return None
        repl_dict = repl_dict.copy()
        repl_dict[self] = expr
        return repl_dict

    def __call__(self, *args, **kwargs):
        # Unlike Symbol, a Wild cannot be turned into a function application.
        raise TypeError("'%s' object is not callable" % type(self).__name__)
# Matches one range fragment in a symbols() name spec: a numeric range like
# '5:10' or ':10', or an alphabetic range like 'a:d' or ':c'.
_range = _re.compile('([0-9]*:[0-9]+|[a-zA-Z]?:[a-zA-Z])')
def symbols(names, **args):
    r"""
    Transform strings into instances of :class:`Symbol` class.

    :func:`symbols` function returns a sequence of symbols with names taken
    from ``names`` argument, which can be a comma or whitespace delimited
    string, or a sequence of strings::

        >>> from sympy import symbols, Function

        >>> x, y, z = symbols('x,y,z')
        >>> a, b, c = symbols('a b c')

    The type of output is dependent on the properties of input arguments::

        >>> symbols('x')
        x
        >>> symbols('x,')
        (x,)
        >>> symbols('x,y')
        (x, y)
        >>> symbols(('a', 'b', 'c'))
        (a, b, c)
        >>> symbols(['a', 'b', 'c'])
        [a, b, c]
        >>> symbols({'a', 'b', 'c'})
        {a, b, c}

    If an iterable container is needed for a single symbol, set the ``seq``
    argument to ``True`` or terminate the symbol name with a comma::

        >>> symbols('x', seq=True)
        (x,)

    To reduce typing, range syntax is supported to create indexed symbols.
    Ranges are indicated by a colon and the type of range is determined by
    the character to the right of the colon. If the character is a digit
    then all contiguous digits to the left are taken as the nonnegative
    starting value (or 0 if there is no digit left of the colon) and all
    contiguous digits to the right are taken as 1 greater than the ending
    value::

        >>> symbols('x:10')
        (x0, x1, x2, x3, x4, x5, x6, x7, x8, x9)

        >>> symbols('x5:10')
        (x5, x6, x7, x8, x9)
        >>> symbols('x5(:2)')
        (x50, x51)

        >>> symbols('x5:10,y:5')
        (x5, x6, x7, x8, x9, y0, y1, y2, y3, y4)

        >>> symbols(('x5:10', 'y:5'))
        ((x5, x6, x7, x8, x9), (y0, y1, y2, y3, y4))

    If the character to the right of the colon is a letter, then the single
    letter to the left (or 'a' if there is none) is taken as the start
    and all characters in the lexicographic range *through* the letter to
    the right are used as the range::

        >>> symbols('x:z')
        (x, y, z)
        >>> symbols('x:c')  # null range
        ()
        >>> symbols('x(:c)')
        (xa, xb, xc)

        >>> symbols(':c')
        (a, b, c)

        >>> symbols('a:d, x:z')
        (a, b, c, d, x, y, z)

        >>> symbols(('a:d', 'x:z'))
        ((a, b, c, d), (x, y, z))

    Multiple ranges are supported; contiguous numerical ranges should be
    separated by parentheses to disambiguate the ending number of one
    range from the starting number of the next::

        >>> symbols('x:2(1:3)')
        (x01, x02, x11, x12)
        >>> symbols(':3:2')  # parsing is from left to right
        (00, 01, 10, 11, 20, 21)

    Only one pair of parentheses surrounding ranges are removed, so to
    include parentheses around ranges, double them. And to include spaces,
    commas, or colons, escape them with a backslash::

        >>> symbols('x((a:b))')
        (x(a), x(b))
        >>> symbols(r'x(:1\,:2)')  # or r'x((:1)\,(:2))'
        (x(0,0), x(0,1))

    All newly created symbols have assumptions set according to ``args``::

        >>> a = symbols('a', integer=True)
        >>> a.is_integer
        True

        >>> x, y, z = symbols('x,y,z', real=True)
        >>> x.is_real and y.is_real and z.is_real
        True

    Despite its name, :func:`symbols` can create symbol-like objects like
    instances of Function or Wild classes. To achieve this, set ``cls``
    keyword argument to the desired type::

        >>> symbols('f,g,h', cls=Function)
        (f, g, h)

        >>> type(_[0])
        <class 'sympy.core.function.UndefinedFunction'>

    """
    result = []

    if isinstance(names, string_types):
        # Temporarily replace escaped delimiters (\, \: and '\ ') with
        # otherwise-unused control characters so the comma/space/colon
        # splits below cannot see them; literal() undoes the substitution
        # just before each final name is created.
        marker = 0
        literals = [r'\,', r'\:', r'\ ']
        for i in range(len(literals)):
            lit = literals.pop(0)
            if lit in names:
                while chr(marker) in names:
                    marker += 1
                lit_char = chr(marker)
                marker += 1
                names = names.replace(lit, lit_char)
                literals.append((lit_char, lit[1:]))
        def literal(s):
            if literals:
                for c, l in literals:
                    s = s.replace(c, l)
            return s

        names = names.strip()
        # A trailing comma forces a 1-tuple result even for a single name.
        as_seq = names.endswith(',')
        if as_seq:
            names = names[:-1].rstrip()
        if not names:
            raise ValueError('no symbols given')

        # split on commas
        names = [n.strip() for n in names.split(',')]
        if not all(n for n in names):
            raise ValueError('missing symbol between commas')
        # split on spaces
        for i in range(len(names) - 1, -1, -1):
            names[i: i + 1] = names[i].split()

        cls = args.pop('cls', Symbol)
        seq = args.pop('seq', as_seq)

        for name in names:
            if not name:
                raise ValueError('missing symbol')

            # Fast path: no range syntax, create the symbol directly.
            if ':' not in name:
                symbol = cls(literal(name), **args)
                result.append(symbol)
                continue

            split = _range.split(name)
            # remove 1 layer of bounding parentheses around ranges
            for i in range(len(split) - 1):
                if i and ':' in split[i] and split[i] != ':' and \
                        split[i - 1].endswith('(') and \
                        split[i + 1].startswith(')'):
                    split[i - 1] = split[i - 1][:-1]
                    split[i + 1] = split[i + 1][1:]
            # Expand every range fragment into its list of pieces; a null
            # (empty) range breaks out, skipping the for-else below so that
            # no symbols are produced for this name.
            for i, s in enumerate(split):
                if ':' in s:
                    if s[-1].endswith(':'):
                        raise ValueError('missing end range')
                    a, b = s.split(':')
                    if b[-1] in string.digits:
                        a = 0 if not a else int(a)
                        b = int(b)
                        split[i] = [str(c) for c in range(a, b)]
                    else:
                        a = a or 'a'
                        split[i] = [string.ascii_letters[c] for c in range(
                            string.ascii_letters.index(a),
                            string.ascii_letters.index(b) + 1)]  # inclusive
                    if not split[i]:
                        break
                else:
                    split[i] = [s]
            else:
                # Range syntax always yields a tuple, even for one symbol.
                seq = True
                if len(split) == 1:
                    names = split[0]
                else:
                    # Cartesian product of all fragments, joined per combination.
                    names = [''.join(s) for s in cartes(*split)]
                if literals:
                    result.extend([cls(literal(s), **args) for s in names])
                else:
                    result.extend([cls(s, **args) for s in names])

        if not seq and len(result) <= 1:
            if not result:
                return ()
            return result[0]

        return tuple(result)
    else:
        # Non-string input: recurse per element and mirror the container type.
        for name in names:
            result.append(symbols(name, **args))

        return type(names)(result)
def var(names, **args):
    """
    Create symbols and inject them into the global namespace.

    This calls :func:`symbols` with the same arguments and puts the results
    into the *global* namespace. It's recommended not to use :func:`var` in
    library code, where :func:`symbols` has to be used::

    Examples
    ========

    >>> from sympy import var

    >>> var('x')
    x
    >>> x
    x

    >>> var('a,ab,abc')
    (a, ab, abc)
    >>> abc
    abc

    >>> var('x,y', real=True)
    >>> x.is_real and y.is_real
    True

    See :func:`symbol` documentation for more details on what kinds of
    arguments can be passed to :func:`var`.

    """
    def traverse(symbols, frame):
        """Recursively inject symbols to the global namespace. """
        for symbol in symbols:
            if isinstance(symbol, Basic):
                frame.f_globals[symbol.name] = symbol
            elif isinstance(symbol, FunctionClass):
                frame.f_globals[symbol.__name__] = symbol
            else:
                traverse(symbol, frame)

    from inspect import currentframe
    # The caller's frame: names are injected into *its* globals.
    frame = currentframe().f_back

    try:
        syms = symbols(names, **args)

        if syms is not None:
            if isinstance(syms, Basic):
                frame.f_globals[syms.name] = syms
            elif isinstance(syms, FunctionClass):
                frame.f_globals[syms.__name__] = syms
            else:
                traverse(syms, frame)
    finally:
        del frame  # break cyclic dependencies as stated in inspect docs

    return syms
def disambiguate(*iter):
    """
    Return a Tuple containing the passed expressions with symbols
    that appear the same when printed replaced with numerically
    subscripted symbols, and all Dummy symbols replaced with Symbols.

    Parameters
    ==========

    iter: list of symbols or expressions.

    Examples
    ========

    >>> from sympy.core.symbol import disambiguate
    >>> from sympy import Dummy, Symbol, Tuple
    >>> from sympy.abc import y

    >>> tup = Symbol('_x'), Dummy('x'), Dummy('x')
    >>> disambiguate(*tup)
    (x_2, x, x_1)

    >>> eqs = Tuple(Symbol('x')/y, Dummy('x')/y)
    >>> disambiguate(*eqs)
    (x_1/y, x/y)

    >>> ix = Symbol('x', integer=True)
    >>> vx = Symbol('x')
    >>> disambiguate(vx + ix)
    (x + x_1,)

    To make your own mapping of symbols to use, pass only the free symbols
    of the expressions and create a dictionary:

    >>> free = eqs.free_symbols
    >>> mapping = dict(zip(free, disambiguate(*free)))
    >>> eqs.xreplace(mapping)
    (x_1/y, x/y)

    """
    new_iter = Tuple(*iter)
    # Order symbols canonically (ties broken by assumptions) so the
    # subscript numbering is deterministic.
    key = lambda x:tuple(sorted(x.assumptions0.items()))
    syms = ordered(new_iter.free_symbols, keys=key)
    # Group symbols by their printed name with leading underscores stripped
    # (this is what merges a Dummy('x') with Symbol('_x') and Symbol('x')).
    mapping = {}
    for s in syms:
        mapping.setdefault(str(s).lstrip('_'), []).append(s)
    reps = {}
    for k in mapping:
        # the first or only symbol doesn't get subscripted but make
        # sure that it's a Symbol, not a Dummy
        k0 = Symbol("%s" % (k), **mapping[k][0].assumptions0)
        if k != k0:
            reps[mapping[k][0]] = k0
        # the others get subscripts (and are made into Symbols)
        skip = 0
        for i in range(1, len(mapping[k])):
            while True:
                # Skip subscripted names that already exist among the inputs.
                name = "%s_%i" % (k, i + skip)
                if name not in mapping:
                    break
                skip += 1
            ki = mapping[k][i]
            reps[ki] = Symbol(name, **ki.assumptions0)
    return new_iter.xreplace(reps)
|
Hons/troposphere
|
refs/heads/master
|
troposphere/redshift.py
|
3
|
# Copyright (c) 2014, Guillem Anguera <ganguera@gmail.com>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import boolean, integer
class Cluster(AWSObject):
    """AWS::Redshift::Cluster CloudFormation resource.

    ``props`` maps each CloudFormation property name to a
    (validator-or-type, required) pair, following the troposphere
    convention. NOTE: this file targets Python 2 (``basestring``).
    """
    type = "AWS::Redshift::Cluster"

    props = {
        'AllowVersionUpgrade': (boolean, False),
        'AutomatedSnapshotRetentionPeriod': (integer, False),
        'AvailabilityZone': (basestring, False),
        'ClusterParameterGroupName': (basestring, False),
        'ClusterSecurityGroups': (list, False),
        'ClusterSubnetGroupName': (basestring, False),
        'ClusterType': (basestring, True),
        'ClusterVersion': (basestring, False),
        'DBName': (basestring, True),
        'ElasticIp': (basestring, False),
        'Encrypted': (boolean, False),
        'HsmClientCertificateIdentifier': (basestring, False),
        'HsmConfigurationIdentifier': (basestring, False),
        'MasterUsername': (basestring, True),
        'MasterUserPassword': (basestring, True),
        'NodeType': (basestring, True),
        'NumberOfNodes': (integer, False),  # Conditional
        'OwnerAccount': (basestring, False),
        'Port': (integer, False),
        'PreferredMaintenanceWindow': (basestring, False),
        'PubliclyAccessible': (boolean, False),
        'SnapshotClusterIdentifier': (basestring, False),
        'SnapshotIdentifier': (basestring, False),
        'VpcSecurityGroupIds': (list, False),
    }
class AmazonRedshiftParameter(AWSProperty):
    """Name/value pair used inside ClusterParameterGroup's Parameters list."""
    props = {
        'ParameterName': (basestring, True),
        'ParameterValue': (basestring, True),
    }
class ClusterParameterGroup(AWSObject):
    """AWS::Redshift::ClusterParameterGroup CloudFormation resource."""
    type = "AWS::Redshift::ClusterParameterGroup"

    props = {
        'Description': (basestring, True),
        'ParameterGroupFamily': (basestring, True),
        'Parameters': ([AmazonRedshiftParameter], False),
    }
class ClusterSecurityGroup(AWSObject):
    """AWS::Redshift::ClusterSecurityGroup CloudFormation resource."""
    type = "AWS::Redshift::ClusterSecurityGroup"

    props = {
        'Description': (basestring, True),
    }
class ClusterSecurityGroupIngress(AWSObject):
    """AWS::Redshift::ClusterSecurityGroupIngress CloudFormation resource.

    Exactly one of CIDRIP or the EC2SecurityGroup* pair is normally given;
    CloudFormation validates that, not troposphere.
    """
    type = "AWS::Redshift::ClusterSecurityGroupIngress"

    props = {
        'ClusterSecurityGroupName': (basestring, True),
        'CIDRIP': (basestring, False),
        'EC2SecurityGroupName': (basestring, False),
        'EC2SecurityGroupOwnerId': (basestring, False),
    }
class ClusterSubnetGroup(AWSObject):
    """AWS::Redshift::ClusterSubnetGroup CloudFormation resource."""
    type = "AWS::Redshift::ClusterSubnetGroup"

    props = {
        'Description': (basestring, True),
        'SubnetIds': (list, True),
    }
|
kerr-huang/SL4A
|
refs/heads/master
|
python/src/Lib/test/sample_doctest.py
|
229
|
"""This is a sample module that doesn't really test anything all that
interesting.
It simply has a few tests, some of which succeed and some of which fail.
It's important that the numbers remain constant as another test is
testing the running of these tests.
>>> 2+2
4
"""
# NOTE: the first example intentionally FAILS (2+2 is not 5). test_doctest
# counts the exact pass/fail totals of this module, so the docstring below
# must not be changed.
def foo():
    """
    >>> 2+2
    5
    >>> 2+2
    4
    """
# A single intentionally passing doctest example.
def bar():
    """
    >>> 2+2
    4
    """
# Verifies that the setUp hook installed by test_doctest ran (it sets
# test.test_doctest.sillySetup = True before this doctest executes).
def test_silly_setup():
    """
    >>> import test.test_doctest
    >>> test.test_doctest.sillySetup
    True
    """
# Exercises <BLANKLINE> matching: a blank line in expected output must be
# spelled <BLANKLINE> in the doctest.
def w_blank():
    """
    >>> if 1:
    ... print 'a'
    ... print
    ... print 'b'
    a
    <BLANKLINE>
    b
    """
# Module-level value read by the doctest in x_is_one below.
x = 1
def x_is_one():
    """
    >>> x
    1
    """
# Intentionally failing example: y is never defined anywhere, so the
# doctest raises NameError (counted by test_doctest).
def y_is_one():
    """
    >>> y
    1
    """
__test__ = {'good': """
>>> 42
42
""",
'bad': """
>>> 42
666
""",
}
def test_suite():
    # Expose this module's doctests as a unittest-compatible suite.
    import doctest
    return doctest.DocTestSuite()
|
chrisjaquet/FreeCAD
|
refs/heads/master
|
src/Mod/Assembly/FCDocTool.py
|
38
|
#! python
# -*- coding: utf-8 -*-
#***************************************************************************
#* *
#* Copyright (c) 2012 *
#* Juergen Riegel <FreeCAD@juergen-riegel.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
__title__="FreeCAD document tool"
__author__ = "Juergen Riegel <FreeCAD@juergen-riegel.net>"
__url__ = "http://free-cad.sourceforge.net"
'''
General description:
Command line tool and lib for exploring FreeCAD documents
User manual:
TODO
How it works / how to extend:
TODO
'''
import zipfile
from xml.dom.minidom import parse, parseString
class Document:
    """In-memory view of a FreeCAD document file (a zip archive).

    On construction the archive is opened and the embedded ``Document.xml``
    is parsed into a DOM tree (``self.DDom``).
    """

    def __init__(self, DocFile):
        """Open *DocFile* (a path or a file-like object) and parse its XML."""
        self.FileName = DocFile
        self.ZFile = zipfile.ZipFile(DocFile, 'r')
        DStr = self.ZFile.read('Document.xml')
        self.DDom = parseString(DStr)

    def fileInfo(self):
        """Return the archive member names, one per line.

        Bug fix: the loop previously did ``i += i.filename`` — a TypeError
        on ZipInfo — and never appended to ``ret``, so the method could not
        return anything useful. Accumulate into ``ret`` instead.
        """
        ret = ''
        for i in self.ZFile.infolist():
            ret += i.filename
            ret += '\n'
        return ret
if __name__ == "__main__":
    # Minimal CLI: the first positional argument is the FreeCAD document
    # to open. NOTE(review): optparse is deprecated; argparse is the
    # modern replacement.
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="filename",
                      help="write report to FILE", metavar="FILE")
    parser.add_option("-l", "--list",
                      action="store_false", dest="verbose", default=True,
                      help="don't print status messages to stdout")
    (options, args) = parser.parse_args()
    print (options,args)
    # Open the document given on the command line (raises IndexError if
    # no positional argument was supplied).
    d = Document(args[0])
|
EvilGRAHAM/school-projects
|
refs/heads/master
|
Random Python3/domtempcode.py
|
1
|
import random
import sys
# Default board dimensions and mine count (classic beginner-ish layout).
row=9
col=12
mine=10
def createBoard(row, col, mine):
    """Build a ``row`` x ``col`` Minesweeper board containing ``mine`` mines.

    Every cell starts as "C " (covered, empty); mined cells become "C*".
    The finished board is printed and returned as a list of ``row`` lists
    of ``col`` strings.
    """
    board = []
    for r in range(row):
        board.append([])
        for c in range(col):
            board[r].append("C ")
    # Place mines in distinct random cells until the requested count is hit.
    while mine > 0:
        # Bug fix: randrange(0, row - 1) excluded the stop value, which was
        # itself already row - 1, so mines could never land in the last row
        # or column. randrange(row) correctly covers 0 .. row-1.
        rowTemp = random.randrange(row)
        colTemp = random.randrange(col)
        if board[rowTemp][colTemp] == "C ":
            board[rowTemp][colTemp] = "C*"
            mine = mine - 1
    print(board)
    return board
# Build the default board at import time (side effect: prints the board).
board=createBoard(row,col,mine)
|
rubenvb/skia
|
refs/heads/m75
|
infra/bots/assets/android_sdk_linux/create.py
|
11
|
#!/usr/bin/env python
#
# Copyright 2017 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Create the asset."""
import argparse
import os
import shutil
def create_asset(target_dir, android_sdk_root):
    """Copy an Android SDK tree into ``target_dir/android-sdk``.

    When *android_sdk_root* is falsy, fall back to the ANDROID_HOME and
    then ANDROID_SDK_ROOT environment variables; raise if neither is set.
    """
    sdk_root = android_sdk_root
    if not sdk_root:
        sdk_root = os.environ.get('ANDROID_HOME')
    if not sdk_root:
        sdk_root = os.environ.get('ANDROID_SDK_ROOT')
    if not sdk_root:
        raise Exception('No --android_sdk_root provided and no ANDROID_HOME or '
                        'ANDROID_SDK_ROOT environment variables.')
    shutil.copytree(sdk_root, os.path.join(target_dir, 'android-sdk'))
def main():
    """Parse command-line flags and create the asset."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--android_sdk_root')
    parser.add_argument('--target_dir', '-t', required=True)
    opts = parser.parse_args()
    create_asset(opts.target_dir, opts.android_sdk_root)
if __name__ == '__main__':
main()
|
doismellburning/django
|
refs/heads/master
|
tests/template_backends/test_django.py
|
4
|
from django.template import RequestContext
from django.template.backends.django import DjangoTemplates
from django.test import ignore_warnings, RequestFactory
from django.utils.deprecation import RemovedInDjango20Warning
from template_tests.test_response import test_processor_name
from .test_dummy import TemplateStringsTests
class DjangoTemplatesTests(TemplateStringsTests):
    # Reuse the backend-agnostic suite from test_dummy against the real
    # Django template engine.
    engine_class = DjangoTemplates
    backend_name = 'django'

    def test_context_has_priority_over_template_context_processors(self):
        # See ticket #23789.
        engine = DjangoTemplates({
            'DIRS': [],
            'APP_DIRS': False,
            'NAME': 'django',
            'OPTIONS': {
                'context_processors': [test_processor_name],
            },
        })
        template = engine.from_string('{{ processors }}')
        request = RequestFactory().get('/')

        # Check that context processors run
        content = template.render({}, request)
        self.assertEqual(content, 'yes')

        # Check that context overrides context processors
        content = template.render({'processors': 'no'}, request)
        self.assertEqual(content, 'no')

    @ignore_warnings(category=RemovedInDjango20Warning)
    def test_request_context_conflicts_with_request(self):
        # Passing a RequestContext together with a *matching* request is a
        # tolerated (deprecated) combination; a *different* request must
        # raise ValueError.
        template = self.engine.from_string('hello')

        request = RequestFactory().get('/')
        request_context = RequestContext(request)
        # This doesn't raise an exception.
        template.render(request_context, request)

        other_request = RequestFactory().get('/')
        msg = ("render() was called with a RequestContext and a request "
               "argument which refer to different requests. Make sure "
               "that the context argument is a dict or at least that "
               "the two arguments refer to the same request.")
        with self.assertRaisesMessage(ValueError, msg):
            template.render(request_context, other_request)
|
opendesk/winnow
|
refs/heads/master
|
src/winnow/options.py
|
1
|
import collections
from winnow.utils import deep_copy_dict as deepcopy
# from copy import deepcopy
from winnow import utils
from winnow.values.option_values import OptionWinnowValue
from winnow.values import value_factory, value_path_factory
from winnow.values.option_values import OptionResourceWinnowValue, OptionStringWinnowValue
from winnow.values.exception_values import ExceptionWinnowValue
from winnow.keys.key_matching import KeyMatcher
from winnow.exceptions import OptionsExceptionSetWithException
import time
"""
OptionsSet
This is the beef.
all the logical operations on sieves actually happen in their options dict
"""
class OptionsSet(collections.MutableMapping):
"""a dict like object that supports merging, patching etc wraps an existing dict"""
def __init__(self, d):
"""
really its just a wrapped around an existing dict
"""
self.store = d
self.matcher = KeyMatcher.from_dict(d)
def __getitem__(self, key):
return self.store[key]
def __setitem__(self, key, value):
self.store[key] = value
def __delitem__(self, key):
del self.store[key]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
def mega_store(self, other):
#
# print "****STORES****"
# print self.store
# print other.store
#
expanded = deepcopy(self.store)
for k in self.store.keys():
if "*" in k:
matching = other.matcher.get_matching_paths(k)
for match in matching:
expanded[match] = self.store[k]
# this consumes matched wildcards values
if matching:
del expanded[k]
mega_store = {}
for k, v in expanded.iteritems():
new_key, real_value = value_path_factory(k, v)
if real_value is not None:
if not new_key in mega_store.keys():
mega_store[new_key] = []
mega_store[new_key].append(real_value)
return mega_store
def _merge_value_array(self, key, values):
value_types = set([type(v) for v in values])
#
if value_types == {OptionStringWinnowValue, OptionResourceWinnowValue}:
raise Exception("cant mix strings and resources")
if len(values) == 1:
return values[0]
result = values[0]
for v in values[1:]:
result = result.intersection(v)
if result == None:
return ExceptionWinnowValue(key, [v.as_json() for v in values])
return result
def _check_for_exceptions(self, all_values):
for v in all_values:
if isinstance(v, ExceptionWinnowValue):
return v
return None
def merge(self, other):
"""
A union of all keys
An intersection of values
"""
options = {}
this_mega_store = self.mega_store(other)
that_mega_store = other.mega_store(self)
this_keys = set(this_mega_store.keys())
that_keys = set(that_mega_store.keys())
emptyValues = []
# print this_keys, that_keys
for key in this_keys.union(that_keys):
all_values = this_mega_store.get(key, []) + that_mega_store.get(key, [])
exception_value = self._check_for_exceptions(all_values)
if exception_value:
merged_value = exception_value
else:
merged_value = self._merge_value_array(key, all_values)
options[key] = merged_value.as_json()
if isinstance(merged_value, ExceptionWinnowValue):
options[key] = None
emptyValues.append((key, all_values))
options_set = OptionsSet(options)
if emptyValues:
raise OptionsExceptionSetWithException(options_set, emptyValues)
return options_set
def disallowed_keys(self, other):
return self._disallowed(other)
def allows(self, other):
disallowed = self._disallowed(other)
return not bool(disallowed)
def _disallowed(self, other):
"""
An intersection of keys
A subset check on values
"""
disallowed = []
this_mega_store = self.mega_store(other)
that_mega_store = other.mega_store(self)
this_keys = set(this_mega_store.keys())
that_keys = set(that_mega_store.keys())
if this_keys is not None and that_keys is not None:
all_keys = this_keys.intersection(that_keys)
if all_keys is not None:
for key in all_keys:
all_values = this_mega_store.get(key, []) + that_mega_store.get(key, [])
exception_value = self._check_for_exceptions(all_values)
if exception_value:
disallowed.append(key)
else:
this = self._merge_value_array(key, this_mega_store[key])
that = self._merge_value_array(key, that_mega_store[key])
if not that.issubset(this):
disallowed.append(key)
return disallowed
def default(self):
    """Build an OptionsSet holding the default for every stored option.

    For OptionWinnowValue entries the defaults of that value's child
    options are flattened in as well, keyed "<parent>/<child>".
    """
    options = {}
    for k, v in self.store.iteritems():  # Python 2 idiom, used file-wide
        value = value_factory(v)
        options[k] = value.default
        if isinstance(value, OptionWinnowValue):
            child_options = value.get_default_value_options()
            if child_options is not None:
                childSet = OptionsSet(child_options)
                child_defaults = childSet.default().store
                for ck, cv in child_defaults.iteritems():
                    # Flatten child defaults under a slash-joined path.
                    path = "{}/{}".format(k, ck)
                    options[path] = cv
    return OptionsSet(options)
def default_full_values(self):
    """OptionsSet mapping every stored key to its full default value."""
    return OptionsSet({
        key: value_factory(raw).default_full_value
        for key, raw in self.store.iteritems()
    })
#
# def scope(self, scope_name):
# """
# extracts a subset of options by scope
# """
# options = {}
# for k, v in self.store.iteritems():
# if isinstance(v, dict) and u"scopes" in v.keys():
# scopes = set(v[u"scopes"])
# if not scopes.isdisjoint(set([scope_name])):
# options[k] = deepcopy(v)
# else:
# options[k] = deepcopy(v)
# return OptionsSet(options)
def match(self, others):
    """Those of *others* that this option set allows."""
    return [candidate for candidate in others if self.allows(candidate)]
def reverse_match(self, others):
    """Those of *others* that allow this option set."""
    return [candidate for candidate in others if candidate.allows(self)]
@property
def key_set(self):
    """The option keys as a set."""
    # Iterating a dict yields its keys, so set(self.store) == set(self.store.keys()).
    return set(self.store)
|
ETCBC/shebanq
|
refs/heads/master
|
routes.example.py
|
22
|
# -*- coding: utf-8 -*-
# This is an app-specific example router
#
# This simple router is used for setting languages from app/languages directory
# as a part of the application path: app/<lang>/controller/function
# Language from default.py or 'en' (if the file is not found) is used as
# a default_language
#
# See <web2py-root-dir>/router.example.py for parameter's detail
#-------------------------------------------------------------------------------------
# To enable this route file you must do the steps:
#
# 1. rename <web2py-root-dir>/router.example.py to routes.py
# 2. rename this APP/routes.example.py to APP/routes.py
# (where APP - is your application directory)
# 3. restart web2py (or reload routes in web2py admin interface)
#
# YOU CAN COPY THIS FILE TO ANY APPLICATION'S ROOT DIRECTORY WITHOUT CHANGES!
from fileutils import abspath
from languages import read_possible_languages
# Languages found in app/languages/; the 'default' entry names the fallback.
possible_languages = read_possible_languages(abspath('applications', app))
#NOTE! app - is an application based router's parameter with name of an
#      application. E.g. 'welcome' (injected by web2py when it executes
#      this per-application routes file).
routers = {
    app: dict(
        # First element of the 'default' entry is the fallback language code.
        default_language = possible_languages['default'][0],
        # Every real language, excluding the 'default' marker entry.
        languages = [lang for lang in possible_languages
                     if lang != 'default']
    )
}
#NOTE! To change language in your application using these rules add this line
#in one of your models files:
# if request.uri_language: T.force(request.uri_language)
|
shitolepriya/test-erp
|
refs/heads/develop
|
erpnext/accounts/report/purchase_register/purchase_register.py
|
45
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt
from frappe import msgprint, _
def execute(filters=None):
    """Build the Purchase Register report.

    Returns (columns, data): one data row per submitted Purchase Invoice
    matching *filters*, with one dynamic column per distinct expense
    account and per distinct tax account (see get_columns).
    """
    if not filters: filters = {}
    invoice_list = get_invoices(filters)
    columns, expense_accounts, tax_accounts = get_columns(invoice_list)
    if not invoice_list:
        msgprint(_("No record found"))
        return columns, invoice_list
    invoice_expense_map = get_invoice_expense_map(invoice_list)
    # Taxes booked against an expense account are folded into the expense
    # map; the remainder land in the tax map.
    invoice_expense_map, invoice_tax_map = get_invoice_tax_map(invoice_list,
        invoice_expense_map, expense_accounts)
    invoice_po_pr_map = get_invoice_po_pr_map(invoice_list)
    supplier_details = get_supplier_deatils(invoice_list)
    data = []
    for inv in invoice_list:
        # invoice details (set() de-duplicates linked POs/receipts/projects)
        purchase_order = list(set(invoice_po_pr_map.get(inv.name, {}).get("purchase_order", [])))
        purchase_receipt = list(set(invoice_po_pr_map.get(inv.name, {}).get("purchase_receipt", [])))
        project_name = list(set(invoice_po_pr_map.get(inv.name, {}).get("project_name", [])))
        row = [inv.name, inv.posting_date, inv.supplier, inv.supplier_name,
            supplier_details.get(inv.supplier),
            inv.credit_to, ", ".join(project_name), inv.bill_no, inv.bill_date, inv.remarks,
            ", ".join(purchase_order), ", ".join(purchase_receipt)]
        # map expense values: one column per distinct expense account
        base_net_total = 0
        for expense_acc in expense_accounts:
            expense_amount = flt(invoice_expense_map.get(inv.name, {}).get(expense_acc))
            base_net_total += expense_amount
            row.append(expense_amount)
        # net total (fall back to the invoice's own total when no rows summed)
        row.append(base_net_total or inv.base_net_total)
        # tax account columns (accounts doubling as expense accounts already shown)
        total_tax = 0
        for tax_acc in tax_accounts:
            if tax_acc not in expense_accounts:
                tax_amount = flt(invoice_tax_map.get(inv.name, {}).get(tax_acc))
                total_tax += tax_amount
                row.append(tax_amount)
        # total tax, grand total, outstanding amount & rounded total
        row += [total_tax, inv.base_grand_total, flt(inv.base_grand_total, 2), inv.outstanding_amount]
        data.append(row)
    return columns, data
def get_columns(invoice_list):
    """Return (columns, expense_accounts, tax_accounts) for the report.

    Base columns are fixed; one extra ":Currency:120" column is appended
    per distinct expense account and per distinct tax account found on
    the given submitted invoices.
    """
    columns = [
        _("Invoice") + ":Link/Purchase Invoice:120", _("Posting Date") + ":Date:80", _("Supplier Id") + "::120",
        _("Supplier Name") + "::120", _("Supplier Type") + ":Link/Supplier Type:120", _("Payable Account") + ":Link/Account:120",
        _("Project") + ":Link/Project:80", _("Bill No") + "::120", _("Bill Date") + ":Date:80", _("Remarks") + "::150",
        _("Purchase Order") + ":Link/Purchase Order:100", _("Purchase Receipt") + ":Link/Purchase Receipt:100"
    ]
    # Initialise each list separately.  The original chained assignment
    # (`expense_accounts = tax_accounts = ... = []`) bound all four names
    # to ONE shared list object — harmless only by accident of the later
    # rebindings, and a bug waiting to happen.
    expense_accounts = []
    tax_accounts = []
    tax_columns = []
    if invoice_list:
        expense_accounts = frappe.db.sql_list("""select distinct expense_account
            from `tabPurchase Invoice Item` where docstatus = 1 and ifnull(expense_account, '') != ''
            and parent in (%s) order by expense_account""" %
            ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]))
        tax_accounts = frappe.db.sql_list("""select distinct account_head
            from `tabPurchase Taxes and Charges` where parenttype = 'Purchase Invoice'
            and docstatus = 1 and ifnull(account_head, '') != '' and category in ('Total', 'Valuation and Total')
            and parent in (%s) order by account_head""" %
            ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]))
    expense_columns = [(account + ":Currency:120") for account in expense_accounts]
    for account in tax_accounts:
        # accounts that are also expense accounts already have a column
        if account not in expense_accounts:
            tax_columns.append(account + ":Currency:120")
    columns = columns + expense_columns + [_("Net Total") + ":Currency:120"] + tax_columns + \
        [_("Total Tax") + ":Currency:120", _("Grand Total") + ":Currency:120",
        _("Rounded Total") + ":Currency:120", _("Outstanding Amount") + ":Currency:120"]
    return columns, expense_accounts, tax_accounts
def get_conditions(filters):
    """Build the extra WHERE-clause fragment for the invoice query.

    Each recognised filter contributes an " and ..." clause using
    pyformat placeholders, so the returned string is passed to
    frappe.db.sql together with *filters* as the parameter dict.
    """
    clauses = []
    if filters.get("company"):
        clauses.append(" and company=%(company)s")
    if filters.get("supplier"):
        clauses.append(" and supplier = %(supplier)s")
    if filters.get("from_date"):
        clauses.append(" and posting_date>=%(from_date)s")
    if filters.get("to_date"):
        clauses.append(" and posting_date<=%(to_date)s")
    return "".join(clauses)
def get_invoices(filters):
    """Fetch submitted Purchase Invoices matching *filters*, newest first."""
    conditions = get_conditions(filters)
    # *filters* doubles as the pyformat parameter dict for the clauses
    # that get_conditions() produced.
    return frappe.db.sql("""select name, posting_date, credit_to, supplier, supplier_name,
        bill_no, bill_date, remarks, base_net_total, base_grand_total, outstanding_amount
        from `tabPurchase Invoice` where docstatus = 1 %s
        order by posting_date desc, name desc""" % conditions, filters, as_dict=1)
def get_invoice_expense_map(invoice_list):
    """Map invoice name -> {expense_account: summed base net amount}."""
    expense_details = frappe.db.sql("""select parent, expense_account, sum(base_net_amount) as amount
        from `tabPurchase Invoice Item` where parent in (%s) group by parent, expense_account""" %
        ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)
    invoice_expense_map = {}
    for d in expense_details:
        # setdefault ensures the per-invoice dict exists; the inner value
        # is then overwritten with the flt() amount on the next line.
        invoice_expense_map.setdefault(d.parent, frappe._dict()).setdefault(d.expense_account, [])
        invoice_expense_map[d.parent][d.expense_account] = flt(d.amount)
    return invoice_expense_map
def get_invoice_tax_map(invoice_list, invoice_expense_map, expense_accounts):
    """Split tax rows between the expense map and a new tax map.

    Taxes charged against an account that is also an expense account are
    added onto the matching entry of *invoice_expense_map* (mutated in
    place); all other taxes go into invoice_tax_map keyed
    invoice name -> {account_head: amount}.  Returns both maps.
    """
    tax_details = frappe.db.sql("""select parent, account_head, sum(base_tax_amount_after_discount_amount) as tax_amount
        from `tabPurchase Taxes and Charges` where parent in (%s) group by parent, account_head""" %
        ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)
    invoice_tax_map = {}
    for d in tax_details:
        if d.account_head in expense_accounts:
            # dict.has_key() was removed in Python 3; `in` works everywhere.
            if d.account_head in invoice_expense_map[d.parent]:
                invoice_expense_map[d.parent][d.account_head] += flt(d.tax_amount)
            else:
                invoice_expense_map[d.parent][d.account_head] = flt(d.tax_amount)
        else:
            # The original seeded the key with [] via setdefault and then
            # immediately overwrote it; a single assignment is equivalent.
            invoice_tax_map.setdefault(d.parent, frappe._dict())[d.account_head] = flt(d.tax_amount)
    return invoice_expense_map, invoice_tax_map
def get_invoice_po_pr_map(invoice_list):
    """Map invoice name -> linked purchase orders, receipts and projects.

    Result values are dicts with optional keys "purchase_order",
    "purchase_receipt" and "project_name", each holding a list.
    """
    pi_items = frappe.db.sql("""select parent, purchase_order, purchase_receipt, po_detail,
        project_name from `tabPurchase Invoice Item` where parent in (%s)
        and (ifnull(purchase_order, '') != '' or ifnull(purchase_receipt, '') != '')""" %
        ', '.join(['%s']*len(invoice_list)), tuple([inv.name for inv in invoice_list]), as_dict=1)
    invoice_po_pr_map = {}
    for d in pi_items:
        if d.purchase_order:
            invoice_po_pr_map.setdefault(d.parent, frappe._dict()).setdefault(
                "purchase_order", []).append(d.purchase_order)
        pr_list = None
        if d.purchase_receipt:
            # item row links a receipt directly
            pr_list = [d.purchase_receipt]
        elif d.po_detail:
            # otherwise find receipts through the PO item reference
            pr_list = frappe.db.sql_list("""select distinct parent from `tabPurchase Receipt Item`
                where docstatus=1 and prevdoc_detail_docname=%s""", d.po_detail)
        if pr_list:
            invoice_po_pr_map.setdefault(d.parent, frappe._dict()).setdefault("purchase_receipt", pr_list)
        if d.project_name:
            invoice_po_pr_map.setdefault(d.parent, frappe._dict()).setdefault(
                "project_name", []).append(d.project_name)
    return invoice_po_pr_map
def get_account_details(invoice_list):
    """Map each distinct credit_to account name to its parent account."""
    account_names = list(set([inv.credit_to for inv in invoice_list]))
    placeholders = ", ".join(["%s"] * len(account_names))
    rows = frappe.db.sql("""select name, parent_account from tabAccount
        where name in (%s)""" % placeholders, tuple(account_names), as_dict=1)
    return {row.name: row.parent_account for row in rows}
def get_supplier_deatils(invoice_list):
    """Map each distinct supplier name to its supplier_type.

    NOTE(review): the name carries a historic typo ("deatils") that is
    kept because execute() calls it by this spelling.
    """
    supplier_names = list(set([inv.supplier for inv in invoice_list]))
    placeholders = ", ".join(["%s"] * len(supplier_names))
    rows = frappe.db.sql("""select name, supplier_type from `tabSupplier`
        where name in (%s)""" % placeholders, tuple(supplier_names), as_dict=1)
    return {row.name: row.supplier_type for row in rows}
|
qskycolor/viewfinder
|
refs/heads/master
|
backend/www/test/service_counters_test.py
|
13
|
# Copyright 2012 Viewfinder Inc. All Rights Reserved.
"""Test case for performance counters related to the service frontend.
"""
__author__ = 'matt@emailscrubbed.com (Matt Tracy)'
import time
from viewfinder.backend.base import util, counters
from viewfinder.backend.base.testing import async_test
from viewfinder.backend.www.test import service_base_test
class ServiceCountersTestCase(service_base_test.ServiceBaseTestCase):
    """Exercises the service request-rate and failure-rate counters."""
    def setUp(self):
        super(ServiceCountersTestCase, self).setUp()
        # Fresh meter + timestamp so only this test's traffic is counted.
        self.meter = counters.Meter(counters.counters.viewfinder.service)
        self.meter_start = time.time()
    def _CheckCounters(self, expected_requests, expected_failures):
        """Assert the sampled per-minute rates roughly match expectations."""
        sample = self.meter.sample()
        elapsed = time.time() - self.meter_start
        # High deltas because of very small denominators.
        self.assertAlmostEqual(sample.viewfinder.service.req_per_min, (expected_requests / elapsed) * 60, delta=100.0)
        self.assertAlmostEqual(sample.viewfinder.service.fail_per_min, (expected_failures / elapsed) * 60, delta=100.0)
        # Advance the window so the next sample measures fresh traffic only.
        self.meter_start += elapsed
    def testServiceCounters(self):
        """Verify the requests per second and failures per second performance counters."""
        self._CheckCounters(0, 0)
        # Five successes plus five guaranteed 400 failures = 10 requests.
        for i in range(5):
            self._SendRequest('query_notifications', self._cookie, {})
            self.assertRaisesHttpError(400, self._SendRequest, 'query_notifications', self._cookie, {'start_key': 2})
        self._CheckCounters(10, 5)
        self._CheckCounters(0, 0)
|
bva24/smart
|
refs/heads/master
|
cartridge/shop/__init__.py
|
17
|
from __future__ import unicode_literals
from cartridge import __version__
|
2014c2g5/2014c2
|
refs/heads/master
|
exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/_socket.py
|
742
|
"""Implementation module for socket operations.
See the socket module for documentation."""
# Stub constants for the browser (Brython) build.  Values mirror a
# Windows CPython _socket module and exist only so importing code runs.
# Address families
AF_APPLETALK = 16
AF_DECnet = 12
AF_INET = 2
AF_INET6 = 23
AF_IPX = 6
AF_IRDA = 26
AF_SNA = 11
AF_UNSPEC = 0
# getaddrinfo() flags
AI_ADDRCONFIG = 1024
AI_ALL = 256
AI_CANONNAME = 2
AI_NUMERICHOST = 4
AI_NUMERICSERV = 8
AI_PASSIVE = 1
AI_V4MAPPED = 2048
# Fake capsule repr copied from a real interpreter session.
CAPI = '<capsule object "_socket.CAPI" at 0x00BC4F38>'
# getaddrinfo()/getnameinfo() error codes
EAI_AGAIN = 11002
EAI_BADFLAGS = 10022
EAI_FAIL = 11003
EAI_FAMILY = 10047
EAI_MEMORY = 8
EAI_NODATA = 11001
EAI_NONAME = 11001
EAI_SERVICE = 10109
EAI_SOCKTYPE = 10044
# Special IPv4 addresses (as signed 32-bit ints)
INADDR_ALLHOSTS_GROUP = -536870911
INADDR_ANY = 0
INADDR_BROADCAST = -1
INADDR_LOOPBACK = 2130706433
INADDR_MAX_LOCAL_GROUP = -536870657
INADDR_NONE = -1
INADDR_UNSPEC_GROUP = -536870912
# Port ranges
IPPORT_RESERVED = 1024
IPPORT_USERRESERVED = 5000
# IP protocol numbers
IPPROTO_ICMP = 1
IPPROTO_IP = 0
IPPROTO_RAW = 255
IPPROTO_TCP = 6
IPPROTO_UDP = 17
# IPv6 socket options
IPV6_CHECKSUM = 26
IPV6_DONTFRAG = 14
IPV6_HOPLIMIT = 21
IPV6_HOPOPTS = 1
IPV6_JOIN_GROUP = 12
IPV6_LEAVE_GROUP = 13
IPV6_MULTICAST_HOPS = 10
IPV6_MULTICAST_IF = 9
IPV6_MULTICAST_LOOP = 11
IPV6_PKTINFO = 19
IPV6_RECVRTHDR = 38
IPV6_RECVTCLASS = 40
IPV6_RTHDR = 32
IPV6_TCLASS = 39
IPV6_UNICAST_HOPS = 4
IPV6_V6ONLY = 27
# IPv4 socket options
IP_ADD_MEMBERSHIP = 12
IP_DROP_MEMBERSHIP = 13
IP_HDRINCL = 2
IP_MULTICAST_IF = 9
IP_MULTICAST_LOOP = 11
IP_MULTICAST_TTL = 10
IP_OPTIONS = 1
IP_RECVDSTADDR = 25
IP_TOS = 3
IP_TTL = 4
# send()/recv() message flags
MSG_BCAST = 1024
MSG_CTRUNC = 512
MSG_DONTROUTE = 4
MSG_MCAST = 2048
MSG_OOB = 1
MSG_PEEK = 2
MSG_TRUNC = 256
# getnameinfo() flags and buffer limits
NI_DGRAM = 16
NI_MAXHOST = 1025
NI_MAXSERV = 32
NI_NAMEREQD = 4
NI_NOFQDN = 1
NI_NUMERICHOST = 2
NI_NUMERICSERV = 8
# SIO_RCVALL modes
RCVALL_MAX = 3
RCVALL_OFF = 0
RCVALL_ON = 1
RCVALL_SOCKETLEVELONLY = 2
# shutdown() directions
SHUT_RD = 0
SHUT_RDWR = 2
SHUT_WR = 1
# ioctl codes
SIO_KEEPALIVE_VALS = 2550136836
SIO_RCVALL = 2550136833
# Socket types
SOCK_DGRAM = 2
SOCK_RAW = 3
SOCK_RDM = 4
SOCK_SEQPACKET = 5
SOCK_STREAM = 1
# setsockopt() levels
SOL_IP = 0
SOL_SOCKET = 65535
SOL_TCP = 6
SOL_UDP = 17
SOMAXCONN = 2147483647
# SOL_SOCKET options
SO_ACCEPTCONN = 2
SO_BROADCAST = 32
SO_DEBUG = 1
SO_DONTROUTE = 16
SO_ERROR = 4103
SO_EXCLUSIVEADDRUSE = -5
SO_KEEPALIVE = 8
SO_LINGER = 128
SO_OOBINLINE = 256
SO_RCVBUF = 4098
SO_RCVLOWAT = 4100
SO_RCVTIMEO = 4102
SO_REUSEADDR = 4
SO_SNDBUF = 4097
SO_SNDLOWAT = 4099
SO_SNDTIMEO = 4101
SO_TYPE = 4104
SO_USELOOPBACK = 64
class SocketType:
    """Placeholder for CPython's SocketType (stub; no behaviour)."""
    pass
# TCP-level socket options
TCP_MAXSEG = 4
TCP_NODELAY = 1
# Fake loader repr copied from a real interpreter session.
__loader__ = '<_frozen_importlib.ExtensionFileLoader object at 0x00CA2D90>'
# Browser-side stubs: each function keeps the CPython docstring for
# introspection but performs no real work (there is no OS socket layer
# in the browser), so every function simply returns None.
def dup(*args,**kw):
    """dup(integer) -> integer
    Duplicate an integer socket file descriptor.  This is like os.dup(), but for
    sockets; on some platforms os.dup() won't work for socket file descriptors."""
    pass
class error:
    """Base socket error type (stub)."""
    pass
class gaierror:
    """Address-resolution error type (stub)."""
    pass
def getaddrinfo(*args,**kw):
    """getaddrinfo(host, port [, family, socktype, proto, flags]) -> list of (family, socktype, proto, canonname, sockaddr)
    Resolve host and port into addrinfo struct."""
    pass
def getdefaulttimeout(*args,**kw):
    """getdefaulttimeout() -> timeout
    Returns the default timeout in seconds (float) for new socket objects.
    A value of None indicates that new socket objects have no timeout.
    When the socket module is first imported, the default is None."""
    pass
def gethostbyaddr(*args,**kw):
    """gethostbyaddr(host) -> (name, aliaslist, addresslist)
    Return the true host name, a list of aliases, and a list of IP addresses,
    for a host.  The host argument is a string giving a host name or IP number."""
    pass
def gethostbyname(*args,**kw):
    """gethostbyname(host) -> address
    Return the IP address (a string of the form '255.255.255.255') for a host."""
    pass
def gethostbyname_ex(*args,**kw):
    """gethostbyname_ex(host) -> (name, aliaslist, addresslist)
    Return the true host name, a list of aliases, and a list of IP addresses,
    for a host.  The host argument is a string giving a host name or IP number."""
    pass
def gethostname(*args,**kw):
    """gethostname() -> string
    Return the current host name."""
    pass
def getnameinfo(*args,**kw):
    """getnameinfo(sockaddr, flags) --> (host, port)
    Get host and port for a sockaddr."""
    pass
def getprotobyname(*args,**kw):
    """getprotobyname(name) -> integer
    Return the protocol number for the named protocol.  (Rarely used.)"""
    pass
def getservbyname(*args,**kw):
    """getservbyname(servicename[, protocolname]) -> integer
    Return a port number from a service name and protocol name.
    The optional protocol name, if given, should be 'tcp' or 'udp',
    otherwise any protocol will match."""
    pass
def getservbyport(*args,**kw):
    """getservbyport(port[, protocolname]) -> string
    Return the service name from a port number and protocol name.
    The optional protocol name, if given, should be 'tcp' or 'udp',
    otherwise any protocol will match."""
    pass
# Claim IPv6 support so feature checks pass in the browser.
has_ipv6 = True
class herror:
    """Host-resolution error type (stub)."""
    pass
def htonl(*args,**kw):
    """htonl(integer) -> integer
    Convert a 32-bit integer from host to network byte order."""
    pass
def htons(*args,**kw):
    """htons(integer) -> integer
    Convert a 16-bit integer from host to network byte order."""
    pass
def inet_aton(*args,**kw):
    """inet_aton(string) -> bytes giving packed 32-bit IP representation
    Convert an IP address in string format (123.45.67.89) to the 32-bit packed
    binary format used in low-level network functions."""
    pass
def inet_ntoa(*args,**kw):
    """inet_ntoa(packed_ip) -> ip_address_string
    Convert an IP address from 32-bit packed binary format to string format"""
    pass
def ntohl(*args,**kw):
    """ntohl(integer) -> integer
    Convert a 32-bit integer from network to host byte order."""
    pass
def ntohs(*args,**kw):
    """ntohs(integer) -> integer
    Convert a 16-bit integer from network to host byte order."""
    pass
def setdefaulttimeout(*args,**kw):
    """setdefaulttimeout(timeout)
    Set the default timeout in seconds (float) for new socket objects.
    A value of None indicates that new socket objects have no timeout.
    When the socket module is first imported, the default is None."""
    pass
class socket:
    """Stub socket object: constructor, bind and close are all no-ops."""
    def __init__(self,*args,**kw):
        pass
    def bind(self,*args,**kw):
        pass
    def close(self):
        pass
class timeout:
    """Socket timeout error type (stub)."""
    pass
|
schalkneethling/bedrock
|
refs/heads/master
|
lib/l10n_utils/extract.py
|
11
|
# mostly borrowed from tower
from babel.messages.extract import extract_python as babel_extract_py
from jinja2 import ext
from lib.l10n_utils.utils import strip_whitespace
def add_context(context, message):
    """Join *context* and *message* with gettext's msgctxt separator."""
    # \x04 is the magic byte gettext uses between msgctxt and msgid.
    return u"{0}\x04{1}".format(context, message)
def tweak_message(message):
    """We piggyback on jinja2's babel_extract() (really, Babel's extract_*
    functions) but they don't support some things we need so this function will
    tweak the message.  Specifically:

    1) Whitespace is stripped from the msgid (Jinja2 only strips the ends
       of a string, so linebreaks would otherwise survive into .po files).
    2) Babel doesn't support context (msgctxt), so it is hacked in here.
    """
    if isinstance(message, basestring):
        return strip_whitespace(message)
    if not isinstance(message, tuple):
        return message
    # Tuple sizes: 2 = context, 3 = plural, 4 = plural with context.
    size = len(message)
    if size == 2:
        msgid, ctxt = message[0], message[1]
        return add_context(ctxt, msgid)
    if size == 3 and all(isinstance(part, basestring) for part in message[:2]):
        singular, plural, num = message
        return (strip_whitespace(singular),
                strip_whitespace(plural),
                num)
    if size == 4:
        singular, plural, num, ctxt = message
        return (add_context(ctxt, strip_whitespace(singular)),
                add_context(ctxt, strip_whitespace(plural)),
                num)
    return message
def extract_python(fileobj, keywords, comment_tags, options):
    """Extract messages from Python source, tweaking each message.

    Wraps Babel's extract_python and applies tweak_message() to every
    extracted message.  The original wrapped the extractor in list(),
    which forced the whole file to be extracted before the first yield;
    iterating the generator directly keeps extraction lazy.
    """
    for lineno, funcname, message, comments in \
            babel_extract_py(fileobj, keywords, comment_tags, options):
        message = tweak_message(message)
        yield lineno, funcname, message, comments
def extract_jinja2(fileobj, keywords, comment_tags, options):
    """Extract messages from Jinja2 templates, tweaking each message.

    Wraps jinja2's babel_extract and applies tweak_message() to every
    extracted message.  As in extract_python, the pointless list() around
    the extractor is dropped so extraction stays lazy.
    """
    for lineno, funcname, message, comments in \
            ext.babel_extract(fileobj, keywords, comment_tags, options):
        message = tweak_message(message)
        yield lineno, funcname, message, comments
|
mtanski/samba
|
refs/heads/master
|
lib/testtools/testtools/tests/test_helpers.py
|
12
|
# Copyright (c) 2010-2012 testtools developers. See LICENSE for details.
from testtools import TestCase
from testtools.helpers import (
try_import,
try_imports,
)
from testtools.matchers import (
Equals,
Is,
Not,
)
from testtools.tests.helpers import (
FullStackRunTest,
hide_testtools_stack,
is_stack_hidden,
safe_hasattr,
)
def check_error_callback(test, function, arg, expected_error_count,
                         expect_result):
    """General test template for error_callback argument.

    :param test: Test case instance.
    :param function: Either try_import or try_imports.
    :param arg: Name or names to import.
    :param expected_error_count: Expected number of calls to the callback.
    :param expect_result: Boolean for whether a module should
        ultimately be returned or not.
    """
    errors = []

    def record(exc):
        test.assertIsInstance(exc, ImportError)
        errors.append(exc)

    try:
        outcome = function(arg, error_callback=record)
    except ImportError:
        # Importing raised outright: only acceptable when no result was expected.
        test.assertFalse(expect_result)
    else:
        matcher = Not(Is(None)) if expect_result else Is(None)
        test.assertThat(outcome, matcher)
    test.assertEquals(len(errors), expected_error_count)
class TestSafeHasattr(TestCase):
    """safe_hasattr() reports attribute presence without masking errors."""
    def test_attribute_not_there(self):
        class Foo(object):
            pass
        self.assertEqual(False, safe_hasattr(Foo(), 'anything'))
    def test_attribute_there(self):
        class Foo(object):
            pass
        foo = Foo()
        foo.attribute = None
        self.assertEqual(True, safe_hasattr(foo, 'attribute'))
    def test_property_there(self):
        class Foo(object):
            @property
            def attribute(self):
                return None
        foo = Foo()
        self.assertEqual(True, safe_hasattr(foo, 'attribute'))
    def test_property_raises(self):
        # Errors raised by a property must propagate, not be swallowed.
        class Foo(object):
            @property
            def attribute(self):
                1/0
        foo = Foo()
        self.assertRaises(ZeroDivisionError, safe_hasattr, foo, 'attribute')
class TestTryImport(TestCase):
    """Behaviour of try_import(): fallbacks, submodules and callbacks."""
    def test_doesnt_exist(self):
        # try_import('thing', foo) returns foo if 'thing' doesn't exist.
        marker = object()
        result = try_import('doesntexist', marker)
        self.assertThat(result, Is(marker))
    def test_None_is_default_alternative(self):
        # try_import('thing') returns None if 'thing' doesn't exist.
        result = try_import('doesntexist')
        self.assertThat(result, Is(None))
    def test_existing_module(self):
        # try_import('thing', foo) imports 'thing' and returns it if it's a
        # module that exists.
        result = try_import('os', object())
        import os
        self.assertThat(result, Is(os))
    def test_existing_submodule(self):
        # try_import('thing.another', foo) imports 'thing' and returns it if
        # it's a module that exists.
        result = try_import('os.path', object())
        import os
        self.assertThat(result, Is(os.path))
    def test_nonexistent_submodule(self):
        # try_import('thing.another', foo) imports 'thing' and returns foo if
        # 'another' doesn't exist.
        marker = object()
        result = try_import('os.doesntexist', marker)
        self.assertThat(result, Is(marker))
    def test_object_from_module(self):
        # try_import('thing.object') imports 'thing' and returns
        # 'thing.object' if 'thing' is a module and 'object' is not.
        result = try_import('os.path.join')
        import os
        self.assertThat(result, Is(os.path.join))
    def test_error_callback(self):
        # the error callback is called on failures.
        check_error_callback(self, try_import, 'doesntexist', 1, False)
    def test_error_callback_missing_module_member(self):
        # the error callback is called on failures to find an object
        # inside an existing module.
        check_error_callback(self, try_import, 'os.nonexistent', 1, False)
    def test_error_callback_not_on_success(self):
        # the error callback is not called on success.
        check_error_callback(self, try_import, 'os.path', 0, True)
class TestTryImports(TestCase):
    """Behaviour of try_imports(): first importable name wins."""
    def test_doesnt_exist(self):
        # try_imports('thing', foo) returns foo if 'thing' doesn't exist.
        marker = object()
        result = try_imports(['doesntexist'], marker)
        self.assertThat(result, Is(marker))
    def test_fallback(self):
        # The second candidate is used when the first can't be imported.
        result = try_imports(['doesntexist', 'os'])
        import os
        self.assertThat(result, Is(os))
    def test_None_is_default_alternative(self):
        # With no alternative, an all-failure raises a summarising ImportError.
        e = self.assertRaises(
            ImportError, try_imports, ['doesntexist', 'noreally'])
        self.assertThat(
            str(e),
            Equals("Could not import any of: doesntexist, noreally"))
    def test_existing_module(self):
        # try_imports('thing', foo) imports 'thing' and returns it if it's a
        # module that exists.
        result = try_imports(['os'], object())
        import os
        self.assertThat(result, Is(os))
    def test_existing_submodule(self):
        # try_imports('thing.another', foo) imports 'thing' and returns it if
        # it's a module that exists.
        result = try_imports(['os.path'], object())
        import os
        self.assertThat(result, Is(os.path))
    def test_nonexistent_submodule(self):
        # try_imports('thing.another', foo) imports 'thing' and returns foo if
        # 'another' doesn't exist.
        marker = object()
        result = try_imports(['os.doesntexist'], marker)
        self.assertThat(result, Is(marker))
    def test_fallback_submodule(self):
        result = try_imports(['os.doesntexist', 'os.path'])
        import os
        self.assertThat(result, Is(os.path))
    def test_error_callback(self):
        # One error for every name that doesn't exist.
        check_error_callback(self, try_imports,
            ['os.doesntexist', 'os.notthiseither'],
            2, False)
        check_error_callback(self, try_imports,
            ['os.doesntexist', 'os.notthiseither', 'os'],
            2, True)
        check_error_callback(self, try_imports,
            ['os.path'],
            0, True)
class TestStackHiding(TestCase):
    """hide_testtools_stack()/is_stack_hidden() round-trip consistently."""
    run_tests_with = FullStackRunTest
    def setUp(self):
        super(TestStackHiding, self).setUp()
        # Restore the pre-test hiding state when this test finishes.
        self.addCleanup(hide_testtools_stack, is_stack_hidden())
    def test_is_stack_hidden_consistent_true(self):
        hide_testtools_stack(True)
        self.assertEqual(True, is_stack_hidden())
    def test_is_stack_hidden_consistent_false(self):
        hide_testtools_stack(False)
        self.assertEqual(False, is_stack_hidden())
def test_suite():
    """Load this module's tests for the stock unittest runner."""
    from unittest import TestLoader
    loader = TestLoader()
    return loader.loadTestsFromName(__name__)
|
chatcannon/numpy
|
refs/heads/master
|
tools/allocation_tracking/track_allocations.py
|
102
|
from __future__ import division, absolute_import, print_function
import numpy as np
import gc
import inspect
from alloc_hook import NumpyAllocHook
class AllocationTracker(object):
    """Record numpy allocations and frees per source line via NumpyAllocHook.

    Use as a context manager; afterwards `allocation_trace` holds one tuple
    per source line: (lineinfo, bytes allocated, bytes freed, #allocations,
    #frees, maximum memory usage, long-lived bytes allocated).
    """
    def __init__(self, threshold=0):
        '''track numpy allocations of size threshold bytes or more.'''
        self.threshold = threshold
        # The total number of bytes currently allocated with size above
        # threshold
        self.total_bytes = 0
        # We buffer requests line by line and move them into the allocation
        # trace when a new line occurs
        self.current_line = None
        self.pending_allocations = []
        # ptr -> size for live tracked blocks
        self.blocksizes = {}
        # list of (lineinfo, bytes allocated, bytes freed, # allocations, #
        # frees, maximum memory usage, long-lived bytes allocated)
        self.allocation_trace = []
        self.numpy_hook = NumpyAllocHook(self.hook)

    def __enter__(self):
        self.numpy_hook.__enter__()

    def __exit__(self, type, value, traceback):
        self.check_line_changed()  # forces pending events to be handled
        self.numpy_hook.__exit__()

    def hook(self, inptr, outptr, size):
        """Dispatch a raw hook event to the malloc/realloc/free handler."""
        # minimize the chances that the garbage collector kicks in during a
        # cython __dealloc__ call and causes a double delete of the current
        # object. To avoid this fully the hook would have to avoid all python
        # api calls, e.g. by being implemented in C like python 3.4's
        # tracemalloc module
        gc_on = gc.isenabled()
        gc.disable()
        try:
            if outptr == 0:  # it's a free
                self.free_cb(inptr)
            elif inptr != 0:  # realloc
                self.realloc_cb(inptr, outptr, size)
            else:  # malloc
                self.alloc_cb(outptr, size)
        finally:
            # Re-enable GC even if a handler raised (the original could
            # leave GC permanently disabled on an exception).
            if gc_on:
                gc.enable()

    def alloc_cb(self, ptr, size):
        """Record a malloc of *size* bytes at *ptr* (if above threshold)."""
        if size >= self.threshold:
            self.check_line_changed()
            self.blocksizes[ptr] = size
            self.pending_allocations.append(size)

    def free_cb(self, ptr):
        """Record a free of a previously tracked block."""
        size = self.blocksizes.pop(ptr, 0)
        if size:
            self.check_line_changed()
            self.pending_allocations.append(-size)

    def realloc_cb(self, newptr, oldptr, size):
        """Record a realloc as a single net size delta."""
        if (size >= self.threshold) or (oldptr in self.blocksizes):
            self.check_line_changed()
            oldsize = self.blocksizes.pop(oldptr, 0)
            self.pending_allocations.append(size - oldsize)
            self.blocksizes[newptr] = size

    def get_code_line(self):
        """Return (filename, lineno, module, code, index) of the user frame."""
        # first frame is this line, then check_line_changed(), then 2 callbacks,
        # then actual code.
        try:
            return inspect.stack()[4][1:]
        except Exception:  # was a bare except: don't swallow KeyboardInterrupt
            return inspect.stack()[0][1:]

    def check_line_changed(self):
        """Flush buffered events into allocation_trace when the line changes."""
        line = self.get_code_line()
        if line != self.current_line and (self.current_line is not None):
            # move pending events into the allocation_trace
            max_size = self.total_bytes
            bytes_allocated = 0
            bytes_freed = 0
            num_allocations = 0
            num_frees = 0
            before_size = self.total_bytes
            for allocation in self.pending_allocations:
                self.total_bytes += allocation
                if allocation > 0:
                    bytes_allocated += allocation
                    num_allocations += 1
                else:
                    bytes_freed += -allocation
                    num_frees += 1
                max_size = max(max_size, self.total_bytes)
            long_lived = max(self.total_bytes - before_size, 0)
            self.allocation_trace.append((self.current_line, bytes_allocated,
                                          bytes_freed, num_allocations,
                                          num_frees, max_size, long_lived))
            # clear pending allocations
            self.pending_allocations = []
        # move to the new line
        self.current_line = line

    def write_html(self, filename):
        """Write the allocation trace as a sortable HTML table to *filename*."""
        # `with` guarantees the file is closed even if formatting fails
        # (the original leaked the handle on error).
        with open(filename, "w") as f:
            f.write('<HTML><HEAD><script src="sorttable.js"></script></HEAD><BODY>\n')
            f.write('<TABLE class="sortable" width=100%>\n')
            f.write("<TR>\n")
            cols = "event#,lineinfo,bytes allocated,bytes freed,#allocations,#frees,max memory usage,long lived bytes".split(',')
            for header in cols:
                f.write(" <TH>{0}</TH>".format(header))
            f.write("\n</TR>\n")
            for idx, event in enumerate(self.allocation_trace):
                f.write("<TR>\n")
                event = [idx] + list(event)
                for col, val in zip(cols, event):
                    if col == 'lineinfo':
                        # special handling; renamed local avoids shadowing
                        # the *filename* parameter as the original did.
                        try:
                            fname, line, module, code, index = val
                            val = "{0}({1}): {2}".format(fname, line, code[index])
                        except Exception:
                            # sometimes this info is not available (from eval()?)
                            val = str(val)
                    f.write(" <TD>{0}</TD>".format(val))
                f.write("\n</TR>\n")
            f.write("</TABLE></BODY></HTML>\n")
if __name__ == '__main__':
    # Demo: track numpy allocations of 1000+ bytes across a loop of
    # array creations and dump the per-line report as HTML.
    tracker = AllocationTracker(1000)
    with tracker:
        for i in range(100):
            np.zeros(i * 100)
            np.zeros(i * 200)
    tracker.write_html("allocations.html")
|
myhdl/myhdl
|
refs/heads/master
|
example/manual/test_mux.py
|
6
|
import random
from myhdl import block, instance, Signal, intbv, delay
from mux import mux
# Seed the RNG so the testbench stimulus is reproducible run-to-run.
random.seed(5)
randrange = random.randrange
@block
def test_mux():
    """Testbench: drive random a/b/sel into the mux and print z each cycle."""
    z, a, b, sel = [Signal(intbv(0)) for i in range(4)]
    mux_1 = mux(z, a, b, sel)

    @instance
    def stimulus():
        print("z a b sel")
        for i in range(12):
            # new random inputs, then wait one delay step before sampling z
            a.next, b.next, sel.next = randrange(8), randrange(8), randrange(2)
            yield delay(10)
            print("%s %s %s %s" % (z, a, b, sel))
    return mux_1, stimulus
# Instantiate and run the testbench when the module is executed.
tb = test_mux()
tb.run_sim()
|
nju520/django
|
refs/heads/master
|
tests/sitemaps_tests/urls/http.py
|
311
|
from datetime import date, datetime
from django.conf.urls import url
from django.conf.urls.i18n import i18n_patterns
from django.contrib.sitemaps import GenericSitemap, Sitemap, views
from django.http import HttpResponse
from django.utils import timezone
from django.views.decorators.cache import cache_page
from ..models import I18nTestModel, TestModel
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = '/location/'
lastmod = datetime.now()
def items(self):
return [object()]
class SimpleI18nSitemap(Sitemap):
changefreq = "never"
priority = 0.5
i18n = True
def items(self):
return I18nTestModel.objects.all()
class EmptySitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = '/location/'
def items(self):
return []
class FixedLastmodSitemap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0)
class FixedLastmodMixedSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = '/location/'
loop = 0
def items(self):
o1 = TestModel()
o1.lastmod = datetime(2013, 3, 13, 10, 0, 0)
o2 = TestModel()
return [o1, o2]
class DateSiteMap(SimpleSitemap):
lastmod = date(2013, 3, 13)
class TimezoneSiteMap(SimpleSitemap):
lastmod = datetime(2013, 3, 13, 10, 0, 0, tzinfo=timezone.get_fixed_timezone(-300))
def testmodelview(request, id):
    # Trivial view target for the i18n URL pattern below; body is irrelevant.
    return HttpResponse()
# Sitemap registries handed to the views via URL kwargs below.
simple_sitemaps = {
    'simple': SimpleSitemap,
}
simple_i18nsitemaps = {
    'simple': SimpleI18nSitemap,
}
empty_sitemaps = {
    'empty': EmptySitemap,
}
fixed_lastmod_sitemaps = {
    'fixed-lastmod': FixedLastmodSitemap,
}
# NOTE(review): the double underscore looks like a typo, but the name is
# referenced as-is by urlpatterns below -- rename both or neither.
fixed_lastmod__mixed_sitemaps = {
    'fixed-lastmod-mixed': FixedLastmodMixedSitemap,
}
generic_sitemaps = {
    'generic': GenericSitemap({'queryset': TestModel.objects.all()}),
}
# URL patterns exercised by the sitemap tests. Most entries register under
# the canonical view name 'django.contrib.sitemaps.views.sitemap' so
# reverse() resolves them the same way as in a real deployment.
urlpatterns = [
    url(r'^simple/index\.xml$', views.index, {'sitemaps': simple_sitemaps}),
    url(r'^simple/custom-index\.xml$', views.index,
        {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap_index.xml'}),
    url(r'^simple/sitemap-(?P<section>.+)\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/sitemap\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/i18n\.xml$', views.sitemap,
        {'sitemaps': simple_i18nsitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^simple/custom-sitemap\.xml$', views.sitemap,
        {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap.xml'},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^empty/sitemap\.xml$', views.sitemap,
        {'sitemaps': empty_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/sitemap\.xml$', views.sitemap,
        {'sitemaps': fixed_lastmod_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod-mixed/sitemap\.xml$', views.sitemap,
        {'sitemaps': fixed_lastmod__mixed_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    # NOTE(review): the two patterns below use an unescaped '.' before
    # "xml" (no backslash), unlike every other entry -- looks like an
    # oversight, but preserved since '.' still matches a literal dot.
    url(r'^lastmod/date-sitemap.xml$', views.sitemap,
        {'sitemaps': {'date-sitemap': DateSiteMap}},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^lastmod/tz-sitemap.xml$', views.sitemap,
        {'sitemaps': {'tz-sitemap': TimezoneSiteMap}},
        name='django.contrib.sitemaps.views.sitemap'),
    url(r'^generic/sitemap\.xml$', views.sitemap,
        {'sitemaps': generic_sitemaps},
        name='django.contrib.sitemaps.views.sitemap'),
    # Cached variants: wrapped in a 1-second cache_page to exercise caching.
    url(r'^cached/index\.xml$', cache_page(1)(views.index),
        {'sitemaps': simple_sitemaps, 'sitemap_url_name': 'cached_sitemap'}),
    url(r'^cached/sitemap-(?P<section>.+)\.xml', cache_page(1)(views.sitemap),
        {'sitemaps': simple_sitemaps}, name='cached_sitemap')
]
# Language-prefixed pattern used by the i18n sitemap tests.
urlpatterns += i18n_patterns(
    url(r'^i18n/testmodel/(?P<id>\d+)/$', testmodelview, name='i18n_testmodel'),
)
|
resmo/cloudstack
|
refs/heads/master
|
plugins/hypervisors/ovm/scripts/vm/hypervisor/ovm/OvmHostModule.py
|
9
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from OvmCommonModule import *
from OVSSiteRMServer import get_master_ip, register_server
from OVSCommons import *
from OVSXMonitor import xen_get_xm_info
from OVSXSysInfo import get_agent_version
from OVSSiteRMServer import get_srv_agent_status
from OVSXMonitor import sys_perf_info
from OVSDB import db_get_vm
from OvmStoragePoolModule import OvmStoragePool
from OvmHaHeartBeatModule import OvmHaHeartBeat
import re
logger = OvmLogger('OvmHost')
class OvmHostEncoder(json.JSONEncoder):
    """JSON encoder that flattens an OvmHost into a plain dict."""
    # Attributes copied verbatim from the host object into the dict, in the
    # same order the original listed them.
    _FIELDS = ('masterIp', 'cpuNum', 'cpuSpeed', 'totalMemory', 'freeMemory',
               'dom0Memory', 'agentVersion', 'name', 'dom0KernelVersion',
               'hypervisorVersion')

    def default(self, obj):
        if not isinstance(obj, OvmHost):
            raise Exception("%s is not instance of OvmHost"%type(obj))
        dct = {}
        for field in self._FIELDS:
            safeDictSet(obj, dct, field)
        return dct
def fromOvmHost(host):
    # Serialize an OvmHost to JSON, then normalize it to the Gson-style
    # string the management server expects.
    return normalizeToGson(json.dumps(host, cls=OvmHostEncoder))
class OvmHost(OvmObject):
masterIp = ''
cpuNum = 0
cpuSpeed = 0
totalMemory = 0
freeMemory = 0
dom0Memory = 0
agentVersion = ''
name = ''
dom0KernelVersion = ''
hypervisorVersion = ''
def _getVmPathFromPrimaryStorage(self, vmName):
'''
we don't have a database to store vm states, so there is no way to retrieve information of a vm
when it was already stopped. The trick is to try to find the vm path in primary storage then we
can read information from its configure file.
'''
mps = OvmStoragePool()._getAllMountPoints()
vmPath = None
for p in mps:
vmPath = join(p, 'running_pool', vmName)
if exists(vmPath): break
if not vmPath:
logger.error(self._getVmPathFromPrimaryStorage, "Cannot find link for %s in any primary storage, the vm was really gone!"%vmName)
raise Exception("Cannot find link for %s in any primary storage, the vm was really gone!"%vmName)
return vmPath
def _vmNameToPath(self, vmName):
# the xen_get_vm_path always sucks!!!
#return successToMap((vmName))['path']
return self._getVmPathFromPrimaryStorage(vmName)
def _getAllDomains(self):
stdout = timeout_command(["xm", "list"])
l = [ line.split()[:2] for line in stdout.splitlines() ]
l = [ (name, id) for (name, id) in l if name not in ("Name", "Domain-0") ]
return l
def _getDomainIdByName(self, vmName):
l = self._getAllDomains()
for name, id in l:
if vmName == name: return id
raise NoVmFoundException("No domain id for %s found"%vmName)
@staticmethod
def registerAsMaster(hostname, username="oracle", password="password", port=8899, isSsl=False):
try:
logger.debug(OvmHost.registerAsMaster, "ip=%s, username=%s, password=%s, port=%s, isSsl=%s"%(hostname, username, password, port, isSsl))
exceptionIfNoSuccess(register_server(hostname, 'site', False, username, password, port, isSsl),
"Register %s as site failed"%hostname)
exceptionIfNoSuccess(register_server(hostname, 'utility', False, username, password, port, isSsl),
"Register %s as utility failed"%hostname)
rs = SUCC()
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.registerAsMaster, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.registerAsMaster), errmsg)
@staticmethod
def registerAsVmServer(hostname, username="oracle", password="password", port=8899, isSsl=False):
try:
logger.debug(OvmHost.registerAsVmServer, "ip=%s, username=%s, password=%s, port=%s, isSsl=%s"%(hostname, username, password, port, isSsl))
exceptionIfNoSuccess(register_server(hostname, 'xen', False, username, password, port, isSsl),
"Register %s as site failed"%hostname)
rs = SUCC()
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.registerAsVmServer, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.registerAsVmServer), errmsg)
@staticmethod
def ping(hostname):
try:
logger.debug(OvmHost.ping, "ping %s"%hostname)
exceptionIfNoSuccess(get_srv_agent_status(hostname), "Ovs agent is down")
rs = SUCC()
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.ping, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.ping, errmsg))
@staticmethod
def getDetails():
try:
obj = OvmHost()
masterIp = successToMap(get_master_ip())
safeSetAttr(obj, 'masterIp', masterIp['ip'])
xmInfo = successToMap(xen_get_xm_info())
totalMemory = MtoBytes(long(xmInfo['total_memory']))
safeSetAttr(obj, 'totalMemory', totalMemory)
freeMemory = MtoBytes(long(xmInfo['free_memory']))
safeSetAttr(obj, 'freeMemory', freeMemory)
dom0Memory = totalMemory - freeMemory
safeSetAttr(obj, 'dom0Memory', dom0Memory)
cpuNum = int(xmInfo['nr_cpus'])
safeSetAttr(obj, 'cpuNum', cpuNum)
cpuSpeed = int(xmInfo['cpu_mhz'])
safeSetAttr(obj, 'cpuSpeed', cpuSpeed)
name = xmInfo['host']
safeSetAttr(obj, 'name', name)
dom0KernelVersion = xmInfo['release']
safeSetAttr(obj, 'dom0KernelVersion', dom0KernelVersion)
hypervisorVersion = xmInfo['xen_major'] + '.' + xmInfo['xen_minor'] + xmInfo['xen_extra']
safeSetAttr(obj, 'hypervisorVersion', hypervisorVersion)
agtVersion = successToMap(get_agent_version())
safeSetAttr(obj, 'agentVersion', agtVersion['agent_version'])
res = fromOvmHost(obj)
logger.debug(OvmHost.getDetails, res)
return res
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.getDetails, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.getDetails), errmsg)
@staticmethod
def getPerformanceStats(bridgeName):
try:
rxBytesPath = join("/sys/class/net/", bridgeName, "statistics/rx_bytes")
txBytesPath = join("/sys/class/net/", bridgeName, "statistics/tx_bytes")
if not exists(rxBytesPath): raise Exception("Cannot find %s"%rxBytesPath)
if not exists(txBytesPath): raise Exception("Cannot find %s"%txBytesPath)
rxBytes = long(doCmd(['cat', rxBytesPath])) / 1000
txBytes = long(doCmd(['cat', txBytesPath])) / 1000
sysPerf = successToMap(sys_perf_info())
cpuUtil = float(100 - float(sysPerf['cpu_idle']) * 100)
freeMemory = MtoBytes(long(sysPerf['mem_free']))
xmInfo = successToMap(xen_get_xm_info())
totalMemory = MtoBytes(long(xmInfo['total_memory']))
rs = toGson({"cpuUtil":cpuUtil, "totalMemory":totalMemory, "freeMemory":freeMemory, "rxBytes":rxBytes, "txBytes":txBytes})
logger.info(OvmHost.getPerformanceStats, rs)
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.getPerformanceStats, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.getPerformanceStats), errmsg)
@staticmethod
def getAllVms():
def scanStoppedVmOnPrimaryStorage(vms):
def isMyVmDirLink(path):
return (islink(path) and exists(join(path, 'vm.cfg')) and ('-' in basename(path)) and (exists(join(path, makeOwnerFileName()))))
mps = OvmStoragePool()._getAllMountPoints()
for mountPoint in mps:
runningPool = join(mountPoint, 'running_pool')
if not exists(runningPool):
logger.debug(OvmHost.getAllVms, "Primary storage %s not existing, skip it. this should be first getAllVms() called from Ovm resource configure"%runningPool)
continue
for dir in os.listdir(runningPool):
vmDir = join(runningPool, dir)
if not isMyVmDirLink(vmDir):
logger.debug(OvmHost.getAllVms, "%s is not our vm directory, skip it"%vmDir)
continue
if vms.has_key(dir):
logger.debug(OvmHost.getAllVms, "%s is already in running list, skip it"%dir)
continue
logger.debug(OvmHost.getAllVms, "Found a stopped vm %s on primary storage %s, report it to management server" % (dir, mountPoint))
vms[dir] = "DOWN"
try:
l = OvmHost()._getAllDomains()
dct = {}
host = OvmHost()
for name, id in l:
try:
vmPath = host._getVmPathFromPrimaryStorage(name)
vmStatus = db_get_vm(vmPath)
dct[name] = vmStatus['status']
except Exception, e:
logger.debug(OvmHost.getAllVms, "Cannot find link for %s on primary storage, treat it as Error"%name)
dct[name] = 'ERROR'
scanStoppedVmOnPrimaryStorage(dct)
rs = toGson(dct)
logger.info(OvmHost.getAllVms, rs)
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.getAllVms, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.getAllVms), errmsg)
@staticmethod
def fence(ip):
# try 3 times to avoid race condition that read when heartbeat file is being written
def getTimeStamp(hbFile):
for i in range(1, 3):
f = open(hbFile, 'r')
str = f.readline()
items = re.findall(HEARTBEAT_TIMESTAMP_PATTERN, str)
if len(items) == 0:
logger.debug(OvmHost.fence, "Get an incorrect heartbeat data %s, will retry %s times" % (str, 3-i))
f.close()
time.sleep(5)
else:
f.close()
timestamp = items[0]
return timestamp.lstrip('<timestamp>').rstrip('</timestamp>')
# totally check in 6 mins, the update frequency is 2 mins
def check(hbFile):
for i in range(1, 6):
ts = getTimeStamp(hbFile)
time.sleep(60)
nts = getTimeStamp(hbFile)
if ts != nts: return True
else: logger.debug(OvmHost.fence, '%s is not updated, old value=%s, will retry %s times'%(hbFile, ts, 6-i))
return False
try:
mountpoints = OvmStoragePool()._getAllMountPoints()
hbFile = None
for m in mountpoints:
p = join(m, HEARTBEAT_DIR, ipToHeartBeatFileName(ip))
if exists(p):
hbFile = p
break
if not hbFile: raise Exception('Can not find heartbeat file for %s in pools %s'%(ip, mountpoints))
rs = toGson({"isLive":check(hbFile)})
logger.debug(OvmHost.fence, rs)
return rs
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.fence, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.fence), errmsg)
@staticmethod
def setupHeartBeat(poolUuid, ip):
try:
sr = OvmStoragePool()._getSrByNameLable(poolUuid)
OvmHaHeartBeat.start(sr.mountpoint, ip)
return SUCC()
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.setupHeartBeat, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.setupHeartBeat), errmsg)
@staticmethod
def pingAnotherHost(ip):
try:
doCmd(['ping', '-c', '1', '-n', '-q', ip])
return SUCC()
except Exception, e:
errmsg = fmt_err_msg(e)
logger.error(OvmHost.pingAnotherHost, errmsg)
raise XmlRpcFault(toErrCode(OvmHost, OvmHost.pingAnotherHost), errmsg)
# Manual smoke test: dump the VM list when run directly (Python 2 print).
if __name__ == "__main__":
    print OvmHost.getAllVms()
|
bspink/django
|
refs/heads/master
|
tests/backends/tests.py
|
77
|
# -*- coding: utf-8 -*-
# Unit and doctests for specific database backends.
from __future__ import unicode_literals
import copy
import datetime
import re
import threading
import unittest
import warnings
from decimal import Decimal, Rounded
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connection, connections,
reset_queries, transaction,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.postgresql_psycopg2 import version as pg_version
from django.db.backends.signals import connection_created
from django.db.backends.utils import CursorWrapper, format_number
from django.db.models import Avg, StdDev, Sum, Variance
from django.db.models.sql.constants import CURSOR
from django.db.utils import ConnectionHandler
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, mock, override_settings,
skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import str_prefix
from django.utils import six
from django.utils.six.moves import range
from . import models
class DummyBackendTest(SimpleTestCase):
    def test_no_databases(self):
        """
        An empty DATABASES setting must fall back to the dummy backend,
        which refuses to actually connect.
        """
        conns = ConnectionHandler({})
        engine = conns[DEFAULT_DB_ALIAS].settings_dict['ENGINE']
        self.assertEqual(engine, 'django.db.backends.dummy')
        with self.assertRaises(ImproperlyConfigured):
            conns[DEFAULT_DB_ALIAS].ensure_connection()
@unittest.skipUnless(connection.vendor == 'oracle', "Test only for Oracle")
class OracleTests(unittest.TestCase):
    # Oracle-backend-specific behaviors: quoting, stored procedures, cursor
    # variables, NCLOB reads, client encoding and NLS parameter ordering.
    def test_quote_name(self):
        # Check that '%' chars are escaped for query execution.
        name = '"SOME%NAME"'
        quoted_name = connection.ops.quote_name(name)
        # Interpolating with an empty tuple must give back the name unchanged.
        self.assertEqual(quoted_name % (), name)
    def test_dbms_session(self):
        # If the backend is Oracle, test that we can call a standard
        # stored procedure through our cursor wrapper.
        from django.db.backends.oracle.base import convert_unicode
        with connection.cursor() as cursor:
            cursor.callproc(convert_unicode('DBMS_SESSION.SET_IDENTIFIER'),
                            [convert_unicode('_django_testing!')])
    def test_cursor_var(self):
        # If the backend is Oracle, test that we can pass cursor variables
        # as query parameters.
        from django.db.backends.oracle.base import Database
        with connection.cursor() as cursor:
            var = cursor.var(Database.STRING)
            cursor.execute("BEGIN %s := 'X'; END; ", [var])
            self.assertEqual(var.getvalue(), 'X')
    def test_long_string(self):
        # If the backend is Oracle, test that we can save a text longer
        # than 4000 chars and read it properly
        with connection.cursor() as cursor:
            cursor.execute('CREATE TABLE ltext ("TEXT" NCLOB)')
            long_str = ''.join(six.text_type(x) for x in range(4000))
            cursor.execute('INSERT INTO ltext VALUES (%s)', [long_str])
            cursor.execute('SELECT text FROM ltext')
            row = cursor.fetchone()
            # NCLOB values come back as LOB handles; .read() yields the text.
            self.assertEqual(long_str, row[0].read())
            cursor.execute('DROP TABLE ltext')
    def test_client_encoding(self):
        # If the backend is Oracle, test that the client encoding is set
        # correctly. This was broken under Cygwin prior to r14781.
        connection.ensure_connection()
        self.assertEqual(connection.connection.encoding, "UTF-8")
        self.assertEqual(connection.connection.nencoding, "UTF-8")
    def test_order_of_nls_parameters(self):
        # an 'almost right' datetime should work with configured
        # NLS parameters as per #18465.
        with connection.cursor() as cursor:
            query = "select 1 from dual where '1936-12-29 00:00' < sysdate"
            # Test that the query succeeds without errors - pre #18465 this
            # wasn't the case.
            cursor.execute(query)
            self.assertEqual(cursor.fetchone()[0], 1)
@unittest.skipUnless(connection.vendor == 'sqlite', "Test only for SQLite")
class SQLiteTests(TestCase):
    longMessage = True
    def test_autoincrement(self):
        """
        Check that auto_increment fields are created with the AUTOINCREMENT
        keyword in order to be monotonically increasing. Refs #10164.
        """
        # Inspect the generated DDL rather than runtime behavior.
        with connection.schema_editor(collect_sql=True) as editor:
            editor.create_model(models.Square)
            statements = editor.collected_sql
        match = re.search('"id" ([^,]+),', statements[0])
        self.assertIsNotNone(match)
        self.assertEqual('integer NOT NULL PRIMARY KEY AUTOINCREMENT',
            match.group(1), "Wrong SQL used to create an auto-increment "
            "column on SQLite")
    def test_aggregation(self):
        """
        #19360: Raise NotImplementedError when aggregating on date/time fields.
        """
        for aggregate in (Sum, Avg, Variance, StdDev):
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate, aggregate('time'))
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate, aggregate('date'))
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate, aggregate('last_modified'))
            # Complex (combined) aggregates over datetimes must fail too.
            self.assertRaises(
                NotImplementedError,
                models.Item.objects.all().aggregate,
                **{'complex': aggregate('last_modified') + aggregate('last_modified')})
@unittest.skipUnless(connection.vendor == 'postgresql', "Test only for PostgreSQL")
class PostgreSQLTests(TestCase):
    # PostgreSQL-backend-specific behaviors: version parsing/detection,
    # _nodb_connection fallback, time zone and autocommit/isolation handling,
    # array round-trips, lookup casts and psycopg2 version extraction.
    def assert_parses(self, version_string, version):
        self.assertEqual(pg_version._parse_version(version_string), version)
    def test_parsing(self):
        """Test PostgreSQL version parsing from `SELECT version()` output"""
        self.assert_parses("PostgreSQL 9.3 beta4", 90300)
        self.assert_parses("PostgreSQL 9.3", 90300)
        self.assert_parses("EnterpriseDB 9.3", 90300)
        self.assert_parses("PostgreSQL 9.3.6", 90306)
        self.assert_parses("PostgreSQL 9.4beta1", 90400)
        self.assert_parses("PostgreSQL 9.3.1 on i386-apple-darwin9.2.2, compiled by GCC i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 (Apple Inc. build 5478)", 90301)
    def test_nodb_connection(self):
        """
        Test that the _nodb_connection property fallbacks to the default connection
        database when access to the 'postgres' database is not granted.
        """
        def mocked_connect(self):
            # Simulate "postgres" database being unavailable.
            if self.settings_dict['NAME'] is None:
                raise DatabaseError()
            return ''
        nodb_conn = connection._nodb_connection
        self.assertIsNone(nodb_conn.settings_dict['NAME'])
        # Now assume the 'postgres' db isn't available
        del connection._nodb_connection
        with warnings.catch_warnings(record=True) as w:
            with mock.patch('django.db.backends.base.base.BaseDatabaseWrapper.connect',
                            side_effect=mocked_connect, autospec=True):
                nodb_conn = connection._nodb_connection
        del connection._nodb_connection
        self.assertIsNotNone(nodb_conn.settings_dict['NAME'])
        self.assertEqual(nodb_conn.settings_dict['NAME'], settings.DATABASES[DEFAULT_DB_ALIAS]['NAME'])
        # Check a RuntimeWarning has been emitted
        self.assertEqual(len(w), 1)
        self.assertEqual(w[0].message.__class__, RuntimeWarning)
    def test_version_detection(self):
        """Test PostgreSQL version detection"""
        # Helper mocks
        class CursorMock(object):
            "Very simple mock of DB-API cursor"
            def execute(self, arg):
                pass
            def fetchone(self):
                return ["PostgreSQL 9.3"]
            def __enter__(self):
                return self
            def __exit__(self, type, value, traceback):
                pass
        class OlderConnectionMock(object):
            "Mock of psycopg2 (< 2.0.12) connection"
            def cursor(self):
                return CursorMock()
        # psycopg2 < 2.0.12 code path
        conn = OlderConnectionMock()
        self.assertEqual(pg_version.get_version(conn), 90300)
    def test_connect_and_rollback(self):
        """
        PostgreSQL shouldn't roll back SET TIME ZONE, even if the first
        transaction is rolled back (#17062).
        """
        databases = copy.deepcopy(settings.DATABASES)
        new_connections = ConnectionHandler(databases)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        try:
            # Ensure the database default time zone is different than
            # the time zone in new_connection.settings_dict. We can
            # get the default time zone by reset & show.
            cursor = new_connection.cursor()
            cursor.execute("RESET TIMEZONE")
            cursor.execute("SHOW TIMEZONE")
            db_default_tz = cursor.fetchone()[0]
            new_tz = 'Europe/Paris' if db_default_tz == 'UTC' else 'UTC'
            new_connection.close()
            # Invalidate timezone name cache, because the setting_changed
            # handler cannot know about new_connection.
            del new_connection.timezone_name
            # Fetch a new connection with the new_tz as default
            # time zone, run a query and rollback.
            with self.settings(TIME_ZONE=new_tz):
                new_connection.set_autocommit(False)
                cursor = new_connection.cursor()
                new_connection.rollback()
                # Now let's see if the rollback rolled back the SET TIME ZONE.
                cursor.execute("SHOW TIMEZONE")
                tz = cursor.fetchone()[0]
                self.assertEqual(new_tz, tz)
        finally:
            new_connection.close()
    def test_connect_non_autocommit(self):
        """
        The connection wrapper shouldn't believe that autocommit is enabled
        after setting the time zone when AUTOCOMMIT is False (#21452).
        """
        databases = copy.deepcopy(settings.DATABASES)
        databases[DEFAULT_DB_ALIAS]['AUTOCOMMIT'] = False
        new_connections = ConnectionHandler(databases)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        try:
            # Open a database connection.
            new_connection.cursor()
            self.assertFalse(new_connection.get_autocommit())
        finally:
            new_connection.close()
    def test_connect_isolation_level(self):
        """
        Regression test for #18130 and #24318.
        """
        from psycopg2.extensions import (
            ISOLATION_LEVEL_READ_COMMITTED as read_committed,
            ISOLATION_LEVEL_SERIALIZABLE as serializable,
        )
        # Since this is a django.test.TestCase, a transaction is in progress
        # and the isolation level isn't reported as 0. This test assumes that
        # PostgreSQL is configured with the default isolation level.
        # Check the level on the psycopg2 connection, not the Django wrapper.
        self.assertEqual(connection.connection.isolation_level, read_committed)
        databases = copy.deepcopy(settings.DATABASES)
        databases[DEFAULT_DB_ALIAS]['OPTIONS']['isolation_level'] = serializable
        new_connections = ConnectionHandler(databases)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        try:
            # Start a transaction so the isolation level isn't reported as 0.
            new_connection.set_autocommit(False)
            # Check the level on the psycopg2 connection, not the Django wrapper.
            self.assertEqual(new_connection.connection.isolation_level, serializable)
        finally:
            new_connection.close()
    def _select(self, val):
        # Round-trip a value through a parameterized SELECT.
        with connection.cursor() as cursor:
            cursor.execute("SELECT %s", (val,))
            return cursor.fetchone()[0]
    def test_select_ascii_array(self):
        a = ["awef"]
        b = self._select(a)
        self.assertEqual(a[0], b[0])
    def test_select_unicode_array(self):
        a = ["ᄲawef"]
        b = self._select(a)
        self.assertEqual(a[0], b[0])
    def test_lookup_cast(self):
        # Text lookups must cast the column to ::text for the index to apply.
        from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
        do = DatabaseOperations(connection=None)
        for lookup in ('iexact', 'contains', 'icontains', 'startswith',
                'istartswith', 'endswith', 'iendswith', 'regex', 'iregex'):
            self.assertIn('::text', do.lookup_cast(lookup))
    def test_correct_extraction_psycopg2_version(self):
        # Non-numeric trailing segments (e.g. 'dev0') must be dropped.
        from django.db.backends.postgresql_psycopg2.base import psycopg2_version
        version_path = 'django.db.backends.postgresql_psycopg2.base.Database.__version__'
        with mock.patch(version_path, '2.6.9'):
            self.assertEqual(psycopg2_version(), (2, 6, 9))
        with mock.patch(version_path, '2.5.dev0'):
            self.assertEqual(psycopg2_version(), (2, 5))
class DateQuotingTest(TestCase):
    # Fields named like date parts ('year', 'day') must not confuse the
    # backend's date trunc/extract SQL generation.
    def test_django_date_trunc(self):
        """
        Test the custom ``django_date_trunc method``, in particular against
        fields which clash with strings passed to it (e.g. 'year') - see
        #12818__.
        __: http://code.djangoproject.com/ticket/12818
        """
        updated = datetime.datetime(2010, 2, 20)
        models.SchoolClass.objects.create(year=2009, last_updated=updated)
        years = models.SchoolClass.objects.dates('last_updated', 'year')
        self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
    def test_django_date_extract(self):
        """
        Test the custom ``django_date_extract method``, in particular against fields
        which clash with strings passed to it (e.g. 'day') - see #12818__.
        __: http://code.djangoproject.com/ticket/12818
        """
        updated = datetime.datetime(2010, 2, 20)
        models.SchoolClass.objects.create(year=2009, last_updated=updated)
        classes = models.SchoolClass.objects.filter(last_updated__day=20)
        self.assertEqual(len(classes), 1)
# DEBUG=True so connection.queries records executed SQL.
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
    def test_last_executed_query(self):
        """
        last_executed_query should not raise an exception even if no previous
        query has been run.
        """
        cursor = connection.cursor()
        connection.ops.last_executed_query(cursor, '', ())
    def test_debug_sql(self):
        # Force query execution, then inspect the recorded SQL.
        list(models.Reporter.objects.filter(first_name="test"))
        sql = connection.queries[-1]['sql'].lower()
        self.assertIn("select", sql)
        self.assertIn(models.Reporter._meta.db_table, sql)
    def test_query_encoding(self):
        """
        Test that last_executed_query() returns an Unicode string
        """
        # Mixes raw bytes params with a non-ASCII alias to stress decoding.
        data = models.RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'föö': 1})
        sql, params = data.query.sql_with_params()
        cursor = data.query.get_compiler('default').execute_sql(CURSOR)
        last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
        self.assertIsInstance(last_sql, six.text_type)
    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This test is specific to SQLite.")
    def test_no_interpolation_on_sqlite(self):
        # Regression for #17158
        # This shouldn't raise an exception
        query = "SELECT strftime('%Y', 'now');"
        connection.cursor().execute(query)
        self.assertEqual(connection.queries[-1]['sql'],
            str_prefix("QUERY = %(_)s\"SELECT strftime('%%Y', 'now');\" - PARAMS = ()"))
class ParameterHandlingTest(TestCase):
    def test_bad_parameter_count(self):
        "An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
        cursor = connection.cursor()
        table = connection.introspection.table_name_converter('backends_square')
        root_col = connection.ops.quote_name('root')
        square_col = connection.ops.quote_name('square')
        query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (table, root_col, square_col)
        # One parameter too many per row, then one too few.
        self.assertRaises(Exception, cursor.executemany, query, [(1, 2, 3)])
        self.assertRaises(Exception, cursor.executemany, query, [(1,)])
# Unfortunately, the following tests would be a good test to run on all
# backends, but it breaks MySQL hard. Until #13711 is fixed, it can't be run
# everywhere (although it would be an effective test of #13711).
class LongNameTest(TransactionTestCase):
    """Long primary keys and model names can result in a sequence name
    that exceeds the database limits, which will result in truncation
    on certain databases (e.g., Postgres). The backend needs to use
    the correct sequence name in last_insert_id and other places, so
    check it is. Refs #8901.
    """
    available_apps = ['backends']
    def test_sequence_name_length_limits_create(self):
        """Test creation of model with long name and long pk name doesn't error. Ref #8901"""
        models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
    def test_sequence_name_length_limits_m2m(self):
        """Test an m2m save of a model with a long name and a long m2m field name doesn't error as on Django >=1.2 this now uses object saves. Ref #8901"""
        obj = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
        rel_obj = models.Person.objects.create(first_name='Django', last_name='Reinhardt')
        obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
    def test_sequence_name_length_limits_flush(self):
        """Test that sequence resetting as part of a flush with model with long name and long pk name doesn't error. Ref #8901"""
        # A full flush is expensive to the full test, so we dig into the
        # internals to generate the likely offending SQL and run it manually
        # Some convenience aliases
        VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
        VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
        tables = [
            VLM._meta.db_table,
            VLM_m2m._meta.db_table,
        ]
        sequences = [
            {
                'column': VLM._meta.pk.column,
                'table': VLM._meta.db_table
            },
        ]
        # Execute the flush SQL directly; it must reference valid sequence names.
        cursor = connection.cursor()
        for statement in connection.ops.sql_flush(no_style(), tables, sequences):
            cursor.execute(statement)
class SequenceResetTest(TestCase):
    def test_generic_relation(self):
        "Sequence names are correct when resetting generic relations (Ref #13941)"
        # Create an object with a manually specified PK
        models.Post.objects.create(id=10, name='1st post', text='hello world')
        # Reset the sequences for the database
        cursor = connection.cursor()
        commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [models.Post])
        for sql in commands:
            cursor.execute(sql)
        # If we create a new object now, it should have a PK greater
        # than the PK we specified manually.
        obj = models.Post.objects.create(name='New post', text='goodbye world')
        self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
    available_apps = []
    # Unfortunately with sqlite3 the in-memory test database cannot be closed,
    # and so it cannot be re-opened during testing.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    def test_signal(self):
        data = {}
        def receiver(sender, connection, **kwargs):
            data["connection"] = connection
        connection_created.connect(receiver)
        connection.close()
        # Reopening fires connection_created with the fresh connection.
        connection.cursor()
        self.assertIs(data["connection"].connection, connection.connection)
        connection_created.disconnect(receiver)
        data.clear()
        connection.cursor()
        # After disconnecting, the receiver must no longer be invoked.
        self.assertEqual(data, {})
class EscapingChecks(TestCase):
    """
    All tests in this test case are also run with settings.DEBUG=True in
    EscapingChecksDebug test case, to also test CursorDebugWrapper.
    """
    # Suffix some backends require on a SELECT with no FROM clause.
    bare_select_suffix = connection.features.bare_select_suffix
    def test_paramless_no_escaping(self):
        # '%s' inside SQL with no params must be sent literally.
        cursor = connection.cursor()
        cursor.execute("SELECT '%s'" + self.bare_select_suffix)
        self.assertEqual(cursor.fetchall()[0][0], '%s')
    def test_parameter_escaping(self):
        # '%%' collapses to '%' while the real parameter passes through.
        cursor = connection.cursor()
        cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ('%d',))
        self.assertEqual(cursor.fetchall()[0], ('%', '%d'))
    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This is an sqlite-specific issue")
    def test_sqlite_parameter_escaping(self):
        # '%s' escaping support for sqlite3 #13648
        cursor = connection.cursor()
        cursor.execute("select strftime('%s', date('now'))")
        response = cursor.fetchall()[0][0]
        # response should be an non-zero integer
        self.assertTrue(int(response))
# Re-run every EscapingChecks test with DEBUG=True so queries also go
# through CursorDebugWrapper.
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
    pass
class BackendTestCase(TransactionTestCase):
    """Backend-level tests: raw cursors, executemany, parameter styles,
    unicode handling, cursor context managers and connection.queries."""

    available_apps = ['backends']

    def create_squares_with_executemany(self, args):
        # Convenience wrapper: insert via executemany() with 'format' style.
        self.create_squares(args, 'format', True)

    def create_squares(self, args, paramstyle, multiple):
        """Insert rows into the Square table with raw SQL built for the
        requested paramstyle ('format' or 'pyformat')."""
        cursor = connection.cursor()
        opts = models.Square._meta
        tbl = connection.introspection.table_name_converter(opts.db_table)
        f1 = connection.ops.quote_name(opts.get_field('root').column)
        f2 = connection.ops.quote_name(opts.get_field('square').column)
        if paramstyle == 'format':
            # %% escapes the placeholder so the final SQL contains %s.
            query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
        elif paramstyle == 'pyformat':
            query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
        else:
            raise ValueError("unsupported paramstyle in test")
        if multiple:
            cursor.executemany(query, args)
        else:
            cursor.execute(query, args)

    def test_cursor_executemany(self):
        # Test cursor.executemany #4896
        args = [(i, i ** 2) for i in range(-5, 6)]
        self.create_squares_with_executemany(args)
        self.assertEqual(models.Square.objects.count(), 11)
        for i in range(-5, 6):
            square = models.Square.objects.get(root=i)
            self.assertEqual(square.square, i ** 2)

    def test_cursor_executemany_with_empty_params_list(self):
        # Test executemany with params=[] does nothing #4765
        args = []
        self.create_squares_with_executemany(args)
        self.assertEqual(models.Square.objects.count(), 0)

    def test_cursor_executemany_with_iterator(self):
        # Test executemany accepts iterators #10320
        args = iter((i, i ** 2) for i in range(-3, 2))
        self.create_squares_with_executemany(args)
        self.assertEqual(models.Square.objects.count(), 5)
        args = iter((i, i ** 2) for i in range(3, 7))
        with override_settings(DEBUG=True):
            # same test for DebugCursorWrapper
            self.create_squares_with_executemany(args)
        self.assertEqual(models.Square.objects.count(), 9)

    @skipUnlessDBFeature('supports_paramstyle_pyformat')
    def test_cursor_execute_with_pyformat(self):
        # Support pyformat style passing of parameters #10070
        args = {'root': 3, 'square': 9}
        self.create_squares(args, 'pyformat', multiple=False)
        self.assertEqual(models.Square.objects.count(), 1)

    @skipUnlessDBFeature('supports_paramstyle_pyformat')
    def test_cursor_executemany_with_pyformat(self):
        # Support pyformat style passing of parameters #10070
        args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
        self.create_squares(args, 'pyformat', multiple=True)
        self.assertEqual(models.Square.objects.count(), 11)
        for i in range(-5, 6):
            square = models.Square.objects.get(root=i)
            self.assertEqual(square.square, i ** 2)

    @skipUnlessDBFeature('supports_paramstyle_pyformat')
    def test_cursor_executemany_with_pyformat_iterator(self):
        # Iterators must also be accepted with the pyformat style.
        args = iter({'root': i, 'square': i ** 2} for i in range(-3, 2))
        self.create_squares(args, 'pyformat', multiple=True)
        self.assertEqual(models.Square.objects.count(), 5)
        args = iter({'root': i, 'square': i ** 2} for i in range(3, 7))
        with override_settings(DEBUG=True):
            # same test for DebugCursorWrapper
            self.create_squares(args, 'pyformat', multiple=True)
        self.assertEqual(models.Square.objects.count(), 9)

    def test_unicode_fetches(self):
        # fetchone, fetchmany, fetchall return strings as unicode objects #6254
        qn = connection.ops.quote_name
        models.Person(first_name="John", last_name="Doe").save()
        models.Person(first_name="Jane", last_name="Doe").save()
        models.Person(first_name="Mary", last_name="Agnelline").save()
        models.Person(first_name="Peter", last_name="Parker").save()
        models.Person(first_name="Clark", last_name="Kent").save()
        opts2 = models.Person._meta
        f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
        # Rows come back ordered by first name, so the expected values
        # below are deterministic.
        query2 = ('SELECT %s, %s FROM %s ORDER BY %s'
                  % (qn(f3.column), qn(f4.column), connection.introspection.table_name_converter(opts2.db_table),
                     qn(f3.column)))
        cursor = connection.cursor()
        cursor.execute(query2)
        self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
        self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
        self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])

    def test_unicode_password(self):
        # Connecting with a non-ASCII password must not blow up with an
        # encoding error; a DatabaseError (wrong password) is acceptable.
        old_password = connection.settings_dict['PASSWORD']
        connection.settings_dict['PASSWORD'] = "françois"
        try:
            connection.cursor()
        except DatabaseError:
            # As password is probably wrong, a database exception is expected
            pass
        except Exception as e:
            self.fail("Unexpected error raised with unicode password: %s" % e)
        finally:
            connection.settings_dict['PASSWORD'] = old_password

    def test_database_operations_helper_class(self):
        # Ticket #13630
        self.assertTrue(hasattr(connection, 'ops'))
        self.assertTrue(hasattr(connection.ops, 'connection'))
        self.assertEqual(connection, connection.ops.connection)

    def test_database_operations_init(self):
        """
        Test that DatabaseOperations initialization doesn't query the database.
        See #17656.
        """
        with self.assertNumQueries(0):
            connection.ops.__class__(connection)

    def test_cached_db_features(self):
        # Cached feature flags must resolve to actual booleans.
        self.assertIn(connection.features.supports_transactions, (True, False))
        self.assertIn(connection.features.supports_stddev, (True, False))
        self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))

    def test_duplicate_table_error(self):
        """ Test that creating an existing table returns a DatabaseError """
        cursor = connection.cursor()
        query = 'CREATE TABLE %s (id INTEGER);' % models.Article._meta.db_table
        with self.assertRaises(DatabaseError):
            cursor.execute(query)

    def test_cursor_contextmanager(self):
        """
        Test that cursors can be used as a context manager
        """
        with connection.cursor() as cursor:
            self.assertIsInstance(cursor, CursorWrapper)
        # Both InterfaceError and ProgrammingError seem to be used when
        # accessing closed cursor (psycopg2 has InterfaceError, rest seem
        # to use ProgrammingError).
        with self.assertRaises(connection.features.closed_cursor_error_class):
            # cursor should be closed, so no queries should be possible.
            cursor.execute("SELECT 1" + connection.features.bare_select_suffix)

    @unittest.skipUnless(connection.vendor == 'postgresql',
                         "Psycopg2 specific cursor.closed attribute needed")
    def test_cursor_contextmanager_closing(self):
        # There isn't a generic way to test that cursors are closed, but
        # psycopg2 offers us a way to check that by closed attribute.
        # So, run only on psycopg2 for that reason.
        with connection.cursor() as cursor:
            self.assertIsInstance(cursor, CursorWrapper)
        self.assertTrue(cursor.closed)

    # Unfortunately with sqlite3 the in-memory test database cannot be closed.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    def test_is_usable_after_database_disconnects(self):
        """
        Test that is_usable() doesn't crash when the database disconnects.
        Regression for #21553.
        """
        # Open a connection to the database.
        with connection.cursor():
            pass
        # Emulate a connection close by the database.
        connection._close()
        # Even then is_usable() should not raise an exception.
        try:
            self.assertFalse(connection.is_usable())
        finally:
            # Clean up the mess created by connection._close(). Since the
            # connection is already closed, this crashes on some backends.
            try:
                connection.close()
            except Exception:
                pass

    @override_settings(DEBUG=True)
    def test_queries(self):
        """
        Test the documented API of connection.queries.
        """
        with connection.cursor() as cursor:
            reset_queries()
            cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
        self.assertEqual(1, len(connection.queries))
        self.assertIsInstance(connection.queries, list)
        self.assertIsInstance(connection.queries[0], dict)
        six.assertCountEqual(self, connection.queries[0].keys(), ['sql', 'time'])
        reset_queries()
        self.assertEqual(0, len(connection.queries))

    # Unfortunately with sqlite3 the in-memory test database cannot be closed.
    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    @override_settings(DEBUG=True)
    def test_queries_limit(self):
        """
        Test that the backend doesn't store an unlimited number of queries.
        Regression for #12581.
        """
        old_queries_limit = BaseDatabaseWrapper.queries_limit
        BaseDatabaseWrapper.queries_limit = 3
        new_connections = ConnectionHandler(settings.DATABASES)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        # Initialize the connection and clear initialization statements.
        with new_connection.cursor():
            pass
        new_connection.queries_log.clear()
        try:
            with new_connection.cursor() as cursor:
                cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
                cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
            # Below the limit: no truncation warning expected.
            with warnings.catch_warnings(record=True) as w:
                self.assertEqual(2, len(new_connection.queries))
                self.assertEqual(0, len(w))
            with new_connection.cursor() as cursor:
                cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
                cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
            # Over the limit: the log is capped and a warning is emitted.
            with warnings.catch_warnings(record=True) as w:
                self.assertEqual(3, len(new_connection.queries))
                self.assertEqual(1, len(w))
                self.assertEqual(str(w[0].message), "Limit for query logging "
                    "exceeded, only the last 3 queries will be returned.")
        finally:
            # Restore the class-level limit even if assertions fail.
            BaseDatabaseWrapper.queries_limit = old_queries_limit
            new_connection.close()
# We don't make these tests conditional because that means we would need to
# check and differentiate between:
# * MySQL+InnoDB, MySQL+MYISAM (something we currently can't do).
# * if sqlite3 (if/once we get #14204 fixed) has referential integrity turned
# on or not, something that would be controlled by runtime support and user
# preference.
# A test would then need to verify that the raised exception's type is django.db.utils.IntegrityError.
class FkConstraintsTests(TransactionTestCase):
    """FK integrity errors must surface as IntegrityError, and constraint
    checking must be controllable via the connection's enable/disable API."""

    available_apps = ['backends']

    def setUp(self):
        # Create a Reporter.
        self.r = models.Reporter.objects.create(first_name='John', last_name='Smith')

    def test_integrity_checks_on_creation(self):
        """
        Try to create a model instance that violates a FK constraint. If it
        fails it should fail with IntegrityError.
        """
        # reporter_id=30 points at a Reporter that does not exist.
        a1 = models.Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
        try:
            a1.save()
        except IntegrityError:
            pass
        else:
            self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks we make sure
        # constraints are also enforced for proxy models. Refs #17519
        a2 = models.Article(headline='This is another test', reporter=self.r,
                            pub_date=datetime.datetime(2012, 8, 3),
                            reporter_proxy_id=30)
        self.assertRaises(IntegrityError, a2.save)

    def test_integrity_checks_on_update(self):
        """
        Try to update a model instance introducing a FK constraint violation.
        If it fails it should fail with IntegrityError.
        """
        # Create an Article.
        models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
        # Retrieve it from the DB
        a1 = models.Article.objects.get(headline="Test article")
        a1.reporter_id = 30
        try:
            a1.save()
        except IntegrityError:
            pass
        else:
            self.skipTest("This backend does not support integrity checks.")
        # Now that we know this backend supports integrity checks we make sure
        # constraints are also enforced for proxy models. Refs #17519
        # Create another article
        r_proxy = models.ReporterProxy.objects.get(pk=self.r.pk)
        models.Article.objects.create(headline='Another article',
                                      pub_date=datetime.datetime(1988, 5, 15),
                                      reporter=self.r, reporter_proxy=r_proxy)
        # Retrieve the second article from the DB
        a2 = models.Article.objects.get(headline='Another article')
        a2.reporter_proxy_id = 30
        self.assertRaises(IntegrityError, a2.save)

    def test_disable_constraint_checks_manually(self):
        """
        When constraint checks are disabled, should be able to write bad data without IntegrityErrors.
        """
        with transaction.atomic():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                connection.disable_constraint_checking()
                a.save()
                connection.enable_constraint_checking()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            # Roll back so the bad row never persists past this test.
            transaction.set_rollback(True)

    def test_disable_constraint_checks_context_manager(self):
        """
        When constraint checks are disabled (using context manager), should be able to write bad data without IntegrityErrors.
        """
        with transaction.atomic():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                with connection.constraint_checks_disabled():
                    a.save()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            # Roll back so the bad row never persists past this test.
            transaction.set_rollback(True)

    def test_check_constraints(self):
        """
        Constraint checks should raise an IntegrityError when bad data is in the DB.
        """
        with transaction.atomic():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            with connection.constraint_checks_disabled():
                a.save()
                # check_constraints() must notice the bad row that was
                # written while checks were off.
                with self.assertRaises(IntegrityError):
                    connection.check_constraints()
            transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
    """Connection/thread interaction: thread-locality and the
    allow_thread_sharing flag. Refs #17258."""

    available_apps = ['backends']

    def test_default_connection_thread_local(self):
        """
        Ensure that the default connection (i.e. django.db.connection) is
        different for each thread.
        Refs #17258.
        """
        # Map connections by id because connections with identical aliases
        # have the same hash.
        connections_dict = {}
        connection.cursor()
        connections_dict[id(connection)] = connection

        def runner():
            # Passing django.db.connection between threads doesn't work while
            # connections[DEFAULT_DB_ALIAS] does.
            from django.db import connections
            connection = connections[DEFAULT_DB_ALIAS]
            # Allow thread sharing so the connection can be closed by the
            # main thread.
            connection.allow_thread_sharing = True
            connection.cursor()
            connections_dict[id(connection)] = connection
        for x in range(2):
            t = threading.Thread(target=runner)
            t.start()
            t.join()
        # Check that each created connection got different inner connection.
        # Main thread + 2 worker threads = 3 distinct DBAPI connections.
        self.assertEqual(
            len(set(conn.connection for conn in connections_dict.values())),
            3)
        # Finish by closing the connections opened by the other threads (the
        # connection opened in the main thread will automatically be closed on
        # teardown).
        for conn in connections_dict.values():
            if conn is not connection:
                conn.close()

    def test_connections_thread_local(self):
        """
        Ensure that the connections are different for each thread.
        Refs #17258.
        """
        # Map connections by id because connections with identical aliases
        # have the same hash.
        connections_dict = {}
        for conn in connections.all():
            connections_dict[id(conn)] = conn

        def runner():
            from django.db import connections
            for conn in connections.all():
                # Allow thread sharing so the connection can be closed by the
                # main thread.
                conn.allow_thread_sharing = True
                connections_dict[id(conn)] = conn
        for x in range(2):
            t = threading.Thread(target=runner)
            t.start()
            t.join()
        # NOTE: 6 assumes two configured aliases times three threads —
        # the count tracks settings.DATABASES.
        self.assertEqual(len(connections_dict), 6)
        # Finish by closing the connections opened by the other threads (the
        # connection opened in the main thread will automatically be closed on
        # teardown).
        for conn in connections_dict.values():
            if conn is not connection:
                conn.close()

    def test_pass_connection_between_threads(self):
        """
        Ensure that a connection can be passed from one thread to the other.
        Refs #17258.
        """
        models.Person.objects.create(first_name="John", last_name="Doe")

        def do_thread():
            def runner(main_thread_connection):
                from django.db import connections
                connections['default'] = main_thread_connection
                try:
                    models.Person.objects.get(first_name="John", last_name="Doe")
                except Exception as e:
                    exceptions.append(e)
            t = threading.Thread(target=runner, args=[connections['default']])
            t.start()
            t.join()
        # Without touching allow_thread_sharing, which should be False by default.
        exceptions = []
        do_thread()
        # Forbidden!
        self.assertIsInstance(exceptions[0], DatabaseError)
        # If explicitly setting allow_thread_sharing to False
        connections['default'].allow_thread_sharing = False
        exceptions = []
        do_thread()
        # Forbidden!
        self.assertIsInstance(exceptions[0], DatabaseError)
        # If explicitly setting allow_thread_sharing to True
        connections['default'].allow_thread_sharing = True
        exceptions = []
        do_thread()
        # All good
        self.assertEqual(exceptions, [])

    def test_closing_non_shared_connections(self):
        """
        Ensure that a connection that is not explicitly shareable cannot be
        closed by another thread.
        Refs #17258.
        """
        # First, without explicitly enabling the connection for sharing.
        exceptions = set()

        def runner1():
            def runner2(other_thread_connection):
                try:
                    other_thread_connection.close()
                except DatabaseError as e:
                    exceptions.add(e)
            t2 = threading.Thread(target=runner2, args=[connections['default']])
            t2.start()
            t2.join()
        t1 = threading.Thread(target=runner1)
        t1.start()
        t1.join()
        # The exception was raised
        self.assertEqual(len(exceptions), 1)
        # Then, with explicitly enabling the connection for sharing.
        exceptions = set()

        def runner1():
            def runner2(other_thread_connection):
                try:
                    other_thread_connection.close()
                except DatabaseError as e:
                    exceptions.add(e)
            # Enable thread sharing
            connections['default'].allow_thread_sharing = True
            t2 = threading.Thread(target=runner2, args=[connections['default']])
            t2.start()
            t2.join()
        t1 = threading.Thread(target=runner1)
        t1.start()
        t1.join()
        # No exception was raised
        self.assertEqual(len(exceptions), 0)
class MySQLPKZeroTests(TestCase):
    """
    Zero as id for AutoField should raise exception in MySQL, because MySQL
    does not allow zero for autoincrement primary key.
    """
    @skipIfDBFeature('allows_auto_pk_0')
    def test_zero_as_autoval(self):
        # Forcing pk=0 must be rejected on backends that disallow 0 as an
        # auto-increment value.
        self.assertRaises(ValueError, models.Square.objects.create,
                          id=0, root=0, square=1)
class DBConstraintTestCase(TestCase):
    """FK references with and without an actual row on the other side."""

    def test_can_reference_existent(self):
        target = models.Object.objects.create()
        reference = models.ObjectReference.objects.create(obj=target)
        self.assertEqual(reference.obj, target)
        refetched = models.ObjectReference.objects.get(obj=target)
        self.assertEqual(refetched.obj, target)

    def test_can_reference_non_existent(self):
        # A reference row can point at a missing pk; dereferencing it must
        # raise DoesNotExist.
        self.assertFalse(models.Object.objects.filter(id=12345).exists())
        reference = models.ObjectReference.objects.create(obj_id=12345)
        refetched = models.ObjectReference.objects.get(obj_id=12345)
        self.assertEqual(reference, refetched)
        with self.assertRaises(models.Object.DoesNotExist):
            reference.obj

    def test_many_to_many(self):
        target = models.Object.objects.create()
        target.related_objects.create()
        self.assertEqual(models.Object.objects.count(), 2)
        self.assertEqual(target.related_objects.count(), 1)
        # Insert an m2m through-row pointing at a non-existent object: the
        # through table grows but the relation only yields real rows.
        through = models.Object._meta.get_field("related_objects").remote_field.through
        through.objects.create(from_object_id=target.id, to_object_id=12345)
        self.assertEqual(target.related_objects.count(), 1)
        self.assertEqual(through.objects.count(), 2)
class BackendUtilTests(SimpleTestCase):
    def test_format_number(self):
        """
        Test the format_number converter utility
        """
        def check(value, max_digits, decimal_places, expected):
            self.assertEqual(
                format_number(Decimal(value), max_digits, decimal_places),
                expected)

        # (value, max_digits, decimal_places, expected) — padding,
        # rounding, and None (unbounded) variants.
        cases = [
            ('0', 12, 3, '0.000'),
            ('0', 12, 8, '0.00000000'),
            ('1', 12, 9, '1.000000000'),
            ('0.00000000', 12, 8, '0.00000000'),
            ('0.000000004', 12, 8, '0.00000000'),
            ('0.000000008', 12, 8, '0.00000001'),
            ('0.000000000000000000999', 10, 8, '0.00000000'),
            ('0.1234567890', 12, 10, '0.1234567890'),
            ('0.1234567890', 12, 9, '0.123456789'),
            ('0.1234567890', 12, 8, '0.12345679'),
            ('0.1234567890', 12, 5, '0.12346'),
            ('0.1234567890', 12, 3, '0.123'),
            ('0.1234567890', 12, 1, '0.1'),
            ('0.1234567890', 12, 0, '0'),
            ('0.1234567890', None, 0, '0'),
            ('1234567890.1234567890', None, 0, '1234567890'),
            ('1234567890.1234567890', None, 2, '1234567890.12'),
            ('0.1234', 5, None, '0.1234'),
            ('123.12', 5, None, '123.12'),
        ]
        for value, max_digits, decimal_places, expected in cases:
            check(value, max_digits, decimal_places, expected)

        # Exceeding max_digits with decimal_places=None must raise Rounded.
        with self.assertRaises(Rounded):
            check('0.1234567890', 5, None, '0.12346')
        with self.assertRaises(Rounded):
            check('1234567890.1234', 5, None, '1234600000')
@unittest.skipUnless(connection.vendor == 'sqlite', 'SQLite specific test.')
@skipUnlessDBFeature('can_share_in_memory_db')
class TestSqliteThreadSharing(TransactionTestCase):
    available_apps = ['backends']

    def test_database_sharing_in_threads(self):
        """A worker thread must see the same shared in-memory SQLite
        database as the main thread."""
        def insert_object():
            models.Object.objects.create()

        insert_object()
        worker = threading.Thread(target=insert_object)
        worker.start()
        worker.join()
        # One row from each thread proves both hit the same database.
        self.assertEqual(models.Object.objects.count(), 2)
|
adedayo/intellij-community
|
refs/heads/master
|
python/helpers/pydev/tests_python/_debugger_case7.py
|
98
|
def Call():  # pydev debugger fixture: these locals are inspected while suspended here
    variable_for_test_1 = 10  # NOTE(review): names/line numbers are presumably referenced by the debugger test -- keep layout stable
    variable_for_test_2 = 20
    variable_for_test_3 = 30
if __name__ == '__main__':
    Call()
    print('TEST SUCEEDED!')  # marker string presumably matched verbatim by the harness (incl. spelling) -- do not edit
|
mgr0dzicki/python-neo
|
refs/heads/master
|
neo/test/iotest/test_elanio.py
|
13
|
# -*- coding: utf-8 -*-
"""
Tests of neo.io.elanio
"""
# needed for python 3 compatibility
from __future__ import absolute_import, division
import sys
try:
import unittest2 as unittest
except ImportError:
import unittest
from neo.io import ElanIO
from neo.test.iotest.common_io_test import BaseTestIO
@unittest.skipIf(sys.version_info[0] > 2, "not Python 3 compatible")
class TestElanIO(BaseTestIO, unittest.TestCase, ):
    # IO class under test plus the fixture files the common IO test
    # machinery (BaseTestIO) exercises and downloads.
    ioclass = ElanIO
    files_to_test = ['File_elan_1.eeg']
    files_to_download = ['File_elan_1.eeg',
                         'File_elan_1.eeg.ent',
                         'File_elan_1.eeg.pos',
                         ]
if __name__ == "__main__":
    unittest.main()
|
jacky-young/crosswalk-test-suite
|
refs/heads/master
|
misc/sampleapp-android-tests/sampleapp/hangonman_install.py
|
2
|
#!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Cici<cici.x.li@intel.com>
import unittest
import os, sys, commands
import comm
class TestSampleAppFunctions(unittest.TestCase):
    def test_install(self):
        """Install the Hangonman sample APK on the device and check the
        package appears in `pm list packages`."""
        comm.setUp()
        os.chdir(comm.const_path + "/../testapp/")
        sample_name = "Hangonman"
        # Locate the built APK for this sample in the testapp directory.
        apk_file = commands.getstatusoutput("ls | grep %s" % sample_name)[1]
        install_cmd = "adb -s " + comm.device + " install -r " + apk_file
        find_cmd = "adb -s " + comm.device + " shell pm list packages |grep org.xwalk.%s" % (sample_name.lower())
        comm.app_install(install_cmd, find_cmd, self)


if __name__ == '__main__':
    unittest.main()
|
johnmulder/pycon-tutorial-student
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup

# Packaging metadata for the PyCon tutorial exercise: installs the
# wordcount_lib module and the wordcount script, with pytest for tests.
setup(name='pycon-tutorial-student-jmulder',
      version='0.1',
      description='test stuff for pycon tutorial',
      py_modules=['wordcount_lib'],
      scripts=['wordcount'],
      setup_requires=[
          'pytest-runner',
      ],
      tests_require=[
          'pytest',
      ],
      )
|
kdwink/intellij-community
|
refs/heads/master
|
python/testData/codeInsight/controlflow/assertfalseargument.py
|
83
|
assert False, 'foo'  # NOTE(review): IDE control-flow test data -- statements after `assert False` are presumably expected to be flagged unreachable; layout may be significant, confirm before reformatting
print('unreachable 1')
assert False, f()
print('unreachable 2')
|
sxhao/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/layout_tests/models/test_expectations_unittest.py
|
118
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.host_mock import MockHost
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.models.test_configuration import *
from webkitpy.layout_tests.models.test_expectations import *
from webkitpy.layout_tests.models.test_configuration import *
try:
from collections import OrderedDict
except ImportError:
# Needed for Python < 2.7
from webkitpy.thirdparty.ordered_dict import OrderedDict
class Base(unittest.TestCase):
    # Shared fixture for the TestExpectations tests below.
    # Note that all of these tests are written assuming the configuration
    # being tested is Windows XP, Release build.
    def __init__(self, testFunc):
        # Build a mock Windows XP test port; _exp is populated lazily by
        # parse_exp().
        host = MockHost()
        self._port = host.port_factory.get('test-win-xp', None)
        self._exp = None
        unittest.TestCase.__init__(self, testFunc)

    def get_test(self, test_name):
        # FIXME: Remove this routine and just reference test names directly.
        return test_name

    def get_basic_tests(self):
        # Canonical test list shared by most cases below.
        return [self.get_test('failures/expected/text.html'),
                self.get_test('failures/expected/image_checksum.html'),
                self.get_test('failures/expected/crash.html'),
                self.get_test('failures/expected/missing_text.html'),
                self.get_test('failures/expected/image.html'),
                self.get_test('passes/text.html')]

    def get_basic_expectations(self):
        # Raw TestExpectations text matching get_basic_tests().
        return """
Bug(test) failures/expected/text.html [ Failure ]
Bug(test) failures/expected/crash.html [ WontFix ]
Bug(test) failures/expected/missing_image.html [ Rebaseline Missing ]
Bug(test) failures/expected/image_checksum.html [ WontFix ]
Bug(test) failures/expected/image.html [ WontFix Mac ]
"""

    def parse_exp(self, expectations, overrides=None, is_lint_mode=False):
        # Install the given expectations (and optional overrides) on the
        # mock port, then parse them into self._exp.
        expectations_dict = OrderedDict()
        expectations_dict['expectations'] = expectations
        if overrides:
            expectations_dict['overrides'] = overrides
        self._port.expectations_dict = lambda: expectations_dict
        expectations_to_lint = expectations_dict if is_lint_mode else None
        self._exp = TestExpectations(self._port, self.get_basic_tests(), expectations_to_lint=expectations_to_lint)

    def assert_exp(self, test, result):
        # Assert that `test` maps to exactly the single expectation `result`.
        self.assertEqual(self._exp.get_expectations(self.get_test(test)),
                         set([result]))

    def assert_bad_expectations(self, expectations, overrides=None):
        # Parsing in lint mode must reject the given expectations text.
        self.assertRaises(ParseError, self.parse_exp, expectations, is_lint_mode=True, overrides=overrides)
class BasicTests(Base):
    def test_basic(self):
        """Parse the canonical expectation set and spot-check one test
        from each category."""
        self.parse_exp(self.get_basic_expectations())
        for test_name, expected in (
                ('failures/expected/text.html', FAIL),
                ('failures/expected/image_checksum.html', PASS),
                ('passes/text.html', PASS),
                ('failures/expected/image.html', PASS)):
            self.assert_exp(test_name, expected)
class MiscTests(Base):
def test_multiple_results(self):
    # A single expectations line may map one test to several results.
    self.parse_exp('Bug(x) failures/expected/text.html [ Crash Failure ]')
    self.assertEqual(self._exp.get_expectations(
        self.get_test('failures/expected/text.html')),
        set([FAIL, CRASH]))
def test_result_was_expected(self):
    # Exercise the static result_was_expected() helper directly.
    # test basics
    self.assertEqual(TestExpectations.result_was_expected(PASS, set([PASS]), test_needs_rebaselining=False, test_is_skipped=False), True)
    self.assertEqual(TestExpectations.result_was_expected(FAIL, set([PASS]), test_needs_rebaselining=False, test_is_skipped=False), False)
    # test handling of SKIPped tests and results
    self.assertEqual(TestExpectations.result_was_expected(SKIP, set([CRASH]), test_needs_rebaselining=False, test_is_skipped=True), True)
    self.assertEqual(TestExpectations.result_was_expected(SKIP, set([CRASH]), test_needs_rebaselining=False, test_is_skipped=False), False)
    # test handling of MISSING results and the REBASELINE modifier
    self.assertEqual(TestExpectations.result_was_expected(MISSING, set([PASS]), test_needs_rebaselining=True, test_is_skipped=False), True)
    self.assertEqual(TestExpectations.result_was_expected(MISSING, set([PASS]), test_needs_rebaselining=False, test_is_skipped=False), False)
def test_remove_pixel_failures(self):
    # remove_pixel_failures() downgrades image-only expectations to PASS.
    self.assertEqual(TestExpectations.remove_pixel_failures(set([FAIL])), set([FAIL]))
    self.assertEqual(TestExpectations.remove_pixel_failures(set([PASS])), set([PASS]))
    self.assertEqual(TestExpectations.remove_pixel_failures(set([IMAGE])), set([PASS]))
    # NOTE(review): duplicate of the first assertion above -- possibly a
    # different input (e.g. IMAGE+TEXT) was intended; confirm upstream.
    self.assertEqual(TestExpectations.remove_pixel_failures(set([FAIL])), set([FAIL]))
    self.assertEqual(TestExpectations.remove_pixel_failures(set([PASS, IMAGE, CRASH])), set([PASS, CRASH]))
def test_suffixes_for_expectations(self):
    # Each expectation maps to the set of baseline file suffixes it needs.
    self.assertEqual(TestExpectations.suffixes_for_expectations(set([FAIL])), set(['txt', 'png', 'wav']))
    self.assertEqual(TestExpectations.suffixes_for_expectations(set([IMAGE])), set(['png']))
    self.assertEqual(TestExpectations.suffixes_for_expectations(set([FAIL, IMAGE, CRASH])), set(['txt', 'png', 'wav']))
    self.assertEqual(TestExpectations.suffixes_for_expectations(set()), set())
def test_category_expectations(self):
    # This test checks unknown tests are not present in the
    # expectations and that known test part of a test category is
    # present in the expectations.
    exp_str = 'Bug(x) failures/expected [ WontFix ]'
    self.parse_exp(exp_str)
    test_name = 'failures/expected/unknown-test.html'
    unknown_test = self.get_test(test_name)
    # Unknown tests raise KeyError rather than inheriting the directory rule.
    self.assertRaises(KeyError, self._exp.get_expectations,
                      unknown_test)
    self.assert_exp('failures/expected/crash.html', PASS)
def test_get_modifiers(self):
    # A test with no modifiers yields an empty list.
    self.parse_exp(self.get_basic_expectations())
    self.assertEqual(self._exp.get_modifiers(
        self.get_test('passes/text.html')), [])
def test_get_expectations_string(self):
    # The string form of an expectation round-trips to its keyword.
    self.parse_exp(self.get_basic_expectations())
    self.assertEqual(self._exp.get_expectations_string(
        self.get_test('failures/expected/text.html')),
        'FAIL')
def test_expectation_to_string(self):
    # Normal cases are handled by other tests.
    # An out-of-range expectation value must raise ValueError.
    self.parse_exp(self.get_basic_expectations())
    self.assertRaises(ValueError, self._exp.expectation_to_string,
                      -1)
def test_get_test_set(self):
    # Handle some corner cases for this routine not covered by other tests.
    self.parse_exp(self.get_basic_expectations())
    s = self._exp.get_test_set(WONTFIX)
    self.assertEqual(s,
                     set([self.get_test('failures/expected/crash.html'),
                          self.get_test('failures/expected/image_checksum.html')]))
def test_parse_warning(self):
    # Lint-mode parsing must raise ParseError listing every warning:
    # an unrecognized modifier and a non-existent test path.
    # (Python 2 file: the `except ParseError, e` syntax below is py2-only.)
    try:
        filesystem = self._port.host.filesystem
        filesystem.write_text_file(filesystem.join(self._port.layout_tests_dir(), 'disabled-test.html-disabled'), 'content')
        # NOTE(review): the next line is a no-op expression statement
        # (stray trailing comma?) -- it builds a value and discards it.
        self.get_test('disabled-test.html-disabled'),
        self.parse_exp("[ FOO ] failures/expected/text.html [ Failure ]\n"
                       "Bug(rniwa) non-existent-test.html [ Failure ]\n"
                       "Bug(rniwa) disabled-test.html-disabled [ ImageOnlyFailure ]", is_lint_mode=True)
        self.assertFalse(True, "ParseError wasn't raised")
    except ParseError, e:
        warnings = ("expectations:1 Unrecognized modifier 'foo' failures/expected/text.html\n"
                    "expectations:2 Path does not exist. non-existent-test.html")
        self.assertEqual(str(e), warnings)
def test_parse_warnings_are_logged_if_not_in_lint_mode(self):
    # Outside lint mode a syntax error is logged rather than raised.
    oc = OutputCapture()
    try:
        oc.capture_output()
        self.parse_exp('-- this should be a syntax error', is_lint_mode=False)
    finally:
        _, _, logs = oc.restore_output()
        self.assertNotEquals(logs, '')
def test_error_on_different_platform(self):
    # parse_exp uses a Windows port. Assert errors on Mac show up in lint mode.
    self.assertRaises(ParseError, self.parse_exp,
                      'Bug(test) [ Mac ] failures/expected/text.html [ Failure ]\nBug(test) [ Mac ] failures/expected/text.html [ Failure ]',
                      is_lint_mode=True)
def test_error_on_different_build_type(self):
    # parse_exp uses a Release port. Assert errors on DEBUG show up in lint mode.
    # As above, the duplicated Debug-only line must still be flagged.
    self.assertRaises(ParseError, self.parse_exp,
        'Bug(test) [ Debug ] failures/expected/text.html [ Failure ]\nBug(test) [ Debug ] failures/expected/text.html [ Failure ]',
        is_lint_mode=True)
def test_overrides(self):
    """An overrides file wins over the base expectations for a test."""
    base = "Bug(exp) failures/expected/text.html [ Failure ]"
    overriding = "Bug(override) failures/expected/text.html [ ImageOnlyFailure ]"
    self.parse_exp(base, overriding)
    self.assert_exp('failures/expected/text.html', IMAGE)
def test_overrides__directory(self):
    """A directory entry in the overrides applies to every test under it."""
    self.parse_exp("Bug(exp) failures/expected/text.html [ Failure ]",
                   "Bug(override) failures/expected [ Crash ]")
    for test in ('failures/expected/text.html', 'failures/expected/image.html'):
        self.assert_exp(test, CRASH)
def test_overrides__duplicate(self):
    # Duplicate entries for the same test inside the overrides file are
    # themselves an error, just as they are in the base file.
    self.assert_bad_expectations("Bug(exp) failures/expected/text.html [ Failure ]",
                                 "Bug(override) failures/expected/text.html [ ImageOnlyFailure ]\n"
                                 "Bug(override) failures/expected/text.html [ Crash ]\n")
def test_pixel_tests_flag(self):
    """matches_an_expected_result() honors the pixel-tests flag."""
    def expected(name, result, pixel_tests_enabled):
        # Small shim so each assertion below reads as a one-liner.
        return self._exp.matches_an_expected_result(
            self.get_test(name), result, pixel_tests_enabled)

    self.parse_exp(self.get_basic_expectations())
    self.assertTrue(expected('failures/expected/text.html', FAIL, True))
    self.assertTrue(expected('failures/expected/text.html', FAIL, False))
    self.assertFalse(expected('failures/expected/text.html', CRASH, True))
    self.assertFalse(expected('failures/expected/text.html', CRASH, False))
    self.assertTrue(expected('failures/expected/image_checksum.html', PASS, True))
    self.assertTrue(expected('failures/expected/image_checksum.html', PASS, False))
    self.assertTrue(expected('failures/expected/crash.html', PASS, False))
    self.assertTrue(expected('passes/text.html', PASS, False))
def test_more_specific_override_resets_skip(self):
    """A per-test line undoes a Skip inherited from its directory."""
    self.parse_exp("Bug(x) failures/expected [ Skip ]\n"
                   "Bug(x) failures/expected/text.html [ ImageOnlyFailure ]\n")
    self.assert_exp('failures/expected/text.html', IMAGE)
    skipped = self._exp.get_tests_with_result_type(SKIP)
    text_test = self._port._filesystem.join(self._port.layout_tests_dir(),
                                            'failures/expected/text.html')
    self.assertNotIn(text_test, skipped)
class SkippedTests(Base):
    """Tests of how a port's Skipped list interacts with TestExpectations."""

    def check(self, expectations, overrides, skips, lint=False):
        # Build a 'qt' port whose expectations, overrides and skipped list
        # are replaced wholesale by the arguments, then assert that the
        # skipped test is modeled as DUMMY_BUG SKIP WONTFIX with a PASS
        # expectation (i.e. "wontfix-skipped").
        port = MockHost().port_factory.get('qt')
        # The test file must exist on disk for the parser to accept it.
        port._filesystem.write_text_file(port._filesystem.join(port.layout_tests_dir(), 'failures/expected/text.html'), 'foo')
        expectations_dict = OrderedDict()
        expectations_dict['expectations'] = expectations
        if overrides:
            expectations_dict['overrides'] = overrides
        port.expectations_dict = lambda: expectations_dict
        port.skipped_layout_tests = lambda tests: set(skips)
        expectations_to_lint = expectations_dict if lint else None
        exp = TestExpectations(port, ['failures/expected/text.html'], expectations_to_lint=expectations_to_lint)

        # Check that the expectation is for BUG_DUMMY SKIP : ... [ Pass ]
        self.assertEqual(exp.get_modifiers('failures/expected/text.html'),
                         [TestExpectationParser.DUMMY_BUG_MODIFIER, TestExpectationParser.SKIP_MODIFIER, TestExpectationParser.WONTFIX_MODIFIER])
        self.assertEqual(exp.get_expectations('failures/expected/text.html'), set([PASS]))

    def test_skipped_tests_work(self):
        # A test only in the Skipped list still gets the skip modifiers.
        self.check(expectations='', overrides=None, skips=['failures/expected/text.html'])

    def test_duplicate_skipped_test_fails_lint(self):
        # Listing a test both in Skipped and in expectations is a lint error.
        self.assertRaises(ParseError, self.check, expectations='Bug(x) failures/expected/text.html [ Failure ]\n', overrides=None, skips=['failures/expected/text.html'], lint=True)

    def test_skipped_file_overrides_expectations(self):
        self.check(expectations='Bug(x) failures/expected/text.html [ Failure ]\n', overrides=None,
                   skips=['failures/expected/text.html'])

    def test_skipped_dir_overrides_expectations(self):
        # A skipped directory covers every test underneath it.
        self.check(expectations='Bug(x) failures/expected/text.html [ Failure ]\n', overrides=None,
                   skips=['failures/expected'])

    def test_skipped_file_overrides_overrides(self):
        self.check(expectations='', overrides='Bug(x) failures/expected/text.html [ Failure ]\n',
                   skips=['failures/expected/text.html'])

    def test_skipped_dir_overrides_overrides(self):
        self.check(expectations='', overrides='Bug(x) failures/expected/text.html [ Failure ]\n',
                   skips=['failures/expected'])

    def test_skipped_entry_dont_exist(self):
        # A Skipped entry that names a non-existent test is logged, not fatal.
        port = MockHost().port_factory.get('qt')
        expectations_dict = OrderedDict()
        expectations_dict['expectations'] = ''
        port.expectations_dict = lambda: expectations_dict
        port.skipped_layout_tests = lambda tests: set(['foo/bar/baz.html'])
        capture = OutputCapture()
        capture.capture_output()
        exp = TestExpectations(port)
        _, _, logs = capture.restore_output()
        self.assertEqual('The following test foo/bar/baz.html from the Skipped list doesn\'t exist\n', logs)
class ExpectationSyntaxTests(Base):
    """Tests of the TestExpectations line tokenizer and its syntax rules."""

    def test_unrecognized_expectation(self):
        self.assert_bad_expectations('Bug(test) failures/expected/text.html [ Unknown ]')

    def test_macro(self):
        # 'Win' is a configuration macro that expands to concrete versions.
        exp_str = 'Bug(test) [ Win ] failures/expected/text.html [ Failure ]'
        self.parse_exp(exp_str)
        self.assert_exp('failures/expected/text.html', FAIL)

    def assert_tokenize_exp(self, line, bugs=None, modifiers=None, expectations=None, warnings=None, comment=None, name='foo.html'):
        # Tokenize one line and compare the fields the caller specifies.
        # Modifiers/expectations are only compared when the line tokenized
        # cleanly (no warnings).
        # NOTE(review): the 'bugs' and 'comment' parameters are accepted but
        # never asserted on in this body — confirm whether that is intended.
        bugs = bugs or []
        modifiers = modifiers or []
        expectations = expectations or []
        warnings = warnings or []
        filename = 'TestExpectations'
        line_number = 1
        expectation_line = TestExpectationParser._tokenize_line(filename, line, line_number)
        self.assertEqual(expectation_line.warnings, warnings)
        self.assertEqual(expectation_line.name, name)
        self.assertEqual(expectation_line.filename, filename)
        self.assertEqual(expectation_line.line_number, line_number)
        if not warnings:
            self.assertEqual(expectation_line.modifiers, modifiers)
            self.assertEqual(expectation_line.expectations, expectations)

    def test_bare_name(self):
        # A bare test name defaults to SKIP / PASS.
        self.assert_tokenize_exp('foo.html', modifiers=['SKIP'], expectations=['PASS'])

    def test_bare_name_and_bugs(self):
        # Both webkit.org/b/NNN and Bug(user) forms become BUG* modifiers.
        self.assert_tokenize_exp('webkit.org/b/12345 foo.html', modifiers=['BUGWK12345', 'SKIP'], expectations=['PASS'])
        self.assert_tokenize_exp('Bug(dpranke) foo.html', modifiers=['BUGDPRANKE', 'SKIP'], expectations=['PASS'])
        self.assert_tokenize_exp('webkit.org/b/12345 webkit.org/b/34567 foo.html', modifiers=['BUGWK12345', 'BUGWK34567', 'SKIP'], expectations=['PASS'])

    def test_comments(self):
        self.assert_tokenize_exp("# comment", name=None, comment="# comment")
        self.assert_tokenize_exp("foo.html # comment", comment="# comment", expectations=['PASS'], modifiers=['SKIP'])

    def test_config_modifiers(self):
        self.assert_tokenize_exp('[ Mac ] foo.html', modifiers=['MAC', 'SKIP'], expectations=['PASS'])
        self.assert_tokenize_exp('[ Mac Vista ] foo.html', modifiers=['MAC', 'VISTA', 'SKIP'], expectations=['PASS'])
        self.assert_tokenize_exp('[ Mac ] foo.html [ Failure ] ', modifiers=['MAC'], expectations=['FAIL'])

    def test_unknown_config(self):
        # Unknown configuration tokens are kept verbatim, not rejected.
        self.assert_tokenize_exp('[ Foo ] foo.html ', modifiers=['Foo', 'SKIP'], expectations=['PASS'])

    def test_unknown_expectation(self):
        self.assert_tokenize_exp('foo.html [ Audio ]', warnings=['Unrecognized expectation "Audio"'])

    def test_skip(self):
        self.assert_tokenize_exp('foo.html [ Skip ]', modifiers=['SKIP'], expectations=['PASS'])

    def test_slow(self):
        self.assert_tokenize_exp('foo.html [ Slow ]', modifiers=['SLOW'], expectations=['PASS'])

    def test_wontfix(self):
        # A bare WontFix implies SKIP; combined with expectations it does not.
        self.assert_tokenize_exp('foo.html [ WontFix ]', modifiers=['WONTFIX', 'SKIP'], expectations=['PASS'])
        self.assert_tokenize_exp('foo.html [ WontFix ImageOnlyFailure ]', modifiers=['WONTFIX'], expectations=['IMAGE'])
        self.assert_tokenize_exp('foo.html [ WontFix Pass Failure ]', modifiers=['WONTFIX'], expectations=['PASS', 'FAIL'])

    def test_blank_line(self):
        self.assert_tokenize_exp('', name=None)

    def test_warnings(self):
        self.assert_tokenize_exp('[ Mac ]', warnings=['Did not find a test name.'], name=None)
        self.assert_tokenize_exp('[ [', warnings=['unexpected "["'], name=None)
        self.assert_tokenize_exp('webkit.org/b/12345 ]', warnings=['unexpected "]"'], name=None)
        self.assert_tokenize_exp('foo.html webkit.org/b/12345 ]', warnings=['"webkit.org/b/12345" is not at the start of the line.'])
class SemanticTests(Base):
    """Tests of semantic validation rules (as opposed to pure syntax)."""

    def test_bug_format(self):
        # 'BUG1234' is the old-style bug identifier and is rejected in lint mode.
        self.assertRaises(ParseError, self.parse_exp, 'BUG1234 failures/expected/text.html [ Failure ]', is_lint_mode=True)

    def test_bad_bugid(self):
        try:
            self.parse_exp('BUG1234 failures/expected/text.html [ Failure ]', is_lint_mode=True)
            self.fail('should have raised an error about a bad bug identifier')
        except ParseError as exp:  # 'as' form works on Python 2.6+ and 3.
            self.assertEqual(len(exp.warnings), 1)

    def test_missing_bugid(self):
        # A missing bug id only warns when the port opts in via
        # warn_if_bug_missing_in_test_expectations().
        self.parse_exp('failures/expected/text.html [ Failure ]')
        self.assertFalse(self._exp.has_warnings())

        self._port.warn_if_bug_missing_in_test_expectations = lambda: True

        self.parse_exp('failures/expected/text.html [ Failure ]')
        line = self._exp._model.get_expectation_line('failures/expected/text.html')
        self.assertFalse(line.is_invalid())
        self.assertEqual(line.warnings, ['Test lacks BUG modifier.'])

    def test_skip_and_wontfix(self):
        # Skip is not allowed to have other expectations as well, because those
        # expectations won't be exercised and may become stale .
        self.parse_exp('failures/expected/text.html [ Failure Skip ]')
        self.assertTrue(self._exp.has_warnings())

        # WontFix, by contrast, may carry expectations.
        self.parse_exp('failures/expected/text.html [ Crash WontFix ]')
        self.assertFalse(self._exp.has_warnings())

        self.parse_exp('failures/expected/text.html [ Pass WontFix ]')
        self.assertFalse(self._exp.has_warnings())

    def test_slow_and_timeout(self):
        # A test cannot be SLOW and expected to TIMEOUT.
        self.assertRaises(ParseError, self.parse_exp,
            'Bug(test) failures/expected/timeout.html [ Slow Timeout ]', is_lint_mode=True)

    def test_rebaseline(self):
        # Can't lint a file w/ 'REBASELINE' in it.
        self.assertRaises(ParseError, self.parse_exp,
            'Bug(test) failures/expected/text.html [ Failure Rebaseline ]',
            is_lint_mode=True)

    def test_duplicates(self):
        # Duplicates are errors whether they appear within one file or
        # within the overrides file.
        self.assertRaises(ParseError, self.parse_exp, """
Bug(exp) failures/expected/text.html [ Failure ]
Bug(exp) failures/expected/text.html [ ImageOnlyFailure ]""", is_lint_mode=True)

        self.assertRaises(ParseError, self.parse_exp,
            self.get_basic_expectations(), overrides="""
Bug(override) failures/expected/text.html [ Failure ]
Bug(override) failures/expected/text.html [ ImageOnlyFailure ]""", is_lint_mode=True)

    def test_missing_file(self):
        self.parse_exp('Bug(test) missing_file.html [ Failure ]')
        # Fixed: a stray second argument (1) was being passed as
        # assertTrue's useless 'msg' parameter; dropped.
        self.assertTrue(self._exp.has_warnings())
class PrecedenceTests(Base):
    """Tests of precedence between specific lines and directory entries."""

    def test_file_over_directory(self):
        # This tests handling precedence of specific lines over directories
        # and tests expectations covering entire directories.
        # A per-test line beats a directory entry regardless of order.
        exp_str = """
Bug(x) failures/expected/text.html [ Failure ]
Bug(y) failures/expected [ WontFix ]
"""
        self.parse_exp(exp_str)
        self.assert_exp('failures/expected/text.html', FAIL)
        self.assert_exp('failures/expected/crash.html', PASS)

        exp_str = """
Bug(x) failures/expected [ WontFix ]
Bug(y) failures/expected/text.html [ Failure ]
"""
        self.parse_exp(exp_str)
        self.assert_exp('failures/expected/text.html', FAIL)
        self.assert_exp('failures/expected/crash.html', PASS)

    def test_ambiguous(self):
        # Two lines whose configurations overlap ambiguously are an error.
        self.assert_bad_expectations("Bug(test) [ Release ] passes/text.html [ Pass ]\n"
                                     "Bug(test) [ Win ] passes/text.html [ Failure ]\n")

    def test_more_modifiers(self):
        self.assert_bad_expectations("Bug(test) [ Release ] passes/text.html [ Pass ]\n"
                                     "Bug(test) [ Win Release ] passes/text.html [ Failure ]\n")

    def test_order_in_file(self):
        # NOTE(review): these lines use the old ':'-delimited syntax,
        # presumably deliberately — confirm against the parser's
        # legacy-syntax support before changing them.
        self.assert_bad_expectations("Bug(test) [ Win Release ] : passes/text.html [ Failure ]\n"
                                     "Bug(test) [ Release ] : passes/text.html [ Pass ]\n")

    def test_macro_overrides(self):
        # XP is contained in the Win macro, so these two lines conflict.
        self.assert_bad_expectations("Bug(test) [ Win ] passes/text.html [ Pass ]\n"
                                     "Bug(test) [ XP ] passes/text.html [ Failure ]\n")
class RemoveConfigurationsTest(Base):
    """Tests for TestExpectations.remove_configuration_from_test()."""

    def test_remove(self):
        # Removing the XP/Release configuration should rewrite the first
        # line to the remaining Windows versions and leave the second
        # (Debug) line untouched.
        host = MockHost()
        test_port = host.port_factory.get('test-win-xp', None)
        test_port.test_exists = lambda test: True
        test_port.test_isfile = lambda test: True

        test_config = test_port.test_configuration()
        test_port.expectations_dict = lambda: {"expectations": """Bug(x) [ Linux Win Release ] failures/expected/foo.html [ Failure ]
Bug(y) [ Win Mac Debug ] failures/expected/foo.html [ Crash ]
"""}
        expectations = TestExpectations(test_port, self.get_basic_tests())

        actual_expectations = expectations.remove_configuration_from_test('failures/expected/foo.html', test_config)

        self.assertEqual("""Bug(x) [ Linux Vista Win7 Release ] failures/expected/foo.html [ Failure ]
Bug(y) [ Win Mac Debug ] failures/expected/foo.html [ Crash ]
""", actual_expectations)

    def test_remove_line(self):
        # Removing every Windows Release configuration (XP, Vista, Win7)
        # should delete the first line entirely.
        host = MockHost()
        test_port = host.port_factory.get('test-win-xp', None)
        test_port.test_exists = lambda test: True
        test_port.test_isfile = lambda test: True

        test_config = test_port.test_configuration()
        test_port.expectations_dict = lambda: {'expectations': """Bug(x) [ Win Release ] failures/expected/foo.html [ Failure ]
Bug(y) [ Win Debug ] failures/expected/foo.html [ Crash ]
"""}
        expectations = TestExpectations(test_port)

        actual_expectations = expectations.remove_configuration_from_test('failures/expected/foo.html', test_config)
        actual_expectations = expectations.remove_configuration_from_test('failures/expected/foo.html', host.port_factory.get('test-win-vista', None).test_configuration())
        actual_expectations = expectations.remove_configuration_from_test('failures/expected/foo.html', host.port_factory.get('test-win-win7', None).test_configuration())

        self.assertEqual("""Bug(y) [ Win Debug ] failures/expected/foo.html [ Crash ]
""", actual_expectations)
class RebaseliningTest(Base):
    """Test rebaselining-specific functionality."""

    def assertRemove(self, input_expectations, input_overrides, tests, expected_expectations, expected_overrides):
        # Parse, remove the rebaselined tests from each file, and compare
        # the rewritten contents of both files.
        self.parse_exp(input_expectations, is_lint_mode=False, overrides=input_overrides)
        actual_expectations = self._exp.remove_rebaselined_tests(tests, 'expectations')
        self.assertEqual(expected_expectations, actual_expectations)
        actual_overrides = self._exp.remove_rebaselined_tests(tests, 'overrides')
        self.assertEqual(expected_overrides, actual_overrides)

    def test_remove(self):
        # Only the line for the named test AND carrying Rebaseline is removed.
        self.assertRemove('Bug(x) failures/expected/text.html [ Failure Rebaseline ]\n'
                          'Bug(y) failures/expected/image.html [ ImageOnlyFailure Rebaseline ]\n'
                          'Bug(z) failures/expected/crash.html [ Crash ]\n',
                          'Bug(x0) failures/expected/image.html [ Crash ]\n',
                          ['failures/expected/text.html'],
                          'Bug(y) failures/expected/image.html [ ImageOnlyFailure Rebaseline ]\n'
                          'Bug(z) failures/expected/crash.html [ Crash ]\n',
                          'Bug(x0) failures/expected/image.html [ Crash ]\n')

        # Ensure that we don't modify unrelated lines, even if we could rewrite them.
        # i.e., the second line doesn't get rewritten to "Bug(y) failures/expected/skip.html"
        self.assertRemove('Bug(x) failures/expected/text.html [ Failure Rebaseline ]\n'
                          'Bug(Y) failures/expected/image.html [ Skip   ]\n'
                          'Bug(z) failures/expected/crash.html\n',
                          '',
                          ['failures/expected/text.html'],
                          'Bug(Y) failures/expected/image.html [ Skip   ]\n'
                          'Bug(z) failures/expected/crash.html\n',
                          '')

    def test_get_rebaselining_failures(self):
        # Make sure we find a test as needing a rebaseline even if it is not marked as a failure.
        self.parse_exp('Bug(x) failures/expected/text.html [ Rebaseline ]\n')
        self.assertEqual(len(self._exp.get_rebaselining_failures()), 1)

        self.parse_exp(self.get_basic_expectations())
        self.assertEqual(len(self._exp.get_rebaselining_failures()), 0)
class TestExpectationSerializationTests(unittest.TestCase):
    """Round-trip tests: parse expectation lines and serialize them back.

    Uses a TestConfigurationConverter built from a test-win-xp port so
    matching-configuration sets can be rendered back into specifier
    macros (e.g. [ XP Release ]).
    """

    def __init__(self, testFunc):
        host = MockHost()
        test_port = host.port_factory.get('test-win-xp', None)
        self._converter = TestConfigurationConverter(test_port.all_test_configurations(), test_port.configuration_specifier_macros())
        unittest.TestCase.__init__(self, testFunc)

    def _tokenize(self, line):
        # Tokenize one raw line with a dummy filename/line number.
        return TestExpectationParser._tokenize_line('path', line, 0)

    def assert_round_trip(self, in_string, expected_string=None):
        # A tokenized line must serialize back to itself (or to
        # expected_string when normalization is expected).
        expectation = self._tokenize(in_string)
        if expected_string is None:
            expected_string = in_string
        self.assertEqual(expected_string, expectation.to_string(self._converter))

    def assert_list_round_trip(self, in_string, expected_string=None):
        # Same as assert_round_trip but through the full parser and
        # TestExpectations.list_to_string.
        host = MockHost()
        parser = TestExpectationParser(host.port_factory.get('test-win-xp', None), [], allow_rebaseline_modifier=False)
        expectations = parser.parse('path', in_string)
        if expected_string is None:
            expected_string = in_string
        self.assertEqual(expected_string, TestExpectations.list_to_string(expectations, self._converter))

    def test_unparsed_to_string(self):
        # An unparsed line is serialized field-by-field as fields are added.
        expectation = TestExpectationLine()

        self.assertEqual(expectation.to_string(self._converter), '')
        expectation.comment = ' Qux.'
        self.assertEqual(expectation.to_string(self._converter), '# Qux.')
        expectation.name = 'bar'
        self.assertEqual(expectation.to_string(self._converter), 'bar # Qux.')
        expectation.modifiers = ['foo']
        # FIXME: case should be preserved here but we can't until we drop the old syntax.
        self.assertEqual(expectation.to_string(self._converter), '[ FOO ] bar # Qux.')
        expectation.expectations = ['bAz']
        self.assertEqual(expectation.to_string(self._converter), '[ FOO ] bar [ BAZ ] # Qux.')
        expectation.expectations = ['bAz1', 'baZ2']
        self.assertEqual(expectation.to_string(self._converter), '[ FOO ] bar [ BAZ1 BAZ2 ] # Qux.')
        expectation.modifiers = ['foo1', 'foO2']
        self.assertEqual(expectation.to_string(self._converter), '[ FOO1 FOO2 ] bar [ BAZ1 BAZ2 ] # Qux.')
        # A line with warnings serializes to '' unless it kept its
        # original string, in which case that string is used verbatim.
        expectation.warnings.append('Oh the horror.')
        self.assertEqual(expectation.to_string(self._converter), '')
        expectation.original_string = 'Yes it is!'
        self.assertEqual(expectation.to_string(self._converter), 'Yes it is!')

    def test_unparsed_list_to_string(self):
        expectation = TestExpectationLine()
        expectation.comment = 'Qux.'
        expectation.name = 'bar'
        expectation.modifiers = ['foo']
        expectation.expectations = ['bAz1', 'baZ2']
        # FIXME: case should be preserved here but we can't until we drop the old syntax.
        self.assertEqual(TestExpectations.list_to_string([expectation]), '[ FOO ] bar [ BAZ1 BAZ2 ] #Qux.')

    def test_parsed_to_string(self):
        # A parsed line with no matching configurations serializes to None;
        # with configurations, they collapse into specifier macros.
        expectation_line = TestExpectationLine()
        expectation_line.parsed_bug_modifiers = ['BUGX']
        expectation_line.name = 'test/name/for/realz.html'
        expectation_line.parsed_expectations = set([IMAGE])
        self.assertEqual(expectation_line.to_string(self._converter), None)
        expectation_line.matching_configurations = set([TestConfiguration('xp', 'x86', 'release')])
        self.assertEqual(expectation_line.to_string(self._converter), 'Bug(x) [ XP Release ] test/name/for/realz.html [ ImageOnlyFailure ]')
        expectation_line.matching_configurations = set([TestConfiguration('xp', 'x86', 'release'), TestConfiguration('xp', 'x86', 'debug')])
        self.assertEqual(expectation_line.to_string(self._converter), 'Bug(x) [ XP ] test/name/for/realz.html [ ImageOnlyFailure ]')

    def test_serialize_parsed_expectations(self):
        # Serialization order follows the EXPECTATIONS table, not insertion.
        expectation_line = TestExpectationLine()
        expectation_line.parsed_expectations = set([])
        # Invert EXPECTATIONS (string -> value) into value -> string.
        parsed_expectation_to_string = dict([[parsed_expectation, expectation_string] for expectation_string, parsed_expectation in TestExpectations.EXPECTATIONS.items()])
        self.assertEqual(expectation_line._serialize_parsed_expectations(parsed_expectation_to_string), '')
        expectation_line.parsed_expectations = set([FAIL])
        self.assertEqual(expectation_line._serialize_parsed_expectations(parsed_expectation_to_string), 'fail')
        expectation_line.parsed_expectations = set([PASS, IMAGE])
        self.assertEqual(expectation_line._serialize_parsed_expectations(parsed_expectation_to_string), 'pass image')
        expectation_line.parsed_expectations = set([FAIL, PASS])
        self.assertEqual(expectation_line._serialize_parsed_expectations(parsed_expectation_to_string), 'pass fail')

    def test_serialize_parsed_modifier_string(self):
        # Bug modifiers come first, then other modifiers sorted together.
        expectation_line = TestExpectationLine()
        expectation_line.parsed_bug_modifiers = ['garden-o-matic']
        expectation_line.parsed_modifiers = ['for', 'the']
        self.assertEqual(expectation_line._serialize_parsed_modifiers(self._converter, []), 'garden-o-matic for the')
        self.assertEqual(expectation_line._serialize_parsed_modifiers(self._converter, ['win']), 'garden-o-matic for the win')
        expectation_line.parsed_bug_modifiers = []
        expectation_line.parsed_modifiers = []
        self.assertEqual(expectation_line._serialize_parsed_modifiers(self._converter, []), '')
        self.assertEqual(expectation_line._serialize_parsed_modifiers(self._converter, ['win']), 'win')
        expectation_line.parsed_bug_modifiers = ['garden-o-matic', 'total', 'is']
        self.assertEqual(expectation_line._serialize_parsed_modifiers(self._converter, ['win']), 'garden-o-matic is total win')
        expectation_line.parsed_bug_modifiers = []
        expectation_line.parsed_modifiers = ['garden-o-matic', 'total', 'is']
        self.assertEqual(expectation_line._serialize_parsed_modifiers(self._converter, ['win']), 'garden-o-matic is total win')

    def test_format_line(self):
        self.assertEqual(TestExpectationLine._format_line(['MODIFIERS'], 'name', ['EXPECTATIONS'], 'comment'), '[ MODIFIERS ] name [ EXPECTATIONS ] #comment')
        self.assertEqual(TestExpectationLine._format_line(['MODIFIERS'], 'name', ['EXPECTATIONS'], None), '[ MODIFIERS ] name [ EXPECTATIONS ]')

    def test_string_roundtrip(self):
        # All of these (including malformed ones) must survive unchanged,
        # because unparseable lines keep their original string.
        self.assert_round_trip('')
        self.assert_round_trip('FOO')
        self.assert_round_trip('[')
        self.assert_round_trip('FOO [')
        self.assert_round_trip('FOO ] bar')
        self.assert_round_trip('  FOO [')
        self.assert_round_trip('  [ FOO ] ')
        self.assert_round_trip('[ FOO ] bar [ BAZ ]')
        self.assert_round_trip('[ FOO ] bar [ BAZ ] # Qux.')
        self.assert_round_trip('[ FOO ] bar [ BAZ ] # Qux.')
        self.assert_round_trip('[ FOO ] bar [ BAZ ] # Qux.     ')
        self.assert_round_trip('[ FOO ] bar [ BAZ ] #        Qux.     ')
        self.assert_round_trip('[ FOO ] ] ] bar BAZ')
        self.assert_round_trip('[ FOO ] ] ] bar [ BAZ ]')
        self.assert_round_trip('FOO ] ] bar ==== BAZ')
        self.assert_round_trip('=')
        self.assert_round_trip('#')
        self.assert_round_trip('# ')
        self.assert_round_trip('# Foo')
        self.assert_round_trip('# Foo')
        self.assert_round_trip('# Foo :')
        self.assert_round_trip('# Foo : =')

    def test_list_roundtrip(self):
        self.assert_list_round_trip('')
        self.assert_list_round_trip('\n')
        self.assert_list_round_trip('\n\n')
        self.assert_list_round_trip('bar')
        self.assert_list_round_trip('bar\n# Qux.')
        self.assert_list_round_trip('bar\n# Qux.\n')

    def test_reconstitute_only_these(self):
        # Lines listed in reconstitute_only_these are re-serialized from
        # their parsed form; all other lines keep their original string.
        lines = []
        reconstitute_only_these = []

        def add_line(matching_configurations, reconstitute):
            expectation_line = TestExpectationLine()
            expectation_line.original_string = "Nay"
            expectation_line.parsed_bug_modifiers = ['BUGX']
            expectation_line.name = 'Yay'
            expectation_line.parsed_expectations = set([IMAGE])
            expectation_line.matching_configurations = matching_configurations
            lines.append(expectation_line)
            if reconstitute:
                reconstitute_only_these.append(expectation_line)

        add_line(set([TestConfiguration('xp', 'x86', 'release')]), True)
        add_line(set([TestConfiguration('xp', 'x86', 'release'), TestConfiguration('xp', 'x86', 'debug')]), False)
        serialized = TestExpectations.list_to_string(lines, self._converter)
        self.assertEqual(serialized, "Bug(x) [ XP Release ] Yay [ ImageOnlyFailure ]\nBug(x) [ XP ] Yay [ ImageOnlyFailure ]")
        serialized = TestExpectations.list_to_string(lines, self._converter, reconstitute_only_these=reconstitute_only_these)
        self.assertEqual(serialized, "Bug(x) [ XP Release ] Yay [ ImageOnlyFailure ]\nNay")

    def disabled_test_string_whitespace_stripping(self):
        # FIXME: Re-enable this test once we rework the code to no longer support the old syntax.
        self.assert_round_trip('\n', '')
        self.assert_round_trip('  [ FOO ] bar [ BAZ ]', '[ FOO ] bar [ BAZ ]')
        self.assert_round_trip('[ FOO ]    bar [ BAZ ]', '[ FOO ] bar [ BAZ ]')
        self.assert_round_trip('[ FOO ] bar [ BAZ ]       # Qux.', '[ FOO ] bar [ BAZ ] # Qux.')
        self.assert_round_trip('[ FOO ] bar [        BAZ ]  # Qux.', '[ FOO ] bar [ BAZ ] # Qux.')
        self.assert_round_trip('[ FOO ]       bar [    BAZ ]  # Qux.', '[ FOO ] bar [ BAZ ] # Qux.')
        self.assert_round_trip('[ FOO ] bar        [ BAZ ]  # Qux.', '[ FOO ] bar [ BAZ ] # Qux.')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.