repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
dan1/horizon-proto | refs/heads/master | openstack_dashboard/dashboards/identity/ngusers/panel.py | 38 | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
from openstack_dashboard.dashboards.identity import dashboard
class NGUsers(horizon.Panel):
    """Identity dashboard panel listing users ("ng" variant of the Users panel)."""
    # Panel title shown in the dashboard navigation.
    name = _("Users")
    # URL slug for this panel under the Identity dashboard.
    slug = 'ngusers'
    # Keystone policy rules that must pass for the panel to be visible.
    policy_rules = (("identity", "identity:get_user"),
                    ("identity", "identity:list_users"))


# Make the panel available under the Identity dashboard.
dashboard.Identity.register(NGUsers)
|
michaelgallacher/intellij-community | refs/heads/master | python/testData/mover/exceptElse.py | 83 |
try:
print(zoo(1).foo(2))
except:
print<caret>(zoo(0).foo(2)) # <- move statement up here
else:
a = 1 |
RafaelCosman/pybrain | refs/heads/master | examples/rl/environments/cartpole/cart_all.py | 30 | #!/usr/bin/env python
#########################################################################
# Reinforcement Learning with several optimization algorithms
# on the CartPoleEnvironment
#
# Requirements: pylab (for plotting only). If not available, comment the
# last 3 lines out
#########################################################################
__author__ = "Thomas Rueckstiess, Frank Sehnke"
from pybrain.tools.example_tools import ExTools
from pybrain.tools.shortcuts import buildNetwork
from pybrain.rl.environments.cartpole import CartPoleEnvironment, BalanceTask
from pybrain.rl.agents import OptimizationAgent
from pybrain.optimization import PGPE #@UnusedImport
from pybrain.optimization import ExactNES #@UnusedImport
from pybrain.optimization import FEM #@UnusedImport
from pybrain.optimization import CMAES #@UnusedImport
from pybrain.rl.experiments import EpisodicExperiment
batch=2 #number of samples per learning step
prnts=100 #number of learning steps after results are printed
# NOTE(review): under Python 3 this is true division and yields a float,
# which would break range(epis) below; the file's Python 2 style suggests
# integer division was intended -- confirm interpreter version.
epis=4000/batch/prnts #number of roleouts
numbExp=40 #number of experiments
et = ExTools(batch, prnts) #tool for printing and plotting

# Each entry is passed to eval() below to build a fresh learner per run.
# NOTE(review): eval() on these fixed literals is safe here, but a list of
# constructor callables would avoid eval entirely.
expList = ["PGPE(storeAllEvaluations = True)", "ExactNES(storeAllEvaluations = True)", "FEM(storeAllEvaluations = True)", "CMAES(storeAllEvaluations = True)"]

for e in expList:
    for runs in range(numbExp):
        # create environment
        env = CartPoleEnvironment()
        # create task
        task = BalanceTask(env, 200, desiredValue=None)
        # create controller network
        net = buildNetwork(4, 1, bias=False)
        # create agent with controller and learner (and its options)
        agent = OptimizationAgent(net, eval(e))
        et.agent = agent
        # create the experiment
        experiment = EpisodicExperiment(task, agent)
        #Do the experiment
        for updates in range(epis):
            for i in range(prnts):
                experiment.doEpisodes(batch)
            # Report the most recent evaluations for this block of steps.
            et.printResults((agent.learner._allEvaluations)[-50:-1], runs, updates)
        et.addExps()
    et.nextExps()
et.showExps()
|
azureplus/chromium_depot_tools | refs/heads/master | git_cherry_pick_upload.py | 28 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Upload a cherry pick CL to rietveld."""
import md5
import optparse
import subprocess2
import sys
import auth
from git_cl import Changelist
from git_common import config, run
from third_party.upload import EncodeMultipartFormData, GitVCS
from rietveld import Rietveld
def cherry_pick(target_branch, commit, auth_config):
    """Attempt to upload a cherry pick CL to rietveld.

    Args:
        target_branch: The branch to cherry pick onto.
        commit: The git hash of the commit to cherry pick.
        auth_config: auth.AuthConfig object with authentication configuration.
    """
    author = config('user.email')

    # Reuse the original commit message and record the source commit hash.
    description = '%s\n\n(cherry picked from commit %s)\n' % (
        run('show', '--pretty=%B', '--quiet', commit), commit)

    parent = run('show', '--pretty=%P', '--quiet', commit)
    print 'Found parent revision:', parent

    # Minimal stand-in for the options object third_party.upload expects.
    class Options(object):
        def __init__(self):
            self.emulate_svn_auto_props = False

    # First request: create the issue itself from the full diff
    # between the parent revision and the cherry-picked commit.
    content_type, payload = EncodeMultipartFormData([
        ('base', '%s@%s' % (Changelist().GetRemoteUrl(), target_branch)),
        ('cc', config('rietveld.cc')),
        ('content_upload', '1'),
        ('description', description),
        ('project', '%s@%s' % (config('rietveld.project'), target_branch)),
        ('subject', description.splitlines()[0]),
        ('user', author),
    ], [
        ('data', 'data.diff', GitVCS(Options()).PostProcessDiff(
            run('diff', parent, commit))),
    ])

    rietveld = Rietveld(config('rietveld.server'), auth_config, author)

    # pylint: disable=W0212
    output = rietveld._send(
        '/upload',
        payload=payload,
        content_type=content_type,
    ).splitlines()

    # If successful, output will look like:
    # Issue created. URL: https://codereview.chromium.org/1234567890
    # 1
    # 10001 some/path/first.file
    # 10002 some/path/second.file
    # 10003 some/path/third.file
    if output[0].startswith('Issue created. URL: '):
        print output[0]
        issue = output[0].rsplit('/', 1)[-1]
        patchset = output[1]
        files = output[2:]

        for f in files:
            file_id, filename = f.split()

            # Upload the "base" (parent revision) version of the file.
            mode = 'M'
            try:
                content = run('show', '%s:%s' % (parent, filename))
            except subprocess2.CalledProcessError:
                # File didn't exist in the parent revision.
                content = ''
                mode = 'A'

            content_type, payload = EncodeMultipartFormData([
                ('checksum', md5.md5(content).hexdigest()),
                ('filename', filename),
                ('is_current', 'False'),
                ('status', mode),
            ], [
                ('data', filename, content),
            ])

            # pylint: disable=W0212
            print ' Uploading base file for %s:' % filename, rietveld._send(
                '/%s/upload_content/%s/%s' % (issue, patchset, file_id),
                payload=payload,
                content_type=content_type,
            )

            # Upload the "current" version from the cherry-picked commit.
            try:
                content = run('show', '%s:%s' % (commit, filename))
            except subprocess2.CalledProcessError:
                # File no longer exists in the new commit.
                content = ''
                mode = 'D'

            content_type, payload = EncodeMultipartFormData([
                ('checksum', md5.md5(content).hexdigest()),
                ('filename', filename),
                ('is_current', 'True'),
                ('status', mode),
            ], [
                ('data', filename, content),
            ])

            # pylint: disable=W0212
            print ' Uploading %s:' % filename, rietveld._send(
                '/%s/upload_content/%s/%s' % (issue, patchset, file_id),
                payload=payload,
                content_type=content_type,
            )

        # Tell rietveld every file's content has been uploaded.
        # pylint: disable=W0212
        print 'Finalizing upload:', rietveld._send('/%s/upload_complete/1' % issue)
def main():
    """Parse command line options and upload the requested cherry pick CL.

    Returns 0 on success; exits via parser.error() on bad arguments.
    """
    parser = optparse.OptionParser(
        usage='usage: %prog --branch <branch> <commit>')
    parser.add_option(
        '--branch',
        '-b',
        help='The upstream branch to cherry pick to.',
        metavar='<branch>')
    auth.add_auth_options(parser)
    options, args = parser.parse_args()
    # BUG FIX: previously the function was assigned without being called
    # ("auth_config = auth.extract_auth_config_from_options"), so
    # cherry_pick() received the function object instead of an
    # auth.AuthConfig instance.
    auth_config = auth.extract_auth_config_from_options(options)
    if not options.branch:
        parser.error('--branch is required')
    if len(args) != 1:
        parser.error('Expecting single argument <commit>')

    cherry_pick(options.branch, args[0], auth_config)
    return 0
if __name__ == '__main__':
    try:
        # Propagate main()'s return value as the process exit code.
        sys.exit(main())
    except KeyboardInterrupt:
        # Exit quietly with a conventional message on Ctrl-C.
        sys.stderr.write('interrupted\n')
        sys.exit(1)
|
snarfed/beautifulsoup | refs/heads/master | bs4/tests/test_lxml.py | 115 | """Tests to ensure that the lxml tree builder generates good trees."""
import re
import warnings
try:
    # Probe for lxml; the smoke tests below are skipped when it is missing.
    import lxml.etree
    LXML_PRESENT = True
    LXML_VERSION = lxml.etree.LXML_VERSION
except ImportError, e:
    # NOTE(review): Python 2-only except syntax; `e` is never used.
    LXML_PRESENT = False
    LXML_VERSION = (0,)
if LXML_PRESENT:
from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
from bs4 import (
BeautifulSoup,
BeautifulStoneSoup,
)
from bs4.element import Comment, Doctype, SoupStrainer
from bs4.testing import skipIf
from bs4.tests import test_htmlparser
from bs4.testing import (
HTMLTreeBuilderSmokeTest,
XMLTreeBuilderSmokeTest,
SoupTest,
skipIf,
)
@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its tree builder.")
class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        # Builder under test: lxml's HTML tree builder.
        return LXMLTreeBuilder()

    def test_out_of_range_entity(self):
        # Numeric entities beyond the valid Unicode range should be
        # dropped from the output rather than crashing the parser.
        self.assertSoupEquals(
            "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
        self.assertSoupEquals(
            "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")

    # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
    # test if an old version of lxml is installed.
    @skipIf(
        not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
        "Skipping doctype test for old version of lxml to avoid segfault.")
    def test_empty_doctype(self):
        soup = self.soup("<!DOCTYPE>")
        doctype = soup.contents[0]
        self.assertEqual("", doctype.strip())

    def test_beautifulstonesoup_is_xml_parser(self):
        # Make sure that the deprecated BSS class uses an xml builder
        # if one is installed.
        with warnings.catch_warnings(record=True) as w:
            soup = BeautifulStoneSoup("<b />")
        self.assertEqual(u"<b/>", unicode(soup.b))
        self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))
@skipIf(
    not LXML_PRESENT,
    "lxml seems not to be present, not testing its XML tree builder.")
class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
    """See ``HTMLTreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        # Builder under test: lxml's XML tree builder.
        return LXMLTreeBuilderForXML()
|
mozilla/relman-auto-nag | refs/heads/master | auto_nag/log.py | 2 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import os
from libmozdata import utils as lmdutils
from . import mail, utils
def clean():
    """Delete the shared log file configured under common.log."""
    # Resolve the configured log location and remove it outright.
    os.remove(utils.get_config("common", "log"))
def get_msg(path):
    """Build the notification body for the log file at *path*.

    Returns a (message, attachments) pair: the raw log text with no
    attachments when the log contains exactly one ERROR/CRITICAL line,
    otherwise a one-line summary with the log path as an attachment.
    """
    with open(path, "r") as log_file:
        content = log_file.read()

    # Count lines flagged as errors by the logging setup.
    error_count = sum(
        1
        for line in content.split("\n")
        if "ERROR" in line or "CRITICAL" in line
    )

    if error_count == 1:
        # A single error: the log is short enough to inline in the mail.
        return content, []
    return "There are {} errors: see the log in attachment.".format(error_count), [path]
def send():
    """Mail the log content to the configured recipients, best effort."""
    path = utils.get_config("common", "log")
    try:
        n = os.path.getsize(path)
        if n != 0:
            # Only mail when the log actually has content.
            login_info = utils.get_login_info()
            date = lmdutils.get_date("today")
            msg, files = get_msg(path)
            mail.send(
                login_info["ldap_username"],
                utils.get_config("common", "on-errors"),
                "[autonag] Something bad happened when running auto-nag the {}".format(
                    date
                ),
                msg,
                html=False,
                login=login_info,
                dryrun=False,
                files=files,
            )
    except Exception:
        # Deliberate best-effort: a failure to send the log must not crash
        # the caller.  NOTE(review): consider at least logging the failure.
        pass
if __name__ == "__main__":
    # Command line entry point: clean and/or send the shared log file.
    parser = argparse.ArgumentParser(description="Manage logs")
    parser.add_argument(
        "-c", "--clean", dest="clean", action="store_true", help="Remove the log files"
    )
    parser.add_argument(
        "-s",
        "--send",
        dest="send",
        action="store_true",
        help="Send the log if not empty",
    )
    args = parser.parse_args()
    if args.clean:
        clean()
    if args.send:
        send()
|
mzizzi/ansible | refs/heads/devel | lib/ansible/modules/commands/shell.py | 13 | # There is no actual shell module source, when you use 'shell' in ansible,
# it runs the 'command' module with special arguments and it behaves differently.
# See the command source and the comment "#USE_SHELL".
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module metadata consumed by Ansible's documentation/build tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = '''
---
module: shell
short_description: Execute commands in nodes.
description:
- The C(shell) module takes the command name followed by a list of space-delimited arguments.
It is almost exactly like the M(command) module but runs
the command through a shell (C(/bin/sh)) on the remote node.
- For Windows targets, use the M(win_shell) module instead.
version_added: "0.2"
options:
free_form:
description:
- The shell module takes a free form command to run, as a string. There's not an actual
option named "free form". See the examples!
required: true
default: null
creates:
description:
- a filename, when it already exists, this step will B(not) be run.
required: no
default: null
removes:
description:
- a filename, when it does not exist, this step will B(not) be run.
version_added: "0.8"
required: no
default: null
chdir:
description:
- cd into this directory before running the command
required: false
default: null
version_added: "0.6"
executable:
description:
- change the shell used to execute the command. Should be an absolute path to the executable.
required: false
default: null
version_added: "0.9"
warn:
description:
- if command warnings are on in ansible.cfg, do not warn about this particular line if set to no/false.
required: false
default: True
version_added: "1.8"
notes:
- If you want to execute a command securely and predictably, it may be
better to use the M(command) module instead. Best practices when writing
playbooks will follow the trend of using M(command) unless the C(shell)
module is explicitly required. When running ad-hoc commands, use your best
judgement.
- To sanitize any variables passed to the shell module, you should use
"{{ var | quote }}" instead of just "{{ var }}" to make sure they don't include evil things like semicolons.
- For Windows targets, use the M(win_shell) module instead.
requirements: [ ]
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = '''
- name: Execute the command in remote shell; stdout goes to the specified file on the remote.
shell: somescript.sh >> somelog.txt
- name: Change the working directory to somedir/ before executing the command.
shell: somescript.sh >> somelog.txt
args:
chdir: somedir/
# You can also use the 'args' form to provide the options.
- name: This command will change the working directory to somedir/ and will only run when somedir/somelog.txt doesn't exist.
shell: somescript.sh >> somelog.txt
args:
chdir: somedir/
creates: somelog.txt
- name: Run a command that uses non-posix shell-isms (in this example /bin/sh doesn't handle redirection and wildcards together but bash does)
shell: cat < /tmp/*txt
args:
executable: /bin/bash
- name: Run a command using a templated variable (always use quote filter to avoid injection)
shell: cat {{ myfile|quote }}
# You can use shell to run other executables to perform actions inline
- name: Run expect to wait for a successful PXE boot via out-of-band CIMC
shell: |
set timeout 300
spawn ssh admin@{{ cimc_host }}
expect "password:"
send "{{ cimc_password }}\\n"
expect "\\n{{ cimc_name }}"
send "connect host\\n"
expect "pxeboot.n12"
send "\\n"
exit 0
args:
executable: /usr/bin/expect
delegate_to: localhost
'''
RETURN = '''
msg:
description: changed
returned: always
type: boolean
sample: True
start:
description: The command execution start time
returned: always
type: string
sample: '2016-02-25 09:18:26.429568'
end:
description: The command execution end time
returned: always
type: string
sample: '2016-02-25 09:18:26.755339'
delta:
description: The command execution delta time
returned: always
type: string
sample: '0:00:00.325771'
stdout:
description: The command standard output
returned: always
type: string
sample: 'Clustering node rabbit@slave1 with rabbit@master ...'
stderr:
description: The command standard error
returned: always
type: string
sample: 'ls: cannot access foo: No such file or directory'
cmd:
description: The command executed by the task
returned: always
type: string
sample: 'rabbitmqctl join_cluster rabbit@master'
rc:
description: The command return code (0 means success)
returned: always
type: int
sample: 0
stdout_lines:
description: The command standard output split in lines
returned: always
type: list
sample: [u'Clustering node rabbit@slave1 with rabbit@master ...']
'''
|
sprax/python | refs/heads/master | txt/xdv.py | 1 | #!/usr/bin/env python3
'''
XDV == eXchange (I/O) Depending on Verbosity.
Basic debugging output for Python 3.5+
'''
# Module-wide verbosity threshold; None means "not yet configured".
XDV_VERBOSITY = None
# Fallback verbosity applied when xdv() is used before configuration.
XDV_DEFAULT = 0
def printv(level, verbose, *args, **kwargs):
    '''Print *args/**kwargs only when level < verbose.

    Lightweight conditional output that carries its own threshold
    instead of using the module-global verbosity.
    '''
    if level >= verbose:
        return
    print(*args, **kwargs)
def set_xdv_verbosity(verbosity):
    '''Set the module-global variable XDV_VERBOSITY'''
    global XDV_VERBOSITY
    # No-op (and no announcement) when the value is unchanged.
    if XDV_VERBOSITY == verbosity:
        return
    print("Setting XDV_VERBOSITY = %d" % verbosity)
    XDV_VERBOSITY = verbosity
def get_xdv_verbosity():
    '''Return the current verbosity, initializing it to XDV_DEFAULT if unset.'''
    if XDV_VERBOSITY is None:
        # BUG FIX: set_xdv_verbosity() was called without its required
        # `verbosity` argument, raising TypeError whenever the module
        # verbosity had not been configured yet.
        set_xdv_verbosity(XDV_DEFAULT)
    return XDV_VERBOSITY
def xdv(level, *args, **kwargs):
    '''Conditional output: eXpress Depending on Verbosity'''
    try:
        if level <= XDV_VERBOSITY:
            print(*args, **kwargs)
    except TypeError:
        # XDV_VERBOSITY is still None: `int <= None` raises TypeError on
        # Python 3.  Initialize to the default and retry once.
        print("WARNING: XDV_VERBOSITY was None (in {}); setting it to {}"
              .format(__name__, XDV_DEFAULT))
        set_xdv_verbosity(XDV_DEFAULT)
        xdv(level, *args, **kwargs)
def xdvr(level, *args, **kwargs):
    '''Conditional output: eXpress Depending on Verbosity.

    BUG FIX: the previous version compared *level* against a nested
    function object (`xdvr_verbosity`, which returned the function
    itself), so every call raised TypeError, printed the warning, and
    fell through to xdv().  It now checks XDV_VERBOSITY directly and
    initializes it on first use, mirroring xdv().
    '''
    try:
        if level <= XDV_VERBOSITY:
            print(*args, **kwargs)
    except TypeError:
        # Verbosity not configured yet; fall back to the default and retry.
        print("WARNING: XDV_VERBOSITY was None (in {}); setting it to {}"
              .format(__name__, XDV_DEFAULT))
        set_xdv_verbosity(XDV_DEFAULT)
        xdvr(level, *args, **kwargs)
def try_xdv():
    '''try xdv'''
    # Emit one message per verbosity level 0-4; which ones actually print
    # depends on the current XDV_VERBOSITY.
    xdv(0, "0 -- hi from xdv", 0)
    xdv(1, "1 -- greetings from xdv")
    xdv(2, "2 -- howdy from xdv")
    xdv(3, "3 -- hello from xdv", "\n",
        " -- bonus line, still in xdv(3 ...", sep='')
    xdv(4, "4 -- hey from xdv", " YO, 4 is as high as I go!",
        "\n{}".format("goodbye from xdv"), sep='')
def test_xdv():
    '''Exercise the xdv helpers at several module verbosity levels.'''
    # First pass runs with whatever verbosity is currently configured.
    try_xdv()
    for verbosity in (2, 4):
        set_xdv_verbosity(verbosity)
        try_xdv()
if __name__ == '__main__':
    # Manual smoke test: exercise xdv at several verbosity levels.
    test_xdv()
|
ol-loginov/intellij-community | refs/heads/master | python/testData/inspections/PyTypeCheckerInspection/BuiltinsPy3.py | 49 | def test_operators():
print(2 + <warning descr="Expected type 'Number', got 'str' instead">'foo'</warning>)
print(b'foo' + <warning descr="Expected type 'bytes', got 'str' instead">'bar'</warning>)
print(b'foo' + <warning descr="Expected type 'bytes', got 'int' instead">3</warning>)
def test_numerics():
abs(False)
int(10)
long(False)
float(False)
complex(False)
divmod(False, False)
divmod(<warning descr="Expected type 'Number', got 'bytes' instead">b'foo'</warning>, <warning descr="Expected type 'Number', got 'str' instead">'bar'</warning>)
pow(False, True)
round(False, <warning descr="Expected type 'Optional[Integral]', got 'str' instead">'foo'</warning>)
|
imankulov/sentry | refs/heads/master | src/sentry/db/exceptions.py | 38 | """
sentry.db.exceptions
~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
class QueryError(Exception):
    """Raised when a query cannot be processed."""
    pass


class CannotResolveExpression(Exception):
    """Raised when a query expression cannot be resolved."""
    pass
|
SujaySKumar/django | refs/heads/master | django/template/backends/base.py | 584 | # Since this package contains a "django" module, this is required on Python 2.
from __future__ import absolute_import
from django.core.exceptions import (
ImproperlyConfigured, SuspiciousFileOperation,
)
from django.template.utils import get_app_template_dirs
from django.utils._os import safe_join
from django.utils.functional import cached_property
class BaseEngine(object):
    """Abstract base class for Django template engine backends."""

    # Core methods: engines have to provide their own implementation
    # (except for from_string which is optional).

    def __init__(self, params):
        """
        Initializes the template engine.

        Receives the configuration settings as a dict.
        """
        params = params.copy()
        self.name = params.pop('NAME')
        self.dirs = list(params.pop('DIRS'))
        self.app_dirs = bool(params.pop('APP_DIRS'))
        if params:
            # Anything left over is a configuration key this engine
            # does not understand.
            raise ImproperlyConfigured(
                "Unknown parameters: {}".format(", ".join(params)))

    @property
    def app_dirname(self):
        # Subclasses supporting APP_DIRS must override this with the
        # name of the per-app template subdirectory.
        raise ImproperlyConfigured(
            "{} doesn't support loading templates from installed "
            "applications.".format(self.__class__.__name__))

    def from_string(self, template_code):
        """
        Creates and returns a template for the given source code.

        This method is optional.
        """
        raise NotImplementedError(
            "subclasses of BaseEngine should provide "
            "a from_string() method")

    def get_template(self, template_name):
        """
        Loads and returns a template for the given name.

        Raises TemplateDoesNotExist if no such template exists.
        """
        raise NotImplementedError(
            "subclasses of BaseEngine must provide "
            "a get_template() method")

    # Utility methods: they are provided to minimize code duplication and
    # security issues in third-party backends.

    @cached_property
    def template_dirs(self):
        """
        Returns a list of directories to search for templates.
        """
        # Immutable return value because it's cached and shared by callers.
        template_dirs = tuple(self.dirs)
        if self.app_dirs:
            template_dirs += get_app_template_dirs(self.app_dirname)
        return template_dirs

    def iter_template_filenames(self, template_name):
        """
        Iterates over candidate files for template_name.

        Ignores files that don't lie inside configured template dirs to avoid
        directory traversal attacks.
        """
        for template_dir in self.template_dirs:
            try:
                yield safe_join(template_dir, template_name)
            except SuspiciousFileOperation:
                # The joined path was located outside of this template_dir
                # (it might be inside another one, so this isn't fatal).
                pass
|
Kimanicodes/wananchi | refs/heads/master | app/reports/__init__.py | 1 | from flask import Blueprint
reports = Blueprint('reports', __name__)
from . import views
|
gtko/CouchPotatoServer | refs/heads/develop | libs/apscheduler/triggers/cron/__init__.py | 103 | from datetime import date, datetime
from apscheduler.triggers.cron.fields import *
from apscheduler.util import datetime_ceil, convert_to_datetime, iteritems
class CronTrigger(object):
    """Trigger that computes fire times from cron-style per-unit fields."""

    # Field evaluation order, most significant (year) first.
    FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour',
                   'minute', 'second')
    # Parser/matcher class used for each field name.
    FIELDS_MAP = {'year': BaseField,
                  'month': BaseField,
                  'week': WeekField,
                  'day': DayOfMonthField,
                  'day_of_week': DayOfWeekField,
                  'hour': BaseField,
                  'minute': BaseField,
                  'second': BaseField}

    def __init__(self, **values):
        self.start_date = values.pop('start_date', None)
        if self.start_date:
            self.start_date = convert_to_datetime(self.start_date)

        # Check field names and yank out all None valued fields
        for key, value in list(iteritems(values)):
            if key not in self.FIELD_NAMES:
                raise TypeError('Invalid field name: %s' % key)
            if value is None:
                del values[key]

        self.fields = []
        assign_defaults = False
        for field_name in self.FIELD_NAMES:
            if field_name in values:
                exprs = values.pop(field_name)
                is_default = False
                # Once the least significant explicit field has been
                # consumed, the remaining fields take their defaults.
                assign_defaults = not values
            elif assign_defaults:
                exprs = DEFAULT_VALUES[field_name]
                is_default = True
            else:
                # More significant than any explicit field: match all.
                exprs = '*'
                is_default = True

            field_class = self.FIELDS_MAP[field_name]
            field = field_class(field_name, exprs, is_default)
            self.fields.append(field)

    def _increment_field_value(self, dateval, fieldnum):
        """
        Increments the designated field and resets all less significant fields
        to their minimum values.

        :type dateval: datetime
        :type fieldnum: int
        :type amount: int
        :rtype: tuple
        :return: a tuple containing the new date, and the number of the field
                 that was actually incremented
        """
        i = 0
        values = {}
        while i < len(self.fields):
            field = self.fields[i]
            if not field.REAL:
                # Non-real fields (e.g. week) don't map directly to a
                # datetime component; skip them, shifting fieldnum when it
                # points at one.
                if i == fieldnum:
                    fieldnum -= 1
                    i -= 1
                else:
                    i += 1
                continue

            if i < fieldnum:
                values[field.name] = field.get_value(dateval)
                i += 1
            elif i > fieldnum:
                values[field.name] = field.get_min(dateval)
                i += 1
            else:
                value = field.get_value(dateval)
                maxval = field.get_max(dateval)
                if value == maxval:
                    # At the field's maximum: carry into the next more
                    # significant field.
                    fieldnum -= 1
                    i -= 1
                else:
                    values[field.name] = value + 1
                    i += 1

        return datetime(**values), fieldnum

    def _set_field_value(self, dateval, fieldnum, new_value):
        # Keep fields above fieldnum, set fieldnum to new_value and reset
        # every less significant field to its minimum.
        values = {}
        for i, field in enumerate(self.fields):
            if field.REAL:
                if i < fieldnum:
                    values[field.name] = field.get_value(dateval)
                elif i > fieldnum:
                    values[field.name] = field.get_min(dateval)
                else:
                    values[field.name] = new_value

        return datetime(**values)

    def get_next_fire_time(self, start_date):
        """Return the earliest matching datetime at or after start_date,
        or None when the search carries past the most significant field."""
        if self.start_date:
            start_date = max(start_date, self.start_date)
        next_date = datetime_ceil(start_date)
        fieldnum = 0
        while 0 <= fieldnum < len(self.fields):
            field = self.fields[fieldnum]
            curr_value = field.get_value(next_date)
            next_value = field.get_next_value(next_date)

            if next_value is None:
                # No valid value was found
                next_date, fieldnum = self._increment_field_value(
                    next_date, fieldnum - 1)
            elif next_value > curr_value:
                # A valid, but higher than the starting value, was found
                if field.REAL:
                    next_date = self._set_field_value(
                        next_date, fieldnum, next_value)
                    fieldnum += 1
                else:
                    next_date, fieldnum = self._increment_field_value(
                        next_date, fieldnum)
            else:
                # A valid value was found, no changes necessary
                fieldnum += 1

        if fieldnum >= 0:
            return next_date

    def __str__(self):
        # Only non-default fields appear in the textual representation.
        options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
                   if not f.is_default]
        return 'cron[%s]' % (', '.join(options))

    def __repr__(self):
        options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
                   if not f.is_default]
        if self.start_date:
            options.append("start_date='%s'" % self.start_date.isoformat(' '))
        return '<%s (%s)>' % (self.__class__.__name__, ', '.join(options))
|
rjeli/scikit-image | refs/heads/master | skimage/future/setup.py | 48 |
def configuration(parent_package='skimage', top_path=None):
    """Return the numpy.distutils configuration for skimage.future."""
    from numpy.distutils.misc_util import Configuration
    config = Configuration('future', parent_package, top_path)
    config.add_subpackage('graph')
    return config


if __name__ == "__main__":
    # Allow building this subpackage standalone via numpy.distutils.
    from numpy.distutils.core import setup
    config = configuration(top_path='').todict()
    setup(**config)
|
hassanabidpk/django | refs/heads/master | tests/gis_tests/gdal_tests/test_geom.py | 256 | import json
import unittest
from binascii import b2a_hex
from unittest import skipUnless
from django.contrib.gis.gdal import HAS_GDAL
from django.utils.six.moves import range
from ..test_data import TestDataMixin
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
if HAS_GDAL:
from django.contrib.gis.gdal import (OGRGeometry, OGRGeomType,
GDALException, OGRIndexError, SpatialReference, CoordTransform,
GDAL_VERSION)
@skipUnless(HAS_GDAL, "GDAL is required")
class OGRGeomTest(unittest.TestCase, TestDataMixin):
"This tests the OGR Geometry."
    def test_geomtype(self):
        "Testing OGRGeomType object."
        # OGRGeomType should initialize on all these inputs.
        OGRGeomType(1)
        OGRGeomType(7)
        OGRGeomType('point')
        OGRGeomType('GeometrycollectioN')
        OGRGeomType('LINearrING')
        OGRGeomType('Unknown')

        # Should throw TypeError on this input
        self.assertRaises(GDALException, OGRGeomType, 23)
        self.assertRaises(GDALException, OGRGeomType, 'fooD')
        self.assertRaises(GDALException, OGRGeomType, 9)

        # Equivalence can take strings, ints, and other OGRGeomTypes;
        # string comparison is case-insensitive.
        self.assertEqual(OGRGeomType(1), OGRGeomType(1))
        self.assertEqual(OGRGeomType(7), 'GeometryCollection')
        self.assertEqual(OGRGeomType('point'), 'POINT')
        self.assertNotEqual(OGRGeomType('point'), 2)
        self.assertEqual(OGRGeomType('unknown'), 0)
        self.assertEqual(OGRGeomType(6), 'MULtiPolyGON')
        self.assertEqual(OGRGeomType(1), OGRGeomType('point'))
        self.assertNotEqual(OGRGeomType('POINT'), OGRGeomType(6))

        # Testing the Django field name equivalent property.
        self.assertEqual('PointField', OGRGeomType('Point').django)
        self.assertEqual('GeometryField', OGRGeomType('Geometry').django)
        self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
        self.assertIsNone(OGRGeomType('none').django)

        # 'Geometry' initialization implies an unknown geometry type.
        gt = OGRGeomType('Geometry')
        self.assertEqual(0, gt.num)
        self.assertEqual('Unknown', gt.name)
def test_geomtype_25d(self):
"Testing OGRGeomType object with 25D types."
wkb25bit = OGRGeomType.wkb25bit
self.assertEqual(OGRGeomType(wkb25bit + 1), 'Point25D')
self.assertEqual(OGRGeomType('MultiLineString25D'), (5 + wkb25bit))
self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)
def test_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
self.assertEqual(g.wkt, geom.wkt)
def test_ewkt(self):
"Testing EWKT input/output."
for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
# First with ewkt output when no SRID in EWKT
self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
# No test consumption with an SRID specified.
ewkt_val = 'SRID=4326;%s' % ewkt_val
geom = OGRGeometry(ewkt_val)
self.assertEqual(ewkt_val, geom.ewkt)
self.assertEqual(4326, geom.srs.srid)
def test_gml(self):
"Testing GML output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
exp_gml = g.gml
if GDAL_VERSION >= (1, 8):
# In GDAL 1.8, the non-conformant GML tag <gml:GeometryCollection> was
# replaced with <gml:MultiGeometry>.
exp_gml = exp_gml.replace('GeometryCollection', 'MultiGeometry')
self.assertEqual(exp_gml, geom.gml)
def test_hex(self):
"Testing HEX input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
self.assertEqual(g.hex.encode(), geom1.hex)
# Constructing w/HEX
geom2 = OGRGeometry(g.hex)
self.assertEqual(geom1, geom2)
def test_wkb(self):
"Testing WKB input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
wkb = geom1.wkb
self.assertEqual(b2a_hex(wkb).upper(), g.hex.encode())
# Constructing w/WKB.
geom2 = OGRGeometry(wkb)
self.assertEqual(geom1, geom2)
def test_json(self):
"Testing GeoJSON input/output."
for g in self.geometries.json_geoms:
geom = OGRGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
# Loading jsons to prevent decimal differences
self.assertEqual(json.loads(g.json), json.loads(geom.json))
self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json))
# Test input with some garbage content (but valid json) (#15529)
geom = OGRGeometry('{"type": "Point", "coordinates": [ 100.0, 0.0 ], "other": "<test>"}')
self.assertIsInstance(geom, OGRGeometry)
def test_points(self):
"Testing Point objects."
OGRGeometry('POINT(0 0)')
for p in self.geometries.points:
if not hasattr(p, 'z'): # No 3D
pnt = OGRGeometry(p.wkt)
self.assertEqual(1, pnt.geom_type)
self.assertEqual('POINT', pnt.geom_name)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual((p.x, p.y), pnt.tuple)
def test_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mgeom1 = OGRGeometry(mp.wkt) # First one from WKT
self.assertEqual(4, mgeom1.geom_type)
self.assertEqual('MULTIPOINT', mgeom1.geom_name)
mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint
mgeom3 = OGRGeometry('MULTIPOINT')
for g in mgeom1:
mgeom2.add(g) # adding each point from the multipoints
mgeom3.add(g.wkt) # should take WKT as well
self.assertEqual(mgeom1, mgeom2) # they should equal
self.assertEqual(mgeom1, mgeom3)
self.assertEqual(mp.coords, mgeom2.coords)
self.assertEqual(mp.n_p, mgeom2.point_count)
def test_linestring(self):
"Testing LineString objects."
prev = OGRGeometry('POINT(0 0)')
for ls in self.geometries.linestrings:
linestr = OGRGeometry(ls.wkt)
self.assertEqual(2, linestr.geom_type)
self.assertEqual('LINESTRING', linestr.geom_name)
self.assertEqual(ls.n_p, linestr.point_count)
self.assertEqual(ls.coords, linestr.tuple)
self.assertEqual(linestr, OGRGeometry(ls.wkt))
self.assertNotEqual(linestr, prev)
self.assertRaises(OGRIndexError, linestr.__getitem__, len(linestr))
prev = linestr
# Testing the x, y properties.
x = [tmpx for tmpx, tmpy in ls.coords]
y = [tmpy for tmpx, tmpy in ls.coords]
self.assertEqual(x, linestr.x)
self.assertEqual(y, linestr.y)
def test_multilinestring(self):
"Testing MultiLineString objects."
prev = OGRGeometry('POINT(0 0)')
for mls in self.geometries.multilinestrings:
mlinestr = OGRGeometry(mls.wkt)
self.assertEqual(5, mlinestr.geom_type)
self.assertEqual('MULTILINESTRING', mlinestr.geom_name)
self.assertEqual(mls.n_p, mlinestr.point_count)
self.assertEqual(mls.coords, mlinestr.tuple)
self.assertEqual(mlinestr, OGRGeometry(mls.wkt))
self.assertNotEqual(mlinestr, prev)
prev = mlinestr
for ls in mlinestr:
self.assertEqual(2, ls.geom_type)
self.assertEqual('LINESTRING', ls.geom_name)
self.assertRaises(OGRIndexError, mlinestr.__getitem__, len(mlinestr))
def test_linearring(self):
"Testing LinearRing objects."
prev = OGRGeometry('POINT(0 0)')
for rr in self.geometries.linearrings:
lr = OGRGeometry(rr.wkt)
# self.assertEqual(101, lr.geom_type.num)
self.assertEqual('LINEARRING', lr.geom_name)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(lr, OGRGeometry(rr.wkt))
self.assertNotEqual(lr, prev)
prev = lr
def test_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180, -90, 180, 90)
p = OGRGeometry.from_bbox(bbox)
self.assertEqual(bbox, p.extent)
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.polygons:
poly = OGRGeometry(p.wkt)
self.assertEqual(3, poly.geom_type)
self.assertEqual('POLYGON', poly.geom_name)
self.assertEqual(p.n_p, poly.point_count)
self.assertEqual(p.n_i + 1, len(poly))
# Testing area & centroid.
self.assertAlmostEqual(p.area, poly.area, 9)
x, y = poly.centroid.tuple
self.assertAlmostEqual(p.centroid[0], x, 9)
self.assertAlmostEqual(p.centroid[1], y, 9)
# Testing equivalence
self.assertEqual(poly, OGRGeometry(p.wkt))
self.assertNotEqual(poly, prev)
if p.ext_ring_cs:
ring = poly[0]
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple)
self.assertEqual(len(p.ext_ring_cs), ring.point_count)
for r in poly:
self.assertEqual('LINEARRING', r.geom_name)
def test_closepolygons(self):
"Testing closing Polygon objects."
# Both rings in this geometry are not closed.
poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))')
self.assertEqual(8, poly.point_count)
with self.assertRaises(GDALException):
poly.centroid
poly.close_rings()
self.assertEqual(10, poly.point_count) # Two closing points should've been added
self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid)
def test_multipolygons(self):
"Testing MultiPolygon objects."
OGRGeometry('POINT(0 0)')
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
self.assertEqual(6, mpoly.geom_type)
self.assertEqual('MULTIPOLYGON', mpoly.geom_name)
if mp.valid:
self.assertEqual(mp.n_p, mpoly.point_count)
self.assertEqual(mp.num_geom, len(mpoly))
self.assertRaises(OGRIndexError, mpoly.__getitem__, len(mpoly))
for p in mpoly:
self.assertEqual('POLYGON', p.geom_name)
self.assertEqual(3, p.geom_type)
self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt)
    def test_srs(self):
        "Testing OGR Geometries with Spatial Reference objects."
        for mp in self.geometries.multipolygons:
            # Creating a geometry w/spatial reference
            sr = SpatialReference('WGS84')
            mpoly = OGRGeometry(mp.wkt, sr)
            self.assertEqual(sr.wkt, mpoly.srs.wkt)
            # Ensuring that SRS is propagated to clones.
            klone = mpoly.clone()
            self.assertEqual(sr.wkt, klone.srs.wkt)
            # Ensuring all children geometries (polygons and their rings) all
            # return the assigned spatial reference as well.
            for poly in mpoly:
                self.assertEqual(sr.wkt, poly.srs.wkt)
                for ring in poly:
                    self.assertEqual(sr.wkt, ring.srs.wkt)
            # Ensuring SRS propagate in topological ops.
            a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr)
            b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr)
            diff = a.difference(b)
            union = a.union(b)
            self.assertEqual(sr.wkt, diff.srs.wkt)
            self.assertEqual(sr.srid, union.srs.srid)
            # Instantiating w/an integer SRID
            mpoly = OGRGeometry(mp.wkt, 4326)
            self.assertEqual(4326, mpoly.srid)
            # Reassigning the SRS after construction must update `srid`/`name`.
            mpoly.srs = SpatialReference(4269)
            self.assertEqual(4269, mpoly.srid)
            self.assertEqual('NAD83', mpoly.srs.name)
            # Incrementing through the multipolygon after the spatial reference
            # has been re-assigned.
            for poly in mpoly:
                self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)
                # `srs` accepts a bare integer SRID as well.
                poly.srs = 32140
                for ring in poly:
                    # Changing each ring in the polygon
                    self.assertEqual(32140, ring.srs.srid)
                    self.assertEqual('NAD83 / Texas South Central', ring.srs.name)
                    # `srs` also accepts a WKT string.
                    ring.srs = str(SpatialReference(4326)) # back to WGS84
                    self.assertEqual(4326, ring.srs.srid)
                    # Using the `srid` property.
                    ring.srid = 4322
                    self.assertEqual('WGS 72', ring.srs.name)
                    self.assertEqual(4322, ring.srid)
def test_srs_transform(self):
"Testing transform()."
orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)
# Using an srid, a SpatialReference object, and a CoordTransform object
# or transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(SpatialReference('EPSG:2774'))
ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test_transform_dim(self):
"Testing coordinate dimension is the same on transformed geometries."
ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)
prec = 3
ls_orig.transform(ls_trans.srs)
# Making sure the coordinate dimension is still 2D.
self.assertEqual(2, ls_orig.coord_dim)
self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)
def test_difference(self):
"Testing difference()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test_intersection(self):
"Testing intersects() and intersection()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
self.assertTrue(a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test_symdifference(self):
"Testing sym_difference()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test_union(self):
"Testing union()."
for i in range(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test_add(self):
"Testing GeometryCollection.add()."
# Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon')
pnt = OGRGeometry('POINT(5 23)')
self.assertRaises(GDALException, mp.add, pnt)
# GeometryCollection.add may take an OGRGeometry (if another collection
# of the same type all child geoms will be added individually) or WKT.
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
mp1 = OGRGeometry('MultiPolygon')
mp2 = OGRGeometry('MultiPolygon')
mp3 = OGRGeometry('MultiPolygon')
for poly in mpoly:
mp1.add(poly) # Adding a geometry at a time
mp2.add(poly.wkt) # Adding WKT
mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once.
for tmp in (mp1, mp2, mp3):
self.assertEqual(mpoly, tmp)
def test_extent(self):
"Testing `extent` property."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)')
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
# Testing on the 'real world' Polygon.
poly = OGRGeometry(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test_25D(self):
"Testing 2.5D geometries."
pnt_25d = OGRGeometry('POINT(1 2 3)')
self.assertEqual('Point25D', pnt_25d.geom_type.name)
self.assertEqual(3.0, pnt_25d.z)
self.assertEqual(3, pnt_25d.coord_dim)
ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)')
self.assertEqual('LineString25D', ls_25d.geom_type.name)
self.assertEqual([1.0, 2.0, 3.0], ls_25d.z)
self.assertEqual(3, ls_25d.coord_dim)
def test_pickle(self):
"Testing pickle support."
g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
g2 = pickle.loads(pickle.dumps(g1))
self.assertEqual(g1, g2)
self.assertEqual(4326, g2.srs.srid)
self.assertEqual(g1.srs.wkt, g2.srs.wkt)
def test_ogrgeometry_transform_workaround(self):
"Testing coordinate dimensions on geometries after transformation."
# A bug in GDAL versions prior to 1.7 changes the coordinate
# dimension of a geometry after it has been transformed.
# This test ensures that the bug workarounds employed within
# `OGRGeometry.transform` indeed work.
wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))"
wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))"
srid = 4326
# For both the 2D and 3D MultiLineString, ensure _both_ the dimension
# of the collection and the component LineString have the expected
# coordinate dimension after transform.
geom = OGRGeometry(wkt_2d, srid)
geom.transform(srid)
self.assertEqual(2, geom.coord_dim)
self.assertEqual(2, geom[0].coord_dim)
self.assertEqual(wkt_2d, geom.wkt)
geom = OGRGeometry(wkt_3d, srid)
geom.transform(srid)
self.assertEqual(3, geom.coord_dim)
self.assertEqual(3, geom[0].coord_dim)
self.assertEqual(wkt_3d, geom.wkt)
    def test_equivalence_regression(self):
        "Testing equivalence methods with non-OGRGeometry instances."
        # Comparing geometries against arbitrary objects must not raise.
        self.assertIsNotNone(OGRGeometry('POINT(0 0)'))
        self.assertNotEqual(OGRGeometry('LINESTRING(0 0, 1 1)'), 3)
|
opensourcechipspark/platform_external_chromium_org | refs/heads/master | tools/telemetry/telemetry/core/platform/win_platform_backend.py | 23 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import ctypes
import os
import platform
import re
import subprocess

try:
  import pywintypes  # pylint: disable=F0401
  import win32api  # pylint: disable=F0401
  import win32con  # pylint: disable=F0401
  import win32process  # pylint: disable=F0401
except ImportError:
  pywintypes = None
  win32api = None
  win32con = None
  win32process = None

from telemetry.core.platform import desktop_platform_backend
class WinPlatformBackend(desktop_platform_backend.DesktopPlatformBackend):
  """Windows desktop platform backend.

  Uses the pywin32 bindings (win32api/win32process) and the psapi/wmic
  system utilities to collect process and system statistics.
  """

  def _GetProcessHandle(self, pid):
    """Returns a win32 handle for |pid| with query/VM-read access only."""
    mask = (win32con.PROCESS_QUERY_INFORMATION |
            win32con.PROCESS_VM_READ)
    return win32api.OpenProcess(mask, False, pid)

  # pylint: disable=W0613
  def StartRawDisplayFrameRateMeasurement(self):
    raise NotImplementedError()

  def StopRawDisplayFrameRateMeasurement(self):
    raise NotImplementedError()

  def GetRawDisplayFrameRateMeasurements(self):
    raise NotImplementedError()

  def IsThermallyThrottled(self):
    raise NotImplementedError()

  def HasBeenThermallyThrottled(self):
    raise NotImplementedError()

  def GetSystemCommitCharge(self):
    """Returns the systemwide commit charge, in kB."""
    class PerformanceInfo(ctypes.Structure):
      """Struct for GetPerformanceInfo() call
      http://msdn.microsoft.com/en-us/library/ms683210
      """
      _fields_ = [('size', ctypes.c_ulong),
                  ('CommitTotal', ctypes.c_size_t),
                  ('CommitLimit', ctypes.c_size_t),
                  ('CommitPeak', ctypes.c_size_t),
                  ('PhysicalTotal', ctypes.c_size_t),
                  ('PhysicalAvailable', ctypes.c_size_t),
                  ('SystemCache', ctypes.c_size_t),
                  ('KernelTotal', ctypes.c_size_t),
                  ('KernelPaged', ctypes.c_size_t),
                  ('KernelNonpaged', ctypes.c_size_t),
                  ('PageSize', ctypes.c_size_t),
                  ('HandleCount', ctypes.c_ulong),
                  ('ProcessCount', ctypes.c_ulong),
                  ('ThreadCount', ctypes.c_ulong)]

      def __init__(self):
        self.size = ctypes.sizeof(self)
        super(PerformanceInfo, self).__init__()

    performance_info = PerformanceInfo()
    ctypes.windll.psapi.GetPerformanceInfo(
        ctypes.byref(performance_info), performance_info.size)
    # Integer division keeps the result an int under Python 3 as well.
    return performance_info.CommitTotal * performance_info.PageSize // 1024

  def GetMemoryStats(self, pid):
    """Returns a dict of memory statistics (in bytes) for |pid|.

    Returns an empty dict if the process has already exited."""
    try:
      memory_info = win32process.GetProcessMemoryInfo(
          self._GetProcessHandle(pid))
    except pywintypes.error as e:  # 'as' form works on Python 2.6+ and 3.
      errcode = e.args[0]
      if errcode == 87:  # The process may have been closed.
        return {}
      raise
    return {'VM': memory_info['PagefileUsage'],
            'VMPeak': memory_info['PeakPagefileUsage'],
            'WorkingSetSize': memory_info['WorkingSetSize'],
            'WorkingSetSizePeak': memory_info['PeakWorkingSetSize']}

  def GetIOStats(self, pid):
    """Returns a dict of cumulative I/O counters for |pid|.

    Returns an empty dict if the process has already exited."""
    try:
      io_stats = win32process.GetProcessIoCounters(
          self._GetProcessHandle(pid))
    except pywintypes.error as e:
      errcode = e.args[0]
      if errcode == 87:  # The process may have been closed.
        return {}
      raise
    return {'ReadOperationCount': io_stats['ReadOperationCount'],
            'WriteOperationCount': io_stats['WriteOperationCount'],
            'ReadTransferCount': io_stats['ReadTransferCount'],
            'WriteTransferCount': io_stats['WriteTransferCount']}

  def GetChildPids(self, pid):
    """Returns a list of (transitive) child pids of |pid|."""
    creation_ppid_pid_list = subprocess.Popen(
        ['wmic', 'process', 'get', 'CreationDate,ParentProcessId,ProcessId',
         '/format:csv'],
        stdout=subprocess.PIPE).communicate()[0]
    ppid_map = collections.defaultdict(list)
    creation_map = {}
    # [3:] To skip 2 blank lines and header.
    for creation_ppid_pid in creation_ppid_pid_list.splitlines()[3:]:
      if not creation_ppid_pid:
        continue
      _, creation, curr_ppid, curr_pid = creation_ppid_pid.split(',')
      ppid_map[int(curr_ppid)].append(int(curr_pid))
      if creation:
        # Strip the trailing UTC-offset part (e.g. '+480') before parsing.
        creation_map[int(curr_pid)] = float(re.split('[+-]', creation)[0])

    def _InnerGetChildPids(pid):
      if not pid or pid not in ppid_map:
        return []
      # Only accept children created after the parent, guarding against
      # pid reuse.
      ret = [p for p in ppid_map[pid] if creation_map[p] >= creation_map[pid]]
      for child in ret:
        if child == pid:
          continue
        ret.extend(_InnerGetChildPids(child))
      return ret

    return _InnerGetChildPids(pid)

  def GetCommandLine(self, pid):
    """Returns the command line of |pid|; raises if |pid| is not found."""
    command_pid_list = subprocess.Popen(
        ['wmic', 'process', 'get', 'CommandLine,ProcessId',
         '/format:csv'],
        stdout=subprocess.PIPE).communicate()[0]
    # [3:] To skip 2 blank lines and header.
    for command_pid in command_pid_list.splitlines()[3:]:
      if not command_pid:
        continue
      parts = command_pid.split(',')
      curr_pid = parts[-1]
      if pid == int(curr_pid):
        # The command line itself may contain commas; rejoin them.
        command = ','.join(parts[1:-1])
        return command
    raise Exception('Could not get command line for %d' % pid)

  def GetOSName(self):
    return 'win'

  def GetOSVersionName(self):
    """Returns a short name for the Windows release, e.g. 'win7'.

    Returns None for unrecognized versions."""
    # Bug fix: os.uname() does not exist on Windows and raised
    # AttributeError here.  platform.version() returns the OS version
    # string (e.g. '6.1.7601') on all platforms.
    os_version = platform.version()
    if os_version.startswith('5.1.'):
      return 'xp'
    if os_version.startswith('6.0.'):
      return 'vista'
    if os_version.startswith('6.1.'):
      return 'win7'
    if os_version.startswith('6.2.'):
      return 'win8'

  def CanFlushIndividualFilesFromSystemCache(self):
    return True

  def GetFlushUtilityName(self):
    return 'clear_system_cache.exe'
|
spdx/spdx-github | refs/heads/master | tests/unit_test.py | 1 | # Copyright (c) Anna Buhman.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import sys
from os import path, remove
import shutil
import re
import subprocess
import mock
from git import Repo
from git import test
from spdx_github import repo_scan
#Test that when given a valid zip file url,
#the download_github_zip will result in the creation
#of a local file at the returned location
class DownloadFileTestCase(unittest.TestCase):
    #download_github_zip should fetch the zip and return its local path.
    #NOTE(review): requires network access to github.com.
    file_location = ''
    url = 'https://github.com/abuhman/test_webhooks/archive/master.zip'
    def setUp(self):
        self.file_location = repo_scan.download_github_zip(self.url)
    def tearDown(self):
        #Remove the downloaded file
        remove(self.file_location)
    def testDownload(self):
        #A file must exist at the returned location.
        assert path.isfile(self.file_location)
#Test that we can unzip a zip file.
class UnzipFileTestCase(unittest.TestCase):
    #unzip_file should extract the checked-in test.zip and return the
    #directory it created.
    file_location = 'test.zip'
    extracted_directory = ''
    def setUp(self):
        self.extracted_directory = repo_scan.unzip_file(self.file_location)
    def tearDown(self):
        #Remove the unzipped directory
        shutil.rmtree(self.extracted_directory)
    def testUnzip(self):
        #The returned path must be an existing directory.
        assert path.isdir(self.extracted_directory)
#This tests whether a file output is produced from calling the scan method.
class ScanTestCase(unittest.TestCase):
    #Scans the checked-in test2/ directory and verifies an SPDX file is
    #written with the expected name.
    directory = 'test2/'
    spdx_file_name = ''
    def setUp(self):
        #Set output file name to the directory name .SPDX.
        self.spdx_file_name = '{}.SPDX'.format(self.directory[:-1])
        #scan the extracted directory and put results in a named file,
        #using the scancode scanner and tag-value output format.
        repo_scan.scan(self.directory, self.spdx_file_name,
                       'scancode', 'tag-value')
    def tearDown(self):
        #Remove the scan results file
        remove(self.spdx_file_name)
    def testScan(self):
        assert path.isfile(self.spdx_file_name)
#Test trying to scan with a scanner that isn't implemented
class ScannerDoesntExistTestCase(unittest.TestCase):
directory = 'test2/'
spdx_file_name = ''
#result should be false because it is a fake scanner
result = repo_scan.scan(directory, spdx_file_name, 'fake_scanner',
'tag-value')
def testScannerDoesntExist(self):
assert self.result == False
#This checks whether the check_valid_url method correctly determines
#whether a url results in an error (400 or 500 code).
#or whether it is not a real url (does not start with http://
#or https:// )
class CheckURLTestCase(unittest.TestCase):
    #NOTE(review): these tests hit the live network and will fail offline.
    good_url = 'https://www.google.com/'
    bad_url = 'https://www.google.com/fail'
    not_url = 'abcdefg'
    def testGoodURL(self):
        #A reachable URL is reported valid.
        assert repo_scan.check_valid_url(self.good_url) == True
    def testBadURL(self):
        #A URL that returns an error (4xx/5xx) is reported invalid.
        assert repo_scan.check_valid_url(self.bad_url) == False
    def testNotURL(self):
        #A string that is not an http(s) URL at all is reported invalid.
        assert repo_scan.check_valid_url(self.not_url) == False
#Check that the YAML configuration/environment method is working.
#It should return a dictionary with the contents of the config
#or environment file.
class GetConfigTestCase(unittest.TestCase):
from spdx_github import repo_scan
configExisting = repo_scan.get_config_yml('./', 'test.yml')
configNotExisting = repo_scan.get_config_yml('test/', 'configuration.yml')
environNotExisting = repo_scan.get_config_yml('test/', 'environment.yml')
#A configuration file that exists should yield values that match
#the testing file
def testExistingConfig(self):
assert self.configExisting['output_file_name'] == 'file_name.SPDX'
assert self.configExisting['output_type'] == 'rdf'
#A configuration file that does not exist should yield default
#values
def testNotExistingConfig(self):
assert self.configNotExisting['output_file_name'] == 'test.SPDX'
assert self.configNotExisting['output_type'] == 'tag-value'
#An environment file that does not exist should yield an empty
#dictionary
def testNotExistingEnviron(self):
assert type(self.environNotExisting) is dict
assert not self.environNotExisting
#Test the method that syncs a repo to its remote
class SyncRepoTestCase(unittest.TestCase):
#Set up a local repo
repo_path = './test_repo'
main_repo_user = 'abuhman'
repo_name = 'test_webhooks'
repo = Repo.init(repo_path)
#Call the sync_main_repo method to sync it with the remote
repo_scan.sync_main_repo(repo_path, main_repo_user, repo_name, repo)
#Get the remote origin and fetch any changes
main_repo_url = ('https://www.github.com/{}/{}.git'.format(main_repo_user,
repo_name))
origin = repo.create_remote('origin', main_repo_url)
repo.git.fetch()
#Check the diff between the remote version and the local version
output_string = repo.git.diff('origin/master')
repo.delete_remote(origin)
def tearDown(self):
shutil.rmtree(self.repo_path)
def testRepoSynced(self):
#Output of git diff should be empty if they are synced
assert self.output_string == ''
#Test the commit_file method which adds a file and makes a
#commit
class MakeCommitTestCase(unittest.TestCase):
    """commit_file() adds a file and commits it with the given author info."""
    file_name = 'test.yml'

    def setUp(self):
        #This setup (file copy and the commit itself) previously ran at
        #class-definition (import) time; setUp defers it to test execution.
        subprocess.check_output(['cp', self.file_name, './test_repo'])
        self.repo = Repo.init('./test_repo')
        environment = {}
        environment['git_name'] = 'TEST_NAME'
        environment['git_email'] = 'TEST_EMAIL'
        environment['git_commit_message'] = 'TEST_MSG'
        #Call the commit method in order to make the commit
        repo_scan.commit_file(self.file_name, self.repo, environment)
        #Get the head commit.
        self.headcommit = self.repo.head.commit

    #To tear down, reset to origin master, which deletes the test commit
    def tearDown(self):
        main_repo_user = 'abuhman'
        repo_name = 'test_webhooks'
        main_repo_url = ('https://www.github.com/{}/{}.git'.format(
            main_repo_user, repo_name))
        origin = self.repo.create_remote('origin', main_repo_url)
        origin.fetch()
        self.repo.git.reset('--hard', 'origin/master')

    #The head commit name should match the test commit name
    #because the test commit should be the most recent commit.
    def testCommitMade(self):
        assert self.headcommit.author.name == 'TEST_NAME'
#Get scan info should get us the contents of both the environment
#and configuration files.
#This test will fail if the environment and configuration files
#are set up wrong
class GetScanInfoTestCase(unittest.TestCase):
url = 'https://github.com/abuhman/test_webhooks/archive/master.zip'
scanner_info = repo_scan.get_scan_info(url)
#Test that we have gotten keys from both environment.yml
#and configuration.yml
def testGetScanInfo(self):
#'scanner' is in the configuration file
assert 'scanner' in self.scanner_info
#The value of 'scanner' is in the environment file
assert self.scanner_info['scanner'] in self.scanner_info
#Test the repo_scan method, which handles the process of a
#local scan.
class repoScanTestCase(unittest.TestCase):
repo_zip_url = 'https://github.com/abuhman/test_webhooks/archive/master.zip'
spdx_file_path = repo_scan.repo_scan(repo_zip_url, remote = False,
task_id = 0)
def tearDown(self):
remove(self.spdx_file_path)
def testRepoScan(self):
assert path.isfile(self.spdx_file_path), self.spdx_file_path
#Tests the pull_request_to_github method, which makes a pull request
#to github. This test does not actually make a pull request
#due to using a mock in place of the API call.
class pullRequestToGithubTestCase(unittest.TestCase):
    #Construct dummy input to call the pull request method
    environment = {}
    environment['github_username'] = 'test_username'
    environment['github_password'] = 'test_password'
    environment['github_pull_request_title'] = 'test_title'
    repo_name = 'test_repo_name'
    main_repo_user = 'test_username_main'
    #Expected curl credentials, endpoint URL and JSON payload, rebuilt from
    #the same inputs the production code receives.
    auth_string = '{}:{}'.format(environment['github_username'],
                                 environment['github_password'])
    url = 'https://api.github.com/repos/{}/{}/pulls'.format(main_repo_user,
                                                            repo_name)
    pull_request_data = ('{{"title": "{}", "head": "{}:master",'
                         ' "base": "master"}}'.format(
                             environment['github_pull_request_title'],
                             environment['github_username']))
    def mock_pull_request(arguments_list):
        return arguments_list
    #Call the pull request method (the actual pull request portion is
    #mocked)
    @mock.patch('subprocess.check_output', side_effect = mock_pull_request)
    def testPullRequestToGithub(self, mock_subprocess):
        #check_output is mocked to echo its arguments, so `result` is the
        #exact command pull_request_to_github would have executed.
        result = repo_scan.pull_request_to_github(self.main_repo_user,
                                                  self.repo_name,
                                                  self.environment)
        #Make sure the command used for the pull request was correct
        #NOTE(review): the assert message references self.result, which does
        #not exist -- a failing assert would raise AttributeError instead.
        assert result == ['curl', '--user', self.auth_string, self.url,
                          '-d', self.pull_request_data], self.result
#Tests the create_fork method, which creates a fork
#of a remote repository. This test does not actually
#create a fork and replaces the call with a mock.
class createForkTestCase(unittest.TestCase):
    #Construct testing input for the fork method
    environment = {}
    environment['github_username'] = 'test_username'
    main_repo_user = 'test_username_main'
    repo_name = 'test_repo_name'
    fork_string = '{}/{}'.format(main_repo_user, repo_name)
    #Expected `git hub fork` command line.
    fork_command = ['git', 'hub', 'fork', fork_string]
    def mock_fork(arguments_list):
        return arguments_list
    #Call the fork method using the testing input and
    #check that the mocked fork command was correctly called.
    @mock.patch('subprocess.check_output', side_effect = mock_fork)
    def testFork(self, mock_subprocess):
        #check_output is mocked to echo its arguments, so `result` is the
        #exact command create_fork would have executed.
        result = repo_scan.create_fork(self.repo_name, self.main_repo_user,
                                       self.environment)
        assert result == self.fork_command
#This tests the check_fork_exists method, which determines
#if a fork of a remote repository exists.
class checkForkExistsTestCase(unittest.TestCase):
    #NOTE(review): exercises the live GitHub API; fails offline or if the
    #referenced repositories change.
    fork_exists = ('https://api.github.com/repos/abuhmantest/test_webhooks')
    fork_not_exists = 'https://api.github.com/repos/test_user/test_fork'
    def testForkExists(self):
        assert repo_scan.check_fork_exists(self.fork_exists)
    def testForkNotExists(self):
        assert not repo_scan.check_fork_exists(self.fork_not_exists)
#Tests the find_file_location method that dynamically
#finds a file in a directory.
class findFileLocationTestCase(unittest.TestCase):
    #find_file_location() searches `directory` and returns the directory
    #containing `file_name`.
    directory = './'
    file_name = 'configuration.YAML'
    #NOTE(review): this call runs at class-definition (import) time.
    location = repo_scan.find_file_location(directory, file_name)
    def testFileLocation(self):
        assert self.location == './test2/', self.location
if __name__ == '__main__':
    # Run every test case in this module when executed as a script.
    unittest.main()
|
Thielak/program-y | refs/heads/rc | src/test/aiml_tests/datetime_tests/test_datetime_aiml.py | 1 | import unittest
import os
from test.aiml_tests.client import TestClient
from programy.config import BrainFileConfiguration
unittest.util._MAX_LENGTH=2000  # lengthen unittest's truncated assertion-failure diffs
class BasicTestClient(TestClient):
    # Test client whose brain loads the AIML/set/map files that live next
    # to this test module.
    def __init__(self):
        TestClient.__init__(self, debug=True)
    def load_configuration(self, arguments):
        super(BasicTestClient, self).load_configuration(arguments)
        # Point the brain at this directory's .aiml files and at the
        # sets/ and maps/ subdirectories (the final False presumably
        # disables recursive directory loading -- TODO confirm against
        # BrainFileConfiguration).
        self.configuration.brain_configuration._aiml_files = BrainFileConfiguration(os.path.dirname(__file__), ".aiml", False)
        self.configuration.brain_configuration._set_files = BrainFileConfiguration(os.path.dirname(__file__)+"/sets", ".txt", False)
        self.configuration.brain_configuration._map_files = BrainFileConfiguration(os.path.dirname(__file__)+"/maps", ".txt", False)
class DateTimeAIMLTests(unittest.TestCase):
DEFAULT_DATETIME_REGEX = "^.*.{3}\s*.{3}\s*\d{1,}\s\d{2}:\d{2}:\d{2}\s\d{4}"
def setUp(cls):
DateTimeAIMLTests.test_client = BasicTestClient()
def test_date(self):
response = DateTimeAIMLTests.test_client.bot.ask_question("test", "TEST DATE")
self.assertIsNotNone(response)
self.assertRegex(response, DateTimeAIMLTests.DEFAULT_DATETIME_REGEX)
def test_interval(self):
response = DateTimeAIMLTests.test_client.bot.ask_question("test", "TEST INTERVAL")
self.assertIsNotNone(response)
self.assertEqual(response, "2")
def test_season(self):
response = DateTimeAIMLTests.test_client.bot.ask_question("test", "SEASON")
self.assertIsNotNone(response)
self.assertEqual(response, "Winter")
def test_age(self):
DateTimeAIMLTests.test_client.bot.brain.properties.add_property('birthdate', "September 9, 2016")
response = DateTimeAIMLTests.test_client.bot.ask_question("test", "AGE")
self.assertIsNotNone(response)
self.assertRegex(response, "I am \d{1}|\d{2} months old.")
def test_age_in_years(self):
DateTimeAIMLTests.test_client.bot.brain.properties.add_property('birthdate', "September 9, 2016")
response = DateTimeAIMLTests.test_client.bot.ask_question("test", "AGE IN YEARS")
self.assertIsNotNone(response)
self.assertEqual(response, "0")
def test_days_until(self):
response = DateTimeAIMLTests.test_client.bot.ask_question("test", "DAYS UNTIL SUNDAY")
self.assertIsNotNone(response)
self.assertRegex(response, "\d{1}|\d{2}")
|
tahmid-tanzim/youtube-dl | refs/heads/master | youtube_dl/extractor/yourupload.py | 142 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
class YourUploadIE(InfoExtractor):
    # Accepts watch pages and both embed hosts; (?x) verbose mode ignores
    # the layout whitespace.
    _VALID_URL = r'''(?x)https?://(?:www\.)?
        (?:yourupload\.com/watch|
           embed\.yourupload\.com|
           embed\.yucache\.net
        )/(?P<id>[A-Za-z0-9]+)
        '''
    _TESTS = [
        {
            'url': 'http://yourupload.com/watch/14i14h',
            'md5': '5e2c63385454c557f97c4c4131a393cd',
            'info_dict': {
                'id': '14i14h',
                'ext': 'mp4',
                'title': 'BigBuckBunny_320x180.mp4',
                'thumbnail': 're:^https?://.*\.jpe?g',
            }
        },
        {
            'url': 'http://embed.yourupload.com/14i14h',
            'only_matching': True,
        },
        {
            'url': 'http://embed.yucache.net/14i14h?client_file_id=803349',
            'only_matching': True,
        },
    ]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        # Metadata is scraped from the embed page, regardless of which URL
        # form was supplied.
        embed_url = 'http://embed.yucache.net/{0:}'.format(video_id)
        webpage = self._download_webpage(embed_url, video_id)

        info = {
            'id': video_id,
            'title': self._og_search_title(webpage),
            'url': self._og_search_video_url(webpage),
            'thumbnail': self._og_search_thumbnail(webpage, default=None),
            # The CDN expects the embed page as Referer.
            'http_headers': {
                'Referer': embed_url,
            },
        }
        return info
|
zhouzhenghui/python-for-android | refs/heads/master | python3-alpha/extra_modules/pyxmpp2/ext/component.py | 46 | #
# (C) Copyright 2003-2010 Jacek Konieczny <jajcus@jajcus.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""Component (jabber:component:accept) stream handling.
Normative reference:
- `JEP 114 <http://www.jabber.org/jeps/jep-0114.html>`__
"""
# Module deliberately disabled: importing it fails until this code is
# ported to the PyXMPP2 API.
raise ImportError("{0} is not yet rewritten for PyXMPP2".format(__name__))

__docformat__="restructuredtext en"
import hashlib
import logging
from ..stream import Stream
from ..streambase import stanza_factory,HostMismatch
from ..xmlextra import common_doc,common_root
from ..utils import to_utf8
from ..exceptions import StreamError,FatalStreamError,ComponentStreamError,FatalComponentStreamError
class ComponentStream(Stream):
"""Handles jabberd component (jabber:component:accept) connection stream.
:Ivariables:
- `server`: server to use.
- `port`: port number to use.
- `secret`: authentication secret.
:Types:
- `server`: `str`
- `port`: `int`
- `secret`: `str`"""
    def __init__(self, jid, secret, server, port, keepalive = 0, owner = None):
        """Initialize a `ComponentStream` object.

        :Parameters:
            - `jid`: JID of the component.
            - `secret`: authentication secret.
            - `server`: server address.
            - `port`: TCP port number on the server.
            - `keepalive`: keepalive interval. 0 to disable.
            - `owner`: `Client`, `Component` or similar object "owning" this stream.
        """
        # Component streams use no SASL/TLS; authentication happens via the
        # jabber:component:accept handshake instead (see `_auth`).
        Stream.__init__(self, "jabber:component:accept",
                            sasl_mechanisms = [],
                            tls_settings = None,
                            keepalive = keepalive,
                            owner = owner)
        self.server=server
        self.port=port
        self.me=jid
        self.secret=secret
        # Presumably makes the stream deliver every stanza to its own
        # handlers -- TODO confirm semantics in `Stream`.
        self.process_all_stanzas=1
        self.__logger=logging.getLogger("pyxmpp2.jabberd.ComponentStream")
    def _reset(self):
        """Reset `ComponentStream` object state, making the object ready to
        handle new connections."""
        # No component-specific state to clear beyond the base class.
        Stream._reset(self)
def connect(self,server=None,port=None):
"""Establish a client connection to a server.
[component only]
:Parameters:
- `server`: name or address of the server to use. If not given
then use the one specified when creating the object.
- `port`: port number of the server to use. If not given then use
the one specified when creating the object.
:Types:
- `server`: `str`
- `port`: `int`"""
self.lock.acquire()
try:
self._connect(server,port)
finally:
self.lock.release()
def _connect(self,server=None,port=None):
"""Same as `ComponentStream.connect` but assume `self.lock` is acquired."""
if self.me.node or self.me.resource:
raise Value("Component JID may have only domain defined")
if not server:
server=self.server
if not port:
port=self.port
if not server or not port:
raise ValueError("Server or port not given")
Stream._connect(self,server,port,None,self.me)
def accept(self,sock):
"""Accept an incoming component connection.
[server only]
:Parameters:
- `sock`: a listening socket."""
Stream.accept(self,sock,None)
def stream_start(self,doc):
"""Process <stream:stream> (stream start) tag received from peer.
Call `Stream.stream_start`, but ignore any `HostMismatch` error.
:Parameters:
- `doc`: document created by the parser"""
try:
Stream.stream_start(self,doc)
except HostMismatch:
pass
def _post_connect(self):
"""Initialize authentication when the connection is established
and we are the initiator."""
if self.initiator:
self._auth()
def _compute_handshake(self):
"""Compute the authentication handshake value.
:return: the computed hash value.
:returntype: `str`"""
return hashlib.sha1(to_utf8(self.stream_id)+to_utf8(self.secret)).hexdigest()
def _auth(self):
"""Authenticate on the server.
[component only]"""
if self.authenticated:
self.__logger.debug("_auth: already authenticated")
return
self.__logger.debug("doing handshake...")
hash_value=self._compute_handshake()
n=common_root.newTextChild(None,"handshake",hash_value)
self._write_node(n)
n.unlinkNode()
n.freeNode()
self.__logger.debug("handshake hash sent.")
def _process_node(self,node):
"""Process first level element of the stream.
Handle component handshake (authentication) element, and
treat elements in "jabber:component:accept", "jabber:client"
and "jabber:server" equally (pass to `self.process_stanza`).
All other elements are passed to `Stream._process_node`.
:Parameters:
- `node`: XML node describing the element
"""
ns=node.ns()
if ns:
ns_uri=node.ns().getContent()
if (not ns or ns_uri=="jabber:component:accept") and node.name=="handshake":
if self.initiator and not self.authenticated:
self.authenticated=1
self.state_change("authenticated",self.me)
self._post_auth()
return
elif not self.authenticated and node.getContent()==self._compute_handshake():
self.peer=self.me
n=common_doc.newChild(None,"handshake",None)
self._write_node(n)
n.unlinkNode()
n.freeNode()
self.peer_authenticated=1
self.state_change("authenticated",self.peer)
self._post_auth()
return
else:
self._send_stream_error("not-authorized")
raise FatalComponentStreamError("Hanshake error.")
if ns_uri in ("jabber:component:accept","jabber:client","jabber:server"):
stanza=stanza_factory(node)
self.lock.release()
try:
self.process_stanza(stanza)
finally:
self.lock.acquire()
stanza.free()
return
return Stream._process_node(self,node)
# vi: sts=4 et sw=4
|
fbagirov/scikit-learn | refs/heads/master | sklearn/svm/setup.py | 321 | import os
from os.path import join
import numpy
from sklearn._build_utils import get_blas_info
def configuration(parent_package='', top_path=None):
    """Build configuration for ``sklearn.svm``.

    Compiles the vendored libsvm and liblinear C/C++ sources as a static
    library plus the extension modules (``libsvm``, ``liblinear``,
    ``libsvm_sparse``) that wrap them.

    Parameters
    ----------
    parent_package : str
        Name of the parent package (forwarded to numpy.distutils).
    top_path : str or None
        Top of the source tree (forwarded to numpy.distutils).

    Returns
    -------
    config : numpy.distutils.misc_util.Configuration
    """
    from numpy.distutils.misc_util import Configuration

    config = Configuration('svm', parent_package, top_path)

    config.add_subpackage('tests')

    # Section LibSVM

    # we compile both libsvm and libsvm_sparse
    config.add_library('libsvm-skl',
                       sources=[join('src', 'libsvm', 'libsvm_template.cpp')],
                       depends=[join('src', 'libsvm', 'svm.cpp'),
                                join('src', 'libsvm', 'svm.h')],
                       # Force C++ linking in case gcc is picked up instead
                       # of g++ under windows with some versions of MinGW
                       extra_link_args=['-lstdc++'],
                       )

    libsvm_sources = ['libsvm.c']
    libsvm_depends = [join('src', 'libsvm', 'libsvm_helper.c'),
                      join('src', 'libsvm', 'libsvm_template.cpp'),
                      join('src', 'libsvm', 'svm.cpp'),
                      join('src', 'libsvm', 'svm.h')]

    config.add_extension('libsvm',
                         sources=libsvm_sources,
                         include_dirs=[numpy.get_include(),
                                       join('src', 'libsvm')],
                         libraries=['libsvm-skl'],
                         depends=libsvm_depends,
                         )

    ### liblinear module
    # liblinear needs BLAS; cblas_libs/blas_info come from the helper above.
    cblas_libs, blas_info = get_blas_info()
    if os.name == 'posix':
        # libm is needed explicitly on POSIX linkers.
        cblas_libs.append('m')

    liblinear_sources = ['liblinear.c',
                         join('src', 'liblinear', '*.cpp')]
    liblinear_depends = [join('src', 'liblinear', '*.h'),
                         join('src', 'liblinear', 'liblinear_helper.c')]

    config.add_extension('liblinear',
                         sources=liblinear_sources,
                         libraries=cblas_libs,
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include(),
                                       blas_info.pop('include_dirs', [])],
                         extra_compile_args=blas_info.pop('extra_compile_args',
                                                          []),
                         depends=liblinear_depends,
                         # extra_compile_args=['-O0 -fno-inline'],
                         ** blas_info)

    ## end liblinear module

    # this should go *after* libsvm-skl
    libsvm_sparse_sources = ['libsvm_sparse.c']
    config.add_extension('libsvm_sparse', libraries=['libsvm-skl'],
                         sources=libsvm_sparse_sources,
                         include_dirs=[numpy.get_include(),
                                       join("src", "libsvm")],
                         depends=[join("src", "libsvm", "svm.h"),
                                  join("src", "libsvm",
                                       "libsvm_sparse_helper.c")])

    return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
matrix-static/Triangle | refs/heads/master | _baseline/refer/modules/ionicons/2.0.1/builder/generate.py | 357 | from subprocess import call
import os
import json
BUILDER_PATH = os.path.dirname(os.path.abspath(__file__))
ROOT_PATH = os.path.join(BUILDER_PATH, '..')
FONTS_FOLDER_PATH = os.path.join(ROOT_PATH, 'fonts')
CSS_FOLDER_PATH = os.path.join(ROOT_PATH, 'css')
SCSS_FOLDER_PATH = os.path.join(ROOT_PATH, 'scss')
LESS_FOLDER_PATH = os.path.join(ROOT_PATH, 'less')
def main():
    """Build the Ionicons font, then regenerate every derived artifact."""
    # Fonts must exist before the SVG glyph names can be rewritten.
    generate_font_files()
    build_data = get_build_data()
    rename_svg_glyph_names(build_data)
    # Every remaining step consumes the same build data.
    for step in (generate_scss, generate_less, generate_cheatsheet,
                 generate_component_json, generate_composer_json,
                 generate_bower_json):
        step(build_data)
def generate_font_files():
    """Run the fontforge build script that renders the font files."""
    print("Generate Fonts")
    # Pass an argv list instead of interpolating the path into a shell
    # string: robust against spaces in BUILDER_PATH and avoids the shell.
    script = os.path.join(BUILDER_PATH, 'scripts', 'generate_font.py')
    call(['fontforge', '-script', script])
def rename_svg_glyph_names(data):
    """Rewrite glyph-name attributes in the generated SVG font.

    fontforge emits generic names like ``uniF2CA``; rewrite them to the
    human-readable ``ion-<name>`` form listed in the build data.
    """
    # hacky and slow (but safe) way to rename glyph-name attributes
    svg_path = os.path.join(FONTS_FOLDER_PATH, 'ionicons.svg')
    with open(svg_path, 'r+') as svg_file:
        svg_text = svg_file.read()
        for ionicon in data['icons']:
            # uniF2CA
            org_name = 'uni%s' % (ionicon['code'].replace('0x', '').upper())
            ion_name = 'ion-%s' % (ionicon['name'])
            svg_text = svg_text.replace(org_name, ion_name)
        svg_file.seek(0)
        svg_file.write(svg_text)
        # Bug fix: the rewritten text can be shorter than the original;
        # without truncate() stale bytes would remain at the end of the file.
        svg_file.truncate()
def generate_less(data):
    """Write the LESS variables and icon-class partials from build data."""
    print("Generate LESS")
    font_name = data['name']
    font_version = data['version']
    css_prefix = data['prefix']
    variables_file_path = os.path.join(LESS_FOLDER_PATH, '_ionicons-variables.less')
    icons_file_path = os.path.join(LESS_FOLDER_PATH, '_ionicons-icons.less')

    # Variables file: license banner, font metadata, one variable per icon.
    d = []
    d.append('/*!')
    d.append('Ionicons, v%s' % (font_version) )
    d.append('Created by Ben Sperry for the Ionic Framework, http://ionicons.com/')
    d.append('https://twitter.com/benjsperry https://twitter.com/ionicframework')
    d.append('MIT License: https://github.com/driftyco/ionicons')
    d.append('*/')
    d.append('// Ionicons Variables')
    d.append('// --------------------------\n')
    d.append('@ionicons-font-path: "../fonts";')
    d.append('@ionicons-font-family: "%s";' % (font_name) )
    d.append('@ionicons-version: "%s";' % (font_version) )
    d.append('@ionicons-prefix: %s;' % (css_prefix) )
    d.append('')
    for ionicon in data['icons']:
        # Icon codes are stored like '0xf2ca'; LESS wants the '\f2ca' form.
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('@ionicon-var-%s: "%s";' % (ionicon['name'], chr_code) )
    with open(variables_file_path, 'w') as f:
        f.write( '\n'.join(d) )

    # Icons file: one selector per icon, all extending the base .ion class.
    d = []
    d.append('// Ionicons Icons')
    d.append('// --------------------------\n')
    group = [ '.%s' % (data['name'].lower()) ]
    for ionicon in data['icons']:
        group.append('.@{ionicons-prefix}%s:before' % (ionicon['name']) )
    d.append( ',\n'.join(group) )
    d.append('{')
    d.append(' &:extend(.ion);')
    d.append('}')
    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('.@{ionicons-prefix}%s:before { content: @ionicon-var-%s; }' % (ionicon['name'], ionicon['name']) )
    with open(icons_file_path, 'w') as f:
        f.write( '\n'.join(d) )
def generate_scss(data):
    """Write the SCSS variables/icon partials, then compile them to CSS."""
    print("Generate SCSS")
    font_name = data['name']
    font_version = data['version']
    css_prefix = data['prefix']
    variables_file_path = os.path.join(SCSS_FOLDER_PATH, '_ionicons-variables.scss')
    icons_file_path = os.path.join(SCSS_FOLDER_PATH, '_ionicons-icons.scss')

    # Variables file: font metadata plus one "$ionicon-var-*" per icon.
    d = []
    d.append('// Ionicons Variables')
    d.append('// --------------------------\n')
    d.append('$ionicons-font-path: "../fonts" !default;')
    d.append('$ionicons-font-family: "%s" !default;' % (font_name) )
    d.append('$ionicons-version: "%s" !default;' % (font_version) )
    d.append('$ionicons-prefix: %s !default;' % (css_prefix) )
    d.append('')
    for ionicon in data['icons']:
        # Icon codes are stored like '0xf2ca'; SCSS wants the '\f2ca' form.
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('$ionicon-var-%s: "%s";' % (ionicon['name'], chr_code) )
    with open(variables_file_path, 'w') as f:
        f.write( '\n'.join(d) )

    # Icons file: one selector per icon, all extending the base .ion class.
    d = []
    d.append('// Ionicons Icons')
    d.append('// --------------------------\n')
    group = [ '.%s' % (data['name'].lower()) ]
    for ionicon in data['icons']:
        group.append('.#{$ionicons-prefix}%s:before' % (ionicon['name']) )
    d.append( ',\n'.join(group) )
    d.append('{')
    d.append(' @extend .ion;')
    d.append('}')
    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('.#{$ionicons-prefix}%s:before { content: $ionicon-var-%s; }' % (ionicon['name'], ionicon['name']) )
    with open(icons_file_path, 'w') as f:
        f.write( '\n'.join(d) )

    generate_css_from_scss(data)
def generate_css_from_scss(data):
    """Compile ionicons.scss into normal and minified CSS with sass."""
    print("Generate CSS From SCSS")
    scss_file_path = os.path.join(SCSS_FOLDER_PATH, 'ionicons.scss')
    css_file_path = os.path.join(CSS_FOLDER_PATH, 'ionicons.css')
    css_min_file_path = os.path.join(CSS_FOLDER_PATH, 'ionicons.min.css')

    # argv lists instead of interpolated shell strings: safe with spaces
    # in the repository path and avoids an unnecessary shell.
    call(['sass', scss_file_path, css_file_path, '--style', 'compact'])

    print("Generate Minified CSS From SCSS")
    call(['sass', scss_file_path, css_min_file_path, '--style', 'compressed'])
def generate_cheatsheet(data):
    """Render cheatsheet.html listing every icon with its CSS/HTML codes."""
    print("Generate Cheatsheet")

    cheatsheet_file_path = os.path.join(ROOT_PATH, 'cheatsheet.html')
    template_path = os.path.join(BUILDER_PATH, 'cheatsheet', 'template.html')
    icon_row_path = os.path.join(BUILDER_PATH, 'cheatsheet', 'icon-row.html')

    with open(template_path, 'r') as f:
        template_html = f.read()
    with open(icon_row_path, 'r') as f:
        icon_row_template = f.read()

    content = []
    for ionicon in data['icons']:
        css_code = ionicon['code'].replace('0x', '\\')
        # Bug fix: the "escaped" variant was byte-identical to the raw
        # entity, so the cheatsheet could never display the entity text
        # itself. Escaping the ampersand makes the browser render the
        # literal "&#xF2CA;" string.
        escaped_html_code = ionicon['code'].replace('0x', '&amp;#x') + ';'
        html_code = ionicon['code'].replace('0x', '&#x') + ';'

        item_row = icon_row_template
        item_row = item_row.replace('{{name}}', ionicon['name'])
        item_row = item_row.replace('{{prefix}}', data['prefix'])
        item_row = item_row.replace('{{css_code}}', css_code)
        item_row = item_row.replace('{{escaped_html_code}}', escaped_html_code)
        item_row = item_row.replace('{{html_code}}', html_code)
        content.append(item_row)

    template_html = template_html.replace("{{font_name}}", data["name"])
    template_html = template_html.replace("{{font_version}}", data["version"])
    template_html = template_html.replace("{{icon_count}}", str(len(data["icons"])) )
    template_html = template_html.replace("{{content}}", '\n'.join(content) )

    with open(cheatsheet_file_path, 'w') as f:
        f.write(template_html)
def generate_component_json(data):
    """Write component.json, the Component(1) package manifest."""
    print("Generate component.json")
    d = {
        "name": data['name'],
        "repo": "driftyco/ionicons",
        "description": "The premium icon font for Ionic Framework.",
        "version": data['version'],
        "keywords": [],
        "dependencies": {},
        "development": {},
        "license": "MIT",
        "styles": [
            "css/%s.css" % (data['name'].lower())
        ],
        "fonts": [
            "fonts/%s.eot" % (data['name'].lower()),
            "fonts/%s.svg" % (data['name'].lower()),
            "fonts/%s.ttf" % (data['name'].lower()),
            "fonts/%s.woff" % (data['name'].lower())
        ]
    }
    component_file_path = os.path.join(ROOT_PATH, 'component.json')
    with open(component_file_path, 'w') as f:
        # json.dump writes the same text json.dumps would have produced.
        json.dump(d, f, indent=4, separators=(',', ': '))
def generate_composer_json(data):
    """Write composer.json, the PHP Composer package manifest."""
    print("Generate composer.json")
    d = {
        "name": "driftyco/ionicons",
        "description": "The premium icon font for Ionic Framework.",
        "keywords": [ "fonts", "icon font", "icons", "ionic", "web font"],
        "homepage": "http://ionicons.com/",
        "authors": [
            {
                "name": "Ben Sperry",
                "email": "ben@drifty.com",
                "role": "Designer",
                "homepage": "https://twitter.com/benjsperry"
            },
            {
                "name": "Adam Bradley",
                "email": "adam@drifty.com",
                "role": "Developer",
                "homepage": "https://twitter.com/adamdbradley"
            },
            {
                "name": "Max Lynch",
                "email": "max@drifty.com",
                "role": "Developer",
                "homepage": "https://twitter.com/maxlynch"
            }
        ],
        "extra": {},
        "license": [ "MIT" ]
    }
    composer_file_path = os.path.join(ROOT_PATH, 'composer.json')
    with open(composer_file_path, 'w') as f:
        json.dump(d, f, indent=4, separators=(',', ': '))
def generate_bower_json(data):
    """Write bower.json, the Bower package manifest."""
    print("Generate bower.json")
    d = {
        "name": data['name'],
        "version": data['version'],
        "homepage": "https://github.com/driftyco/ionicons",
        "authors": [
            "Ben Sperry <ben@drifty.com>",
            "Adam Bradley <adam@drifty.com>",
            "Max Lynch <max@drifty.com>"
        ],
        "description": "Ionicons - free and beautiful icons from the creators of Ionic Framework",
        "main": [
            "css/%s.css" % (data['name'].lower()),
            "fonts/*"
        ],
        "keywords": [ "fonts", "icon font", "icons", "ionic", "web font"],
        "license": "MIT",
        "ignore": [
            "**/.*",
            "builder",
            "node_modules",
            "bower_components",
            "test",
            "tests"
        ]
    }
    bower_file_path = os.path.join(ROOT_PATH, 'bower.json')
    with open(bower_file_path, 'w') as f:
        json.dump(d, f, indent=4, separators=(',', ': '))
def get_build_data():
    """Load and return the build metadata dict from build_data.json."""
    build_data_path = os.path.join(BUILDER_PATH, 'build_data.json')
    with open(build_data_path, 'r') as f:
        # json.load(f) is equivalent to json.loads(f.read()).
        return json.load(f)
if __name__ == "__main__":
main()
|
fmarczin/simplekv | refs/heads/master | tests/test_azure_store.py | 2 | from uuid import uuid4 as uuid
from simplekv._compat import ConfigParser, pickle
from simplekv.net.azurestore import AzureBlockBlobStore
from simplekv.contrib import ExtendedKeyspaceMixin
from basic_store import BasicStore
from conftest import ExtendedKeyspaceTests
import pytest
pytest.importorskip('azure.storage')
def load_azure_credentials():
    """Read Azure account credentials from ``azure_credentials.ini``.

    The file lives next to tox.ini and looks like::

        [my-azure-storage-account]
        account_name=foo
        account_key=bar

    Skips the calling test when the file is missing; returns a dict with
    ``account_name`` and ``account_key`` from the first section found.
    """
    filename = 'azure_credentials.ini'
    parser = ConfigParser()
    if not parser.read(filename):
        pytest.skip('file {} not found'.format(filename))
    for section in parser.sections():
        return {
            'account_name': parser.get(section, 'account_name'),
            'account_key': parser.get(section, 'account_key'),
        }
def create_azure_conn_string(credentials):
    """Build an Azure storage connection string from a credentials dict."""
    return ('DefaultEndpointsProtocol=https;'
            'AccountName={account_name};'
            'AccountKey={account_key}').format(**credentials)
class TestAzureStorage(BasicStore):
    """Run the generic BasicStore test suite against AzureBlockBlobStore."""

    @pytest.fixture
    def store(self):
        # Each test gets a freshly named private container; it is deleted
        # after the yield so data never leaks between test runs.
        from azure.storage.blob import BlockBlobService
        container = uuid()
        conn_string = create_azure_conn_string(load_azure_credentials())
        s = BlockBlobService(connection_string=conn_string)
        yield AzureBlockBlobStore(conn_string=conn_string, container=container,
                                  public=False)
        s.delete_container(container)
class TestExtendedKeysAzureStorage(TestAzureStorage, ExtendedKeyspaceTests):
    """Re-run the Azure suite with the extended-keyspace mixin applied."""

    @pytest.fixture
    def store(self):
        # Same lifecycle as TestAzureStorage.store, but the store class is
        # augmented with ExtendedKeyspaceMixin to allow a wider key alphabet.
        class ExtendedKeysStore(ExtendedKeyspaceMixin, AzureBlockBlobStore):
            pass
        from azure.storage.blob import BlockBlobService
        container = uuid()
        conn_string = create_azure_conn_string(load_azure_credentials())
        s = BlockBlobService(connection_string=conn_string)
        yield ExtendedKeysStore(conn_string=conn_string,
                                container=container, public=False)
        s.delete_container(container)
def test_azure_setgetstate():
    """A pickled and restored AzureBlockBlobStore must still serve data."""
    from azure.storage.blob import BlockBlobService
    container = uuid()
    conn_string = create_azure_conn_string(load_azure_credentials())
    s = BlockBlobService(connection_string=conn_string)
    store = AzureBlockBlobStore(conn_string=conn_string, container=container,
                                public=False)
    store.put(u'key1', b'value1')
    # Round-trip the store object through pickle (protocol 2 keeps py2
    # compatibility); the restored store must reconnect transparently.
    buf = pickle.dumps(store, protocol=2)
    store = pickle.loads(buf)
    assert store.get(u'key1') == b'value1'
    s.delete_container(container)
|
GNOME/gnome-music | refs/heads/master | gnomemusic/views/albumsview.py | 1 | # Copyright 2019 The GNOME Music Developers
#
# GNOME Music is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# GNOME Music is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with GNOME Music; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The GNOME Music authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and GNOME Music. This permission is above and beyond the permissions
# granted by the GPL license by which GNOME Music is covered. If you
# modify this code, you may extend this exception to your version of the
# code, but you are not obligated to do so. If you do not wish to do so,
# delete this exception statement from your version.
import math
from gettext import gettext as _
from gi.repository import Gdk, GLib, GObject, Gtk
from gnomemusic.widgets.headerbar import HeaderBar
from gnomemusic.widgets.albumcover import AlbumCover
from gnomemusic.widgets.albumwidget import AlbumWidget
@Gtk.Template(resource_path="/org/gnome/Music/ui/AlbumsView.ui")
class AlbumsView(Gtk.Stack):
    """Gridlike view of all albums

    Album activation switches to AlbumWidget.
    """

    __gtype_name__ = "AlbumsView"

    # Icon and title shown for this view in the window chrome.
    icon_name = GObject.Property(
        type=str, default="media-optical-cd-audio-symbolic",
        flags=GObject.ParamFlags.READABLE)
    search_mode_active = GObject.Property(type=bool, default=False)
    selection_mode = GObject.Property(type=bool, default=False)
    title = GObject.Property(
        type=str, default=_("Albums"), flags=GObject.ParamFlags.READABLE)

    # Widgets bound from the AlbumsView.ui template.
    _scrolled_window = Gtk.Template.Child()
    _flowbox = Gtk.Template.Child()
    _flowbox_long_press = Gtk.Template.Child()

    def __init__(self, application):
        """Initialize AlbumsView

        :param application: The Application object
        """
        super().__init__(transition_type=Gtk.StackTransitionType.CROSSFADE)
        self.props.name = "albums"
        self._application = application
        self._window = application.props.window
        self._headerbar = self._window._headerbar
        # Debounce source id for cover retrieval; 0 means "no timer pending".
        self._adjustment_timeout_id = 0
        self._viewport = self._scrolled_window.get_child()
        # Counter used to stagger initial cover retrieval (see _create_widget).
        self._widget_counter = 1
        # True while Ctrl is held during a rubberband/press selection.
        self._ctrl_hold = False

        model = self._application.props.coremodel.props.albums_sort
        self._flowbox.bind_model(model, self._create_widget)
        self._flowbox.set_hadjustment(self._scrolled_window.get_hadjustment())
        self._flowbox.set_vadjustment(self._scrolled_window.get_vadjustment())
        self._flowbox.connect("child-activated", self._on_child_activated)

        self.bind_property(
            "selection-mode", self._window, "selection-mode",
            GObject.BindingFlags.DEFAULT)
        self._window.connect(
            "notify::selection-mode", self._on_selection_mode_changed)

        self._album_widget = AlbumWidget(self._application)
        self._album_widget.bind_property(
            "selection-mode", self, "selection-mode",
            GObject.BindingFlags.BIDIRECTIONAL)

        self.add(self._album_widget)

        self.connect(
            "notify::search-mode-active", self._on_search_mode_changed)

        # Both value changes (scrolling) and range changes (model growth)
        # trigger lazy cover retrieval for the visible region.
        self._scrolled_window.props.vadjustment.connect(
            "value-changed", self._on_vadjustment_changed)
        self._scrolled_window.props.vadjustment.connect(
            "changed", self._on_vadjustment_changed)

    def _on_vadjustment_changed(self, adjustment):
        # Debounce scroll events: restart a 200 ms timer so covers are only
        # retrieved once scrolling settles.
        if self._adjustment_timeout_id != 0:
            GLib.source_remove(self._adjustment_timeout_id)
            self._adjustment_timeout_id = 0

        self._adjustment_timeout_id = GLib.timeout_add(
            200, self._retrieve_covers, adjustment.props.value,
            priority=GLib.PRIORITY_LOW)

    def _retrieve_covers(self, old_adjustment):
        # Ask the AlbumCovers currently visible in the viewport to load
        # their artwork. Runs from the debounce timer above.
        adjustment = self._scrolled_window.props.vadjustment.props.value
        # Still scrolling: keep the timer alive and check again later.
        if old_adjustment != adjustment:
            return GLib.SOURCE_CONTINUE

        first_cover = self._flowbox.get_child_at_index(0)
        if first_cover is None:
            return GLib.SOURCE_REMOVE

        cover_size, _ = first_cover.get_allocated_size()
        # Not allocated yet, nothing sensible to compute.
        if cover_size.width == 0 or cover_size.height == 0:
            return GLib.SOURCE_REMOVE

        # Derive the grid geometry from the first cover's allocation to find
        # which child is in the top-left corner of the viewport.
        viewport_size, _ = self._viewport.get_allocated_size()
        h_space = self._flowbox.get_column_spacing()
        v_space = self._flowbox.get_row_spacing()
        nr_cols = (
            (viewport_size.width + h_space) // (cover_size.width + h_space))
        top_left_cover = self._flowbox.get_child_at_index(
            nr_cols * (adjustment // (cover_size.height + v_space)))

        covers_col = math.ceil(viewport_size.width / cover_size.width)
        covers_row = math.ceil(viewport_size.height / cover_size.height)
        children = self._flowbox.get_children()
        retrieve_list = []
        for i, albumcover in enumerate(children):
            if top_left_cover == albumcover:
                retrieve_covers = covers_row * covers_col
                retrieve_list = children[i:i + retrieve_covers]
                break

        for albumcover in retrieve_list:
            albumcover.retrieve()

        self._adjustment_timeout_id = 0
        return GLib.SOURCE_REMOVE

    def _on_selection_mode_changed(self, widget, data=None):
        # Mirror the window's selection mode, but only while this view is
        # the one being shown.
        selection_mode = self._window.props.selection_mode
        if (selection_mode == self.props.selection_mode
                or self.get_parent().get_visible_child() != self):
            return

        self.props.selection_mode = selection_mode
        if not self.props.selection_mode:
            self.deselect_all()

    def _on_search_mode_changed(self, klass, param):
        # Leaving search while an album is open: restore its headerbar.
        if (not self.props.search_mode_active
                and self._headerbar.props.stack.props.visible_child == self
                and self.get_visible_child() == self._album_widget):
            self._set_album_headerbar(self._album_widget.props.album)

    def _create_widget(self, corealbum):
        # Factory used by Gtk.FlowBox.bind_model: one AlbumCover per album.
        album_widget = AlbumCover(corealbum)

        self.bind_property(
            "selection-mode", album_widget, "selection-mode",
            GObject.BindingFlags.SYNC_CREATE
            | GObject.BindingFlags.BIDIRECTIONAL)

        # NOTE: Adding SYNC_CREATE here will trigger all the nested
        # models to be created. This will slow down initial start,
        # but will improve initial 'select all' speed.
        album_widget.bind_property(
            "selected", corealbum, "selected",
            GObject.BindingFlags.BIDIRECTIONAL)

        # Stagger artwork retrieval (250 ms apart) to keep startup smooth.
        GLib.timeout_add(
            self._widget_counter * 250, album_widget.retrieve,
            priority=GLib.PRIORITY_LOW)
        self._widget_counter = self._widget_counter + 1

        return album_widget

    def _back_button_clicked(self, widget, data=None):
        # Return from the single-album widget to the album grid.
        self._headerbar.state = HeaderBar.State.MAIN
        self.props.visible_child = self._scrolled_window

    def _on_child_activated(self, widget, child, user_data=None):
        corealbum = child.props.corealbum

        if self.props.selection_mode:
            return

        # Update and display the album widget if not in selection mode
        self._album_widget.update(corealbum)
        self._set_album_headerbar(corealbum)
        self.set_visible_child(self._album_widget)

    def _set_album_headerbar(self, corealbum):
        # Show album title/artist in the headerbar while the album is open.
        self._headerbar.props.state = HeaderBar.State.CHILD
        self._headerbar.props.title = corealbum.props.title
        self._headerbar.props.subtitle = corealbum.props.artist

    @Gtk.Template.Callback()
    def _on_flowbox_press_begin(self, gesture, sequence):
        # Enable rubberband selection for Ctrl+drag or while already in
        # selection mode; remember the Ctrl state for the changed handler.
        event = gesture.get_last_event(sequence)
        ok, state = event.get_state()

        if ((ok is True
                and state == Gdk.ModifierType.CONTROL_MASK)
                or self.props.selection_mode is True):
            self._flowbox.props.selection_mode = Gtk.SelectionMode.MULTIPLE
            if state == Gdk.ModifierType.CONTROL_MASK:
                self._ctrl_hold = True

    @Gtk.Template.Callback()
    def _on_flowbox_press_cancel(self, gesture, sequence):
        # Gesture aborted: drop back to no flowbox-level selection.
        self._flowbox.props.selection_mode = Gtk.SelectionMode.NONE

    @Gtk.Template.Callback()
    def _on_selected_children_changed(self, flowbox):
        # Translate the flowbox's transient (rubberband) selection into the
        # per-cover "selected" properties, honouring Ctrl for toggling.
        if self._flowbox.props.selection_mode == Gtk.SelectionMode.NONE:
            return

        if self.props.selection_mode is False:
            self.props.selection_mode = True

        rubberband_selection = len(self._flowbox.get_selected_children()) > 1

        # Freeze notifications so the selection updates as one batch.
        with self._application.props.coreselection.freeze_notify():
            if (rubberband_selection
                    and not self._ctrl_hold):
                self.deselect_all()

            for child in self._flowbox.get_selected_children():
                if (self._ctrl_hold is True
                        or not rubberband_selection):
                    child.props.selected = not child.props.selected
                else:
                    child.props.selected = True

            self._ctrl_hold = False
            self._flowbox.props.selection_mode = Gtk.SelectionMode.NONE

    def _toggle_all_selection(self, selected):
        """Selects or deselects all items.
        """
        with self._application.props.coreselection.freeze_notify():
            # Forward to the open album widget when it is being shown,
            # otherwise toggle every cover in the grid.
            if self.get_visible_child() == self._album_widget:
                if selected is True:
                    self._album_widget.select_all()
                else:
                    self._album_widget.deselect_all()
            else:
                for child in self._flowbox.get_children():
                    child.props.selected = selected

    def select_all(self):
        # Public entry point used by the window's selection toolbar.
        self._toggle_all_selection(True)

    def deselect_all(self):
        # Public entry point used by the window's selection toolbar.
        self._toggle_all_selection(False)
|
vrv/tensorflow | refs/heads/master | tensorflow/tensorboard/tensorboard.py | 25 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Serve TensorFlow summary data to a web frontend.
This is a simple web server to proxy data from the event_loader to the web, and
serve static web files.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging as base_logging
import os
import socket
import sys
from werkzeug import serving
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from tensorflow.python.platform import tf_logging as logging
from tensorflow.tensorboard.backend import application
from tensorflow.tensorboard.backend.event_processing import event_file_inspector as efi
from tensorflow.tensorboard.plugins.projector import projector_plugin
from tensorflow.tensorboard.plugins.text import text_plugin
# TensorBoard flags
flags.DEFINE_string('logdir', '', """logdir specifies the directory where
TensorBoard will look to find TensorFlow event files that it can display.
TensorBoard will recursively walk the directory structure rooted at logdir,
looking for .*tfevents.* files.
You may also pass a comma separated list of log directories, and TensorBoard
will watch each directory. You can also assign names to individual log
directories by putting a colon between the name and the path, as in
tensorboard --logdir=name1:/path/to/logs/1,name2:/path/to/logs/2
""")
flags.DEFINE_string('host', '', 'What host to listen to. Defaults to '
'serving on all interfaces, set to 127.0.0.1 (localhost) to'
'disable remote access (also quiets security warnings).')
flags.DEFINE_integer('port', 6006, 'What port to serve TensorBoard on.')
flags.DEFINE_boolean('purge_orphaned_data', True, 'Whether to purge data that '
'may have been orphaned due to TensorBoard restarts. '
'Disabling purge_orphaned_data can be used to debug data '
'disappearance.')
flags.DEFINE_integer('reload_interval', 5, 'How often the backend should load '
'more data.')
# Inspect Mode flags
flags.DEFINE_boolean('inspect', False, """Use this flag to print out a digest
of your event files to the command line, when no data is shown on TensorBoard or
the data shown looks weird.
Example usages:
tensorboard --inspect --event_file=myevents.out
tensorboard --inspect --event_file=myevents.out --tag=loss
tensorboard --inspect --logdir=mylogdir
tensorboard --inspect --logdir=mylogdir --tag=loss
See tensorflow/python/summary/event_file_inspector.py for more info and
detailed usage.
""")
flags.DEFINE_string(
'tag', '',
'The particular tag to query for. Only used if --inspect is present')
flags.DEFINE_string(
'event_file', '',
'The particular event file to query for. Only used if --inspect is present '
'and --logdir is not specified.')
FLAGS = flags.FLAGS
def create_tb_app(plugins):
  """Read the flags, and create a TensorBoard WSGI application.

  Args:
    plugins: A list of plugins for TensorBoard to initialize.

  Raises:
    ValueError: if a logdir is not specified.

  Returns:
    A new TensorBoard WSGI application.
  """
  if not FLAGS.logdir:
    raise ValueError('A logdir must be specified. Run `tensorboard --help` for '
                     'details and examples.')
  return application.standard_tensorboard_wsgi(
      logdir=os.path.expanduser(FLAGS.logdir),
      purge_orphaned_data=FLAGS.purge_orphaned_data,
      reload_interval=FLAGS.reload_interval,
      plugins=plugins)
def make_simple_server(tb_app, host, port):
  """Create an HTTP server for TensorBoard.

  Args:
    tb_app: The TensorBoard WSGI application to create a server for.
    host: Indicates the interfaces to bind to ('::' or '0.0.0.0' for all
      interfaces, '::1' or '127.0.0.1' for localhost). A blank value ('')
      indicates protocol-agnostic all interfaces.
    port: The port to bind to (0 indicates an unused port selected by the
      operating system).

  Returns:
    A tuple of (server, url):
      server: An HTTP server object configured to host TensorBoard.
      url: A best guess at a URL where TensorBoard will be accessible once the
        server has been started.

  Raises:
    socket.error: If a server could not be constructed with the host and port
      specified. Also logs an error message.
  """
  # Mute the werkzeug logging.
  base_logging.getLogger('werkzeug').setLevel(base_logging.WARNING)

  try:
    if host:
      # The user gave us an explicit host
      server = serving.make_server(host, port, tb_app, threaded=True)
      if ':' in host and not host.startswith('['):
        # Display IPv6 addresses as [::1]:80 rather than ::1:80
        final_host = '[{}]'.format(host)
      else:
        final_host = host
    else:
      # We've promised to bind to all interfaces on this host. However, we're
      # not sure whether that means IPv4 or IPv6 interfaces.
      try:
        # First try passing in a blank host (meaning all interfaces). This,
        # unfortunately, defaults to IPv4 even if no IPv4 interface is available
        # (yielding a socket.error).
        server = serving.make_server(host, port, tb_app, threaded=True)
      except socket.error:
        # If a blank host didn't work, we explicitly request IPv6 interfaces.
        server = serving.make_server('::', port, tb_app, threaded=True)
      final_host = socket.gethostname()
    server.daemon_threads = True
  except socket.error as socket_error:
    if port == 0:
      msg = 'TensorBoard unable to find any open port'
    else:
      # Bug fix: report the `port` argument actually requested, not the
      # FLAGS.port global (they differ when callers pass a custom port).
      msg = (
          'TensorBoard attempted to bind to port %d, but it was already in use'
          % port)
    logging.error(msg)
    print(msg)
    raise socket_error

  final_port = server.socket.getsockname()[1]
  tensorboard_url = 'http://%s:%d' % (final_host, final_port)
  return server, tensorboard_url
def run_simple_server(tb_app):
  """Run a TensorBoard HTTP server, and print some messages to the console.

  Blocks forever serving requests; exits the process if the server could
  not be created.
  """
  try:
    server, url = make_simple_server(tb_app, FLAGS.host, FLAGS.port)
  except socket.error:
    # An error message was already logged by make_simple_server. Use
    # sys.exit instead of the site-provided builtin exit(), which is meant
    # for interactive use and may be absent.
    sys.exit(-1)
  msg = 'Starting TensorBoard %s at %s' % (tb_app.tag, url)
  print(msg)
  logging.info(msg)
  print('(Press CTRL+C to quit)')
  sys.stdout.flush()

  server.serve_forever()
def main(unused_argv=None):
  """Entry point: either inspect event files or serve the TensorBoard app."""
  if FLAGS.inspect:
    # Inspection mode: report on the event files and exit without serving.
    logging.info('Not bringing up TensorBoard, but inspecting event files.')
    efi.inspect(FLAGS.logdir,
                os.path.expanduser(FLAGS.event_file),
                FLAGS.tag)
    return 0
  # Server mode: assemble the WSGI app with its plugins and serve forever.
  active_plugins = [
      projector_plugin.ProjectorPlugin(),
      text_plugin.TextPlugin(),
  ]
  run_simple_server(create_tb_app(active_plugins))
if __name__ == '__main__':
  # Parse command-line flags and dispatch to main() when run as a script.
  app.run()
|
turnkeylinux/tklbam-duplicity | refs/heads/master | duplicity/backends/_boto_single.py | 4 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <ben@emerose.org>
# Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import time
import duplicity.backend
from duplicity import globals
from duplicity import log
from duplicity.errors import * #@UnusedWildImport
from duplicity.util import exception_traceback
from duplicity.backend import retry
BOTO_MIN_VERSION = "1.6a"
class BotoBackend(duplicity.backend.Backend):
"""
Backend for Amazon's Simple Storage System, (aka Amazon S3), though
the use of the boto module, (http://code.google.com/p/boto/).
To make use of this backend you must set aws_access_key_id
and aws_secret_access_key in your ~/.boto or /etc/boto.cfg
with your Amazon Web Services key id and secret respectively.
Alternatively you can export the environment variables
AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY.
"""
def __init__(self, parsed_url):
duplicity.backend.Backend.__init__(self, parsed_url)
import boto
assert boto.Version >= BOTO_MIN_VERSION
from boto.s3.key import Key
# This folds the null prefix and all null parts, which means that:
# //MyBucket/ and //MyBucket are equivalent.
# //MyBucket//My///My/Prefix/ and //MyBucket/My/Prefix are equivalent.
self.url_parts = filter(lambda x: x != '', parsed_url.path.split('/'))
if self.url_parts:
self.bucket_name = self.url_parts.pop(0)
else:
# Duplicity hangs if boto gets a null bucket name.
# HC: Caught a socket error, trying to recover
raise BackendException('Boto requires a bucket name.')
self.scheme = parsed_url.scheme
self.key_class = Key
if self.url_parts:
self.key_prefix = '%s/' % '/'.join(self.url_parts)
else:
self.key_prefix = ''
self.straight_url = duplicity.backend.strip_auth_from_url(parsed_url)
self.parsed_url = parsed_url
self.resetConnection()
def resetConnection(self):
self.bucket = None
self.conn = None
try:
from boto.s3.connection import S3Connection
from boto.s3.key import Key
assert hasattr(S3Connection, 'lookup')
# Newer versions of boto default to using
# virtual hosting for buckets as a result of
# upstream deprecation of the old-style access
# method by Amazon S3. This change is not
# backwards compatible (in particular with
# respect to upper case characters in bucket
# names); so we default to forcing use of the
# old-style method unless the user has
# explicitly asked us to use new-style bucket
# access.
#
# Note that if the user wants to use new-style
# buckets, we use the subdomain calling form
# rather than given the option of both
# subdomain and vhost. The reason being that
# anything addressable as a vhost, is also
# addressable as a subdomain. Seeing as the
# latter is mostly a convenience method of
# allowing browse:able content semi-invisibly
# being hosted on S3, the former format makes
# a lot more sense for us to use - being
# explicit about what is happening (the fact
# that we are talking to S3 servers).
try:
from boto.s3.connection import OrdinaryCallingFormat
from boto.s3.connection import SubdomainCallingFormat
cfs_supported = True
calling_format = OrdinaryCallingFormat()
except ImportError:
cfs_supported = False
calling_format = None
if globals.s3_use_new_style:
if cfs_supported:
calling_format = SubdomainCallingFormat()
else:
log.FatalError("Use of new-style (subdomain) S3 bucket addressing was"
"requested, but does not seem to be supported by the "
"boto library. Either you need to upgrade your boto "
"library or duplicity has failed to correctly detect "
"the appropriate support.",
log.ErrorCode.boto_old_style)
else:
if cfs_supported:
calling_format = OrdinaryCallingFormat()
else:
calling_format = None
except ImportError:
log.FatalError("This backend (s3) requires boto library, version %s or later, "
"(http://code.google.com/p/boto/)." % BOTO_MIN_VERSION,
log.ErrorCode.boto_lib_too_old)
if self.scheme == 's3+http':
# Use the default Amazon S3 host.
self.conn = S3Connection(is_secure=(not globals.s3_unencrypted_connection))
else:
assert self.scheme == 's3'
self.conn = S3Connection(
host=self.parsed_url.hostname,
is_secure=(not globals.s3_unencrypted_connection))
if hasattr(self.conn, 'calling_format'):
if calling_format is None:
log.FatalError("It seems we previously failed to detect support for calling "
"formats in the boto library, yet the support is there. This is "
"almost certainly a duplicity bug.",
log.ErrorCode.boto_calling_format)
else:
self.conn.calling_format = calling_format
else:
# Duplicity hangs if boto gets a null bucket name.
# HC: Caught a socket error, trying to recover
raise BackendException('Boto requires a bucket name.')
self.bucket = self.conn.lookup(self.bucket_name)
def put(self, source_path, remote_filename=None):
from boto.s3.connection import Location
if globals.s3_european_buckets:
if not globals.s3_use_new_style:
log.FatalError("European bucket creation was requested, but not new-style "
"bucket addressing (--s3-use-new-style)",
log.ErrorCode.s3_bucket_not_style)
#Network glitch may prevent first few attempts of creating/looking up a bucket
for n in range(1, globals.num_retries+1):
if self.bucket:
break
if n > 1:
time.sleep(30)
try:
try:
self.bucket = self.conn.get_bucket(self.bucket_name, validate=True)
except Exception, e:
if "NoSuchBucket" in str(e):
if globals.s3_european_buckets:
self.bucket = self.conn.create_bucket(self.bucket_name,
location=Location.EU)
else:
self.bucket = self.conn.create_bucket(self.bucket_name)
else:
raise e
except Exception, e:
log.Warn("Failed to create bucket (attempt #%d) '%s' failed (reason: %s: %s)"
"" % (n, self.bucket_name,
e.__class__.__name__,
str(e)))
self.resetConnection()
if not remote_filename:
remote_filename = source_path.get_filename()
key = self.key_class(self.bucket)
key.key = self.key_prefix + remote_filename
for n in range(1, globals.num_retries+1):
if n > 1:
# sleep before retry (new connection to a **hopeful** new host, so no need to wait so long)
time.sleep(10)
if globals.s3_use_rrs:
storage_class = 'REDUCED_REDUNDANCY'
else:
storage_class = 'STANDARD'
log.Info("Uploading %s/%s to %s Storage" % (self.straight_url, remote_filename, storage_class))
try:
key.set_contents_from_filename(source_path.name, {'Content-Type': 'application/octet-stream',
'x-amz-storage-class': storage_class})
key.close()
self.resetConnection()
return
except Exception, e:
log.Warn("Upload '%s/%s' failed (attempt #%d, reason: %s: %s)"
"" % (self.straight_url,
remote_filename,
n,
e.__class__.__name__,
str(e)))
log.Debug("Backtrace of previous error: %s" % (exception_traceback(),))
self.resetConnection()
log.Warn("Giving up trying to upload %s/%s after %d attempts" %
(self.straight_url, remote_filename, globals.num_retries))
raise BackendException("Error uploading %s/%s" % (self.straight_url, remote_filename))
def get(self, remote_filename, local_path):
key = self.key_class(self.bucket)
key.key = self.key_prefix + remote_filename
for n in range(1, globals.num_retries+1):
if n > 1:
# sleep before retry (new connection to a **hopeful** new host, so no need to wait so long)
time.sleep(10)
log.Info("Downloading %s/%s" % (self.straight_url, remote_filename))
try:
key.get_contents_to_filename(local_path.name)
local_path.setdata()
self.resetConnection()
return
except Exception, e:
log.Warn("Download %s/%s failed (attempt #%d, reason: %s: %s)"
"" % (self.straight_url,
remote_filename,
n,
e.__class__.__name__,
str(e)), 1)
log.Debug("Backtrace of previous error: %s" % (exception_traceback(),))
self.resetConnection()
log.Warn("Giving up trying to download %s/%s after %d attempts" %
(self.straight_url, remote_filename, globals.num_retries))
raise BackendException("Error downloading %s/%s" % (self.straight_url, remote_filename))
def list(self):
if not self.bucket:
return []
for n in range(1, globals.num_retries+1):
if n > 1:
# sleep before retry
time.sleep(30)
log.Info("Listing %s" % self.straight_url)
try:
return self._list_filenames_in_bucket()
except Exception, e:
log.Warn("List %s failed (attempt #%d, reason: %s: %s)"
"" % (self.straight_url,
n,
e.__class__.__name__,
str(e)), 1)
log.Debug("Backtrace of previous error: %s" % (exception_traceback(),))
log.Warn("Giving up trying to list %s after %d attempts" %
(self.straight_url, globals.num_retries))
raise BackendException("Error listng %s" % self.straight_url)
def _list_filenames_in_bucket(self):
# We add a 'd' to the prefix to make sure it is not null (for boto) and
# to optimize the listing of our filenames, which always begin with 'd'.
# This will cause a failure in the regression tests as below:
# FAIL: Test basic backend operations
# <tracback snipped>
# AssertionError: Got list: []
# Wanted: ['testfile']
# Because of the need for this optimization, it should be left as is.
#for k in self.bucket.list(prefix = self.key_prefix + 'd', delimiter = '/'):
filename_list = []
for k in self.bucket.list(prefix = self.key_prefix, delimiter = '/'):
try:
filename = k.key.replace(self.key_prefix, '', 1)
filename_list.append(filename)
log.Debug("Listed %s/%s" % (self.straight_url, filename))
except AttributeError:
pass
return filename_list
def delete(self, filename_list):
for filename in filename_list:
self.bucket.delete_key(self.key_prefix + filename)
log.Debug("Deleted %s/%s" % (self.straight_url, filename))
@retry
def _query_file_info(self, filename, raise_errors=False):
try:
key = self.bucket.lookup(self.key_prefix + filename)
if key is None:
return {'size': -1}
return {'size': key.size}
except Exception, e:
log.Warn("Query %s/%s failed: %s"
"" % (self.straight_url,
filename,
str(e)))
self.resetConnection()
if raise_errors:
raise e
else:
return {'size': None}
# Handle both plain "s3" and "s3+http" URL schemes with this backend.
duplicity.backend.register_backend("s3", BotoBackend)
duplicity.backend.register_backend("s3+http", BotoBackend)
|
twisteroidambassador/udpack | refs/heads/master | udpack/packers_base.py | 1 | """Implements plug-in packers and metaclasses used to modify packers."""
import asyncio
import itertools
import random
__all__ = ['make_pack_only_packer', 'make_unpack_only_packer', 'make_reverse_packer',
'NoOpPacker', 'CallSoonPacker', 'ConstDelayPacker', 'DelayPacker',
'RandomDropPacker', 'ShufflePacker', 'XorMaskPacker',
'ReverseOnePlusPacker', 'XorPtrPosPacker', 'PaddingPacker']
class BasePacker:
    """Common base for all packers.

    A packer transforms datagrams in two directions: pack() mangles an
    outgoing datagram, unpack() restores an incoming one.  Results are
    delivered through the ``packed_cb`` / ``unpacked_cb`` callbacks rather
    than returned, so a packer is free to delay, drop or reorder datagrams.

    Attributes:
        packed_cb: callable invoked with each packed datagram.
        unpacked_cb: callable invoked with each unpacked datagram.
    """

    def __init__(self, *, packed_cb=None, unpacked_cb=None):
        """Set up the packer.

        Both callbacks are optional here, but each must be assigned before
        the corresponding pack()/unpack() call is made.
        """
        self.packed_cb = packed_cb
        self.unpacked_cb = unpacked_cb

    def pack(self, data):
        raise NotImplementedError

    def unpack(self, data):
        raise NotImplementedError

    def call_packed_cb(self, data):
        """Invoke whatever ``packed_cb`` is set *right now*.

        Unlike grabbing ``self.packed_cb`` once, a reference to this method
        always dispatches to the callback in effect at call time — which
        matters when callbacks are swapped after handles were taken::

            handle = packer.call_packed_cb
            packer.packed_cb = new_callback
            handle(data)          # calls new_callback
        """
        self.packed_cb(data)

    def call_unpacked_cb(self, data):
        """Invoke whatever ``unpacked_cb`` is set right now.

        See call_packed_cb() for why this indirection exists.
        """
        self.unpacked_cb(data)
class NoOpPacker(BasePacker):
    """Hand every datagram straight to its callback, unchanged."""

    def pack(self, data):
        forward = self.packed_cb
        forward(data)

    def unpack(self, data):
        forward = self.unpacked_cb
        forward(data)
def make_pack_only_packer(packer_class):
    """Derive a packer class whose unpack() is a no-op pass-through.

    Packing behavior is inherited from *packer_class* unchanged; the
    unpacking direction simply forwards datagrams as-is.
    """
    name = 'PackOnly' + packer_class.__name__
    overrides = {'unpack': NoOpPacker.unpack}
    return type(name, (packer_class,), overrides)
def make_unpack_only_packer(packer_class):
    """Derive a packer class whose pack() is a no-op pass-through.

    Unpacking behavior is inherited from *packer_class* unchanged; the
    packing direction simply forwards datagrams as-is.
    """
    name = 'UnpackOnly' + packer_class.__name__
    overrides = {'pack': NoOpPacker.pack}
    return type(name, (packer_class,), overrides)
def make_reverse_packer(packer_class):
    """Derive a packer class that runs *packer_class* in reverse.

    The generated class packs by calling the wrapped packer's unpack()
    and unpacks by calling its pack(), with the callbacks crossed over
    to match.
    """
    class ReversePacker(BasePacker):
        def __init__(self, *args, packed_cb=None, unpacked_cb=None, **kwargs):
            super().__init__(packed_cb=packed_cb, unpacked_cb=unpacked_cb)
            # Cross-wire the wrapped packer: its "packed" output feeds our
            # unpacked_cb and vice versa, swapping the two directions.
            self._wrapped = packer_class(
                *args, packed_cb=self.call_unpacked_cb,
                unpacked_cb=self.call_packed_cb, **kwargs)

        def pack(self, data):
            self._wrapped.unpack(data)

        def unpack(self, data):
            self._wrapped.pack(data)

    ReversePacker.__name__ = 'Reverse' + packer_class.__name__
    return ReversePacker
class CallSoonPacker(BasePacker):
    """Forward datagrams on the next event-loop iteration.

    Scheduling through loop.call_soon() instead of invoking the callback
    directly gives the event loop a chance to run between the arrival of a
    datagram and its forwarding.
    """

    def __init__(self, *args, loop=None, **kwargs):
        super().__init__(*args, **kwargs)
        self._loop = loop or asyncio.get_event_loop()

    def pack(self, data):
        # The callback in effect *now* is captured for the deferred call.
        self._loop.call_soon(self.packed_cb, data)

    def unpack(self, data):
        self._loop.call_soon(self.unpacked_cb, data)
class ConstDelayPacker(BasePacker):
    """Forward every datagram after a fixed delay.

    Attributes:
        pack_delay: seconds to hold each packed datagram.
        unpack_delay: seconds to hold each unpacked datagram.
    """

    def __init__(self, pack_delay, unpack_delay, *args, loop=None, **kwargs):
        super().__init__(*args, **kwargs)
        self._loop = loop or asyncio.get_event_loop()
        self.pack_delay = pack_delay
        self.unpack_delay = unpack_delay

    def pack(self, data):
        # Dispatch through call_packed_cb so a callback swapped in during
        # the delay is the one that actually gets invoked.
        self._loop.call_later(self.pack_delay, self.call_packed_cb, data)

    def unpack(self, data):
        self._loop.call_later(self.unpack_delay, self.call_unpacked_cb, data)
class DelayPacker(NoOpPacker):
    """Delay each datagram by an amount obtained from a function call.

    Intended for randomized delays: the delay callable is invoked once per
    datagram.

    Attributes:
        pack_delay, unpack_delay: zero-argument callables returning the
            delay in seconds for one datagram, or None to forward that
            direction immediately.
    """

    def __init__(self, pack_delay, unpack_delay, *args, loop=None, **kwargs):
        """Initialize Packer.

        pack_delay / unpack_delay carry the same meaning as the attributes.
        """
        super().__init__(*args, **kwargs)
        self._loop = loop or asyncio.get_event_loop()
        self.pack_delay = pack_delay
        self.unpack_delay = unpack_delay

    @property
    def pack_delay(self):
        return self._pack_delay

    @pack_delay.setter
    def pack_delay(self, delay):
        # Rebind self.pack once here so the per-datagram path never has to
        # re-test whether delaying is enabled.
        self._pack_delay = delay
        self.pack = self._pack_now if delay is None else self._pack_later

    @property
    def unpack_delay(self):
        return self._unpack_delay

    @unpack_delay.setter
    def unpack_delay(self, delay):
        self._unpack_delay = delay
        self.unpack = self._unpack_now if delay is None else self._unpack_later

    def _pack_now(self, data):
        super().pack(data)

    def _unpack_now(self, data):
        super().unpack(data)

    def _pack_later(self, data):
        self._loop.call_later(self._pack_delay(), self.call_packed_cb, data)

    def _unpack_later(self, data):
        self._loop.call_later(self._unpack_delay(), self.call_unpacked_cb, data)
class RandomDropPacker(BasePacker):
    """Simulate packet loss by discarding a random fraction of datagrams.

    Attributes:
        pack_drop_rate, unpack_drop_rate: probability (0..1) that a
            datagram travelling in that direction is silently discarded.
    """

    def __init__(self, pack_drop_rate, unpack_drop_rate, *args,
                 use_system_random=False, **kwargs):
        """Initialize Packer.

        use_system_random: draw randomness from random.SystemRandom
        (os.urandom) instead of the seedable module-level PRNG.
        """
        super().__init__(*args, **kwargs)
        self.pack_drop_rate = pack_drop_rate
        self.unpack_drop_rate = unpack_drop_rate
        if use_system_random:
            self._random = random.SystemRandom()
        else:
            self._random = random

    def pack(self, data):
        if self._random.random() < self.pack_drop_rate:
            return  # dropped
        self.packed_cb(data)

    def unpack(self, data):
        if self._random.random() < self.unpack_drop_rate:
            return  # dropped
        self.unpacked_cb(data)
class ShufflePacker(BasePacker):
    """Permute the bytes of each datagram with a keyed PRNG.

    The permutation depends only on the datagram length and a shared
    integer key, so both endpoints derive identical shuffle tables.
    """

    def __init__(self, key, *args, **kwargs):
        """Initialize Packer.

        key: integer mixed into the PRNG seed; both sides must agree on it.
        """
        super().__init__(*args, **kwargs)
        self._key = key
        self._random = random.Random()
        self._shuffle_sequence = {}  # length -> (forward, inverse) tables

    def pack(self, data):
        forward = self._get_shuffle_sequence(len(data))[0]
        self.packed_cb(bytes(data[i] for i in forward))

    def unpack(self, data):
        inverse = self._get_shuffle_sequence(len(data))[1]
        self.unpacked_cb(bytes(data[i] for i in inverse))

    def _get_shuffle_sequence(self, length):
        # Tables are cached per length; the PRNG is reseeded each time so
        # the result is reproducible regardless of processing order.
        if length not in self._shuffle_sequence:
            self._random.seed(length + self._key)
            forward = list(range(length))
            self._random.shuffle(forward)
            inverse = [0] * length
            for dst, src in enumerate(forward):
                inverse[src] = dst
            self._shuffle_sequence[length] = (forward, inverse)
        return self._shuffle_sequence[length]
# The following 3 packers replicate the effect of OpenVPN's "XOR patch".
# https://tunnelblick.net/cOpenvpn_xorpatch.html
# To replicate the "obfuscate" option, use the following packer pipeline:
# [XorPtrPosPacker, ReverseOnePlusPacker, XorPtrPosPacker, XorMaskPacker]
class XorMaskPacker(BasePacker):
    """XOR every byte of the datagram with a repeating mask.

    Applying the same mask twice restores the original data, so pack and
    unpack share one implementation.
    """

    def __init__(self, mask, *args, **kwargs):
        """mask: bytes XORed (cyclically repeated) onto the data."""
        super().__init__(*args, **kwargs)
        self.mask = mask

    def _xor_with_mask(self, data):
        repeated = itertools.cycle(self.mask)
        return bytes(b ^ m for b, m in zip(data, repeated))

    def pack(self, data):
        self.packed_cb(self._xor_with_mask(data))

    def unpack(self, data):
        self.unpacked_cb(self._xor_with_mask(data))
class ReverseOnePlusPacker(BasePacker):
    """Keep the first byte in place and reverse the rest of the datagram.

    The transform is its own inverse, so pack and unpack are identical.
    """

    def _flip_tail(self, data):
        return data[:1] + data[1:][::-1]

    def pack(self, data):
        self.packed_cb(self._flip_tail(data))

    def unpack(self, data):
        self.unpacked_cb(self._flip_tail(data))
class XorPtrPosPacker(BasePacker):
    """XOR each byte with its 1-based position (modulo 256)."""

    def _xor_position(self, data):
        # enumerate from 1 so byte 0 is XORed with 1, byte 1 with 2, ...
        return bytes(b ^ (i & 255) for i, b in enumerate(data, 1))

    def pack(self, data):
        self.packed_cb(self._xor_position(data))

    def unpack(self, data):
        self.unpacked_cb(self._xor_position(data))
class PaddingPacker(BasePacker):
    """Pad each datagram to obscure its true length.

    Wire format: 2-byte big-endian original length, the original payload,
    then zero padding.  Payloads longer than 65535 bytes cannot be
    represented and are rejected.
    """

    def __init__(self, calc_padding_length, *args, **kwargs):
        """Initialize packer.

        calc_padding_length: a callable with signature
            calc_padding_length(min_output_length) --> pad_length
        where min_output_length is the minimum possible byte length of the
        padded datagram (payload plus the 2-byte header), and pad_length is
        the number of padding bytes to append.
        """
        super().__init__(*args, **kwargs)
        # Renamed from _calc_padded_length: the callable returns the
        # *padding* length, not the padded length, matching the argument.
        self._calc_padding_length = calc_padding_length

    def _pad(self, data):
        orig_length = len(data)
        pad_length = self._calc_padding_length(orig_length + 2)
        try:
            return orig_length.to_bytes(2, 'big') + data + bytes(pad_length)
        except OverflowError as e:
            # to_bytes(2, ...) overflows once the payload exceeds 65535 bytes.
            raise ValueError('Unpadded datagram too long') from e

    def _unpad(self, data):
        padded_length = len(data)
        if padded_length < 2:
            raise ValueError('Padded datagram too short')
        orig_length = int.from_bytes(data[:2], 'big')
        if orig_length + 2 > padded_length:
            raise ValueError('Invalid unpadded datagram length')
        # Any trailing padding beyond orig_length is deliberately ignored.
        return data[2:2 + orig_length]

    def pack(self, data):
        self.packed_cb(self._pad(data))

    def unpack(self, data):
        self.unpacked_cb(self._unpad(data))
|
raurodse/webinterfaces | refs/heads/master | webinterface/application.py | 1 | from flask import Flask
from flask import render_template
from flask import request
import netinterfaces
import os.path
import os
# Module-level WSGI application; the view functions below register on it.
app = Flask(__name__)
@app.route('/')
def principal():
    """Render the main page listing every known network interface.

    Merges the interfaces currently present on the system with those
    declared in the interfaces file, excluding the loopback device.
    """
    interfaces = app.network_interface.get_real_list_interfaces()
    for name in app.network_interface.get_list_interfaces():
        if name not in interfaces:
            interfaces.append(name)
    # The loopback interface is not user-configurable; drop it only if
    # present (the unconditional remove() used to raise ValueError — a 500
    # error — whenever 'lo' was missing from the list).
    if 'lo' in interfaces:
        interfaces.remove('lo')
    serialized = {name: parser_config(name) for name in interfaces}
    return render_template('main.html', list_interfaces=interfaces,
                           serialized_interfaces=serialized)
@app.route('/reboot')
def reboot():
    """Restart networking so edited interface settings take effect.

    Returns a short confirmation string: Flask raises an error when a view
    returns None, which the original implementation did.
    """
    # REBOOT_INTERFACES is an operator-controlled command from local_config,
    # not user input, so os.system is acceptable here.
    os.system(local_config.REBOOT_INTERFACES)
    return 'Rebooting network interfaces'
@app.route('/update_info', methods=['POST'])
def update_info():
    # View handling the interface-edit form: applies a Save or Delete action
    # to the parsed interfaces model, writes the file back to disk, and
    # renders a confirmation page with the resulting file contents.
    interface = str(request.form['interface'])
    auto_start = True if 'auto-start' in request.form else False
    method = str(request.form['method'])
    dns = str(request.form['dns'].strip()).split('\n')
    ip = str(request.form["ip"].strip())
    netmask = str(request.form["mask"].strip())
    gateway = str(request.form["gateway"].strip())
    action = request.form["action"]
    if action == 'Save':
        if interface in list(app.network_interface.interface_mapping.keys()):
            # Known interface: mutate its existing stanzas in place.
            app.network_interface.auto_toggle(interface, auto_start)
            if method == 'dhcp':
                app.network_interface.change_to_dhcp(interface)
            elif method == 'static':
                options = {'address': ip, 'netmask': netmask}
                if gateway != "":
                    options['gateway'] = gateway
                else:
                    #check if needed remove gateway
                    for x in app.network_interface.interface_mapping[interface]:
                        if hasattr(x,'check_option'):
                            if x.check_option('gateway',True):
                                x.remove_option('gateway',True)
                app.network_interface.change_to_static(interface, options)
        else:
            # Unknown interface: build fresh stanzas from the form values.
            if auto_start:
                new_stanza = netinterfaces.StanzaAuto([interface])
                app.network_interface.insert_stanza(new_stanza)
            new_stanza = netinterfaces.StanzaIface([interface], "inet " + method)
            if method != 'dhcp':
                new_stanza.set_option(str("address " + ip), unique=True)
                new_stanza.set_option(str("netmask " + netmask), unique=True)
                if gateway != "":
                    new_stanza.set_option(str("gateway " + gateway), unique=True)
            app.network_interface.insert_stanza(new_stanza)
        app.network_interface.update_dns(interface, dns)
    elif action == 'Delete':
        app.network_interface.delete_all_interface(interface)
        # NOTE(review): this 'pass' is dead code after the delete call.
        pass
    # Persist whatever changed (or nothing, for unknown actions).
    app.network_interface.write_file(local_config.OUTPUT_INTERFACE)
    return render_template('update.html',interfaces=app.network_interface.print_file())
def read_file(path_file):
    """Load the interfaces file into the parser if it exists.

    Returns True when the file was found and loaded, False otherwise.
    """
    if not os.path.exists(path_file):
        return False
    app.network_interface.load(path_file)
    return True
def parser_config(interface):
    """Serialize the stanzas of *interface* into a plain dict for templates.

    Returns a dict with 'auto' (bool) and, when an iface stanza exists,
    'family', 'method' and one list of values per option keyword
    (e.g. 'address', 'dns-nameservers').
    """
    result = {'auto': False}
    if interface in app.network_interface.interface_mapping:
        for stanza in app.network_interface.interface_mapping[interface]:
            if isinstance(stanza, netinterfaces.StanzaAuto):
                result['auto'] = True
            if isinstance(stanza, netinterfaces.StanzaIface):
                result['family'] = stanza.family
                result['method'] = stanza.method
                for option in stanza.options:
                    # Split "keyword value..." on the first space. The old
                    # str.find() code silently chopped the last character off
                    # valueless options (find() returned -1), and its except
                    # fallback crashed on the unbound name 'value'.
                    key, sep, value = option.partition(' ')
                    if not sep:
                        value = ''
                    if key in result:
                        result[key].append(value)
                    else:
                        result[key] = [value]
    return result
# Module-level initialisation: runs on import (e.g. under a WSGI server),
# mirroring the __main__ block below.
import local_config
app.debug = local_config.DEBUG
app.network_interface = netinterfaces.InterfacesParser(local_config.LOG_PATH)
read_file(local_config.INPUT_INTERFACES)
if __name__ == '__main__':
    # NOTE(review): this repeats the module-level initialisation above —
    # presumably redundant when run as a script; confirm before removing.
    import local_config
    app.debug = local_config.DEBUG
    app.network_interface = netinterfaces.InterfacesParser(
        local_config.LOG_PATH)
    read_file(local_config.INPUT_INTERFACES)
    app.run(host=local_config.HOST, port=local_config.PORT)
|
MiyamotoAkira/kivy | refs/heads/master | kivy/lib/osc/__init__.py | 47 | '''
OSC
===
This is an heavy modified version of PyOSC that Kivy uses internally for TUIO
providers.
'''
# Package metadata.
__version__ = "0"
__author__ = "www.ixi-software.net"
__license__ = "GNU General Public License"
# Names re-exported through the star imports below.
__all__ = ("oscAPI", "OSC")
from .OSC import *
from .oscAPI import *
|
jswanljung/iris | refs/heads/master | lib/iris/tests/unit/util/test_broadcast_to_shape.py | 17 | # (C) British Crown Copyright 2013 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Test function :func:`iris.util.broadcast_to_shape`."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import numpy as np
import numpy.ma as ma
from iris.util import broadcast_to_shape
class Test_broadcast_to_shape(tests.IrisTest):
    """Unit tests for :func:`iris.util.broadcast_to_shape`."""

    def test_same_shape(self):
        # Broadcasting an array to its own shape must be a no-op.
        src = np.random.random([2, 3])
        result = broadcast_to_shape(src, src.shape, (0, 1))
        self.assertArrayEqual(result, src)

    def test_added_dimensions(self):
        # Insert one new dimension at the front and one between the source
        # dimensions; every slice along the new axes must equal the source.
        src = np.random.random([2, 3])
        result = broadcast_to_shape(src, (5, 2, 4, 3), (1, 3))
        for outer in range(5):
            for inner in range(4):
                self.assertArrayEqual(result[outer, :, inner, :], src)

    def test_added_dimensions_transpose(self):
        # As above, but with the source dimensions also transposed in the
        # target shape.
        src = np.random.random([2, 3])
        result = broadcast_to_shape(src, (5, 3, 4, 2), (3, 1))
        for outer in range(5):
            for inner in range(4):
                self.assertArrayEqual(result[outer, :, inner, :].T, src)

    def test_masked(self):
        # Masked arrays broadcast too, mask included.
        src = np.random.random([2, 3])
        masked = ma.array(src, mask=[[0, 1, 0], [0, 1, 1]])
        result = broadcast_to_shape(masked, (5, 3, 4, 2), (3, 1))
        for outer in range(5):
            for inner in range(4):
                self.assertMaskedArrayEqual(result[outer, :, inner, :].T,
                                            masked)

    def test_masked_degenerate(self):
        # A masked array with no explicit (degenerate) mask is handled too.
        src = np.random.random([2, 3])
        masked = ma.array(src)
        result = broadcast_to_shape(masked, (5, 3, 4, 2), (3, 1))
        for outer in range(5):
            for inner in range(4):
                self.assertMaskedArrayEqual(result[outer, :, inner, :].T,
                                            masked)
if __name__ == '__main__':
    # Delegate to the iris test runner when executed directly.
    tests.main()
|
sdsu-its/key-server-admin | refs/heads/master | packages/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py | 2057 | try:
# Python 3.2+
from ssl import CertificateError, match_hostname
except ImportError:
try:
# Backport of the function from a pypi module
from backports.ssl_match_hostname import CertificateError, match_hostname
except ImportError:
# Our vendored copy
from ._implementation import CertificateError, match_hostname
# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
|
maas/maas | refs/heads/master | setup.py | 1 | # Copyright 2012-2021 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
# All packaging metadata lives in declarative config (setup.cfg); this stub
# exists so tools that invoke setup.py directly keep working.
from setuptools import setup

setup()
|
DougFirErickson/mavlink | refs/heads/master | pymavlink/tools/mavtomfile.py | 45 | #!/usr/bin/env python
'''
convert a MAVLink tlog file to a MATLab mfile
'''
import sys, os
import re
from pymavlink import mavutil
def process_tlog(filename):
    '''Convert a MAVLink tlog file into a MATLAB/Octave .m script.

    For every message type encountered, the generated script defines a
    struct with a ``columns`` cell array of field names and a ``data``
    matrix with one row per message (first column is the timestamp).
    '''
    print("Processing %s" % filename)
    mlog = mavutil.mavlink_connection(filename, dialect=args.dialect, zero_time_base=True)

    # Restrict output to the requested message types, if any were given.
    types = args.types
    if types is not None:
        types = types.split(',')

    # Octave identifiers cannot contain '.', '-', '+' or '*' and must start
    # with a letter, so sanitise the log basename before using it.
    (head, tail) = os.path.split(filename)
    basename = '.'.join(tail.split('.')[:-1])
    mfilename = re.sub(r'[.\-+*]', '_', basename) + '.m'
    # Fixed: the original class was [a-zA-z], whose A-z range accidentally
    # also matched '[', '\', ']', '^', '_' and '`'. Also use "is None".
    if re.match(r'^[a-zA-Z]', mfilename) is None:
        mfilename = 'm_' + mfilename
    if head:
        mfilename = os.path.join(head, mfilename)
    print("Creating %s" % mfilename)

    type_counters = {}
    # Context manager so the output file is closed even if parsing fails
    # part-way through (the original leaked the handle on error).
    with open(mfilename, "w") as f:
        while True:
            m = mlog.recv_match(condition=args.condition)
            if m is None:
                break
            mtype = m.get_type()
            if types is not None and mtype not in types:
                continue
            if mtype == 'BAD_DATA':
                continue
            if mtype in ['FMT', 'PARM']:
                continue
            fieldnames = m._fieldnames
            if mtype not in type_counters:
                # First occurrence of this type: emit its column names.
                type_counters[mtype] = 0
                f.write("%s.columns = {'timestamp'" % mtype)
                for field in fieldnames:
                    val = getattr(m, field)
                    if not isinstance(val, str):
                        if type(val) is not list:
                            f.write(",'%s'" % field)
                        else:
                            # Array fields expand to field1, field2, ...
                            for i in range(0, len(val)):
                                f.write(",'%s%d'" % (field, i + 1))
                f.write("};\n")
            type_counters[mtype] += 1
            f.write("%s.data(%u,:) = [%f" % (mtype, type_counters[mtype], m._timestamp))
            for field in fieldnames:
                val = getattr(m, field)
                if not isinstance(val, str):
                    if type(val) is not list:
                        f.write(",%.20g" % val)
                    else:
                        for i in range(0, len(val)):
                            f.write(",%.20g" % val[i])
            f.write("];\n")
# Command-line handling: every positional LOG argument is converted in turn.
from argparse import ArgumentParser
parser = ArgumentParser(description=__doc__)
parser.add_argument("--condition", default=None, help="select packets by condition")
parser.add_argument("-o", "--output", default=None, help="output filename")
parser.add_argument("--types", default=None, help="types of messages (comma separated)")
parser.add_argument("--dialect", default="ardupilotmega", help="MAVLink dialect")
parser.add_argument("logs", metavar="LOG", nargs="+")

args = parser.parse_args()

# NOTE(review): the --output option is parsed but never used — the output
# name is derived inside process_tlog; confirm before removing the option.
for filename in args.logs:
    process_tlog(filename)
|
vmarkovtsev/django | refs/heads/master | tests/datatypes/models.py | 449 | """
This is a basic model to test saving and loading boolean and date-related
types, which in the past were problematic for some database backends.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Donut(models.Model):
name = models.CharField(max_length=100)
is_frosted = models.BooleanField(default=False)
has_sprinkles = models.NullBooleanField()
baked_date = models.DateField(null=True)
baked_time = models.TimeField(null=True)
consumed_at = models.DateTimeField(null=True)
review = models.TextField()
class Meta:
ordering = ('consumed_at',)
def __str__(self):
return self.name
class RumBaba(models.Model):
baked_date = models.DateField(auto_now_add=True)
baked_timestamp = models.DateTimeField(auto_now_add=True)
|
asedunov/intellij-community | refs/heads/master | python/lib/Lib/site-packages/django/contrib/admin/templatetags/log.py | 310 | from django import template
from django.contrib.admin.models import LogEntry
register = template.Library()
class AdminLogNode(template.Node):
    """Template node that stores recent admin LogEntry rows in the context."""

    def __init__(self, limit, varname, user):
        self.limit, self.varname, self.user = limit, varname, user

    def __repr__(self):
        return "<GetAdminLog Node>"

    def render(self, context):
        if self.user is None:
            # No user filter: most recent entries across all users.
            context[self.varname] = LogEntry.objects.all().select_related('content_type', 'user')[:self.limit]
        else:
            user_id = self.user
            if not user_id.isdigit():
                # ``user`` names a template context variable holding a User
                # object rather than a literal numeric id.
                user_id = context[self.user].id
            context[self.varname] = LogEntry.objects.filter(user__id__exact=user_id).select_related('content_type', 'user')[:self.limit]
        return ''
class DoGetAdminLog:
    """
    Populates a template variable with the admin log for the given criteria.

    Usage::

        {% get_admin_log [limit] as [varname] for_user [context_var_containing_user_obj] %}

    Examples::

        {% get_admin_log 10 as admin_log for_user 23 %}
        {% get_admin_log 10 as admin_log for_user user %}
        {% get_admin_log 10 as admin_log %}

    Note that ``context_var_containing_user_obj`` can be a hard-coded integer
    (user ID) or the name of a template context variable containing the user
    object whose ID you want.
    """
    def __init__(self, tag_name):
        self.tag_name = tag_name

    def __call__(self, parser, token):
        # Expected token layout: TAG LIMIT as VARNAME [for_user USER]
        tokens = token.contents.split()
        if len(tokens) < 4:
            raise template.TemplateSyntaxError("'%s' statements require two arguments" % self.tag_name)
        if not tokens[1].isdigit():
            raise template.TemplateSyntaxError("First argument in '%s' must be an integer" % self.tag_name)
        if tokens[2] != 'as':
            raise template.TemplateSyntaxError("Second argument in '%s' must be 'as'" % self.tag_name)
        if len(tokens) > 4:
            if tokens[4] != 'for_user':
                raise template.TemplateSyntaxError("Fourth argument in '%s' must be 'for_user'" % self.tag_name)
        # ``user`` is the optional sixth token; ``None`` means "all users".
        return AdminLogNode(limit=tokens[1], varname=tokens[3], user=(len(tokens) > 5 and tokens[5] or None))

register.tag('get_admin_log', DoGetAdminLog('get_admin_log'))
|
commtrack/temp-aquatest | refs/heads/master | apps/smsnotifications/forms.py | 2 | from django import forms
from django.forms import ModelForm
from smsnotifications.models import SmsNotification
class SmsNotificationForm(ModelForm):
    """ModelForm for SmsNotification; the timestamp fields are excluded
    because they are managed by the model, not by user input."""
    class Meta:
        model = SmsNotification
        exclude = ('modified','created',)
kevinlondon/sentry | refs/heads/master | api-docs/generator.py | 7 | import os
import zlib
import json
import click
import urlparse
import logging
from datetime import datetime
from subprocess import Popen, PIPE
from contextlib import contextmanager
# Paths are resolved relative to this script so it can run from any CWD.
HERE = os.path.abspath(os.path.dirname(__file__))
SENTRY_CONFIG = os.path.join(HERE, 'sentry.conf.py')

# No sentry or django imports before that point
from sentry.utils import runner
runner.configure(config_path=SENTRY_CONFIG, skip_backend_validation=True)

from django.conf import settings

# Fair game from here
from django.core.management import call_command
from sentry.utils.apidocs import Runner, MockUtils, iter_scenarios, \
    iter_endpoints, get_sections

# Default directory for the exported JSON; may be overridden by --output-path.
OUTPUT_PATH = os.path.join(HERE, 'cache')
HOST = urlparse.urlparse(settings.SENTRY_URL_PREFIX).netloc

# We don't care about you, go away
_logger = logging.getLogger('sentry.events')
_logger.disabled = True
def color_for_string(s):
    """Deterministically map *s* to one of six terminal color names.

    The CRC32 of the string picks the palette entry, so the same category
    always gets the same color across runs.
    """
    palette = ['red', 'green', 'yellow', 'blue', 'cyan', 'magenta']
    index = zlib.crc32(s) % len(palette)
    return palette[index]
def report(category, message, fg=None):
    """Print a timestamped, colorized status line for *category*.

    When *fg* is not given the color is derived from the category name so
    related messages share a color.
    """
    color = fg if fg is not None else color_for_string(category)
    timestamp = str(datetime.utcnow()).split('.')[0]
    line = '[%s] %s: %s' % (timestamp, click.style(category, fg=color), message)
    click.echo(line)
def launch_redis():
    """Spawn a throwaway redis-server child process and return its Popen.

    The server reads its configuration from stdin ('-' argument): ephemeral
    (``save ""`` disables persistence) on the port configured in settings.
    """
    report('redis', 'Launching redis server')
    cl = Popen(['redis-server', '-'], stdin=PIPE, stdout=open(os.devnull, 'r+'))
    cl.stdin.write('''
    port %(port)s
    databases %(databases)d
    save ""
    ''' % {
        'port': str(settings.SENTRY_APIDOCS_REDIS_PORT),
        'databases': 4,
    })
    # Close stdin so redis knows the config is complete and starts serving.
    cl.stdin.flush()
    cl.stdin.close()
    return cl
def spawn_sentry():
    """Start the sentry dev web server as a child process and return its Popen."""
    report('sentry', 'Launching sentry server')
    cl = Popen(['sentry', '--config=' + SENTRY_CONFIG, 'runserver',
                '-v', '0', '--noreload', '--nothreading',
                '--no-watchers', '--traceback',
                '127.0.0.1:%s' % settings.SENTRY_APIDOCS_WEB_PORT])
    return cl
@contextmanager
def management_connection():
    """Yield a raw sqlite3 connection to the configured default database.

    The connection is always closed on exit, even if the body raises.
    """
    from sqlite3 import connect
    cfg = settings.DATABASES['default']
    con = connect(cfg['NAME'])
    try:
        con.cursor()  # touch the connection early so failures surface here
        yield con
    finally:
        con.close()
def init_db():
    """Recreate the scratch database from a clean slate and run all migrations."""
    drop_db()
    report('db', 'Migrating database (this can time some time)')
    call_command('syncdb', migrate=True, interactive=False,
                 traceback=True, verbosity=0)
def drop_db():
    """Delete the sqlite database file; a missing file is not an error."""
    report('db', 'Dropping database')
    try:
        os.remove(settings.DATABASES['default']['NAME'])
    except (OSError, IOError):
        pass
class SentryBox(object):
    """Context manager bundling the scratch redis + sentry web processes.

    Entering launches both child processes and migrates a fresh database;
    exiting drops the database and kills the children.
    """

    def __init__(self):
        self.redis = None
        self.sentry = None
        self.task_runner = None  # NOTE(review): never assigned in this file — presumably vestigial

    def __enter__(self):
        self.redis = launch_redis()
        self.sentry = spawn_sentry()
        init_db()
        return self

    def __exit__(self, exc_type, exc_value, tb):
        # Always drop the scratch database first, then stop whichever
        # child processes actually started.
        drop_db()
        if self.redis is not None:
            report('redis', 'Stopping redis server')
            self.redis.kill()
            self.redis.wait()
        if self.sentry is not None:
            report('sentry', 'Shutting down sentry server')
            self.sentry.kill()
            self.sentry.wait()
def dump_json(path, data):
    """Write *data* as pretty-printed JSON to ``OUTPUT_PATH/path``.

    Parent directories are created on demand; trailing whitespace is stripped
    from each line so the generated files stay diff-friendly.
    """
    path = os.path.join(OUTPUT_PATH, path)
    try:
        os.makedirs(os.path.dirname(path))
    except OSError:
        # Directory already exists (or creation failed — open() will then raise).
        pass
    with open(path, 'w') as f:
        for line in json.dumps(data, indent=2, sort_keys=True).splitlines():
            f.write(line.rstrip() + '\n')
def run_scenario(vars, scenario_ident, func):
    """Execute one documented API scenario and dump its recorded traffic as JSON."""
    runner = Runner(scenario_ident, func, **vars)
    report('scenario', 'Running scenario "%s"' % scenario_ident)
    func(runner)
    dump_json('scenarios/%s.json' % scenario_ident, runner.to_json())
@click.command()
@click.option('--output-path', type=click.Path())
def cli(output_path):
    """API docs dummy generator.

    Boots a scratch Sentry stack (redis + web server + sqlite), creates a
    demo organization/team with two projects and sample events, runs every
    documentation scenario, and exports endpoint/section metadata as JSON
    under ``OUTPUT_PATH``.
    """
    global OUTPUT_PATH
    if output_path is not None:
        OUTPUT_PATH = os.path.abspath(output_path)

    with SentryBox():
        utils = MockUtils()
        report('org', 'Creating user and organization')
        user = utils.create_user('john@interstellar.invalid')
        org = utils.create_org('The Interstellar Jurisdiction',
                               owner=user)
        api_key = utils.create_api_key(org)

        report('org', 'Creating team')
        team = utils.create_team('Powerful Abolitionist',
                                 org=org)

        projects = []
        for project_name in 'Pump Station', 'Prime Mover':
            report('project', 'Creating project "%s"' % project_name)
            project = utils.create_project(project_name, team=team, org=org)
            release = utils.create_release(project=project, user=user)

            report('event', 'Creating event for "%s"' % project_name)
            event1 = utils.create_event(project=project, release=release,
                                        platform='python')
            event2 = utils.create_event(project=project, release=release,
                                        platform='java')
            projects.append({
                'project': project,
                'release': release,
                'events': [event1, event2],
            })

        # Template variables handed to every scenario runner.
        # FIX: the original dict literal listed 'api_key' twice; the first
        # occurrence was dead code silently overwritten by the second.
        vars = {
            'org': org,
            'api_key': api_key,
            'me': user,
            'teams': [{
                'team': team,
                'projects': projects,
            }],
        }

        for scenario_ident, func in iter_scenarios():
            run_scenario(vars, scenario_ident, func)

        section_mapping = {}
        report('docs', 'Exporting endpoint documentation')
        for endpoint in iter_endpoints():
            report('endpoint', 'Exporting docs for "%s"' %
                   endpoint['endpoint_name'])
            # Group endpoints by section for the sections.json index.
            section_mapping.setdefault(endpoint['section'], []) \
                .append((endpoint['endpoint_name'],
                         endpoint['title']))
            dump_json('endpoints/%s.json' % endpoint['endpoint_name'], endpoint)

        report('docs', 'Exporting sections')
        dump_json('sections.json', {
            'sections': dict((section, {
                'title': title,
                'entries': dict(section_mapping.get(section, ())),
            }) for section, title in get_sections().iteritems())
        })


if __name__ == '__main__':
    cli()
|
cognitiveclass/edx-platform | refs/heads/master | common/djangoapps/geoinfo/tests/test_middleware.py | 137 | """
Tests for CountryMiddleware.
"""
from mock import patch
import pygeoip
from django.contrib.sessions.middleware import SessionMiddleware
from django.test import TestCase
from django.test.client import RequestFactory
from geoinfo.middleware import CountryMiddleware
from student.tests.factories import UserFactory, AnonymousUserFactory
class CountryMiddlewareTests(TestCase):
    """
    Tests of CountryMiddleware.
    """
    def setUp(self):
        super(CountryMiddlewareTests, self).setUp()
        self.country_middleware = CountryMiddleware()
        self.session_middleware = SessionMiddleware()
        self.authenticated_user = UserFactory.create()
        self.anonymous_user = AnonymousUserFactory.create()
        self.request_factory = RequestFactory()
        # Replace the real GeoIP lookup with the deterministic fake below.
        self.patcher = patch.object(pygeoip.GeoIP, 'country_code_by_addr', self.mock_country_code_by_addr)
        self.patcher.start()
        self.addCleanup(self.patcher.stop)

    def mock_country_code_by_addr(self, ip_addr):
        """
        Gives us a fake set of IPs
        """
        ip_dict = {
            '117.79.83.1': 'CN',
            '117.79.83.100': 'CN',
            '4.0.0.0': 'SD',
            '2001:da8:20f:1502:edcf:550b:4a9c:207d': 'CN',
        }
        # Unknown addresses fall back to 'US'.
        return ip_dict.get(ip_addr, 'US')

    def test_country_code_added(self):
        # First request from a known IP stores country code and IP in session.
        request = self.request_factory.get(
            '/somewhere',
            HTTP_X_FORWARDED_FOR='117.79.83.1',
        )
        request.user = self.authenticated_user
        self.session_middleware.process_request(request)
        # No country code exists before request.
        self.assertNotIn('country_code', request.session)
        self.assertNotIn('ip_address', request.session)
        self.country_middleware.process_request(request)
        # Country code added to session.
        self.assertEqual('CN', request.session.get('country_code'))
        self.assertEqual('117.79.83.1', request.session.get('ip_address'))

    def test_ip_address_changed(self):
        # A new IP from a different country updates both session values.
        request = self.request_factory.get(
            '/somewhere',
            HTTP_X_FORWARDED_FOR='4.0.0.0',
        )
        request.user = self.anonymous_user
        self.session_middleware.process_request(request)
        request.session['country_code'] = 'CN'
        request.session['ip_address'] = '117.79.83.1'
        self.country_middleware.process_request(request)
        # Country code is changed.
        self.assertEqual('SD', request.session.get('country_code'))
        self.assertEqual('4.0.0.0', request.session.get('ip_address'))

    def test_ip_address_is_not_changed(self):
        # Same IP as stored: session stays untouched.
        request = self.request_factory.get(
            '/somewhere',
            HTTP_X_FORWARDED_FOR='117.79.83.1',
        )
        request.user = self.anonymous_user
        self.session_middleware.process_request(request)
        request.session['country_code'] = 'CN'
        request.session['ip_address'] = '117.79.83.1'
        self.country_middleware.process_request(request)
        # Country code is not changed.
        self.assertEqual('CN', request.session.get('country_code'))
        self.assertEqual('117.79.83.1', request.session.get('ip_address'))

    def test_same_country_different_ip(self):
        # Different IP in the same country: IP updates, country stays.
        request = self.request_factory.get(
            '/somewhere',
            HTTP_X_FORWARDED_FOR='117.79.83.100',
        )
        request.user = self.anonymous_user
        self.session_middleware.process_request(request)
        request.session['country_code'] = 'CN'
        request.session['ip_address'] = '117.79.83.1'
        self.country_middleware.process_request(request)
        # Country code is not changed.
        self.assertEqual('CN', request.session.get('country_code'))
        self.assertEqual('117.79.83.100', request.session.get('ip_address'))

    def test_ip_address_is_none(self):
        # IP address is not defined in request.
        request = self.request_factory.get('/somewhere')
        request.user = self.anonymous_user
        # Run process_request to set up the session in the request
        # to be able to override it.
        self.session_middleware.process_request(request)
        request.session['country_code'] = 'CN'
        request.session['ip_address'] = '117.79.83.1'
        self.country_middleware.process_request(request)
        # No country code exists after request processing.
        self.assertNotIn('country_code', request.session)
        self.assertNotIn('ip_address', request.session)

    def test_ip_address_is_ipv6(self):
        # IPv6 addresses are looked up the same way as IPv4.
        request = self.request_factory.get(
            '/somewhere',
            HTTP_X_FORWARDED_FOR='2001:da8:20f:1502:edcf:550b:4a9c:207d'
        )
        request.user = self.authenticated_user
        self.session_middleware.process_request(request)
        # No country code exists before request.
        self.assertNotIn('country_code', request.session)
        self.assertNotIn('ip_address', request.session)
        self.country_middleware.process_request(request)
        # Country code added to session.
        self.assertEqual('CN', request.session.get('country_code'))
        self.assertEqual(
            '2001:da8:20f:1502:edcf:550b:4a9c:207d', request.session.get('ip_address'))
|
trishnaguha/ansible | refs/heads/devel | lib/ansible/modules/network/f5/bigip_device_ntp.py | 14 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_device_ntp
short_description: Manage NTP servers on a BIG-IP
description:
- Manage NTP servers on a BIG-IP.
version_added: 2.2
options:
ntp_servers:
description:
- A list of NTP servers to set on the device. At least one of C(ntp_servers)
or C(timezone) is required.
state:
description:
- The state of the NTP servers on the system. When C(present), guarantees
that the NTP servers are set on the system. When C(absent), removes the
specified NTP servers from the device configuration.
default: present
choices:
- absent
- present
timezone:
description:
- The timezone to set for NTP lookups. At least one of C(ntp_servers) or
C(timezone) is required.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Set NTP server
bigip_device_ntp:
ntp_servers:
- 192.0.2.23
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Set timezone
bigip_device_ntp:
timezone: America/Los_Angeles
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
ntp_servers:
description: The NTP servers that were set on the device
returned: changed
type: list
sample: ["192.0.2.23", "192.0.2.42"]
timezone:
description: The timezone that was set on the device
returned: changed
type: str
sample: true
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import is_empty_list
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import is_empty_list
class Parameters(AnsibleF5Parameters):
    # Maps BIG-IP REST attribute names to this module's parameter names.
    api_map = {
        'servers': 'ntp_servers',
    }

    # Attributes sent to the device API on update.
    api_attributes = [
        'servers', 'timezone',
    ]

    # Parameters diffed to decide whether an update is needed.
    updatables = [
        'ntp_servers', 'timezone',
    ]

    # Parameters echoed back in the module result.
    returnables = [
        'ntp_servers', 'timezone',
    ]

    # Parameters considered when state=absent (removal).
    absentables = [
        'ntp_servers',
    ]
class ApiParameters(Parameters):
    # Parameters as read back from the device API; no translation needed.
    pass


class ModuleParameters(Parameters):
    # Parameters as supplied by the user in the task.

    @property
    def ntp_servers(self):
        """Return the requested NTP servers.

        ``None`` means "not specified"; an explicitly empty list is
        normalized to ``[]`` so callers can distinguish "clear" from "omit".
        """
        ntp_servers = self._values['ntp_servers']
        if ntp_servers is None:
            return None
        if is_empty_list(ntp_servers):
            return []
        return ntp_servers
class Changes(Parameters):
    def to_return(self):
        """Collect ``returnables`` into a filtered dict for the module result.

        Any exception while reading a property is deliberately swallowed so
        that a partial result can still be reported.
        """
        result = {}
        try:
            for returnable in self.returnables:
                change = getattr(self, returnable)
                if isinstance(change, dict):
                    result.update(change)
                else:
                    result[returnable] = change
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    # Changes in the form sent to the device API.
    pass


class ReportableChanges(Changes):
    # Changes in the form reported back to the user.
    pass
class Difference(object):
    """Computes the per-parameter difference between desired (*want*) and
    current (*have*) configuration.

    ``compare()`` prefers a same-named property on this class (which can
    implement parameter-specific semantics) and otherwise falls back to a
    plain inequality check. A return value of ``None`` means "no change".
    """

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        # Property-based comparison first; generic fallback otherwise.
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__generic(param)

    def __generic(self, param):
        wanted = getattr(self.want, param)
        try:
            current = getattr(self.have, param)
        except AttributeError:
            # ``have`` does not know the parameter at all: report the want.
            return wanted
        if wanted != current:
            return wanted
        return None

    @property
    def ntp_servers(self):
        wanted = self.want.ntp_servers
        if wanted is None:
            # Parameter was not supplied: nothing to do.
            return None
        current = self.have.ntp_servers
        if self.want.state == 'absent':
            if current is None and wanted:
                # Nothing configured on the device; nothing to remove.
                return None
            if set(wanted) == set(current):
                # Removing everything that is configured: send empty list.
                return []
            # Partial removal: keep only entries not marked for deletion.
            return list(set(wanted).difference(current))
        if not wanted:
            # Explicit empty list clears the servers (unless already empty).
            return None if current is None else wanted
        if current is None:
            return wanted
        if set(wanted) != set(current):
            return wanted
        return None
class ModuleManager(object):
    """Orchestrates reading device state, diffing and applying NTP settings."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.pop('module', None)
        self.client = kwargs.pop('client', None)
        self.want = ModuleParameters(params=self.module.params)
        self.have = ApiParameters()
        self.changes = UsableChanges()

    def _announce_deprecations(self, result):
        # Surface deprecation warnings collected during processing.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def _update_changed_options(self):
        # Diff want vs have over ``updatables``; remember the delta.
        # Returns True when at least one parameter must change.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def _absent_changed_options(self):
        # Same as _update_changed_options but over ``absentables``
        # (used for state=absent removal semantics).
        diff = Difference(self.want, self.have)
        absentables = Parameters.absentables
        changed = dict()
        for k in absentables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def exec_module(self):
        """Entry point: dispatch on ``state`` and build the module result."""
        changed = False
        result = dict()
        state = self.want.state

        if state == "present":
            changed = self.update()
        elif state == "absent":
            changed = self.absent()

        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def update(self):
        # Apply desired settings; honors check mode (no device write).
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True

    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False

    def should_absent(self):
        result = self._absent_changed_options()
        if result:
            return True
        return False

    def absent(self):
        # Remove requested settings; honors check mode (no device write).
        self.have = self.read_current_from_device()
        if not self.should_absent():
            return False
        if self.module.check_mode:
            return True
        self.absent_on_device()
        return True

    def update_on_device(self):
        """PATCH the accumulated changes to /mgmt/tm/sys/ntp."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/ntp/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)

    def read_current_from_device(self):
        """GET the current NTP configuration from the device."""
        uri = "https://{0}:{1}/mgmt/tm/sys/ntp/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return ApiParameters(params=response)

    def absent_on_device(self):
        """PATCH the removal delta (remaining servers) to /mgmt/tm/sys/ntp."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/sys/ntp/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
class ArgumentSpec(object):
    """Argument spec for this module, merged with the shared F5 options."""

    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            ntp_servers=dict(
                type='list',
            ),
            timezone=dict(),
            state=dict(
                default='present',
                choices=['present', 'absent']
            ),
        )
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
        # At least one of the two tunables must be supplied.
        self.required_one_of = [
            ['ntp_servers', 'timezone']
        ]
def main():
    """Module entry point: build the AnsibleModule and run the manager."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        required_one_of=spec.required_one_of
    )
    client = F5RestClient(**module.params)

    try:
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        exit_json(module, results, client)
    except F5ModuleError as ex:
        # Always discard the auth token before reporting failure.
        cleanup_tokens(client)
        fail_json(module, ex, client)


if __name__ == '__main__':
    main()
|
osh/gnuradio | refs/heads/master | gr-blocks/python/blocks/qa_type_conversions.py | 33 | #!/usr/bin/env python
#
# Copyright 2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
from math import sqrt, atan2
class test_type_conversions(gr_unittest.TestCase):
    """QA tests for the GNU Radio type-conversion blocks.

    Each test builds a tiny flowgraph: vector source -> conversion block ->
    vector sink, runs it, and checks the sink contents.
    """

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    # --- char (byte) conversions -------------------------------------

    def test_char_to_float_identity(self):
        src_data = (1, 2, 3, 4, 5)
        expected_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        src = blocks.vector_source_b(src_data)
        op = blocks.char_to_float()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_char_to_float_scale(self):
        # Output is input divided by the scale factor.
        src_data = (1, 2, 3, 4, 5)
        expected_data = (0.5, 1.0, 1.5, 2.0, 2.5)
        src = blocks.vector_source_b(src_data)
        op = blocks.char_to_float(scale=2.0)
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_char_to_short(self):
        # char->short shifts into the high byte (multiply by 256).
        src_data = (1, 2, 3, 4, 5)
        expected_data = (256, 512, 768, 1024, 1280)
        src = blocks.vector_source_b(src_data)
        op = blocks.char_to_short()
        dst = blocks.vector_sink_s()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    # --- complex conversions ------------------------------------------

    def test_complex_to_interleaved_short(self):
        src_data = (1+2j, 3+4j, 5+6j, 7+8j, 9+10j)
        expected_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_interleaved_short()
        dst = blocks.vector_sink_s()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_complex_to_float_1(self):
        # Single output: real part only.
        src_data = (1+2j, 3+4j, 5+6j, 7+8j, 9+10j)
        expected_data = (1.0, 3.0, 5.0, 7.0, 9.0)
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_float()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_complex_to_float_2(self):
        # Two outputs: real part on port 0, imaginary part on port 1.
        src_data = (1+2j, 3+4j, 5+6j, 7+8j, 9+10j)
        expected_data1 = (1.0, 3.0, 5.0, 7.0, 9.0)
        expected_data2 = (2.0, 4.0, 6.0, 8.0, 10.0)
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_float()
        dst1 = blocks.vector_sink_f()
        dst2 = blocks.vector_sink_f()
        self.tb.connect(src, op)
        self.tb.connect((op, 0), dst1)
        self.tb.connect((op, 1), dst2)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data1, dst1.data())
        self.assertFloatTuplesAlmostEqual(expected_data2, dst2.data())

    def test_complex_to_real(self):
        src_data = (1+2j, 3+4j, 5+6j, 7+8j, 9+10j)
        expected_data = (1.0, 3.0, 5.0, 7.0, 9.0)
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_real()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_complex_to_imag(self):
        src_data = (1+2j, 3+4j, 5+6j, 7+8j, 9+10j)
        expected_data = (2.0, 4.0, 6.0, 8.0, 10.0)
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_imag()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_complex_to_mag(self):
        src_data = (1+2j, 3-4j, 5+6j, 7-8j, -9+10j)
        expected_data = (sqrt(5), sqrt(25), sqrt(61), sqrt(113), sqrt(181))
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_mag()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data(), 5)

    def test_complex_to_mag_squared(self):
        src_data = (1+2j, 3-4j, 5+6j, 7-8j, -9+10j)
        expected_data = (5.0, 25.0, 61.0, 113.0, 181.0)
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_mag_squared()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_complex_to_arg(self):
        # arg() == atan2(imag, real); tolerance is loose (2 decimal places)
        # because the block may use a fast approximation.
        src_data = (1+2j, 3-4j, 5+6j, 7-8j, -9+10j)
        expected_data = (atan2(2, 1), atan2(-4,3), atan2(6, 5), atan2(-8, 7), atan2(10,-9))
        src = blocks.vector_source_c(src_data)
        op = blocks.complex_to_arg()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data(), 2)

    # --- float conversions --------------------------------------------

    def test_float_to_char_identity(self):
        src_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        expected_data = (1, 2, 3, 4, 5)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_char()
        dst = blocks.vector_sink_b()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_float_to_char_scale(self):
        # float->char multiplies by the scale factor before truncation.
        src_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        expected_data = (5, 10, 15, 20, 25)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_char(1, 5)
        dst = blocks.vector_sink_b()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_float_to_complex_1(self):
        # One input: becomes the real part, imaginary part is zero.
        src_data = (1.0, 3.0, 5.0, 7.0, 9.0)
        expected_data = (1+0j, 3+0j, 5+0j, 7+0j, 9+0j)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_complex()
        dst = blocks.vector_sink_c()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_float_to_complex_2(self):
        # Two inputs: port 0 is the real part, port 1 the imaginary part.
        src1_data = (1.0, 3.0, 5.0, 7.0, 9.0)
        src2_data = (2.0, 4.0, 6.0, 8.0, 10.0)
        expected_data = (1+2j, 3+4j, 5+6j, 7+8j, 9+10j)
        src1 = blocks.vector_source_f(src1_data)
        src2 = blocks.vector_source_f(src2_data)
        op = blocks.float_to_complex()
        dst = blocks.vector_sink_c()
        self.tb.connect(src1, (op, 0))
        self.tb.connect(src2, (op, 1))
        self.tb.connect(op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_float_to_int_identity(self):
        src_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        expected_data = (1, 2, 3, 4, 5)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_int()
        dst = blocks.vector_sink_i()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_float_to_int_scale(self):
        src_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        expected_data = (5, 10, 15, 20, 25)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_int(1, 5)
        dst = blocks.vector_sink_i()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_float_to_short_identity(self):
        src_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        expected_data = (1, 2, 3, 4, 5)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_short()
        dst = blocks.vector_sink_s()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_float_to_short_scale(self):
        src_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        expected_data = (5, 10, 15, 20, 25)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_short(1, 5)
        dst = blocks.vector_sink_s()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_float_to_uchar(self):
        # Values are clamped into the unsigned char range [0, 255].
        src_data = (1.0, -2.0, 3.0, -4.0, 256.0)
        expected_data = (1, 0, 3, 0, 255)
        src = blocks.vector_source_f(src_data)
        op = blocks.float_to_uchar()
        dst = blocks.vector_sink_b()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    # --- int / short / uchar conversions -------------------------------

    def test_int_to_float_identity(self):
        src_data = (1, 2, 3, 4, 5)
        expected_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        src = blocks.vector_source_i(src_data)
        op = blocks.int_to_float()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_int_to_float_scale(self):
        # int->float divides by the scale factor.
        src_data = (1, 2, 3, 4, 5)
        expected_data = (0.2, 0.4, 0.6, 0.8, 1.0)
        src = blocks.vector_source_i(src_data)
        op = blocks.int_to_float(1, 5)
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertFloatTuplesAlmostEqual(expected_data, dst.data())

    def test_interleaved_short_to_complex(self):
        src_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
        expected_data = (1+2j, 3+4j, 5+6j, 7+8j, 9+10j)
        src = blocks.vector_source_s(src_data)
        op = blocks.interleaved_short_to_complex()
        dst = blocks.vector_sink_c()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_short_to_char(self):
        # short->char keeps the high byte (divide by 256).
        src_data = (256, 512, 768, 1024, 1280)
        expected_data = (1, 2, 3, 4, 5)
        src = blocks.vector_source_s(src_data)
        op = blocks.short_to_char()
        dst = blocks.vector_sink_b()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_short_to_float_identity(self):
        src_data = (1, 2, 3, 4, 5)
        expected_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        src = blocks.vector_source_s(src_data)
        op = blocks.short_to_float()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_short_to_float_scale(self):
        src_data = (5, 10, 15, 20, 25)
        expected_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        src = blocks.vector_source_s(src_data)
        op = blocks.short_to_float(1, 5)
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())

    def test_uchar_to_float(self):
        src_data = (1, 2, 3, 4, 5)
        expected_data = (1.0, 2.0, 3.0, 4.0, 5.0)
        src = blocks.vector_source_b(src_data)
        op = blocks.uchar_to_float()
        dst = blocks.vector_sink_f()
        self.tb.connect(src, op, dst)
        self.tb.run()
        self.assertEqual(expected_data, dst.data())


if __name__ == '__main__':
    gr_unittest.run(test_type_conversions, "test_type_conversions.xml")
|
mstana/tuned | refs/heads/master | tuned/plugins/plugin_mounts.py | 4 | import base
from decorators import *
from subprocess import Popen,PIPE
import tuned.logs
import tuned.utils.commands
import glob
log = tuned.logs.get()
class MountsPlugin(base.Plugin):
"""
Plugin for tuning options of mount-points.
"""
    @classmethod
    def _generate_mountpoint_topology(cls):
        """
        Gets the information about disks, partitions and mountpoints. Stores information about used filesystem and
        creates a list of all underlying devices (in case of LVM) for each mountpoint.
        """
        mountpoint_topology = {}
        current_disk = None
        # lsblk -r: raw output, -n: no header; columns come back in the
        # order requested: TYPE RM KNAME FSTYPE MOUNTPOINT.
        stdout, stderr = Popen(["/usr/bin/lsblk", "-rno", "TYPE,RM,KNAME,FSTYPE,MOUNTPOINT"], stdout=PIPE, stderr=PIPE, close_fds=True).communicate()
        for columns in map(lambda line: line.split(), stdout.splitlines()):
            device_type, device_removable, device_name = columns[:3]
            # FSTYPE and MOUNTPOINT may be missing for e.g. whole disks.
            filesystem = columns[3] if len(columns) > 3 else None
            mountpoint = columns[4] if len(columns) > 4 else None
            if device_type == "disk":
                # Remember the disk that subsequent partitions/LVs sit on.
                current_disk = device_name
                continue
            # skip removable, skip nonpartitions
            if device_removable == "1" or device_type not in ["part", "lvm"]:
                continue
            if mountpoint is None or mountpoint == "[SWAP]":
                continue
            mountpoint_topology.setdefault(mountpoint, {"disks": set(), "device_name": device_name, "filesystem": filesystem})
            mountpoint_topology[mountpoint]["disks"].add(current_disk)
        cls._mountpoint_topology = mountpoint_topology
def _init_devices(self):
self._generate_mountpoint_topology()
self._devices = set(self._mountpoint_topology.keys())
self._assigned_devices = set()
self._free_devices = self._devices.copy()
def _get_config_options(self):
return {
"disable_barriers": None,
}
def _instance_init(self, instance):
instance._has_dynamic_tuning = False
instance._has_static_tuning = True
def _instance_cleanup(self, instance):
pass
def _get_device_cache_type(self, device):
"""
Get device cache type. This will work only for devices on SCSI kernel subsystem.
"""
source_filenames = glob.glob("/sys/block/%s/device/scsi_disk/*/cache_type" % device)
for source_filename in source_filenames:
return tuned.utils.commands.read_file(source_filename).strip()
return None
def _mountpoint_has_writeback_cache(self, mountpoint):
"""
Checks if the device has 'write back' cache. If the cache type cannot be determined, asume some other cache.
"""
for device in self._mountpoint_topology[mountpoint]["disks"]:
if self._get_device_cache_type(device) == "write back":
return True
return False
def _mountpoint_has_barriers(self, mountpoint):
"""
Checks if a given mountpoint is mounted with barriers enabled or disabled.
"""
with open("/proc/mounts") as mounts_file:
for line in mounts_file:
# device mountpoint filesystem options dump check
columns = line.split()
if columns[0][0] != "/":
continue
if columns[1] == mountpoint:
option_list = columns[3]
break
else:
return None
options = option_list.split(",")
for option in options:
(name, sep, value) = option.partition("=")
# nobarrier barrier=0
if name == "nobarrier" or (name == "barrier" and value == "0"):
return False
# barrier barrier=1
elif name == "barrier":
return True
else:
# default
return True
def _remount_partition(self, partition, options):
"""
Remounts partition.
"""
remount_command = ["/usr/bin/mount", partition, "-o", "remount,%s" % options]
tuned.utils.commands.execute(remount_command)
@command_custom("disable_barriers", per_device=True)
def _disable_barriers(self, start, value, mountpoint):
storage_key = self._storage_key("disable_barriers", mountpoint)
force = str(value).lower() == "force"
value = force or self._option_bool(value)
if start:
if not value:
return
reject_reason = None
if not self._mountpoint_topology[mountpoint]["filesystem"].startswith("ext"):
reject_reason = "filesystem not supported"
elif not force and self._mountpoint_has_writeback_cache(mountpoint):
reject_reason = "device uses write back cache"
else:
original_value = self._mountpoint_has_barriers(mountpoint)
if original_value is None:
reject_reason = "unknown current setting"
elif original_value == False:
reject_reason = "barriers already disabled"
if reject_reason is not None:
log.info("not disabling barriers on '%s' (%s)" % (mountpoint, reject_reason))
return
self._storage.set(storage_key, original_value)
log.info("disabling barriers on '%s'" % mountpoint)
self._remount_partition(mountpoint, "barrier=0")
else:
original_value = self._storage.get(storage_key)
if original_value is None:
return
log.info("enabling barriers on '%s'" % mountpoint)
self._remount_partition(mountpoint, "barrier=1")
self._storage.unset(storage_key)
|
Belxjander/Kirito | refs/heads/master | Python-3.5.0-main/Lib/test/test_httplib.py | 7 | import errno
from http import client
import io
import itertools
import os
import array
import socket
import unittest
TestCase = unittest.TestCase
from test import support
# Directory containing this test file; certificate fixtures live beside it.
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
# Self-signed cert file for self-signed.pythontest.net
CERT_selfsigned_pythontestdotnet = os.path.join(here, 'selfsigned_pythontestdotnet.pem')

# constants for testing chunked encoding
# Response header plus four chunks; sizes are hex (a=10, 3, 8, 22=34 bytes),
# deliberately left without the terminating 0-chunk so tests can append one.
chunked_start = (
    'HTTP/1.1 200 OK\r\n'
    'Transfer-Encoding: chunked\r\n\r\n'
    'a\r\n'
    'hello worl\r\n'
    '3\r\n'
    'd! \r\n'
    '8\r\n'
    'and now \r\n'
    '22\r\n'
    'for something completely different\r\n'
)
# The payload that decoding chunked_start must yield.
chunked_expected = b'hello world! and now for something completely different'
chunk_extension = ";foo=bar"
# Terminating zero-size chunk, with and without a chunk extension.
last_chunk = "0\r\n"
last_chunk_extended = "0" + chunk_extension + "\r\n"
trailers = "X-Dummy: foo\r\nX-Dumm2: bar\r\n"
chunked_end = "\r\n"

HOST = support.HOST
class FakeSocket:
    """Socket stand-in: records everything sent, serves a canned response.

    ``data`` accumulates bytes passed to sendall(); ``makefile`` hands back
    the canned ``text`` wrapped in ``fileclass`` so tests can inspect how
    much of it was consumed.
    """

    def __init__(self, text, fileclass=io.BytesIO, host=None, port=None):
        if isinstance(text, str):
            text = text.encode("ascii")
        self.text = text
        self.fileclass = fileclass
        self.host = host
        self.port = port
        # Bookkeeping the tests assert on.
        self.data = b''
        self.sendall_calls = 0
        self.file_closed = False

    def sendall(self, data):
        self.sendall_calls += 1
        self.data += data

    def makefile(self, mode, bufsize=None):
        if mode not in ('r', 'rb'):
            raise client.UnimplementedFileMode()
        # Keep the file around so tests can check how much was read from it,
        # and nerf close() so the buffer stays inspectable.
        self.file = self.fileclass(self.text)
        self.file.close = self.file_close
        return self.file

    def file_close(self):
        self.file_closed = True

    def close(self):
        pass

    def setsockopt(self, level, optname, value):
        pass
class EPipeSocket(FakeSocket):
    """FakeSocket that simulates a broken pipe on a trigger byte-string."""

    def __init__(self, text, pipe_trigger):
        # When sendall() is called with pipe_trigger, raise EPIPE.
        FakeSocket.__init__(self, text)
        self.pipe_trigger = pipe_trigger

    def sendall(self, data):
        if self.pipe_trigger not in data:
            self.data += data
            return
        raise OSError(errno.EPIPE, "gotcha")

    def close(self):
        pass
class NoEOFBytesIO(io.BytesIO):
    """Like BytesIO, but raises AssertionError on EOF.

    This is used below to test that http.client doesn't try to read
    more from the underlying file than it should.
    """

    def read(self, n=-1):
        chunk = super().read(n)
        if not chunk:
            raise AssertionError('caller tried to read past EOF')
        return chunk

    def readline(self, length=None):
        line = super().readline(length)
        if not line:
            raise AssertionError('caller tried to read past EOF')
        return line
class FakeSocketHTTPConnection(client.HTTPConnection):
    """HTTPConnection subclass using FakeSocket; counts connect() calls"""

    def __init__(self, *args):
        # args are forwarded verbatim to FakeSocket on each connect.
        self.connections = 0
        super().__init__('example.com')
        self.fake_socket_args = args
        # NOTE: must come after super().__init__(), which (in http.client)
        # binds _create_connection itself — this rebinding overrides it so
        # connect() builds a FakeSocket instead of a real socket.
        self._create_connection = self.create_connection

    def connect(self):
        """Count the number of times connect() is invoked"""
        self.connections += 1
        return super().connect()

    def create_connection(self, *pos, **kw):
        # Ignore the (address, timeout, ...) arguments the base class passes;
        # always hand back a canned FakeSocket.
        return FakeSocket(*self.fake_socket_args)
class HeaderTests(TestCase):
def test_auto_headers(self):
# Some headers are added automatically, but should not be added by
# .request() if they are explicitly set.
class HeaderCountingBuffer(list):
def __init__(self):
self.count = {}
def append(self, item):
kv = item.split(b':')
if len(kv) > 1:
# item is a 'Key: Value' header string
lcKey = kv[0].decode('ascii').lower()
self.count.setdefault(lcKey, 0)
self.count[lcKey] += 1
list.append(self, item)
for explicit_header in True, False:
for header in 'Content-length', 'Host', 'Accept-encoding':
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket('blahblahblah')
conn._buffer = HeaderCountingBuffer()
body = 'spamspamspam'
headers = {}
if explicit_header:
headers[header] = str(len(body))
conn.request('POST', '/', body, headers)
self.assertEqual(conn._buffer.count[header.lower()], 1)
def test_content_length_0(self):
class ContentLengthChecker(list):
def __init__(self):
list.__init__(self)
self.content_length = None
def append(self, item):
kv = item.split(b':', 1)
if len(kv) > 1 and kv[0].lower() == b'content-length':
self.content_length = kv[1].strip()
list.append(self, item)
# Here, we're testing that methods expecting a body get a
# content-length set to zero if the body is empty (either None or '')
bodies = (None, '')
methods_with_body = ('PUT', 'POST', 'PATCH')
for method, body in itertools.product(methods_with_body, bodies):
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', body)
self.assertEqual(
conn._buffer.content_length, b'0',
'Header Content-Length incorrect on {}'.format(method)
)
# For these methods, we make sure that content-length is not set when
# the body is None because it might cause unexpected behaviour on the
# server.
methods_without_body = (
'GET', 'CONNECT', 'DELETE', 'HEAD', 'OPTIONS', 'TRACE',
)
for method in methods_without_body:
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', None)
self.assertEqual(
conn._buffer.content_length, None,
'Header Content-Length set for empty body on {}'.format(method)
)
# If the body is set to '', that's considered to be "present but
# empty" rather than "missing", so content length would be set, even
# for methods that don't expect a body.
for method in methods_without_body:
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', '')
self.assertEqual(
conn._buffer.content_length, b'0',
'Header Content-Length incorrect on {}'.format(method)
)
# If the body is set, make sure Content-Length is set.
for method in itertools.chain(methods_without_body, methods_with_body):
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', ' ')
self.assertEqual(
conn._buffer.content_length, b'1',
'Header Content-Length incorrect on {}'.format(method)
)
def test_putheader(self):
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn.putrequest('GET','/')
conn.putheader('Content-length', 42)
self.assertIn(b'Content-length: 42', conn._buffer)
conn.putheader('Foo', ' bar ')
self.assertIn(b'Foo: bar ', conn._buffer)
conn.putheader('Bar', '\tbaz\t')
self.assertIn(b'Bar: \tbaz\t', conn._buffer)
conn.putheader('Authorization', 'Bearer mytoken')
self.assertIn(b'Authorization: Bearer mytoken', conn._buffer)
conn.putheader('IterHeader', 'IterA', 'IterB')
self.assertIn(b'IterHeader: IterA\r\n\tIterB', conn._buffer)
conn.putheader('LatinHeader', b'\xFF')
self.assertIn(b'LatinHeader: \xFF', conn._buffer)
conn.putheader('Utf8Header', b'\xc3\x80')
self.assertIn(b'Utf8Header: \xc3\x80', conn._buffer)
conn.putheader('C1-Control', b'next\x85line')
self.assertIn(b'C1-Control: next\x85line', conn._buffer)
conn.putheader('Embedded-Fold-Space', 'is\r\n allowed')
self.assertIn(b'Embedded-Fold-Space: is\r\n allowed', conn._buffer)
conn.putheader('Embedded-Fold-Tab', 'is\r\n\tallowed')
self.assertIn(b'Embedded-Fold-Tab: is\r\n\tallowed', conn._buffer)
conn.putheader('Key Space', 'value')
self.assertIn(b'Key Space: value', conn._buffer)
conn.putheader('KeySpace ', 'value')
self.assertIn(b'KeySpace : value', conn._buffer)
conn.putheader(b'Nonbreak\xa0Space', 'value')
self.assertIn(b'Nonbreak\xa0Space: value', conn._buffer)
conn.putheader(b'\xa0NonbreakSpace', 'value')
self.assertIn(b'\xa0NonbreakSpace: value', conn._buffer)
def test_ipv6host_header(self):
# Default host header on IPv6 transaction should wrapped by [] if
# its actual IPv6 address
expected = b'GET /foo HTTP/1.1\r\nHost: [2001::]:81\r\n' \
b'Accept-Encoding: identity\r\n\r\n'
conn = client.HTTPConnection('[2001::]:81')
sock = FakeSocket('')
conn.sock = sock
conn.request('GET', '/foo')
self.assertTrue(sock.data.startswith(expected))
expected = b'GET /foo HTTP/1.1\r\nHost: [2001:102A::]\r\n' \
b'Accept-Encoding: identity\r\n\r\n'
conn = client.HTTPConnection('[2001:102A::]')
sock = FakeSocket('')
conn.sock = sock
conn.request('GET', '/foo')
self.assertTrue(sock.data.startswith(expected))
def test_malformed_headers_coped_with(self):
# Issue 19996
body = "HTTP/1.1 200 OK\r\nFirst: val\r\n: nval\r\nSecond: val\r\n\r\n"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.getheader('First'), 'val')
self.assertEqual(resp.getheader('Second'), 'val')
def test_invalid_headers(self):
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket('')
conn.putrequest('GET', '/')
# http://tools.ietf.org/html/rfc7230#section-3.2.4, whitespace is no
# longer allowed in header names
cases = (
(b'Invalid\r\nName', b'ValidValue'),
(b'Invalid\rName', b'ValidValue'),
(b'Invalid\nName', b'ValidValue'),
(b'\r\nInvalidName', b'ValidValue'),
(b'\rInvalidName', b'ValidValue'),
(b'\nInvalidName', b'ValidValue'),
(b' InvalidName', b'ValidValue'),
(b'\tInvalidName', b'ValidValue'),
(b'Invalid:Name', b'ValidValue'),
(b':InvalidName', b'ValidValue'),
(b'ValidName', b'Invalid\r\nValue'),
(b'ValidName', b'Invalid\rValue'),
(b'ValidName', b'Invalid\nValue'),
(b'ValidName', b'InvalidValue\r\n'),
(b'ValidName', b'InvalidValue\r'),
(b'ValidName', b'InvalidValue\n'),
)
for name, value in cases:
with self.subTest((name, value)):
with self.assertRaisesRegex(ValueError, 'Invalid header'):
conn.putheader(name, value)
class BasicTest(TestCase):
def test_status_lines(self):
# Test HTTP status lines
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(0), b'') # Issue #20007
self.assertFalse(resp.isclosed())
self.assertFalse(resp.closed)
self.assertEqual(resp.read(), b"Text")
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
body = "HTTP/1.1 400.100 Not Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
self.assertRaises(client.BadStatusLine, resp.begin)
def test_bad_status_repr(self):
exc = client.BadStatusLine('')
self.assertEqual(repr(exc), '''BadStatusLine("\'\'",)''')
def test_partial_reads(self):
# if we have a length, the system knows when to close itself
# same behaviour than when we read the whole thing with read()
body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), b'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), b'xt')
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
def test_partial_readintos(self):
# if we have a length, the system knows when to close itself
# same behaviour than when we read the whole thing with read()
body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
b = bytearray(2)
n = resp.readinto(b)
self.assertEqual(n, 2)
self.assertEqual(bytes(b), b'Te')
self.assertFalse(resp.isclosed())
n = resp.readinto(b)
self.assertEqual(n, 2)
self.assertEqual(bytes(b), b'xt')
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
def test_partial_reads_no_content_length(self):
# when no length is present, the socket should be gracefully closed when
# all data was read
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), b'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), b'xt')
self.assertEqual(resp.read(1), b'')
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
def test_partial_readintos_no_content_length(self):
# when no length is present, the socket should be gracefully closed when
# all data was read
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
b = bytearray(2)
n = resp.readinto(b)
self.assertEqual(n, 2)
self.assertEqual(bytes(b), b'Te')
self.assertFalse(resp.isclosed())
n = resp.readinto(b)
self.assertEqual(n, 2)
self.assertEqual(bytes(b), b'xt')
n = resp.readinto(b)
self.assertEqual(n, 0)
self.assertTrue(resp.isclosed())
def test_partial_reads_incomplete_body(self):
# if the server shuts down the connection before the whole
# content-length is delivered, the socket is gracefully closed
body = "HTTP/1.1 200 Ok\r\nContent-Length: 10\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), b'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), b'xt')
self.assertEqual(resp.read(1), b'')
self.assertTrue(resp.isclosed())
def test_partial_readintos_incomplete_body(self):
# if the server shuts down the connection before the whole
# content-length is delivered, the socket is gracefully closed
body = "HTTP/1.1 200 Ok\r\nContent-Length: 10\r\n\r\nText"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
b = bytearray(2)
n = resp.readinto(b)
self.assertEqual(n, 2)
self.assertEqual(bytes(b), b'Te')
self.assertFalse(resp.isclosed())
n = resp.readinto(b)
self.assertEqual(n, 2)
self.assertEqual(bytes(b), b'xt')
n = resp.readinto(b)
self.assertEqual(n, 0)
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
def test_host_port(self):
# Check invalid host_port
for hp in ("www.python.org:abc", "user:password@www.python.org"):
self.assertRaises(client.InvalidURL, client.HTTPConnection, hp)
for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000",
"fe80::207:e9ff:fe9b", 8000),
("www.python.org:80", "www.python.org", 80),
("www.python.org:", "www.python.org", 80),
("www.python.org", "www.python.org", 80),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 80),
("[fe80::207:e9ff:fe9b]:", "fe80::207:e9ff:fe9b", 80)):
c = client.HTTPConnection(hp)
self.assertEqual(h, c.host)
self.assertEqual(p, c.port)
def test_response_headers(self):
# test response with multiple message headers with the same field name.
text = ('HTTP/1.1 200 OK\r\n'
'Set-Cookie: Customer="WILE_E_COYOTE"; '
'Version="1"; Path="/acme"\r\n'
'Set-Cookie: Part_Number="Rocket_Launcher_0001"; Version="1";'
' Path="/acme"\r\n'
'\r\n'
'No body\r\n')
hdr = ('Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"'
', '
'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"')
s = FakeSocket(text)
r = client.HTTPResponse(s)
r.begin()
cookies = r.getheader("Set-Cookie")
self.assertEqual(cookies, hdr)
def test_read_head(self):
# Test that the library doesn't attempt to read any data
# from a HEAD request. (Tickles SF bug #622042.)
sock = FakeSocket(
'HTTP/1.1 200 OK\r\n'
'Content-Length: 14432\r\n'
'\r\n',
NoEOFBytesIO)
resp = client.HTTPResponse(sock, method="HEAD")
resp.begin()
if resp.read():
self.fail("Did not expect response from HEAD request")
def test_readinto_head(self):
# Test that the library doesn't attempt to read any data
# from a HEAD request. (Tickles SF bug #622042.)
sock = FakeSocket(
'HTTP/1.1 200 OK\r\n'
'Content-Length: 14432\r\n'
'\r\n',
NoEOFBytesIO)
resp = client.HTTPResponse(sock, method="HEAD")
resp.begin()
b = bytearray(5)
if resp.readinto(b) != 0:
self.fail("Did not expect response from HEAD request")
self.assertEqual(bytes(b), b'\x00'*5)
def test_too_many_headers(self):
headers = '\r\n'.join('Header%d: foo' % i
for i in range(client._MAXHEADERS + 1)) + '\r\n'
text = ('HTTP/1.1 200 OK\r\n' + headers)
s = FakeSocket(text)
r = client.HTTPResponse(s)
self.assertRaisesRegex(client.HTTPException,
r"got more than \d+ headers", r.begin)
def test_send_file(self):
expected = (b'GET /foo HTTP/1.1\r\nHost: example.com\r\n'
b'Accept-Encoding: identity\r\nContent-Length:')
with open(__file__, 'rb') as body:
conn = client.HTTPConnection('example.com')
sock = FakeSocket(body)
conn.sock = sock
conn.request('GET', '/foo', body)
self.assertTrue(sock.data.startswith(expected), '%r != %r' %
(sock.data[:len(expected)], expected))
def test_send(self):
expected = b'this is a test this is only a test'
conn = client.HTTPConnection('example.com')
sock = FakeSocket(None)
conn.sock = sock
conn.send(expected)
self.assertEqual(expected, sock.data)
sock.data = b''
conn.send(array.array('b', expected))
self.assertEqual(expected, sock.data)
sock.data = b''
conn.send(io.BytesIO(expected))
self.assertEqual(expected, sock.data)
def test_send_updating_file(self):
def data():
yield 'data'
yield None
yield 'data_two'
class UpdatingFile():
mode = 'r'
d = data()
def read(self, blocksize=-1):
return self.d.__next__()
expected = b'data'
conn = client.HTTPConnection('example.com')
sock = FakeSocket("")
conn.sock = sock
conn.send(UpdatingFile())
self.assertEqual(sock.data, expected)
def test_send_iter(self):
expected = b'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \
b'Accept-Encoding: identity\r\nContent-Length: 11\r\n' \
b'\r\nonetwothree'
def body():
yield b"one"
yield b"two"
yield b"three"
conn = client.HTTPConnection('example.com')
sock = FakeSocket("")
conn.sock = sock
conn.request('GET', '/foo', body(), {'Content-Length': '11'})
self.assertEqual(sock.data, expected)
def test_send_type_error(self):
# See: Issue #12676
conn = client.HTTPConnection('example.com')
conn.sock = FakeSocket('')
with self.assertRaises(TypeError):
conn.request('POST', 'test', conn)
def test_chunked(self):
expected = chunked_expected
sock = FakeSocket(chunked_start + last_chunk + chunked_end)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), expected)
resp.close()
# Various read sizes
for n in range(1, 12):
sock = FakeSocket(chunked_start + last_chunk + chunked_end)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(n) + resp.read(n) + resp.read(), expected)
resp.close()
for x in ('', 'foo\r\n'):
sock = FakeSocket(chunked_start + x)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except client.IncompleteRead as i:
self.assertEqual(i.partial, expected)
expected_message = 'IncompleteRead(%d bytes read)' % len(expected)
self.assertEqual(repr(i), expected_message)
self.assertEqual(str(i), expected_message)
else:
self.fail('IncompleteRead expected')
finally:
resp.close()
def test_readinto_chunked(self):
expected = chunked_expected
nexpected = len(expected)
b = bytearray(128)
sock = FakeSocket(chunked_start + last_chunk + chunked_end)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
n = resp.readinto(b)
self.assertEqual(b[:nexpected], expected)
self.assertEqual(n, nexpected)
resp.close()
# Various read sizes
for n in range(1, 12):
sock = FakeSocket(chunked_start + last_chunk + chunked_end)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
m = memoryview(b)
i = resp.readinto(m[0:n])
i += resp.readinto(m[i:n + i])
i += resp.readinto(m[i:])
self.assertEqual(b[:nexpected], expected)
self.assertEqual(i, nexpected)
resp.close()
for x in ('', 'foo\r\n'):
sock = FakeSocket(chunked_start + x)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
try:
n = resp.readinto(b)
except client.IncompleteRead as i:
self.assertEqual(i.partial, expected)
expected_message = 'IncompleteRead(%d bytes read)' % len(expected)
self.assertEqual(repr(i), expected_message)
self.assertEqual(str(i), expected_message)
else:
self.fail('IncompleteRead expected')
finally:
resp.close()
def test_chunked_head(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello world\r\n'
'1\r\n'
'd\r\n'
)
sock = FakeSocket(chunked_start + last_chunk + chunked_end)
resp = client.HTTPResponse(sock, method="HEAD")
resp.begin()
self.assertEqual(resp.read(), b'')
self.assertEqual(resp.status, 200)
self.assertEqual(resp.reason, 'OK')
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
def test_readinto_chunked_head(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello world\r\n'
'1\r\n'
'd\r\n'
)
sock = FakeSocket(chunked_start + last_chunk + chunked_end)
resp = client.HTTPResponse(sock, method="HEAD")
resp.begin()
b = bytearray(5)
n = resp.readinto(b)
self.assertEqual(n, 0)
self.assertEqual(bytes(b), b'\x00'*5)
self.assertEqual(resp.status, 200)
self.assertEqual(resp.reason, 'OK')
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
def test_negative_content_length(self):
sock = FakeSocket(
'HTTP/1.1 200 OK\r\nContent-Length: -1\r\n\r\nHello\r\n')
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), b'Hello\r\n')
self.assertTrue(resp.isclosed())
def test_incomplete_read(self):
sock = FakeSocket('HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\nHello\r\n')
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except client.IncompleteRead as i:
self.assertEqual(i.partial, b'Hello\r\n')
self.assertEqual(repr(i),
"IncompleteRead(7 bytes read, 3 more expected)")
self.assertEqual(str(i),
"IncompleteRead(7 bytes read, 3 more expected)")
self.assertTrue(resp.isclosed())
else:
self.fail('IncompleteRead expected')
def test_epipe(self):
sock = EPipeSocket(
"HTTP/1.0 401 Authorization Required\r\n"
"Content-type: text/html\r\n"
"WWW-Authenticate: Basic realm=\"example\"\r\n",
b"Content-Length")
conn = client.HTTPConnection("example.com")
conn.sock = sock
self.assertRaises(OSError,
lambda: conn.request("PUT", "/url", "body"))
resp = conn.getresponse()
self.assertEqual(401, resp.status)
self.assertEqual("Basic realm=\"example\"",
resp.getheader("www-authenticate"))
# Test lines overflowing the max line size (_MAXLINE in http.client)
def test_overflowing_status_line(self):
body = "HTTP/1.1 200 Ok" + "k" * 65536 + "\r\n"
resp = client.HTTPResponse(FakeSocket(body))
self.assertRaises((client.LineTooLong, client.BadStatusLine), resp.begin)
def test_overflowing_header_line(self):
body = (
'HTTP/1.1 200 OK\r\n'
'X-Foo: bar' + 'r' * 65536 + '\r\n\r\n'
)
resp = client.HTTPResponse(FakeSocket(body))
self.assertRaises(client.LineTooLong, resp.begin)
def test_overflowing_chunked_line(self):
body = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
+ '0' * 65536 + 'a\r\n'
'hello world\r\n'
'0\r\n'
'\r\n'
)
resp = client.HTTPResponse(FakeSocket(body))
resp.begin()
self.assertRaises(client.LineTooLong, resp.read)
def test_early_eof(self):
# Test httpresponse with no \r\n termination,
body = "HTTP/1.1 200 Ok"
sock = FakeSocket(body)
resp = client.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(), b'')
self.assertTrue(resp.isclosed())
self.assertFalse(resp.closed)
resp.close()
self.assertTrue(resp.closed)
def test_error_leak(self):
# Test that the socket is not leaked if getresponse() fails
conn = client.HTTPConnection('example.com')
response = None
class Response(client.HTTPResponse):
def __init__(self, *pos, **kw):
nonlocal response
response = self # Avoid garbage collector closing the socket
client.HTTPResponse.__init__(self, *pos, **kw)
conn.response_class = Response
conn.sock = FakeSocket('Invalid status line')
conn.request('GET', '/')
self.assertRaises(client.BadStatusLine, conn.getresponse)
self.assertTrue(response.closed)
self.assertTrue(conn.sock.file_closed)
def test_chunked_extension(self):
extra = '3;foo=bar\r\n' + 'abc\r\n'
expected = chunked_expected + b'abc'
sock = FakeSocket(chunked_start + extra + last_chunk_extended + chunked_end)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), expected)
resp.close()
def test_chunked_missing_end(self):
"""some servers may serve up a short chunked encoding stream"""
expected = chunked_expected
sock = FakeSocket(chunked_start + last_chunk) #no terminating crlf
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), expected)
resp.close()
def test_chunked_trailers(self):
"""See that trailers are read and ignored"""
expected = chunked_expected
sock = FakeSocket(chunked_start + last_chunk + trailers + chunked_end)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), expected)
# we should have reached the end of the file
self.assertEqual(sock.file.read(100), b"") #we read to the end
resp.close()
def test_chunked_sync(self):
"""Check that we don't read past the end of the chunked-encoding stream"""
expected = chunked_expected
extradata = "extradata"
sock = FakeSocket(chunked_start + last_chunk + trailers + chunked_end + extradata)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), expected)
# the file should now have our extradata ready to be read
self.assertEqual(sock.file.read(100), extradata.encode("ascii")) #we read to the end
resp.close()
def test_content_length_sync(self):
"""Check that we don't read past the end of the Content-Length stream"""
extradata = "extradata"
expected = b"Hello123\r\n"
sock = FakeSocket('HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\nHello123\r\n' + extradata)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), expected)
# the file should now have our extradata ready to be read
self.assertEqual(sock.file.read(100), extradata.encode("ascii")) #we read to the end
resp.close()
class ExtendedReadTest(TestCase):
"""
Test peek(), read1(), readline()
"""
lines = (
'HTTP/1.1 200 OK\r\n'
'\r\n'
'hello world!\n'
'and now \n'
'for something completely different\n'
'foo'
)
lines_expected = lines[lines.find('hello'):].encode("ascii")
lines_chunked = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello worl\r\n'
'3\r\n'
'd!\n\r\n'
'9\r\n'
'and now \n\r\n'
'23\r\n'
'for something completely different\n\r\n'
'3\r\n'
'foo\r\n'
'0\r\n' # terminating chunk
'\r\n' # end of trailers
)
def setUp(self):
sock = FakeSocket(self.lines)
resp = client.HTTPResponse(sock, method="GET")
resp.begin()
resp.fp = io.BufferedReader(resp.fp)
self.resp = resp
def test_peek(self):
resp = self.resp
# patch up the buffered peek so that it returns not too much stuff
oldpeek = resp.fp.peek
def mypeek(n=-1):
p = oldpeek(n)
if n >= 0:
return p[:n]
return p[:10]
resp.fp.peek = mypeek
all = []
while True:
# try a short peek
p = resp.peek(3)
if p:
self.assertGreater(len(p), 0)
# then unbounded peek
p2 = resp.peek()
self.assertGreaterEqual(len(p2), len(p))
self.assertTrue(p2.startswith(p))
next = resp.read(len(p2))
self.assertEqual(next, p2)
else:
next = resp.read()
self.assertFalse(next)
all.append(next)
if not next:
break
self.assertEqual(b"".join(all), self.lines_expected)
def test_readline(self):
resp = self.resp
self._verify_readline(self.resp.readline, self.lines_expected)
def _verify_readline(self, readline, expected):
all = []
while True:
# short readlines
line = readline(5)
if line and line != b"foo":
if len(line) < 5:
self.assertTrue(line.endswith(b"\n"))
all.append(line)
if not line:
break
self.assertEqual(b"".join(all), expected)
def test_read1(self):
resp = self.resp
def r():
res = resp.read1(4)
self.assertLessEqual(len(res), 4)
return res
readliner = Readliner(r)
self._verify_readline(readliner.readline, self.lines_expected)
def test_read1_unbounded(self):
resp = self.resp
all = []
while True:
data = resp.read1()
if not data:
break
all.append(data)
self.assertEqual(b"".join(all), self.lines_expected)
def test_read1_bounded(self):
resp = self.resp
all = []
while True:
data = resp.read1(10)
if not data:
break
self.assertLessEqual(len(data), 10)
all.append(data)
self.assertEqual(b"".join(all), self.lines_expected)
    def test_read1_0(self):
        # read1(0) must consume nothing and return the empty bytestring.
        self.assertEqual(self.resp.read1(0), b"")
    def test_peek_0(self):
        # peek(0) may legitimately return any number of buffered bytes (>= 0).
        p = self.resp.peek(0)
        self.assertLessEqual(0, len(p))
class ExtendedReadTestChunked(ExtendedReadTest):
    """
    Test peek(), read1(), readline() in chunked mode
    """
    # Same decoded payload as the parent class (lines_expected is inherited);
    # only the wire framing differs: Transfer-Encoding: chunked.
    lines = (
        'HTTP/1.1 200 OK\r\n'
        'Transfer-Encoding: chunked\r\n\r\n'
        'a\r\n'
        'hello worl\r\n'
        '3\r\n'
        'd!\n\r\n'
        '9\r\n'
        'and now \n\r\n'
        '23\r\n'
        'for something completely different\n\r\n'
        '3\r\n'
        'foo\r\n'
        '0\r\n' # terminating chunk
        '\r\n'  # end of trailers
    )
class Readliner:
    """
    a simple readline class that uses an arbitrary read function and buffering
    """
    def __init__(self, readfunc):
        # readfunc: zero-argument callable returning the next chunk of bytes,
        # b"" at EOF.
        self.readfunc = readfunc
        self.remainder = b""

    def readline(self, limit):
        """Return one line (up to and including b'\\n'), pulling chunks from
        readfunc as needed; leftover bytes are kept in self.remainder."""
        data = []
        datalen = 0
        read = self.remainder
        try:
            while True:
                idx = read.find(b'\n')
                if idx != -1:
                    break
                if datalen + len(read) >= limit:
                    idx = limit - datalen - 1
                # read more data
                data.append(read)
                read = self.readfunc()
                if not read:
                    idx = 0  # eof condition
                    break
            idx += 1
            data.append(read[:idx])
            self.remainder = read[idx:]
            return b"".join(data)
        except:
            # On any failure, stash what was consumed so nothing is lost.
            self.remainder = b"".join(data)
            raise
class OfflineTest(TestCase):
    """Checks of the module surface that need no sockets or network."""

    def test_all(self):
        # Documented objects defined in the module should be in __all__
        expected = {"responses"}  # White-list documented dict() object
        # HTTPMessage, parse_headers(), and the HTTP status code constants are
        # intentionally omitted for simplicity
        blacklist = {"HTTPMessage", "parse_headers"}
        for name in dir(client):
            if name in blacklist:
                continue
            module_object = getattr(client, name)
            if getattr(module_object, "__module__", None) == "http.client":
                expected.add(name)
        self.assertCountEqual(client.__all__, expected)

    def test_responses(self):
        self.assertEqual(client.responses[client.NOT_FOUND], "Not Found")

    def test_client_constants(self):
        # Make sure we don't break backward compatibility with 3.4
        expected = [
            'CONTINUE',
            'SWITCHING_PROTOCOLS',
            'PROCESSING',
            'OK',
            'CREATED',
            'ACCEPTED',
            'NON_AUTHORITATIVE_INFORMATION',
            'NO_CONTENT',
            'RESET_CONTENT',
            'PARTIAL_CONTENT',
            'MULTI_STATUS',
            'IM_USED',
            'MULTIPLE_CHOICES',
            'MOVED_PERMANENTLY',
            'FOUND',
            'SEE_OTHER',
            'NOT_MODIFIED',
            'USE_PROXY',
            'TEMPORARY_REDIRECT',
            'BAD_REQUEST',
            'UNAUTHORIZED',
            'PAYMENT_REQUIRED',
            'FORBIDDEN',
            'NOT_FOUND',
            'METHOD_NOT_ALLOWED',
            'NOT_ACCEPTABLE',
            'PROXY_AUTHENTICATION_REQUIRED',
            'REQUEST_TIMEOUT',
            'CONFLICT',
            'GONE',
            'LENGTH_REQUIRED',
            'PRECONDITION_FAILED',
            'REQUEST_ENTITY_TOO_LARGE',
            'REQUEST_URI_TOO_LONG',
            'UNSUPPORTED_MEDIA_TYPE',
            'REQUESTED_RANGE_NOT_SATISFIABLE',
            'EXPECTATION_FAILED',
            'UNPROCESSABLE_ENTITY',
            'LOCKED',
            'FAILED_DEPENDENCY',
            'UPGRADE_REQUIRED',
            'PRECONDITION_REQUIRED',
            'TOO_MANY_REQUESTS',
            'REQUEST_HEADER_FIELDS_TOO_LARGE',
            'INTERNAL_SERVER_ERROR',
            'NOT_IMPLEMENTED',
            'BAD_GATEWAY',
            'SERVICE_UNAVAILABLE',
            'GATEWAY_TIMEOUT',
            'HTTP_VERSION_NOT_SUPPORTED',
            'INSUFFICIENT_STORAGE',
            'NOT_EXTENDED',
            'NETWORK_AUTHENTICATION_REQUIRED',
        ]
        for const in expected:
            with self.subTest(constant=const):
                self.assertTrue(hasattr(client, const))
class SourceAddressTest(TestCase):
    """Verify the source_address argument is applied to the created socket."""

    def setUp(self):
        self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.port = support.bind_port(self.serv)
        self.source_port = support.find_unused_port()
        self.serv.listen()
        self.conn = None

    def tearDown(self):
        if self.conn:
            self.conn.close()
            self.conn = None
        self.serv.close()
        self.serv = None

    def testHTTPConnectionSourceAddress(self):
        self.conn = client.HTTPConnection(HOST, self.port,
                                          source_address=('', self.source_port))
        self.conn.connect()
        self.assertEqual(self.conn.sock.getsockname()[1], self.source_port)

    @unittest.skipIf(not hasattr(client, 'HTTPSConnection'),
                     'http.client.HTTPSConnection not defined')
    def testHTTPSConnectionSourceAddress(self):
        self.conn = client.HTTPSConnection(HOST, self.port,
                                           source_address=('', self.source_port))
        # We don't test anything here other the constructor not barfing as
        # this code doesn't deal with setting up an active running SSL server
        # for an ssl_wrapped connect() to actually return from.
class TimeoutTest(TestCase):
    """Verify the timeout argument propagates to the underlying socket."""

    PORT = None  # set in setUp; shared so test methods can reference it

    def setUp(self):
        self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        TimeoutTest.PORT = support.bind_port(self.serv)
        self.serv.listen()

    def tearDown(self):
        self.serv.close()
        self.serv = None

    def testTimeoutAttribute(self):
        # This will prove that the timeout gets through HTTPConnection
        # and into the socket.

        # default -- use global socket timeout
        self.assertIsNone(socket.getdefaulttimeout())
        socket.setdefaulttimeout(30)
        try:
            httpConn = client.HTTPConnection(HOST, TimeoutTest.PORT)
            httpConn.connect()
        finally:
            # Always restore the global default, even if connect() fails.
            socket.setdefaulttimeout(None)
        self.assertEqual(httpConn.sock.gettimeout(), 30)
        httpConn.close()

        # no timeout -- do not use global socket default
        self.assertIsNone(socket.getdefaulttimeout())
        socket.setdefaulttimeout(30)
        try:
            httpConn = client.HTTPConnection(HOST, TimeoutTest.PORT,
                                             timeout=None)
            httpConn.connect()
        finally:
            socket.setdefaulttimeout(None)
        self.assertEqual(httpConn.sock.gettimeout(), None)
        httpConn.close()

        # a value
        httpConn = client.HTTPConnection(HOST, TimeoutTest.PORT, timeout=30)
        httpConn.connect()
        self.assertEqual(httpConn.sock.gettimeout(), 30)
        httpConn.close()
class PersistenceTest(TestCase):
    """Connection reuse / reconnect behaviour across requests."""

    def test_reuse_reconnect(self):
        # Should reuse or reconnect depending on header from server
        tests = (
            ('1.0', '', False),
            ('1.0', 'Connection: keep-alive\r\n', True),
            ('1.1', '', True),
            ('1.1', 'Connection: close\r\n', False),
            # Header matching must be case-insensitive:
            ('1.0', 'Connection: keep-ALIVE\r\n', True),
            ('1.1', 'Connection: cloSE\r\n', False),
        )
        for version, header, reuse in tests:
            with self.subTest(version=version, header=header):
                msg = (
                    'HTTP/{} 200 OK\r\n'
                    '{}'
                    'Content-Length: 12\r\n'
                    '\r\n'
                    'Dummy body\r\n'
                ).format(version, header)
                conn = FakeSocketHTTPConnection(msg)
                self.assertIsNone(conn.sock)
                conn.request('GET', '/open-connection')
                with conn.getresponse() as response:
                    self.assertEqual(conn.sock is None, not reuse)
                    response.read()
                self.assertEqual(conn.sock is None, not reuse)
                self.assertEqual(conn.connections, 1)
                conn.request('GET', '/subsequent-request')
                self.assertEqual(conn.connections, 1 if reuse else 2)

    def test_disconnected(self):

        def make_reset_reader(text):
            """Return BufferedReader that raises ECONNRESET at EOF"""
            stream = io.BytesIO(text)
            def readinto(buffer):
                size = io.BytesIO.readinto(stream, buffer)
                if size == 0:
                    raise ConnectionResetError()
                return size
            stream.readinto = readinto
            return io.BufferedReader(stream)

        tests = (
            (io.BytesIO, client.RemoteDisconnected),
            (make_reset_reader, ConnectionResetError),
        )
        for stream_factory, exception in tests:
            with self.subTest(exception=exception):
                conn = FakeSocketHTTPConnection(b'', stream_factory)
                conn.request('GET', '/eof-response')
                self.assertRaises(exception, conn.getresponse)
                self.assertIsNone(conn.sock)
                # HTTPConnection.connect() should be automatically invoked
                conn.request('GET', '/reconnect')
                self.assertEqual(conn.connections, 2)

    def test_100_close(self):
        conn = FakeSocketHTTPConnection(
            b'HTTP/1.1 100 Continue\r\n'
            b'\r\n'
            # Missing final response
        )
        conn.request('GET', '/', headers={'Expect': '100-continue'})
        self.assertRaises(client.RemoteDisconnected, conn.getresponse)
        self.assertIsNone(conn.sock)
        conn.request('GET', '/reconnect')
        self.assertEqual(conn.connections, 2)
class HTTPSTest(TestCase):
    """HTTPS connection and certificate-validation behaviour.

    Tests prefixed ``test_networked`` require the 'network' resource; the
    ``test_local`` ones spin up a local SSL server from test.ssl_servers.
    """

    def setUp(self):
        if not hasattr(client, 'HTTPSConnection'):
            self.skipTest('ssl support required')

    def make_server(self, certfile):
        from test.ssl_servers import make_https_server
        return make_https_server(self, certfile=certfile)

    def test_attributes(self):
        # simple test to check it's storing the timeout
        h = client.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
        self.assertEqual(h.timeout, 30)

    def test_networked(self):
        # Default settings: requires a valid cert from a trusted CA
        import ssl
        support.requires('network')
        with support.transient_internet('self-signed.pythontest.net'):
            h = client.HTTPSConnection('self-signed.pythontest.net', 443)
            with self.assertRaises(ssl.SSLError) as exc_info:
                h.request('GET', '/')
            self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')

    def test_networked_noverification(self):
        # Switch off cert verification
        import ssl
        support.requires('network')
        with support.transient_internet('self-signed.pythontest.net'):
            context = ssl._create_unverified_context()
            h = client.HTTPSConnection('self-signed.pythontest.net', 443,
                                       context=context)
            h.request('GET', '/')
            resp = h.getresponse()
            h.close()
            self.assertIn('nginx', resp.getheader('server'))

    @support.system_must_validate_cert
    def test_networked_trusted_by_default_cert(self):
        # Default settings: requires a valid cert from a trusted CA
        support.requires('network')
        with support.transient_internet('www.python.org'):
            h = client.HTTPSConnection('www.python.org', 443)
            h.request('GET', '/')
            resp = h.getresponse()
            content_type = resp.getheader('content-type')
            h.close()
            self.assertIn('text/html', content_type)

    def test_networked_good_cert(self):
        # We feed the server's cert as a validating cert
        import ssl
        support.requires('network')
        with support.transient_internet('self-signed.pythontest.net'):
            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(CERT_selfsigned_pythontestdotnet)
            h = client.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
            h.request('GET', '/')
            resp = h.getresponse()
            server_string = resp.getheader('server')
            h.close()
            self.assertIn('nginx', server_string)

    def test_networked_bad_cert(self):
        # We feed a "CA" cert that is unrelated to the server's cert
        import ssl
        support.requires('network')
        with support.transient_internet('self-signed.pythontest.net'):
            context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(CERT_localhost)
            h = client.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
            with self.assertRaises(ssl.SSLError) as exc_info:
                h.request('GET', '/')
            self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')

    def test_local_unknown_cert(self):
        # The custom cert isn't known to the default trust bundle
        import ssl
        server = self.make_server(CERT_localhost)
        h = client.HTTPSConnection('localhost', server.port)
        with self.assertRaises(ssl.SSLError) as exc_info:
            h.request('GET', '/')
        self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')

    def test_local_good_hostname(self):
        # The (valid) cert validates the HTTP hostname
        import ssl
        server = self.make_server(CERT_localhost)
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        context.verify_mode = ssl.CERT_REQUIRED
        context.load_verify_locations(CERT_localhost)
        h = client.HTTPSConnection('localhost', server.port, context=context)
        h.request('GET', '/nonexistent')
        resp = h.getresponse()
        self.assertEqual(resp.status, 404)

    def test_local_bad_hostname(self):
        # The (valid) cert doesn't validate the HTTP hostname
        import ssl
        server = self.make_server(CERT_fakehostname)
        context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        context.verify_mode = ssl.CERT_REQUIRED
        context.check_hostname = True
        context.load_verify_locations(CERT_fakehostname)
        h = client.HTTPSConnection('localhost', server.port, context=context)
        with self.assertRaises(ssl.CertificateError):
            h.request('GET', '/')
        # Same with explicit check_hostname=True
        h = client.HTTPSConnection('localhost', server.port, context=context,
                                   check_hostname=True)
        with self.assertRaises(ssl.CertificateError):
            h.request('GET', '/')
        # With check_hostname=False, the mismatching is ignored
        context.check_hostname = False
        h = client.HTTPSConnection('localhost', server.port, context=context,
                                   check_hostname=False)
        h.request('GET', '/nonexistent')
        resp = h.getresponse()
        self.assertEqual(resp.status, 404)
        # The context's check_hostname setting is used if one isn't passed to
        # HTTPSConnection.
        context.check_hostname = False
        h = client.HTTPSConnection('localhost', server.port, context=context)
        h.request('GET', '/nonexistent')
        self.assertEqual(h.getresponse().status, 404)
        # Passing check_hostname to HTTPSConnection should override the
        # context's setting.
        h = client.HTTPSConnection('localhost', server.port, context=context,
                                   check_hostname=True)
        with self.assertRaises(ssl.CertificateError):
            h.request('GET', '/')

    @unittest.skipIf(not hasattr(client, 'HTTPSConnection'),
                     'http.client.HTTPSConnection not available')
    def test_host_port(self):
        # Check invalid host_port
        for hp in ("www.python.org:abc", "user:password@www.python.org"):
            self.assertRaises(client.InvalidURL, client.HTTPSConnection, hp)

        for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000",
                          "fe80::207:e9ff:fe9b", 8000),
                         ("www.python.org:443", "www.python.org", 443),
                         ("www.python.org:", "www.python.org", 443),
                         ("www.python.org", "www.python.org", 443),
                         ("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443),
                         ("[fe80::207:e9ff:fe9b]:", "fe80::207:e9ff:fe9b",
                          443)):
            c = client.HTTPSConnection(hp)
            self.assertEqual(h, c.host)
            self.assertEqual(p, c.port)
class RequestBodyTest(TestCase):
    """Test cases where a request includes a message body."""

    def setUp(self):
        self.conn = client.HTTPConnection('example.com')
        # Keep a handle on the fake socket so tests can inspect the bytes
        # that were "sent".  (Fix: the original assigned conn.sock a second
        # time on the next line — a redundant duplicate, removed.)
        self.conn.sock = self.sock = FakeSocket("")

    def get_headers_and_fp(self):
        """Parse the request written to the fake socket.

        Returns:
            (message, f): the parsed headers and a file object positioned at
            the start of the request body.
        """
        f = io.BytesIO(self.sock.data)
        f.readline()  # read the request line
        message = client.parse_headers(f)
        return message, f

    def test_manual_content_length(self):
        # Set an incorrect content-length so that we can verify that
        # it will not be over-ridden by the library.
        self.conn.request("PUT", "/url", "body",
                          {"Content-Length": "42"})
        message, f = self.get_headers_and_fp()
        self.assertEqual("42", message.get("content-length"))
        self.assertEqual(4, len(f.read()))

    def test_ascii_body(self):
        self.conn.request("PUT", "/url", "body")
        message, f = self.get_headers_and_fp()
        self.assertEqual("text/plain", message.get_content_type())
        self.assertIsNone(message.get_charset())
        self.assertEqual("4", message.get("content-length"))
        self.assertEqual(b'body', f.read())

    def test_latin1_body(self):
        self.conn.request("PUT", "/url", "body\xc1")
        message, f = self.get_headers_and_fp()
        self.assertEqual("text/plain", message.get_content_type())
        self.assertIsNone(message.get_charset())
        self.assertEqual("5", message.get("content-length"))
        self.assertEqual(b'body\xc1', f.read())

    def test_bytes_body(self):
        self.conn.request("PUT", "/url", b"body\xc1")
        message, f = self.get_headers_and_fp()
        self.assertEqual("text/plain", message.get_content_type())
        self.assertIsNone(message.get_charset())
        self.assertEqual("5", message.get("content-length"))
        self.assertEqual(b'body\xc1', f.read())

    def test_file_body(self):
        self.addCleanup(support.unlink, support.TESTFN)
        with open(support.TESTFN, "w") as f:
            f.write("body")
        with open(support.TESTFN) as f:
            self.conn.request("PUT", "/url", f)
        message, f = self.get_headers_and_fp()
        self.assertEqual("text/plain", message.get_content_type())
        self.assertIsNone(message.get_charset())
        self.assertEqual("4", message.get("content-length"))
        self.assertEqual(b'body', f.read())

    def test_binary_file_body(self):
        self.addCleanup(support.unlink, support.TESTFN)
        with open(support.TESTFN, "wb") as f:
            f.write(b"body\xc1")
        with open(support.TESTFN, "rb") as f:
            self.conn.request("PUT", "/url", f)
        message, f = self.get_headers_and_fp()
        self.assertEqual("text/plain", message.get_content_type())
        self.assertIsNone(message.get_charset())
        self.assertEqual("5", message.get("content-length"))
        self.assertEqual(b'body\xc1', f.read())
class HTTPResponseTest(TestCase):
    """getheader() semantics, including repeated headers and defaults."""

    def setUp(self):
        # Response carries My-Header twice; getheader must join the values.
        body = "HTTP/1.1 200 Ok\r\nMy-Header: first-value\r\nMy-Header: \
second-value\r\n\r\nText"
        sock = FakeSocket(body)
        self.resp = client.HTTPResponse(sock)
        self.resp.begin()

    def test_getting_header(self):
        header = self.resp.getheader('My-Header')
        self.assertEqual(header, 'first-value, second-value')

        header = self.resp.getheader('My-Header', 'some default')
        self.assertEqual(header, 'first-value, second-value')

    def test_getting_nonexistent_header_with_string_default(self):
        header = self.resp.getheader('No-Such-Header', 'default-value')
        self.assertEqual(header, 'default-value')

    def test_getting_nonexistent_header_with_iterable_default(self):
        # An iterable default is joined with ", ", same as repeated headers.
        header = self.resp.getheader('No-Such-Header', ['default', 'values'])
        self.assertEqual(header, 'default, values')

        header = self.resp.getheader('No-Such-Header', ('default', 'values'))
        self.assertEqual(header, 'default, values')

    def test_getting_nonexistent_header_without_default(self):
        header = self.resp.getheader('No-Such-Header')
        self.assertEqual(header, None)

    def test_getting_header_defaultint(self):
        # A non-iterable default is returned unchanged.
        header = self.resp.getheader('No-Such-Header',default=42)
        self.assertEqual(header, 42)
class TunnelTests(TestCase):
    """CONNECT tunnelling through a proxy (set_tunnel)."""

    def setUp(self):
        response_text = (
            'HTTP/1.0 200 OK\r\n\r\n'   # Reply to CONNECT
            'HTTP/1.1 200 OK\r\n'       # Reply to HEAD
            'Content-Length: 42\r\n\r\n'
        )
        self.host = 'proxy.com'
        self.conn = client.HTTPConnection(self.host)
        self.conn._create_connection = self._create_connection(response_text)

    def tearDown(self):
        self.conn.close()

    def _create_connection(self, response_text):
        # Replacement for HTTPConnection._create_connection that records the
        # (host, port) it was asked to reach on the returned fake socket.
        def create_connection(address, timeout=None, source_address=None):
            return FakeSocket(response_text, host=address[0], port=address[1])
        return create_connection

    def test_set_tunnel_host_port_headers(self):
        tunnel_host = 'destination.com'
        tunnel_port = 8888
        tunnel_headers = {'User-Agent': 'Mozilla/5.0 (compatible, MSIE 11)'}
        self.conn.set_tunnel(tunnel_host, port=tunnel_port,
                             headers=tunnel_headers)
        self.conn.request('HEAD', '/', '')
        self.assertEqual(self.conn.sock.host, self.host)
        self.assertEqual(self.conn.sock.port, client.HTTP_PORT)
        self.assertEqual(self.conn._tunnel_host, tunnel_host)
        self.assertEqual(self.conn._tunnel_port, tunnel_port)
        self.assertEqual(self.conn._tunnel_headers, tunnel_headers)

    def test_disallow_set_tunnel_after_connect(self):
        # Once connected, we shouldn't be able to tunnel anymore
        self.conn.connect()
        self.assertRaises(RuntimeError, self.conn.set_tunnel,
                          'destination.com')

    def test_connect_with_tunnel(self):
        self.conn.set_tunnel('destination.com')
        self.conn.request('HEAD', '/', '')
        self.assertEqual(self.conn.sock.host, self.host)
        self.assertEqual(self.conn.sock.port, client.HTTP_PORT)
        self.assertIn(b'CONNECT destination.com', self.conn.sock.data)
        # issue22095
        self.assertNotIn(b'Host: destination.com:None', self.conn.sock.data)
        self.assertIn(b'Host: destination.com', self.conn.sock.data)
        # This test should be removed when CONNECT gets the HTTP/1.1 blessing
        self.assertNotIn(b'Host: proxy.com', self.conn.sock.data)

    def test_connect_put_request(self):
        self.conn.set_tunnel('destination.com')
        self.conn.request('PUT', '/', '')
        self.assertEqual(self.conn.sock.host, self.host)
        self.assertEqual(self.conn.sock.port, client.HTTP_PORT)
        self.assertIn(b'CONNECT destination.com', self.conn.sock.data)
        self.assertIn(b'Host: destination.com', self.conn.sock.data)

    def test_tunnel_debuglog(self):
        expected_header = 'X-Dummy: 1'
        response_text = 'HTTP/1.0 200 OK\r\n{}\r\n\r\n'.format(expected_header)

        self.conn.set_debuglevel(1)
        self.conn._create_connection = self._create_connection(response_text)
        self.conn.set_tunnel('destination.com')

        with support.captured_stdout() as output:
            self.conn.request('PUT', '/', '')
        lines = output.getvalue().splitlines()
        self.assertIn('header: {}'.format(expected_header), lines)
@support.reap_threads
def test_main(verbose=None):
    """Run every test class in this module, reaping stray threads afterwards."""
    support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
                         PersistenceTest,
                         HTTPSTest, RequestBodyTest, SourceAddressTest,
                         HTTPResponseTest, ExtendedReadTest,
                         ExtendedReadTestChunked, TunnelTests)

# Run the full suite when executed directly.
if __name__ == '__main__':
    test_main()
|
was4444/chromium.src | refs/heads/nw15 | third_party/closure_compiler/processor.py | 45 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Process Chrome resources (HTML/CSS/JS) to handle <include> and <if> tags."""
from collections import defaultdict
import re
import os
class LineNumber(object):
  """A lightweight (file, line) pair, e.g. the location "file.js:32"."""

  def __init__(self, source_file, line_number):
    """
    Args:
      source_file: Path of the file (a string).
      line_number: 1-based line within |source_file| (int or numeric string).
    """
    # Normalize the line number first so a bad value fails fast.
    self.line_number = int(line_number)
    self.file = source_file
class FileCache(object):
  """An in-memory cache to speed up reading the same files over and over.

  Usage:
      FileCache.read(path_to_file)
  """
  # Maps absolute path -> file contents; defaultdict(str) makes a cache miss
  # look like the empty string.
  _cache = defaultdict(str)

  @classmethod
  def read(cls, source_file):
    """Read a file and return it as a string.

    Fixes over the original: the classmethod's first argument is named |cls|
    (it receives the class, not an instance), and the file handle is closed
    deterministically via a context manager instead of being leaked to the GC.

    Args:
        source_file: a file path (as a string) to read and return the contents.

    Returns:
        The contents of |source_file| (as a string).
    """
    abs_file = os.path.abspath(source_file)
    if not cls._cache[abs_file]:
      # NOTE: like the original, an empty file is re-read on every call.
      with open(abs_file, "r") as f:
        cls._cache[abs_file] = f.read()
    return cls._cache[abs_file]
class Processor(object):
  """Processes resource files, inlining the contents of <include> tags, removing
  <if> tags, and retaining original line info.

  For example

  1: /* blah.js */
  2: <if expr="is_win">
  3: <include src="win.js">
  4: </if>

  would be turned into:

  1: /* blah.js */
  2:
  3: /* win.js */
  4: alert('Ew; Windows.');
  5:
  """

  # Matches opening or closing <if> tags (both are simply stripped).
  _IF_TAGS_REG = "</?if[^>]*?>"
  # Captures the src attribute of an <include> tag.
  _INCLUDE_REG = "<include[^>]+src=['\"]([^>]*)['\"]>"

  def __init__(self, source_file):
    """
    Args:
        source_file: A file path to process (as a string).
    """
    self.included_files = set()
    self._index = 0
    # Each entry is a (origin file, origin line number, line text) tuple.
    self._lines = self._get_file(source_file)

    # Can't enumerate(self._lines) here because some lines are re-processed.
    while self._index < len(self._lines):
      current_line = self._lines[self._index]
      match = re.search(self._INCLUDE_REG, current_line[2])
      if match:
        # <include> paths are relative to the file the tag appears in.
        file_dir = os.path.dirname(current_line[0])
        file_name = os.path.abspath(os.path.join(file_dir, match.group(1)))
        if file_name not in self.included_files:
          self._include_file(file_name)
          continue  # Stay on the same line.
        else:
          # Found a duplicate <include>. Ignore and insert a blank line to
          # preserve line numbers.
          self._lines[self._index] = self._lines[self._index][:2] + ("",)
      self._index += 1

    # Strip <if>/</if> tags everywhere, keeping line structure intact.
    for i, line in enumerate(self._lines):
      self._lines[i] = line[:2] + (re.sub(self._IF_TAGS_REG, "", line[2]),)

    self.contents = "\n".join(l[2] for l in self._lines)

  # Returns a list of tuples in the format: (file, line number, line contents).
  def _get_file(self, source_file):
    lines = FileCache.read(source_file).splitlines()
    return [(source_file, lnum + 1, line) for lnum, line in enumerate(lines)]

  def _include_file(self, source_file):
    # Splice the included file's lines in place of the <include> line itself.
    self.included_files.add(source_file)
    f = self._get_file(source_file)
    self._lines = self._lines[:self._index] + f + self._lines[self._index + 1:]

  def get_file_from_line(self, line_number):
    """Get the original file and line number for an expanded file's line number.

    Args:
        line_number: A processed file's line number (as an integer or string).
    """
    line_number = int(line_number) - 1
    return LineNumber(self._lines[line_number][0], self._lines[line_number][1])
|
DebrahR/project2 | refs/heads/master | server/lib/werkzeug/debug/repr.py | 313 | # -*- coding: utf-8 -*-
"""
werkzeug.debug.repr
~~~~~~~~~~~~~~~~~~~
This module implements object representations for debugging purposes.
Unlike the default repr these reprs expose a lot more information and
produce HTML instead of ASCII.
Together with the CSS and JavaScript files of the debugger this gives
a colorful and more compact output.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
import sys
import re
import codecs
from traceback import format_exception_only
try:
from collections import deque
except ImportError: # pragma: no cover
deque = None
from werkzeug.utils import escape
from werkzeug._compat import iteritems, PY2, text_type, integer_types, \
string_types
missing = object()
_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
RegexType = type(_paragraph_re)
HELP_HTML = '''\
<div class=box>
<h3>%(title)s</h3>
<pre class=help>%(text)s</pre>
</div>\
'''
OBJECT_DUMP_HTML = '''\
<div class=box>
<h3>%(title)s</h3>
%(repr)s
<table>%(items)s</table>
</div>\
'''
def debug_repr(obj):
    """Creates a debug repr of an object as HTML unicode string.

    Convenience wrapper around a fresh :class:`DebugReprGenerator`.
    """
    return DebugReprGenerator().repr(obj)
def dump(obj=missing):
    """Print the object details to stdout._write (for the interactive
    console of the web debugger.

    Without an argument the caller's local variables are dumped; ``missing``
    distinguishes "no argument" from dumping ``None`` itself.
    """
    gen = DebugReprGenerator()
    if obj is missing:
        # sys._getframe(1) is the frame of the code that called dump().
        rv = gen.dump_locals(sys._getframe(1).f_locals)
    else:
        rv = gen.dump_object(obj)
    sys.stdout._write(rv)
class _Helper(object):
    """Displays an HTML version of the normal help, for the interactive
    debugger only because it requires a patched sys.stdout.
    """

    def __repr__(self):
        return 'Type help(object) for help about object.'

    def __call__(self, topic=None):
        if topic is None:
            sys.stdout._write('<span class=help>%s</span>' % repr(self))
            return
        import pydoc
        # pydoc writes to the patched stdout; reset() hands back its buffer.
        pydoc.help(topic)
        rv = sys.stdout.reset()
        if isinstance(rv, bytes):
            rv = rv.decode('utf-8', 'ignore')
        # First paragraph becomes the panel title, the rest its body.
        paragraphs = _paragraph_re.split(rv)
        if len(paragraphs) > 1:
            title = paragraphs[0]
            text = '\n\n'.join(paragraphs[1:])
        else: # pragma: no cover
            title = 'Help'
            text = paragraphs[0]
        sys.stdout._write(HELP_HTML % {'title': title, 'text': text})


# Singleton exposed to the debugger console as ``help``.
helper = _Helper()
def _add_subclass_info(inner, obj, base):
if isinstance(base, tuple):
for base in base:
if type(obj) is base:
return inner
elif type(obj) is base:
return inner
module = ''
if obj.__class__.__module__ not in ('__builtin__', 'exceptions'):
module = '<span class="module">%s.</span>' % obj.__class__.__module__
return '%s%s(%s)' % (module, obj.__class__.__name__, inner)
class DebugReprGenerator(object):
    """Produces rich HTML reprs; tracks a stack to detect self-referential
    containers (rendered as ``[...]`` instead of recursing forever)."""

    def __init__(self):
        self._stack = []

    # Class-body factory: builds a repr method for one sequence type.  It is
    # deleted from the class namespace below, so it never becomes a method.
    def _sequence_repr_maker(left, right, base=object(), limit=8):
        def proxy(self, obj, recursive):
            if recursive:
                return _add_subclass_info(left + '...' + right, obj, base)
            buf = [left]
            have_extended_section = False
            for idx, item in enumerate(obj):
                if idx:
                    buf.append(', ')
                # Items past `limit` are wrapped in a collapsible span.
                if idx == limit:
                    buf.append('<span class="extended">')
                    have_extended_section = True
                buf.append(self.repr(item))
            if have_extended_section:
                buf.append('</span>')
            buf.append(right)
            return _add_subclass_info(u''.join(buf), obj, base)
        return proxy

    list_repr = _sequence_repr_maker('[', ']', list)
    tuple_repr = _sequence_repr_maker('(', ')', tuple)
    set_repr = _sequence_repr_maker('set([', '])', set)
    frozenset_repr = _sequence_repr_maker('frozenset([', '])', frozenset)
    if deque is not None:
        deque_repr = _sequence_repr_maker('<span class="module">collections.'
                                          '</span>deque([', '])', deque)
    del _sequence_repr_maker

    def regex_repr(self, obj):
        """Render a compiled regex as ``re.compile(r'...')``."""
        pattern = repr(obj.pattern)
        if PY2:
            pattern = pattern.decode('string-escape', 'ignore')
        else:
            pattern = codecs.decode(pattern, 'unicode-escape', 'ignore')
        # Preserve the unicode marker as a ur'' / r'' prefix.
        if pattern[:1] == 'u':
            pattern = 'ur' + pattern[1:]
        else:
            pattern = 'r' + pattern
        return u're.compile(<span class="string regex">%s</span>)' % pattern

    def string_repr(self, obj, limit=70):
        """Render a string, collapsing anything beyond *limit* characters."""
        buf = ['<span class="string">']
        escaped = escape(obj)
        a = repr(escaped[:limit])
        b = repr(escaped[limit:])
        if isinstance(obj, text_type) and PY2:
            buf.append('u')
            a = a[1:]
            b = b[1:]
        if b != "''":
            # Stitch the two reprs together across the collapsible span.
            buf.extend((a[:-1], '<span class="extended">', b[1:], '</span>'))
        else:
            buf.append(a)
        buf.append('</span>')
        return _add_subclass_info(u''.join(buf), obj, (bytes, text_type))

    def dict_repr(self, d, recursive, limit=5):
        """Render a dict; pairs beyond *limit* go into a collapsible span."""
        if recursive:
            return _add_subclass_info(u'{...}', d, dict)
        buf = ['{']
        have_extended_section = False
        for idx, (key, value) in enumerate(iteritems(d)):
            if idx:
                buf.append(', ')
            if idx == limit - 1:
                buf.append('<span class="extended">')
                have_extended_section = True
            buf.append('<span class="pair"><span class="key">%s</span>: '
                       '<span class="value">%s</span></span>' %
                       (self.repr(key), self.repr(value)))
        if have_extended_section:
            buf.append('</span>')
        buf.append('}')
        return _add_subclass_info(u''.join(buf), d, dict)

    def object_repr(self, obj):
        """Fallback: the plain repr(), HTML-escaped."""
        r = repr(obj)
        if PY2:
            r = r.decode('utf-8', 'replace')
        return u'<span class="object">%s</span>' % escape(r)

    def dispatch_repr(self, obj, recursive):
        """Pick the specialised repr for *obj*'s type."""
        if obj is helper:
            return u'<span class="help">%r</span>' % helper
        if isinstance(obj, (integer_types, float, complex)):
            return u'<span class="number">%r</span>' % obj
        if isinstance(obj, string_types):
            return self.string_repr(obj)
        if isinstance(obj, RegexType):
            return self.regex_repr(obj)
        if isinstance(obj, list):
            return self.list_repr(obj, recursive)
        if isinstance(obj, tuple):
            return self.tuple_repr(obj, recursive)
        if isinstance(obj, set):
            return self.set_repr(obj, recursive)
        if isinstance(obj, frozenset):
            return self.frozenset_repr(obj, recursive)
        if isinstance(obj, dict):
            return self.dict_repr(obj, recursive)
        if deque is not None and isinstance(obj, deque):
            return self.deque_repr(obj, recursive)
        return self.object_repr(obj)

    def fallback_repr(self):
        """Used when an object's own repr() raises."""
        try:
            info = ''.join(format_exception_only(*sys.exc_info()[:2]))
        except Exception: # pragma: no cover
            info = '?'
        if PY2:
            info = info.decode('utf-8', 'ignore')
        return u'<span class="brokenrepr">&lt;broken repr (%s)&gt;' \
               u'</span>' % escape(info.strip())

    def repr(self, obj):
        """Entry point: repr *obj*, guarding against cycles and broken reprs."""
        recursive = False
        for item in self._stack:
            if item is obj:
                recursive = True
                break
        self._stack.append(obj)
        try:
            try:
                return self.dispatch_repr(obj, recursive)
            except Exception:
                return self.fallback_repr()
        finally:
            self._stack.pop()

    def dump_object(self, obj):
        """Render an object (or a plain dict) as an HTML attribute table."""
        repr = items = None
        if isinstance(obj, dict):
            title = 'Contents of'
            items = []
            for key, value in iteritems(obj):
                if not isinstance(key, string_types):
                    # Non-string keys: fall back to attribute dumping below.
                    items = None
                    break
                items.append((key, self.repr(value)))
        if items is None:
            items = []
            repr = self.repr(obj)
            for key in dir(obj):
                try:
                    items.append((key, self.repr(getattr(obj, key))))
                except Exception:
                    pass
            title = 'Details for'
        title += ' ' + object.__repr__(obj)[1:-1]
        return self.render_object_dump(items, title, repr)

    def dump_locals(self, d):
        """Render a frame's locals dict as an HTML table."""
        items = [(key, self.repr(value)) for key, value in d.items()]
        return self.render_object_dump(items, 'Local variables in frame')

    def render_object_dump(self, items, title, repr=None):
        """Fill OBJECT_DUMP_HTML with (name, html-repr) rows."""
        html_items = []
        for key, value in items:
            html_items.append('<tr><th>%s<td><pre class=repr>%s</pre>' %
                              (escape(key), value))
        if not html_items:
            html_items.append('<tr><td><em>Nothing</em>')
        return OBJECT_DUMP_HTML % {
            'title': escape(title),
            'repr': repr and '<pre class=repr>%s</pre>' % repr or '',
            'items': '\n'.join(html_items)
        }
|
kantel/processingpy | refs/heads/master | sketches/pandemie/proband.py | 1 | # coding=utf-8
from random import randint
class Proband():
    """One agent ("Proband") in a simple epidemic simulation (Processing.py).

    `zustand` (German: "state") encodes health:
        > 1 : infected; counts down by one per update()
          0 : healthy / susceptible
         -1 : immune (after the countdown reaches 1)
    Drawing and geometry rely on Processing.py globals: width, height,
    fill(), circle(), dist().
    """

    def __init__(self, x, y, zustand):
        self.x = x
        self.y = y
        self.sz = 5        # radius in pixels
        self.move = 10     # maximum random step per axis per update
        self.zustand = zustand

    def update(self):
        """Advance the infection countdown, take a clamped random-walk step
        and draw the agent."""
        if self.zustand > 1:
            self.zustand -= 1
        if self.zustand == 1:
            # Countdown finished: the agent becomes immune.
            self.zustand = -1
        self.x += randint(-self.move, self.move)
        self.y += randint(-self.move, self.move)
        # Clamp the position to the canvas.
        if self.x >= width - 2*self.sz:
            self.x = width - 2*self.sz
        if self.y >= height - 2*self.sz:
            self.y = height - 2*self.sz
        if self.x <= self.sz:
            self.x = self.sz
        if self.y <= self.sz:
            self.y = self.sz
        if self.zustand > 10: # Krank (sick: red)
            fill(200, 0, 0, 100)
        if self.zustand == 0: # Gesund (healthy: green)
            fill(0, 100, 0, 100)
        if self.zustand == -1:
            fill(200, 200, 0, 100) # Immun (immune: yellow)
        # NOTE(review): for zustand in 2..10 no fill() is set, so the circle
        # is drawn with whatever fill colour was active last — confirm this
        # is intended (e.g. as a "late infection" phase).
        circle(self.x, self.y, 2*self.sz)

    def collision(self, other):
        """Infect this agent (zustand = 30) on contact with an infected
        *other*; mutates self, returns None."""
        if self != other:
            distance = dist(self.x, self.y, other.x, other.y)
            if distance <= self.sz + other.sz:
                # return True
                if other.zustand > 1 and self.zustand == 0:
                    self.zustand = 30
|
MarcosCommunity/odoo | refs/heads/marcos-8.0 | addons/edi/models/res_currency.py | 437 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2012 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from edi import EDIMixin
from openerp import SUPERUSER_ID
# Fields included in the EDI export of a currency.  The ISO code is not a
# plain field copy: edi_export() adds it manually from the ``name`` field.
RES_CURRENCY_EDI_STRUCT = {
    #custom: 'code'
    'symbol': True,
    'rate': True,
}
class res_currency(osv.osv, EDIMixin):
    """EDI import/export support for ``res.currency``."""
    _inherit = "res.currency"

    def edi_export(self, cr, uid, records, edi_struct=None, context=None):
        """Export each currency as an EDI document, adding the ISO ``code`` key."""
        edi_struct = dict(edi_struct or RES_CURRENCY_EDI_STRUCT)
        edi_doc_list = []
        for currency in records:
            # Get EDI doc based on struct. The result will also contain all metadata fields and attachments.
            edi_doc = super(res_currency,self).edi_export(cr, uid, [currency], edi_struct, context)[0]
            edi_doc.update(code=currency.name)
            edi_doc_list.append(edi_doc)
        return edi_doc_list

    def edi_import(self, cr, uid, edi_document, context=None):
        """Import an EDI currency document and return the currency's id.

        Lookup order: existing external EDI id first, then unique ISO code;
        only when neither matches is a new currency (and its rate) created.
        """
        self._edi_requires_attributes(('code','symbol'), edi_document)
        external_id = edi_document['__id']
        existing_currency = self._edi_get_object_by_external_id(cr, uid, external_id, 'res_currency', context=context)
        if existing_currency:
            return existing_currency.id
        # find with unique ISO code
        existing_ids = self.search(cr, uid, [('name','=',edi_document['code'])])
        if existing_ids:
            return existing_ids[0]
        # nothing found, create a new one
        # NOTE(review): created as SUPERUSER_ID, presumably because regular
        # users lack create rights on currencies -- confirm against callers.
        currency_id = self.create(cr, SUPERUSER_ID, {'name': edi_document['code'],
                                                     'symbol': edi_document['symbol']}, context=context)
        rate = edi_document.pop('rate')
        if rate:
            self.pool.get('res.currency.rate').create(cr, SUPERUSER_ID, {'currency_id': currency_id,
                                                                         'rate': rate}, context=context)
        return currency_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
dqnykamp/sympy | refs/heads/master | sympy/physics/quantum/tests/test_constants.py | 130 | from sympy import Float
from sympy.physics.quantum.constants import hbar
def test_hbar():
    """hbar must carry the expected assumption flags and numeric value."""
    expected_flags = (
        ('is_commutative', True),
        ('is_real', True),
        ('is_positive', True),
        ('is_negative', False),
        ('is_irrational', True),
    )
    for flag, value in expected_flags:
        assert getattr(hbar, flag) is value
    assert hbar.evalf() == Float(1.05457162e-34)
|
signalfire/django-property | refs/heads/master | homes_for_sale/tests/test_models.py | 2 | from django.test import TestCase
from homes_for_sale.factories.sale_factory import SaleFactory
from homes_for_sale.factories.sale_feature_factory import SaleFeatureFactory
from homes_for_sale.factories.sale_picture_factory import SalePictureFactory
from homes_for_sale.factories.sale_media_factory import SaleMediaFactory
from homes_for_sale.factories.sale_contact_factory import SaleContactFactory
from homes_for_sale.factories.sale_note_factory import SaleNoteFactory
from homes_for_sale.factories.sale_favourite_factory import SaleFavouriteFactory
class SaleModelTestCase(TestCase):
    """str(Sale) should be the sale's title."""

    def test_string_representation(self):
        sale = SaleFactory()
        # assertEqual replaces the deprecated assertEquals alias
        # (removed in Python 3.12).
        self.assertEqual(str(sale), sale.title)
class SaleFeatureModelTestCase(TestCase):
    """str(SaleFeature) should be the feature's text."""

    def test_string_representation(self):
        feature = SaleFeatureFactory()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(str(feature), feature.text)
class SalePictureModelTestCase(TestCase):
    """str(SalePicture) should be the picture's caption."""

    def test_string_representation(self):
        picture = SalePictureFactory()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(str(picture), picture.caption)
class SaleMediaModelTestCase(TestCase):
    """str(SaleMedia) should be the media's description."""

    def test_string_representation(self):
        media = SaleMediaFactory()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(str(media), media.description)
class SaleContactModelTestCase(TestCase):
    """str(SaleContact) should be 'forename surname (email)'."""

    def test_string_representation(self):
        contact = SaleContactFactory()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(str(contact), "%s %s (%s)" % (contact.forename, contact.surname, contact.email))
class SaleNoteModelTestCase(TestCase):
    """str(SaleNote) should be the note's text."""

    def test_string_representation(self):
        note = SaleNoteFactory()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(str(note), note.text)
class SaleFavouriteModelTestCase(TestCase):
    """str(SaleFavourite) should be the owning user's username."""

    def test_string_representation(self):
        favourite = SaleFavouriteFactory()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(str(favourite), favourite.user.username)
|
OpenVolunteeringPlatform/django-ovp-organizations | refs/heads/master | ovp_organizations/migrations/0012_auto_20170109_1332.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-01-09 13:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: makes Organization.causes optional
    # (blank=True), so an organization may have no causes assigned.
    dependencies = [
        ('ovp_organizations', '0011_organization_cover'),
    ]
    operations = [
        migrations.AlterField(
            model_name='organization',
            name='causes',
            field=models.ManyToManyField(blank=True, to='ovp_core.Cause'),
        ),
    ]
|
codegooglecom/jaikuengine | refs/heads/master | common/management/commands/test.py | 31 | # Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.core.management.base import BaseCommand
from optparse import make_option
import sys
class Command(BaseCommand):
    """ Copied from the default django test command,
        extended to include coverage
    """
    option_list = BaseCommand.option_list + (
        make_option(
            '--noinput', action='store_false', dest='interactive',
            default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'
        ),
        make_option(
            '--coverage', action='store_true', dest='coverage',
            default=False,
            help='Includes coverage reporting for the tests'
        ),
        make_option(
            '--profile_all', action='store_true', dest='profile_all',
            default=False,
            help='Includes profile reporting for all tests'
        ),
        make_option(
            '--include_profile', action='store_true', dest='include_profile',
            default=False,
            help='Includes profile reporting for profiled tests'
        ),
    )
    help = 'Runs the test suite for the specified applications, or the entire site if no apps are specified.'
    args = '[appname ...]'
    requires_model_validation = False

    def handle(self, *test_labels, **options):
        """Resolve settings.TEST_RUNNER dynamically and invoke it."""
        from django.conf import settings
        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive', True)
        include_coverage = options.get('coverage', False)
        profile_all = options.get('profile_all', False)
        include_profile = options.get('include_profile', False)
        test_path = settings.TEST_RUNNER.split('.')
        # Allow for Python 2.5 relative paths
        if len(test_path) > 1:
            test_module_name = '.'.join(test_path[:-1])
        else:
            test_module_name = '.'
        # __import__'s fromlist must be a sequence; the original passed a
        # bare string, which __import__ iterates character by character.
        test_module = __import__(test_module_name, {}, {}, [test_path[-1]])
        test_runner = getattr(test_module, test_path[-1])
        failures = test_runner(test_labels,
                               verbosity=verbosity,
                               interactive=interactive,
                               include_coverage=include_coverage,
                               include_profile=include_profile,
                               profile_all=profile_all)
        if failures:
            sys.exit(failures)
|
OscarBC/romcollectionbrowser | refs/heads/master | resources/lib/pyscraper/descriptionparserfactory.py | 10 | # -*- coding: iso-8859-15 -*-
from xml.etree.ElementTree import *
from descriptionparserxml import *
from descriptionparserflatfile import *
class DescriptionParserFactory:
@classmethod
def getParser(self, descParseInstruction):
fp = open(descParseInstruction, 'r')
tree = fromstring(fp.read())
fp.close()
del fp
grammarNode = tree.find('GameGrammar')
del tree
if(grammarNode == None):
print "no valid parserConfig"
return None
attributes = grammarNode.attrib
parserType = attributes.get('type')
del attributes
if(parserType == 'multiline'):
return DescriptionParserFlatFile(grammarNode)
elif(parserType == 'xml'):
return DescriptionParserXml(grammarNode)
else:
print "Unknown parser: " +parserType
return None
|
kushalbhola/MyStuff | refs/heads/master | Practice/PythonApplication/env/Lib/site-packages/pandas/tests/api/test_api.py | 2 | import pandas as pd
from pandas import api, compat
from pandas.util import testing as tm
class Base:
    def check(self, namespace, expected, ignored=None):
        """Assert that the public names of *namespace* match *expected*.

        Dunder names are always excluded; *ignored* lists optional names
        (only present in some environments) to drop before comparing.
        """
        found = [name for name in dir(namespace) if not name.startswith("__")]
        if ignored is not None:
            found = list(set(found) - set(ignored))
        tm.assert_almost_equal(sorted(found), sorted(expected))
class TestPDApi(Base):
    """Check that the top-level ``pandas`` namespace exposes exactly the
    names enumerated below, grouped by category."""

    # these are optionally imported based on testing
    # & need to be ignored
    ignored = ["tests", "locale", "conftest"]
    # top-level sub-packages
    lib = [
        "api",
        "arrays",
        "compat",
        "core",
        "errors",
        "pandas",
        "plotting",
        "test",
        "testing",
        "tseries",
        "util",
        "options",
        "io",
    ]
    # these are already deprecated; awaiting removal
    deprecated_modules = []
    # misc
    misc = ["IndexSlice", "NaT"]
    # top-level classes
    classes = [
        "Categorical",
        "CategoricalIndex",
        "DataFrame",
        "DateOffset",
        "DatetimeIndex",
        "ExcelFile",
        "ExcelWriter",
        "Float64Index",
        "Grouper",
        "HDFStore",
        "Index",
        "Int64Index",
        "MultiIndex",
        "Period",
        "PeriodIndex",
        "RangeIndex",
        "UInt64Index",
        "Series",
        "SparseArray",
        "SparseDataFrame",
        "SparseDtype",
        "SparseSeries",
        "Timedelta",
        "TimedeltaIndex",
        "Timestamp",
        "Interval",
        "IntervalIndex",
        "CategoricalDtype",
        "PeriodDtype",
        "IntervalDtype",
        "DatetimeTZDtype",
        "Int8Dtype",
        "Int16Dtype",
        "Int32Dtype",
        "Int64Dtype",
        "UInt8Dtype",
        "UInt16Dtype",
        "UInt32Dtype",
        "UInt64Dtype",
        "NamedAgg",
    ]
    # Panel is only exposed on interpreters older than Python 3.7.
    if not compat.PY37:
        classes.append("Panel")
    # these are already deprecated; awaiting removal
    deprecated_classes = []
    # these should be deprecated in the future
    deprecated_classes_in_future = []
    # external modules exposed in pandas namespace
    modules = ["np", "datetime"]
    # top-level functions
    funcs = [
        "array",
        "bdate_range",
        "concat",
        "crosstab",
        "cut",
        "date_range",
        "interval_range",
        "eval",
        "factorize",
        "get_dummies",
        "infer_freq",
        "isna",
        "isnull",
        "lreshape",
        "melt",
        "notna",
        "notnull",
        "offsets",
        "merge",
        "merge_ordered",
        "merge_asof",
        "period_range",
        "pivot",
        "pivot_table",
        "qcut",
        "show_versions",
        "timedelta_range",
        "unique",
        "value_counts",
        "wide_to_long",
    ]
    # top-level option funcs
    funcs_option = [
        "reset_option",
        "describe_option",
        "get_option",
        "option_context",
        "set_option",
        "set_eng_float_format",
    ]
    # top-level read_* funcs
    funcs_read = [
        "read_clipboard",
        "read_csv",
        "read_excel",
        "read_fwf",
        "read_gbq",
        "read_hdf",
        "read_html",
        "read_json",
        "read_msgpack",
        "read_pickle",
        "read_sas",
        "read_sql",
        "read_sql_query",
        "read_sql_table",
        "read_stata",
        "read_table",
        "read_feather",
        "read_parquet",
        "read_spss",
    ]
    # top-level to_* funcs
    funcs_to = ["to_datetime", "to_msgpack", "to_numeric", "to_pickle", "to_timedelta"]
    # top-level to deprecate in the future
    deprecated_funcs_in_future = []
    # these are already deprecated; awaiting removal
    deprecated_funcs = []
    # private modules in pandas namespace
    private_modules = [
        "_config",
        "_hashtable",
        "_lib",
        "_libs",
        "_np_version_under1p14",
        "_np_version_under1p15",
        "_np_version_under1p16",
        "_np_version_under1p17",
        "_tslib",
        "_typing",
        "_version",
    ]

    def test_api(self):
        # The union of every category above must equal dir(pd), modulo
        # the optional ``ignored`` names.
        self.check(
            pd,
            self.lib
            + self.misc
            + self.modules
            + self.deprecated_modules
            + self.classes
            + self.deprecated_classes
            + self.deprecated_classes_in_future
            + self.funcs
            + self.funcs_option
            + self.funcs_read
            + self.funcs_to
            + self.deprecated_funcs_in_future
            + self.deprecated_funcs
            + self.private_modules,
            self.ignored,
        )
class TestApi(Base):
    """``pandas.api`` should expose exactly these sub-modules."""
    allowed = ["types", "extensions"]
    def test_api(self):
        self.check(api, self.allowed)
class TestTesting(Base):
    """``pandas.testing`` should expose exactly these assertion helpers."""
    funcs = ["assert_frame_equal", "assert_series_equal", "assert_index_equal"]
    def test_testing(self):
        from pandas import testing
        self.check(testing, self.funcs)
|
satovey/heekscnc | refs/heads/master | pycnc/wxProgramWindow.py | 25 | import wx
class ProgramWindow(wx.ScrolledWindow):
    """Scrolled window holding one multi-line, non-wrapping text control."""

    def __init__(self, parent):
        # Style flags are bitmasks and must be combined with bitwise OR;
        # the original used '+', which only works while the flags happen
        # to be distinct bits.
        wx.ScrolledWindow.__init__(self, parent, name='Program',
                                   style=wx.HSCROLL | wx.VSCROLL | wx.NO_FULL_REPAINT_ON_RESIZE)
        self.textCtrl = wx.TextCtrl(self, 100, "", style=wx.TE_MULTILINE | wx.TE_DONTWRAP)
        self.textCtrl.SetMaxLength(0) # Ensure the length is as long as this operating system supports. (It may be only 32kb or 64kb)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Resize()

    def OnSize(self, event):
        """Keep the text control sized to the client area on resize."""
        self.Resize()
        event.Skip()

    def Resize(self):
        self.textCtrl.SetSize(self.GetClientSize())

    def Clear(self):
        """Remove all text from the control."""
        self.textCtrl.Clear()

    def AppendText(self, value):
        """Append *value* (converted to str) at the end of the text."""
        self.textCtrl.AppendText(str(value))
the-deep-learners/study-group | refs/heads/master | neural-networks-and-deep-learning/src/old/mnist_autoencoder.py | 4 | """
mnist_autoencoder
~~~~~~~~~~~~~~~~~
Implements an autoencoder for the MNIST data. The program can do two
things: (1) plot the autoencoder's output for the first ten images in
the MNIST test set; and (2) use the autoencoder to build a classifier.
The program is a quick-and-dirty hack --- we'll do things in a more
systematic way in the module ``deep_autoencoder``.
"""
# My Libraries
from backprop2 import Network
import mnist_loader
# Third-party libraries
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
def autoencoder_results(hidden_units):
    """
    Train an autoencoder using the MNIST training data and plot the
    results when the first ten MNIST test images are passed through
    the autoencoder.
    """
    # The labelled test results are unused here; only the raw inputs are
    # needed to feed the autoencoder.
    training_data, test_inputs, actual_test_results = \
        mnist_loader.load_data_nn()
    net = train_autoencoder(hidden_units, training_data)
    plot_test_results(net, test_inputs)
def train_autoencoder(hidden_units, training_data):
    "Return a trained autoencoder."
    # Autoencoder targets equal the inputs: pair each image with itself.
    autoencoder_training_data = [(x, x) for x, _ in training_data]
    net = Network([784, hidden_units, 784])
    # SGD(data, epochs, mini_batch_size, eta, lmbda) -- presumably, per
    # backprop2.Network's interface; TODO confirm argument meaning.
    net.SGD(autoencoder_training_data, 6, 10, 0.01, 0.05)
    return net
def plot_test_results(net, test_inputs):
    """
    Plot the results after passing the first ten test MNIST digits through
    the autoencoder ``net``."""
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # Build two rows of ten 28-pixel-wide digits: the originals on top,
    # the autoencoder reconstructions below.
    images_in = [test_inputs[j].reshape(-1, 28) for j in range(10)]
    images_out = [net.feedforward(test_inputs[j]).reshape(-1, 28)
                  for j in range(10)]
    image_in = np.concatenate(images_in, axis=1)
    image_out = np.concatenate(images_out, axis=1)
    image = np.concatenate([image_in, image_out])
    ax.matshow(image, cmap = matplotlib.cm.binary)
    # Hide the axis ticks; only the digit image matters.
    plt.xticks(np.array([]))
    plt.yticks(np.array([]))
    plt.show()
def classifier(hidden_units, n_unlabeled_inputs, n_labeled_inputs):
    """
    Train a semi-supervised classifier.  We begin with pretraining,
    creating an autoencoder which uses ``n_unlabeled_inputs`` from the
    MNIST training data.  This is then converted into a classifier
    which is fine-tuned using the ``n_labeled_inputs``.
    For comparison a classifier is also created which does not make
    use of the unlabeled data.
    """
    training_data, test_inputs, actual_test_results = \
        mnist_loader.load_data_nn()
    print "\nUsing pretraining and %s items of unlabeled data" %\
        n_unlabeled_inputs
    net_ae = train_autoencoder(hidden_units, training_data[:n_unlabeled_inputs])
    # Reuse the autoencoder's first layer; re-initialise only the output
    # layer (10 classes) with scaled Gaussian weights.
    net_c = Network([784, hidden_units, 10])
    net_c.biases = net_ae.biases[:1]+[np.random.randn(10, 1)/np.sqrt(10)]
    net_c.weights = net_ae.weights[:1]+\
        [np.random.randn(10, hidden_units)/np.sqrt(10)]
    net_c.SGD(training_data[-n_labeled_inputs:], 300, 10, 0.01, 0.05)
    print "Result on test data: %s / %s" % (
        net_c.evaluate(test_inputs, actual_test_results), len(test_inputs))
    # Baseline: same architecture trained from scratch on the labeled
    # data only, without pretraining.
    print "Training a network with %s items of training data" % n_labeled_inputs
    net = Network([784, hidden_units, 10])
    net.SGD(training_data[-n_labeled_inputs:], 300, 10, 0.01, 0.05)
    print "Result on test data: %s / %s" % (
        net.evaluate(test_inputs, actual_test_results), len(test_inputs))
    return net_c
|
grevian/GraphViz-Site | refs/heads/master | requests/packages/urllib3/packages/ssl_match_hostname/__init__.py | 315 | try:
# Python 3.2+
from ssl import CertificateError, match_hostname
except ImportError:
try:
# Backport of the function from a pypi module
from backports.ssl_match_hostname import CertificateError, match_hostname
except ImportError:
# Our vendored copy
from _implementation import CertificateError, match_hostname
# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostname')
|
oonid/dilingkari | refs/heads/master | lib/pyasn1/compat/__init__.py | 3653 | # This file is necessary to make this directory a package.
|
bnsgeyer/Copter3_4 | refs/heads/master | Tools/autotest/autotest.py | 5 | #!/usr/bin/env python
"""
APM automatic test suite
Andrew Tridgell, October 2011
"""
from __future__ import print_function
import atexit
import fnmatch
import glob
import optparse
import os
import shutil
import signal
import sys
import time
import traceback
import apmrover2
import arducopter
import arduplane
import quadplane
from pysim import util
# Unbuffered output and a repo-local tmp directory for the whole test run.
os.environ['PYTHONUNBUFFERED'] = '1'
os.putenv('TMPDIR', util.reltopdir('tmp'))
def get_default_params(atype, binary):
    """Get default parameters."""
    # use rover simulator so SITL is not starved of input
    from pymavlink import mavutil
    HOME = mavutil.location(40.071374969556928, -105.22978898137808, 1583.702759, 246)
    if binary.find("plane") != -1 or binary.find("rover") != -1:
        frame = "rover"
    else:
        frame = "+"
    home = "%f,%f,%u,%u" % (HOME.lat, HOME.lng, HOME.alt, HOME.heading)
    sitl = util.start_SITL(binary, wipe=True, model=frame, home=home, speedup=10, unhide_parameters=True)
    mavproxy = util.start_MAVProxy_SITL(atype)
    print("Dumping defaults")
    # Either SITL asks for initial setup (index 0) or it saves straight away.
    idx = mavproxy.expect(['Please Run Setup', 'Saved [0-9]+ parameters to (\S+)'])
    if idx == 0:
        # we need to restart it after eeprom erase
        util.pexpect_close(mavproxy)
        util.pexpect_close(sitl)
        sitl = util.start_SITL(binary, model=frame, home=home, speedup=10)
        mavproxy = util.start_MAVProxy_SITL(atype)
        idx = mavproxy.expect('Saved [0-9]+ parameters to (\S+)')
    parmfile = mavproxy.match.group(1)
    # Publish the defaults next to the other build artefacts.
    dest = util.reltopdir('../buildlogs/%s-defaults.parm' % atype)
    shutil.copy(parmfile, dest)
    util.pexpect_close(mavproxy)
    util.pexpect_close(sitl)
    print("Saved defaults for %s to %s" % (atype, dest))
    return True
def build_all():
    """Run the build_all.sh script; return True on success."""
    print("Running build_all.sh")
    rc = util.run_cmd(util.reltopdir('Tools/scripts/build_all.sh'),
                      directory=util.reltopdir('.'))
    if rc != 0:
        print("Failed build_all.sh")
        return False
    return True
def build_binaries():
    """Run the build_binaries.sh script; return True on success."""
    print("Running build_binaries.sh")
    # (the redundant function-local ``import shutil`` was removed: shutil
    # is already imported at module level)
    # copy the script as it changes git branch, which can change the script while running
    orig = util.reltopdir('Tools/scripts/build_binaries.sh')
    copy = util.reltopdir('./build_binaries.sh')
    shutil.copyfile(orig, copy)
    shutil.copymode(orig, copy)
    if util.run_cmd(copy, directory=util.reltopdir('.')) != 0:
        print("Failed build_binaries.sh")
        return False
    return True
def build_devrelease():
    """Run the build_devrelease.sh script; return True on success."""
    print("Running build_devrelease.sh")
    # (the redundant function-local ``import shutil`` was removed: shutil
    # is already imported at module level)
    # copy the script as it changes git branch, which can change the script while running
    orig = util.reltopdir('Tools/scripts/build_devrelease.sh')
    copy = util.reltopdir('./build_devrelease.sh')
    shutil.copyfile(orig, copy)
    shutil.copymode(orig, copy)
    if util.run_cmd(copy, directory=util.reltopdir('.')) != 0:
        print("Failed build_devrelease.sh")
        return False
    return True
def build_examples():
    """Build the example sketches for every supported board."""
    for target in ('px4-v2', 'navio'):
        print("Running build.examples for %s" % target)
        try:
            util.build_examples(target)
        except Exception as e:
            # Report which board failed and stop immediately.
            print("Failed build_examples on board=%s" % target)
            print(str(e))
            return False
    return True
def build_parameters():
    """Run the param_parse.py script; return True on success."""
    print("Running param_parse.py")
    rc = util.run_cmd(util.reltopdir('Tools/autotest/param_metadata/param_parse.py'),
                      directory=util.reltopdir('.'))
    if rc != 0:
        print("Failed param_parse.py")
        return False
    return True
def convert_gpx():
    """Convert any tlog files to GPX and KML."""
    # (the redundant function-local ``import glob`` was removed: glob is
    # already imported at module level)
    mavlog = glob.glob(util.reltopdir("../buildlogs/*.tlog"))
    for m in mavlog:
        util.run_cmd(util.reltopdir("modules/mavlink/pymavlink/tools/mavtogpx.py") + " --nofixcheck " + m)
        gpx = m + '.gpx'
        kml = m + '.kml'
        # checkfail=False: best-effort conversion/packaging steps.
        util.run_cmd('gpsbabel -i gpx -f %s -o kml,units=m,floating=1,extrude=1 -F %s' % (gpx, kml), checkfail=False)
        util.run_cmd('zip %s.kmz %s.kml' % (m, m), checkfail=False)
        util.run_cmd("mavflightview.py --imagefile=%s.png %s" % (m, m))
    return True
def test_prerequisites():
    """Check we have the right directories and tools to run tests."""
    print("Testing prerequisites")
    # The buildlogs directory receives every artefact and report.
    util.mkdir_p(util.reltopdir('../buildlogs'))
    return True
def alarm_handler(signum, frame):
    """Handle test timeout."""
    global results, opts
    try:
        # Record the timeout, close all child processes, publish whatever
        # results exist, then kill the whole process group.
        results.add('TIMEOUT', '<span class="failed-text">FAILED</span>', opts.timeout)
        util.pexpect_close_all()
        convert_gpx()
        write_fullresults()
        os.killpg(0, signal.SIGKILL)
    except Exception:
        # Deliberately best-effort: nothing more can be done if the
        # cleanup itself fails inside a signal handler.
        pass
    sys.exit(1)
############## main program #############
# Command-line options for the autotest driver.
parser = optparse.OptionParser("autotest")
parser.add_option("--skip", type='string', default='', help='list of steps to skip (comma separated)')
parser.add_option("--list", action='store_true', default=False, help='list the available steps')
parser.add_option("--viewerip", default=None, help='IP address to send MAVLink and fg packets to')
parser.add_option("--map", action='store_true', default=False, help='show map')
parser.add_option("--experimental", default=False, action='store_true', help='enable experimental tests')
parser.add_option("--timeout", default=3000, type='int', help='maximum runtime in seconds')
parser.add_option("--valgrind", default=False, action='store_true', help='run ArduPilot binaries under valgrind')
parser.add_option("--gdb", default=False, action='store_true', help='run ArduPilot binaries under gdb')
parser.add_option("--debug", default=False, action='store_true', help='make built binaries debug binaries')
parser.add_option("-j", default=None, type='int', help='build CPUs')
opts, args = parser.parse_args()
# Full ordered list of steps; positional arguments select a subset.
steps = [
    'prerequisites',
    'build.All',
    'build.Binaries',
    # 'build.DevRelease',
    'build.Examples',
    'build.Parameters',
    'build.ArduPlane',
    'defaults.ArduPlane',
    'fly.ArduPlane',
    'fly.QuadPlane',
    'build.APMrover2',
    'defaults.APMrover2',
    'drive.APMrover2',
    'build.ArduCopter',
    'defaults.ArduCopter',
    'fly.ArduCopter',
    'build.Helicopter',
    'fly.CopterAVC',
    'build.AntennaTracker',
    'convertgpx',
]
skipsteps = opts.skip.split(',')
# ensure we catch timeouts
signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(opts.timeout)
if opts.list:
    # --list prints the step names and exits without running anything.
    for step in steps:
        print(step)
    sys.exit(0)
def skip_step(step):
    """Return True when *step* matches any --skip pattern (case-insensitive)."""
    return any(fnmatch.fnmatch(step.lower(), skip.lower())
               for skip in skipsteps)
def binary_path(step, debug=False):
    """Map a test step name to the SITL binary it needs (None if none)."""
    # Ordered substring -> binary-name table (first match wins, mirroring
    # the original if/elif chain).
    mapping = (
        ("ArduCopter", "arducopter-quad"),
        ("ArduPlane", "arduplane"),
        ("APMrover2", "ardurover"),
        ("AntennaTracker", "antennatracker"),
        ("CopterAVC", "arducopter-heli"),
        ("QuadPlane", "arduplane"),
    )
    for key, binary_name in mapping:
        if key in step:
            break
    else:
        # cope with builds that don't have a specific binary
        return None
    binary_basedir = "sitl-debug" if debug else "sitl"
    binary = util.reltopdir(os.path.join('build', binary_basedir, 'bin', binary_name))
    if not os.path.exists(binary):
        if os.path.exists(binary + ".exe"):
            binary += ".exe"
        else:
            raise ValueError("Binary (%s) does not exist" % (binary,))
    return binary
def run_step(step):
    """Run one step."""
    # remove old logs
    util.run_cmd('/bin/rm -f logs/*.BIN logs/LASTLOG.TXT')
    if step == "prerequisites":
        return test_prerequisites()
    # Build steps map directly to SITL build targets.
    if step == 'build.ArduPlane':
        return util.build_SITL('bin/arduplane', j=opts.j, debug=opts.debug)
    if step == 'build.APMrover2':
        return util.build_SITL('bin/ardurover', j=opts.j, debug=opts.debug)
    if step == 'build.ArduCopter':
        return util.build_SITL('bin/arducopter-quad', j=opts.j, debug=opts.debug)
    if step == 'build.AntennaTracker':
        return util.build_SITL('bin/antennatracker', j=opts.j, debug=opts.debug)
    if step == 'build.Helicopter':
        return util.build_SITL('bin/arducopter-heli', j=opts.j, debug=opts.debug)
    # Remaining steps need the previously built binary for their vehicle.
    binary = binary_path(step, debug=opts.debug)
    if step == 'defaults.ArduPlane':
        return get_default_params('ArduPlane', binary)
    if step == 'defaults.ArduCopter':
        return get_default_params('ArduCopter', binary)
    if step == 'defaults.APMrover2':
        return get_default_params('APMrover2', binary)
    if step == 'fly.ArduCopter':
        return arducopter.fly_ArduCopter(binary, viewerip=opts.viewerip, use_map=opts.map, valgrind=opts.valgrind, gdb=opts.gdb)
    if step == 'fly.CopterAVC':
        return arducopter.fly_CopterAVC(binary, viewerip=opts.viewerip, use_map=opts.map, valgrind=opts.valgrind, gdb=opts.gdb)
    if step == 'fly.ArduPlane':
        return arduplane.fly_ArduPlane(binary, viewerip=opts.viewerip, use_map=opts.map, valgrind=opts.valgrind, gdb=opts.gdb)
    if step == 'fly.QuadPlane':
        return quadplane.fly_QuadPlane(binary, viewerip=opts.viewerip, use_map=opts.map, valgrind=opts.valgrind, gdb=opts.gdb)
    if step == 'drive.APMrover2':
        return apmrover2.drive_APMrover2(binary, viewerip=opts.viewerip, use_map=opts.map, valgrind=opts.valgrind, gdb=opts.gdb)
    if step == 'build.All':
        return build_all()
    if step == 'build.Binaries':
        return build_binaries()
    if step == 'build.DevRelease':
        return build_devrelease()
    if step == 'build.Examples':
        return build_examples()
    if step == 'build.Parameters':
        return build_parameters()
    if step == 'convertgpx':
        return convert_gpx()
    raise RuntimeError("Unknown step %s" % step)
class TestResult(object):
    """Outcome of one autotest step."""

    def __init__(self, name, result, elapsed):
        """Store the step name, its result markup and the runtime."""
        self.name = name
        self.result = result
        # One decimal place, as shown on the results web page.
        self.elapsed = "{:.1f}".format(elapsed)
class TestFile(object):
    """Test result file."""
    def __init__(self, name, fname):
        # Display name and artefact file name for the results web page.
        self.name = name
        self.fname = fname
class TestResults(object):
    """Test results class."""
    def __init__(self):
        # Timestamp and current git revision identify this test run.
        self.date = time.asctime()
        self.githash = util.run_cmd('git rev-parse HEAD', output=True, directory=util.reltopdir('.')).strip()
        self.tests = []
        self.files = []
        self.images = []
    def add(self, name, result, elapsed):
        """Add a result."""
        self.tests.append(TestResult(name, result, elapsed))
    def addfile(self, name, fname):
        """Add a result file."""
        self.files.append(TestFile(name, fname))
    def addimage(self, name, fname):
        """Add a result image."""
        self.images.append(TestFile(name, fname))
    def addglob(self, name, pattern):
        """Add a set of files."""
        import glob
        for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
            self.addfile(name, os.path.basename(f))
    def addglobimage(self, name, pattern):
        """Add a set of images."""
        import glob
        for f in glob.glob(util.reltopdir('../buildlogs/%s' % pattern)):
            self.addimage(name, os.path.basename(f))
def write_webresults(results_to_write):
    """Write webpage results."""
    from pymavlink.generator import mavtemplate
    t = mavtemplate.MAVTemplate()
    # Render every HTML template with the results, then copy static images.
    for h in glob.glob(util.reltopdir('Tools/autotest/web/*.html')):
        html = util.loadfile(h)
        f = open(util.reltopdir("../buildlogs/%s" % os.path.basename(h)), mode='w')
        t.write(f, html, results_to_write)
        f.close()
    for f in glob.glob(util.reltopdir('Tools/autotest/web/*.png')):
        shutil.copy(f, util.reltopdir('../buildlogs/%s' % os.path.basename(f)))
def write_fullresults():
    """Write out full results set."""
    global results
    # Attach every known artefact (logs, cores, ELFs, docs, images) that
    # exists in ../buildlogs to the results page, then render it.
    results.addglob("Google Earth track", '*.kmz')
    results.addfile('Full Logs', 'autotest-output.txt')
    results.addglob('DataFlash Log', '*-log.bin')
    results.addglob("MAVLink log", '*.tlog')
    results.addglob("GPX track", '*.gpx')
    results.addfile('ArduPlane build log', 'ArduPlane.txt')
    results.addfile('ArduPlane code size', 'ArduPlane.sizes.txt')
    results.addfile('ArduPlane stack sizes', 'ArduPlane.framesizes.txt')
    results.addfile('ArduPlane defaults', 'default_params/ArduPlane-defaults.parm')
    results.addglob("ArduPlane log", 'ArduPlane-*.BIN')
    results.addglob("ArduPlane core", 'ArduPlane.core')
    results.addglob("ArduPlane ELF", 'ArduPlane.elf')
    results.addfile('ArduCopter build log', 'ArduCopter.txt')
    results.addfile('ArduCopter code size', 'ArduCopter.sizes.txt')
    results.addfile('ArduCopter stack sizes', 'ArduCopter.framesizes.txt')
    results.addfile('ArduCopter defaults', 'default_params/ArduCopter-defaults.parm')
    results.addglob("ArduCopter log", 'ArduCopter-*.BIN')
    results.addglob("ArduCopter core", 'ArduCopter.core')
    results.addglob("ArduCopter elf", 'ArduCopter.elf')
    results.addglob("CopterAVC log", 'CopterAVC-*.BIN')
    results.addglob("CopterAVC core", 'CopterAVC.core')
    results.addfile('APMrover2 build log', 'APMrover2.txt')
    results.addfile('APMrover2 code size', 'APMrover2.sizes.txt')
    results.addfile('APMrover2 stack sizes', 'APMrover2.framesizes.txt')
    results.addfile('APMrover2 defaults', 'default_params/APMrover2-defaults.parm')
    results.addglob("APMrover2 log", 'APMrover2-*.BIN')
    results.addglob("APMrover2 core", 'APMrover2.core')
    results.addglob("APMrover2 ELF", 'APMrover2.elf')
    results.addfile('AntennaTracker build log', 'AntennaTracker.txt')
    results.addfile('AntennaTracker code size', 'AntennaTracker.sizes.txt')
    results.addfile('AntennaTracker stack sizes', 'AntennaTracker.framesizes.txt')
    results.addglob("AntennaTracker ELF", 'AntennaTracker.elf')
    results.addglob('APM:Libraries documentation', 'docs/libraries/index.html')
    results.addglob('APM:Plane documentation', 'docs/ArduPlane/index.html')
    results.addglob('APM:Copter documentation', 'docs/ArduCopter/index.html')
    results.addglob('APM:Rover documentation', 'docs/APMrover2/index.html')
    results.addglobimage("Flight Track", '*.png')
    write_webresults(results)
# Global results accumulator, shared by run_tests() and alarm_handler().
results = TestResults()
def check_logs(step):
    """Move logs/cores produced by a fly/drive step into ../buildlogs."""
    print("check step: ", step)
    for prefix in ('fly.', 'drive.'):
        if step.startswith(prefix):
            vehicle = step[len(prefix):]
            break
    else:
        # Only flight/drive steps produce vehicle logs.
        return
    for log in glob.glob("logs/*.BIN"):
        bname = os.path.basename(log)
        newname = util.reltopdir("../buildlogs/%s-%s" % (vehicle, bname))
        print("Renaming %s to %s" % (log, newname))
        os.rename(log, newname)
    corefile = "core"
    if os.path.exists(corefile):
        # Keep the core dump plus the matching ELF for later debugging.
        newname = util.reltopdir("../buildlogs/%s.core" % vehicle)
        print("Renaming %s to %s" % (corefile, newname))
        os.rename(corefile, newname)
        util.run_cmd('/bin/cp A*/A*.elf ../buildlogs', directory=util.reltopdir('.'))
def run_tests(steps):
    """Run a list of steps."""
    global results
    passed = True
    failed = []
    for step in steps:
        # Close any leftover child processes from the previous step.
        util.pexpect_close_all()
        if skip_step(step):
            continue
        t1 = time.time()
        print(">>>> RUNNING STEP: %s at %s" % (step, time.asctime()))
        try:
            if not run_step(step):
                # Step returned failure without raising.
                print(">>>> FAILED STEP: %s at %s" % (step, time.asctime()))
                passed = False
                failed.append(step)
                results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
                continue
        except Exception as msg:
            # Step raised: record the failure but keep going so remaining
            # steps still run; salvage logs first.
            passed = False
            failed.append(step)
            print(">>>> FAILED STEP: %s at %s (%s)" % (step, time.asctime(), msg))
            traceback.print_exc(file=sys.stdout)
            results.add(step, '<span class="failed-text">FAILED</span>', time.time() - t1)
            check_logs(step)
            continue
        results.add(step, '<span class="passed-text">PASSED</span>', time.time() - t1)
        print(">>>> PASSED STEP: %s at %s" % (step, time.asctime()))
        check_logs(step)
    if not passed:
        print("FAILED %u tests: %s" % (len(failed), failed))
    util.pexpect_close_all()
    write_fullresults()
    return passed
util.mkdir_p(util.reltopdir('../buildlogs'))
# Lock file prevents two autotest runs from racing on the same checkout.
lckfile = util.reltopdir('../buildlogs/autotest.lck')
lck = util.lock_file(lckfile)
if lck is None:
    print("autotest is locked - exiting. lckfile=(%s)" % (lckfile,))
    sys.exit(0)
atexit.register(util.pexpect_close_all)
if len(args) > 0:
    # allow a wildcard list of steps
    matched = []
    for a in args:
        arg_matched = False
        for s in steps:
            if fnmatch.fnmatch(s.lower(), a.lower()):
                matched.append(s)
                arg_matched = True
        if not arg_matched:
            print("No steps matched argument ({})".format(a))
            sys.exit(1)
    steps = matched
try:
    if not run_tests(steps):
        sys.exit(1)
except KeyboardInterrupt:
    util.pexpect_close_all()
    sys.exit(1)
except Exception:
    # make sure we kill off any children
    util.pexpect_close_all()
    raise
|
mojeto/django | refs/heads/master | django/contrib/messages/apps.py | 130 | from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class MessagesConfig(AppConfig):
    """App configuration for django.contrib.messages."""
    name = 'django.contrib.messages'
    verbose_name = _("Messages")
|
arokem/nipy | refs/heads/master | nipy/algorithms/slicetiming/setup.py | 3 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import os
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils Configuration for the slicetiming subpackage.

    Parameters mirror the numpy.distutils convention: *parent_package* is the
    dotted name of the enclosing package, *top_path* the repository root.
    Returns the populated Configuration object.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration('slicetiming', parent_package, top_path)
    config.add_subpackage('tests')
    # Expose this package's own directory as an include dir
    # (dotted name -> filesystem path).
    config.add_include_dirs(config.name.replace('.', os.sep))
    return config
if __name__ == '__main__':
    # This setup.py is only meant to be driven by the top-level build.
    print('This is the wrong setup.py file to run')
|
msiedlarek/wiring | refs/heads/master | tests/scanning/testmodule/ignoredsubmodule/ignored_registers.py | 1 | from wiring.scanning import register
@register.function()
def ignored_function():
    """Test fixture: a registered function living in an ignored submodule.

    NOTE(review): presumably scanning tests assert this is NOT picked up
    despite the @register decorator — confirm against the test suite.
    """
    pass
@register.factory()
class IgnoredFactory():
    """Test fixture: a registered factory class in an ignored submodule."""
    pass
|
NeuralEnsemble/python-neo | refs/heads/master | neo/rawio/brainvisionrawio.py | 2 | """
Class for reading data from BrainVision product.
This code was originally made by L. Pezard (2010), modified B. Burle and
S. More.
Author: Samuel Garcia
"""
from .baserawio import (BaseRawIO, _signal_channel_dtype, _signal_stream_dtype,
_spike_channel_dtype, _event_channel_dtype)
import numpy as np
import datetime
import os
import re
class BrainVisionRawIO(BaseRawIO):
    """RawIO for the BrainVision file triplet: .vhdr text header, .vmrk
    marker file and a raw binary data file.

    Only multiplexed BINARY data (int16 / int32 / float32 samples) is
    supported; other layouts trigger assertion failures in _parse_header.
    """
    extensions = ['vhdr']
    rawmode = 'one-file'

    def __init__(self, filename=''):
        BaseRawIO.__init__(self)
        self.filename = filename

    def _parse_header(self):
        # Read header file (vhdr). Marker/data file names are given in the
        # header relative to the header file's own directory.
        vhdr_header = read_brainvsion_soup(self.filename)
        bname = os.path.basename(self.filename)
        marker_filename = self.filename.replace(bname, vhdr_header['Common Infos']['MarkerFile'])
        binary_filename = self.filename.replace(bname, vhdr_header['Common Infos']['DataFile'])
        assert vhdr_header['Common Infos'][
            'DataFormat'] == 'BINARY', NotImplementedError
        assert vhdr_header['Common Infos'][
            'DataOrientation'] == 'MULTIPLEXED', NotImplementedError
        nb_channel = int(vhdr_header['Common Infos']['NumberOfChannels'])
        # SamplingInterval is expressed in microseconds -> convert to Hz.
        sr = 1.e6 / float(vhdr_header['Common Infos']['SamplingInterval'])
        self._sampling_rate = sr
        fmt = vhdr_header['Binary Infos']['BinaryFormat']
        fmts = {'INT_16': np.int16, 'INT_32': np.int32, 'IEEE_FLOAT_32': np.float32, }
        assert fmt in fmts, NotImplementedError
        sig_dtype = fmts[fmt]
        # raw signals memmap (flat sample stream, channel-multiplexed)
        sigs = np.memmap(binary_filename, dtype=sig_dtype, mode='r', offset=0)
        if sigs.size % nb_channel != 0:
            # BUG FIX: `sigs[:-sigs.size % nb_channel]` parsed as
            # `sigs[:(-sigs.size) % nb_channel]` because unary minus binds
            # tighter than `%`, truncating the recording to fewer than
            # nb_channel samples. We only want to drop the incomplete
            # trailing frame:
            sigs = sigs[:-(sigs.size % nb_channel)]
        self._raw_signals = sigs.reshape(-1, nb_channel)
        signal_streams = np.array([('Signals', '0')], dtype=_signal_stream_dtype)
        sig_channels = []
        channel_infos = vhdr_header['Channel Infos']
        for c in range(nb_channel):
            # Key capitalization varies between writers ('Ch1' vs 'ch1').
            try:
                channel_desc = channel_infos['Ch%d' % (c + 1,)]
            except KeyError:
                channel_desc = channel_infos['ch%d' % (c + 1,)]
            name, ref, res, units = channel_desc.split(',')
            units = units.replace('µ', 'u')
            chan_id = str(c + 1)
            if sig_dtype == np.int16 or sig_dtype == np.int32:
                # Integer samples need the per-channel resolution as gain.
                gain = float(res)
            else:
                gain = 1
            offset = 0
            stream_id = '0'
            sig_channels.append((name, chan_id, self._sampling_rate, sig_dtype,
                                 units, gain, offset, stream_id))
        sig_channels = np.array(sig_channels, dtype=_signal_channel_dtype)
        # No spikes in this format
        spike_channels = []
        spike_channels = np.array(spike_channels, dtype=_spike_channel_dtype)
        # read all markers in memory from the .vmrk file
        all_info = read_brainvsion_soup(marker_filename)['Marker Infos']
        ev_types = []
        ev_timestamps = []
        ev_labels = []
        for i in range(len(all_info)):
            # Each entry is 'MkN=type,label,position,size,channel[,...]'
            ev_type, ev_label, pos, size, channel = all_info[
                'Mk%d' % (i + 1,)].split(',')[:5]
            ev_types.append(ev_type)
            ev_timestamps.append(int(pos))
            ev_labels.append(ev_label)
        ev_types = np.array(ev_types)
        ev_timestamps = np.array(ev_timestamps)
        ev_labels = np.array(ev_labels, dtype='U')
        # group markers by marker type: one event channel per type
        self._raw_events = []
        event_channels = []
        for c, ev_type in enumerate(np.unique(ev_types)):
            ind = (ev_types == ev_type)
            event_channels.append((ev_type, '', 'event'))
            self._raw_events.append((ev_timestamps[ind], ev_labels[ind]))
        event_channels = np.array(event_channels, dtype=_event_channel_dtype)
        # fill into header dict
        self.header = {}
        self.header['nb_block'] = 1
        self.header['nb_segment'] = [1]
        self.header['signal_streams'] = signal_streams
        self.header['signal_channels'] = sig_channels
        self.header['spike_channels'] = spike_channels
        self.header['event_channels'] = event_channels
        self._generate_minimal_annotations()
        if 'Coordinates' in vhdr_header:
            # Optional per-channel electrode coordinates -> array annotations.
            sig_annotations = self.raw_annotations['blocks'][0]['segments'][0]['signals'][0]
            all_coords = []
            for c in range(sig_channels.size):
                coords = vhdr_header['Coordinates']['Ch{}'.format(c + 1)]
                all_coords.append([float(v) for v in coords.split(',')])
            all_coords = np.array(all_coords)
            for dim in range(all_coords.shape[1]):
                sig_annotations['__array_annotations__'][f'coordinates_{dim}'] = all_coords[:, dim]

    def _source_name(self):
        return self.filename

    def _segment_t_start(self, block_index, seg_index):
        return 0.

    def _segment_t_stop(self, block_index, seg_index):
        t_stop = self._raw_signals.shape[0] / self._sampling_rate
        return t_stop

    ###
    def _get_signal_size(self, block_index, seg_index, stream_index):
        assert stream_index == 0
        return self._raw_signals.shape[0]

    def _get_signal_t_start(self, block_index, seg_index, stream_index):
        return 0.

    def _get_analogsignal_chunk(self, block_index, seg_index, i_start, i_stop,
                                stream_index, channel_indexes):
        if channel_indexes is None:
            channel_indexes = slice(None)
        raw_signals = self._raw_signals[slice(i_start, i_stop), channel_indexes]
        return raw_signals

    ###
    def _spike_count(self, block_index, seg_index, unit_index):
        return 0

    ###
    # event and epoch zone
    def _event_count(self, block_index, seg_index, event_channel_index):
        all_timestamps, all_label = self._raw_events[event_channel_index]
        return all_timestamps.size

    def _get_event_timestamps(self, block_index, seg_index, event_channel_index, t_start, t_stop):
        # Timestamps are in sample units; filter to the requested time window.
        timestamps, labels = self._raw_events[event_channel_index]
        if t_start is not None:
            keep = timestamps >= int(t_start * self._sampling_rate)
            timestamps = timestamps[keep]
            labels = labels[keep]
        if t_stop is not None:
            keep = timestamps <= int(t_stop * self._sampling_rate)
            timestamps = timestamps[keep]
            labels = labels[keep]
        # This format has no epoch durations.
        # (An unreachable `raise (NotImplementedError)` that followed this
        # return has been removed.)
        durations = None
        return timestamps, durations, labels

    def _rescale_event_timestamp(self, event_timestamps, dtype, event_channel_index):
        event_times = event_timestamps.astype(dtype) / self._sampling_rate
        return event_times
def read_brainvsion_soup(filename):
    """Parse a BrainVision INI-like text file (.vhdr/.vmrk) into nested dicts.

    Returns ``{section_name: {key: value}}``. Section headers look like
    ``[Common Infos]``; lines starting with ';' are comments and skipped;
    only lines containing exactly one '=' are kept as key/value pairs.
    """
    parsed = {}
    current_section = None
    with open(filename, 'r', encoding='utf8') as fid:
        for raw_line in fid:
            text = raw_line.strip('\n').strip('\r')
            if text.startswith('['):
                # New section, e.g. "[Binary Infos]"
                current_section = re.findall(r'\[([\S ]+)\]', text)[0]
                parsed[current_section] = {}
            elif text.startswith(';'):
                # comment line
                pass
            else:
                pieces = text.split('=')
                if '=' in text and len(pieces) == 2:
                    key, value = pieces
                    parsed[current_section][key] = value
    return parsed
|
CodingVault/LeetCodeInPython | refs/heads/master | longest_valid_parentheses.py | 1 | #!/usr/bin/env python
# encoding: utf-8
"""
longest_valid_parentheses.py
Created by Shengwei on 2014-07-08.
"""
# https://oj.leetcode.com/problems/longest-valid-parentheses/
# tags: medium / hard, array, parentheses, stack, longest
"""
Given a string containing just the characters '(' and ')', find the length of the longest valid (well-formed) parentheses substring.
For "(()", the longest valid parentheses substring is "()", which has length = 2.
Another example is ")()())", where the longest valid parentheses substring is "()()", which has length = 4.
"""
# alternative: D&C
class Solution:
    # @param s, a string
    # @return an integer
    def longestValidParentheses(self, s):
        """Return the length of the longest valid parentheses substring of s.

        Two passes, O(n) time / O(n) space:
        1. Stack matching: for each matched '(' at index i, record the length
           of that balanced pair span in lengths[i].
        2. Linear scan merging adjacent recorded spans into maximal runs.
        """
        stack = []
        # for '(' at given index, store the length of matching pair ')';
        # the last one length[len(s)] is a sentinel
        lengths = [0] * (len(s) + 1)
        # FIX: use range (Python 2 and 3) instead of Python-2-only xrange;
        # iteration behavior is identical.
        for i in range(len(s)):
            if s[i] == '(':
                stack.append(i)
            if s[i] == ')':
                if stack:
                    left_index = stack.pop()
                    lengths[left_index] = i + 1 - left_index
        max_length = current_length = i = 0
        while i < len(s):
            current_length = lengths[i]
            # for the last pair, i + current_length == len(s), and
            # it takes advantage of the sentinel
            while lengths[i + current_length] > 0:
                current_length += lengths[i + current_length]
            max_length = max(max_length, current_length)
            i += current_length + 1
        return max_length
|
ScreamingUdder/mantid | refs/heads/master | scripts/Interface/ui/sans_isis/run_selector_widget.py | 1 | from __future__ import (absolute_import, division, print_function)
from PyQt4 import QtGui, QtCore
import ui_run_selector_widget
from PyQt4.QtCore import pyqtSignal
from mantidqtpython import MantidQt
class RunSelectorWidget(QtGui.QWidget, ui_run_selector_widget.Ui_RunSelectorWidget):
    """Qt widget for picking run files to sum.

    Translates raw button/line-edit interactions into the high-level signals
    below, which the presenter connects to.
    """
    manageDirectories = pyqtSignal()
    browse = pyqtSignal()
    addRuns = pyqtSignal()
    removeRuns = pyqtSignal()
    removeAllRuns = pyqtSignal()

    def __init__(self, parent=None):
        super(RunSelectorWidget, self).__init__(parent)
        self.setupUi(self)
        self._connect_signals()

    def setupUi(self, other):
        ui_run_selector_widget.Ui_RunSelectorWidget.setupUi(self, other)
        # Allow ctrl/shift multi-selection in the run list.
        self.runList.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection)

    def show_file_picker(self, extensions, search_directories):
        """Open a native file dialog and return the chosen paths as strings."""
        assert(len(extensions) > 0)
        previous_directories = self._previous_directory_settings()
        default_directory = search_directories[0]
        directory = self._previous_or_default_directory(previous_directories, default_directory)
        file_filter = self._filter_for_extensions(extensions)
        chosen_files = QtGui.QFileDialog.getOpenFileNames(self, "Select files", directory, file_filter)
        if chosen_files:
            # Remember where the user browsed to for next time.
            self._store_previous_directory(previous_directories, chosen_files[0])
        return [str(chosen_file) for chosen_file in chosen_files]

    def _previous_directory_settings(self):
        previous_directories = QtCore.QSettings()
        previous_directories.beginGroup("CustomInterfaces/SANSRunWindow/AddRuns")
        return previous_directories

    def _previous_or_default_directory(self, settings, default):
        return settings.value("InPath", default)

    def _store_previous_directory(self, settings, path):
        previous_file = QtCore.QFileInfo(path)
        settings.setValue("InPath", previous_file.absoluteDir().absolutePath())

    def _filter_for_extensions(self, extensions):
        # e.g. ['.nxs', '.raw'] -> 'Files ( *.nxs *.raw)'
        return "Files ( *" + " *".join(extensions) + ")"

    def show_directories_manager(self):
        MantidQt.API.ManageUserDirectories.openUserDirsDialog(self)

    def run_not_found(self):
        QtGui.QMessageBox.warning(self, "Run Not Found!",
                                  "Could not find one or more of the runs specified.")

    def invalid_run_query(self, message):
        QtGui.QMessageBox.warning(self, "Invalid Run Query!", message)

    def run_list(self):
        return str(self.runLineEdit.text())

    def selected_runs(self):
        """Return the row indices currently selected in the run list."""
        selected = [runModel.row() for runModel in
                    self.runList.selectedIndexes()]
        return selected

    def draw_runs(self, runs):
        """Rebuild the list model from *runs* (display name + path tooltip)."""
        model = QtGui.QStandardItemModel()
        for run in runs:
            item = QtGui.QStandardItem(run.display_name())
            item.setToolTip(run.file_path())
            model.appendRow(item)
        self.runList.setModel(model)

    @property
    def title(self):
        # BUG FIX: previously called the non-existent getTitle() and had no
        # `return`, so reading this property always yielded None (or raised).
        # The Qt accessor matching setTitle() below is title().
        return self.runsGroup.title()

    @title.setter
    def title(self, new_title):
        self.runsGroup.setTitle(new_title)

    def _handle_add_run(self):
        self.addRuns.emit()

    def _handle_remove_all_runs(self):
        self.removeAllRuns.emit()

    def _handle_remove_run(self):
        self.removeRuns.emit()

    def _handle_manage_directories(self):
        self.manageDirectories.emit()

    def _handle_browse_files(self):
        self.browse.emit()

    def _connect_signals(self):
        # Wire raw widget events to this widget's public signals.
        self.addRunButton.pressed.connect(self._handle_add_run)
        self.runLineEdit.returnPressed.connect(self._handle_add_run)
        self.removeRunButton.pressed.connect(self._handle_remove_run)
        self.removeAllRunsButton.pressed.connect(self._handle_remove_all_runs)
        self.manageDirectoriesButton.pressed.connect(self._handle_manage_directories)
        self.browseFileButton.pressed.connect(self._handle_browse_files)
|
ilathid/ilathidEngine | refs/heads/master | setup.py | 1 | # A setup script showing how to extend py2exe.
#
# In this case, the py2exe command is subclassed to create an installation
# script for InnoSetup, which can be compiled with the InnoSetup compiler
# to a single file windows installer.
#
# By default, the installer will be created as dist\Output\setup.exe.
#@PyDevCodeAnalysisIgnore
from distutils.core import setup
import py2exe
import sys
# Those are NOT system DLLs to be ignore, manually fix py2exe's stupidity
origIsSystemDLL = py2exe.build_exe.isSystemDLL
def isSystemDLL(pathname):
    """py2exe override: treat SDL_ttf/libogg as bundleable, not system DLLs.

    NOTE(review): relies on the module-level `import os` that appears later
    in this file having run before py2exe calls this hook — confirm ordering.
    """
    if os.path.basename(pathname).lower() in ["sdl_ttf.dll", "libogg-0.dll"]:
        return 0
    return origIsSystemDLL(pathname)
py2exe.build_exe.isSystemDLL = isSystemDLL
################################################################
# arguments for the setup() call
# py2exe target: the GUI launcher (built into prog\Ilathid.exe).
Ilathid = dict(
    script = "Ilathid.pyw",
    icon_resources = [(1,"mandil.ico")],
    dest_base = r"prog\Ilathid")
# py2exe target: the console engine (currently not built; see setup() below).
engine = dict(
    script = "engine\\enginemain.py",
    dest_base = r"prog\engine")
# Shared library archive placed next to the executables.
zipfile = r"lib\shardlib"
# py2exe has a hard time with OpenGL so we exclude it and then bundle it manually
options = {"py2exe": {"compressed": 1,
                      "optimize": 2,
                      "includes": ["ctypes", "logging", "pygame"],
                      "excludes": ["OpenGL"]}}
################################################################
import os,glob
class InnoScript:
    """Generates (and optionally compiles) an InnoSetup .iss installer script
    from the files py2exe produced in the dist directory.

    Python 2 code (uses `print >> file` statements).
    """
    # NOTE(review): windows_exe_files/lib_files use mutable default
    # arguments; they are only read here, so no aliasing bug manifests,
    # but new code should use None defaults.
    def __init__(self,
                 name,
                 lib_dir,
                 dist_dir,
                 windows_exe_files = [],
                 lib_files = [],
                 version = "1.0"):
        self.lib_dir = lib_dir
        self.dist_dir = dist_dir
        # Normalize dist_dir to end with a path separator so chop() works.
        if not self.dist_dir[-1] in "\\/":
            self.dist_dir += "\\"
        self.name = name
        self.version = version
        # Store all file paths relative to dist_dir.
        self.windows_exe_files = [self.chop(p) for p in windows_exe_files]
        self.lib_files = [self.chop(p) for p in lib_files]
        self.data_files=[glob.glob("ilathid_low.gif"),glob.glob("Dni\\images\\*"), glob.glob("Dni\\movies\\*"),
                         glob.glob("Dni\\music\\*"), glob.glob("Dni\\slides\\*"),
                         glob.glob("Dni\\text/*"), glob.glob("engine\\enginedata\\cursors\\*")]
    def chop(self, pathname):
        """Strip the dist_dir prefix from *pathname*."""
        assert pathname.startswith(self.dist_dir)
        return pathname[len(self.dist_dir):]
    def create(self, pathname="dist\\Ilathid.iss"):
        """Write the InnoSetup script to *pathname*."""
        self.pathname = pathname
        ofi = self.file = open(pathname, "w")
        print >> ofi, "; WARNING: This script has been created by py2exe. Changes to this script"
        print >> ofi, "; will be overwritten the next time py2exe is run!"
        print >> ofi, r"[Languages]"
        print >> ofi, r'Name: en; MessagesFile: "compiler:Default.isl"'
        print >> ofi, r'Name: fr; MessagesFile: "compiler:Languages\French.isl"'
        #print >> ofi, r'Name: sp; MessagesFile: "compiler:Languages\SpanishStd-5-5.1.11.isl"'
        print >> ofi, r'Name: ge; MessagesFile: "compiler:Languages\German.isl"'
        #print >> ofi, r'Name: cr; MessagesFile: "compiler:Languages\Croatian-5-5.1.11.isl"'
        print >> ofi, r"[Messages]"
        print >> ofi, r"en.BeveledLabel=English"
        print >> ofi, r"fr.BeveledLabel=French"
        #print >> ofi, r"sp.BeveledLabel=Spanish"
        print >> ofi, r"ge.BeveledLabel=German"
        #print >> ofi, r"cr.BeveledLabel=Croatian"
        print >> ofi, r"[Setup]"
        print >> ofi, r"AppName=%s" % self.name
        print >> ofi, r"AppVerName=%s %s" % (self.name, self.version)
        print >> ofi, r"DefaultDirName={pf}\%s" % self.name
        print >> ofi, r"DefaultGroupName=%s" % self.name
        print >> ofi, r"Compression=lzma/max"
        print >> ofi, r"SolidCompression=yes"
        print >> ofi, r'SetupIconFile="..\Ilathid.ico"'
        print >> ofi
        print >> ofi, r"[Files]"
        print >> ofi, r'Source: "..\engine\enginedata\*"; DestDir: "{app}\prog\engine\enginedata"; Flags: ignoreversion recursesubdirs'
        print >> ofi, r'Source: "..\data\*"; DestDir: "{app}\prog\data"; Flags: ignoreversion recursesubdirs'
        for path in self.windows_exe_files + self.lib_files:
            print >> ofi, r'Source: "%s"; DestDir: "{app}\%s"; Flags: ignoreversion' % (path, os.path.dirname(path))
        # NOTE(review): the 'sp' and 'cr' readme entries below reference
        # languages that are commented out in the [Languages] section above;
        # InnoSetup may reject the script — confirm.
        print >> ofi, r'Source: "..\Readme.txt"; DestName: "Readme.txt"; DestDir: "{app}\prog"; Languages: en; Flags: isreadme'
        print >> ofi, r'Source: "..\ReadmeFR.txt"; DestName: "Lisezmoi.txt"; DestDir: "{app}\prog"; Languages: fr; Flags: isreadme'
        print >> ofi, r'Source: "..\ReadmeSP.txt"; DestName: "Leame.txt"; DestDir: "{app}\prog"; Languages: sp; Flags: isreadme'
        print >> ofi, r'Source: "..\ReadmeGE.txt"; DestName: "Liesmich.txt"; DestDir: "{app}\prog"; Languages: ge; Flags: isreadme'
        print >> ofi, r'Source: "..\ReadmeCR.txt"; DestName: "Citanjemene.txt"; DestDir: "{app}\prog"; Languages: cr; Flags: isreadme'
        print >> ofi
        print >> ofi, r"[Icons]"
        for path in self.windows_exe_files:
            print >> ofi, r'Name: "{group}\%s"; Filename: "{app}\%s"; WorkingDir: "{app}\%s"' % (self.name, path,os.path.dirname(path))
        print >> ofi, 'Name: "{group}\Uninstall %s"; Filename: "{uninstallexe}"' % self.name
    def compile(self):
        """Hand the generated .iss to the InnoSetup compiler.

        Tries ctypes first, then win32api, and finally falls back to the
        shell's file association via os.startfile.
        """
        try:
            import ctypes
        except ImportError:
            try:
                import win32api
            except ImportError:
                import os
                os.startfile(self.pathname)
            else:
                print "Ok, using win32api."
                win32api.ShellExecute(0, "compile",
                                      self.pathname,
                                      None,
                                      None,
                                      0)
        else:
            print "Cool, you have ctypes installed."
            res = ctypes.windll.shell32.ShellExecuteA(0, "compile",
                                                      self.pathname,
                                                      None,
                                                      None,
                                                      0)
            # ShellExecute returns a value <= 32 on failure.
            if res < 32:
                raise RuntimeError, "ShellExecute failed, error %d" % res
################################################################
from py2exe.build_exe import py2exe
import sys,os
# Make the bundled 'engine' package importable relative to this script's
# real location (sys.argv[0] may be a relative path).
newdirname = os.path.realpath(os.path.dirname(sys.argv[0]))
sys.path.append(os.path.join(newdirname, 'engine'))
class build_installer(py2exe):
    # This class first builds the exe file(s), then creates a Windows installer.
    # You need InnoSetup for it.
    def run(self):
        """Run the normal py2exe build, then generate and compile the
        InnoSetup installer script from its output."""
        # First, let py2exe do its work.
        py2exe.run(self)
        lib_dir = self.lib_dir
        dist_dir = self.dist_dir
        # create the Installer, using the files py2exe has created.
        script = InnoScript("Ilathid",
                            lib_dir,
                            dist_dir,
                            self.windows_exe_files,
                            self.lib_files)
        print "*** creating the inno setup script***"
        script.create()
        print "*** compiling the inno setup script***"
        script.compile()
# Note: By default the final setup.exe will be in an Output subdirectory.
################################################################
# Drive the build: 'python setup.py py2exe' produces the exe and installer.
setup(
    options = options,
    # The lib directory contains everything except the executables and the python dll.
    zipfile = zipfile,
    windows = [Ilathid],
    #console = [engine],
    # use our build_installer class as extended py2exe build command
    cmdclass = {"py2exe": build_installer},
    )
|
imoseyon/leanKernel-d2usc-deprecated | refs/heads/lk-tw | external/webkit/Tools/Scripts/webkitpy/tool/bot/irc_command.py | 15 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import random
from webkitpy.common.config import irc as config_irc
from webkitpy.common.config import urls
from webkitpy.common.config.committers import CommitterList
from webkitpy.common.net.bugzilla import parse_bug_id
from webkitpy.common.system.executive import ScriptError
from webkitpy.tool.bot.queueengine import TerminateQueue
from webkitpy.tool.grammar import join_with_separators
# FIXME: Merge with Command?
class IRCCommand(object):
    """Abstract base class for sheriffbot IRC commands.

    Subclasses implement execute(nick, args, tool, sheriff) and either
    return a reply string or post directly via tool.irc().
    """
    def execute(self, nick, args, tool, sheriff):
        raise NotImplementedError, "subclasses must implement"
class LastGreenRevision(IRCCommand):
    """Replies with a link to the last revision where the bots were green."""
    def execute(self, nick, args, tool, sheriff):
        revision = tool.buildbot.last_green_revision()
        link = urls.view_revision_url(revision)
        return "%s: %s" % (nick, link)
class Restart(IRCCommand):
    """Posts "Restarting..." and raises TerminateQueue to stop the queue loop."""
    def execute(self, nick, args, tool, sheriff):
        messenger = tool.irc()
        messenger.post("Restarting...")
        raise TerminateQueue()
class Rollout(IRCCommand):
    """Creates a rollout (revert) patch for one or more SVN revisions.

    Usage on IRC: rollout SVN_REVISION [SVN_REVISIONS...] REASON
    """
    def _parse_args(self, args):
        """Split args into leading revision numbers and the trailing reason.

        NOTE(review): raises IndexError when args is empty — confirm callers
        always pass at least one token.
        """
        read_revision = True
        rollout_reason = []
        # the first argument must be a revision number
        svn_revision_list = [args[0].lstrip("r")]
        if not svn_revision_list[0].isdigit():
            read_revision = False
        for arg in args[1:]:
            if arg.lstrip("r").isdigit() and read_revision:
                svn_revision_list.append(arg.lstrip("r"))
            else:
                # First non-numeric token ends the revision list; everything
                # from here on is part of the reason.
                read_revision = False
                rollout_reason.append(arg)
        return svn_revision_list, rollout_reason
    def execute(self, nick, args, tool, sheriff):
        svn_revision_list, rollout_reason = self._parse_args(args)
        if (len(svn_revision_list) == 0) or (len(rollout_reason) == 0):
            tool.irc().post("%s: Usage: SVN_REVISION [SVN_REVISIONS] REASON" % nick)
            return
        rollout_reason = " ".join(rollout_reason)
        tool.irc().post("Preparing rollout for %s..." %
                        join_with_separators(["r" + str(revision) for revision in svn_revision_list]))
        try:
            complete_reason = "%s (Requested by %s on %s)." % (
                rollout_reason, nick, config_irc.channel)
            bug_id = sheriff.post_rollout_patch(svn_revision_list, complete_reason)
            bug_url = tool.bugs.bug_url_for_bug_id(bug_id)
            tool.irc().post("%s: Created rollout: %s" % (nick, bug_url))
        except ScriptError, e:
            # Report the failure, then try to surface any bug that may still
            # have been filed before the script errored out.
            tool.irc().post("%s: Failed to create rollout patch:" % nick)
            tool.irc().post("%s" % e)
            bug_id = parse_bug_id(e.output)
            if bug_id:
                tool.irc().post("Ugg...  Might have created %s" %
                                tool.bugs.bug_url_for_bug_id(bug_id))
class Help(IRCCommand):
    """Lists the trigger words of every registered command."""
    def execute(self, nick, args, tool, sheriff):
        command_names = ", ".join(commands.keys())
        return "%s: Available commands: %s" % (nick, command_names)
class Hi(IRCCommand):
    """Responds to a greeting with a randomly chosen quip."""
    def execute(self, nick, args, tool, sheriff):
        candidates = tool.bugs.quips()
        # Always have at least one fallback quip available.
        candidates.append('"Only you can prevent forest fires." -- Smokey the Bear')
        return random.choice(candidates)
class Whois(IRCCommand):
    """Looks up the IRC nickname registered for a bugzilla email address."""
    def execute(self, nick, args, tool, sheriff):
        if len(args) != 1:
            return "%s: Usage: BUGZILLA_EMAIL" % nick
        address = args[0]
        contributor = CommitterList().committer_by_email(address)
        if not contributor:
            return "%s: Sorry, I don't know %s. Maybe you could introduce me?" % (nick, address)
        if not contributor.irc_nickname:
            return "%s: %s hasn't told me their nick. Boo hoo :-(" % (nick, address)
        return "%s: %s is %s. Why do you ask?" % (nick, address, contributor.irc_nickname)
class Eliza(IRCCommand):
    """Psychoanalyzes the speaker using the classic ELIZA chatterbot."""
    therapist = None
    def __init__(self):
        # Lazily construct the therapist once and share it class-wide.
        if not self.therapist:
            import webkitpy.thirdparty.autoinstalled.eliza as eliza
            Eliza.therapist = eliza.eliza()
    def execute(self, nick, args, tool, sheriff):
        message = " ".join(args)
        return "%s: %s" % (nick, self.therapist.respond(message))
# FIXME: Lame.  We should have an auto-registering CommandCenter.
# Maps the IRC trigger word to the class implementing it.
# NOTE(review): Eliza is deliberately absent here — presumably used as a
# fallback responder by the dispatcher; confirm in the bot's dispatch code.
commands = {
    "help": Help,
    "hi": Hi,
    "last-green-revision": LastGreenRevision,
    "restart": Restart,
    "rollout": Rollout,
    "whois": Whois,
}
|
infinit/grpc | refs/heads/master | src/python/grpcio/grpc/framework/foundation/future.py | 28 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A Future interface.
Python doesn't have a Future interface in its standard library. In the absence
of such a standard, three separate, incompatible implementations
(concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This
interface attempts to be as compatible as possible with
concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor
method.
Unlike the concrete and implemented Future classes listed above, the Future
class defined in this module is an entirely abstract interface that anyone may
implement and use.
The one known incompatibility between this interface and the interface of
concurrent.futures.Future is that this interface defines its own CancelledError
and TimeoutError exceptions rather than raising the implementation-private
concurrent.futures._base.CancelledError and the
built-in-but-only-in-3.3-and-later TimeoutError.
"""
import abc
import six
class TimeoutError(Exception):
    """Indicates that a particular call timed out.
    Deliberately distinct from the 3.3+ builtin TimeoutError so the interface
    works across Python versions (see the module docstring).
    """
class CancelledError(Exception):
    """Indicates that the computation underlying a Future was cancelled.
    Defined here rather than reusing concurrent.futures'
    implementation-private CancelledError (see the module docstring).
    """
# six.with_metaclass makes this an ABC on both Python 2 and 3; every
# @abc.abstractmethod below must be overridden by implementations.
class Future(six.with_metaclass(abc.ABCMeta)):
    """A representation of a computation in another control flow.
    Computations represented by a Future may be yet to be begun, may be ongoing,
    or may have already completed.
    """
    # NOTE(nathaniel): This isn't the return type that I would want to have if it
    # were up to me. Were this interface being written from scratch, the return
    # type of this method would probably be a sum type like:
    #
    # NOT_COMMENCED
    # COMMENCED_AND_NOT_COMPLETED
    # PARTIAL_RESULT<Partial_Result_Type>
    # COMPLETED<Result_Type>
    # UNCANCELLABLE
    # NOT_IMMEDIATELY_DETERMINABLE
    @abc.abstractmethod
    def cancel(self):
        """Attempts to cancel the computation.
        This method does not block.
        Returns:
          True if the computation has not yet begun, will not be allowed to take
            place, and determination of both was possible without blocking. False
            under all other circumstances including but not limited to the
            computation's already having begun, the computation's already having
            finished, and the computation's having been scheduled for execution on a
            remote system for which a determination of whether or not it commenced
            before being cancelled cannot be made without blocking.
        """
        raise NotImplementedError()
    # NOTE(nathaniel): Here too this isn't the return type that I'd want this
    # method to have if it were up to me. I think I'd go with another sum type
    # like:
    #
    # NOT_CANCELLED (this object's cancel method hasn't been called)
    # NOT_COMMENCED
    # COMMENCED_AND_NOT_COMPLETED
    # PARTIAL_RESULT<Partial_Result_Type>
    # COMPLETED<Result_Type>
    # UNCANCELLABLE
    # NOT_IMMEDIATELY_DETERMINABLE
    #
    # Notice how giving the cancel method the right semantics obviates most
    # reasons for this method to exist.
    @abc.abstractmethod
    def cancelled(self):
        """Describes whether the computation was cancelled.
        This method does not block.
        Returns:
          True if the computation was cancelled any time before its result became
            immediately available. False under all other circumstances including but
            not limited to this object's cancel method not having been called and
            the computation's result having become immediately available.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def running(self):
        """Describes whether the computation is taking place.
        This method does not block.
        Returns:
          True if the computation is scheduled to take place in the future or is
            taking place now, or False if the computation took place in the past or
            was cancelled.
        """
        raise NotImplementedError()
    # NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
    # would rather this only returned True in cases in which the underlying
    # computation completed successfully. A computation's having been cancelled
    # conflicts with considering that computation "done".
    @abc.abstractmethod
    def done(self):
        """Describes whether the computation has taken place.
        This method does not block.
        Returns:
          True if the computation is known to have either completed or have been
            unscheduled or interrupted. False if the computation may possibly be
            executing or scheduled to execute later.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def result(self, timeout=None):
        """Accesses the outcome of the computation or raises its exception.
        This method may return immediately or may block.
        Args:
          timeout: The length of time in seconds to wait for the computation to
            finish or be cancelled, or None if this method should block until the
            computation has finished or is cancelled no matter how long that takes.
        Returns:
          The return value of the computation.
        Raises:
          TimeoutError: If a timeout value is passed and the computation does not
            terminate within the allotted time.
          CancelledError: If the computation was cancelled.
          Exception: If the computation raised an exception, this call will raise
            the same exception.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def exception(self, timeout=None):
        """Return the exception raised by the computation.
        This method may return immediately or may block.
        Args:
          timeout: The length of time in seconds to wait for the computation to
            terminate or be cancelled, or None if this method should block until
            the computation is terminated or is cancelled no matter how long that
            takes.
        Returns:
          The exception raised by the computation, or None if the computation did
            not raise an exception.
        Raises:
          TimeoutError: If a timeout value is passed and the computation does not
            terminate within the allotted time.
          CancelledError: If the computation was cancelled.
        """
        raise NotImplementedError()
    # Traceback accessor adopted from ndb.Future (see module docstring).
    @abc.abstractmethod
    def traceback(self, timeout=None):
        """Access the traceback of the exception raised by the computation.
        This method may return immediately or may block.
        Args:
          timeout: The length of time in seconds to wait for the computation to
            terminate or be cancelled, or None if this method should block until
            the computation is terminated or is cancelled no matter how long that
            takes.
        Returns:
          The traceback of the exception raised by the computation, or None if the
            computation did not raise an exception.
        Raises:
          TimeoutError: If a timeout value is passed and the computation does not
            terminate within the allotted time.
          CancelledError: If the computation was cancelled.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def add_done_callback(self, fn):
        """Adds a function to be called at completion of the computation.
        The callback will be passed this Future object describing the outcome of
        the computation.
        If the computation has already completed, the callback will be called
        immediately.
        Args:
          fn: A callable taking this Future object as its single parameter.
        """
        raise NotImplementedError()
|
sourtin/igem15-sw | refs/heads/master | img_processing/identificationTesting/contrastImprovement.py | 2 | #FROM COMPUTER SCIENCE LECTURE NOTES
__author__ = 'Ocean'
import cv2
import numpy
"""p328 in Comp. Sci. Graphics Lecture Notes"""
class OutOfBoundsException(Exception):
    """Raised when an intensity value falls outside the 0-255 range.

    The offending value is kept on ``self.val``; constructing the exception
    also echoes a diagnostic line to stdout.
    """
    def __init__(self, val):
        super(OutOfBoundsException, self).__init__(val)
        self.val = val
        self.printError(val)

    def printError(self, val):
        # Reports self.val (the argument is ignored, matching the original).
        message = "EXCEPTION - OutOfBoundsException - Input Value out of range:\t"
        print(message + str(self.val))
class contrastImprovement():
    """Piecewise-linear contrast stretch.

    Values at or below the black limit map to 0, values above the white
    limit map to 255, and values in between scale linearly along the line
    joining (blackLim, 0) and (whiteLim, 255).
    """
    def __init__(self, maxBlackVal=15, minWhiteVal=200):
        self.blackLim = maxBlackVal
        self.whiteLim = minWhiteVal

    @staticmethod
    def improveContrastVal(val, blackLim, whiteLim):
        """Map a single 0-255 intensity through the stretch curve.

        Raises OutOfBoundsException (after printing the value) when the
        input lies outside 0..255.
        """
        if not (0 <= val <= 255):
            print(val)
            raise OutOfBoundsException(val)
        if val <= blackLim:
            return 0  # blacks stay black
        if val > whiteLim:
            return 255  # whites stay white
        # Straight line between the two limits; dy/dx then rounded to int.
        slope = 255 / (whiteLim - blackLim)
        return int(round(slope * (val - blackLim)))

    @staticmethod
    def improveContrastGrayscale(img, blackLim = 15, whiteLim = 200):
        """Return a new uint8 image with the stretch applied to every pixel."""
        height, width = img.shape
        out = numpy.empty([height, width], dtype=numpy.uint8)
        for row in range(height):
            for col in range(width):
                stretched = contrastImprovement.improveContrastVal(
                    img.item(row, col), blackLim, whiteLim)
                out.itemset(row, col, stretched)
        return out

    @staticmethod
    def improveContrastColor(img, blackLim = 15, whiteLim = 200):
        """Return a new uint8 color image, stretching each channel sample."""
        height, width, depth = img.shape
        out = numpy.empty([height, width, depth], dtype=numpy.uint8)
        for row in range(height):
            for col in range(width):
                for chan in range(depth):
                    stretched = contrastImprovement.improveContrastVal(
                        img.item(row, col, chan), blackLim, whiteLim)
                    out.itemset((row, col, chan), stretched)
        return out
#==========TEST=========================================================#
# im1 = cv2.imread("C:\\Users\\Ocean\\Pictures\\football.jpg", cv2.IMREAD_GRAYSCALE)
# cv2.imshow("WITHOUT Contrast improvement", im1)
#
#
# for i in range(0,255,10):
# contraster = contrastImprovement(15, i)
# cont = contraster.improveContrastGrayscale(im1)
# #print(cont)
# cv2.imshow("With Contrast improvement",cont)
# cv2.waitKey(0)
# im2 = cv2.imread("C:\\Users\\Ocean\\Pictures\\football.jpg")
# cv2.imshow("WITHOUT Contrast improvement", im2)
#
#
# for i in range(0,255,10):
# contraster = contrastImprovement(15, i)
# cont = contraster.improveContrastColor(im2)
# #print(cont)
# cv2.imshow("With Contrast improvement",cont)
# cv2.waitKey(0) |
minzhang28/docker-py | refs/heads/master | docker/api/build.py | 3 | import logging
import os
import re
import json
from .. import constants
from .. import errors
from .. import auth
from .. import utils
log = logging.getLogger(__name__)
class BuildApiMixin(object):
    """Mixin implementing the image-build endpoint of the Docker remote API."""

    def build(self, path=None, tag=None, quiet=False, fileobj=None,
              nocache=False, rm=False, stream=False, timeout=None,
              custom_context=False, encoding=None, pull=False,
              forcerm=False, dockerfile=None, container_limits=None,
              decode=False, buildargs=None, gzip=False):
        """Build an image from a build context or a remote URL.

        Exactly one context source is used, chosen in this order:
        ``custom_context``+``fileobj`` (pre-made tar), a bare ``fileobj``
        (single Dockerfile), a remote ``path`` (http/git URL handed to the
        daemon), or a local directory ``path`` which is tarred here.

        Returns:
            A streaming generator of build output when streaming, otherwise
            a tuple of (image id or None, full build output string).

        Raises:
            TypeError: if no context source is given or ``path`` is not a
                directory.
            errors.DockerException: for invalid option combinations.
            errors.InvalidVersion: if an option is unsupported by the
                daemon's API version.
        """
        remote = context = None
        headers = {}
        container_limits = container_limits or {}
        if path is None and fileobj is None:
            raise TypeError("Either path or fileobj needs to be provided.")
        if gzip and encoding is not None:
            # gzip implies its own Content-Encoding; the two would conflict.
            raise errors.DockerException(
                'Can not use custom encoding if gzip is enabled'
            )

        for key in container_limits.keys():
            if key not in constants.CONTAINER_LIMITS_KEYS:
                raise errors.DockerException(
                    'Invalid container_limits key {0}'.format(key)
                )

        if custom_context:
            # Caller supplies a ready-made tar archive as the context.
            if not fileobj:
                raise TypeError("You must specify fileobj with custom_context")
            context = fileobj
        elif fileobj is not None:
            # Single-file Dockerfile: wrap it into a minimal build context.
            context = utils.mkbuildcontext(fileobj)
        elif path.startswith(('http://', 'https://',
                              'git://', 'github.com/', 'git@')):
            # Remote context: the daemon fetches it itself.
            remote = path
        elif not os.path.isdir(path):
            raise TypeError("You must specify a directory to build in path")
        else:
            # Local directory: honour .dockerignore when building the tar.
            dockerignore = os.path.join(path, '.dockerignore')
            exclude = None
            if os.path.exists(dockerignore):
                with open(dockerignore, 'r') as f:
                    exclude = list(filter(bool, f.read().splitlines()))
            context = utils.tar(
                path, exclude=exclude, dockerfile=dockerfile, gzip=gzip
            )
            encoding = 'gzip' if gzip else encoding

        if utils.compare_version('1.8', self._version) >= 0:
            # API >= 1.8 always streams build output.
            stream = True

        if dockerfile and utils.compare_version('1.17', self._version) < 0:
            raise errors.InvalidVersion(
                'dockerfile was only introduced in API version 1.17'
            )

        if utils.compare_version('1.19', self._version) < 0:
            # Older daemons expect an int flag rather than a boolean.
            pull = 1 if pull else 0

        u = self._url('/build')
        params = {
            't': tag,
            'remote': remote,
            'q': quiet,
            'nocache': nocache,
            'rm': rm,
            'forcerm': forcerm,
            'pull': pull,
            'dockerfile': dockerfile,
        }
        params.update(container_limits)

        if buildargs:
            if utils.version_gte(self._version, '1.21'):
                params.update({'buildargs': json.dumps(buildargs)})
            else:
                raise errors.InvalidVersion(
                    'buildargs was only introduced in API version 1.21'
                )

        if context is not None:
            headers = {'Content-Type': 'application/tar'}
            if encoding:
                headers['Content-Encoding'] = encoding

        if utils.compare_version('1.9', self._version) >= 0:
            self._set_auth_headers(headers)

        response = self._post(
            u,
            data=context,
            params=params,
            headers=headers,
            stream=stream,
            timeout=timeout,
        )

        # Close the tar we created ourselves; caller-owned fileobjs are
        # left open for the caller to manage.
        if context is not None and not custom_context:
            context.close()

        if stream:
            return self._stream_helper(response, decode=decode)
        else:
            # Non-streaming path: scrape the image id out of the output.
            output = self._result(response)
            srch = r'Successfully built ([0-9a-f]+)'
            match = re.search(srch, output)
            if not match:
                return None, output
            return match.group(1), output

    def _set_auth_headers(self, headers):
        """Attach registry credentials to ``headers`` as X-Registry-Config."""
        log.debug('Looking for auth config')

        # If we don't have any auth data so far, try reloading the config
        # file one more time in case anything showed up in there.
        if not self._auth_configs:
            log.debug("No auth config in memory - loading from filesystem")
            self._auth_configs = auth.load_config()

        # Send the full auth configuration (if any exists), since the build
        # could use any (or all) of the registries.
        if self._auth_configs:
            log.debug(
                'Sending auth config ({0})'.format(
                    ', '.join(repr(k) for k in self._auth_configs.keys())
                )
            )
            if utils.compare_version('1.19', self._version) >= 0:
                headers['X-Registry-Config'] = auth.encode_header(
                    self._auth_configs
                )
            else:
                # Pre-1.19 daemons expect the config nested under 'configs'.
                headers['X-Registry-Config'] = auth.encode_header({
                    'configs': self._auth_configs
                })
        else:
            log.debug('No auth config found')
|
HarryRybacki/osf.io | refs/heads/develop | scripts/tests/test_migrate_registration_and_fork_log.py | 19 | from website.models import Node
from framework.auth import Auth
from tests.base import OsfTestCase
from tests.factories import ProjectFactory, UserFactory, NodeFactory
from scripts.migrate_registration_and_fork_log import (
get_parent, get_all_parents
)
class TestMigrateLogs(OsfTestCase):
    """Tests for the registration/fork log migration helpers."""

    def tearDown(self):
        OsfTestCase.tearDown(self)
        # Remove all nodes so each test starts from a clean collection.
        Node.remove()

    def test_get_parent(self):
        user = UserFactory()
        auth = Auth(user=user)
        project1 = ProjectFactory(creator=user)
        project2 = project1.fork_node(auth=auth)
        forked_from = get_parent(project2)
        assert forked_from is project1
        project3 = project2.register_node(schema=None, auth=auth, template="foo", data="bar")
        registered_from = get_parent(project3)
        assert registered_from is project2

    def test_get_all_parents(self):
        user = UserFactory()
        auth = Auth(user=user)
        project1 = ProjectFactory(creator=user)
        project2 = project1.fork_node(auth=auth)
        project3 = project2.register_node(schema=None, auth=auth, template="foo", data="bar")
        parent_list = get_all_parents(project3)
        # Use == for value comparison: `is 2` relied on CPython small-int
        # interning and is a SyntaxWarning on modern Python.
        assert len(parent_list) == 2
        assert project1 in parent_list
        assert project2 in parent_list
weizhenwei/wireshark | refs/heads/master | tools/dftestlib/tvb.py | 40 | # Copyright (c) 2013 by Gilbert Ramirez <gram@alumni.rice.edu>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import unittest
from dftestlib import dftest
class testTVB(dftest.DFTest):
    """Display-filter tests for raw packet bytes (tvb): protocol equality,
    byte slicing, and the `contains` operator, run against http.pcap."""
    trace_file = "http.pcap"

    def test_eq_1(self):
        # We expect 0 because even though this byte
        # string matches the 'eth' protocol, protocols cannot
        # work in an '==' comparison yet.
        dfilter = "eth == 00:e0:81:00:b0:28:00:09:6b:88:f6:c9:08:00"
        self.assertDFilterCount(dfilter, 0)

    def test_slice_1(self):
        # First two bytes of the IP header (version/IHL + DSCP).
        dfilter = "ip[0:2] == 45:00"
        self.assertDFilterCount(dfilter, 1)

    def test_slice_2(self):
        dfilter = "ip[0:2] == 00:00"
        self.assertDFilterCount(dfilter, 0)

    def test_slice_3(self):
        dfilter = "ip[2:2] == 00:c1"
        self.assertDFilterCount(dfilter, 1)

    @unittest.skip("This doesn't work yet in Wireshark")
    def test_slice_4(self):
        # Negative offsets (from end of the field) are not yet supported.
        dfilter = "ip[-5] == 0x86"
        self.assertDFilterCount(dfilter, 0)

    @unittest.skip("This doesn't work yet in Wireshark")
    def test_slice_5(self):
        dfilter = "ip[-1] == 0x86"
        self.assertDFilterCount(dfilter, 1)

    def test_contains_1(self):
        dfilter = "eth contains 6b"
        self.assertDFilterCount(dfilter, 1)

    def test_contains_2(self):
        dfilter = "eth contains 09:6b:88"
        self.assertDFilterCount(dfilter, 1)

    def test_contains_3(self):
        dfilter = "eth contains 00:e0:81:00:b0:28:00:09:6b:88:f5:c9:08:00"
        self.assertDFilterCount(dfilter, 1)

    def test_contains_4(self):
        dfilter = "eth contains ff:ff:ff"
        self.assertDFilterCount(dfilter, 0)

    def test_contains_5(self):
        # `contains` also accepts quoted ASCII strings.
        dfilter = 'http contains "HEAD"'
        self.assertDFilterCount(dfilter, 1)
|
vikatory/kbengine | refs/heads/master | kbe/res/scripts/common/Lib/multiprocessing/__init__.py | 116 | #
# Package analogous to 'threading.py' but using processes
#
# multiprocessing/__init__.py
#
# This package is intended to duplicate the functionality (and much of
# the API) of threading.py but uses processes instead of threads. A
# subpackage 'multiprocessing.dummy' has the same API but is a simple
# wrapper for 'threading'.
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
import sys
from . import context

#
# Copy stuff from default context
#

# Re-export every public name of the default context (Process, Queue,
# Pool, ...) so `multiprocessing.X` keeps working as a flat namespace.
globals().update((name, getattr(context._default_context, name))
                 for name in context._default_context.__all__)
__all__ = context._default_context.__all__

#
# XXX These should not really be documented or public.
#

# Internal logging levels (below DEBUG=10 and between DEBUG and WARNING=30).
SUBDEBUG = 5
SUBWARNING = 25

#
# Alias for main module -- will be reset by bootstrapping child processes
#

if '__main__' in sys.modules:
    sys.modules['__mp_main__'] = sys.modules['__main__']
|
mermi/bedrock | refs/heads/master | bedrock/externalfiles/models.py | 18 | from django.db import models
class ExternalFile(models.Model):
    """Locally cached copy of a remotely hosted file."""
    # `name` doubles as the primary key, so each file is stored exactly once.
    name = models.CharField(max_length=50, primary_key=True)
    content = models.TextField()
    # Refreshed automatically on every save.
    last_modified = models.DateTimeField(auto_now=True)
|
clumsy/intellij-community | refs/heads/master | python/lib/Lib/site-packages/django/contrib/localflavor/it/it_region.py | 437 | # -*- coding: utf-8 -*
# (code, human-readable name) choices for the twenty Italian regions.
REGION_CHOICES = (
    ('ABR', 'Abruzzo'),
    ('BAS', 'Basilicata'),
    ('CAL', 'Calabria'),
    ('CAM', 'Campania'),
    ('EMR', 'Emilia-Romagna'),
    ('FVG', 'Friuli-Venezia Giulia'),
    ('LAZ', 'Lazio'),
    ('LIG', 'Liguria'),
    ('LOM', 'Lombardia'),
    ('MAR', 'Marche'),
    ('MOL', 'Molise'),
    ('PMN', 'Piemonte'),
    ('PUG', 'Puglia'),
    ('SAR', 'Sardegna'),
    ('SIC', 'Sicilia'),
    ('TOS', 'Toscana'),
    ('TAA', 'Trentino-Alto Adige'),
    ('UMB', 'Umbria'),
    ('VAO', u'Valle d’Aosta'),
    ('VEN', 'Veneto'),
)
|
MerleLK/StudentSystem | refs/heads/master | accounts/urls.py | 1 | # coding=utf-8
from django.conf.urls import url

from .views import index, register_view, login_view, logout_view

# NOTE(review): these patterns have no trailing '$', so e.g. r'^register'
# also matches '/registerfoo' -- confirm prefix matching is intended.
urlpatterns = [
    url(r'^$', index),
    url(r'^register', register_view),
    url(r'^login', login_view),
    url(r'^logout', logout_view),
]
|
thanhacun/odoo | refs/heads/8.0 | addons/association/__init__.py | 886 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
dimaleks/uDeviceX | refs/heads/master | tests/walls/analytic/plates.py | 1 | #!/usr/bin/env python
import mirheo as mir

dt = 0.001

ranks = (1, 1, 1)
domain = (8, 16, 8)
force = (1.0, 0, 0)  # constant body force driving the flow along x
density = 4

u = mir.Mirheo(ranks, domain, debug_level=3, log_filename='log', no_splash=True)

# DPD solvent particles, seeded uniformly over the domain.
pv = mir.ParticleVectors.ParticleVector('pv', mass = 1)
ic = mir.InitialConditions.Uniform(number_density=density)
u.registerParticleVector(pv=pv, ic=ic)

dpd = mir.Interactions.Pairwise('dpd', rc=1.0, kind="DPD", a=10.0, gamma=50.0, kBT=1.0, power=0.5)
u.registerInteraction(dpd)

# Two parallel plates one unit inside the z boundaries (channel geometry).
plate_lo = mir.Walls.Plane("plate_lo", (0, 0, -1), (0, 0,              1))
plate_hi = mir.Walls.Plane("plate_hi", (0, 0,  1), (0, 0, domain[2] - 1))
u.registerWall(plate_lo, 0)
u.registerWall(plate_hi, 0)

vv = mir.Integrators.VelocityVerlet("vv")
# Frozen wall particles so the walls interact with the solvent via DPD.
frozen = u.makeFrozenWallParticles(pvName="plates", walls=[plate_lo, plate_hi], interactions=[dpd], integrator=vv, number_density=density, dt=dt)

u.setWall(plate_lo, pv)
u.setWall(plate_hi, pv)

for p in (pv, frozen):
    u.setInteraction(dpd, p, pv)

# Integrator with the constant driving force applied to the solvent.
vv_dp = mir.Integrators.VelocityVerlet_withConstForce("vv_dp", force)
u.registerIntegrator(vv_dp)
u.setIntegrator(vv_dp, pv)

sample_every = 2
dump_every = 1000
bin_size = (1., 1., 0.5)

# Time-averaged velocity field dumped to HDF5 for the profile check below.
u.registerPlugins(mir.Plugins.createDumpAverage('field', [pv], sample_every, dump_every, bin_size, ["velocities"], 'h5/solvent-'))

u.run(7002, dt=dt)

# nTEST: walls.analytic.plates
# cd walls/analytic
# rm -rf h5
# mir.run --runargs "-n 2" ./plates.py
# mir.avgh5 xy velocities h5/solvent-0000[4-7].h5 | awk '{print $1}' > profile.out.txt
|
ltilve/chromium | refs/heads/igalia-sidebar | chrome/test/chromeos/utilities/vm_setup_state.py | 184 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import pyauto_functional  # has to be imported before pyauto
import pyauto
import sys

# Port ChromeDriver listens on inside the prepared VM snapshot.
VM_CHROMEDRIVER_PORT = 4444

if __name__ == '__main__':
  """Script to prepare machine state for use as a WebDriver-controlled VM.

  This script is intended to be run manually over ssh on a Chromium OS virtual
  machine qcow2 image. Manually create a snapshot of the VM when prompted. The
  resulting VM image will have ChromeDriver listening on port 4444.
  """
  pyauto_suite = pyauto.PyUITestSuite(sys.argv)
  pyuitest = pyauto.PyUITest()
  pyuitest.setUp()

  # Start ChromeDriver; it stays alive inside the snapshot taken below.
  driver = pyuitest.NewWebDriver(port=VM_CHROMEDRIVER_PORT)
  logging.info('WebDriver is listening on port %d.'
               % VM_CHROMEDRIVER_PORT)

  logging.info('Machine prepared for VM snapshot.')
  # Block until the operator has snapshotted the VM in this exact state.
  raw_input('Please snapshot the VM and hit ENTER when done to '
            'terminate this script.')

  pyuitest.tearDown()
  del pyuitest
  del pyauto_suite
|
conan-equal-newone/yenten | refs/heads/master | contrib/testgen/base58.py | 99 | # Copyright (c) 2012-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Bitcoin base58 encoding and decoding.
Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
# for compatibility with following code...
class SHA256:
    """PyCrypto-style shim: exposes hashlib.sha256 as SHA256.new."""
    new = hashlib.sha256

if str != bytes:
    # Python 3.x
    # Iterating bytes already yields ints, so neutralise ord() and make
    # chr() produce a single byte; keeps the py2-style code below working.
    def ord(c):
        return c
    def chr(n):
        return bytes( (n,) )

# Bitcoin base58 alphabet: omits 0, O, I and l to avoid visual confusion.
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars
def b58encode(v):
    """Encode v, which is a string of bytes, to base58.

    Leading zero bytes are encoded as leading '1' characters (Bitcoin's
    leading-zero compression).
    """
    # Local alphabet keeps this function self-contained.
    chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

    # bytearray yields ints on both Python 2 and 3, so no ord() shim needed.
    data = bytearray(v)
    long_value = 0
    for byte in data:
        long_value = long_value * 256 + byte

    result = ''
    while long_value >= 58:
        long_value, mod = divmod(long_value, 58)
        result = chars[mod] + result
    result = chars[long_value] + result

    # Leading-zero compression: each leading 0x00 byte becomes a leading '1'.
    # (The previous check compared an int against '\0', which never matched
    # on Python 3 and silently dropped the padding.)
    nPad = 0
    for byte in data:
        if byte == 0:
            nPad += 1
        else:
            break

    return (chars[0] * nPad) + result
def b58decode(v, length=None):
    """Decode base58 string v into bytes.

    Returns None when `length` is given and the decoded payload does not
    have exactly that many bytes.
    """
    chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

    long_value = 0
    for (i, c) in enumerate(v[::-1]):
        long_value += chars.find(c) * (58 ** i)

    # Emit base-256 digits into a bytearray: identical on Python 2 and 3,
    # with no dependence on the module-level chr() shim.
    payload = bytearray()
    while long_value >= 256:
        long_value, mod = divmod(long_value, 256)
        payload.append(mod)
    payload.append(long_value)
    payload.reverse()

    # Leading '1' characters decode to leading zero bytes.
    nPad = 0
    for c in v:
        if c == chars[0]:
            nPad += 1
        else:
            break

    result = b'\x00' * nPad + bytes(payload)
    if length is not None and len(result) != length:
        return None

    return result
def checksum(v):
    """Return 32-bit checksum based on SHA256"""
    # Double-SHA256 truncated to the first four bytes (Bitcoin convention).
    # Calls hashlib directly instead of the module's PyCrypto-style shim.
    return hashlib.sha256(hashlib.sha256(v).digest()).digest()[0:4]
def b58encode_chk(v):
    """b58encode a string, with 32-bit checksum"""
    # Base58Check: append the 4-byte double-SHA256 checksum, then encode.
    return b58encode(v + checksum(v))
def b58decode_chk(v):
    """decode a base58 string, check and remove checksum"""
    result = b58decode(v)
    if result is None:
        return None
    # The last 4 bytes must equal the checksum of the preceding payload.
    if result[-4:] == checksum(result[:-4]):
        return result[:-4]
    else:
        return None
def get_bcaddress_version(strAddress):
    """ Returns None if strAddress is invalid. Otherwise returns integer version of address. """
    addr = b58decode_chk(strAddress)
    # A valid address payload is exactly 21 bytes: 1 version byte + 20-byte hash.
    if addr is None or len(addr)!=21: return None
    version = addr[0]
    # `ord` is the module shim: identity for Python 3 ints, builtin on Python 2.
    return ord(version)
if __name__ == '__main__':
    # Test case (from http://gitorious.org/bitcoin/python-base58.git)
    # Use == rather than `is`: identity comparison against an int literal
    # relies on CPython interning and warns on modern Python.
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
    _ohai = 'o hai'.encode('ascii')
    _tmp = b58encode(_ohai)
    assert _tmp == 'DYB3oMS'
    assert b58decode(_tmp, 5) == _ohai
    print("Tests passed")
|
2013Commons/HUE-SHARK | refs/heads/master | desktop/core/ext-py/Django-1.2.3/django/contrib/gis/models.py | 624 | from django.db import connection
if (hasattr(connection.ops, 'spatial_version') and
    not connection.ops.mysql):
    # Getting the `SpatialRefSys` and `GeometryColumns`
    # models for the default spatial backend. These
    # aliases are provided for backwards-compatibility.
    # NOTE(review): MySQL backends are skipped -- presumably they do not
    # provide these models; confirm against the backend implementation.
    SpatialRefSys = connection.ops.spatial_ref_sys()
    GeometryColumns = connection.ops.geometry_columns()
|
Alwnikrotikz/pyicqt | refs/heads/master | src/services/EntityTime.py | 4 | # Copyright 2004-2006 Daniel Henninger <jadestorm@nc.rr.com>
# Licensed for distribution under the GPL version 2, check COPYING for details
import utils
from twisted.words.xish.domish import Element
import config
from debug import LogEvent, INFO, WARN, ERROR
import globals
import time
class EntityTime:
    """jabber:iq:time handler: answers time queries addressed to the
    transport JID or to its users with utc/tz/display values."""

    def __init__(self, pytrans):
        self.pytrans = pytrans
        # Advertise the time feature for both the gateway JID and users.
        self.pytrans.disco.addFeature(globals.IQTIME, self.incomingIq, config.jid)
        self.pytrans.disco.addFeature(globals.IQTIME, self.incomingIq, "USER")

    def incomingIq(self, el):
        """Dispatch an incoming iq stanza; only type='get' is answered."""
        eltype = el.getAttribute("type")
        if eltype != "get": return # Only answer "get" stanzas
        self.sendTime(el)

    def sendTime(self, el):
        """Build and send the iq result mirroring the query's addressing."""
        LogEvent(INFO)
        iq = Element((None, "iq"))
        iq.attributes["type"] = "result"
        # Swap to/from so the reply goes back to the requester.
        iq.attributes["from"] = el.getAttribute("to")
        iq.attributes["to"] = el.getAttribute("from")
        if el.getAttribute("id"):
            iq.attributes["id"] = el.getAttribute("id")
        query = iq.addElement("query")
        query.attributes["xmlns"] = globals.IQTIME
        utc = query.addElement("utc")
        utc.addContent(str(time.strftime("%Y%m%dT%H:%M:%S")))
        tz = query.addElement("tz")
        tz.addContent(str(time.tzname[1]))
        display = query.addElement("display")
        display.addContent(str(time.ctime()))

        self.pytrans.send(iq)
|
e0ne/cinder | refs/heads/master | cinder/tests/fake_notifier.py | 8 | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import functools
import anyjson
from oslo import messaging
from cinder import rpc
# Notifications recorded by FakeNotifier, newest last; inspected by tests.
NOTIFICATIONS = []


def reset():
    """Clear all recorded notifications in place."""
    del NOTIFICATIONS[:]


FakeMessage = collections.namedtuple('Message',
                                     ['publisher_id', 'priority',
                                      'event_type', 'payload'])
class FakeNotifier(object):
    """Drop-in replacement for oslo.messaging's Notifier that records
    notifications in the module-level NOTIFICATIONS list instead of
    sending them."""

    def __init__(self, transport, publisher_id, serializer=None, driver=None,
                 topic=None, retry=None):
        self.transport = transport
        self.publisher_id = publisher_id
        # Expose debug/info/warn/error/critical as partials of _notify,
        # mirroring the real Notifier's priority methods.
        for priority in ['debug', 'info', 'warn', 'error', 'critical']:
            setattr(self, priority,
                    functools.partial(self._notify, priority.upper()))
        self._serializer = serializer or messaging.serializer.NoOpSerializer()
        self._topic = topic
        self.retry = retry

    def prepare(self, publisher_id=None):
        """Mirror messaging.Notifier.prepare: clone with a new publisher id."""
        if publisher_id is None:
            publisher_id = self.publisher_id
        return self.__class__(self.transport, publisher_id, self._serializer)

    def _notify(self, priority, ctxt, event_type, payload):
        payload = self._serializer.serialize_entity(ctxt, payload)

        # NOTE(sileht): simulate the kombu serializer
        # this permit to raise an exception if something have not
        # been serialized correctly
        anyjson.serialize(payload)

        msg = dict(publisher_id=self.publisher_id,
                   priority=priority,
                   event_type=event_type,
                   payload=payload)
        NOTIFICATIONS.append(msg)
def stub_notifier(stubs):
    """Swap FakeNotifier in for oslo.messaging's Notifier (and for
    cinder's already-created global NOTIFIER, preserving its serializer)."""
    stubs.Set(messaging, 'Notifier', FakeNotifier)
    if rpc.NOTIFIER:
        serializer = getattr(rpc.NOTIFIER, '_serializer', None)
        stubs.Set(rpc, 'NOTIFIER', FakeNotifier(rpc.NOTIFIER.transport,
                                                rpc.NOTIFIER.publisher_id,
                                                serializer=serializer))
|
SilentCircle/sentry | refs/heads/master | tests/sentry/interfaces/template/tests.py | 5 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
from sentry.interfaces import Template
from sentry.models import Event
from sentry.testutils import TestCase
class TemplateTest(TestCase):
    """Tests for the Template interface: serialization, hashing and
    traceback rendering."""

    # Note: uses assertEqual throughout; assertEquals is a deprecated
    # unittest alias (removed in Python 3.12).

    def test_serialize(self):
        interface = Template(
            filename='foo.html',
            context_line='hello world',
            lineno=1,
        )

        result = interface.serialize()
        self.assertEqual(result['filename'], 'foo.html')
        self.assertEqual(result['context_line'], 'hello world')
        self.assertEqual(result['lineno'], 1)

    def test_get_hash(self):
        interface = Template(
            filename='foo.html',
            context_line='hello world',
            lineno=1,
        )

        result = interface.get_hash()
        self.assertEqual(result, ['foo.html', 'hello world'])

    @mock.patch('sentry.interfaces.get_context')
    @mock.patch('sentry.interfaces.Template.get_traceback')
    def test_to_string_returns_traceback(self, get_traceback, get_context):
        get_traceback.return_value = 'traceback'
        event = mock.Mock(spec=Event)

        interface = Template(
            filename='foo.html',
            context_line='hello world',
            lineno=1,
        )

        result = interface.to_string(event)
        get_traceback.assert_called_once_with(event, get_context.return_value)
        self.assertEqual(result, 'Stacktrace (most recent call last):\n\ntraceback')
|
TheTimmy/spack | refs/heads/develop | var/spack/repos/builtin/packages/minigmg/package.py | 3 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
class Minigmg(Package):
    """miniGMG is a compact benchmark for understanding the performance
    challenges associated with geometric multigrid solvers
    found in applications built from AMR MG frameworks
    like CHOMBO or BoxLib when running
    on modern multi- and manycore-based supercomputers.
    It includes both productive reference examples as well as
    highly-optimized implementations for CPUs and GPUs.
    It is sufficiently general that it has been used to evaluate
    a broad range of research topics including PGAS programming models
    and algorithmic tradeoffs inherit in multigrid. miniGMG was developed
    under the CACHE Joint Math-CS Institute.

    Note, miniGMG code has been supersceded by HPGMG. """

    homepage = "http://crd.lbl.gov/departments/computer-science/PAR/research/previous-projects/miniGMG/"
    url = "http://crd.lbl.gov/assets/Uploads/FTG/Projects/miniGMG/miniGMG.tar.gz"

    version('master', '975a2a118403fc0024b5e04cef280e95')

    depends_on('mpi')

    # Upstream ships no build system, so build/install are hand-rolled.
    phases = ['build', 'install']

    def build(self, spec, prefix):
        """Compile the single run.miniGMG binary with mpicc + OpenMP."""
        cc = Executable(spec['mpi'].mpicc)

        cc('-O3', self.compiler.openmp_flag, 'miniGMG.c',
           'mg.c', 'box.c', 'solver.c', 'operators.ompif.c', 'timer.x86.c',
           '-D__MPI', '-D__COLLABORATIVE_THREADING=6',
           '-D__TEST_MG_CONVERGENCE', '-D__PRINT_NORM', '-D__USE_BICGSTAB',
           '-o', 'run.miniGMG', '-lm')

    def install(self, spec, prefix):
        """Install the binary plus the bundled job scripts."""
        mkdir(prefix.bin)
        install('run.miniGMG', prefix.bin)
        mkdir(prefix.jobs)
        files = glob.glob('job*')
        for f in files:
            install(f, prefix.jobs)
|
webmasterraj/FogOrNot | refs/heads/master | flask/lib/python2.7/site-packages/docutils/utils/math/tex2unichar.py | 120 | # -*- coding: utf-8 -*-
# LaTeX math to Unicode symbols translation dictionaries.
# Generated with ``write_tex2unichar.py`` from the data in
# http://milde.users.sourceforge.net/LUCR/Math/
# Includes commands from: wasysym, stmaryrd, mathdots, mathabx, esint, bbold, amsxtra, amsmath, amssymb, standard LaTeX
mathaccent = {
'acute': u'\u0301', # x́ COMBINING ACUTE ACCENT
'bar': u'\u0304', # x̄ COMBINING MACRON
'breve': u'\u0306', # x̆ COMBINING BREVE
'check': u'\u030c', # x̌ COMBINING CARON
'ddddot': u'\u20dc', # x⃜ COMBINING FOUR DOTS ABOVE
'dddot': u'\u20db', # x⃛ COMBINING THREE DOTS ABOVE
'ddot': u'\u0308', # ẍ COMBINING DIAERESIS
'dot': u'\u0307', # ẋ COMBINING DOT ABOVE
'grave': u'\u0300', # x̀ COMBINING GRAVE ACCENT
'hat': u'\u0302', # x̂ COMBINING CIRCUMFLEX ACCENT
'mathring': u'\u030a', # x̊ COMBINING RING ABOVE
'not': u'\u0338', # x̸ COMBINING LONG SOLIDUS OVERLAY
'overleftarrow': u'\u20d6', # x⃖ COMBINING LEFT ARROW ABOVE
'overleftrightarrow': u'\u20e1', # x⃡ COMBINING LEFT RIGHT ARROW ABOVE
'overline': u'\u0305', # x̅ COMBINING OVERLINE
'overrightarrow': u'\u20d7', # x⃗ COMBINING RIGHT ARROW ABOVE
'tilde': u'\u0303', # x̃ COMBINING TILDE
'underbar': u'\u0331', # x̱ COMBINING MACRON BELOW
'underleftarrow': u'\u20ee', # x⃮ COMBINING LEFT ARROW BELOW
'underline': u'\u0332', # x̲ COMBINING LOW LINE
'underrightarrow': u'\u20ef', # x⃯ COMBINING RIGHT ARROW BELOW
'vec': u'\u20d7', # x⃗ COMBINING RIGHT ARROW ABOVE
'widehat': u'\u0302', # x̂ COMBINING CIRCUMFLEX ACCENT
'widetilde': u'\u0303', # x̃ COMBINING TILDE
}
mathalpha = {
'Bbbk': u'\U0001d55c', # 𝕜 MATHEMATICAL DOUBLE-STRUCK SMALL K
'Delta': u'\u0394', # Δ GREEK CAPITAL LETTER DELTA
'Gamma': u'\u0393', # Γ GREEK CAPITAL LETTER GAMMA
'Im': u'\u2111', # ℑ BLACK-LETTER CAPITAL I
'Lambda': u'\u039b', # Λ GREEK CAPITAL LETTER LAMDA
'Omega': u'\u03a9', # Ω GREEK CAPITAL LETTER OMEGA
'Phi': u'\u03a6', # Φ GREEK CAPITAL LETTER PHI
'Pi': u'\u03a0', # Π GREEK CAPITAL LETTER PI
'Psi': u'\u03a8', # Ψ GREEK CAPITAL LETTER PSI
'Re': u'\u211c', # ℜ BLACK-LETTER CAPITAL R
'Sigma': u'\u03a3', # Σ GREEK CAPITAL LETTER SIGMA
'Theta': u'\u0398', # Θ GREEK CAPITAL LETTER THETA
'Upsilon': u'\u03a5', # Υ GREEK CAPITAL LETTER UPSILON
'Xi': u'\u039e', # Ξ GREEK CAPITAL LETTER XI
'aleph': u'\u2135', # ℵ ALEF SYMBOL
'alpha': u'\u03b1', # α GREEK SMALL LETTER ALPHA
'beta': u'\u03b2', # β GREEK SMALL LETTER BETA
'beth': u'\u2136', # ℶ BET SYMBOL
'chi': u'\u03c7', # χ GREEK SMALL LETTER CHI
'daleth': u'\u2138', # ℸ DALET SYMBOL
'delta': u'\u03b4', # δ GREEK SMALL LETTER DELTA
'digamma': u'\u03dc', # Ϝ GREEK LETTER DIGAMMA
'ell': u'\u2113', # ℓ SCRIPT SMALL L
'epsilon': u'\u03f5', # ϵ GREEK LUNATE EPSILON SYMBOL
'eta': u'\u03b7', # η GREEK SMALL LETTER ETA
'eth': u'\xf0', # ð LATIN SMALL LETTER ETH
'gamma': u'\u03b3', # γ GREEK SMALL LETTER GAMMA
'gimel': u'\u2137', # ℷ GIMEL SYMBOL
'hbar': u'\u210f', # ℏ PLANCK CONSTANT OVER TWO PI
'hslash': u'\u210f', # ℏ PLANCK CONSTANT OVER TWO PI
'imath': u'\u0131', # ı LATIN SMALL LETTER DOTLESS I
'iota': u'\u03b9', # ι GREEK SMALL LETTER IOTA
'jmath': u'\u0237', # ȷ LATIN SMALL LETTER DOTLESS J
'kappa': u'\u03ba', # κ GREEK SMALL LETTER KAPPA
'lambda': u'\u03bb', # λ GREEK SMALL LETTER LAMDA
'mu': u'\u03bc', # μ GREEK SMALL LETTER MU
'nu': u'\u03bd', # ν GREEK SMALL LETTER NU
'omega': u'\u03c9', # ω GREEK SMALL LETTER OMEGA
'phi': u'\u03d5', # ϕ GREEK PHI SYMBOL
'pi': u'\u03c0', # π GREEK SMALL LETTER PI
'psi': u'\u03c8', # ψ GREEK SMALL LETTER PSI
'rho': u'\u03c1', # ρ GREEK SMALL LETTER RHO
'sigma': u'\u03c3', # σ GREEK SMALL LETTER SIGMA
'tau': u'\u03c4', # τ GREEK SMALL LETTER TAU
'theta': u'\u03b8', # θ GREEK SMALL LETTER THETA
'upsilon': u'\u03c5', # υ GREEK SMALL LETTER UPSILON
'varDelta': u'\U0001d6e5', # 𝛥 MATHEMATICAL ITALIC CAPITAL DELTA
'varGamma': u'\U0001d6e4', # 𝛤 MATHEMATICAL ITALIC CAPITAL GAMMA
'varLambda': u'\U0001d6ec', # 𝛬 MATHEMATICAL ITALIC CAPITAL LAMDA
'varOmega': u'\U0001d6fa', # 𝛺 MATHEMATICAL ITALIC CAPITAL OMEGA
'varPhi': u'\U0001d6f7', # 𝛷 MATHEMATICAL ITALIC CAPITAL PHI
'varPi': u'\U0001d6f1', # 𝛱 MATHEMATICAL ITALIC CAPITAL PI
'varPsi': u'\U0001d6f9', # 𝛹 MATHEMATICAL ITALIC CAPITAL PSI
'varSigma': u'\U0001d6f4', # 𝛴 MATHEMATICAL ITALIC CAPITAL SIGMA
'varTheta': u'\U0001d6e9', # 𝛩 MATHEMATICAL ITALIC CAPITAL THETA
'varUpsilon': u'\U0001d6f6', # 𝛶 MATHEMATICAL ITALIC CAPITAL UPSILON
'varXi': u'\U0001d6ef', # 𝛯 MATHEMATICAL ITALIC CAPITAL XI
'varepsilon': u'\u03b5', # ε GREEK SMALL LETTER EPSILON
'varkappa': u'\U0001d718', # 𝜘 MATHEMATICAL ITALIC KAPPA SYMBOL
'varphi': u'\u03c6', # φ GREEK SMALL LETTER PHI
'varpi': u'\u03d6', # ϖ GREEK PI SYMBOL
'varrho': u'\u03f1', # ϱ GREEK RHO SYMBOL
'varsigma': u'\u03c2', # ς GREEK SMALL LETTER FINAL SIGMA
'vartheta': u'\u03d1', # ϑ GREEK THETA SYMBOL
'wp': u'\u2118', # ℘ SCRIPT CAPITAL P
'xi': u'\u03be', # ξ GREEK SMALL LETTER XI
'zeta': u'\u03b6', # ζ GREEK SMALL LETTER ZETA
}
mathbin = {
'Cap': u'\u22d2', # ⋒ DOUBLE INTERSECTION
'Circle': u'\u25cb', # ○ WHITE CIRCLE
'Cup': u'\u22d3', # ⋓ DOUBLE UNION
'LHD': u'\u25c0', # ◀ BLACK LEFT-POINTING TRIANGLE
'RHD': u'\u25b6', # ▶ BLACK RIGHT-POINTING TRIANGLE
'amalg': u'\u2a3f', # ⨿ AMALGAMATION OR COPRODUCT
'ast': u'\u2217', # ∗ ASTERISK OPERATOR
'barwedge': u'\u22bc', # ⊼ NAND
'bigtriangledown': u'\u25bd', # ▽ WHITE DOWN-POINTING TRIANGLE
'bigtriangleup': u'\u25b3', # △ WHITE UP-POINTING TRIANGLE
'bindnasrepma': u'\u214b', # ⅋ TURNED AMPERSAND
'blacklozenge': u'\u29eb', # ⧫ BLACK LOZENGE
'blacktriangledown': u'\u25be', # ▾ BLACK DOWN-POINTING SMALL TRIANGLE
'blacktriangleleft': u'\u25c2', # ◂ BLACK LEFT-POINTING SMALL TRIANGLE
'blacktriangleright': u'\u25b8', # ▸ BLACK RIGHT-POINTING SMALL TRIANGLE
'blacktriangleup': u'\u25b4', # ▴ BLACK UP-POINTING SMALL TRIANGLE
'boxast': u'\u29c6', # ⧆ SQUARED ASTERISK
'boxbar': u'\u25eb', # ◫ WHITE SQUARE WITH VERTICAL BISECTING LINE
'boxbox': u'\u29c8', # ⧈ SQUARED SQUARE
'boxbslash': u'\u29c5', # ⧅ SQUARED FALLING DIAGONAL SLASH
'boxcircle': u'\u29c7', # ⧇ SQUARED SMALL CIRCLE
'boxdot': u'\u22a1', # ⊡ SQUARED DOT OPERATOR
'boxminus': u'\u229f', # ⊟ SQUARED MINUS
'boxplus': u'\u229e', # ⊞ SQUARED PLUS
'boxslash': u'\u29c4', # ⧄ SQUARED RISING DIAGONAL SLASH
'boxtimes': u'\u22a0', # ⊠ SQUARED TIMES
'bullet': u'\u2219', # ∙ BULLET OPERATOR
'cap': u'\u2229', # ∩ INTERSECTION
'cdot': u'\u22c5', # ⋅ DOT OPERATOR
'circ': u'\u2218', # ∘ RING OPERATOR
'circledast': u'\u229b', # ⊛ CIRCLED ASTERISK OPERATOR
'circledcirc': u'\u229a', # ⊚ CIRCLED RING OPERATOR
'circleddash': u'\u229d', # ⊝ CIRCLED DASH
'cup': u'\u222a', # ∪ UNION
'curlyvee': u'\u22ce', # ⋎ CURLY LOGICAL OR
'curlywedge': u'\u22cf', # ⋏ CURLY LOGICAL AND
'dagger': u'\u2020', # † DAGGER
'ddagger': u'\u2021', # ‡ DOUBLE DAGGER
'diamond': u'\u22c4', # ⋄ DIAMOND OPERATOR
'div': u'\xf7', # ÷ DIVISION SIGN
'divideontimes': u'\u22c7', # ⋇ DIVISION TIMES
'dotplus': u'\u2214', # ∔ DOT PLUS
'doublebarwedge': u'\u2a5e', # ⩞ LOGICAL AND WITH DOUBLE OVERBAR
'intercal': u'\u22ba', # ⊺ INTERCALATE
'interleave': u'\u2af4', # ⫴ TRIPLE VERTICAL BAR BINARY RELATION
'land': u'\u2227', # ∧ LOGICAL AND
'leftthreetimes': u'\u22cb', # ⋋ LEFT SEMIDIRECT PRODUCT
'lhd': u'\u25c1', # ◁ WHITE LEFT-POINTING TRIANGLE
'lor': u'\u2228', # ∨ LOGICAL OR
'ltimes': u'\u22c9', # ⋉ LEFT NORMAL FACTOR SEMIDIRECT PRODUCT
'mp': u'\u2213', # ∓ MINUS-OR-PLUS SIGN
'odot': u'\u2299', # ⊙ CIRCLED DOT OPERATOR
'ominus': u'\u2296', # ⊖ CIRCLED MINUS
'oplus': u'\u2295', # ⊕ CIRCLED PLUS
'oslash': u'\u2298', # ⊘ CIRCLED DIVISION SLASH
'otimes': u'\u2297', # ⊗ CIRCLED TIMES
'pm': u'\xb1', # ± PLUS-MINUS SIGN
'rhd': u'\u25b7', # ▷ WHITE RIGHT-POINTING TRIANGLE
'rightthreetimes': u'\u22cc', # ⋌ RIGHT SEMIDIRECT PRODUCT
'rtimes': u'\u22ca', # ⋊ RIGHT NORMAL FACTOR SEMIDIRECT PRODUCT
'setminus': u'\u29f5', # ⧵ REVERSE SOLIDUS OPERATOR
'slash': u'\u2215', # ∕ DIVISION SLASH
'smallsetminus': u'\u2216', # ∖ SET MINUS
'smalltriangledown': u'\u25bf', # ▿ WHITE DOWN-POINTING SMALL TRIANGLE
'smalltriangleleft': u'\u25c3', # ◃ WHITE LEFT-POINTING SMALL TRIANGLE
'smalltriangleright': u'\u25b9', # ▹ WHITE RIGHT-POINTING SMALL TRIANGLE
'smalltriangleup': u'\u25b5', # ▵ WHITE UP-POINTING SMALL TRIANGLE
'sqcap': u'\u2293', # ⊓ SQUARE CAP
'sqcup': u'\u2294', # ⊔ SQUARE CUP
'sslash': u'\u2afd', # ⫽ DOUBLE SOLIDUS OPERATOR
'star': u'\u22c6', # ⋆ STAR OPERATOR
'talloblong': u'\u2afe', # ⫾ WHITE VERTICAL BAR
'times': u'\xd7', # × MULTIPLICATION SIGN
'triangle': u'\u25b3', # △ WHITE UP-POINTING TRIANGLE
'triangledown': u'\u25bf', # ▿ WHITE DOWN-POINTING SMALL TRIANGLE
'triangleleft': u'\u25c3', # ◃ WHITE LEFT-POINTING SMALL TRIANGLE
'triangleright': u'\u25b9', # ▹ WHITE RIGHT-POINTING SMALL TRIANGLE
'uplus': u'\u228e', # ⊎ MULTISET UNION
'vartriangle': u'\u25b3', # △ WHITE UP-POINTING TRIANGLE
'vee': u'\u2228', # ∨ LOGICAL OR
'veebar': u'\u22bb', # ⊻ XOR
'wedge': u'\u2227', # ∧ LOGICAL AND
'wr': u'\u2240', # ≀ WREATH PRODUCT
}
mathclose = {
'Rbag': u'\u27c6', # ⟆ RIGHT S-SHAPED BAG DELIMITER
'lrcorner': u'\u231f', # ⌟ BOTTOM RIGHT CORNER
'rangle': u'\u27e9', # ⟩ MATHEMATICAL RIGHT ANGLE BRACKET
'rbag': u'\u27c6', # ⟆ RIGHT S-SHAPED BAG DELIMITER
'rbrace': u'}', # } RIGHT CURLY BRACKET
'rbrack': u']', # ] RIGHT SQUARE BRACKET
'rceil': u'\u2309', # ⌉ RIGHT CEILING
'rfloor': u'\u230b', # ⌋ RIGHT FLOOR
'rgroup': u'\u27ef', # ⟯ MATHEMATICAL RIGHT FLATTENED PARENTHESIS
'rrbracket': u'\u27e7', # ⟧ MATHEMATICAL RIGHT WHITE SQUARE BRACKET
'rrparenthesis': u'\u2988', # ⦈ Z NOTATION RIGHT IMAGE BRACKET
'urcorner': u'\u231d', # ⌝ TOP RIGHT CORNER
'}': u'}', # } RIGHT CURLY BRACKET
}
mathfence = {
'Vert': u'\u2016', # ‖ DOUBLE VERTICAL LINE
'vert': u'|', # | VERTICAL LINE
'|': u'\u2016', # ‖ DOUBLE VERTICAL LINE
}
mathop = {
'Join': u'\u2a1d', # ⨝ JOIN
'bigcap': u'\u22c2', # ⋂ N-ARY INTERSECTION
'bigcup': u'\u22c3', # ⋃ N-ARY UNION
'biginterleave': u'\u2afc', # ⫼ LARGE TRIPLE VERTICAL BAR OPERATOR
'bigodot': u'\u2a00', # ⨀ N-ARY CIRCLED DOT OPERATOR
'bigoplus': u'\u2a01', # ⨁ N-ARY CIRCLED PLUS OPERATOR
'bigotimes': u'\u2a02', # ⨂ N-ARY CIRCLED TIMES OPERATOR
'bigsqcup': u'\u2a06', # ⨆ N-ARY SQUARE UNION OPERATOR
'biguplus': u'\u2a04', # ⨄ N-ARY UNION OPERATOR WITH PLUS
'bigvee': u'\u22c1', # ⋁ N-ARY LOGICAL OR
'bigwedge': u'\u22c0', # ⋀ N-ARY LOGICAL AND
'coprod': u'\u2210', # ∐ N-ARY COPRODUCT
'fatsemi': u'\u2a1f', # ⨟ Z NOTATION SCHEMA COMPOSITION
'fint': u'\u2a0f', # ⨏ INTEGRAL AVERAGE WITH SLASH
'iiiint': u'\u2a0c', # ⨌ QUADRUPLE INTEGRAL OPERATOR
'iiint': u'\u222d', # ∭ TRIPLE INTEGRAL
'iint': u'\u222c', # ∬ DOUBLE INTEGRAL
'int': u'\u222b', # ∫ INTEGRAL
'oiint': u'\u222f', # ∯ SURFACE INTEGRAL
'oint': u'\u222e', # ∮ CONTOUR INTEGRAL
'ointctrclockwise': u'\u2233', # ∳ ANTICLOCKWISE CONTOUR INTEGRAL
'prod': u'\u220f', # ∏ N-ARY PRODUCT
'sqint': u'\u2a16', # ⨖ QUATERNION INTEGRAL OPERATOR
'sum': u'\u2211', # ∑ N-ARY SUMMATION
'varointclockwise': u'\u2232', # ∲ CLOCKWISE CONTOUR INTEGRAL
}
mathopen = {
'Lbag': u'\u27c5', # ⟅ LEFT S-SHAPED BAG DELIMITER
'langle': u'\u27e8', # ⟨ MATHEMATICAL LEFT ANGLE BRACKET
'lbag': u'\u27c5', # ⟅ LEFT S-SHAPED BAG DELIMITER
'lbrace': u'{', # { LEFT CURLY BRACKET
'lbrack': u'[', # [ LEFT SQUARE BRACKET
'lceil': u'\u2308', # ⌈ LEFT CEILING
'lfloor': u'\u230a', # ⌊ LEFT FLOOR
'lgroup': u'\u27ee', # ⟮ MATHEMATICAL LEFT FLATTENED PARENTHESIS
'llbracket': u'\u27e6', # ⟦ MATHEMATICAL LEFT WHITE SQUARE BRACKET
'llcorner': u'\u231e', # ⌞ BOTTOM LEFT CORNER
'llparenthesis': u'\u2987', # ⦇ Z NOTATION LEFT IMAGE BRACKET
'ulcorner': u'\u231c', # ⌜ TOP LEFT CORNER
'{': u'{', # { LEFT CURLY BRACKET
}
mathord = {
'#': u'#', # # NUMBER SIGN
'$': u'$', # $ DOLLAR SIGN
'%': u'%', # % PERCENT SIGN
'&': u'&', # & AMPERSAND
'AC': u'\u223f', # ∿ SINE WAVE
'APLcomment': u'\u235d', # ⍝ APL FUNCTIONAL SYMBOL UP SHOE JOT
'APLdownarrowbox': u'\u2357', # ⍗ APL FUNCTIONAL SYMBOL QUAD DOWNWARDS ARROW
'APLinput': u'\u235e', # ⍞ APL FUNCTIONAL SYMBOL QUOTE QUAD
'APLinv': u'\u2339', # ⌹ APL FUNCTIONAL SYMBOL QUAD DIVIDE
'APLleftarrowbox': u'\u2347', # ⍇ APL FUNCTIONAL SYMBOL QUAD LEFTWARDS ARROW
'APLlog': u'\u235f', # ⍟ APL FUNCTIONAL SYMBOL CIRCLE STAR
'APLrightarrowbox': u'\u2348', # ⍈ APL FUNCTIONAL SYMBOL QUAD RIGHTWARDS ARROW
'APLuparrowbox': u'\u2350', # ⍐ APL FUNCTIONAL SYMBOL QUAD UPWARDS ARROW
'Aries': u'\u2648', # ♈ ARIES
'CIRCLE': u'\u25cf', # ● BLACK CIRCLE
'CheckedBox': u'\u2611', # ☑ BALLOT BOX WITH CHECK
'Diamond': u'\u25c7', # ◇ WHITE DIAMOND
'Finv': u'\u2132', # Ⅎ TURNED CAPITAL F
'Game': u'\u2141', # ⅁ TURNED SANS-SERIF CAPITAL G
'Gemini': u'\u264a', # ♊ GEMINI
'Jupiter': u'\u2643', # ♃ JUPITER
'LEFTCIRCLE': u'\u25d6', # ◖ LEFT HALF BLACK CIRCLE
'LEFTcircle': u'\u25d0', # ◐ CIRCLE WITH LEFT HALF BLACK
'Leo': u'\u264c', # ♌ LEO
'Libra': u'\u264e', # ♎ LIBRA
'Mars': u'\u2642', # ♂ MALE SIGN
'Mercury': u'\u263f', # ☿ MERCURY
'Neptune': u'\u2646', # ♆ NEPTUNE
'Pluto': u'\u2647', # ♇ PLUTO
'RIGHTCIRCLE': u'\u25d7', # ◗ RIGHT HALF BLACK CIRCLE
'RIGHTcircle': u'\u25d1', # ◑ CIRCLE WITH RIGHT HALF BLACK
'Saturn': u'\u2644', # ♄ SATURN
'Scorpio': u'\u264f', # ♏ SCORPIUS
'Square': u'\u2610', # ☐ BALLOT BOX
'Sun': u'\u2609', # ☉ SUN
'Taurus': u'\u2649', # ♉ TAURUS
'Uranus': u'\u2645', # ♅ URANUS
'Venus': u'\u2640', # ♀ FEMALE SIGN
'XBox': u'\u2612', # ☒ BALLOT BOX WITH X
'Yup': u'\u2144', # ⅄ TURNED SANS-SERIF CAPITAL Y
'_': u'_', # _ LOW LINE
'angle': u'\u2220', # ∠ ANGLE
'aquarius': u'\u2652', # ♒ AQUARIUS
'aries': u'\u2648', # ♈ ARIES
'ast': u'*', # * ASTERISK
'backepsilon': u'\u03f6', # ϶ GREEK REVERSED LUNATE EPSILON SYMBOL
'backprime': u'\u2035', # ‵ REVERSED PRIME
'backslash': u'\\', # \ REVERSE SOLIDUS
'because': u'\u2235', # ∵ BECAUSE
'bigstar': u'\u2605', # ★ BLACK STAR
'binampersand': u'&', # & AMPERSAND
'blacklozenge': u'\u2b27', # ⬧ BLACK MEDIUM LOZENGE
'blacksmiley': u'\u263b', # ☻ BLACK SMILING FACE
'blacksquare': u'\u25fc', # ◼ BLACK MEDIUM SQUARE
'bot': u'\u22a5', # ⊥ UP TACK
'boy': u'\u2642', # ♂ MALE SIGN
'cancer': u'\u264b', # ♋ CANCER
'capricornus': u'\u2651', # ♑ CAPRICORN
'cdots': u'\u22ef', # ⋯ MIDLINE HORIZONTAL ELLIPSIS
'cent': u'\xa2', # ¢ CENT SIGN
'centerdot': u'\u2b1d', # ⬝ BLACK VERY SMALL SQUARE
'checkmark': u'\u2713', # ✓ CHECK MARK
'circlearrowleft': u'\u21ba', # ↺ ANTICLOCKWISE OPEN CIRCLE ARROW
'circlearrowright': u'\u21bb', # ↻ CLOCKWISE OPEN CIRCLE ARROW
'circledR': u'\xae', # ® REGISTERED SIGN
'circledcirc': u'\u25ce', # ◎ BULLSEYE
'clubsuit': u'\u2663', # ♣ BLACK CLUB SUIT
'complement': u'\u2201', # ∁ COMPLEMENT
'dasharrow': u'\u21e2', # ⇢ RIGHTWARDS DASHED ARROW
'dashleftarrow': u'\u21e0', # ⇠ LEFTWARDS DASHED ARROW
'dashrightarrow': u'\u21e2', # ⇢ RIGHTWARDS DASHED ARROW
'diameter': u'\u2300', # ⌀ DIAMETER SIGN
'diamondsuit': u'\u2662', # ♢ WHITE DIAMOND SUIT
'earth': u'\u2641', # ♁ EARTH
'exists': u'\u2203', # ∃ THERE EXISTS
'female': u'\u2640', # ♀ FEMALE SIGN
'flat': u'\u266d', # ♭ MUSIC FLAT SIGN
'forall': u'\u2200', # ∀ FOR ALL
'fourth': u'\u2057', # ⁗ QUADRUPLE PRIME
'frownie': u'\u2639', # ☹ WHITE FROWNING FACE
'gemini': u'\u264a', # ♊ GEMINI
'girl': u'\u2640', # ♀ FEMALE SIGN
'heartsuit': u'\u2661', # ♡ WHITE HEART SUIT
'infty': u'\u221e', # ∞ INFINITY
'invneg': u'\u2310', # ⌐ REVERSED NOT SIGN
'jupiter': u'\u2643', # ♃ JUPITER
'ldots': u'\u2026', # … HORIZONTAL ELLIPSIS
'leftmoon': u'\u263e', # ☾ LAST QUARTER MOON
'leftturn': u'\u21ba', # ↺ ANTICLOCKWISE OPEN CIRCLE ARROW
'leo': u'\u264c', # ♌ LEO
'libra': u'\u264e', # ♎ LIBRA
'lnot': u'\xac', # ¬ NOT SIGN
'lozenge': u'\u25ca', # ◊ LOZENGE
'male': u'\u2642', # ♂ MALE SIGN
'maltese': u'\u2720', # ✠ MALTESE CROSS
'mathdollar': u'$', # $ DOLLAR SIGN
'measuredangle': u'\u2221', # ∡ MEASURED ANGLE
'mercury': u'\u263f', # ☿ MERCURY
'mho': u'\u2127', # ℧ INVERTED OHM SIGN
'nabla': u'\u2207', # ∇ NABLA
'natural': u'\u266e', # ♮ MUSIC NATURAL SIGN
'neg': u'\xac', # ¬ NOT SIGN
'neptune': u'\u2646', # ♆ NEPTUNE
'nexists': u'\u2204', # ∄ THERE DOES NOT EXIST
'notbackslash': u'\u2340', # ⍀ APL FUNCTIONAL SYMBOL BACKSLASH BAR
'partial': u'\u2202', # ∂ PARTIAL DIFFERENTIAL
'pisces': u'\u2653', # ♓ PISCES
'pluto': u'\u2647', # ♇ PLUTO
'pounds': u'\xa3', # £ POUND SIGN
'prime': u'\u2032', # ′ PRIME
'quarternote': u'\u2669', # ♩ QUARTER NOTE
'rightmoon': u'\u263d', # ☽ FIRST QUARTER MOON
'rightturn': u'\u21bb', # ↻ CLOCKWISE OPEN CIRCLE ARROW
'sagittarius': u'\u2650', # ♐ SAGITTARIUS
'saturn': u'\u2644', # ♄ SATURN
'scorpio': u'\u264f', # ♏ SCORPIUS
'second': u'\u2033', # ″ DOUBLE PRIME
'sharp': u'\u266f', # ♯ MUSIC SHARP SIGN
'sim': u'~', # ~ TILDE
'slash': u'/', # / SOLIDUS
'smiley': u'\u263a', # ☺ WHITE SMILING FACE
'spadesuit': u'\u2660', # ♠ BLACK SPADE SUIT
'spddot': u'\xa8', # ¨ DIAERESIS
'sphat': u'^', # ^ CIRCUMFLEX ACCENT
'sphericalangle': u'\u2222', # ∢ SPHERICAL ANGLE
'sptilde': u'~', # ~ TILDE
'square': u'\u25fb', # ◻ WHITE MEDIUM SQUARE
'sun': u'\u263c', # ☼ WHITE SUN WITH RAYS
'taurus': u'\u2649', # ♉ TAURUS
'therefore': u'\u2234', # ∴ THEREFORE
'third': u'\u2034', # ‴ TRIPLE PRIME
'top': u'\u22a4', # ⊤ DOWN TACK
'triangleleft': u'\u25c5', # ◅ WHITE LEFT-POINTING POINTER
'triangleright': u'\u25bb', # ▻ WHITE RIGHT-POINTING POINTER
'twonotes': u'\u266b', # ♫ BEAMED EIGHTH NOTES
'uranus': u'\u2645', # ♅ URANUS
'varEarth': u'\u2641', # ♁ EARTH
'varnothing': u'\u2205', # ∅ EMPTY SET
'virgo': u'\u264d', # ♍ VIRGO
'wasylozenge': u'\u2311', # ⌑ SQUARE LOZENGE
'wasytherefore': u'\u2234', # ∴ THEREFORE
'yen': u'\xa5', # ¥ YEN SIGN
}
mathover = {
'overbrace': u'\u23de', # ⏞ TOP CURLY BRACKET
'wideparen': u'\u23dc', # ⏜ TOP PARENTHESIS
}
mathradical = {
'sqrt': u'\u221a', # √ SQUARE ROOT
'sqrt[3]': u'\u221b', # ∛ CUBE ROOT
'sqrt[4]': u'\u221c', # ∜ FOURTH ROOT
}
mathrel = {
'Bumpeq': u'\u224e', # ≎ GEOMETRICALLY EQUIVALENT TO
'Doteq': u'\u2251', # ≑ GEOMETRICALLY EQUAL TO
'Downarrow': u'\u21d3', # ⇓ DOWNWARDS DOUBLE ARROW
'Leftarrow': u'\u21d0', # ⇐ LEFTWARDS DOUBLE ARROW
'Leftrightarrow': u'\u21d4', # ⇔ LEFT RIGHT DOUBLE ARROW
'Lleftarrow': u'\u21da', # ⇚ LEFTWARDS TRIPLE ARROW
'Longleftarrow': u'\u27f8', # ⟸ LONG LEFTWARDS DOUBLE ARROW
'Longleftrightarrow': u'\u27fa', # ⟺ LONG LEFT RIGHT DOUBLE ARROW
'Longmapsfrom': u'\u27fd', # ⟽ LONG LEFTWARDS DOUBLE ARROW FROM BAR
'Longmapsto': u'\u27fe', # ⟾ LONG RIGHTWARDS DOUBLE ARROW FROM BAR
'Longrightarrow': u'\u27f9', # ⟹ LONG RIGHTWARDS DOUBLE ARROW
'Lsh': u'\u21b0', # ↰ UPWARDS ARROW WITH TIP LEFTWARDS
'Mapsfrom': u'\u2906', # ⤆ LEFTWARDS DOUBLE ARROW FROM BAR
'Mapsto': u'\u2907', # ⤇ RIGHTWARDS DOUBLE ARROW FROM BAR
'Rightarrow': u'\u21d2', # ⇒ RIGHTWARDS DOUBLE ARROW
'Rrightarrow': u'\u21db', # ⇛ RIGHTWARDS TRIPLE ARROW
'Rsh': u'\u21b1', # ↱ UPWARDS ARROW WITH TIP RIGHTWARDS
'Subset': u'\u22d0', # ⋐ DOUBLE SUBSET
'Supset': u'\u22d1', # ⋑ DOUBLE SUPERSET
'Uparrow': u'\u21d1', # ⇑ UPWARDS DOUBLE ARROW
'Updownarrow': u'\u21d5', # ⇕ UP DOWN DOUBLE ARROW
'VDash': u'\u22ab', # ⊫ DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
'Vdash': u'\u22a9', # ⊩ FORCES
'Vvdash': u'\u22aa', # ⊪ TRIPLE VERTICAL BAR RIGHT TURNSTILE
'apprge': u'\u2273', # ≳ GREATER-THAN OR EQUIVALENT TO
'apprle': u'\u2272', # ≲ LESS-THAN OR EQUIVALENT TO
'approx': u'\u2248', # ≈ ALMOST EQUAL TO
'approxeq': u'\u224a', # ≊ ALMOST EQUAL OR EQUAL TO
'asymp': u'\u224d', # ≍ EQUIVALENT TO
'backsim': u'\u223d', # ∽ REVERSED TILDE
'backsimeq': u'\u22cd', # ⋍ REVERSED TILDE EQUALS
'barin': u'\u22f6', # ⋶ ELEMENT OF WITH OVERBAR
'barleftharpoon': u'\u296b', # ⥫ LEFTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
'barrightharpoon': u'\u296d', # ⥭ RIGHTWARDS HARPOON WITH BARB DOWN BELOW LONG DASH
'between': u'\u226c', # ≬ BETWEEN
'bowtie': u'\u22c8', # ⋈ BOWTIE
'bumpeq': u'\u224f', # ≏ DIFFERENCE BETWEEN
'circeq': u'\u2257', # ≗ RING EQUAL TO
'coloneq': u'\u2254', # ≔ COLON EQUALS
'cong': u'\u2245', # ≅ APPROXIMATELY EQUAL TO
'corresponds': u'\u2259', # ≙ ESTIMATES
'curlyeqprec': u'\u22de', # ⋞ EQUAL TO OR PRECEDES
'curlyeqsucc': u'\u22df', # ⋟ EQUAL TO OR SUCCEEDS
'curvearrowleft': u'\u21b6', # ↶ ANTICLOCKWISE TOP SEMICIRCLE ARROW
'curvearrowright': u'\u21b7', # ↷ CLOCKWISE TOP SEMICIRCLE ARROW
'dashv': u'\u22a3', # ⊣ LEFT TACK
'ddots': u'\u22f1', # ⋱ DOWN RIGHT DIAGONAL ELLIPSIS
'dlsh': u'\u21b2', # ↲ DOWNWARDS ARROW WITH TIP LEFTWARDS
'doteq': u'\u2250', # ≐ APPROACHES THE LIMIT
'doteqdot': u'\u2251', # ≑ GEOMETRICALLY EQUAL TO
'downarrow': u'\u2193', # ↓ DOWNWARDS ARROW
'downdownarrows': u'\u21ca', # ⇊ DOWNWARDS PAIRED ARROWS
'downdownharpoons': u'\u2965', # ⥥ DOWNWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
'downharpoonleft': u'\u21c3', # ⇃ DOWNWARDS HARPOON WITH BARB LEFTWARDS
'downharpoonright': u'\u21c2', # ⇂ DOWNWARDS HARPOON WITH BARB RIGHTWARDS
'downuparrows': u'\u21f5', # ⇵ DOWNWARDS ARROW LEFTWARDS OF UPWARDS ARROW
'downupharpoons': u'\u296f', # ⥯ DOWNWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
'drsh': u'\u21b3', # ↳ DOWNWARDS ARROW WITH TIP RIGHTWARDS
'eqcirc': u'\u2256', # ≖ RING IN EQUAL TO
'eqcolon': u'\u2255', # ≕ EQUALS COLON
'eqsim': u'\u2242', # ≂ MINUS TILDE
'eqslantgtr': u'\u2a96', # ⪖ SLANTED EQUAL TO OR GREATER-THAN
'eqslantless': u'\u2a95', # ⪕ SLANTED EQUAL TO OR LESS-THAN
'equiv': u'\u2261', # ≡ IDENTICAL TO
'fallingdotseq': u'\u2252', # ≒ APPROXIMATELY EQUAL TO OR THE IMAGE OF
'frown': u'\u2322', # ⌢ FROWN
'ge': u'\u2265', # ≥ GREATER-THAN OR EQUAL TO
'geq': u'\u2265', # ≥ GREATER-THAN OR EQUAL TO
'geqq': u'\u2267', # ≧ GREATER-THAN OVER EQUAL TO
'geqslant': u'\u2a7e', # ⩾ GREATER-THAN OR SLANTED EQUAL TO
'gets': u'\u2190', # ← LEFTWARDS ARROW
'gg': u'\u226b', # ≫ MUCH GREATER-THAN
'ggcurly': u'\u2abc', # ⪼ DOUBLE SUCCEEDS
'ggg': u'\u22d9', # ⋙ VERY MUCH GREATER-THAN
'gnapprox': u'\u2a8a', # ⪊ GREATER-THAN AND NOT APPROXIMATE
'gneq': u'\u2a88', # ⪈ GREATER-THAN AND SINGLE-LINE NOT EQUAL TO
'gneqq': u'\u2269', # ≩ GREATER-THAN BUT NOT EQUAL TO
'gnsim': u'\u22e7', # ⋧ GREATER-THAN BUT NOT EQUIVALENT TO
'gtrapprox': u'\u2a86', # ⪆ GREATER-THAN OR APPROXIMATE
'gtrdot': u'\u22d7', # ⋗ GREATER-THAN WITH DOT
'gtreqless': u'\u22db', # ⋛ GREATER-THAN EQUAL TO OR LESS-THAN
'gtreqqless': u'\u2a8c', # ⪌ GREATER-THAN ABOVE DOUBLE-LINE EQUAL ABOVE LESS-THAN
'gtrless': u'\u2277', # ≷ GREATER-THAN OR LESS-THAN
'gtrsim': u'\u2273', # ≳ GREATER-THAN OR EQUIVALENT TO
'hash': u'\u22d5', # ⋕ EQUAL AND PARALLEL TO
'hookleftarrow': u'\u21a9', # ↩ LEFTWARDS ARROW WITH HOOK
'hookrightarrow': u'\u21aa', # ↪ RIGHTWARDS ARROW WITH HOOK
'iddots': u'\u22f0', # ⋰ UP RIGHT DIAGONAL ELLIPSIS
'impliedby': u'\u27f8', # ⟸ LONG LEFTWARDS DOUBLE ARROW
'implies': u'\u27f9', # ⟹ LONG RIGHTWARDS DOUBLE ARROW
'in': u'\u2208', # ∈ ELEMENT OF
'le': u'\u2264', # ≤ LESS-THAN OR EQUAL TO
'leftarrow': u'\u2190', # ← LEFTWARDS ARROW
'leftarrowtail': u'\u21a2', # ↢ LEFTWARDS ARROW WITH TAIL
'leftarrowtriangle': u'\u21fd', # ⇽ LEFTWARDS OPEN-HEADED ARROW
'leftbarharpoon': u'\u296a', # ⥪ LEFTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
'leftharpoondown': u'\u21bd', # ↽ LEFTWARDS HARPOON WITH BARB DOWNWARDS
'leftharpoonup': u'\u21bc', # ↼ LEFTWARDS HARPOON WITH BARB UPWARDS
'leftleftarrows': u'\u21c7', # ⇇ LEFTWARDS PAIRED ARROWS
'leftleftharpoons': u'\u2962', # ⥢ LEFTWARDS HARPOON WITH BARB UP ABOVE LEFTWARDS HARPOON WITH BARB DOWN
'leftrightarrow': u'\u2194', # ↔ LEFT RIGHT ARROW
'leftrightarrows': u'\u21c6', # ⇆ LEFTWARDS ARROW OVER RIGHTWARDS ARROW
'leftrightarrowtriangle': u'\u21ff', # ⇿ LEFT RIGHT OPEN-HEADED ARROW
'leftrightharpoon': u'\u294a', # ⥊ LEFT BARB UP RIGHT BARB DOWN HARPOON
'leftrightharpoons': u'\u21cb', # ⇋ LEFTWARDS HARPOON OVER RIGHTWARDS HARPOON
'leftrightsquigarrow': u'\u21ad', # ↭ LEFT RIGHT WAVE ARROW
'leftslice': u'\u2aa6', # ⪦ LESS-THAN CLOSED BY CURVE
'leftsquigarrow': u'\u21dc', # ⇜ LEFTWARDS SQUIGGLE ARROW
'leq': u'\u2264', # ≤ LESS-THAN OR EQUAL TO
'leqq': u'\u2266', # ≦ LESS-THAN OVER EQUAL TO
'leqslant': u'\u2a7d', # ⩽ LESS-THAN OR SLANTED EQUAL TO
'lessapprox': u'\u2a85', # ⪅ LESS-THAN OR APPROXIMATE
'lessdot': u'\u22d6', # ⋖ LESS-THAN WITH DOT
'lesseqgtr': u'\u22da', # ⋚ LESS-THAN EQUAL TO OR GREATER-THAN
'lesseqqgtr': u'\u2a8b', # ⪋ LESS-THAN ABOVE DOUBLE-LINE EQUAL ABOVE GREATER-THAN
'lessgtr': u'\u2276', # ≶ LESS-THAN OR GREATER-THAN
'lesssim': u'\u2272', # ≲ LESS-THAN OR EQUIVALENT TO
'lightning': u'\u21af', # ↯ DOWNWARDS ZIGZAG ARROW
'll': u'\u226a', # ≪ MUCH LESS-THAN
'llcurly': u'\u2abb', # ⪻ DOUBLE PRECEDES
'lll': u'\u22d8', # ⋘ VERY MUCH LESS-THAN
'lnapprox': u'\u2a89', # ⪉ LESS-THAN AND NOT APPROXIMATE
'lneq': u'\u2a87', # ⪇ LESS-THAN AND SINGLE-LINE NOT EQUAL TO
'lneqq': u'\u2268', # ≨ LESS-THAN BUT NOT EQUAL TO
'lnsim': u'\u22e6', # ⋦ LESS-THAN BUT NOT EQUIVALENT TO
'longleftarrow': u'\u27f5', # ⟵ LONG LEFTWARDS ARROW
'longleftrightarrow': u'\u27f7', # ⟷ LONG LEFT RIGHT ARROW
'longmapsfrom': u'\u27fb', # ⟻ LONG LEFTWARDS ARROW FROM BAR
'longmapsto': u'\u27fc', # ⟼ LONG RIGHTWARDS ARROW FROM BAR
'longrightarrow': u'\u27f6', # ⟶ LONG RIGHTWARDS ARROW
'looparrowleft': u'\u21ab', # ↫ LEFTWARDS ARROW WITH LOOP
'looparrowright': u'\u21ac', # ↬ RIGHTWARDS ARROW WITH LOOP
'mapsfrom': u'\u21a4', # ↤ LEFTWARDS ARROW FROM BAR
'mapsto': u'\u21a6', # ↦ RIGHTWARDS ARROW FROM BAR
'mid': u'\u2223', # ∣ DIVIDES
'models': u'\u22a7', # ⊧ MODELS
'multimap': u'\u22b8', # ⊸ MULTIMAP
'nLeftarrow': u'\u21cd', # ⇍ LEFTWARDS DOUBLE ARROW WITH STROKE
'nLeftrightarrow': u'\u21ce', # ⇎ LEFT RIGHT DOUBLE ARROW WITH STROKE
'nRightarrow': u'\u21cf', # ⇏ RIGHTWARDS DOUBLE ARROW WITH STROKE
'nVDash': u'\u22af', # ⊯ NEGATED DOUBLE VERTICAL BAR DOUBLE RIGHT TURNSTILE
'nVdash': u'\u22ae', # ⊮ DOES NOT FORCE
'ncong': u'\u2247', # ≇ NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
'ne': u'\u2260', # ≠ NOT EQUAL TO
'nearrow': u'\u2197', # ↗ NORTH EAST ARROW
'neq': u'\u2260', # ≠ NOT EQUAL TO
'ngeq': u'\u2271', # ≱ NEITHER GREATER-THAN NOR EQUAL TO
'ngtr': u'\u226f', # ≯ NOT GREATER-THAN
'ni': u'\u220b', # ∋ CONTAINS AS MEMBER
'nleftarrow': u'\u219a', # ↚ LEFTWARDS ARROW WITH STROKE
'nleftrightarrow': u'\u21ae', # ↮ LEFT RIGHT ARROW WITH STROKE
'nleq': u'\u2270', # ≰ NEITHER LESS-THAN NOR EQUAL TO
'nless': u'\u226e', # ≮ NOT LESS-THAN
'nmid': u'\u2224', # ∤ DOES NOT DIVIDE
'notasymp': u'\u226d', # ≭ NOT EQUIVALENT TO
'notin': u'\u2209', # ∉ NOT AN ELEMENT OF
'notowner': u'\u220c', # ∌ DOES NOT CONTAIN AS MEMBER
'notslash': u'\u233f', # ⌿ APL FUNCTIONAL SYMBOL SLASH BAR
'nparallel': u'\u2226', # ∦ NOT PARALLEL TO
'nprec': u'\u2280', # ⊀ DOES NOT PRECEDE
'npreceq': u'\u22e0', # ⋠ DOES NOT PRECEDE OR EQUAL
'nrightarrow': u'\u219b', # ↛ RIGHTWARDS ARROW WITH STROKE
'nsim': u'\u2241', # ≁ NOT TILDE
'nsubseteq': u'\u2288', # ⊈ NEITHER A SUBSET OF NOR EQUAL TO
'nsucc': u'\u2281', # ⊁ DOES NOT SUCCEED
'nsucceq': u'\u22e1', # ⋡ DOES NOT SUCCEED OR EQUAL
'nsupseteq': u'\u2289', # ⊉ NEITHER A SUPERSET OF NOR EQUAL TO
'ntriangleleft': u'\u22ea', # ⋪ NOT NORMAL SUBGROUP OF
'ntrianglelefteq': u'\u22ec', # ⋬ NOT NORMAL SUBGROUP OF OR EQUAL TO
'ntriangleright': u'\u22eb', # ⋫ DOES NOT CONTAIN AS NORMAL SUBGROUP
'ntrianglerighteq': u'\u22ed', # ⋭ DOES NOT CONTAIN AS NORMAL SUBGROUP OR EQUAL
'nvDash': u'\u22ad', # ⊭ NOT TRUE
'nvdash': u'\u22ac', # ⊬ DOES NOT PROVE
'nwarrow': u'\u2196', # ↖ NORTH WEST ARROW
'owns': u'\u220b', # ∋ CONTAINS AS MEMBER
'parallel': u'\u2225', # ∥ PARALLEL TO
'perp': u'\u27c2', # ⟂ PERPENDICULAR
'pitchfork': u'\u22d4', # ⋔ PITCHFORK
'prec': u'\u227a', # ≺ PRECEDES
'precapprox': u'\u2ab7', # ⪷ PRECEDES ABOVE ALMOST EQUAL TO
'preccurlyeq': u'\u227c', # ≼ PRECEDES OR EQUAL TO
'preceq': u'\u2aaf', # ⪯ PRECEDES ABOVE SINGLE-LINE EQUALS SIGN
'precnapprox': u'\u2ab9', # ⪹ PRECEDES ABOVE NOT ALMOST EQUAL TO
'precnsim': u'\u22e8', # ⋨ PRECEDES BUT NOT EQUIVALENT TO
'precsim': u'\u227e', # ≾ PRECEDES OR EQUIVALENT TO
'propto': u'\u221d', # ∝ PROPORTIONAL TO
'restriction': u'\u21be', # ↾ UPWARDS HARPOON WITH BARB RIGHTWARDS
'rightarrow': u'\u2192', # → RIGHTWARDS ARROW
'rightarrowtail': u'\u21a3', # ↣ RIGHTWARDS ARROW WITH TAIL
'rightarrowtriangle': u'\u21fe', # ⇾ RIGHTWARDS OPEN-HEADED ARROW
'rightbarharpoon': u'\u296c', # ⥬ RIGHTWARDS HARPOON WITH BARB UP ABOVE LONG DASH
'rightharpoondown': u'\u21c1', # ⇁ RIGHTWARDS HARPOON WITH BARB DOWNWARDS
'rightharpoonup': u'\u21c0', # ⇀ RIGHTWARDS HARPOON WITH BARB UPWARDS
'rightleftarrows': u'\u21c4', # ⇄ RIGHTWARDS ARROW OVER LEFTWARDS ARROW
'rightleftharpoon': u'\u294b', # ⥋ LEFT BARB DOWN RIGHT BARB UP HARPOON
'rightleftharpoons': u'\u21cc', # ⇌ RIGHTWARDS HARPOON OVER LEFTWARDS HARPOON
'rightrightarrows': u'\u21c9', # ⇉ RIGHTWARDS PAIRED ARROWS
'rightrightharpoons': u'\u2964', # ⥤ RIGHTWARDS HARPOON WITH BARB UP ABOVE RIGHTWARDS HARPOON WITH BARB DOWN
'rightslice': u'\u2aa7', # ⪧ GREATER-THAN CLOSED BY CURVE
'rightsquigarrow': u'\u21dd', # ⇝ RIGHTWARDS SQUIGGLE ARROW
'risingdotseq': u'\u2253', # ≓ IMAGE OF OR APPROXIMATELY EQUAL TO
'searrow': u'\u2198', # ↘ SOUTH EAST ARROW
'sim': u'\u223c', # ∼ TILDE OPERATOR
'simeq': u'\u2243', # ≃ ASYMPTOTICALLY EQUAL TO
'smallfrown': u'\u2322', # ⌢ FROWN
'smallsmile': u'\u2323', # ⌣ SMILE
'smile': u'\u2323', # ⌣ SMILE
'sqsubset': u'\u228f', # ⊏ SQUARE IMAGE OF
'sqsubseteq': u'\u2291', # ⊑ SQUARE IMAGE OF OR EQUAL TO
'sqsupset': u'\u2290', # ⊐ SQUARE ORIGINAL OF
'sqsupseteq': u'\u2292', # ⊒ SQUARE ORIGINAL OF OR EQUAL TO
'subset': u'\u2282', # ⊂ SUBSET OF
'subseteq': u'\u2286', # ⊆ SUBSET OF OR EQUAL TO
'subseteqq': u'\u2ac5', # ⫅ SUBSET OF ABOVE EQUALS SIGN
'subsetneq': u'\u228a', # ⊊ SUBSET OF WITH NOT EQUAL TO
'subsetneqq': u'\u2acb', # ⫋ SUBSET OF ABOVE NOT EQUAL TO
'succ': u'\u227b', # ≻ SUCCEEDS
'succapprox': u'\u2ab8', # ⪸ SUCCEEDS ABOVE ALMOST EQUAL TO
'succcurlyeq': u'\u227d', # ≽ SUCCEEDS OR EQUAL TO
'succeq': u'\u2ab0', # ⪰ SUCCEEDS ABOVE SINGLE-LINE EQUALS SIGN
'succnapprox': u'\u2aba', # ⪺ SUCCEEDS ABOVE NOT ALMOST EQUAL TO
'succnsim': u'\u22e9', # ⋩ SUCCEEDS BUT NOT EQUIVALENT TO
'succsim': u'\u227f', # ≿ SUCCEEDS OR EQUIVALENT TO
'supset': u'\u2283', # ⊃ SUPERSET OF
'supseteq': u'\u2287', # ⊇ SUPERSET OF OR EQUAL TO
'supseteqq': u'\u2ac6', # ⫆ SUPERSET OF ABOVE EQUALS SIGN
'supsetneq': u'\u228b', # ⊋ SUPERSET OF WITH NOT EQUAL TO
'supsetneqq': u'\u2acc', # ⫌ SUPERSET OF ABOVE NOT EQUAL TO
'swarrow': u'\u2199', # ↙ SOUTH WEST ARROW
'to': u'\u2192', # → RIGHTWARDS ARROW
'trianglelefteq': u'\u22b4', # ⊴ NORMAL SUBGROUP OF OR EQUAL TO
'triangleq': u'\u225c', # ≜ DELTA EQUAL TO
'trianglerighteq': u'\u22b5', # ⊵ CONTAINS AS NORMAL SUBGROUP OR EQUAL TO
'twoheadleftarrow': u'\u219e', # ↞ LEFTWARDS TWO HEADED ARROW
'twoheadrightarrow': u'\u21a0', # ↠ RIGHTWARDS TWO HEADED ARROW
'uparrow': u'\u2191', # ↑ UPWARDS ARROW
'updownarrow': u'\u2195', # ↕ UP DOWN ARROW
'updownarrows': u'\u21c5', # ⇅ UPWARDS ARROW LEFTWARDS OF DOWNWARDS ARROW
'updownharpoons': u'\u296e', # ⥮ UPWARDS HARPOON WITH BARB LEFT BESIDE DOWNWARDS HARPOON WITH BARB RIGHT
'upharpoonleft': u'\u21bf', # ↿ UPWARDS HARPOON WITH BARB LEFTWARDS
'upharpoonright': u'\u21be', # ↾ UPWARDS HARPOON WITH BARB RIGHTWARDS
'upuparrows': u'\u21c8', # ⇈ UPWARDS PAIRED ARROWS
'upupharpoons': u'\u2963', # ⥣ UPWARDS HARPOON WITH BARB LEFT BESIDE UPWARDS HARPOON WITH BARB RIGHT
'vDash': u'\u22a8', # ⊨ TRUE
'varpropto': u'\u221d', # ∝ PROPORTIONAL TO
'vartriangleleft': u'\u22b2', # ⊲ NORMAL SUBGROUP OF
'vartriangleright': u'\u22b3', # ⊳ CONTAINS AS NORMAL SUBGROUP
'vdash': u'\u22a2', # ⊢ RIGHT TACK
'vdots': u'\u22ee', # ⋮ VERTICAL ELLIPSIS
}
mathunder = {
'underbrace': u'\u23df', # ⏟ BOTTOM CURLY BRACKET
}
space = {
':': u'\u205f', # MEDIUM MATHEMATICAL SPACE
'medspace': u'\u205f', # MEDIUM MATHEMATICAL SPACE
'quad': u'\u2001', # EM QUAD
}
|
shuzi0/ss | refs/heads/master | shadowsocks/shell.py | 12 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import json
import sys
import getopt
import logging
from shadowsocks.common import to_bytes, to_str, IPNetwork
from shadowsocks import encrypt
# Custom logging level below DEBUG (10), enabled with '-vv'.
VERBOSE_LEVEL = 5
# Module-wide verbosity counter; set from the parsed config in get_config()
# and read by print_exception().
verbose = 0
def check_python():
    """Exit with status 1 unless running on Python 2.6+ or 3.3+."""
    major, minor = sys.version_info[0], sys.version_info[1]
    if major == 2 and minor < 6:
        print('Python 2.6+ required')
        sys.exit(1)
    elif major == 3 and minor < 3:
        print('Python 3.3+ required')
        sys.exit(1)
    elif major not in (2, 3):
        print('Python version not supported')
        sys.exit(1)
def print_exception(e):
    """Log *e* as an error; also dump the traceback when verbose."""
    global verbose
    logging.error(e)
    if verbose <= 0:
        return
    import traceback
    traceback.print_exc()
def print_shadowsocks():
    """Print the banner, with the installed package version when available."""
    try:
        import pkg_resources
        version = pkg_resources.get_distribution('shadowsocks').version
    except Exception:
        # Not installed as a package (e.g. run from a source checkout).
        version = ''
    print('Shadowsocks %s' % version)
def find_config():
    """Return the path of the first existing config.json, or None.

    Looks in the current working directory first, then next to the
    package directory.
    """
    candidates = (
        'config.json',
        os.path.join(os.path.dirname(__file__), '../', 'config.json'),
    )
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return None
def check_config(config, is_local):
    """Validate the merged configuration, warning about unsafe settings
    and exiting the process on fatal ones.

    :param config: effective settings dict (config file + CLI merged)
    :param is_local: True when validating for sslocal, False for ssserver
    """
    if config.get('daemon', None) == 'stop':
        # no need to specify configuration for daemon stop
        return
    if is_local and not config.get('password', None):
        logging.error('password not specified')
        print_help(is_local)
        sys.exit(2)
    if not is_local and not config.get('password', None) \
            and not config.get('port_password', None):
        logging.error('password or port_password not specified')
        print_help(is_local)
        sys.exit(2)
    # Normalize port values to int (server_port may be a list for
    # multi-port configs and is then left as-is).
    if 'local_port' in config:
        config['local_port'] = int(config['local_port'])
    if 'server_port' in config and type(config['server_port']) != list:
        config['server_port'] = int(config['server_port'])
    if config.get('local_address', '') in [b'0.0.0.0']:
        logging.warn('warning: local set to listen on 0.0.0.0, it\'s not safe')
    if config.get('server', '') in ['127.0.0.1', 'localhost']:
        logging.warn('warning: server set to listen on %s:%s, are you sure?' %
                     (to_str(config['server']), config['server_port']))
    if (config.get('method', '') or '').lower() == 'table':
        logging.warn('warning: table is not safe; please use a safer cipher, '
                     'like AES-256-CFB')
    if (config.get('method', '') or '').lower() == 'rc4':
        logging.warn('warning: RC4 is not safe; please use a safer cipher, '
                     'like AES-256-CFB')
    if config.get('timeout', 300) < 100:
        logging.warn('warning: your timeout %d seems too short' %
                     int(config.get('timeout')))
    if config.get('timeout', 300) > 600:
        logging.warn('warning: your timeout %d seems too long' %
                     int(config.get('timeout')))
    if config.get('password') in [b'mypassword']:
        logging.error('DON\'T USE DEFAULT PASSWORD! Please change it in your '
                      'config.json!')
        sys.exit(1)
    if config.get('user', None) is not None:
        # Privilege dropping relies on os.setuid and friends.
        if os.name != 'posix':
            logging.error('user can be used only on Unix')
            sys.exit(1)
    # Fail early if the password/method combination cannot build a cipher.
    encrypt.try_cipher(config['password'], config['method'])
def get_config(is_local):
    """Build and return the effective configuration dict.

    Merges the JSON config file (auto-discovered or given with -c) with
    command-line options (CLI wins), fills in defaults, configures the
    root logger according to the verbosity, and validates the result via
    check_config().  Exits the process on usage or validation errors.

    :param is_local: True for the sslocal client, False for ssserver
    """
    global verbose
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-s: %(message)s')
    if is_local:
        shortopts = 'hd:s:b:p:k:l:m:o:c:t:vq'
        longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'user=',
                    'version']
    else:
        shortopts = 'hd:s:p:k:m:o:c:t:vq'
        longopts = ['help', 'fast-open', 'pid-file=', 'log-file=', 'workers=',
                    'forbidden-ip=', 'user=', 'manager-address=', 'version']
    try:
        config_path = find_config()
        optlist, args = getopt.getopt(sys.argv[1:], shortopts, longopts)
        # First pass: an explicit -c overrides the auto-discovered path.
        for key, value in optlist:
            if key == '-c':
                config_path = value
        if config_path:
            logging.info('loading config from %s' % config_path)
            with open(config_path, 'rb') as f:
                try:
                    config = parse_json_in_str(f.read().decode('utf8'))
                except ValueError as e:
                    # Fix: ValueError has no .message attribute on
                    # Python 3; log the exception object itself.
                    logging.error('found an error in config.json: %s', e)
                    sys.exit(1)
        else:
            config = {}
        v_count = 0
        # Second pass: apply command-line options on top of the file.
        for key, value in optlist:
            if key == '-p':
                config['server_port'] = int(value)
            elif key == '-k':
                config['password'] = to_bytes(value)
            elif key == '-l':
                config['local_port'] = int(value)
            elif key == '-s':
                config['server'] = to_str(value)
            elif key == '-m':
                config['method'] = to_str(value)
            elif key == '-o':
                config['obfs'] = to_str(value)
            elif key == '-b':
                config['local_address'] = to_str(value)
            elif key == '-v':
                v_count += 1
                # '-vv' turns on more verbose mode
                config['verbose'] = v_count
            elif key == '-t':
                config['timeout'] = int(value)
            elif key == '--fast-open':
                config['fast_open'] = True
            elif key == '--workers':
                config['workers'] = int(value)
            elif key == '--manager-address':
                config['manager_address'] = value
            elif key == '--user':
                config['user'] = to_str(value)
            elif key == '--forbidden-ip':
                config['forbidden_ip'] = to_str(value).split(',')
            elif key in ('-h', '--help'):
                if is_local:
                    print_local_help()
                else:
                    print_server_help()
                sys.exit(0)
            elif key == '--version':
                print_shadowsocks()
                sys.exit(0)
            elif key == '-d':
                config['daemon'] = to_str(value)
            elif key == '--pid-file':
                config['pid-file'] = to_str(value)
            elif key == '--log-file':
                config['log-file'] = to_str(value)
            elif key == '-q':
                v_count -= 1
                config['verbose'] = v_count
    except getopt.GetoptError as e:
        print(e, file=sys.stderr)
        print_help(is_local)
        sys.exit(2)
    if not config:
        logging.error('config not specified')
        print_help(is_local)
        sys.exit(2)
    # Fill in defaults for everything not supplied by file or CLI.
    config['password'] = to_bytes(config.get('password', b''))
    config['method'] = to_str(config.get('method', 'aes-256-cfb'))
    config['protocol'] = to_str(config.get('protocol', 'origin'))
    config['obfs'] = to_str(config.get('obfs', 'plain'))
    config['obfs_param'] = to_str(config.get('obfs_param', ''))
    config['port_password'] = config.get('port_password', None)
    config['timeout'] = int(config.get('timeout', 300))
    config['fast_open'] = config.get('fast_open', False)
    config['workers'] = config.get('workers', 1)
    config['pid-file'] = config.get('pid-file', '/var/run/shadowsocks.pid')
    config['log-file'] = config.get('log-file', '/var/log/shadowsocks.log')
    config['verbose'] = config.get('verbose', False)
    config['local_address'] = to_str(config.get('local_address', '127.0.0.1'))
    config['local_port'] = config.get('local_port', 1080)
    if is_local:
        # The client must know which server to connect to.
        if config.get('server', None) is None:
            logging.error('server addr not specified')
            print_local_help()
            sys.exit(2)
        else:
            config['server'] = to_str(config['server'])
    else:
        config['server'] = to_str(config.get('server', '0.0.0.0'))
        try:
            config['forbidden_ip'] = \
                IPNetwork(config.get('forbidden_ip', '127.0.0.0/8,::1/128'))
        except Exception as e:
            logging.error(e)
            sys.exit(2)
    config['server_port'] = config.get('server_port', 8388)
    # Reconfigure logging now that the final verbosity is known:
    # -2..2 maps to ERROR..VERBOSE, default INFO.
    logging.getLogger('').handlers = []
    logging.addLevelName(VERBOSE_LEVEL, 'VERBOSE')
    if config['verbose'] >= 2:
        level = VERBOSE_LEVEL
    elif config['verbose'] == 1:
        level = logging.DEBUG
    elif config['verbose'] == -1:
        level = logging.WARN
    elif config['verbose'] <= -2:
        level = logging.ERROR
    else:
        level = logging.INFO
    verbose = config['verbose']
    logging.basicConfig(level=level,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    check_config(config, is_local)
    return config
def print_help(is_local):
    """Print the usage text for the client (sslocal) or server (ssserver)."""
    (print_local_help if is_local else print_server_help)()
def print_local_help():
    """Print command-line usage for the sslocal client to stdout."""
    print('''usage: sslocal [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
  -c CONFIG              path to config file
  -s SERVER_ADDR         server address
  -p SERVER_PORT         server port, default: 8388
  -b LOCAL_ADDR          local binding address, default: 127.0.0.1
  -l LOCAL_PORT          local port, default: 1080
  -k PASSWORD            password
  -m METHOD              encryption method, default: aes-256-cfb
  -o OBFS                obfsplugin, default: http_simple
  -t TIMEOUT             timeout in seconds, default: 300
  --fast-open            use TCP_FASTOPEN, requires Linux 3.7+
General options:
  -h, --help             show this help message and exit
  -d start/stop/restart  daemon mode
  --pid-file PID_FILE    pid file for daemon mode
  --log-file LOG_FILE    log file for daemon mode
  --user USER            username to run as
  -v, -vv                verbose mode
  -q, -qq                quiet mode, only show warnings/errors
  --version              show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def print_server_help():
    """Print command-line usage for the ssserver daemon to stdout."""
    print('''usage: ssserver [OPTION]...
A fast tunnel proxy that helps you bypass firewalls.
You can supply configurations via either config file or command line arguments.
Proxy options:
  -c CONFIG              path to config file
  -s SERVER_ADDR         server address, default: 0.0.0.0
  -p SERVER_PORT         server port, default: 8388
  -k PASSWORD            password
  -m METHOD              encryption method, default: aes-256-cfb
  -o OBFS                obfsplugin, default: http_simple
  -t TIMEOUT             timeout in seconds, default: 300
  --fast-open            use TCP_FASTOPEN, requires Linux 3.7+
  --workers WORKERS      number of workers, available on Unix/Linux
  --forbidden-ip IPLIST  comma seperated IP list forbidden to connect
  --manager-address ADDR optional server manager UDP address, see wiki
General options:
  -h, --help             show this help message and exit
  -d start/stop/restart  daemon mode
  --pid-file PID_FILE    pid file for daemon mode
  --log-file LOG_FILE    log file for daemon mode
  --user USER            username to run as
  -v, -vv                verbose mode
  -q, -qq                quiet mode, only show warnings/errors
  --version              show version information
Online help: <https://github.com/shadowsocks/shadowsocks>
''')
def _decode_list(data):
rv = []
for item in data:
if hasattr(item, 'encode'):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.items():
if hasattr(value, 'encode'):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
def parse_json_in_str(data):
    """Parse a JSON document, converting every string value to UTF-8
    bytes via the _decode_dict object hook."""
    decoded = json.loads(data, object_hook=_decode_dict)
    return decoded
|
patanpp/cobaion | refs/heads/master | themes/lte/dist/ionicons/builder/generate.py | 357 | from subprocess import call
import os
import json
# Paths resolved relative to this script: builder/ holds the tooling,
# its parent (the repo root) holds the fonts/css/scss/less outputs.
BUILDER_PATH = os.path.dirname(os.path.abspath(__file__))
ROOT_PATH = os.path.join(BUILDER_PATH, '..')
FONTS_FOLDER_PATH = os.path.join(ROOT_PATH, 'fonts')
CSS_FOLDER_PATH = os.path.join(ROOT_PATH, 'css')
SCSS_FOLDER_PATH = os.path.join(ROOT_PATH, 'scss')
LESS_FOLDER_PATH = os.path.join(ROOT_PATH, 'less')
def main():
    """Run the full Ionicons build: fonts first, then every artifact
    derived from build_data.json (stylesheets, cheatsheet, manifests).
    NOTE: this is a Python 2 script (print statements)."""
    generate_font_files()
    data = get_build_data()
    rename_svg_glyph_names(data)
    generate_scss(data)
    generate_less(data)
    generate_cheatsheet(data)
    generate_component_json(data)
    generate_composer_json(data)
    generate_bower_json(data)
def generate_font_files():
    """Invoke FontForge (must be on PATH) to build the font binaries."""
    print "Generate Fonts"
    cmd = "fontforge -script %s/scripts/generate_font.py" % (BUILDER_PATH)
    call(cmd, shell=True)
def rename_svg_glyph_names(data):
    """Rewrite the generated SVG font in place, replacing the default
    uniXXXX glyph names with ion-* icon names."""
    # hacky and slow (but safe) way to rename glyph-name attributes
    svg_path = os.path.join(FONTS_FOLDER_PATH, 'ionicons.svg')
    svg_file = open(svg_path, 'r+')
    svg_text = svg_file.read()
    svg_file.seek(0)
    for ionicon in data['icons']:
        # uniF2CA
        org_name = 'uni%s' % (ionicon['code'].replace('0x', '').upper())
        ion_name = 'ion-%s' % (ionicon['name'])
        svg_text = svg_text.replace(org_name, ion_name)
    svg_file.write(svg_text)
    svg_file.close()
def generate_less(data):
    """Write the LESS variables file and icon-class file from *data*."""
    print "Generate LESS"
    font_name = data['name']
    font_version = data['version']
    css_prefix = data['prefix']
    variables_file_path = os.path.join(LESS_FOLDER_PATH, '_ionicons-variables.less')
    icons_file_path = os.path.join(LESS_FOLDER_PATH, '_ionicons-icons.less')
    # Variables file: license banner, font settings, and one
    # @ionicon-var-* per icon holding its escaped character code.
    d = []
    d.append('/*!');
    d.append('Ionicons, v%s' % (font_version) );
    d.append('Created by Ben Sperry for the Ionic Framework, http://ionicons.com/');
    d.append('https://twitter.com/benjsperry https://twitter.com/ionicframework');
    d.append('MIT License: https://github.com/driftyco/ionicons');
    d.append('*/');
    d.append('// Ionicons Variables')
    d.append('// --------------------------\n')
    d.append('@ionicons-font-path: "../fonts";')
    d.append('@ionicons-font-family: "%s";' % (font_name) )
    d.append('@ionicons-version: "%s";' % (font_version) )
    d.append('@ionicons-prefix: %s;' % (css_prefix) )
    d.append('')
    for ionicon in data['icons']:
        # '0xf101' -> LESS escape '\f101'
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('@ionicon-var-%s: "%s";' % (ionicon['name'], chr_code) )
    f = open(variables_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()
    # Icons file: one grouped selector extending .ion, then a :before
    # rule per icon referencing its variable.
    d = []
    d.append('// Ionicons Icons')
    d.append('// --------------------------\n')
    group = [ '.%s' % (data['name'].lower()) ]
    for ionicon in data['icons']:
        group.append('.@{ionicons-prefix}%s:before' % (ionicon['name']) )
    d.append( ',\n'.join(group) )
    d.append('{')
    d.append('  &:extend(.ion);')
    d.append('}')
    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('.@{ionicons-prefix}%s:before { content: @ionicon-var-%s; }' % (ionicon['name'], ionicon['name']) )
    f = open(icons_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()
def generate_scss(data):
    """Write the SCSS variables and icon-class files, then compile CSS."""
    print "Generate SCSS"
    font_name = data['name']
    font_version = data['version']
    css_prefix = data['prefix']
    variables_file_path = os.path.join(SCSS_FOLDER_PATH, '_ionicons-variables.scss')
    icons_file_path = os.path.join(SCSS_FOLDER_PATH, '_ionicons-icons.scss')
    # Variables file: overridable (!default) font settings plus one
    # $ionicon-var-* per icon.
    d = []
    d.append('// Ionicons Variables')
    d.append('// --------------------------\n')
    d.append('$ionicons-font-path: "../fonts" !default;')
    d.append('$ionicons-font-family: "%s" !default;' % (font_name) )
    d.append('$ionicons-version: "%s" !default;' % (font_version) )
    d.append('$ionicons-prefix: %s !default;' % (css_prefix) )
    d.append('')
    for ionicon in data['icons']:
        # '0xf101' -> SCSS escape '\f101'
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('$ionicon-var-%s: "%s";' % (ionicon['name'], chr_code) )
    f = open(variables_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()
    # Icons file: one grouped selector extending .ion, then a :before
    # rule per icon referencing its variable.
    d = []
    d.append('// Ionicons Icons')
    d.append('// --------------------------\n')
    group = [ '.%s' % (data['name'].lower()) ]
    for ionicon in data['icons']:
        group.append('.#{$ionicons-prefix}%s:before' % (ionicon['name']) )
    d.append( ',\n'.join(group) )
    d.append('{')
    d.append('  @extend .ion;')
    d.append('}')
    for ionicon in data['icons']:
        chr_code = ionicon['code'].replace('0x', '\\')
        d.append('.#{$ionicons-prefix}%s:before { content: $ionicon-var-%s; }' % (ionicon['name'], ionicon['name']) )
    f = open(icons_file_path, 'w')
    f.write( '\n'.join(d) )
    f.close()
    generate_css_from_scss(data)
def generate_css_from_scss(data):
    """Compile ionicons.scss to plain and minified CSS via the sass CLI
    (sass must be on PATH)."""
    print "Generate CSS From SCSS"
    scss_file_path = os.path.join(SCSS_FOLDER_PATH, 'ionicons.scss')
    css_file_path = os.path.join(CSS_FOLDER_PATH, 'ionicons.css')
    css_min_file_path = os.path.join(CSS_FOLDER_PATH, 'ionicons.min.css')
    cmd = "sass %s %s --style compact" % (scss_file_path, css_file_path)
    call(cmd, shell=True)
    print "Generate Minified CSS From SCSS"
    cmd = "sass %s %s --style compressed" % (scss_file_path, css_min_file_path)
    call(cmd, shell=True)
def generate_cheatsheet(data):
    """Render cheatsheet.html from its template, one row per icon."""
    print "Generate Cheatsheet"
    cheatsheet_file_path = os.path.join(ROOT_PATH, 'cheatsheet.html')
    template_path = os.path.join(BUILDER_PATH, 'cheatsheet', 'template.html')
    icon_row_path = os.path.join(BUILDER_PATH, 'cheatsheet', 'icon-row.html')
    f = open(template_path, 'r')
    template_html = f.read()
    f.close()
    f = open(icon_row_path, 'r')
    icon_row_template = f.read()
    f.close()
    content = []
    for ionicon in data['icons']:
        css_code = ionicon['code'].replace('0x', '\\')
        # NOTE(review): escaped_html_code is computed identically to
        # html_code below; a displayable escaped entity would normally
        # use '&amp;#x' -- confirm against the icon-row template.
        escaped_html_code = ionicon['code'].replace('0x', '&#x') + ';'
        html_code = ionicon['code'].replace('0x', '&#x') + ';'
        item_row = icon_row_template
        item_row = item_row.replace('{{name}}', ionicon['name'])
        item_row = item_row.replace('{{prefix}}', data['prefix'])
        item_row = item_row.replace('{{css_code}}', css_code)
        item_row = item_row.replace('{{escaped_html_code}}', escaped_html_code)
        item_row = item_row.replace('{{html_code}}', html_code)
        content.append(item_row)
    template_html = template_html.replace("{{font_name}}", data["name"])
    template_html = template_html.replace("{{font_version}}", data["version"])
    template_html = template_html.replace("{{icon_count}}", str(len(data["icons"])) )
    template_html = template_html.replace("{{content}}", '\n'.join(content) )
    f = open(cheatsheet_file_path, 'w')
    f.write(template_html)
    f.close()
def generate_component_json(data):
    """Write the component(1) package manifest listing the CSS and fonts."""
    print "Generate component.json"
    d = {
        "name": data['name'],
        "repo": "driftyco/ionicons",
        "description": "The premium icon font for Ionic Framework.",
        "version": data['version'],
        "keywords": [],
        "dependencies": {},
        "development": {},
        "license": "MIT",
        "styles": [
            "css/%s.css" % (data['name'].lower())
        ],
        "fonts": [
            "fonts/%s.eot" % (data['name'].lower()),
            "fonts/%s.svg" % (data['name'].lower()),
            "fonts/%s.ttf" % (data['name'].lower()),
            "fonts/%s.woff" % (data['name'].lower())
        ]
    }
    txt = json.dumps(d, indent=4, separators=(',', ': '))
    component_file_path = os.path.join(ROOT_PATH, 'component.json')
    f = open(component_file_path, 'w')
    f.write(txt)
    f.close()
def generate_composer_json(data):
    """Write the Composer (PHP) package manifest.  Content is static;
    *data* is unused here."""
    print "Generate composer.json"
    d = {
        "name": "driftyco/ionicons",
        "description": "The premium icon font for Ionic Framework.",
        "keywords": [ "fonts", "icon font", "icons", "ionic", "web font"],
        "homepage": "http://ionicons.com/",
        "authors": [
            {
                "name": "Ben Sperry",
                "email": "ben@drifty.com",
                "role": "Designer",
                "homepage": "https://twitter.com/benjsperry"
            },
            {
                "name": "Adam Bradley",
                "email": "adam@drifty.com",
                "role": "Developer",
                "homepage": "https://twitter.com/adamdbradley"
            },
            {
                "name": "Max Lynch",
                "email": "max@drifty.com",
                "role": "Developer",
                "homepage": "https://twitter.com/maxlynch"
            }
        ],
        "extra": {},
        "license": [ "MIT" ]
    }
    txt = json.dumps(d, indent=4, separators=(',', ': '))
    composer_file_path = os.path.join(ROOT_PATH, 'composer.json')
    f = open(composer_file_path, 'w')
    f.write(txt)
    f.close()
def generate_bower_json(data):
    """Write the Bower package manifest with the current name/version."""
    print "Generate bower.json"
    d = {
        "name": data['name'],
        "version": data['version'],
        "homepage": "https://github.com/driftyco/ionicons",
        "authors": [
            "Ben Sperry <ben@drifty.com>",
            "Adam Bradley <adam@drifty.com>",
            "Max Lynch <max@drifty.com>"
        ],
        "description": "Ionicons - free and beautiful icons from the creators of Ionic Framework",
        "main": [
            "css/%s.css" % (data['name'].lower()),
            "fonts/*"
        ],
        "keywords": [ "fonts", "icon font", "icons", "ionic", "web font"],
        "license": "MIT",
        "ignore": [
            "**/.*",
            "builder",
            "node_modules",
            "bower_components",
            "test",
            "tests"
        ]
    }
    txt = json.dumps(d, indent=4, separators=(',', ': '))
    bower_file_path = os.path.join(ROOT_PATH, 'bower.json')
    f = open(bower_file_path, 'w')
    f.write(txt)
    f.close()
def get_build_data():
    """Load and return builder/build_data.json (font name, version,
    prefix, and the list of icon names/codes)."""
    build_data_path = os.path.join(BUILDER_PATH, 'build_data.json')
    f = open(build_data_path, 'r')
    data = json.loads(f.read())
    f.close()
    return data
if __name__ == "__main__":
    main()
|
bala4901/odoo | refs/heads/master | addons/stock/procurement.py | 16 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
from openerp import SUPERUSER_ID
from dateutil.relativedelta import relativedelta
from datetime import datetime
import openerp
class procurement_group(osv.osv):
    """Extend procurement.group with the partner the group is for."""
    _inherit = 'procurement.group'
    _columns = {
        'partner_id': fields.many2one('res.partner', 'Partner')
    }
class procurement_rule(osv.osv):
    """Extend procurement.rule with stock-specific data: the 'move'
    action, source/destination locations, route, picking type, lead
    time and propagation options."""
    _inherit = 'procurement.rule'
    def _get_action(self, cr, uid, context=None):
        # Add the stock-specific 'move' action to the selectable actions.
        result = super(procurement_rule, self)._get_action(cr, uid, context=context)
        return result + [('move', _('Move From Another Location'))]
    def _get_rules(self, cr, uid, ids, context=None):
        # store= trigger helper: return the rules whose route_sequence
        # must be recomputed when the given routes change.
        res = []
        for route in self.browse(cr, uid, ids):
            res += [x.id for x in route.pull_ids]
        return res
    _columns = {
        'location_id': fields.many2one('stock.location', 'Procurement Location'),
        'location_src_id': fields.many2one('stock.location', 'Source Location',
            help="Source location is action=move"),
        'route_id': fields.many2one('stock.location.route', 'Route',
            help="If route_id is False, the rule is global"),
        'procure_method': fields.selection([('make_to_stock', 'Take From Stock'), ('make_to_order', 'Create Procurement')], 'Move Supply Method', required=True,
                                           help="""Determines the procurement method of the stock move that will be generated: whether it will need to 'take from the available stock' in its source location or needs to ignore its stock and create a procurement over there."""),
        'route_sequence': fields.related('route_id', 'sequence', string='Route Sequence',
            store={
                'stock.location.route': (_get_rules, ['sequence'], 10),
                'procurement.rule': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10),
        }),
        'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type',
            help="Picking Type determines the way the picking should be shown in the view, reports, ..."),
        'delay': fields.integer('Number of Days'),
        'partner_address_id': fields.many2one('res.partner', 'Partner Address'),
        'propagate': fields.boolean('Propagate cancel and split', help='If checked, when the previous move of the move (which was generated by a next procurement) is cancelled or split, the move generated by this move will too'),
        'warehouse_id': fields.many2one('stock.warehouse', 'Served Warehouse', help='The warehouse this rule is for'),
        'propagate_warehouse_id': fields.many2one('stock.warehouse', 'Warehouse to Propagate', help="The warehouse to propagate on the created move/procurement, which can be different of the warehouse this rule is for (e.g for resupplying rules from another warehouse)"),
    }
    _defaults = {
        'procure_method': 'make_to_stock',
        'propagate': True,
        'delay': 0,
    }
class procurement_order(osv.osv):
    """Extend procurement.order with stock data: the location to procure
    at, the moves it generated / the move that requested it, preferred
    routes, warehouse, and the minimum-stock rule that triggered it."""
    _inherit = "procurement.order"
    _columns = {
        'location_id': fields.many2one('stock.location', 'Procurement Location'),  # not required because task may create procurements that aren't linked to a location with project_mrp
        'partner_dest_id': fields.many2one('res.partner', 'Customer Address', help="In case of dropshipping, we need to know the destination address more precisely"),
        'move_ids': fields.one2many('stock.move', 'procurement_id', 'Moves', help="Moves created by the procurement"),
        'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Move which caused (created) the procurement"),
        'route_ids': fields.many2many('stock.location.route', 'stock_location_route_procurement', 'procurement_id', 'route_id', 'Preferred Routes', help="Preferred route to be followed by the procurement order. Usually copied from the generating document (SO) but could be set up manually."),
        'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', help="Warehouse to consider for the route selection"),
        'orderpoint_id': fields.many2one('stock.warehouse.orderpoint', 'Minimum Stock Rule'),
    }
def propagate_cancel(self, cr, uid, procurement, context=None):
if procurement.rule_id.action == 'move' and procurement.move_ids:
self.pool.get('stock.move').action_cancel(cr, uid, [m.id for m in procurement.move_ids], context=context)
def cancel(self, cr, uid, ids, context=None):
if context is None:
context = {}
to_cancel_ids = self.get_cancel_ids(cr, uid, ids, context=context)
ctx = context.copy()
#set the context for the propagation of the procurement cancelation
ctx['cancel_procurement'] = True
for procurement in self.browse(cr, uid, to_cancel_ids, context=ctx):
self.propagate_cancel(cr, uid, procurement, context=ctx)
return super(procurement_order, self).cancel(cr, uid, to_cancel_ids, context=ctx)
def _find_parent_locations(self, cr, uid, procurement, context=None):
location = procurement.location_id
res = [location.id]
while location.location_id:
location = location.location_id
res.append(location.id)
return res
def change_warehouse_id(self, cr, uid, ids, warehouse_id, context=None):
if warehouse_id:
warehouse = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context)
return {'value': {'location_id': warehouse.lot_stock_id.id}}
return {}
    def _search_suitable_rule(self, cr, uid, procurement, domain, context=None):
        '''Return the ids of procurement rules matching *domain*,
        trying route sources in priority order: the routes set on the
        procurement itself, then the product's (and product category's)
        routes, then the warehouse's routes, and finally global rules
        bound to no route at all.'''
        pull_obj = self.pool.get('procurement.rule')
        warehouse_route_ids = []
        if procurement.warehouse_id:
            # Accept rules of this warehouse or warehouse-agnostic rules.
            domain += ['|', ('warehouse_id', '=', procurement.warehouse_id.id), ('warehouse_id', '=', False)]
            warehouse_route_ids = [x.id for x in procurement.warehouse_id.route_ids]
        product_route_ids = [x.id for x in procurement.product_id.route_ids + procurement.product_id.categ_id.total_route_ids]
        procurement_route_ids = [x.id for x in procurement.route_ids]
        res = pull_obj.search(cr, uid, domain + [('route_id', 'in', procurement_route_ids)], order='route_sequence, sequence', context=context)
        if not res:
            res = pull_obj.search(cr, uid, domain + [('route_id', 'in', product_route_ids)], order='route_sequence, sequence', context=context)
        if not res:
            res = warehouse_route_ids and pull_obj.search(cr, uid, domain + [('route_id', 'in', warehouse_route_ids)], order='route_sequence, sequence', context=context) or []
        if not res:
            # Fallback: global rules that are not attached to any route.
            res = pull_obj.search(cr, uid, domain + [('route_id', '=', False)], order='sequence', context=context)
        return res
def _find_suitable_rule(self, cr, uid, procurement, context=None):
rule_id = super(procurement_order, self)._find_suitable_rule(cr, uid, procurement, context=context)
if not rule_id:
#a rule defined on 'Stock' is suitable for a procurement in 'Stock\Bin A'
all_parent_location_ids = self._find_parent_locations(cr, uid, procurement, context=context)
rule_id = self._search_suitable_rule(cr, uid, procurement, [('location_id', 'in', all_parent_location_ids)], context=context)
rule_id = rule_id and rule_id[0] or False
return rule_id
    def _run_move_create(self, cr, uid, procurement, context=None):
        ''' Returns a dictionary of values that will be used to create a stock move from a procurement.
        This function assumes that the given procurement has a rule (action == 'move') set on it.

        :param procurement: browse record
        :rtype: dictionary
        '''
        # Schedule the move earlier than the procurement deadline by the
        # rule's lead time (delay, in days).
        newdate = (datetime.strptime(procurement.date_planned, '%Y-%m-%d %H:%M:%S') - relativedelta(days=procurement.rule_id.delay or 0)).strftime('%Y-%m-%d %H:%M:%S')
        group_id = False
        if procurement.rule_id.group_propagation_option == 'propagate':
            group_id = procurement.group_id and procurement.group_id.id or False
        elif procurement.rule_id.group_propagation_option == 'fixed':
            group_id = procurement.rule_id.group_id and procurement.rule_id.group_id.id or False
        #it is possible that we've already got some move done, so check for the done qty and create
        #a new move with the correct qty
        already_done_qty = 0
        already_done_qty_uos = 0
        for move in procurement.move_ids:
            already_done_qty += move.product_uom_qty if move.state == 'done' else 0
            already_done_qty_uos += move.product_uos_qty if move.state == 'done' else 0
        qty_left = max(procurement.product_qty - already_done_qty, 0)
        qty_uos_left = max(procurement.product_uos_qty - already_done_qty_uos, 0)
        vals = {
            'name': procurement.name,
            # Company fallback chain: rule's company, then the rule's
            # source/destination location company, then the procurement's.
            'company_id': procurement.rule_id.company_id.id or procurement.rule_id.location_src_id.company_id.id or procurement.rule_id.location_id.company_id.id or procurement.company_id.id,
            'product_id': procurement.product_id.id,
            'product_uom': procurement.product_uom.id,
            'product_uom_qty': qty_left,
            'product_uos_qty': (procurement.product_uos and qty_uos_left) or qty_left,
            'product_uos': (procurement.product_uos and procurement.product_uos.id) or procurement.product_uom.id,
            'partner_id': procurement.rule_id.partner_address_id.id or (procurement.group_id and procurement.group_id.partner_id.id) or False,
            'location_id': procurement.rule_id.location_src_id.id,
            'location_dest_id': procurement.rule_id.location_id.id,
            'move_dest_id': procurement.move_dest_id and procurement.move_dest_id.id or False,
            'procurement_id': procurement.id,
            'rule_id': procurement.rule_id.id,
            'procure_method': procurement.rule_id.procure_method,
            'origin': procurement.origin,
            'picking_type_id': procurement.rule_id.picking_type_id.id,
            'group_id': group_id,
            'route_ids': [(4, x.id) for x in procurement.route_ids],
            'warehouse_id': procurement.rule_id.propagate_warehouse_id.id or procurement.rule_id.warehouse_id.id,
            'date': newdate,
            'date_expected': newdate,
            'propagate': procurement.rule_id.propagate,
        }
        return vals
def _run(self, cr, uid, procurement, context=None):
if procurement.rule_id and procurement.rule_id.action == 'move':
if not procurement.rule_id.location_src_id:
self.message_post(cr, uid, [procurement.id], body=_('No source location defined!'), context=context)
return False
move_obj = self.pool.get('stock.move')
move_dict = self._run_move_create(cr, uid, procurement, context=context)
#create the move as SUPERUSER because the current user may not have the rights to do it (mto product launched by a sale for example)
move_obj.create(cr, SUPERUSER_ID, move_dict, context=context)
return True
return super(procurement_order, self)._run(cr, uid, procurement, context=context)
    def run(self, cr, uid, ids, context=None):
        """Run the procurements, then batch-confirm every draft stock
        move that 'move' rules created for them."""
        res = super(procurement_order, self).run(cr, uid, ids, context=context)
        #after all the procurements are run, check if some created a draft stock move that needs to be confirmed
        #(we do that in batch because it fasts the picking assignation and the picking state computation)
        move_to_confirm_ids = []
        for procurement in self.browse(cr, uid, ids, context=context):
            if procurement.state == "running" and procurement.rule_id and procurement.rule_id.action == "move":
                move_to_confirm_ids += [m.id for m in procurement.move_ids if m.state == 'draft']
        if move_to_confirm_ids:
            self.pool.get('stock.move').action_confirm(cr, uid, move_to_confirm_ids, context=context)
        return res
    def _check(self, cr, uid, procurement, context=None):
        ''' Implement the procurement checking for rules of type 'move'. The procurement will be satisfied only if all related
            moves are done/cancel and if the requested quantity is moved.

        Side effect: when all moves are closed but the done quantity does
        not match the requested one, the procurement is put in exception
        and a message is posted explaining why.
        '''
        if procurement.rule_id and procurement.rule_id.action == 'move':
            uom_obj = self.pool.get('product.uom')
            done_test_list = []
            done_cancel_test_list = []
            qty_done = 0
            for move in procurement.move_ids:
                done_test_list.append(move.state == 'done')
                done_cancel_test_list.append(move.state in ('done', 'cancel'))
                qty_done += move.product_qty if move.state == 'done' else 0
            # Convert the moved quantity into the procurement's UoM
            # before comparing.
            qty_done = uom_obj._compute_qty(cr, uid, procurement.product_id.uom_id.id, qty_done, procurement.product_uom.id)
            at_least_one_done = any(done_test_list)
            all_done_or_cancel = all(done_cancel_test_list)
            if not all_done_or_cancel:
                # Some moves are still in progress: not satisfied yet.
                return False
            elif all_done_or_cancel and procurement.product_qty == qty_done:
                return True
            elif at_least_one_done:
                #some move cancelled and some validated
                self.message_post(cr, uid, [procurement.id], body=_('Some stock moves have been cancelled for this procurement. Run the procurement again to trigger a move for the remaining quantity or change the procurement quantity to finish it directly'), context=context)
            else:
                #all move are cancelled
                self.message_post(cr, uid, [procurement.id], body=_('All stock moves have been cancelled for this procurement.'), context=context)
            self.write(cr, uid, [procurement.id], {'state': 'exception'}, context=context)
            return False
        return super(procurement_order, self)._check(cr, uid, procurement, context)
def do_view_pickings(self, cr, uid, ids, context=None):
'''
This function returns an action that display the pickings of the procurements belonging
to the same procurement group of given ids.
'''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'stock', 'do_view_pickings')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
group_ids = set([proc.group_id.id for proc in self.browse(cr, uid, ids, context=context) if proc.group_id])
result['domain'] = "[('group_id','in',[" + ','.join(map(str, list(group_ids))) + "])]"
return result
    def run_scheduler(self, cr, uid, use_new_cursor=False, context=None):
        '''
        Call the scheduler in order to check the running procurements (super method), to check the minimum stock rules
        and the availability of moves. This function is intended to be run for all the companies at the same time, so
        we run functions as SUPERUSER to avoid intercompanies and access rights issues.

        @param self: The object pointer
        @param cr: The current row, from the database cursor,
        @param uid: The current user ID for security checks
        @param ids: List of selected IDs
        @param use_new_cursor: False or the dbname
        @param context: A standard dictionary for contextual values
        @return: Dictionary of values
        '''
        super(procurement_order, self).run_scheduler(cr, uid, use_new_cursor=use_new_cursor, context=context)
        if context is None:
            context = {}
        try:
            if use_new_cursor:
                # Work on a dedicated cursor so progress can be committed
                # incrementally (batch job mode).
                cr = openerp.registry(use_new_cursor).cursor()

            move_obj = self.pool.get('stock.move')
            #Minimum stock rules
            company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id
            self._procure_orderpoint_confirm(cr, SUPERUSER_ID, use_new_cursor=False, company_id=company.id, context=context)

            #Search all confirmed stock_moves and try to assign them
            # Batches of 100 keep memory bounded; commit per batch (and once
            # at the end) when on a dedicated cursor.
            confirmed_ids = move_obj.search(cr, uid, [('state', '=', 'confirmed')], limit=None, order='picking_priority desc, date_expected asc', context=context)
            for x in xrange(0, len(confirmed_ids), 100):
                move_obj.action_assign(cr, uid, confirmed_ids[x:x + 100], context=context)
                if use_new_cursor:
                    cr.commit()

            if use_new_cursor:
                cr.commit()
        finally:
            if use_new_cursor:
                try:
                    cr.close()
                except Exception:
                    pass
        return {}
def _get_orderpoint_date_planned(self, cr, uid, orderpoint, start_date, context=None):
date_planned = start_date
return date_planned.strftime(DEFAULT_SERVER_DATE_FORMAT)
    def _prepare_orderpoint_procurement(self, cr, uid, orderpoint, product_qty, context=None):
        """Build the values dict used to create a procurement.order that
        replenishes *product_qty* units for the given *orderpoint*, planned
        from today's date."""
        return {
            'name': orderpoint.name,
            'date_planned': self._get_orderpoint_date_planned(cr, uid, orderpoint, datetime.today(), context=context),
            'product_id': orderpoint.product_id.id,
            'product_qty': product_qty,
            'company_id': orderpoint.company_id.id,
            'product_uom': orderpoint.product_uom.id,
            'location_id': orderpoint.location_id.id,
            'origin': orderpoint.name,
            'warehouse_id': orderpoint.warehouse_id.id,
            'orderpoint_id': orderpoint.id,
            'group_id': orderpoint.group_id.id,
        }
def _product_virtual_get(self, cr, uid, order_point):
product_obj = self.pool.get('product.product')
return product_obj._product_available(cr, uid,
[order_point.product_id.id],
context={'location': order_point.location_id.id})[order_point.product_id.id]['virtual_available']
    def _procure_orderpoint_confirm(self, cr, uid, use_new_cursor=False, company_id=False, context=None):
        '''
        Create procurement based on Orderpoint

        use_new_cursor: False or the dbname

        @return: Dictionary of values
        '''
        if context is None:
            context = {}
        if use_new_cursor:
            # NOTE(review): ``run_scheduler`` obtains its dedicated cursor via
            # ``openerp.registry(dbname).cursor()`` while this method goes
            # through ``.db.cursor()`` — confirm both are equivalent on this
            # OpenERP version.
            cr = openerp.registry(use_new_cursor).db.cursor()
        orderpoint_obj = self.pool.get('stock.warehouse.orderpoint')
        procurement_obj = self.pool.get('procurement.order')

        # Walk the company's orderpoints in slices of 100 records.
        offset = 0
        ids = [1]
        while ids:
            ids = orderpoint_obj.search(cr, uid, [('company_id', '=', company_id)], offset=offset, limit=100)
            for op in orderpoint_obj.browse(cr, uid, ids, context=context):
                # Forecasted stock of the product at the orderpoint location.
                prods = self._product_virtual_get(cr, uid, op)
                if prods is None:
                    continue
                if prods < op.product_min_qty:
                    # Replenish up to the max quantity, rounded up to the next
                    # multiple of qty_multiple.
                    qty = max(op.product_min_qty, op.product_max_qty) - prods

                    reste = qty % op.qty_multiple
                    if reste > 0:
                        qty += op.qty_multiple - reste

                    if qty <= 0:
                        continue

                    # Deduct what is already covered by pending procurements.
                    qty -= orderpoint_obj.subtract_procurements(cr, uid, op, context=context)

                    if qty > 0:
                        proc_id = procurement_obj.create(cr, uid,
                                                         self._prepare_orderpoint_procurement(cr, uid, op, qty, context=context),
                                                         context=context)
                        self.check(cr, uid, [proc_id])
                        self.run(cr, uid, [proc_id])
            offset += len(ids)
            if use_new_cursor:
                cr.commit()
        if use_new_cursor:
            cr.commit()
            cr.close()
        return {}
|
sonaht/ansible | refs/heads/devel | test/units/modules/network/iosxr/iosxr_module.py | 56 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
def set_module_args(args):
    """Serialize *args* as the module's ANSIBLE_MODULE_ARGS payload so the
    next AnsibleModule instantiation picks them up."""
    serialized = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(serialized)
# Directory holding test fixture files, plus an in-memory cache keyed by path.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    """Load fixture *name* from the ``fixtures`` directory, caching results.

    The file content is parsed as JSON when possible; otherwise the raw text
    is returned. Repeated calls for the same name hit the in-memory cache.
    """
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except ValueError:
        # Not a JSON document: keep the raw text. ValueError (which
        # json.JSONDecodeError subclasses) is the narrowest exception here;
        # the original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt.
        pass

    fixture_data[path] = data
    return data
class AnsibleExitJson(Exception):
    """Raised by the mocked ``exit_json`` to stop module execution in tests."""
    pass
class AnsibleFailJson(Exception):
    """Raised by the mocked ``fail_json`` to stop module execution in tests."""
    pass
class TestIosxrModule(unittest.TestCase):
    """Base class for iosxr module unit tests.

    Subclasses are expected to set ``self.module`` and override
    ``load_fixtures``; tests then call :meth:`execute_module` to run the
    module under mocked exit/fail handlers and compare results.
    """

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
        """Run the module and assert on failed/changed state and on the
        ``commands`` it produced (sorted comparison by default)."""
        self.load_fixtures(commands)

        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)

        if commands is not None:
            if sort:
                self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
            else:
                self.assertEqual(commands, result['commands'], result['commands'])

        return result

    def failed(self):
        """Run the module expecting ``fail_json``; return its kwargs."""
        def fail_json(*args, **kwargs):
            kwargs['failed'] = True
            raise AnsibleFailJson(kwargs)

        with patch.object(basic.AnsibleModule, 'fail_json', fail_json):
            with self.assertRaises(AnsibleFailJson) as exc:
                self.module.main()

        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result

    def changed(self, changed=False):
        """Run the module expecting ``exit_json``; return its kwargs."""
        def exit_json(*args, **kwargs):
            if 'changed' not in kwargs:
                kwargs['changed'] = False
            raise AnsibleExitJson(kwargs)

        with patch.object(basic.AnsibleModule, 'exit_json', exit_json):
            with self.assertRaises(AnsibleExitJson) as exc:
                self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result

    def load_fixtures(self, commands=None):
        # Hook for subclasses to install connection fixtures; default no-op.
        pass
|
pycontw/pycontw2016 | refs/heads/master | src/proposals/tests/test_management.py | 1 | import collections
import datetime
import json
import re
import unittest.mock
import pytest
import pytz
from django.utils.timezone import now
from django.conf import settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import override_settings
from proposals.models import TalkProposal
# Taipei local time is the reference for all hour-window computations below.
taiwan_tz = pytz.timezone('Asia/Taipei')

# Minimal stand-in for an HTTP response object, used when mocking the Slack
# client's ``pool.urlopen`` call.
FakeHTTPResponse = collections.namedtuple('FakeHTTPResponse', 'status data')
# Define fixtures
@pytest.fixture()
def today_valid_hour():
    """An hour (Taipei local time) that has already passed today."""
    local_now = taiwan_tz.normalize(now())
    return local_now.hour
def make_proposal_created_earlier(proposal, days=1):
    """Backdate *proposal*'s creation timestamp by *days* days and save it."""
    proposal.created_at = now() - datetime.timedelta(days=days)
    proposal.save()
@pytest.fixture()
def dayago_talk_proposal(user):
    """A talk proposal by ``user`` whose creation date is one day ago."""
    proposal = TalkProposal.objects.create(
        id=77,
        submitter=user,
        title='How to build a time machine in 1 day',
        category='SCI'
    )
    make_proposal_created_earlier(proposal)
    return proposal
@pytest.fixture()
def another_user_dayago_talk_proposal(another_user):
    """A talk proposal by ``another_user`` created one day ago."""
    proposal = TalkProposal.objects.create(
        id=9527,
        submitter=another_user,
        title='Transition from Ruby to Python',
        category='CORE',
    )
    make_proposal_created_earlier(proposal)
    return proposal
@pytest.fixture()
def weekago_talk_proposal(user):
    """A talk proposal created seven days ago (outside the default window)."""
    proposal = TalkProposal.objects.create(
        id=56,
        submitter=user,
        title='Long long time ago when Python was still 2.x',
    )
    make_proposal_created_earlier(proposal, days=7)
    return proposal
def test_weekago_talk_created_datetime(weekago_talk_proposal):
    """Sanity-check the backdating fixture: proposal is at least a week old."""
    proposal_lifetime = now() - weekago_talk_proposal.created_at
    print('The proposal has been created for %d days' % proposal_lifetime.days)
    assert proposal_lifetime >= datetime.timedelta(weeks=1)
def test_recent_proposal_default_command(
        dayago_talk_proposal, weekago_talk_proposal,
        another_user_dayago_talk_proposal,
        today_valid_hour, capsys,
):
    """With default options, only the two day-old proposals are reported."""
    call_command('recent_proposals', hour=today_valid_hour)
    out, err = capsys.readouterr()
    print(out)
    # Test only two talk proposals are retrieved
    assert re.search(r"^Got total 2 new proposals", out, re.MULTILINE)
    # Test total there are three proposals submitted
    assert re.search(
        r"^So far 3 talk and 0 tutorial proposals", out, re.MULTILINE
    )
    # Test the title of these two proposals are in the output
    for proposal in [dayago_talk_proposal, another_user_dayago_talk_proposal]:
        assert re.search(proposal.title, out, re.MULTILINE)
    # Test the title of outdated proposals are not in the output
    assert not re.search(weekago_talk_proposal.title, out, re.MULTILINE)
def test_cancelled_proposal_not_shown_in_recent_proposals(
        cancelled_talk_proposal,
        another_user_dayago_talk_proposal,
        weekago_talk_proposal,
        today_valid_hour, capsys,
):
    """Cancelled proposals are excluded from both the listing and the totals."""
    call_command('recent_proposals', days=6, hour=today_valid_hour)
    out, err = capsys.readouterr()
    # Test only one talk proposal is retrieved
    assert re.search(r"^Got total 1 new proposals", out, re.MULTILINE)
    assert re.search(
        another_user_dayago_talk_proposal.title, out, re.MULTILINE,
    )
    for proposal in [cancelled_talk_proposal, weekago_talk_proposal]:
        assert not re.search(
            proposal.title, out, re.MULTILINE
        )
    # Make sure total proposal number does not count the cancelled talks in
    assert re.search(
        r"^So far 2 talk and 0 tutorial proposals", out, re.MULTILINE
    )
def test_recent_tutorial_proposals_only(
        tutorial_proposal,
        today_valid_hour, capsys
):
    """A tutorial-only submission yields a Tutorials section and no Talks."""
    make_proposal_created_earlier(tutorial_proposal)
    call_command('recent_proposals', hour=today_valid_hour)
    out, err = capsys.readouterr()
    print(out)
    assert 'Talks:\n' not in out
    assert 'Tutorials:\n' in out
    assert re.search(r"^Got total 1 new proposals", out, re.MULTILINE)
# Testing mailing ability
@pytest.mark.parametrize('receivers', [
    ['receiver@pycon.tw'],
    ['receiver@pycon.tw', 'another.receiver@pycon.tw']
])
@override_settings(  # Make sure we don't really send an email.
    EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend',
    DEFAULT_FROM_EMAIL='dev@pycon.tw',
)
def test_command_send_mail(
        dayago_talk_proposal,
        today_valid_hour,
        receivers,
):
    """--mailto sends exactly one summary email to the given receivers."""
    call_command(
        'recent_proposals',
        hour=today_valid_hour,
        mailto=receivers,
    )
    assert len(mail.outbox) == 1
    email = mail.outbox[0]
    assert email.from_email == 'dev@pycon.tw'
    assert email.to == receivers
    assert email.subject.startswith(
        '[PyConTW2016][Program] Proposal submission summary'
    )
# Testing Slack
@override_settings(  # Make sure we don't really talk to Slack.
    SLACK_WEBHOOK_URL="https://fake.slack.hook/services/myuniquesignal",
)
def test_slack_connect():
    """Slack.notify POSTs the JSON payload to the configured webhook URL."""
    from proposals.management.commands.slack import Slack
    webhook_url = settings.SLACK_WEBHOOK_URL
    slack = Slack(url=webhook_url)
    # Stub out the underlying urllib3-style pool so no network call happens.
    slack.pool.urlopen = unittest.mock.MagicMock(
        return_value=FakeHTTPResponse(200, b'ok')
    )
    slack.notify(text='Test')
    slack.pool.urlopen.assert_called_once_with(
        "POST",
        webhook_url,
        headers={'Content-Type': "application/json"},
        body=json.dumps({"text": "Test"})
    )
@pytest.mark.django_db
@override_settings(
    SLACK_WEBHOOK_URL=None
)
def test_command_improper_slack_url():
    """--slack without a configured webhook URL raises ImproperlyConfigured."""
    with pytest.raises(ImproperlyConfigured):
        call_command(
            'recent_proposals',
            slack=True,
        )
@pytest.mark.django_db
@override_settings(  # Make sure we don't really talk to Slack.
    SLACK_WEBHOOK_URL="https://fake.slack.hook/services/myuniquesignal"
)
def test_command_with_slack():
    """--slack triggers exactly one Slack notification."""
    from proposals.management.commands.slack import Slack
    with unittest.mock.patch.object(
        Slack, 'notify',
        unittest.mock.MagicMock(
            return_value=(200, b'ok')
        )
    ):
        call_command(
            'recent_proposals',
            slack=True,
        )
        assert Slack.notify.call_count == 1
# Testing edge cases
@pytest.mark.django_db
@pytest.mark.parametrize('days', [-1, 0])
def test_nonpositive_recent_days(days, today_valid_hour):
    """--days must be a positive number; otherwise the command errors out."""
    with pytest.raises(CommandError) as e:
        call_command('recent_proposals', days=days, hour=today_valid_hour)
    assert 'not a positive number' in str(e.value)
@pytest.mark.django_db
def test_no_recent_proposal(today_valid_hour, capsys):
    """With no submissions at all, the command reports nothing found."""
    call_command('recent_proposals', hour=today_valid_hour)
    out, err = capsys.readouterr()
    print(err)
    assert re.search('^No proposals are recently submitted', out, re.MULTILINE)
def test_justly_created_proposal(talk_proposal, today_valid_hour, capsys):
    """A proposal created 'now' falls outside the reporting window."""
    call_command('recent_proposals', hour=today_valid_hour)
    out, err = capsys.readouterr()
    assert re.search('^No proposals are recently submitted', out, re.MULTILINE)
@pytest.mark.parametrize('hour', [-1, 25])
def test_invalid_hour(hour):
    """Out-of-range --hour values are rejected with an explicit message."""
    with pytest.raises(CommandError) as e:
        call_command('recent_proposals', hour=hour)
    assert 'Given hour %d is invalid' % hour in str(e.value)
@pytest.mark.django_db
def test_default_hour_option(capsys):
    """Without --hour the report window ends at the current local hour."""
    now_dt = taiwan_tz.normalize(now())
    call_command('recent_proposals')
    out, err = capsys.readouterr()
    assert re.search(
        r'to {:%Y-%m-%d %H}:00$'.format(now_dt),
        out, re.MULTILINE
    )
def test_yet_present_hour():
    """Requesting an hour that has not yet arrived must be rejected."""
    now_dt = taiwan_tz.normalize(now())
    if now_dt.hour == 23:
        # Between 23:00 and 23:59 there is no "next hour" to request, so the
        # scenario cannot be exercised. Skip explicitly instead of silently
        # returning, so test reports reflect that the check did not run.
        pytest.skip('cannot test a not-yet-present hour between 23:00-23:59')
    with pytest.raises(CommandError) as e:
        call_command('recent_proposals', hour=now_dt.hour + 1)
    assert 'yet present' in str(e.value)
def test_output_table_trimming(
        another_user_dayago_talk_proposal,
        today_valid_hour, capsys
):
    """The category column is trimmed in the output table (the regex matches
    the truncated 'Python Core (...' display form)."""
    call_command('recent_proposals', hour=today_valid_hour)
    out, err = capsys.readouterr()
    assert re.search(r'^Python Core \(...\s+', out, re.MULTILINE)
|
openhatch/oh-mainline | refs/heads/master | vendor/packages/Django/django/conf/locale/te/formats.py | 433 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y'
TIME_FORMAT = 'g:i:s A'
# DATETIME_FORMAT =
# YEAR_MONTH_FORMAT =
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'j M Y'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
# DECIMAL_SEPARATOR =
# THOUSAND_SEPARATOR =
# NUMBER_GROUPING =
|
hntrmrrs/capnproto | refs/heads/master | mega-test.py | 43 | #! /usr/bin/env python
# MEGA TEST
#
# usage: mega-test.py <config>
#
# This runs several tests in parallel and shows progress bars for each, based on a config file.
#
# <config> is a file containing a list of commands to run along with the expected number of lines
# they will output (to stdout and stderr combined), which is how the progress bar is calculated.
# The format of the file is simply one test per line, with the line containing the test name,
# the number of output lines expected, and the test command. Example:
#
# mytest 1523 ./my-test --foo bar
# another 862 ./another-test --baz
#
# Each command is interpreted by `sh -euc`, therefore it is acceptable to use environment
# variables and other shell syntax.
#
# After all tests complete, the config file will be rewritten to update the line counts to the
# actual number of lines seen for all passing tests (failing tests are not updated).
import sys
import re
import os
from errno import EAGAIN
from fcntl import fcntl, F_GETFL, F_SETFL
from select import poll, POLLIN, POLLHUP
from subprocess import Popen, PIPE, STDOUT
# Config syntax: "<name> <expected-line-count> <command...>" per line.
CONFIG_LINE = re.compile("^([^ ]+) +([0-9]+) +(.*)$")

if len(sys.argv) != 2:
    sys.stderr.write("Wrong number of arguments.\n");
    sys.exit(1)

# Per-test logs are written under /tmp/test-output/<name>.log.
if not os.access("/tmp/test-output", os.F_OK):
    os.mkdir("/tmp/test-output")

config = open(sys.argv[1], 'r')

tests = []
class Test:
    """One test process: runs a shell command, counts its output lines, and
    renders a progress bar against the expected line count."""

    def __init__(self, name, command, lines):
        self.name = name
        self.command = command
        # Expected number of output lines (the progress-bar denominator).
        self.lines = lines
        # Lines seen so far.
        self.count = 0
        self.done = False

    def start(self, poller):
        # Run through `sh -euc` so shell syntax from the config file works;
        # merge stderr into stdout and make the pipe non-blocking for poll().
        # NOTE(review): this reads the global ``test`` rather than
        # ``self.command`` — it works because the caller's loop variable is
        # named ``test``, but confirm that is intentional.
        self.proc = Popen(["sh", "-euc", test.command], stdin=dev_null, stdout=PIPE, stderr=STDOUT)
        fd = self.proc.stdout.fileno()
        flags = fcntl(fd, F_GETFL)
        fcntl(fd, F_SETFL, flags | os.O_NONBLOCK)
        poller.register(self.proc.stdout, POLLIN)
        self.log = open("/tmp/test-output/" + self.name + ".log", "w")

    def update(self):
        # Drain whatever output is available; return True once the process
        # has closed its output (EOF) and been reaped.
        try:
            while True:
                text = self.proc.stdout.read()
                if text == "":
                    self.proc.wait()
                    self.done = True
                    self.log.close()
                    return True
                self.count += text.count("\n")
                self.log.write(text)
        except IOError as e:
            if e.errno == EAGAIN:
                # Non-blocking read: no more data right now.
                return False
            raise

    def print_bar(self):
        # Python 2 integer division: percent is truncated.
        percent = self.count * 100 / self.lines
        status = "(%3d%%)" % percent
        color_on = ""
        color_off = ""
        if self.done:
            if self.proc.returncode == 0:
                color_on = "\033[0;32m"
                status = "PASS"
            else:
                color_on = "\033[0;31m"
                status = "FAIL: /tmp/test-output/%s.log" % self.name
            color_off = "\033[0m"
        print "%s%-16s |%-25s| %6d/%6d %s%s " % (
            color_on, self.name, '=' * min(percent / 4, 25), self.count, self.lines, status, color_off)

    def passed(self):
        return self.proc.returncode == 0
# Parse the config file into Test objects, skipping blanks and comments.
for line in config:
    if len(line) > 0 and not line.startswith("#"):
        match = CONFIG_LINE.match(line)
        if not match:
            sys.stderr.write("Invalid config syntax: %s\n" % line);
            sys.exit(1)
        test = Test(match.group(1), match.group(3), int(match.group(2)))
        tests.append(test)
config.close()

dev_null = open("/dev/null", "rw")

# Launch every test and map its stdout fd back to the Test for poll() events.
poller = poll()
fd_map = {}

for test in tests:
    test.start(poller)
    fd_map[test.proc.stdout.fileno()] = test

active_count = len(tests)
def print_bars():
    # Render one progress bar per test, in config order.
    for test in tests:
        test.print_bar()
print_bars()

# Event loop: redraw all bars (cursor moved back up with ANSI escapes) each
# time any test produces output, until every process has exited.
while active_count > 0:
    for (fd, event) in poller.poll():
        if fd_map[fd].update():
            active_count -= 1
            poller.unregister(fd)
    sys.stdout.write("\033[%dA\r" % len(tests))
    print_bars()

# Rewrite the config: passing tests get their observed line counts, failing
# tests keep their previous expected counts.
new_config = open(sys.argv[1], "w")
for test in tests:
    if test.passed():
        new_config.write("%-16s %6d %s\n" % (test.name, test.count, test.command))
    else:
        new_config.write("%-16s %6d %s\n" % (test.name, test.lines, test.command))

# Exit non-zero if any test failed.
for test in tests:
    if not test.passed():
        sys.exit(1)

sys.exit(0)
|
florentx/OpenUpgrade | refs/heads/8.0 | addons/payment_paypal/tests/test_paypal.py | 378 | # -*- coding: utf-8 -*-
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment.tests.common import PaymentAcquirerCommon
from openerp.addons.payment_paypal.controllers.main import PaypalController
from openerp.tools import mute_logger
from lxml import objectify
import urlparse
class PaypalCommon(PaymentAcquirerCommon):
    """Common setup for Paypal acquirer tests: fetches the acquirer record
    and defines sample credit cards as tuples of (number, cvc) pairs."""

    def setUp(self):
        super(PaypalCommon, self).setUp()
        cr, uid = self.cr, self.uid
        self.base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        # get the paypal account
        model, self.paypal_id = self.registry('ir.model.data').get_object_reference(cr, uid, 'payment_paypal', 'payment_acquirer_paypal')
        # tde+seller@openerp.com - tde+buyer@openerp.com - tde+buyer-it@openerp.com
        # some CC
        self.amex = (('378282246310005', '123'), ('371449635398431', '123'))
        # Trailing commas added on the single-entry attributes below: without
        # them the value was a plain (number, cvc) pair instead of a 1-element
        # tuple of pairs, inconsistent with every multi-entry attribute here.
        self.amex_corporate = (('378734493671000', '123'),)
        # NOTE(review): the 'autralian' typo in the attribute name is kept
        # deliberately, to stay backward compatible with any existing users.
        self.autralian_bankcard = (('5610591081018250', '123'),)
        self.dinersclub = (('30569309025904', '123'), ('38520000023237', '123'))
        self.discover = (('6011111111111117', '123'), ('6011000990139424', '123'))
        self.jcb = (('3530111333300000', '123'), ('3566002020360505', '123'))
        self.mastercard = (('5555555555554444', '123'), ('5105105105105100', '123'))
        self.visa = (('4111111111111111', '123'), ('4012888888881881', '123'), ('4222222222222', '123'))
        self.dankord_pbs = (('76009244561', '123'), ('5019717010103742', '123'))
        self.switch_polo = (('6331101999990016', '123'),)
class PaypalServer2Server(PaypalCommon):
    """Server-to-server tests against the Paypal sandbox."""

    def test_00_tx_management(self):
        """Access-token generation and direct (s2s) transaction creation."""
        cr, uid, context = self.cr, self.uid, {}
        # be sure not to do stupid things
        paypal = self.payment_acquirer.browse(self.cr, self.uid, self.paypal_id, None)
        self.assertEqual(paypal.environment, 'test', 'test without test environment')

        res = self.payment_acquirer._paypal_s2s_get_access_token(cr, uid, [self.paypal_id], context=context)
        self.assertTrue(res[self.paypal_id] is not False, 'paypal: did not generate access token')

        tx_id = self.payment_transaction.s2s_create(
            cr, uid, {
                'amount': 0.01,
                'acquirer_id': self.paypal_id,
                'currency_id': self.currency_euro_id,
                'reference': 'test_reference',
                'partner_id': self.buyer_id,
            }, {
                'number': self.visa[0][0],
                'cvc': self.visa[0][1],
                'brand': 'visa',
                'expiry_mm': 9,
                'expiry_yy': 2015,
            }, context=context
        )

        tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
        self.assertTrue(tx.paypal_txn_id is not False, 'paypal: txn_id should have been set after s2s request')
        # Reset the txn_id so reruns start from a clean state.
        self.payment_transaction.write(cr, uid, tx_id, {'paypal_txn_id': False}, context=context)
class PaypalForm(PaypalCommon):
    """Tests for the Paypal payment form: rendering (with and without fees)
    and processing of the IPN (form feedback) notifications."""

    def test_10_paypal_form_render(self):
        """Rendered button posts to the sandbox URL with the expected inputs."""
        cr, uid, context = self.cr, self.uid, {}
        # be sure not to do stupid things
        self.payment_acquirer.write(cr, uid, self.paypal_id, {'fees_active': False}, context)
        paypal = self.payment_acquirer.browse(cr, uid, self.paypal_id, context)
        self.assertEqual(paypal.environment, 'test', 'test without test environment')

        # ----------------------------------------
        # Test: button direct rendering
        # ----------------------------------------

        # render the button
        res = self.payment_acquirer.render(
            cr, uid, self.paypal_id,
            'test_ref0', 0.01, self.currency_euro_id,
            partner_id=None,
            partner_values=self.buyer_values,
            context=context)

        # expected value for each named input of the rendered form
        form_values = {
            'cmd': '_xclick',
            'business': 'tde+paypal-facilitator@openerp.com',
            'item_name': 'test_ref0',
            'item_number': 'test_ref0',
            'first_name': 'Buyer',
            'last_name': 'Norbert',
            'amount': '0.01',
            'currency_code': 'EUR',
            'address1': 'Huge Street 2/543',
            'city': 'Sin City',
            'zip': '1000',
            'country': 'Belgium',
            'email': 'norbert.buyer@example.com',
            'return': '%s' % urlparse.urljoin(self.base_url, PaypalController._return_url),
            'notify_url': '%s' % urlparse.urljoin(self.base_url, PaypalController._notify_url),
            'cancel_return': '%s' % urlparse.urljoin(self.base_url, PaypalController._cancel_url),
        }

        # check form result
        tree = objectify.fromstring(res)
        self.assertEqual(tree.get('action'), 'https://www.sandbox.paypal.com/cgi-bin/webscr', 'paypal: wrong form POST url')
        for form_input in tree.input:
            if form_input.get('name') in ['submit']:
                continue
            self.assertEqual(
                form_input.get('value'),
                form_values[form_input.get('name')],
                'paypal: wrong value for input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
            )

    def test_11_paypal_form_with_fees(self):
        """With fees_active, the form carries a computed 'handling' input."""
        cr, uid, context = self.cr, self.uid, {}
        # be sure not to do stupid things
        paypal = self.payment_acquirer.browse(self.cr, self.uid, self.paypal_id, None)
        self.assertEqual(paypal.environment, 'test', 'test without test environment')

        # update acquirer: compute fees
        self.payment_acquirer.write(cr, uid, self.paypal_id, {
            'fees_active': True,
            'fees_dom_fixed': 1.0,
            'fees_dom_var': 0.35,
            'fees_int_fixed': 1.5,
            'fees_int_var': 0.50,
        }, context)

        # render the button
        res = self.payment_acquirer.render(
            cr, uid, self.paypal_id,
            'test_ref0', 12.50, self.currency_euro,
            partner_id=None,
            partner_values=self.buyer_values,
            context=context)

        # check form result
        handling_found = False
        tree = objectify.fromstring(res)
        self.assertEqual(tree.get('action'), 'https://www.sandbox.paypal.com/cgi-bin/webscr', 'paypal: wrong form POST url')
        for form_input in tree.input:
            if form_input.get('name') in ['handling']:
                handling_found = True
                self.assertEqual(form_input.get('value'), '1.57', 'paypal: wrong computed fees')
        self.assertTrue(handling_found, 'paypal: fees_active did not add handling input in rendered form')

    @mute_logger('openerp.addons.payment_paypal.models.paypal', 'ValidationError')
    def test_20_paypal_form_management(self):
        """IPN feedback: unknown tx raises ValidationError; a 'Pending' then
        'Completed' notification moves the tx to pending then done."""
        cr, uid, context = self.cr, self.uid, {}
        # be sure not to do stupid things
        paypal = self.payment_acquirer.browse(cr, uid, self.paypal_id, context)
        self.assertEqual(paypal.environment, 'test', 'test without test environment')

        # typical data posted by paypal after client has successfully paid
        paypal_post_data = {
            'protection_eligibility': u'Ineligible',
            'last_name': u'Poilu',
            'txn_id': u'08D73520KX778924N',
            'receiver_email': u'tde+paypal-facilitator@openerp.com',
            'payment_status': u'Pending',
            'payment_gross': u'',
            'tax': u'0.00',
            'residence_country': u'FR',
            'address_state': u'Alsace',
            'payer_status': u'verified',
            'txn_type': u'web_accept',
            'address_street': u'Av. de la Pelouse, 87648672 Mayet',
            'handling_amount': u'0.00',
            'payment_date': u'03:21:19 Nov 18, 2013 PST',
            'first_name': u'Norbert',
            'item_name': u'test_ref_2',
            'address_country': u'France',
            'charset': u'windows-1252',
            'custom': u'',
            'notify_version': u'3.7',
            'address_name': u'Norbert Poilu',
            'pending_reason': u'multi_currency',
            'item_number': u'test_ref_2',
            'receiver_id': u'DEG7Z7MYGT6QA',
            'transaction_subject': u'',
            'business': u'tde+paypal-facilitator@openerp.com',
            'test_ipn': u'1',
            'payer_id': u'VTDKRZQSAHYPS',
            'verify_sign': u'An5ns1Kso7MWUdW4ErQKJJJ4qi4-AVoiUf-3478q3vrSmqh08IouiYpM',
            'address_zip': u'75002',
            'address_country_code': u'FR',
            'address_city': u'Paris',
            'address_status': u'unconfirmed',
            'mc_currency': u'EUR',
            'shipping': u'0.00',
            'payer_email': u'tde+buyer@openerp.com',
            'payment_type': u'instant',
            'mc_gross': u'1.95',
            'ipn_track_id': u'866df2ccd444b',
            'quantity': u'1'
        }

        # should raise error about unknown tx
        with self.assertRaises(ValidationError):
            self.payment_transaction.form_feedback(cr, uid, paypal_post_data, 'paypal', context=context)

        # create tx
        tx_id = self.payment_transaction.create(
            cr, uid, {
                'amount': 1.95,
                'acquirer_id': self.paypal_id,
                'currency_id': self.currency_euro_id,
                'reference': 'test_ref_2',
                'partner_name': 'Norbert Buyer',
                'partner_country_id': self.country_france_id,
            }, context=context
        )
        # validate it
        self.payment_transaction.form_feedback(cr, uid, paypal_post_data, 'paypal', context=context)
        # check
        tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
        self.assertEqual(tx.state, 'pending', 'paypal: wrong state after receiving a valid pending notification')
        self.assertEqual(tx.state_message, 'multi_currency', 'paypal: wrong state message after receiving a valid pending notification')
        self.assertEqual(tx.paypal_txn_id, '08D73520KX778924N', 'paypal: wrong txn_id after receiving a valid pending notification')
        self.assertFalse(tx.date_validate, 'paypal: validation date should not be updated whenr receiving pending notification')

        # update tx
        self.payment_transaction.write(cr, uid, [tx_id], {
            'state': 'draft',
            'paypal_txn_id': False,
        }, context=context)
        # update notification from paypal
        paypal_post_data['payment_status'] = 'Completed'
        # validate it
        self.payment_transaction.form_feedback(cr, uid, paypal_post_data, 'paypal', context=context)
        # check
        tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
        self.assertEqual(tx.state, 'done', 'paypal: wrong state after receiving a valid pending notification')
        self.assertEqual(tx.paypal_txn_id, '08D73520KX778924N', 'paypal: wrong txn_id after receiving a valid pending notification')
        self.assertEqual(tx.date_validate, '2013-11-18 03:21:19', 'paypal: wrong validation date')
|
jakereps/qiime-workshops | refs/heads/master | payments/migrations/0017_billed_total_charfield.py | 1 | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from django.db import migrations, models
class Migration(migrations.Migration):
    """Re-declare ``Order.billed_total`` as a CharField (max_length=300).

    ``preserve_default=False`` means the '' default is applied only while
    migrating existing rows, not kept on the model field afterwards (per
    Django migration semantics).
    """

    dependencies = [
        ('payments', '0016_order_contact_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='order',
            name='billed_total',
            field=models.CharField(blank=True, default='', help_text='This is the confirmed paid amount from NAU', max_length=300, verbose_name='billed total (USD)'),
            preserve_default=False,
        ),
    ]
|
indrajitr/ansible | refs/heads/devel | test/units/inventory/test_group.py | 53 | # Copyright 2018 Alan Rominger <arominge@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from units.compat import unittest
from ansible.inventory.group import Group
from ansible.inventory.host import Host
from ansible.errors import AnsibleError
class TestGroup(unittest.TestCase):
    """Unit tests for inventory Group: depth propagation, loop detection,
    host ordering, and ancestor resolution."""

    def test_depth_update(self):
        """Adding child groups updates their depth relative to the parent."""
        A = Group('A')
        B = Group('B')
        Z = Group('Z')
        A.add_child_group(B)
        A.add_child_group(Z)
        self.assertEqual(A.depth, 0)
        self.assertEqual(Z.depth, 1)
        self.assertEqual(B.depth, 1)

    def test_depth_update_dual_branches(self):
        """Depth is the maximum over all parent paths, updated retroactively."""
        alpha = Group('alpha')
        A = Group('A')
        alpha.add_child_group(A)
        B = Group('B')
        A.add_child_group(B)
        Z = Group('Z')
        alpha.add_child_group(Z)
        beta = Group('beta')
        B.add_child_group(beta)
        Z.add_child_group(beta)

        self.assertEqual(alpha.depth, 0)  # apex
        self.assertEqual(beta.depth, 3)  # alpha -> A -> B -> beta

        omega = Group('omega')
        omega.add_child_group(alpha)

        # verify that both paths are traversed to get the max depth value
        self.assertEqual(B.depth, 3)  # omega -> alpha -> A -> B
        self.assertEqual(beta.depth, 4)  # B -> beta

    def test_depth_recursion(self):
        """A manually-created parent/child loop makes depth checking fail."""
        A = Group('A')
        B = Group('B')
        A.add_child_group(B)
        # hypothetical of adding B as child group to A
        A.parent_groups.append(B)
        B.child_groups.append(A)
        # can't update depths of groups, because of loop
        with self.assertRaises(AnsibleError):
            B._check_children_depth()

    def test_loop_detection(self):
        """add_child_group refuses to create a cycle."""
        A = Group('A')
        B = Group('B')
        C = Group('C')
        A.add_child_group(B)
        B.add_child_group(C)
        with self.assertRaises(AnsibleError):
            C.add_child_group(A)

    def test_direct_host_ordering(self):
        """Hosts are returned in order they are added
        """
        group = Group('A')

        # host names not added in alphabetical order
        host_name_list = ['z', 'b', 'c', 'a', 'p', 'q']
        expected_hosts = []
        for host_name in host_name_list:
            h = Host(host_name)
            group.add_host(h)
            expected_hosts.append(h)
        assert group.get_hosts() == expected_hosts

    def test_sub_group_host_ordering(self):
        """With multiple nested groups, asserts that hosts are returned
        in deterministic order
        """
        top_group = Group('A')
        expected_hosts = []
        for name in ['z', 'b', 'c', 'a', 'p', 'q']:
            child = Group('group_{0}'.format(name))
            top_group.add_child_group(child)
            host = Host('host_{0}'.format(name))
            child.add_host(host)
            expected_hosts.append(host)
        assert top_group.get_hosts() == expected_hosts

    def test_populates_descendant_hosts(self):
        """A host added to a nested group is a member of all ancestor groups."""
        A = Group('A')
        B = Group('B')
        C = Group('C')
        h = Host('h')
        C.add_host(h)
        A.add_child_group(B)  # B is child of A
        B.add_child_group(C)  # C is descendant of A
        # NOTE(review): duplicate of the A.add_child_group(B) call above —
        # presumably exercising idempotency of re-adding; confirm intent.
        A.add_child_group(B)
        self.assertEqual(set(h.groups), set([C, B, A]))
        h2 = Host('h2')
        C.add_host(h2)
        self.assertEqual(set(h2.groups), set([C, B, A]))

    def test_ancestor_example(self):
        # see docstring for Group._walk_relationship
        groups = {}
        for name in ['A', 'B', 'C', 'D', 'E', 'F']:
            groups[name] = Group(name)
        # first row
        groups['A'].add_child_group(groups['D'])
        groups['B'].add_child_group(groups['D'])
        groups['B'].add_child_group(groups['E'])
        groups['C'].add_child_group(groups['D'])
        # second row
        groups['D'].add_child_group(groups['E'])
        groups['D'].add_child_group(groups['F'])
        groups['E'].add_child_group(groups['F'])

        self.assertEqual(
            set(groups['F'].get_ancestors()),
            set([
                groups['A'], groups['B'], groups['C'], groups['D'], groups['E']
            ])
        )

    def test_ancestors_recursive_loop_safe(self):
        '''
        The get_ancestors method may be referenced before circular parenting
        checks, so the method is expected to be stable even with loops
        '''
        A = Group('A')
        B = Group('B')
        A.parent_groups.append(B)
        B.parent_groups.append(A)
        # finishes in finite time
        self.assertEqual(A.get_ancestors(), set([A, B]))
|
Tigge/openant | refs/heads/master | setup.py | 1 | #!/usr/bin/env python
#
# openant distutils setup script
#
# Copyright (c) 2012, Gustav Tiger <gustav@tiger.name>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import os
import shutil
from distutils.util import execute
from distutils.cmd import Command
from subprocess import call
from setuptools.command.install import install
from setuptools.command.develop import develop
from setuptools import setup, find_packages
def udev_reload_rules():
    """Ask the udev daemon to re-read its rule files."""
    cmd = ["udevadm", "control", "--reload-rules"]
    call(cmd)
def udev_trigger():
    """Replay 'add' events for USB devices with vendor id 0fcf (ANT
    sticks) so freshly-installed rules take effect without replugging."""
    cmd = [
        "udevadm",
        "trigger",
        "--subsystem-match=usb",
        "--attr-match=idVendor=0fcf",
        "--action=add",
    ]
    call(cmd)
def install_udev_rules(raise_exception):
    """Copy the ANT udev rules file into /etc/udev/rules.d and activate it.

    Requires root. Without root, either raises OSError
    (raise_exception=True) or just prints the hint message.
    """
    if not check_root():
        msg = 'You must have root privileges to install udev rules. Run "sudo python setup.py udev_rules"'
        if raise_exception:
            raise OSError(msg)
        print(msg)
        return
    shutil.copy("resources/42-ant-usb-sticks.rules", "/etc/udev/rules.d")
    execute(udev_reload_rules, [], "Reloading udev rules")
    execute(udev_trigger, [], "Triggering udev rules")
def check_root():
    """Return True when running with root privileges (effective uid 0)."""
    euid = os.geteuid()
    return euid == 0
class InstallUdevRules(Command):
    """``python setup.py udev_rules`` -- install the rules standalone."""
    description = "install udev rules (requires root privileges)"
    user_options = []
    def initialize_options(self):
        # No options, but distutils requires both option hooks to exist.
        pass
    def finalize_options(self):
        pass
    def run(self):
        # Raise when not root, so the command fails loudly.
        install_udev_rules(True)
class CustomInstall(install):
    """Standard install plus udev rules (hard failure when not root)."""
    def run(self):
        install.run(self)
        install_udev_rules(True)
class CustomDevelop(develop):
    """Develop-mode install; udev rules are best-effort (no root needed)."""
    def run(self):
        develop.run(self)
        install_udev_rules(False)
# Use the README as the long package description when it is readable;
# otherwise fall back to an empty string.
long_description = ""
try:
    with open("README.md") as readme:
        long_description = readme.read()
except IOError:
    pass
# Package metadata. NOTE: a missing comma after the "Science/Research"
# classifier previously fused it with the License classifier into one
# invalid string via implicit concatenation; fixed below.
setup(
    name="openant",
    version="0.4",
    description="ANT and ANT-FS Python Library",
    long_description=long_description,
    author="Gustav Tiger",
    author_email="gustav@tiger.name",
    url="https://github.com/Tigge/openant",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Healthcare Industry",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    packages=find_packages(),
    install_requires=["pyusb>=1.0a2"],
    cmdclass={
        "udev_rules": InstallUdevRules,
        "install": CustomInstall,
        "develop": CustomDevelop,
    },
    test_suite="ant.tests",
)
|
saisankargochhayat/algo_quest | refs/heads/master | hackerearth/temp.py | 1 | import math
class segment_tree():
    """Segment tree over the value range [mini, maxi]; each node carries a
    'decrements' counter (lazy count of range decrements applied to it).

    NOTE(review): competition scratch code. The index shifting in query()
    (unused ``i``) and update() (``left = left-dec``) looks suspect, and
    update() still contains a debug print -- verify against the intended
    problem statement before reuse.
    """
    def __init__(self,mini,maxi):
        # Root covers the whole value range.
        self.root = self.build(mini,maxi)
    def build(self,left,right):
        # Recursively build dict-based nodes covering [left, right].
        if left == right:
            node = {}
            node['left'] = None
            node['right'] = None
            node['decrements'] = 0
            return node
        else:
            node = {}
            mid = int((left+right)/2)
            node['left'] = self.build(left,mid)
            node['right'] = self.build(mid+1,right)
            node['decrements'] = 0
            return node
    def query(self,node,index,left,right,dec):
        # Accumulate 'decrements' along the root-to-leaf path of `index`.
        # print("Querying "+str(left)+" "+str(right)+" with dec "+str(dec)+" str index is "+str(index))
        if left==right:
            return node['decrements'] + dec
        else:
            i = index-node['decrements']  # NOTE(review): computed but never used
            mid = int((left+right)/2)
            if left<=index and index<=mid:
                return self.query(node['left'],index,left,mid,dec+node['decrements'])
            else:
                return self.query(node['right'],index,mid+1,right,dec+node['decrements'])
    def update(self,node,start,end,left,right,dec):
        # Lazily add one decrement to every node fully inside [start, end].
        # NOTE(review): debug print left in -- remove before submission.
        print("Updating "+str(left)+" "+str(right)+" with start and end "+str(start)+" "+str(end))
        if start>right or end<left:
            return
        if start<=left and right<=end:
            node['decrements'] = node['decrements'] + 1
            return
        dec = dec+node['decrements']
        left = left-dec  # NOTE(review): shifting the bound by dec is unusual -- confirm
        mid = int((left+right)/2)
        self.update(node['left'],start,end,left,mid,dec)
        self.update(node['right'],start,end,mid+1,right,dec)
# Driver: read the array, apply m range operations, then report each
# element minus the decrements accumulated for its value.
n = int(input())
values = [int(tok) for tok in input().split(' ')]
lo = min(values)
hi = max(values)
tree = segment_tree(lo, hi)
m = int(input())
for _ in range(m):
    k = int(input())
    tree.update(tree.root, k, n, lo, hi, 0)
ans = [v - tree.query(tree.root, v, lo, hi, 0) for v in values]
print(*ans)
|
seshin/namebench | refs/heads/master | nb_third_party/dns/rdtypes/IN/SRV.py | 248 | # Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.name
class SRV(dns.rdata.Rdata):
    """SRV record
    @ivar priority: the priority
    @type priority: int
    @ivar weight: the weight
    @type weight: int
    @ivar port: the port of the service
    @type port: int
    @ivar target: the target host
    @type target: dns.name.Name object
    @see: RFC 2782"""
    __slots__ = ['priority', 'weight', 'port', 'target']
    def __init__(self, rdclass, rdtype, priority, weight, port, target):
        super(SRV, self).__init__(rdclass, rdtype)
        self.priority = priority
        self.weight = weight
        self.port = port
        self.target = target
    def to_text(self, origin=None, relativize=True, **kw):
        # Zone-file presentation form: "priority weight port target".
        target = self.target.choose_relativity(origin, relativize)
        return '%d %d %d %s' % (self.priority, self.weight, self.port,
                                target)
    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        # Parse the three uint16 fields and the target name from tokens.
        priority = tok.get_uint16()
        weight = tok.get_uint16()
        port = tok.get_uint16()
        target = tok.get_name(None)
        target = target.choose_relativity(origin, relativize)
        tok.get_eol()
        return cls(rdclass, rdtype, priority, weight, port, target)
    from_text = classmethod(from_text)
    def to_wire(self, file, compress = None, origin = None):
        # Wire form: three network-order uint16s, then the domain name.
        three_ints = struct.pack("!HHH", self.priority, self.weight, self.port)
        file.write(three_ints)
        self.target.to_wire(file, compress, origin)
    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        (priority, weight, port) = struct.unpack('!HHH',
                                                 wire[current : current + 6])
        current += 6
        rdlen -= 6
        (target, cused) = dns.name.from_wire(wire[: current + rdlen],
                                             current)
        # The name must consume exactly the remaining rdata length.
        if cused != rdlen:
            raise dns.exception.FormError
        if not origin is None:
            target = target.relativize(origin)
        return cls(rdclass, rdtype, priority, weight, port, target)
    from_wire = classmethod(from_wire)
    def choose_relativity(self, origin = None, relativize = True):
        self.target = self.target.choose_relativity(origin, relativize)
    def _cmp(self, other):
        # NOTE: Python 2 only -- relies on the builtin cmp().
        # Packed fixed-size fields compare correctly as bytes.
        sp = struct.pack("!HHH", self.priority, self.weight, self.port)
        op = struct.pack("!HHH", other.priority, other.weight, other.port)
        v = cmp(sp, op)
        if v == 0:
            v = cmp(self.target, other.target)
        return v
|
min2209/dwt | refs/heads/master | WTN/lossFunction.py | 1 | import tensorflow as tf
def depthCELoss2(pred, gt, weight, ss, outputChannels=16):
    """Weighted two-sided cross-entropy summed over all pixels.

    pred:   logits, reshaped to (-1, outputChannels)
    gt:     integer class labels (one-hot encoded below)
    weight: per-pixel weights (flattened to a column)
    ss:     per-pixel multiplier -- entries multiplied in elementwise, so
            zeros contribute nothing
    NOTE(review): the per-class scaling vector up-weights the first four
    channels (3,3,3,2) -- presumably the rare/important bins; confirm
    against the training configuration.
    """
    with tf.name_scope("depth_CE_loss"):
        pred = tf.reshape(pred, (-1, outputChannels))
        epsilon = tf.constant(value=1e-25)  # keeps log() away from exact 0
        predSoftmax = tf.to_float(tf.nn.softmax(pred))
        gt = tf.one_hot(indices=tf.to_int32(tf.squeeze(tf.reshape(gt, (-1, 1)))), depth=outputChannels, dtype=tf.float32)
        ss = tf.to_float(tf.reshape(ss, (-1, 1)))
        weight = tf.to_float(tf.reshape(weight, (-1, 1)))
        crossEntropyScaling = tf.to_float([3.0, 3.0, 3.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
        # Both terms of the binary CE: penalise mass on wrong classes and
        # reward mass on the true class.
        crossEntropy = -tf.reduce_sum(((1-gt)*tf.log(tf.maximum(1-predSoftmax, epsilon))
                                       + gt*tf.log(tf.maximum(predSoftmax, epsilon)))*ss*crossEntropyScaling*weight,
                                      reduction_indices=[1])
        crossEntropySum = tf.reduce_sum(crossEntropy, name="cross_entropy_sum")
        return crossEntropySum
def depthCELoss(pred, gt, ss, outputChannels=16):
    """Unweighted masked softmax cross-entropy, summed over all pixels."""
    with tf.name_scope("depth_CE_loss"):
        logits = tf.reshape(pred, (-1, outputChannels))
        eps = tf.constant(value=1e-25)
        # Nudge probabilities away from zero so tf.log never sees 0.
        probs = tf.to_float(tf.nn.softmax(logits))
        probs = probs + eps
        labels = tf.one_hot(indices=tf.to_int32(tf.squeeze(tf.reshape(gt, (-1, 1)))), depth=outputChannels, dtype=tf.float32)
        mask = tf.to_float(tf.reshape(ss, (-1, 1)))
        per_pixel = -tf.reduce_sum(labels * tf.log(probs) * mask, reduction_indices=[1])
        return tf.reduce_sum(per_pixel, name="cross_entropy_sum")
def modelTotalLoss(pred, gt, weight, ss, outputChannels=1):
    """Normalised weighted CE loss registered in the 'losses' collection;
    returns the sum of every loss collected so far."""
    ce = depthCELoss2(pred=pred, gt=gt, weight=weight, ss=ss,
                      outputChannels=outputChannels)
    # +1 guards against division by zero when the mask selects no pixels.
    normalised = ce / (countTotalWeighted(ss, weight) + 1)
    tf.add_to_collection('losses', normalised)
    return tf.add_n(tf.get_collection('losses'), name='total_loss')
def countTotal(ss):
    """Sum of the mask/selector ``ss`` over all pixels."""
    with tf.name_scope("total"):
        mask = tf.to_float(tf.reshape(ss, (-1, 1)))
        return tf.reduce_sum(mask)
def countCorrect(pred, gt, ss, k, outputChannels):
    """Masked top-k hit count.

    NOTE(review): tf.nn.in_top_k is fed the one-hot ground truth as the
    prediction scores and the argmax of ``pred`` as the targets -- confirm
    this argument ordering is intentional.
    """
    with tf.name_scope("correct"):
        predicted = tf.argmax(tf.reshape(pred, (-1, outputChannels)), 1)
        onehot = tf.one_hot(indices=tf.to_int32(tf.squeeze(tf.reshape(gt, (-1, 1)))), depth=outputChannels, dtype=tf.float32)
        mask = tf.to_float(tf.reshape(ss, (-1, 1)))
        hits = tf.reshape(tf.to_float(tf.nn.in_top_k(onehot, predicted, k)), (-1, 1))
        return tf.reduce_sum(tf.mul(hits, mask), reduction_indices=[0])
def countTotalWeighted(ss, weight):
    """Sum of per-pixel weights restricted to the mask ``ss``."""
    with tf.name_scope("total"):
        mask = tf.to_float(tf.reshape(ss, (-1, 1)))
        weights = tf.to_float(tf.reshape(weight, (-1, 1)))
        return tf.reduce_sum(mask * weights)
palaniyappanBala/python-mongor | refs/heads/master | mongor/__init__.py | 2 | from random import randint
from sys import stderr as log_error
import gzip
import datetime
#needed external deps
import pymongo
from pymongo.errors import CollectionInvalid
import bson
class Config(object):
    """
    Used to configure the mongodb curator.
    Configuration class should only be used directly
    when performing the original setup
    """
    def __init__(self, host="localhost",
                 port=27017,
                 ssl=False,
                 ssl_cert_reqs=0,
                 ssl_keyfile=None,
                 ssl_certfile=None,
                 ssl_ca_certs=None,
                 ssl_match_hostname=False):
        # One MongoClient per Config; curator metadata lives in "config_db".
        self.mongo_client = pymongo.MongoClient(host=host,
                                                port=port,
                                                ssl=ssl,
                                                ssl_cert_reqs=ssl_cert_reqs,
                                                ssl_keyfile=ssl_keyfile,
                                                ssl_certfile=ssl_certfile,
                                                ssl_ca_certs=ssl_ca_certs,
                                                ssl_match_hostname=ssl_match_hostname)
        self.database = pymongo.database.Database(self.mongo_client, "config_db")
    def setup(self, background=False, unique_uids=False):
        '''
        few hard coded keywords in this function:
        - nodes, a node is a target database for document insertion
        - schedule, the order in which the node will be available for insertions
        - db_type, arbitrary name used for grouping nodes.
        '''
        # DESTRUCTIVE: recreates the "nodes" collection if it exists.
        try:
            self.database.create_collection("nodes")
        except CollectionInvalid:
            self.database.drop_collection("nodes")
            self.database.create_collection("nodes")
        self.database['nodes'].create_index("uid",
                                            background=background,
                                            unique=unique_uids)
        self.database['nodes'].create_index([("name", 1),
                                             ("host", 1),
                                             ("port", 1)],
                                            background=background,
                                            unique=True)
        self.database['nodes'].create_index("schedule")
        self.database['indexes'].create_index("db_type")
        self.database['QFD'].create_index("db_type")
    def close(self):
        '''
        give a function to allow the client to close resources
        '''
        self.mongo_client.disconnect()
    def add_index(self, db_type="", #<str>
                  fields=None, #<list>
                  background=False, #<bool>
                  unique=False, #<bool>
                  sparse=True, #<bool>
                  text=False #<bool>
                  ):
        # Store index definitions; Maintenence.ensure_indexes applies them
        # to the data nodes later.
        if isinstance(fields, basestring):
            fields = [fields] #load into a list so the loop works
        for field in fields:
            self.database['indexes'].insert_one({"db_type":db_type,
                                                 "field":field,
                                                 "background":background,
                                                 "unique":unique,
                                                 "sparse":sparse,
                                                 "text":text
                                                 })
    def remove_index(self, db_type="", #<str>
                     fields=None): #<List>
        # Remove stored index definitions (does not drop live indexes).
        if isinstance(fields, basestring):
            fields = [fields] #load into a list so the loop works
        for field in fields:
            self.database['indexes'].remove({"db_type":db_type, "field":field})
    def get_indexes(self, db_type, fields_only=False):
        # Compound specs are stored as lists-of-lists; convert the inner
        # pairs to tuples as pymongo's create_index expects.
        indexes = list(self.database['indexes'].find({"db_type":db_type}))
        for index in indexes:
            if isinstance(index['field'], list):
                fields = []
                for field in index['field']:
                    fields.append(tuple(field))
                index['field'] = fields
        if fields_only:
            indexes = [x['field'] for x in indexes]
        return indexes
    def add_qfd(self,
                field,
                uid="",
                db_type="",
                window=60*24, #<int> in minutes default 1 day
                ):
        # last_id starts at a year-2000 ObjectId -- presumably so the first
        # sweep over the data matches everything; confirm with consumers.
        if field:
            self.database['QFD'].insert_one({"db_type":db_type,
                                             "window":window,
                                             "field":field,
                                             "uid":uid,
                                             "last_id":bson.objectid.ObjectId.from_datetime(datetime.datetime(2000, 1, 1))})
        return field
    def add_node(self,
                 uid="",
                 name="",
                 db_type="",
                 db_tags=None, #list<str>
                 capability='r', #r: read, rw=read/write
                 host="localhost",
                 port=27017,
                 max_size=21474836480, #20GB in bytes
                 ssl=False, #boolean
                 ssl_cert_reqs=0, # ssl.CERT_NONE (0), ssl.CERT_OPTIONAL (1), ssl.CERT_REQUIRED (2)
                 ssl_keyfile=None,
                 ssl_certfile=None,
                 ssl_ca_certs=None,
                 ssl_match_hostname=False,
                 passwd_file="", #file that should contain "username:password"
                 raw_args=None):
        '''
        Could make a Node class, but that is probably overkill (zen#3) since it would be internal only
        '''
        # raw_args (if a dict) seeds the node document; explicit keyword
        # values below override any colliding keys.
        node = {}
        if isinstance(raw_args, dict):
            node = raw_args
        node['uid'] = uid
        node['name'] = name
        node['host'] = host
        node['port'] = port
        node['ssl'] = ssl
        node['ssl_cert_reqs'] = ssl_cert_reqs
        node['ssl_keyfile'] = ssl_keyfile
        node['ssl_certfile'] = ssl_certfile
        node['ssl_ca_certs'] = ssl_ca_certs
        node['ssl_match_hostname'] = ssl_match_hostname
        node['max_size'] = max_size
        node['passwd_file'] = passwd_file
        node['db_type'] = db_type
        node['capability'] = capability
        if db_tags:
            if not isinstance(db_tags, list):
                node['db_tags'] = [db_tags]
            else:
                node['db_tags'] = db_tags
        if "rw" in capability: #only schedule rotations for write nodes
            node['schedule'] = self._get_next_sequence_number(db_type=db_type)
        self.database['nodes'].insert_one(node)
        return self.get_node(uid)
    def set_node_tags(self, uid, db_tags):
        # Replaces (not merges) the tag list on every node matching uid.
        nodes = self.get_node(uid)
        if isinstance(db_tags, list):
            for node in nodes:
                tags = {"$set":{"db_tags":db_tags}}
                self.database['nodes'].update_one(node, tags)
    def rotate_schedule(self, db_type=""):
        '''
        assumes that appropriate maintenence has already been done
        and the incoming database is clean/prepared for inserts
        '''
        # NOTE(review): only localhost nodes rotate here; remote write
        # nodes keep their schedule value -- confirm that is intended.
        nodes = list(self.database['nodes'].find({"$and":[
                                                {"host":"localhost"},
                                                {"db_type":db_type},
                                                {'schedule':{"$exists":True}}]}))
        for node in nodes:
            schedule = {"$set": {"schedule": (node["schedule"] + 1) % len(nodes)}}
            self.database['nodes'].update_one(node, schedule)
    def get_current_local_node(self, db_type="", limit1=True):
        # Deprecated shim kept for old callers.
        log_error.write("USING OLD FUNCTION, move to get_current_write_node\n")
        return self.get_current_write_node(db_type=db_type, limit1=limit1)
    def get_current_write_node(self, db_type="", limit1=True):
        # Schedule 0 marks the node(s) currently receiving writes.
        selected_nodes = None
        current_nodes = list(self.database['nodes'].find({"$and":[
                                                        {"db_type":db_type},
                                                        {"capability":"rw"},
                                                        {'schedule':0}]}))
        if limit1:
            selected_nodes = self.select_random_from_nodes(current_nodes)
        else:
            selected_nodes = current_nodes
        return selected_nodes
    def get_local_nodes(self, db_type=""):
        # Deprecated shim kept for old callers.
        log_error.write("USING OLD FUNCTION, move to get_write_nodes\n")
        return self.get_write_nodes(db_type)
    def get_write_nodes(self, db_type=""):
        return self.get_nodes({"$and":[{"capability":"rw"}, {"db_type":db_type}]})
    def get_read_nodes(self, db_type=""):
        # Every node of the type is readable, so no capability filter.
        return self.get_nodes({"$and":[{"db_type":db_type}]})
    def get_nodes(self, criteria):
        # Always returned in schedule order (write node first).
        return list(self.database['nodes'].find(criteria).sort("schedule", pymongo.ASCENDING))
    def select_random_from_nodes(self, current_nodes):
        # Raises IndexError on an empty list.
        return current_nodes[randint(0, len(current_nodes)-1)] #allow possible randomization
    def get_node(self, uid):
        nodes = list(self.database['nodes'].find({"uid":uid}))
        for node in nodes:
            node['host'], node['port'] = self.mongo_client.address #ensure all node hosts are relative to the caller.
        return nodes
    def remove_node(self, db_type, uid):
        '''removes the nodes from the rotation
        this has a giant atomicity problem
        any command seeking to read from the database inluding:
            get the current node
            rotate the schedule
            add/delete other nodes
        during this command may have uncontrolled output
        It would be best to lock all applications from accessing mongor
        while this command takes place
        '''
        # DESTRUCTIVE: also drops the data database of each removed node,
        # then renumbers the surviving write nodes from 0.
        delete_nodes = self.get_node(uid)
        self.database['nodes'].remove({"uid":uid})
        for node in delete_nodes:
            self.database.client.drop_database(node['name'])
        nodes_to_reorder = self.get_write_nodes(db_type=db_type)
        schedule = 0
        for node in nodes_to_reorder:
            node['schedule'] = schedule
            schedule += 1
            self.database['nodes'].save(node)
        return self.get_node(uid)
    def _get_next_sequence_number(self, db_type=""):
        # Highest existing schedule value + 1; 0 when none are scheduled.
        try:
            return list(self.database['nodes'].find({"$and":[
                {"db_type":db_type},
                {'schedule':{"$exists":True}}]}).
                sort("schedule", pymongo.DESCENDING).
                limit(1))[0]['schedule'] + 1
        except KeyError:
            return 0
        except IndexError:
            return 0
class Maintenence:
    """
    Used to maintain the mongodb curator.
    Maintenece class should be called by an external script
    """
    def __init__(self, config_host, config_port, config_ssl):
        self.config = Config(host=config_host, port=config_port, ssl=config_ssl)
    def close(self):
        '''
        give a function to allow the client to close resources
        '''
        self.config.mongo_client.disconnect()
    def ensure_indexes(self,
                       collection,
                       indexes,
                       db_type=""):
        # Apply each stored index definition to every write node.
        assert isinstance(indexes, list)
        for node in self.config.get_write_nodes(db_type=db_type):
            node['host'], node['port'] = self.config.mongo_client.address #ensure all node hosts are relative to the caller.
            database = Database().node_to_database(node)
            for index in indexes:
                database[collection].create_index(index['field'],
                                                  background=index['background'],
                                                  sparse=index['sparse'],
                                                  unique=index['unique'])
    def clean_incoming(self, db_type="", dump_bson=False):
        # Drop the databases of the nodes about to become the write target.
        # NOTE(review): the dump_bson parameter is accepted but never used.
        nodes = self._index_of_schedule_change(self.config.get_write_nodes(db_type=db_type)[::-1]) #reverse the order to get the next nodes to the front of the list
        for node in nodes:
            database = Database().node_to_database(node)
            database.client.drop_database(node['name'])
    def current_tailable_cursor(self, db_type, collection, _id="", node=None):
        '''
        _id <bson.objectid.ObjectId> start somewhere other than the beginning of the database
        db_type <basestring> which type of database to use
        node <dict> In the node format, allow the
        Returns:
            None - Returns None <NoneType> at the end of each while loop
            Dict - Returns a Dictionary representative of the mongo document
        Notes: caller is responsible for throttling if required (at end of cursor)
        TODO: No good way to start the cursor at a time other than the live or immediately previous database
        '''
        if not _id:
            _id = bson.objectid.ObjectId('000000000000000000000000')
        while True: #this while loop supports both cursor catching up to live and switching db
            if not node:
                node = self.config.get_current_write_node(db_type=db_type)
            database = Database().node_to_database(node)
            node = None
            # NOTE: Python 2 print statement below -- this module is py2-only.
            print database
            for document in database[collection].find({"_id":{'$gt':_id}}).sort('$natural', pymongo.ASCENDING):
                _id = document['_id']
                yield document
            yield None
    def _index_of_schedule_change(self, nodes):
        # Return the slice of (already reversed) nodes before the schedule
        # value wraps around.
        index = 0
        schedule = nodes[0]['schedule']
        for position in range(len(nodes)): #do this way in order to maintain the index variable
            if nodes[position]['schedule'] < schedule:
                index = position
                break
        return nodes[:index]
    def need_to_rotate(self, db_type=""):
        # NOTE(review): data_size is unbound when current_nodes is empty.
        rotate = False
        current_nodes = self.config.get_current_write_node(db_type=db_type, limit1=False)
        for node in current_nodes:
            database = Database().node_to_database(node)
            data_size = database.command("dbstats")["fileSize"]
            if int(node['max_size']) < int(data_size):
                rotate = True #if any one database in the current 'random' databases is full, rotate them all
        return rotate, data_size
    def rotate_schedule(self, db_type=""):
        # Thin wrapper over Config.rotate_schedule.
        self.config.rotate_schedule(db_type=db_type)
    def dump_bson(self, node, filename):
        # NOTE(review): find() is called on a Database handle rather than a
        # collection -- confirm this works with the pymongo version in use.
        if ".gz" not in filename[len(filename)-4:]:
            filename += ".gz" #put a 'meaningless' extension on there for readability
        database = Database().node_to_database(node)
        #TODO: Add some error handling around this to ensure f gets closed.
        f = gzip.open(filename, 'wb')#gzip strea
        #dump in reverse chronological order as the newer stuff may be more important than older stuff
        for document in database.find().sort('$natural', pymongo.DESCENDING):
            f.write(bson.BSON.encode(document))
        return filename
class Database:
    """
    The most common used class.
    A client will generally call this class looking for one or more database handles
    The purpose of this library is to abstract and curate the database and return
    database handles
    it is up to client applications to obtain new handles every now and then
    mongoclient under the hook will happily maintain persistent connections forever
    this library makes no attempt to force the client into a new handle.
    """
    def __init__(self, config_host="", config_port=27017, config_ssl=False):
        # With no config_host, only node_to_database() is usable.
        self.config = None
        self.mongo_clients = {} #allows MongoClient to manage the connection pool for each host
        if config_host:
            self.config = Config(host=config_host, port=config_port, ssl=config_ssl)
    def close(self):
        '''
        give a function to allow the client to close resources
        '''
        self.config.mongo_client.disconnect() #only resource that needs closed
    def get_current_write_node(self, db_type=""):
        # Handle for the single node currently accepting writes.
        return self.node_to_database(self.config.get_current_write_node(db_type))
    def get_write_nodes(self, db_type="metadata"):
        return [self.node_to_database(node) for node in self.config.get_write_nodes(db_type=db_type)]
    def get_read_nodes(self, db_type="metadata", with_bson=False):
        # NOTE(review): with_bson is accepted but never used.
        return [self.node_to_database(node) for node in self.config.get_read_nodes(db_type=db_type)]
    def node_to_database(self, node, protect=False):
        # Cache one MongoClient per host:port, then return either a plain
        # or a read-mostly "protected" handle for the node's database.
        node_name = "%s:%s" %(node['host'], node['port'])
        if node_name not in self.mongo_clients:
            self.mongo_clients[node_name] = pymongo.MongoClient(host=node['host'],
                                                                port=node['port'],
                                                                ssl=node['ssl'],
                                                                ssl_cert_reqs=node['ssl_cert_reqs'],
                                                                ssl_keyfile=node['ssl_keyfile'],
                                                                ssl_certfile=node['ssl_certfile'],
                                                                ssl_ca_certs=node['ssl_ca_certs'],
                                                                ssl_match_hostname=node['ssl_match_hostname'])
        return_database = None #worst case, a None will be returned.
        if protect: #hand back a database that has customized protections enabled
            return_database = ProtectedDatabase(self.mongo_clients[node_name], node['name'])
        else:#by default, hand back a standard database handle.
            return_database = pymongo.database.Database(self.mongo_clients[node_name], node['name'])
        return return_database
    def node_to_file_handle(self, node):
        # Stub -- not implemented.
        return
class Query:
    """Fans a query out across every read database of a db_type, treating
    them as one logical collection."""
    def __init__(self, databases=None, config_host="", config_port=27017, config_ssl=False, db_type=""):
        self.databases = databases
        self.cursors = {} # NOTE(review): never populated within this class
        if not self.databases:
            self.databases = Database(config_host=config_host,
                                      config_port=config_port,
                                      config_ssl=config_ssl).get_read_nodes(db_type)
    def count(self, collection, criteria, date_start=None, date_end=None):
        """
        Returns a count of the number of documents which match the provided criteria.
        """
        # NOTE(review): date_start/date_end are accepted but never applied.
        total_count = 0
        for db in self.databases:
            if isinstance(db, pymongo.database.Database):
                # Get the count for this database
                result_count = db[collection].find(criteria).count()
                # Add it to the total count
                total_count += result_count
        return total_count
    def find(self, collection, criteria, projection=None, sort=None, limit=101, skip=0, bson_pre_match="", date_start=None, date_end=None):
        '''
        Lazily yield matching documents across every database; ``skip`` is
        consumed across database boundaries.
        NOTE(review): ``limit`` only sets the cursor batch size -- it does
        not cap the number of yielded documents; bson_pre_match and the
        date_* parameters are accepted but unused.
        '''
        if not sort:
            sort = [("_id", pymongo.DESCENDING)] #sort in reverse chronological order
        for db in self.databases:
            if isinstance(db, pymongo.database.Database): #allow growth to query raw bson documents.
                #limit +1 to be absolutely sure that getMore isnt called until your really mean it.
                cursor = db[collection].find(criteria, projection).sort(sort).batch_size(limit+1)
                result_count = cursor.count()
                next_skip = skip - result_count
                if next_skip > 0: #no ramaining documents in this database
                    skip = next_skip #send updated skip to next loop
                    continue #on to the next loop, there was nothing in this db
                else: #there are remaining items in this db
                    cursor = cursor.skip(skip) #apply the skip to this db lookup
                    skip = 0 #set the skip to 0 for the next loop
                for item in cursor: #begin dereferencing the cursor
                    yield item #allows the caller to determine when to stop requesting data
class GlobalQuery:
    """Builds a Query per remote curator whose node entry in the local
    config carries one of the requested db_tags."""
    def __init__(self, config_host, config_port=27017, config_ssl=False, db_type="", db_tags=None):
        if not db_tags:
            db_tags = []
        self.local_config = Config(host=config_host, port=config_port, ssl=config_ssl)
        self.remote_databases = {} #what users will probably iterate
        self.remote_configs = {} #what system uses to refresh
        self.load_remote_configs(db_type, db_tags=db_tags)
    def load_remote_configs(self, db_type, db_tags):
        # Connect to each tagged config server and wrap its read nodes in
        # a Query; an unreachable config server is recorded as None.
        config_servers = self.local_config.get_nodes({"db_tags": {'$in': db_tags}})
        for node in config_servers:
            node_name = "%s:%s" %(node['host'], node['port'])
            self.remote_databases[node_name] = []
            try:
                self.remote_configs[node_name] = Config(host=node['host'],
                                                        port=node['port'],
                                                        ssl=node['ssl'])
            except pymongo.errors.ConnectionFailure:
                self.remote_databases[node_name] = None
                continue #move to the next possible node
            database = Database()
            node_databases = []
            for remote_read_node in self.remote_configs[node_name].get_read_nodes(db_type):
                # Data nodes are addressed via the config server's host.
                remote_read_node['host'] = node['host']
                try:
                    node_databases.append(database.node_to_database(remote_read_node))
                except pymongo.errors.ConnectionFailure:
                    #can connect to config but not data
                    #you get here if you colocate a edge node
                    #with the central server. Gracefully allow this to pass
                    pass
            if node_databases:
                self.remote_databases[node_name] = Query(databases=node_databases)
        return self
class ProtectedDatabase(pymongo.database.Database):
    """Database handle whose attribute access yields ProtectedCollection
    instances and whose drop operations always raise."""
    def __getattr__(self, name):
        '''
        may be worth performing some type of logging
        or decision making based on the user that got to this point.
        somewhat pointless since savvy users could just open up a real database handle
        perhaps has worthyness in an enterprise environment, maybe not
        '''
        return ProtectedCollection(self, name)
    def drop(self):
        raise ValueError("Drop Not Allowed")
    def drop_collection(self, name_or_collection):
        raise ValueError("Drop Not Allowed")
class ProtectedCollection(pymongo.collection.Collection):
    """Collection wrapper that permits reads but rejects every destructive
    or mutating operation by raising ValueError."""
    def find(self, *args, **kwargs):
        """Read-only find; a natural hook point for query auditing/logging
        (previous syslog-based audit code was removed)."""
        # Fall back to the collection-level defaults when the caller did
        # not pass these legacy pymongo options explicitly.
        if 'slave_okay' not in kwargs:
            kwargs['slave_okay'] = self.slave_okay
        if 'read_preference' not in kwargs:
            kwargs['read_preference'] = self.read_preference
        return pymongo.cursor.Cursor(self, *args, **kwargs)
    def drop(self):
        raise ValueError("Drop Not Allowed")
    def drop_index(self, index_or_name):
        raise ValueError("Drop Not Allowed")
    def drop_indexes(self):
        raise ValueError("drop_indexes Not Allowed")
    def reindex(self):
        raise ValueError("reindex Not Allowed")
    def find_and_modify(self, query=None, update=None, upsert=False, **kwargs):
        # query now defaults to None instead of a shared mutable {}: the
        # call is rejected unconditionally, so the default is never read
        # and the change is behavior-preserving.
        raise ValueError("find_and_modify Not Allowed")
    def create_index(self, key_or_list, deprecated_unique=None,
                     ttl=300, **kwargs):
        raise ValueError("create_index Not Allowed")
|
pkruskal/scikit-learn | refs/heads/master | examples/neighbors/plot_nearest_centroid.py | 264 | """
===============================
Nearest Centroid Classification
===============================
Sample usage of Nearest Centroid classification.
It will plot the decision boundaries for each class.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn import datasets
from sklearn.neighbors import NearestCentroid
# (removed the unused `n_neighbors = 15` -- a leftover from the k-NN
# example this script was adapted from; nothing below referenced it)

# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2]  # we only take the first two features. We could
                      # avoid this ugly slicing by using a two-dim dataset
y = iris.target

h = .02  # step size in the mesh

# Create color maps
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])

# Fit and plot a NearestCentroid classifier twice: once without and once
# with centroid shrinkage.
for shrinkage in [None, 0.1]:
    # we create an instance of Neighbours Classifier and fit the data.
    clf = NearestCentroid(shrink_threshold=shrinkage)
    clf.fit(X, y)
    y_pred = clf.predict(X)
    # Training-set accuracy for this shrinkage setting.
    print(shrinkage, np.mean(y == y_pred))

    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, m_max]x[y_min, y_max].
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=cmap_light)

    # Plot also the training points
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
    plt.title("3-Class classification (shrink_threshold=%r)"
              % shrinkage)
    plt.axis('tight')

plt.show()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.