code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
import time
import scipypy
start = time.time()
a = 0.0
for i in xrange(1, 1000000, 1):
f = i / 100.0
a += scipypy.psi(f) / float(i + 1) * i
end = time.time()
print '%s: took %s seconds' % (a, end - start)
start = time.time()
i = 0.01
while i < 10000:
scipypy.psi(i)
i += 0.01
end = time.time()
print 'bare metal took %s seconds' % (end - start)
start = time.time()
scipypy.psi(1000)
end = time.time()
print 'one step took %s seconds' % (end - start)
start = time.time()
scipypy.psi(999)
end = time.time()
print 'one step again took %s seconds' % (end - start)
start = time.time()
for i in xrange(10):
scipypy.psi(1000 + i)
end = time.time()
print 'ten steps took %s seconds' % (end - start)
start = time.time()
for i in xrange(100):
scipypy.psi(1000 + i)
end = time.time()
print 'one hundred steps took %s seconds' % (end - start)
| jperla/happynews | model/scipypy/speed_pypy.py | Python | agpl-3.0 | 867 |
"""Test."""
import pytest
# Known (sequence, expected) pairs: Thue-Morse prefixes and non-prefixes.
TM_TABLE = [
    ([0, 1, 1, 0, 1], True),
    ([0], True),
    ([1], False),
    ([0, 1, 0, 0], False),
]


@pytest.mark.parametrize(("n", "result"), TM_TABLE)
def test_is_thue_morse(n, result):
    """Check that is_thue_morse() classifies each known sequence correctly."""
    from is_thue_morse import is_thue_morse

    assert is_thue_morse(n) == result
| rrustia/code-katas | src/test_is_thue_morse.py | Python | mit | 318 |
#!/usr/bin/env python
# coding: utf-8
#[(1, 5), (1, 3, 4), (3, 6), (9, ), (-1, 0), (10, 1, 3)] 按每个tuple的最大值进行比较排序
list_1 = [(1, 5), (1, 3, 4), (3, 6), (9, ), (-1, 0), (10, 1, 3)]
print sorted(list_1,key = lambda x:max(x))
print sorted(list_1,key = lambda x:min(x))
| 51reboot/actual_13_homework | 04/leon/1.py | Python | mit | 294 |
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Philippe Biondi <phil@secdev.org>
# This program is published under a GPLv2 license
"""
All layers. Configurable with conf.load_layers.
"""
from __future__ import absolute_import
from scapy.config import conf
from scapy.error import log_loading
from scapy.main import load_layer
import logging
import scapy.modules.six as six
ignored = list(six.moves.builtins.__dict__) + ["sys"]
log = logging.getLogger("scapy.loading")

__all__ = []

# Import every configured layer into this module's namespace, extending
# __all__ with the symbols each layer exports. A broken layer is logged
# and skipped rather than aborting the whole import.
for _layer in conf.load_layers:
    log_loading.debug("Loading layer %s" % _layer)
    try:
        load_layer(_layer, globals_dict=globals(), symb_list=__all__)
    except Exception as err:
        log.warning("can't import layer %s: %s", _layer, err)

# Remove the loop variable from the module namespace; NameError means
# conf.load_layers was empty and the loop never ran.
try:
    del _layer
except NameError:
    pass
| mtury/scapy | scapy/layers/all.py | Python | gpl-2.0 | 825 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import os
import stat
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import remotetransfer
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.util import bytes2unicode
from buildbot.worker_transition import deprecatedWorkerClassMethod
class WorkerBuildStep(buildstep.BuildStep):
    """Marker base class for build steps that operate on the worker."""
class SetPropertiesFromEnv(WorkerBuildStep):
    """
    Sets properties from environment variables on the worker.
    Note this is transferred when the worker first connects
    """
    name = 'SetPropertiesFromEnv'
    description = ['Setting']
    descriptionDone = ['Set']

    def __init__(self, variables, source="WorkerEnvironment", **kwargs):
        buildstep.BuildStep.__init__(self, **kwargs)
        self.variables = variables
        self.source = source

    def start(self):
        # Worker environment dictionaries on win32 are folded to uppercase,
        # so fold the requested variable names the same way to get
        # case-insensitive matching there.
        fold_upper = (self.worker.worker_system == 'win32')
        props = self.build.getProperties()
        env = self.worker.worker_environ
        wanted = self.variables
        log_lines = []
        if isinstance(wanted, str):
            wanted = [self.variables]
        for variable in wanted:
            lookup_key = variable.upper() if fold_upper else variable
            value = env.get(lookup_key, None)
            if not value:
                continue
            # The property keeps the caller-supplied (non-folded) name.
            props.setProperty(variable, value, self.source,
                              runtime=True)
            log_lines.append("%s = %r" % (variable, value))
        self.addCompleteLog("properties", "\n".join(log_lines))
        self.finished(SUCCESS)
class FileExists(WorkerBuildStep):
    """
    Check for the existence of a file on the worker.
    """
    name = 'FileExists'
    renderables = ['file']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, file, **kwargs):
        buildstep.BuildStep.__init__(self, **kwargs)
        self.file = file

    def start(self):
        # The attached worker must implement the 'stat' command.
        self.checkWorkerHasCommand('stat')
        command = remotecommand.RemoteCommand('stat', {'file': self.file})
        deferred = self.runCommand(command)
        deferred.addCallback(lambda _: self.commandComplete(command))
        deferred.addErrback(self.failed)

    def commandComplete(self, cmd):
        """Succeed only when the path exists and is a regular file."""
        if cmd.didFail():
            self.descriptionDone = ["File not found."]
            self.finished(FAILURE)
            return
        stat_result = cmd.updates["stat"][-1]
        if not stat.S_ISREG(stat_result[stat.ST_MODE]):
            self.descriptionDone = ["Not a file."]
            self.finished(FAILURE)
        else:
            self.descriptionDone = ["File found."]
            self.finished(SUCCESS)
class CopyDirectory(WorkerBuildStep):
    """
    Copy a directory tree on the worker.
    """
    name = 'CopyDirectory'
    description = ['Copying']
    descriptionDone = ['Copied']
    renderables = ['src', 'dest']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, src, dest, timeout=None, maxTime=None, **kwargs):
        buildstep.BuildStep.__init__(self, **kwargs)
        self.src = src
        self.dest = dest
        self.timeout = timeout
        self.maxTime = maxTime

    def start(self):
        # The attached worker must implement the 'cpdir' command.
        self.checkWorkerHasCommand('cpdir')

        args = {'fromdir': self.src, 'todir': self.dest}
        # Only forward the optional limits when they were actually given.
        if self.timeout:
            args['timeout'] = self.timeout
        if self.maxTime:
            args['maxTime'] = self.maxTime

        command = remotecommand.RemoteCommand('cpdir', args)
        deferred = self.runCommand(command)
        deferred.addCallback(lambda _: self.commandComplete(command))
        deferred.addErrback(self.failed)

    def commandComplete(self, cmd):
        if cmd.didFail():
            self.step_status.setText(["Copying", self.src, "to", self.dest, "failed."])
            self.finished(FAILURE)
            return
        self.step_status.setText(self.describe(done=True))
        self.finished(SUCCESS)

    # TODO: BuildStep subclasses don't have a describe()....

    def getResultSummary(self):
        """Render a one-line summary of the copy result."""
        src = bytes2unicode(self.src, errors='replace')
        dest = bytes2unicode(self.dest, errors='replace')
        pair = u"{} to {}".format(src, dest)
        if self.results == SUCCESS:
            summary = u'Copied ' + pair
        else:
            summary = u'Copying ' + pair + ' failed.'
        return {u'step': summary}
class RemoveDirectory(WorkerBuildStep):
    """
    Remove a directory tree on the worker.
    """
    name = 'RemoveDirectory'
    description = ['Deleting']
    descriptionDone = ['Deleted']
    renderables = ['dir']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, dir, **kwargs):
        buildstep.BuildStep.__init__(self, **kwargs)
        self.dir = dir

    def start(self):
        # The attached worker must implement the 'rmdir' command.
        self.checkWorkerHasCommand('rmdir')
        command = remotecommand.RemoteCommand('rmdir', {'dir': self.dir})
        deferred = self.runCommand(command)
        deferred.addCallback(lambda _: self.commandComplete(command))
        deferred.addErrback(self.failed)

    def commandComplete(self, cmd):
        """Finish with FAILURE when the remote delete failed."""
        if not cmd.didFail():
            self.finished(SUCCESS)
            return
        self.step_status.setText(["Delete failed."])
        self.finished(FAILURE)
class MakeDirectory(WorkerBuildStep):
    """
    Create a directory on the worker.
    """
    name = 'MakeDirectory'
    description = ['Creating']
    descriptionDone = ['Created']
    renderables = ['dir']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, dir, **kwargs):
        buildstep.BuildStep.__init__(self, **kwargs)
        self.dir = dir

    def start(self):
        # The attached worker must implement the 'mkdir' command.
        self.checkWorkerHasCommand('mkdir')
        command = remotecommand.RemoteCommand('mkdir', {'dir': self.dir})
        deferred = self.runCommand(command)
        deferred.addCallback(lambda _: self.commandComplete(command))
        deferred.addErrback(self.failed)

    def commandComplete(self, cmd):
        """Finish with FAILURE when the remote mkdir failed."""
        if not cmd.didFail():
            self.finished(SUCCESS)
            return
        self.step_status.setText(["Create failed."])
        self.finished(FAILURE)
class CompositeStepMixin():
    """Mixin of helper methods for steps built from several remote commands.

    All helpers return Deferreds from runRemoteCommand(); failures abandon
    the step unless the caller passes abandonOnFailure=False.
    """

    def workerPathToMasterPath(self, path):
        # Split with the worker's path module, re-join with the master's
        # os.path, converting between the two hosts' path conventions.
        return os.path.join(*self.worker.path_module.split(path))

    def addLogForRemoteCommands(self, logname):
        """This method must be called by user classes
        composite steps could create several logs, this mixin functions will write
        to the last one.
        """
        # The most recently added log becomes the target for all subsequent
        # remote commands run through runRemoteCommand().
        self.rc_log = self.addLog(logname)
        return self.rc_log

    def runRemoteCommand(self, cmd, args, abandonOnFailure=True,
                         evaluateCommand=lambda cmd: cmd.didFail()):
        """generic RemoteCommand boilerplate

        Runs remote command `cmd` with `args`; the returned Deferred fires
        with evaluateCommand(cmd) on completion. When abandonOnFailure is
        true, a failed command raises BuildStepFailed instead.
        """
        cmd = remotecommand.RemoteCommand(cmd, args)
        if hasattr(self, "rc_log"):
            # Only attach a log if addLogForRemoteCommands() was called.
            cmd.useLog(self.rc_log, False)
        d = self.runCommand(cmd)

        def commandComplete(cmd):
            if abandonOnFailure and cmd.didFail():
                raise buildstep.BuildStepFailed()
            return evaluateCommand(cmd)

        d.addCallback(lambda res: commandComplete(cmd))
        return d

    def runRmdir(self, dir, timeout=None, **kwargs):
        """ remove a directory from the worker """
        cmd_args = {'dir': dir, 'logEnviron': self.logEnviron}
        if timeout:
            cmd_args['timeout'] = timeout
        return self.runRemoteCommand('rmdir', cmd_args, **kwargs)

    def runRmFile(self, path, timeout=None, **kwargs):
        """ remove a file from the worker """
        cmd_args = {'path': path, 'logEnviron': self.logEnviron}
        if timeout:
            cmd_args['timeout'] = timeout
        if self.workerVersionIsOlderThan('rmfile', '3.1'):
            # Workers older than command version 3.1 have no 'rmfile';
            # fall back to 'rmdir', which expects a 'dir' argument.
            cmd_args['dir'] = os.path.abspath(path)
            return self.runRemoteCommand('rmdir', cmd_args, **kwargs)
        return self.runRemoteCommand('rmfile', cmd_args, **kwargs)

    def pathExists(self, path):
        """ test whether path exists; returns a Deferred firing a bool """
        def commandComplete(cmd):
            return not cmd.didFail()
        return self.runRemoteCommand('stat', {'file': path,
                                              'logEnviron': self.logEnviron, },
                                     abandonOnFailure=False,
                                     evaluateCommand=commandComplete)

    def runMkdir(self, _dir, **kwargs):
        """ create a directory and its parents"""
        return self.runRemoteCommand('mkdir', {'dir': _dir,
                                               'logEnviron': self.logEnviron, },
                                     **kwargs)

    def runGlob(self, path, **kwargs):
        """ find files matching a shell-style pattern"""
        def commandComplete(cmd):
            # The 'glob' command reports matches via 'files' updates; the
            # last update holds the final list.
            return cmd.updates['files'][-1]
        return self.runRemoteCommand('glob', {'path': path,
                                              'logEnviron': self.logEnviron, },
                                     evaluateCommand=commandComplete, **kwargs)

    def getFileContentFromWorker(self, filename, abandonOnFailure=False):
        """Upload a file from the worker; Deferred fires with its content
        as a string, or None if the upload failed."""
        self.checkWorkerHasCommand("uploadFile")
        fileWriter = remotetransfer.StringFileWriter()
        # default arguments
        args = {
            'workdir': self.workdir,
            'writer': fileWriter,
            'maxsize': None,
            'blocksize': 32 * 1024,
        }
        # Pre-3.0 workers use the old 'slavesrc' argument name.
        if self.workerVersionIsOlderThan('uploadFile', '3.0'):
            args['slavesrc'] = filename
        else:
            args['workersrc'] = filename

        def commandComplete(cmd):
            if cmd.didFail():
                return None
            return fileWriter.buffer
        return self.runRemoteCommand('uploadFile', args,
                                     abandonOnFailure=abandonOnFailure,
                                     evaluateCommand=commandComplete)
    # Register the deprecated "...FromSlave" alias for the method above.
    deprecatedWorkerClassMethod(locals(), getFileContentFromWorker)

    def downloadFileContentToWorker(self, workerdest, strfile,
                                    abandonOnFailure=False, mode=None,
                                    workdir=None):
        """Download string `strfile` to `workerdest` on the worker."""
        if workdir is None:
            workdir = self.workdir
        self.checkWorkerHasCommand("downloadFile")
        fileReader = remotetransfer.StringFileReader(strfile)
        # default arguments
        args = {
            'workdir': workdir,
            'maxsize': None,
            'mode': mode,
            'reader': fileReader,
            'blocksize': 32 * 1024,
        }
        # Pre-3.0 workers use the old 'slavedest' argument name.
        if self.workerVersionIsOlderThan('downloadFile', '3.0'):
            args['slavedest'] = workerdest
        else:
            args['workerdest'] = workerdest

        def commandComplete(cmd):
            if cmd.didFail():
                return None
            return fileReader
        return self.runRemoteCommand('downloadFile', args,
                                     abandonOnFailure=abandonOnFailure,
                                     evaluateCommand=commandComplete)
| seankelly/buildbot | master/buildbot/steps/worker.py | Python | gpl-2.0 | 12,102 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Adapted by Nicolas Bessi. Copyright Camptocamp SA
# Based on Florent Xicluna original code. Copyright Wingo SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import locale
import os
import platform
import subprocess
from openerp import release
from openerp.tools.config import config
def _get_output(cmd):
    """Run *cmd* through the shell in the server root directory and return
    its stdout with trailing whitespace stripped."""
    proc = subprocess.Popen(cmd, shell=True, cwd=config['root_path'],
                            stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    return out.rstrip()
def get_server_environment():
    """Collect information about the server host and software stack.

    Returns a tuple of (label, value) pairs covering platform, OS,
    locale, Python and OpenERP versions, and the bzr revision.
    Inspired by server/bin/service/web_services.py.
    """
    try:
        rev_id = _get_output('bzr revision-info')
    # 'except ... as ...' (PEP 3110) is valid on Python 2.6+ and required
    # on Python 3; the old 'except Exception, e' form was py2-only.
    except Exception as e:
        rev_id = 'Exception: %s' % (e,)

    # Build e.g. "en_US.UTF-8"; getdefaultlocale() may contain None parts.
    os_lang = '.'.join([x for x in locale.getdefaultlocale() if x])
    if not os_lang:
        os_lang = 'NOT SET'
    if os.name == 'posix' and platform.system() == 'Linux':
        lsbinfo = _get_output('lsb_release -a')
    else:
        lsbinfo = 'not lsb compliant'
    return (
        ('platform', platform.platform()),
        ('os.name', os.name),
        ('lsb_release', lsbinfo),
        ('release', platform.release()),
        ('version', platform.version()),
        ('architecture', platform.architecture()[0]),
        ('locale', os_lang),
        ('python', platform.python_version()),
        ('openerp', release.version),
        ('revision', rev_id),
    )
| hbrunn/server-tools | __unported__/server_environment/system_info.py | Python | agpl-3.0 | 2,161 |
# to load adapter configs
from . import actions # noqa
from . import schema # noqa
| plone/plone.server | src/plone.server/plone/server/framing/__init__.py | Python | bsd-2-clause | 85 |
# coding: utf-8
from .base import Service
| osantana/servicy | servicy/__init__.py | Python | isc | 44 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Patrik Lundin <patrik@sigterm.se>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: openbsd_pkg
author: "Patrik Lundin (@eest)"
version_added: "1.1"
short_description: Manage packages on OpenBSD.
description:
- Manage packages on OpenBSD using the pkg tools.
requirements: [ "python >= 2.5" ]
options:
name:
required: true
description:
- Name of the package.
state:
required: true
choices: [ present, latest, absent ]
description:
- C(present) will make sure the package is installed.
C(latest) will make sure the latest version of the package is installed.
C(absent) will make sure the specified package is not installed.
build:
required: false
choices: [ yes, no ]
default: no
description:
- Build the package from source instead of downloading and installing
a binary. Requires that the port source tree is already installed.
Automatically builds and installs the 'sqlports' package, if it is
not already installed.
version_added: "2.1"
ports_dir:
required: false
default: /usr/ports
description:
- When used in combination with the 'build' option, allows overriding
the default ports source directory.
version_added: "2.1"
clean:
required: false
choices: [ yes, no ]
default: no
description:
- When updating or removing packages, delete the extra configuration
file(s) in the old packages which are annotated with @extra in
the packaging-list.
version_added: "2.3"
quick:
required: false
choices: [ yes, no ]
default: no
description:
- Replace or delete packages quickly; do not bother with checksums
before removing normal files.
version_added: "2.3"
'''
EXAMPLES = '''
# Make sure nmap is installed
- openbsd_pkg:
name: nmap
state: present
# Make sure nmap is the latest version
- openbsd_pkg:
name: nmap
state: latest
# Make sure nmap is not installed
- openbsd_pkg:
name: nmap
state: absent
# Make sure nmap is installed, build it from source if it is not
- openbsd_pkg:
name: nmap
state: present
build: yes
# Specify a pkg flavour with '--'
- openbsd_pkg:
name: vim--no_x11
state: present
# Specify the default flavour to avoid ambiguity errors
- openbsd_pkg:
name: vim--
state: present
# Specify a package branch (requires at least OpenBSD 6.0)
- openbsd_pkg:
name: python%3.5
state: present
# Update all packages on the system
- openbsd_pkg:
name: '*'
state: latest
# Purge a package and it's configuration files
- openbsd_pkg: name=mpd clean=yes state=absent
# Quickly remove a package without checking checksums
- openbsd_pkg: name=qt5 quick=yes state=absent
'''
import os
import platform
import re
import shlex
import sqlite3
from distutils.version import StrictVersion
# Function used for executing commands.
def execute_command(cmd, module):
# Break command line into arguments.
# This makes run_command() use shell=False which we need to not cause shell
# expansion of special characters like '*'.
cmd_args = shlex.split(cmd)
return module.run_command(cmd_args)
# Function used to find out if a package is currently installed.
def get_package_state(names, pkg_spec, module):
    """Record in pkg_spec whether each named package is installed."""
    info_cmd = 'pkg_info -Iq'

    for name in names:
        rc, stdout, stderr = execute_command("%s inst:%s" % (info_cmd, name), module)

        if stderr:
            module.fail_json(msg="failed in get_package_state(): " + stderr)

        if not stdout:
            pkg_spec[name]['installed_state'] = False
            continue

        # If the requested package name is just a stem, like "python", we may
        # find multiple packages with that name.
        pkg_spec[name]['installed_names'] = stdout.splitlines()
        module.debug("get_package_state(): installed_names = %s" % pkg_spec[name]['installed_names'])
        pkg_spec[name]['installed_state'] = True
# Function used to make sure a package is present.
def package_present(names, pkg_spec, module):
    """Install each named package that is not already installed.

    Per-package results (rc/stdout/stderr/changed) are stored in
    pkg_spec[name]. When module.params['build'] is true the package is
    built from the ports tree instead of installed as a binary.
    """
    build = module.params['build']

    for name in names:
        # It is possible package_present() has been called from package_latest().
        # In that case we do not want to operate on the whole list of names,
        # only the leftovers.
        if pkg_spec['package_latest_leftovers']:
            if name not in pkg_spec['package_latest_leftovers']:
                module.debug("package_present(): ignoring '%s' which is not a package_latest() leftover" % name)
                continue
            else:
                module.debug("package_present(): handling package_latest() leftovers, installing '%s'" % name)

        if module.check_mode:
            # -n: dry run only.
            install_cmd = 'pkg_add -Imn'
        else:
            if build is True:
                port_dir = "%s/%s" % (module.params['ports_dir'], get_package_source_path(name, pkg_spec, module))
                if os.path.isdir(port_dir):
                    # Build from the ports tree, honoring any flavor or
                    # subpackage parsed from the requested name.
                    if pkg_spec[name]['flavor']:
                        flavors = pkg_spec[name]['flavor'].replace('-', ' ')
                        install_cmd = "cd %s && make clean=depends && FLAVOR=\"%s\" make install && make clean=depends" % (port_dir, flavors)
                    elif pkg_spec[name]['subpackage']:
                        install_cmd = "cd %s && make clean=depends && SUBPACKAGE=\"%s\" make install && make clean=depends" % (port_dir,
                                                                                                                              pkg_spec[name]['subpackage'])
                    else:
                        install_cmd = "cd %s && make install && make clean=depends" % (port_dir)
                else:
                    module.fail_json(msg="the port source directory %s does not exist" % (port_dir))
            else:
                install_cmd = 'pkg_add -Im'

        if pkg_spec[name]['installed_state'] is False:
            # Attempt to install the package
            if build is True and not module.check_mode:
                # NOTE(review): `module` is passed as the second positional
                # argument to run_command here, where it would land in a
                # parameter slot such as check_rc — this looks unintended;
                # verify against AnsibleModule.run_command's signature.
                (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = module.run_command(install_cmd, module, use_unsafe_shell=True)
            else:
                (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (install_cmd, name), module)

            # The behaviour of pkg_add is a bit different depending on if a
            # specific version is supplied or not.
            #
            # When a specific version is supplied the return code will be 0 when
            # a package is found and 1 when it is not. If a version is not
            # supplied the tool will exit 0 in both cases.
            #
            # It is important to note that "version" relates to the
            # packages-specs(7) notion of a version. If using the branch syntax
            # (like "python%3.5") even though a branch name may look like a
            # version string it is not used an one by pkg_add.
            if pkg_spec[name]['version'] or build is True:
                # Depend on the return code.
                module.debug("package_present(): depending on return code for name '%s'" % name)
                if pkg_spec[name]['rc']:
                    pkg_spec[name]['changed'] = False
            else:
                # Depend on stderr instead.
                module.debug("package_present(): depending on stderr for name '%s'" % name)
                if pkg_spec[name]['stderr']:
                    # There is a corner case where having an empty directory in
                    # installpath prior to the right location will result in a
                    # "file:/local/package/directory/ is empty" message on stderr
                    # while still installing the package, so we need to look for
                    # for a message like "packagename-1.0: ok" just in case.
                    match = re.search("\W%s-[^:]+: ok\W" % pkg_spec[name]['stem'], pkg_spec[name]['stdout'])

                    if match:
                        # It turns out we were able to install the package.
                        module.debug("package_present(): we were able to install package for name '%s'" % name)
                    else:
                        # We really did fail, fake the return code.
                        module.debug("package_present(): we really did fail for name '%s'" % name)
                        pkg_spec[name]['rc'] = 1
                        pkg_spec[name]['changed'] = False
                else:
                    module.debug("package_present(): stderr was not set for name '%s'" % name)

            if pkg_spec[name]['rc'] == 0:
                pkg_spec[name]['changed'] = True
        else:
            # Already installed: report an unchanged no-op.
            pkg_spec[name]['rc'] = 0
            pkg_spec[name]['stdout'] = ''
            pkg_spec[name]['stderr'] = ''
            pkg_spec[name]['changed'] = False
# Function used to make sure a package is the latest available version.
def package_latest(names, pkg_spec, module):
    """Upgrade each installed package; defer missing ones to package_present().

    Packages that are not installed at all are collected in
    pkg_spec['package_latest_leftovers'] and installed at the end.
    """
    if module.params['build'] is True:
        module.fail_json(msg="the combination of build=%s and state=latest is not supported" % module.params['build'])

    # -u: update installed packages.
    upgrade_cmd = 'pkg_add -um'

    if module.check_mode:
        upgrade_cmd += 'n'

    if module.params['clean']:
        upgrade_cmd += 'c'

    if module.params['quick']:
        upgrade_cmd += 'q'

    for name in names:
        if pkg_spec[name]['installed_state'] is True:
            # Attempt to upgrade the package.
            (pkg_spec[name]['rc'], pkg_spec[name]['stdout'], pkg_spec[name]['stderr']) = execute_command("%s %s" % (upgrade_cmd, name), module)

            # Look for output looking something like "nmap-6.01->6.25: ok" to see if
            # something changed (or would have changed). Use \W to delimit the match
            # from progress meter output.
            pkg_spec[name]['changed'] = False
            for installed_name in pkg_spec[name]['installed_names']:
                module.debug("package_latest(): checking for pre-upgrade package name: %s" % installed_name)
                match = re.search("\W%s->.+: ok\W" % installed_name, pkg_spec[name]['stdout'])
                if match:
                    module.debug("package_latest(): pre-upgrade package name match: %s" % installed_name)
                    pkg_spec[name]['changed'] = True
                    break

            # FIXME: This part is problematic. Based on the issues mentioned (and
            # handled) in package_present() it is not safe to blindly trust stderr
            # as an indicator that the command failed, and in the case with
            # empty installpath directories this will break.
            #
            # For now keep this safeguard here, but ignore it if we managed to
            # parse out a successful update above. This way we will report a
            # successful run when we actually modify something but fail
            # otherwise.
            if pkg_spec[name]['changed'] is not True:
                if pkg_spec[name]['stderr']:
                    pkg_spec[name]['rc'] = 1
        else:
            # Note packages that need to be handled by package_present
            module.debug("package_latest(): package '%s' is not installed, will be handled by package_present()" % name)
            pkg_spec['package_latest_leftovers'].append(name)

    # If there were any packages that were not installed we call
    # package_present() which will handle those.
    if pkg_spec['package_latest_leftovers']:
        module.debug("package_latest(): calling package_present() to handle leftovers")
        package_present(names, pkg_spec, module)
# Function used to make sure a package is not installed.
def package_absent(names, pkg_spec, module):
    """Remove each named package that is currently installed.

    Per-package results (rc/stdout/stderr/changed) are stored in
    pkg_spec[name]; packages that were never installed become no-ops.
    """
    remove_cmd = 'pkg_delete -I'

    if module.check_mode:
        remove_cmd += 'n'
    if module.params['clean']:
        remove_cmd += 'c'
    if module.params['quick']:
        remove_cmd += 'q'

    for name in names:
        spec = pkg_spec[name]
        if spec['installed_state'] is True:
            # Attempt to remove the package.
            (spec['rc'], spec['stdout'], spec['stderr']) = execute_command("%s %s" % (remove_cmd, name), module)
            spec['changed'] = (spec['rc'] == 0)
        else:
            # Nothing installed: report an unchanged no-op.
            spec['rc'] = 0
            spec['stdout'] = ''
            spec['stderr'] = ''
            spec['changed'] = False
# Function used to parse the package name based on packages-specs(7).
# The general name structure is "stem-version[-flavors]".
#
# Names containing "%" are a special variation not part of the
# packages-specs(7) syntax. See pkg_add(1) on OpenBSD 6.0 or later for a
# description.
def parse_package_name(names, pkg_spec, module):
    """Split each requested name into stem/version/flavor/branch fields.

    Fills pkg_spec[name] with the parsed parts plus a 'style' marker
    ('version', 'versionless' or 'stem'); fails the module on names that
    cannot be parsed.
    """
    # Initialize empty list of package_latest() leftovers.
    pkg_spec['package_latest_leftovers'] = []

    for name in names:
        module.debug("parse_package_name(): parsing name: %s" % name)
        # Do some initial matches so we can base the more advanced regex on that.
        version_match = re.search("-[0-9]", name)
        versionless_match = re.search("--", name)

        # Stop if someone is giving us a name that both has a version and is
        # version-less at the same time.
        if version_match and versionless_match:
            module.fail_json(msg="package name both has a version and is version-less: " + name)

        # All information for a given name is kept in the pkg_spec keyed by that name.
        pkg_spec[name] = {}

        # If name includes a version.
        if version_match:
            match = re.search("^(?P<stem>[^%]+)-(?P<version>[0-9][^-]*)(?P<flavor_separator>-)?(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = '-'
                pkg_spec[name]['version'] = match.group('version')
                pkg_spec[name]['flavor_separator'] = match.group('flavor_separator')
                pkg_spec[name]['flavor'] = match.group('flavor')
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'version'
                module.debug("version_match: stem: %s, version: %s, flavor_separator: %s, flavor: %s, branch: %s, style: %s" %
                             (
                                 pkg_spec[name]['stem'],
                                 pkg_spec[name]['version'],
                                 pkg_spec[name]['flavor_separator'],
                                 pkg_spec[name]['flavor'],
                                 pkg_spec[name]['branch'],
                                 pkg_spec[name]['style']
                             )
                             )
            else:
                module.fail_json(msg="unable to parse package name at version_match: " + name)

        # If name includes no version but is version-less ("--").
        elif versionless_match:
            match = re.search("^(?P<stem>[^%]+)--(?P<flavor>[a-z].*)?(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = '-'
                pkg_spec[name]['version'] = None
                pkg_spec[name]['flavor_separator'] = '-'
                pkg_spec[name]['flavor'] = match.group('flavor')
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'versionless'
                module.debug("versionless_match: stem: %s, flavor: %s, branch: %s, style: %s" %
                             (
                                 pkg_spec[name]['stem'],
                                 pkg_spec[name]['flavor'],
                                 pkg_spec[name]['branch'],
                                 pkg_spec[name]['style']
                             )
                             )
            else:
                module.fail_json(msg="unable to parse package name at versionless_match: " + name)

        # If name includes no version, and is not version-less, it is all a
        # stem, possibly with a branch (%branchname) tacked on at the
        # end.
        else:
            match = re.search("^(?P<stem>[^%]+)(%(?P<branch>.+))?$", name)
            if match:
                pkg_spec[name]['stem'] = match.group('stem')
                pkg_spec[name]['version_separator'] = None
                pkg_spec[name]['version'] = None
                pkg_spec[name]['flavor_separator'] = None
                pkg_spec[name]['flavor'] = None
                pkg_spec[name]['branch'] = match.group('branch')
                pkg_spec[name]['style'] = 'stem'
                module.debug("stem_match: stem: %s, branch: %s, style: %s" %
                             (
                                 pkg_spec[name]['stem'],
                                 pkg_spec[name]['branch'],
                                 pkg_spec[name]['style']
                             )
                             )
            else:
                module.fail_json(msg="unable to parse package name at else: " + name)

        # Verify that the managed host is new enough to support branch syntax.
        if pkg_spec[name]['branch']:
            branch_release = "6.0"

            if StrictVersion(platform.release()) < StrictVersion(branch_release):
                module.fail_json(msg="package name using 'branch' syntax requires at least OpenBSD %s: %s" % (branch_release, name))

        # Sanity check that there are no trailing dashes in flavor.
        # Try to stop strange stuff early so we can be strict later.
        if pkg_spec[name]['flavor']:
            match = re.search("-$", pkg_spec[name]['flavor'])
            if match:
                module.fail_json(msg="trailing dash in flavor: " + pkg_spec[name]['flavor'])
# Function used for figuring out the port path.
def get_package_source_path(name, pkg_spec, module):
    """Return the ports-tree path (e.g. 'net/nmap') for *name*.

    Looks the package up in the sqlports database, first by exact
    fullpkgname, then by LIKE patterns built from the parsed spec. Also
    records any subpackage suffix in pkg_spec[name]['subpackage'].
    Fails the module on zero or multiple matches.
    """
    pkg_spec[name]['subpackage'] = None
    if pkg_spec[name]['stem'] == 'sqlports':
        # sqlports itself cannot be looked up in its own database.
        return 'databases/sqlports'
    else:
        # try for an exact match first
        sqlports_db_file = '/usr/local/share/sqlports'
        if not os.path.isfile(sqlports_db_file):
            module.fail_json(msg="sqlports file '%s' is missing" % sqlports_db_file)

        conn = sqlite3.connect(sqlports_db_file)
        first_part_of_query = 'SELECT fullpkgpath, fullpkgname FROM ports WHERE fullpkgname'
        query = first_part_of_query + ' = ?'
        module.debug("package_package_source_path(): exact query: %s" % query)
        cursor = conn.execute(query, (name,))
        results = cursor.fetchall()

        # next, try for a fuzzier match
        if len(results) < 1:
            # SQL LIKE pattern: '%' matches any version when none was given.
            looking_for = pkg_spec[name]['stem'] + (pkg_spec[name]['version_separator'] or '-') + (pkg_spec[name]['version'] or '%')
            query = first_part_of_query + ' LIKE ?'
            if pkg_spec[name]['flavor']:
                looking_for += pkg_spec[name]['flavor_separator'] + pkg_spec[name]['flavor']
                module.debug("package_package_source_path(): fuzzy flavor query: %s" % query)
                cursor = conn.execute(query, (looking_for,))
            elif pkg_spec[name]['style'] == 'versionless':
                # Exclude flavored variants (names with an extra -suffix).
                query += ' AND fullpkgname NOT LIKE ?'
                module.debug("package_package_source_path(): fuzzy versionless query: %s" % query)
                cursor = conn.execute(query, (looking_for, "%s-%%" % looking_for,))
            else:
                module.debug("package_package_source_path(): fuzzy query: %s" % query)
                cursor = conn.execute(query, (looking_for,))
            results = cursor.fetchall()

        # error if we don't find exactly 1 match
        conn.close()
        if len(results) < 1:
            module.fail_json(msg="could not find a port by the name '%s'" % name)
        if len(results) > 1:
            matches = map(lambda x:x[1], results)
            module.fail_json(msg="too many matches, unsure which to build: %s" % ' OR '.join(matches))

        # there's exactly 1 match, so figure out the subpackage, if any, then return
        fullpkgpath = results[0][0]
        parts = fullpkgpath.split(',')
        if len(parts) > 1 and parts[1][0] == '-':
            pkg_spec[name]['subpackage'] = parts[1]

        return parts[0]
# Function used for upgrading all installed packages.
def upgrade_packages(pkg_spec, module):
    """Upgrade every installed package on the system (name '*').

    Results (rc/stdout/stderr/changed) are stored under pkg_spec['*'].
    """
    if module.check_mode:
        upgrade_cmd = 'pkg_add -Imnu'
    else:
        upgrade_cmd = 'pkg_add -Imu'

    # Create a minimal pkg_spec entry for '*' to store return values.
    pkg_spec['*'] = {}

    # Attempt to upgrade all packages.
    pkg_spec['*']['rc'], pkg_spec['*']['stdout'], pkg_spec['*']['stderr'] = execute_command("%s" % upgrade_cmd, module)

    # Try to find any occurrence of a package changing version like:
    # "bzip2-1.0.6->1.0.6p0: ok".
    # Use a raw string: "\W"/"\w" in a plain string are invalid escape
    # sequences and raise warnings on modern Python.
    match = re.search(r"\W\w.+->.+: ok\W", pkg_spec['*']['stdout'])

    if match:
        pkg_spec['*']['changed'] = True
    else:
        pkg_spec['*']['changed'] = False

    # It seems we can not trust the return value, so depend on the presence of
    # stderr to know if something failed.
    if pkg_spec['*']['stderr']:
        pkg_spec['*']['rc'] = 1
    else:
        pkg_spec['*']['rc'] = 0
# ===========================================
# Main control flow.
def main():
    """Entry point for the openbsd_pkg module.

    Parses the Ansible arguments, dispatches to the install / remove /
    upgrade helpers, then reports the combined changed/error status for
    all requested package names.
    """
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True, type='list'),
            state = dict(required=True, choices=['absent', 'installed', 'latest', 'present', 'removed']),
            build = dict(default='no', type='bool'),
            ports_dir = dict(default='/usr/ports'),
            quick = dict(default='no', type='bool'),
            clean = dict(default='no', type='bool')
        ),
        supports_check_mode = True
    )

    name = module.params['name']
    state = module.params['state']
    build = module.params['build']
    ports_dir = module.params['ports_dir']

    rc = 0
    stdout = ''
    stderr = ''
    result = {}
    result['name'] = name
    result['state'] = state
    result['build'] = build

    # The data structure used to keep track of package information.
    pkg_spec = {}

    # Building from ports requires the ports tree and the sqlports
    # database (used to map names to port paths).
    if build is True:
        if not os.path.isdir(ports_dir):
            module.fail_json(msg="the ports source directory %s does not exist" % (ports_dir))

        # build sqlports if its not installed yet
        parse_package_name(['sqlports'], pkg_spec, module)
        get_package_state(['sqlports'], pkg_spec, module)
        if not pkg_spec['sqlports']['installed_state']:
            module.debug("main(): installing 'sqlports' because build=%s" % module.params['build'])
            package_present(['sqlports'], pkg_spec, module)

    # '*' means "every installed package" and must be the only name given.
    asterisk_name = False
    for n in name:
        if n == '*':
            if len(name) != 1:
                module.fail_json(msg="the package name '*' can not be mixed with other names")

            asterisk_name = True

    if asterisk_name:
        if state != 'latest':
            module.fail_json(msg="the package name '*' is only valid when using state=latest")
        else:
            # Perform an upgrade of all installed packages.
            upgrade_packages(pkg_spec, module)
    else:
        # Parse package names and put results in the pkg_spec dictionary.
        parse_package_name(name, pkg_spec, module)

        # Not sure how the branch syntax is supposed to play together
        # with build mode. Disable it for now.
        for n in name:
            if pkg_spec[n]['branch'] and module.params['build'] is True:
                module.fail_json(msg="the combination of 'branch' syntax and build=%s is not supported: %s" % (module.params['build'], n))

        # Get state for all package names.
        get_package_state(name, pkg_spec, module)

        # Perform requested action.
        if state in ['installed', 'present']:
            package_present(name, pkg_spec, module)
        elif state in ['absent', 'removed']:
            package_absent(name, pkg_spec, module)
        elif state == 'latest':
            package_latest(name, pkg_spec, module)

    # The combined changed status for all requested packages. If anything
    # is changed this is set to True.
    combined_changed = False

    # We combine all error messages in this comma separated string, for example:
    # "msg": "Can't find nmapp\n, Can't find nmappp\n"
    combined_error_message = ''

    # Loop over all requested package names and check if anything failed or
    # changed.
    for n in name:
        if pkg_spec[n]['rc'] != 0:
            # Prefer stderr when present; fall back to stdout for tools
            # that report errors there.
            if pkg_spec[n]['stderr']:
                if combined_error_message:
                    combined_error_message += ", %s" % pkg_spec[n]['stderr']
                else:
                    combined_error_message = pkg_spec[n]['stderr']
            else:
                if combined_error_message:
                    combined_error_message += ", %s" % pkg_spec[n]['stdout']
                else:
                    combined_error_message = pkg_spec[n]['stdout']
        if pkg_spec[n]['changed'] is True:
            combined_changed = True

    # If combined_error_message contains anything at least some part of the
    # list of requested package names failed.
    if combined_error_message:
        module.fail_json(msg=combined_error_message)

    result['changed'] = combined_changed

    module.exit_json(**result)

# Import module snippets.
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
| DazWorrall/ansible | lib/ansible/modules/packaging/os/openbsd_pkg.py | Python | gpl-3.0 | 26,447 |
#!/usr/bin/env python
from setuptools import setup
import os
package_name = "ROPGadget"
package_dir = "ropgadget"
package_description = """
This tool lets you search your gadgets on your binaries to facilitate your ROP exploitation.
ROPgadget supports ELF, PE and Mach-O format on x86, x64, ARM, ARM64, PowerPC, SPARC and MIPS architectures.
http://www.shell-storm.org/project/ROPgadget/
""".strip()
def fullsplit(path, result=None):
    """
    Split a pathname into components (the opposite of os.path.join) in a
    platform-neutral way.
    """
    parts = result if result is not None else []
    while True:
        head, tail = os.path.split(path)
        if head == '':
            # Reached a bare name: it is the first remaining component.
            return [tail] + parts
        if head == path:
            # Reached a filesystem root such as '/'; nothing more to add.
            return parts
        parts = [tail] + parts
        path = head
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
    os.chdir(root_dir)

for dirpath, dirnames, filenames in os.walk(package_dir):
    # Prune hidden directories in place so os.walk never descends into
    # them. The original `del dirnames[i]` inside enumerate() mutated the
    # list while iterating, which skips the entry following each deletion
    # and can leave some dot-directories unpruned.
    dirnames[:] = [d for d in dirnames if not d.startswith('.')]
    if '__init__.py' in filenames:
        # A package: record its dotted module path.
        packages.append('.'.join(fullsplit(dirpath)))
    elif filenames:
        # A plain data directory: record its files for installation.
        data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
version = "5.3"

setup(
    name = package_name,
    version = version,
    description = package_description,
    packages = packages,
    # Fixed typo "GLPv2" -> "GPLv2"; the Trove classifier below already
    # names the GNU General Public License v2.
    license = "GPLv2",
    author = "Jonathan Salwan",
    author_email = "jonathan.salwan@gmail.com",
    install_requires = ['capstone'],
    url = "https://github.com/JonathanSalwan/ROPgadget",
    scripts = ['scripts/ROPgadget'],
    classifiers = [
        'Topic :: Security',
        'Environment :: Console',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
        'Programming Language :: Python :: 2.7',
        'Intended Audience :: Developers'
    ]
)
| bl4ckic3/ROPgadget | setup.py | Python | gpl-2.0 | 2,198 |
hdata = hdata.crop(*hdata.span.contract(1))
hfilt = hfilt.crop(*hfilt.span.contract(1)) | gwpy/gwpy.github.io | docs/latest/examples/signal/gw150914-6.py | Python | gpl-3.0 | 87 |
# models.card
# -*- coding: UTF-8 -*-
from operator import itemgetter, attrgetter
from wand.image import Image
from config import DEVELOP_MODE
from datetime import datetime, timedelta
from libs import doubandb, doubanfs, Employee, doubanmc, store, User
from webapp.models.consts import *
from webapp.models.notify import Notify
from webapp.models.profile import Profile
from webapp.models.badage import Badage
from webapp.models.question import Question, Answer
from config import SITE
import json
import math
CHART_BLACK_LIST_UIDS = [
'46555103', #Ruby = =
]
class Comment(object):
    """A comment left on a user's card (one row of me_comment)."""

    def __init__(self, id, card_id, author_id, content, rtime):
        # Primary/foreign keys are normalized to str so comparisons with
        # values coming from the web layer behave consistently.
        self.id, self.card_id, self.author_id = str(id), str(card_id), str(author_id)
        self.content = content
        self.rtime = rtime

    @property
    def html(self):
        """Comment content rendered with @-mentions expanded."""
        from webapp.models.utils import mention_text
        return mention_text(self.content)['html']

    @property
    def author(self):
        """The user who wrote this comment."""
        return User(id=self.author_id)

    @property
    def author_card(self):
        """The card of the user who wrote this comment."""
        return Card.get(self.author_id)

    @classmethod
    def remove(cls, author_id, comment_id):
        """Delete one of *author_id*'s comments by primary key."""
        store.execute("delete from me_comment where"
                      " `author_id`=%s and `id`=%s", (author_id, comment_id))
        store.commit()
class Card(object):
    """A staff profile card backed by the me_card table.

    Holds account data (email, skype, alias, phone, photo) plus join
    time; richer data (profile, tags, likes, comments) is loaded lazily
    through properties further down the class.
    """

    # Visibility flags stored in me_card.flag.
    FLAG_NORMAL = 'N'
    FLAG_HIDE = 'H'

    # memcached key template; keyed by either user_id or uid.
    MC_KEY = 'me-card:%s'

    def __init__(self, id, uid, email, skype, name, alias, phone, photo_id, flag, join_time, rtime, ctime):
        # Primary key normalized to str for consistent comparisons.
        self.id = str(id)
        self.uid = uid
        self.email = email
        self.skype = skype
        self.name = name
        self.alias = alias
        self.phone = phone
        # join_time may be NULL in the database; fall back to ''.
        self.join_time = join_time or ''
        self.photo_id = photo_id
        self.flag = flag
        self.rtime = rtime
        self.ctime = ctime
@property
def sort_date(self):
return self.join_time
@property
def sort_time(self):
return self.rtime
@property
def profile(self):
return Profile.get(self.id)
def set_profile2(self, profile):
doubandb.set("me/card/profile2-%s" % self.id, profile)
@property
def profile2(self):
return doubandb.get("me/card/profile2-%s" % self.id, {})
@property
def department(self):
return self.profile2.get('department', '')
@property
def selfintro(self):
return self.profile2.get('selfintro', '') or self.profile.intro
@property
def position(self):
return self.profile2.get('position', '')
@property
def path(self):
return "/card/%s/" % self.uid
@property
def url(self):
return "%s/card/%s/" % (SITE, self.uid)
@property
def is_basic(self):
return self.email and self.name
@property
def is_hide(self):
return self.flag == self.FLAG_HIDE
@property
def is_full(self):
return self.is_basic and self.skype and self.join_time and self.alias and self.photo > 0 and self.profile.sex and self.profile.love and self.profile.marriage and self.profile.birthday and self.profile.hometown
def json_dict(self, user):
ret = {}
if not self.owner:
return {}
ret['id'] = self.id
ret['alt'] = self.url
ret['uid'] = self.uid
ret['name'] = self.screen_name
ret['icon'] = self.icon
ret['email'] = self.email
ret['skype'] = self.skype
ret['alias'] = self.alias
ret['join_time'] = self.join_time and self.join_time.strftime('%Y-%m-%d')
if not DEVELOP_MODE:
ret['city'] = self.owner.profile().get('city','城市')
else:
ret['city'] = 'city'
ret['reg_time'] = self.owner.reg_time.strftime('%Y-%m-%d')
ret['photo'] = SITE + self.photo
ret['updated'] = self.rtime.strftime('%Y-%m-%d')
ret['created'] = self.ctime.strftime('%Y-%m-%d')
ret['like_num'] = self.like_num
ret['comment_num'] = self.comment_num
ret['url'] = self.url
ret['province'] = self.profile.province
ret['hometown'] = self.profile.hometown
ret['resume'] = self.profile.resume
ret['intro'] = self.profile.intro
ret['weibo'] = self.profile.weibo
ret['instagram'] = self.profile.instagram
ret['blog'] = self.profile.blog
ret['code'] = self.profile.code
ret['github'] = self.profile.github
ret['tags'] = self.tags
ret['badages'] = [b.json_dict() for b in self.badages]
ret['sex'] = self.profile.sex
ret['love'] = self.profile.love
ret['marriage'] = self.profile.marriage
ret['zodiac'] = self.profile.zodiac
ret['astro'] = self.profile.astro
if user:
ret['is_liked'] = self.is_liked(user.id)
ret['user_tags'] = [t.json_dict(user) for t in self.ptags]
return ret
def can_view(self, user, attr=None):
if user:
if user.id == self.id:
return True
if self.flag == self.FLAG_HIDE:
return False
card = Card.get(user.id)
if not attr:
return card and card.is_full
else:
v = getattr(card.profile, attr)
return v
return False
@classmethod
def search(cls, q):
cids = []
rs = store.execute("select user_id from me_card where name like %s and flag=%s", (q + '%%', cls.FLAG_NORMAL))
if rs:
cids = [str(r[0]) for r in rs]
else:
rs = store.execute("select user_id from me_card where uid like %s and flag=%s", (q + '%%', cls.FLAG_NORMAL))
if rs:
cids = [str(r[0]) for r in rs]
else:
rs = store.execute("select user_id from me_card where alias like %s and flag=%s", (q + '%%', cls.FLAG_NORMAL))
if rs:
cids = [str(r[0]) for r in rs]
else:
rs = store.execute("select user_id from me_card where email like %s and flag=%s", (q + '%%', cls.FLAG_NORMAL))
if rs:
cids = [str(r[0]) for r in rs]
else:
rs = store.execute("select user_id from me_card where skype like %s"
" and flag=%s", (q + '%%', cls.FLAG_NORMAL))
if rs:
cids = [str(r[0]) for r in rs]
return [cls.get(i) for i in cids]
@property
def owner(self):
return User(id=self.id)
@property
def icon(self):
return self.owner and self.owner.picture(default=True) or 'http://img3.douban.com/icon/user_normal.jpg'
@property
def screen_name(self):
return self.owner and self.owner.name
@classmethod
def hide(cls, card_id, admin_id):
if admin_id in ADMINS:
store.execute("update me_card set flag=%s where user_id=%s", (cls.FLAG_HIDE, card_id))
store.commit()
card = cls.get(card_id)
if card:
doubanmc.delete(cls.MC_KEY % card.id)
doubanmc.delete(cls.MC_KEY % card.uid)
@classmethod
def new(cls, user_id, uid):
now = datetime.now()
store.execute("insert into me_card(`user_id`,`uid`, `ctime`) values(%s,%s,%s)"
" on duplicate key update rtime=%s", (user_id, uid, now, now))
store.commit()
doubanmc.delete("me:users:dict")
@classmethod
def get_by_ldap(cls, email):
if not email.endswith('@douban.com'):
email = email + '@douban.com'
r = store.execute("select `user_id` from me_card where email=%s and flag=%s", (email, cls.FLAG_NORMAL))
if r and r[0]:
return cls.get(r[0])
@classmethod
def get(cls, id):
r = store.execute("select `user_id`, `uid`, `email`, `skype`, `name`, `alias`, `phone`, `photo`,"
" `flag`, `join_time`, `rtime`, `ctime`"
" from me_card where `user_id`=%s", id)
card = None
if r and r[0]:
card = cls(*r[0])
else:
r = store.execute("select `user_id`, `uid`, `email`, `skype`, `name`, `alias`, `phone`, `photo`,"
" `flag`, `join_time`, `rtime`, `ctime`"
" from me_card where `uid`=%s", id)
if r and r[0]:
card = cls(*r[0])
if card:
try:
employee = Employee.dget(card.id)
if employee:
#print 'get info by dae service', employee.fullname, employee.douban_mail, employee.entry_date
if employee.fullname:
card.name = employee.fullname
if employee.douban_mail:
card.email = employee.douban_mail
if employee.entry_date:
card.join_time = datetime.strptime(employee.entry_date, '%Y-%m-%d')
except:
print "dae service EmployeeClient error user_id %s" % (card and card.id or '0')
print 'card', id, card.id, card.uid, card.email, card.name
return card
def update_account(self, name, email):
store.execute("update me_card set email=%s, `name`=%s where `user_id`=%s", (email, name, self.id))
store.commit()
doubanmc.delete(self.MC_KEY % self.id)
doubanmc.delete(self.MC_KEY % self.uid)
def update_basic(self, name, skype, alias, join_time):
store.execute("update me_card set `name`=%s, skype=%s, `alias`=%s, join_time=%s where"
" `user_id`=%s", (name, skype, alias, join_time, self.id))
store.commit()
doubanmc.delete(self.MC_KEY % self.id)
doubanmc.delete(self.MC_KEY % self.uid)
def update_profile(self, sex, love, zodiac, astro, birthday, marriage, province, hometown,
weibo, instagram, blog, code, github, resume, intro):
Profile.update(self.id, sex, love, zodiac, astro, birthday, marriage, province, hometown,
weibo, instagram, blog, code, github, resume, intro)
def update_photo(self, filename):
data = open(filename).read()
#print 'update photo old_id', old_id
if len(data) > MAX_SIZE:
return "too_large"
return self.update_photo_data(data)
def update_photo_id(self, photo_id):
store.execute("update me_card set `photo`=%s where `user_id`=%s", (photo_id, self.id))
store.commit()
doubanmc.delete(self.MC_KEY % self.id)
doubanmc.delete(self.MC_KEY % self.uid)
def update_photo_data(self, data):
success = False
old_id = self.photo_id
try:
new_id = old_id + 1
doubanfs.set("/me/card/%s/photo/%s/%s" % (self.id, new_id, Cate.ORIGIN), data)
from webapp.models.utils import scale
d = scale(data, Cate.LARGE, DEFAULT_CONFIG)
doubanfs.set("/me/card/%s/photo/%s/%s" % (self.id, new_id, Cate.LARGE), d)
print "update photo success photo_id=%s" % new_id
store.execute("update me_card set `photo`=%s where `user_id`=%s", (new_id, self.id))
store.commit()
success = True
except:
print "doubanfs write fail!!! %s" % self.id
self.photo_id = old_id
store.execute("update me_card set `photo`=`photo`-1 where `user_id`=%s", self.id)
store.commit()
doubanfs.delete("/me/card/%s/photo/%s/%s" % (self.id, new_id, Cate.LARGE))
doubanfs.delete("/me/card/%s/photo/%s/%s" % (self.id, new_id, Cate.ORIGIN))
print 'rollback photo to old_id', old_id
if success:
Notify.new(self.id, self.id, Notify.TYPE_CHANGE_PHOTO, extra={'photo_id':new_id})
print "send change photo blog"
from webapp.models.blog import Blog
Blog.new(self.id, Blog.TYPE_BLOG, Blog.BLOG_ICON, extra={'photo_id':new_id})
doubanmc.delete(self.MC_KEY % self.id)
doubanmc.delete(self.MC_KEY % self.uid)
def recreate_photo(self):
try:
data = doubanfs.get("/me/card/%s/photo/%s/%s" % (self.id, self.photo_id, Cate.ORIGIN))
d = scale(data, Cate.LARGE, DEFAULT_CONFIG)
doubanfs.set("/me/card/%s/photo/%s/%s" % (self.id, self.photo_id, Cate.LARGE), d)
except:
print "doubanfs write fail!!! %s" % self.id
@property
def photo(self):
if self.photo_id > 0:
return "/p/%s-%s-%s.jpg" % (self.id, self.photo_id, Cate.LARGE)
return ''
def origin_photo(self, photo_id):
if photo_id <= self.photo_id:
return "/p/%s-%s-%s.jpg" % (self.id, photo_id, Cate.ORIGIN)
return ''
def photo_url(self, photo_id):
if photo_id <= self.photo_id:
return "/p/%s-%s-%s.jpg" % (self.id, photo_id, Cate.LARGE)
return ''
def dynamic_photo(self, x, y, scale='center-crop', photo_id=0):
if photo_id < 1:
photo_id = self.photo_id
if photo_id > 0:
s = 'fs'
if scale == 'center-crop':
s = 'cc'
return "/p/%s-%s-r_%s_%sx%s.jpg" % (self.id, photo_id, s, x, y)
return ''
@property
def photo_urls(self):
return ["/p/%s-%s-%s.jpg" % (self.id, i, Cate.LARGE) for i in xrange(0, self.photo_id)]
@property
def like_num(self):
r = store.execute("select count(1) from me_like where user_id=%s", self.id)
if r and r[0]:
return r[0][0]
def likers(self):
rs = store.execute("select liker_id from me_like where user_id=%s", self.id)
cids = []
if rs:
cids = [str(r[0]) for r in rs]
return [User(id=i) for i in cids]
def is_liked(self, liker_id):
r = store.execute("select 1 from me_like where user_id=%s and liker_id=%s", (self.id, liker_id))
if r and r[0][0]:
return True
return False
@classmethod
def gets(cls, cate='', start=0, limit=20):
cids = []
if cate == 'photo':
r = store.execute("select count(1) user_id from me_card where photo>0 and flag=%s", cls.FLAG_NORMAL)
n = r and r[0][0]
rs = store.execute("select user_id from me_card where photo>0 and flag=%s order by rtime desc"
" limit %s, %s", (cls.FLAG_NORMAL, start, limit))
else:
r = store.execute("select count(1) user_id from me_card where flag=%s", cls.FLAG_NORMAL)
n = r and r[0][0]
rs = store.execute("select user_id from me_card where flag=%s order by rtime desc"
" limit %s, %s", (cls.FLAG_NORMAL, start, limit))
if rs:
cids = [str(r[0]) for r in rs]
return n, [cls.get(i) for i in cids]
@classmethod
def gets_by_time(cls, year='', start=0, limit=20):
n, cids = cls.gets_ids_by_time(year)
return n, [cls.get(i) for i in cids[start:start+limit]]
@classmethod
def gets_ids_by_time(cls, year=''):
cids = []
if not year:
r = store.execute("select count(1) from me_card where photo>0 and flag=%s", cls.FLAG_NORMAL)
n = r and r[0][0]
rs = store.execute("select user_id from me_card where flag=%s order by join_time desc", cls.FLAG_NORMAL)
else:
start = '%s-01-01 00:00:00' % year
end = '%s-12-31 23:00:00' % year
rs = store.execute("select user_id from me_card where photo>0 and flag=%s and"
" join_time > %s and join_time < %s order by join_time desc", (cls.FLAG_NORMAL, start, end))
n = len(rs)
if rs:
cids = [str(r[0]) for r in rs]
return n, cids
@classmethod
def gets_all(cls):
rs = store.execute("select user_id from me_card where join_time > 0 and flag=%s order by join_time", cls.FLAG_NORMAL)
cids = []
if rs:
cids = [str(r[0]) for r in rs]
return [cls.get(i) for i in cids]
@classmethod
def gets_by_astro(cls, astro):
astros = [r[1] for r in ASTROS]
index = astros.index(astro)
rs = store.execute("select user_id from me_profile where astro=%s", index)
return [cls.get(r[0]) for r in rs]
@classmethod
def gets_by_zodiac(cls, zodiac):
zodiacs = [r[1] for r in ZODIACS]
index = zodiacs.index(zodiac)
rs = store.execute("select user_id from me_profile where zodiac=%s", index)
return [cls.get(r[0]) for r in rs]
@classmethod
def gets_by_province(cls, province):
rs = store.execute("select user_id from me_profile where province=%s", province)
return [cls.get(r[0]) for r in rs]
@classmethod
def gets_by_hometown(cls, city):
rs = store.execute("select user_id from me_profile where hometown=%s", city)
return [cls.get(r[0]) for r in rs]
@classmethod
def gets_by_tag(cls, tag):
r = store.execute("select id from me_tag where name=%s", tag)
if r and r[0]:
tag_id = r[0][0]
if tag_id:
rs = store.execute("select distinct(user_id) from me_user_tag where tag_id=%s", tag_id)
return sorted([cls.get(r[0]) for r in rs if str(r[0]) not in CHART_BLACK_LIST_UIDS], key=attrgetter('score'), reverse=True)
return []
@classmethod
def gets_by_card(cls, card_id, start=0, limit=10):
r = store.execute("select count(1) user_id from me_like where liker_id=%s", card_id)
n = r and r[0][0]
rs = store.execute("select user_id from me_like where liker_id=%s"
" order by rtime desc limit %s, %s", (card_id, start, limit))
cids = []
if rs:
cids = [str(r[0]) for r in rs]
return n, [cls.get(i) for i in cids]
def like(self, liker_id):
store.execute("replace into me_like(user_id, liker_id) values(%s,%s)", (self.id, liker_id))
store.commit()
Notify.new(self.id, liker_id, Notify.TYPE_LIKE)
def tag(self, tagger_id, tags=[]):
from webapp.models.tag import Tag
Tag.tag(self.id, tagger_id, tags=tags)
@property
def tags(self):
from webapp.models.tag import Tag
return Tag.get_user_tag_names(self.id, self.id)
def user_tags(self, tagger_id):
from webapp.models.tag import Tag
return Tag.get_user_tag_names(self.id, tagger_id)
@property
def badages(self):
return Badage.gets_by_card(self.id)
@classmethod
def gets_by_badage(cls, badage_id):
rs = store.execute("select user_id from me_user_badage where badage_id=%s", badage_id)
cids = [str(r[0]) for r in rs]
return [cls.get(i) for i in cids]
@property
def ptags(self):
from webapp.models.tag import Tag
return Tag.get_user_tags(self.id)
@property
def ptag_names(self):
return [t.name for t in self.ptags]
@property
def comment_num(self):
r = store.execute("select count(1) from me_comment where user_id=%s", self.id)
if r and r[0]:
return r[0][0]
def comment(self, author_id, content):
store.execute("insert into me_comment(`user_id`,`author_id`,`content`)"
" values(%s,%s,%s)", (self.id, author_id, content));
store.commit()
cid = store.get_cursor(table="me_comment").lastrowid
Notify.new(self.id, author_id, Notify.TYPE_COMMENT, extra={"comment_id":cid})
if '@' in content:
from webapp.models.utils import mention_text
ret = mention_text(content)
for b, e, card_id, kind in ret['postions']:
Notify.new(card_id, author_id, Notify.TYPE_MENTION, extra={"card_id":self.id, "comment_id":cid})
@property
def comments(self):
rs = store.execute("select id, user_id, author_id, content, rtime"
" from me_comment where user_id=%s order by rtime", self.id)
return [Comment(*r) for r in rs]
@property
def questions(self):
return Question.gets_by_card(self.id)
@property
def answer_num(self):
return Answer.num_by_card(self.id)
@property
def notify_num(self):
r = store.execute("select count(1) from me_notify where user_id=%s"
" and flag=%s", (self.id, Notify.FLAG_NEW))
if r and r[0]:
return r[0][0]
@property
def notifications(self):
return Notify.gets(self.id)
def photo_data(self, id, cate=Cate.LARGE):
if not id:
id = self.photo_id
if id > 0:
return doubanfs.get("/me/card/%s/photo/%s/%s" % (self.id, id, cate))
@property
def score(self):
r = store.execute("select score from me_card where user_id=%s", self.id)
return r and r[0][0]
@property
def activities(self):
r = store.execute("select activities from me_card where user_id=%s", self.id)
return r and r[0][0]
@classmethod
def max_score(cls):
r = store.execute("select max(score) from me_card")
return r and r[0][0]
@classmethod
def max_activities(cls):
r = store.execute("select max(activities) from me_card")
return r and r[0][0]
@property
def percent_activities(self):
MAX = self.max_activities() or 100
return int(round(float(self.activities)/ MAX, 2)*100)
@property
def percent_score(self):
MAX = self.max_score() or 100
return int(round(float(self.score)/ MAX, 2)*100)
@classmethod
def calculate_score(cls, id):
d = cls.get(id)
c = 0
if d.email:
c = c + 2
if d.skype:
c = c + 2
if d.alias:
c = c + 2
if d.photo_id > 0:
c = c + 6
p = d.profile
sex = int(p.sex)
if sex == 1:
c = c + 2
elif sex == 2:
c = c + 4
love = int(p.love)
if 0 < love < 3:
c = c + 4
elif 3 <= love < 5:
c = c + 1
m = int(p.marriage)
if 0 < m < 4:
c = c + 3*sex
elif 4 <= m:
c = c + sex
if p.birthday:
if sex == 2:
now = datetime.now()
old = now.year - p.birthday.year
if old < 30:
c = c + (35 - old)*2
else:
c = c + 4
if p.zodiac:
c = c + 1
if p.astro:
c = c + 1
if p.province:
c = c + 1
if p.hometown:
c = c + 1
if p.weibo:
c = c + 1
if p.instagram:
c = c + 2
if p.code:
c = c + 1
if p.github:
c = c + 1
if p.resume:
c = c + 1
if p.intro:
c = c + 4
#print 'profile score=', c
now = datetime.now()
if d.join_time and isinstance(d.join_time, datetime) and d.join_time < now:
c = c + get_value_by_time(0, 100, d.join_time, 30, -0.2)
#print 'ctime score=', c
rs = store.execute("select rtime from me_like where user_id=%s", id)
for r in rs:
c = c + get_value_by_time(2, 2, r[0])
#print 'like score=', c
rs = store.execute("select rtime from me_comment where user_id=%s", id)
for r in rs:
c = c + get_value_by_time(3, 2, r[0])
#print 'comment score=', c
rs = store.execute("select rtime from me_user_tag where user_id=%s", id)
for r in rs:
c = c + get_value_by_time(3, 2, r[0])
#print 'tag score=', c
for tag in d.ptags:
t = tag.name
if sex == 1:
if t in ['少年', '萌', '闷骚', '帅', '傲娇', '四大萌神之一', '老师', '少男杀手', '少女杀手',
'单身', '小王子', '正太', '娘', 'gay', '音乐人', '骚年', '很萌']:
##print 'score add tag=', t, ' c=', c
c = c + 5
elif t in ['已婚', '小孩党', '车党']:
c = c + 2
elif sex == 2:
if t in ['妹子', '萝莉', '萌', '90s', '闷骚', '女神', '美女', '萌妹子', '傲娇', '少女', '实在太漂亮了',
'单身', '软妹纸', '温柔如水', '美女不解释', '仙女', '音乐人', '妹纸', '大萝莉', '美少女',
'小清新美女', '姐姐']:
##print 'score add tag=', t, ' c=', c
c = c + 6
elif t in ['已婚', '小孩党']:
c = c + 3
#print 'add tag score=', c
rs = store.execute("select rtime from me_blog, me_blog_like"
" where user_id=%s and id=blog_id", id)
for r in rs:
c = c + get_value_by_time(2, 3, r[0])
rs = store.execute("select rtime from me_blog as b, me_blog_comment as c"
" where user_id=%s and b.id=c.blog_id", id)
for r in rs:
c = c + get_value_by_time(3, 3, r[0])
#print 'add blog score=', c
#metion
rs = store.execute("select rtime from me_notify where user_id=%s"
" and ntype=%s", (id, Notify.TYPE_MENTION))
for r in rs:
c = c + get_value_by_time(2, 2, r[0])
rs = store.execute("select rtime from me_notify where user_id=%s"
" and ntype=%s", (id, Notify.TYPE_BLOG_MENTION))
for r in rs:
c = c + get_value_by_time(3, 2, r[0])
rs = store.execute("select rtime from me_notify where user_id=%s"
" and ntype=%s", (id, Notify.TYPE_BLOG_COMMENT_MENTION))
for r in rs:
c = c + get_value_by_time(3, 3, r[0])
#print 'mention score=', c
rs = store.execute("select rtime from me_notify where user_id=%s"
" and ntype=%s", (id, Notify.TYPE_AWARD_VOTED))
for r in rs:
c = c + get_value_by_time(5, 2, r[0])
#print 'vote score=', c
rs = store.execute("select rtime from me_notify where user_id=%s"
" and ntype=%s", (id, Notify.TYPE_CHANGE_PHOTO))
for r in rs:
c = c + get_value_by_time(2, 4, r[0])
#print 'update photo score=', c
rs = store.execute("select rtime from me_notify where user_id=%s"
" and ntype=%s", (id, Notify.TYPE_REQUEST_PHOTO))
for r in rs:
c = c + get_value_by_time(2, 2, r[0])
#print 'request photo score=', c
rs = store.execute("select c.rtime from me_event_photo as p, me_photo_comment as c"
" where p.author_id=%s and p.id=c.photo_id", id)
for r in rs:
c = c + get_value_by_time(2, 2, r[0])
rs = store.execute("select c.rtime from me_event_photo as p, me_photo_like as c"
" where p.author_id=%s and p.id=c.photo_id", id)
for r in rs:
c = c + get_value_by_time(2, 2, r[0])
rs = store.execute("select rtime from me_photo_tag where user_id=%s", id)
for r in rs:
c = c + get_value_by_time(2, 2, r[0])
return c
@classmethod
def gets_by_score(cls, limit=20):
rs = store.execute("select user_id from me_card where score > 0 and flag=%s"
" order by score desc limit %s", (cls.FLAG_NORMAL, limit))
return [cls.get(str(r[0])) for r in rs if str(r[0]) not in CHART_BLACK_LIST_UIDS]
def get_value_by_time(base, value, time, days=2, factor=-0.1):
    """Time-decayed contribution: the older *time* is, the less it adds.

    *days* is the length of one decay period; non-datetime (or falsy)
    *time* yields the undecayed *value*.
    """
    if not isinstance(time, datetime) or not time:
        return value
    age = datetime.now() - time
    return get_value_by_day(base, value, age.days / float(days), factor)

def get_value_by_day(base, value, day, factor=-0.1):
    """Exponential decay of *value* over *day* periods, offset by *base*."""
    decayed = base + value * math.exp(factor * day)
    return decayed
| liangsun/me | webapp/models/card.py | Python | mit | 28,108 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main command group for gcloud bigquery.
"""
import urlparse
from googlecloudsdk.api_lib.bigquery import bigquery
from googlecloudsdk.calliope import base
from googlecloudsdk.core import apis
from googlecloudsdk.core import cli
from googlecloudsdk.core import properties
from googlecloudsdk.core import resolvers
from googlecloudsdk.core import resources
from googlecloudsdk.core.credentials import store as c_store
SERVICE_NAME = 'bigquery'
BIGQUERY_MESSAGES_MODULE_KEY = 'bigquery-messages-module'
APITOOLS_CLIENT_KEY = 'bigquery-apitools-client'
BIGQUERY_REGISTRY_KEY = 'bigquery-registry'
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Bigquery(base.Group):
  """A group of commands for using BigQuery.
  """

  def Filter(self, context, args):
    """Initialize context for bigquery commands.

    Args:
      context: The current context.
      args: The argparse namespace that was specified on the CLI or API.

    Returns:
      None. The passed-in context dict is mutated in place (message
      module, apitools client and resource registry are stored under the
      module-level keys).
    """
    # Default the projectId resource parameter to the configured core project.
    resources.SetParamDefault(
        api='bigquery', collection=None, param='projectId',
        resolver=resolvers.FromProperty(properties.VALUES.core.project))

    # TODO(user): remove command dependence on these.
    context[BIGQUERY_MESSAGES_MODULE_KEY] = apis.GetMessagesModule(
        'bigquery', 'v2')
    context[APITOOLS_CLIENT_KEY] = apis.GetClientInstance(
        'bigquery', 'v2', http=self.Http())
    context[BIGQUERY_REGISTRY_KEY] = resources.REGISTRY

    # Inject bigquery backend params.
    bigquery.Bigquery.SetResourceParser(resources.REGISTRY)
    bigquery.Bigquery.SetApiEndpoint(
        self.Http(), properties.VALUES.api_endpoint_overrides.bigquery.Get())

  @staticmethod
  def Args(parser):
    """Register flags shared by every bigquery subcommand.

    Args:
      parser: The argparse parser for this command group.
    """
    parser.add_argument(
        '--fingerprint-job-id',
        action='store_true',
        help='Whether to use a job id that is derived from a fingerprint of '
        'the job configuration.')
| flgiordano/netcash | +/google-cloud-sdk/lib/surface/bigquery/__init__.py | Python | bsd-3-clause | 2,500 |
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
import os
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
    """
    Parse 'po' format produced by xgettext.
    Return a list of (msgid,msgstr) tuples.
    """
    entries = []
    current_id = []
    current_str = []
    state = None  # None (preamble), 'msgid' or 'msgstr'

    for raw in text.split('\n'):
        line = raw.rstrip('\r')
        if line.startswith('msgid '):
            # A new message begins; flush the previous one, if complete.
            if state == 'msgstr':
                entries.append((current_id, current_str))
            state = 'msgid'
            current_id = [line[6:]]
        elif line.startswith('msgstr '):
            state = 'msgstr'
            current_str = [line[7:]]
        elif line.startswith('"'):
            # Continuation line: belongs to whichever section is open.
            if state == 'msgid':
                current_id.append(line)
            elif state == 'msgstr':
                current_str.append(line)

    # Flush the trailing message, if the file ended inside a msgstr.
    if state == 'msgstr':
        entries.append((current_id, current_str))
    return entries
# Collect every C++ source and header that may contain _("...") strings.
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')

# xgettext -n --keyword=_ $FILES
XGETTEXT=os.getenv('XGETTEXT', 'xgettext')
# Run xgettext over all sources and capture the generated .po catalogue
# from its stdout.
child = Popen([XGETTEXT,'--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()

messages = parse_po(out)

# Emit the strings as a C++ array of QT_TRANSLATE_NOOP entries so Qt
# Linguist picks them up.
f = open(OUT_CPP, 'w')
f.write("""

#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
# Sort by msgid so regeneration is deterministic and diff-friendly.
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
    if msgid != EMPTY:
        f.write('QT_TRANSLATE_NOOP("bitcredit-core", %s),\n' % ('\n'.join(msgid)))
f.write('};\n')
f.close()
from flask_script import Command, Option
from openedoo import app
class GunicornServer(Command):
    """Flask-Script command that serves the openedoo app under Gunicorn."""

    help = "Start the Server with Gunicorn"

    # CLI options: bind host/port and the number of worker processes.
    option_list = (
        Option('-h', '--host', dest='host', default='127.0.0.1'),
        Option('-p', '--port', dest='port', type=int, default=5000),
        Option('-w', '--workers', dest='workers', type=int, default=3)
    )

    def run(self, host, port, workers):
        """Start the Server with Gunicorn"""
        # Imported lazily so the command module loads even when gunicorn
        # is not installed.
        from gunicorn.app.base import Application

        class FlaskApplication(Application):
            # Gunicorn Application hooks: init() supplies configuration
            # derived from the CLI options, load() returns the WSGI app.
            def init(self, parser, opts, args):
                return {
                    'bind': '{0}:{1}'.format(host, port),
                    'workers': workers
                }

            def load(self):
                return app

        application = FlaskApplication()
        return application.run()
| openedoo/openedoo | openedoo/template_conf/project_template/project_name/management/commands/gunicornserver.py | Python | mit | 875 |
import os
import redis
from mock import patch, MagicMock
from ...helpers import assert_raises, assert_equal, assert_not_equal
from tagalog.shipper.redis import RoundRobinConnectionPool,RedisShipper
from tagalog.shipper.shipper_error import ShipperError
class MockConnection(object):
    """Stand-in for a redis connection.

    Records the creating process id, counts disconnect() calls, and exposes
    any keyword arguments (e.g. ``name``) as plain attributes.
    """
    def __init__(self, **kwargs):
        self.pid = os.getpid()
        self.disconnected = 0
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
    def disconnect(self):
        # Only bookkeeping -- there is no real socket to close.
        self.disconnected = self.disconnected + 1
class TestRoundRobinConnectionPool(object):
    """Unit tests for RoundRobinConnectionPool.

    The pool is built over two connection patterns ('a' and 'b') using
    MockConnection, so tests can observe which pattern each returned
    connection came from and how often it was disconnected.
    """
    def setup(self):
        self.p = RoundRobinConnectionPool(patterns=[{'name': 'a'},
                                                    {'name': 'b'}],
                                          connection_class=MockConnection)
    def test_get_connection(self):
        # Connections must alternate between the configured patterns.
        a = self.p.get_connection('SET')
        b = self.p.get_connection('SET')
        c = self.p.get_connection('SET')
        assert_equal(a.name, 'a')
        assert_equal(b.name, 'b')
        assert_equal(c.name, 'a')
    def test_add_pattern(self):
        # A pattern added mid-cycle joins the rotation after the current ones.
        a = self.p.get_connection('SET')
        self.p.add_pattern({'name': 'c'})
        b = self.p.get_connection('SET')
        c = self.p.get_connection('SET')
        assert_equal(a.name, 'a')
        assert_equal(b.name, 'b')
        assert_equal(c.name, 'c')
    def test_remove_pattern(self):
        self.p.remove_pattern({'name': 'a'})
        a = self.p.get_connection('SET')
        b = self.p.get_connection('SET')
        assert_equal(a.name, 'b')
        assert_equal(b.name, 'b')
    def test_remove_pattern_disconnects(self):
        # Removing a pattern must disconnect its live connections.
        a1 = self.p.get_connection('SET')
        b1 = self.p.get_connection('SET')
        self.p.remove_pattern({'name': 'a'})
        assert_equal(a1.disconnected, 1)
    def test_remove_pattern_during_cycle_new_conn(self):
        a1 = self.p.get_connection('SET')
        b1 = self.p.get_connection('SET')
        self.p.remove_pattern({'name': 'a'})
        b2 = self.p.get_connection('SET')
        assert_equal(b2.name, 'b')
    def test_remove_earlier_pattern_during_cycle(self):
        a1 = self.p.get_connection('SET')
        self.p.remove_pattern({'name': 'a'})
        b1 = self.p.get_connection('SET')
        assert_equal(b1.name, 'b')
    def test_remove_next_pattern_during_cycle(self):
        self.p.add_pattern({'name': 'c'})
        a1 = self.p.get_connection('SET')
        self.p.remove_pattern({'name': 'b'})
        c1 = self.p.get_connection('SET')
        assert_equal(c1.name, 'c')
    def test_remove_next_pattern_at_end_during_cycle(self):
        # Removing the final pattern should wrap the cycle back to the start.
        self.p.add_pattern({'name': 'c'})
        a1 = self.p.get_connection('SET')
        b1 = self.p.get_connection('SET')
        self.p.remove_pattern({'name': 'c'})
        a2 = self.p.get_connection('SET')
        assert_equal(a2.name, 'a')
    def test_remove_pattern_during_cycle_released_conn(self):
        a1 = self.p.get_connection('SET')
        b1 = self.p.get_connection('SET')
        self.p.remove_pattern({'name': 'a'})
        self.p.release(b1)
        b2 = self.p.get_connection('SET')
        assert_equal(b1, b2)
    def test_release(self):
        # Released connections are handed out again rather than recreated.
        a1 = self.p.get_connection('SET')
        b1 = self.p.get_connection('SET')
        self.p.release(a1)
        self.p.release(b1)
        a2 = self.p.get_connection('SET')
        b2 = self.p.get_connection('SET')
        assert_equal(a1, a2)
        assert_equal(b1, b2)
    def test_purge_in_use(self):
        # Purging an in-use connection disconnects it and forces a fresh one.
        a1 = self.p.get_connection('SET')
        b1 = self.p.get_connection('SET')
        self.p.purge(a1)
        self.p.release(b1)
        a2 = self.p.get_connection('SET')
        b2 = self.p.get_connection('SET')
        assert_not_equal(a1, a2)
        assert_equal(b1, b2)
        assert_equal(a1.disconnected, 1)
    def test_purge_released(self):
        a1 = self.p.get_connection('SET')
        self.p.release(a1)
        self.p.purge(a1)
        self.p.get_connection('SET') # skip one
        a2 = self.p.get_connection('SET')
        assert_not_equal(a1, a2)
        assert_equal(a1.disconnected, 1)
    def test_disconnect(self):
        # disconnect() must reach both in-use and released connections.
        a1 = self.p.get_connection('SET')
        b1 = self.p.get_connection('SET')
        self.p.release(a1)
        self.p.release(b1)
        a2 = self.p.get_connection('SET')
        b2 = self.p.get_connection('SET')
        self.p.disconnect()
        assert_equal(a1.disconnected, 1)
        assert_equal(b1.disconnected, 1)
        assert_equal(a2.disconnected, 1)
        assert_equal(b2.disconnected, 1)
    def test_too_many_connections(self):
        # The per-pattern cap must raise once it is exhausted.
        p = RoundRobinConnectionPool(patterns=[{'name': 'a'},
                                               {'name': 'b'}],
                                     connection_class=MockConnection,
                                     max_connections_per_pattern=1)
        p.get_connection('SET')
        p.get_connection('SET')
        assert_raises(redis.ConnectionError, p.get_connection, 'SET')
class TestRedisShipper(object):
    """Unit tests for RedisShipper, with the redis client patched out."""
    def setup(self):
        self.urls = ["redis://foo", "redis://bar"]
    @patch('tagalog.shipper.redis.ResilientStrictRedis')
    def test_ship_writes_json_messages(self, redis_mock):
        # Default mode: each message is LPUSHed as a single JSON document.
        rs = RedisShipper(urls=self.urls, key='redis_key')
        rs.ship({'@message':'logLine'})
        redis_mock.return_value.lpush.assert_called_with('redis_key','{"@message": "logLine"}')
    @patch('tagalog.shipper.redis.ResilientStrictRedis')
    def test_ship_writes_elasticsearch_bulk_messages(self, redis_mock):
        rs = RedisShipper(urls=self.urls, key='redis_key', bulk=True)
        rs.ship({'@message':'logLine'})
        redis_mock.return_value.lpush.assert_called_once()
        call_args, _ = redis_mock.return_value.lpush.call_args
        assert_equal('redis_key', call_args[0])
        # Bulk format contains 2 lines per entry. Don't test the exact format as
        # that's covered already by test_formatter.
        assert_equal(2, len(call_args[1].splitlines()))
    @patch('tagalog.shipper.redis.ResilientStrictRedis')
    def test_ship_catches_connection_errors(self, redis_mock):
        # ship() must swallow transient redis failures rather than propagate.
        rs = RedisShipper(urls=self.urls)
        redis_mock.return_value.lpush.side_effect = redis.ConnectionError("Boom!")
        # should not raise:
        rs.ship({'@message':'foo'})
    @patch('tagalog.shipper.redis.ResilientStrictRedis')
    def test_ship_catches_response_errors(self, redis_mock):
        rs = RedisShipper(urls=self.urls)
        redis_mock.return_value.lpush.side_effect = redis.ResponseError("Boom!")
        # should not raise:
        rs.ship({'@message':'foo'})
| alphagov/tagalog | test/unit/shipper/test_redis.py | Python | mit | 6,620 |
import os
import shutil
import setup
from project_cron.utils import processutil
USER_ROOT = os.path.expanduser('~')
# Bundle name of the built macOS application.
APP_NAME = setup.APP_NAME + '.app'
# Versioned install root, e.g. /usr/local/bin/<app>/<version>-<build>/<app>.app
APP_ROOT = os.path.join('/usr/local/bin', setup.APP_NAME)
DST_PATH = os.path.join(APP_ROOT, '%s-%05X' % (setup.VERSION, setup.BUILD_NUMBER), APP_NAME)
# User-visible symlink pointing at the currently installed build.
SYMLINK_PATH = os.path.join(USER_ROOT, 'Applications', APP_NAME)
def update():
    """Build the app with py2app and install it under the versioned path.

    Side effects: may install terminal-notifier via Homebrew, upgrades pip
    requirements, replaces the ~/Applications symlink, and deletes the
    dist/ and build/ directories produced by py2app.
    """
    if not os.path.exists('/usr/local/bin/terminal-notifier'):
        processutil.call(['/usr/local/bin/brew', 'install', 'terminal-notifier'])
    command = ['pip3', 'install', '-r', 'requirements.txt', '-U']
    processutil.call(command)
    command = ['python3', 'setup.py', 'py2app']
    processutil.call(command)
    # Drop the old symlink before moving the fresh build into place.
    if os.path.exists(SYMLINK_PATH):
        os.remove(SYMLINK_PATH)
    shutil.move(os.path.abspath(os.path.join('dist', APP_NAME)), DST_PATH)
    shutil.rmtree('dist')
    shutil.rmtree('build')
    os.symlink(DST_PATH, SYMLINK_PATH)
def increase_build_number():
    """Rewrite setup.py in place, bumping BUILD_NUMBER by one.

    Only the line containing ``BUILD_NUMBER = `` is replaced with the value
    from the already-imported ``setup`` module plus one; every other line is
    copied through unchanged.
    """
    script_root = os.path.dirname(__file__)
    setup_path = os.path.join(script_root, 'setup.py')
    # Read everything first with a properly closed handle (the original
    # leaked the read descriptor), then rewrite the file.
    with open(setup_path) as source:
        lines = source.readlines()
    with open(setup_path, 'w') as target:
        for line in lines:
            if 'BUILD_NUMBER = ' in line:
                line = 'BUILD_NUMBER = %d\n' % (setup.BUILD_NUMBER + 1)
            target.write(line)
# Only build/install when run directly and this exact version-build has not
# been installed yet (DST_PATH encodes VERSION and BUILD_NUMBER).
if __name__ == "__main__" and not os.path.exists(DST_PATH):
    increase_build_number()
    update()
| ecleya/project_cron | update.py | Python | mit | 1,472 |
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nose.plugins.skip import SkipTest
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_false
from proboscis.asserts import assert_not_equal
from proboscis.asserts import assert_raises
from proboscis.asserts import assert_true
from proboscis import before_class
from proboscis import test
from troveclient.compat import exceptions
from trove import tests
from trove.tests.api.databases import TestMysqlAccess
from trove.tests.api.instances import instance_info
from trove.tests.api.users import TestUsers
from trove.tests import util
from trove.tests.util import test_config
GROUP = "dbaas.api.root"
@test(depends_on_classes=[TestMysqlAccess],
      runs_after=[TestUsers],
      groups=[tests.DBAAS_API, GROUP, tests.INSTANCES])
class TestRoot(object):
    """
    Test the root operations: enabling/disabling root on an instance,
    the root-enabled timestamp history, and protection of system users.
    The @test depends_on chains enforce a strict execution order.
    """
    root_enabled_timestamp = 'Never'
    system_users = ['root', 'debian_sys_maint']
    @before_class
    def setUp(self):
        self.dbaas = util.create_dbaas_client(instance_info.user)
        self.dbaas_admin = util.create_dbaas_client(instance_info.admin_user)
    def _verify_root_timestamp(self, id):
        # The management API's history record must match the cached timestamp.
        reh = self.dbaas_admin.management.root_enabled_history(id)
        timestamp = reh.enabled
        assert_equal(self.root_enabled_timestamp, timestamp)
        assert_equal(id, reh.id)
    def _root(self):
        # Enable root and refresh the cached enabled-timestamp.
        # NOTE(review): `global root_password` is declared but root_password
        # is never assigned here -- looks like dead code; confirm.
        global root_password
        self.dbaas.root.create(instance_info.id)
        assert_equal(200, self.dbaas.last_http_code)
        reh = self.dbaas_admin.management.root_enabled_history
        self.root_enabled_timestamp = reh(instance_info.id).enabled
    @test
    def test_root_initially_disabled(self):
        """Test that root is disabled."""
        enabled = self.dbaas.root.is_root_enabled(instance_info.id)
        assert_equal(200, self.dbaas.last_http_code)
        is_enabled = enabled
        if hasattr(enabled, 'rootEnabled'):
            is_enabled = enabled.rootEnabled
        assert_false(is_enabled, "Root SHOULD NOT be enabled.")
    @test
    def test_create_user_os_admin_failure(self):
        # os_admin is a reserved system user; creation must be rejected.
        users = [{"name": "os_admin", "password": "12345"}]
        assert_raises(exceptions.BadRequest, self.dbaas.users.create,
                      instance_info.id, users)
    @test
    def test_delete_user_os_admin_failure(self):
        # Deleting the reserved os_admin user must be rejected.
        assert_raises(exceptions.BadRequest, self.dbaas.users.delete,
                      instance_info.id, "os_admin")
    @test(depends_on=[test_root_initially_disabled],
          enabled=not test_config.values['root_removed_from_instance_api'])
    def test_root_initially_disabled_details(self):
        """Use instance details to test that root is disabled."""
        instance = self.dbaas.instances.get(instance_info.id)
        assert_true(hasattr(instance, 'rootEnabled'),
                    "Instance has no rootEnabled property.")
        assert_false(instance.rootEnabled, "Root SHOULD NOT be enabled.")
        assert_equal(self.root_enabled_timestamp, 'Never')
    @test(depends_on=[test_root_initially_disabled_details])
    def test_root_disabled_in_mgmt_api(self):
        """Verifies in the management api that the timestamp exists."""
        self._verify_root_timestamp(instance_info.id)
    @test(depends_on=[test_root_initially_disabled_details])
    def test_root_disable_when_root_not_enabled(self):
        # Disabling root before it was ever enabled must 404.
        reh = self.dbaas_admin.management.root_enabled_history
        self.root_enabled_timestamp = reh(instance_info.id).enabled
        assert_raises(exceptions.NotFound, self.dbaas.root.delete,
                      instance_info.id)
        self._verify_root_timestamp(instance_info.id)
    @test(depends_on=[test_root_disable_when_root_not_enabled])
    def test_enable_root(self):
        self._root()
    @test(depends_on=[test_enable_root])
    def test_enabled_timestamp(self):
        assert_not_equal(self.root_enabled_timestamp, 'Never')
    @test(depends_on=[test_enable_root])
    def test_root_not_in_users_list(self):
        """
        Tests that despite having enabled root, user root doesn't appear
        in the users list for the instance.
        """
        users = self.dbaas.users.list(instance_info.id)
        usernames = [user.name for user in users]
        assert_true('root' not in usernames)
    @test(depends_on=[test_enable_root])
    def test_root_now_enabled(self):
        """Test that root is now enabled."""
        enabled = self.dbaas.root.is_root_enabled(instance_info.id)
        assert_equal(200, self.dbaas.last_http_code)
        assert_true(enabled, "Root SHOULD be enabled.")
    @test(depends_on=[test_root_now_enabled],
          enabled=not test_config.values['root_removed_from_instance_api'])
    def test_root_now_enabled_details(self):
        """Use instance details to test that root is now enabled."""
        instance = self.dbaas.instances.get(instance_info.id)
        assert_true(hasattr(instance, 'rootEnabled'),
                    "Instance has no rootEnabled property.")
        assert_true(instance.rootEnabled, "Root SHOULD be enabled.")
        assert_not_equal(self.root_enabled_timestamp, 'Never')
        self._verify_root_timestamp(instance_info.id)
    @test(depends_on=[test_root_now_enabled_details])
    def test_reset_root(self):
        # Re-enabling root must not change the original enabled timestamp.
        if test_config.values['root_timestamp_disabled']:
            raise SkipTest("Enabled timestamp not enabled yet")
        old_ts = self.root_enabled_timestamp
        self._root()
        assert_not_equal(self.root_enabled_timestamp, 'Never')
        assert_equal(self.root_enabled_timestamp, old_ts)
    @test(depends_on=[test_reset_root])
    def test_root_still_enabled(self):
        """Test that after root was reset it's still enabled."""
        enabled = self.dbaas.root.is_root_enabled(instance_info.id)
        assert_equal(200, self.dbaas.last_http_code)
        assert_true(enabled, "Root SHOULD still be enabled.")
    @test(depends_on=[test_root_still_enabled],
          enabled=not test_config.values['root_removed_from_instance_api'])
    def test_root_still_enabled_details(self):
        """Use instance details to test that after root was reset,
        it's still enabled.
        """
        instance = self.dbaas.instances.get(instance_info.id)
        assert_true(hasattr(instance, 'rootEnabled'),
                    "Instance has no rootEnabled property.")
        assert_true(instance.rootEnabled, "Root SHOULD still be enabled.")
        assert_not_equal(self.root_enabled_timestamp, 'Never')
        self._verify_root_timestamp(instance_info.id)
    @test(depends_on=[test_enable_root])
    def test_root_cannot_be_deleted(self):
        """Even if root was enabled, the user root cannot be deleted."""
        assert_raises(exceptions.BadRequest, self.dbaas.users.delete,
                      instance_info.id, "root")
    @test(depends_on=[test_root_still_enabled_details])
    def test_root_disable(self):
        # Disabling root keeps the history timestamp intact.
        reh = self.dbaas_admin.management.root_enabled_history
        self.root_enabled_timestamp = reh(instance_info.id).enabled
        self.dbaas.root.delete(instance_info.id)
        assert_equal(200, self.dbaas.last_http_code)
        self._verify_root_timestamp(instance_info.id)
| mmasaki/trove | trove/tests/api/root.py | Python | apache-2.0 | 7,830 |
#!/usr/bin/env python
"""
================
sMRI: FreeSurfer
================
This script, smri_freesurfer.py, demonstrates the ability to call reconall on
a set of subjects and then make an average subject.
python smri_freesurfer.py
Import necessary modules from nipype.
"""
import os
import nipype.pipeline.engine as pe
import nipype.interfaces.io as nio
from nipype.interfaces.freesurfer.preprocess import ReconAll
from nipype.interfaces.freesurfer.utils import MakeAverageSubject
# Subjects to process and where their raw data lives.
subject_list = ['s1', 's3']
data_dir = os.path.abspath('data')
# NOTE(review): 'amri_freesurfer_tutorial' looks like a typo for
# 'smri_freesurfer_tutorial' (used consistently here) -- confirm.
subjects_dir = os.path.abspath('amri_freesurfer_tutorial/subjects_dir')
wf = pe.Workflow(name="l1workflow")
wf.base_dir = os.path.abspath('amri_freesurfer_tutorial/workdir')
"""
Grab data
"""
# DataGrabber resolves each subject's structural image from data_dir.
datasource = pe.MapNode(interface=nio.DataGrabber(infields=['subject_id'],
                                                  outfields=['struct']),
                        name='datasource',
                        iterfield=['subject_id'])
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = dict(struct=[['subject_id', 'struct']])
datasource.inputs.subject_id = subject_list
"""
Run recon-all
"""
# One recon-all run per subject, fed by the grabbed T1 images.
recon_all = pe.MapNode(interface=ReconAll(), name='recon_all',
                       iterfield=['subject_id', 'T1_files'])
recon_all.inputs.subject_id = subject_list
if not os.path.exists(subjects_dir):
    os.mkdir(subjects_dir)
recon_all.inputs.subjects_dir = subjects_dir
wf.connect(datasource, 'struct', recon_all, 'T1_files')
"""
Make average subject
"""
# Combine all reconstructed subjects into a single average subject.
average = pe.Node(interface=MakeAverageSubject(), name="average")
average.inputs.subjects_dir = subjects_dir
wf.connect(recon_all, 'subject_id', average, 'subjects_ids')
wf.run("MultiProc", plugin_args={'n_procs': 4})
| FredLoney/nipype | examples/smri_freesurfer.py | Python | bsd-3-clause | 1,804 |
from typing import Union
from .common import ScyllaManagerError, TaskStatus, HostStatus, HostSsl, HostRestStatus
from .cli import ScyllaManagerToolRedhatLike, ScyllaManagerToolNonRedhat, ManagerCluster
from .operator import ScyllaManagerToolOperator, OperatorManagerCluster
# Union of every concrete scylla-manager CLI wrapper this factory can return.
AnyManagerTool = Union[ScyllaManagerToolOperator, ScyllaManagerToolRedhatLike, ScyllaManagerToolNonRedhat]
# Union of the cluster handle types produced by those tools.
AnyManagerCluster = Union[OperatorManagerCluster, ManagerCluster]
def get_scylla_manager_tool(manager_node, scylla_cluster=None) -> AnyManagerTool:
    """Pick the manager-tool wrapper matching the node's platform.

    Kubernetes nodes get the operator tool, RHEL-like hosts the RedHat
    variant, and everything else the non-RedHat variant.
    """
    if manager_node.is_kubernetes():
        tool = ScyllaManagerToolOperator(manager_node=manager_node, scylla_cluster=scylla_cluster)
    elif manager_node.is_rhel_like():
        tool = ScyllaManagerToolRedhatLike(manager_node=manager_node)
    else:
        tool = ScyllaManagerToolNonRedhat(manager_node=manager_node)
    return tool
| scylladb/scylla-cluster-tests | sdcm/mgmt/__init__.py | Python | agpl-3.0 | 841 |
'''
deploy virtual router test environment, without reinstall zstack. Will redeploy
database.
@author: Youyk
'''
import os
import zstackwoodpecker.operations.deploy_operations as deploy_operations
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_util as test_util
USER_PATH = os.path.expanduser('~')
# Optional user-provided hook executed after the database is deployed.
EXTRA_SUITE_SETUP_SCRIPT = '%s/.zstackwoodpecker/extra_suite_setup_config.sh' % USER_PATH
def test():
    """Redeploy the zstack database (without reinstalling zstack) and run
    the optional extra setup script, then mark the suite setup as passed."""
    #If test execution machine is not the same one as Host machine, deploy work is needed to separated to 2 steps(deploy_test_agent, execute_plan_without_deploy_test_agent). And it can not directly call SetupAction.run()
    test_lib.setup_plan.deploy_db_without_reinstall_zstack()
    deploy_operations.deploy_initial_database(test_lib.deploy_config)
    if os.path.exists(EXTRA_SUITE_SETUP_SCRIPT):
        os.system("bash %s" % EXTRA_SUITE_SETUP_SCRIPT)
    test_util.test_pass('Suite Setup Success')
| zstackio/zstack-woodpecker | integrationtest/vm/vm_password/suite_setup_no_install_zs.py | Python | apache-2.0 | 962 |
#!/usr/bin/env python
'''
Description:
This is the handler for the Social Engineering Toolkit (SET) trying to overcome the limitations of set-automate
'''
from framework.dependency_management.dependency_resolver import BaseComponent
from framework.lib.general import *
import time
# Seconds to pause between consecutive SET scripts.
SCRIPT_DELAY = 2
class SpearPhishing(BaseComponent):
    """Drives the Social Engineering Toolkit through a sequence of
    pre-written .set scripts to run a spear-phishing campaign."""
    COMPONENT_NAME = "spear_phishing"
    def __init__(self, set):
        # `set` here is the SET handler component, not the builtin type.
        self.register_in_service_locator()
        self.config = self.get_component("config")
        self.error_handler = self.get_component("error_handler")
        self.set = set
    def Run(self, Args, PluginInfo):
        """Open SET, run each campaign script in order, and return the
        concatenated script output (empty string if Init fails)."""
        Output = ''
        if self.Init(Args):
            self.set.Open({
                'ConnectVia': self.config.GetResources('OpenSET')
                , 'InitialCommands': None
                , 'ExitMethod': Args['ISHELL_EXIT_METHOD']
                , 'CommandsBeforeExit': Args['ISHELL_COMMANDS_BEFORE_EXIT']
                , 'CommandsBeforeExitDelim': Args['ISHELL_COMMANDS_BEFORE_EXIT_DELIM']
            }, PluginInfo)
            if Args['PHISHING_CUSTOM_EXE_PAYLOAD_DIR']: # Prepend directory to payload
                Args['PHISHING_CUSTOM_EXE_PAYLOAD'] = Args['PHISHING_CUSTOM_EXE_PAYLOAD_DIR'] + "/" + Args[
                    'PHISHING_CUSTOM_EXE_PAYLOAD']
            for Script in self.GetSETScripts(Args):
                cprint("Running SET script: " + Script)
                Output += self.set.RunScript(Script, Args, Debug=False)
                cprint("Sleeping " + str(SCRIPT_DELAY) + " seconds..")
                time.sleep(int(SCRIPT_DELAY))
            # Output += self.set.RunScript(self.SETScript, Args, Debug=False)
            self.set.Close(PluginInfo)
        return Output
    def GetSETScripts(self, Args):
        # Scripts run in this fixed order: start, payload-specific, send.
        return [
            Args['PHISHING_SCRIPT_DIR'] + "/start_phishing.set"
            , Args['PHISHING_SCRIPT_DIR'] + "/payload_" + Args['PHISHING_PAYLOAD'] + ".set"
            , Args['PHISHING_SCRIPT_DIR'] + "/send_email_smtp.set"
        ]
    def InitPaths(self, Args):
        # Abort the whole framework run if any required file is missing.
        MandatoryPaths = self.config.GetAsList(
            ['TOOL_SET_DIR', '_PDF_TEMPLATE', '_WORD_TEMPLATE', '_EMAIL_TARGET'])
        if not PathsExist(MandatoryPaths) or not PathsExist(self.GetSETScripts(Args)):
            self.error_handler.FrameworkAbort("USER ERROR: Some mandatory paths were not found your filesystem", 'user')
            return False
        return True
    def Init(self, Args):
        if not self.InitPaths(Args):
            return False
        return True
| sharad1126/owtf | framework/wrappers/set/spear_phishing.py | Python | bsd-3-clause | 2,650 |
import pandas as pd
from requests import get
from StringIO import StringIO
from pandas.io.common import ZipFile
def get_movielens_data(local_file=None, get_genres=False):
    '''Downloads movielens data and stores it in pandas dataframe.

    If `local_file` is None the ml-1m zip is fetched over HTTP; otherwise
    `local_file` is used as the zip source.  Returns the ratings frame, or
    a (ratings, genres) tuple when `get_genres` is True.
    '''
    if not local_file:
        #print 'Downloading data...'
        zip_file_url = 'http://files.grouplens.org/datasets/movielens/ml-1m.zip'
        zip_response = get(zip_file_url)
        zip_contents = StringIO(zip_response.content)
        #print 'Done.'
    else:
        zip_contents = local_file
    #print 'Loading data into memory...'
    with ZipFile(zip_contents) as zfile:
        zip_files = pd.Series(zfile.namelist())
        zip_file = zip_files[zip_files.str.contains('ratings')].iat[0]
        zdata = zfile.read(zip_file)
        # The 'latest' datasets ship a header row; ml-1m does not.
        if 'latest' in zip_file:
            header = 0
        else:
            header = None
        delimiter = ','
        zdata = zdata.replace('::', delimiter) # makes data compatible with pandas c-engine
        ml_data = pd.read_csv(StringIO(zdata), sep=delimiter, header=header, engine='c',
                              names=['userid', 'movieid', 'rating', 'timestamp'],
                              usecols=['userid', 'movieid', 'rating'])
        if get_genres:
            zip_file = zip_files[zip_files.str.contains('movies')].iat[0]
            with zfile.open(zip_file) as zdata:
                if 'latest' in zip_file:
                    delimiter = ','
                else:
                    delimiter = '::'
                genres_data = pd.read_csv(zdata, sep=delimiter, header=header, engine='python',
                                          names=['movieid', 'movienm', 'genres'])
                ml_genres = split_genres(genres_data)
            ml_data = (ml_data, ml_genres)
    return ml_data
def split_genres(genres_data):
    """Explode the pipe-delimited ``genres`` column into one row per genre.

    Returns a frame with columns movieid, movienm, genreid where each input
    movie contributes one row per genre.  Note: names the input frame's
    index 'movie_idx' in place.
    """
    genres_data.index.name = 'movie_idx'
    exploded = genres_data.genres.str.split('|', expand=True).stack()
    genre_frame = exploded.to_frame('genreid')
    joined = genres_data[['movieid', 'movienm']].join(genre_frame)
    return joined.reset_index(drop=True)
def filter_short_head(data, threshold=0.01):
    """Return the movieids in the "long tail" of the popularity curve.

    Movies are ranked by number of distinct users; the most popular movies
    accounting for the first `threshold` fraction of positions are dropped
    and the remaining (long-tail) movieids are returned as an Index.
    """
    # Local import: `pd.np` was removed in pandas 2.0, so use numpy directly
    # without changing the module's import surface.
    import numpy as np
    short_head = data.groupby('movieid', sort=False)['userid'].nunique()
    short_head.sort_values(ascending=False, inplace=True)
    ratings_perc = short_head.cumsum() * 1.0 / short_head.sum()
    movies_perc = np.arange(1, len(short_head) + 1, dtype=np.float64) / len(short_head)
    long_tail_movies = ratings_perc[movies_perc > threshold].index
    return long_tail_movies
| Evfro/fifty-shades | polara/tools/movielens.py | Python | mit | 2,538 |
""" Module that takes a configuration and input signal from a JSON file
and calculates the output signal, saving it as a JSON file.
"""
import sys
import json
from pymongo import MongoClient
from graph import Node, Edge, Graph
from resistor import Resistor
from capacitor import Capacitor
from diode import Diode
from opamp import Opamp
from wire import Wire
from units import Units
from filter import Filter
from sound_handler import SoundHandler as SH
from config import Config
def make_filter(config):
    """ Returns a Filter object.

    Builds a circuit graph from the component lists in `config` (each entry
    is a coordinate list, with a value appended for valued components) and
    wraps it in a Filter anchored at the v_in/v_out nodes.
    """
    wires = config["wires"]
    resistors = config["resistors"]
    capacitors = config["capacitors"]
    inductors = config["inductors"]
    opamps = config["opamps"]
    grounds = config["grounds"]
    v_srcs = config["v_srcs"]
    v_ins = config["v_ins"]
    v_outs = config["v_outs"]
    # Memoizes one Node per grid coordinate so components share terminals.
    pt2node = {}
    graph = Graph()
    def get_node(pt, value=0, fixed=False, source=False, output=False):
        # Flags are only applied on first creation; later lookups reuse
        # the existing node regardless of the flags passed.
        if pt not in pt2node:
            s = "Node(graph"
            if value:
                s += ", value=True"
            if fixed:
                s += ", fixed=True"
            if source:
                s += ", source=True"
            if output:
                s += ", output=True"
            s += ") at {0}"
            print(s.format(pt))
            pt2node[pt] = Node(graph, value=value, fixed=fixed,
                               source=source, output=output)
        return pt2node[pt]
    # Special nodes have priority on creation
    # TODO cleanup
    vin_ = None
    vout_ = None
    for [x, y] in grounds:
        pt = (x, y)
        get_node(pt, value=0, fixed=True, source=True)
    for [x, y, v] in v_srcs:
        pt = (x, y)
        get_node(pt, value=v, fixed=True, source=True)
    for [x, y] in v_ins:
        pt = (x, y)
        vin_ = get_node(pt, fixed=True, source=True)
    for [x, y] in v_outs:
        pt = (x, y)
        vout_ = get_node(pt, output=True)
    # Other components
    for [x, y] in opamps:
        # Terminal offsets are fixed relative to the opamp's grid position.
        pt_minus = (x - 2, y - 1)
        pt_plus = (x - 2, y + 1)
        pt_out = (x + 1, y)
        node_minus = get_node(pt_minus)
        node_plus = get_node(pt_plus)
        node_out = get_node(pt_out, source=True)
        s = "Opamp(graph, node_a={0}, node_b={1}, node_out={2})"
        print(s.format(pt_minus, pt_plus, pt_out))
        opamp = Opamp(graph, node_a=node_minus, node_b=node_plus,
                      node_out=node_out)
        graph.add_component(opamp)
    for [x1, y1, x2, y2] in wires:
        pt1 = (x1, y1)
        pt2 = (x2, y2)
        node1 = get_node(pt1)
        node2 = get_node(pt2)
        #s = "Edge(graph, {0}, {1})"
        #print(s.format(pt1, pt2))
        edge = Edge(graph, node1, node2)
        s = "Wire(graph, {0}, {1}, (edge))"
        print(s.format(pt1, pt2))
        wire = Wire(graph, node1, node2, edge)
        graph.add_component(wire)
    for [x, y, v] in resistors:
        # Two-terminal parts span one grid unit either side of their center.
        pt1 = (x - 1, y)
        pt2 = (x + 1, y)
        node1 = get_node(pt1)
        node2 = get_node(pt2)
        #s = "Edge(graph, {0}, {1})"
        #print(s.format(pt1, pt2))
        edge = Edge(graph, node1, node2)
        s = "Resistor(graph, {0}, {1}, (edge))"
        print(s.format(pt1, pt2))
        resistor = Resistor(graph, v, node1, node2, edge)
        graph.add_component(resistor)
    for [x, y, v] in capacitors:
        pt1 = (x - 1, y)
        pt2 = (x + 1, y)
        node1 = get_node(pt1)
        node2 = get_node(pt2)
        #s = "Edge(graph, {0}, {1})"
        #print(s.format(pt1, pt2))
        edge = Edge(graph, node1, node2)
        s = "Capacitor(graph, {0}, {1}, (edge))"
        print(s.format(pt1, pt2))
        capacitor = Capacitor(graph, v, node1, node2, edge)
        graph.add_component(capacitor)
    for [x, y, v] in inductors:
        pt1 = (x - 1, y)
        pt2 = (x + 1, y)
        node1 = get_node(pt1)
        node2 = get_node(pt2)
        #s = "Edge(graph, {0}, {1})"
        #print(s.format(pt1, pt2))
        edge = Edge(graph, node1, node2)
        s = "Inductor(graph, {0}, {1}, (edge))"
        print(s.format(pt1, pt2))
        # NOTE(review): `Inductor` is never imported at the top of this file
        # (only Resistor/Capacitor/Diode/Opamp/Wire are), so any config with
        # a non-empty "inductors" list raises NameError here -- needs an
        # `from inductor import Inductor` style import to match the others.
        inductor = Inductor(graph, v, node1, node2, edge)
        graph.add_component(inductor)
    return Filter(graph, vin_, vout_)
def play(id_):
    """Load the session `id_` from MongoDB, run its circuit filter over the
    selected input sample, write the result as a WAV, and flag the session
    as done.  Assumes a local Meteor MongoDB on port 3001."""
    client = MongoClient("mongodb://127.0.0.1:3001/meteor")
    db = client["meteor"]
    match = db["sessions"].find_one(id_)
    config = match["config"]
    # Build filter
    filter_ = make_filter(config)
    samples = None
    with open(Config.samples_dir + "samples.json", "r") as f:
        samples = json.loads(f.read())["samples"]
    # The session's sample id indexes into the samples manifest;
    # entry[1] is the relative file path.
    sample_id = int(config["sample"])
    sample_fname = "../" + samples[sample_id][1]
    input_signal = SH.load(sample_fname, sampleperiod=Config.time_step, peak=1)
    output_signal = filter_.execute(input_signal)
    SH.save(output_signal, Config.output_dir + str(id_) + ".wav",
            bytespersample=2, peak=1)
    # Tell meteor it is done
    db["sessions"].update_one({ "_id" : id_ }, { "$set" : { "pydone": True }})
| ThatSnail/impede | impede-app/server/py/js_play.py | Python | mit | 5,067 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
    def forwards(self, orm):
        """Copy every ContentSticker onto a CommentSticker attached to a
        comment for the same content, creating an anonymous placeholder
        comment when the content has none."""
        for cs in orm.ContentSticker.objects.all():
            print cs,
            try:
                # NOTE(review): order_by('-timestamp')[:1] picks the *most
                # recent* comment despite the name `first_comment` -- confirm
                # that is intended.
                [first_comment] = cs.content.used_in_comments.order_by('-timestamp')[:1]
            except ValueError:
                # No comments exist for this content: unpacking the empty
                # slice raises ValueError, so synthesize an anonymous one.
                first_comment = orm.Comment(
                    anonymous=True,
                    reply_content=cs.content,
                    timestamp=cs.content.timestamp,
                )
                first_comment.save()
            # Idempotency guard: don't duplicate an existing CommentSticker.
            if not orm.CommentSticker.objects.filter(user=cs.user, comment=first_comment).exists():
                print 'making CommentSticker'
                orm.CommentSticker(
                    user=cs.user,
                    comment=first_comment,
                    timestamp=cs.timestamp,
                    type_id=cs.type_id,
                    ip=cs.ip,
                ).save()
            else:
                print 'existing CommentSticker found'
    def backwards(self, orm):
        """No-op: the CommentSticker copies made by forwards() are not removed."""
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.bestof': {
'Meta': {'object_name': 'BestOf'},
'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'content': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'best_of'", 'unique': 'True', 'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('django.db.models.fields.FloatField', [], {})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.FloatField', [], {}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('django.db.models.fields.FloatField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'remixes'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'used_as_stamp'", 'blank': 'True', 'to': "orm['canvas.Content']"}),
'timestamp': ('django.db.models.fields.FloatField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.contentsticker': {
'Meta': {'object_name': 'ContentSticker'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('django.db.models.fields.FloatField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'canvas.invitecode': {
'Meta': {'object_name': 'InviteCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_check': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'power_level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
| canvasnetworks/canvas | website/canvas/migrations/0062_copy_content_sticker_into_comment_sticker.py | Python | bsd-3-clause | 11,713 |
# P2P concurrency test cases
# Copyright (c) 2013-2015, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import logging
logger = logging.getLogger()
import subprocess
import time
import hwsim_utils
import hostapd
from p2p_utils import *
from test_ap_ht import clear_scan_cache
from utils import HwsimSkip
@remote_compatible
def test_concurrent_autogo(dev, apdev):
    """Concurrent P2P autonomous GO"""
    logger.info("Connect to an infrastructure AP")
    # Keep the infrastructure link and the P2P group separate.
    dev[0].request("P2P_SET cross_connect 0")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd)
    logger.info("Start a P2P group while associated to an AP")
    # Use a dedicated group interface so the station association survives
    # group setup and teardown.
    dev[0].global_request("SET p2p_no_group_iface 0")
    dev[0].p2p_start_go()
    pin = dev[1].wps_read_pin()
    dev[0].p2p_go_authorize_client(pin)
    dev[1].p2p_connect_group(dev[0].p2p_dev_addr(), pin, timeout=60,
                             social=True)
    hwsim_utils.test_connectivity_p2p(dev[0], dev[1])
    dev[0].remove_group()
    dev[1].wait_go_ending_session()
    logger.info("Confirm AP connection after P2P group removal")
    hwsim_utils.test_connectivity(dev[0], hapd)
def test_concurrent_autogo_5ghz_ht40(dev, apdev):
    """Concurrent P2P autonomous GO on 5 GHz and HT40 co-ex"""
    clear_scan_cache(apdev[1])
    try:
        hapd = None
        hapd2 = None
        # Second AP on channel 153 with HT40- so that HT40 co-ex scanning
        # is exercised when the GO comes up near it.
        params = {"ssid": "ht40",
                  "hw_mode": "a",
                  "channel": "153",
                  "country_code": "US",
                  "ht_capab": "[HT40-]"}
        hapd2 = hostapd.add_ap(apdev[1], params)
        params = {"ssid": "test-open-5",
                  "hw_mode": "a",
                  "channel": "149",
                  "country_code": "US"}
        hapd = hostapd.add_ap(apdev[0], params)
        dev[0].request("P2P_SET cross_connect 0")
        dev[0].scan_for_bss(apdev[0]['bssid'], freq=5745)
        dev[0].scan_for_bss(apdev[1]['bssid'], freq=5765)
        dev[0].connect("test-open-5", key_mgmt="NONE", scan_freq="5745")
        dev[0].global_request("SET p2p_no_group_iface 0")
        if "OK" not in dev[0].global_request("P2P_GROUP_ADD ht40"):
            raise Exception("P2P_GROUP_ADD failed")
        ev = dev[0].wait_global_event(["P2P-GROUP-STARTED"], timeout=5)
        if ev is None:
            raise Exception("GO start up timed out")
        dev[0].group_form_result(ev)
        pin = dev[1].wps_read_pin()
        dev[0].p2p_go_authorize_client(pin)
        dev[1].p2p_find(freq=5745)
        addr0 = dev[0].p2p_dev_addr()
        # Poll for up to ~2.5 seconds for dev[1] to discover the GO.
        count = 0
        while count < 10:
            time.sleep(0.25)
            count += 1
            if dev[1].peer_known(addr0):
                break
        dev[1].p2p_connect_group(addr0, pin, timeout=60)
        dev[0].remove_group()
        dev[1].wait_go_ending_session()
    finally:
        # Restore world-roaming regulatory state and clean scan results so
        # later test cases are not affected by the US/5 GHz setup.
        dev[0].request("REMOVE_NETWORK all")
        if hapd:
            hapd.request("DISABLE")
        if hapd2:
            hapd2.request("DISABLE")
        subprocess.call(['iw', 'reg', 'set', '00'])
        dev[0].flush_scan_cache()
        dev[1].flush_scan_cache()
def test_concurrent_autogo_crossconnect(dev, apdev):
    """Concurrent P2P autonomous GO with cross connection

    Verifies that P2P-CROSS-CONNECT-ENABLE/DISABLE events are reported with
    the expected group and uplink interface names, both when toggling cross
    connection while a group is active and when removing the group.
    """
    dev[0].global_request("P2P_SET cross_connect 1")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2412")
    dev[0].global_request("SET p2p_no_group_iface 0")
    dev[0].p2p_start_go(no_event_clear=True)
    # wait_global_event() expects a list of event prefixes (as used by the
    # other test cases in this file); a bare string would be iterated
    # character by character and match almost any event.
    ev = dev[0].wait_global_event(["P2P-CROSS-CONNECT-ENABLE"], timeout=10)
    if ev is None:
        raise Exception("Timeout on cross connection enabled event")
    if dev[0].group_ifname + " " + dev[0].ifname not in ev:
        raise Exception("Unexpected interfaces: " + ev)
    dev[0].dump_monitor()
    dev[0].global_request("P2P_SET cross_connect 0")
    ev = dev[0].wait_global_event(["P2P-CROSS-CONNECT-DISABLE"], timeout=10)
    if ev is None:
        raise Exception("Timeout on cross connection disabled event")
    if dev[0].group_ifname + " " + dev[0].ifname not in ev:
        raise Exception("Unexpected interfaces: " + ev)
    dev[0].remove_group()
    dev[0].global_request("P2P_SET cross_connect 1")
    dev[0].p2p_start_go(no_event_clear=True)
    ev = dev[0].wait_global_event(["P2P-CROSS-CONNECT-ENABLE"], timeout=10)
    if ev is None:
        raise Exception("Timeout on cross connection enabled event")
    if dev[0].group_ifname + " " + dev[0].ifname not in ev:
        raise Exception("Unexpected interfaces: " + ev)
    dev[0].dump_monitor()
    # Removing the group must implicitly disable cross connection.
    dev[0].remove_group()
    ev = dev[0].wait_global_event(["P2P-CROSS-CONNECT-DISABLE"], timeout=10)
    if ev is None:
        raise Exception("Timeout on cross connection disabled event")
    dev[0].global_request("P2P_SET cross_connect 0")
@remote_compatible
def test_concurrent_p2pcli(dev, apdev):
    """Concurrent P2P client join"""
    logger.info("Connect to an infrastructure AP")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd)
    logger.info("Join a P2P group while associated to an AP")
    # Dedicated group interface so the station association stays up while
    # dev[0] also acts as a P2P client.
    dev[0].global_request("SET p2p_no_group_iface 0")
    dev[1].p2p_start_go(freq=2412)
    pin = dev[0].wps_read_pin()
    dev[1].p2p_go_authorize_client(pin)
    dev[0].p2p_connect_group(dev[1].p2p_dev_addr(), pin, timeout=60,
                             social=True)
    hwsim_utils.test_connectivity_p2p(dev[0], dev[1])
    dev[1].remove_group()
    dev[0].wait_go_ending_session()
    logger.info("Confirm AP connection after P2P group removal")
    hwsim_utils.test_connectivity(dev[0], hapd)
@remote_compatible
def test_concurrent_grpform_go(dev, apdev):
    """Concurrent P2P group formation to become GO"""
    logger.info("Connect to an infrastructure AP")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd)
    logger.info("Form a P2P group while associated to an AP")
    dev[0].global_request("SET p2p_no_group_iface 0")
    # GO intent 15 vs 0 forces dev[0] (the associated station) to win the
    # GO negotiation and become group owner.
    [i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
                                           r_dev=dev[1], r_intent=0)
    check_grpform_results(i_res, r_res)
    remove_group(dev[0], dev[1])
    logger.info("Confirm AP connection after P2P group removal")
    hwsim_utils.test_connectivity(dev[0], hapd)
@remote_compatible
def test_concurrent_grpform_cli(dev, apdev):
    """Concurrent P2P group formation to become P2P Client"""
    logger.info("Connect to an infrastructure AP")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2412")
    hwsim_utils.test_connectivity(dev[0], hapd)
    logger.info("Form a P2P group while associated to an AP")
    dev[0].global_request("SET p2p_no_group_iface 0")
    # GO intent 0 vs 15 makes dev[0] (the associated station) end up as the
    # P2P client in the new group.
    [i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=0,
                                           r_dev=dev[1], r_intent=15)
    check_grpform_results(i_res, r_res)
    remove_group(dev[0], dev[1])
    logger.info("Confirm AP connection after P2P group removal")
    hwsim_utils.test_connectivity(dev[0], hapd)
@remote_compatible
def test_concurrent_grpform_while_connecting(dev, apdev):
    """Concurrent P2P group formation while connecting to an AP"""
    logger.info("Start connection to an infrastructure AP")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    # wait_connect=False: group formation below races with the still
    # in-progress station-mode connection.
    dev[0].connect("test-open", key_mgmt="NONE", wait_connect=False)
    logger.info("Form a P2P group while connecting to an AP")
    dev[0].global_request("SET p2p_no_group_iface 0")
    [i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_freq=2412,
                                           r_dev=dev[1], r_freq=2412)
    check_grpform_results(i_res, r_res)
    remove_group(dev[0], dev[1])
    logger.info("Confirm AP connection after P2P group removal")
    hwsim_utils.test_connectivity(dev[0], hapd)
@remote_compatible
def test_concurrent_grpform_while_connecting2(dev, apdev):
    """Concurrent P2P group formation while connecting to an AP (2)"""
    logger.info("Start connection to an infrastructure AP")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    # Station connection is left in progress while PBC group formation runs.
    dev[0].connect("test-open", key_mgmt="NONE", wait_connect=False)
    dev[1].flush_scan_cache()
    logger.info("Form a P2P group while connecting to an AP")
    dev[0].global_request("SET p2p_no_group_iface 0")
    [i_res, r_res] = go_neg_pbc(i_dev=dev[0], i_intent=15, i_freq=2412,
                                r_dev=dev[1], r_intent=0, r_freq=2412)
    check_grpform_results(i_res, r_res)
    remove_group(dev[0], dev[1])
    logger.info("Confirm AP connection after P2P group removal")
    dev[0].wait_completed()
    hwsim_utils.test_connectivity(dev[0], hapd)
@remote_compatible
def test_concurrent_grpform_while_connecting3(dev, apdev):
    """Concurrent P2P group formation while connecting to an AP (3)"""
    logger.info("Start connection to an infrastructure AP")
    hapd = hostapd.add_ap(apdev[0], {"ssid": "test-open"})
    dev[0].connect("test-open", key_mgmt="NONE", wait_connect=False)
    logger.info("Form a P2P group while connecting to an AP")
    dev[0].global_request("SET p2p_no_group_iface 0")
    # Same as the (2) variant, but with dev[1] initiating the negotiation
    # so dev[0] is the responder while its AP connection is in progress.
    [i_res, r_res] = go_neg_pbc(i_dev=dev[1], i_intent=15, i_freq=2412,
                                r_dev=dev[0], r_intent=0, r_freq=2412)
    check_grpform_results(i_res, r_res)
    remove_group(dev[0], dev[1])
    logger.info("Confirm AP connection after P2P group removal")
    dev[0].wait_completed()
    hwsim_utils.test_connectivity(dev[0], hapd)
@remote_compatible
def test_concurrent_persistent_group(dev, apdev):
    """Concurrent P2P persistent group"""
    logger.info("Connect to an infrastructure AP")
    hostapd.add_ap(apdev[0], {"ssid": "test-open", "channel": "2"})
    dev[0].global_request("SET p2p_no_group_iface 0")
    dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2417")
    logger.info("Run persistent group test while associated to an AP")
    form(dev[0], dev[1])
    # Re-invocation is expected to follow the infrastructure channel (2417)
    # to avoid multi-channel concurrency.
    [go_res, cli_res] = invite_from_cli(dev[0], dev[1])
    if go_res['freq'] != '2417':
        raise Exception("Unexpected channel selected: " + go_res['freq'])
    [go_res, cli_res] = invite_from_go(dev[0], dev[1])
    if go_res['freq'] != '2417':
        raise Exception("Unexpected channel selected: " + go_res['freq'])
def test_concurrent_invitation_channel_mismatch(dev, apdev):
    """P2P persistent group invitation and channel mismatch"""
    if dev[0].get_mcc() > 1:
        raise HwsimSkip("Skip due to MCC being enabled")
    form(dev[0], dev[1])
    dev[0].dump_monitor()
    dev[1].dump_monitor()
    logger.info("Connect to an infrastructure AP")
    hostapd.add_ap(apdev[0], {"ssid": "test-open", "channel": "2"})
    dev[0].global_request("SET p2p_no_group_iface 0")
    dev[0].connect("test-open", key_mgmt="NONE", scan_freq="2417")
    # Invite on 2412 while dev[0] is associated on 2417; without MCC this
    # has to be rejected with status=7 (no common channels).
    invite(dev[1], dev[0], extra="freq=2412")
    ev = dev[1].wait_global_event(["P2P-INVITATION-RESULT"], timeout=15)
    if ev is None:
        raise Exception("P2P invitation result not received")
    if "status=7" not in ev:
        raise Exception("Unexpected P2P invitation result: " + ev)
| s0lst1c3/eaphammer | local/hostapd-eaphammer/tests/hwsim/test_p2p_concurrency.py | Python | gpl-3.0 | 11,502 |
from som.interpreter.ast.nodes.message.abstract_node import AbstractMessageNode
from som.interpreter.ast.nodes.message.generic_node import (
UnarySend,
BinarySend,
TernarySend,
NArySend,
)
from som.interpreter.ast.nodes.specialized.down_to_do_node import (
IntDownToIntDoNode,
IntDownToDoubleDoNode,
)
from som.interpreter.ast.nodes.specialized.if_true_false import (
IfTrueIfFalseNode,
IfNode,
)
from som.interpreter.ast.nodes.specialized.to_by_do_node import (
IntToIntByDoNode,
IntToDoubleByDoNode,
)
from som.interpreter.ast.nodes.specialized.to_do_node import (
IntToIntDoNode,
IntToDoubleDoNode,
)
class UninitializedMessageNode(AbstractMessageNode):
    """Message-send AST node in its initial, unspecialized state.

    On first execution it rewrites itself in the AST (via ``replace``) into
    either a selector-specific specialized node (#to:do:, #downTo:do:,
    #ifTrue:ifFalse:, ...) or a generic send node of matching arity, and
    then executes that replacement with the already-evaluated values.
    """
    def execute(self, frame):
        rcvr, args = self._evaluate_rcvr_and_args(frame)
        # Self-modify the AST once, then run the replacement without
        # re-evaluating receiver or arguments.
        return self._specialize(frame, rcvr, args).execute_evaluated(frame, rcvr, args)
    def _specialize(self, _frame, rcvr, args):
        # Selector-specific specializations only apply to sends with
        # at least one argument.
        if args:
            for specialization in [
                IntToIntDoNode,
                IntToDoubleDoNode,
                IntToIntByDoNode,
                IntToDoubleByDoNode,
                IntDownToIntDoNode,
                IntDownToDoubleDoNode,
                IfTrueIfFalseNode,
                IfNode,
            ]:
                if specialization.can_specialize(self._selector, rcvr, args, self):
                    return specialization.specialize_node(
                        self._selector, rcvr, args, self
                    )
        # No specialization matched: fall back to a generic send node
        # chosen by arity (receiver counts as one argument).
        num_args = len(args) + 1
        if num_args == 1:
            node = UnarySend(
                self._selector, self.universe, self._rcvr_expr, self.source_section
            )
        elif num_args == 2:
            node = BinarySend(
                self._selector,
                self.universe,
                self._rcvr_expr,
                self._arg_exprs[0],
                self.source_section,
            )
        elif num_args == 3:
            node = TernarySend(
                self._selector,
                self.universe,
                self._rcvr_expr,
                self._arg_exprs[0],
                self._arg_exprs[1],
                self.source_section,
            )
        else:
            node = NArySend(
                self._selector,
                self.universe,
                self._rcvr_expr,
                self._arg_exprs,
                self.source_section,
            )
        return self.replace(node)
| SOM-st/PySOM | src/som/interpreter/ast/nodes/message/uninitialized_node.py | Python | mit | 2,458 |
#
#
#
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
sys.path += (sys.path[0] + '/../lib',)
import hidapi
from logitech.unifying_receiver.base import DEVICE_UNIFYING_RECEIVER
from logitech.unifying_receiver.base import DEVICE_UNIFYING_RECEIVER_2
from logitech.unifying_receiver.base import DEVICE_NANO_RECEIVER
def print_event(action, device):
    """Log a receiver hot-plug notification to stdout."""
    message = "~~~~ device [%s] %s" % (action, device)
    print(message)
# Block forever, invoking print_event() whenever one of the known Logitech
# receivers (Unifying or Nano) is connected or disconnected.
hidapi.monitor(print_event,
			DEVICE_UNIFYING_RECEIVER,
			DEVICE_UNIFYING_RECEIVER_2,
			DEVICE_NANO_RECEIVER
		)
| marcbelmont/Solaar | tools/monitor.py | Python | gpl-2.0 | 559 |
# -*- coding: utf-8 -*-
"""
tclient
~~~~~~~~
:copyright: (c) 2013 by Rhett Garber.
:license: ISC, see LICENSE for more details.
"""
# Package metadata, consumed by setup.py and introspection tools.
__title__ = 'tclient'
__version__ = '0.0.4'
__description__ = 'HTTP client for parallel http requests'
__url__ = 'https://github.com/rhettg/tclient'
__build__ = 0
__author__ = 'Rhett Garber'
__author_email__ = 'rhettg@gmail.com'
__license__ = 'ISC'
__copyright__ = 'Copyright 2013 Rhett Garber'
# Public API re-exports.
from .core import fetch_all
from .core import fetch
from .request import Request
from .utils import segment_requests
# flake8: noqa
| rhettg/tclient | tclient/__init__.py | Python | isc | 566 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-10-19 16:10:19
from .result_worker import ResultWorker
| ubear/Pyspider | pyspider/result/__init__.py | Python | apache-2.0 | 225 |
# Copyright 2014 Tesora, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for working with WSGI servers."""
from __future__ import print_function
import eventlet
eventlet.patcher.monkey_patch(all=False, socket=True)
import datetime
import errno
import socket
import sys
import time
import eventlet.wsgi
from oslo_config import cfg
from oslo_log import log as logging
from oslo_log import loggers
from oslo_serialization import jsonutils
from oslo_service import service
from oslo_service import sslutils
import routes
import routes.middleware
import webob.dec
import webob.exc
from xml.dom import minidom
from xml.parsers import expat
from trove.common import base_exception
from trove.common.i18n import _
from trove.common import xmlutils
socket_opts = [
cfg.IntOpt('backlog',
default=4096,
help="Number of backlog requests to configure the socket with"),
cfg.IntOpt('tcp_keepidle',
default=600,
help="Sets the value of TCP_KEEPIDLE in seconds for each "
"server socket. Not supported on OS X."),
]
CONF = cfg.CONF
CONF.register_opts(socket_opts)
LOG = logging.getLogger(__name__)
def run_server(application, port, **kwargs):
    """Run a WSGI server with the given application.

    Blocks forever serving ``application`` on 0.0.0.0:``port``; extra
    keyword arguments are passed through to ``eventlet.wsgi.server()``.
    """
    sock = eventlet.listen(('0.0.0.0', port))
    eventlet.wsgi.server(sock, application, **kwargs)
class Service(service.Service):
    """
    Provides a Service API for wsgi servers.
    This gives us the ability to launch wsgi servers with the
    Launcher classes in oslo_service.service.py.
    """
    def __init__(self, application, port,
                 host='0.0.0.0', backlog=4096, threads=1000):
        self.application = application
        self._port = port
        self._host = host
        # Fall back to the configured backlog when a falsy value is passed.
        self._backlog = backlog if backlog else CONF.backlog
        self._socket = self._get_socket(host, port, self._backlog)
        super(Service, self).__init__(threads)
    def _get_socket(self, host, port, backlog):
        """Create, and optionally SSL-wrap, the listening socket.

        Retries binding for up to 30 seconds to tolerate a previous
        listener's address still being in use, then raises RuntimeError.
        """
        # TODO(dims): eventlet's green dns/socket module does not actually
        # support IPv6 in getaddrinfo(). We need to get around this in the
        # future or monitor upstream for a fix
        info = socket.getaddrinfo(host,
                                  port,
                                  socket.AF_UNSPEC,
                                  socket.SOCK_STREAM)[0]
        family = info[0]
        bind_addr = info[-1]
        sock = None
        retry_until = time.time() + 30
        while not sock and time.time() < retry_until:
            try:
                sock = eventlet.listen(bind_addr,
                                       backlog=backlog,
                                       family=family)
                if sslutils.is_enabled(CONF):
                    sock = sslutils.wrap(CONF, sock)
            except socket.error as err:
                # Only EADDRINUSE is retried; anything else is fatal.
                if err.args[0] != errno.EADDRINUSE:
                    raise
                eventlet.sleep(0.1)
        if not sock:
            raise RuntimeError(_("Could not bind to %(host)s:%(port)s "
                                 "after trying for 30 seconds") %
                               {'host': host, 'port': port})
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # sockets can hang around forever without keepalive
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
        # This option isn't available in the OS X version of eventlet
        if hasattr(socket, 'TCP_KEEPIDLE'):
            sock.setsockopt(socket.IPPROTO_TCP,
                            socket.TCP_KEEPIDLE,
                            CONF.tcp_keepidle)
        return sock
    def start(self):
        """Start serving this service using the provided server instance.
        :returns: None
        """
        super(Service, self).start()
        self.tg.add_thread(self._run, self.application, self._socket)
    @property
    def backlog(self):
        # Effective listen backlog for the bound socket.
        return self._backlog
    @property
    def host(self):
        # Prefer the bound socket's actual address over the requested one.
        return self._socket.getsockname()[0] if self._socket else self._host
    @property
    def port(self):
        # Prefer the bound socket's actual port (covers ephemeral binds).
        return self._socket.getsockname()[1] if self._socket else self._port
    def stop(self):
        """Stop serving this API.
        :returns: None
        """
        super(Service, self).stop()
    def _run(self, application, socket):
        """Start a WSGI server in a new green thread."""
        logger = logging.getLogger('eventlet.wsgi')
        eventlet.wsgi.server(socket,
                             application,
                             custom_pool=self.tg.pool,
                             log=loggers.WritableLogger(logger))
class Middleware(object):
    """Base WSGI middleware wrapper.

    Subclasses are constructed around the next application in the stack and
    may intercept the incoming request (``process_request``) and/or
    post-process the outgoing response (``process_response``). By default
    the wrapped application is simply invoked.
    """

    def __init__(self, application):
        self.application = application

    def process_request(self, req):
        """Hook called on each request.

        Return ``None`` to continue down the stack, or a response object to
        short-circuit processing and return it immediately.
        """
        return None

    def process_response(self, response):
        """Hook for mutating or replacing the wrapped app's response."""
        return response

    @webob.dec.wsgify
    def __call__(self, req):
        short_circuit = self.process_request(req)
        if short_circuit:
            return short_circuit
        return self.process_response(req.get_response(self.application))
class Debug(Middleware):
    """
    Helper class that can be inserted into any WSGI application chain
    to get information about the request and response.
    """

    @webob.dec.wsgify
    def __call__(self, req):
        """Dump the request environ, response headers and body to stdout."""
        print(("*" * 40) + " REQUEST ENVIRON")
        for key, value in req.environ.items():
            print(key, "=", value)
        print()
        resp = req.get_response(self.application)

        print(("*" * 40) + " RESPONSE HEADERS")
        # Use items() rather than the Python 2-only iteritems() for
        # consistency with the environ loop above and Python 3 support.
        for (key, value) in resp.headers.items():
            print(key, "=", value)
        print()

        resp.app_iter = self.print_generator(resp.app_iter)
        return resp

    @staticmethod
    def print_generator(app_iter):
        """
        Iterator that prints the contents of a wrapper string iterator
        when iterated.
        """
        print(("*" * 40) + " BODY")
        for part in app_iter:
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print()
class Router(object):
    """
    WSGI middleware that maps incoming requests to WSGI apps.
    """
    def __init__(self, mapper):
        """
        Create a router for the given routes.Mapper.
        Each route in `mapper` must specify a 'controller', which is a
        WSGI app to call. You'll probably want to specify an 'action' as
        well and have your controller be a wsgi.Controller, who will route
        the request to the action method.
        Examples:
          mapper = routes.Mapper()
          sc = ServerController()
          # Explicit mapping of one route to a controller+action
          mapper.connect(None, "/svrlist", controller=sc, action="list")
          # Actions are all implicitly defined
          mapper.resource("server", "servers", controller=sc)
          # Pointing to an arbitrary WSGI app.  You can specify the
          # {path_info:.*} parameter so the target app can be handed just that
          # section of the URL.
          mapper.connect(None, "/v1.0/{path_info:.*}", controller=BlogApp())
        """
        self.map = mapper
        # RoutesMiddleware matches the URL against self.map, stores the
        # match in the WSGI environ, and then calls self._dispatch.
        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                          self.map)
    @webob.dec.wsgify
    def __call__(self, req):
        """
        Route the incoming request to a controller based on self.map.
        If no match, return a 404.
        """
        # Returning the router app makes webob invoke it for this request.
        return self._router
    @staticmethod
    @webob.dec.wsgify
    def _dispatch(req):
        """
        Called by self._router after matching the incoming request to a route
        and putting the information into req.environ. Either returns 404
        or the routed WSGI app's response.
        """
        match = req.environ['wsgiorg.routing_args'][1]
        if not match:
            return webob.exc.HTTPNotFound()
        app = match['controller']
        return app
class Request(webob.Request):
    """Add some OpenStack API-specific logic to the base webob.Request."""

    default_request_content_types = ('application/json', 'application/xml')
    default_accept_types = ('application/json', 'application/xml')
    default_accept_type = 'application/json'

    def best_match_content_type(self, supported_content_types=None):
        """Determine the requested response content-type.

        A trailing path extension (e.g. ``.json``) takes precedence over
        the Accept header; falls back to ``default_accept_type`` when
        neither yields a supported type.
        """
        supported = (supported_content_types or
                     self.default_accept_types)
        # Check for an explicit format suffix on the URL path first.
        pieces = self.path.rsplit('.', 1)
        if len(pieces) > 1:
            candidate = 'application/{0}'.format(pieces[1])
            if candidate in supported:
                return candidate
        best = self.accept.best_match(supported)
        return best or self.default_accept_type

    def get_content_type(self, allowed_content_types=None):
        """Determine content type of the request body.

        Does not do any body introspection, only checks the header.
        Raises InvalidContentType for unsupported types.
        """
        if "Content-Type" not in self.headers:
            return None
        allowed = (allowed_content_types or
                   self.default_request_content_types)
        ctype = self.content_type
        if ctype not in allowed:
            raise base_exception.InvalidContentType(content_type=ctype)
        return ctype
class Resource(object):
    """
    WSGI app that handles (de)serialization and controller dispatch.
    Reads routing information supplied by RoutesMiddleware and calls
    the requested action method upon its deserializer, controller,
    and serializer. Those three objects may implement any of the basic
    controller action methods (create, update, show, index, delete)
    along with any that may be specified in the api router. A 'default'
    method may also be implemented to be used in place of any
    non-implemented actions. Deserializer methods must accept a request
    argument and return a dictionary. Controller methods must accept a
    request argument. Additionally, they must also accept keyword
    arguments that represent the keys returned by the Deserializer. They
    may raise a webob.exc exception or return a dict, which will be
    serialized by requested content type.
    """
    def __init__(self, controller, deserializer=None, serializer=None):
        """
        :param controller: object that implement methods created by routes lib
        :param deserializer: object that supports webob request deserialization
                             through controller-like actions
        :param serializer: object that supports webob response serialization
                           through controller-like actions
        """
        self.controller = controller
        self.serializer = serializer or ResponseSerializer()
        self.deserializer = deserializer or RequestDeserializer()
    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, request):
        """WSGI method that controls (de)serialization and method dispatch."""
        try:
            action, action_args, accept = self.deserialize_request(request)
        except base_exception.InvalidContentType:
            msg = _("Unsupported Content-Type")
            return webob.exc.HTTPUnsupportedMediaType(explanation=msg)
        except base_exception.MalformedRequestBody:
            msg = _("Malformed request body")
            return webob.exc.HTTPBadRequest(explanation=msg)
        action_result = self.execute_action(action, request, **action_args)
        try:
            return self.serialize_response(action, action_result, accept)
        # return unserializable result (typically a webob exc)
        except Exception:
            return action_result
    def deserialize_request(self, request):
        # Returns an (action, action_args, accept) triple for the request.
        return self.deserializer.deserialize(request)
    def serialize_response(self, action, action_result, accept):
        # Serialize the controller result per the negotiated content type.
        return self.serialize.serialize(action_result, accept, action) if False else self.serializer.serialize(action_result, accept, action)
    def execute_action(self, action, request, **action_args):
        return self.dispatch(self.controller, action, request, **action_args)
    def dispatch(self, obj, action, *args, **kwargs):
        """Find action-specific method on self and call it."""
        try:
            method = getattr(obj, action)
        except AttributeError:
            # Fall back to the controller's 'default' handler; if that is
            # missing too, the AttributeError propagates to the caller.
            method = getattr(obj, 'default')
        return method(*args, **kwargs)
    def get_action_args(self, request_environment):
        """Parse dictionary created by routes library."""
        try:
            args = request_environment['wsgiorg.routing_args'][1].copy()
        except Exception:
            return {}
        # 'controller' and 'format' are routing artifacts, not action args.
        try:
            del args['controller']
        except KeyError:
            pass
        try:
            del args['format']
        except KeyError:
            pass
        return args
class ActionDispatcher(object):
    """Maps method name to local methods through action name."""

    def dispatch(self, *args, **kwargs):
        """Invoke the method named by the 'action' keyword argument.

        Falls back to :meth:`default` when no matching method exists.
        """
        action_name = str(kwargs.pop('action', 'default'))
        handler = getattr(self, action_name, self.default)
        return handler(*args, **kwargs)

    def default(self, data):
        raise NotImplementedError()
class DictSerializer(ActionDispatcher):
    """Default request body serialization."""

    def serialize(self, data, action='default'):
        """Serialize *data* using the handler registered for *action*."""
        return self.dispatch(data, action=action)

    def default(self, data):
        # Fallback handler: serialize to an empty body.
        return ""
class JSONDictSerializer(DictSerializer):
    """Default JSON request body serialization."""

    def default(self, data):
        """Serialize *data* to JSON, rendering datetimes as ISO 8601."""
        def sanitizer(obj):
            # Drop sub-second precision before formatting datetimes.
            if isinstance(obj, datetime.datetime):
                trimmed = obj - datetime.timedelta(
                    microseconds=obj.microsecond)
                return trimmed.isoformat()
            return obj
        return jsonutils.dumps(data, default=sanitizer)
class XMLDictSerializer(DictSerializer):
    """Serializes a dict (with a single root key) to an XML document,
    driven by optional metadata describing collections and attributes.
    """
    def __init__(self, metadata=None, xmlns=None):
        """
        :param metadata: information needed to deserialize xml into
                         a dictionary.
        :param xmlns: XML namespace to include with serialized xml
        """
        super(XMLDictSerializer, self).__init__()
        self.metadata = metadata or {}
        self.xmlns = xmlns
    def default(self, data):
        # We expect data to contain a single key which is the XML root.
        root_key = list(data.keys())[0]
        doc = minidom.Document()
        node = self._to_xml_node(doc, self.metadata, root_key, data[root_key])
        return self.to_xml_string(node)
    def to_xml_string(self, node, has_atom=False):
        self._add_xmlns(node, has_atom)
        return node.toprettyxml(indent='    ', encoding='UTF-8')
    # NOTE (ameade): the has_atom should be removed after all of the
    # xml serializers and view builders have been updated to the current
    # spec that required all responses include the xmlns:atom, the has_atom
    # flag is to prevent current tests from breaking
    def _add_xmlns(self, node, has_atom=False):
        if self.xmlns is not None:
            node.setAttribute('xmlns', self.xmlns)
        if has_atom:
            node.setAttribute('xmlns:atom', "http://www.w3.org/2005/Atom")
    def _to_xml_node(self, doc, metadata, nodename, data):
        """Recursive method to convert data members to XML nodes."""
        result = doc.createElement(nodename)
        # Set the xml namespace if one is specified
        # TODO(justinsb): We could also use prefixes on the keys
        xmlns = metadata.get('xmlns', None)
        if xmlns:
            result.setAttribute('xmlns', xmlns)
        # TODO(bcwaldon): accomplish this without a type-check
        if type(data) is list:
            collections = metadata.get('list_collections', {})
            if nodename in collections:
                # Metadata-described list: each item becomes an element
                # whose value is stored in an attribute.
                metadata = collections[nodename]
                for item in data:
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(item))
                    result.appendChild(node)
                return result
            # Otherwise derive a singular child element name: explicit
            # 'plurals' mapping, strip a trailing 's', or fall back to 'item'.
            singular = metadata.get('plurals', {}).get(nodename, None)
            if singular is None:
                if nodename.endswith('s'):
                    singular = nodename[:-1]
                else:
                    singular = 'item'
            for item in data:
                node = self._to_xml_node(doc, metadata, singular, item)
                result.appendChild(node)
        # TODO(bcwaldon): accomplish this without a type-check
        elif type(data) is dict:
            collections = metadata.get('dict_collections', {})
            if nodename in collections:
                # Metadata-described dict: key goes into an attribute,
                # value into the element's text content.
                metadata = collections[nodename]
                for k, v in data.items():
                    node = doc.createElement(metadata['item_name'])
                    node.setAttribute(metadata['item_key'], str(k))
                    text = doc.createTextNode(str(v))
                    node.appendChild(text)
                    result.appendChild(node)
                return result
            # Keys listed under 'attributes' become XML attributes; all
            # other keys recurse into child elements.
            attrs = metadata.get('attributes', {}).get(nodename, {})
            for k, v in data.items():
                if k in attrs:
                    result.setAttribute(k, str(v))
                else:
                    node = self._to_xml_node(doc, metadata, k, v)
                    result.appendChild(node)
        else:
            # Type is atom
            node = doc.createTextNode(str(data))
            result.appendChild(node)
        return result
    def _create_link_nodes(self, xml_doc, links):
        # Build <atom:link rel=... href=... [type=...]> elements.
        link_nodes = []
        for link in links:
            link_node = xml_doc.createElement('atom:link')
            link_node.setAttribute('rel', link['rel'])
            link_node.setAttribute('href', link['href'])
            if 'type' in link:
                link_node.setAttribute('type', link['type'])
            link_nodes.append(link_node)
        return link_nodes
class ResponseHeadersSerializer(ActionDispatcher):
    """Default response headers serialization."""

    def serialize(self, response, data, action):
        """Populate *response* headers, dispatching on *action*."""
        self.dispatch(response, data, action=action)

    def default(self, response, data):
        # Without an action-specific handler, report plain success.
        response.status_int = 200
class ResponseSerializer(object):
    """Encode the necessary pieces into a response object."""

    def __init__(self, body_serializers=None, headers_serializer=None):
        # Built-in serializers; callers may override or extend them.
        serializers = {
            'application/xml': XMLDictSerializer(),
            'application/json': JSONDictSerializer(),
        }
        serializers.update(body_serializers or {})
        self.body_serializers = serializers

        if headers_serializer is None:
            headers_serializer = ResponseHeadersSerializer()
        self.headers_serializer = headers_serializer

    def serialize(self, response_data, content_type, action='default'):
        """Serialize a dict into a string and wrap in a wsgi.Request object.

        :param response_data: dict produced by the Controller
        :param content_type: expected mimetype of serialized response body
        """
        response = webob.Response()
        self.serialize_headers(response, response_data, action)
        self.serialize_body(response, response_data, content_type, action)
        return response

    def serialize_headers(self, response, data, action):
        """Delegate header population to the headers serializer."""
        self.headers_serializer.serialize(response, data, action)

    def serialize_body(self, response, data, content_type, action):
        """Set the Content-Type header and, when data is given, the body."""
        response.headers['Content-Type'] = content_type
        if data is None:
            return
        serializer = self.get_body_serializer(content_type)
        response.body = serializer.serialize(data, action)

    def get_body_serializer(self, content_type):
        """Return the body serializer registered for *content_type*.

        :raises InvalidContentType: if no serializer is registered
            (TypeError covers unhashable content types).
        """
        try:
            return self.body_serializers[content_type]
        except (KeyError, TypeError):
            raise base_exception.InvalidContentType(content_type=content_type)
class RequestHeadersDeserializer(ActionDispatcher):
    """Default request headers deserializer."""

    def deserialize(self, request, action):
        """Extract header-derived kwargs, dispatching on *action*."""
        return self.dispatch(request, action=action)

    def default(self, request):
        # By default no kwargs are extracted from the headers.
        return {}
class RequestDeserializer(object):
    """Break up a Request object into more useful pieces."""

    def __init__(self, body_deserializers=None, headers_deserializer=None,
                 supported_content_types=None):
        """
        :param body_deserializers: mapping of content type to deserializer,
            merged over the built-in XML/JSON ones.
        :param headers_deserializer: alternative headers deserializer.
        :param supported_content_types: acceptable response content types,
            passed to the request's content negotiation.
        """
        self.supported_content_types = supported_content_types
        self.body_deserializers = {
            'application/xml': XMLDeserializer(),
            'application/json': JSONDeserializer(),
        }
        self.body_deserializers.update(body_deserializers or {})
        self.headers_deserializer = (headers_deserializer or
                                     RequestHeadersDeserializer())

    def deserialize(self, request):
        """Extract necessary pieces of the request.

        :param request: Request object
        :returns: tuple of (expected controller action name, dictionary of
                  keyword arguments to pass to the controller, the expected
                  content type of the response)
        """
        action_args = self.get_action_args(request.environ)
        action = action_args.pop('action', None)
        action_args.update(self.deserialize_headers(request, action))
        action_args.update(self.deserialize_body(request, action))
        accept = self.get_expected_content_type(request)
        return (action, action_args, accept)

    def deserialize_headers(self, request, action):
        """Extract kwargs from the request headers."""
        return self.headers_deserializer.deserialize(request, action)

    def deserialize_body(self, request, action):
        """Extract kwargs from the request body.

        Returns {} for an empty body or a missing Content-Type; re-raises
        InvalidContentType for unrecognized/undeserializable types.
        """
        if not len(request.body) > 0:
            LOG.debug("Empty body provided in request")
            return {}
        try:
            content_type = request.get_content_type()
        except base_exception.InvalidContentType:
            LOG.debug("Unrecognized Content-Type provided in request")
            raise
        if content_type is None:
            LOG.debug("No Content-Type provided in request")
            return {}
        try:
            deserializer = self.get_body_deserializer(content_type)
        except base_exception.InvalidContentType:
            LOG.debug("Unable to deserialize body as provided Content-Type")
            raise
        return deserializer.deserialize(request.body, action)

    def get_body_deserializer(self, content_type):
        """Return the body deserializer registered for *content_type*."""
        try:
            return self.body_deserializers[content_type]
        except (KeyError, TypeError):
            raise base_exception.InvalidContentType(content_type=content_type)

    def get_expected_content_type(self, request):
        """Negotiate the response content type against the Accept header."""
        return request.best_match_content_type(self.supported_content_types)

    def get_action_args(self, request_environment):
        """Parse dictionary created by routes library."""
        try:
            args = request_environment['wsgiorg.routing_args'][1].copy()
        except Exception:
            return {}
        # 'controller' and 'format' are routing artifacts, not controller
        # keyword arguments.
        try:
            del args['controller']
        except KeyError:
            pass
        try:
            del args['format']
        except KeyError:
            pass
        return args
class TextDeserializer(ActionDispatcher):
    """Default request body deserialization."""

    def deserialize(self, datastring, action='default'):
        """Deserialize *datastring*, dispatching on *action*."""
        return self.dispatch(datastring, action=action)

    def default(self, datastring):
        # Actions without a dedicated handler deserialize to no kwargs.
        return {}
class JSONDeserializer(TextDeserializer):
    """Deserialize JSON request bodies."""

    def default(self, datastring):
        return {'body': self._from_json(datastring)}

    def _from_json(self, datastring):
        """Parse *datastring* as JSON or raise MalformedRequestBody."""
        try:
            parsed = jsonutils.loads(datastring)
        except ValueError:
            msg = _("cannot understand JSON")
            raise base_exception.MalformedRequestBody(reason=msg)
        return parsed
class XMLDeserializer(TextDeserializer):
    """Deserialize XML request bodies into nested Python dicts."""

    def __init__(self, metadata=None):
        """
        :param metadata: information needed to deserialize xml into
                         a dictionary.
        """
        super(XMLDeserializer, self).__init__()
        self.metadata = metadata or {}

    def _from_xml(self, datastring):
        """Parse *datastring* and return {root_name: converted_tree}."""
        # Node names listed under metadata['plurals'] are converted to lists.
        plurals = set(self.metadata.get('plurals', {}))
        try:
            node = xmlutils.safe_minidom_parse_string(datastring).childNodes[0]
            return {node.nodeName: self._from_xml_node(node, plurals)}
        except expat.ExpatError:
            msg = _("cannot understand XML")
            raise base_exception.MalformedRequestBody(reason=msg)

    def _from_xml_node(self, node, listnames):
        """Convert a minidom node to a simple Python type.

        :param listnames: list of XML node names whose subnodes should
                          be considered list items.
        """
        # nodeType 3 is TEXT_NODE: a lone text child means a leaf value.
        if len(node.childNodes) == 1 and node.childNodes[0].nodeType == 3:
            return node.childNodes[0].nodeValue
        elif node.nodeName in listnames:
            return [self._from_xml_node(n, listnames) for n in node.childNodes]
        else:
            # Attributes and non-text children merge into one dict.
            result = dict()
            for attr in node.attributes.keys():
                result[attr] = node.attributes[attr].nodeValue
            for child in node.childNodes:
                if child.nodeType != node.TEXT_NODE:
                    result[child.nodeName] = self._from_xml_node(child,
                                                                 listnames)
            return result

    def find_first_child_named(self, parent, name):
        """Search a nodes children for the first child with a given name."""
        for node in parent.childNodes:
            if node.nodeName == name:
                return node
        return None

    def find_children_named(self, parent, name):
        """Return all of a nodes children who have the given name."""
        for node in parent.childNodes:
            if node.nodeName == name:
                yield node

    def extract_text(self, node):
        """Get the text field contained by the given node."""
        if len(node.childNodes) == 1:
            child = node.childNodes[0]
            if child.nodeType == child.TEXT_NODE:
                return child.nodeValue
        return ""

    def default(self, datastring):
        # Entry point used by TextDeserializer.dispatch.
        return {'body': self._from_xml(datastring)}
| redhat-openstack/trove | trove/common/base_wsgi.py | Python | apache-2.0 | 27,852 |
#!/usr/bin/env python
"""Starts the Flask app in debug mode.

Do not use in production.
"""
from bot.webapp import goldstarsapp

if __name__ == "__main__":
    # Enable Flask's interactive debugger/auto-reloader (development only).
    goldstarsapp.debug = True
    # Listen on all interfaces, port 8042, with two worker processes.
    goldstarsapp.run(port=8042, host='0.0.0.0', processes=2)
| barentsen/AstroGoldStars | run-devserver.py | Python | mit | 247 |
"""Python challenge #7:
http://www.pythonchallenge.com/pc/def/oxygen.html"""
import urllib
from PIL import Image
def main():
    """Decode the message hidden in row 47 of oxygen.png.

    Samples one pixel every 7 columns along row y=47 and interprets the
    first channel of each sample (presumably red -- TODO confirm mode)
    as an ASCII code.
    """
    stream = urllib.urlopen('http://www.pythonchallenge.com/pc/def/oxygen.png')
    image = Image.open(stream)
    pixels = image.load()
    width = int(image.size[0])
    chars = [chr(pixels[x, 47][0]) for x in xrange(0, width, 7)]
    return ''.join(chars)
if __name__ == "__main__":
output = main()
array = [105, 110, 116, 101, 103, 114, 105, 116, 121]
print ''.join([chr(x) for x in array])
| bm5w/pychal | 7.py | Python | mit | 565 |
"""
Tests for tools
Author: Chad Fulton
License: Simplified-BSD
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import pandas as pd
from statsmodels.tsa.statespace import tools
# from .results import results_sarimax
from numpy.testing import (
assert_equal, assert_array_equal, assert_almost_equal, assert_raises
)
class TestCompanionMatrix(object):
    """Check companion_matrix against hand-computed matrices."""

    # Each entry: (polynomial, expected companion matrix).
    cases = [
        (2, np.array([[0, 1], [0, 0]])),
        ([1, -1, -2], np.array([[1, 1], [2, 0]])),
        ([1, -1, -2, -3], np.array([[1, 1, 0], [2, 0, 1], [3, 0, 0]])),
    ]

    def test_cases(self):
        for polynomial, expected in self.cases:
            assert_equal(tools.companion_matrix(polynomial), expected)
class TestDiff(object):
    """Check tools.diff for simple, seasonal and mixed differencing."""

    x = np.arange(10)
    # Each entry: (series, diff, seasonal_diff, k_seasons, expected result).
    cases = [
        # diff = 1
        ([1,2,3], 1, None, 1, [1, 1]),
        # diff = 2
        (x, 2, None, 1, [0]*8),
        # diff = 1, seasonal_diff=1, k_seasons=4
        (x, 1, 1, 4, [0]*5),
        (x**2, 1, 1, 4, [8]*5),
        (x**3, 1, 1, 4, [60, 84, 108, 132, 156]),
        # diff = 1, seasonal_diff=2, k_seasons=2
        (x, 1, 2, 2, [0]*5),
        (x**2, 1, 2, 2, [0]*5),
        (x**3, 1, 2, 2, [24]*5),
        (x**4, 1, 2, 2, [240, 336, 432, 528, 624]),
    ]

    def test_cases(self):
        # Basic cases
        for series, diff, seasonal_diff, k_seasons, result in self.cases:

            # Test numpy array
            x = tools.diff(series, diff, seasonal_diff, k_seasons)
            assert_almost_equal(x, result)

            # Test as Pandas Series
            series = pd.Series(series)
            # NOTE(review): the Series form is immediately overwritten
            # below, so tools.diff is never actually exercised on a
            # 1-d pd.Series here -- confirm whether that was intended.
            # Rewrite to test as n-dimensional array
            series = np.c_[series, series]
            result = np.c_[result, result]

            # Test Numpy array
            x = tools.diff(series, diff, seasonal_diff, k_seasons)
            assert_almost_equal(x, result)

            # Test as Pandas Dataframe
            series = pd.DataFrame(series)
            x = tools.diff(series, diff, seasonal_diff, k_seasons)
            assert_almost_equal(x, result)
class TestIsInvertible(object):
    """Check is_invertible on lists, ndarrays and pandas Series."""

    # Each entry: (polynomial, expected invertibility flag).
    cases = [
        ([1, -0.5], True),
        ([1, 1 - 1e-9], True),
        ([1, 1], False),
        ([1, 0.9, 0.1], True),
        (np.array([1, 0.9, 0.1]), True),
        (pd.Series([1, 0.9, 0.1]), True),
    ]

    def test_cases(self):
        for polynomial, expected in self.cases:
            assert_equal(tools.is_invertible(polynomial), expected)
class TestConstrainStationaryUnivariate(object):
    """Check constrain_stationary_univariate against a known value."""

    # (unconstrained input, expected constrained output).
    cases = [
        (np.array([2.]), -2. / ((1 + 2.**2)**0.5)),
    ]

    def test_cases(self):
        for unconstrained, expected in self.cases:
            actual = tools.constrain_stationary_univariate(unconstrained)
            assert_equal(actual, expected)
class TestValidateMatrixShape(object):
    """Exercise validate_matrix_shape with good and bad arguments."""

    # Each entry: (name, shape, nrows, ncols, nobs).
    valid = [
        ('TEST', (5, 2), 5, 2, None),
        ('TEST', (5, 2), 5, 2, 10),
        ('TEST', (5, 2, 10), 5, 2, 10),
    ]
    invalid = [
        ('TEST', (5,), 5, None, None),
        ('TEST', (5, 1, 1, 1), 5, 1, None),
        ('TEST', (5, 2), 10, 2, None),
        ('TEST', (5, 2), 5, 1, None),
        ('TEST', (5, 2, 10), 5, 2, None),
        ('TEST', (5, 2, 10), 5, 2, 5),
    ]

    def test_valid_cases(self):
        # None of these should raise.
        for case in self.valid:
            tools.validate_matrix_shape(*case)

    def test_invalid_cases(self):
        for case in self.invalid:
            assert_raises(ValueError, tools.validate_matrix_shape, *case)
class TestValidateVectorShape(object):
    """Exercise validate_vector_shape with good and bad arguments."""

    # Each entry: (name, shape, nrows, nobs).
    valid = [
        ('TEST', (5,), 5, None),
        ('TEST', (5,), 5, 10),
        ('TEST', (5, 10), 5, 10),
    ]
    invalid = [
        ('TEST', (5, 2, 10), 5, 10),
        ('TEST', (5,), 10, None),
        ('TEST', (5, 10), 5, None),
        ('TEST', (5, 10), 5, 5),
    ]

    def test_valid_cases(self):
        # None of these should raise.
        for case in self.valid:
            tools.validate_vector_shape(*case)

    def test_invalid_cases(self):
        for case in self.invalid:
            assert_raises(ValueError, tools.validate_vector_shape, *case)
| hlin117/statsmodels | statsmodels/tsa/statespace/tests/test_tools.py | Python | bsd-3-clause | 4,268 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This module implements map function with various backends and
caching.
"""
from __future__ import division, print_function, absolute_import
from __future__ import unicode_literals
from sys import version
try:
import cPickle as pickle
except ImportError:
import pickle
import hashlib
import os
import time
import datetime
import numpy as np
import sys
import logging
import inspect
import socket
import subprocess
import multiprocessing
import shutil
import tempfile
import string
import imp
import functools
import pandas as pd
import itertools
import warnings
logger = logging.getLogger('thorns')
is_inside_map = False
class _FuncWrap(object):
def __init__(self, func):
self.func = func
def __call__(self, data):
func = self.func
## Trim the data dict to the function spec
arg_names = inspect.getargspec(func).args
data_filtered = {}
for k in arg_names:
if k in data:
data_filtered[k] = data[k]
global is_inside_map
is_inside_map = True
start = time.time()
ans = func(**data_filtered)
dt = time.time() - start
is_inside_map = False
return ans,dt
def _pkl_name(obj, func, cachedir):
src = inspect.getsource(func)
pkl = pickle.dumps((obj, src), -1)
h = hashlib.sha1(pkl).hexdigest()
pkl_name = os.path.join(
cachedir,
h + '.pkl'
)
return pkl_name
def _load_cache(fname):
logger.info("Loading cache from {}".format(fname))
with open(fname, 'rb') as f:
data = pickle.load(f)
return data
def _dump_cache(fname, obj):
dirname = os.path.dirname(fname)
if not os.path.exists(dirname):
os.makedirs(dirname)
logger.info("Dumping cache to {}".format(fname))
tmp_fname = fname + ".tmp"
with open(tmp_fname, 'wb') as f:
pickle.dump(obj, f, -1)
os.rename(tmp_fname, fname)
def _serial_map(func, iterable, cfg):
    """Evaluate *func* over *iterable* sequentially, in this process."""
    wrapped = _FuncWrap(func)
    for kwargs in iterable:
        yield wrapped(kwargs)
def _serial_isolated_map(func, iterable, cfg):
    """Evaluate each item in a fresh child Python process.

    For every args dict, spawn ``thorns.util.run_func`` in a subprocess
    and exchange the (module, func name, args) request and the result
    over a Unix-domain socket in a throwaway temp directory.
    """
    for args in iterable:

        dirname = tempfile.mkdtemp()
        fname = os.path.join(
            dirname,
            'mar_maps_socket'
        )

        # NOTE(review): `p` is never waited on; presumably the child
        # exits after answering -- confirm there is no zombie leak.
        p = subprocess.Popen(
            ['python', '-m', 'thorns.util.run_func', fname]
        )

        module_name = inspect.getfile(func)
        func_name = func.__name__

        data = (module_name, func_name, args)

        ### make a socket
        s = socket.socket(socket.AF_UNIX)
        s.bind(fname)
        s.listen(1)
        conn, addr = s.accept()

        ### send the function and data to the child
        f = conn.makefile('wb')
        pickle.dump(data, f, -1)
        f.close()

        ### receive results from the child
        f = conn.makefile('rb')
        result = pickle.load(f)
        f.close()

        ### cleanup
        conn.close()
        s.close()
        shutil.rmtree(dirname)

        yield result
def _multiprocessing_map(func, iterable, cfg):
    """Evaluate *func* over *iterable* using a local process pool."""
    wrapped = _FuncWrap(func)
    workers = multiprocessing.Pool()

    # Submit everything up front so the pool runs ahead of consumption.
    pending = [workers.apply_async(wrapped, (kwargs,)) for kwargs in iterable]

    for item in pending:
        # NOTE(review): the huge explicit timeout is presumably there to
        # keep Ctrl-C responsive (the no-timeout get() blocks signals on
        # some Python versions) -- confirm before changing.
        yield item.get(9999999)
def _ipython_map(func, iterable, cfg):
    """Evaluate *func* over *iterable* on an IPython/ipyparallel cluster.

    Dependencies and the module containing *func* are pushed to the
    engines as source text and exec'd there (Python 2 style exec), so
    the engines need no shared filesystem.
    """
    import IPython
    if IPython.version_info[0] < 4:
        from IPython.parallel import Client
    else:
        from ipyparallel import Client

    rc = Client()

    rc[:].clear()

    ### Make modules for all dependencies on the engines
    for dep in cfg['dependencies']:
        mod_name = os.path.splitext(
            os.path.basename(dep)
        )[0]
        with open(dep) as f:
            code = f.read()
        # Python 2-only codec: escapes quotes/newlines so the source can
        # be embedded inside the exec string below.
        code = code.encode('string_escape')
        rc[:].execute(
            """
            import imp
            import sys

            _mod = imp.new_module('{mod_name}')
            sys.modules['{mod_name}'] = _mod

            exec '''{code}''' in _mod.__dict__

            del _mod
            """.format(code=code, mod_name=mod_name),
            block=True
        )

    ### Make sure all definitions surrounding the func are present on
    ### the engines (evaluate the code from the file of the func)
    fname = inspect.getfile(func)
    with open(fname) as f:
        code = f.read()

    logger.info("IPython engine IDs: {}".format(rc.ids))

    ## Need to escape all ' and " in order to embed the code into
    ## execute string
    # code = code.replace("\'", "\\\'")
    # code = code.replace("\"", "\\\"")
    code = code.encode('string_escape')

    ## The trick with `exec in {}' is done because we want to avoid
    ## executing `__main__'
    rc[:].execute(
        """
        _tmp_dict = dict()
        exec '''{code}''' in _tmp_dict
        globals().update(_tmp_dict)
        del _tmp_dict
        """.format(code=code),
        block=True
    )

    # status.wait()

    # res = rc[:].apply(dir)
    # print(res.get())

    wrap = _FuncWrap(func)
    pool = rc.load_balanced_view()

    results = []
    for args in iterable:
        # NOTE(review): unlike _multiprocessing_map, args is passed
        # positionally rather than as a 1-tuple -- confirm that
        # apply_async here expects the dict itself.
        results.append( pool.apply_async(wrap, args) )

    for result in results:
        yield result.get()
def _publish_status(status, where='stdout', func_name=""):
name = os.path.splitext(
os.path.basename(sys.argv[0])
)[0]
### Bar
bar_len = 10
bar = (
"O" * int(round(bar_len * status['loaded']/status['all'])) +
"#" * int(round(bar_len * status['processed']/status['all']))
)
bar += "." * (bar_len - len(bar))
seconds = time.time() - status['start_time']
msg = "[{bar}] {loaded}|{processed}/{all} {time} ({func_name})".format(
loaded=status['loaded'],
processed=status['processed'],
all=status['all'],
bar=bar,
time=datetime.timedelta(seconds=seconds),
func_name=func_name
)
if where == 'file':
dirname = status['dir']
fname = os.path.join(dirname, name)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(fname, 'w') as f:
f.write(msg)
elif where == 'stdout':
sys.stderr.write(msg)
sys.stderr.write('\n')
sys.stderr.flush()
elif where == 'title':
sys.stderr.write("\033]2;{}\007\r".format(msg))
sys.stderr.flush()
elif (where == 'notify') and (seconds > 5):
try:
import pynotify
pynotify.init(name)
notice = pynotify.Notification(name, msg)
notice.show()
except Exception:
### ImportError, GError (?)
pass
def _get_options(backend, cache, dependencies):
    """Merge explicit map() arguments with THmap/THmachines/
    THdependencies/THcache environment variables into a config dict."""
    cfg = {}

    global is_inside_map

    # A map running inside another map is always forced serial.
    if is_inside_map:
        cfg['backend'] = 'serial'
    elif backend is not None:
        cfg['backend'] = backend
    else:
        cfg['backend'] = os.environ.get('THmap', 'serial')

    # TODO: must be parsed
    cfg['machines'] = os.environ.get('THmachines', [])

    if 'THdependencies' in os.environ:
        # TODO: must be parsed
        cfg['dependencies'] = os.environ['THdependencies']
    elif dependencies is not None:
        cfg['dependencies'] = dependencies
    else:
        cfg['dependencies'] = []

    if cache is not None:
        cfg['cache'] = cache
    else:
        cfg['cache'] = os.environ.get('THcache', 'yes')

    # Nested maps stay quiet; only the outermost map reports progress.
    cfg['publish_status'] = not is_inside_map

    return cfg
def cache(func, workdir='work'):
    """Wrap a function so results are pickled to, and reused from, disk."""
    # workdir is fixed at decoration time, so the cache dir can be
    # computed once here.
    cachedir = os.path.join(workdir, 'map_cache')

    @functools.wraps(func)
    def wrap(**kwargs):
        fname = _pkl_name(kwargs, func, cachedir)

        if os.path.exists(fname):
            return _load_cache(fname)

        result = func(**kwargs)
        _dump_cache(fname, result)
        return result

    return wrap
def map(
        func,
        space,
        backend=None,
        cache=None,
        workdir='work',
        dependencies=None,
        kwargs=None
):
    """Apply func to every item of iterable and return a list of the
    results.  This map supports multiple backends, e.g. 'serial',
    'multiprocessing', 'ipcluster'.

    Parameters
    ----------
    func : function
        The function to be applied to the data.
    space : (list of dicts) or (dict of lists)
        Parameter space, where the keys of the dictonary(s) correspond
        to the keyward arguments of the function.
        In the case of a list of dicts, each entry of the list is applied
        to the function.
        In the case of a dict of lists, the parameter space is built
        by using all possible permutations of the list entries.
    backend : {'serial', 'ipcluster', 'multiprocessing', 'serial_isolated'}
        Choose a backend for the map.
    cache : bool or {'yes', 'no', 'redo'}
        If True, each result is loaded instead calculated again.
    workdir : str, optional
        Directory in which to store cache.
    dependencies : list, optional
        List of python files that will be imported on the remote site
        before executing the `func`.
    kwargs : dict, optional
        Extra parameters for the `func`.

    Returns
    -------
    pd.DataFrame
        Table with parameters (MultiIndex) and results.

    """
    cfg = _get_options(
        backend=backend,
        cache=cache,
        dependencies=dependencies
    )

    # Progress bookkeeping shared with _publish_status().
    status = {
        'all':0,
        'loaded':0,
        'processed':0,
        'bar': [],
        'times':[],
        'start_time':time.time(),
        'dir': os.path.join(workdir, 'status')
    }

    cachedir = os.path.join(workdir, 'map_cache')

    cache_files = []
    hows = []
    todos = []
    all_kwargs_names = set()

    ### Convert a dict of lists into a list of dicts
    if isinstance(space, dict):
        all_values = itertools.product(*space.values())
        keys = space.keys()
        iterable = [dict(zip(keys, values)) for values in all_values]
    else:
        iterable = space

    ### Go through the parameter space and check what should be
    ### calculated (todos) and what recalled from the cache
    for args in iterable:
        all_kwargs_names.update(args)

        # Copy before merging extra kwargs so `iterable` keeps only the
        # parameter-space columns for the result index.
        args = dict(args)
        if kwargs is not None:
            args.update(kwargs)

        fname = _pkl_name(args, func, cachedir)
        cache_files.append(fname)

        status['all'] += 1

        if (cfg['cache'] == 'yes') and os.path.exists(fname):
            hows.append('load')
        else:
            hows.append('process')
            todos.append(args)

    ### Submit the parameter space to one of the backends
    if cfg['backend'] == 'serial':
        results = _serial_map(func, todos, cfg)
    elif cfg['backend'] in ('multiprocessing', 'm'):
        results = _multiprocessing_map(func, todos, cfg)
    elif cfg['backend'] in ('ipython', 'ipcluster'):
        results = _ipython_map(func, todos, cfg)
    elif cfg['backend'] == 'serial_isolated':
        results = _serial_isolated_map(func, todos, cfg)
    else:
        raise RuntimeError("Unknown map() backend: {}".format(cfg['backend']))

    ### Generate results by either using cache (how == 'load') or
    ### calculate using func (how == 'process')
    answers = []
    for how,fname in zip(hows,cache_files):

        if cfg['publish_status']:
            _publish_status(status, 'file', func_name=func.__name__)
            _publish_status(status, 'title', func_name=func.__name__)

        if how == 'load':
            result = _load_cache(fname)
            status['loaded'] += 1

        elif how == 'process':
            # `results` yields in the same order as `todos` was built.
            result = next(results)
            status['processed'] += 1

            if cfg['cache'] in ('yes', 'refresh', 'redo'):
                _dump_cache(fname, result)

        else:
            raise RuntimeError("Should never reach this point.")

        ans,dt = result
        status['times'].append(dt)

        answers.append(ans)

    ### Prepare DataFrame output: parameters become the (Multi)Index,
    ### function results become the columns.
    iterable = pd.DataFrame(iterable)
    answers = pd.DataFrame(answers)
    out = pd.concat((iterable, answers), axis=1)
    out = out.set_index(list(all_kwargs_names))

    if cfg['publish_status']:
        _publish_status(status, 'file', func_name=func.__name__)
        _publish_status(status, 'title', func_name=func.__name__)
        _publish_status(status, 'stdout', func_name=func.__name__)
        _publish_status(status, 'notify', func_name=func.__name__)

    return out
| mrkrd/thorns | thorns/util/maps.py | Python | gpl-3.0 | 12,672 |
#! /usr/bin/python3
import struct
import decimal
D = decimal.Decimal
from fractions import Fraction
from counterpartylib.lib import (config, exceptions, util)
"""Burn {} to earn {} during a special period of time.""".format(config.BTC, config.XCP)
ID = 60
def initialise (db):
    """Create the ``burns`` table and its status/source indexes.

    Idempotent: every statement uses IF NOT EXISTS.
    """
    cur = db.cursor()
    cur.execute('''CREATE TABLE IF NOT EXISTS burns(
                      tx_index INTEGER PRIMARY KEY,
                      tx_hash TEXT UNIQUE,
                      block_index INTEGER,
                      source TEXT,
                      burned INTEGER,
                      earned INTEGER,
                      status TEXT,
                      FOREIGN KEY (tx_index, tx_hash, block_index) REFERENCES transactions(tx_index, tx_hash, block_index))
                   ''')
    # Look-ups by status and by source address are both common.
    cur.execute('''CREATE INDEX IF NOT EXISTS
                      status_idx ON burns (status)
                   ''')
    cur.execute('''CREATE INDEX IF NOT EXISTS
                      address_idx ON burns (source)
                   ''')
def validate (db, source, destination, quantity, block_index, overburn=False):
    """Return a list of problems with a proposed burn (empty if valid)."""
    problems = []

    # Burns must be sent to the designated unspendable address.
    if destination != config.UNSPENDABLE:
        problems.append('wrong destination address')

    if not isinstance(quantity, int):
        # Without an integer quantity the remaining checks are meaningless.
        problems.append('quantity must be in satoshis')
        return problems

    if quantity < 0:
        problems.append('negative quantity')

    # Try to make sure that the burned funds won't go to waste.
    if block_index < config.BURN_START - 1:
        problems.append('too early')
    elif block_index > config.BURN_END:
        problems.append('too late')

    return problems
def compose (db, source, quantity, overburn=False):
    """Build the transaction tuple for burning *quantity* satoshis.

    :raises ComposeError: if validation fails or the 1-BTC-per-address
        cap would be exceeded (unless *overburn* is set).
    """
    cursor = db.cursor()
    destination = config.UNSPENDABLE

    problems = validate(db, source, destination, quantity,
                        util.CURRENT_BLOCK_INDEX, overburn=overburn)
    if problems:
        raise exceptions.ComposeError(problems)

    # Check that a maximum of 1 BTC total is burned per address.
    cursor.execute('''SELECT * FROM burns WHERE (status = ? AND source = ?)''', ('valid', source))
    already_burned = sum(burn['burned'] for burn in cursor.fetchall())

    if quantity > (1 * config.UNIT - already_burned) and not overburn:
        raise exceptions.ComposeError('1 {} may be burned per address'.format(config.BTC))

    cursor.close()
    return (source, [(destination, quantity)], None)
def parse (db, tx, MAINNET_BURNS, message=None):
    """Parse a burn transaction, credit earned XCP, and record it.

    On testnet the burn is validated and the reward computed from the
    block height; on mainnet the burns are looked up in the hard-coded
    MAINNET_BURNS table (unknown tx hashes are ignored).
    """
    burn_parse_cursor = db.cursor()

    if config.TESTNET:
        status = 'valid'

        if status == 'valid':
            problems = validate(db, tx['source'], tx['destination'], tx['btc_amount'], tx['block_index'], overburn=False)
            if problems: status = 'invalid: ' + '; '.join(problems)

            if tx['btc_amount'] != None:
                sent = tx['btc_amount']
            else:
                sent = 0

        if status == 'valid':
            # Calculate quantity of XCP earned. (Maximum 1 BTC in total, ever.)
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM burns WHERE (status = ? AND source = ?)''', ('valid', tx['source']))
            burns = cursor.fetchall()
            already_burned = sum([burn['burned'] for burn in burns])
            ONE = 1 * config.UNIT
            max_burn = ONE - already_burned
            if sent > max_burn: burned = max_burn # Exceeded maximum burn; earn what you can.
            else: burned = sent

            # Reward multiplier decays linearly over the burn period,
            # from 1500 at BURN_START down to 1000 at BURN_END
            # (i.e. a 1.5x-to-1.0x bonus, scaled by 1000).
            total_time = config.BURN_END - config.BURN_START
            partial_time = config.BURN_END - tx['block_index']
            multiplier = (1000 + (500 * Fraction(partial_time, total_time)))
            earned = round(burned * multiplier)

            # Credit source address with earned XCP.
            util.credit(db, tx['source'], config.XCP, earned, action='burn', event=tx['tx_hash'])
        else:
            burned = 0
            earned = 0

        tx_index = tx['tx_index']
        tx_hash = tx['tx_hash']
        block_index = tx['block_index']
        source = tx['source']

    else:
        # Mainnet burns are hard‐coded.
        try:
            line = MAINNET_BURNS[tx['tx_hash']]
        except KeyError:
            return

        util.credit(db, line['source'], config.XCP, int(line['earned']), action='burn', event=line['tx_hash'])

        tx_index = tx['tx_index']
        tx_hash = line['tx_hash']
        block_index = line['block_index']
        source = line['source']
        burned = line['burned']
        earned = line['earned']
        status = 'valid'

    # Add parsed transaction to message-type–specific table.
    # TODO: store sent in table
    bindings = {
        'tx_index': tx_index,
        'tx_hash': tx_hash,
        'block_index': block_index,
        'source': source,
        'burned': burned,
        'earned': earned,
        'status': status,
    }
    sql='insert into burns values(:tx_index, :tx_hash, :block_index, :source, :burned, :earned, :status)'
    burn_parse_cursor.execute(sql, bindings)

    burn_parse_cursor.close()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| tokenly/counterparty-lib | counterpartylib/lib/messages/burn.py | Python | mit | 5,150 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RCodetools(RPackage):
    """Code analysis tools for R."""

    # CRAN landing page, release tarball, and archive of older releases.
    homepage = "https://cloud.r-project.org/package=codetools"
    url      = "https://cloud.r-project.org/src/contrib/codetools_0.2-15.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/codetools"

    version('0.2-18', sha256='1a9ea6b9792dbd1688078455929385acc3a5e4bef945c77bec1261fa4a084c28')
    version('0.2-16', sha256='c276757c3adabaf700f2ea25835892b09bc1bd438ebd17c805ea9073ed8a74b6')
    version('0.2-15', sha256='4e0798ed79281a614f8cdd199e25f2c1bd8f35ecec902b03016544bd7795fa40')
    version('0.2-14', sha256='270d603b89076081af8d2db0256927e55ffeed4c27309d50deea75b444253979')

    # codetools needs R >= 2.1 at both build and run time.
    depends_on('r@2.1:', type=('build', 'run'))
| LLNL/spack | var/spack/repos/builtin/packages/r-codetools/package.py | Python | lgpl-2.1 | 943 |
"""
Django settings for social_network project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repo; rotate it and load
# from the environment before any production deployment.
SECRET_KEY = 'wdzh^!frqj-coxd27ckfsi09mg-^%=jvp@xs425w)cuq5mpi0&'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = (
    'user_profile',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'framework',
    'friends',
    'posts',
    'comments',
    'tags',
    'api',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    # Project middleware: must run after authentication so the user is
    # available on the request.
    'user_profile.middleware.ProfileMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'social_network.urls'

WSGI_APPLICATION = 'social_network.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'MST'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/

STATIC_URL = '/static/'

# Templates
TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, 'templates').replace('\\', '/'),
)

# Static files
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "static"),
)

# Login URL to redirect to when the user fails the check.
LOGIN_URL = "/login/"

# Template Additions
TEMPLATE_CONTEXT_PROCESSORS = ('django.core.context_processors.csrf',
                               'django.contrib.auth.context_processors.auth',
                               'django.core.context_processors.media')

# Fixtures for loading in dummy data
FIXTURE_DIRS = (
    os.path.join(BASE_DIR, "fixtures"),
)

# Upload directory for media from MEDIA_URL tags
MEDIA_ROOT = os.path.join(BASE_DIR, 'uploads')
MEDIA_URL = '/uploads/'

#
# CUSTOM SETTINGS
# These are application specific and not for Django but rather the underlining application
# As such, they need to be prefixed with CUSTOM_ to prevent collision
#

CUSTOM_HOST_DEFAULT = "http://projecthub.ca"
| grepme/cmput410-project | social_network/settings.py | Python | apache-2.0 | 3,160 |
# Django settings for satchmo project.
# If you have an existing project, then ensure that you modify local_settings-customize.py
# and import it from your main settings file. (from local_settings import *)
import os
# Project layout anchors: all relative paths below hang off this directory.
DIRNAME = os.path.dirname(__file__)
DJANGO_PROJECT = 'simple'
DJANGO_SETTINGS_MODULE = 'simple.settings'
ADMINS = (
    ('', ''),
)
MANAGERS = ADMINS
# NOTE(review): credentials below are placeholders -- they are expected to be
# overridden from local_settings (imported at the bottom of this file).
DATABASES = {
    'default': {
        'NAME': 'dbname',
        'ENGINE': 'django.db.backends.mysql',
        'USER': 'username',
        'PASSWORD': 'passwd',
        'HOST': 'localhost',
        'PORT': '3306',
        #'OPTIONS': { "init_command": "SET FOREIGN_KEY_CHECKS = 0",},
    }
}
# Local time zone for this installation. All choices can be found here:
# http://www.postgresql.org/docs/current/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
TIME_ZONE = 'US/Pacific'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
#Image files will be stored off of this path
MEDIA_ROOT = os.path.normpath(os.path.join(DIRNAME, 'media/'))
STATIC_ROOT = os.path.normpath(os.path.join(DIRNAME, 'media/static/'))
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.normpath(os.path.join(DIRNAME, '../../static/')),
)
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/'
ADMIN_MEDIA_PREFIX = STATIC_URL + "grappelli/"
ADMIN_MEDIA_ROOT = os.path.join(DIRNAME, 'media/static/grappelli/')
# Make this unique, and don't share it with anybody.
# NOTE(review): empty SECRET_KEY must be provided by local_settings or Django
# will refuse to start -- confirm local_settings always sets it.
SECRET_KEY = ''
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
# Ordering matters: session/csrf/locale/auth must run before the Satchmo
# middlewares that rely on the authenticated request.
MIDDLEWARE_CLASSES = (
    "django.middleware.common.CommonMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.middleware.locale.LocaleMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.middleware.doc.XViewMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "threaded_multihost.middleware.ThreadLocalMiddleware",
    "satchmo_store.shop.SSLMiddleware.SSLRedirect",
    #"satchmo_ext.recentlist.middleware.RecentProductMiddleware",
    #'debug_toolbar.middleware.DebugToolbarMiddleware',
)
#this is used to add additional config variables to each request
# NOTE: overridden in local_settings.py
# NOTE: If you enable the recent_products context_processor, you MUST have the
# 'satchmo_ext.recentlist' app installed.
TEMPLATE_CONTEXT_PROCESSORS = ('satchmo_store.shop.context_processors.settings',
                               'django.contrib.auth.context_processors.auth',
                               'django.core.context_processors.i18n',
                               'django.core.context_processors.media',
                               'django.core.context_processors.static',
                               #'satchmo_ext.recentlist.context_processors.recent_products',
                               )
#ROOT_URLCONF = 'satchmo.urls'
ROOT_URLCONF = 'simple.urls'
# Core Django apps, Satchmo store apps, and optional modules (commented-out
# entries are supported extensions that can be enabled per deployment).
INSTALLED_APPS = (
    'grappelli',
    'django.contrib.sites',
    'satchmo_store.shop',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.comments',
    'django.contrib.sessions',
    'django.contrib.sitemaps',
    'django.contrib.staticfiles',
    'registration',
    'sorl.thumbnail',
    'south',
    'keyedcache',
    'livesettings',
    'l10n',
    'satchmo_utils.thumbnail',
    'satchmo_store.contact',
    'tax',
    'tax.modules.no',
    'tax.modules.area',
    'tax.modules.percent',
    #
    'shipping',
    #'satchmo_store.contact.supplier',
    #'shipping.modules.tiered',
    #'satchmo_ext.newsletter',
    #'satchmo_ext.recentlist',
    #'testimonials', # dependency on http://www.assembla.com/spaces/django-testimonials/
    'product',
    'product.modules.configurable',
    #'product.modules.custom',
    #'product.modules.downloadable',
    #'product.modules.subscription',
    #'satchmo_ext.product_feeds',
    #'satchmo_ext.brand',
    'payment',
    'payment.modules.dummy',
    'payment.modules.paypal',
    #'payment.modules.giftcertificate',
    #'satchmo_ext.wishlist',
    #'satchmo_ext.upsell',
    #'satchmo_ext.productratings',
    'satchmo_ext.satchmo_toolbar',
    'satchmo_utils',
    #'shipping.modules.tieredquantity',
    'django_extensions', # dependency on https://github.com/django-extensions/django-extensions/
    #'satchmo_ext.tieredpricing',
    #'typogrify', # dependency on http://code.google.com/p/typogrify/
    #'debug_toolbar',
    'app_plugins',
    'simple.localsite',
)
# NOTE(review): 'email-auth' contains a hyphen, which is not a valid Python
# module name -- presumably Satchmo resolves this path specially; confirm.
AUTHENTICATION_BACKENDS = (
    'satchmo_store.accounts.email-auth.EmailBackend',
    'django.contrib.auth.backends.ModelBackend',
)
#DEBUG_TOOLBAR_CONFIG = {
#    'INTERCEPT_REDIRECTS' : False,
#}
# Localization tweaks consumed by the l10n app (none overridden here).
L10N_SETTINGS = {
}
#### Satchmo unique variables ####
#from django.conf.urls.defaults import patterns, include
# Shop-wide configuration: empty SHOP_BASE mounts the store at the site root.
SATCHMO_SETTINGS = {
    'SHOP_BASE' : '',
    'MULTISHOP' : False,
    #'SHOP_URLS' : patterns('satchmo_store.shop.views',)
}
SKIP_SOUTH_TESTS=True
# Load the local settings
from local_settings import *
| grengojbo/satchmo | satchmo/projects/simple/settings.py | Python | bsd-3-clause | 5,650 |
(1, 2)
( 1, 2 )
(1, )
(1, )
( 1, 2, )
( 3,
  4,
  5 )
1 ,
1, 2,
( (1) * (2),
  (3) * (4),
  (5) * (6) )
((1),)
fun(2, ((1),))
fun(((1),))
(1,), (2,)
(a, b) = [
    5, 6]
(x, y) = [7, 8] # prevent this last comma being used
# NOTE(review): this file is a position fixture (examples/ending_locations);
# the exact line/column of every expression above is part of the test
# contract, so only trailing comments may be added without shifting them.
| RyanDJLee/pyta | examples/ending_locations/Tuple.py | Python | gpl-3.0 | 256 |
# Mailanie - Cute Mailbox Explorator
# Copyright 2009 Guillaume Ayoub <guillaume.ayoub@kozea.fr>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation
# Mailanie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Mailanie. If not, see <http://www.gnu.org/licenses/>.
import os
import gtk
from mailanie import config
# License blurb shown in the About dialog; wrapped in gettext's _() so it can
# be translated. The string content itself is user-visible runtime data.
_license = _("""\
Mailanie is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3 of the License, or (at your option)
any later version.
Mailanie is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along
with Mailanie; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.""")
# NOTE(review): `_` is assumed to be installed as a gettext builtin before
# this module is imported (it is not imported here) -- confirm.
_authors = ("Guillaume Ayoub <guillaume.ayoub@kozea.fr>",)
class About(gtk.AboutDialog):
    """\"About Mailanie\" dialog pre-filled with the project metadata."""

    def __init__(self, *args):
        super(About, self).__init__()
        self.set_name("Mailanie")
        self.set_version(config.get("mailanie", "version"))
        self.set_comments(_("Small and Cute Mailbox Explorer"))
        self.set_copyright(u"Copyright \u00a9 2009 Guillaume Ayoub")
        self.set_license(_license)
        self.set_authors(_authors)
        # Any response (Close, Escape, ...) dismisses the dialog.
        self.connect("response", lambda widget, data: self.destroy())
        # Note for translators:
        # Please translate "translator-credits" in
        # "Your Name (Nick) <your.adress@something.org>"
        self.set_translator_credits(_("translator-credits"))
        self.show_all()
        self.set_website("http://www.mailanie.org/")

    def destroy(self, *args):
        """Quit the GTK main loop when the dialog is dismissed.

        Fixes two crashes in the original override: ``def destroy(window,
        self)`` swapped the parameter order, so the zero-argument
        ``self.destroy()`` call from the "response" handler raised
        TypeError, and ``Gtk.main_quit()`` raised NameError because this
        module imports the toolkit as lowercase ``gtk``.
        """
        # NOTE(review): quitting the whole main loop from an About dialog
        # only makes sense if the dialog runs standalone -- confirm intent.
        gtk.main_quit()
| liZe/Mailanie | mailanie/ui/about.py | Python | gpl-3.0 | 2,202 |
#!/usr/bin/python3
# vim: set et ts=4 sw=4: #
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*-
#
# main.py
# Copyright (C) 2017 Carlos Penaranda <cpenar@MR032028>
#
# example_use_glade_anjuta is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# example_use_glade_anjuta is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with thiprogram. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk, GdkPixbuf, Gdk
import os, sys
#Comment the first line and uncomment the second before installing
#or making the tarball (alternatively, use project variables)
UI_FILE = "src/single_data_set.ui"
#UI_FILE = "/usr/local/share/example_use_glade_anjuta/ui/example_use_glade_anjuta.ui"
# importing lib tools
from lib.colorMap import windowFromFile
class GUI:
    """Main window controller: loads the Glade UI and wires its signals."""

    def __init__(self):
        # Glade handlers are looked up by name on this instance.
        self.builder = Gtk.Builder()
        self.builder.add_from_file(UI_FILE)
        self.builder.connect_signals(self)
        window = self.builder.get_object('single_data_set_window')
        window.show_all()

    def destroy(self, window):
        """Signal handler: stop the Gtk main loop.

        Fix: the original declared ``def destroy(window, self)`` -- the
        parameter names were swapped (the instance is always the first
        positional argument of a bound method).
        """
        Gtk.main_quit()

    def supColorMap16(self, *args):
        """Open the 16-colour supervised colour-map window."""
        map_file = os.path.dirname(__file__) + '/ColorMap/Supervised_ColorMap16.pal'
        windowFromFile(map_file, self)
def main():
    """Build the GUI and hand control to the Gtk main loop."""
    gui = GUI()  # keep a reference alive for the lifetime of the loop
    Gtk.main()

if __name__ == "__main__":
    sys.exit(main())
| cpenar/PolSARpro_gtk | single_data_set/src/single_data_set.py | Python | gpl-3.0 | 1,806 |
# -*- test-case-name: mamba.test.test_camelcase -*-
# Copyright (c) 2012 - Oscar Campos <oscar.campos@member.fsf.org>
# See LICENSE for more details
"""
.. module:: borg
:platform: Unix, Windows
:synopsys: Stupid class that just offers stupid CamelCase functionality
.. moduleauthor:: Oscar Campos <oscar.campos@member.fsf.org>
"""
class CamelCase(object):
    """Small helper offering CamelCase functionality.

    :param camelize: the string (or sequence of words) to camelize
    :type camelize: str, unicode, tuple or list
    """

    def __init__(self, camelize):
        self._camelized = None
        self._camelize = camelize
        super(CamelCase, self).__init__()

    def camelize(self, union=False):
        """Capitalize each word and return the camelized string.

        Fix: the original docstring claimed ``union=True`` inserts spaces;
        the code does the opposite, joining the words together.

        :param union: when True the capitalized words are joined with no
            separator ('CamelCase'); when False they stay space-separated
            ('Camel Case')
        :type union: bool
        :raises ValueError: if the input is not a string, tuple or list
        """
        joiner = '' if union else ' '
        source = self._camelize
        if isinstance(source, (tuple, list)):
            words = source
        elif hasattr(source, 'split'):
            # Covers both str and (on Python 2) unicode without naming
            # `unicode`, which does not exist on Python 3.
            words = source.split(' ')
        else:
            raise ValueError('Expected str, tuple or list, got %s instead.'
                             % type(source))
        self._camelized = joiner.join(word.capitalize() for word in words)
        return self._camelized
| Kelfast/mamba-framework | mamba/utils/camelcase.py | Python | gpl-3.0 | 1,598 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from twisted.trial.unittest import TestCase
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.internet.task import Clock
from twisted.test.proto_helpers import MemoryReactorClock, StringTransport
from txamqp.testlib import TestBase as IntegrationTest
from txamqp.factory import AMQFactory
from txamqp.endpoint import AMQEndpoint
class AMQEndpointTest(TestCase):
    # Unit tests driven entirely in memory: MemoryReactorClock records
    # connection attempts in reactor.tcpClients instead of opening sockets.
    def setUp(self):
        super(AMQEndpointTest, self).setUp()
        self.reactor = MemoryReactorClock()
        self.factory = AMQFactory(clock=Clock())
    def test_connect(self):
        """
        The endpoint connects to the broker and performs the AMQP
        authentication.
        """
        endpoint = AMQEndpoint(self.reactor, "1.2.3.4", "1234", username="me", password="pw")
        endpoint.connect(self.factory)
        # tcpClients entries are (host, port, factory, ...) tuples.
        self.assertEqual(("1.2.3.4", 1234), self.reactor.tcpClients[0][:2])
        # _WrappingFactory from twisted.internet.endpoints
        factory = self.reactor.tcpClients[0][2]
        protocol = factory.buildProtocol(None)
        # Simulate the transport coming up so authentication is attempted.
        protocol.makeConnection(StringTransport())
        client = protocol._wrappedProtocol
        self.assertEqual({"LOGIN": "me", "PASSWORD": "pw"}, client.response)
        self.assertEqual("AMQPLAIN", client.mechanism)
    def test_connect_with_vhost_and_heartbeat(self):
        """
        It's possible to specify a custom vhost and a custom heartbeat.
        """
        endpoint = AMQEndpoint(self.reactor, "1.2.3.4", "1234", username="me", password="pw", vhost="foo", heartbeat=10)
        endpoint.connect(self.factory)
        # _WrappingFactory from twisted.internet.endpoints
        factory = self.reactor.tcpClients[0][2]
        protocol = factory.buildProtocol(None)
        protocol.makeConnection(StringTransport())
        client = protocol._wrappedProtocol
        self.assertEqual("foo", client.vhost)
        self.assertEqual(10, client.heartbeatInterval)
    def test_from_uri(self):
        """
        It's possible to build an AMQEndpoint from an AMQP URI string.
        """
        endpoint = AMQEndpoint.from_uri(
            self.reactor, "amqp://me:pw@some.broker/foo?heartbeat=10")
        endpoint.connect(self.factory)
        # 5672 is the scheme's default port when the URI omits one.
        self.assertEqual(("some.broker", 5672), self.reactor.tcpClients[0][:2])
        # _WrappingFactory from twisted.internet.endpoints
        factory = self.reactor.tcpClients[0][2]
        protocol = factory.buildProtocol(None)
        protocol.makeConnection(StringTransport())
        client = protocol._wrappedProtocol
        self.assertEqual("foo", client.vhost)
        self.assertEqual(10, client.heartbeatInterval)
        self.assertEqual({"LOGIN": "me", "PASSWORD": "pw"}, client.response)
        self.assertEqual("AMQPLAIN", client.mechanism)
class AMQEndpointIntegrationTest(IntegrationTest):
    # Runs against a real broker; host/port/user/password/vhost/spec come
    # from the txamqp IntegrationTest base class.
    @inlineCallbacks
    def test_connect(self):
        """
        The endpoint returns a connected and authenticated client.
        """
        factory = AMQFactory(spec=self.spec)
        endpoint = AMQEndpoint(
            reactor, self.host, self.port, username=self.user,
            password=self.password, vhost=self.vhost)
        client = yield endpoint.connect(factory)
        # Opening a channel proves the AMQP handshake completed.
        channel = yield client.channel(1)
        yield channel.channel_open()
        yield client.close()
| txamqp/txamqp | src/txamqp/test/test_endpoint.py | Python | apache-2.0 | 4,136 |
from setuptools import setup
setup(name='beerlistit',
      version='0.1',
      description='Quickly get the BeerAdvocate ratings for all the beers listed on a page.',
      url='http://github.com/serhalp/beerlist.it',
      author='Philippe Serhal and Curtis Heberle',
      author_email='philippe.serhal@gmail.com',
      license='GPLv2',
      packages=['beerlistit'],
      install_requires=[
          'BeautifulSoup4',
          # Fix: a missing comma after 'grequests' made Python concatenate
          # the adjacent literals into the bogus requirement
          # 'grequestsdjango', so neither grequests nor django was declared.
          'grequests',
          'django',
      ],
      zip_safe=True)
| serhalp/beerlist.it | setup.py | Python | gpl-2.0 | 499 |
# This file is part of fedmsg
# Copyright (C) 2012 Red Hat, Inc.
#
# fedmsg is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# fedmsg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with fedmsg; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Ralph Bean <rbean@redhat.com>
#
""" Tests for mailman3 messages """
import unittest
from fedmsg_meta_fedora_infrastructure.tests.base import Base
from .common import add_doc
class TestMailman3NewReply(Base):
    """ `Discussion lists for the Fedora Project
    <https://lists.fedoraproject.org>`_ run on mailman3. When a new
    message is published on a list, fedmsg will pop out one of these messages.
    The following is an example of a reply to a thread.
    """
    # Declarative fixture: the Base class compares each expected_* value
    # against what the fedmsg meta processors derive from `msg` below.
    expected_title = "mailman.receive"
    expected_subti = ("On the devel list, nim replied to "
                      "'[Devel] Re:Software Management call for RFEs'")
    expected_link = ("https://lists.fedoraproject.org/archives/list/"
                     "devel@mm3test.fedoraproject.org/message/"
                     "S3PHLMD7PGWXXLBN3GENHVK7JJ37UWLJ/")
    expected_secondary_icon = (
        "https://seccdn.libravatar.org/avatar/"
        "e13c266c61de5e79cf37a99b94d4d4462d6857fe8a5dbdde9e041ae01b14a7db"
        "?s=64&d=retro")
    expected_packages = set([])
    expected_usernames = set([
        # We convert emails to fas usernames if we can.
        'nim',
    ])
    expected_objects = set([
        # There is only one item in this set, not four. It's just long.
        "devel/5de4f14ae46cce6de03cf68ca06526a9.squirrel@arekh.dyndns.org/"
        "519DFB93.1060502@laiskiainen.org/"
        "d4f0cefb4a7b845451ecab2c4026fe4d.squirrel@arekh.dyndns.org/"
        "message",
    ])
    # Verbatim capture of a fedmsg bus message; the exact bytes are the
    # test contract, so do not reformat the dict.
    msg = {
        "i": 4,
        "msg": {
            "mlist": {
                "list_name": "devel"
            },
            "msg": {
                "delivered-to": "devel@lists.fedoraproject.org",
                "from": "\"Nicolas Mailhot\" <nicolas.mailhot@laposte.net>",
                "x-mailman-rule-hits": "nonmember-moderation",
                "to": "\"Development discussions related to Fedora\" "
                "<devel@lists.fedoraproject.org>",
                "cc": None,
                "in-reply-to": "<519DFB93.1060502@laiskiainen.org>",
                "x-message-id-hash": "S3PHLMD7PGWXXLBN3GENHVK7JJ37UWLJ",
                "x-mailman-rule-misses": "approved; emergency; loop; "
                "member-moderation; administrivia; implicit-dest; "
                "max-recipients; max-size; news-moderation; no-subject; "
                "suspicious-header",
                "references": "<5de4f14ae46cce6de03cf68ca06526a9.squirrel@"
                "arekh.dyndns.org>\n\t<519DFB93.1060502@laiskiainen.org>",
                "archived-at": "<https://lists.fedoraproject.org/archives/"
                "list/devel@mm3test.fedoraproject.org/message/"
                "S3PHLMD7PGWXXLBN3GENHVK7JJ37UWLJ/>",
                "message-id": "<d4f0cefb4a7b845451ecab2c4026fe4d.squirrel@"
                "arekh.dyndns.org>",
                "subject": "[Devel] Re:Software Management call for RFEs"
            }
        },
        "topic": "org.fedoraproject.dev.mailman.receive",
        "username": "mailman",
        "timestamp": 1369322289.6794021
    }
class TestMailman3NewMail(Base):
    """ `Discussion lists for the Fedora Project
    <https://lists.fedoraproject.org>`_ run on mailman3. When a new
    message is published on a list, fedmsg will pop out one of these messages.
    The following is an example of a new thread being started.
    """
    # Declarative fixture: expected_* values are checked against what the
    # meta processors derive from the captured `msg` payload below.
    expected_title = "mailman.receive"
    expected_subti = ("jreznik@redhat.com wrote '[Devel] Fedora 19 Beta status"
                      " is Go, release on May 28, 2013' to the devel list")
    expected_link = ("https://lists.fedoraproject.org/archives/list/"
                     "devel@mm3test.fedoraproject.org/message/"
                     "HDMTECNRNUHZTSDGM2FDK6LGCMAS2PZ4/")
    expected_secondary_icon = (
        "https://seccdn.libravatar.org/avatar/"
        "35012533ff5290bd2231c7133bd07896?s=64&d=retro")
    expected_packages = set([])
    expected_usernames = set([])
    expected_objects = set([
        "devel/306436886.6773069.1369333725371.JavaMail.root@redhat.com/message",
    ])
    # Verbatim bus capture; exact bytes are the contract -- do not reformat.
    msg = {
        "i": 1,
        "msg": {
            "mlist": {
                "list_name": "devel"
            },
            "msg": {
                "delivered-to": "devel@lists.fedoraproject.org",
                "from": "Jaroslav Reznik <jreznik@redhat.com>",
                "x-mailman-rule-hits": "nonmember-moderation",
                "to": "devel-announce@lists.fedoraproject.org,\n\t"
                "test-announce@lists.fedoraproject.org,\n\t"
                "Fedora Logistics List <logistics@lists.fedoraproject.org>",
                "cc": None,
                "in-reply-to": None,
                "x-message-id-hash": "HDMTECNRNUHZTSDGM2FDK6LGCMAS2PZ4",
                "x-mailman-rule-misses": "approved; emergency; loop; "
                "member-moderation; administrivia; implicit-dest; "
                "max-recipients; max-size; news-moderation; no-subject; "
                "suspicious-header",
                "references": None,
                "archived-at": "/list/devel@mm3test.fedoraproject.org/"
                "message/HDMTECNRNUHZTSDGM2FDK6LGCMAS2PZ4/",
                "message-id": "<306436886.6773069.1369333725371.JavaMail."
                "root@redhat.com>",
                "subject": "[Devel] Fedora 19 Beta status is Go, release on "
                "May 28, 2013"
            }
        },
        "topic": "org.fedoraproject.dev.mailman.receive",
        "username": "mailman",
        "timestamp": 1369334087.9298041
    }
# Generates module docstrings from the class docstrings for the docs build.
add_doc(locals())
| fedora-infra/fedmsg_meta_fedora_infrastructure | fedmsg_meta_fedora_infrastructure/tests/mailman3.py | Python | lgpl-2.1 | 6,354 |
from __future__ import unicode_literals
import random
import re
import six
# Canonical mapping from EC2 resource kind to the short id prefix that AWS
# puts in front of generated identifiers (e.g. 'vpc' -> 'vpc-1a2b3c4d').
# 'reserved-instance' is special: its ids are bare uuid4 hex strings.
EC2_RESOURCE_TO_PREFIX = {
    'customer-gateway': 'cgw',
    'dhcp-options': 'dopt',
    'image': 'ami',
    'instance': 'i',
    'internet-gateway': 'igw',
    'network-acl': 'acl',
    'network-acl-subnet-assoc': 'aclassoc',
    'network-interface': 'eni',
    'network-interface-attachment': 'eni-attach',
    'reserved-instance': 'uuid4',
    'route-table': 'rtb',
    'route-table-association': 'rtbassoc',
    'security-group': 'sg',
    'snapshot': 'snap',
    'spot-instance-request': 'sir',
    'subnet': 'subnet',
    'reservation': 'r',
    'volume': 'vol',
    'vpc': 'vpc',
    'vpc-elastic-ip': 'eipalloc',
    'vpc-elastic-ip-association': 'eipassoc',
    'vpc-peering-connection': 'pcx',
    'vpn-connection': 'vpn',
    'vpn-gateway': 'vgw'}
# Reverse lookup: id prefix -> resource kind.
EC2_PREFIX_TO_RESOURCE = dict((v, k) for (k, v) in EC2_RESOURCE_TO_PREFIX.items())
def random_id(prefix=''):
    """Return a pseudo-random EC2-style id such as ``vpc-1a2b3c4d``."""
    hex_digits = '0123456789abcdef'
    suffix = ''.join(random.choice(hex_digits) for _ in range(8))
    return '{0}-{1}'.format(prefix, suffix)
# Thin wrappers minting a random identifier for each EC2 resource kind.
def random_ami_id():
    """Random AMI id (``ami-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['image'])

def random_instance_id():
    """Random instance id (``i-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['instance'])

def random_reservation_id():
    """Random reservation id (``r-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['reservation'])

def random_security_group_id():
    """Random security group id (``sg-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['security-group'])

def random_snapshot_id():
    """Random snapshot id (``snap-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['snapshot'])

def random_spot_request_id():
    """Random spot instance request id (``sir-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['spot-instance-request'])

def random_subnet_id():
    """Random subnet id (``subnet-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['subnet'])

def random_subnet_association_id():
    """Random route-table association id (``rtbassoc-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['route-table-association'])

def random_network_acl_id():
    """Random network ACL id (``acl-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['network-acl'])

def random_network_acl_subnet_association_id():
    """Random network-ACL/subnet association id (``aclassoc-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['network-acl-subnet-assoc'])

def random_vpn_gateway_id():
    """Random VPN gateway id (``vgw-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['vpn-gateway'])

def random_volume_id():
    """Random volume id (``vol-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['volume'])

def random_vpc_id():
    """Random VPC id (``vpc-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['vpc'])

def random_vpc_peering_connection_id():
    """Random VPC peering connection id (``pcx-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['vpc-peering-connection'])

def random_eip_association_id():
    """Random elastic IP association id (``eipassoc-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['vpc-elastic-ip-association'])

def random_internet_gateway_id():
    """Random internet gateway id (``igw-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['internet-gateway'])

def random_route_table_id():
    """Random route table id (``rtb-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['route-table'])

def random_eip_allocation_id():
    """Random elastic IP allocation id (``eipalloc-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['vpc-elastic-ip'])

def random_dhcp_option_id():
    """Random DHCP options id (``dopt-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['dhcp-options'])

def random_eni_id():
    """Random network interface id (``eni-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['network-interface'])

def random_eni_attach_id():
    """Random network interface attachment id (``eni-attach-xxxxxxxx``)."""
    return random_id(EC2_RESOURCE_TO_PREFIX['network-interface-attachment'])
def random_public_ip():
    """Random public-looking address in 54.214.0.0/16 (last octets 0-254)."""
    third = random.choice(range(255))
    fourth = random.choice(range(255))
    return '54.214.{0}.{1}'.format(third, fourth)
def random_private_ip():
    """Random address in the private 10.0.0.0/8 block (octets 0-254)."""
    octets = [random.choice(range(255)) for _ in range(3)]
    return '10.{0}.{1}.{2}'.format(*octets)
def random_ip():
    """Random loopback-space address 127.x.y.z (octets 0-255)."""
    tail = [random.randint(0, 255) for _ in range(3)]
    return "127.{0}.{1}.{2}".format(*tail)
def generate_route_id(route_table_id, cidr_block):
    """Compose the synthetic route id ``<route_table_id>~<cidr_block>``."""
    return '~'.join([route_table_id, cidr_block])
def split_route_id(route_id):
    """Inverse of generate_route_id: return (route_table_id, cidr_block)."""
    parts = route_id.split('~')
    return parts[0], parts[1]
def instance_ids_from_querystring(querystring_dict):
    """Collect the first value of every parameter mentioning InstanceId."""
    return [values[0] for key, values in querystring_dict.items()
            if 'InstanceId' in key]
def image_ids_from_querystring(querystring_dict):
    """Collect the first value of every parameter mentioning ImageId."""
    return [values[0] for key, values in querystring_dict.items()
            if 'ImageId' in key]
def route_table_ids_from_querystring(querystring_dict):
    """Collect the first value of every parameter mentioning RouteTableId."""
    return [values[0] for key, values in querystring_dict.items()
            if 'RouteTableId' in key]
def network_acl_ids_from_querystring(querystring_dict):
    """Collect the first value of every parameter mentioning NetworkAclId."""
    return [values[0] for key, values in querystring_dict.items()
            if 'NetworkAclId' in key]
def vpc_ids_from_querystring(querystring_dict):
    """Collect the first value of every parameter mentioning VpcId."""
    return [values[0] for key, values in querystring_dict.items()
            if 'VpcId' in key]
def sequence_from_querystring(parameter, querystring_dict):
    """Collect the first value of every key containing *parameter*."""
    return [values[0] for key, values in querystring_dict.items()
            if parameter in key]
def tags_from_query_string(querystring_dict):
    """Extract tags from 'Tag.N.Key'/'Tag.N.Value' querystring parameters.

    Returns ``{tag_key: tag_value}``; a key index with no matching value
    entry maps to None.
    """
    tags = {}
    for key in querystring_dict:
        if not (key.startswith('Tag') and key.endswith('Key')):
            continue
        index = key.replace('Tag.', '').replace('.Key', '')
        name = querystring_dict['Tag.{0}.Key'.format(index)][0]
        value_key = 'Tag.{0}.Value'.format(index)
        if value_key in querystring_dict:
            tags[name] = querystring_dict[value_key][0]
        else:
            tags[name] = None
    return tags
def dhcp_configuration_from_querystring(querystring, option=u'DhcpConfiguration'):
    """Group indexed DHCP option parameters into ``{key: [values]}``.

    Turns querystring entries such as::

        u'DhcpConfiguration.1.Key': [u'domain-name']
        u'DhcpConfiguration.1.Value.1': [u'example.com']
        u'DhcpConfiguration.2.Key': [u'domain-name-servers']
        u'DhcpConfiguration.2.Value.1': [u'10.0.0.6']
        u'DhcpConfiguration.2.Value.2': [u'10.0.0.7']

    into::

        {u'domain-name': [u'example.com'],
         u'domain-name-servers': [u'10.0.0.6', u'10.0.0.7']}
    """
    key_pattern = re.compile(u'{0}.[0-9]+.Key'.format(option), re.UNICODE)
    configuration = {}
    for key, value in querystring.items():
        if not key_pattern.match(key):
            continue
        index = key.split(".")[1]
        # Gather Value.1, Value.2, ... until the numbering stops.
        collected = []
        position = 1
        while True:
            lookup = u'{0}.{1}.Value.{2}'.format(option, index, position)
            if lookup not in querystring:
                break
            collected.extend(querystring[lookup])
            position += 1
        configuration[value[0]] = collected
    return configuration
def optional_from_querystring(parameter, querystring):
    """Return the first value for *parameter*, or None when absent/empty."""
    values = querystring.get(parameter)
    if not values:
        return None
    return values[0]
def filters_from_querystring(querystring_dict):
    """Parse ``Filter.N.Name`` / ``Filter.N.Value.M`` params into {name: [values]}.

    Fix: the index pattern now accepts multi-digit indexes -- ``(\\d)``
    matched a single digit only, so ``Filter.10.Name`` (and above) was
    silently dropped -- and the literal dots are escaped so they no longer
    match arbitrary characters.
    """
    response_values = {}
    for key, value in querystring_dict.items():
        match = re.search(r"Filter\.(\d+)\.Name", key)
        if match:
            filter_index = match.groups()[0]
            value_prefix = "Filter.{0}.Value".format(filter_index)
            filter_values = [filter_value[0] for filter_key, filter_value in querystring_dict.items() if
                             filter_key.startswith(value_prefix)]
            response_values[value[0]] = filter_values
    return response_values
def dict_from_querystring(parameter, querystring_dict):
    """Parse ``<parameter>.N.<prop>`` params into ``{N: {prop: value}}``.

    Fix: the index pattern now accepts multi-digit indexes -- ``(\\d)``
    matched a single digit only, so item 10 and above were dropped -- the
    literal dots are escaped, and *parameter* is escaped so regex
    metacharacters in it are treated literally.
    """
    use_dict = {}
    pattern = re.compile(r"{0}\.(\d+)\.(\w+)".format(re.escape(parameter)))
    for key, value in querystring_dict.items():
        match = pattern.search(key)
        if match:
            index, prop = match.groups()
            use_dict.setdefault(index, {})[prop] = value[0]
    return use_dict
def keypair_names_from_querystring(querystring_dict):
    """Collect the first value of every parameter mentioning KeyName."""
    return [values[0] for key, values in querystring_dict.items()
            if 'KeyName' in key]
def get_object_value(obj, attr):
    """Resolve a dotted *attr* path against attributes or dict keys.

    Attribute access wins over dict lookup at every step; returns None as
    soon as a step can be satisfied by neither.
    """
    current = obj
    for part in attr.split('.'):
        if hasattr(current, part):
            current = getattr(current, part)
        elif isinstance(current, dict):
            current = current[part]
        else:
            return None
    return current
def is_tag_filter(filter_name):
    """True for tag-based filter names: 'tag:<key>', 'tag-value', 'tag-key'."""
    return filter_name.startswith(('tag:', 'tag-value', 'tag-key'))
def get_obj_tag(obj, filter_name):
    """Value of the tag named by a 'tag:<key>' filter, or None if unset."""
    tag_name = filter_name.replace('tag:', '', 1)
    tags = {tag['key']: tag['value'] for tag in obj.get_tags()}
    return tags.get(tag_name)
def get_obj_tag_names(obj):
    """Set of the object's tag keys."""
    return {tag['key'] for tag in obj.get_tags()}
def get_obj_tag_values(obj):
    """Set of the object's tag values."""
    return {tag['value'] for tag in obj.get_tags()}
def tag_filter_matches(obj, filter_name, filter_values):
    """Evaluate a tag filter ('tag-key', 'tag-value' or 'tag:<key>') on obj."""
    wanted = set(filter_values)
    if filter_name == 'tag-key':
        return bool(wanted & get_obj_tag_names(obj))
    if filter_name == 'tag-value':
        return bool(wanted & get_obj_tag_values(obj))
    return get_obj_tag(obj, filter_name) in filter_values
# AWS filter name -> dotted attribute path on the mock Instance object
# (resolved via get_object_value). Both group-id spellings map to the same
# security_groups attribute.
filter_dict_attribute_mapping = {
    'instance-state-name': 'state',
    'instance-id': 'id',
    'state-reason-code': '_state_reason.code',
    'source-dest-check': 'source_dest_check',
    'vpc-id': 'vpc_id',
    'group-id': 'security_groups',
    'instance.group-id': 'security_groups',
    'instance-type': 'instance_type'
}
def passes_filter_dict(instance, filter_dict):
    """Return True when *instance* satisfies every filter in *filter_dict*.

    Supported filter names come from filter_dict_attribute_mapping plus the
    tag filters recognised by is_tag_filter.

    Raises NotImplementedError for unsupported filter names. Fix: the filter
    name is now interpolated into the message -- it was previously passed as
    a spare exception argument (logging-style), leaving a literal '%s' in
    the text.
    """
    for filter_name, filter_values in filter_dict.items():
        if filter_name in filter_dict_attribute_mapping:
            instance_attr = filter_dict_attribute_mapping[filter_name]
            instance_value = get_object_value(instance, instance_attr)
            if not instance_value_in_filter_values(instance_value, filter_values):
                return False
        elif is_tag_filter(filter_name):
            if not tag_filter_matches(instance, filter_name, filter_values):
                return False
        else:
            raise NotImplementedError(
                "Filter dicts have not been implemented in Moto for '%s' "
                "yet. Feel free to open an issue at "
                "https://github.com/spulec/moto/issues" % filter_name)
    return True
def instance_value_in_filter_values(instance_value, filter_values):
    """True when the value (or any element of a list value) is filtered for."""
    if isinstance(instance_value, list):
        return bool(set(filter_values) & set(instance_value))
    return instance_value in filter_values
def filter_reservations(reservations, filter_dict):
    """Keep reservations with at least one matching instance.

    Side effect: a kept reservation has its ``instances`` list replaced by
    only the matching instances.
    """
    kept = []
    for reservation in reservations:
        matching = [inst for inst in reservation.instances
                    if passes_filter_dict(inst, filter_dict)]
        if matching:
            reservation.instances = matching
            kept.append(reservation)
    return kept
# AWS filter name -> dotted attribute path on the mock InternetGateway
# object (resolved via get_object_value).
filter_dict_igw_mapping = {
    "attachment.vpc-id": "vpc.id",
    "attachment.state": "attachment_state",
    "internet-gateway-id": "id",
}
def passes_igw_filter_dict(igw, filter_dict):
    """Return True when *igw* satisfies every filter in *filter_dict*.

    Supported filter names come from filter_dict_igw_mapping plus the tag
    filters recognised by is_tag_filter.

    Raises NotImplementedError for unsupported filter names. Fix: the filter
    name is now interpolated into the message -- it was previously passed as
    a spare exception argument, leaving a literal '%s' in the text.
    """
    for filter_name, filter_values in filter_dict.items():
        if filter_name in filter_dict_igw_mapping:
            igw_attr = filter_dict_igw_mapping[filter_name]
            if get_object_value(igw, igw_attr) not in filter_values:
                return False
        elif is_tag_filter(filter_name):
            if not tag_filter_matches(igw, filter_name, filter_values):
                return False
        else:
            raise NotImplementedError(
                "Internet Gateway filter dicts have not been implemented in "
                "Moto for '%s' yet. Feel free to open an issue at "
                "https://github.com/spulec/moto/issues" % filter_name)
    return True
def filter_internet_gateways(igws, filter_dict):
    """Return the subset of *igws* that matches *filter_dict*."""
    return [igw for igw in igws if passes_igw_filter_dict(igw, filter_dict)]
def is_filter_matching(obj, filter, filter_value):
    """Return True when the object's value for *filter* matches *filter_value*."""
    # The object itself maps the AWS filter name to a concrete value.
    value = obj.get_filter_value(filter)
    if isinstance(value, six.string_types):
        return value in filter_value
    try:
        value = set(value)
        # Match when the object's values are a non-empty subset of the
        # filter values, or a superset of them.
        # NOTE(review): the non-empty guard applies only to the subset test;
        # an empty value set still matches via issuperset of an empty
        # filter_value -- confirm this asymmetry is intended.
        return (value and value.issubset(filter_value)) or value.issuperset(filter_value)
    except TypeError:
        # Non-iterable / unhashable values fall back to plain membership.
        return value in filter_value
def generic_filter(filters, objects):
    """Apply every (name, values) filter in *filters* to *objects*."""
    if not filters:
        return objects
    remaining = objects
    for name, values in filters.items():
        remaining = [obj for obj in remaining
                     if is_filter_matching(obj, name, values)]
    return remaining
def simple_aws_filter_to_re(filter_string):
    """Translate an AWS wildcard filter into a regex string.

    ``*`` and ``?`` behave as shell wildcards (via fnmatch); the escaped
    forms ``\\*`` and ``\\?`` match the literal characters.

    Fix: the replacement targets are now raw strings -- ``'\\?'``/``'\\*'``
    were invalid escape sequences (a DeprecationWarning on Python 3, and
    only working by accident).
    """
    import fnmatch
    tmp_filter = filter_string.replace(r'\?', '[?]')
    tmp_filter = tmp_filter.replace(r'\*', '[*]')
    return fnmatch.translate(tmp_filter)
# not really random ( http://xkcd.com/221/ )
def random_key_pair():
    """Return a fixed fake EC2 key pair (fingerprint + PEM material)."""
    # NOTE(review): the PEM body looks like an X.509 certificate blob despite
    # the "RSA PRIVATE KEY" banner (and the opening banner uses four dashes
    # plus spaces) -- presumably fine for a mock, but confirm nothing tries
    # to actually parse this material.
    return {
        'fingerprint': ('1f:51:ae:28:bf:89:e9:d8:1f:25:5d:37:2d:'
                        '7d:b8:ca:9f:f5:f1:6f'),
        'material': """---- BEGIN RSA PRIVATE KEY ----
MIICiTCCAfICCQD6m7oRw0uXOjANBgkqhkiG9w0BAQUFADCBiDELMAkGA1UEBhMC
VVMxCzAJBgNVBAgTAldBMRAwDgYDVQQHEwdTZWF0dGxlMQ8wDQYDVQQKEwZBbWF6
b24xFDASBgNVBAsTC0lBTSBDb25zb2xlMRIwEAYDVQQDEwlUZXN0Q2lsYWMxHzAd
BgkqhkiG9w0BCQEWEG5vb25lQGFtYXpvbi5jb20wHhcNMTEwNDI1MjA0NTIxWhcN
MTIwNDI0MjA0NTIxWjCBiDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAldBMRAwDgYD
VQQHEwdTZWF0dGxlMQ8wDQYDVQQKEwZBbWF6b24xFDASBgNVBAsTC0lBTSBDb25z
b2xlMRIwEAYDVQQDEwlUZXN0Q2lsYWMxHzAdBgkqhkiG9w0BCQEWEG5vb25lQGFt
YXpvbi5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMaK0dn+a4GmWIWJ
21uUSfwfEvySWtC2XADZ4nB+BLYgVIk60CpiwsZ3G93vUEIO3IyNoH/f0wYK8m9T
rDHudUZg3qX4waLG5M43q7Wgc/MbQITxOUSQv7c7ugFFDzQGBzZswY6786m86gpE
Ibb3OhjZnzcvQAaRHhdlQWIMm2nrAgMBAAEwDQYJKoZIhvcNAQEFBQADgYEAtCu4
nUhVVxYUntneD9+h8Mg9q6q+auNKyExzyLwaxlAoo7TJHidbtS4J5iNmZgXL0Fkb
FFBjvSfpJIlJ00zbhNYS5f6GuoEDmFJl0ZxBHjJnyp378OD8uTs7fLvjx79LjSTb
NYiytVbZPQUQ5Yaxu2jXnimvw3rrszlaEXAMPLE
-----END RSA PRIVATE KEY-----"""
    }
def get_prefix(resource_id):
    """Return the resource-type prefix of an EC2 resource id, or None.

    For most ids this is the text before the first '-' (e.g. 'vpc' for
    'vpc-12345678').  Network-interface attachments ('eni-attach-...') are
    special-cased, and ids shaped like a bare UUID4 hex string are treated
    as reserved instances.
    """
    resource_id_prefix, _separator, after = resource_id.partition('-')
    if resource_id_prefix == EC2_RESOURCE_TO_PREFIX['network-interface']:
        if after.startswith('attach'):
            resource_id_prefix = EC2_RESOURCE_TO_PREFIX['network-interface-attachment']
    if resource_id_prefix not in EC2_RESOURCE_TO_PREFIX.values():
        # BUG FIX: raw string -- '\Z' in a plain string triggers an
        # invalid-escape DeprecationWarning on Python 3.6+.
        uuid4hex = re.compile(r'[0-9a-f]{12}4[0-9a-f]{3}[89ab][0-9a-f]{15}\Z', re.I)
        if uuid4hex.match(resource_id) is not None:
            resource_id_prefix = EC2_RESOURCE_TO_PREFIX['reserved-instance']
        else:
            return None
    return resource_id_prefix
def is_valid_resource_id(resource_id):
    """True when resource_id starts with a known prefix plus 8 hex digits."""
    prefix = get_prefix(resource_id)
    if prefix not in EC2_RESOURCE_TO_PREFIX.values():
        return False
    # match() only anchors at the start, mirroring the historical behaviour.
    pattern = re.compile(prefix + '-[0-9a-f]{8}')
    return pattern.match(resource_id) is not None
def is_valid_cidr(cird):
    """Return True if the argument is a valid IPv4 CIDR string.

    Accepts a dotted-quad address whose octets are 0-255, followed by a
    mandatory '/0'..'/32' prefix length.  (The parameter name 'cird' is a
    historical typo, kept so keyword callers do not break.)
    """
    # BUG FIX: raw strings -- '\.', '\/' and '\d' in plain strings emit
    # invalid-escape DeprecationWarnings on Python 3.6+.
    cidr_pattern = (r'^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}'
                    r'([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])'
                    r'(\/(\d|[1-2]\d|3[0-2]))$')
    cidr_pattern_re = re.compile(cidr_pattern)
    return cidr_pattern_re.match(cird) is not None
| rouge8/moto | moto/ec2/utils.py | Python | apache-2.0 | 16,489 |
# My files
from handlers import MainPage
from handlers import WelcomePage
from handlers import SignUpPage
from handlers import SignIn
from handlers import SignOut
from handlers import NewPost
from handlers import EditPost
from handlers import DeletePost
from handlers import SinglePost
from handlers import LikePost
from handlers import DislikePost
from handlers import EditComment
from handlers import DeleteComment
import webapp2
# URL routing table for the blog: each tuple maps a URL path (optionally
# with a numeric post-id capture group passed to the handler) to its
# webapp2 request handler class.
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/signup', SignUpPage),
    ('/welcome', WelcomePage),
    ('/post/([0-9]+)', SinglePost),
    ('/new-post', NewPost),
    ('/edit-post/([0-9]+)', EditPost),
    ('/delete-post', DeletePost),
    ('/like-post', LikePost),
    ('/dislike-post', DislikePost),
    ('/edit-comment', EditComment),
    ('/delete-comment', DeleteComment),
    ('/login', SignIn),
    ('/logout', SignOut)
], debug=True)  # debug=True renders tracebacks in the browser; disable for production
| joepettigrew/multi-blog | main.py | Python | mit | 890 |
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import TestCase, assert_equal, assert_almost_equal
from scipy.special import logit, expit
class TestLogit(TestCase):
    """Regression values for scipy.special.logit at float32/float64."""

    def check_logit_out(self, dtype, expected):
        """Run logit over linspace(0, 1, 10) cast to *dtype* and compare."""
        a = np.linspace(0, 1, 10)
        a = np.array(a, dtype=dtype)
        # logit(0)/logit(1) legitimately divide by zero -> -inf/+inf.
        olderr = np.seterr(divide='ignore')
        try:
            actual = logit(a)
        finally:
            np.seterr(**olderr)

        # BUG FIX: the original compared version *strings*
        # (np.__version__ >= '1.6'), which is wrong for numpy >= 1.10
        # because '1.10' < '1.6' lexicographically.  Compare numeric
        # (major, minor) tuples instead.
        version_parts = np.__version__.split('.')[:2]
        np_version = tuple(int(p) for p in version_parts if p.isdigit())
        if np_version >= (1, 6):
            assert_almost_equal(actual, expected)
        else:
            # Very old numpy mishandled comparison at the +/-inf endpoints.
            assert_almost_equal(actual[1:-1], expected[1:-1])

        assert_equal(actual.dtype, np.dtype(dtype))

    def test_float32(self):
        expected = np.array([-np.inf, -2.07944155,
                             -1.25276291, -0.69314718,
                             -0.22314353, 0.22314365,
                             0.6931473, 1.25276303,
                             2.07944155, np.inf], dtype=np.float32)
        self.check_logit_out('f4', expected)

    def test_float64(self):
        expected = np.array([-np.inf, -2.07944154,
                             -1.25276297, -0.69314718,
                             -0.22314355, 0.22314355,
                             0.69314718, 1.25276297,
                             2.07944154, np.inf])
        self.check_logit_out('f8', expected)

    def test_nan(self):
        # logit outside [0, 1] is undefined -> NaN (suppress the warning).
        expected = np.array([np.nan]*4)
        olderr = np.seterr(invalid='ignore')
        try:
            actual = logit(np.array([-3., -2., 2., 3.]))
        finally:
            np.seterr(**olderr)
        assert_equal(expected, actual)
class TestExpit(TestCase):
    """Regression values for scipy.special.expit (the logistic function)."""

    def check_expit_out(self, dtype, expected):
        # Evaluate expit over 10 evenly spaced points in [-4, 4] at *dtype*.
        a = np.linspace(-4,4,10)
        a = np.array(a, dtype=dtype)
        actual = expit(a)
        assert_almost_equal(actual, expected)
        # The output dtype must match the input dtype exactly.
        assert_equal(actual.dtype, np.dtype(dtype))

    def test_float32(self):
        expected = np.array([0.01798621, 0.04265125,
                            0.09777259, 0.20860852,
                            0.39068246, 0.60931754,
                            0.79139149, 0.9022274,
                            0.95734876, 0.98201376], dtype=np.float32)
        self.check_expit_out('f4',expected)

    def test_float64(self):
        expected = np.array([0.01798621, 0.04265125,
                            0.0977726, 0.20860853,
                            0.39068246, 0.60931754,
                            0.79139147, 0.9022274,
                            0.95734875, 0.98201379])
        self.check_expit_out('f8', expected)
| beiko-lab/gengis | bin/Lib/site-packages/scipy/special/tests/test_logit.py | Python | gpl-3.0 | 2,675 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Rudimentarily converts the PDB file to mmCIF format.
Will convert only the coordinate section.
Usage:
python pdb_tocif.py <pdb file>
Example:
python pdb_tocif.py 1CTF.pdb
This program is part of the `pdb-tools` suite of utilities and should not be
distributed isolatedly. The `pdb-tools` were created to quickly manipulate PDB
files using the terminal, and can be used sequentially, with one tool streaming
data to another. They are based on old FORTRAN77 code that was taking too much
effort to maintain and compile. RIP.
"""
import os
import sys
__author__ = "Joao Rodrigues"
__email__ = "j.p.g.l.m.rodrigues@gmail.com"
def check_input(args):
    """Decide whether to read from stdin or a file, validating the arguments.

    Returns an open file handle (sys.stdin when no argument is given).
    On any problem, writes an optional error plus the usage string to
    stderr and exits with status 1.
    """
    def _usage_and_exit(message=None):
        # Emit an optional error message followed by the module usage text.
        if message is not None:
            sys.stderr.write(message)
        sys.stderr.write(__doc__)
        sys.exit(1)

    if len(args) == 0:
        # No arguments: expect piped input; a terminal means the user
        # forgot to provide anything.
        if sys.stdin.isatty():
            _usage_and_exit()
        return sys.stdin
    if len(args) == 1:
        path = args[0]
        if not os.path.isfile(path):
            _usage_and_exit('ERROR!! File not found or not readable: \'{}\'\n'.format(path))
        return open(path, 'r')
    # More than one argument is always an error.
    _usage_and_exit('ERROR!! Script takes 1 argument, not \'{}\'\n'.format(len(args)))
def pad_line(line):
    """Pad *line* with spaces to 80 columns (plus newline) when shorter."""
    if len(line) < 80:
        # Strip the newline, pad out to 80 columns, then re-append it.
        line = '{}{}\n'.format(line.strip('\n'), ' ' * (81 - len(line)))
    return line[:81]  # truncate to 80 characters + newline
def convert_to_mmcif(fhandle):
    """Converts a structure in PDB format to mmCIF format.

    Generator: yields the output one line at a time -- first the mmCIF
    data block header and the atom_site loop definition, then one row per
    ATOM/HETATM record.  Only the coordinate section is converted.
    """
    _pad_line = pad_line

    # The spacing here is just aesthetic purposes when printing the file
    _a = '{:<6s} {:5d} {:2s} {:6s} {:1s} {:3s} {:3s} {:1s} {:5s} {:1s} '
    _a += '{:10.3f} {:10.3f} {:10.3f} {:10.3f} {:10.3f} {:1s} '
    _a += '{:5s} {:3s} {:1s} {:4s} {:1d}\n'

    yield '# Converted to mmCIF by pdb-tools\n'
    yield '#\n'

    # Headers: derive the data block name from the input file name.
    fname, _ = os.path.splitext(os.path.basename(fhandle.name))
    if fname == '<stdin>':
        fname = 'cell'
    yield 'data_{}\n'.format(fname)
    yield '#\n'
    yield 'loop_\n'
    yield '_atom_site.group_PDB\n'
    yield '_atom_site.id\n'
    yield '_atom_site.type_symbol\n'
    yield '_atom_site.label_atom_id\n'
    yield '_atom_site.label_alt_id\n'
    yield '_atom_site.label_comp_id\n'
    yield '_atom_site.label_asym_id\n'
    yield '_atom_site.label_entity_id\n'
    yield '_atom_site.label_seq_id\n'
    yield '_atom_site.pdbx_PDB_ins_code\n'
    yield '_atom_site.Cartn_x\n'
    yield '_atom_site.Cartn_y\n'
    yield '_atom_site.Cartn_z\n'
    yield '_atom_site.occupancy\n'
    yield '_atom_site.B_iso_or_equiv\n'
    yield '_atom_site.pdbx_formal_charge\n'
    yield '_atom_site.auth_seq_id\n'
    yield '_atom_site.auth_comp_id\n'
    yield '_atom_site.auth_asym_id\n'
    yield '_atom_site.auth_atom_id\n'
    yield '_atom_site.pdbx_PDB_model_num\n'

    # Coordinate data: serial numbers restart from 1 and models are counted
    # via ENDMDL records.  In mmCIF, '?' marks a missing/unknown value.
    model_no = 1
    serial = 0
    records = (('ATOM', 'HETATM'))
    for line in fhandle:
        if line.startswith(records):
            # Pad so the fixed-column slices below are always in range.
            line = _pad_line(line)

            record = line[0:6].strip()
            serial += 1
            element = line[76:78].strip()
            if not element:
                element = '?'
            atname = line[12:16].strip()
            # Atom names containing a prime must be double-quoted in mmCIF.
            atname = atname.replace('"', "'")
            if "'" in atname:
                atname = '"{}"'.format(atname)
            altloc = line[16]
            if altloc == ' ':
                altloc = '?'
            resname = line[17:20]
            chainid = line[21]
            if chainid == ' ':
                chainid = '?'
            resnum = line[22:26].strip()
            icode = line[26]
            if icode == ' ':
                icode = '?'
            x = float(line[30:38])
            y = float(line[38:46])
            z = float(line[46:54])
            occ = float(line[54:60])
            bfac = float(line[60:66])
            charge = line[78:80].strip()
            if charge == '':
                charge = '?'

            yield _a.format(record, serial, element, atname, altloc,
                            resname, chainid, '?', resnum, icode, x, y, z, occ,
                            bfac, charge, resnum, resname, chainid, atname,
                            model_no)

        elif line.startswith('ENDMDL'):
            model_no += 1

        else:
            continue

    yield '#'  # close block
def main():
    """Read a PDB file (or stdin), convert it to mmCIF, and stream to stdout."""
    # Check Input
    pdbfh = check_input(sys.argv[1:])

    # Do the job
    new_cif = convert_to_mmcif(pdbfh)

    try:
        _buffer = []
        _buffer_size = 5000  # write N lines at a time
        for lineno, line in enumerate(new_cif):
            # Flush the buffer every _buffer_size lines (and harmlessly
            # once at lineno == 0, when the buffer is still empty).
            if not (lineno % _buffer_size):
                sys.stdout.write(''.join(_buffer))
                _buffer = []
            _buffer.append(line)

        # Flush whatever remains after the loop.
        sys.stdout.write(''.join(_buffer))
        sys.stdout.flush()
    except IOError:
        # This is here to catch Broken Pipes
        # for example to use 'head' or 'tail' without
        # the error message showing up
        pass

    # last line of the script
    # We can close it even if it is sys.stdin
    pdbfh.close()
    sys.exit(0)
if __name__ == '__main__':
main()
| JoaoRodrigues/pdb-tools | pdbtools/pdb_tocif.py | Python | apache-2.0 | 6,175 |
import json
import os
from datetime import datetime
CONCEPT_MAP = {'people': 'Person',
'places': 'Place',
'things': 'Thing',
'activities': 'Activity',
'times': 'Time',
'mood_words': 'Mood'}
def dump_mem_to_json(mem_dict, save=None):
    """
    Convert mem_dict into a memory dict following the schema and optionally
    write it to *save* as JSON.

    Args:
        mem_dict: dictionary of mem information; recognized special keys are
            'img_url', 'narrative' and 'mood_weight', every other key is
            treated as a concept-type list (people, places, ...)
        save: path to save JSON to [default: None]

    Returns:
        mem: a plain (JSON-serializable) dict for the memory
    """
    node = {'name': '',
            'label': '',
            'imageURL': mem_dict.get('img_url', ''),
            'iconURL': '',
            'created': '',
            'updated': ''}

    default_mood_weights = {'weight': 0.5,
                            'joy': 0.0,
                            'fear': 0.0,
                            'surprise': 0.0,
                            'sadness': 0.0,
                            'disgust': 0.0,
                            'anger': 0.0}
    relation = mem_dict.get('mood_weight', default_mood_weights)

    concepts = []
    for concept_type, concept_items in mem_dict.items():
        if concept_items is None:
            continue
        # Non-concept keys are handled separately above/below.
        if concept_type in ('img_url', 'narrative', 'mood_weight'):
            continue
        for concept_item in concept_items:
            # Normalize concept names: spaces to underscores, lowercase.
            clean_text = concept_item.replace(' ', '_')
            clean_text = clean_text.lower()
            concept = {'node': {}, 'relation': {}}
            concept['node'] = {'concept': CONCEPT_MAP[concept_type],
                               'name': clean_text,
                               'label': '',
                               'iconURL': '',
                               'imageURL': ''}
            concept_relation = 'Has_{}'.format(concept['node']['concept'])
            concept['relation'] = {'relation': concept_relation,
                                   'name': '',
                                   'iconURL': '',
                                   'imageURL': '',
                                   'weight': 0.5,
                                   'created': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.000Z'),
                                   'updated': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.000Z'),
                                   'originType': 'OriginUserDefined',
                                   'joy': 0.0,
                                   'fear': 0.0,
                                   'surprise': 0.0,
                                   'sadness': 0.0,
                                   'disgust': 0.0,
                                   'anger': 0.0}
            concepts.append(concept)

    narrative = {'node': {'name': '',
                          'label': 'title',
                          # BUG FIX: tolerate a missing 'narrative' key
                          # (previously raised KeyError, unlike the .get()
                          # treatment of the other special keys).
                          'text': mem_dict.get('narrative', '')},
                 'relation': {'weight': 0.5}}

    mem = {'memory': '',
           'node': node,
           'relation': relation,
           'concepts': concepts,
           'narrative': narrative}

    if save is not None:
        with open(os.path.abspath(save), 'w') as f:
            json.dump(mem, f)

    # Return a shallow copy so the caller cannot alias our top-level dict.
    return dict(mem)
| CDIPS-AI-2017/pensieve | pensieve/json_dump.py | Python | apache-2.0 | 3,188 |
# Copyright 2015-present Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division, print_function, absolute_import
import tensorflow as tf
import random
from sklearn import datasets, metrics
from sklearn.cross_validation import train_test_split
from tensorflow.contrib.skflow.python import skflow
class CustomOptimizer(tf.test.TestCase):
    """Train a DNN on Iris using a custom momentum optimizer with LR decay."""

    def testIrisMomentum(self):
        # Fixed seeds keep the train/test split and training deterministic.
        random.seed(42)

        iris = datasets.load_iris()
        X_train, X_test, y_train, y_test = train_test_split(iris.data,
                                                            iris.target,
                                                            test_size=0.2,
                                                            random_state=42)
        # setup exponential decay function
        def exp_decay(global_step):
            return tf.train.exponential_decay(
                learning_rate=0.1, global_step=global_step,
                decay_steps=100, decay_rate=0.001)

        # Factory: skflow calls this with the (decayed) learning rate.
        custom_optimizer = lambda x: tf.train.MomentumOptimizer(x, 0.9)
        classifier = skflow.TensorFlowDNNClassifier(hidden_units=[10, 20, 10],
                                                    n_classes=3, steps=800,
                                                    learning_rate=exp_decay,
                                                    optimizer=custom_optimizer)
        classifier.fit(X_train, y_train)
        score = metrics.accuracy_score(y_test, classifier.predict(X_test))

        self.assertGreater(score, 0.7, "Failed with score = {0}".format(score))
if __name__ == "__main__":
tf.test.main()
| panmari/tensorflow | tensorflow/contrib/skflow/python/skflow/tests/test_estimators.py | Python | apache-2.0 | 2,170 |
import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import cuda, optimizers, serializers, Variable
from chainer import training
from chainer.training import extensions
import argparse
import sys
import json
sys.path.insert(0, '../')
import common.datasets as datasets
from common.models.generators import *
from common.models.discriminators import *
from common.utils import *
import settings
def main():
    """Load a tagged face dataset and dump its first 10 samples for inspection."""
    parser = argparse.ArgumentParser(
        description='Check Dataset')
    parser.add_argument("--load_dataset", default='game_faces_tags_train', help='load dataset')
    parser.add_argument("--dataset_path", "-d", default=settings.GAME_FACE_PATH,
                        help='dataset directory')
    args = parser.parse_args()
    print(args)

    # Resolve the dataset loader by name from common.datasets.
    train_dataset = getattr(datasets, args.load_dataset)(path=args.dataset_path)

    # attr.json: presumably an indexed list of tag descriptors whose first
    # field is the tag name (printed next to each tag value below) -- confirm.
    with open(args.dataset_path + "/attr.json") as f:
        attr = json.load(f)

    # Save each sample image and print its per-tag values.
    for i in range(10):
        print("Image %d:" % i)
        img, tags = train_dataset.get_example(i)
        save_single_image(img, "samples_"+str(i)+".jpg")
        for j in range(len(attr)):
            print(j, end=" ")
            print(attr[j][0],end=" ")
            print(tags[j])
            print("")
if __name__ == '__main__':
main()
| Aixile/chainer-gan-experiments | tools/check_dataset.py | Python | mit | 1,320 |
import requests
from requests import exceptions
import sys
import json
class TagsConn(object):
    """Builds and fires a single tags-API HTTP request (Python 2 / requests).

    The combination of tag_id / tag_name / delete_tag passed to the
    constructor determines the verb chosen by set_request():
    name only -> POST (create), id + name -> PUT (rename),
    id + delete_tag -> DELETE, otherwise GET (one tag or the collection).

    NOTE(review): every request failure prints the error and calls
    sys.exit(-1), which aborts the whole process -- confirm that is
    intended for library-style use.
    """
    base_url = None
    info_uri = None
    role_id = None
    response = None

    def __init__(self, access_token, api_key, base_url, info_uri, tag_id=None, tag_name=None, delete_tag=False):
        self.base_url = base_url
        self.info_uri = info_uri
        self._set_access_token(access_token)
        self._set_api_key(api_key)
        self.tag_id = tag_id
        self.tag_name = tag_name
        self.delete_tag = delete_tag
        # Common headers for every request issued by this instance.
        self.headers = {
            "Vnd-HMH-Api-Key": self.api_key,
            "Authorization": self.access_token,
            "Accept": "application/json",
            "Content-Type": "application/json"
        }

    def _set_access_token(self, access_token):
        # Python 2: accept both byte strings and unicode.
        assert isinstance(access_token, (str, unicode)), access_token
        self.access_token = access_token

    def _set_api_key(self, api_key):
        assert isinstance(api_key, (str, unicode)), api_key
        self.api_key = api_key

    def _delete_request(self):
        # DELETE <base><uri>/<tag_id>
        try:
            request_url = self.base_url + self.info_uri + "/" + self.tag_id
            r = requests.delete(request_url, headers=self.headers)
            r.raise_for_status()
            return r.json()
        except exceptions.RequestException as e:
            print e
            sys.exit(-1)

    def _put_request(self):
        # PUT <base><uri>/<tag_id> with the new name as the JSON body.
        # NOTE(review): this serializes tag_name bare, while _post_request
        # wraps it as {"name": ...} -- verify the API accepts both shapes.
        try:
            request_url = self.base_url + self.info_uri + "/" + self.tag_id
            r = requests.put(url=request_url, data=json.dumps(self.tag_name), headers=self.headers)
            r.raise_for_status()
            return r.json()
        except exceptions.RequestException as e:
            print e
            sys.exit(-1)

    def _get_request(self):
        # GET a single tag when tag_id is set, otherwise the whole collection.
        try:
            request_url = self.base_url + self.info_uri + "/" + self.tag_id if self.tag_id else \
                self.base_url + self.info_uri
            r = requests.get(request_url, headers=self.headers)
            r.raise_for_status()
            return r.json()
        except exceptions.RequestException as e:
            print e
            sys.exit(-1)

    def _post_request(self):
        # POST <base><uri> with {"name": tag_name} to create a tag.
        try:
            request_url = self.base_url + self.info_uri
            r = requests.post(url=request_url, data=json.dumps({"name": self.tag_name}), headers=self.headers)
            r.raise_for_status()
            return r.json()
        except exceptions.RequestException as e:
            print e
            sys.exit(-1)

    def set_request(self):
        """Pick the HTTP verb from the constructor args; stash the response."""
        if self.tag_name:
            if self.tag_id:
                response = self._put_request()
            else:
                response = self._post_request()
        else:
            if self.tag_id:
                if self.delete_tag:
                    response = self._delete_request()
                else:
                    response = self._get_request()
            else:
                response = self._get_request()
        self.response = response

    def get_response(self):
        # Decoded JSON of the last request, or None before set_request().
        return self.response
class Tags(object):
    """Thin facade over TagsConn: one method per CRUD operation.

    Call set_access_token() / set_api_key() before issuing any request.
    Each request method stores the decoded response on the instance;
    retrieve it with get_response().
    """
    response = None

    def __init__(self):
        self.access_token = None
        self.api_key = None

    def set_access_token(self, access_token):
        # Python 2: accept both byte strings and unicode.
        assert isinstance(access_token, (str, unicode)), access_token
        self.access_token = access_token

    def set_api_key(self, api_key):
        assert isinstance(api_key, (str, unicode)), api_key
        self.api_key = api_key

    def get_all(self, base_url, info_uri):
        """Fetch every tag of this type (GET collection)."""
        print "Requesting all", self.__class__.__name__, "type tags"
        req = TagsConn(access_token=self.access_token, api_key=self.api_key, base_url=base_url, info_uri=info_uri)
        req.set_request()
        response = req.get_response()
        self.response = response

    def get_by_id(self, base_url, info_uri, tag_id):
        """Fetch a single tag by id (GET item)."""
        print "Requesting", self.__class__.__name__, "having ID", tag_id
        req = TagsConn(access_token=self.access_token, api_key=self.api_key, base_url=base_url, info_uri=info_uri,
                       tag_id=tag_id)
        req.set_request()
        response = req.get_response()
        self.response = response

    def add(self, base_url, info_uri, name):
        """Create a new tag named *name* (POST)."""
        req = TagsConn(access_token=self.access_token, api_key=self.api_key, base_url=base_url, info_uri=info_uri,
                       tag_name=name)
        req.set_request()
        response = req.get_response()
        self.response = response

    def modify(self, base_url, info_uri, tag_id, new_name):
        """Rename an existing tag (PUT)."""
        req = TagsConn(access_token=self.access_token, api_key=self.api_key, base_url=base_url, info_uri=info_uri,
                       tag_id=tag_id, tag_name=new_name)
        req.set_request()
        response = req.get_response()
        self.response = response

    def delete(self, base_url, info_uri, tag_id):
        """Delete a tag by id (DELETE)."""
        req = TagsConn(access_token=self.access_token, api_key=self.api_key, base_url=base_url, info_uri=info_uri,
                       tag_id=tag_id, delete_tag=True)
        req.set_request()
        response = req.get_response()
        self.response = response

    def get_response(self):
        # Decoded JSON of the last request, or None before any request.
        return self.response
| apanimesh061/hmh-client | hmh-client/API/Tags/Tags.py | Python | gpl-2.0 | 5,349 |
#!/usr/bin/env python3
import sys, base64, io
import xml.etree.ElementTree as ET
import xml.dom.minidom as md
from PIL import Image
from struct import unpack, unpack_from, iter_unpack
from pprint import pprint
from collections import defaultdict, namedtuple
def unpack_15bpp(pixel):
    """Expand a packed 15-bit (5-5-5) pixel into an (r, g, b) tuple 0-248."""
    red5 = (pixel & 0b0111110000000000) >> 10
    green5 = (pixel & 0b0000001111100000) >> 5
    blue5 = pixel & 0b0000000000011111
    # Scale each 5-bit channel to 8 bits by multiplying by 8.
    return (red5 * 8, green5 * 8, blue5 * 8)
class DataElement(ET.Element):
    """An ElementTree element that also carries a raw ``data`` payload.

    ``data`` is stored on the instance ``__dict__`` rather than in the XML
    attribute dictionary, so it never leaks into serialized output.
    """

    def __init__(self, tag, attrib={}):
        # attrib is never mutated here (ET.Element copies it), so the
        # mutable default is safe; kept for signature parity with ET.Element.
        ET.Element.__init__(self, tag, attrib=attrib)
        self.data = None

    def __setattr__(self, name, value):
        # BUG FIX: compare with '==' -- 'name is "data"' identity-compares
        # string literals, which relies on interning and raises a
        # SyntaxWarning on Python 3.8+.
        if name == "data":
            self.__dict__["data"] = value
        else:
            ET.Element.__setattr__(self, name, value)

    def __getattr__(self, name):
        # Only invoked when normal lookup fails; 'data' is set in __init__,
        # so this branch is mostly a safety net.
        if name == "data":
            return self.__dict__["data"]
        raise AttributeError(name)
def element(name, attrib={}, text=None):
    """Create a DataElement, optionally pre-filling its text content."""
    node = DataElement(name, attrib)
    if text is not None:
        node.text = str(text)
    return node
def parse_null(tree, node, data):
    # Intentionally ignore the payload (used for DATA and "END " blocks).
    pass
def to_hex(string):
    """Render a bytes object as a lowercase hex string."""
    return ''.join('{:02x}'.format(byte) for byte in string)
def parse_hex(tree, node, data):
    # Store the raw payload as a hex dump in the node's text.
    node.text = to_hex(data)
def parse_string(tree, node, data):
    """Decode a NUL-terminated string payload into the node's text.

    BUG FIX: tolerates a missing NUL terminator (the whole payload is
    used) instead of raising ValueError from bytes.index().
    """
    node.text = data.split(b'\x00', 1)[0].decode()
def parse_ifhd(tree, node, data):
    # IFHD is fixed-size and NUL-padded; hex-dump only the meaningful bytes.
    node.text = to_hex(data.rstrip(b'\x00'))
def parse_bhdr(tree, node, data):
    """Decode the 26-byte BHDR (bitmap header) into one child per field.

    The u0-u6 fields are not yet understood; the observed-value notes on
    each line below record reverse-engineering evidence.
    """
    u0, u1, width, height, u2, frames, u3, u4, u5, u6, flags = unpack("<4H6s6H", data)
    node.append(element("width", text=width))
    node.append(element("height", text=height))
    node.append(element("frames", text=frames))
    node.append(element("u0", text=u0)) #correlation with width,
    node.append(element("u1", text=u1)) #some kind of offset
    node.append(element("u2", text=to_hex(u2))) #01, 02, or 03
    node.append(element("u3", text=u3)) #values; 0 71 171 157 100 85 214 128 42 114 142 257 200 57 228
    node.append(element("u4", text=u4)) #0 or 2
    node.append(element("u5", text=u5)) #always 0
    node.append(element("u6", text=u6)) #values 256 92 115 122 174 130 153 51 76 43 56 64 163 46 110 148 156 140 112 53 120 135 125 102 97 145 99 61 38 40 58 33 74 79 35 184 158 202 143 189 171 161 117 220 197 66 151 104 138 107 207 94 186 87 243 71 235 227 84 168
    node.append(element("flags", text=flags)) #0 = uncompressed, 4 and 6 = compressed
def parse_cmap(tree, node, data):
    """Emit one <colour> child per non-black palette entry (stored G,B,R)."""
    for index, (g, b, r) in enumerate(iter_unpack("3B", data)):
        if r == 0 and g == 0 and b == 0:
            continue
        node.append(element("colour", text="%d %d %d" % (r, g, b),
                            attrib={"id": str(index)}))
def parse_hicl(tree, node, data):
    """Emit one <colour> child per non-black 15-bit big-endian palette entry."""
    for index, packed in enumerate(iter_unpack(">H", data)):
        r, g, b = unpack_15bpp(packed[0])
        if r == 0 and g == 0 and b == 0:
            continue
        node.append(element("colour", text="%d %d %d" % (r, g, b),
                            attrib={"id": str(index)}))
def parse_htbl(tree, node, data):
    """Split the payload into 256-byte rows; keep an XML hex dump plus raw rows."""
    rows = []
    for index, row in enumerate(iter_unpack("256s", data)):
        node.append(element("unknown", text=to_hex(row[0]),
                            attrib={"id": str(index)}))
        rows.append(row[0])
    # The raw rows are consumed later when building per-row palettes.
    node.data = rows
def parse_unknown(tree, node, data):
    """Warn about an unrecognized block type and keep its payload as hex."""
    sys.stderr.write("Unknown block type: %s\n" % node.tag)
    # BUG FIX: parse_raw() is not defined anywhere in this file, so hitting
    # an unknown block crashed with NameError; fall back to the hex dumper.
    parse_hex(tree, node, data)
def constant_factory(value):
    """Return a zero-argument callable that always produces *value*."""
    def produce():
        return value
    return produce
# Dispatch table: 4-character block name -> parser function.  Block types
# not listed here fall back to parse_unknown via the defaultdict factory.
# The trailing comments record each block's observed payload size.
parsers = defaultdict(constant_factory(parse_unknown))
parsers.update({
    "IFHD": parse_ifhd, #44 bytes fixed
    "BHDR": parse_bhdr, #26 bytes fixed
    "NAME": parse_string, #variable
    "DATA": parse_null, #variable
    "HICL": parse_hicl, #256 fixed
    "HTBL": parse_htbl, #8192 fixed
    "CMAP": parse_cmap, #768 fixed #parse_cmap
    "END ": parse_null,
})
def blocks(data):
    """Yield (name, length, payload) for each chunk in an IFF-style stream.

    Each chunk is an 8-byte big-endian header (4-char name + uint32 length)
    followed by `length` payload bytes.  A trailing fragment shorter than a
    header is ignored.
    """
    remaining = data
    while len(remaining) >= 8:
        name, length = unpack_from('>4sI', remaining, 0)
        yield (name.decode(), length, remaining[8:8 + length])
        remaining = remaining[8 + length:]
def readGroup(tree, bsif, data):
    """Parse every block in *data*, appending one child element per block."""
    # Skip an optional 8-byte BSIF container header.
    # Battlespires engine does the same
    if b'BSIF' == unpack_from('>4s', data, 0)[0]:
        data = data[8:]
    for name, length, childdata in blocks(data):
        node = element(name, {"_length": str(length)})
        node.data = childdata
        # Dispatch on the block name; unknown names fall back to parse_unknown.
        parsers[name](tree, node, childdata)
        bsif.append(node)
    return bsif
# --- Script body: argv[1] = input BSI file, argv[2] = image output path
# --- (optional), argv[3] = XML output path (optional).
data = b''
with open(sys.argv[1], "rb") as fd:
    data = fd.read()
root = ET.Element("BSIF", {"_length": str(len(data))})
tree = ET.ElementTree(root)
readGroup(tree, root, data)
# Pretty-print the parsed structure to argv[3], or stdout when not given.
xml = md.parseString(ET.tostring(root)).toprettyxml()
if len(sys.argv) > 3:
    with open(sys.argv[3], "w") as fd:
        fd.write(xml)
else:
    print(xml)
# Without an image output path there is nothing more to do.
if len(sys.argv) < 3:
    exit()
# Pull the decoded blocks needed to reconstruct the image.
flags = int(tree.find(".//BHDR/flags").text)
width = int(tree.find(".//BHDR/width").text)
height = int(tree.find(".//BHDR/height").text)
frames = int(tree.find(".//BHDR/frames").text)
cmap = tree.find(".//CMAP").data
data = tree.find(".//DATA").data
hicl = tree.find(".//HICL").data
htbl = tree.find(".//HTBL")
def decomp(data, width, height):
    """Decompress scanline-indexed image data into a flat bytes pixel buffer.

    The first height*4 bytes of *data* form a row table: for each scanline,
    a 3-byte little-endian offset into *data* plus a 1-byte compression
    flag.  Flag 0x80 means the row is RLE-encoded: a control byte with the
    high bit set introduces a run of (control & 0x7F) copies of the next
    byte, otherwise the control byte counts literal pixels that follow.
    Flag 0 means the row is stored raw.  Any other flag raises Exception.
    """
    out = bytearray()
    for offset, comp in iter_unpack("<3sB", data[:height * 4]):
        offset = int.from_bytes(offset, 'little')
        if comp == 0x80:  # rle compression
            f = io.BytesIO(data[offset:])
            i = 0
            while i < width:
                c = f.read(1)[0]
                if c & 0x80:
                    # Run: repeat the next byte (c & 0x7F) times.
                    pixel = f.read(1)[0]
                    c &= 0x7F
                    out.extend([pixel] * c)
                    i += c
                else:
                    # Literal: copy the next c bytes verbatim.
                    out.extend(f.read(c))
                    i += c
        elif comp == 0:
            # Uncompressed scanline: copy width bytes straight across.
            out.extend(data[offset:offset + width])
        else:
            # BUG FIX: interpolate the flag into the message -- the original
            # passed the format string and value as two exception arguments,
            # so the '%s' was never filled in.
            raise Exception("Unknown compression %s" % hex(comp))
    return bytes(out)
# Decompress when the BHDR flags say the pixel data is compressed.
if flags != 0:
    data = decomp(data, width, height * frames)
data = [x & 0x7F for x in data] #how to handle values over 127?
def bpp15_to_rgb(data):
    # Expand 128 big-... little-endian 15-bit entries into a flat
    # [r, g, b, r, g, b, ...] list suitable for PIL's putpalette().
    ret = []
    for x in unpack("<128H", data):
        for y in unpack_15bpp(x):
            ret.append(y)
    return ret
# Build every candidate palette: per-row HTBL palettes (when present),
# the HICL 15-bit palette, and the CMAP 6-bit-per-channel palette.
palettes = dict()
if htbl:
    for x in range(0, 32):
        palettes["htbl-%2d" % x] = bpp15_to_rgb(htbl.data[x])
palettes["hicl"] = bpp15_to_rgb(hicl)
palettes["cmap"] = [x * 4 for x in cmap]
# Save one image per palette; argv[2] is a %s-style filename template.
out = Image.new("P", (width, height * frames))
out.putdata(data)
for name, palette in palettes.items():
    out.putpalette(palette)
    out.save(sys.argv[2] % name)
exit(0)
# NOTE(review): everything below is unreachable after exit(0) above --
# it looks like an older single-palette code path kept for reference.
palette = bpp15_to_rgb(hicl)
#palette = [x*4 for x in cmap]
#palette = []
#for x in iter_unpack("<H", hicl):
#    x = x[0]
#    palette.append(4 * (x & 0b0111110000000000) >> 10)
#    palette.append(4 * (x & 0b0000001111100000) >> 5)
#    palette.append(4 * (x & 0b0000000000011111) >> 0)
#alpha = [0 if x == 0 else 1 for x in data]
out = Image.new("P", (width, height * frames))
out.putpalette(palette)
out.putdata(data)
#pil sucks
#out = out.convert(mode="RGBA")
#alphalayer = Image.new("1", (width, height * frames))
#alphalayer.putdata(alpha)
#out.putalpha(alphalayer)
out.save(sys.argv[2])
| ariscop/battlespire-tools | bsitool/bsitool.py | Python | unlicense | 7,153 |
#!/usr/bin/env python
"""Configuration parameters for the client."""
from grr.lib import config_lib
from grr.lib.rdfvalues import crypto
# General Client options.
config_lib.DEFINE_string("Client.name", "GRR",
"The name of the client. This will be used as a base "
"name to generate many other default parameters such "
"as binary names and service names. Note that on "
"Linux we lowercase the name to confirm with most "
"linux naming conventions.")
config_lib.DEFINE_string("Client.binary_name", "%(Client.name)",
"The name of the client binary.")
config_lib.DEFINE_list("Client.labels", [],
"Labels for this client.")
config_lib.DEFINE_string("Client.company_name", "GRR Project",
"The name of the company which made the client.")
config_lib.DEFINE_string("Client.description", "%(name) %(platform) %(arch)",
"A description of this specific client build.")
config_lib.DEFINE_string("Client.platform", "windows",
"The platform we are running on.")
config_lib.DEFINE_string("Client.arch", "amd64",
"The architecture we are running on.")
config_lib.DEFINE_string("Client.build_time", "Unknown",
"The time the client was built.")
config_lib.DEFINE_string(
name="Client.install_path",
default=r"%(SystemRoot|env)\\System32\\%(name)\\%(version_string)",
help="Where the client binaries are installed.")
config_lib.DEFINE_string(
name="Client.rekall_profile_cache_path",
default=r"%(Client.install_path)\\rekall_profiles",
help="Where GRR stores cached Rekall profiles needed for memory analysis")
config_lib.DEFINE_list("Client.control_urls",
["http://www.example.com/control"],
"List of URLs of the controlling server.")
config_lib.DEFINE_string("Client.plist_path",
"/Library/LaunchDaemons/com.google.code.grrd.plist",
"Location of our launchctl plist.")
config_lib.DEFINE_string("Client.plist_filename", None,
"Filename of launchctl plist.")
config_lib.DEFINE_string("Client.plist_label",
None,
"Identifier label for launchd")
config_lib.DEFINE_string("Client.plist_label_prefix", None,
"Domain for launchd label.")
config_lib.DEFINE_float("Client.poll_min", 0.2,
"Minimum time between polls in seconds.")
config_lib.DEFINE_float("Client.poll_max", 600,
"Maximum time between polls in seconds.")
config_lib.DEFINE_float("Client.error_poll_min", 15,
"Minimum time between polls in seconds if the server "
"reported an error.")
config_lib.DEFINE_float("Client.poll_slew", 1.15,
"Slew of poll time.")
config_lib.DEFINE_integer("Client.connection_error_limit", 60 * 24,
"If the client encounters this many connection "
"errors, it exits and restarts. Retries are one "
"minute apart.")
config_lib.DEFINE_list(
name="Client.proxy_servers",
help="List of valid proxy servers the client should try.",
default=[])
config_lib.DEFINE_integer("Client.max_post_size", 8000000,
"Maximum size of the post.")
config_lib.DEFINE_integer("Client.max_out_queue", 10240000,
"Maximum size of the output queue.")
config_lib.DEFINE_integer("Client.foreman_check_frequency", 1800,
"The minimum number of seconds before checking with "
"the foreman for new work.")
config_lib.DEFINE_float("Client.rss_max", 500,
"Maximum memory footprint in MB.")
config_lib.DEFINE_string(
name="Client.tempfile_prefix",
help="Prefix to use for temp files created by the GRR client.",
default="tmp%(Client.name)")
config_lib.DEFINE_string(
name="Client.tempdir",
help="Default temporary directory to use on the client.",
default="/var/tmp/%(Client.name)/")
config_lib.DEFINE_integer("Client.version_major", 0,
"Major version number of client binary.")
config_lib.DEFINE_integer("Client.version_minor", 0,
"Minor version number of client binary.")
config_lib.DEFINE_integer("Client.version_revision", 0,
"Revision number of client binary.")
config_lib.DEFINE_integer("Client.version_release", 0,
"Release number of client binary.")
config_lib.DEFINE_string("Client.version_string",
"%(version_major).%(version_minor)."
"%(version_revision).%(version_release)",
"Version string of the client.")
config_lib.DEFINE_integer("Client.version_numeric",
"%(version_major)%(version_minor)"
"%(version_revision)%(version_release)",
"Version string of the client as an integer.")
config_lib.DEFINE_list("Client.plugins", [],
help="Additional Plugin paths loaded by the client.")
# Windows client specific options.
config_lib.DEFINE_string("Client.config_hive", r"HKEY_LOCAL_MACHINE",
help="The registry hive where the client "
"configuration will be stored.")
config_lib.DEFINE_string("Client.config_key", r"Software\\GRR",
help="The registry key where client configuration "
"will be stored.")
# Client Cryptographic options.
config_lib.DEFINE_semantic(
crypto.PEMPrivateKey, "Client.private_key",
description="Client private key in pem format. If not provided this "
"will be generated by the enrollment process.",
)
config_lib.DEFINE_semantic(
crypto.RDFX509Cert, "CA.certificate",
description="Trusted CA certificate in X509 pem format",
)
config_lib.DEFINE_semantic(
crypto.PEMPublicKey, "Client.executable_signing_public_key",
description="public key for verifying executable signing.")
config_lib.DEFINE_semantic(
crypto.PEMPrivateKey, "PrivateKeys.executable_signing_private_key",
description="Private keys for signing executables. NOTE: This "
"key is usually kept offline and is thus not present in the "
"configuration file.")
config_lib.DEFINE_semantic(
crypto.PEMPublicKey, "Client.driver_signing_public_key",
description="public key for verifying driver signing.")
config_lib.DEFINE_semantic(
crypto.PEMPrivateKey, "PrivateKeys.driver_signing_private_key",
description="Private keys for signing drivers. NOTE: This "
"key is usually kept offline and is thus not present in the "
"configuration file.")
config_lib.DEFINE_integer("Client.server_serial_number", 0,
"Minimal serial number we accept for server cert.")
# The following configuration options are defined here but are used in
# the windows nanny code (grr/client/nanny/windows_nanny.h).
config_lib.DEFINE_string("Nanny.child_binary", "GRR.exe",
help="The location to the client binary.")
config_lib.DEFINE_string("Nanny.child_command_line", "%(Nanny.child_binary)",
help="The command line to launch the client binary.")
config_lib.DEFINE_string("Nanny.logfile", "%(Logging.path)/nanny.log",
"The file where we write the nanny transaction log.")
config_lib.DEFINE_string("Nanny.service_name", "GRR Service",
help="The name of the nanny.")
config_lib.DEFINE_string("Nanny.service_description", "GRR Service",
help="The description of the nanny service.")
config_lib.DEFINE_string("Nanny.service_key", r"%(Client.config_key)",
help="The registry key of the nanny service.")
config_lib.DEFINE_string("Nanny.service_key_hive", r"%(Client.config_hive)",
help="The registry key of the nanny service.")
config_lib.DEFINE_string("Nanny.statusfile", "%(Logging.path)/nanny.status",
"The file where we write the nanny status.")
config_lib.DEFINE_string("Nanny.status", "",
"The regkey where we write the nanny status.")
config_lib.DEFINE_string("Nanny.binary",
r"%(Client.install_path)\\%(service_binary_name)",
help="The full location to the nanny binary.")
config_lib.DEFINE_string("Nanny.service_binary_name",
"%(Client.name)service.exe",
help="The executable name of the nanny binary.")
config_lib.DEFINE_integer("Nanny.unresponsive_kill_period", 60,
"The time in seconds after which the nanny kills us.")
config_lib.DEFINE_integer("Network.api", 3,
"The version of the network protocol the client "
"uses.")
config_lib.DEFINE_string("Network.compression", default="ZCOMPRESS",
help="Type of compression (ZCOMPRESS, UNCOMPRESSED)")
# Installer options.
config_lib.DEFINE_string(
name="Installer.logfile",
default="%(Logging.path)/%(Client.name)_installer.txt",
help=("A specific log file which is used for logging the "
"installation process."))
config_lib.DEFINE_list(
"Installer.old_key_map", [
"HKEY_LOCAL_MACHINE\\Software\\GRR\\certificate->Client.private_key",
"HKEY_LOCAL_MACHINE\\Software\\GRR\\server_serial_number"
"->Client.server_serial_number",
],
"""
A mapping of old registry values which will be copied to new values. The old
value location must start with a valid hive name, followed by a key name, and
end with the value name. The source location must be separated from the new
parameter name by a -> symbol.
This setting allows to carry over settings from obsolete client installations to
newer versions of the client which may store the same information in other
locations.
For example:
HKEY_LOCAL_MACHINE\\Software\\GRR\\certificate -> Client.private_key
""")
config_lib.DEFINE_string("Installer.old_writeback", "/usr/lib/grr/grrd.conf",
"OS X and linux client installers will check this "
"location for old config data that should be "
"preserved.")
| ksmaheshkumar/grr | config/client.py | Python | apache-2.0 | 10,670 |
import os
from pulp.common.compat import unittest
from mock import patch, Mock, PropertyMock, ANY, call
from pulp.common.plugins import importer_constants
from pulp.server.exceptions import PulpCodedException
from mongoengine import NotUniqueError
from pulp_ostree.plugins.lib import LibError, Commit
from pulp_ostree.plugins.importers.steps import (
Main, Create, Summary, Pull, Add, Clean, Remote, Repair)
from pulp_ostree.common import constants, errors
# The module being tested
MODULE = 'pulp_ostree.plugins.importers.steps'  # dotted-path prefix used in @patch targets below
class TestMainStep(unittest.TestCase):
    """Tests for the Main importer step: config wiring, child-step order and storage path."""
    @patch('pulp_ostree.plugins.db.model.generate_remote_id')
    def test_init(self, fake_generate):
        # Full config: feed, branches, depth and repair flag must all be
        # copied onto the step, and all six child steps created in order.
        repo = Mock(id='id-123')
        conduit = Mock()
        working_dir = 'dir-123'
        url = 'url-123'
        branches = ['branch-1', 'branch-2']
        depth = 3
        digest = 'digest-123'
        fake_generate.return_value = digest
        config = {
            importer_constants.KEY_FEED: url,
            constants.IMPORTER_CONFIG_KEY_BRANCHES: branches,
            constants.IMPORTER_CONFIG_KEY_DEPTH: depth,
            constants.IMPORTER_CONFIG_REPAIR: True,
        }
        # test
        step = Main(repo=repo, conduit=conduit, config=config, working_dir=working_dir)
        # validation
        self.assertEqual(step.step_id, constants.IMPORT_STEP_MAIN)
        self.assertEqual(step.repo, repo)
        self.assertEqual(step.conduit, conduit)
        self.assertEqual(step.config, config)
        self.assertEqual(step.working_dir, working_dir)
        self.assertEqual(step.plugin_type, constants.WEB_IMPORTER_TYPE_ID)
        self.assertEqual(step.feed_url, url)
        self.assertEqual(step.remote_id, digest)
        self.assertEqual(step.branches, branches)
        self.assertEqual(step.depth, depth)
        self.assertEqual(step.repo_id, repo.id)
        self.assertEqual(len(step.children), 6)
        self.assertTrue(isinstance(step.children[0], Repair))
        self.assertTrue(isinstance(step.children[1], Create))
        self.assertTrue(isinstance(step.children[2], Summary))
        self.assertTrue(isinstance(step.children[3], Pull))
        self.assertTrue(isinstance(step.children[4], Add))
        self.assertTrue(isinstance(step.children[5], Clean))
    def test_init_no_feed(self):
        # A missing feed URL must raise OST0004.
        repo = Mock(id='id-123')
        url = None
        config = {
            importer_constants.KEY_FEED: url,
            constants.IMPORTER_CONFIG_KEY_BRANCHES: []
        }
        # test and validation
        with self.assertRaises(PulpCodedException) as assertion:
            Main(repo=repo, config=config)
        self.assertEqual(assertion.exception.error_code, errors.OST0004)
    @patch(MODULE + '.SharedStorage')
    def test_storage_dir(self, storage):
        # storage_dir must come from SharedStorage used as a context manager.
        url = 'url-123'
        repo = Mock(id='id-123')
        config = {
            importer_constants.KEY_FEED: url,
        }
        st = Mock()
        st.__enter__ = Mock(return_value=st)
        st.__exit__ = Mock()
        storage.return_value = st
        # test
        step = Main(repo=repo, config=config)
        path = step.storage_dir
        storage.assert_called_once_with(constants.STORAGE_PROVIDER, step.remote_id)
        st.__enter__.assert_called_once_with()
        st.__exit__.assert_called_once_with(None, None, None)
        self.assertEqual(path, st.content_dir)
class TestCreate(unittest.TestCase):
    """Tests for the Create step: open-or-create the local repository, then add the remote."""
    def test_init(self):
        step = Create()
        self.assertEqual(step.step_id, constants.IMPORT_STEP_CREATE_REPOSITORY)
        self.assertTrue(step.description is not None)
    @patch(MODULE + '.lib')
    @patch(MODULE + '.Remote')
    def test_process_main(self, fake_remote, fake_lib):
        # open() raising LibError means the repo does not exist yet,
        # so create() must be called before the remote is added.
        url = 'url-123'
        remote_id = 'remote-123'
        repo_id = 'repo-123'
        parent = Mock(
            feed_url=url,
            remote_id=remote_id,
            repo_id=repo_id,
            storage_dir='root/path-123')
        fake_lib.LibError = LibError
        fake_lib.Repository.return_value.open.side_effect = LibError
        # test
        step = Create()
        step.parent = parent
        step.process_main()
        # validation
        fake_remote.assert_called_once_with(step, fake_lib.Repository.return_value)
        fake_lib.Repository.assert_called_once_with(parent.storage_dir)
        fake_lib.Repository.return_value.open.assert_called_once_with()
        fake_lib.Repository.return_value.create.assert_called_once_with()
        fake_remote.return_value.add.assert_called_once_with()
    @patch(MODULE + '.lib')
    @patch(MODULE + '.Remote')
    def test_process_main_repository_exists(self, fake_remote, fake_lib):
        # open() succeeding means the repo already exists; only the remote is added.
        url = 'url-123'
        remote_id = 'remote-123'
        repo_id = 'repo-xyz'
        path = 'root/path-123'
        parent = Mock(
            feed_url=url,
            remote_id=remote_id,
            repo_id=repo_id,
            storage_dir='root/path-123')
        # test
        step = Create()
        step.parent = parent
        step.process_main()
        # validation
        fake_remote.assert_called_once_with(step, fake_lib.Repository.return_value)
        fake_lib.Repository.assert_called_once_with(path)
        fake_lib.Repository.return_value.open.assert_called_once_with()
        fake_remote.return_value.add.assert_called_once_with()
    @patch(MODULE + '.lib')
    def test_process_main_repository_exception(self, fake_lib):
        # Any LibError from the lib layer is wrapped as OST0001.
        fake_lib.LibError = LibError
        fake_lib.Repository.side_effect = LibError
        step = Create()
        step.parent = Mock(feed_url='', remote_id='')
        with self.assertRaises(PulpCodedException) as assertion:
            step.process_main()
        self.assertEqual(assertion.exception.error_code, errors.OST0001)
class TestRepair(unittest.TestCase):
    """Tests for the Repair step: wipe the storage dir and re-create the repository."""
    def test_init(self):
        step = Repair()
        self.assertEqual(step.step_id, constants.IMPORT_STEP_REPAIR_REPOSITORY)
        self.assertTrue(step.description is not None)
    @patch(MODULE + '.lib')
    @patch(MODULE + '.shutil')
    def test_process_main(self, fake_shutil, fake_lib):
        url = 'url-123'
        repo_id = 'repo-123'
        parent = Mock(
            feed_url=url,
            repo_id=repo_id,
            storage_dir='root/path-123')
        fake_lib.LibError = LibError
        # test
        step = Repair()
        step.parent = parent
        step.process_main()
        # validation
        # The storage dir is removed first, then a fresh repository created.
        fake_shutil.rmtree.assert_called_once_with(parent.storage_dir, ignore_errors=True)
        fake_lib.Repository.assert_called_once_with(parent.storage_dir)
        fake_lib.Repository.return_value.create.assert_called_once_with()
    @patch(MODULE + '.lib')
    @patch(MODULE + '.shutil', Mock())
    def test_process_main_repository_exception(self, fake_lib):
        # LibError during re-creation is wrapped as OST0007.
        fake_lib.LibError = LibError
        fake_lib.Repository.side_effect = LibError
        step = Repair()
        step.parent = Mock(feed_url='')
        with self.assertRaises(PulpCodedException) as assertion:
            step.process_main()
        self.assertEqual(assertion.exception.error_code, errors.OST0007)
class TestPull(unittest.TestCase):
    """Tests for the Pull step: pulling branches and reporting progress."""
    def test_init(self):
        step = Pull()
        self.assertEqual(step.step_id, constants.IMPORT_STEP_PULL)
        self.assertTrue(step.description is not None)
    def test_process_main(self):
        # process_main() must delegate to _pull() with the parent's settings.
        repo_id = 'repo-xyz'
        path = 'root/path-123'
        branches = ['branch-1', 'branch-2']
        depth = 3
        # test
        step = Pull()
        step.parent = Mock(storage_dir=path, repo_id=repo_id, branches=branches, depth=depth)
        step._pull = Mock()
        step.process_main()
        # validation
        step._pull.assert_called_once_with(path, repo_id, branches, depth)
    @patch(MODULE + '.lib')
    def test_pull(self, fake_lib):
        remote_id = 'remote-123'
        path = 'root/path-123'
        branches = ['branch-1']
        depth = 3
        repo = Mock()
        fake_lib.Repository.return_value = repo
        report = Mock(fetched=1, requested=2, percent=50)
        # The pull listener callback must translate lib progress reports
        # into step progress updates.
        def fake_pull(remote_id, branch, listener, depth):
            listener(report)
        repo.pull.side_effect = fake_pull
        # test
        step = Pull()
        step.report_progress = Mock()
        step._pull(path, remote_id, branches, depth)
        # validation
        fake_lib.Repository.assert_called_once_with(path)
        repo.pull.assert_called_once_with(remote_id, branches, ANY, depth)
        step.report_progress.assert_called_with(force=True)
        self.assertEqual(step.progress_details, 'fetching 1/2 50%')
    @patch(MODULE + '.lib')
    def test_pull_raising_exception(self, fake_lib):
        # LibError during pull is wrapped as OST0002.
        fake_lib.LibError = LibError
        fake_lib.Repository.return_value.pull.side_effect = LibError
        step = Pull()
        with self.assertRaises(PulpCodedException) as assertion:
            step._pull('', '', '', 0)
        self.assertEqual(assertion.exception.error_code, errors.OST0002)
class TestAdd(unittest.TestCase):
    """Tests for the Add step: create Branch units per commit and associate them."""
    def test_init(self):
        step = Add()
        self.assertEqual(step.step_id, constants.IMPORT_STEP_ADD_UNITS)
        self.assertTrue(step.description is not None)
    @patch(MODULE + '.lib')
    @patch(MODULE + '.model')
    @patch(MODULE + '.associate_single_unit')
    def test_process_main(self, fake_associate, fake_model, fake_lib):
        # Fabricated commit history: head plus two parents for every branch.
        def history(commit_id):
            return [
                Commit(id='{}head'.format(commit_id), metadata={'md': 0}),
                Commit(id='{}parent-1'.format(commit_id), metadata={'md': 1}),
                Commit(id='{}parent-2'.format(commit_id), metadata={'md': 2}),
            ]
        repo_id = 'r-1234'
        remote_id = 'remote-1'
        refs = [
            Mock(path='branch:1', commit='commit:1', metadata='md:1'),
            Mock(path='branch:2', commit='commit:2', metadata='md:2'),
            Mock(path='branch:3', commit='commit:3', metadata='md:3'),
            Mock(path='branch:4', commit='commit:4', metadata='md:4'),
            Mock(path='branch:5', commit='commit:5', metadata='md:5'),
        ]
        # One unit per (branch, commit) pair; the last ref is excluded from
        # the configured branch list so it must not produce units.
        units = [
            Mock(remote_id=remote_id,
                 branch=r.path.split(':')[-1],
                 commit=c.id,
                 metadata=c.metadata,
                 unit_key={}) for r in refs[:-1] for c in reversed(history(r.commit))
        ]
        units[0].save.side_effect = NotUniqueError  # duplicate: must be fetched instead
        fake_model.Branch.side_effect = units
        fake_model.Branch.objects.get.return_value = units[0]
        branches = [r.path.split(':')[-1] for r in refs[:-1]]
        repository = Mock()
        repository.list_refs.return_value = refs
        repository.history.side_effect = history
        fake_lib.Repository.return_value = repository
        parent = Mock(remote_id=remote_id, storage_dir='/tmp/xyz', branches=branches)
        parent.get_repo.return_value = Mock(id=repo_id)
        fake_conduit = Mock()
        # test
        step = Add()
        step.parent = parent
        step.get_conduit = Mock(return_value=fake_conduit)
        step.process_main()
        # validation
        fake_lib.Repository.assert_called_once_with(step.parent.storage_dir)
        self.assertEqual(
            fake_model.Branch.call_args_list,
            [
                call(remote_id=u.remote_id,
                     branch=u.branch,
                     commit=u.commit,
                     metadata=u.metadata) for u in units
            ])
        self.assertEqual(
            fake_associate.call_args_list,
            [
                ((parent.get_repo.return_value.repo_obj, u), {}) for u in units
            ])
class TestSummary(unittest.TestCase):
    """Tests for the Summary step: record the remote's refs on the repo scratchpad."""
    @patch(MODULE + '.lib')
    def test_process_main(self, fake_lib):
        refs = [
            Mock(),
            Mock(),
            Mock(),
        ]
        ref_dicts = [
            {'commit': 'abc', 'name': 'foo', 'metadata': {'a.b': 'x'}},
            {'commit': 'def', 'name': 'bar', 'metadata': {'a.b': 'y'}},
            {'commit': 'hij', 'name': 'baz', 'metadata': {'a.b': 'z'}},
        ]
        for ref, d in zip(refs, ref_dicts):
            ref.dict.return_value = d
        remote = Mock()
        remote.list_refs.return_value = refs
        lib_repository = Mock()
        repository = Mock(id='1234')
        fake_lib.Remote.return_value = remote
        fake_lib.Repository.return_value = lib_repository
        parent = Mock(storage_dir='/tmp/xx', repo_id=repository.id)
        parent.get_repo.return_value = repository
        # test
        step = Summary()
        step.parent = parent
        step.process_main()
        # validation
        fake_lib.Repository.assert_called_once_with(step.parent.storage_dir)
        fake_lib.Remote.assert_called_once_with(step.parent.repo_id, lib_repository)
        # Metadata keys must have '.' replaced by '-' (mongo-safe keys).
        repository.repo_obj.scratchpad.update.assert_called_once_with(
            {
                constants.REMOTE: {
                    constants.SUMMARY: [
                        {'commit': 'abc', 'name': 'foo', 'metadata': {'a-b': 'x'}},
                        {'commit': 'def', 'name': 'bar', 'metadata': {'a-b': 'y'}},
                        {'commit': 'hij', 'name': 'baz', 'metadata': {'a-b': 'z'}},
                    ]
                }
            })
        repository.repo_obj.save.assert_called_once_with()
    @patch(MODULE + '.lib')
    def test_process_main_fetch_failed(self, fake_lib):
        # LibError while listing refs is wrapped as OST0005.
        remote = Mock()
        remote.list_refs.side_effect = LibError
        lib_repository = Mock()
        repository = Mock(id='1234')
        fake_lib.Remote.return_value = remote
        fake_lib.Repository.return_value = lib_repository
        fake_lib.LibError = LibError
        parent = Mock(storage_dir='/tmp/xx', repo_id=repository.id)
        parent.get_repo.return_value = repository
        # test and validation
        step = Summary()
        step.parent = parent
        with self.assertRaises(PulpCodedException) as assertion:
            step.process_main()
        self.assertEqual(assertion.exception.error_code, errors.OST0005)
    def test_clean_metadata(self):
        # clean_metadata() rewrites metadata keys in place, '.' -> '-'.
        commit = 'abc'
        name = 'foo'
        metadata = {
            'a.b': '123',
            'a.b.c': '456',
            'created': '2016-02-23T22:49:05Z'
        }
        ref = {
            'commit': commit,
            'name': name,
            'metadata': metadata
        }
        cleaned = dict((k.replace('.', '-'), v) for k, v in metadata.items())
        # test
        Summary.clean_metadata(ref)
        # validation
        self.assertDictEqual(
            ref,
            {
                'commit': commit,
                'name': name,
                'metadata': cleaned
            })
class TestClean(unittest.TestCase):
    """Tests for the Clean step: delete the remote definition from the repository."""
    def test_init(self):
        step = Clean()
        self.assertEqual(step.step_id, constants.IMPORT_STEP_CLEAN)
        self.assertTrue(step.description is not None)
    @patch(MODULE + '.lib')
    def test_process_main(self, fake_lib):
        path = 'root/path-123'
        repo_id = 'repo-123'
        # test
        step = Clean()
        step.parent = Mock(storage_dir=path, repo_id=repo_id)
        step.process_main()
        # validation
        fake_lib.Repository.assert_called_once_with(path)
        fake_lib.Remote.assert_called_once_with(repo_id, fake_lib.Repository.return_value)
        fake_lib.Remote.return_value.delete.assert_called_once_with()
    @patch(MODULE + '.lib')
    def test_process_main_exception(self, fake_lib):
        # LibError while deleting the remote is wrapped as OST0003.
        path = 'root/path-123'
        importer_id = 'importer-xyz'
        fake_lib.LibError = LibError
        fake_lib.Remote.return_value.delete.side_effect = LibError
        # test
        step = Clean()
        step.parent = Mock(storage_dir=path, importer_id=importer_id)
        with self.assertRaises(PulpCodedException) as assertion:
            step.process_main()
        self.assertEqual(assertion.exception.error_code, errors.OST0003)
class TestRemote(unittest.TestCase):
    """Tests for the Remote helper that maps step configuration onto a lib.Remote."""
    def test_init(self):
        step = Mock()
        repository = Mock()
        remote = Remote(step, repository)
        self.assertEqual(remote.step, step)
        self.assertEqual(remote.repository, repository)
    def test_url(self):
        # url is read from the parent step's feed_url.
        step = Mock()
        step.parent = Mock(feed_url='http://')
        remote = Remote(step, None)
        self.assertEqual(remote.url, step.parent.feed_url)
    def test_remote_id(self):
        # remote_id is the parent step's repo_id.
        step = Mock()
        step.parent = Mock(repo_id='123')
        remote = Remote(step, None)
        self.assertEqual(remote.remote_id, step.parent.repo_id)
    def test_working_dir(self):
        step = Mock()
        remote = Remote(step, None)
        self.assertEqual(remote.working_dir, step.get_working_dir.return_value)
    def test_config(self):
        step = Mock()
        remote = Remote(step, None)
        self.assertEqual(remote.config, step.get_config.return_value)
    @patch('os.chmod')
    @patch('__builtin__.open')
    def test_ssl_key_path(self, fake_open, fake_chmod):
        # The client key is written to <working_dir>/key.pem with mode 0600.
        key = 'test-key'
        config = {
            importer_constants.KEY_SSL_CLIENT_KEY: key
        }
        working_dir = '/tmp/test'
        step = Mock()
        step.get_config.return_value = config
        step.get_working_dir.return_value = working_dir
        fp = Mock(__enter__=Mock(), __exit__=Mock())
        fp.__enter__.return_value = fp
        fake_open.return_value = fp
        # test
        remote = Remote(step, None)
        path = remote.ssl_key_path
        # validation
        expected_path = os.path.join(working_dir, 'key.pem')
        fake_open.assert_called_once_with(expected_path, 'w+')
        fp.write.assert_called_once_with(key)
        fp.__enter__.assert_called_once_with()
        fp.__exit__.assert_called_once_with(None, None, None)
        fake_chmod.assert_called_once_with(expected_path, 0600)
        self.assertEqual(path, expected_path)
    @patch('__builtin__.open')
    def test_ssl_cert_path(self, fake_open):
        # The client certificate is written to <working_dir>/cert.pem.
        cert = 'test-key'
        config = {
            importer_constants.KEY_SSL_CLIENT_CERT: cert
        }
        working_dir = '/tmp/test'
        step = Mock()
        step.get_config.return_value = config
        step.get_working_dir.return_value = working_dir
        fp = Mock(__enter__=Mock(), __exit__=Mock())
        fp.__enter__.return_value = fp
        fake_open.return_value = fp
        # test
        remote = Remote(step, None)
        path = remote.ssl_cert_path
        # validation
        expected_path = os.path.join(working_dir, 'cert.pem')
        fake_open.assert_called_once_with(expected_path, 'w+')
        fp.write.assert_called_once_with(cert)
        fp.__enter__.assert_called_once_with()
        fp.__exit__.assert_called_once_with(None, None, None)
        self.assertEqual(path, expected_path)
    @patch('__builtin__.open')
    def test_ssl_ca_path(self, fake_open):
        # The CA certificate is written to <working_dir>/ca.pem.
        cert = 'test-key'
        config = {
            importer_constants.KEY_SSL_CA_CERT: cert
        }
        working_dir = '/tmp/test'
        step = Mock()
        step.get_config.return_value = config
        step.get_working_dir.return_value = working_dir
        fp = Mock(__enter__=Mock(), __exit__=Mock())
        fp.__enter__.return_value = fp
        fake_open.return_value = fp
        # test
        remote = Remote(step, None)
        path = remote.ssl_ca_path
        # validation
        expected_path = os.path.join(working_dir, 'ca.pem')
        fake_open.assert_called_once_with(expected_path, 'w+')
        fp.write.assert_called_once_with(cert)
        fp.__enter__.assert_called_once_with()
        fp.__exit__.assert_called_once_with(None, None, None)
        self.assertEqual(path, expected_path)
    def test_ssl_validation(self):
        config = {
            importer_constants.KEY_SSL_VALIDATION: True
        }
        step = Mock()
        step.get_config.return_value = config
        # test
        remote = Remote(step, None)
        validation = remote.ssl_validation
        # validation
        self.assertTrue(validation)
        self.assertTrue(isinstance(validation, bool))
    def test_ssl_validation_not_specified(self):
        # Unspecified ssl_validation must default to False (still a bool).
        config = {}
        step = Mock()
        step.get_config.return_value = config
        # test
        remote = Remote(step, None)
        validation = remote.ssl_validation
        # validation
        self.assertFalse(validation)
        self.assertTrue(isinstance(validation, bool))
    @patch(MODULE + '.GPG')
    def test_gpg_key(self, fake_gpg):
        # All configured keys are imported into a keyring in the working dir;
        # the property returns (keyring path, imported key ids).
        keys = [1, 2, 3]
        key_list = [dict(keyid=k) for k in keys]
        working_dir = '/tmp/test'
        config = {
            constants.IMPORTER_CONFIG_KEY_GPG_KEYS: keys
        }
        step = Mock()
        step.get_config.return_value = config
        step.get_working_dir.return_value = working_dir
        fake_gpg.return_value.list_keys.return_value = key_list
        # test
        remote = Remote(step, None)
        path, key_ids = remote.gpg_keys
        # validation
        fake_gpg.assert_called_once_with(gnupghome=working_dir)
        self.assertEqual(
            fake_gpg.return_value.import_keys.call_args_list,
            [((k,), {}) for k in keys])
        self.assertEqual(path, os.path.join(working_dir, 'pubring.gpg'))
        self.assertEqual(key_ids, [k['keyid'] for k in key_list])
    def test_proxy_url(self):
        host = 'http://dog.com'
        port = '3128'
        user = 'jake'
        password = 'bark'
        config = {
            importer_constants.KEY_PROXY_HOST: host,
            importer_constants.KEY_PROXY_PORT: port,
            importer_constants.KEY_PROXY_USER: user,
            importer_constants.KEY_PROXY_PASS: password,
        }
        step = Mock()
        step.get_config.return_value = config
        proxy_url = 'http://jake:bark@dog.com:3128'
        # test
        remote = Remote(step, None)
        # validation
        self.assertEqual(remote.proxy_url, proxy_url)
    def test_proxy_url_without_scheme(self):
        # A bare host must get an http:// scheme prepended.
        host = 'dog.com'
        port = '3128'
        user = 'jake'
        password = 'bark'
        config = {
            importer_constants.KEY_PROXY_HOST: host,
            importer_constants.KEY_PROXY_PORT: port,
            importer_constants.KEY_PROXY_USER: user,
            importer_constants.KEY_PROXY_PASS: password,
        }
        step = Mock()
        step.get_config.return_value = config
        proxy_url = 'http://jake:bark@dog.com:3128'
        # test
        remote = Remote(step, None)
        # validation
        self.assertEqual(remote.proxy_url, proxy_url)
    def test_proxy_url_without_port(self):
        host = 'http://dog.com'
        port = None
        user = 'jake'
        password = 'bark'
        config = {
            importer_constants.KEY_PROXY_HOST: host,
            importer_constants.KEY_PROXY_PORT: port,
            importer_constants.KEY_PROXY_USER: user,
            importer_constants.KEY_PROXY_PASS: password,
        }
        step = Mock()
        step.get_config.return_value = config
        proxy_url = 'http://jake:bark@dog.com'
        # test
        remote = Remote(step, None)
        # validation
        self.assertEqual(remote.proxy_url, proxy_url)
    def test_proxy_without_auth(self):
        host = 'http://dog.com'
        port = '3128'
        config = {
            importer_constants.KEY_PROXY_HOST: host,
            importer_constants.KEY_PROXY_PORT: port,
        }
        step = Mock()
        step.get_config.return_value = config
        proxy_url = 'http://dog.com:3128'
        # test
        remote = Remote(step, None)
        # validation
        self.assertEqual(remote.proxy_url, proxy_url)
    def test_proxy_without_host(self):
        # No proxy host configured -> proxy_url is None.
        config = {
        }
        step = Mock()
        step.get_config.return_value = config
        # test
        remote = Remote(step, None)
        # validation
        self.assertEqual(remote.proxy_url, None)
    @patch(MODULE + '.lib')
    @patch(MODULE + '.Remote.url', PropertyMock())
    @patch(MODULE + '.Remote.remote_id', PropertyMock())
    @patch(MODULE + '.Remote.ssl_key_path', PropertyMock())
    @patch(MODULE + '.Remote.ssl_cert_path', PropertyMock())
    @patch(MODULE + '.Remote.ssl_ca_path', PropertyMock())
    @patch(MODULE + '.Remote.ssl_validation', PropertyMock())
    @patch(MODULE + '.Remote.proxy_url', PropertyMock())
    @patch(MODULE + '.Remote.gpg_keys', new_callable=PropertyMock)
    def test_add(self, fake_gpg, fake_lib):
        # add() must copy every property onto the lib.Remote, import the
        # GPG keyring and call update().
        step = Mock()
        repository = Mock()
        path = Mock()
        key_ids = [1, 2, 3]
        fake_gpg.return_value = (path, key_ids)
        # test
        remote = Remote(step, repository)
        remote.add()
        # validation
        fake_lib.Remote.assert_called_once_with(remote.remote_id, repository)
        fake_lib.Remote.return_value.update.assert_called_once_with()
        fake_lib.Remote.return_value.import_key.assert_called_once_with(path, key_ids)
        self.assertEqual(fake_lib.Remote.return_value.url, remote.url)
        self.assertEqual(fake_lib.Remote.return_value.ssl_key_path, remote.ssl_key_path)
        self.assertEqual(fake_lib.Remote.return_value.ssl_cert_path, remote.ssl_cert_path)
        self.assertEqual(fake_lib.Remote.return_value.ssl_ca_path, remote.ssl_ca_path)
        self.assertEqual(fake_lib.Remote.return_value.ssl_validation, remote.ssl_validation)
        self.assertEqual(fake_lib.Remote.return_value.proxy_url, remote.proxy_url)
        self.assertTrue(fake_lib.Remote.return_value.gpg_validation, remote.ssl_validation)
| pcreech/pulp_ostree | plugins/test/unit/plugins/importers/test_steps.py | Python | gpl-2.0 | 25,771 |
# -*- coding: utf-8 -*-
# NOTE(review): the import below originally read "from .context import import",
# which is a SyntaxError (`import` is a keyword). Assuming the intended name is
# the package under test, `igor`, exposed by tests/context.py — confirm.
from .context import igor  # noqa: F401

import unittest


class BasicTestSuite(unittest.TestCase):
    """Basic sanity test cases for the test harness itself."""

    def test_absolute_truth_and_meaning(self):
        """The suite runs and a trivially true assertion passes."""
        assert True


if __name__ == '__main__':
    unittest.main()
| happykhan/igor | tests/test_basic.py | Python | gpl-3.0 | 258 |
'''
07. Sentence generation by template
Implement a function that takes arguments x, y, z and returns the string
"x時のyはz" (e.g. "the y at x o'clock is z"), then check the result with
x=12, y="気温" (temperature), z=22.4.
'''


def temp_sentence(time, temp, value):
    """Return the templated sentence '<time>時の<temp>は<value>' as a string."""
    # Stringify the numeric pieces and join all fragments in order.
    fragments = [str(time), "時の", temp, "は", str(value)]
    return "".join(fragments)


print(temp_sentence(12, "気温", 22.4))
| yasutaka/nlp_100 | kiyota/07.py | Python | mit | 373 |
# -*- coding: utf-8 -*-
import django.contrib.messages as django_messages
from django.template import loader
from django.contrib.messages.storage import default_storage
from django.http import HttpRequest
from django.utils.translation import ugettext
import pytest
from olympia.amo.messages import _make_message, info
pytestmark = pytest.mark.django_db  # pytest-django marker: allow DB access in every test here
def test_xss():
    """Titles and messages are HTML-escaped unless flagged *_safe."""
    title = "<script>alert(1)</script>"
    message = "<script>alert(2)</script>"
    # NOTE(review): the unsafe-path assertions must expect the *escaped*
    # markup; the previous raw-script expectations looked like an HTML-entity
    # round-trip corruption ("&lt;" collapsed back to "<"), which made the
    # escaping checks indistinguishable from the *_safe checks.
    r = _make_message(title)
    assert "&lt;script&gt;alert(1)&lt;/script&gt;" in r
    r = _make_message(None, message)
    assert "&lt;script&gt;alert(2)&lt;/script&gt;" in r

    # *_safe flags opt out of escaping for the corresponding part.
    r = _make_message(title, title_safe=True)
    assert "<script>alert(1)</script>" in r
    r = _make_message(None, message, message_safe=True)
    assert "<script>alert(2)</script>" in r

    # Make sure safe flags are independent
    r = _make_message(title, message_safe=True)
    assert "&lt;script&gt;alert(1)&lt;/script&gt;" in r
    r = _make_message(None, message, title_safe=True)
    assert "&lt;script&gt;alert(2)&lt;/script&gt;" in r
def test_no_dupes():
    """Test that duplicate messages aren't saved."""
    request = HttpRequest()
    setattr(request, '_messages', default_storage(request))
    # Post the same message twice plus one distinct message.
    for title, body in (('Title', 'Body'),
                        ('Title', 'Body'),
                        ('Another Title', 'Another Body')):
        info(request, title, body)
    storage = django_messages.get_messages(request)
    assert len(storage) == 2, 'Too few or too many messages recorded.'
def test_l10n_dups():
    """Test that L10n values are preserved."""
    request = HttpRequest()
    setattr(request, '_messages', default_storage(request))
    # Same duplicate pattern as test_no_dupes, but wrapped in ugettext.
    for title, body in (('Title', 'Body'),
                        ('Title', 'Body'),
                        ('Another Title', 'Another Body')):
        info(request, ugettext(title), ugettext(body))
    storage = django_messages.get_messages(request)
    assert len(storage) == 2, 'Too few or too many messages recorded.'
def test_unicode_dups():
    """Test that unicode values are preserved."""
    request = HttpRequest()
    setattr(request, '_messages', default_storage(request))
    # Non-ASCII titles must still deduplicate correctly.
    for title, body in ((u'Titlé', u'Body'),
                        (u'Titlé', u'Body'),
                        (u'Another Titlé', u'Another Body')):
        info(request, title, body)
    storage = django_messages.get_messages(request)
    assert len(storage) == 2, 'Too few or too many messages recorded.'
def test_html_rendered_properly():
    """Html markup is properly displayed in final template."""
    request = HttpRequest()
    setattr(request, '_messages', default_storage(request))
    # info() -> _file_message -> _make_message renders message_content.html,
    # which adds html markup; that markup must reach the final rendering
    # unescaped.
    info(request, 'Title', 'Body')
    messages = django_messages.get_messages(request)
    rendered = loader.get_template('messages.html').render(
        {'messages': messages})
    assert "<h2>" in rendered  # The html from _make_message is not escaped.
| tsl143/addons-server | src/olympia/amo/tests/test_messages.py | Python | bsd-3-clause | 3,072 |
def check_first_pages(first_pages):
    """Sanity-check a sequence of scraped first pages.

    Each page is a dict with a "url" (str) and "image_urls" (list of str).
    Asserts that page URLs are unique, every image URL is unique across all
    pages, and there is at least one image per page.

    Raises AssertionError on any violation.
    """
    # Build a *list* first so the set-vs-list length comparison actually
    # detects duplicate URLs (building a set directly, as before, made the
    # first uniqueness assert vacuously true).
    page_links = [page.get("url") for page in first_pages]
    assert len(set(page_links)) == len(page_links)
    assert len(page_links) == len(first_pages)
    list_of_images = [page.get("image_urls") for page in first_pages]
    image_links = [image for page in list_of_images for image in page]
    assert len(set(image_links)) == len(image_links)
    assert len(image_links) >= len(first_pages)
| J-CPelletier/WebComicToCBZ | webcomix/util.py | Python | mit | 440 |
def extractKoongKoongTranslations(item):
    """
    Feed parser for 'Koong Koong Translations' items: previews and titles
    without any volume/chapter/fragment information yield nothing (None);
    everything else is explicitly rejected (False).
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower():
        return None
    if not (vol or chp or frag):
        return None
    return False
| fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractKoongKoongTranslations.py | Python | bsd-3-clause | 223 |
import datetime
import os
import re
import unittest
import uuid
from decimal import ROUND_DOWN, ROUND_UP, Decimal
import pytest
from django.http import QueryDict
from django.test import TestCase, override_settings
from django.utils import six
from django.utils.timezone import activate, deactivate, utc
import rest_framework
from rest_framework import compat, serializers
from rest_framework.fields import DjangoImageField, is_simple_callable
try:
import pytz
except ImportError:
pytz = None
try:
import typings
except ImportError:
typings = False
# Tests for helper functions.
# ---------------------------
class TestIsSimpleCallable:
    """is_simple_callable() must be true only for callables invokable without arguments."""
    def test_method(self):
        class Foo:
            @classmethod
            def classmethod(cls):
                pass
            def valid(self):
                pass
            def valid_kwargs(self, param='value'):
                pass
            def valid_vargs_kwargs(self, *args, **kwargs):
                pass
            def invalid(self, param):
                pass
        assert is_simple_callable(Foo.classmethod)
        # unbound methods
        assert not is_simple_callable(Foo.valid)
        assert not is_simple_callable(Foo.valid_kwargs)
        assert not is_simple_callable(Foo.valid_vargs_kwargs)
        assert not is_simple_callable(Foo.invalid)
        # bound methods
        assert is_simple_callable(Foo().valid)
        assert is_simple_callable(Foo().valid_kwargs)
        assert is_simple_callable(Foo().valid_vargs_kwargs)
        assert not is_simple_callable(Foo().invalid)
    def test_function(self):
        def simple():
            pass
        def valid(param='value', param2='value'):
            pass
        def valid_vargs_kwargs(*args, **kwargs):
            pass
        def invalid(param, param2='value'):
            pass
        assert is_simple_callable(simple)
        assert is_simple_callable(valid)
        assert is_simple_callable(valid_vargs_kwargs)
        assert not is_simple_callable(invalid)
    def test_4602_regression(self):
        from django.db import models
        # get_<field>_display is generated by Django's model metaclass;
        # it must still count as a simple callable (issue #4602).
        class ChoiceModel(models.Model):
            choice_field = models.CharField(
                max_length=1, default='a',
                choices=(('a', 'A'), ('b', 'B')),
            )
            class Meta:
                app_label = 'tests'
        assert is_simple_callable(ChoiceModel().get_choice_field_display)
    @unittest.skipUnless(typings, 'requires python 3.5')
    def test_type_annotation(self):
        # The annotation will otherwise raise a syntax error in python < 3.5
        exec("def valid(param: str='value'): pass", locals())
        valid = locals()['valid']
        assert is_simple_callable(valid)
# Tests for field keyword arguments and core functionality.
# ---------------------------------------------------------
class TestEmpty:
    """
    Tests for `required`, `allow_null`, `allow_blank`, `default`.
    """
    def test_required(self):
        """
        By default a field must be included in the input.
        """
        field = serializers.IntegerField()
        with pytest.raises(serializers.ValidationError) as exc_info:
            field.run_validation()
        assert exc_info.value.detail == ['This field is required.']
    def test_not_required(self):
        """
        If `required=False` then a field may be omitted from the input.
        """
        field = serializers.IntegerField(required=False)
        with pytest.raises(serializers.SkipField):
            field.run_validation()
    def test_disallow_null(self):
        """
        By default `None` is not a valid input.
        """
        field = serializers.IntegerField()
        with pytest.raises(serializers.ValidationError) as exc_info:
            field.run_validation(None)
        assert exc_info.value.detail == ['This field may not be null.']
    def test_allow_null(self):
        """
        If `allow_null=True` then `None` is a valid input.
        """
        field = serializers.IntegerField(allow_null=True)
        output = field.run_validation(None)
        assert output is None
    def test_disallow_blank(self):
        """
        By default '' is not a valid input.
        """
        field = serializers.CharField()
        with pytest.raises(serializers.ValidationError) as exc_info:
            field.run_validation('')
        assert exc_info.value.detail == ['This field may not be blank.']
    def test_allow_blank(self):
        """
        If `allow_blank=True` then '' is a valid input.
        """
        field = serializers.CharField(allow_blank=True)
        output = field.run_validation('')
        assert output == ''
    def test_default(self):
        """
        If `default` is set, then omitted values get the default input.
        """
        field = serializers.IntegerField(default=123)
        output = field.run_validation()
        # Compare by equality: `output is 123` only worked via CPython's
        # small-int caching and raises SyntaxWarning on Python 3.8+.
        assert output == 123
class TestSource:
    """Tests for the `source` argument to serializer fields."""
    def test_source(self):
        class ExampleSerializer(serializers.Serializer):
            example_field = serializers.CharField(source='other')
        serializer = ExampleSerializer(data={'example_field': 'abc'})
        assert serializer.is_valid()
        # Validated data is keyed by `source`, not by the field name.
        assert serializer.validated_data == {'other': 'abc'}
    def test_redundant_source(self):
        # source equal to the field name must be rejected with a helpful message.
        class ExampleSerializer(serializers.Serializer):
            example_field = serializers.CharField(source='example_field')
        with pytest.raises(AssertionError) as exc_info:
            ExampleSerializer().fields
        assert str(exc_info.value) == (
            "It is redundant to specify `source='example_field'` on field "
            "'CharField' in serializer 'ExampleSerializer', because it is the "
            "same as the field name. Remove the `source` keyword argument."
        )
    def test_callable_source(self):
        # A source naming a zero-argument method is called during serialization.
        class ExampleSerializer(serializers.Serializer):
            example_field = serializers.CharField(source='example_callable')
        class ExampleInstance(object):
            def example_callable(self):
                return 'example callable value'
        serializer = ExampleSerializer(ExampleInstance())
        assert serializer.data['example_field'] == 'example callable value'
    def test_callable_source_raises(self):
        # Exceptions raised by a callable source are surfaced as ValueError.
        class ExampleSerializer(serializers.Serializer):
            example_field = serializers.CharField(source='example_callable', read_only=True)
        class ExampleInstance(object):
            def example_callable(self):
                raise AttributeError('method call failed')
        with pytest.raises(ValueError) as exc_info:
            serializer = ExampleSerializer(ExampleInstance())
            serializer.data.items()
        assert 'method call failed' in str(exc_info.value)
class TestReadOnly:
    """Tests for fields flagged `read_only`."""
    def setup(self):
        class TestSerializer(serializers.Serializer):
            read_only = serializers.ReadOnlyField()
            writable = serializers.IntegerField()
        self.Serializer = TestSerializer
    def test_validate_read_only(self):
        """
        Read-only fields should not be included in validation.
        """
        data = {'read_only': 123, 'writable': 456}
        serializer = self.Serializer(data=data)
        assert serializer.is_valid()
        assert serializer.validated_data == {'writable': 456}
    def test_serialize_read_only(self):
        """
        Read-only fields should be serialized.
        """
        instance = {'read_only': 123, 'writable': 456}
        serializer = self.Serializer(instance)
        assert serializer.data == {'read_only': 123, 'writable': 456}
class TestWriteOnly:
    """Tests for fields flagged `write_only`."""
    def setup(self):
        class TestSerializer(serializers.Serializer):
            write_only = serializers.IntegerField(write_only=True)
            readable = serializers.IntegerField()
        self.Serializer = TestSerializer
    def test_validate_write_only(self):
        """
        Write-only fields should be included in validation.
        """
        data = {'write_only': 123, 'readable': 456}
        serializer = self.Serializer(data=data)
        assert serializer.is_valid()
        assert serializer.validated_data == {'write_only': 123, 'readable': 456}
    def test_serialize_write_only(self):
        """
        Write-only fields should not be serialized.
        """
        instance = {'write_only': 123, 'readable': 456}
        serializer = self.Serializer(instance)
        assert serializer.data == {'readable': 456}
class TestInitial:
    """Tests for the `initial` argument to fields."""
    def setup(self):
        class TestSerializer(serializers.Serializer):
            initial_field = serializers.IntegerField(initial=123)
            blank_field = serializers.IntegerField()
        self.serializer = TestSerializer()
    def test_initial(self):
        """
        Initial values should be included when serializing a new representation.
        """
        assert self.serializer.data == {
            'initial_field': 123,
            'blank_field': None
        }
class TestInitialWithCallable:
    # `initial=` may also be a zero-argument callable, invoked at render time.
    def setup(self):
        def initial_value():
            return 123
        class TestSerializer(serializers.Serializer):
            initial_field = serializers.IntegerField(initial=initial_value)
        self.serializer = TestSerializer()
    def test_initial_should_accept_callable(self):
        """
        Follows the default ``Field.initial`` behaviour where they accept a
        callable to produce the initial value.
        """
        assert self.serializer.data == {
            'initial_field': 123,
        }
class TestLabel:
    """A field's human-readable label is configurable via `label=`."""
    def setup(self):
        class ExampleSerializer(serializers.Serializer):
            labeled = serializers.IntegerField(label='My label')
        self.serializer = ExampleSerializer()
    def test_label(self):
        """
        A field's label may be set with the `label` argument.
        """
        labeled_field = self.serializer.fields['labeled']
        assert labeled_field.label == 'My label'
class TestInvalidErrorKey:
    # `Field.fail()` with an unknown error key should raise an AssertionError
    # rather than an opaque KeyError.
    def setup(self):
        class ExampleField(serializers.Field):
            def to_native(self, data):
                # 'incorrect' deliberately has no entry in `error_messages`.
                self.fail('incorrect')
        self.field = ExampleField()
    def test_invalid_error_key(self):
        """
        If a field raises a validation error, but does not have a corresponding
        error message, then raise an appropriate assertion error.
        """
        with pytest.raises(AssertionError) as exc_info:
            self.field.to_native(123)
        expected = (
            'ValidationError raised by `ExampleField`, but error key '
            '`incorrect` does not exist in the `error_messages` dictionary.'
        )
        assert str(exc_info.value) == expected
class TestBooleanHTMLInput:
    # Unchecked HTML checkboxes submit nothing at all; BooleanField must
    # coerce that absence to False rather than failing as "required".
    def test_empty_html_checkbox(self):
        """
        HTML checkboxes do not send any value, but should be treated
        as `False` by BooleanField.
        """
        class TestSerializer(serializers.Serializer):
            archived = serializers.BooleanField()
        serializer = TestSerializer(data=QueryDict(''))
        assert serializer.is_valid()
        assert serializer.validated_data == {'archived': False}
    def test_empty_html_checkbox_not_required(self):
        """
        HTML checkboxes do not send any value, but should be treated
        as `False` by BooleanField, even if the field is required=False.
        """
        class TestSerializer(serializers.Serializer):
            archived = serializers.BooleanField(required=False)
        serializer = TestSerializer(data=QueryDict(''))
        assert serializer.is_valid()
        assert serializer.validated_data == {'archived': False}
class TestHTMLInput:
    # How fields interpret HTML form input (QueryDict): missing keys,
    # empty-string values, and repeated keys for list input.
    def test_empty_html_charfield_with_default(self):
        # Missing key falls back to the field default.
        class TestSerializer(serializers.Serializer):
            message = serializers.CharField(default='happy')
        serializer = TestSerializer(data=QueryDict(''))
        assert serializer.is_valid()
        assert serializer.validated_data == {'message': 'happy'}
    def test_empty_html_charfield_without_default(self):
        # 'message=' submits an empty string, accepted when allow_blank=True.
        class TestSerializer(serializers.Serializer):
            message = serializers.CharField(allow_blank=True)
        serializer = TestSerializer(data=QueryDict('message='))
        assert serializer.is_valid()
        assert serializer.validated_data == {'message': ''}
    def test_empty_html_charfield_without_default_not_required(self):
        class TestSerializer(serializers.Serializer):
            message = serializers.CharField(allow_blank=True, required=False)
        serializer = TestSerializer(data=QueryDict('message='))
        assert serializer.is_valid()
        assert serializer.validated_data == {'message': ''}
    def test_empty_html_integerfield(self):
        # Empty string for a numeric field is treated as "not provided".
        class TestSerializer(serializers.Serializer):
            message = serializers.IntegerField(default=123)
        serializer = TestSerializer(data=QueryDict('message='))
        assert serializer.is_valid()
        assert serializer.validated_data == {'message': 123}
    def test_empty_html_uuidfield_with_default(self):
        class TestSerializer(serializers.Serializer):
            message = serializers.UUIDField(default=uuid.uuid4)
        serializer = TestSerializer(data=QueryDict('message='))
        assert serializer.is_valid()
        # Value is random (uuid4), so only check the key is present.
        assert list(serializer.validated_data) == ['message']
    def test_empty_html_uuidfield_with_optional(self):
        class TestSerializer(serializers.Serializer):
            message = serializers.UUIDField(required=False)
        serializer = TestSerializer(data=QueryDict('message='))
        assert serializer.is_valid()
        assert list(serializer.validated_data) == []
    def test_empty_html_charfield_allow_null(self):
        # Empty string maps to None when allow_null=True (and not allow_blank).
        class TestSerializer(serializers.Serializer):
            message = serializers.CharField(allow_null=True)
        serializer = TestSerializer(data=QueryDict('message='))
        assert serializer.is_valid()
        assert serializer.validated_data == {'message': None}
    def test_empty_html_datefield_allow_null(self):
        class TestSerializer(serializers.Serializer):
            expiry = serializers.DateField(allow_null=True)
        serializer = TestSerializer(data=QueryDict('expiry='))
        assert serializer.is_valid()
        assert serializer.validated_data == {'expiry': None}
    def test_empty_html_charfield_allow_null_allow_blank(self):
        # allow_blank takes precedence over allow_null for ''.
        class TestSerializer(serializers.Serializer):
            message = serializers.CharField(allow_null=True, allow_blank=True)
        serializer = TestSerializer(data=QueryDict('message='))
        assert serializer.is_valid()
        assert serializer.validated_data == {'message': ''}
    def test_empty_html_charfield_required_false(self):
        class TestSerializer(serializers.Serializer):
            message = serializers.CharField(required=False)
        serializer = TestSerializer(data=QueryDict(''))
        assert serializer.is_valid()
        assert serializer.validated_data == {}
    def test_querydict_list_input(self):
        # Repeated keys in a QueryDict feed ListField as multiple items.
        class TestSerializer(serializers.Serializer):
            scores = serializers.ListField(child=serializers.IntegerField())
        serializer = TestSerializer(data=QueryDict('scores=1&scores=3'))
        assert serializer.is_valid()
        assert serializer.validated_data == {'scores': [1, 3]}
    def test_querydict_list_input_only_one_input(self):
        class TestSerializer(serializers.Serializer):
            scores = serializers.ListField(child=serializers.IntegerField())
        serializer = TestSerializer(data=QueryDict('scores=1&'))
        assert serializer.is_valid()
        assert serializer.validated_data == {'scores': [1]}
class TestCreateOnlyDefault:
    # `CreateOnlyDefault` supplies a default on create, and is omitted
    # entirely on update.
    def setup(self):
        default = serializers.CreateOnlyDefault('2001-01-01')
        class TestSerializer(serializers.Serializer):
            published = serializers.HiddenField(default=default)
            text = serializers.CharField()
        self.Serializer = TestSerializer
    def test_create_only_default_is_provided(self):
        serializer = self.Serializer(data={'text': 'example'})
        assert serializer.is_valid()
        assert serializer.validated_data == {
            'text': 'example', 'published': '2001-01-01'
        }
    def test_create_only_default_is_not_provided_on_update(self):
        # Passing an instance makes this an update, so 'published' is dropped.
        instance = {
            'text': 'example', 'published': '2001-01-01'
        }
        serializer = self.Serializer(instance, data={'text': 'example'})
        assert serializer.is_valid()
        assert serializer.validated_data == {
            'text': 'example',
        }
    def test_create_only_default_callable_sets_context(self):
        """
        CreateOnlyDefault instances with a callable default should set_context
        on the callable if possible
        """
        class TestCallableDefault:
            def set_context(self, serializer_field):
                self.field = serializer_field
            def __call__(self):
                # 'success' only if set_context() was invoked first.
                return "success" if hasattr(self, 'field') else "failure"
        class TestSerializer(serializers.Serializer):
            context_set = serializers.CharField(default=serializers.CreateOnlyDefault(TestCallableDefault()))
        serializer = TestSerializer(data={})
        assert serializer.is_valid()
        assert serializer.validated_data['context_set'] == 'success'
class Test5087Regression:
    """Regression test for issue #5087: `root` should track parent binding."""
    def test_parent_binding(self):
        owner = serializers.Serializer()
        child = serializers.CharField()
        # An unbound field is its own root...
        assert child.root is child
        # ...but once bound, the parent serializer becomes the root.
        child.bind('name', owner)
        assert child.root is owner
# Tests for field input and output values.
# ----------------------------------------
def get_items(mapping_or_list_of_two_tuples):
    """
    Normalise test data to an iterable of ``(value, expected)`` pairs.

    Accepts either a dict of ``{value: expected}`` or an already-paired
    list ``[(value, expected), ...]``.
    """
    data = mapping_or_list_of_two_tuples
    return data.items() if isinstance(data, dict) else data
class FieldValues:
    """
    Base class for testing valid and invalid input values.

    Subclasses declare `field`, plus `valid_inputs`, `invalid_inputs` and
    `outputs` tables (dicts or lists of two-tuples — see `get_items`).
    """
    def test_valid_inputs(self):
        """
        Ensure that valid values return the expected validated data.
        """
        for input_value, expected_output in get_items(self.valid_inputs):
            assert self.field.run_validation(input_value) == expected_output, \
                'input value: {}'.format(repr(input_value))
    def test_invalid_inputs(self):
        """
        Ensure that invalid values raise the expected validation error.
        """
        for input_value, expected_failure in get_items(self.invalid_inputs):
            with pytest.raises(serializers.ValidationError) as exc_info:
                self.field.run_validation(input_value)
            # Compare the full list of error detail strings.
            assert exc_info.value.detail == expected_failure, \
                'input value: {}'.format(repr(input_value))
    def test_outputs(self):
        # `outputs` exercises to_representation() (serialization direction).
        for output_value, expected_output in get_items(self.outputs):
            assert self.field.to_representation(output_value) == expected_output, \
                'output value: {}'.format(repr(output_value))
# Boolean types...
class TestBooleanField(FieldValues):
    """
    Valid and invalid values for `BooleanField`.
    """
    valid_inputs = {
        'true': True,
        'false': False,
        '1': True,
        '0': False,
        1: True,
        0: False,
        True: True,
        False: False,
    }
    invalid_inputs = {
        'foo': ['"foo" is not a valid boolean.'],
        None: ['This field may not be null.']
    }
    outputs = {
        'true': True,
        'false': False,
        '1': True,
        '0': False,
        1: True,
        0: False,
        True: True,
        False: False,
        # Unrecognised strings serialize via ordinary truthiness.
        'other': True
    }
    field = serializers.BooleanField()
    def test_disallow_unhashable_collection_types(self):
        # Unhashable inputs can't be dict keys above, so test them separately.
        inputs = (
            [],
            {},
        )
        field = self.field
        for input_value in inputs:
            with pytest.raises(serializers.ValidationError) as exc_info:
                field.run_validation(input_value)
            expected = ['"{0}" is not a valid boolean.'.format(input_value)]
            assert exc_info.value.detail == expected
class TestNullBooleanField(TestBooleanField):
    """
    Valid and invalid values for `NullBooleanField`.
    """
    # Same as BooleanField, plus 'null'/None as a third valid state.
    valid_inputs = {
        'true': True,
        'false': False,
        'null': None,
        True: True,
        False: False,
        None: None
    }
    invalid_inputs = {
        'foo': ['"foo" is not a valid boolean.'],
    }
    outputs = {
        'true': True,
        'false': False,
        'null': None,
        True: True,
        False: False,
        None: None,
        'other': True
    }
    field = serializers.NullBooleanField()
# String types...
class TestCharField(FieldValues):
    """
    Valid and invalid values for `CharField`.
    """
    valid_inputs = {
        1: '1',
        'abc': 'abc'
    }
    invalid_inputs = {
        (): ['Not a valid string.'],
        True: ['Not a valid string.'],
        '': ['This field may not be blank.']
    }
    outputs = {
        1: '1',
        'abc': 'abc'
    }
    field = serializers.CharField()
    def test_trim_whitespace_default(self):
        # Whitespace is stripped by default.
        field = serializers.CharField()
        assert field.to_internal_value(' abc ') == 'abc'
    def test_trim_whitespace_disabled(self):
        field = serializers.CharField(trim_whitespace=False)
        assert field.to_internal_value(' abc ') == ' abc '
    def test_disallow_blank_with_trim_whitespace(self):
        # Whitespace-only input trims to '' and then fails the blank check.
        field = serializers.CharField(allow_blank=False, trim_whitespace=True)
        with pytest.raises(serializers.ValidationError) as exc_info:
            field.run_validation('   ')
        assert exc_info.value.detail == ['This field may not be blank.']
class TestEmailField(FieldValues):
    """
    Valid and invalid values for `EmailField`.
    """
    valid_inputs = {
        'example@example.com': 'example@example.com',
        # Surrounding whitespace is trimmed.
        ' example@example.com ': 'example@example.com',
    }
    invalid_inputs = {
        'examplecom': ['Enter a valid email address.']
    }
    outputs = {}
    field = serializers.EmailField()
class TestRegexField(FieldValues):
    """
    Valid and invalid values for `RegexField` given a pattern string.
    """
    valid_inputs = {
        'a9': 'a9',
    }
    invalid_inputs = {
        # The pattern is case-sensitive.
        'A9': ["This value does not match the required pattern."]
    }
    outputs = {}
    field = serializers.RegexField(regex='[a-z][0-9]')
# NOTE(review): the stray 'i' in the class name looks like a typo for
# "TestCompiledRegexField"; left unchanged as pytest discovers it either way.
class TestiCompiledRegexField(FieldValues):
    """
    Valid and invalid values for `RegexField` given a pre-compiled pattern.
    """
    valid_inputs = {
        'a9': 'a9',
    }
    invalid_inputs = {
        'A9': ["This value does not match the required pattern."]
    }
    outputs = {}
    field = serializers.RegexField(regex=re.compile('[a-z][0-9]'))
class TestSlugField(FieldValues):
    """
    Valid and invalid values for `SlugField`.
    """
    valid_inputs = {
        'slug-99': 'slug-99',
    }
    invalid_inputs = {
        'slug 99': ['Enter a valid "slug" consisting of letters, numbers, underscores or hyphens.']
    }
    outputs = {}
    field = serializers.SlugField()
    def test_allow_unicode_true(self):
        # With allow_unicode=True a slug containing a Cyrillic letter is valid.
        field = serializers.SlugField(allow_unicode=True)
        validation_error = False
        try:
            field.run_validation(u'slug-99-\u0420')
        except serializers.ValidationError:
            validation_error = True
        assert not validation_error
class TestURLField(FieldValues):
    """
    Valid and invalid values for `URLField`.
    """
    valid_inputs = {
        'http://example.com': 'http://example.com',
    }
    invalid_inputs = {
        # A scheme is required.
        'example.com': ['Enter a valid URL.']
    }
    outputs = {}
    field = serializers.URLField()
class TestUUIDField(FieldValues):
    """
    Valid and invalid values for `UUIDField`.
    """
    # Hyphenated, bare-hex, URN and raw-int forms are all accepted.
    valid_inputs = {
        '825d7aeb-05a9-45b5-a5b7-05df87923cda': uuid.UUID('825d7aeb-05a9-45b5-a5b7-05df87923cda'),
        '825d7aeb05a945b5a5b705df87923cda': uuid.UUID('825d7aeb-05a9-45b5-a5b7-05df87923cda'),
        'urn:uuid:213b7d9b-244f-410d-828c-dabce7a2615d': uuid.UUID('213b7d9b-244f-410d-828c-dabce7a2615d'),
        284758210125106368185219588917561929842: uuid.UUID('d63a6fb6-88d5-40c7-a91c-9edf73283072')
    }
    invalid_inputs = {
        '825d7aeb-05a9-45b5-a5b7': ['"825d7aeb-05a9-45b5-a5b7" is not a valid UUID.'],
        (1, 2, 3): ['"(1, 2, 3)" is not a valid UUID.']
    }
    outputs = {
        uuid.UUID('825d7aeb-05a9-45b5-a5b7-05df87923cda'): '825d7aeb-05a9-45b5-a5b7-05df87923cda'
    }
    field = serializers.UUIDField()
    def _test_format(self, uuid_format, formatted_uuid_0):
        # Helper: round-trip the nil UUID through the given output format.
        field = serializers.UUIDField(format=uuid_format)
        assert field.to_representation(uuid.UUID(int=0)) == formatted_uuid_0
        assert field.to_internal_value(formatted_uuid_0) == uuid.UUID(int=0)
    def test_formats(self):
        self._test_format('int', 0)
        self._test_format('hex_verbose', '00000000-0000-0000-0000-000000000000')
        self._test_format('urn', 'urn:uuid:00000000-0000-0000-0000-000000000000')
        self._test_format('hex', '0' * 32)
class TestIPAddressField(FieldValues):
    """
    Valid and invalid values for `IPAddressField` (default `protocol='both'`).
    """
    valid_inputs = {
        '127.0.0.1': '127.0.0.1',
        '192.168.33.255': '192.168.33.255',
        # IPv6 addresses are normalised to their compressed form.
        '2001:0db8:85a3:0042:1000:8a2e:0370:7334': '2001:db8:85a3:42:1000:8a2e:370:7334',
        '2001:cdba:0:0:0:0:3257:9652': '2001:cdba::3257:9652',
        '2001:cdba::3257:9652': '2001:cdba::3257:9652'
    }
    invalid_inputs = {
        '127001': ['Enter a valid IPv4 or IPv6 address.'],
        '127.122.111.2231': ['Enter a valid IPv4 or IPv6 address.'],
        '2001:::9652': ['Enter a valid IPv4 or IPv6 address.'],
        '2001:0db8:85a3:0042:1000:8a2e:0370:73341': ['Enter a valid IPv4 or IPv6 address.'],
        1000: ['Enter a valid IPv4 or IPv6 address.'],
    }
    outputs = {}
    field = serializers.IPAddressField()
class TestIPv4AddressField(FieldValues):
    """
    Valid and invalid values for `IPAddressField` with `protocol='IPv4'`.
    """
    valid_inputs = {
        '127.0.0.1': '127.0.0.1',
        '192.168.33.255': '192.168.33.255',
    }
    invalid_inputs = {
        '127001': ['Enter a valid IPv4 address.'],
        '127.122.111.2231': ['Enter a valid IPv4 address.'],
    }
    outputs = {}
    field = serializers.IPAddressField(protocol='IPv4')
class TestIPv6AddressField(FieldValues):
    """
    Valid and invalid values for `IPAddressField` with `protocol='IPv6'`.
    """
    valid_inputs = {
        '2001:0db8:85a3:0042:1000:8a2e:0370:7334': '2001:db8:85a3:42:1000:8a2e:370:7334',
        '2001:cdba:0:0:0:0:3257:9652': '2001:cdba::3257:9652',
        '2001:cdba::3257:9652': '2001:cdba::3257:9652'
    }
    invalid_inputs = {
        '2001:::9652': ['Enter a valid IPv4 or IPv6 address.'],
        '2001:0db8:85a3:0042:1000:8a2e:0370:73341': ['Enter a valid IPv4 or IPv6 address.'],
    }
    outputs = {}
    field = serializers.IPAddressField(protocol='IPv6')
class TestFilePathField(FieldValues):
    """
    Valid and invalid values for `FilePathField`
    """
    # Uses this test file itself as a guaranteed-existing path choice.
    valid_inputs = {
        __file__: __file__,
    }
    invalid_inputs = {
        'wrong_path': ['"wrong_path" is not a valid path choice.']
    }
    outputs = {
    }
    field = serializers.FilePathField(
        path=os.path.abspath(os.path.dirname(__file__))
    )
# Number types...
class TestIntegerField(FieldValues):
    """
    Valid and invalid values for `IntegerField`.
    """
    valid_inputs = {
        '1': 1,
        '0': 0,
        1: 1,
        0: 0,
        # Whole-number floats and their string forms coerce cleanly.
        1.0: 1,
        0.0: 0,
        '1.0': 1
    }
    invalid_inputs = {
        # Fractional values must not silently truncate.
        0.5: ['A valid integer is required.'],
        'abc': ['A valid integer is required.'],
        '0.5': ['A valid integer is required.']
    }
    outputs = {
        '1': 1,
        '0': 0,
        1: 1,
        0: 0,
        1.0: 1,
        0.0: 0
    }
    field = serializers.IntegerField()
class TestMinMaxIntegerField(FieldValues):
    """
    Valid and invalid values for `IntegerField` with min and max limits.
    """
    valid_inputs = {
        '1': 1,
        '3': 3,
        1: 1,
        3: 3,
    }
    invalid_inputs = {
        # Bounds are inclusive: 1..3 valid, 0 and 4 rejected.
        0: ['Ensure this value is greater than or equal to 1.'],
        4: ['Ensure this value is less than or equal to 3.'],
        '0': ['Ensure this value is greater than or equal to 1.'],
        '4': ['Ensure this value is less than or equal to 3.'],
    }
    outputs = {}
    field = serializers.IntegerField(min_value=1, max_value=3)
class TestFloatField(FieldValues):
    """
    Valid and invalid values for `FloatField`.
    """
    valid_inputs = {
        '1': 1.0,
        '0': 0.0,
        1: 1.0,
        0: 0.0,
        1.0: 1.0,
        0.0: 0.0,
    }
    invalid_inputs = {
        'abc': ["A valid number is required."]
    }
    outputs = {
        '1': 1.0,
        '0': 0.0,
        1: 1.0,
        0: 0.0,
        1.0: 1.0,
        0.0: 0.0,
    }
    field = serializers.FloatField()
class TestMinMaxFloatField(FieldValues):
    """
    Valid and invalid values for `FloatField` with min and max limits.
    """
    valid_inputs = {
        '1': 1,
        '3': 3,
        1: 1,
        3: 3,
        1.0: 1.0,
        3.0: 3.0,
    }
    invalid_inputs = {
        0.9: ['Ensure this value is greater than or equal to 1.'],
        3.1: ['Ensure this value is less than or equal to 3.'],
        '0.0': ['Ensure this value is greater than or equal to 1.'],
        '3.1': ['Ensure this value is less than or equal to 3.'],
    }
    outputs = {}
    field = serializers.FloatField(min_value=1, max_value=3)
class TestDecimalField(FieldValues):
    """
    Valid and invalid values for `DecimalField` (max_digits=3, decimal_places=1).
    """
    valid_inputs = {
        '12.3': Decimal('12.3'),
        '0.1': Decimal('0.1'),
        10: Decimal('10'),
        0: Decimal('0'),
        12.3: Decimal('12.3'),
        0.1: Decimal('0.1'),
        # Scientific notation is accepted.
        '2E+1': Decimal('20'),
    }
    # List of pairs (not a dict): Decimal('NaN') is unorderable as a dict key.
    invalid_inputs = (
        ('abc', ["A valid number is required."]),
        (Decimal('Nan'), ["A valid number is required."]),
        (Decimal('Inf'), ["A valid number is required."]),
        ('12.345', ["Ensure that there are no more than 3 digits in total."]),
        (200000000000.0, ["Ensure that there are no more than 3 digits in total."]),
        ('0.01', ["Ensure that there are no more than 1 decimal places."]),
        (123, ["Ensure that there are no more than 2 digits before the decimal point."]),
        ('2E+2', ["Ensure that there are no more than 2 digits before the decimal point."])
    )
    outputs = {
        # Representation quantizes to one decimal place and coerces to string.
        '1': '1.0',
        '0': '0.0',
        '1.09': '1.1',
        '0.04': '0.0',
        1: '1.0',
        0: '0.0',
        Decimal('1.0'): '1.0',
        Decimal('0.0'): '0.0',
        Decimal('1.09'): '1.1',
        Decimal('0.04'): '0.0'
    }
    field = serializers.DecimalField(max_digits=3, decimal_places=1)
class TestMinMaxDecimalField(FieldValues):
    """
    Valid and invalid values for `DecimalField` with min and max limits.
    """
    valid_inputs = {
        '10.0': Decimal('10.0'),
        '20.0': Decimal('20.0'),
    }
    invalid_inputs = {
        '9.9': ['Ensure this value is greater than or equal to 10.'],
        '20.1': ['Ensure this value is less than or equal to 20.'],
    }
    outputs = {}
    field = serializers.DecimalField(
        max_digits=3, decimal_places=1,
        min_value=10, max_value=20
    )
class TestNoMaxDigitsDecimalField(FieldValues):
    """
    `DecimalField` should accept `max_digits=None` (no total-digit limit).
    """
    field = serializers.DecimalField(
        max_value=100, min_value=0,
        decimal_places=2, max_digits=None
    )
    valid_inputs = {
        '10': Decimal('10.00')
    }
    invalid_inputs = {}
    outputs = {}
class TestNoStringCoercionDecimalField(FieldValues):
    """
    Output values for `DecimalField` with `coerce_to_string=False`.
    """
    valid_inputs = {}
    invalid_inputs = {}
    outputs = {
        # Representation stays a Decimal rather than a string.
        1.09: Decimal('1.1'),
        0.04: Decimal('0.0'),
        '1.09': Decimal('1.1'),
        '0.04': Decimal('0.0'),
        Decimal('1.09'): Decimal('1.1'),
        Decimal('0.04'): Decimal('0.0'),
    }
    field = serializers.DecimalField(
        max_digits=3, decimal_places=1,
        coerce_to_string=False
    )
class TestLocalizedDecimalField(TestCase):
    # `localize=True` parses and renders decimals using the active locale
    # (Polish uses ',' as the decimal separator).
    @override_settings(USE_L10N=True, LANGUAGE_CODE='pl')
    def test_to_internal_value(self):
        field = serializers.DecimalField(max_digits=2, decimal_places=1, localize=True)
        assert field.to_internal_value('1,1') == Decimal('1.1')
    @override_settings(USE_L10N=True, LANGUAGE_CODE='pl')
    def test_to_representation(self):
        field = serializers.DecimalField(max_digits=2, decimal_places=1, localize=True)
        assert field.to_representation(Decimal('1.1')) == '1,1'
    def test_localize_forces_coerce_to_string(self):
        # localize=True overrides coerce_to_string=False: output is a string.
        field = serializers.DecimalField(max_digits=2, decimal_places=1, coerce_to_string=False, localize=True)
        assert isinstance(field.to_representation(Decimal('1.1')), six.string_types)
class TestQuantizedValueForDecimal(TestCase):
    # Incoming values should be quantized to `decimal_places`, regardless of
    # whether they arrive as int, string, or partially-precise string.
    # (0, (1, 2, 0, 0), -2) is Decimal('12.00').as_tuple().
    def test_int_quantized_value_for_decimal(self):
        field = serializers.DecimalField(max_digits=4, decimal_places=2)
        value = field.to_internal_value(12).as_tuple()
        expected_digit_tuple = (0, (1, 2, 0, 0), -2)
        assert value == expected_digit_tuple
    def test_string_quantized_value_for_decimal(self):
        field = serializers.DecimalField(max_digits=4, decimal_places=2)
        value = field.to_internal_value('12').as_tuple()
        expected_digit_tuple = (0, (1, 2, 0, 0), -2)
        assert value == expected_digit_tuple
    def test_part_precision_string_quantized_value_for_decimal(self):
        field = serializers.DecimalField(max_digits=4, decimal_places=2)
        value = field.to_internal_value('12.0').as_tuple()
        expected_digit_tuple = (0, (1, 2, 0, 0), -2)
        assert value == expected_digit_tuple
class TestNoDecimalPlaces(FieldValues):
    """
    `DecimalField` with `decimal_places=None`: no quantization on output.
    """
    valid_inputs = {
        '0.12345': Decimal('0.12345'),
    }
    invalid_inputs = {
        '0.1234567': ['Ensure that there are no more than 6 digits in total.']
    }
    outputs = {
        '1.2345': '1.2345',
        '0': '0',
        '1.1': '1.1',
    }
    field = serializers.DecimalField(max_digits=6, decimal_places=None)
class TestRoundingDecimalField(TestCase):
    # The `rounding` argument must be one of the `decimal` module's modes.
    def test_valid_rounding(self):
        field = serializers.DecimalField(max_digits=4, decimal_places=2, rounding=ROUND_UP)
        assert field.to_representation(Decimal('1.234')) == '1.24'
        field = serializers.DecimalField(max_digits=4, decimal_places=2, rounding=ROUND_DOWN)
        assert field.to_representation(Decimal('1.234')) == '1.23'
    def test_invalid_rounding(self):
        # An unknown rounding mode is rejected at construction time.
        with pytest.raises(AssertionError) as excinfo:
            serializers.DecimalField(max_digits=1, decimal_places=1, rounding='ROUND_UNKNOWN')
        assert 'Invalid rounding option' in str(excinfo.value)
# Date & time serializers...
class TestDateField(FieldValues):
    """
    Valid and invalid values for `DateField`.
    """
    valid_inputs = {
        '2001-01-01': datetime.date(2001, 1, 1),
        datetime.date(2001, 1, 1): datetime.date(2001, 1, 1),
    }
    invalid_inputs = {
        'abc': ['Date has wrong format. Use one of these formats instead: YYYY[-MM[-DD]].'],
        '2001-99-99': ['Date has wrong format. Use one of these formats instead: YYYY[-MM[-DD]].'],
        # A datetime must not be silently accepted where a date is expected.
        datetime.datetime(2001, 1, 1, 12, 00): ['Expected a date but got a datetime.'],
    }
    outputs = {
        datetime.date(2001, 1, 1): '2001-01-01',
        '2001-01-01': '2001-01-01',
        six.text_type('2016-01-10'): '2016-01-10',
        None: None,
        '': None,
    }
    field = serializers.DateField()
class TestCustomInputFormatDateField(FieldValues):
    """
    Valid and invalid values for `DateField` with a custom input format.
    """
    valid_inputs = {
        '1 Jan 2001': datetime.date(2001, 1, 1),
    }
    invalid_inputs = {
        # ISO input is no longer accepted once a custom format is given.
        '2001-01-01': ['Date has wrong format. Use one of these formats instead: DD [Jan-Dec] YYYY.']
    }
    outputs = {}
    field = serializers.DateField(input_formats=['%d %b %Y'])
class TestCustomOutputFormatDateField(FieldValues):
    """
    Values for `DateField` with a custom output format.
    """
    valid_inputs = {}
    invalid_inputs = {}
    outputs = {
        datetime.date(2001, 1, 1): '01 Jan 2001'
    }
    field = serializers.DateField(format='%d %b %Y')
class TestNoOutputFormatDateField(FieldValues):
    """
    Values for `DateField` with no output format.
    """
    valid_inputs = {}
    invalid_inputs = {}
    outputs = {
        # format=None means the representation stays a `date` object.
        datetime.date(2001, 1, 1): datetime.date(2001, 1, 1)
    }
    field = serializers.DateField(format=None)
class TestDateTimeField(FieldValues):
    """
    Valid and invalid values for `DateTimeField` with UTC as the default timezone.
    """
    valid_inputs = {
        '2001-01-01 13:00': datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc),
        '2001-01-01T13:00': datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc),
        '2001-01-01T13:00Z': datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc),
        # Naive datetimes are made aware in the default (UTC) timezone.
        datetime.datetime(2001, 1, 1, 13, 00): datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc),
        datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc): datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc),
    }
    invalid_inputs = {
        'abc': ['Datetime has wrong format. Use one of these formats instead: YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z].'],
        '2001-99-99T99:00': ['Datetime has wrong format. Use one of these formats instead: YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z].'],
        '2018-08-16 22:00-24:00': ['Datetime has wrong format. Use one of these formats instead: YYYY-MM-DDThh:mm[:ss[.uuuuuu]][+HH:MM|-HH:MM|Z].'],
        datetime.date(2001, 1, 1): ['Expected a datetime but got a date.'],
        # Conversion to UTC would overflow datetime.max.
        '9999-12-31T21:59:59.99990-03:00': ['Datetime value out of range.'],
    }
    outputs = {
        datetime.datetime(2001, 1, 1, 13, 00): '2001-01-01T13:00:00Z',
        datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc): '2001-01-01T13:00:00Z',
        '2001-01-01T00:00:00': '2001-01-01T00:00:00',
        six.text_type('2016-01-10T00:00:00'): '2016-01-10T00:00:00',
        None: None,
        '': None,
    }
    field = serializers.DateTimeField(default_timezone=utc)
class TestCustomInputFormatDateTimeField(FieldValues):
    """
    Valid and invalid values for `DateTimeField` with a custom input format.
    """
    valid_inputs = {
        '1:35pm, 1 Jan 2001': datetime.datetime(2001, 1, 1, 13, 35, tzinfo=utc),
    }
    invalid_inputs = {
        '2001-01-01T20:50': ['Datetime has wrong format. Use one of these formats instead: hh:mm[AM|PM], DD [Jan-Dec] YYYY.']
    }
    outputs = {}
    field = serializers.DateTimeField(default_timezone=utc, input_formats=['%I:%M%p, %d %b %Y'])
class TestCustomOutputFormatDateTimeField(FieldValues):
    """
    Values for `DateTimeField` with a custom output format.
    """
    valid_inputs = {}
    invalid_inputs = {}
    outputs = {
        datetime.datetime(2001, 1, 1, 13, 00): '01:00PM, 01 Jan 2001',
    }
    field = serializers.DateTimeField(format='%I:%M%p, %d %b %Y')
class TestNoOutputFormatDateTimeField(FieldValues):
    """
    Values for `DateTimeField` with no output format.
    """
    valid_inputs = {}
    invalid_inputs = {}
    outputs = {
        # format=None means the representation stays a `datetime` object.
        datetime.datetime(2001, 1, 1, 13, 00): datetime.datetime(2001, 1, 1, 13, 00),
    }
    field = serializers.DateTimeField(format=None)
class TestNaiveDateTimeField(FieldValues):
    """
    Valid and invalid values for `DateTimeField` with naive datetimes.
    """
    valid_inputs = {
        # default_timezone=None strips awareness from incoming values.
        datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc): datetime.datetime(2001, 1, 1, 13, 00),
        '2001-01-01 13:00': datetime.datetime(2001, 1, 1, 13, 00),
    }
    invalid_inputs = {}
    outputs = {
        datetime.datetime(2001, 1, 1, 13, 00): '2001-01-01T13:00:00',
        datetime.datetime(2001, 1, 1, 13, 00, tzinfo=utc): '2001-01-01T13:00:00',
    }
    field = serializers.DateTimeField(default_timezone=None)
@pytest.mark.skipif(pytz is None, reason='pytz not installed')
class TestTZWithDateTimeField(FieldValues):
    """
    Valid and invalid values for `DateTimeField` when not using UTC as the timezone.
    """
    @classmethod
    def setup_class(cls):
        # use class setup method, as class-level attribute will still be evaluated even if test is skipped
        kolkata = pytz.timezone('Asia/Kolkata')
        cls.valid_inputs = {
            '2016-12-19T10:00:00': kolkata.localize(datetime.datetime(2016, 12, 19, 10)),
            '2016-12-19T10:00:00+05:30': kolkata.localize(datetime.datetime(2016, 12, 19, 10)),
            datetime.datetime(2016, 12, 19, 10): kolkata.localize(datetime.datetime(2016, 12, 19, 10)),
        }
        cls.invalid_inputs = {}
        cls.outputs = {
            # Naive values render in the field timezone; aware values convert.
            datetime.datetime(2016, 12, 19, 10): '2016-12-19T10:00:00+05:30',
            datetime.datetime(2016, 12, 19, 4, 30, tzinfo=utc): '2016-12-19T10:00:00+05:30',
        }
        cls.field = serializers.DateTimeField(default_timezone=kolkata)
@pytest.mark.skipif(pytz is None, reason='pytz not installed')
@override_settings(TIME_ZONE='UTC', USE_TZ=True)
class TestDefaultTZDateTimeField(TestCase):
    """
    Test the current/default timezone handling in `DateTimeField`.
    """
    @classmethod
    def setup_class(cls):
        cls.field = serializers.DateTimeField()
        cls.kolkata = pytz.timezone('Asia/Kolkata')
    def test_default_timezone(self):
        assert self.field.default_timezone() == utc
    def test_current_timezone(self):
        # default_timezone() follows Django's active timezone.
        # NOTE(review): if an assert fails between activate()/deactivate(),
        # the Kolkata timezone leaks into later tests — consider try/finally.
        assert self.field.default_timezone() == utc
        activate(self.kolkata)
        assert self.field.default_timezone() == self.kolkata
        deactivate()
        assert self.field.default_timezone() == utc
class TestNaiveDayLightSavingTimeTimeZoneDateTimeField(FieldValues):
    """
    Invalid values for `DateTimeField` with datetime in DST shift (non-existing or ambiguous) and timezone with DST.
    Timezone America/New_York has DST shift from 2017-03-12T02:00:00 to 2017-03-12T03:00:00 and
    from 2017-11-05T02:00:00 to 2017-11-05T01:00:00 in 2017.
    """
    valid_inputs = {}
    invalid_inputs = {
        '2017-03-12T02:30:00': ['Invalid datetime for the timezone "America/New_York".'],
        '2017-11-05T01:30:00': ['Invalid datetime for the timezone "America/New_York".']
    }
    outputs = {}
    class MockTimezone:
        # Stub timezone whose localize() always reports a DST conflict,
        # so the test doesn't depend on real tz data.
        @staticmethod
        def localize(value, is_dst):
            raise compat.InvalidTimeError()
        def __str__(self):
            return 'America/New_York'
    field = serializers.DateTimeField(default_timezone=MockTimezone())
class TestTimeField(FieldValues):
    """
    Valid and invalid values for `TimeField`.
    """
    valid_inputs = {
        '13:00': datetime.time(13, 00),
        datetime.time(13, 00): datetime.time(13, 00),
    }
    invalid_inputs = {
        'abc': ['Time has wrong format. Use one of these formats instead: hh:mm[:ss[.uuuuuu]].'],
        '99:99': ['Time has wrong format. Use one of these formats instead: hh:mm[:ss[.uuuuuu]].'],
    }
    outputs = {
        datetime.time(13, 0): '13:00:00',
        datetime.time(0, 0): '00:00:00',
        '00:00:00': '00:00:00',
        None: None,
        '': None,
    }
    field = serializers.TimeField()
class TestCustomInputFormatTimeField(FieldValues):
    """
    Valid and invalid values for `TimeField` with a custom input format.
    """
    valid_inputs = {
        '1:00pm': datetime.time(13, 00),
    }
    invalid_inputs = {
        '13:00': ['Time has wrong format. Use one of these formats instead: hh:mm[AM|PM].'],
    }
    outputs = {}
    field = serializers.TimeField(input_formats=['%I:%M%p'])
class TestCustomOutputFormatTimeField(FieldValues):
    """
    Values for `TimeField` with a custom output format.
    """
    valid_inputs = {}
    invalid_inputs = {}
    outputs = {
        datetime.time(13, 00): '01:00PM'
    }
    field = serializers.TimeField(format='%I:%M%p')
class TestNoOutputFormatTimeField(FieldValues):
    """
    Values for `TimeField` with no output format.
    """
    valid_inputs = {}
    invalid_inputs = {}
    outputs = {
        # format=None means the representation stays a `time` object.
        datetime.time(13, 00): datetime.time(13, 00)
    }
    field = serializers.TimeField(format=None)
class TestDurationField(FieldValues):
    """
    Valid and invalid values for `DurationField`.
    """
    valid_inputs = {
        # Bare integers/strings are interpreted as seconds.
        '13': datetime.timedelta(seconds=13),
        '3 08:32:01.000123': datetime.timedelta(days=3, hours=8, minutes=32, seconds=1, microseconds=123),
        '08:01': datetime.timedelta(minutes=8, seconds=1),
        datetime.timedelta(days=3, hours=8, minutes=32, seconds=1, microseconds=123): datetime.timedelta(days=3, hours=8, minutes=32, seconds=1, microseconds=123),
        3600: datetime.timedelta(hours=1),
    }
    invalid_inputs = {
        'abc': ['Duration has wrong format. Use one of these formats instead: [DD] [HH:[MM:]]ss[.uuuuuu].'],
        '3 08:32 01.123': ['Duration has wrong format. Use one of these formats instead: [DD] [HH:[MM:]]ss[.uuuuuu].'],
    }
    outputs = {
        datetime.timedelta(days=3, hours=8, minutes=32, seconds=1, microseconds=123): '3 08:32:01.000123',
    }
    field = serializers.DurationField()
# Choice types...
class TestChoiceField(FieldValues):
    """
    Valid and invalid values for `ChoiceField`.
    """
    valid_inputs = {
        'poor': 'poor',
        'medium': 'medium',
        'good': 'good',
    }
    invalid_inputs = {
        'amazing': ['"amazing" is not a valid choice.']
    }
    outputs = {
        # to_representation() does not re-validate: unknown values pass through.
        'good': 'good',
        '': '',
        'amazing': 'amazing',
    }
    field = serializers.ChoiceField(
        choices=[
            ('poor', 'Poor quality'),
            ('medium', 'Medium quality'),
            ('good', 'Good quality'),
        ]
    )
    def test_allow_blank(self):
        """
        If `allow_blank=True` then '' is a valid input.
        """
        field = serializers.ChoiceField(
            allow_blank=True,
            choices=[
                ('poor', 'Poor quality'),
                ('medium', 'Medium quality'),
                ('good', 'Good quality'),
            ]
        )
        output = field.run_validation('')
        assert output == ''
    def test_allow_null(self):
        """
        If `allow_null=True` then '' on HTML forms is treated as None.
        """
        field = serializers.ChoiceField(
            allow_null=True,
            choices=[
                1, 2, 3
            ]
        )
        field.field_name = 'example'
        value = field.get_value(QueryDict('example='))
        assert value is None
        output = field.run_validation(None)
        assert output is None
    def test_iter_options(self):
        """
        iter_options() should return a list of options and option groups.
        """
        field = serializers.ChoiceField(
            choices=[
                ('Numbers', ['integer', 'float']),
                ('Strings', ['text', 'email', 'url']),
                'boolean'
            ]
        )
        items = list(field.iter_options())
        assert items[0].start_option_group
        assert items[0].label == 'Numbers'
        assert items[1].value == 'integer'
        assert items[2].value == 'float'
        assert items[3].end_option_group
        assert items[4].start_option_group
        assert items[4].label == 'Strings'
        assert items[5].value == 'text'
        assert items[6].value == 'email'
        assert items[7].value == 'url'
        assert items[8].end_option_group
        assert items[9].value == 'boolean'
    def test_edit_choices(self):
        # Reassigning `choices` after construction should update validation.
        field = serializers.ChoiceField(
            allow_null=True,
            choices=[
                1, 2,
            ]
        )
        field.choices = [1]
        # Fixed: compare by equality, not identity — `is 1` only worked via
        # CPython's small-int cache and raises SyntaxWarning on Python 3.8+.
        assert field.run_validation(1) == 1
        with pytest.raises(serializers.ValidationError) as exc_info:
            field.run_validation(2)
        assert exc_info.value.detail == ['"2" is not a valid choice.']
class TestChoiceFieldWithType(FieldValues):
"""
Valid and invalid values for a `Choice` field that uses an integer type,
instead of a char type.
"""
valid_inputs = {
'1': 1,
3: 3,
}
invalid_inputs = {
5: ['"5" is not a valid choice.'],
'abc': ['"abc" is not a valid choice.']
}
outputs = {
'1': 1,
1: 1
}
field = serializers.ChoiceField(
choices=[
(1, 'Poor quality'),
(2, 'Medium quality'),
(3, 'Good quality'),
]
)
class TestChoiceFieldWithListChoices(FieldValues):
"""
Valid and invalid values for a `Choice` field that uses a flat list for the
choices, rather than a list of pairs of (`value`, `description`).
"""
valid_inputs = {
'poor': 'poor',
'medium': 'medium',
'good': 'good',
}
invalid_inputs = {
'awful': ['"awful" is not a valid choice.']
}
outputs = {
'good': 'good'
}
field = serializers.ChoiceField(choices=('poor', 'medium', 'good'))
class TestChoiceFieldWithGroupedChoices(FieldValues):
"""
Valid and invalid values for a `Choice` field that uses a grouped list for the
choices, rather than a list of pairs of (`value`, `description`).
"""
valid_inputs = {
'poor': 'poor',
'medium': 'medium',
'good': 'good',
}
invalid_inputs = {
'awful': ['"awful" is not a valid choice.']
}
outputs = {
'good': 'good'
}
field = serializers.ChoiceField(
choices=[
(
'Category',
(
('poor', 'Poor quality'),
('medium', 'Medium quality'),
),
),
('good', 'Good quality'),
]
)
class TestChoiceFieldWithMixedChoices(FieldValues):
"""
Valid and invalid values for a `Choice` field that uses a single paired or
grouped.
"""
valid_inputs = {
'poor': 'poor',
'medium': 'medium',
'good': 'good',
}
invalid_inputs = {
'awful': ['"awful" is not a valid choice.']
}
outputs = {
'good': 'good'
}
field = serializers.ChoiceField(
choices=[
(
'Category',
(
('poor', 'Poor quality'),
),
),
'medium',
('good', 'Good quality'),
]
)
class TestMultipleChoiceField(FieldValues):
"""
Valid and invalid values for `MultipleChoiceField`.
"""
valid_inputs = {
(): set(),
('aircon',): {'aircon'},
('aircon', 'manual'): {'aircon', 'manual'},
}
invalid_inputs = {
'abc': ['Expected a list of items but got type "str".'],
('aircon', 'incorrect'): ['"incorrect" is not a valid choice.']
}
outputs = [
(['aircon', 'manual', 'incorrect'], {'aircon', 'manual', 'incorrect'})
]
field = serializers.MultipleChoiceField(
choices=[
('aircon', 'AirCon'),
('manual', 'Manual drive'),
('diesel', 'Diesel'),
]
)
def test_against_partial_and_full_updates(self):
field = serializers.MultipleChoiceField(choices=(('a', 'a'), ('b', 'b')))
field.partial = False
assert field.get_value(QueryDict({})) == []
field.partial = True
assert field.get_value(QueryDict({})) == rest_framework.fields.empty
class TestEmptyMultipleChoiceField(FieldValues):
"""
Invalid values for `MultipleChoiceField(allow_empty=False)`.
"""
valid_inputs = {
}
invalid_inputs = (
([], ['This selection may not be empty.']),
)
outputs = [
]
field = serializers.MultipleChoiceField(
choices=[
('consistency', 'Consistency'),
('availability', 'Availability'),
('partition', 'Partition tolerance'),
],
allow_empty=False
)
# File serializers...
class MockFile:
    """In-memory stand-in for an uploaded file object.

    Mimics the attributes that `FileField`/`ImageField` read from Django's
    uploaded-file objects (`name`, `size`, `url`) without touching the
    filesystem.
    """

    def __init__(self, name='', size=0, url=''):
        self.name = name
        self.size = size
        self.url = url

    def __eq__(self, other):
        # Value equality over all three attributes, so test expectations
        # like `MockFile(name='x') == MockFile(name='x')` hold.
        return (
            isinstance(other, MockFile) and
            self.name == other.name and
            self.size == other.size and
            self.url == other.url
        )

    def __hash__(self):
        # Defining __eq__ alone makes a class unhashable in Python 3;
        # provide a matching __hash__ so instances stay usable as dict
        # keys / set members.
        return hash((self.name, self.size, self.url))

    def __repr__(self):
        # Readable output when an equality assertion on files fails.
        return 'MockFile(name=%r, size=%r, url=%r)' % (self.name, self.size, self.url)
class TestFileField(FieldValues):
"""
Values for `FileField`.
"""
valid_inputs = [
(MockFile(name='example', size=10), MockFile(name='example', size=10))
]
invalid_inputs = [
('invalid', ['The submitted data was not a file. Check the encoding type on the form.']),
(MockFile(name='example.txt', size=0), ['The submitted file is empty.']),
(MockFile(name='', size=10), ['No filename could be determined.']),
(MockFile(name='x' * 100, size=10), ['Ensure this filename has at most 10 characters (it has 100).'])
]
outputs = [
(MockFile(name='example.txt', url='/example.txt'), '/example.txt'),
('', None)
]
field = serializers.FileField(max_length=10)
class TestFieldFieldWithName(FieldValues):
"""
Values for `FileField` with a filename output instead of URLs.
"""
valid_inputs = {}
invalid_inputs = {}
outputs = [
(MockFile(name='example.txt', url='/example.txt'), 'example.txt')
]
field = serializers.FileField(use_url=False)
def ext_validator(value):
    # Extension-only validator used by the stubbed image fields below.
    # NOTE: the (ungrammatical) message text is asserted verbatim by
    # TestInvalidImageField, so it must not be reworded here.
    if not value.name.endswith('.png'):
        raise serializers.ValidationError('File extension is not allowed. Allowed extensions is png.')
# Stub out mock Django `forms.ImageField` class so we don't *actually*
# call into it's regular validation, or require PIL for testing.
class PassImageValidation(DjangoImageField):
default_validators = [ext_validator]
def to_python(self, value):
return value
class FailImageValidation(PassImageValidation):
def to_python(self, value):
if value.name == 'badimage.png':
raise serializers.ValidationError(self.error_messages['invalid_image'])
return value
class TestInvalidImageField(FieldValues):
"""
Values for an invalid `ImageField`.
"""
valid_inputs = {}
invalid_inputs = [
(MockFile(name='badimage.png', size=10), ['Upload a valid image. The file you uploaded was either not an image or a corrupted image.']),
(MockFile(name='goodimage.html', size=10), ['File extension is not allowed. Allowed extensions is png.'])
]
outputs = {}
field = serializers.ImageField(_DjangoImageField=FailImageValidation)
class TestValidImageField(FieldValues):
"""
Values for an valid `ImageField`.
"""
valid_inputs = [
(MockFile(name='example.png', size=10), MockFile(name='example.png', size=10))
]
invalid_inputs = {}
outputs = {}
field = serializers.ImageField(_DjangoImageField=PassImageValidation)
# Composite serializers...
class TestListField(FieldValues):
"""
Values for `ListField` with IntegerField as child.
"""
valid_inputs = [
([1, 2, 3], [1, 2, 3]),
(['1', '2', '3'], [1, 2, 3]),
([], [])
]
invalid_inputs = [
('not a list', ['Expected a list of items but got type "str".']),
([1, 2, 'error', 'error'], {2: ['A valid integer is required.'], 3: ['A valid integer is required.']}),
({'one': 'two'}, ['Expected a list of items but got type "dict".'])
]
outputs = [
([1, 2, 3], [1, 2, 3]),
(['1', '2', '3'], [1, 2, 3])
]
field = serializers.ListField(child=serializers.IntegerField())
def test_no_source_on_child(self):
with pytest.raises(AssertionError) as exc_info:
serializers.ListField(child=serializers.IntegerField(source='other'))
assert str(exc_info.value) == (
"The `source` argument is not meaningful when applied to a `child=` field. "
"Remove `source=` from the field declaration."
)
def test_collection_types_are_invalid_input(self):
field = serializers.ListField(child=serializers.CharField())
input_value = ({'one': 'two'})
with pytest.raises(serializers.ValidationError) as exc_info:
field.to_internal_value(input_value)
assert exc_info.value.detail == ['Expected a list of items but got type "dict".']
class TestNestedListField(FieldValues):
"""
Values for nested `ListField` with IntegerField as child.
"""
valid_inputs = [
([[1, 2], [3]], [[1, 2], [3]]),
([[]], [[]])
]
invalid_inputs = [
(['not a list'], {0: ['Expected a list of items but got type "str".']}),
([[1, 2, 'error'], ['error']], {0: {2: ['A valid integer is required.']}, 1: {0: ['A valid integer is required.']}}),
([{'one': 'two'}], {0: ['Expected a list of items but got type "dict".']})
]
outputs = [
([[1, 2], [3]], [[1, 2], [3]]),
]
field = serializers.ListField(child=serializers.ListField(child=serializers.IntegerField()))
class TestEmptyListField(FieldValues):
"""
Values for `ListField` with allow_empty=False flag.
"""
valid_inputs = {}
invalid_inputs = [
([], ['This list may not be empty.'])
]
outputs = {}
field = serializers.ListField(child=serializers.IntegerField(), allow_empty=False)
class TestListFieldLengthLimit(FieldValues):
valid_inputs = ()
invalid_inputs = [
((0, 1), ['Ensure this field has at least 3 elements.']),
((0, 1, 2, 3, 4, 5), ['Ensure this field has no more than 4 elements.']),
]
outputs = ()
field = serializers.ListField(child=serializers.IntegerField(), min_length=3, max_length=4)
class TestUnvalidatedListField(FieldValues):
"""
Values for `ListField` with no `child` argument.
"""
valid_inputs = [
([1, '2', True, [4, 5, 6]], [1, '2', True, [4, 5, 6]]),
]
invalid_inputs = [
('not a list', ['Expected a list of items but got type "str".']),
]
outputs = [
([1, '2', True, [4, 5, 6]], [1, '2', True, [4, 5, 6]]),
]
field = serializers.ListField()
class TestDictField(FieldValues):
"""
Values for `DictField` with CharField as child.
"""
valid_inputs = [
({'a': 1, 'b': '2', 3: 3}, {'a': '1', 'b': '2', '3': '3'}),
]
invalid_inputs = [
({'a': 1, 'b': None, 'c': None}, {'b': ['This field may not be null.'], 'c': ['This field may not be null.']}),
('not a dict', ['Expected a dictionary of items but got type "str".']),
]
outputs = [
({'a': 1, 'b': '2', 3: 3}, {'a': '1', 'b': '2', '3': '3'}),
]
field = serializers.DictField(child=serializers.CharField())
def test_no_source_on_child(self):
with pytest.raises(AssertionError) as exc_info:
serializers.DictField(child=serializers.CharField(source='other'))
assert str(exc_info.value) == (
"The `source` argument is not meaningful when applied to a `child=` field. "
"Remove `source=` from the field declaration."
)
def test_allow_null(self):
"""
If `allow_null=True` then `None` is a valid input.
"""
field = serializers.DictField(allow_null=True)
output = field.run_validation(None)
assert output is None
class TestNestedDictField(FieldValues):
"""
Values for nested `DictField` with CharField as child.
"""
valid_inputs = [
({0: {'a': 1, 'b': '2'}, 1: {3: 3}}, {'0': {'a': '1', 'b': '2'}, '1': {'3': '3'}}),
]
invalid_inputs = [
({0: {'a': 1, 'b': None}, 1: {'c': None}}, {'0': {'b': ['This field may not be null.']}, '1': {'c': ['This field may not be null.']}}),
({0: 'not a dict'}, {'0': ['Expected a dictionary of items but got type "str".']}),
]
outputs = [
({0: {'a': 1, 'b': '2'}, 1: {3: 3}}, {'0': {'a': '1', 'b': '2'}, '1': {'3': '3'}}),
]
field = serializers.DictField(child=serializers.DictField(child=serializers.CharField()))
class TestDictFieldWithNullChild(FieldValues):
"""
Values for `DictField` with allow_null CharField as child.
"""
valid_inputs = [
({'a': None, 'b': '2', 3: 3}, {'a': None, 'b': '2', '3': '3'}),
]
invalid_inputs = [
]
outputs = [
({'a': None, 'b': '2', 3: 3}, {'a': None, 'b': '2', '3': '3'}),
]
field = serializers.DictField(child=serializers.CharField(allow_null=True))
class TestUnvalidatedDictField(FieldValues):
"""
Values for `DictField` with no `child` argument.
"""
valid_inputs = [
({'a': 1, 'b': [4, 5, 6], 1: 123}, {'a': 1, 'b': [4, 5, 6], '1': 123}),
]
invalid_inputs = [
('not a dict', ['Expected a dictionary of items but got type "str".']),
]
outputs = [
({'a': 1, 'b': [4, 5, 6]}, {'a': 1, 'b': [4, 5, 6]}),
]
field = serializers.DictField()
class TestHStoreField(FieldValues):
"""
Values for `ListField` with CharField as child.
"""
valid_inputs = [
({'a': 1, 'b': '2', 3: 3}, {'a': '1', 'b': '2', '3': '3'}),
({'a': 1, 'b': None}, {'a': '1', 'b': None}),
]
invalid_inputs = [
('not a dict', ['Expected a dictionary of items but got type "str".']),
]
outputs = [
({'a': 1, 'b': '2', 3: 3}, {'a': '1', 'b': '2', '3': '3'}),
]
field = serializers.HStoreField()
def test_child_is_charfield(self):
with pytest.raises(AssertionError) as exc_info:
serializers.HStoreField(child=serializers.IntegerField())
assert str(exc_info.value) == (
"The `child` argument must be an instance of `CharField`, "
"as the hstore extension stores values as strings."
)
def test_no_source_on_child(self):
with pytest.raises(AssertionError) as exc_info:
serializers.HStoreField(child=serializers.CharField(source='other'))
assert str(exc_info.value) == (
"The `source` argument is not meaningful when applied to a `child=` field. "
"Remove `source=` from the field declaration."
)
def test_allow_null(self):
"""
If `allow_null=True` then `None` is a valid input.
"""
field = serializers.HStoreField(allow_null=True)
output = field.run_validation(None)
assert output is None
class TestJSONField(FieldValues):
"""
Values for `JSONField`.
"""
valid_inputs = [
({
'a': 1,
'b': ['some', 'list', True, 1.23],
'3': None
}, {
'a': 1,
'b': ['some', 'list', True, 1.23],
'3': None
}),
]
invalid_inputs = [
({'a': set()}, ['Value must be valid JSON.']),
({'a': float('inf')}, ['Value must be valid JSON.']),
]
outputs = [
({
'a': 1,
'b': ['some', 'list', True, 1.23],
'3': 3
}, {
'a': 1,
'b': ['some', 'list', True, 1.23],
'3': 3
}),
]
field = serializers.JSONField()
def test_html_input_as_json_string(self):
"""
HTML inputs should be treated as a serialized JSON string.
"""
class TestSerializer(serializers.Serializer):
config = serializers.JSONField()
data = QueryDict(mutable=True)
data.update({'config': '{"a":1}'})
serializer = TestSerializer(data=data)
assert serializer.is_valid()
assert serializer.validated_data == {'config': {"a": 1}}
class TestBinaryJSONField(FieldValues):
"""
Values for `JSONField` with binary=True.
"""
valid_inputs = [
(b'{"a": 1, "3": null, "b": ["some", "list", true, 1.23]}', {
'a': 1,
'b': ['some', 'list', True, 1.23],
'3': None
}),
]
invalid_inputs = [
('{"a": "unterminated string}', ['Value must be valid JSON.']),
]
outputs = [
(['some', 'list', True, 1.23], b'["some", "list", true, 1.23]'),
]
field = serializers.JSONField(binary=True)
# Tests for FieldField.
# ---------------------
class MockRequest:
    """Minimal request double exposing only absolute-URI resolution."""

    def build_absolute_uri(self, value):
        # Prefix the given path with a fixed scheme and host, the way a
        # real request object would.
        base = 'http://example.com'
        return base + value
class TestFileFieldContext:
    def test_fully_qualified_when_request_in_context(self):
        # When a request is present in the field context, the file URL is
        # expanded to a fully qualified URI via build_absolute_uri().
        field = serializers.FileField(max_length=10)
        field._context = {'request': MockRequest()}
        obj = MockFile(name='example.txt', url='/example.txt')
        value = field.to_representation(obj)
        assert value == 'http://example.com/example.txt'
# Tests for SerializerMethodField.
# --------------------------------
class TestSerializerMethodField:
    def test_serializer_method_field(self):
        # The default method name is `get_<field_name>`, called with the
        # object being serialized as its only argument.
        class ExampleSerializer(serializers.Serializer):
            example_field = serializers.SerializerMethodField()

            def get_example_field(self, obj):
                return 'ran get_example_field(%d)' % obj['example_field']

        serializer = ExampleSerializer({'example_field': 123})
        assert serializer.data == {
            'example_field': 'ran get_example_field(123)'
        }

    def test_redundant_method_name(self):
        # Explicitly passing the default method name is rejected with a
        # descriptive assertion error when the fields are constructed.
        class ExampleSerializer(serializers.Serializer):
            example_field = serializers.SerializerMethodField('get_example_field')

        with pytest.raises(AssertionError) as exc_info:
            ExampleSerializer().fields
        assert str(exc_info.value) == (
            "It is redundant to specify `get_example_field` on "
            "SerializerMethodField 'example_field' in serializer "
            "'ExampleSerializer', because it is the same as the default "
            "method name. Remove the `method_name` argument."
        )
| kgeorgy/django-rest-framework | tests/test_fields.py | Python | bsd-2-clause | 66,441 |
from openerp import api, models, fields, SUPERUSER_ID
class crm_phonecall(models.Model):
    # Extend CRM phonecalls with a back-link to the repair order they were
    # logged against; counted by mrp_repair.phonecall_count below.
    _inherit = "crm.phonecall"

    repair_id = fields.Many2one('mrp.repair', 'Repair Order')
class mrp_repair(models.Model):
    _inherit = 'mrp.repair'

    @api.one
    def _get_phonecall_count(self):
        """Count the phonecalls linked to this repair order."""
        self.phonecall_count = self.env['crm.phonecall'].search_count([('repair_id', '=', self.id)])

    phonecall_count = fields.Integer('Phonecalls Count', compute='_get_phonecall_count')

    def name_get(self, cr, uid, ids, context=None):
        """Display repair orders as ``<name> / <partner>`` when a partner is set.

        ``name_search`` below must stay symmetric with this format.
        """
        res = []
        for r in self.browse(cr, uid, ids, context=context):
            rman = r.name
            rmap = r.partner_id.display_name
            if rmap:
                name = rman + ' / ' + rmap
            else:
                name = rman
            res.append((r.id, name))
        return res

    def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100):
        """Search either side of the ``<name> / <partner>`` display string."""
        if not args:
            args = []
        if not context:
            context = {}
        if name:
            # Be sure name_search is symmetric to name_get.
            ids = []
            # Split on the first '/' only: partner display names may
            # themselves contain '/'.
            name_splited = name.split('/', 1)
            if len(name_splited) > 1:
                # Strip the spaces surrounding the ' / ' separator emitted
                # by name_get; leaving them in would embed them in the
                # ilike patterns and the search would never match.
                rman = name_splited[0].strip()
                rmap = name_splited[1].strip()
                ids += self.search(cr, uid, [('name', operator, rman), ('partner_id.display_name', operator, rmap)] + args, limit=limit, context=context)
            else:
                ids += self.search(cr, uid, ['|', ('name', operator, name), ('partner_id.display_name', operator, name)] + args, limit=limit, context=context)
        else:
            ids = self.search(cr, uid, args, limit=limit, context=context)
        return self.name_get(cr, uid, ids, context)
| gmathers/iii-addons | phonecall_repair_order/models.py | Python | agpl-3.0 | 1,991 |
from django.contrib.auth import authenticate
from django.contrib.auth.models import User, Permission
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.context_processors import PermWrapper, PermLookupDict
from django.db.models import Q
from django.test import TestCase, override_settings
from .settings import AUTH_MIDDLEWARE_CLASSES, AUTH_TEMPLATES
from .utils import skipIfCustomUser
class MockUser(object):
    """Stand-in user granting exactly one module and one permission."""

    def has_module_perms(self, perm):
        # Only the fake 'mockapp' module is granted.
        return perm == 'mockapp'

    def has_perm(self, perm):
        # Only the fake 'mockapp.someperm' permission is granted.
        return perm == 'mockapp.someperm'
class PermWrapperTests(TestCase):
    """
    Test some details of the PermWrapper implementation.
    """
    class EQLimiterObject(object):
        """
        This object makes sure __eq__ will not be called endlessly.
        """
        def __init__(self):
            self.eq_calls = 0

        def __eq__(self, other):
            # First comparison reports inequality, every later one reports
            # equality, so an unbounded chain of __eq__ calls is detectable.
            if self.eq_calls > 0:
                return True
            self.eq_calls += 1
            return False

    def test_permwrapper_in(self):
        """
        Test that 'something' in PermWrapper works as expected.
        """
        perms = PermWrapper(MockUser())
        # Works for modules and full permissions.
        self.assertIn('mockapp', perms)
        self.assertNotIn('nonexisting', perms)
        self.assertIn('mockapp.someperm', perms)
        self.assertNotIn('mockapp.nonexisting', perms)

    def test_permlookupdict_in(self):
        """
        No endless loops if accessed with 'in' - refs #18979.
        """
        # Membership tests on PermLookupDict must raise TypeError rather
        # than compare elements forever.
        pldict = PermLookupDict(MockUser(), 'mockapp')
        with self.assertRaises(TypeError):
            self.EQLimiterObject() in pldict
@skipIfCustomUser
@override_settings(
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
ROOT_URLCONF='django.contrib.auth.tests.urls',
TEMPLATES=AUTH_TEMPLATES,
USE_TZ=False, # required for loading the fixture
)
class AuthContextProcessorTests(TestCase):
"""
Tests for the ``django.contrib.auth.context_processors.auth`` processor
"""
fixtures = ['context-processors-users.xml']
@override_settings(MIDDLEWARE_CLASSES=AUTH_MIDDLEWARE_CLASSES)
def test_session_not_accessed(self):
"""
Tests that the session is not accessed simply by including
the auth context processor
"""
response = self.client.get('/auth_processor_no_attr_access/')
self.assertContains(response, "Session not accessed")
@override_settings(MIDDLEWARE_CLASSES=AUTH_MIDDLEWARE_CLASSES)
def test_session_is_accessed(self):
"""
Tests that the session is accessed if the auth context processor
is used and relevant attributes accessed.
"""
response = self.client.get('/auth_processor_attr_access/')
self.assertContains(response, "Session accessed")
def test_perms_attrs(self):
u = User.objects.create_user(username='normal', password='secret')
u.user_permissions.add(
Permission.objects.get(
content_type=ContentType.objects.get_for_model(Permission),
codename='add_permission'))
self.client.login(username='normal', password='secret')
response = self.client.get('/auth_processor_perms/')
self.assertContains(response, "Has auth permissions")
self.assertContains(response, "Has auth.add_permission permissions")
self.assertNotContains(response, "nonexisting")
def test_perm_in_perms_attrs(self):
u = User.objects.create_user(username='normal', password='secret')
u.user_permissions.add(
Permission.objects.get(
content_type=ContentType.objects.get_for_model(Permission),
codename='add_permission'))
self.client.login(username='normal', password='secret')
response = self.client.get('/auth_processor_perm_in_perms/')
self.assertContains(response, "Has auth permissions")
self.assertContains(response, "Has auth.add_permission permissions")
self.assertNotContains(response, "nonexisting")
def test_message_attrs(self):
self.client.login(username='super', password='secret')
response = self.client.get('/auth_processor_messages/')
self.assertContains(response, "Message 1")
def test_user_attrs(self):
"""
Test that the lazy objects returned behave just like the wrapped objects.
"""
# These are 'functional' level tests for common use cases. Direct
# testing of the implementation (SimpleLazyObject) is in the 'utils'
# tests.
self.client.login(username='super', password='secret')
user = authenticate(username='super', password='secret')
response = self.client.get('/auth_processor_user/')
self.assertContains(response, "unicode: super")
self.assertContains(response, "id: 100")
self.assertContains(response, "username: super")
# bug #12037 is tested by the {% url %} in the template:
self.assertContains(response, "url: /userpage/super/")
# See if this object can be used for queries where a Q() comparing
# a user can be used with another Q() (in an AND or OR fashion).
# This simulates what a template tag might do with the user from the
# context. Note that we don't need to execute a query, just build it.
#
# The failure case (bug #12049) on Python 2.4 with a LazyObject-wrapped
# User is a fatal TypeError: "function() takes at least 2 arguments
# (0 given)" deep inside deepcopy().
#
# Python 2.5 and 2.6 succeeded, but logged internally caught exception
# spew:
#
# Exception RuntimeError: 'maximum recursion depth exceeded while
# calling a Python object' in <type 'exceptions.AttributeError'>
# ignored"
Q(user=response.context['user']) & Q(someflag=True)
# Tests for user equality. This is hard because User defines
# equality in a non-duck-typing way
# See bug #12060
self.assertEqual(response.context['user'], user)
self.assertEqual(user, response.context['user'])
| doismellburning/django | django/contrib/auth/tests/test_context_processors.py | Python | bsd-3-clause | 6,365 |
import re
try:
import rpython
from rpython.annotator import model
from rpython.annotator.bookkeeper import getbookkeeper
from rpython.rlib.objectmodel import instantiate, hlinvoke
from rpython.rlib.rsre import rsre_core
from rpython.rlib.rsre.rpy import get_code
from rpython.rtyper.annlowlevel import llstr, hlstr
from rpython.rtyper.extregistry import ExtRegistryEntry
from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.lltypesystem.rlist import FixedSizeListRepr
from rpython.rtyper.lltypesystem.rstr import STR, string_repr
from rpython.rtyper.rmodel import Repr
from rpython.tool.pairtype import pairtype
except ImportError:
rpython = None
from rply.lexer import Lexer
class Rule(object):
    """A named token rule backed by a compiled regular expression."""

    def __init__(self, name, pattern):
        self.name = name
        self.re = re.compile(pattern)

    def _freeze_(self):
        # Marks the instance as immutable for RPython's annotator.
        return True

    def matches(self, s, pos):
        """Return a Match for *s* anchored at *pos*, or None on failure."""
        found = self.re.match(s, pos)
        if found is None:
            return None
        start, end = found.span(0)
        return Match(start, end)
class Match(object):
    """Half-open [start, end) span of a rule match within the input."""

    _attrs_ = ["start", "end"]

    def __init__(self, start, end):
        self.start, self.end = start, end
class LexerGenerator(object):
    # NOTE: the docstring is a raw string because it contains regex escapes
    # (\d, \+, \s); in a non-raw string these are invalid escape sequences
    # and raise SyntaxWarning/DeprecationWarning on modern CPython.
    r"""
    A LexerGenerator represents a set of rules that match pieces of text that
    should either be turned into tokens or ignored by the lexer.

    Rules are added using the :meth:`add` and :meth:`ignore` methods:

    >>> from rply import LexerGenerator
    >>> lg = LexerGenerator()
    >>> lg.add('NUMBER', r'\d+')
    >>> lg.add('ADD', r'\+')
    >>> lg.ignore(r'\s+')

    You can then build a lexer with which you can lex a string to produce an
    iterator yielding tokens:

    >>> lexer = lg.build()
    >>> iterator = lexer.lex('1 + 1')
    >>> iterator.next()
    Token('NUMBER', '1')
    >>> iterator.next()
    Token('ADD', '+')
    >>> iterator.next()
    Token('NUMBER', '1')
    >>> iterator.next()
    Traceback (most recent call last):
    ...
    StopIteration
    """

    def __init__(self):
        self.rules = []
        self.ignore_rules = []

    def add(self, name, pattern):
        """
        Adds a rule with the given `name` and `pattern`. In case of ambiguity,
        the first rule added wins.
        """
        self.rules.append(Rule(name, pattern))

    def ignore(self, pattern):
        """
        Adds a rule whose matched value will be ignored. Ignored rules will be
        matched before regular ones.
        """
        self.ignore_rules.append(Rule("", pattern))

    def build(self):
        """
        Returns a lexer instance, which provides a `lex` method that must be
        called with a string and returns an iterator yielding
        :class:`~rply.Token` instances.
        """
        return Lexer(self.rules, self.ignore_rules)
if rpython:
class RuleEntry(ExtRegistryEntry):
_type_ = Rule
def compute_annotation(self, *args):
return SomeRule()
class SomeRule(model.SomeObject):
def rtyper_makekey(self):
return (type(self),)
def rtyper_makerepr(self, rtyper):
return RuleRepr(rtyper)
def method_matches(self, s_s, s_pos):
assert model.SomeString().contains(s_s)
assert model.SomeInteger(nonneg=True).contains(s_pos)
bk = getbookkeeper()
init_pbc = bk.immutablevalue(Match.__init__)
bk.emulate_pbc_call((self, "match_init"), init_pbc, [
model.SomeInstance(bk.getuniqueclassdef(Match)),
model.SomeInteger(nonneg=True),
model.SomeInteger(nonneg=True)
])
init_pbc = bk.immutablevalue(rsre_core.StrMatchContext.__init__)
bk.emulate_pbc_call((self, "str_match_context_init"), init_pbc, [
model.SomeInstance(bk.getuniqueclassdef(rsre_core.StrMatchContext)),
bk.newlist(model.SomeInteger(nonneg=True)),
model.SomeString(),
model.SomeInteger(nonneg=True),
model.SomeInteger(nonneg=True),
model.SomeInteger(nonneg=True),
])
match_context_pbc = bk.immutablevalue(rsre_core.match_context)
bk.emulate_pbc_call((self, "match_context"), match_context_pbc, [
model.SomeInstance(bk.getuniqueclassdef(rsre_core.StrMatchContext)),
])
return model.SomeInstance(getbookkeeper().getuniqueclassdef(Match), can_be_None=True)
def getattr(self, s_attr):
if s_attr.is_constant() and s_attr.const == "name":
return model.SomeString()
return super(SomeRule, self).getattr(s_attr)
class __extend__(pairtype(SomeRule, SomeRule)):
def union(self):
return SomeRule()
class RuleRepr(Repr):
def __init__(self, rtyper):
super(RuleRepr, self).__init__()
self.ll_rule_cache = {}
self.match_init_repr = rtyper.getrepr(
rtyper.annotator.bookkeeper.immutablevalue(Match.__init__)
)
self.match_context_init_repr = rtyper.getrepr(
rtyper.annotator.bookkeeper.immutablevalue(rsre_core.StrMatchContext.__init__)
)
self.match_context_repr = rtyper.getrepr(
rtyper.annotator.bookkeeper.immutablevalue(rsre_core.match_context)
)
list_repr = FixedSizeListRepr(rtyper, rtyper.getrepr(model.SomeInteger(nonneg=True)))
list_repr._setup_repr()
self.lowleveltype = lltype.Ptr(lltype.GcStruct(
"RULE",
("name", lltype.Ptr(STR)),
("code", list_repr.lowleveltype),
))
def convert_const(self, rule):
if rule not in self.ll_rule_cache:
ll_rule = lltype.malloc(self.lowleveltype.TO)
ll_rule.name = llstr(rule.name)
code = get_code(rule.re.pattern)
ll_rule.code = lltype.malloc(self.lowleveltype.TO.code.TO, len(code))
for i, c in enumerate(code):
ll_rule.code[i] = c
self.ll_rule_cache[rule] = ll_rule
return self.ll_rule_cache[rule]
def rtype_getattr(self, hop):
s_attr = hop.args_s[1]
if s_attr.is_constant() and s_attr.const == "name":
v_rule = hop.inputarg(self, arg=0)
return hop.gendirectcall(LLRule.ll_get_name, v_rule)
return super(RuleRepr, self).rtype_getattr(hop)
def rtype_method_matches(self, hop):
[v_rule, v_s, v_pos] = hop.inputargs(self, string_repr, lltype.Signed)
c_MATCHTYPE = hop.inputconst(lltype.Void, Match)
c_MATCH_INIT = hop.inputconst(lltype.Void, self.match_init_repr)
c_MATCH_CONTEXTTYPE = hop.inputconst(lltype.Void, rsre_core.StrMatchContext)
c_MATCH_CONTEXT_INIT = hop.inputconst(lltype.Void, self.match_context_init_repr)
c_MATCH_CONTEXT = hop.inputconst(lltype.Void, self.match_context_repr)
return hop.gendirectcall(
LLRule.ll_matches,
c_MATCHTYPE, c_MATCH_INIT, c_MATCH_CONTEXTTYPE,
c_MATCH_CONTEXT_INIT, c_MATCH_CONTEXT, v_rule, v_s, v_pos
)
class LLRule(object):
@staticmethod
def ll_get_name(ll_rule):
return ll_rule.name
@staticmethod
def ll_matches(MATCHTYPE, MATCH_INIT, MATCH_CONTEXTTYPE,
MATCH_CONTEXT_INIT, MATCH_CONTEXT, ll_rule, s, pos):
s = hlstr(s)
assert pos >= 0
ctx = instantiate(MATCH_CONTEXTTYPE)
hlinvoke(
MATCH_CONTEXT_INIT, rsre_core.StrMatchContext.__init__,
ctx, ll_rule.code, hlstr(s), pos, len(s), 0
)
matched = hlinvoke(MATCH_CONTEXT, rsre_core.match_context, ctx)
if matched:
match = instantiate(MATCHTYPE)
hlinvoke(
MATCH_INIT, Match.__init__,
match, ctx.match_start, ctx.match_end
)
return match
else:
return None
| rcarmo/rss2imap-gae | lib/rply/lexergenerator.py | Python | mit | 8,242 |
from corehq.apps.fixtures.models import FixtureDataItem
from corehq.util.quickcache import quickcache
DOMAIN = 'opm'

# xmlns identifiers of the CommCare form types this custom report consumes.
PREG_REG_XMLNS = "http://openrosa.org/formdesigner/D127C457-3E15-4F5E-88C3-98CD1722C625"
VHND_XMLNS = "http://openrosa.org/formdesigner/ff5de10d75afda15cddb3b00a0b1e21d33a50d59"
BIRTH_PREP_XMLNS = "http://openrosa.org/formdesigner/50378991-FEC3-408D-B4A5-A264F3B52184"
DELIVERY_XMLNS = "http://openrosa.org/formdesigner/492F8F0E-EE7D-4B28-B890-7CDA5F137194"
CHILD_FOLLOWUP_XMLNS = "http://openrosa.org/formdesigner/C90C2C1F-3B34-47F3-B3A3-061EAAC1A601"
CFU1_XMLNS = "http://openrosa.org/formdesigner/d642dd328514f2af92c093d414d63e5b2670b9c"
CFU2_XMLNS = "http://openrosa.org/formdesigner/9ef423bba8595a99976f0bc9532617841253a7fa"
CFU3_XMLNS = "http://openrosa.org/formdesigner/f15b9f8fb92e2552b1885897ece257609ed16649"
GROWTH_MONITORING_XMLNS= "http://openrosa.org/formdesigner/F1356F3F-C695-491F-9277-7F9B5522200C"

# Grouping of all child follow-up form xmlns values.
CHILDREN_FORMS = [CFU1_XMLNS, CFU2_XMLNS, CFU3_XMLNS, CHILD_FOLLOWUP_XMLNS]

# TODO Move these to a cached fixtures lookup
# Incentive payment amounts — presumably rupees (cf. 'rs_amount' in
# get_fixture_data); confirm against the condition_amounts fixture.
MONTH_AMT = 250
TWO_YEAR_AMT = 2000
THREE_YEAR_AMT = 3000
@quickcache([], timeout=30 * 60)
def get_fixture_data():
    """Map condition name -> integer amount from the `condition_amounts` fixture.

    Results are cached for 30 minutes via quickcache.
    """
    fixtures = FixtureDataItem.get_indexed_items(DOMAIN, 'condition_amounts', 'condition')
    return dict((k, int(fixture['rs_amount'])) for k, fixture in fixtures.items())
class InvalidRow(Exception):
    """
    Raise this in the row constructor to skip the row entirely.
    """
class CaseOutOfRange(InvalidRow):
    """
    The row is invalid because the window calculations are out of range.
    """
| puttarajubr/commcare-hq | custom/opm/constants.py | Python | bsd-3-clause | 1,572 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from ..cmislib import model
from ..cmislib.exceptions import InvalidArgumentException
import datetime
# Alfresco aspect-extension XML namespace and its 'alf' alias, as used in
# the atompub entry documents sent to the server
ALFRESCO_NS = 'http://www.alfresco.org'
ALFRESCO_NSALIAS = 'alf'
ALFRESCO_NSALIAS_DECL = 'xmlns:' + ALFRESCO_NSALIAS
ALFRESCO_NSPREFIX = ALFRESCO_NSALIAS + ':'
# local (unprefixed) element names within the Alfresco namespace
LOCALNAME_ASPECTS = 'aspects'
LOCALNAME_PROPERTIES = 'properties'
LOCALNAME_APPLIED_ASPECTS = 'appliedAspects'
LOCALNAME_SET_ASPECTS = 'setAspects'
LOCALNAME_ASPECTS_TO_ADD = 'aspectsToAdd'
LOCALNAME_ASPECTS_TO_REMOVE = 'aspectsToRemove'
# fully qualified 'alf:...' tag names built from the pieces above
TAGNAME_ALFRESCO_PROPERTIES = ALFRESCO_NSPREFIX + LOCALNAME_PROPERTIES
TAGNAME_SET_ASPECTS = ALFRESCO_NSPREFIX + LOCALNAME_SET_ASPECTS
TAGNAME_ASPECTS_TO_ADD = ALFRESCO_NSPREFIX + LOCALNAME_ASPECTS_TO_ADD
TAGNAME_ASPECTS_TO_REMOVE = ALFRESCO_NSPREFIX + LOCALNAME_ASPECTS_TO_REMOVE
# well-known CMIS property ids used when updating objects
OBJECT_TYPE_ID = 'cmis:objectTypeId'
CHANGE_TOKEN = 'cmis:changeToken'
def addSetAspectsToXMLDocument(xmldoc):
    """Declare the 'alf' namespace on the entry and append an empty
    alf:setAspects element under cmis:properties (creating the properties
    element beneath cmisra:object when absent). Returns the new element."""
    entry = xmldoc.getElementsByTagNameNS(model.ATOM_NS, 'entry')[0]
    entry.setAttribute(ALFRESCO_NSALIAS_DECL, ALFRESCO_NS)
    existing = xmldoc.getElementsByTagNameNS(model.CMIS_NS, LOCALNAME_PROPERTIES)
    if existing:
        propertiesElement = existing[0]
    else:
        objectElement = xmldoc.getElementsByTagNameNS(model.CMISRA_NS, 'object')[0]
        propertiesElement = xmldoc.createElementNS(model.CMIS_NS, 'cmis:properties')
        objectElement.appendChild(propertiesElement)
    aspectsElement = xmldoc.createElementNS(ALFRESCO_NS, TAGNAME_SET_ASPECTS)
    propertiesElement.appendChild(aspectsElement)
    return aspectsElement
def addPropertiesToXMLElement(xmldoc, element, properties):
    """Append one cmis:property* child per entry of *properties* to *element*.

    The CMIS element name encodes the data type and is derived from the
    python type of each value; list values use the type of their first item
    and produce one cmis:value child per item. Unknown types fall back to
    cmis:propertyString with unicode() conversion, as before.
    """
    def _identity(value):
        return value

    # python type -> (CMIS property element name, per-value converter).
    # Exact-type lookup mirrors the original ==-comparison chain, so bool
    # (a subclass of int) and CmisId (a subclass of str) keep their own
    # mappings instead of falling into the int/str entries.
    type_map = {
        model.CmisId: ('cmis:propertyId', _identity),
        str: ('cmis:propertyString', _identity),
        datetime.datetime: ('cmis:propertyDateTime', lambda value: value.isoformat()),
        bool: ('cmis:propertyBoolean', lambda value: unicode(value).lower()),
        int: ('cmis:propertyInteger', unicode),
        float: ('cmis:propertyDecimal', unicode),
    }
    for propName, propValue in properties.items():
        propType = type(propValue)
        if propType == list:
            values = propValue
            propType = type(propValue[0])
        else:
            values = [propValue]
        propElementName, convert = type_map.get(propType, ('cmis:propertyString', unicode))
        propElement = xmldoc.createElementNS(model.CMIS_NS, propElementName)
        propElement.setAttribute('propertyDefinitionId', propName)
        for val in values:
            valElement = xmldoc.createElementNS(model.CMIS_NS, 'cmis:value')
            valElement.appendChild(xmldoc.createTextNode(convert(val)))
            propElement.appendChild(valElement)
        element.appendChild(propElement)
def initData(self):
    """Reset cached aspect/property state.

    Patched onto cmislib object classes as _initData so cmislib's normal
    reset cycle also clears the Alfresco-specific caches.
    """
    model.CmisObject._initData(self)
    self._aspects = {}
    self._alfproperties = {}
def findAlfrescoExtensions(self):
    """Lazily populate self._aspects from the alf:appliedAspects elements of
    the object's atom entry, reloading the entry from the server if needed.

    Patched onto cmislib object classes as _findAlfrescoExtensions.
    """
    if not hasattr(self, '_aspects'):
        self._aspects = {}
    if self._aspects == {}:
        # idiom fix: identity comparison for None instead of '== None'
        if self.xmlDoc is None:
            self.reload()
        appliedAspects = self.xmlDoc.getElementsByTagNameNS(ALFRESCO_NS, LOCALNAME_APPLIED_ASPECTS)
        for node in appliedAspects:
            # each appliedAspects element holds one aspect type id text node
            aspectType = self._repository.getTypeDefinition(node.childNodes[0].data)
            self._aspects[node.childNodes[0].data] = aspectType
def hasAspect(self, arg):
    """Return True when this object carries the given aspect.

    *arg* may be an aspect type id string or an ObjectType; None yields False.
    """
    if arg is None:
        return False
    self._findAlfrescoExtensions()
    aspect_id = arg.getTypeId() if isinstance(arg, model.ObjectType) else arg
    return aspect_id in self._aspects
def getAspects(self):
    """Return the ObjectType of every aspect currently applied to this object."""
    self._findAlfrescoExtensions()
    return self._aspects.values()
def findAspect(self, propertyId):
    """Return the first applied aspect whose type defines *propertyId*,
    or None when no applied aspect carries that property."""
    self._findAlfrescoExtensions()
    if (propertyId is not None) and (len(self._aspects) > 0):
        # .items() rather than the py2-only .iteritems(); also renames the
        # loop variable so the 'id' builtin is no longer shadowed
        for aspect_id, aspect in self._aspects.items():
            props = aspect.getProperties()
            if propertyId in props:
                return aspect
    return None
def updateAspects(self, addAspects=None, removeAspects=None):
    """Add and/or remove one aspect via an atompub PUT to the self link.

    addAspects / removeAspects are aspect type id strings (each is written
    as a single text node, so pass one id per call). The local xmlDoc and
    cached state are refreshed from the server response.
    Patched onto cmislib object classes as _updateAspects.
    """
    if addAspects or removeAspects:
        selfUrl = self._getSelfLink()
        xmlEntryDoc = getEntryXmlDoc(self._repository)
        # Patch xmlEntryDoc: inject the alf:setAspects container
        setAspectsElement = addSetAspectsToXMLDocument(xmlEntryDoc)
        if addAspects:
            addAspectElement = xmlEntryDoc.createElementNS(ALFRESCO_NS, TAGNAME_ASPECTS_TO_ADD)
            valText = xmlEntryDoc.createTextNode(addAspects)
            addAspectElement.appendChild(valText)
            setAspectsElement.appendChild(addAspectElement)
        if removeAspects:
            removeAspectElement = xmlEntryDoc.createElementNS(ALFRESCO_NS, TAGNAME_ASPECTS_TO_REMOVE)
            valText = xmlEntryDoc.createTextNode(removeAspects)
            removeAspectElement.appendChild(valText)
            setAspectsElement.appendChild(removeAspectElement)
        updatedXmlDoc = self._cmisClient.put(selfUrl.encode('utf-8'),
                                             xmlEntryDoc.toxml(encoding='utf-8'),
                                             model.ATOM_XML_TYPE)
        self.xmlDoc = updatedXmlDoc
        self._initData()
def getProperties(self):
    """Return the object's CMIS properties merged with any Alfresco
    aspect properties found under alf:properties elements.

    Aspect properties are parsed once and cached in self._alfproperties.
    Patched onto cmislib object classes in place of getProperties.
    """
    result = model.CmisObject.getProperties(self)
    if not hasattr(self, '_alfproperties'):
        self._alfproperties = {}
    if self._alfproperties == {}:
        alfpropertiesElements = self.xmlDoc.getElementsByTagNameNS(ALFRESCO_NS, LOCALNAME_PROPERTIES)
        if len(alfpropertiesElements) > 0:
            for alfpropertiesElement in alfpropertiesElements:
                # only CMIS-namespaced element children are property nodes
                for node in [e for e in alfpropertiesElement.childNodes if e.nodeType == e.ELEMENT_NODE and e.namespaceURI == model.CMIS_NS]:
                    #propertyId, propertyString, propertyDateTime
                    #propertyType = cpattern.search(node.localName).groups()[0]
                    propertyName = node.attributes['propertyDefinitionId'].value
                    # a property may carry zero, one, or many cmis:value children
                    if node.childNodes and \
                       node.getElementsByTagNameNS(model.CMIS_NS, 'value')[0] and \
                       node.getElementsByTagNameNS(model.CMIS_NS, 'value')[0].childNodes:
                        valNodeList = node.getElementsByTagNameNS(model.CMIS_NS, 'value')
                        if (len(valNodeList) == 1):
                            propertyValue = model.parsePropValue(valNodeList[0].
                                                                childNodes[0].data,
                                                                node.localName)
                        else:
                            # multi-valued property -> list of parsed values
                            propertyValue = []
                            for valNode in valNodeList:
                                propertyValue.append(model.parsePropValue(valNode.
                                                                          childNodes[0].data,
                                                                          node.localName))
                    else:
                        propertyValue = None
                    self._alfproperties[propertyName] = propertyValue
    result.update(self._alfproperties)
    return result
def updateProperties(self, properties):
    """Update CMIS and Alfresco aspect properties on this object.

    Properties defined by the object's type (or cmis:objectTypeId itself)
    go into the regular cmis:properties block; every other property must
    belong to an applied aspect, otherwise InvalidArgumentException is
    raised. Returns self after refreshing from the server response.
    Patched onto cmislib object classes in place of updateProperties.
    """
    selfUrl = self._getSelfLink()
    cmisproperties = {}
    alfproperties = {}
    # if we have a change token, we must pass it back, per the spec
    args = {}
    # idiom fix: dict.get + 'is not None' replaces the deprecated
    # has_key() and the '!= None' comparison
    if self.properties.get(CHANGE_TOKEN) is not None:
        self.logger.debug('Change token present, adding it to args')
        args = {"changeToken": self.properties[CHANGE_TOKEN]}
    objectTypeId = properties.get(OBJECT_TYPE_ID)
    if objectTypeId is None:
        objectTypeId = self.properties.get(OBJECT_TYPE_ID)
    objectType = self._repository.getTypeDefinition(objectTypeId)
    objectTypePropsDef = objectType.getProperties()
    for propertyName, propertyValue in properties.items():
        # membership test directly on the dict instead of .keys()
        if (propertyName == OBJECT_TYPE_ID) or (propertyName in objectTypePropsDef):
            cmisproperties[propertyName] = propertyValue
        else:
            if self.findAspect(propertyName) is None:
                raise InvalidArgumentException
            alfproperties[propertyName] = propertyValue
    xmlEntryDoc = getEntryXmlDoc(self._repository, properties=cmisproperties)
    # Patch xmlEntryDoc: serialize aspect properties under alf:setAspects
    if len(alfproperties) > 0:
        aspectsElement = addSetAspectsToXMLDocument(xmlEntryDoc)
        alfpropertiesElement = xmlEntryDoc.createElementNS(ALFRESCO_NS, TAGNAME_ALFRESCO_PROPERTIES)
        aspectsElement.appendChild(alfpropertiesElement)
        # Like regular properties
        addPropertiesToXMLElement(xmlEntryDoc, alfpropertiesElement, alfproperties)
    updatedXmlDoc = self._cmisClient.put(selfUrl.encode('utf-8'),
                                         xmlEntryDoc.toxml(encoding='utf-8'),
                                         model.ATOM_XML_TYPE,
                                         **args)
    self.xmlDoc = updatedXmlDoc
    self._initData()
    return self
def addAspect(self, arg):
    """Apply an aspect to this object.

    *arg* is an aspect type id string or an ObjectType; an id unknown to the
    repository raises InvalidArgumentException. None is a silent no-op.
    """
    if arg is None:
        return
    aspect_id = arg.getTypeId() if isinstance(arg, model.ObjectType) else arg
    if self._repository.getTypeDefinition(aspect_id) is None:
        raise InvalidArgumentException
    self._updateAspects(addAspects=aspect_id)
def removeAspect(self, arg):
    """Remove an aspect from this object.

    *arg* is an aspect type id string or an ObjectType; an id unknown to the
    repository raises InvalidArgumentException. None is a silent no-op.
    """
    if arg is None:
        return
    aspect_id = arg.getTypeId() if isinstance(arg, model.ObjectType) else arg
    if self._repository.getTypeDefinition(aspect_id) is None:
        raise InvalidArgumentException
    self._updateAspects(removeAspects=aspect_id)
def getEntryXmlDoc(repo=None, objectTypeId=None, properties=None, contentFile=None,
                   contentType=None, contentEncoding=None):
    """Thin pass-through to cmislib's model.getEntryXmlDoc.

    Kept in this module so the patched methods above resolve it locally.
    (Also strips stray non-code text that had been fused onto the return
    line.)
    """
    return model.getEntryXmlDoc(repo, objectTypeId, properties, contentFile, contentType, contentEncoding)
#
# SecureDrop whistleblower submission system
# Copyright (C) 2017 Loic Dachary <loic@dachary.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time
import pytest
from tests.functional import journalist_navigation_steps
from tests.functional import source_navigation_steps
import tests.pageslayout.functional_test as pft
@pytest.mark.pagelayout
class TestJournalistLayoutDelete(
        pft.FunctionalTest,
        source_navigation_steps.SourceNavigationStepsMixin,
        journalist_navigation_steps.JournalistNavigationStepsMixin):
    """Page-layout screenshots of the journalist deletion flows.

    Every test shares the same fixture flow (a source submits a file and a
    message, then a journalist logs in and opens the collection), which the
    original duplicated verbatim in all five tests; it now lives in a
    single helper.
    """

    def _submit_and_visit_collection(self):
        """Submit one file + one message as a source, then log in as a
        journalist and open the source's collection page."""
        self._source_visits_source_homepage()
        self._source_chooses_to_submit_documents()
        self._source_continues_to_submit_page()
        self._source_submits_a_file()
        self._source_submits_a_message()
        self._source_logs_out()
        self._journalist_logs_in()
        self._journalist_visits_col()

    def test_delete_none(self):
        self._submit_and_visit_collection()
        self._journalist_clicks_delete_selected_link()
        self._journalist_confirm_delete_selected()
        self._screenshot('journalist-delete_none.png')
        self._save_html('journalist-delete_none.html')

    def test_delete_one_confirmation(self):
        self._submit_and_visit_collection()
        self._journalist_selects_first_doc()
        self._journalist_clicks_delete_selected_link()
        # give the confirmation dialog time to appear before capturing
        time.sleep(1)
        self._screenshot('journalist-delete_one_confirmation.png')
        self._save_html('journalist-delete_one_confirmation.html')

    def test_delete_all_confirmation(self):
        self._submit_and_visit_collection()
        self._journalist_delete_all_confirmation()
        # give the confirmation dialog time to appear before capturing
        time.sleep(1)
        self._screenshot('journalist-delete_all_confirmation.png')
        self._save_html('journalist-delete_all_confirmation.html')

    def test_delete_one(self):
        self._submit_and_visit_collection()
        self._journalist_delete_one()
        self._journalist_confirm_delete_selected()
        self._screenshot('journalist-delete_one.png')
        self._save_html('journalist-delete_one.html')

    def test_delete_all(self):
        self._submit_and_visit_collection()
        self._journalist_delete_all()
        self._journalist_confirm_delete_selected()
        self._screenshot('journalist-delete_all.png')
        self._save_html('journalist-delete_all.html')
| freedomofpress/securedrop | securedrop/tests/pageslayout/test_journalist_delete.py | Python | agpl-3.0 | 4,067 |
import uuid
import threading
import time
from datetime import datetime
import logging
from core.time import IDLE_MIN_MS
class WorldRunException(Exception):
    """Raised when World.run() is called while the world is already running."""
    pass
class Pixel(object):
    """An RGBA pixel.

    Externally the API speaks 0..255 component values (and 0..1 via the
    *_n variants); internally components are stored normalized to 0..1 in
    the self._components tuple, in R, G, B, A index order.
    """
    # indexes into self._components
    RED_INDEX = 0
    GREEN_INDEX = 1
    BLUE_INDEX = 2
    ALPHA_INDEX = 3
    @classmethod
    def from_tuple(cls, rgb_or_rgba_tuple):
        """Build a Pixel from an (r, g, b) or (r, g, b, a) tuple of 0..255
        values; alpha defaults to 255 (opaque)."""
        a = 255
        if len(rgb_or_rgba_tuple) > 3:
            a = rgb_or_rgba_tuple[cls.ALPHA_INDEX]
        return cls( r=rgb_or_rgba_tuple[cls.RED_INDEX],
                    g=rgb_or_rgba_tuple[cls.GREEN_INDEX],
                    b=rgb_or_rgba_tuple[cls.BLUE_INDEX],
                    a=a )
    @classmethod
    def from_tuple_n(cls, rgb_or_rgba_tuple):
        """Build a Pixel from a normalized (0..1) rgb/rgba tuple; alpha
        defaults to 1 (opaque)."""
        a = 1
        if len(rgb_or_rgba_tuple) > 3:
            a = rgb_or_rgba_tuple[cls.ALPHA_INDEX]
        return cls( r=rgb_or_rgba_tuple[cls.RED_INDEX]*255,
                    g=rgb_or_rgba_tuple[cls.GREEN_INDEX]*255,
                    b=rgb_or_rgba_tuple[cls.BLUE_INDEX]*255,
                    a=a*255 )
    @classmethod
    def from_rgb_string(cls, rgbstring):
        """Build a Pixel from a hex string like 'rrggbb' or 'rrggbbaa'
        (no leading '#'); returns None for strings shorter than 6 chars."""
        if len(rgbstring) >= 6:
            tuple_set = (0, 2, 4)
            if len(rgbstring) >= 8:
                tuple_set = (0, 2, 4, 6)
            rgb_tuple = tuple(int(rgbstring[i:i+2], 16) for i in tuple_set)
            return cls.from_tuple(rgb_tuple)
        return None
    def __init__(self, r=0, g=0, b=0, a=255):
        """Create a pixel from 0..255 components; defaults to opaque black."""
        super(Pixel, self).__init__()
        self._components = [0] * 4
        self.set_color(r, g, b, a)
    def set_color(self, r, g, b, a=255):
        """Set the color from 0..255 components (stored normalized)."""
        incoming = r / 255., g / 255., b / 255., a / 255.
        self._components = incoming
    def set_color_n(self, r, g, b, a=1):
        """Set the color from already-normalized 0..1 components."""
        incoming = r, g, b, a
        self._components = incoming
    @property
    def r(self):
        # red, 0..255
        return self._components[Pixel.RED_INDEX] * 255.
    @property
    def g(self):
        # green, 0..255
        return self._components[Pixel.GREEN_INDEX] * 255.
    @property
    def b(self):
        # blue, 0..255
        return self._components[Pixel.BLUE_INDEX] * 255.
    @property
    def a(self):
        # alpha, 0..255
        return self._components[Pixel.ALPHA_INDEX] * 255.
    @property
    def w(self):
        # NOTE(review): divides the 0..255 sum by 255 (range 0..3), while
        # w_n divides the normalized sum by 3 (range 0..1) -- so w is NOT
        # simply w_n scaled by 255. Confirm which "whiteness" is intended.
        return (self.r + self.g + self.b) / 255.
    @property
    def r_n(self):
        # red, normalized 0..1
        return self._components[Pixel.RED_INDEX]
    @property
    def g_n(self):
        # green, normalized 0..1
        return self._components[Pixel.GREEN_INDEX]
    @property
    def b_n(self):
        # blue, normalized 0..1
        return self._components[Pixel.BLUE_INDEX]
    @property
    def a_n(self):
        # alpha, normalized 0..1
        return self._components[Pixel.ALPHA_INDEX]
    @property
    def w_n(self):
        # mean of the normalized rgb components, 0..1
        return (self.r_n + self.g_n + self.b_n) / 3.
    def copy(self):
        """Return a new Pixel with the same color and alpha."""
        return Pixel(self.r, self.g, self.b, self.a)
    def _blend_channel(self, incoming, background, alpha):
        # standard linear interpolation of one normalized channel;
        # the alpha extremes short-circuit to avoid float error
        if alpha == 1:
            return incoming
        if alpha == 0:
            return background
        return (incoming - background) * alpha + background
    def blend(self, other, mask=None, opacity=None, blendfunc=None):
        """Blend *other* over this pixel in place.

        Opacity defaults to other's alpha. NOTE(review): the mask and
        blendfunc parameters are currently ignored (see the commented-out
        layer-based implementation below) -- confirm whether callers rely
        on them. This pixel's own alpha is preserved.
        """
        opacity = opacity or other.a_n
        r = self._blend_channel( other.r_n, self.r_n, opacity )
        g = self._blend_channel( other.g_n, self.g_n, opacity )
        b = self._blend_channel( other.b_n, self.b_n, opacity )
        self._components = r, g, b, self._components[Pixel.ALPHA_INDEX]
        # self.set_color_n( r, g, b, self.a_n )
        # layer = super(Pixel, self).blend(other, mask, opacity, blendfunc)
        # self.__set_color_from_layer(layer)
    def adjust(self, adjustfunc):
        # NOTE(review): object has no adjust(); this super() call will raise
        # AttributeError. Looks like a leftover from when Pixel subclassed a
        # layer type (as in the commented-out blend code above) -- confirm.
        layer = super(Pixel, self).adjust(adjustfunc)
        self.__set_color_from_layer(layer)
    def __set_color_from_layer(self, layer):
        # pull the four channels out of a layer object and store them
        r, g, b, a = layer.rgba(1, 1)
        self.set_color( self.__transform_component(r),
                        self.__transform_component(g),
                        self.__transform_component(b),
                        self.__transform_component(a))
    def __transform_component(self, packed_value):
        # layer channels arrive as nested arrays of normalized values
        return packed_value[0][0] * 255
    def __component(self, value):
        # fixed-width formatting helper for __repr__
        return str(value).ljust(5) + " "
    def __repr__(self):
        return self.__component(self.r) + self.__component(self.g) + self.__component(self.b) + self.__component(self.a)
    def __str__(self):
        return self.__repr__()
    def __eq__(self, other):
        # component-wise equality on the normalized values
        for i in range(0, len(self._components)):
            if self._components[i] != other._components[i]:
                return False
        return True
    def __ne__(self, other):
        return not self.__eq__(other)
class DisplayEntity(object):
    """Base class for everything in the display graph: assigns a unique id
    (used for removal matching) and a logger, and defines the destroy()
    lifecycle hook."""
    def __init__(self):
        self.id = str(uuid.uuid4())
        self.log = logging.getLogger()
    def destroy(self):
        """Lifecycle hook; subclasses release children/resources here."""
        pass
class SpriteContainer(DisplayEntity):
    """A display entity that owns an ordered collection of child sprites."""
    def __init__(self):
        super(SpriteContainer, self).__init__()
        self.sprites = []
    def clear_sprites(self):
        """Destroy every child sprite and empty the collection."""
        for child in self.get_sprites():
            child.destroy()
        self.sprites = []
    def add_sprite(self, sprite):
        """Append a child sprite."""
        self.sprites.append(sprite)
    def remove_sprite(self, sprite, recurse=True):
        """Remove *sprite* (matched by id), optionally descending into children first."""
        if recurse:
            for child in self.sprites:
                child.remove_sprite(sprite)
        self.sprites[:] = [child for child in self.sprites if child.id != sprite.id]
    def get_sprites(self):
        """Return a shallow copy of the child list."""
        return list(self.sprites)
    def destroy(self):
        super(SpriteContainer, self).destroy()
        self.clear_sprites()
class Sprite(SpriteContainer):
    """A positioned sprite; the base implementation only renders its children."""
    def __init__(self, position=None):
        super(Sprite, self).__init__()
        self.position = position or 0
    def render_to(self, pixel_buffer):
        """Draw all child sprites into *pixel_buffer*."""
        for child in self.sprites:
            child.render_to(pixel_buffer)
    def is_in_buffer(self, pixel_buffer, position=None):
        """True when *position* (defaulting to this sprite's own position)
        is a valid index into *pixel_buffer*."""
        if position is None:
            position = self.position
        return 0 <= position < len(pixel_buffer)
class DynamicSprite(Sprite):
    """A sprite whose state is advanced every tick by attached Dynamic behaviours."""
    def __init__(self, position=None):
        super(DynamicSprite, self).__init__(position=position)
        self.state = {}
        self.dynamics = []
    def add_dynamic(self, dynamic):
        """Attach a Dynamic behaviour to this sprite."""
        self.dynamics.append(dynamic)
    def clear_dynamics(self):
        """Destroy and drop every attached behaviour."""
        for behaviour in self.dynamics:
            behaviour.destroy()
        self.dynamics = []
    def update_from(self, world, elapsed_time_ms):
        """Recursively update dynamic children first, then apply own behaviours."""
        for child in self.sprites:
            if isinstance(child, DynamicSprite):
                child.update_from(world, elapsed_time_ms)
        for behaviour in self.dynamics:
            behaviour.act_on(self, world, elapsed_time_ms)
    def destroy(self):
        super(DynamicSprite, self).destroy()
        self.clear_dynamics()
class Dynamic(DisplayEntity):
    """A behaviour applied to a DynamicSprite on every update tick."""
    def __init__(self):
        super(Dynamic, self).__init__()
    def act_on(self, sprite, world, elapsed_time_ms):
        """Hook: mutate *sprite* based on world state and elapsed time."""
        pass
class RenderableContainer(SpriteContainer):
    """A sprite container that owns the pixel buffer and the renderers that
    push it to output devices, with a simple fps cap on render()."""
    def __init__(self, pixel_count, fps_limit=60):
        super(RenderableContainer, self).__init__()
        self.pixels = [ Pixel() for i in range(pixel_count) ]
        self.renderers = [ ]
        self.render_last = datetime.utcnow()
        # minimum milliseconds that must elapse between rendered frames
        self.render_ms_limit = 1000. / float(fps_limit)
    @property
    def size(self):
        """Number of pixels in the buffer."""
        return len(self.pixels)
    def clear_renderers(self):
        """Destroy and drop every attached renderer."""
        for renderer in self.renderers:
            renderer.destroy()
        self.renderers = [ ]
    def add_renderer(self, renderer):
        """Attach a renderer, letting it size itself to the pixel buffer."""
        renderer.setup(len(self.pixels), self)
        self.renderers.append(renderer)
    def get_renderers(self):
        """Return a shallow copy of the renderer list."""
        return self.renderers[:]
    def render(self):
        """Draw all sprites into the buffer and hand it to each renderer,
        unless called again sooner than the fps cap allows."""
        timing_current = datetime.utcnow()
        # BUG FIX: timedelta.microseconds is only the 0..999999 sub-second
        # component, so gaps longer than one second were mis-measured;
        # total_seconds() yields the true elapsed milliseconds.
        timing_elapsed = (timing_current - self.render_last).total_seconds() * 1000.
        if timing_elapsed >= self.render_ms_limit:
            self.render_last = timing_current
            for sprite in self.sprites:
                sprite.render_to(self.pixels)
            for renderer in self.renderers:
                renderer.render_buffer(self.pixels)
    def destroy(self):
        super(RenderableContainer, self).destroy()
        self.clear_renderers()
class Renderer(DisplayEntity):
    """Base class for output backends: receives the pixel buffer each frame
    and pushes it to a device/surface.

    Keeps a copy of the last seen buffer so subclasses can cheaply skip
    frames that did not change (see _is_buffer_changed).
    """
    def __init__(self):
        super(Renderer, self).__init__()
        self.previous_buffer = None
        # after a change is detected, report "changed" without comparing
        # for the next skip_frames frames (comparison is expensive)
        self.skip_checks_after_change = True
        self.skip_frames = 30
        self.skip_counter = 0
    def setup(self, pixel_count, world):
        """Hook called when the renderer is attached to a container."""
        pass
    def _copy_to_previous_buffer(self, pixel_buffer):
        # per-pixel copy so later mutations of the live buffer are detectable
        self.previous_buffer = [ pixel.copy() for pixel in pixel_buffer ]
        if self.skip_checks_after_change:
            self.skip_counter = self.skip_frames
    def _is_buffer_changed(self, pixel_buffer):
        """Return True when *pixel_buffer* differs from the last seen buffer
        (or while inside the post-change skip window)."""
        if self.skip_counter > 0:
            self.skip_counter -= 1
            return True
        #if our historic buffer isn't the same...
        if self.previous_buffer is None or len(self.previous_buffer) != len(pixel_buffer):
            self._copy_to_previous_buffer(pixel_buffer)
            return True
        #if we find a difference in the historic buffer...
        for idx, pixel in enumerate(pixel_buffer):
            if pixel != self.previous_buffer[idx]:
                self._copy_to_previous_buffer(pixel_buffer)
                return True
        return False
    def render_buffer(self, pixel_buffer):
        """Hook: push *pixel_buffer* to the output device."""
        pass
class World(RenderableContainer):
    """Top-level container: owns global state and drives the fixed-timestep
    update/render loop, optionally on a background thread."""
    def __init__(self, pixel_count, print_fps=False, timing_ms_per_update=33.3, enable_threading=True):
        super(World, self).__init__(pixel_count)
        self.state = { }
        self.run_enable = False
        self.run_thread = None
        self.enable_threading = enable_threading
        self.timing_previous_frame = datetime.utcnow()
        # simulation step size: update() is called in fixed increments of this
        self.timing_ms_per_update = timing_ms_per_update
        self.timing_lag = 0.0
        self.timing_elapsed_ms = 0.0
        self.print_fps = print_fps
    def update(self, elapsed_time_ms):
        """Advance every sprite by elapsed_time_ms."""
        for sprite in self.sprites:
            sprite.update_from(self, elapsed_time_ms)
    def run(self, callback=None):
        """Start the main loop (threaded if enable_threading); *callback* is
        invoked once per frame with the world. Raises WorldRunException if
        the loop is already running."""
        if self.run_enable or self.run_thread:
            raise WorldRunException("Already running.")
        self.run_enable = True
        if self.enable_threading:
            t = threading.Thread( name='_run_loop',
                                  target=self._run_loop,
                                  args=(callback,),
                                  kwargs=None )
            self.run_thread = t
            t.start()
        else:
            self._run_loop(callback)
    def _run_loop(self, callback=None):
        """Fixed-timestep game loop: accumulate wall-clock lag, consume it in
        timing_ms_per_update steps, then render once per frame."""
        lag = 0.0
        #do at least one update before rendering
        self.update(self.timing_elapsed_ms)
        while self.run_enable:
            timing_current = datetime.utcnow()
            # BUG FIX: timedelta.microseconds is only the sub-second
            # component; frames longer than one second were under-counted.
            # total_seconds() gives the full elapsed wall time.
            timing_elapsed = (timing_current - self.timing_previous_frame).total_seconds() * 1000.
            if self.print_fps and timing_elapsed:
                # single-argument print() form is valid on python 2 and 3
                print('{0:.2f} fps @ {1} ms / frame'.format(1000. / float(timing_elapsed), timing_elapsed))
            self.timing_previous_frame = timing_current
            lag += timing_elapsed
            while lag > self.timing_ms_per_update:
                self.update( self.timing_ms_per_update )
                self.timing_elapsed_ms += self.timing_ms_per_update
                lag -= self.timing_ms_per_update
            if callback:
                callback(self)
            self.render()
            # yield the CPU when the frame finished early
            if timing_elapsed < IDLE_MIN_MS:
                time.sleep(IDLE_MIN_MS / 1000.)
    def stop(self):
        """Signal the loop to exit after the current frame."""
        self.run_enable = False
| mackay/ble_detector | display/__init__.py | Python | mit | 11,569 |
import datetime
from django.db import models
from django.db.models import Max, Count
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from helios.models import Election
import utils
class Institution(models.Model):
    """An institution whose member profiles administer Helios elections."""
    name = models.CharField(max_length=250)
    short_name = models.CharField(max_length=100, blank=True)
    main_phone = models.CharField(max_length=25)
    sec_phone = models.CharField(max_length=25, blank=True)
    address = models.TextField()
    idp_address = models.URLField(unique=True)
    upload_voters = models.BooleanField(default=False, null=False)

    def __unicode__(self):
        return self.name

    def _admin_helios_user_ids(self):
        """Helios user ids of every profile attached to this institution."""
        return [profile.helios_user_id
                for profile in self.institutionuserprofile_set.all()]

    def _elections_by_status(self, year=None, **status_filters):
        """Shared queryset builder for elections_new / elections_in_progress /
        elections_done (the original duplicated the query six times).

        *status_filters* are Django field lookups (e.g. frozen_at__isnull);
        *year*, when given, restricts to elections created that year.
        """
        filters = dict(admin_id__in=self._admin_helios_user_ids(), **status_filters)
        if year is not None:
            filters['created_at__year'] = year
        elections = Election.objects.filter(**filters).order_by("short_name")
        return utils.elections_as_json(elections)

    @property
    def institution_users(self):
        """Summaries (pk, helios user, email, role, active, expiry) of all profiles."""
        users = []
        for user in self.institutionuserprofile_set.all():
            users.append({
                'pk': user.pk,
                'helios_user': user.helios_user,
                'email': user.email,
                'role': user.institution_role,
                'active': user.active,
                'expires_at': user.expires_at,
            })
        return users

    def elections_new(self, year=None):
        """Elections not yet frozen (still being set up), optionally by year."""
        return self._elections_by_status(year,
                                         frozen_at__isnull=True,
                                         archived_at__isnull=True,
                                         voting_ended_at__isnull=True)

    def elections_in_progress(self, year=None):
        """Frozen elections where voting has not yet ended, optionally by year."""
        return self._elections_by_status(year,
                                         frozen_at__isnull=False,
                                         archived_at__isnull=True,
                                         voting_ended_at__isnull=True)

    def elections_done(self, year=None):
        """Frozen elections where voting has ended, optionally by year."""
        return self._elections_by_status(year,
                                         frozen_at__isnull=False,
                                         archived_at__isnull=True,
                                         voting_ended_at__isnull=False)

    @property
    def elections(self):
        """All elections administered by this institution, newest first."""
        # NOTE(review): this filters admin__in with profile objects while the
        # status queries filter admin_id__in with helios_user_id values --
        # confirm both resolve to the same admin column.
        elections = Election.objects.filter(admin__in=[
            user for user in self.institutionuserprofile_set.all()]).order_by('-created_at')
        return utils.elections_as_json(elections)

    @property
    def recently_cast_votes(self):
        """Per-election cast-vote activity over the last 24 hours."""
        # (redundant local 'from helios.models import Election' removed --
        # Election is already imported at module level)
        recently_cast_votes = []
        for election in Election.objects.filter(
                voter__castvote__cast_at__gt=datetime.datetime.utcnow() - datetime.timedelta(days=1),
                admin__in=[user for user in self.institutionuserprofile_set.all()]).annotate(
                last_cast_vote=Max('voter__castvote__cast_at'),
                num_recent_cast_votes=Count('voter__castvote')).order_by('-last_cast_vote'):
            recently_cast_votes.append({
                'uuid': election.uuid,
                'name': election.name,
                'last_cast_vote': election.last_cast_vote,
                'num_recent_cast_vote': election.num_recent_cast_votes,
            })
        return recently_cast_votes

    @property
    def admins(self):
        """Name/email of every active institution-admin profile."""
        admins = []
        for user in self.institutionuserprofile_set.all():
            if user.is_institution_admin and user.active:
                admins.append({
                    'name': user.helios_user.name,
                    'email': user.email
                })
        return admins

    class Meta:
        app_label = 'heliosinstitution'
class InstitutionUserProfile(models.Model):
    """Links a django auth user (and optionally a helios user) to an
    Institution, with a role derived from django group membership."""
    helios_user = models.ForeignKey('helios_auth.User', blank=True, default=None, null=True, on_delete=models.CASCADE)
    django_user = models.ForeignKey(User, unique=True, on_delete=models.CASCADE)
    institution = models.ForeignKey("heliosinstitution.Institution", on_delete=models.CASCADE)
    email = models.EmailField(max_length=254)
    expires_at = models.DateTimeField(auto_now_add=False, default=None, null=True, blank=True)
    active = models.BooleanField(default=False)

    class Meta:
        permissions = (
            ("delegate_institution_mngt", _("Can delegate institution management tasks")),
            ("revoke_institution_mngt", _("Can revoke institution management tasks")),
            ("delegate_election_mngt", _("Can delegate election management tasks")),
            ("revoke_election_mngt", _("Can revoke election management tasks")),
        )
        app_label = 'heliosinstitution'

    def __unicode__(self):
        return self.helios_user.name if self.helios_user is not None else self.email

    @property
    def is_institution_admin(self):
        """True when the linked django user is in the 'Institution Admin' group."""
        return self.django_user.groups.filter(name='Institution Admin').exists()

    @property
    def institution_role(self):
        """Human-readable role label derived from django group membership."""
        # TODO: check for user group instead
        if self.is_institution_admin:
            return _("Institution Admin")
        if self.django_user.groups.filter(name="Election Admin").exists():
            return _("Election Admin")
        return _("Undefined")

    @property
    def institution_user_voter_attributes(self):
        '''
        Returns attributes to be used when constraining election voters.
        Election-manager-specific attributes are filtered out when
        settings.USE_ELECTION_MANAGER_ATTRIBUTES is enabled.
        '''
        from django.conf import settings
        # BUG FIX: copy before filtering -- the original popped keys from
        # helios_user.info['attributes'] in place, mutating shared state on
        # the helios user object.
        attributes = dict(self.helios_user.info['attributes'])
        if settings.USE_ELECTION_MANAGER_ATTRIBUTES:
            for attribute in settings.ELECTION_MANAGER_ATTRIBUTES:
                # pop with a default replaces the try/except KeyError dance
                attributes.pop(attribute.lower(), None)
        return attributes
| shirlei/helios-server | heliosinstitution/models.py | Python | apache-2.0 | 7,132 |
#!/usr/bin/env python
# -*- coding: UTF8 -*-
#
# Modplug sndfile constants.
# Copyright (C) 2012 Josiah Gordon <josiahg@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Modplug sndfile constants.
"""
# Mod type constants
# Bit-flag values identifying each module format libmodplug can load;
# they mirror the MOD_TYPE_* flags of modplug's sndfile.h and can be OR'd
# together to describe sets of formats.
MOD_TYPE_NONE = 0x00
MOD_TYPE_MOD = 0x01
MOD_TYPE_S3M = 0x02
MOD_TYPE_XM = 0x04
MOD_TYPE_MED = 0x08
MOD_TYPE_MTM = 0x10
MOD_TYPE_IT = 0x20
MOD_TYPE_669 = 0x40
MOD_TYPE_ULT = 0x80
MOD_TYPE_STM = 0x100
MOD_TYPE_FAR = 0x200
MOD_TYPE_WAV = 0x400
MOD_TYPE_AMF = 0x800
MOD_TYPE_AMS = 0x1000
MOD_TYPE_DSM = 0x2000
MOD_TYPE_MDL = 0x4000
MOD_TYPE_OKT = 0x8000
MOD_TYPE_MID = 0x10000
MOD_TYPE_DMF = 0x20000
MOD_TYPE_PTM = 0x40000
MOD_TYPE_DBM = 0x80000
MOD_TYPE_MT2 = 0x100000
MOD_TYPE_AMF0 = 0x200000
MOD_TYPE_PSM = 0x400000
MOD_TYPE_J2B = 0x800000
MOD_TYPE_ABC = 0x1000000
MOD_TYPE_PAT = 0x2000000
MOD_TYPE_UMX = 0x80000000 # Fake type
# number of real (non-fake) mod types defined above
MAX_MODTYPE = 24
| zepto/musio | musio/modplug/sndfile.py | Python | gpl-3.0 | 1,502 |
class Solution(object):
    def countNegatives(self, grid):
        """
        Count the negative numbers in a matrix whose rows and columns are
        both sorted in non-increasing order (LeetCode 1351).

        Uses the staircase walk from the bottom-left corner: when the
        current cell is negative, the rest of its row is negative too, so
        the whole tail is counted and the walk moves up; otherwise it moves
        right. O(rows + cols) instead of the original O(rows * cols) scan.

        :type grid: List[List[int]]
        :rtype: int
        """
        if len(grid) == 0 or len(grid[0]) == 0:
            return 0
        rows = len(grid)
        cols = len(grid[0])
        cnt = 0
        row, col = rows - 1, 0
        while row >= 0 and col < cols:
            if grid[row][col] < 0:
                # everything right of col in this row is also negative
                cnt += cols - col
                row -= 1
            else:
                col += 1
        return cnt
| kingsamchen/Eureka | crack-data-structures-and-algorithms/leetcode/count_negative_numbers_in_a_sorted_matrix_q5340.py | Python | mit | 488 |
# coding=utf-8
from datetime import datetime, timedelta
import _strptime
import time
# Abbreviated club names as they appear in the tipsport listings, mapped to
# their full names (Czech and Slovak hockey clubs) for nicer display.
FULL_NAMES = {
    u'H.Králové': u'Hradec Králové',
    u'M.Boleslav': u'Mladá Boleslav',
    u'K.Vary': u'Karlovy Vary',
    u'SR 20': u'Slovensko 20',
    u'L.Mikuláš': u'Liptovský Mikuláš',
    u'N.Zámky': u'Nové Zámky',
    u'HK Poprad': u'Poprad',
    u'B.Bystrica': u'Banská Bystrica',
    u'Fr.Místek': u'Frýdek-Místek',
    u'Ústí': u'Ústí nad Labem',
    u'Benátky': u'Benátky nad Jizerou',
    u'Č.Budějovice': u'České Budějovice'
}
class Match(object):
    """One match with parsed team names, timing, and stream availability.

    (Now a new-style class; the original old-style 'class Match:' had no
    functional reason on python 2 and blocks features like __new__.)
    """
    def __init__(self, name, competition, sport, url, start_time, status, not_started, score, icon_name,
                 minutes_enable_before_start):
        self.first_team, self.second_team, self.name = self.parse_name(name)
        self.competition = competition
        self.sport = sport
        self.url = url
        self.start_time = start_time
        self.status = status
        # feed delivers not_started as the string 'false' or a bool
        self.started = not_started in ['false', False]
        self.score = score
        self.icon_name = icon_name
        self.minutes_enable_before_start = minutes_enable_before_start
        self.match_time = self.get_match_time()

    def get_match_time(self):
        """Return today's date combined with the advertised HH:MM start time.

        (Removed a stray no-op datetime.now() call and the intermediate
        throwaway datetime the original built before .replace().)
        """
        # time.strptime is used (with _strptime pre-imported at module
        # level) to dodge the threaded-strptime import bug in embedded
        # interpreters such as Kodi.
        parsed = time.strptime(self.start_time, '%H:%M')
        return datetime.now().replace(hour=parsed.tm_hour, minute=parsed.tm_min,
                                      second=0, microsecond=0)

    def is_stream_enabled(self):
        """True once the stream should be watchable: the match has already
        started, or starts within minutes_enable_before_start minutes."""
        time_to_start = self.match_time - datetime.now()
        if time_to_start.days < 0:
            # kick-off already passed
            return True
        # direct timedelta comparison instead of comparing .seconds fields
        return time_to_start < timedelta(minutes=self.minutes_enable_before_start)

    @staticmethod
    def get_full_name_if_possible(name):
        """Expand an abbreviated team name via FULL_NAMES, else return it unchanged."""
        return FULL_NAMES.get(name, name)

    @staticmethod
    def parse_name(name):
        """Split 'TeamA-TeamB' into (first, second, 'First - Second').

        A name without exactly one '-' is returned unparsed as ('', '', name).
        """
        try:
            first_team, second_team = name.split('-')
        except ValueError:
            return '', '', name
        first_team = Match.get_full_name_if_possible(first_team)
        second_team = Match.get_full_name_if_possible(second_team)
        match_name = u'{first_team} - {second_team}'.format(first_team=first_team,
                                                            second_team=second_team)
        return first_team, second_team, match_name
| Xsichtik/plugin.video.tipsport.elh | resources/lib/match.py | Python | gpl-2.0 | 2,411 |
class WoofConsumerError(RuntimeError):
    """Base error raised by woof kafka consumers."""
    pass
class WoofNotSupported(WoofConsumerError):
    """Raised when a requested consumer feature is not supported."""
    pass
# broker version string used elsewhere in woof -- presumably the kafka
# broker version the library targets; confirm before bumping
CURRENT_PROD_BROKER_VERSION = '0.9'
| goibibo/woof | woof/common.py | Python | apache-2.0 | 140 |
import sys
import unittest
from dynd import nd, ndt
import ctypes
class TestCTypesDTypeInterop(unittest.TestCase):
    """Check that ndt.type() maps ctypes scalars, structs and arrays correctly."""

    def test_type_from_ctype_typeobject(self):
        """Plain ctypes scalar types map to the matching dynd types."""
        self.assertEqual(ndt.int8, ndt.type(ctypes.c_int8))
        self.assertEqual(ndt.int16, ndt.type(ctypes.c_int16))
        self.assertEqual(ndt.int32, ndt.type(ctypes.c_int32))
        self.assertEqual(ndt.int64, ndt.type(ctypes.c_int64))
        self.assertEqual(ndt.uint8, ndt.type(ctypes.c_uint8))
        self.assertEqual(ndt.uint16, ndt.type(ctypes.c_uint16))
        self.assertEqual(ndt.uint32, ndt.type(ctypes.c_uint32))
        self.assertEqual(ndt.uint64, ndt.type(ctypes.c_uint64))
        # (two exact duplicate uint32/uint64 assertions removed)
        self.assertEqual(ndt.float32, ndt.type(ctypes.c_float))
        self.assertEqual(ndt.float64, ndt.type(ctypes.c_double))

    def test_type_from_annotated_ctype_typeobject(self):
        """dynd's annotated ctypes aliases map to bool/complex dynd types."""
        self.assertEqual(ndt.bool, ndt.type(ndt.ctypes.c_dynd_bool))
        self.assertEqual(ndt.complex_float32, ndt.type(ndt.ctypes.c_complex_float32))
        self.assertEqual(ndt.complex_float64, ndt.type(ndt.ctypes.c_complex_float64))
        self.assertEqual(ndt.complex_float32, ndt.type(ndt.ctypes.c_complex64))
        self.assertEqual(ndt.complex_float64, ndt.type(ndt.ctypes.c_complex128))

    def test_type_from_ctype_cstruct(self):
        """ctypes.Structure maps to an equivalent dynd cstruct, including nesting."""
        class POINT(ctypes.Structure):
            _fields_ = [('x', ctypes.c_int32), ('y', ctypes.c_int32)]
        self.assertEqual(ndt.make_cstruct(
                        [ndt.int32, ndt.int32], ['x', 'y']),
                        ndt.type(POINT))

        class DATA(ctypes.Structure):
            _fields_ = [
                        ('pos', POINT),
                        ('flags', ctypes.c_int8),
                        ('size', ctypes.c_float),
                        ('vel', POINT)
                       ]
        self.assertEqual(ndt.make_cstruct([POINT, ndt.int8, ndt.float32, POINT],
                        ['pos', 'flags', 'size', 'vel']),
                        ndt.type(DATA))

    def test_type_from_ctypes_carray(self):
        """ctypes fixed arrays (possibly nested) map to cfixed dims."""
        self.assertEqual(ndt.make_cfixed_dim(10, ndt.int32),
                        ndt.type(ctypes.c_int32 * 10))
        self.assertEqual(ndt.make_cfixed_dim((10, 3), ndt.int32),
                        ndt.type((ctypes.c_int32 * 3) * 10))
        self.assertEqual(ndt.make_cfixed_dim((10, 3, 4), ndt.int32),
                        ndt.type(((ctypes.c_int32 * 4) * 3) * 10))

        class POINT(ctypes.Structure):
            _fields_ = [('x', ctypes.c_int32), ('y', ctypes.c_int32)]
        self.assertEqual(ndt.make_cfixed_dim(10, ndt.type(POINT)),
                        ndt.type(POINT * 10))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| aterrel/dynd-python | dynd/tests/test_ctypes_interop.py | Python | bsd-2-clause | 2,803 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Xamcheck-Utils Stuff
from xamcheck_utils.collections import DefaultOrderedDict, OrderedSet
from .base import TestCase
class TestOrderedSet(TestCase):
    """Behavioural tests for OrderedSet: add, discard, equality, pop, membership."""

    def test_add(self):
        """Adding a duplicate must not grow the set."""
        ordered_set = OrderedSet()

        ordered_set.add("something")
        self.assertEqual(len(ordered_set), 1)

        ordered_set.add("something")
        self.assertEqual(len(ordered_set), 1)

        ordered_set.add(1)
        self.assertEqual(len(ordered_set), 2)

        new_obj = object()
        ordered_set.add(new_obj)
        self.assertEqual(len(ordered_set), 3)

        another_object = object()
        ordered_set.add(another_object)
        self.assertEqual(len(ordered_set), 4)

        ordered_set.add(another_object)
        self.assertEqual(len(ordered_set), 4)

    def test_discard(self):
        """Discarding removes present members and ignores absent ones."""
        new_obj = object()
        another_object = object()
        another_objectx = another_object

        ordered_set = OrderedSet(['key', 1, '123', new_obj])
        ordered_set.add(another_object)

        ordered_set.discard(another_objectx)
        self.assertEqual(len(ordered_set), 4)

        ordered_set.discard(1)
        self.assertEqual(len(ordered_set), 3)

        # already removed -- must be a no-op
        ordered_set.discard(another_object)
        self.assertEqual(len(ordered_set), 3)

        ordered_set.discard(new_obj)
        self.assertEqual(len(ordered_set), 2)

        ordered_set.discard('123')
        self.assertEqual(len(ordered_set), 1)

        # absent key -- no-op
        ordered_set.discard('KEY')
        self.assertEqual(len(ordered_set), 1)

        ordered_set.discard('key')
        self.assertEqual(len(ordered_set), 0)

    def test_equal(self):
        """Same-content sets compare equal; different content does not.

        The original version called ``__eq__`` and evaluated ``!=`` without
        asserting the results, so the test could never fail.
        """
        new_obj = object()
        another_object = object()

        ordered_set = OrderedSet(['key', 1, '123', new_obj, another_object])
        another_ordered_set = OrderedSet(['key', 1, '123', new_obj])
        other_ordered_set = OrderedSet(
            ['key', 1, '123', new_obj, another_object])

        self.assertEqual(set(ordered_set), set(other_ordered_set))
        self.assertNotEqual(set(ordered_set), set(another_ordered_set))

    def test_pop(self):
        """pop removes one element per call and raises KeyError when empty."""
        new_obj = object()
        ordered_set = OrderedSet(['key', 1, '123', new_obj])

        ordered_set.pop()
        self.assertEqual(len(ordered_set), 3)

        ordered_set.pop()
        self.assertEqual(len(ordered_set), 2)

        ordered_set.pop()
        self.assertEqual(len(ordered_set), 1)

        ordered_set.pop()
        self.assertEqual(len(ordered_set), 0)

        with self.assertRaises(KeyError):
            ordered_set.pop()

    def test_contains(self):
        """Membership works for strings, ints and arbitrary objects."""
        new_obj = object()
        ordered_set = OrderedSet(['key', 1, '123', new_obj])

        items = (
            ('key', True),
            ('2', False),
            ('123', True),
            (2, False),
            (new_obj, True),
        )

        for value, output in items:
            self.check_output(ordered_set.__contains__, output, value)
class TestOrderedDict(TestCase):
    """Tests for DefaultOrderedDict (defaultdict behaviour with insertion order)."""

    def test_getitem(self):
        """Existing keys accumulate values; missing keys yield the factory default."""
        items = [('yellow', 1), ('blue', 2),
                 ('yellow', 3), ('blue', 4), ('red', 1), (1, 'samsung')]

        default_ordered_dict = DefaultOrderedDict(list)

        for k, v in items:
            default_ordered_dict[k].append(v)

        values = default_ordered_dict.__getitem__('yellow')
        self.assertEqual(values, [1, 3])

        values = default_ordered_dict.__getitem__(1)
        self.assertEqual(values, ['samsung'])

        # unseen key -> default factory result (empty list)
        values = default_ordered_dict.__getitem__('lenovo')
        self.assertEqual(values, [])

    # NOTE(review): disabled test kept for reference -- it duplicates the
    # missing-key case above; confirm before deleting or re-enabling.
    # def test_missing(self):
    #     items = [('yellow', 1), ('blue', 2), ('yellow', 3),
    #              ('blue', 4), ('red', 1), (1, 'samsung')]

    #     default_ordered_dict = DefaultOrderedDict(list)

    #     for k, v in items:
    #         default_ordered_dict[k].append(v)

    #     values = default_ordered_dict.__getitem__('titan')
    #     self.assertEqual(values, [])
| psjinx/xamcheck-utils | src/xamcheck_utils/tests/test_collections.py | Python | mit | 4,073 |
import os
from tempfile import mkdtemp
SECRET_KEY = "Please do not spew DeprecationWarnings"
# Haystack settings for running tests.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'haystack_tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'haystack',
'test_haystack.discovery',
'test_haystack.core',
'test_haystack.spatial',
'test_haystack.multipleindex',
]
SITE_ID = 1
ROOT_URLCONF = 'test_haystack.core.urls'
HAYSTACK_ROUTERS = ['haystack.routers.DefaultRouter', 'test_haystack.multipleindex.routers.MultipleIndexRouter']
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'test_haystack.mocks.MockEngine',
},
'whoosh': {
'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
'PATH': os.path.join('tmp', 'test_whoosh_query'),
'INCLUDE_SPELLING': True,
},
'filtered_whoosh': {
'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
'PATH': mkdtemp(prefix='haystack-multipleindex-filtered-whoosh-tests-'),
'EXCLUDED_INDEXES': ['test_haystack.multipleindex.search_indexes.BarIndex'],
},
'elasticsearch': {
'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
'URL': '127.0.0.1:9200/',
'INDEX_NAME': 'test_default',
'INCLUDE_SPELLING': True,
},
'simple': {
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
},
'solr': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://localhost:8983/solr/',
'INCLUDE_SPELLING': True,
},
}
SITE_ID = 1
| kybi/django-haystack | test_haystack/settings.py | Python | bsd-3-clause | 1,777 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import struct
from layer1 import VarLenMessage, VarLenSocket
class TagMessage:
    """
    TagMessage contains a string tag and string data.

    Wire format: ``tag`` bytes, a single NUL separator, then ``data`` bytes.

    Parameters
    ----------
    tag: string
        The type of the message. Currently supported tags are "json" and
        "async".
    data: string
        The data content of the message.

    NOTE(review): this code relies on Python 2 byte-string semantics
    (``bytearray(self.tag)`` and ``vlm.data.find('\\0')``) -- confirm before
    porting to Python 3.
    """
    def __init__(self, tag, data):
        self.tag = tag
        self.data = data

    def toVarLenMessage(self):
        """
        Convert a TagMessage to a VarLenMessage

        Returns
        -------
        VarLenMessage
            A VarLenMessage representation of this TagMessage
            (tag + NUL + data).
        """
        binData = bytearray(self.tag)
        binData.append(0)  # NUL byte separates tag from payload
        binData.extend(self.data)
        return VarLenMessage(binData)

    @staticmethod
    def fromVarLenMessage(vlm):
        """
        Create a TagMessage from a VarLenMessage.

        Parameters
        ----------
        vlm: VarLenMessage
            The message to convert to a TagMessage.

        Returns
        -------
        TagMessage
            A TagMessage representation of the input VarLenMessage.

        Raises
        ------
        NameError
            If the payload contains no NUL separator.
        """
        # find the position of the '\0' separating tag from data
        end = vlm.data.find('\0')
        if end < 0:
            raise NameError('received tag message has no null char')
        # extract the tag (null terminated string)
        fmt = "{0}s".format(end)
        tag = struct.unpack_from(fmt, vlm.data)[0]
        # the data is the rest of the message
        data = vlm.data[end+1:]
        # return the tag message
        return TagMessage(tag,data)
class TagMessageSocket:
    """
    A socket wrapper that allows sending and receiving of TagMessages
    by delegating framing to a VarLenSocket.

    Parameters
    ----------
    rawSocket: socket
        Connected socket used for all transfers.
    """
    def __init__(self, rawSocket):
        self.varLenSocket = VarLenSocket(rawSocket)

    def send(self, tagMessage):
        """
        Send a TagMessage by converting it to a VarLenMessage.

        Parameters
        ----------
        tagMessage: TagMessage
            The message to send.
        """
        self.varLenSocket.send(tagMessage.toVarLenMessage())

    def receive(self):
        """
        Receive a TagMessage.

        Returns
        -------
        TagMessage
            A TagMessage that has been converted from a VarLenMessage.
        """
        # get the VarLenMessage
        vlm = self.varLenSocket.receive()
        # convert the VarLenMessage to TagMessage
        return TagMessage.fromVarLenMessage(vlm)
| astrilet/CARTAvis | carta/scriptedClient/layer2.py | Python | gpl-2.0 | 2,553 |
# -*- coding: utf-8 -*-
'''
Local settings
- Run in Debug mode
- Use console backend for emails
- Add Django Debug Toolbar
- Add django-extensions as app
'''
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env("EASY_BLOG_KEY")
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
default='django.core.mail.backends.console.EmailBackend')
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
# Example: postgres://db_owner:password@dbserver_ip:port/db_name
'default': env.db("DATABASE_URL", default="postgres://easyuser:password@localhost/easy_blog_django"),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar', )
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2',)
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ('django_extensions', )
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Your local stuff: Below this line define 3rd party library settings | giantryansaul/easy_blog_django | config/settings/local.py | Python | bsd-3-clause | 2,375 |
#!/usr/bin/env python
#
# Communicate with the ulnoiot esp dongle
# and scan for uiot-node networks
import sys
import os
import optparse
import logging
import serial
import time
def do_scan(port, cutoff):
    """Ask the ulnoiot esp dongle on *port* to scan and print adoptable nodes.

    Nodes weaker than *cutoff* (dBm) are ignored; matching "uiot-node-*"
    networks are printed strongest first. Returns 0 on success, 1 on any
    communication or scan failure.
    """
    # Create a serial connection (closed in the finally block -- the
    # original leaked the port on every early return)
    ser = serial.Serial(port, 115200, timeout=15)
    logging.info('Starting on %s.', port)
    try:
        ser.read_all()  # discard any pending input
        ser.write(b"\n")
        ser.flush()
        answer = ser.read_until(b"UED>")
        if not answer.endswith(b"UED>"):
            sys.stderr.write("Trouble communicating with dongle.\n")
            return 1
        sys.stderr.write("Scanning for nodes that can be adopted... ")
        ser.write(b"scan\n")
        ser.flush()
        # read_until returns whatever bytes were read (possibly none on
        # timeout) -- verify the start marker actually arrived instead of
        # just truth-testing the buffer
        answer = ser.read_until(b"!network_scan start\r\n")
        if not answer.endswith(b"!network_scan start\r\n"):
            sys.stderr.write("Network scan failed.\n")
            return 1
        node_list = []
        while True:
            l = ser.readline().strip()
            if (not b" " in l) or (l == b"!network_scan end"):
                break
            try:
                strength, name = l.split(b" ", 1)
                strength = int(strength)
                if strength >= cutoff and name.startswith(b"uiot-node-"):
                    node_list.append((name.decode(), strength))
            except ValueError:
                pass  # malformed scan line -- skip it
        sys.stderr.write("done.\n")
        if not node_list:
            sys.stderr.write("No nodes that can be adopted found.\n")
            return 1
        # strongest first: higher (less negative) dBm sorts before weaker;
        # the original ascending sort listed the weakest network first
        node_list.sort(key=lambda x: (x[1], x[0]), reverse=True)
        sys.stderr.write("Following nodes found (ranked by strength):\n")
        for name, s in node_list:
            print("%s (%d)" % (name, s))
        return 0
    finally:
        ser.close()
def parser(unparsed_args):
    """Build the optparse parser and parse *unparsed_args* (argv without argv[0]).

    Returns the optparse options object; positional arguments are ignored.
    """
    parser = optparse.OptionParser(
        usage = "%prog [options]",
        # fixed help-text defects: missing space after "and" and "oder" typo
        description = "Use a ulnoiot esp dongle to scan for adoptable nodes and " + \
                      "list them in order from strongest to weakest."
    )

    # Configuration
    group = optparse.OptionGroup(parser, "Configuration")
    group.add_option("-p", "--port",
        dest = "esp_port",
        type = "str",
        help = "ulnoiot esp dongle serial port (where the dongle is connected). Default /dev/ttyUSB0",
        default = "/dev/ttyUSB0"
    )
    group.add_option("-c", "--cutoff",
        dest = "cutoff",
        type = "int",
        help = "Decimal-value to use as detection of too weak networks. " + \
               "Anything weaker than this will be ignored. Default: -85",
        default = "-85"
    )
    parser.add_option_group(group)

    # output group
    group = optparse.OptionGroup(parser, "Output")
    group.add_option("-d", "--debug",
        dest = "debug",
        help = "Show debug output. And override loglevel with debug.",
        action = "store_true",
        default = False
    )
    parser.add_option_group(group)

    (options, args) = parser.parse_args(unparsed_args)
    return options
# end parser
def main(args):
    """Parse *args*, configure logging accordingly, and run the dongle scan."""
    options = parser(args)
    # the debug flag overrides the default WARNING level
    loglevel = logging.DEBUG if options.debug else logging.WARNING
    logging.basicConfig(level=loglevel,
                        format='%(asctime)-8s [%(levelname)s]: %(message)s',
                        datefmt='%H:%M:%S')
    logging.debug("Options: %s", str(options))
    return do_scan(options.esp_port, options.cutoff)
# end main
if __name__ == '__main__':
    # drop argv[0] (the program name) -- the option parser expects arguments only
    sys.exit(main(sys.argv[1:]))
| ulno/ulnoiot | bin/dongle_scan.py | Python | mit | 3,722 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from PIL import Image
def makeimageeven(image):
    """Return a copy of *image* with the lowest bit of every channel cleared."""
    pixels = list(image.getdata())  # pixel list; assumes 4-channel (RGBA) pixels -- TODO confirm
    # clear the least-significant bit of each channel via shift right then left
    evenpixels = [(r >> 1 << 1, g >> 1 << 1, b >> 1 << 1, t >> 1 << 1) for [r, g, b, t] in pixels]
    evenimage = Image.new(image.mode, image.size)  # same mode and size as the source
    evenimage.putdata(evenpixels)  # write the masked pixels into the copy
    return evenimage
def constlenbin(intnumber):
    """Return non-negative *intnumber* as a zero-padded 8-character binary string.

    Equivalent to the previous manual '0b'-stripping/padding code for the
    byte values (0-255) this module passes in.
    """
    return format(intnumber, '08b')
def encodedatainimage(image, data):  # hide *data* inside *image*
    """Hide the UTF-8 bytes of *data* in the pixel LSBs of *image*; return the new image."""
    evenimage = makeimageeven(image)  # clear every channel's LSB first
    # one 8-bit string per UTF-8 byte of the payload
    binary = ''.join(map(constlenbin, bytearray(data, 'utf-8')))
    if len(binary) > len(image.getdata()) * 4:  # payload too large for this image
        # was: "..." + int + "..." which raised TypeError instead of this message
        raise Exception("Error: Can't encode more than %d bits in this image."
                        % (len(evenimage.getdata()) * 4))
    # add each payload bit onto the (now even) r/g/b/t values, 4 bits per pixel
    encodedpixels = [(r+int(binary[index*4+0]), g+int(binary[index*4+1]), b+int(binary[index*4+2]),
                      t+int(binary[index*4+3])) if index*4 < len(binary)
                     else (r, g, b, t) for index, (r, g, b, t) in enumerate(list(evenimage.getdata()))]
    encodedimage = Image.new(evenimage.mode, evenimage.size)
    encodedimage.putdata(encodedpixels)
    return encodedimage
def binarytostring(binary):
    """Decode a '0'/'1' bit string (a whole number of UTF-8 bytes) into text.

    Replaces the previous hand-rolled UTF-8 bit parser (nested lambdas):
    converting the bits to bytes and using bytes.decode is equivalent for
    the well-formed, byte-aligned input produced by decodeimage().
    """
    data = bytes(int(binary[i:i + 8], 2) for i in range(0, len(binary), 8))
    return data.decode('utf-8')
def decodeimage(image):  # recover the data hidden in *image*
    """Extract and return the text hidden in the pixel LSBs of *image*."""
    pixels = list(image.getdata())
    # rebuild the bit string: one bit per channel, '1' iff the LSB is set
    binary = ''.join([str(int(r >> 1 << 1 != r))+str(int(g >> 1 << 1 != g))+str(int(b >> 1 << 1 != b)) +
                      str(int(t >> 1 << 1 != t)) for (r, g, b, t) in pixels])
    # the payload ends at the first run of 16 zero bits (two NUL bytes)
    locationdoublenull = binary.find('0000000000000000')
    # round the cut position up to a byte boundary
    endindex = locationdoublenull + (8-(locationdoublenull % 8)) if locationdoublenull % 8 != 0 else locationdoublenull
    data = binarytostring(binary[0:endindex])
    return data
# Demo: hide a message in coffee.png, then read it back from the saved copy.
encodedatainimage(Image.open("coffee.png"), '你好世界,Hello world!').save('encodeImage.png')
print(decodeimage(Image.open("encodeImage.png")))
| dronly/python | shiyanlou/steganography/steganography.py | Python | apache-2.0 | 3,243 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add an optional self-referencing parent to CourseChapter and clear its unique_together."""

    dependencies = [
        ('course', '0010_auto_20151214_1714'),
    ]

    operations = [
        migrations.AddField(
            model_name='coursechapter',
            name='parent',
            field=models.ForeignKey(to='course.CourseChapter', blank=True, null=True, related_name='children'),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='coursechapter',
            unique_together=set([]),
        ),
    ]
| ttsirkia/a-plus | course/migrations/0011_auto_20151215_1133.py | Python | gpl-3.0 | 624 |
# -*- coding: utf-8 -*-
import re
import json
import logging
import random
import time
import operator
import threading
from channels import Group
from channels import Channel
from channels.sessions import channel_session
from .models import Room
from .models import Player
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
log = logging.getLogger(__name__)
noEnoughPeople = '房间人数不足'
gameHasStarted = '游戏已经开始'
gameNotStarted = '游戏尚未开始'
notReady = '有人没准备好'
notRightPerson = '您本轮无法投票'
voteInfo = '您投票给 '
dayerror = '时间是白天'
nighterror = '时间是夜晚'
identification = '您的身份是 '
identificationDict = dict()
identificationDict[0] = '村民'
identificationDict[1] = '狼人'
identificationDict[2] = '预言家'
identificationDict[3] = '女巫'
identificationDict[4] = '猎人'
identificationDict[5] = '守卫'
identificationDict[6] = '白狼王'
identificationDict[7] = '丘比特'
identificationDict[8] = '盗贼'
thread_pool = dict()
def keepalive(label, name, messageInfo, typo):
    """Ping channel *name* every 10s with a keepalive payload until sending fails."""
    message = dict()
    message['handle'] = 'keepalive'
    message['typo'] = typo
    message['message'] = messageInfo
    # try:
    #     room = Room.objects.get(label=label)
    # except Room.DoesNotExist:
    #     log.debug('ws room does not exist label=%s', label)
    #     return
    while 1:
        try:
            #player = room.players.filter(address=name).first()
            Channel(name).send({'text': json.dumps(message)})
            time.sleep(10)
        except Exception as e:
            # channel is gone -- let the keepalive thread die
            break;
# Mark the listed players as dead (change the state of killed players).
def executeDeath(label,listinfo):
    """Set alive=0 for every position in *listinfo* (comma-separated string)."""
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return
    if len(listinfo) == 0:
        return
    temp = listinfo.split(',')
    for i in temp:
        # NOTE(review): assumes every position in listinfo exists in the room
        player = room.players.filter(position=int(i)).first()
        player.alive = 0
        player.save()
# Private message: persist and push a system message to a single channel.
def sendMessage(label, name, messageInfo, typo):
    """Store *messageInfo* on room *label* and send it to channel *name*."""
    message = dict()
    message['handle'] = '系统信息'
    message['typo'] = typo
    message['message'] = messageInfo
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return
    m = room.messages.create(**message)
    try:
        Channel(name).send({'text': json.dumps(m.as_dict())})
    except Exception as e:
        # channel may already be closed; the message stays persisted
        log.debug('Can not send message to ' + name)
# Group message: persist once and push to every connected player in the room.
def sendGroupMessage(label, messageInfo, typo):
    """Store *messageInfo* on room *label* and send it to all connected players."""
    message = dict()
    message['handle'] = '系统信息'
    message['typo'] = typo
    message['message'] = messageInfo
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return
    m = room.messages.create(**message)
    for i in range(1, room.playerNumber + 1):
        player = room.players.filter(position=i).first()
        if player is not None and player.connection == True:
            name = player.address
            Channel(name).send({'text': json.dumps(m.as_dict())})
# Win/loss judgement for the room.
def judgement(label):
    """Decide the game outcome for room *label*.

    Returns: -1 room missing, 0 game continues (or thief still unresolved),
    1 wolves win, 2 villagers/gods win, 3 third party (lovers) win.
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return -1
    # initial counts come from the configured role list
    roleList = room.roleList.split(",")
    cunMin = int(roleList[0])
    langRen = int(roleList[1])
    if int(roleList[6]) == 1:
        # white wolf king counts as an extra wolf
        langRen = langRen + 1
    shenMin = int(room.playerNumber) - cunMin - langRen
    for player in room.players.all():
        if player.identification == 8:
            # thief has not yet picked a card -- game cannot end yet
            log.debug('判决胜负0')
            return 0
        if player.alive == 0:
            if player.identification == 0:
                cunMin = cunMin - 1
            elif player.identification == 1 or player.identification == 6:
                langRen = langRen - 1
            else:
                shenMin = shenMin - 1
    if room.thirdteam == 1:
        # lovers' faction: sheriff counts 1.5, everyone else 1
        count1 = 0
        count2 = 0
        for player in room.players.all():
            if player.link != -1 or player.identification == 7:
                if player.jingzhang == 1:
                    count1 = count1 + 1.5
                else:
                    count1 = count1 + 1
            else:
                if player.jingzhang == 1:
                    count2 = count2 + 1.5
                else:
                    count2 = count2 + 1
        if count1 >= count2:
            log.debug('判决胜负3')
            return 3
    # wolves win when villagers or gods are wiped out, or wolves reach parity
    if cunMin == 0 or shenMin == 0 or langRen >= (cunMin + shenMin):
        log.debug('判决胜负1')
        return 1
    elif langRen == 0:
        log.debug('判决胜负2')
        return 2
    else:
        log.debug('判决胜负0')
        return 0
# Judge (moderator) view: reveal all hidden roles and night actions.
def judgementView(label, name):
    """Send the full hidden-role report for room *label* to channel *name*."""
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        sendMessage(label, name, '房间不存在!', 'error')
        return
    cunmin = ''
    langren = ''
    yuyanjia = ''
    nvwu = ''
    shouwei = ''
    lieren = ''
    bailangwang = ''
    qiubite = ''
    # position is an integer field -- it must be str()'d before concatenation
    # (the original concatenated str + int, raising TypeError on the first player)
    # NOTE(review): ids 3/4 map to 猎人/女巫 here, matching skill() but not
    # identificationDict -- confirm which mapping is authoritative.
    for player in room.players.all():
        if player.identification == 0:
            cunmin = cunmin + str(player.position) + ' '
        elif player.identification == 1:
            langren = langren + str(player.position) + ' '
        elif player.identification == 2:
            yuyanjia = yuyanjia + str(player.position) + ' '
        elif player.identification == 3:
            lieren = lieren + str(player.position) + ' '
        elif player.identification == 4:
            nvwu = nvwu + str(player.position) + ' '
        elif player.identification == 5:
            shouwei = shouwei + str(player.position) + ' '
        elif player.identification == 6:
            bailangwang = bailangwang + str(player.position) + ' '
        elif player.identification == 7:
            qiubite = qiubite + str(player.position) + ' '
    Info = 'Identification list \n '
    if len(cunmin) > 0:
        Info = Info + '村民: ' + cunmin + '\n '
    if len(langren) > 0:
        Info = Info + '狼人: ' + langren + '\n '
    if len(yuyanjia) > 0:
        Info = Info + '预言家: ' + yuyanjia + '\n '
    if len(lieren) > 0:
        Info = Info + '猎人: ' + lieren + '\n '
    if len(nvwu) > 0:
        Info = Info + '女巫: ' + nvwu + '\n '
    if room.jieyao != 0:
        Info = Info + '女巫解药已经使用!' + '\n '
    if room.duyao != 0:
        Info = Info + '女巫毒药已经使用!对象是: ' + str(room.duyao) + '\n '
    if len(shouwei) > 0:
        Info = Info + '守卫: ' + shouwei + '\n '
    if room.shou != 0:
        Info = Info + '守卫昨天晚上守卫的人是: ' + str(room.shou) + '\n '
    if len(bailangwang) > 0:
        Info = Info + '白狼王: ' + bailangwang + '\n '
    if len(qiubite) > 0:
        Info = Info + '丘比特: ' + qiubite + '\n '
    Info = Info + '丘比特所连的情侣是: ' + room.link + '\n '
    if room.theft != -1:
        Info = Info + '盗贼: ' + str(room.theft) + '\n '
        # was: .first (missing call parentheses) -> AttributeError on .identification
        temp = room.players.filter(position=room.theft).first()
        Info = Info + '盗贼拾取的身份是: ' + str(identificationDict[int(temp.identification)]) + '\n '
        Info = Info + '盗贼掩埋的身份是: ' + str(identificationDict[int(room.burycard)]) + '\n '
    if room.thirdteam == 0:
        Info = Info + '不含有第三阵营!' + '\n '
    else:
        Info = Info + '含有第三阵营!' + '\n '
    sendMessage(label, name, Info, 'message')
# Death settlement: let a dying player use their skill (sheriff badge,
# hunter shot, lovers chain).
def skill(label, number, condition):
    """Resolve on-death abilities for the player at *number* in room *label*.

    condition == 1 enables the hunter's shot; lovers' chained deaths always
    propagate. Recursive: each victim's own skills are resolved in turn.
    NOTE(review): the ``is 1`` / ``is 3`` comparisons below rely on CPython's
    small-int caching -- they should be ``==``.
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return 0
    player = room.players.filter(position=number).first()
    room.voteList = ''
    room.save()
    if player.jingzhang is 1:
        # dying sheriff may pass the badge within 20 seconds
        room.voteList = ''
        room.save()
        sendGroupMessage(label,'警长有20s时间可以传递警徽','message')
        time.sleep(20)
        jinghuiList, systemInfo = processVote(label,0)
        if len(jinghuiList) > 0:
            jiren = room.players.filter(position=int(jinghuiList)).first()
            if jiren.alive is 1:
                jiren.jingzhang = 1
                jiren.save()
                sendGroupMessage(label,jinghuiList + '号玩家成为警长','message')
    room.voteList = ''
    room.save()
    sendGroupMessage(label,str(player.position) +'号玩家有20s时间可以发动技能','message')
    time.sleep(20)
    target, systemInfo = processVote(label,0)
    if player.identification is 3 and condition == 1:
        # hunter: takes one player along when allowed
        if target is not '' and int(target) > 0:
            x = room.players.filter(position=int(target)).first()
            x.alive = 0
            x.save()
            sendGroupMessage(label,'猎人发动技能,带走' + target,'message')
            room.voteList = ''
            room.save()
            skill(label, target, 1)
    if player.link != -1:
        # lover dies of heartbreak; resolve their skills too (no hunter shot)
        qinglv = room.players.filter(position=player.link).first()
        qinglv.alive = 0
        qinglv.save()
        sendGroupMessage(label,'情侣' + str(player.link) + '号玩家死亡','message')
        skill(label, player.link, -1)
# Tally the room's vote list.
def processVote(label, args):
    """Tally room *label*'s voteList ("voter,target,voter,target,...").

    Only living voters count (dead sheriff/hunter excepted); a sheriff's vote
    weighs 1.5. Returns (targets_csv, summary) where targets_csv holds the
    top-voted position(s), or (-1, error) when the room is missing.
    Identity comparisons with int literals (`is 1`, `is 0`) were replaced by
    `==`; the dead commented-out *args* filter was dropped.
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return -1, 'ws room does not exist label=' + label
    count = dict()
    info = dict()
    vote = dict()
    log.debug('投票列表现在是=%s', room.voteList)
    voteList = room.voteList.split(',')
    if len(voteList) == 1:
        return '', '无人投票'
    for i in xrange(0,len(voteList),2):
        log.debug('现在i的大小是=%d', i)
        voter = voteList[i]
        target = voteList[i + 1]
        log.debug('现在voter的大小是=%s', voter)
        log.debug('现在target的大小是=%s', target)
        if int(target) < 1 or int(target) > room.playerNumber:
            continue
        elif voter in vote:
            # each voter only counts once
            continue
        else:
            player = room.players.filter(position=voter).first()
            if player is None:
                log.debug('找不到player')
                continue
            if player.alive == 0:
                # a dead hunter or sheriff may still vote (skill resolution)
                if player.identification != 3 and player.jingzhang != 1:
                    log.debug('player并没有存活')
                    continue
            vote[voter] = target
            if target in info:
                info[target] = info[target] + ',' + voter
            else:
                info[target] = '' + voter
            weight = 1
            if player.jingzhang == 1:
                weight = 1.5
            log.debug('现在的weight是:%s',weight)
            if target in count:
                count[target] = count[target] + weight
            else:
                count[target] = weight
    # pick the highest-weighted target(s); ties are joined with ','
    deadman = ''
    currentMax = 0.0
    for key,val in count.iteritems():
        if val > currentMax:
            deadman = '' + key
            currentMax = val
        elif val == currentMax:
            deadman = deadman + ',' + key
    systemInfo = '本轮被投的人是: ' + deadman + '\n'
    for key, val in info.iteritems():
        systemInfo = systemInfo + '投' + key + '的人有:' + val + '\n'
    return deadman, systemInfo
# Build the candidate name list from the vote list.
def processName(label):
    """Return the distinct voters currently in room *label*'s voteList,
    preserving first-seen order, and clear the voteList."""
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return ''
    nameList = []
    if room.voteList == '':
        return nameList
    voteList = room.voteList.split(',')
    # voteList alternates voter,target -- only the voters matter here
    for i in xrange(0,len(voteList),2):
        if voteList[i] in nameList:
            continue
        else:
            nameList.append(voteList[i])
    room.voteList = ''
    room.save()
    return nameList
# During the speech phase handle withdraw ("tuishui"), wolf self-explosion
# ("bloom") and the start-vote command.
def checkStatus(label, nameList):
    """Scan room *label*'s voteList for special commands during speeches.

    Returns (code, nameList): 0 nothing happened, 1 a wolf exploded (day
    ends), 2 voting was requested. NOTE(review): the room-missing branch
    returns a bare -1 (not a tuple) -- callers unpacking two values would
    crash; `len(...) is 0` relies on small-int caching and should be `==`.
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return -1
    log.debug('Here is the room list =%s', room.voteList)
    if len(room.voteList) is 0:
        return 0, nameList
    voteList = room.voteList.split(',')
    for i in xrange(0,len(voteList),2):
        voter = voteList[i]
        target = voteList[i + 1]
        log.debug('Here is the target :%s',target)
        if target == 'bloom':
            # wolf self-explosion: ends the day immediately
            player = room.players.filter(position=voter).first()
            if player.identification == 1 or player.identification == 6:
                sendGroupMessage(label,'狼人' + str(voter) + '号玩家自爆!','message')
                if room.jinghui is 1:
                    sendGroupMessage(label,'昨天晚上死亡的人是'+room.deadman,'message')
                if player.identification is 6:
                    # white wolf king may take one player along
                    room.voteList = ''
                    room.save()
                    sendMessage(label,player.address,'你有十秒钟决定带走谁','message')
                    time.sleep(10)
                    x, y = processVote(label,0)
                    if len(x) > 0:
                        deadman = room.players.filter(position=int(x)).first()
                        deadman.alive = 0
                        deadman.save()
                        sendGroupMessage(label,'白狼王带走' + x + '号玩家!','message')
                        skill(label, int(x), 1)
                    else:
                        sendMessage(label,player.address,'你并没有发动技能!','message')
                player.alive = 0
                player.save()
                room.jinghui = 0
                room.daystatus = 0
                room.voteList = ''
                room.deadman = ''
                room.save()
                return 1, nameList
            else:
                sendMessage(label,player.address,'您不是狼人,无法自爆','message')
        elif target == 'tuishui':
            # candidate withdraws from the sheriff election
            if voter in nameList:
                nameList.remove(voter)
                sendGroupMessage(label, str(voter) + '号玩家已经退水', 'message')
        elif target == 'startVote':
            room.voteList = ''
            room.save()
            return 2, nameList
    return 0, nameList
# PK-stage status: has anyone requested the vote to start?
def pkStatus(label):
    """Return 1 if 'startVote' appears in room *label*'s voteList, 0 otherwise,
    -1 when the room is missing."""
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return -1
    voteList = room.voteList.split(',')
    log.debug('now vote list is %s',room.voteList)
    if len(room.voteList) is 0:
        return 0
    for i in xrange(0,len(voteList),2):
        voter = voteList[i]
        target = voteList[i + 1]
        log.debug('now target is %s',target)
        if target == 'startVote':
            room.voteList = ''
            room.save()
            return 1
    return 0
# Run one PK (tie-break) round.
def pkVote(label, nameList, count):
    """Run PK round *count* for the tied players in *nameList*.

    Round 0: candidates speak; round 1: the audience speaks; round 2 aborts
    with []. Waits for 'startVote', runs a 20s vote and returns the new
    top-voted target string.
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return []
    sendGroupMessage(label,'现在进入PK台,现在是第' + str(count + 1) + '轮','message')
    sendGroupMessage(label,'现在在台上的玩家是:','message')
    sendGroupMessage(label,str(nameList[0:]),'message')
    if count is 0:
        sendGroupMessage(label,'请在PK台上的人发言!','message')
    elif count is 1:
        sendGroupMessage(label,'请在PK台下的人发言!','message')
    sendGroupMessage(label,'输入startVote开始进行投票!','message')
    if count is 2:
        # third tie in a row -- give up, nobody is elected
        return []
    else:
        room.voteList = ''
        room.save()
        # poll every 10s until someone requests the vote
        status = 0
        while status is 0:
            status = pkStatus(label)
            time.sleep(10)
        sendGroupMessage(label,'PK台投票开始','message')
        sendGroupMessage(label,'现在在台上的玩家是:','message')
        sendGroupMessage(label,str(nameList[0:]),'message')
        sendGroupMessage(label,'开始20s的投票','message')
        time.sleep(20)
        target, systemInfo = processVote(label,0)
        sendGroupMessage(label,systemInfo,'message')
        return target
# Cupid links two lovers.
def processLink(label):
    """Return the positions of the two players Cupid linked.

    Falls back to random DISTINCT players when Cupid did not (fully) vote.
    Returns (-1, 0) when the room does not exist.
    Bug fixed: both re-roll loops were inverted (``while number2 != number1``),
    which looped until the two lovers were the SAME player.
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return -1, 0
    voteList = room.voteList
    if len(voteList) == 0:
        number1 = random.randint(1,room.playerNumber)
        number2 = random.randint(1,room.playerNumber)
        # re-roll while the two picks collide
        while number2 == number1:
            number2 = random.randint(1,room.playerNumber)
        return number1, number2
    temp = voteList.split(',')
    number1 = int(temp[1])
    if len(temp) < 4:
        # only one lover chosen -- pick a distinct second one at random
        number2 = random.randint(1,room.playerNumber)
        while number2 == number1:
            number2 = random.randint(1,room.playerNumber)
        return number1, number2
    number2 = int(temp[3])
    return number1, number2
# Room state machine: each call executes one phase of the night/day cycle
# and returns the number of the next phase to run.
def room_status(label, number, gameStatus, playerList):
    """Advance the werewolf game one phase and return the next phase number.

    Phase flow (driven by the caller re-invoking with the returned value):
    0 night falls (thief / Cupid act on the first night) -> 1 wolves kill ->
    2 seer checks -> 4 witch heal -> 5 witch poison -> 6 guard protects ->
    7 dawn, deaths resolved (sheriff election phase 9 runs inline on the
    first morning) -> 8 dying players pass the badge / fire skills ->
    10 day speeches and lynch vote -> 0 (next night).

    :param label:      room label used to look up the Room
    :param number:     the phase to execute now
    :param gameStatus: role ids active in this game (0/1 always present)
    :param playerList: shuffled identity cards; the last two entries are the
                       thief's spare cards
    :returns: the next phase number, or -1 on error / forced stop
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return -1
    # Phase 0: night falls ("close your eyes"); thief and Cupid only act
    # while room.jinghui == 1 (i.e. the first night).
    if number == 0:
        sendGroupMessage(label, '天黑请闭眼!', 'message1')
        time.sleep(10)
        if 8 in gameStatus and room.jinghui == 1 and room.theft is -1:
            sendGroupMessage(label, '盗贼请睁眼!', 'message12')
            sendGroupMessage(label, '盗贼有两张牌可以选择,狼人牌为必选牌,请输入1和2来决定你选的牌!', 'message')
            player = room.players.filter(identification=8).first()
            # NOTE(review): when player is None this only sleeps and then
            # dereferences player.address anyway — likely a latent
            # AttributeError; verify the intended fallback.
            if player is None:
                time.sleep(20)
            daozei = player.address
            # A wolf card (1 or 6) among the spares is forced onto the thief.
            if playerList[len(playerList) - 1] is 1 or playerList[len(playerList) - 1] is 6:
                sendMessage(label, daozei, '你可选的牌组中有狼人牌,系统已经自动为您选择狼人牌: ' + identificationDict[playerList[len(playerList) - 1]], 'message')
                sendMessage(label, daozei, '你埋掉的牌是: ' + identificationDict[playerList[len(playerList) - 2]], 'message')
                player.identification = playerList[len(playerList) - 1]
                player.save()
                room.theft = player.position
                room.burycard = playerList[len(playerList) - 2]
                room.voteList = ''
                room.save()
                time.sleep(20)
            elif playerList[len(playerList) - 2] is 1 or playerList[len(playerList) - 2] is 6:
                sendMessage(label, daozei, '你可选的牌组中有狼人牌,系统已经自动为您选择狼人牌: ' + identificationDict[playerList[len(playerList) - 2]], 'message')
                sendMessage(label, daozei, '你埋掉的牌是: ' + identificationDict[playerList[len(playerList) - 1]], 'message')
                player.identification = playerList[len(playerList) - 2]
                player.save()
                room.theft = player.position
                room.burycard = playerList[len(playerList) - 1]
                room.voteList = ''
                room.save()
                time.sleep(20)
            else:
                # Neither spare is a wolf: the thief freely picks card 1 or 2.
                card1 = identificationDict[playerList[len(playerList) - 2]]
                card2 = identificationDict[playerList[len(playerList) - 1]]
                sendMessage(label, daozei, '你可选的牌组中为: ' + card1 + ' ' + card2, 'message')
                time.sleep(10)
                cardnumber, message = processVote(label,0)
                if len(cardnumber) is 0:
                    # No choice made: pick one at random.
                    cardnumber = random.randint(1, 2)
                if int(cardnumber) is 1:
                    sendMessage(label, daozei, '你选的牌是: ' + identificationDict[playerList[len(playerList) - 2]], 'message')
                    sendMessage(label, daozei, '你埋掉的牌是: ' + identificationDict[playerList[len(playerList) - 1]], 'message')
                    player.identification = playerList[len(playerList) - 2]
                    player.save()
                    room.theft = player.position
                    room.burycard = playerList[len(playerList) - 1]
                    room.voteList = ''
                    room.save()
                    time.sleep(10)
                else:
                    sendMessage(label, daozei, '你选的牌是: ' + identificationDict[playerList[len(playerList) - 1]], 'message')
                    sendMessage(label, daozei, '你埋掉的牌是: ' + identificationDict[playerList[len(playerList) - 2]], 'message')
                    player.identification = playerList[len(playerList) - 1]
                    player.save()
                    room.theft = player.position
                    room.burycard = playerList[len(playerList) - 2]
                    room.voteList = ''
                    room.save()
                    time.sleep(10)
            sendGroupMessage(label, '盗贼请闭眼!', 'message13')
            time.sleep(10)
        # Cupid links two lovers on the first night only.
        if 7 in gameStatus and room.jinghui == 1 and len(room.link) is 0:
            sendGroupMessage(label, '丘比特请睁眼!', 'message14')
            sendGroupMessage(label, '丘比特可以两次输入号码,每次请输入一个号码,这两个号码的玩家被连接为情侣!', 'message')
            player = room.players.filter(identification=7).first()
            # NOTE(review): same None-fallthrough pattern as the thief branch.
            if player is None:
                time.sleep(20)
            qiubite = player.address
            time.sleep(15)
            number1, number2 = processLink(label)
            player1 = room.players.filter(position = number1).first()
            player1.link = number2
            player1.save()
            player2 = room.players.filter(position = number2).first()
            player2.link = number1
            player2.save()
            room.link = str(number1) + ',' + str(number2)
            # Mixed wolf/non-wolf lovers form the third team.
            flag1 = player1.identification == 1 or player1.identification == 6
            flag2 = player2.identification == 1 or player2.identification == 6
            if flag1 and flag2:
                room.thirdteam = 0
            elif not flag1 and not flag2:
                room.thirdteam = 0
            else:
                room.thirdteam = 1
            room.save()
            sendMessage(label, qiubite, '您连的两个人是' + str(number1) + ' ' + str(number2), 'message')
            sendMessage(label, player1.address, '您与' + str(number2) + '号玩家被连成情侣', 'message')
            sendMessage(label, player2.address, '您与' + str(number1) + '号玩家被连成情侣', 'message')
            sendGroupMessage(label, '丘比特请闭眼!', 'message15')
            time.sleep(5)
        return 1
    # Phase 1: werewolves choose a victim (stored in room.deadman).
    elif number == 1:
        sendGroupMessage(label, '狼人请睁眼!', 'message3')
        if room.jinghui == 1:
            sendGroupMessage(label, '狼人请确认同伴!', 'message')
            time.sleep(10)
        time.sleep(5)
        room.voteList = ''
        room.save()
        sendGroupMessage(label, '狼人请确认击杀目标!', 'message')
        time.sleep(20)
        deadman, systemInfo = processVote(label, 0)
        if len(deadman) == 0:
            # No kill tonight.
            room.deadman = ''
            room.voteList = ''
            room.save()
            sendGroupMessage(label, '狼人请闭眼!', 'message4')
            time.sleep(10)
            return 2
        temp = deadman.split(',')
        if len(temp) > 1:
            # Tied wolf vote: nobody dies (position 0 is a sentinel).
            deadman = 0
        else:
            deadman = int(deadman)
        room.deadman = '' + str(deadman)
        room.voteList = ''
        room.save()
        sendGroupMessage(label, '狼人请闭眼!', 'message4')
        time.sleep(10)
        return 2
    # Phase 2: the seer checks one player's alignment.
    elif number == 2:
        if 2 not in gameStatus:
            return 4
        sendGroupMessage(label, '预言家请睁眼!', 'message5')
        time.sleep(5)
        room.voteList = ''
        room.save()
        sendGroupMessage(label, '预言家请验人!', 'message')
        time.sleep(20)
        if 2 in gameStatus:
            number = 0
            # for i in range(1, room.playerNumber + 1):
            #     player = room.players.filter(position=i).first()
            #     if player.identification == 2:
            #         number = i
            #         break
            deadman, systemInfo = processVote(label,number)
            if len(deadman) is 0:
                sendGroupMessage(label, '预言家请闭眼!', 'message6')
                time.sleep(10)
                return 4
            deadman = deadman.split(',')
            # Only identification 1 reads as "wolf" here (6 does not).
            if room.players.filter(position=int(deadman[0])).first().identification == 1:
                systemInfo = '您验得人是狼人!'
            else:
                systemInfo = '您验得人是好人!'
            for i in range(1, room.playerNumber + 1):
                player = room.players.filter(position=i).first()
                if player.identification == 2 and player.alive is 1:
                    sendMessage(label,player.address,systemInfo,'message')
                    break
            time.sleep(5)
        sendGroupMessage(label, '预言家请闭眼!', 'message6')
        time.sleep(10)
        return 4
    # Phase 4: the witch may use her single antidote on tonight's victim.
    elif number == 4:
        if 4 not in gameStatus:
            return 6
        sendGroupMessage(label, '女巫请睁眼!', 'message7')
        room.voteList = ''
        room.save()
        if room.jieyao is not 0:
            # Antidote already spent in a previous night.
            time.sleep(15)
            return 5
        nvwu = ''
        number = 0
        for i in range(1, room.playerNumber + 1):
            player = room.players.filter(position=i).first()
            if player.identification == 4 and player.alive is 1:
                nvwu = player.address
                number = i
                break
        if len(nvwu) > 0:
            if room.deadman == '':
                sendMessage(label,nvwu,'今天晚上无人被杀','message')
                time.sleep(5)
                return 5
            sendMessage(label,nvwu,'今天晚上被杀死的人是' + room.deadman + '号玩家,如果使用解药,请输入死者的id','message')
            player_nvwu = room.players.filter(address=nvwu).first()
            pos = 0
            if room.deadman != '':
                pos = int(room.deadman)
            # After night one the witch may not self-heal.
            if pos is player_nvwu.position and room.jinghui is 0:
                sendMessage(label,nvwu,'你无法对自己使用解药','message')
                time.sleep(5)
                return 5
            time.sleep(15)
            jieyao, systemInfo = processVote(label, 0)
            log.debug('jieyao is %s', jieyao)
            if len(jieyao) is 0:
                sendMessage(label,nvwu,'你今晚没有使用解药','message')
                time.sleep(5)
                return 5
            jieyaoList = jieyao.split(',')
            if len(jieyao) > 0:
                room.jieyao = int(jieyaoList[0])
                room.voteList = ''
                room.save()
                sendMessage(label,nvwu,'你对' + str(room.jieyao) + '号玩家使用解药','message')
                time.sleep(15)
            sendGroupMessage(label, '女巫请闭眼!', 'message8')
            time.sleep(5)
            return 6
        else:
            # No living witch: still burn the same amount of time so the
            # night rhythm leaks no information.
            time.sleep(15)
            return 5
    # Phase 5: the witch may use her single poison.
    elif number == 5:
        if 4 not in gameStatus:
            return 6
        room.voteList = ''
        room.save()
        if room.duyao is not 0:
            # Poison already spent.
            time.sleep(15)
            sendGroupMessage(label, '女巫请闭眼!', 'message8')
            time.sleep(5)
            return 6
        nvwu = ''
        number = 0
        for i in range(1, room.playerNumber + 1):
            player = room.players.filter(position=i).first()
            if player.identification == 4 and player.alive is 1:
                nvwu = player.address
                number = i
                break
        if len(nvwu) > 0:
            sendMessage(label,nvwu,'女巫可以选择使用毒药!请输入您想毒死的人的id!','message')
            time.sleep(15)
            duyao, systemInfo = processVote(label,0)
            if len(duyao) is 0:
                sendMessage(label,nvwu,'你今晚没有使用毒药','message')
                time.sleep(5)
                sendGroupMessage(label, '女巫请闭眼!', 'message8')
                time.sleep(5)
                return 6
            duyaoList = duyao.split(',')
            if len(duyao) > 0:
                room.duyao = int(duyaoList[0])
                room.voteList = ''
                room.save()
                sendMessage(label,nvwu,'你对' + str(room.duyao) + '号玩家使用毒药','message')
            sendGroupMessage(label, '女巫请闭眼!', 'message8')
            time.sleep(5)
            return 6
        else:
            time.sleep(15)
            sendGroupMessage(label, '女巫请闭眼!', 'message8')
            time.sleep(5)
            return 6
    # Phase 6: the guard protects one player (not the same one twice in a row).
    elif number == 6:
        if 5 not in gameStatus:
            return 7
        sendGroupMessage(label, '护卫请睁眼', 'message9')
        sendGroupMessage(label, '护卫可以选择您今晚想守卫的对象,注意两晚不能同守一个人!', 'message')
        room.voteList = ''
        room.save()
        huwei = ''
        number = 0
        for i in range(1, room.playerNumber + 1):
            player = room.players.filter(position=i).first()
            if player.identification == 5 and player.alive is 1:
                huwei = player.address
                number = i
                break
        if len(huwei) > 0:
            sendMessage(label,huwei,'请选择您今晚想守护的人!','message')
            time.sleep(20)
            # NOTE(review): `huwei` is rebound from the guard's address to the
            # vote result here, so the sendMessage below targets the vote
            # string, not the guard's channel — verify intent.
            huwei, systemInfo = processVote(label,0)
            if len(huwei) is 0:
                sendMessage(label,huwei,'你今晚没有守人','message')
                time.sleep(5)
                sendGroupMessage(label, '护卫请闭眼!', 'message10')
                time.sleep(5)
                return 7
            huweiList = huwei.split(',')
            if len(huwei) > 0:
                # Guarding the same player as last night cancels the guard.
                if room.shou == int(huweiList[0]):
                    room.shou = 0
                else:
                    room.shou = int(huweiList[0])
                room.voteList = ''
                room.save()
                sendMessage(label,huwei,'你守护' + str(room.shou) + '号玩家','message')
                sendGroupMessage(label, '护卫请闭眼!', 'message10')
                time.sleep(5)
                return 7
            else:
                room.voteList = ''
                room.save()
                sendGroupMessage(label, '护卫请闭眼!', 'message10')
                time.sleep(5)
                return 7
        else:
            time.sleep(15)
            sendGroupMessage(label, '护卫请闭眼!', 'message10')
            time.sleep(5)
            return 7
    # Phase 7: dawn — resolve last night's deaths and update room state.
    elif number == 7:
        sendGroupMessage(label, '天亮了!', 'message2')
        room.daystatus = 1
        room.save()
        systemInfo = '昨天晚上死的人有:'
        deadList = ''
        if len(room.deadman) is 0:
            deadman = 0
        else:
            deadman = int(room.deadman)
        if deadman is not 0:
            # Antidote + guard on the same target cancel out: the victim dies.
            if room.jieyao == deadman and room.shou == deadman:
                if len(deadList) is 0:
                    deadList = '' + str(deadman)
                else:
                    deadList = deadList + ',' + deadman
                # A dead lover drags the linked partner along.
                player = room.players.filter(position=int(deadman)).first()
                if player.link != -1:
                    qinglv = room.players.filter(position=player.link).first()
                    deadList = deadList + ',' + str(player.link)
                room.save()
            elif room.jieyao == deadman or room.shou == deadman:
                # Saved by exactly one of antidote / guard.
                room.deadman = 0
                room.save()
            else:
                if len(deadList) is 0:
                    deadList = '' + str(deadman)
                else:
                    deadList = deadList + ',' + deadman
                player = room.players.filter(position=int(deadman)).first()
                if player.link != -1:
                    qinglv = room.players.filter(position=player.link).first()
                    deadList = deadList + ',' + str(player.link)
                room.save()
        # Poison kills unconditionally (plus linked lover).
        if room.duyao is not 0:
            player = room.players.filter(position=int(room.duyao)).first()
            if player.alive == 1:
                if len(deadList) is 0:
                    deadList = '' + str(room.duyao)
                else:
                    deadList = deadList + ',' + str(room.duyao)
                if player.link != -1:
                    qinglv = room.players.filter(position=player.link).first()
                    deadList = deadList + ',' + str(player.link)
        systemInfo = systemInfo + deadList
        room.deadman = deadList
        if room.jieyao is not 0:
            # Mark the antidote as spent for later nights.
            room.jieyao = -1
        room.info = room.info + ' ' + systemInfo
        room.save()
        # First morning: run the sheriff election inline before deaths apply.
        if room.jinghui == 1:
            room_status(label, 9, gameStatus, playerList)
        executeDeath(label,deadList)
        sendGroupMessage(label, systemInfo, 'message')
        time.sleep(10)
        return 8
    # Phase 8: dead players may pass the sheriff badge / fire death skills.
    elif number == 8:
        room.voteList = ''
        room.save()
        deadList = room.deadman
        if len(deadList) is 0:
            return 10
        else:
            temp = deadList.split(',')
            room.deadman = ''
            for i in temp:
                log.debug('here is the i number:%s',i)
                player = room.players.filter(position=int(i)).first()
                # A dying sheriff gets 20s to hand over (or tear up) the badge.
                if player.jingzhang is 1:
                    room.voteList = ''
                    room.save()
                    sendGroupMessage(label,'警长有20s时间可以传递警徽','message')
                    time.sleep(20)
                    jinghuiList, systemInfo = processVote(label,0)
                    log.debug('jiren is %s', jinghuiList)
                    if len(jinghuiList) > 0:
                        jiren = room.players.filter(position=int(jinghuiList)).first()
                        if jiren.alive is 1:
                            jiren.jingzhang = 1
                            jiren.save()
                            sendGroupMessage(label,jinghuiList + '号玩家成为警长','message')
                    else:
                        sendGroupMessage(label,'警长撕掉警徽','message')
                room.voteList = ''
                room.save()
                sendGroupMessage(label,i +'玩家有20s时间可以发动技能','message')
                time.sleep(20)
                target, systemInfo = processVote(label,0)
                log.debug('target is %s', target)
                # The hunter (3) takes someone with him unless poisoned.
                if player.identification is 3:
                    if target is not '' and int(target) > 0 and int(room.duyao) != player.position:
                        x = room.players.filter(position=int(target)).first()
                        x.alive = 0
                        x.save()
                        sendGroupMessage(label,'猎人发动技能,带走' + target,'message')
                        skill(label, target, 1)
            room.voteList = ''
            room.save()
            sendGroupMessage(label,'遗言阶段,如果结束遗言,可以输入startVote','message')
            # Last-words stage: wait until someone sends startVote.
            yiyan = 0
            while yiyan is 0:
                yiyan, yiyan_test = checkStatus(room.label, '')
                time.sleep(10)
            return 10
    # Phase 9: sheriff election (run inline from phase 7 on the first morning).
    elif number== 9:
        room.voteList = ''
        room.save()
        sendGroupMessage(label,'有二十秒钟竞选警长','message')
        time.sleep(20)
        nameList = processName(label)
        if len(nameList) == 0:
            sendGroupMessage(label,'无人竞选警长,警徽流掉','message')
            room.voteList = ''
            room.jinghui = 0
            room.save()
            return 8
        sendGroupMessage(label,'参选警长的有: ' + str(nameList[0:]),'message')
        sendGroupMessage(label,'如果要开始警长投票,请输入startVote','message')
        status = 0
        while status is 0:
            status, nameList = checkStatus(label, nameList)
            time.sleep(5)
        if status is -1:
            return -1
        elif status is 1:
            return 0
        elif status is 2:
            room.voteList = ''
            room.save()
            sendGroupMessage(label,'仍然在警上的有: ' + str(nameList[0:]),'message')
            sendGroupMessage(label,'开始20s投票','message')
            time.sleep(20)
            output, systemInfo = processVote(label, 0)
            sendGroupMessage(label,systemInfo,'message')
            if len(output) is 0:
                sendGroupMessage(label,'因无人投票,警徽流掉!','message')
                return 8
            nameList = output.split(',')
            if output is not '' and len(nameList) is 1:
                # Clear winner takes the badge.
                room.jinghui = 0
                player = room.players.filter(position=int(nameList[0])).first()
                player.jingzhang = 1
                player.save()
                room.save()
                sendGroupMessage(label,'当选警长的人是: ' + str(nameList[0]),'message')
                return 8
            else:
                # Tie: run PK rounds until a single winner or a give-up.
                count = 0
                while len(nameList) > 1 or len(nameList) is 0:
                    nameList = pkVote(label, nameList, count)
                    count = count + 1
                if len(nameList) is 0:
                    room.jinghui = 0
                    room.save()
                else:
                    room.jinghui = 0
                    player = room.players.filter(position=int(nameList[0])).first()
                    player.jingzhang = 1
                    player.save()
                    sendGroupMessage(label,'当选警长的人是: ' + str(nameList[0]),'message')
                    room.save()
                return 8
    # Phase 10: day speeches, lynch vote, last words, then next night.
    elif number == 10:
        status = 0
        sendGroupMessage(label,'请进行白天的流程,输入startVote进行投票出人环节!','message')
        while status is 0:
            status, test = checkStatus(label, '')
            time.sleep(20)
        if status is -1:
            return -1
        elif status is 1:
            sendGroupMessage(label,'开始下一晚','message')
            return 0
        elif status is 2:
            room.voteList = ''
            room.save()
            sendGroupMessage(label,'开始20s投票','message')
            time.sleep(20)
            output, systemInfo = processVote(label, 0)
            sendGroupMessage(label,systemInfo,'message')
            if len(output) is 0:
                # NOTE(review): `target` is never assigned in this branch —
                # this line raises NameError when nobody votes; verify.
                sendGroupMessage(label,'开始下一晚' + target,'message')
                return 0
            nameList = output.split(',')
            if len(nameList) is 1:
                player = room.players.filter(position=int(nameList[0])).first()
                player.alive = 0
                player.save()
            else:
                # Tie: PK rounds until one loser (or the tie stands -> no lynch).
                count = 0
                while len(nameList) > 1 or len(nameList) is 0:
                    nameList = pkVote(label, nameList, count)
                    count = count + 1
                if len(nameList) is 0:
                    return 0
                else:
                    player = room.players.filter(position=int(nameList[0])).first()
                    player.alive = 0
                    player.save()
            room.voteList = ''
            room.save()
            room.daystatus = 0
            room.save()
            skill(label, int(nameList[0]), 1)
            sendGroupMessage(label,'遗言阶段,如果结束遗言,可以输入startVote','message')
            yiyan = 0
            while yiyan is 0:
                yiyan, yiyan_test = checkStatus(room.label, '')
                time.sleep(10)
            sendGroupMessage(label,'开始下一晚','message')
            return 0
def _reset_room(room):
    """Clear all per-game state on *room* and its players for a fresh game."""
    room.voteList = ''
    room.duyao = 0
    room.jieyao = 0
    room.shou = 0
    room.jinghui = 1
    room.daystatus = 0
    room.deadman = ''
    room.gameStart = 0
    room.link = ''
    room.burycard = -1
    room.theft = -1
    room.thirdteam = 0
    room.info = ''
    room.messages.all().delete()
    room.save()
    for player in room.players.filter().all():
        player.jingzhang = 0
        player.alive = 1
        player.identification = -1
        player.save()
def startGame(label):
    """Run one complete werewolf game for the room *label*.

    Deals identity cards, announces each player's role privately, then
    drives the room_status state machine until judgement() reports a
    winner, and finally resets the room. Intended to run on its own
    thread (it blocks for the whole game).
    """
    try:
        room = Room.objects.get(label=label)
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        sendGroupMessage(label, 'room does not exist!', 'error')
        return
    try:
        room.gameStart = 1
        room.save()
        roleList = room.roleList.split(",")
        # Build the deck of identity cards and the set of active night phases.
        # Roles 0 (villager) and 1 (wolf) are always in play; roles 2-5, 7, 8
        # add their own phase when at least one card of them is dealt.
        playerList = []
        gameStatus = [0, 1]
        for role in range(9):
            cardCount = int(roleList[role])
            playerList.extend([role] * cardCount)
            if role in (2, 3, 4, 5, 7, 8) and cardCount != 0:
                gameStatus.append(role)
        random.shuffle(playerList)
        if int(roleList[8]) != 0:
            # The last two cards are the thief's spares: re-shuffle until at
            # least one of them is NOT a werewolf card (ids 1 and 6).
            while playerList[-1] in (1, 6) and playerList[-2] in (1, 6):
                random.shuffle(playerList)
        # Private role announcement for each seat (identity id -> message).
        roleMessage = {
            0: '您的身份是村民!',
            1: '您的身份是狼人!',
            2: '您的身份是预言家!',
            3: '您的身份是猎人!',
            4: '您的身份是女巫!',
            5: '您的身份是守卫!',
            6: '您的身份是白狼王!',
            7: '您的身份是丘比特!',
            8: '您的身份是盗贼!',
        }
        for i in range(1, room.playerNumber + 1):
            player = room.players.filter(position=i).first()
            player.identification = playerList[i - 1]
            sendMessage(label, player.address, roleMessage[player.identification], 'message')
            player.save()
        sendGroupMessage(label, '身份已经准备就绪!', 'message')
        log.debug('Game Status is %s', str(gameStatus[0:]))
        # Drive the state machine until someone wins or an error stops it.
        roomStatus = 0
        while judgement(label) == 0:
            log.debug('房间现在的状态是%d', roomStatus)
            roomStatus = room_status(label, roomStatus, gameStatus, playerList)
            if roomStatus == -1:
                sendGroupMessage(label, '错误发生,或者测试结束!', 'message')
                break
        if judgement(label) == 1:
            sendGroupMessage(label, '狼人获胜!', 'message')
        elif judgement(label) == 2:
            sendGroupMessage(label, '好人获胜!', 'message')
        else:
            sendGroupMessage(label, '第三阵营获胜!', 'message')
        _reset_room(room)
        return
    except Exception:
        # BUG FIX: the original swallowed the exception silently, making
        # game-loop crashes impossible to diagnose; log it before the reset.
        log.exception('startGame failed for room label=%s', label)
        _reset_room(room)
        return
@channel_session
def ws_connect(message):
    """Accept a websocket connection at ``/chat/{label}/``.

    Adds the reply channel to the room's broadcast group and remembers the
    room label in the channel session; silently drops the connection when
    the path is malformed, the room is unknown, or the room is full.
    """
    # Extract the room from the message. This expects message.path to be of the
    # form /chat/{label}/, and finds a Room if the message path is applicable,
    # and if the Room exists. Otherwise, bails (meaning this is a some othersort
    # of websocket). So, this is effectively a version of _get_object_or_404.
    try:
        prefix, label = message['path'].decode('ascii').strip('/').split('/')
        if prefix != 'chat':
            log.debug('invalid ws path=%s', message['path'])
            return
        room = Room.objects.get(label=label)
    except ValueError:
        # strip/split didn't yield exactly two components
        log.debug('invalid ws path=%s', message['path'])
        return
    except Room.DoesNotExist:
        log.debug('ws room does not exist label=%s', label)
        return
    log.debug('chat connect room=%s client=%s:%s',
        room.label, message['client'][0], message['client'][1])
    # Refuse the connection when every seat already has a live connection.
    occupied = len(room.players.filter(connection=True).all())
    if room.playerNumber == occupied:
        log.debug('room is full')
        return
    # Need to be explicit about the channel layer so that testability works
    # This may be a FIXME?
    Group('chat-'+label).add(message.reply_channel)
    message.channel_session['room'] = room.label
@channel_session
def ws_receive(message):
    """Handle one inbound websocket frame for a chat room.

    Performs join/reconnect bookkeeping for the sending player, then
    dispatches on the JSON payload's ``typo`` field: startGame / Vote /
    bloom / identification / judgement / startVote. Frames that are not
    valid JSON or lack the expected keys are logged and dropped.
    """
    # Look up the room from the channel session, bailing if it doesn't exist
    try:
        label = message.channel_session['room']
        room = Room.objects.get(label=label)
    except KeyError:
        log.debug('no room in channel_session')
        return
    except Room.DoesNotExist:
        log.debug('recieved message, buy room does not exist label=%s', label)
        return
    # Parse out a chat message from the content text, bailing if it doesn't
    # conform to the expected message format.
    try:
        data = json.loads(message['text'])
    except ValueError:
        # BUG FIX: the original logged an undefined name `text` here, which
        # raised NameError whenever a client sent a non-JSON frame.
        log.debug("ws message isn't json text=%s", message['text'])
        return
    if set(data.keys()) != set(('handle', 'message', 'typo')):
        log.debug("ws message unexpected format data=%s", data)
        return
    if data:
        player = None
        try:
            player = room.players.filter(position=data['handle']).first()
        except ValueError:
            log.debug("something is wrong")
        if player is not None:
            if player.address != message.reply_channel.name and player.connection == True:
                log.debug("this room's position has been occupied by another guy")
                sendMessage(room.label, message.reply_channel.name, "this room's position has been occupied by another guy", 'error')
            elif player.connection == False:
                # Reconnect: rebind the seat to the new reply channel.
                player.address = message.reply_channel.name
                player.connection = True
                player.save()
                sendGroupMessage(label,data['handle'] + '号玩家重连成功!','message')
        elif data['handle'] != 0:
            # First join: create the seat for this position.
            room.players.create(position=data['handle'],address=message.reply_channel.name)
            sendGroupMessage(label,data['handle'] + '号玩家进入房间','message')
        log.debug('chat message room=%s handle=%s message=%s',
            room.label, data['handle'], data['message'])
        if data['typo'] == 'startGame':
            players = room.players.all()
            room.currentNumber = len(players)
            room.save()
            if room.currentNumber < room.playerNumber:
                sendMessage(room.label, message.reply_channel.name, noEnoughPeople, 'error')
            elif room.gameStart == 1:
                sendMessage(room.label, message.reply_channel.name, gameHasStarted, 'error')
            elif room.players.all().count() < room.playerNumber:
                sendMessage(room.label, message.reply_channel.name, notReady, 'error')
            else:
                sendGroupMessage(room.label, '游戏开始!', 'message11')
                # Run the blocking game loop off the consumer thread.
                t = threading.Thread(target=startGame, args=(label,))
                t.start()
        elif data['typo'] == 'Vote':
            sendMessage(room.label, message.reply_channel.name, voteInfo + data['message'].decode('utf8'), 'message')
            # Register a per-player keepalive thread (one per room seat).
            m = threading.Thread(target=keepalive, args=(label,message.reply_channel.name,'保持连接','message'))
            thread_name = str(room.label) + '-' + str(data['handle'])
            if thread_name not in thread_pool:
                thread_pool[thread_name] = m
            m.start()
            # Append "position,choice" to the room's comma-separated vote list.
            voteList = room.voteList
            if len(voteList) == 0:
                room.voteList = room.voteList + data['handle'] + ',' + data['message']
                room.save()
            else:
                room.voteList = room.voteList + ',' + data['handle'] + ',' + data['message']
                room.save()
        elif data['typo'] == 'bloom':
            if room.gameStart == 0:
                sendMessage(room.label, message.reply_channel.name, gameNotStarted, 'error')
            elif room.daystatus == 0:
                sendMessage(room.label, message.reply_channel.name, nighterror, 'error')
            else:
                if len(room.voteList) == 0:
                    room.voteList = room.voteList + data['handle'] + ',' + 'bloom'
                    room.save()
                else:
                    room.voteList = room.voteList + ',' + data['handle'] + ',' + 'bloom'
                    room.save()
        elif data['typo'] == 'identification':
            if room.gameStart == 0:
                sendMessage(room.label, message.reply_channel.name, gameNotStarted, 'error')
            else:
                player = room.players.filter(position=data['handle']).first()
                sendMessage(room.label, message.reply_channel.name, identification + identificationDict[player.identification], 'message')
        elif data['typo'] == 'judgement':
            if room.gameStart == 0:
                sendMessage(room.label, message.reply_channel.name, gameNotStarted, 'error')
            else:
                player = room.players.filter(position=data['handle']).first()
                if player.alive == 1:
                    sendMessage(room.label, message.reply_channel.name, '您在游戏中的角色还活着,无法成为法官', 'error')
                else:
                    judgementView(room.label, message.reply_channel.name)
        elif data['typo'] == 'startVote':
            if room.gameStart == 0:
                sendMessage(room.label, message.reply_channel.name, gameNotStarted, 'error')
            else:
                if len(room.voteList) == 0:
                    room.voteList = room.voteList + data['handle'] + ',' + 'startVote'
                    room.save()
                else:
                    room.voteList = room.voteList + ',' + data['handle'] + ',' + 'startVote'
                    room.save()
    #m = room.messages.create(**data)
    # See above for the note about Group
    #Group('chat-'+label).send({'text': json.dumps(m.as_dict())})
@channel_session
def ws_disconnect(message):
    """Tear down one websocket connection.

    Removes the channel from the room's broadcast group, marks the player's
    seat as disconnected, drops the seat's keepalive thread entry, and
    broadcasts the disconnect to the room.
    """
    try:
        label = message.channel_session['room']
        room = Room.objects.get(label=label)
        Group('chat-'+label).discard(message.reply_channel)
        player = room.players.filter(address=message.reply_channel.name).first()
        if player is not None:
            player.connection = False
            player.save()
            name = str(room.label) + '-' + str(player.position)
            # BUG FIX: a player who never voted has no keepalive thread in
            # thread_pool, so a bare pop(name) raised KeyError — which the
            # except clause below swallowed, silently skipping the
            # disconnect broadcast. pop with a default is safe either way.
            thread_pool.pop(name, None)
            sendGroupMessage(label,str(player.position) + '号玩家断线','message')
    except (KeyError, Room.DoesNotExist):
        pass
| hanchen999/werewolf-with-django-on-heroku | chat/consumers.py | Python | bsd-3-clause | 54,092 |
#!/usr/bin/env python
# Packaging script for the nlp_rake distribution (setuptools-based).
from setuptools import setup
# The package lives at the repository root, so package_dir maps it to './';
# the data/ directory is bundled via package_data + include_package_data.
setup(name='nlp_rake',
      version='1.0',
      description='Rapid Automatic Keyword Extraction (RAKE) algorithm',
      long_description='A Python implementation of the Rapid Automatic Keyword Extraction (RAKE) algorithm as described in: Rose, S., Engel, D., Cramer, N., & Cowley, W. (2010). Automatic Keyword Extraction from Individual Documents. In M. W. Berry & J. Kogan (Eds.), Text Mining: Theory and Applications: John Wiley & Sons.',
      author='zelandiya, aneesha',
      # NOTE(review): this URL is the distutils-sig placeholder from the
      # packaging docs, not the project homepage — confirm the intended URL.
      url='https://www.python.org/sigs/distutils-sig/',
      packages=['nlp_rake'],
      package_dir={'nlp_rake': './'},
      package_data={'nlp_rake': ['data/']},
      include_package_data = True,
      )
| zelandiya/RAKE-tutorial | setup.py | Python | mit | 741 |
import PLConstants as CONSTS
import os
def aggregate(folder, repoName, outFile):
    """Count Puppet-lint rule violations in *folder* and append one CSV row.

    The row is ``repoName,count1,...,count13`` — one column per rule — and
    is written even when no lint output file exists (all-zero counts),
    matching the original behaviour.

    :param folder:   directory that may contain a Puppet-lint output file
    :param repoName: repository identifier, first CSV column
    :param outFile:  writable file object receiving the row
    """
    # One counter function per rule, in CSV column order.
    counters = [getrule1_count, getrule2_count, getrule3_count,
                getrule4_count, getrule5_count, getrule6_count,
                getrule7_count, getrule8_count, getrule9_count,
                getrule10_count, getrule11_count, getrule12_count,
                getrule13_count]
    counts = [0] * len(counters)
    # Renamed from `file` to avoid shadowing the builtin.
    lint_file = getPuppetLintFile(folder)
    if lint_file:
        with open(os.path.join(folder, lint_file), 'rt', errors='ignore') as curFile:
            for line in curFile:
                counts = [fn(c, line) for fn, c in zip(counters, counts)]
    outFile.write(repoName + "," + ",".join(str(c) for c in counts))
    outFile.write("\n")
def getPuppetLintFile(folder):
    """Return the first file in *folder* ending with the Puppet-lint output
    suffix, or '' when none is found."""
    matches = [entry for entry in os.listdir(folder)
               if entry.endswith(CONSTS.PUPPETLINT_OUT_FILE)]
    return matches[0] if matches else ""
def getrule1_count(count, line):
    """Return *count*, incremented if the rule-1 marker occurs in *line*."""
    # `in` is the idiomatic (and equivalent) form of `line.find(x) >= 0`.
    if CONSTS.RULE1_1 in line:
        count += 1
    return count
def getrule2_count(count, line):
    """Return *count* plus 1 for each rule-2 marker present in *line*."""
    for marker in (CONSTS.RULE2_1, CONSTS.RULE2_2):
        if marker in line:
            count += 1
    return count
def getrule3_count(count, line):
    """Return *count*, incremented if the rule-3 marker occurs in *line*."""
    if CONSTS.RULE3_1 in line:
        count += 1
    return count
def getrule4_count(count, line):
    """Return *count*, incremented if the rule-4 marker occurs in *line*."""
    if CONSTS.RULE4_1 in line:
        count += 1
    return count
def getrule5_count(count, line):
    """Return *count* plus 1 for each rule-5 marker present in *line*."""
    for marker in (CONSTS.RULE5_1, CONSTS.RULE5_2):
        if marker in line:
            count += 1
    return count
def getrule6_count(count, line):
    """Return *count* plus 1 for each rule-6 marker present in *line*."""
    for marker in (CONSTS.RULE6_1, CONSTS.RULE6_2, CONSTS.RULE6_3, CONSTS.RULE6_4):
        if marker in line:
            count += 1
    return count
def getrule7_count(count, line):
    """Return *count* plus 1 for each rule-7 marker present in *line*."""
    for marker in (CONSTS.RULE7_1, CONSTS.RULE7_2, CONSTS.RULE7_3):
        if marker in line:
            count += 1
    return count
def getrule8_count(count, line):
    """Return *count*, incremented if the rule-8 marker occurs in *line*."""
    if CONSTS.RULE8_1 in line:
        count += 1
    return count
def getrule9_count(count, line):
    """Return *count*, incremented if the rule-9 marker occurs in *line*."""
    if CONSTS.RULE9_1 in line:
        count += 1
    return count
def getrule10_count(count, line):
    """Return *count* plus 1 for each rule-10 marker present in *line*."""
    for marker in (CONSTS.RULE10_1, CONSTS.RULE10_2, CONSTS.RULE10_3,
                   CONSTS.RULE10_4, CONSTS.RULE10_5, CONSTS.RULE10_6):
        if marker in line:
            count += 1
    return count
def getrule11_count(count, line):
    """Return *count*, incremented if the rule-11 marker occurs in *line*."""
    if CONSTS.RULE11_1 in line:
        count += 1
    return count
def getrule12_count(count, line):
    """Return *count*, incremented if the rule-12 marker occurs in *line*."""
    if CONSTS.RULE12_1 in line:
        count += 1
    return count
def getrule13_count(count, line):
    """Return *count*, incremented if the rule-13 marker occurs in *line*."""
    if CONSTS.RULE13_1 in line:
        count += 1
    return count
| tushartushar/Puppeteer | Puppet-lint_aggregator/Aggregator.py | Python | apache-2.0 | 4,527 |
#-*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011-2017 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : contact@netzob.org |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| File contributors : |
#| - Georges Bossert <georges.bossert (a) supelec.fr> |
#| - Frédéric Guihéry <frederic.guihery (a) amossys.fr> |
#+---------------------------------------------------------------------------+
from common.NetzobTestCase import NetzobTestCase
import binascii
from netzob.all import *
import logging
class test_USBMouseProtocol(NetzobTestCase):
    """End-to-end inference demo: derive the field layout of USB mouse
    traffic from a list of captured payloads."""
    def test_inferUSBMouseProtocol(self):
        """This method illustrates the very short script which
        allows to give some insights on the over USB protocol used
        by a traditionnal mouse."""
        # Put samples in an array
        # NOTE(review): each capture below is 6 bytes (12 hex chars);
        # presumably a USB HID mouse report (buttons + movement deltas) --
        # confirm against the original capture.
        samples = [
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00ff0f000000",
            "000010000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "000010000000",
            "00ff0f000000",
            "000010000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00fe0f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "000010000000",
            "00ff0f000000",
            "000010000000",
            "00fe0f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "000010000000",
            "00fe1f000000",
            "00ff2f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fd2f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "000010000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "000010000000",
            "00fe1f000000",
            "00ff0f000000",
            "00fe2f000000",
            "00fe2f000000",
            "00fd2f000000",
            "00ff2f000000",
            "00ff1f000000",
            "00fe0f000000",
            "000010000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff2f000000",
            "00ff0f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "000010000000",
            "00fe1f000000",
            "000010000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff0f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fe2f000000",
            "00fd2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe0f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fd1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fd1f000000",
            "00fe2f000000",
            "00fd1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff0f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff2f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff2f000000",
            "00ff1f000000",
            "00ff1f000000",
            "000020000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "000010000000",
            "00fe1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00ff2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff2f000000",
            "00ff1f000000",
            "00fd1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00fe1f000000",
            "000010000000",
            "00ff1f000000",
            "00ff2f000000",
            "00fe2f000000",
            "00fe1f000000",
            "00ff2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe2f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00fe2f000000",
            "00fc2f000000",
            "00fe1f000000",
            "00fd2f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00fe0f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fe1f000000",
            "000010000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00fd2f000000",
            "00fe1f000000",
            "00fe1f000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff0f000000",
            "00ff1f000000",
            "00fe1f000000",
            "00fc2f000000",
            "00fb2f000000",
            "00fb2f000000",
            "00fa4f000000",
            "00f92f000000",
            "00fa3f000000",
            "00fe1f000000",
            "000010000000",
            "00ff1f000000",
            "00fd3f000000",
            "00fd3f000000",
            "00fe2f000000",
            "00fd2f000000",
            "00ff2f000000",
            "00fe0f000000",
            "000010000000",
            "000010000000",
            "00ff1f000000",
            "000010000000",
            "00ff1f000000",
            "00ff2f000000",
            "000010000000",
            "00fe1f000000",
            "000020000000",
            "00ff1f000000",
            "00ff1f000000",
            "00ff2f000000",
            "00fe1f000000",
            "00ff2f000000"
        ]
        # Create a message for each data
        messages = [RawMessage(data=binascii.unhexlify(sample)) for sample in samples]
        # Create a symbol to represent all the messages
        initialSymbol = Symbol(messages=messages)
        # Split following the value
        # (mergeAdjacentDynamicFields=False keeps each varying column as
        # its own field instead of fusing neighbours)
        Format.splitStatic(initialSymbol, mergeAdjacentDynamicFields=False)
        # Render the inferred fields as hex strings when the symbol is logged.
        initialSymbol.addEncodingFunction(TypeEncodingFunction(HexaString))
        logging.debug(initialSymbol)
| lootr/netzob | netzob/test/src/test_netzob/test_Tutorials/test_USBMouseProtocol.py | Python | gpl-3.0 | 9,373 |
import logging
from sqlalchemy import event
from radar.api import views
from radar.api.auth import force_password_change, require_login
from radar.api.auth import set_cors_headers
from radar.api.debug import debug_before_request, debug_teardown_request
from radar.api.logs import log_request
from radar.app import Radar
from radar.auth.sessions import current_user, refresh_token
from radar.database import db
class RadarAPI(Radar):
    """Radar Flask application configured for the HTTP API.

    Installs request hooks (auth/CORS/logging), a SQLAlchemy flush listener
    that records the acting user for database auditing, and the API views.
    Endpoints registered through add_public_endpoint bypass the login check.
    """
    def __init__(self, *args, **kwargs):
        super(RadarAPI, self).__init__(*args, **kwargs)
        # Endpoint names that may be reached without authentication.
        self.public_endpoints = []
        @event.listens_for(db.session, 'before_flush')
        def before_flush(session, flush_context, instances):
            if current_user.is_authenticated():
                user_id = current_user.id
                # Set the user_id for use by the log_changes trigger
                # SET LOCAL lasts until the end of the current transaction
                # http://www.postgresql.org/docs/9.4/static/sql-set.html
                session.execute('SET LOCAL radar.user_id = :user_id', dict(user_id=user_id))
        if self.debug:
            # Debug mode: permissive CORS, extra request instrumentation.
            self.before_request(debug_before_request)
            self.after_request(set_cors_headers)
            self.teardown_request(debug_teardown_request)
        else:
            # Production mode: log to stderr and enforce authentication.
            stream_handler = logging.StreamHandler()
            stream_handler.setLevel(logging.INFO)
            self.logger.addHandler(stream_handler)
            # Authentication / session hooks (see radar.api.auth).
            self.before_request(require_login)
            self.before_request(force_password_change)
            self.after_request(refresh_token)
            self.after_request(log_request)
        # Register all API routes on this app.
        views.setup(self)
    def add_public_endpoint(self, endpoint):
        # Mark an endpoint as reachable without login.
        self.public_endpoints.append(endpoint)
    def is_public_endpoint(self, endpoint):
        # True when the endpoint was registered via add_public_endpoint.
        return endpoint in self.public_endpoints
| renalreg/radar | radar/api/app.py | Python | agpl-3.0 | 1,888 |
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from contextlib import contextmanager
from functools import wraps
from netman.core.objects.backward_compatible_switch_operations import BackwardCompatibleSwitchOperations
def not_implemented(func):
    """Decorator replacing *func* with a stub that raises NotImplementedError.

    Used to mark SwitchOperations methods that a concrete switch adapter
    does not (yet) support; the original function body is never executed.
    """
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        # func.__name__ works on both Python 2 and 3; the previous
        # func.func_name attribute existed only on Python 2 and made the
        # stub raise AttributeError instead of NotImplementedError on 3.
        raise NotImplementedError("'{}' is not implemented".format(func.__name__))
    return func_wrapper
class SwitchOperations(BackwardCompatibleSwitchOperations):
    """Full interface of operations a network-switch adapter may support.

    Every method here is wrapped with @not_implemented and raises
    NotImplementedError when called; concrete adapter classes override the
    subset of operations their switch model actually supports.
    """
    @not_implemented
    def connect(self):
        pass
    @not_implemented
    def disconnect(self):
        pass
    @not_implemented
    def transaction(self):
        pass
    @not_implemented
    def start_transaction(self):
        pass
    @not_implemented
    def commit_transaction(self):
        pass
    @not_implemented
    def rollback_transaction(self):
        pass
    @not_implemented
    def end_transaction(self):
        pass
    @not_implemented
    def get_vlan(self, number):
        pass
    @not_implemented
    def get_vlans(self):
        pass
    @not_implemented
    def add_vlan(self, number, name=None):
        pass
    @not_implemented
    def remove_vlan(self, number):
        pass
    @not_implemented
    def get_interface(self, interface_id):
        pass
    @not_implemented
    def get_interfaces(self):
        pass
    @not_implemented
    def set_access_vlan(self, interface_id, vlan):
        pass
    @not_implemented
    def unset_interface_access_vlan(self, interface_id):
        pass
    @not_implemented
    def set_access_mode(self, interface_id):
        pass
    @not_implemented
    def set_trunk_mode(self, interface_id):
        pass
    @not_implemented
    def add_trunk_vlan(self, interface_id, vlan):
        pass
    @not_implemented
    def remove_trunk_vlan(self, interface_id, vlan):
        pass
    @not_implemented
    def set_interface_state(self, interface_id, state):
        pass
    @not_implemented
    def unset_interface_state(self, interface_id):
        pass
    @not_implemented
    def set_interface_auto_negotiation_state(self, interface_id, negotiation_state):
        pass
    @not_implemented
    def unset_interface_auto_negotiation_state(self, interface_id):
        pass
    @not_implemented
    def set_interface_native_vlan(self, interface_id, vlan):
        pass
    @not_implemented
    def unset_interface_native_vlan(self, interface_id):
        pass
    @not_implemented
    def reset_interface(self, interface_id):
        pass
    @not_implemented
    def get_vlan_interfaces(self, vlan_number):
        pass
    @not_implemented
    def add_ip_to_vlan(self, vlan_number, ip_network):
        pass
    @not_implemented
    def remove_ip_from_vlan(self, vlan_number, ip_network):
        pass
    @not_implemented
    def set_vlan_access_group(self, vlan_number, direction, name):
        pass
    @not_implemented
    def unset_vlan_access_group(self, vlan_number, direction):
        pass
    @not_implemented
    def set_vlan_vrf(self, vlan_number, vrf_name):
        pass
    @not_implemented
    def unset_vlan_vrf(self, vlan_number):
        pass
    @not_implemented
    def set_interface_description(self, interface_id, description):
        pass
    @not_implemented
    def unset_interface_description(self, interface_id):
        pass
    @not_implemented
    def edit_interface_spanning_tree(self, interface_id, edge=None):
        pass
    @not_implemented
    def add_bond(self, number):
        pass
    @not_implemented
    def remove_bond(self, number):
        pass
    @not_implemented
    def get_bond(self, number):
        pass
    @not_implemented
    def get_bonds(self):
        pass
    @not_implemented
    def add_interface_to_bond(self, interface, bond_number):
        pass
    @not_implemented
    def remove_interface_from_bond(self, interface):
        pass
    @not_implemented
    def set_bond_link_speed(self, number, speed):
        pass
    @not_implemented
    def set_bond_description(self, number, description):
        pass
    @not_implemented
    def unset_bond_description(self, number):
        pass
    @not_implemented
    def set_bond_trunk_mode(self, number):
        pass
    @not_implemented
    def set_bond_access_mode(self, number):
        pass
    @not_implemented
    def add_bond_trunk_vlan(self, number, vlan):
        pass
    @not_implemented
    def remove_bond_trunk_vlan(self, number, vlan):
        pass
    @not_implemented
    def set_bond_native_vlan(self, number, vlan):
        pass
    @not_implemented
    def unset_bond_native_vlan(self, number):
        pass
    @not_implemented
    def edit_bond_spanning_tree(self, number, edge=None):
        pass
    @not_implemented
    def add_vrrp_group(self, vlan_number, group_id, ips=None, priority=None, hello_interval=None, dead_interval=None,
                       track_id=None, track_decrement=None):
        pass
    @not_implemented
    def remove_vrrp_group(self, vlan_id, group_id):
        pass
    @not_implemented
    def add_dhcp_relay_server(self, vlan_number, ip_address):
        pass
    @not_implemented
    def remove_dhcp_relay_server(self, vlan_number, ip_address):
        pass
    @not_implemented
    def set_interface_lldp_state(self, interface_id, enabled):
        pass
    @not_implemented
    def set_vlan_arp_routing_state(self, vlan_number, state):
        pass
    @not_implemented
    def set_vlan_icmp_redirects_state(self, vlan_number, state):
        pass
    @not_implemented
    def set_vlan_unicast_rpf_mode(self, vlan_number, mode):
        pass
    @not_implemented
    def unset_vlan_unicast_rpf_mode(self, vlan_number):
        pass
    @not_implemented
    def get_versions(self):
        pass
    @not_implemented
    def set_interface_mtu(self, interface_id, size):
        pass
    @not_implemented
    def unset_interface_mtu(self, interface_id):
        pass
    @not_implemented
    def set_bond_mtu(self, number, size):
        pass
    @not_implemented
    def unset_bond_mtu(self, number):
        pass
    @not_implemented
    def set_vlan_ntp_state(self, vlan_number, state):
        pass
    @not_implemented
    def add_vlan_varp_ip(self, vlan_number, ip_network):
        pass
    @not_implemented
    def remove_vlan_varp_ip(self, vlan_number, ip_network):
        pass
    @not_implemented
    def set_vlan_load_interval(self, vlan_number, time_interval):
        pass
    @not_implemented
    def unset_vlan_load_interval(self, vlan_number):
        pass
    @not_implemented
    def set_vlan_mpls_ip_state(self, vlan_number, state):
        pass
    @not_implemented
    def get_mac_addresses(self):
        pass
class SwitchBase(SwitchOperations):
    """Base class for concrete switch adapters.

    Tracks connection and transaction state around the adapter-provided
    _connect/_disconnect/_start_transaction/_end_transaction hooks, and
    offers a `transaction` context manager with commit/rollback semantics.
    """
    def __init__(self, switch_descriptor):
        # switch_descriptor carries at least a `hostname` attribute, used
        # to build a per-device logger name.
        self.switch_descriptor = switch_descriptor
        self.logger = logging.getLogger("{module}.{hostname}".format(module=self.__module__, hostname=self.switch_descriptor.hostname))
        self.connected = False
        self.in_transaction = False
    def connect(self):
        # Delegate to the adapter, then record the connected state.
        self._connect()
        self.connected = True
    def disconnect(self):
        self._disconnect()
        self.connected = False
    def start_transaction(self):
        self._start_transaction()
        self.in_transaction = True
    def end_transaction(self):
        self._end_transaction()
        self.in_transaction = False
    def _connect(self):
        """
        Adapters should implement this rather than connect
        """
        raise NotImplementedError()
    def _disconnect(self):
        """
        Adapters should implement this rather than disconnect
        """
        raise NotImplementedError()
    def _start_transaction(self):
        """
        Adapters should implement this rather than start_transaction
        """
        raise NotImplementedError()
    def _end_transaction(self):
        """
        Adapters should implement this rather than end_transaction
        """
        raise NotImplementedError()
    @contextmanager
    def transaction(self):
        """Context manager: commit on success, log + rollback and re-raise
        on failure, and always close the transaction afterwards."""
        self.start_transaction()
        try:
            yield self
            self.commit_transaction()
        except Exception as e:
            if self.logger:
                self.logger.exception(e)
            self.rollback_transaction()
            raise
        finally:
            # Runs whether we committed or rolled back.
            self.end_transaction()
| internap/netman | netman/core/objects/switch_base.py | Python | apache-2.0 | 8,954 |
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example adds campaigns.
To get campaigns, run get_campaigns.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
import datetime
import uuid
from googleads import adwords
def main(client):
    """Create a shared budget plus a paused Search and Display campaign.

    Args:
      client: an initialized AdWordsClient used to reach the API services.
    """
    # Retrieve the services used below.
    campaign_service = client.GetService('CampaignService', version='v201809')
    budget_service = client.GetService('BudgetService', version='v201809')
    # A budget may be shared by multiple campaigns, so create it first.
    budget_operations = [{
        'operator': 'ADD',
        'operand': {
            'name': 'Interplanetary budget #%s' % uuid.uuid4(),
            'amount': {
                'microAmount': '50000000'
            },
            'deliveryMethod': 'STANDARD'
        }
    }]
    # Only the id is needed to reference the budget from the campaigns.
    budget_id = budget_service.mutate(budget_operations)['value'][0]['budgetId']
    # Search-network campaign.
    search_campaign = {
        'name': 'Interplanetary Cruise #%s' % uuid.uuid4(),
        # Recommendation: Set the campaign to PAUSED when creating it to
        # stop the ads from immediately serving. Set to ENABLED once you've
        # added targeting and the ads are ready to serve.
        'status': 'PAUSED',
        'advertisingChannelType': 'SEARCH',
        'biddingStrategyConfiguration': {
            'biddingStrategyType': 'MANUAL_CPC',
        },
        'endDate': (datetime.datetime.now() +
                    datetime.timedelta(365)).strftime('%Y%m%d'),
        # Note that only the budgetId is required.
        'budget': {
            'budgetId': budget_id
        },
        'networkSetting': {
            'targetGoogleSearch': 'true',
            'targetSearchNetwork': 'true',
            'targetContentNetwork': 'false',
            'targetPartnerSearchNetwork': 'false'
        },
        # Optional fields.
        'startDate': (datetime.datetime.now() +
                      datetime.timedelta(1)).strftime('%Y%m%d'),
        'frequencyCap': {
            'impressions': '5',
            'timeUnit': 'DAY',
            'level': 'ADGROUP'
        },
        'settings': [
            {
                'xsi_type': 'GeoTargetTypeSetting',
                'positiveGeoTargetType': 'DONT_CARE',
                'negativeGeoTargetType': 'DONT_CARE'
            }
        ]
    }
    # Display-network banner campaign sharing the same budget.
    display_campaign = {
        'name': 'Interplanetary Cruise banner #%s' % uuid.uuid4(),
        'status': 'PAUSED',
        'biddingStrategyConfiguration': {
            'biddingStrategyType': 'MANUAL_CPC'
        },
        'endDate': (datetime.datetime.now() +
                    datetime.timedelta(365)).strftime('%Y%m%d'),
        'budget': {
            'budgetId': budget_id
        },
        'advertisingChannelType': 'DISPLAY'
    }
    operations = [
        {'operator': 'ADD', 'operand': search_campaign},
        {'operator': 'ADD', 'operand': display_campaign},
    ]
    campaigns = campaign_service.mutate(operations)
    # Display results.
    for campaign in campaigns['value']:
        print('Campaign with name "%s" and id "%s" was added.'
              % (campaign['name'], campaign['id']))
if __name__ == '__main__':
    # Load OAuth2 credentials and properties from the googleads.yaml file.
    client = adwords.AdWordsClient.LoadFromStorage()
    main(client)
| googleads/googleads-python-lib | examples/adwords/v201809/basic_operations/add_campaigns.py | Python | apache-2.0 | 4,112 |
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2016-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .tektronixMSO4000 import *
class tektronixMSO4032(tektronixMSO4000):
    "Tektronix MSO4032 IVI oscilloscope driver"
    def __init__(self, *args, **kwargs):
        # setdefault: fill in the model ID only if a subclass has not
        # already set one, and do it before the base-class __init__ runs
        # (which presumably consults _instrument_id -- confirm in base).
        self.__dict__.setdefault('_instrument_id', 'MSO4032')
        super(tektronixMSO4032, self).__init__(*args, **kwargs)
        # MSO4032 hardware: 2 analog channels, 16 digital channels,
        # 350 MHz analog bandwidth.
        self._analog_channel_count = 2
        self._digital_channel_count = 16
        self._bandwidth = 350e6
        # _init_channels is not defined here (inherited); re-run it after
        # updating the channel counts above.
        self._init_channels()
| python-ivi/python-ivi | ivi/tektronix/tektronixMSO4032.py | Python | mit | 1,560 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from heat.db.sqlalchemy.types import LongText
from heat.db.sqlalchemy.types import Json
from sqlalchemy import types
from sqlalchemy.dialects.mysql.base import MySQLDialect
from sqlalchemy.dialects.sqlite.base import SQLiteDialect
class LongTextTest(testtools.TestCase):
    """Exercise dialect-specific implementation selection for LongText."""
    def setUp(self):
        super(LongTextTest, self).setUp()
        self.sqltype = LongText()
    def test_load_dialect_impl(self):
        # MySQL gets a dialect-specific implementation, not plain Text...
        mysql_impl = self.sqltype.load_dialect_impl(MySQLDialect())
        self.assertNotEqual(types.Text, type(mysql_impl))
        # ...while SQLite falls back to the generic Text type.
        sqlite_impl = self.sqltype.load_dialect_impl(SQLiteDialect())
        self.assertEqual(types.Text, type(sqlite_impl))
class JsonTest(testtools.TestCase):
    """Exercise serialization round-tripping for the Json column type."""
    def setUp(self):
        super(JsonTest, self).setUp()
        self.sqltype = Json()
    def test_process_bind_param(self):
        # Python dict -> JSON string on the way into the database.
        self.assertEqual(
            '{"foo": "bar"}',
            self.sqltype.process_bind_param({'foo': 'bar'}, None))
    def test_process_result_value(self):
        # JSON string -> Python dict on the way out of the database.
        self.assertEqual(
            {'foo': 'bar'},
            self.sqltype.process_result_value('{"foo": "bar"}', None))
| ntt-sic/heat | heat/tests/test_sqlalchemy_types.py | Python | apache-2.0 | 1,833 |
import json
from django.conf.urls import url
from django.contrib.auth import user_logged_in, logout
from django.contrib.auth.models import User
from django.http import HttpResponse
from tastypie import fields, http
from tastypie.authentication import MultiAuthentication, SessionAuthentication, ApiKeyAuthentication, Authentication
from tastypie.authorization import Authorization
from tastypie.exceptions import Unauthorized, BadRequest
from tastypie.http import HttpUnauthorized, HttpBadRequest
from tastypie.models import ApiKey
from tastypie.utils import dict_strip_unicode_keys
from account.forms import ChangePasswordForm, SetPasswordForm
from account.models import EmailConfirmation
from account.utils import send_reset_password_email
from userprofile.models import AvatarPhoto
from userprofile.utils import check_user_profile, handle_user_profile
from socialaccount.models import GOOGLE, SocialApp, FACEBOOK, SocialToken
from socialaccount.utils import GoogleAdapter, FacebookAdapter
from ..utils import check_user
from .base import MyBaseResource
class UserAuthorization(Authorization):
    """Authorization that only lets a user read their own User record."""
    def base_checks(self, request, model_klass):
        # Anything without a Django-model-style ``_meta`` cannot be
        # permission-checked, so reject it outright.
        if not (model_klass and getattr(model_klass, '_meta', None)):
            raise Unauthorized('You are not allowed to access this resource.')
        return model_klass
    def read_detail(self, object_list, bundle):
        self.base_checks(bundle.request, bundle.obj.__class__)
        if bundle.obj.id == bundle.request.user.id:
            return True
        raise Unauthorized('You are not allowed to access this resource.')
class UserResource(MyBaseResource):
    """Tastypie resource exposing the authenticated user's account.

    Besides the standard detail GET/PUT/DELETE, it registers extra endpoints
    for login (username/password, Google, Facebook), password reset/change,
    avatar upload, social-account disconnect and e-mail re-confirmation.
    """
    profile = fields.ApiField(attribute='profile', null=True, blank=True, readonly=True)
    has_usable_password = fields.BooleanField(attribute='has_usable_password', null=True, blank=True, readonly=True)
    email_verified = fields.BooleanField(attribute='email_verified', null=True, blank=True, readonly=True)
    class Meta:
        always_return_data = True
        detail_allowed_methods = ['get', 'put', 'delete']
        list_allowed_methods = []
        queryset = User.objects.all()
        resource_name = 'user'
        authentication = MultiAuthentication(SessionAuthentication(), ApiKeyAuthentication())
        authorization = UserAuthorization()
        excludes = ['is_active', 'is_staff', 'is_superuser', 'password']
    def obj_update(self, bundle, skip_errors=False, **kwargs):
        """Update the user, sync the profile and re-attach api/social tokens."""
        bundle = super(UserResource, self).obj_update(bundle, skip_errors, **kwargs)
        handle_user_profile(bundle.obj, bundle.data)
        apikey = ApiKey.objects.get_or_create(user=bundle.obj)[0].key
        bundle.data['apikey'] = apikey
        social_access_token = SocialToken.get_user_tokens(bundle.obj)
        if social_access_token:
            bundle.data['social_access_token'] = social_access_token
        return bundle
    def dehydrate_profile(self, bundle):
        # Expose the profile as its raw extra_data payload.
        return bundle.obj.profile.extra_data
    def dehydrate_has_usable_password(self, bundle):
        return bundle.obj.has_usable_password()
    def dehydrate_email_verified(self, bundle):
        # Users without a pending EmailConfirmation are treated as verified.
        try:
            ec = EmailConfirmation.objects.get(user=bundle.obj)
            return ec.verified
        except EmailConfirmation.DoesNotExist:
            return True
    def prepend_urls(self):
        """Register the custom (non-CRUD) endpoints of this resource."""
        return [
            url(r'^(?P<resource_name>user)/google/login/$',
                self.wrap_view('google_login'), name='api_google_login'),
            url(r'^(?P<resource_name>user)/facebook/login/$',
                self.wrap_view('facebook_login'), name='api_facebook_login'),
            url(r'^(?P<resource_name>user)/login/$',
                self.wrap_view('login'), name='api_login'),
            url(r'^(?P<resource_name>user)/password/reset/$',
                self.wrap_view('reset_password'), name='reset_password'),
            url(r'^(?P<resource_name>user)/password/change/$',
                self.wrap_view('change_password'), name='change_password'),
            url(r'^(?P<resource_name>user)/avatarupload/$',
                self.wrap_view('avatar_upload'), name='avatar_upload'),
            url(r'^(?P<resource_name>user)/disconnect/(?P<provider>\w+)/$',
                self.wrap_view('disconnect_socialaccount'), name='disconnect_socialaccount'),
            url(r'^(?P<resource_name>user)/resend/$',
                self.wrap_view('resend_confirmation'), name='resend_confirmation'),
            url(r'^me/$',
                self.wrap_view('me'), name='api_me'),
        ]
    def me(self, request, **kwargs):
        """Return the authenticated user's serialized representation."""
        self.method_check(request, allowed=['post'])
        apikey_auth = ApiKeyAuthentication()
        if apikey_auth.is_authenticated(request) == True:
            return self.generate_response(request, request.user)
        else:
            return self.create_response(request, {}, HttpUnauthorized)
    def disconnect_socialaccount(self, request, provider, **kwargs):
        """Delete the user's stored tokens for the given social provider."""
        self.method_check(request, allowed=['post'])
        apikey_auth = ApiKeyAuthentication()
        if apikey_auth.is_authenticated(request) == True:
            user_provider_tokens = SocialToken.objects.filter(user=request.user, app__provider=provider)
            if user_provider_tokens:
                user_provider_tokens.delete()
            response_data = {'status': "success"}
            return HttpResponse(json.dumps(response_data), mimetype='application/json')
        # BUGFIX: previously fell through and referenced response_data
        # before assignment (UnboundLocalError -> HTTP 500) when the
        # api-key check failed; respond 401 instead, as `me` does.
        return self.create_response(request, {}, HttpUnauthorized)
    def change_password(self, request, **kwargs):
        """Change (or set, for social-only accounts) the user's password.

        A successful change logs the user out so they re-authenticate with
        the new credentials.
        """
        self.method_check(request, allowed=['post'])
        apikey_auth = ApiKeyAuthentication()
        if apikey_auth.is_authenticated(request) == True:
            data = self.deserialize(request, request.body,
                                    format=request.META.get('CONTENT_TYPE', 'application/json'))
            # Social-only accounts have no usable password yet, so they set
            # one instead of changing it.
            if request.user.has_usable_password():
                password_change_form = ChangePasswordForm(request.user, data)
            else:
                password_change_form = SetPasswordForm(request.user, data)
            if password_change_form.is_valid():
                password_change_form.save()
                response_data = {'status': "success"}
                if request.user.is_authenticated():
                    logout(request)
                return HttpResponse(json.dumps(response_data), mimetype='application/json')
            else:
                if request.user.is_authenticated():
                    logout(request)
                return self.create_response(request, {
                    'error': password_change_form.errors,
                }, HttpBadRequest)
        # BUGFIX: previously fell through and referenced response_data
        # before assignment (UnboundLocalError -> HTTP 500) when the
        # api-key check failed; respond 401 instead.
        return self.create_response(request, {}, HttpUnauthorized)
    def reset_password(self, request, **kwargs):
        """Send a reset-password e-mail to the address supplied in the body."""
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.body,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        try:
            email = data.get('email')
            # User.DoesNotExist (unknown address) also lands in the except.
            user = User.objects.get(email=email)
            send_reset_password_email(user, request)
            response_data = {'status': 'success'}
            return HttpResponse(json.dumps(response_data), mimetype='application/json')
        except Exception as e:
            return self.create_response(request, {
                'error': e.message,
            }, HttpBadRequest)
    def resend_confirmation(self, request, **kwargs):
        """Re-send the e-mail confirmation message for the given address."""
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.body,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        try:
            email = data.get('email')
            ec = EmailConfirmation.objects.get(email=email)
            ec.send(request)
            response_data = {'status': 'success'}
            return HttpResponse(json.dumps(response_data), mimetype='application/json')
        except Exception as e:
            return self.create_response(request, {
                'error': e.message,
            }, HttpBadRequest)
    def login(self, request, **kwargs):
        """Authenticate with username-or-email + password; return user data."""
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.body,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        username_or_email = data.get('username_or_email', '')
        password = data.get('password', '')
        user, error = check_user(username_or_email, password)
        if user:
            check_user_profile(user)
            return self.generate_response(request, user)
        return self.create_response(request, {
            'error': error,
        }, HttpUnauthorized)
    def avatar_upload(self, request, **kwargs):
        """Store an uploaded avatar photo; if the caller is authenticated,
        also set it as their profile avatar."""
        self.method_check(request, allowed=['post'])
        if 'multipart/form-data' not in str(request.META['CONTENT_TYPE']):
            return self.create_response(request, {
                'error': 'Unsupported media type',
            }, HttpBadRequest)
        else:
            if ('photo' in request.FILES):
                avatar_photo = AvatarPhoto(photo=request.FILES['photo'])
                avatar_photo.save()
                apikey_auth = ApiKeyAuthentication()
                if apikey_auth.is_authenticated(request) == True:
                    profile = check_user_profile(request.user)
                    profile.update_avatar(avatar_url=avatar_photo.photo.url, force=True)
                # The stored URL is returned even for anonymous uploads.
                return self.create_response(
                    request,
                    {'url': avatar_photo.photo.url})
            else:
                return self.create_response(request, {
                    'error': 'No image found',
                }, HttpBadRequest)
    def get_request_data(self, request, provider):
        """Extract (access_token, refresh_token, device_token) from the body.

        Facebook has no separate refresh token, so the access token doubles
        as one when missing.
        """
        self.method_check(request, allowed=['post'])
        data = self.deserialize(request, request.body,
                                format=request.META.get('CONTENT_TYPE', 'application/json'))
        access_token = data.get('access_token', '')
        refresh_token = data.get('refresh_token', '')
        device_token = data.get('device_token', None)
        if refresh_token == '' and provider == FACEBOOK:
            refresh_token = access_token
        return access_token, refresh_token, device_token
    def generate_response(self, request, user):
        """Serialize *user* plus api key and social tokens into a response."""
        user_logged_in.send(sender=user.__class__, request=request, user=user)
        bundle = self.build_bundle(obj=user, request=request)
        bundle = self.full_dehydrate(bundle)
        apikey = ApiKey.objects.get_or_create(user=user)[0].key
        bundle.data['apikey'] = apikey
        social_access_token = SocialToken.get_user_tokens(user)
        if social_access_token:
            bundle.data['social_access_token'] = social_access_token
        return self.create_response(request, bundle)
    def google_login(self, request, **kwargs):
        """Log in (or register) a user from a Google OAuth access token."""
        access_token, refresh_token, device_token = self.get_request_data(request, GOOGLE)
        google = SocialApp.objects.get(provider=GOOGLE)
        google_adapter = GoogleAdapter(app=google, login_url='https://www.googleapis.com/oauth2/v1/userinfo',
                                       access_token=access_token, refresh_token=refresh_token)
        try:
            user = google_adapter.get_user()
            profile = check_user_profile(user)
            # Update avatar from google
            google_avatar = google_adapter.token.extra_data.get('picture', None)
            if google_avatar:
                profile.update_avatar(google_avatar)
            # A successful Google login implies the address is verified,
            # so pending e-mail confirmations are cleared.
            EmailConfirmation.clear_email_confirmations(user)
            return self.generate_response(request, user)
        except Exception as e:
            return self.create_response(request, {
                'error': e.message,
            }, HttpBadRequest)
    def facebook_login(self, request, **kwargs):
        """Log in (or register) a user from a Facebook OAuth access token."""
        access_token, refresh_token, device_token = self.get_request_data(request, FACEBOOK)
        facebook = SocialApp.objects.get(provider=FACEBOOK)
        facebook_adapter = FacebookAdapter(app=facebook, login_url='https://graph.facebook.com/me',
                                           access_token=access_token, refresh_token=refresh_token)
        try:
            user = facebook_adapter.get_user()
            profile = check_user_profile(user)
            # Save facebook username into profile for special usage
            # Since the facebook graph api cannot get email
            # So we can use facebook username to check contact later
            token = facebook_adapter.token
            facebook_username = token.extra_data.get('username', None)
            if facebook_username:
                profile.facebook_username = facebook_username
                profile.save()
            # Update avatar from facebook
            facebook_uid = token.extra_data.get('id', None)
            facebook_avatar = None
            if facebook_uid:
                facebook_avatar = "http://graph.facebook.com/%s/picture?type=large" % facebook_uid
            if facebook_avatar:
                profile.update_avatar(facebook_avatar)
            # A successful Facebook login also clears pending confirmations.
            EmailConfirmation.clear_email_confirmations(user)
            return self.generate_response(request, user)
        except Exception as e:
            return self.create_response(request, {
                'error': e.message,
            }, HttpBadRequest)
class CreateUserResource(MyBaseResource):
    """Open (unauthenticated) endpoint for registering a new user account."""

    class Meta:
        detail_allowed_methods = []
        list_allowed_methods = ['post']
        queryset = User.objects.all()
        include_resource_uri = False
        # Registration must be reachable without credentials.
        authentication = Authentication()
        authorization = Authorization()
        always_return_data = True

    def obj_create(self, bundle, **kwargs):
        """Create a user from ``email``/``password`` (optional ``username``).

        Falls back to the email address as the username, rejects duplicate
        emails, hashes the password and kicks off the signup email
        confirmation.

        Raises ``BadRequest`` on missing/duplicate credentials or any
        downstream failure.
        """
        try:
            email = bundle.data.get('email', None)
            username = bundle.data.get('username', None)
            password = bundle.data.get('password', None)
            if username is None:
                kwargs['username'] = email
            if email is None or password is None:
                raise BadRequest('email and password are required.')
            if User.objects.filter(email=email).exists():
                raise BadRequest('A user is already registered with this e-mail address.')
            bundle = super(CreateUserResource, self).obj_create(bundle, **kwargs)
            # obj_create stored the raw value; hash it properly here.
            bundle.obj.set_password(bundle.data.get('password'))
            bundle.obj.save()
            ec = EmailConfirmation.create(email)
            if ec:
                ec.send(bundle.request, signup=True)
            handle_user_profile(bundle.obj, bundle.data)
            return bundle
        except Exception as e:
            # Surface any failure (including the BadRequests raised above)
            # as a 400 response rather than a server error.
            raise BadRequest(e)

    def post_list(self, request, **kwargs):
        """Create the user, then respond with the dehydrated ``UserResource``
        representation plus the freshly minted API key."""
        deserialized = self.deserialize(request, request.body,
                                        format=request.META.get('CONTENT_TYPE', 'application/json'))
        deserialized = self.alter_deserialized_detail_data(request, deserialized)
        bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized), request=request)
        updated_bundle = self.obj_create(bundle, **self.remove_api_resource_names(kwargs))
        # Re-dehydrate through UserResource so the payload matches a
        # normal user GET.
        user_resource = UserResource()
        updated_bundle = user_resource.build_bundle(obj=updated_bundle.obj, request=request)
        updated_bundle = user_resource.full_dehydrate(updated_bundle)
        apikey = ApiKey.objects.get_or_create(user=updated_bundle.obj)[0].key
        updated_bundle.data['apikey'] = apikey
        location = user_resource.get_resource_uri(updated_bundle)
        if not self._meta.always_return_data:
            return http.HttpCreated(location=location)
        else:
            return self.create_response(request, updated_bundle, response_class=http.HttpCreated, location=location)
| lettoosoft/lettoo-weixin-platform-back | src/api/v1/user.py | Python | mit | 16,119 |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2010-2022 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from openquake.hmtk.parsers import (catalogue)
#__all__ = [catalogue]
| gem/oq-engine | openquake/hmtk/parsers/__init__.py | Python | agpl-3.0 | 830 |
# Copyright 2014-2016 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from contextlib import contextmanager
from datetime import datetime
import collections
import os
import subprocess
import time
import pytest
from six import string_types
class Sh(object):
    """Tiny helper for composing shell commands via attribute access.

    ``Sh().git.log("--oneline")`` runs ``git log --oneline`` and returns
    its decoded stdout. Attribute accesses accumulate words into the
    pending command string; calling the object executes it and resets
    the buffer.
    """

    def __init__(self, cwd=None):
        # Accumulates command words between attribute accesses.
        self.command = ""
        # Working directory for the spawned shell (None = inherit).
        self.cwd = cwd

    def __getattr__(self, item):
        self.command += item + " "
        return self

    def __call__(self, *args, **kwargs):
        command = self.command + " ".join(args)
        self.command = ""
        proc = subprocess.Popen(
            command, shell=True, stdout=subprocess.PIPE, cwd=self.cwd
        )
        # Bug fix: communicate() waits for and reaps the child and closes
        # the pipe; the previous bare .stdout.read() left zombie processes
        # and open file descriptors behind.
        out, _ = proc.communicate()
        return out.decode()
class pull(object):
    """Context manager that pulls ``origin/master`` on entry.

    Exit is a no-op: the pull has already happened by the time the
    ``with`` body runs.
    """

    def __init__(self, sh):
        self.sh = sh

    def __enter__(self):
        self.sh.git.pull("origin", "master")

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Nothing to clean up; exceptions propagate normally.
        return None
class BaseTest(object):
    """Shared fixture state and git helpers for the integration tests.

    Reads the mount/repo locations from the environment and wraps the
    remote repository in a :class:`Sh` runner so tests can inspect the
    commit history produced by gitfs.
    """

    def setup(self):
        self.mount_path = "{}".format(os.environ["MOUNT_PATH"])
        self.repo_name = os.environ["REPO_NAME"]
        self.repo_path = os.environ["REPO_PATH"]
        self.current_path = "%s/current" % self.mount_path
        self.remote_repo_path = os.environ["REMOTE"]
        self.sh = Sh(self.remote_repo_path)
        self.last_commit_hash = self.commit_hash()

    @property
    def today(self):
        """Today's date formatted as ``YYYY-MM-DD``."""
        return datetime.now().strftime("%Y-%m-%d")

    def commit_hash(self, index=0):
        """Full hash of the commit *index* entries back from HEAD."""
        return self.sh.git.log("--pretty=%H").splitlines()[index]

    def commit_message(self, index=0):
        """First message line of the commit *index* entries back from HEAD."""
        return self.sh.git.log("--pretty=%B").splitlines()[index]

    def get_commits_by_date(self, date=None):
        """Commits made on *date* (default today) as ``HH-MM-SS-<hash10>``."""
        if date is None:
            date = self.today
        raw = self.sh.git.log(
            "--before",
            '"%s 23:59:59"' % date,
            "--after",
            '"%s 00:00:00"' % date,
            '--pretty="%ai %H"',
        )
        formatted = []
        for line in raw.splitlines():
            # tokens: [date, time, tz, hash]
            tokens = line.split()
            formatted.append(
                "%s-%s" % (tokens[1].replace(":", "-"), tokens[3][:10])
            )
        return formatted

    def get_commit_dates(self):
        """Distinct dates (``YYYY-MM-DD``) on which commits were made."""
        dates = self.sh.git.log("--pretty=%ad", "--date=short").splitlines()
        return list(set(dates))

    def assert_commit_message(self, message):
        assert message == self.commit_message()

    def assert_new_commit(self, steps=1):
        # Walk back from HEAD until we find the previously recorded hash;
        # the distance is the number of new commits.
        distance = 0
        while self.commit_hash(distance) != self.last_commit_hash:
            distance += 1
        self.last_commit_hash = self.commit_hash(0)
        assert distance == steps

    def assert_file_content(self, file_path, content):
        with open(self.repo_path + "/" + file_path) as handle:
            assert handle.read() == content
class GitFSLog(object):
    """Tail the gitfs log file and block until expected messages appear.

    ``file_descriptor`` must be opened in non-blocking mode: a read that
    returns no data means "nothing new yet", and callers poll until a
    timeout expires.
    """

    def __init__(self, file_descriptor):
        # Holds an incomplete trailing line between reads.
        self._partial_line = None
        # Complete lines read but not yet consumed.
        self.line_buffer = collections.deque()
        self.file_descriptor = file_descriptor

    def _read_data(self):
        """Read available bytes and buffer any *complete* lines.

        Returns True only when at least one full line was appended to
        ``line_buffer``; a read that yields nothing, or only a partial
        line, returns False so callers keep polling.
        """
        # file should be opened in non-blocking mode, so this will
        # return an empty result if it can't read any data
        data = os.read(self.file_descriptor, 2048).decode().splitlines(True)
        if not data:
            return False
        if self._partial_line:
            data[0] = self._partial_line + data[0]
        if not data[-1].endswith("\n"):
            self._partial_line = data[-1]
            data = data[:-1]  # discard the partial line
        else:
            self._partial_line = None
        self.line_buffer.extend(data)
        # Bug fix: the old code returned True unconditionally here, so a
        # read consisting solely of a partial line claimed success while
        # buffering nothing, making _get_line pop an empty deque and
        # raise IndexError.
        return bool(data)

    def clear(self):
        """Discards any logs produced so far."""
        # seek to the end of the file, since we want to discard old messages
        os.lseek(self.file_descriptor, 0, os.SEEK_END)
        self._partial_line = None
        self.line_buffer = collections.deque()

    def __call__(self, expected, **kwargs):
        """Returns a context manager so you can wrap operations with expected
        log output.

        Example usage:

            with gitfs_log("Expected log output"):
                do_operation_that_produces_expected_log_output()
        """

        @contextmanager
        def log_context(gitfs_log):
            gitfs_log.clear()
            yield
            # A single string is one expectation; any other iterable is a
            # sequence of expectations checked in order.
            if isinstance(expected, string_types):
                gitfs_log.expect(expected, **kwargs)
            else:
                gitfs_log.expect_multiple(expected, **kwargs)

        return log_context(self)

    def _get_line(self, timeout, pollfreq=0.01):
        """Blocks until it can return a line. Returns None if it timed out."""
        if self.line_buffer:
            # got buffered lines, consume from these first
            return self.line_buffer.popleft()
        elapsed = 0
        while elapsed < timeout:
            if self._read_data():
                return self.line_buffer.popleft()
            time.sleep(pollfreq)
            elapsed += pollfreq
        return None

    def expect(self, expected, timeout=10):
        """Blocks until `expected` is found in a line of the stream,
        or until timeout is reached.
        """
        started = time.time()
        elapsed = 0
        while elapsed < timeout:
            line = self._get_line(timeout=(timeout - elapsed))
            if line is None:
                break  # timed out waiting for line
            elif expected in line:
                return
            elapsed = time.time() - started
        raise AssertionError(
            "Timed out waiting for '{}' in the stream".format(expected)
        )

    def expect_multiple(self, expected, *args, **kwargs):
        """Blocks until all `expected` strings are found in the stream, in the
        order they were passed.
        """
        for exp in expected:
            self.expect(exp, *args, **kwargs)
@pytest.fixture(scope="session")
def gitfs_log():
    """Session-wide GitFSLog tailing ``log.txt`` (opened non-blocking)."""
    return GitFSLog(os.open("log.txt", os.O_NONBLOCK))
| PressLabs/gitfs | tests/integrations/base.py | Python | apache-2.0 | 6,485 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    # Auto-generated schema migration: introduces UserTermsAgreement, a
    # one-to-one companion record for each auth user.

    dependencies = [
        # Needs the (swappable) user model and the initial candidates schema.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('candidates', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserTermsAgreement',
            fields=[
                # Implicit auto-increment primary key.
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # Whether the user agreed to the terms; "dc" presumably
                # refers to Democracy Club -- confirm against app usage.
                ('assigned_to_dc', models.BooleanField(default=False)),
                # Reachable from the user as ``user.terms_agreement``.
                ('user', models.OneToOneField(related_name='terms_agreement', to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| mysociety/yournextrepresentative | candidates/migrations/0002_usertermsagreement.py | Python | agpl-3.0 | 827 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from google.appengine.ext import ndb
class FrontendJob(ndb.Model):
  """Class representing a frontend job.

  A frontend job is a Clovis task sent by the user, and associated metadata
  (such as the username, the start time...).
  It is persisted in the Google Cloud datastore.

  All frontend jobs are ancestors of a single entity called 'FrontendJobList'.
  This allows to benefit from strong consistency when querying the job
  associated to a tag.
  """
  # Base URL path to get information about a job.
  SHOW_JOB_URL = '/show_job'

  # ndb properties persisted in the datastore. Indexing is not needed.
  email = ndb.StringProperty(indexed=False)
  status = ndb.StringProperty(indexed=False)
  task_url = ndb.StringProperty(indexed=False)
  eta = ndb.DateTimeProperty(indexed=False)
  # Populated automatically when the entity is first stored.
  start_time = ndb.DateTimeProperty(auto_now_add=True, indexed=False)
  # Not indexed by default. Potentially large payloads, hence compression.
  clovis_task = ndb.TextProperty(compressed=True, indexed=False)
  log = ndb.TextProperty(indexed=False)

  @classmethod
  def _GetParentKeyFromTag(cls, tag):
    """Gets the key that can be used to retrieve a frontend job from the job
    list.
    """
    return ndb.Key('FrontendJobList', tag)

  @classmethod
  def CreateForTag(cls, tag):
    """Creates a frontend job associated with tag."""
    parent_key = cls._GetParentKeyFromTag(tag)
    return cls(parent=parent_key)

  @classmethod
  def GetFromTag(cls, tag):
    """Gets the frontend job associated with tag."""
    parent_key = cls._GetParentKeyFromTag(tag)
    return cls.query(ancestor=parent_key).get()

  @classmethod
  def DeleteForTag(cls, tag):
    """Deletes the frontend job associated with tag."""
    parent_key = cls._GetParentKeyFromTag(tag)
    # keys_only avoids fetching the full entity just to delete it.
    frontend_job = cls.query(ancestor=parent_key).get(keys_only=True)
    if frontend_job:
      frontend_job.delete()

  @classmethod
  def ListJobs(cls):
    """Lists all the frontend jobs.

    Returns:
      list of strings: The list of tags corresponding to existing frontend jobs.
    """
    # NOTE: the listing is capped at 100 jobs by the fetch() limit.
    return [key.parent().string_id() for key in cls.query().fetch(
        100, keys_only=True)]

  @classmethod
  def GetJobURL(cls, tag):
    """Gets the URL that can be used to get information about a specific job."""
    return cls.SHOW_JOB_URL + '?tag=' + tag

  def RenderAsHtml(self):
    """Render a short job description as a HTML table.

    The log and ClovisTask are not included, because they are potentially very
    large.
    """
    # NOTE(review): property values are interpolated without HTML-escaping;
    # confirm fields such as email/status can never contain markup.
    html = '<table>'
    for p in FrontendJob._properties:
      if p == 'log' or p == 'clovis_task':
        continue
      value = getattr(self, p)
      if value:
        html += '<tr><td>' + p + '</td><td>' + str(value) + '</td></tr>'
    html += '</table>'
    return html
| danakj/chromium | tools/android/loading/cloud/frontend/frontend_job.py | Python | bsd-3-clause | 2,892 |
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import re
import jmespath
from botocore import xform_name
from ..exceptions import ResourceLoadException
INDEX_RE = re.compile(r'\[(.*)\]$')
def get_data_member(parent, path):
    """Resolve a JMESPath expression against a resource's data.

    Loads the parent first when its data has not been fetched yet. If the
    resource cannot be loaded and no data is present, an exception is
    raised instead of silently searching nothing.

    :type parent: ServiceResource
    :param parent: The resource instance whose data is queried.
    :type path: string
    :param path: The JMESPath expression to query.
    :raises ResourceLoadException: When no data is present and the
        resource cannot be loaded.
    :returns: The queried data or ``None``.
    """
    if parent.meta.data is None:
        # Guard clause: a resource without a load() method can never
        # supply data, so fail loudly.
        if not hasattr(parent, 'load'):
            raise ResourceLoadException(
                f'{parent.__class__.__name__} has no load method!'
            )
        parent.load()
    return jmespath.search(path, parent.meta.data)
def create_request_parameters(parent, request_model, params=None, index=None):
    """Pre-fill request parameters from a resource and its action model.

    Each parameter in the model declares where its value comes from: a
    resource identifier, a data member (which may trigger a load), or a
    hard-coded constant. Parameters sourced from user input are skipped.

    By passing ``params``, this can be called repeatedly to build up one
    parameter dict over several invocations, which is useful for reverse
    JMESPath expressions that append to lists.

    :type parent: ServiceResource
    :param parent: The resource instance this action is attached to.
    :type request_model: :py:class:`~boto3.resources.model.Request`
    :param request_model: The action request model.
    :type params: dict
    :param params: If set, add to this existing dict. It is both edited
        in-place and returned.
    :type index: int
    :param index: The position of an item within a list.
    :rtype: dict
    :return: Pre-filled parameters to be sent to the request operation.
    """
    if params is None:
        params = {}

    for param in request_model.params:
        source = param.source
        if source == 'input':
            # Provided by the user at call time; nothing to pre-fill.
            continue
        if source == 'identifier':
            # Resource identifier, e.g. queue.url
            value = getattr(parent, xform_name(param.name))
        elif source == 'data':
            # A data member may incur a load action before the value
            # becomes available.
            value = get_data_member(parent, param.path)
        elif source in ('string', 'integer', 'boolean'):
            # Hard-coded constant from the resource definition.
            value = param.value
        else:
            raise NotImplementedError(f'Unsupported source type: {source}')

        build_param_structure(params, param.target, value, index)

    return params
def build_param_structure(params, target, value, index=None):
    """
    This method provides a basic reverse JMESPath implementation that
    lets you go from a JMESPath-like string to a possibly deeply nested
    object. The ``params`` are mutated in-place, so subsequent calls
    can modify the same element by its index.

        >>> params = {}
        >>> build_param_structure(params, 'test[0]', 1)
        >>> print(params)
        {'test': [1]}

        >>> build_param_structure(params, 'foo.bar[0].baz', 'hello world')
        >>> print(params)
        {'test': [1], 'foo': {'bar': [{'baz': 'hello world'}]}}

    """
    pos = params
    parts = target.split('.')

    # First, split into parts like 'foo', 'bar[0]', 'baz' and process
    # each piece. It can either be a list or a dict, depending on if
    # an index like `[0]` is present. We detect this via a regular
    # expression, and keep track of where we are in params via the
    # pos variable, walking down to the last item. Once there, we
    # set the value.
    for i, part in enumerate(parts):
        # Is it indexing an array?
        result = INDEX_RE.search(part)
        if result:
            if result.group(1):
                if result.group(1) == '*':
                    # Wildcard '[*]': strip the 3-char suffix; ``index``
                    # keeps whatever value the caller passed in.
                    part = part[:-3]
                else:
                    # We have an explicit index
                    index = int(result.group(1))
                    part = part[: -len(str(index) + '[]')]
            else:
                # Index will be set after we know the proper part
                # name and that it's a list instance.
                index = None
                part = part[:-2]

            if part not in pos or not isinstance(pos[part], list):
                pos[part] = []

            # This means we should append, e.g. 'foo[]'
            if index is None:
                index = len(pos[part])

            while len(pos[part]) <= index:
                # Assume it's a dict until we set the final value below
                pos[part].append({})

            # Last item? Set the value, otherwise set the new position
            if i == len(parts) - 1:
                pos[part][index] = value
            else:
                # The new pos is the *item* in the array, not the array!
                pos = pos[part][index]
        else:
            if part not in pos:
                pos[part] = {}

            # Last item? Set the value, otherwise set the new position
            if i == len(parts) - 1:
                pos[part] = value
            else:
                pos = pos[part]
| boto/boto3 | boto3/resources/params.py | Python | apache-2.0 | 6,112 |
import blue_yellow_app
from blue_yellow_app.data.dbsession import DbSessionFactory
def main():
    """Entry point: initialise the database layer, then seed test data."""
    # init_db(None): presumably None means "use default connection settings"
    # -- confirm against blue_yellow_app.init_db's signature.
    blue_yellow_app.init_db(None)
    add_test_data()
def add_test_data():
    """Placeholder for inserting sample rows; currently a no-op."""
    pass
# Allow running this seeding script directly from the command line.
if __name__ == '__main__':
    main()
#!/usr/bin/python3
from ABE_ExpanderPi import RTC
import time
"""
================================================
ABElectronics Expander Pi | RTC clock output demo
Version 1.0 Created 21/08/2014
Version 1.1 Updated 11/06/2017 updated to include changes to Expander Pi library
run with: python3 demo-rtcout.py
================================================
This demo shows how to enable the clock square wave output on the
Expander Pi real-time clock and set the frequency
"""
rtc = RTC()  # create a new instance of the RTC class

# Select the square-wave output frequency; valid values per the library:
# 1 = 1 Hz, 2 = 4.096 kHz, 3 = 8.192 kHz, 4 = 32.768 kHz.
rtc.set_frequency(3)

rtc.enable_output()  # start driving the square wave on the clock output
| abelectronicsuk/ABElectronics_Python3_Libraries | ExpanderPi/demo-rtcout.py | Python | mit | 721 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.