repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
hedmo/compizconfig-python | setup.py | Python | gpl-2.0 | 5,125 | 0.025171 | # -*- coding: utf-8 -*-
from distutils.core import setup
from distutils.command.build import build as _build
from distutils.command.install import install as _install
from distutils.command.install_data import install_data as _install_data
from distutils.command.sdist import sdist as _sdist
from distutils.extension import Extension
import os
import subprocess
# Prefer building the extension from the Cython source when present
# (developer checkout); fall back to the pre-generated C file (sdist).
# If src/compizconfig.pyx exists, build using Cython
if os.path.exists ("src/compizconfig.pyx"):
    from Cython.Distutils import build_ext
    ext_module_src = "src/compizconfig.pyx"
else: # Otherwise build directly from C source
    from distutils.command.build_ext import build_ext
    ext_module_src = "src/compizconfig.c"
# Read the package version from the VERSION file; the file may contain a
# "KEY=value" line, in which case only the value part is kept.
version_file = open ("VERSION", "r")
version = version_file.read ().strip ()
if "=" in version:
    version = version.split ("=")[1]
def pkgconfig(*packages, **kw):
    """Query pkg-config for *packages* and return Extension keyword args.

    Translates the -I/-L/-l flags from ``pkg-config --libs --cflags`` into
    the matching distutils Extension lists (include_dirs, library_dirs,
    libraries, runtime_library_dirs).  Keyword arguments passed in are
    extended in place and the mapping is returned.
    """
    flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries', '-R': 'runtime_library_dirs'}
    cmd = ['pkg-config', '--libs', '--cflags']
    tokens = subprocess.Popen (cmd + list(packages), stdout=subprocess.PIPE).communicate()[0].split ()
    for t in tokens:
        if '-L' in t[:2]:
            kw.setdefault (flag_map.get ("-L"), []).append (t[2:])
            # Mirror each -L directory into an rpath entry unless disabled.
            # Fixed: was `not ... is "1"`, an identity comparison against a
            # string literal; equality is what is meant here.
            if os.getenv ("COMPIZ_DISABLE_RPATH") != "1":
                kw.setdefault (flag_map.get ("-R"), []).append (t[2:])
        elif '-I' in t[:2]:
            kw.setdefault (flag_map.get ("-I"), []).append (t[2:])
        elif '-l' in t[:2]:
            kw.setdefault (flag_map.get ("-l"), []).append (t[2:])
    return kw
# Absolute path to the VERSION file next to this setup script.
VERSION_FILE = os.path.join (os.path.dirname (__file__), "VERSION")
# Probe pkg-config for libcompizconfig; an empty result means no .pc file
# was found and the extension cannot be built.
pkgconfig_libs = subprocess.Popen (["pkg-config", "--libs", "libcompizconfig"], stdout=subprocess.PIPE, stderr=open(os.devnull, 'w')).communicate ()[0]
if len (pkgconfig_libs) == 0:  # fixed: was `is 0`, identity test on an int literal
    print ("CompizConfig Python [ERROR]: No libcompizconfig.pc found in the pkg-config search path")
    print ("Ensure that libcompizconfig is installed or libcompizconfig.pc is in your $PKG_CONFIG_PATH")
    exit (1)
# Strip the leading "-l" from the first token to get the bare library name.
libs = pkgconfig_libs[2:].split (" ")[0]
# Manifest file recording what was installed, consumed by `uninstall`.
INSTALLED_FILES = "installed_files"


class install (_install):
    """install command that records the list of installed files.

    The root/prefix-relative path of everything installed is written to
    INSTALLED_FILES so the custom ``uninstall`` command can remove it later.
    """
    def run (self):
        _install.run (self)
        outputs = self.get_outputs ()
        # Strip the root/prefix portion so recorded paths are relative.
        length = 0
        if self.root:
            length += len (self.root)
        if self.prefix:
            length += len (self.prefix)
        if length:
            for counter in xrange (len (outputs)):
                outputs[counter] = outputs[counter][length:]
        data = "\n".join (outputs)
        # Narrowed from a bare `except:` (which would also swallow e.g.
        # KeyboardInterrupt); only I/O errors are expected when opening.
        # Local renamed from `file` to avoid shadowing the builtin.
        try:
            manifest = open (INSTALLED_FILES, "w")
        except IOError:
            self.warn ("Could not write installed files list %s" % \
                        INSTALLED_FILES)
            return
        manifest.write (data)
        manifest.close ()
class install_data (_install_data):
    """install_data command that normalises data files to mode 0644."""
    def run (self):
        # Fixed: the S_* constants were referenced without ever being
        # imported (no `stat` import exists in this file), so os.chmod
        # always failed with a NameError that the bare except silenced.
        from stat import S_IRUSR, S_IWUSR, S_IRGRP, S_IROTH
        def chmod_data_file (file):
            try:
                os.chmod (file, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH)
            except OSError:
                self.warn ("Could not chmod data file %s" % file)
        _install_data.run (self)
        map (chmod_data_file, self.get_outputs ())
class uninstall (_install):
    # Reads the INSTALLED_FILES manifest written by the custom install
    # command and deletes every path listed in it.
    def run (self):
        try:
            file = open (INSTALLED_FILES, "r")
        except:
            self.warn ("Could not read installed files list %s" % \
                        INSTALLED_FILES)
            return
        files = file.readlines ()
        file.close ()
        # Recorded paths are relative; prepend the current root/prefix to
        # rebuild the absolute installation paths.
        prepend = ""
        if self.root:
            prepend += self.root
        if self.prefix:
            prepend += self.prefix
        if len (prepend):
            for counter in xrange (len (files)):
                files[counter] = prepend + files[counter].rstrip ()
        for file in files:
            print ("Uninstalling %s" % file)
            try:
                os.unlink (file)
            except:
                self.warn ("Could not remove file %s" % file)
class sdist (_sdist):
    # sdist command that ships the generated C file instead of the Cython
    # source, so end users can build without Cython installed.
    def run (self):
        # Build C file
        if os.path.exists ("src/compizconfig.pyx"):
            from Cython.Compiler.Main import compile as cython_compile
            cython_compile ("src/compizconfig.pyx")
        # Run regular sdist
        _sdist.run (self)
    def add_defaults (self):
        _sdist.add_defaults (self)
        # Remove pyx source and add c source
        if os.path.exists ("src/compizconfig.pyx"):
            self.filelist.exclude_pattern ("src/compizconfig.pyx")
            self.filelist.append ("src/compizconfig.c")
# Wire the custom commands into distutils and build the compizconfig
# extension with compile/link flags taken from pkg-config.
setup (
    name = "compizconfig-python",
    version = version,
    description = "CompizConfig Python",
    url = "http://www.compiz.org/",
    license = "GPL",
    maintainer = "Guillaume Seguin",
    maintainer_email = "guillaume@segu.in",
    cmdclass = {"uninstall" : uninstall,
                "install" : install,
                "install_data" : install_data,
                "build_ext" : build_ext,
                "sdist" : sdist},
    ext_modules=[
        Extension ("compizconfig", [ext_module_src],
                   **pkgconfig("libcompizconfig"))
    ]
)
|
thisisshi/cloud-custodian | tools/ops/logsetup.py | Python | apache-2.0 | 2,623 | 0.000381 | # Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
"""Cloud Watch Log Subscription Email Relay
"""
import argparse
import itertools
import logging
import sys
from c7n.credentials import SessionFactory
from c7n.mu import LambdaManager
from c7n.ufuncs import logsub
log = logging.getLog | ger("custodian.logsetup")
def setup_parser():
    """Build the command line parser for the log-subscription relay.

    Returns:
        argparse.ArgumentParser: parser with log-group matching,
        connection and delivery options.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--role", required=True)
    # Log Group match
    parser.add_argument("--prefix", default=None)
    parser.add_argument("-g", "--group", action="append")
    parser.add_argument("--pattern", default="Traceback")
    # Connection stuff
    parser.add_argument("--profile")
    parser.add_argument("--assume")
    parser.add_argument("--region", default="us-east-1")
    # Delivery
    parser.add_argument("--topic", required=True)
    parser.add_argument("--subject", default="Custodian Ops Error")
    return parser
def get_groups(session_factory, options):
    # Return the CloudWatch log groups to subscribe to, optionally
    # filtered by a name prefix and/or an explicit --group list.
    session = session_factory()
    logs = session.client('logs')
    params = {}
    if options.prefix:
        params['logGroupNamePrefix'] = options.prefix
    # Paginate through all matching groups and flatten the result pages.
    results = logs.get_paginator('describe_log_groups').paginate(**params)
    groups = list(itertools.chain(*[rp['logGroups'] for rp in results]))
    if options.group:
        log.info("Filtering on %s for %d groups" % (
            options.group,
            len([g['logGroupName'] for g in groups])))
        groups = [g for g in groups if g['logGroupName'] in options.group]
    log.info("Subscribing to groups: %s" % (
        " \n".join([g['logGroupName'] for g in groups])))
    return groups
def main():
    # Entry point: validate arguments, resolve the matching log groups
    # and publish the error-notify Lambda subscribed to them.
    parser = setup_parser()
    options = parser.parse_args()
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('botocore').setLevel(logging.ERROR)
    # At least one way of selecting log groups must be given.
    if not options.group and not options.prefix:
        print("Error: Either group or prefix must be specified")
        sys.exit(1)
    session_factory = SessionFactory(
        options.region, options.profile, options.assume)
    groups = get_groups(session_factory, options)
    func = logsub.get_function(
        session_factory,
        "cloud-custodian-error-notify",
        role=options.role,
        sns_topic=options.topic,
        subject=options.subject,
        log_groups=groups,
        pattern=options.pattern)
    manager = LambdaManager(session_factory)
    try:
        manager.publish(func)
    except Exception:
        # Ops tool behaviour: dump the traceback and drop into pdb so the
        # failure can be inspected interactively.
        import traceback, pdb, sys
        traceback.print_exc()
        pdb.post_mortem(sys.exc_info()[-1])
if __name__ == '__main__':
main()
|
wwright2/dcim3-angstrom1 | sources/openembedded-core/meta/lib/oeqa/runtime/parselogs.py | Python | mit | 7,183 | 0.007379 | import os
import unittest
from oeqa.oetest import oeRuntimeTest
from oeqa.utils.decorators import *
#in the future these lists could be moved outside of module
errors = ["error", "cannot", "can\'t", "failed"]
common_errors = [
'(WW) warning, (EE) error, (NI) not implemented, (??) unknown.',
'dma timeout',
'can\'t add hid device:',
'usbhid: probe of ',
]
x86_common = [
'[drm:psb_do_init] *ERROR* Debug is',
'wrong ELF class',
'Could not enable PowerButton event',
'probe of LNXPWRBN:00 failed with error -22',
] + common_errors
qemux86_common = [
'Fast TSC calibration',
'_OSC failed (AE_NOT_FOUND); disabling ASPM',
'Open ACPI failed (/var/run/acpid.socket) (No such file or directory)',
'Failed to load module "vesa"',
'Failed to load module "modesetting"',
'Failed to load module "glx"',
'wrong ELF class',
] + common_errors
ignore_errors = {
'default' : common_errors,
'qemux86' : [
'Failed to access perfctr msr (MSR c1 is 0)',
"fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
] + qemux86_common,
'qemux86-64' : qemux86_common,
'qemumips' : [
'Failed to load module "glx"',
] + common_errors,
'qemuppc' : [
'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]',
'mode "640x480" test failed',
'Failed to load module "glx"',
] + common_errors,
'qemuarm' : [
'mmci-pl18x: probe of fpga:05 failed with error -22',
'mmci-pl18x: probe of fpga:0b failed with error -22',
'Failed to load module "glx"'
] + common_errors,
'emenlow' : x86_common,
'crownbay' : x86_common,
'genericx86' : x86_common,
'genericx86-64' : x86_common,
}
log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"]
class Pa | rseLogsTest(oeRuntimeTest):
    @classmethod
    def setUpClass(self):
        # Expose the module-level configuration tables on the test class.
        self.errors = errors
        self.ignore_errors = ignore_errors
        self.log_locations = log_locations
        self.msg = ""
    def getMachine(self):
        # Hostname of the target device, used to select the per-machine
        # ignore list in ignore_errors.
        (status, output) = self.target.run("uname -n")
        return output
#get some information on the CPU of the machine to display at the beginning of the output | . This info might be useful in some cases.
    def getHardwareInfo(self):
        # Collect a short human-readable CPU/arch summary from the target,
        # printed at the start of the test output for context.
        hwi = ""
        (status, cpu_name) = self.target.run("cat /proc/cpuinfo | grep \"model name\" | head -n1 | awk 'BEGIN{FS=\":\"}{print $2}'")
        (status, cpu_physical_cores) = self.target.run("cat /proc/cpuinfo | grep \"cpu cores\" | head -n1 | awk {'print $4'}")
        (status, cpu_logical_cores) = self.target.run("cat /proc/cpuinfo | grep \"processor\" | wc -l")
        (status, cpu_arch) = self.target.run("uname -m")
        hwi += "Machine information: \n"
        hwi += "*******************************\n"
        hwi += "Machine name: "+self.getMachine()+"\n"
        hwi += "CPU: "+str(cpu_name)+"\n"
        hwi += "Arch: "+str(cpu_arch)+"\n"
        hwi += "Physical cores: "+str(cpu_physical_cores)+"\n"
        hwi += "Logical cores: "+str(cpu_logical_cores)+"\n"
        hwi += "*******************************\n"
        return hwi
#go through the log locations provided and if it's a folder create a list with all the .log files in it, if it's a file just add
#it to that list
    def getLogList(self, log_locations):
        # Expand the configured log locations: plain files are kept as-is,
        # directories contribute their immediate *.log files.
        logs = []
        for location in log_locations:
            (status, output) = self.target.run("test -f "+str(location))
            if (status == 0):
                logs.append(str(location))
            else:
                (status, output) = self.target.run("test -d "+str(location))
                if (status == 0):
                    (status, output) = self.target.run("find "+str(location)+"/*.log -maxdepth 1 -type f")
                    output = output.splitlines()
                    for logfile in output:
                        logs.append(os.path.join(location,str(logfile)))
        return logs
#build the grep command to be used with filters and exclusions
    def build_grepcmd(self, errors, ignore_errors, log):
        # Assemble a shell pipeline: grep for the error keywords, then
        # filter out the machine-specific ignore patterns with grep -v.
        grepcmd = "grep "
        grepcmd +="-Ei \""
        for error in errors:
            grepcmd += error+"|"
        grepcmd = grepcmd[:-1]
        grepcmd += "\" "+str(log)+" | grep -Eiv \'"
        # Pick the ignore list for this machine, falling back to 'default'.
        try:
            errorlist = ignore_errors[self.getMachine()]
        except KeyError:
            self.msg += "No ignore list found for this machine, using default\n"
            errorlist = ignore_errors['default']
        for ignore_error in errorlist:
            # Escape regex metacharacters so the literal ignore strings are
            # matched verbatim by grep -E.
            ignore_error = ignore_error.replace("(", "\(")
            ignore_error = ignore_error.replace(")", "\)")
            ignore_error = ignore_error.replace("'", ".")
            ignore_error = ignore_error.replace("?", "\?")
            ignore_error = ignore_error.replace("[", "\[")
            ignore_error = ignore_error.replace("]", "\]")
            ignore_error = ignore_error.replace("*", "\*")
            grepcmd += ignore_error+"|"
        grepcmd = grepcmd[:-1]
        grepcmd += "\'"
        return grepcmd
#grep only the errors so that their context could be collected. Default context is 10 lines before and after the error itself
    def parse_logs(self, errors, ignore_errors, logs, lines_before = 10, lines_after = 10):
        # Grep each log for error keywords, then re-grep every hit with
        # context (lines_before/lines_after) so failures are reported with
        # their surroundings.  Returns {log: {error_line: context}}.
        results = {}
        rez = []
        for log in logs:
            thegrep = self.build_grepcmd(errors, ignore_errors, log)
            try:
                (status, result) = self.target.run(thegrep)
            except:
                pass
            if result:
                results[log] = {}
                rez = result.splitlines()
                for xrez in rez:
                    command = "grep \"\\"+str(xrez)+"\" -B "+str(lines_before)+" -A "+str(lines_after)+" "+str(log)
                    try:
                        (status, yrez) = self.target.run(command)
                    except:
                        pass
                    results[log][xrez]=yrez
        return results
    #get the output of dmesg and write it in a file. This file is added to log_locations.
    # NOTE(review): the path written here must stay in sync with the
    # /tmp/dmesg_output.log entry in the module-level log_locations list.
    def write_dmesg(self):
        (status, dmesg) = self.target.run("dmesg")
        (status, dmesg2) = self.target.run("echo \""+str(dmesg)+"\" > /tmp/dmesg_output.log")
    @skipUnlessPassed('test_ssh')
    def test_parselogs(self):
        # Capture dmesg, scan every known log for error keywords and fail
        # if any non-ignored error line is found, reporting each error
        # with its surrounding context.
        self.write_dmesg()
        log_list = self.getLogList(self.log_locations)
        result = self.parse_logs(self.errors, self.ignore_errors, log_list)
        print self.getHardwareInfo()
        errcount = 0
        for log in result:
            self.msg += "Log: "+log+"\n"
            self.msg += "-----------------------\n"
            for error in result[log]:
                errcount += 1
                self.msg += "Central error: "+str(error)+"\n"
                self.msg += "***********************\n"
                self.msg += result[str(log)][str(error)]+"\n"
                self.msg += "***********************\n"
        self.msg += "%s errors found in logs." % errcount
        self.assertEqual(errcount, 0, msg=self.msg)
|
papedaniel/oioioi | oioioi/disqualification/__init__.py | Python | gpl-3.0 | 149 | 0 | """This ap | plication provides a framework for disqualifying users for variou | s
reasons, as well as simple modeling of any custom disqualification.
"""
|
GaretJax/pop-analysis-suite | pas/bin/pas.py | Python | mit | 4,689 | 0.001493 | #!/usr/bin/env python
"""
Main command line script of the pas package.
The main function contained in this module is used ai main entry point for the
pas command line utility.
The script is automatically created by setuptool, but this file can be
directly invoked with `python path/to/pas.py` or directly if its executable
flag is set.
"""
import itertools
import logging
import logging.handlers
import os
import sys
# pylint: disable-msg=E0611
# I know relative imports are not the holy grail, but here we need them and
# it is a pylint bug not to recognized empty parent paths.
from .. import commands # Relative imports to avoid name clashing
from ..conf import settings # Relative imports to avoid name clashing
# pylint: enable-msg=E0611
# Reenable unknown name detection
from fabric.state import connections
# pylint: disable-msg=W0105
# Docstring for variables are not recognized by pylint, but epydoc parses them
LOGFILE = os.getenv('PAS_LOGFILE') or 'pas.log'
"""Logfile name, settable using the PAS_LOGFILE env variable"""
VERBOSITY = logging.INFO
"""Default verbosity for console output"""
def main():
"""
First function called upon command line invocation. Builds the command
line parser, parses the arguments, configures logging and invokes the
command.
"""
# Configure logging
file_formatter = logging.Formatter("%(asctime)s - %(levelname)10s - " \
"%(message)s (%(pathname)s:%(lineno)d)")
console_formatter = logging.Formatter("%(levelname)10s: %(message)s")
# All console output not explicitly directed to the user should be a log
# message instead
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(console_formatter)
console_handler.setLevel(20) # Don't show debug log messages until the
# verbosity is set
# Buffer the logging until no errors happen
buffered_handler = logging.handlers.MemoryHandler(9999, logging.CRITICAL)
# Capture all logging output and write it to the specified log file
file_handler = logging.FileHandler('pas.log', 'w', delay=True)
file_handler.setFormatter(file_formatter)
file_handler.setLevel(40)
logger = logging.getLogger()
logger.setLevel(1)
logger.addHandler(console_handler)
logger.addHandler(buffered_handler)
# Build base parser
| parser = commands.build_mainparser()
arguments = itertools.takewhile(lambda x: x.startswith('-'), sys.argv[1:])
arguments = (arg for arg in arguments if arg not in ('-h', '--help'))
command_line = sys.argv[:1] + list(arguments)
|
# Parse the base arguments (verbosity and settings)
args, remaining = parser.parse_known_args(command_line)
buffered_handler.setTarget(file_handler)
# Get the verbosity level
verbosity = max(1, VERBOSITY - 10 * (len(args.verbose) - len(args.quiet)))
console_handler.setLevel(verbosity)
file_handler.setLevel(1)
paramiko_logger = logging.getLogger('paramiko.transport')
paramiko_logger.setLevel(verbosity + 10)
# Load settings
try:
settings.loadfrompath(path=args.settings)
nosettings = False
except ImportError:
from ..conf import basesettings
settings.load(basesettings)
nosettings = True
# Build complete parser
parser = commands.build_subparsers(parser)
# Parse arguments
command = args = parser.parse_args()
res = 0
# Check that settings where loaded if needed
if not getattr(command.execute, 'nosettings', False) and nosettings:
logger.critical("This command requires the settings module to be " \
"present on path or defined using the " \
"PAS_SETTINGS_MODULE environment variable.")
res = 1
# Execute command
if not res:
res = command.execute(args)
# Cleanup fabric connections if needed
for key in connections.keys():
connections[key].close()
del connections[key]
# Check execution result
if res:
# ...an error occurred, write the logfile
buffered_handler.flush()
print
print "pas exited with a non-zero exit status (%d). A complete log " \
"was stored in the %s file." % (res, LOGFILE)
print
else:
# ...no errors occurred, avoid to flush the buffer
buffered_handler.setTarget(None)
# Need to close the buffered handler before sysexit is called or it will
# result in an exception
buffered_handler.close()
return res
# Standard script entry point: propagate main()'s status to the shell.
if __name__ == '__main__':
    sys.exit(main())
|
openstack/python-tripleoclient | tripleoclient/tests/v1/tripleo/test_tripleo_upgrade.py | Python | apache-2.0 | 5,026 | 0 | # Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from osc_lib.tests import utils
# Load the plugin init module for the plugin list and show commands
from tripleoclient import exceptions
from tripleoclient.v1 import tripleo_upgrade
class TestUpgrade(utils.TestCommand):
    """Tests for the standalone `tripleo upgrade` command."""

    def setUp(self):
        super(TestUpgrade, self).setUp()
        # Get the command object to test
        self.cmd = tripleo_upgrade.Upgrade(self.app, None)
        self.cmd.ansible_dir = '/tmp'
        self.ansible_playbook_cmd = "ansible-playbook"

    @mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
                return_value=True)
    @mock.patch('tripleoclient.v1.tripleo_deploy.Deploy.take_action',
                autospec=True)
    def test_take_action(self, mock_deploy, mock_confirm):
        # The upgrade must delegate to the deploy action with the
        # standalone/upgrade flags forced on.
        verifylist = [
            ('local_ip', '127.0.0.1'),
            ('templates', '/tmp/thtroot'),
            ('stack', 'undercloud'),
            ('output_dir', '/my'),
        ]
        parsed_args = self.check_parser(self.cmd,
                                        ['--local-ip', '127.0.0.1',
                                         '--templates', '/tmp/thtroot',
                                         '--stack', 'undercloud',
                                         '--output-dir', '/my',
                                         '-e', '/tmp/thtroot/puppet/foo.yaml',
                                         '-e', '/tmp/thtroot//docker/bar.yaml',
                                         '-e', '/tmp/thtroot42/notouch.yaml',
                                         '-e', '~/custom.yaml',
                                         '-e', 'something.yaml',
                                         '-e', '../../../outside.yaml'],
                                        verifylist)
        self.cmd.take_action(parsed_args)
        parsed_args.standalone = True
        parsed_args.upgrade = True
        mock_deploy.assert_called_with(self.cmd, parsed_args)

    @mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
                return_value=True)
    @mock.patch('tripleoclient.v1.tripleo_deploy.Deploy.take_action',
                autospec=True)
    def test_take_action_prompt(self, mock_deploy, mock_confirm):
        # Same flow, relying on the (mocked) confirmation prompt.
        parsed_args = self.check_parser(self.cmd,
                                        ['--local-ip', '127.0.0.1',
                                         '--templates', '/tmp/thtroot',
                                         '--stack', 'undercloud',
                                         '--output-dir', '/my',
                                         '-e', '/tmp/thtroot/puppet/foo.yaml',
                                         '-e', '/tmp/thtroot//docker/bar.yaml',
                                         '-e', '/tmp/thtroot42/notouch.yaml',
                                         '-e', '~/custom.yaml',
                                         '-e', 'something.yaml',
                                         '-e', '../../../outside.yaml'], [])
        self.cmd.take_action(parsed_args)
        # Fixed typo: was "standlone", which set a meaningless attribute
        # (test_take_action above uses "standalone").
        parsed_args.standalone = True
        parsed_args.upgrade = True
        mock_deploy.assert_called_with(self.cmd, parsed_args)

    @mock.patch('tripleoclient.utils.prompt_user_for_confirmation',
                return_value=False)
    @mock.patch('tripleoclient.v1.tripleo_deploy.Deploy',
                autospec=True)
    def test_take_action_prompt_no(self, mock_deploy, mock_confirm):
        # When the user declines the confirmation prompt the upgrade must
        # abort and never reach the deploy action.
        parsed_args = self.check_parser(self.cmd,
                                        ['--local-ip', '127.0.0.1',
                                         '--templates', '/tmp/thtroot',
                                         '--stack', 'undercloud',
                                         '--output-dir', '/my',
                                         '-e', '/tmp/thtroot/puppet/foo.yaml',
                                         '-e', '/tmp/thtroot//docker/bar.yaml',
                                         '-e', '/tmp/thtroot42/notouch.yaml',
                                         '-e', '~/custom.yaml',
                                         '-e', 'something.yaml',
                                         '-e', '../../../outside.yaml'], [])
        parsed_args.standalone = True  # fixed typo: was "standlone"
        parsed_args.upgrade = True
        self.assertRaises(exceptions.UndercloudUpgradeNotConfirmed,
                          self.cmd.take_action, parsed_args)
        mock_deploy.assert_not_called()
|
krimkus/stipplebot | solenoid.py | Python | unlicense | 1,215 | 0.000823 | #!/usr/bin/env python
"""
Deprecated since the solenoid was not strong enough.
Was using: https://www.sparkfun.com/products/11015
5v, 4.5mm throw
"""
# Import required libraries
import time
import RPi.GPIO as GPIO
# Use BC | M GPIO references
# instead of physical pin numbers
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
# Base class for a solenoid motor controller using a stepper ULN2003 controller
class SolenoidMotor(object):
# # Define GPIO signals to use
# Pin 13, GPIO27
pin = 27
wait_time = .01
wa | it_time = .1
def __init__(self, **kwargs):
for key in kwargs:
if hasattr(self.__class__, key):
setattr(self, key, kwargs[key])
# Set pin as output
print "Setup pin"
GPIO.setup(self.pin, GPIO.OUT)
GPIO.output(self.pin, False)
def tap(self):
GPIO.output(self.pin, True)
time.sleep(self.wait_time)
GPIO.output(self.pin, False)
def reset(self):
GPIO.output(self.pin, False)
if __name__ == "__main__":
    # Quick hardware smoke test: tap the solenoid eight times and print
    # the elapsed wall-clock time.
    pen = SolenoidMotor()
    start_time = time.time()
    for i in range(8):
        pen.tap()
        time.sleep(.09)
    print time.time() - start_time
    pen.reset()
|
ad-m/claw | tests/utils_test.py | Python | apache-2.0 | 242 | 0 | # -*- coding: utf-8 -*-
from nose.tools import *
from claw import util | s
def test_get_delimiter():
    # get_delimiter should prefer CRLF when present and default to LF.
    eq_('\r\n', utils.get_delimiter('abc\r\n123'))
    eq_('\n', utils.get_delimiter('abc\n123'))
    eq_('\n', utils.get_delimiter('abc'))
|
RKD314/yumstat | yumstat/oauth2client/clientsecrets.py | Python | mit | 4,405 | 0.006583 | # Copyright (C) 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for reading OAuth 2.0 client secret files.
A client_secrets.json file contains all the informat | ion needed to interact with
an OAuth 2.0 protected service.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
from anyjson import simplejson
# Properties that make a client_secrets.json file valid.
TYPE_WEB = 'web'
TYPE_INSTALLED = 'installed'
VALID_CLIENT = {
TYPE_WEB: {
'required': [
'client_id',
'client_secret',
'redirect_uris',
'auth_uri',
'token_uri',
| ],
'string': [
'client_id',
'client_secret',
],
},
TYPE_INSTALLED: {
'required': [
'client_id',
'client_secret',
'redirect_uris',
'auth_uri',
'token_uri',
],
'string': [
'client_id',
'client_secret',
],
},
}
class Error(Exception):
"""Base error for this module."""
pass
class InvalidClientSecretsError(Error):
"""Format of ClientSecrets file is invalid."""
pass
def _validate_clientsecrets(obj):
if obj is None or len(obj) != 1:
raise InvalidClientSecretsError('Invalid file format.')
client_type = obj.keys()[0]
if client_type not in VALID_CLIENT.keys():
raise InvalidClientSecretsError('Unknown client type: %s.' % client_type)
client_info = obj[client_type]
for prop_name in VALID_CLIENT[client_type]['required']:
if prop_name not in client_info:
raise InvalidClientSecretsError(
'Missing property "%s" in a client type of "%s".' % (prop_name,
client_type))
for prop_name in VALID_CLIENT[client_type]['string']:
if client_info[prop_name].startswith('[['):
raise InvalidClientSecretsError(
'Property "%s" is not configured.' % prop_name)
return client_type, client_info
def load(fp):
    """Read client secrets from a file-like object and validate them."""
    obj = simplejson.load(fp)
    return _validate_clientsecrets(obj)


def loads(s):
    """Parse client secrets from a JSON string and validate them."""
    obj = simplejson.loads(s)
    return _validate_clientsecrets(obj)


def _loadfile(filename):
    """Load and validate client secrets from *filename*.

    Raises:
        InvalidClientSecretsError: if the file is missing or invalid.
    """
    try:
        # open() instead of the Python-2-only file() builtin.
        fp = open(filename, 'r')
        try:
            obj = simplejson.load(fp)
        finally:
            fp.close()
    except IOError:
        raise InvalidClientSecretsError('File not found: "%s"' % filename)
    return _validate_clientsecrets(obj)
def loadfile(filename, cache=None):
    """Loading of client_secrets JSON file, optionally backed by a cache.

    Typical cache storage would be App Engine memcache service,
    but you can pass in any other cache client that implements
    these methods:
    - get(key, namespace=ns)
    - set(key, value, namespace=ns)

    Usage:
    # without caching
    client_type, client_info = loadfile('secrets.json')
    # using App Engine memcache service
    from google.appengine.api import memcache
    client_type, client_info = loadfile('secrets.json', cache=memcache)

    Args:
    filename: string, Path to a client_secrets.json file on a filesystem.
    cache: An optional cache service client that implements get() and set()
    methods. If not specified, the file is always being loaded from
    a filesystem.

    Raises:
    InvalidClientSecretsError: In case of a validation error or some
    I/O failure. Can happen only on cache miss.

    Returns:
    (client_type, client_info) tuple, as _loadfile() normally would.
    JSON contents is validated only during first load. Cache hits are not
    validated.
    """
    _SECRET_NAMESPACE = 'oauth2client:secrets#ns'
    if not cache:
        return _loadfile(filename)
    obj = cache.get(filename, namespace=_SECRET_NAMESPACE)
    if obj is None:
        client_type, client_info = _loadfile(filename)
        obj = {client_type: client_info}
        cache.set(filename, obj, namespace=_SECRET_NAMESPACE)
    # next(iter(obj.items())) works on Python 2 and 3; the original
    # obj.iteritems().next() is Python-2-only.
    return next(iter(obj.items()))
|
Linaro/lava-dispatcher | lava_dispatcher/actions/test/monitor.py | Python | gpl-2.0 | 8,107 | 0.00259 | # Copyright (C) 2014 Linaro Limited
#
# Author: Tyler Baker <tyler.baker@linaro.org>
#
# This file is part of LAVA Dispatcher.
#
# LAVA Dispatcher is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# LAVA Dispatcher is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along
# with this program; if not, see <http://www.gnu.org/licenses>.
import re
import pexpect
from collections import OrderedDict
from lava_dispatcher.action import (
InfrastructureError,
LAVABug,
Pipeline,
)
from lava_dispatcher.actions.test import (
TestAction,
)
from lava_dispatcher.logical import (
LavaTest,
RetryAction,
)
class TestMonitor(LavaTest):
    """
    LavaTestMonitor Strategy object
    """
    def __init__(self, parent, parameters):
        super(TestMonitor, self).__init__(parent)
        # Monitoring is implemented by a retry-wrapped action added to
        # the parent pipeline.
        self.action = TestMonitorRetry()
        self.action.job = self.job
        self.action.section = self.action_type
        parent.add_action(self.action, parameters)

    @classmethod
    def accepts(cls, device, parameters):
        # TODO: Add configurable timeouts
        # Every monitor entry must carry all of the keys below.
        required_parms = ['name', 'start',
                          'end', 'pattern']
        if 'monitors' in parameters:
            for monitor in parameters['monitors']:
                if all([x for x in required_parms if x in monitor]):
                    return True, 'accepted'
            return False, 'missing a required parameter from %s' % required_parms
        else:
            return False, '"monitors" not in parameters'

    @classmethod
    def needs_deployment_data(cls):
        # Monitoring reads raw console output; no deployment data needed.
        return False

    @classmethod
    def needs_overlay(cls):
        return False

    @classmethod
    def has_shell(cls):
        return False
class TestMonitorRetry(RetryAction):
    # Thin retry wrapper: the actual monitoring happens in
    # TestMonitorAction, added as the only child of the pipeline.

    name = "lava-test-monitor-retry"
    description = "Retry wrapper for lava-test-monitor"
    summary = "Retry support for Lava Test Monitoring"

    def populate(self, parameters):
        self.internal_pipeline = Pipeline(parent=self, job=self.job, parameters=parameters)
        self.internal_pipeline.add_action(TestMonitorAction())
class TestMonitorAction(TestAction):  # pylint: disable=too-many-instance-attributes
    """
    Sets up and runs the LAVA Test Shell Definition scripts.
    Supports a pre-command-list of operations necessary on the
    booted image before the test shell can be started.
    """

    name = "lava-test-monitor"
    description = "Executing lava-test-monitor"
    summary = "Lava Test Monitor"

    def __init__(self):
        super(TestMonitorAction, self).__init__()
        # Per-monitor state, (re)populated in run() for each entry in
        # parameters['monitors'].
        self.test_suite_name = None
        self.report = {}
        self.fixupdict = {}
        self.patterns = {}
    def run(self, connection, max_end_time, args=None):
        # Run each configured monitor in turn: wait for its start string,
        # then consume output until end/eof/timeout.
        connection = super(TestMonitorAction, self).run(connection, max_end_time, args)
        if not connection:
            raise InfrastructureError("Connection closed")
        for monitor in self.parameters['monitors']:
            self.test_suite_name = monitor['name']
            self.fixupdict = monitor.get('fixupdict')
            # pattern order is important because we want to match the end before
            # it can possibly get confused with a test result
            self.patterns = OrderedDict()
            self.patterns["eof"] = pexpect.EOF
            self.patterns["timeout"] = pexpect.TIMEOUT
            self.patterns["end"] = monitor['end']
            self.patterns["test_result"] = monitor['pattern']
            # Find the start string before parsing any output.
            connection.prompt_str = monitor['start']
            connection.wait()
            self.logger.info("ok: start string found, lava test monitoring started")
            with connection.test_connection() as test_connection:
                while self._keep_running(test_connection, timeout=test_connection.timeout):
                    pass
        return connection
def _keep_running(self, test_connection, timeout=120):
self.logger.debug("test monitoring timeout: %d seconds", timeout)
r | etval = test_connection.expect(list(self.patterns.values()), timeout=timeout)
return self.check_patterns(list(self.patterns.keys())[retval], test_connection)
    def check_patterns(self, event, test_connection):  # pylint: disable=too-many-branches
        """
        Defines the base set of pattern responses.
        Stores the results of testcases inside the TestAction
        Call from subclasses before checking subclass-specific events.
        """
        ret_val = False
        if event == "end":
            self.logger.info("ok: end string found, lava test monitoring stopped")
            self.results.update({'status': 'passed'})
        elif event == "timeout":
            self.logger.warning("err: lava test monitoring has timed out")
            self.errors = "lava test monitoring has timed out"
            self.results.update({'status': 'failed'})
        elif event == "test_result":
            self.logger.info("ok: test case found")
            match = test_connection.match.groupdict()
            if 'result' in match:
                # Normalise the raw result value through the optional
                # per-monitor fixupdict before validating it.
                if self.fixupdict:
                    if match['result'] in self.fixupdict:
                        match['result'] = self.fixupdict[match['result']]
                if match['result'] not in ('pass', 'fail', 'skip', 'unknown'):
                    self.logger.error("error: bad test results: %s", match['result'])
                else:
                    if 'test_case_id' in match:
                        case_id = match['test_case_id'].strip().lower()
                        # remove special characters to form a valid test case id
                        case_id = re.sub(r'\W+', '_', case_id)
                        self.logger.debug('test_case_id: %s', case_id)
                        results = {
                            'definition': self.test_suite_name.replace(' ', '-').lower(),
                            'case': case_id,
                            'level': self.level,
                            'result': match['result'],
                            'extra': {'test_case_id': match['test_case_id'].strip()}
                        }
                        if 'measurement' in match:
                            results.update({'measurement': match['measurement']})
                        if 'units' in match:
                            results.update({'units': match['units']})
                        self.logger.results(results)  # pylint: disable=no-member
            else:
                # No explicit result captured: a non-empty measurement with
                # a test case id is recorded as an implicit pass.
                if all(x in match for x in ['test_case_id', 'measurement']):
                    if match['measurement'] and match['test_case_id']:
                        case_id = match['test_case_id'].strip().lower()
                        # remove special characters to form a valid test case id
                        case_id = re.sub(r'\W+', '_', case_id)
                        self.logger.debug('test_case_id: %s', case_id)
                        results = {
                            'definition': self.test_suite_name.replace(' ', '-').lower(),
                            'case': case_id,
                            'level': self.level,
                            'result': 'pass',
                            'measurement': float(match['measurement']),
                            'extra': {'test_case_id': match['test_case_id'].strip()}
                        }
                        if 'units' in match:
                            results.update({'units': match['units']})
                        self.logger.results(results)  # pylint: disable=no-member
            # Only a test_result event keeps the monitoring loop running.
            ret_val = True
        return ret_val
|
alex/django-old | tests/regressiontests/requests/tests.py | Python | bsd-3-clause | 3,959 | 0.003031 | from datetime import datetime, timedelta
import time
import unittest
from django.http import HttpRequest, HttpResponse, parse_cookie
from django.core.handlers.wsgi import WSGIRequest
from django.core.handlers.modpython import ModPythonRequest
from django.utils.http import cookie_date
class RequestsTests(unittest.TestCase):
def test_httprequest(self):
request = HttpRequest()
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(request.META.keys(), [])
def test_wsgirequest(self):
request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus'})
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME']))
self.assertEqual(request.META['PATH_INFO'], 'bogus')
self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
self.assertEqual(request.META['SCRIPT_NAME'], '')
def test_modpythonrequest(self):
class FakeModPythonRequest(ModPythonRequest):
def __init__(self, *args, **kwargs):
super(FakeModPythonRequest, self).__init__(*args, **kwargs)
self._get = self._post = self._meta = self._cookies = {}
class Dummy:
def get_options(self):
return {}
req = Dummy()
req.uri = 'bogus'
request = FakeModPythonRequest(req)
self.assertEqual(request.path, 'bogus')
self.assertEqual(request.GET.keys(), [])
self.assertEqual(request.POST.keys(), [])
self.assertEqual(request.COOKIES.keys(), [])
self.assertEqual(request.META.keys(), [])
def test_parse_cookie(self):
self.assertEqual(parse_cookie('invalid:key=true'), {})
def test_httprequest_location(self):
request = HttpRequest()
self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
'https://www.example.com/asdf')
request.get_host = lambda: 'www.example.com'
request.path = ''
self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
'http://www.example.com/path/with:colons')
def test_near_expiration(self):
"Cookie will expire when an near expiration time is provided"
response = HttpResponse()
# There is a timing weakness in this test; The
# expected result for max-age requires that there be
# a very slight difference between the evaluated expiration
# time, and the time evaluated in set_cookie(). If this
# difference doesn't exist, the cookie time will be
# 1 second larger. To avoid the problem, put in a quick sleep,
# which guarantees that there will be a time difference.
expires = datetime.utcnow() + timedelta(seconds=10)
time.sleep(0.001)
response.set_cookie('datetime', expires=expires)
| datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['max-age'], 10)
def test_far_expiration(self):
"Cookie will expire when an distant expiration time is provided"
response = HttpResponse | ()
response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')
def test_max_age_expiration(self):
"Cookie will expire if max_age is provided"
response = HttpResponse()
response.set_cookie('max_age', max_age=10)
max_age_cookie = response.cookies['max_age']
self.assertEqual(max_age_cookie['max-age'], 10)
self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))
|
nicfit/Clique | clique/app/keygen.py | Python | lgpl-3.0 | 2,677 | 0 | # -*- coding: utf-8 -*-
import json
import nicfit
from pathlib import Path
from .utils import prompt
from ..common import thumbprint, CLIQUE_D
from .. import Identity, keystore
DEFAULT_KEYFILE = None
@nicfit.command.register
class keygen(nicfit.Command):
HELP = "Generate Clique (i.e. EC 256) encryption keys."
def _initArgParser(self, parser):
global DEFAULT_KEYFILE
DEFAULT_KEYFILE = CLIQUE_D / "key"
parser.add_argument(
"-f", dest="ofile", default=None, metavar="output_file",
help="Output file for public (.pub) and private key.")
parser.add_argument(
"-c", "--comment", default=None,
help="An optional comment to include in the key.")
parser.add_argument(
"--compact", action="store_true",
help="Output the keys in compact format.")
def _run(self):
keyfile = self.args.ofile or \
prompt("Enter file in which to save the key ({}): "
.format(DEFAULT_KEYFILE), default=DEFAULT_KEYFILE)
keyfile = Path(keyfile).expanduser()
i | f keyfile.exists():
print("{} already exists.".format(keyfile))
if prompt("Overwrite (y/n)? ") != "y":
return 0
keyfile_pub = Path(str(keyfile) + ".pub")
indent = 2 if not self.args.compact else None
p | rint("Generating public/private P256 key pair.")
jwk = Identity.generateKey()
# TODO: Adding comments needs to be supported natively else it is too
# easy to lose them along the way.
'''
cmt = self.args.comment or prompt("Comment (optional): ", default=None)
if cmt:
jwk._params["cmt"] = cmt
prv_json = json.loads(jwk.export())
pub_json = json.loads(jwk.export_public())
if cmt:
prv_json["cmt"] = pub_json["cmt"] = cmt
'''
for kfile, kstr in [(keyfile, jwk.export()),
(keyfile_pub, jwk.export_public())]:
with open(str(kfile), "w") as fp:
fp.write(json.dumps(json.loads(kstr), indent=indent,
sort_keys=True))
fp.write("\n")
print("Your private key have been saved in {}".format(keyfile))
print("Your public key have been saved in {}".format(keyfile_pub))
if self.args.server:
print("## Uploading public key to " + self.args.server)
tprint = keystore().upload(jwk)
print("## Key URL: {}/keys/{}".format(self.args.server, tprint))
print("The key fingerprint is:\n{}".format(thumbprint(jwk)))
|
liampauling/flumine | tests/test_responses.py | Python | mit | 1,654 | 0 | import unittest
from unittest import mock
from flumine.order.responses import Responses
class ResponsesTest(unittest.TestCase):
def setUp(self):
self.responses = Responses()
def test_init(self):
self.assertIsNotNone(self.responses.date_time_created)
self.assertIsNone(self.responses.place_response)
self.assertEqual(self.responses.cancel_responses, [])
self.assertEqual(self.responses.replace_responses, [])
self.assertEqual(self.responses.cancel_responses, [])
self.assertIsNone(self.responses._date_time_placed)
self.assertIsNone(self.responses.current_order)
def test_placed(self):
self.responses.placed(12)
self.assertIsNotNone(self.responses._date_time_placed)
self.assertEqual(self.responses.place_response, 12)
def test_cancelled(self):
self.responses.cancelled(12)
self.assertEqual(self.responses.cancel_responses, [12])
def test_replaced(self):
self.responses.replaced(12)
# self.assertIsNotNone(self.responses.date_time_placed)
self.assertEqual(self.responses.replace_responses, [12])
def test_updated(self):
self.responses.updated(12)
self.assertEqual(self.responses.update_responses, [12])
|
def test_date_time_placed(self):
self.assertIsNone(self.responses.date_time_placed)
self.responses._date_time_placed = 1
self.responses.current_order = mock.Mock(placed_date=2)
self.assertEqual(self.responses.date_time_p | laced, 1)
self.responses._date_time_placed = None
self.assertEqual(self.responses.date_time_placed, 2)
|
jboeuf/grpc | examples/python/data_transmission/server.py | Python | apache-2.0 | 4,492 | 0.001205 | # Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The example of four ways of data transmission using gRPC in Python."""
from threading import Thread
from concurrent import futures
import grpc
import demo_pb2_grpc
import demo_pb2
__all__ = 'DemoServer'
SERVER_ADDRESS = 'localhost:23333'
SERVER_ID = 1
class DemoServer(demo_pb2_grpc.GRPCDemoServicer):
# 一元模式(在一次调用中, 客户端只能向服务器传输一次请求数据, 服务器也只能返回一次响应)
# unary-unary(In a single call, the client can only send request once, and the server can
# only respond once.)
def SimpleMethod(self, request, context):
print("SimpleMethod called by client(%d) the message: %s" %
(request.client_id, request.request_data))
response = demo_pb2.Response(
server_id=SERVER_ID,
response_data="Python server SimpleMethod Ok!!!!")
return response
# 客户端流模式(在一次调用中, 客户端可以多次向服务器传输数据, 但是服务器只能返回一次响应)
# stream-unary (In a single call, the client can transfer data to the server several times,
# but the server can only return a response once.)
def ClientStreamingMethod(self, request_iterator, context):
print("ClientStreamingMethod called by client...")
for request in request_iterator:
print("recv from client(%d), message= %s" %
(request.client_id, request.request_data))
response = demo_pb2.Response(
server_id=SERVER_ID,
response_data="Python server ClientStreamingMethod ok")
return response
# 服务端流模式(在一次调用中, 客户端只能一次向服务器传输数据, 但是服务器可以多次返回响应)
# unary-stream (In a single call, the client can only transmit data to the server at one time,
# but the server can return the response many times.)
def ServerStreamingMethod(self, request, context):
print("ServerStreamingMethod called by client(%d), message= %s" %
(request.client_id, request.request_data))
# 创建一个生成器
# create a generator
def response_messages():
for i in range(5):
response = demo_pb2.Response(
server_id=SERVER_ID,
response_data=("send by Python server, message=%d" % i))
yield response
return response_messages()
# 双向流模式 (在一次调用中, 客户端和服务器都可以向对方多次收发数据)
# stream-stream (In a single call, both client and server can send and receive data
# to each other multiple times.)
def BidirectionalStreamingMethod(self, request_iterator, context):
print("BidirectionalStreamingMethod called by client...")
# 开启一个子线程去接收数据
# Open a sub thread to receive data
def parse_request():
for request in request_iterator:
print("recv from c | lient(%d), message= %s" %
(request.client_id, request.request_data))
t = Thread(target=parse_request)
t.start()
for i in ran | ge(5):
yield demo_pb2.Response(
server_id=SERVER_ID,
response_data=("send by Python server, message= %d" % i))
t.join()
def main():
server = grpc.server(futures.ThreadPoolExecutor())
demo_pb2_grpc.add_GRPCDemoServicer_to_server(DemoServer(), server)
server.add_insecure_port(SERVER_ADDRESS)
print("------------------start Python GRPC server")
server.start()
server.wait_for_termination()
# If raise Error:
# AttributeError: '_Server' object has no attribute 'wait_for_termination'
# You can use the following code instead:
# import time
# while 1:
# time.sleep(10)
if __name__ == '__main__':
main()
|
DBuildService/atomic-reactor | tests/plugins/test_group_manifests.py | Python | bsd-3-clause | 23,328 | 0.001715 | """
Copyright (c) 2017, 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from functools import partial
import pytest
import json
import re
import responses
from tempfile import mkdtemp
import os
import requests
from collections import OrderedDict
from tests.constants import SOURCE, MOCK, DOCKER0_REGISTRY
from tests.stubs import StubInsideBuilder
from atomic_reactor.core import DockerTasker
from atomic_reactor.build import BuildResult
from atomic_reactor.plugin import PostBuildPluginsRunner, PluginFailedException
from atomic_reactor.inner import DockerBuildWorkflow, TagConf
from atomic_reactor.util import (registry_hostname, ManifestDigest, get_floating_images,
get_primary_images, sha256sum)
from atomic_reactor.plugins.post_group_manifests import GroupManifestsPlugin
from atomic_reactor.plugins.pre_reactor_config import (ReactorConfigPlugin,
WORKSPACE_CONF_KEY,
ReactorConfig)
from osbs.utils import ImageName
if MOCK:
from tests.docker_mock import mock_docker
def to_bytes(value):
if isinstance(value, bytes):
return value
else:
return value.encode('utf-8')
def to_text(value):
if isinstance(value, str):
return value
else:
return str(value, 'utf-8')
make_digest = partial(sha256sum, abbrev_len=10, prefix=True)
class MockRegistry(object):
"""
This class mocks a subset of the v2 Docker Registry protocol
"""
def __init__(self, registry):
self.hostname = registry_hostname(registry)
self.repos = {}
self._add_pattern(responses.GET, r'/v2/(.*)/manifests/([^/]+)',
| self._get_manifest)
self._add_pattern(responses.HEAD, r'/v2/(.*)/manifests/([^/]+)',
| self._get_manifest)
self._add_pattern(responses.PUT, r'/v2/(.*)/manifests/([^/]+)',
self._put_manifest)
self._add_pattern(responses.GET, r'/v2/(.*)/blobs/([^/]+)',
self._get_blob)
self._add_pattern(responses.HEAD, r'/v2/(.*)/blobs/([^/]+)',
self._get_blob)
self._add_pattern(responses.POST, r'/v2/(.*)/blobs/uploads/\?mount=([^&]+)&from=(.+)',
self._mount_blob)
def get_repo(self, name):
return self.repos.setdefault(name, {
'blobs': {},
'manifests': {},
'tags': {},
})
def add_blob(self, name, blob):
repo = self.get_repo(name)
digest = make_digest(blob)
repo['blobs'][digest] = blob
return digest
def get_blob(self, name, digest):
return self.get_repo(name)['blobs'][digest]
def add_manifest(self, name, ref, manifest):
repo = self.get_repo(name)
digest = make_digest(manifest)
repo['manifests'][digest] = manifest
if ref.startswith('sha256:'):
assert ref == digest
else:
repo['tags'][ref] = digest
return digest
def get_manifest(self, name, ref):
repo = self.get_repo(name)
if not ref.startswith('sha256:'):
ref = repo['tags'][ref]
return repo['manifests'][ref]
def _add_pattern(self, method, pattern, callback):
pat = re.compile(r'^https://' + self.hostname + pattern + '$')
def do_it(req):
status, headers, body = callback(req, *(pat.match(req.url).groups()))
if method == responses.HEAD:
return status, headers, ''
else:
return status, headers, body
responses.add_callback(method, pat, do_it, match_querystring=True)
def _get_manifest(self, req, name, ref):
repo = self.get_repo(name)
if not ref.startswith('sha256:'):
try:
ref = repo['tags'][ref]
except KeyError:
return (requests.codes.NOT_FOUND, {}, b"{'error': 'NOT_FOUND'}")
try:
blob = repo['manifests'][ref]
except KeyError:
return (requests.codes.NOT_FOUND, {}, {'error': 'NOT_FOUND'})
decoded = json.loads(to_text(blob))
content_type = decoded['mediaType']
accepts = re.split(r'\s*,\s*', req.headers['Accept'])
assert content_type in accepts
headers = {
'Docker-Content-Digest': ref,
'Content-Type': content_type,
'Content-Length': str(len(blob)),
}
return (200, headers, blob)
def _put_manifest(self, req, name, ref):
try:
json.loads(to_text(req.body))
except ValueError:
return (400, {}, {'error': 'BAD_MANIFEST'})
self.add_manifest(name, ref, req.body)
return (200, {}, '')
def _get_blob(self, req, name, digest):
repo = self.get_repo(name)
assert digest.startswith('sha256:')
try:
blob = repo['blobs'][digest]
except KeyError:
return (requests.codes.NOT_FOUND, {}, {'error': 'NOT_FOUND'})
headers = {
'Docker-Content-Digest': digest,
'Content-Type': 'application/json',
'Content-Length': str(len(blob)),
}
return (200, headers, blob)
def _mount_blob(self, req, target_name, digest, source_name):
source_repo = self.get_repo(source_name)
target_repo = self.get_repo(target_name)
try:
target_repo['blobs'][digest] = source_repo['blobs'][digest]
headers = {
'Location': '/v2/{}/blobs/{}'.format(target_name, digest),
'Docker-Content-Digest': digest,
}
return (201, headers, '')
except KeyError:
headers = {
'Location': '/v2/{}/blobs/uploads/some-uuid'.format(target_name),
'Docker-Upload-UUID': 'some-uuid',
}
return (202, headers, '')
def mock_registries(registries, config, schema_version='v2', foreign_layers=False,
manifest_list_tag=None):
"""
Creates MockRegistries objects and fills them in based on config, which specifies
which registries should be prefilled (as if by workers) with platform-specific
manifests, and with what tags.
"""
reg_map = {}
for reg in registries:
reg_map[reg] = MockRegistry(reg)
worker_builds = {}
manifest_list = {
"schemaVersion": 2,
"mediaType": "application/vnd.docker.distribution.manifest.list.v2+json",
"manifests": [
{
"platform": {
"os": "linux",
"architecture": "amd64"
},
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
"digest": make_digest('v2digest-amd64'),
# 'size': required by spec, skipped for test
}
]
}
for platform, regs in config.items():
digests = []
for reg, tags in regs.items():
registry = reg_map[reg]
layer_digest = make_digest('layer-' + platform)
config_digest = make_digest('config-' + platform)
if schema_version == 'v2':
manifest = {
'schemaVersion': 2,
'mediaType': 'application/vnd.docker.distribution.manifest.v2+json',
'config': {
'mediaType': 'application/vnd.docker.container.image.v1+json',
'digest': config_digest,
# 'size': required by spec, skipped for test
},
'layers': [{
'mediaType': 'application/vnd.docker.image.rootfs.diff.tar.gzip',
'digest': layer_digest,
# 'size': required, skipped for test
}]
}
if foreign_layers:
manifest['layers' |
Ophiuchus1312/enigma2-master | lib/python/Components/VolumeControl.py | Python | gpl-2.0 | 2,618 | 0.030558 | from enigma import eDVBVolumecontrol, eTimer
from Tools.Profile import profile
from Screens.Volume import Volume
from Screens.Mute import Mute
from GlobalActions import globalActionMap
from config import config, ConfigSubsection, ConfigInteger
from Components.HdmiCec import HdmiCec
profile("VolumeControl")
#TODO .. move this to a own .py file
class VolumeControl:
instance = None
"""Volume control, handles volUp, volDown, volMute actions and display
a corresponding dialog"""
def __init__(self, session):
global globalActionMap
globalActionMap.actions["volumeUp"]=self.volUp
globalActionMap.actions["volumeDown"]=self.volDown
globalActionMap.actions["volumeMute"]=self.volMute
assert not VolumeControl.instance, "only one VolumeControl instance is allowed!"
VolumeControl.instanc | e = self
config.audio = ConfigSubsection()
config.audio.volume = ConfigInteger(default = 50, limits = (0, 100))
self.volumeDialog = session.instantiateDialog(Volume)
self.muteDialog = session.instantiateDialog(Mute)
self.hideVolTimer = eTimer()
self.hideVolTimer.callback.append(self.volHide)
vol = config.audio.volume.getVa | lue()
self.volumeDialog.setValue(vol)
self.volctrl = eDVBVolumecontrol.getInstance()
self.volctrl.setVolume(vol, vol)
def volSave(self):
if self.volctrl.isMuted():
config.audio.volume.setValue(0)
else:
config.audio.volume.setValue(self.volctrl.getVolume())
config.audio.volume.save()
def volUp(self):
if HdmiCec.instance.volumeForwardingEnabled: return
self.setVolume(+1)
def volDown(self):
if HdmiCec.instance.volumeForwardingEnabled: return
self.setVolume(-1)
def setVolume(self, direction):
oldvol = self.volctrl.getVolume()
if direction > 0:
self.volctrl.volumeUp()
else:
self.volctrl.volumeDown()
is_muted = self.volctrl.isMuted()
vol = self.volctrl.getVolume()
self.volumeDialog.show()
if is_muted:
self.volMute() # unmute
elif not vol:
self.volMute(False, True) # mute but dont show mute symbol
if self.volctrl.isMuted():
self.volumeDialog.setValue(0)
else:
self.volumeDialog.setValue(self.volctrl.getVolume())
self.volSave()
self.hideVolTimer.start(3000, True)
def volHide(self):
self.volumeDialog.hide()
def volMute(self, showMuteSymbol=True, force=False):
if HdmiCec.instance.volumeForwardingEnabled: return
vol = self.volctrl.getVolume()
if vol or force:
self.volctrl.volumeToggleMute()
if self.volctrl.isMuted():
if showMuteSymbol:
self.muteDialog.show()
self.volumeDialog.setValue(0)
else:
self.muteDialog.hide()
self.volumeDialog.setValue(vol)
|
gamernetwork/gn-django | gn_django/db/db_routers.py | Python | mit | 1,915 | 0.004178 | from gn_django.exceptions import ImproperlyConfigured
class AppsRouter:
"""
A router to route DB operations for one or more django apps to a particular
database.
Requires class attributes to be specified:
- `APPS` - an iterable of django app labels
- `DB_NAME` - a string for the DB to route operations to
"""
APPS = []
DB_NAME = None
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
no_apps = not self.APPS
no_db_specified = not self.DB_NAME
if no_apps or no_db_specified:
message = "There was no `APPS` attribute specified for the db router '%s'" % (self.__class__.__name__)
if no_db_specified:
| message = "There was no `DB_NAME` attribute specified for the db router '%s'" % (self.__class__.__name__)
raise ImproperlyConfigured(message)
def db_for_read(self, model, **hints):
"""
Attempts to read app models route to the named DB.
"""
if model._meta.app_label in self.APPS:
return self.DB_NAME
return None
def db_for_write(self, model, **hints) | :
"""
Attempts to write app models go to the named DB.
"""
if model._meta.app_label in self.APPS:
return self.DB_NAME
return None
def allow_relation(self, obj1, obj2, **hints):
"""
Allow relations if a model in any of the apps this router manages is involved.
"""
if obj1._meta.app_label in self.APPS and \
obj2._meta.app_label in self.APPS:
return True
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
"""
Make sure the apps' DB tables are only created in the named database.
"""
if app_label in self.APPS:
return db == self.DB_NAME
return None
|
mbalasso/mynumpy | numpy/lib/tests/test_utils.py | Python | bsd-3-clause | 1,046 | 0.008604 | from numpy.testing import *
import numpy.lib.utils as utils
from numpy.lib import deprecate
from StringIO import StringIO
def test_lookf | or():
out = StringIO()
utils.lookfor('eigenvalue', module='numpy', output=out,
import_modules=False)
out = out.getvalue()
assert_('numpy.linalg.eig' in out)
@deprecate
def old_func(self, x):
return x
@deprecate(message="Rather use new_func2")
def old_func2(self, x):
return x
def old_func3(self, x):
return x
new_func3 = depre | cate(old_func3, old_name="old_func3", new_name="new_func3")
def test_deprecate_decorator():
assert_('deprecated' in old_func.__doc__)
def test_deprecate_decorator_message():
assert_('Rather use new_func2' in old_func2.__doc__)
def test_deprecate_fn():
assert_('old_func3' in new_func3.__doc__)
assert_('new_func3' in new_func3.__doc__)
def test_safe_eval_nameconstant():
# Test if safe_eval supports Python 3.4 _ast.NameConstant
utils.safe_eval('None')
if __name__ == "__main__":
run_module_suite()
|
luogangyi/Ceilometer-oVirt | build/lib/ceilometer/tests/mytest.py | Python | apache-2.0 | 307 | 0.013029 | #!/usr/bin/python
# PBR Generated from u'console_scripts'
import sys
|
#from ceilometer.cmd.agent_compute import main
#from ceilometer.cmd.agent_notification import main
#from ceilometer.cmd.coll | ector import main
from ceilometer.cmd.agent_central import main
if __name__ == "__main__":
sys.exit(main()) |
dimagi/django-digest | django_digest/test/methods/basic.py | Python | bsd-3-clause | 744 | 0.001344 | from __future__ import absolute_import
from __future__ import unicode_literals
from base64 import b64encode
from django_digest.test.methods imp | ort WWWAuthenticateError, BaseAuth
class BasicAuth(BaseAuth):
def authorization(self, request, response):
if response is not None:
challenges = self._authenticate_headers(response)
if 'Basic' not in challenges:
raise WWWAuthenticateError(
'Basic authentication unsupported for %s to %r.' %
(response.req | uest['REQUEST_METHOD'],
response.request['PATH_INFO'])
)
return 'Basic %s' % b64encode((self.username + ':' + self.password).encode('utf-8')).decode('utf-8')
|
onepesu/django_transmission | settings/common.py | Python | mit | 93 | 0 | from set | tings.django import *
from settings.constants import *
from se | ttings.emails import *
|
indashnet/InDashNet.Open.UN2000 | android/external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/lint_test_expectations_unittest.py | Python | apache-2.0 | 6,145 | 0.001627 | # Copyright (C) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import optparse
import StringIO
import webkitpy.thirdparty.unittest2 as unittest
from webkitpy.common.host_mock import MockHost
from webkitpy.layout_tests import lint_test_expectations
class FakePort(object):
def __init__(self, host, name, path):
self.host = host
self.name = name
self.path = path
def test_configuration(self):
return None
def expectations_dict(self):
self.host.ports_parsed.append(self.name)
return {self.path: ''}
def bot_expectations(self):
return {}
def skipped_layout_tests(self, _):
return set([])
def all_test_configurations(self):
return []
def configuration_specifier_macros(self):
return []
def get_option(self, _, val):
return val
def path_to_generic_test_expectations_file(self):
return ''
class FakeFactory(object):
def __init__(self, host, ports):
self.host = host
self.ports = {}
for port in ports:
self.ports[port.name] = port
def get(self, port_name, *args, **kwargs): # pylint: disable=W0613,E0202
return self.ports[port_name]
def all_port_names(self, platform=None): # pylint: disable=W0613,E0202
return sorted(self.ports.keys())
class LintTest(unittest.TestCase):
def test_all_configurations(self):
host = MockHost()
host.ports_parsed = []
host.port_factory = FakeFactory(host, (FakePort(host, 'a', 'path-to-a'),
FakePort(host, 'b', 'path-to-b'),
FakePort(host, 'b-win', 'path-to-b')))
logging_stream = StringIO.StringIO()
options = optparse.Values({'platform': None})
res = lint_test_expectations.lint(host, options, logging_stream)
self.assertEqual(res, 0)
self.assertEqual(host.ports_parsed, ['a', 'b', 'b-win'])
def test_lint_test_files(self):
logging_stream = StringIO.StringIO()
options = optparse.Values({'platform': 'test-mac-leopard'})
host = MockHost()
# pylint appears to complain incorrectly about the method overrides pylint: disable=E0202,C0322
# FIXME: incorrect complaints about spacing pylint: disable=C0322
host.port_factory.all_port_names = lambda platform=None: [platform]
res = lint_test_expectations.lint(host, options, logging_stream)
self.assertEqual(res, 0)
self.assertIn('Lint succeeded', logging_stream.getvalue())
def test_lint_test_files__errors(self):
options = optparse.Values({'platform': 'test | ', 'debug_rwt_logging': False})
host = MockHost()
# FIXME: incorrect complaints about spacing pylint: disable=C0322
port = host.port_factory.get(options.platform, options=options)
port.expectations_dict = lambda: {'foo': '-- syntax error1', 'bar': '-- syntax error2'}
host.port_factory.get = lambda platform, options=None: port
host.port_factory.all_port_names = lambda platform=None: [port.name()]
logging_stream = StringIO.String | IO()
res = lint_test_expectations.lint(host, options, logging_stream)
self.assertEqual(res, -1)
self.assertIn('Lint failed', logging_stream.getvalue())
self.assertIn('foo:1', logging_stream.getvalue())
self.assertIn('bar:1', logging_stream.getvalue())
class MainTest(unittest.TestCase):
def test_success(self):
orig_lint_fn = lint_test_expectations.lint
# unused args pylint: disable=W0613
def interrupting_lint(host, options, logging_stream):
raise KeyboardInterrupt
def successful_lint(host, options, logging_stream):
return 0
def exception_raising_lint(host, options, logging_stream):
assert False
stdout = StringIO.StringIO()
stderr = StringIO.StringIO()
try:
lint_test_expectations.lint = interrupting_lint
res = lint_test_expectations.main([], stdout, stderr)
self.assertEqual(res, lint_test_expectations.INTERRUPTED_EXIT_STATUS)
lint_test_expectations.lint = successful_lint
res = lint_test_expectations.main(['--platform', 'test'], stdout, stderr)
self.assertEqual(res, 0)
lint_test_expectations.lint = exception_raising_lint
res = lint_test_expectations.main([], stdout, stderr)
self.assertEqual(res, lint_test_expectations.EXCEPTIONAL_EXIT_STATUS)
finally:
lint_test_expectations.lint = orig_lint_fn
|
popoffka/ponydraw | server/storage/FileStorage.py | Python | mit | 1,715 | 0.030338 | # -*- coding: utf-8 -*-
# © 2012 Aleksejs Popovs <me@popoffka.ru>
# Licensed under MIT License. See ../LICENSE for more info.
from BaseStorage import BaseStorage
try:
import cPickle as pickle
except ImportError:
import pickle
import os
class FileStorage(BaseStorage):
def __init__(self):
self.path = None
self.isOpen = False
BaseStorage.__init__(self)
def open(self, path):
if self.isOpen:
raise Exception('I\'m already open')
self.path = path
try:
os.listdir(self.path)
except:
try:
os.makedirs(self.path)
except:
raise Exception('Couldn\'t access/create specified directory')
if '.lock' in os.listdir(self.path):
raise Exception('Directory seems to be locked')
else:
try:
lock = open(self.path + os.sep + '.lock', 'w')
lock.write('ponydraw')
lock.close()
except IOError:
raise Exception('No write access to the directory')
self.isOpen = True
def close(self):
if not self.isOpen:
raise Exception('FileStorage is not open')
os.remove(self.path + os.sep + '.lock')
self.isOpen = False
def roomInStorage(self, roomName):
if not self.isOpen:
raise Exception('FileStorage is not open')
return (roomName + os.extsep + 'pdd') in os.listdir(self.path)
|
def saveRoom(self, room):
if not self.isOpen:
raise Exception('File | Storage is not open')
f = open(self.path + os.sep + room.name + os.extsep + 'pdd', 'wb')
pickle.dump(room, f, -1)
def getRoom(self, roomName):
if not self.isOpen:
raise Exception('FileStorage is not open')
if not self.roomInStorage(roomName):
raise Exception('No such room in storage')
f = open(self.path + os.sep + roomName + os.extsep + 'pdd', 'rb')
return pickle.load(f)
|
vcavallo/letsencrypt | letsencrypt/tests/achallenges_test.py | Python | apache-2.0 | 1,096 | 0 | """Tests for letsencrypt.achallenges."""
import unittest
import mock
from acme import challenges
from acme import jose
from letsencrypt.tests import acme_util
from letsencrypt.tests import test_util
class DVSNITest(unittest.TestCase):
"""Tests for letsencrypt.achallenges.DVSNI."""
def setUp(self):
self.challb = acme_util.chall_to_challb(acme_util.DVSNI, "pending")
account = mock.Mock(key=jose.JWKRSA.load(
test_util.load_vector("rsa512_key.pem")))
from letsencrypt.achallenges import DVSNI
self.achall = DVSNI(
challb=self.challb, domain="example.com", account=account)
def test_proxy(self):
self.assertEqual(self.challb.token, self. | achall.token)
def test_gen_cert_and_response(self):
response, cert_pem, key_pem = self.achall | .gen_cert_and_response()
self.assertTrue(isinstance(response, challenges.DVSNIResponse))
self.assertTrue(isinstance(cert_pem, bytes))
self.assertTrue(isinstance(key_pem, bytes))
if __name__ == "__main__":
unittest.main() # pragma: no cover
|
NendoTaka/CodeForReference | CodeWars/8kyu/planetName.py | Python | mit | 231 | 0.008658 | def get_planet_name(id):
switch = {
| 1: "Mercury",
2: "Venus",
3: "Earth",
4: "Mars",
5: "Jupiter",
| 6: "Saturn",
7: "Uranus" ,
8: "Neptune"}
return switch[id]
|
vicyangworld/AutoOfficer | CmdFormat.py | Python | mit | 2,619 | 0.0126 | import ctypes
import os
STD_INPUT_HANDLE = -10
STD_OUTPUT_HANDLE= -11
STD_ERROR_HANDLE = -12
FOREGROUND_BLACK = 0x0
FOREGROUND_BLUE = 0x01 # text color contains blue.
FOREGROUND_GREEN= 0x02 # text color contains green.
FOREGROUND_RED = 0x04 # text color contains red.
FOREGROUND_INTENSITY = 0x08 # text color is intensified.
BACKGROUND_BLUE = 0x10 # background color contains blue.
BACKGROUND_GREEN= 0x20 # background color contains green.
BACKGROUND_RED = 0x40 # background color contains red.
BACKGROUND_INTENSITY = 0x80 # background color is intensified.
class CmdFormat(object):
"""docstring for CmdFormat"""
std_out_handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
def __init__(self, WinTitle="Console Window",\
color=FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY,\
):
super( CmdFormat, self).__init__()
self.WinTitle = WinTitle
os.system("title " + WinTitle)
def set_cmd_color(self, color, handle=std_out_handle):
bool = ctypes.windll.kernel32.SetConsoleTextAttribute(handle, color)
return bool
def reset_color(self):
self.set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY)
def print_white_text(self,print_text,end='\n'):
self.reset_color()
print(print_text,end=end)
def print_red_text(self, print_text,end='\n'):
self.set_cmd_color(4 | 8)
print(print_text,end=end)
self.reset_color()
def print_green_input_text(self, print_text):
self.set_cmd_color(FOREGROUND_GREEN | FOREGROUND_INTENSITY)
c = input(print_text)
self.reset_color()
return c
def print_green_text(self, print_text,end='\n'):
self.set_cmd_color(FOREGROUND_GREEN | FOREGROUND_INTENSITY)
print(print_text,end=end)
self.reset_color()
def print_yellow_text(self, print_text, | end='\n'):
self.set_cmd_color(6 | 8)
print(print_text,end=end)
self.reset_color()
def print_blue_text(self, print_text,end='\n'):
self.set_cmd_color(1 | 10)
print(print_text,end=end)
self.reset_color()
if __name__ == '__main__':
clr = CmdFormat("Window Title")
| clr.set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY)
clr.print_red_text('red')
clr.print_green_text("green")
clr.print_green_input_text("input: ")
clr.print_blue_text('blue')
clr.print_yellow_text('yellow')
input() |
overfl0/Bulletproof-Arma-Launcher | tests/utils/event_bridge_test.py | Python | gpl-3.0 | 1,873 | 0.004805 | # Bulletproof Arma Launcher
# Copyright (C) 2016 Sascha Ebert
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import unittest
import time
import os
import shutil
import sys
import json
from multiprocessing import Pipe
from datetime import datetime
from datetime import timedelta
from mock import patch, Mock
from kivy.clock import Clock
from nose.plugins.attrib import attr
from utils.process import Process
def worker_func(con):
con.send('test1')
con.send('test2')
class EventBridgeTest(unittest.TestCase):
def setUp(self):
# To fix the Windows forking system it's necessary to point __main__ to
# the module we want to execute in the forked process
self.old_main = sys.modules["__main__"]
self.old_main_file = sys.modules["__main__"].__file__
sys.modules["__main__"] = sys.modules["tests.utils.event_bridge_test"]
sys.modules["__main__"].__file__ = sys.modules["tests.utils.event_bridge_test"].__file__
| def tearDown(self):
sys.modules["__main__"] = self.old_main
sys.modules[" | __main__"].__file__ = self.old_main_file
def test_connection_can_hold_more_than_one_msg(self):
parent_conn, child_conn = Pipe()
p = Process(target=worker_func, args=(child_conn,))
p.start()
# time.sleep(2)
self.assertEqual(parent_conn.recv(), 'test1')
self.assertEqual(parent_conn.recv(), 'test2')
p.join()
|
DeDop/dedop | tests/util/__init__.py | Python | gpl-3.0 | 38 | 0 | __author__ = ' | DeDop Development Team | '
|
sunlightlabs/tcamp | tcamp/sked/utils.py | Python | bsd-3-clause | 218 | 0.013761 | _cur | rent_event = None
def get_current_event():
global _current_event
if not _current_event:
from sked.mod | els import Event
_current_event = Event.objects.current()
return _current_event |
jeromecc/doctoctocbot | src/conversation/migrations/0029_tweetdj_retweeted_by.py | Python | mpl-2.0 | 478 | 0.002092 | # Generated by Django 2.2.10 on 2020-03-11 20:13
from django.db import migrations, models
class Migration(migrations.Migration):
depend | encies = [
('moderation', '0039_auto_20200226_0322'),
('conversation', '0028_retweeted_retweet'),
]
operations = [
migrations.AddField(
model_name='tweetdj',
name='retweeted_by',
field=models.ManyToManyField(blank=True, to='moderation.SocialUser'),
),
| ]
|
Tao4free/QGIS_plugins | SuperLabeling3/ui.py | Python | gpl-3.0 | 4,461 | 0.001793 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'SuperLabeling.ui'
#
# Created by: PyQt5 UI code generator 5.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(477, 195)
self.gridLayout = QtWidgets.QGridLayout(Dialog)
self.g | ridLayout.setObjectName("gridLayout")
self.lbLayer = QtWidgets.QLab | el(Dialog)
self.lbLayer.setObjectName("lbLayer")
self.gridLayout.addWidget(self.lbLayer, 0, 0, 1, 1)
self.txLayer = QtWidgets.QLineEdit(Dialog)
self.txLayer.setObjectName("txLayer")
self.gridLayout.addWidget(self.txLayer, 0, 1, 1, 5)
self.lbStep1 = QtWidgets.QLabel(Dialog)
self.lbStep1.setObjectName("lbStep1")
self.gridLayout.addWidget(self.lbStep1, 1, 0, 1, 6)
self.cbField = QtWidgets.QComboBox(Dialog)
self.cbField.setObjectName("cbField")
self.gridLayout.addWidget(self.cbField, 2, 0, 1, 2)
self.lbStep2 = QtWidgets.QLabel(Dialog)
self.lbStep2.setObjectName("lbStep2")
self.gridLayout.addWidget(self.lbStep2, 4, 0, 2, 3)
self.txX = QtWidgets.QLineEdit(Dialog)
self.txX.setObjectName("txX")
self.gridLayout.addWidget(self.txX, 4, 5, 1, 1)
self.lbPoint = QtWidgets.QLabel(Dialog)
self.lbPoint.setObjectName("lbPoint")
self.gridLayout.addWidget(self.lbPoint, 2, 5, 1, 1)
self.txY = QtWidgets.QLineEdit(Dialog)
self.txY.setObjectName("txY")
self.gridLayout.addWidget(self.txY, 5, 5, 1, 1)
self.lbX = QtWidgets.QLabel(Dialog)
self.lbX.setFrameShape(QtWidgets.QFrame.NoFrame)
self.lbX.setObjectName("lbX")
self.gridLayout.addWidget(self.lbX, 4, 4, 1, 1)
self.lbY = QtWidgets.QLabel(Dialog)
self.lbY.setObjectName("lbY")
self.gridLayout.addWidget(self.lbY, 5, 4, 1, 1)
self.pbnEdit = QtWidgets.QPushButton(Dialog)
self.pbnEdit.setObjectName("pbnEdit")
self.gridLayout.addWidget(self.pbnEdit, 2, 2, 1, 1)
self.lbStep2_2 = QtWidgets.QLabel(Dialog)
self.lbStep2_2.setObjectName("lbStep2_2")
self.gridLayout.addWidget(self.lbStep2_2, 8, 0, 1, 1)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setLayoutDirection(QtCore.Qt.LeftToRight)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout.addWidget(self.buttonBox, 8, 4, 1, 2)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "SuperLabeling"))
self.lbLayer.setText(_translate("Dialog", "<html><head/><body><p>Current layer</p></body></html>"))
self.lbStep1.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-size:10pt;\">1. Choose the field you want to Label, click "Start Edition" </span></p></body></html>"))
self.lbStep2.setText(_translate("Dialog", "<html><head/><body><p align=\"center\"><span style=\" font-size:10pt;\">2. Left Click on canvas where you want to put </span></p><p align=\"center\"><span style=\" font-size:10pt;\">the Label (make click near the feature)</span></p></body></html>"))
self.lbPoint.setText(_translate("Dialog", "<html><head/><body><p align=\"center\"><span style=\" font-size:10pt;\">The point you click</span></p></body></html>"))
self.lbX.setText(_translate("Dialog", "<html><head/><body><p align=\"center\"><span style=\" font-size:10pt;\">X</span></p></body></html>"))
self.lbY.setText(_translate("Dialog", "<html><head/><body><p align=\"center\">Y</p></body></html>"))
self.pbnEdit.setText(_translate("Dialog", "Start Edition"))
self.lbStep2_2.setText(_translate("Dialog", "<html><head/><body><p align=\"center\"><span style=\" font-size:10pt;\">3. Left double click to hide label</span></p></body></html>"))
|
bhanu-mnit/EvoML | evoml/subsampling/test_auto_segmentEG_FEMPO.py | Python | gpl-3.0 | 1,357 | 0.022108 | import pandas as pd
from sklearn.datasets import load_boston
from sklearn.linear_model import LinearRegression
from sklearn.tree import DecisionTreeRegressor
from sklearn.cross_validation import train_test_split
from sklearn.metrics import mean_squared_error
from .auto_segment_FEMPO import BasicSegmenter_FEMPO
def demo(X = None, y = None, test_size = 0.1):
if X == None:
boston = load_boston()
X = pd.DataFrame(boston.data)
y = pd.DataFrame(boston.target)
base_estimator = DecisionTreeRegressor(max_depth = 5)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size)
print X_train.shape
# If you want to compare with BaggingRegressor.
# bench = BaggingRegressor(base_estimator = base_estimator, n_estimators = 10, max_samples = 1, oob_score = True).fit(X_train, y_train)
# print bench.score(X_test, y_test)
# print mean_squared_error(bench.predict(X_test), y_test)
clf = BasicSegmenterEG_FEMPO(ngen=30,init_sample_percentage = 1, n_votes=10, n = 10, base_estimator = base_estimator,
unseen_x = X_t | est, unseen_y = y_test)
clf.fit(X_tra | in, y_train)
print clf.score(X_test,y_test)
y = clf.predict(X_test)
print mean_squared_error(y, y_test)
print y.shape
return clf, X_test, y_test
|
quozl/sugar | src/jarabe/model/shell.py | Python | gpl-3.0 | 28,028 | 0 | # Copyright (C) 2006-2007 Owen Williams.
# Copyright (C) 2006-2008 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import time
from gi.repository import Gio
from gi.repository import Wnck
from gi.repository import GObject
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GdkX11
from gi.repository import GLib
import dbus
from sugar3 import dispatch
from sugar3 import profile
from gi.repository import SugarExt
from jarabe.model.bundleregistry import get_registry
_SERVICE_NAME = 'org.laptop.Activity'
_SERVICE_PATH = '/org/laptop/Activity'
_SERVICE_INTERFACE = 'org.laptop.Activity'
_model = None
class Activity(GObject.GObject):
"""Activity which appears in the "Home View" of the Sugar shell
This class stores the Sugar Shell's metadata regarding a
given activity/application in the system. It interacts with
the sugar3.activity.* modules extensively in order to
accomplish its tasks.
"""
__gtype_name__ = 'SugarHomeActivity'
__gsignals__ = {
'pause': (GObject.SignalFlags.RUN_FIRST, None, ([])),
'resume': (GObject.SignalFlags.RUN_FIRST, None, ([])),
'stop': (GObject.SignalFlags.RUN_LAST, GObject.TYPE_BOOLEAN, ([])),
}
LAUNCHING = 0
LAUNCH_FAILED = 1
LAUNCHED = 2
def __init__(self, activity_info, activity_id, color, window=None):
"""Initialise the HomeActivity
activity_info -- sugar3.activity.registry.ActivityInfo instance,
provides the information required to actually
create the new instance. This is, in effect,
the "type" of activity being created.
activity_id -- unique identifier for this instance
of the activity type
_windows -- WnckWindows registered for the activity. The lowest
one in the stack is the main window.
"""
GObject.GObject.__init__(self)
self._windows = []
self._service = None
self._shell_windows = []
self._activity_id = activity_id
self._activity_info = activity_info
self._launch_time = time.time()
self._launch_status = Activity.LAUNCHING
if color is not None:
self._color = color
else:
self._color = profile.get_color()
if window is not None:
self.add_window(window)
self._retrieve_service()
self._name_owner_changed_handler = None
if not self._service:
bus = dbus.SessionBus()
self._name_owner_changed_handler = bus.add_signal_receiver(
self._name_owner_changed_cb,
signal_name='NameOwnerChanged',
dbus_interface='org.freedesktop.DBus')
self._launch_completed_hid = \
get_model().connect('launch-completed',
self.__launch_completed_cb)
self._launch_failed_hid = get_model().connect('launch-failed',
self.__launch_failed_cb)
def get_launch_status(self):
return self._launch_status
launch_status = GObject.Property(getter=get_launch_status)
def add_window(self, window, is_main_window=False):
"""Add a window to the windows stack."""
if not window:
raise ValueError('window must be valid')
self._windows.append(window)
if is_main_window:
window.connect('state-changed', self._state_changed_cb)
def push_shell_window(self, window):
"""Attach a shell run window (eg. view source) to the activity."""
self._shell_windows.append(window)
def pop_shell_window(self, window):
"""
Detach a shell run window (eg. view source) to the activity.
Only call this on **user initiated** deletion (loop issue).
"""
self._shell_windows.remove(window)
def has_shell_window(self):
return bool(self._shell_windows)
def stop(self):
# For web activities the Apisocket will connect to the 'stop'
# signal, thus preventing the window close. Then, on the
# 'activity.close' method, it will call close_window()
# directly.
close_window = not self.emit('stop')
if close_w | indow:
self.close_window()
def close_window(self):
if self.get_window() is not None:
self.get_window().close(GLib.get_current_time())
for w in self._shell_windows:
| w.destroy()
def remove_window_by_xid(self, xid):
"""Remove a window from the windows stack."""
for wnd in self._windows:
if wnd.get_xid() == xid:
self._windows.remove(wnd)
return True
return False
def get_service(self):
"""Get the activity service
Note that non-native Sugar applications will not have
such a service, so the return value will be None in
those cases.
"""
return self._service
def get_title(self):
"""Retrieve the application's root window's suggested title"""
if self._windows:
return self._windows[0].get_name()
else:
return None
def get_icon_path(self):
"""Retrieve the activity's icon (file) name"""
if self.is_journal():
icon_theme = Gtk.IconTheme.get_default()
info = icon_theme.lookup_icon('activity-journal',
Gtk.IconSize.SMALL_TOOLBAR, 0)
if not info:
return None
fname = info.get_filename()
del info
return fname
elif self._activity_info:
return self._activity_info.get_icon()
else:
return None
def get_icon_color(self):
"""Retrieve the appropriate icon colour for this activity
Uses activity_id to index into the PresenceService's
set of activity colours, if the PresenceService does not
have an entry (implying that this is not a Sugar-shared application)
uses the local user's profile colour for the icon.
"""
return self._color
def get_activity_id(self):
"""Retrieve the "activity_id" passed in to our constructor
This is a "globally likely unique" identifier generated by
sugar3.util.unique_id
"""
return self._activity_id
def get_bundle_id(self):
""" Returns the activity's bundle id"""
if self._activity_info is None:
return None
else:
return self._activity_info.get_bundle_id()
def get_xid(self):
"""Retrieve the X-windows ID of our root window"""
if self._windows:
return self._windows[0].get_xid()
else:
return None
def has_xid(self, xid):
"""Check if an X-window with the given xid is in the windows stack"""
if self._windows:
for wnd in self._windows:
if wnd.get_xid() == xid:
return True
return False
def get_window(self):
"""Retrieve the X-windows root window of this application
This was stored by the add_window method, which was
called by HomeModel._add_activity, which was called
via a callback that looks for all 'window-opened'
events.
We keep a stack of the windows. The lowest window in the
stack that is still valid we consider the main one.
HomeModel currently uses a dbus service query |
DBuildService/atomic-reactor | atomic_reactor/plugins/pre_download_remote_source.py | Python | bsd-3-clause | 5,554 | 0.001981 | """
Copyright (c) 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
Downloads and unpacks the source code archive from Cachito and sets appropriate build args.
"""
import base64
import os
import tarfile
from shlex import quote
from atomic_reactor.constants import REMOTE_SOURCE_DIR, CACHITO_ENV_FILENAME
from atomic_reactor.download import download_url
from atomic_reactor.plugin import PreBuildPlugin
from atomic_reactor.plugins.pre_reactor_config import get_cachito
from atomic_reactor.util import get_retrying_requests_session
from atomic_reactor.utils.cachito import CFG_TYPE_B64
class DownloadRemoteSourcePlugin(PreBuildPlugin):
key = 'download_remote_source'
is_allowed_to_fail = False
REMOTE_SOURCE = 'unpacked_remote_sources'
def __init__(self, tasker, workflow, remote_source_url=None,
remote_source_build_args=None,
remote_source_configs=None):
"""
:param tasker: ContainerTasker instance
:param workflow: DockerBuildWorkflow instance
:param remote_source_url: URL to download source archive from
:param remote_source_build_args: dict of container build args
to be used when building the image
:param remote_source_configs: URL to fetch a list with configuration files data to be
injected in the exploded remote sources dir
"""
super(DownloadRemoteSourcePlugin, self).__init__(tasker, workflow)
self.url = remote_source_url
self.buildargs = remote_source_build_args or {}
self.remote_source_conf_url = remote_source_configs
def get_remote_source_config(self, session, url, insecure=False):
"""Get the configuration files associated with the remote sources
:param session: the requests HTTP session object.
:param url: str, URL to cachito remote source configurations
:param insecure: bool, whether to verify SSL certificates
:return: list[dict], configuration data for the given request.
Entries include path, type, and content.
"""
self.log.info('Checking for additional configurations at %s', url)
response = session.get(url, verify=not insecure)
response.raise_for_status()
return response.json()
def generate_cachito_env_file(self):
"""
Generate cachito.env file with exported environment variables received from
cachito request.
"""
self.log.info('Creating %s file with environment variables '
'received from cachito request', CACHITO_ENV_FILENAME)
# Use dedicated dir in container build workdir for cachito.env
abs_path = os.path.join(self.workflow.builder.df_dir,
self.REMOTE_SOURCE, CACHITO_ENV_FILENAME)
with open(abs_path, 'w') as f:
f.write('#!/bin/bash\n')
for env_var, value in self.buildargs.items():
f.write('export {}={}\n'.format(env_var, quote(value)))
def run(self):
"""
Run the plugin.
"""
if not self.url:
self.log.info('No remote source url to download, skipping plugin')
return
session = get_retrying_requests_session()
# Download the source code archive
cachito_config = get_cachito(self.workflow)
insecure_ssl_conn = cachito_config.get('insecure', False)
archive = download_url(
self.url, self.workflow.source.workdir, session=session, insecure=insecure_ssl_conn
)
# Unpack the source code archive into a dedicated dir in container build workdir
dest_dir = os | .path.join(self.workflow.builder.df_dir, self.REMOTE_SOURCE)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
else:
raise RuntimeError('Conflicting path {} al | ready exists in the dist-git repository'
.format(self.REMOTE_SOURCE))
with tarfile.open(archive) as tf:
tf.extractall(dest_dir)
config_files = (
self.get_remote_source_config(session, self.remote_source_conf_url, insecure_ssl_conn)
if self.remote_source_conf_url else []
)
# Inject cachito provided configuration files
for config in config_files:
config_path = os.path.join(dest_dir, config['path'])
if config['type'] == CFG_TYPE_B64:
data = base64.b64decode(config['content'])
with open(config_path, 'wb') as f:
f.write(data)
else:
err_msg = "Unknown cachito configuration file data type '{}'".format(config['type'])
raise ValueError(err_msg)
os.chmod(config_path, 0o444)
# Set build args
self.workflow.builder.buildargs.update(self.buildargs)
# Create cachito.env file with environment variables received from cachito request
self.generate_cachito_env_file()
# To copy the sources into the build image, Dockerfile should contain
# COPY $REMOTE_SOURCE $REMOTE_SOURCE_DIR
args_for_dockerfile_to_add = {
'REMOTE_SOURCE': self.REMOTE_SOURCE,
'REMOTE_SOURCE_DIR': REMOTE_SOURCE_DIR,
}
self.workflow.builder.buildargs.update(args_for_dockerfile_to_add)
return archive
|
dajohnso/cfme_tests | cfme/tests/cloud_infra_common/test_tag_visibility.py | Python | gpl-2.0 | 2,733 | 0.002561 | import fauxfactory
import pyte | st
import cfme.configure.access_control as ac
from cfme import test | _requirements
from cfme.base.credential import Credential
from cfme.common.vm import VM
from cfme.configure.configuration import Tag, Category
from utils import testgen
def pytest_generate_tests(metafunc):
argnames, argvalues, idlist = testgen.all_providers(metafunc, required_fields=['ownership_vm'])
testgen.parametrize(metafunc, argnames, argvalues, ids=idlist, scope="module")
pytestmark = [test_requirements.tag]
@pytest.yield_fixture(scope="module")
def new_category():
category = Category(name="tag_vis_" + fauxfactory.gen_alpha(8).lower(),
description="tag_vis_" + fauxfactory.gen_alphanumeric(),
display_name="tag_vis_" + fauxfactory.gen_alphanumeric())
category.create()
yield category
category.delete(cancel=False)
@pytest.yield_fixture(scope="module")
def new_tag(new_category):
category = Category(name=new_category.name, display_name=new_category.display_name)
tag = Tag(name="tag_vis_" + fauxfactory.gen_alphanumeric().lower(),
display_name="tag_vis_" + fauxfactory.gen_alphanumeric().lower(),
category=category)
tag.create()
yield tag
tag.delete(cancel=False)
@pytest.yield_fixture(scope="module")
def new_role():
role = ac.Role(name='tag_vis_role_' + fauxfactory.gen_alphanumeric())
role.create()
yield role
role.delete()
@pytest.yield_fixture(scope="module")
def new_group(new_tag, new_role):
group = ac.Group(description='tag_vis_group_' + fauxfactory.gen_alphanumeric(),
role=new_role.name)
group.create()
group.edit_tags(new_tag.category.display_name + " *", new_tag.display_name)
yield group
group.delete()
def new_credential():
return Credential(principal='uid' + fauxfactory.gen_alphanumeric(), secret='redhat')
@pytest.yield_fixture(scope="module")
def new_user(new_group):
user = ac.User(name='user_' + fauxfactory.gen_alphanumeric(),
credential=new_credential(),
email='abc@redhat.com',
group=new_group)
user.create()
yield user
user.appliance.server.login_admin()
user.delete()
@pytest.yield_fixture(scope="module")
def tagged_vm(new_tag, setup_provider_modscope, provider):
ownership_vm = provider.data['ownership_vm']
tag_vm = VM.factory(ownership_vm, provider)
tag_vm.add_tag(new_tag)
yield tag_vm
tag_vm.appliance.server.login_admin()
tag_vm.remove_tag(new_tag)
@pytest.mark.tier(3)
def test_tag_vis_vm(request, tagged_vm, new_user):
with new_user:
assert tagged_vm.exists, "vm not found"
|
diego-d5000/MisValesMd | env/lib/python2.7/site-packages/django/contrib/auth/hashers.py | Python | mit | 17,840 | 0.000673 | from __future__ import unicode_literals
import base64
import binascii
import hashlib
import importlib
from collections import OrderedDict
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils import lru_cache
from django.utils.crypto import (
constant_time_compare, get_random_string, pbkdf2,
)
from django.utils.encoding import force_bytes, force_str, force_text
from django.utils.module_loading import import_string
from django.utils.translation import ugettext_noop as _
UNUSABLE_PASSWORD_PREFIX = '!' # This will never be a valid encoded hash
UNUSABLE_PASSWORD_SUFFIX_LENGTH = 40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX
def is_password_usable(encoded):
if encoded is None or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
return False
try:
identify_hasher(encoded)
except ValueError:
return False
return True
def check_password(password, encoded, setter=None, preferred='default'):
"""
Returns a boolean of whether the raw password matches the three
part encoded digest.
If setter is specified, it'll be called when you need to
regenerate the password.
"""
if password is None or not is_password_usable(encoded):
return False
preferred = get_hasher(preferred)
hasher = identify_hasher(encoded)
must_update = hasher.algorithm != preferred.algorithm
if not must_update:
must_update = preferred.must_update(encoded)
is_correct = hasher.verify(password, encoded)
if setter and is_correct and must_update:
setter(password)
return is_correct
def make_password(password, salt=None, hasher='default'):
"""
Turn a plain-text password into a hash for database storage
Same as encode() but generates a new random salt.
If password is None then a concatenation of
UNUSABLE_PASSWORD_PREFIX and a random string will be returned
which disallows logins. Additional random string reduces chances
of gaining access to staff or superuser accounts.
See ticket #20079 for more info.
"""
if password is None:
return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH)
hasher = get_hasher(hasher)
if not salt:
salt = hasher.salt()
return hasher.encode(password, salt)
@lru_cache.lru_cache()
def get_hashers():
hashers = []
for hasher_path in settings.PASSWORD_HASHERS:
hasher_cls = import_string(hasher_path)
hasher = hasher_cls()
if not getattr(hasher, 'algorithm'):
raise ImproperlyConfigured("hasher doesn't specify an "
"algorithm name: %s" % hasher_path)
hashers.append(hasher)
return hashers
@lru_cache.lru_cache()
def get_hashers_by_algorithm():
return {hasher.algorithm: hasher for hasher in get_hashers()}
@receiver(setting_changed)
def reset_hashers(**kwargs):
if kwargs['setting'] == 'PASSWORD_HASHERS':
get_hashers.cache_clear()
get_hashers_by_algorithm.cache_clear()
def get_hasher(algorithm='default'):
"""
Returns an instance of a loaded password hasher.
If algorithm is 'default', the default hasher will be returned.
This function will also lazy import hashers specified in your
settings file if needed.
"""
if hasattr(algorithm, 'algorithm'):
return algorithm
elif algorithm == 'default':
return get_hashers()[0]
else:
hashers = get_hashers_by_algorithm()
try:
return hashers[algorithm]
except KeyError:
| raise ValueError("Unknown password hashing algorithm '%s'. "
"Did you specify it in the PASSWORD_HASHERS "
"setting?" % algorithm)
def identify_hasher(encoded):
"""
Returns an instance of a loaded password hasher.
Identifies hasher algorithm by examining encoded hash, and calls
get_hasher() to return hasher. Raises ValueError if
algorithm cannot be identified, or if hash | er is not loaded.
"""
# Ancient versions of Django created plain MD5 passwords and accepted
# MD5 passwords with an empty salt.
if ((len(encoded) == 32 and '$' not in encoded) or
(len(encoded) == 37 and encoded.startswith('md5$$'))):
algorithm = 'unsalted_md5'
# Ancient versions of Django accepted SHA1 passwords with an empty salt.
elif len(encoded) == 46 and encoded.startswith('sha1$$'):
algorithm = 'unsalted_sha1'
else:
algorithm = encoded.split('$', 1)[0]
return get_hasher(algorithm)
def mask_hash(hash, show=6, char="*"):
"""
Returns the given hash, with only the first ``show`` number shown. The
rest are masked with ``char`` for security reasons.
"""
masked = hash[:show]
masked += char * len(hash[show:])
return masked
class BasePasswordHasher(object):
"""
Abstract base class for password hashers
When creating your own hasher, you need to override algorithm,
verify(), encode() and safe_summary().
PasswordHasher objects are immutable.
"""
algorithm = None
library = None
def _load_library(self):
if self.library is not None:
if isinstance(self.library, (tuple, list)):
name, mod_path = self.library
else:
mod_path = self.library
try:
module = importlib.import_module(mod_path)
except ImportError as e:
raise ValueError("Couldn't load %r algorithm library: %s" %
(self.__class__.__name__, e))
return module
raise ValueError("Hasher %r doesn't specify a library attribute" %
self.__class__.__name__)
def salt(self):
"""
Generates a cryptographically secure nonce salt in ASCII
"""
return get_random_string()
def verify(self, password, encoded):
"""
Checks if the given password is correct
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide a verify() method')
def encode(self, password, salt):
"""
Creates an encoded database value
The result is normally formatted as "algorithm$salt$hash" and
must be fewer than 128 characters.
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide an encode() method')
def safe_summary(self, encoded):
"""
Returns a summary of safe values
The result is a dictionary and will be used where the password field
must be displayed to construct a safe representation of the password.
"""
raise NotImplementedError('subclasses of BasePasswordHasher must provide a safe_summary() method')
def must_update(self, encoded):
return False
class PBKDF2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the PBKDF2 algorithm (recommended)
Configured to use PBKDF2 + HMAC + SHA256 with 20000 iterations.
The result is a 64 byte binary string. Iterations may be changed
safely but you must rename the algorithm if you change SHA256.
"""
algorithm = "pbkdf2_sha256"
iterations = 20000
digest = hashlib.sha256
def encode(self, password, salt, iterations=None):
assert password is not None
assert salt and '$' not in salt
if not iterations:
iterations = self.iterations
hash = pbkdf2(password, salt, iterations, digest=self.digest)
hash = base64.b64encode(hash).decode('ascii').strip()
return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
def verify(self, password, encoded):
algorithm, iterations, s |
JiaKang0615/Group-Projects | Unemployment vs. campus crime rate.py | Python | cc0-1.0 | 2,534 | 0.028019 | from pandas import DataFrame, Series
import pandas as pd
import numpy as np
import random as r
from scipy import stats
import scipy
import math

# --- Load inputs -----------------------------------------------------------
# Zip_county.csv: ZIP -> county mapping; crate_year.csv: county unemployment
# by year; Python_totalsheet.csv: per-campus crime counts (2011/2012).
un_rate = pd.read_csv('Zip_county.csv')
Unemp = pd.read_csv('crate_year.csv')
crime = pd.read_csv('Python_totalsheet.csv')
crime = crime[["City", "INSTNM", "ZIP", "Total",
               "nca_12", "ncc_12", "ncd_12", "oca_12", "occ_12", "ocd_12",
               "ppa_12", "ppc_12", "ppd_12",
               "nca_11", "ncc_11", "ncd_11", "oca_11", "occ_11", "ocd_11",
               "ppa_11", "ppc_11", "ppd_11"]]
# Total 2011 incidents per campus; skipna=False keeps the original behavior
# of the hand-written "+" chain (NaN in any column -> NaN total).
cols_11 = ["nca_11", "ncc_11", "ncd_11", "oca_11", "occ_11", "ocd_11",
           "ppa_11", "ppc_11", "ppd_11"]
crime['Crime11'] = crime[cols_11].sum(axis=1, skipna=False)
crime['Rate11'] = crime['Crime11'] / crime["Total"]
# Group by institution: mean of the leading numeric columns (ZIP, Total)...
sub_group1 = crime.iloc[:, 0:4]
sub_grouped1 = sub_group1.groupby('INSTNM', as_index=False).mean()
# ...and summed 2011 crime counts.
sub_group2 = crime.iloc[:, 1:2]
sub_group2['Crime11'] = crime['Crime11']
sub_grouped2 = sub_group2.groupby('INSTNM', as_index=False).sum()
data = pd.merge(sub_grouped1, sub_grouped2, on='INSTNM')
data['CrimeRate'] = data['Crime11'] / data['Total']
# County key = first five digits of the campus ZIP (ZIP is a float after the
# groupby mean, so a decimal point may appear within those five chars).
data['ZIP_county'] = data['ZIP'].astype(str).str[:5]
# BUG FIX: str.replace treats the pattern as a regex, so '.' matched EVERY
# character and emptied the string before astype(int); escape the dot to
# delete only a literal decimal point.
data['ZIP_county'] = data['ZIP_county'].str.replace(r'\.', '').astype(int)
# Join county unemployment data onto each campus.
un_rate['ZIP_county'] = un_rate['ZIP']
result1 = pd.merge(data, un_rate, how='inner', on='ZIP_county')
result2 = pd.merge(result1, Unemp, how='inner', on='county')
result11 = result2[["county", "INSTNM", "CrimeRate", "Unemp_11",
                    "ZIP_county"]].dropna()
avg_crimerate = result11['CrimeRate'].mean()
avg_unem = result11['Unemp_11'].mean()
# 0/1 indicators: high/low crime rate, high/low unemployment (vs. the mean).
result11['CH'] = (result11['CrimeRate'] > avg_crimerate).astype(int)
result11['CL'] = (result11['CrimeRate'] < avg_crimerate).astype(int)
result11['UH'] = (result11['Unemp_11'] > avg_unem).astype(int)
result11['UL'] = (result11['Unemp_11'] < avg_unem).astype(int)
# Contingency counts by unemployment category (kept for inspection).
c = pd.DataFrame(result11)
c = c.groupby(['UH', 'UL']).sum()
# Two-sided p-values for the precomputed z scores.
# BUG FIX: the original apply() lambda ignored its argument and recomputed
# the whole column on every row; compute vectorised instead.
z_scores = pd.Series([-4.76759, 4.907776, 4.76759, -4.907776])
p_values_h0 = scipy.stats.norm.sf(abs(z_scores[0])) * 2
z_scores = pd.DataFrame(z_scores)
z_scores.columns = ['z score']
z_scores['p value'] = scipy.stats.norm.sf(z_scores['z score'].abs()) * 2
print(z_scores)
szredinger/graph-constr-group-testing | graph_constr_group_testing/core/base_types.py | Python | mit | 5,126 | 0.005267 | """
Contains base data structures for defining graph constrained group testing problem,
and interfaces to operate on them.
Basic structure to exchange graph constrained group testing problem definition is :class:`Problem`.
It consists of enumeration of faulty elements, graph of links between elements and natural language
description of the problem. Graph is described by :class:`ProblemGraph` which consists of
:class:`networkx.DiGraph`, and distinguished nodes stored in :attr:`ProblemGraph.source`,
and :attr:`ProblemGraph.sink`
Interface of every algorithm solving group constrained group testing problem is defined by
:class:`Solver`, Abstract class :class:`ExperimentStatistics` defines generic interface that can
be used by experiment runners to verify result returned by solver. Result later is stored together
with statistics (:class:`TestStatistics`) in memory, where it can be retrieved for each problem/solver pair.
Experiment runner is a function accepting :class:`Experiment` parameter that fills it during call.
"""
import collections
from abc import ABCMeta, abstractmethod
class ToDict(object):
    """Interface for objects that can flatten themselves to a plain dict."""
    __metaclass__ = ABCMeta  # Python 2 style ABC declaration
    @abstractmethod
    def toDict(self):
        raise NotImplementedError()
class EmptyToDict(ToDict):
    """ToDict implementation for objects with nothing to report."""
    def toDict(self):
        return {}
class Problem(collections.namedtuple("Problem", ["all_nodes", "faulty_set", "description"]), EmptyToDict):
    # Plain group-testing problem: node universe, the true faulty subset, and
    # a natural-language description (see module docstring).
    pass
class GCGTProblem(collections.namedtuple("GCGTProblem", ["all_nodes", "faulty_set", "description", "problem_graph"]), EmptyToDict):
    # Graph-constrained variant: adds the ProblemGraph restricting which test
    # sets (paths) are allowed.
    pass
# Digraph plus its distinguished source/sink nodes (see module docstring).
ProblemGraph = collections.namedtuple("ProblemGraph", ["graph", "source", "sink"])
class ExperimentStatistics(object):
    """
    Accumulates rendered result rows, one per problem/solver pair.

    Each value passed to :meth:`set_result` is rendered to a flat
    ``{"<identifier>.<field>": value}`` dict, either by a renderer supplied
    in ``rendererMapping`` or by a default: plain ints/floats/strings become
    ``<identifier>.value``; anything else is asked for its ``toDict()``.
    """
    def __init__(self, rendererMapping):
        # FIX: dict.iteritems() was removed in Python 3; .items() behaves the
        # same here (only values are replaced, no keys added/removed) and
        # works on Python 2 as well.
        self._renderers = rendererMapping or {}
        for key, renderer in self._renderers.items():
            if renderer is None:
                # None means "use the object's own toDict()".
                self._renderers[key] = lambda x: x.toDict()
        self.results = []       # one flat dict per set_result() call
        self.headers = set({})  # union of all column names seen so far
    def set_result(self, objectsMapping):
        """Render every (identifier -> object) pair into one result row."""
        result = {}
        for identifier, obj in objectsMapping.items():
            rendered = self._render(identifier, obj)
            self._add_headers(rendered)
            result.update(rendered)
        self.results.append(result)
    def _add_headers(self, rendered):
        # Track every column name so tabular output can later be aligned.
        for key in rendered:
            self.headers.add(key)
    def _render(self, rendererIdentifier, obj):
        """Render *obj* and prefix each emitted key with the identifier."""
        result = {}
        if isinstance(obj, (int, float, str)):
            defaultrenderer = lambda x: {'value': x}
        else:
            defaultrenderer = lambda obj: obj.toDict()
        renderer = self._renderers.get(rendererIdentifier, defaultrenderer)
        for key, value in renderer(obj).items():
            result[self._join(rendererIdentifier, key)] = value
        return result
    def _join(self, *args):
        return ".".join(args)
    def process(self):
        # Post-processing hook for subclasses/runners.
        raise NotImplementedError()
class TestStatistics(ToDict):
    """
    Collects named values and counters for a single group-testing run.
    """
    def __init__(self):
        self.variable_dict = {}
    def set_var(self, var, value):
        """Store *value* under the name *var*."""
        self.variable_dict[var] = value
    def get_var(self, var):
        """Look up a previously stored value (KeyError when unknown)."""
        return self.variable_dict[var]
    def inc_var(self, var):
        """Increment a counter, creating it at zero on first use."""
        current = self.variable_dict.get(var, 0)
        self.variable_dict[var] = current + 1
    def toDict(self):
        """Expose the raw statistics mapping (ToDict interface)."""
        return self.variable_dict
class Solver(ToDict):
    """Interface of every group-testing solving algorithm."""
    SOLVER_TYPE_TAG = 'solver_type'  # key under which implementations report their type
    def __init__(self, problem_description, tester, *args, **kwargs):
        raise NotImplementedError()
    def solve(self):
        """
        runs algorithm solving graph constrained group testing problem
        :returns: set of nodes identified by algorithm as positive
        :rtype: set
        """
        raise NotImplementedError()
class SolverError(Exception):
    # Raised by Solver implementations on unrecoverable solving failures.
    pass
class GCGTSolver(Solver):
    """
    Interface of classes implementing combinatorial group testing algorithm.
    Problem description and tester object have to be inserted in constructor
    """
    def __init__(self, problem_description, tester, *args, **kwargs):
        """
        :param problem_description: graph constrained combinatorial problem description
        :type problem_description: base_types.Problem
        :param tester: tester object which will test all paths
        :type tester: base_types.PathTester
        """
        self.problem_description = problem_description
        # Unpack the ProblemGraph fields for convenient subclass access.
        self.graph = self.problem_description.problem_graph.graph
        self.source = self.problem_description.problem_graph.source
        self.sink = self.problem_description.problem_graph.sink
        self.tester = tester
class SetTester(object):
    """Interface for objects able to run a batch of path tests."""
    def test_paths(self, paths):
        """Return one boolean result per path in *paths*, in order.

        :param paths: paths that will be tested
        :type paths: list[set]
        :returns: outcome of each test
        :rtype: list[bool]
        """
        raise NotImplementedError()
|
saeki-masaki/cinder | cinder/volume/drivers/netapp/eseries/library.py | Python | apache-2.0 | 46,616 | 0.000021 | # Copyright (c) 2015 Alex Meade
# Copyright (c) 2015 Rushil Chugh
# Copyright (c) 2015 Navneet Singh
# Copyright (c) 2015 Yogesh Kshirsagar
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import math
import socket
import time
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.openstack.common import loopingcall
from cinder import utils as cinder_utils
from cinder.volume.drivers.netapp.eseries import client
from cinder.volume.drivers.netapp.eseries import exception as eseries_exc
from cinder.volume.drivers.netapp.eseries import host_mapper
from cinder.volume.drivers.netapp.eseries import utils
from cinder.volume.drivers.netapp import options as na_opts
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume import utils as volume_utils
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opts(na_opts.netapp_basicauth_opts)
CONF.register_opts(na_opts.netapp_connection_opts)
CONF.register_opts(na_opts.netapp_eseries_opts)
CONF.register_opts(na_opts.netapp_transport_opts)
CONF.register_opts(na_opts.netapp_san_opts)
class NetAppESeriesLibrary(object):
"""Executes commands relating to Volumes."""
VERSION = "1.0.0"
REQUIRED_FLAGS = ['netapp_server_hostname', 'netapp_controller_ips',
'netapp_login', 'netapp_password',
'netapp_storage_pools']
SLEEP_SECS = 5
HOST_TYPES = {'aix': 'AIX MPIO',
'avt': 'AVT_4M',
'factoryDefault': 'FactoryDefault',
'hpux': 'HP-UX TPGS',
'linux_atto': 'LnxTPGSALUA',
'linux_dm_mp': 'LnxALUA',
'linux_mpp_rdac': 'Linux',
'linux_pathmanager': 'LnxTPGSALUA_PM',
'macos': 'MacTPGSALUA',
'ontap': 'ONTAP',
'svc': 'SVC',
'solaris_v11': 'SolTPGSALUA',
| 'solaris_v10': 'Solaris',
'vmware': 'VmwTPGSALUA',
'windows':
'Windows 2000/Server 2003/Server 2008 Non-Clustered',
'windows_atto': 'WinTPGSALUA',
'windows_clustered':
'Windows 2000/Server 2003/Server 2008 Clustered'
}
# NOTE(ameade): This maps what is reported by the e-series api to a
# consistent set of values that are reported by all NetApp drivers
| # to the cinder scheduler.
SSC_DISK_TYPE_MAPPING = {
'scsi': 'SCSI',
'fibre': 'FCAL',
'sas': 'SAS',
'sata': 'SATA',
}
SSC_UPDATE_INTERVAL = 60 # seconds
WORLDWIDENAME = 'worldWideName'
DEFAULT_HOST_TYPE = 'linux_dm_mp'
    def __init__(self, driver_name, driver_protocol="iSCSI",
                 configuration=None, **kwargs):
        """Store configuration and register every option group this
        driver may read; no I/O happens until do_setup()."""
        self.configuration = configuration
        self.configuration.append_config_values(na_opts.netapp_basicauth_opts)
        self.configuration.append_config_values(
            na_opts.netapp_connection_opts)
        self.configuration.append_config_values(na_opts.netapp_transport_opts)
        self.configuration.append_config_values(na_opts.netapp_eseries_opts)
        self.configuration.append_config_values(na_opts.netapp_san_opts)
        self.lookup_service = fczm_utils.create_lookup_service()
        # Backend name falls back to a fixed default when unconfigured.
        self._backend_name = self.configuration.safe_get(
            "volume_backend_name") or "NetApp_ESeries"
        self.driver_name = driver_name
        self.driver_protocol = driver_protocol
        self._stats = {}      # volume stats cache
        self._ssc_stats = {}  # storage-service-catalog stats cache
    def do_setup(self, context):
        """Any initialization the volume driver does while starting."""
        self.context = context
        # Fail fast if any required configuration flag is missing.
        na_utils.check_flags(self.REQUIRED_FLAGS, self.configuration)
        self._client = self._create_rest_client(self.configuration)
        self._check_mode_get_or_register_storage_system()
        if self.configuration.netapp_enable_multiattach:
            self._ensure_multi_attach_host_group_exists()
    def _create_rest_client(self, configuration):
        """Build a REST client for the configured web-services proxy.

        Falls back to the scheme's conventional proxy port (8080 for
        http, 8443 for https) when no port is configured.
        """
        port = configuration.netapp_server_port
        scheme = configuration.netapp_transport_type.lower()
        if port is None:
            if scheme == 'http':
                port = 8080
            elif scheme == 'https':
                port = 8443
        return client.RestClient(
            scheme=scheme,
            host=configuration.netapp_server_hostname,
            port=port,
            service_path=configuration.netapp_webservice_path,
            username=configuration.netapp_login,
            password=configuration.netapp_password)
    def _start_periodic_tasks(self):
        # Refresh SSC stats in the background every SSC_UPDATE_INTERVAL
        # seconds.
        ssc_periodic_task = loopingcall.FixedIntervalLoopingCall(
            self._update_ssc_info)
        ssc_periodic_task.start(interval=self.SSC_UPDATE_INTERVAL)
    def check_for_setup_error(self):
        # Validate configuration and backend reachability, then start the
        # periodic background refresh.
        self._check_host_type()
        self._check_multipath()
        self._check_storage_system()
        self._start_periodic_tasks()
    def _check_host_type(self):
        """Resolve the configured host type to its E-Series name.

        Raises NetAppDriverException when the configured value is not a
        HOST_TYPES key.
        """
        host_type = (self.configuration.netapp_host_type
                     or self.DEFAULT_HOST_TYPE)
        self.host_type = self.HOST_TYPES.get(host_type)
        if not self.host_type:
            raise exception.NetAppDriverException(
                _('Configured host type is not supported.'))
    def _check_multipath(self):
        # Multipathing is strongly recommended for this backend; warn (do
        # not fail) when the operator has not enabled it.
        if not self.configuration.use_multipath_for_image_xfer:
            LOG.warning(_LW('Production use of "%(backend)s" backend requires '
                            'the Cinder controller to have multipathing '
                            'properly set up and the configuration option '
                            '"%(mpflag)s" to be set to "True".'),
                        {'backend': self._backend_name,
                         'mpflag': 'use_multipath_for_image_xfer'})
    def _ensure_multi_attach_host_group_exists(self):
        # Get-or-create the host group used for multi-attach volumes; a
        # NotFound from the lookup triggers creation.
        try:
            host_group = self._client.get_host_group_by_name(
                utils.MULTI_ATTACH_HOST_GROUP_NAME)
            LOG.info(_LI("The multi-attach E-Series host group '%(label)s' "
                         "already exists with clusterRef %(clusterRef)s"),
                     host_group)
        except exception.NotFound:
            host_group = self._client.create_host_group(
                utils.MULTI_ATTACH_HOST_GROUP_NAME)
            LOG.info(_LI("Created multi-attach E-Series host group %(label)s "
                         "with clusterRef %(clusterRef)s"), host_group)
def _check_mode_get_or_register_storage_system(self):
"""Does validity checks for storage system registry and health."""
def _resolve_host(host):
try:
ip = na_utils.resolve_hostname(host)
return ip
except socket.gaierror as e:
LOG.error(_LE('Error resolving host %(host)s. Error - %(e)s.'),
{'host': host, 'e': e})
raise exception.NoValidHost(
_("Controller IP '%(host)s' could not be resolved: %(e)s.")
% {'host': host, 'e': e})
ips = self.configuration.netapp_controller_ips
ips = [i.strip() for i in ips.split(",")]
ips = [x for x in ips if _resolve_host(x)]
host = na_utils.resolve_hostname(
self.configuration.netapp_server_hostname)
if host in ips:
|
grahamhayes/designate | designate/objects/adapters/api_v2/zone.py | Python | apache-2.0 | 2,504 | 0 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from designate.objects.adapters.api_v2 import base
from designate import objects
LOG = logging.getLogger(__name__)
class ZoneAPIv2Adapter(base.APIv2Adapter):
ADAPTER_OBJECT = objects.Zone
MODIFICATIONS = {
'fields': {
"id": {},
"pool_id": {},
"project_id": {
'rename': 'tenant_id'
},
"name": {
'immutable': True,
},
"email": {
'read_only': False
},
"description": {
'read_only': False
},
"ttl": {
'read_only': False
},
"serial": {},
"status": {},
"action": {},
"version": {},
"type": {
'immutable': True
},
"masters": {},
"cr | eated_at": {},
"updated_at": {},
"transferred_at": {},
},
'options': {
'links': True,
| 'resource_name': 'zone',
'collection_name': 'zones',
}
}
@classmethod
def _parse_object(cls, values, object, *args, **kwargs):
if 'masters' in values:
object.masters = objects.adapters.DesignateAdapter.parse(
cls.ADAPTER_FORMAT,
values['masters'],
objects.ZoneMasterList(),
*args, **kwargs)
del values['masters']
return super(ZoneAPIv2Adapter, cls)._parse_object(
values, object, *args, **kwargs)
class ZoneListAPIv2Adapter(base.APIv2Adapter):
ADAPTER_OBJECT = objects.ZoneList
MODIFICATIONS = {
'options': {
'links': True,
'resource_name': 'zone',
'collection_name': 'zones',
}
}
|
hazelcast/hazelcast-python-client | hazelcast/protocol/codec/custom/sql_column_metadata_codec.py | Python | apache-2.0 | 1,925 | 0.003636 | from hazelcast.protocol.builtin import FixSizedTypesCodec, CodecUtil
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import END_FRAME_BUF, END_FINAL_FRAME_BUF, SIZE_OF_FRAME_LENGTH_AND_FLAGS, create_initial_buffer_custom
from hazelcast.sql import SqlColumnMetadata
from hazelcast.protocol.builtin import StringCodec
# Byte offsets of the fixed-size fields inside the initial frame.
_TYPE_ENCODE_OFFSET = 2 * SIZE_OF_FRAME_LENGTH_AND_FLAGS
_TYPE_DECODE_OFFSET = 0
_NULLABLE_ENCODE_OFFSET = _TYPE_ENCODE_OFFSET + INT_SIZE_IN_BYTES
_NULLABLE_DECODE_OFFSET = _TYPE_DECODE_OFFSET + INT_SIZE_IN_BYTES
_INITIAL_FRAME_SIZE = _NULLABLE_ENCODE_OFFSET + BOOLEAN_SIZE_IN_BYTES - SIZE_OF_FRAME_LENGTH_AND_FLAGS
class SqlColumnMetadataCodec:
    """Wire codec for SqlColumnMetadata values."""
    @staticmethod
    def encode(buf, sql_column_metadata, is_final=False):
        """Append the encoded form of *sql_column_metadata* to *buf*."""
        initial_frame_buf = create_initial_buffer_custom(_INITIAL_FRAME_SIZE)
        FixSizedTypesCodec.encode_int(initial_frame_buf, _TYPE_ENCODE_OFFSET, sql_column_metadata.type)
        FixSizedTypesCodec.encode_boolean(initial_frame_buf, _NULLABLE_ENCODE_OFFSET, sql_column_metadata.nullable)
        buf.extend(initial_frame_buf)
        StringCodec.encode(buf, sql_column_metadata.name)
        # Terminate with the final or ordinary end-frame marker as requested.
        if is_final:
            buf.extend(END_FINAL_FRAME_BUF)
        else:
            buf.extend(END_FRAME_BUF)
    @staticmethod
    def decode(msg):
        """Decode one SqlColumnMetadata from the frame iterator *msg*."""
        msg.next_frame()  # skip the begin frame
        initial_frame = msg.next_frame()
        type = FixSizedTypesCodec.decode_int(initial_frame.buf, _TYPE_DECODE_OFFSET)
        # NOTE(review): the length check suggests "nullable" is optional on
        # the wire (older protocol versions send a shorter initial frame) —
        # confirm against the protocol definition.
        is_nullable_exists = False
        nullable = False
        if len(initial_frame.buf) >= _NULLABLE_DECODE_OFFSET + BOOLEAN_SIZE_IN_BYTES:
            nullable = FixSizedTypesCodec.decode_boolean(initial_frame.buf, _NULLABLE_DECODE_OFFSET)
            is_nullable_exists = True
        name = StringCodec.decode(msg)
        CodecUtil.fast_forward_to_end_frame(msg)
        return SqlColumnMetadata(name, type, is_nullable_exists, nullable)
|
sburnett/seattle | seattlegeni/website/xmlrpc/dispatcher.py | Python | mit | 2,429 | 0.007822 | """
<Program>
dispatcher.py
<Started>
6 July 2009
<Author>
Justin Samuel
<Purpose>
XMLRPC handler for django.
Nothing in this file needs to be modified when adding/removing/changing
public xmlrpc methods. For that, see the views.py file in the same directory.
This xmlrpc dispatcher for django is modified from the version at:
http://code.djangoproject.com/wiki/XML-RPC
"""
from SimpleXMLRPCServer import SimpleXMLRPCDispatcher
from django.http import HttpResponse
from seattlegeni.website.xmlrpc.views import PublicXMLRPCFunctions
from django.views.decorators.csrf import csrf_exempt
|
# This is the url that will be displayed if the xmlrpc service is requested
# directory through a web browser (that is, through a GET request).
SEATTLECLEARINGHOUSE_XMLRPC_API_DOC_URL = "https://seattle.cs.washington.edu/wiki/SeattleGeniApi"
# Create a Dispatcher. This handles the calls and translates info to function maps.
# TODO: allow_none = True or False? Does using None in the api make the xmlrpc
# api python-specific?
dispatcher = SimpleXMLRPCDispatcher(allow_none=False, encoding=None)
@csrf_exempt
def rpc_handler(request):
    """
    All xmlrpc requests are initially routed here by django. The actual functions we
    implement are in the views.py file. This rpc_handler function will make sure that
    what we return from the functions in views.py will be turned into a valid xmlrpc
    response.
    If POST data is defined, it assumes it's XML-RPC and tries to process as such.
    If the POST data is empty or if it is a GET request, this assumes the request is
    from a browser and responds saying it's an xmlrpc service.
    This function does not need to be called from anywhere other than having it
    defined in urls.py. That is, you generally shouldn't need to ever use this
    function directly.
    """
    response = HttpResponse()
    if len(request.POST):
        # POST with a body: hand the raw XML to the dispatcher, which calls
        # the registered method and marshals the XML-RPC response.
        # NOTE(review): raw_post_data is the pre-Django-1.4 spelling of
        # request.body — verify against the Django version in use.
        response.write(dispatcher._marshaled_dispatch(request.raw_post_data))
    else:
        # GET (or empty POST): a human in a browser — point at the API docs.
        response.write("<b>This is the SeattleGeni XML-RPC Service.</b><br>")
        response.write("Please see <a href=" + SEATTLECLEARINGHOUSE_XMLRPC_API_DOC_URL + ">" + SEATTLECLEARINGHOUSE_XMLRPC_API_DOC_URL + "</a> for more information.")
    response['Content-length'] = str(len(response.content))
    return response
# All methods in the PublicXMLRPCFunctions class will be available as xmlrpc functions.
dispatcher.register_instance(PublicXMLRPCFunctions())
|
Pipe-s/dynamic_machine | dynamic_machine/cli_ssh.py | Python | mit | 5,503 | 0.008177 | '''
Created on Jun 16, 2014
@author: lwoydziak
'''
import pexpect
import sys
from dynamic_machine.cli_com | mands import assertResultNotEquals, Command
class SshCli(object):
    """
    Wrapper around a pexpect-driven ssh session.

    Tracks a stack of "modes" (user/menu contexts entered on the remote
    CLI) in ``modeList``, and supports optional command buffering: while
    ``_bufferedMode`` is set, ``send()`` queues commands and ``flush()``
    ships them as a single sendline.
    """
    LOGGED_IN = 0  # index in modeList of the initial login user/mode
    def __init__(self, host, loginUser, debug = False, trace = False, log=None, port=22, pexpectObject=None):
        # pexpectObject is an injection point for tests; defaults to the
        # real pexpect module.
        self.pexpect = pexpect if not pexpectObject else pexpectObject
        self.debug = debug  # echo diagnostics via _debugLog()
        self.trace = trace  # mirror the raw session to stdout
        self.host = host
        self._port = port
        self._connection = None  # pexpect spawn handle; None until connected
        self.modeList = []       # stack of entered modes, bottom = login user
        self._log = log          # optional logfile object for the session
        self._bufferedCommands = None  # queued command text, newline-joined
        self._bufferedMode = None      # None = send immediately, else buffer
        self._loginUser = loginUser
        self._resetExpect()
    def __del__(self):
        # Best-effort cleanup; a no-op when already disconnected.
        self.closeCliConnectionTo()
    def showOutputOnScreen(self):
        """Switch to verbose mode: diagnostics on, raw trace to stdout."""
        self.debug = True
        self.trace = True
        self._log = None
        self._setupLog()
    def connectWithSsh(self):
        """Spawn the ssh process (host-key checks disabled) and make the
        login user the bottom of the mode stack. Does not authenticate."""
        self._debugLog("Establishing connection to " + self.host)
        self._connection = self.pexpect.spawn(
            'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s@%s -p %d' %
            (self._loginUser.username, self.host, self._port))
        if self._connection is None:
            raise Exception("Unable to connect via SSH perhaps wrong IP!")
        self._secure = True  # NOTE(review): never read in this class — confirm external use
        self._setupLog()
        self._loginUser.commandLine(self)
        self.modeList = [self._loginUser]
    def resetLoggingTo(self, log):
        self._connection.logfile = log
    def _setupLog(self):
        # trace wins first, but an explicit log object overrides it below.
        if self.trace:
            class Python3BytesToStdOut:
                # pexpect writes bytes; route them through stdout's buffer.
                def write(self, s):
                    sys.stdout.buffer.write(s)
                def flush(self):
                    sys.stdout.flush()
            self._connection.logfile = Python3BytesToStdOut()
        if self._log is not None:
            self._connection.logfile = self._log
    def loginSsh(self):
        """Authenticate via the login user; on failure close the session
        and re-raise with the last expect buffer for context."""
        self._setupLog()
        self._debugLog("Login in as "+self._loginUser.username)
        try:
            self._loginUser.sendPassword()
            return True
        except Exception as e:
            self.forceCloseCliConnectionTo()
            raise Exception('Exception ('+str(e)+') '+'Expected CLI response: "Password:"' + "\n Got: \n" + self._lastExpect())
    def _exit_modes_beyond(self, thisMode):
        # Pop (and .exit()) every mode stacked above index thisMode.
        if not self.modeList: return
        while len(self.modeList) > thisMode + 1:
            self.modeList.pop().exit()
    def exitMode(self, mode):
        # Remove a single mode from the stack without calling its exit().
        if mode in self.modeList:
            self.modeList.remove(mode)
    def check_prereq(self, prereqMode = 0):
        """Unwind to *prereqMode* and fail if it was never entered."""
        self._exit_modes_beyond(prereqMode)
        if len(self.modeList) <= prereqMode:
            raise Exception("Attempted to enter menu when prerequist mode was not entered, expected: %d" % prereqMode)
    def execute_as(self, user):
        """Log in as *user* on top of the base login mode and push it."""
        self.check_prereq(self.LOGGED_IN)
        self._exit_modes_beyond(self.LOGGED_IN)
        user.commandLine(self)
        user.login()
        self.modeList.append(user)
        return user
    def closeCliConnectionTo(self):
        """Exit all modes in order, then drop the connection."""
        if self._connection == None:
            return
        self._exit_modes_beyond(-1)
        self.modeList = []
        self._debugLog("Exited all modes.")
        self.forceCloseCliConnectionTo()
    def forceCloseCliConnectionTo(self):
        # Hard teardown: no mode exits, just close the spawn handle.
        self.modeList = None
        if self._connection:
            self._debugLog("Closing connection.")
            self._connection.close()
            self._connection = None
    def _debugLog(self, message):
        if self.debug:
            print(message)
    def _resetExpect(self):
        # Remember and clear pexpect's unmatched buffer so _lastExpect()
        # can reconstruct what was on the wire.
        self.previousExpectLine = ""
        if self._connection is not None and isinstance(self._connection.buffer, str):
            self.previousExpectLine = self._connection.buffer
            self._connection.buffer = ""
    def _lastExpect(self):
        # Reassemble saved buffer + before/after of the last expect() call.
        constructLine = self.previousExpectLine
        if self._connection is not None and isinstance(self._connection.before, str):
            constructLine += self._connection.before
        if self._connection is not None and isinstance(self._connection.after, str):
            constructLine += self._connection.after
        return constructLine
    def send(self, command):
        """Queue *command*; transmit immediately unless buffering is on."""
        if self._bufferedCommands is None:
            self._bufferedCommands = command
        else:
            self._bufferedCommands += "\n" + command
        if self._bufferedMode is None:
            self.flush()
        else:
            self._debugLog("Buffering command " + command)
    def flush(self):
        """Transmit all queued commands as one sendline, then clear."""
        if self._bufferedCommands is None:
            return
        self._connection.sendline(str(self._bufferedCommands))
        self._bufferedCommands = None
    def buffering(self):
        return self._bufferedMode
    def bufferedMode(self, mode = True):
        # Passing None turns buffering off and flushes what is queued.
        if mode is None:
            self.flush()
        self._bufferedMode = mode
    def compareReceivedAgainst(self, pattern, timeout=-1, searchwindowsize=None, indexOfSuccessfulResult=0):
        """expect() *pattern* and return the matched index; in buffered
        mode skip the read and report *indexOfSuccessfulResult*."""
        if self._bufferedMode is None:
            index = self._connection.expect(pattern, timeout, searchwindowsize)
            self._debugLog("\nLooking for " + str(pattern) + " Found ("+str(index)+")")
            self._debugLog(self._lastExpect())
            return index
        else:
            return indexOfSuccessfulResult
PersianWikipedia/pywikibot-core | tests/uploadbot_tests.py | Python | mit | 1,769 | 0 | # -*- coding: utf-8 -*-
"""
UploadRobot test.
These tests write to the wiki.
"""
#
# (C) Pywikibot team, 2014-2019
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
import os
from pywikibot.specialbots import UploadRobot
from tests import join_images_path
from tests.aspects import unittest, TestCase
class TestUploadbot(TestCase):
    """Test cases for upload."""
    write = True  # these tests write to the wiki (see module docstring)
    family = 'wikipedia'
    code = 'test'  # run against the test wiki
    # Keyword arguments shared by every UploadRobot invocation below.
    params = dict(  # noqa: C408
        description='pywikibot upload.py script test',
        keepFilename=True,
        aborts=set(),
        ignoreWarning=True,
    )
    def test_png_list(self):
        """Test uploading a list of pngs using upload.py."""
        image_list = []
        # Collect every file under the shared test-images directory.
        for directory_info in os.walk(join_images_path()):
            for dir_file in directory_info[2]:
                image_list.append(os.path.join(directory_info[0], dir_file))
        bot = UploadRobot(url=image_list, targetSite=self.get_site(),
                          **self.params)
        bot.run()
    def test_png(self):
        """Test uploading a png using upload.py."""
        bot = UploadRobot(
            url=[join_images_path('MP_sounds.png')],
            targetSite=self.get_site(), **self.params)
        bot.run()
    def test_png_url(self):
        """Test uploading a png from url using upload.py."""
        link = 'https://upload.wikimedia.org/'
        link += 'wikipedia/commons/f/fc/MP_sounds.png'
        bot = UploadRobot(url=[link], targetSite=self.get_site(),
                          **self.params)
        bot.run()
if __name__ == '__main__':  # pragma: no cover
    try:
        unittest.main()
    except SystemExit:
        pass
|
dmisdani/netspeed_indicator | netspeed-indicator2.py | Python | gpl-3.0 | 13,609 | 0.047054 | #/usr/bin/env python
# -*- coding: utf-8 -*-
#Indicator-Netspeed
#
#Author: Dimitris Misdanitis
#Date: 2014
#Version 0.1
#⇧ 349 B/s ⇩ 0.0 B/s
#
#▲▼ ↓↑ ✔ ⬆⬇ ⇩⇧
import commands
from gi.repository import GObject,GLib
import gtk
import appindicator
import time
import urllib2
import os, subprocess
import io
import threading
from vsgui.api import *
import stat
gtk.gdk.threads_init()
#Thread to get the Overall Network speed (all interfaces except lo)
#Updates the indicator text
class NetspeedFetcher(threading.Thread):
    """Worker thread: samples network throughput once per second and
    updates the label and icon of its parent indicatorNetspeed."""
    def __init__(self, parent):
        threading.Thread.__init__(self)
        self.parent = parent  # owning indicatorNetspeed instance
    def _fetch_speed(self):
        # NOTE(review): the return value of get_interfaces() is unused here.
        sn=self.parent.get_interfaces()
        try:
            #get the transmitted and received bytes, wait 1 sec, get the values again and see what changed in a second. Thats the netspeed/sec
            if self.parent.active_interface == "All":
                R1=long(commands.getstatusoutput('ls /sys/class/net/|grep -v lo|xargs -I % cat /sys/class/net/%/statistics/rx_bytes|paste -sd+|bc 2>/dev/null')[1])
                T1=long(commands.getstatusoutput('ls /sys/class/net/|grep -v lo|xargs -I % cat /sys/class/net/%/statistics/tx_bytes|paste -sd+|bc 2>/dev/null')[1])
                time.sleep(1)
                R2=long(commands.getstatusoutput('ls /sys/class/net/|grep -v lo|xargs -I % cat /sys/class/net/%/statistics/rx_bytes|paste -sd+|bc 2>/dev/null')[1])
                T2=long(commands.getstatusoutput('ls /sys/class/net/|grep -v lo|xargs -I % cat /sys/class/net/%/statistics/tx_bytes|paste -sd+|bc 2>/dev/null')[1])
            else:
                R1=long(commands.getstatusoutput('cat /sys/class/net/%s/statistics/rx_bytes 2>/dev/null'%self.parent.active_interface)[1])
                T1=long(commands.getstatusoutput('cat /sys/class/net/%s/statistics/tx_bytes 2>/dev/null'%self.parent.active_interface)[1])
                time.sleep(1)
                R2=long(commands.getstatusoutput('cat /sys/class/net/%s/statistics/rx_bytes 2>/dev/null'%self.parent.active_interface)[1])
                T2=long(commands.getstatusoutput('cat /sys/class/net/%s/statistics/tx_bytes 2>/dev/null'%self.parent.active_interface)[1])
        except ValueError:
            # Counter files unreadable (interface vanished, etc.): report 0.
            R1=0
            R2=0
            T1=0
            T2=0
        down_bytes=R2-R1
        up_bytes=T2-T1
        downformatted=self.parent.sizeof_fmt(down_bytes)
        upformatted=self.parent.sizeof_fmt(up_bytes)
        #We can change the arrows, when we are offline. But i disabled it.
        uparrow="⬆"
        downarrow="⬇"
        summ=down_bytes+up_bytes
        # Pick an icon bucket from the combined up+down byte rate.
        if summ <= 2000:
            self.parent.ind.set_icon("zero")
        elif summ>2000 and summ<=51200: #50KB
            self.parent.ind.set_icon("light")
        elif summ>51200 and summ<=307200: #300KB
            self.parent.ind.set_icon("medium")
        elif summ>307200 and summ<=819200: #800
            self.parent.ind.set_icon("high")
        else:
            self.parent.ind.set_icon("full")
        return "%s%s %s%s"%(upformatted,uparrow,downformatted,downarrow)
    def run(self):
        # Loop until the parent clears its "alive" event; the second
        # argument to set_label is the width-guide string.
        while(self.parent.alive.isSet()):
            data = self._fetch_speed()
            self.parent.ind.set_label(data,'⬆8888 MB/s ⬇8888 MB/s')
            time.sleep(1)
class indicatorNetspeed:
interfaces = [] #This list stores the interfaces.
active_interface = 'All' #Which device is monitored? The default is All (except lo)
proc_rows = []
menu_process=[] #This list stores the 15 gtk menu items i use to display the bandwith per process
isOnline=True #We assume that initially we are online.
bandwidthPerProcess_active=False #Start monitor bandwith perprocess(nethogs) when the indicator starts
nethogs_alive=False #Is nethogs alive? At this point it's not..
nethogs_process = "" #Is there any subprocess that reads from the nethogs? At this point there isn't.
sudo_pass="" #Stores the sudo password. We nee this to run the nethogs!
def __init__(self):
self.folder=os.path.dirname(os.path.realpath(__file__)) #Get the aplication's folder.
n=self.get_interfaces() #Get the interfaces (that also fills the interfaces list
#This is our indicator object! (name, icon,type)
self.ind = appindicator.Indicator ("indicator-netspeed",
"zero",
appindicator.CATEGORY_SYSTEM_SERVICES)
self.ind.set_icon_theme_path("%s/icons"%self.folder) #Set the icon theme folder!
self.ind.set_status (appindicator.STATUS_ACTIVE)
self.ind.set_label("⇧ 0.0 B/s ⇩ 0.0 B/s",'⬆8888 MB/s ⬇8888 MB/s') #Initially set the label to ⇧ 0.0 B/s ⇩ 0.0 B/s
self.build_menu() #Build the menu
self.alive = threading.Event()
self.alive.set()
self.fetch = NetspeedFetcher(self)
self.fetch.start() # Start fetcher thread
self.nethogs_thread=threading.Thread(target=self._tail_forever, args=("",)) #set the bandwidth/process monitor thread
if self.bandwidthPerProcess_active: #If the functionality is active
self.nethogs_thread.start() #Start the thread!
#this is the method that reads from the nethogs
#I use my the latest version of nethogs, because it doesn't require to pass the device as parameter.
#I compiled it to 64bit machine. I'm not sure if this works on an 32bit ubuntu.
    def _tail_forever(self,args=None):
        """Tail ``nethogs -t`` output forever and publish per-process traffic.

        Spawns nethogs under sudo (feeding the stored password over a pipe)
        and reads its trace output line by line.  Records are tab-separated
        ``program/pid/uid<TAB>sent<TAB>received`` lines; a blank line ends a
        batch.  Parsed batches are published via ``self.proc_rows`` and the
        menu is refreshed after each batch.  Runs until
        ``self.bandwidthPerProcess_active`` is cleared or the stream ends.

        NOTE(review): echoing the sudo password exposes it in the process
        list; a sudoers rule for nethogs would be safer -- confirm intent.
        """
        #is_nethogs_executable=os.access('nethogs/nethogs', os.X_OK)
        #if is_nethogs_executable == False:
        #    st = os.stat('%s/nethogs/nethogs'%(self.folder))
        #    os.chmod('%s/nethogs/nethogs'%(self.folder), st.st_mode | stat.S_IEXEC) #If nethogs is not executable, make it.
        FNULL =io.open(os.devnull, 'w') #set an FNULL variable to point to /dev/null
        args1 = ['echo', '%s'%self.sudo_pass]
        args2 = ['sudo', '-S', '%s/nethogs/nethogs'%(self.folder),'-d','2','-t']
        # echo piped into `sudo -S` supplies the password on sudo's stdin.
        p1 = subprocess.Popen(args1, stdout=subprocess.PIPE)
        self.nethogs_process = subprocess.Popen(args2, stdin=p1.stdout,stdout=subprocess.PIPE,stderr=FNULL) #run the nethogs with 2>/dev/null
        data=[]
        while self.bandwidthPerProcess_active:
            line = self.nethogs_process.stdout.readline() #continuously read from the process
            # A data record has exactly two tabs; a bare "\n" ends a batch.
            if (line=="\n" or line.count("\t")==2): #From here is the nethogs output processing
                if line!="\n":
                    tmp=line.replace("\n","").split("\t")
                    # tmp[0] looks like "program/pid/uid"; split to reach the pid.
                    name=tmp[0].split("/")
                    s=[]
                    # Skip pid 0 pseudo-entries and rows with no traffic at all.
                    if name[-2] != "0" and (tmp[1]!='0' and tmp[2]!='0'):
                        s.append(name[-2])
                        s.append(name[-1])
                        s.append(tmp[1])
                        s.append(tmp[2])
                        data.append(s)
                #print ">>>>"
                #print data
                #print "===="
                #print line.replace("\n","")
                if len(data)>0 and data[0]!='':
                    self.proc_rows=data #the self.proc_rows stores the output each time.
                    self.nethogs_alive=True #And as we received data from the nethogs process, it is alive!
                if line=="\n":
                    self.update_process_menu() #update the menu item.
                    data=[]
                    time.sleep(1) #sleep when i receive \n
            if not line:
                # EOF: the nethogs process died or its pipe closed.
                print "BREAK" #If we read null line that means something went wrong, break! kill the nethogs, uncheck the menu.
                self.nethogs_alive=False
                self.nethogs_process=""
                self.kill_nethogs()
                self.bandwidthPerProcess_active=False
                self.nethogs_menu.set_active(False)
                break
        self.nethogs_alive=False
        self.update_process_menu()
#this method gets the pid and returns the process name
def _pid_to_name(self,pid):
cmd='cat /proc/%s/comm 2>/dev/null'%pid
out=commands.getstatusoutput(cmd)
if out[1]=='' :
#out='PID:%s'%pid
out=""
else:
out=out[1]
return out
#This process updates the menu items
def update_process_menu(self,args=None):
#bandwith per process
if len(self.proc_rows)>0 and self.bandwidthPerProcess_active: #As long as we have something to update and functionality is active
#print self.proc_rows
i=0
names=[]
for row in self.proc_rows[0:15] : #Take only the 0->14 lines
#print row_items
if row[0]!='' and row[0].find("[sudo]")==-1:
name=self._pid_to_name(row[0])
if name !="":
_st |
MingStar/python-pinyin | tools/small_phrases.py | Python | mit | 562 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pypinyin.phrases_dict import phrases_dict
from pypinyin import pinyin, TONE

# Partition the bundled phrase dictionary:
#   phrases_same - phrases whose pinyin() already derives the identical
#                  reading without the phrase entry (entry is redundant);
#   phrases_list - phrases that genuinely need their dictionary entry.
phrases_list = set()
phrases_same = set()
for han, pys in phrases_dict.items():
    if pinyin(han, style=TONE, heteronym=True) != pys:
        phrases_list.add(han)
    else:
        phrases_same.add(han)

if __name__ == '__main__':
    # Dump the redundant phrases for inspection, then print summary counts.
    with open('phrases_same.txt', 'w') as f:
        for x in phrases_same:
            f.write(u'%s\n' % x)
    print(len(phrases_dict))
    print(len(phrases_same))
    print(len(phrases_list))
|
mellenburg/dcos | packages/dcos-integration-test/extra/test_endpoints.py | Python | apache-2.0 | 11,490 | 0.001218 | import ipaddress
import urllib.parse
import bs4
import pytest
from requests.exceptions import ConnectionError
from retrying import retry
__maintainer__ = 'vespian'
__contact__ = 'dcos-security@mesosphere.io'
def test_if_dcos_ui_is_up(dcos_api_session):
    """The root UI page is served and every relative asset link resolves."""
    response = dcos_api_session.get('/')
    assert response.status_code == 200
    assert len(response.text) > 100
    assert 'DC/OS' in response.text
    # HEAD-check only the relative links; absolute URLs (those with a
    # netloc) are too complex to validate here.
    page = bs4.BeautifulSoup(response.text, "html.parser")
    relative_hrefs = (
        element.attrs['href']
        for element in page.find_all(['link', 'a'], href=True)
        if not urllib.parse.urlparse(element.attrs['href']).netloc
    )
    for href in relative_hrefs:
        # Links may be written as "./img/..." -- strip the leading dot(s).
        head_response = dcos_api_session.head(href.lstrip('.'))
        assert head_response.status_code == 200
def test_if_mesos_is_up(dcos_api_session):
    """The Mesos master web UI is reachable behind the proxy."""
    response = dcos_api_session.get('/mesos')
    assert response.status_code == 200
    assert len(response.text) > 100
    assert '<title>Mesos</title>' in response.text
def test_if_all_mesos_slaves_have_registered(dcos_api_session):
    """Every agent the harness knows about appears in /mesos/master/slaves."""
    response = dcos_api_session.get('/mesos/master/slaves')
    assert response.status_code == 200
    registered = sorted(slave['hostname'] for slave in response.json()['slaves'])
    assert registered == dcos_api_session.all_slaves
def test_if_exhibitor_api_is_up(dcos_api_session):
    """Exhibitor's cluster-list API answers and reports a usable port."""
    response = dcos_api_session.exhibitor.get('/exhibitor/v1/cluster/list')
    assert response.status_code == 200
    assert response.json()["port"] > 0
def test_if_exhibitor_ui_is_up(dcos_api_session):
    """Exhibitor's web UI front page renders."""
    response = dcos_api_session.exhibitor.get('/')
    assert response.status_code == 200
    assert 'Exhibitor for ZooKeeper' in response.text
def test_if_zookeeper_cluster_is_up(dcos_api_session):
    """Every master runs a serving ZooKeeper and exactly one node leads."""
    response = dcos_api_session.get('/exhibitor/exhibitor/v1/cluster/status')
    assert response.status_code == 200
    nodes = response.json()
    assert sorted(node['hostname'] for node in nodes) == dcos_api_session.masters
    # Exhibitor status code 3 means the ZooKeeper instance is "serving".
    assert sum(node['code'] == 3 for node in nodes) == len(dcos_api_session.masters)
    assert sum(node['isLeader'] for node in nodes) == 1
def test_if_uiconfig_is_available(dcos_api_session):
    """The UI configuration endpoint serves the expected JSON document."""
    response = dcos_api_session.get('/dcos-metadata/ui-config.json')
    assert response.status_code == 200
    assert 'uiConfiguration' in response.json()
def test_if_dcos_history_service_is_up(dcos_api_session):
    """The history service's health-check endpoint answers with 'pong'."""
    # Repairs dataset split-marker corruption in the def line and the
    # session attribute (`dcos_api_session | )` / `dcos_api_sess | ion`).
    r = dcos_api_session.get('/dcos-history-service/ping')
    assert r.status_code == 200
    assert 'pong' == r.text
def test_if_marathon_is_up(dcos_api_session):
    """Marathon's info endpoint identifies itself."""
    response = dcos_api_session.get('/marathon/v2/info')
    assert response.status_code == 200
    info = response.json()
    assert info.get("name") == "marathon"
def test_if_marathon_ui_redir_works(dcos_api_session):
    """Fetching /marathon lands on the Marathon web UI."""
    response = dcos_api_session.get('/marathon')
    assert response.status_code == 200
    assert '<title>Marathon</title>' in response.text
def test_if_srouter_service_endpoint_works(dcos_api_session):
    """Admin Router's /service/ path proxies through to Marathon."""
    response = dcos_api_session.get('/service/marathon/v2/info')
    assert response.status_code == 200
    assert len(response.text) > 100
    info = response.json()
    assert info.get("name") == "marathon"
    assert "version" in info
def test_if_mesos_api_is_up(dcos_api_session):
    """Mesos-DNS answers its version endpoint."""
    response = dcos_api_session.get('/mesos_dns/v1/version')
    assert response.status_code == 200
    assert response.json()["Service"] == 'Mesos-DNS'
def test_if_pkgpanda_metadata_is_available(dcos_api_session):
    """Pkgpanda publishes its active-package build metadata."""
    response = dcos_api_session.get('/pkgpanda/active.buildinfo.full.json')
    assert response.status_code == 200
    packages = response.json()
    assert 'mesos' in packages
    # A healthy install carries far more than a handful of packages.
    assert len(packages) > 5
def test_if_dcos_history_service_is_getting_data(dcos_api_session):
    """The history service eventually reports a populated state summary."""
    @retry(stop_max_delay=20000, wait_fixed=500)
    def assert_history_populated():
        response = dcos_api_session.get('/dcos-history-service/history/last')
        assert response.status_code == 200
        # The DC/OS UI depends on these state-summary fields being present;
        # their exact contents vary, so only presence is checked.
        summary = response.json()
        assert {'cluster', 'frameworks', 'slaves', 'hostname'} <= summary.keys()
        assert len(summary["slaves"]) == len(dcos_api_session.all_slaves)
    assert_history_populated()
def test_if_we_have_capabilities(dcos_api_session):
    """Cosmos (which serves /capabilities) is up and offers package management."""
    accept_header = 'application/vnd.dcos.capabilities+json;charset=utf-8;version=v1'
    response = dcos_api_session.get('/capabilities', headers={'Accept': accept_header})
    assert response.status_code == 200
    assert {'name': 'PACKAGE_MANAGEMENT'} in response.json()['capabilities']
def test_if_overlay_master_is_up(dcos_api_session):
    """The overlay master serves state and its network config matches ours."""
    response = dcos_api_session.get('/mesos/overlay-master/state')
    assert response.ok, "status_code: {}, content: {}".format(response.status_code, response.content)
    # Both the IPv4 ('dcos') and IPv6 ('dcos6') overlays must be configured
    # exactly as the installer laid them out.
    expected_network = {
        'vtep_subnet': '44.128.0.0/20',
        'vtep_subnet6': 'fd01:a::/64',
        'vtep_mac_oui': '70:B3:D5:00:00:00',
        'overlays': [
            {'name': 'dcos', 'subnet': '9.0.0.0/8', 'prefix': 24},
            {'name': 'dcos6', 'subnet6': 'fd01:b::/64', 'prefix6': 80},
        ],
    }
    assert response.json()['network'] == expected_network
def test_if_overlay_master_agent_is_up(dcos_api_session):
    """Cross-check this agent's overlay view against the overlay master's."""
    master_response = dcos_api_session.get('/mesos/overlay-master/state')
    assert master_response.ok,\
        "status_code: {}, content: {}".format(master_response.status_code, master_response.content)
    master_overlay_json = master_response.json()
    agent_response = dcos_api_session.get('/mesos/overlay-agent/overlay')
    assert agent_response.ok,\
        "status_code: {}, content: {}".format(agent_response.status_code, agent_response.content)
    # Make sure the `dcos` and `dcos6` overlays have been configured.
    agent_overlay_json = agent_response.json()
    assert 'ip' in agent_overlay_json
    agent_ip = agent_overlay_json['ip']
    # Find this agent's entry in the master's agent list by IP.
    master_agent_overlays = None
    for agent in master_overlay_json['agents']:
        assert 'ip' in agent
        if agent['ip'] == agent_ip:
            assert len(agent['overlays']) == 2
            master_agent_overlays = agent['overlays']
    # NOTE(review): if the master never lists this agent's IP,
    # master_agent_overlays stays None and the indexing below raises
    # TypeError rather than a clear assertion -- confirm whether an explicit
    # `assert master_agent_overlays is not None` is wanted.
    assert 'overlays' in agent_overlay_json
    assert len(agent_overlay_json['overlays']) == 2
    # Pair each agent-side overlay with the master-side record of the same
    # name (there are exactly two, so "not the first" means "the second").
    for agent_overlay in agent_overlay_json['overlays']:
        overlay_name = agent_overlay['info']['name']
        if master_agent_overlays[0]['info']['name'] == overlay_name:
            _validate_dcos_overlay(overlay_name, agent_overlay, master_agent_overlays[0])
        else:
            _validate_dcos_overlay(overlay_name, agent_overlay, master_agent_overlays[1])
def _validate_dcos_overlay(overlay_name, agent_overlay, master_agent_overlay):
if overlay_name == 'dcos':
assert 'subnet' in agent_overlay
subnet = agent_overlay.pop('subnet')
_validate_overlay_subnet(subnet, '9.0.0.0/8', 24)
elif overlay_name == 'dcos6':
assert 'subnet6' in agent_overlay
subnet6 = agent_overlay.pop('subnet6')
_validate_overlay_subnet(subnet6, 'fd01:b::/64', 80)
if 'mesos_bridge' in master_agent_overlay:
try:
agent_overlay.pop('mesos_bridge')
except KeyError as ex:
raise AssertionError("Could not find expected 'mesos_bridge' in agent:" + str(ex)) from ex
else:
# Master didn't configure a `mesos-bridge` so shouldn't be
# seeing it in the agent as well.
assert 'mesos_bridge' not in agent_overlay
if 'docker_bridge' in master_agent_overlay:
try:
agent_overlay.pop('docker_bridge')
except KeyError as ex:
raise AssertionError("Could not find expected 'docker_bridge' in agent:" + str(ex)) from ex
else:
|
wangwei7175878/tutorials | tkinterTUT/tk6_scale.py | Python | mit | 643 | 0.009331 | # View more python learning tutorial on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutor | ial: http://i.youku.com/pythontutorial
import tkinter as tk
window = tk.Tk()
window.title('my window')
window.geometry('200x200')
l = tk.Label(window, bg='yellow', width=20, text='empty')
l.pack()
def print_selection | (v):
l.config(text='you have selected ' + v)
s = tk.Scale(window, label='try me', from_=5, to=11, orient=tk.HORIZONTAL,
length=200, showvalue=0, tickinterval=2, resolution=0.01, command=print_selection)
s.pack()
window.mainloop() |
googleapis/python-aiplatform | google/cloud/aiplatform_v1/types/model_service.py | Python | apache-2.0 | 17,689 | 0.000735 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1.types import io
from google.cloud.aiplatform_v1.types import model as gca_model
from google.cloud.aiplatform_v1.types import model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation_slice
from google.cloud.aiplatform_v1.types import operation
from google.protobuf import field_mask_pb2 # type: ignore
# Proto module descriptor: registers every message class defined below under
# the ``google.cloud.aiplatform.v1`` proto package.
__protobuf__ = proto.module(
    package="google.cloud.aiplatform.v1",
    manifest={
        "UploadModelRequest",
        "UploadModelOperationMetadata",
        "UploadModelResponse",
        "GetModelRequest",
        "ListModelsRequest",
        "ListModelsResponse",
        "UpdateModelRequest",
        "DeleteModelRequest",
        "ExportModelRequest",
        "ExportModelOperationMetadata",
        "ExportModelResponse",
        "GetModelEvaluationRequest",
        "ListModelEvaluationsRequest",
        "ListModelEvaluationsResponse",
        "GetModelEvaluationSliceRequest",
        "ListModelEvaluationSlicesRequest",
        "ListModelEvaluationSlicesResponse",
    },
)
class UploadModelRequest(proto.Message):
    r"""Request message for
    [ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel].

    Attributes:
        parent (str):
            Required. Resource name of the Location to upload the Model
            into, in the form ``projects/{project}/locations/{location}``.
        model (google.cloud.aiplatform_v1.types.Model):
            Required. The Model to create.
    """

    parent = proto.Field(proto.STRING, number=1)
    model = proto.Field(proto.MESSAGE, number=2, message=gca_model.Model)
class UploadModelOperationMetadata(proto.Message):
    r"""Details of the
    [ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel]
    operation.

    Attributes:
        generic_metadata (google.cloud.aiplatform_v1.types.GenericOperationMetadata):
            The common part of the operation metadata.
    """

    generic_metadata = proto.Field(
        proto.MESSAGE, number=1, message=operation.GenericOperationMetadata
    )
class UploadModelResponse(proto.Message):
    r"""Response message of the
    [ModelService.UploadModel][google.cloud.aiplatform.v1.ModelService.UploadModel]
    operation.

    Attributes:
        model (str):
            Resource name of the uploaded Model, in the form
            ``projects/{project}/locations/{location}/models/{model}``.
    """

    model = proto.Field(proto.STRING, number=1)
class GetModelRequest(proto.Message):
    r"""Request message for
    [ModelService.GetModel][google.cloud.aiplatform.v1.ModelService.GetModel].

    Attributes:
        name (str):
            Required. Resource name of the Model, in the form
            ``projects/{project}/locations/{location}/models/{model}``.
    """

    name = proto.Field(proto.STRING, number=1)
class ListModelsRequest(proto.Message):
    r"""Request message for
    [ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels].

    Attributes:
        parent (str):
            Required. Resource name of the Location whose Models to
            list, in the form ``projects/{project}/locations/{location}``.
        filter (str):
            An expression for filtering the results of the request. Both
            snake_case and camelCase field names are supported.

            - ``model`` supports = and !=. ``model`` represents the
              Model ID, i.e. the last segment of the Model's [resource
              name][google.cloud.aiplatform.v1.Model.name].
            - ``display_name`` supports = and !=
            - ``labels`` supports general map functions, that is:

              - ``labels.key=value`` - key:value equality
              - \`labels.key:\* or labels:key - key existence
              - A key including a space must be quoted:
                ``labels."a key"``.

            Some examples:

            - ``model=1234``
            - ``displayName="myDisplayName"``
            - ``labels.myKey="myValue"``
        page_size (int):
            The standard list page size.
        page_token (str):
            The standard list page token, typically obtained via
            [ListModelsResponse.next_page_token][google.cloud.aiplatform.v1.ListModelsResponse.next_page_token]
            of the previous
            [ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels]
            call.
        read_mask (google.protobuf.field_mask_pb2.FieldMask):
            Mask specifying which fields to read.
        order_by (str):
            A comma-separated list of fields to order by, ascending by
            default; append " desc" to a field name for descending.
            Supported fields: ``display_name``, ``create_time``,
            ``update_time``. Example: ``display_name, create_time desc``.
    """

    parent = proto.Field(proto.STRING, number=1)
    filter = proto.Field(proto.STRING, number=2)
    page_size = proto.Field(proto.INT32, number=3)
    page_token = proto.Field(proto.STRING, number=4)
    read_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask_pb2.FieldMask)
    order_by = proto.Field(proto.STRING, number=6)
class ListModelsResponse(proto.Message):
    r"""Response message for
    [ModelService.ListModels][google.cloud.aiplatform.v1.ModelService.ListModels]

    Attributes:
        models (Sequence[google.cloud.aiplatform_v1.types.Model]):
            List of Models in the requested page.
        next_page_token (str):
            A token to retrieve next page of results. Pass to
            [ListModelsRequest.page_token][google.cloud.aiplatform.v1.ListModelsRequest.page_token]
            to obtain that page.
    """

    @property
    def raw_page(self):
        # Pagers iterate ``raw_page`` to re-yield this response as one page.
        return self

    models = proto.RepeatedField(proto.MESSAGE, number=1, message=gca_model.Model,)
    next_page_token = proto.Field(proto.STRING, number=2,)
class UpdateModelRequest(proto.Message):
    r"""Request message for
    [ModelService.UpdateModel][google.cloud.aiplatform.v1.ModelService.UpdateModel].

    Attributes:
        model (google.cloud.aiplatform_v1.types.Model):
            Required. The Model which replaces the
            resource on the server.
        update_mask (google.protobuf.field_mask_pb2.FieldMask):
            Required. The update mask applies to the resource. For the
            ``FieldMask`` definition, see
            [google.protobuf.FieldMask][google.protobuf.FieldMask].
    """

    model = proto.Field(proto.MESSAGE, number=1, message=gca_model.Model,)
    update_mask = proto.Field(
        proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,
    )
class DeleteModelRequest(proto.Message):
    r"""Request message for
    [ModelService.DeleteModel][google.cloud.aiplatform.v1.ModelService.DeleteModel].

    Attributes:
        name (str):
            Required. Resource name of the Model to delete, in the form
            ``projects/{project}/locations/{location}/models/{model}``.
    """

    name = proto.Field(proto.STRING, number=1)
class ExportModelRequest(proto.Message):
r"""Request message for
[ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel].
Attributes:
name (str):
Required. The resource name of the Model to
export.
output_config (google.cloud.aiplatform_v1.types.ExportModelRequest.OutputConfig):
Requ |
pcn/filechunkio | setup.py | Python | mit | 582 | 0.003436 | #!/usr/bin/env python
import sys
from distutils.core import setup

from filechunkio import __version__

# ``unicode`` does not exist on Python 3; normalize to one callable name.
PY3 = sys.version_info[0] == 3
_unicode = str if PY3 else unicode

setup(
    name="filechunkio",
    version=_unicode(__version__),
    description="FileChunkIO represents a chunk of an OS-level file "\
        "containing bytes data",
    long_description=open("README", 'r').read(),
    author="Fabian Topfstedt",
    author_email="topfstedt@schneevonmorgen.com",
    url="http://bitbucket.org/fabian/filechunkio",
    license="MIT license",
    packages=["filechunkio"],
)
|
mattBrzezinski/Hydrogen | robot-controller/UI/__init__.py | Python | mit | 632 | 0.001582 | from AboutWindow import AboutWindow as AboutWindow
# Re-export each UI widget at package level (Python 2 implicit-relative imports).
from ActionListWidget import ActionListWidget as ActionListWidget
from ActionPushButton import ActionPushButton as ActionPushButton
from CameraWidget import CameraWidget as CameraWidget
from ConnectDialog import ConnectDialog as ConnectDialog
from MainWindow import MainWindow as MainWindow
from MovementWidget import MovementWidget as MovementWidget
from SpeechWidget import SpeechWidget as SpeechWidget
from SubmittableTextEdit import SubmittableTextEdit as SubmittableTextEdit
from SudokuBoard import SudokuBoard as SudokuBoard
from TimerWidget import TimerWidget as TimerWidget
codeb2cc/weather | conf.py | Python | mit | 436 | 0.002577 | # -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, with_statement
import os
WEIBO_KEY = '2250230588'
WEIBO_SECRET = os.environ['WEIBO_SECRET']
# 未审核应用绑定开发者帐号Token有效期为五年, 无需动态更新
WEIBO_UID = '1652592967'
WEIBO_TOKEN = os.environ['WEIBO_TOKEN']
OAUTH2_URL = 'http://localhost.com/oauth2'
MEMCACHED = ['127 | .0.0.1:11211', ]
MEMCACHED_PREFIX | = '#W#'
|
hryamzik/ansible | lib/ansible/plugins/cache/pickle.py | Python | gpl-3.0 | 2,016 | 0.001984 | # (c) 2017, Brian Coca
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: pickle
short_description: Pickle formatted files.
description:
- This cache uses Python's pickle serialization format, in per host files, saved to the filesystem.
version_added: "2.3"
author: Brian Coca (@bcoca)
options:
_uri:
required: True
description:
- Path in which the cache plugin will save the files
type: list
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the files
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout for the cache plugin data
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
'''
# ``cPickle`` is the C-accelerated pickler on Python 2; plain ``pickle`` is
# already accelerated on Python 3.
try:
    import cPickle as pickle
except ImportError:
    import pickle

from ansible.module_utils.six import PY3
from ansible.plugins.cache import BaseFileCacheModule
class CacheModule(BaseFileCacheModule):
    """
    A caching module backed by pickle files.
    """

    def _load(self, filepath):
        """Deserialize one host's cached data from ``filepath``."""
        # Pickle is a binary format
        with open(filepath, 'rb') as f:
            if PY3:
                # encoding='bytes' lets Python 3 read pickles that were
                # written by Python 2 (its str payloads come back as bytes).
                return pickle.load(f, encoding='bytes')
            else:
                return pickle.load(f)

    def _dump(self, value, filepath):
        """Serialize ``value`` to ``filepath``."""
        with open(filepath, 'wb') as f:
            # Use pickle protocol 2 which is compatible with Python 2.3+.
            pickle.dump(value, f, protocol=2)
|
garlick/flux-core | t/resource/get-xml-test.py | Python | lgpl-3.0 | 2,102 | 0.000476 | ###############################################################
# Copyright 2020 Lawrence Livermore National Security, LLC
# (c.f. AUTHORS, NOTICE.LLNS, COPYING)
#
# This file is part of the Flux resource manager framework.
# For details, see https://github.com/flux-framework.
#
# SPDX-License-Identifier: LGPL-3.0
###############################################################
import sys
import errno
import logging
import subprocess
import flux
from flux.rpc import RPC
def flux_module_exec(rank="all", cmd="unload", opt=None):
    """Run ``flux module <cmd> [opt] resource`` on *rank* via flux-exec.

    Raises subprocess.CalledProcessError if the command exits non-zero.
    """
    argv = ["flux", "exec", "-r", str(rank), "flux", "module", cmd]
    argv += [opt] if opt else []
    argv.append("resource")
    subprocess.check_call(argv)
def resource_unload_all():
    """Force-unload the resource module on every rank (-f ignores busy modules)."""
    flux_module_exec(cmd="unload", opt="-f")
def resource_load_all_except_one(size):
    """Load the resource module on every rank except the last (rank size - 1).

    Modules must be loaded sequentially.
    """
    # Leaving the last rank unloaded lets the test observe get-xml blocking.
    for rank in range(0, size - 1):
        flux_module_exec(rank=str(rank), cmd="load")
def resource_load_last(size):
    """Load the resource module on the final rank, completing the set."""
    rank = size - 1
    flux_module_exec(rank=str(rank), cmd="load")
### Main test program
logging.basicConfig(level=logging.INFO)
log = logging.getLogger("get-xml-test")

handle = flux.Flux()
size = int(handle.attr_get("size"))

log.info("unloading resource modules across %d ranks", size)
resource_unload_all()

log.info("reloading all resource modules except rank %d", size - 1)
resource_load_all_except_one(size)

log.info("initiating resource.get-xml RPC")
future = RPC(handle, "resource.get-xml", {})

# Ensure get-xml initially blocks while one rank is still unloaded.
delay = 0.5
log.info("waiting up to %.2fs for get-xml (should block)", delay)
try:
    future.wait_for(delay)
except OSError as err:
    # A timeout is the expected (blocking) outcome; anything else is fatal.
    if err.errno != errno.ETIMEDOUT:
        raise
if future.is_ready():
    log.error("resource.get-xml returned before expected")
    sys.exit(1)

log.info("loading resource module on rank %d", size - 1)
resource_load_last(size)

# Ensure get-xml can now complete
log.info("get-xml RPC should now complete")
future.wait_for(5)
log.info("done")
linkedin/naarad | src/naarad/metrics/gc_metric.py | Python | apache-2.0 | 15,945 | 0.008028 | # coding=utf-8
"""
Copyright 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
import logging
import os
import re
import sys
import threading
from naarad.metrics.metric import Metric
import naarad.utils
from naarad.naarad_constants import important_sub_metrics_import
logger = logging.getLogger('naarad.metrics.GCMetric')
class GCMetric(Metric):
""" Class for GC logs, deriving from class Metric """
bin_path = os.path.dirname(sys.argv[0])
clock_format = '%Y-%m-%d %H:%M:%S'
rate_types = ()
val_types = ('alloc', 'promo', 'used0', 'used1', 'used', 'commit0', 'commit1', 'commit', 'gen0', 'gen0t', 'gen0usr', 'gen0sys', 'gen0real',
'cmsIM', 'cmsRM', 'cmsRS', 'GCPause', 'cmsCM', 'cmsCP', 'cmsCS', 'cmsCR', 'safept', 'apptime', 'used0AfterGC', 'used1AfterGC', 'usedAfterGC',
'gen1t', 'g1-pause-young', 'g1-pause-mixed', 'g1-pause-remark', 'g1-pause-cleanup', 'g1-pause-remark.ref-proc', 'g1-pause-young.parallel',
'g1-pause-young.parallel.gcworkers', 'g1-pause-young.parallel.ext-root-scanning.avg', 'g1-pause-young.parallel.ext-root-scanning.max',
'g1-pause-young.parallel.update-rs.avg', 'g1-pause-young.parallel.update-rs.max', 'g1-pause-young.parallel.update-rs.processed-buffers.avg',
'g1-pause-young.parallel.update-rs.processed-buffers.max', 'g1-pause-young.parallel.scan-rs.avg', 'g1-pause-young.parallel.scan-rs.max',
'g1-pause-young.parallel.object-copy-rs.avg', 'g1-pause-young.parallel.object-copy-rs.max', 'g1-pause-young.parallel.termination.avg',
'g1-pause-young.parallel.termination.max', 'g1-pause-young.parallel.gc-worker-other.avg', 'g1-pause-young.parallel.gc-worker-other.max',
'g1-pause-young.parallel.gc-worker-total.avg', 'g1-pause-young.parallel.gc-worker-total.max', 'g1-pause-young.parallel.gc-worker-end.avg',
'g1-pause-young.parallel.gc-worker-end.max', 'g1-pause-young.code-root-fixup', 'g1-pause-young.clear-ct', 'g1-pause-young.other',
'g1-pause-young.other.choose-cset', 'g1-pause-young.other.ref-proc', 'g1-pause-young.other.reg-enq', 'g1-pause-young.other.free-cset',
'g1-pause-mixed.parallel', 'g1-pause-mixed.parallel.gcworkers', 'g1-pause-mixed.parallel.ext-root-scanning.avg',
'g1-pause-mixed.parallel.ext-root-scanning.max', 'g1-pause-mixed.parallel.update-rs.avg', 'g1-pause-mixed.parallel.update-rs.max',
'g1-pause-mixed.parallel.update-rs.processed-buffers.avg', 'g1-pause-mixed.parallel.update-rs.processed-buffers.max',
'g1-pause-mixed.parallel.scan-rs.avg', 'g1-pause-mixed.parallel.scan-rs.max', 'g1-pause-mixed.parallel.object-copy-rs.avg',
'g1-pause-mixed.parallel.object-copy-rs.max', 'g1-pause-mixed.parallel.termination.avg', 'g1-pause-mixed.parallel.termination.max',
'g1-pause-mixed.parallel.gc-worker-other.avg', 'g1-pause-mixed.parallel.gc-worker-other.max', 'g1-pause-mixed.parallel.gc-worker-total.avg',
'g1-pause-mixed.parallel.gc-worker-total.max', 'g1-pause-mixed.parallel.gc-worker-end.avg', 'g1-pause-mixed.parallel.gc-worker-end.max',
'g1-pause-mixed.code-root-fixup', 'g1-pause-mixed.clear-ct', 'g1-pause-mixed.other', 'g1-pause-mixed.other.choose-cset',
'g1-pause-mixed.other.ref-proc', 'g1-pause-mixed.other.reg-enq', 'g1-pause-mixed.other.free-cset', 'g1-pause-young.parallel.gc-worker-start.avg',
'g1-pause-young.parallel.gc-worker-start.max', 'g1-pause-mixed.parallel.gc-worker-start.avg', 'g1-pause-mixed.parallel.gc-worker-start.max',
'g1-eden-occupancy-before-gc', 'g1-eden-capacity-before-gc', 'g1-eden-occupancy-after-gc', 'g1-eden-capacity-after-gc', 'g1-survivor-before-gc',
'g1-survivor-after-gc', 'g1-heap-occupancy-before-gc', 'g1-heap-capacity-before-gc', 'g1-heap-occupancy-after-gc', 'g1-heap-capacity-after-gc',
'g1-young-cpu.sys', 'g1-young-cpu.usr', 'g1-young-cpu.real', 'g1-mixed-cpu.usr', 'g1-mixed-cpu.sys', 'g1-mixed-cpu.real')
def __init__(self, metric_type, infile_list, hostname, aggr_metrics, outdir, resource_path, label, ts_start, ts_end, rule_strings,
important_sub_metrics, anomaly_detection_metrics, **other_options):
Metric.__init__(self, metric_type, infile_list, hostname, aggr_metrics, outdir, resource_path, label, ts_start, ts_end, rule_strings,
important_sub_metrics, anomaly_detection_metrics)
if not self.important_sub_metrics:
self.important_sub_metrics = important_sub_metrics_import['GC']
self.sub_metrics = self.val_types
self.beginning_ts = None
self.beginning_date = None
for (key, val) in other_options.iteritems():
if key == 'gc-options' or key == 'sub_metrics':
self.sub_metrics = val.split()
else:
setattr(self, key, val)
self.sub_metric_description = {
'gen0': 'young gen collection time, excluding gc_prologue & gc_epilogue',
'gen0t': 'young gen collection time, including gc_prologue & gc_epilogue',
'gen0usr': 'young gen collection time in cpu user secs',
'gen0sys': 'young gen collection time in cpu sys secs',
'gen0real': 'young gen collection time in elapsed secs',
'gen1i': 'train generation incremental collection',
'gen1t': 'old generation collection or full GC',
'cmsIM': 'CMS initial mark pause',
'cmsRM': 'CMS remark pause',
'cmsRS': 'CMS resize pause',
'GCPause': 'all stop-the-world GC pauses',
'cmsCM': 'CMS concurrent mark phase',
'cmsCP': 'CMS concurrent preclean phase',
'cmsCS': 'CMS concurrent sweep phase',
'cmsCR': 'CMS concurrent reset phase',
'alloc': 'object allocation in MB (approximate***)',
'promo': 'object promotion in MB (approximate***)',
'used0': 'young gen used memory size (before gc)',
'used1': 'old gen used memory size (before gc)',
'used': 'heap space used memory size (before gc) (excludes perm gen)',
'commit0': 'young gen committed memory size (after gc)',
'commit1': 'old gen committed memory size (after gc)',
'commit': 'heap committed memory size (after gc) (excludes perm gen)',
'apptime': 'amount of time application threads were running',
'safept': 'amount of time the VM spent at safepoints (app threads stopped)',
'used0AfterGC': 'young gen used memory size (after gc)',
'used1AfterGC': 'old gen used memory size (after gc)',
'usedAfterGC': 'heap space used memory size (after gc)',
'g1-pause-young': 'G1 Young GC Pause (seconds)',
'g1-pause-mixed': 'G1 Mixed GC | Pause (seconds)',
'g1-pause-remark': 'G1 Remark Pause (seconds)',
'g1-pause-cleanup': 'G1 Cleanup Pau | se (seconds)',
'g1-pause-remark.ref-proc': 'G1 Remark: Reference Processing (seconds)',
'g1-pause-young.parallel': 'G1 Young GC Pause: Parallel Operations (ms)',
'g1-pause-young.parallel.gcworkers': 'G1 Young GC Pause: Number of Parallel GC Workers',
'g1-pause-young.parallel.gc-worker-start.avg': 'G1 Young GC Pause : Parallel : Avg Time spent in GC worker start (ms)',
'g1-pause-young.parallel.gc-worker-start.max': 'G1 Young GC Pause : Parallel : Max Time spent in GC worker start (ms)',
'g1-pause-young.parallel.ext-root-scanning.avg': 'G1 Young GC Pause: Avg Time spent in ext-root-scanning',
'g1-pause-young.parallel.ext-root-scanning.max': 'G1 Young GC Pause: Max Time spent in ext-root-scanning',
'g1-pause-young.parallel.update-rs.avg': 'G1 Young GC Pause: Parallel : Avg Time spent in updating Rsets',
'g1-pause-young.parallel.update-rs.max': 'G1 Young GC Pause: Parallel : Max Time spent in updating Rsets',
'g1 |
Seanmcn/poker | poker/website/pokerstars.py | Python | mit | 2,318 | 0.014668 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, division, print_function
from collections import namedtuple
from dateutil.parser import parse as parse_date
import requests
from lxml import etree
__all__ = ['get_current_tournaments', 'get_status', 'WEBSITE_URL', 'TOURNAMENTS_XML_URL',
'STATUS_URL']
WEBSITE_URL = 'http://www.pokerstars.eu'
TOURNAMENTS_XML_URL = WEBSITE_URL + '/datafeed_global/tournaments/all.xml'
STATUS_URL = 'http://www.psimg.com/datafeed/dyn_banners/summary.json.js'
# Fields mirror the columns of the PokerStars tournaments XML feed.
_Tournament = namedtuple('_Tournament',
                         'start_date '
                         'name '
                         'game '
                         'buyin '
                         'players '
                         )
"""Named tuple for upcoming pokerstars tournaments."""
def get_current_tournaments():
    """Get the next 200 tournaments from pokerstars."""
    feed = requests.get(TOURNAMENTS_XML_URL)
    document = etree.XML(feed.content)
    # '{*}' matches the element regardless of XML namespace.
    for node in document.iter('{*}tournament'):
        text = node.findtext
        yield _Tournament(
            start_date=parse_date(text('{*}start_date')),
            name=text('{*}name'),
            game=text('{*}game'),
            buyin=text('{*}buy_in_fee'),
            players=int(node.get('players')),
        )
# Aggregate service status as exposed by the summary.json.js feed.
_Status = namedtuple('_Status',
                     'updated '
                     'tables '
                     'next_update '
                     'players '
                     'clubs '
                     'active_tournaments '
                     'total_tournaments '
                     'sites '  # list of sites, including Play Money
                     'club_members '
                     )
"""Named tuple for PokerStars status."""

# Per-subsite breakdown of the same feed.
_SiteStatus = namedtuple('_SiteStatus',
                         'id '  # ".FR", ".ES", ".IT" or 'Play Money'
                         'tables '
                         'players '
                         'active_tournaments '
                         )
"""Named tuple for PokerStars status on different subsites like FR, ES IT or Play Money."""
def get_status():
    """Fetch the PokerStars status summary: players online, tables, etc."""
    payload = requests.get(STATUS_URL).json()
    summary = payload['tournaments']['summary']
    # Fold the Play Money figures into the per-site list so every subsite
    # is reported through the same _SiteStatus shape.
    site_dicts = summary.pop('site')
    play_money = summary.pop('play_money')
    play_money['id'] = 'Play Money'
    site_dicts.append(play_money)
    sites = tuple(_SiteStatus(**entry) for entry in site_dicts)
    updated = parse_date(summary.pop('updated'))
    return _Status(sites=sites, updated=updated, **summary)
|
carlmjohnson/django-json-form | jsonform_example/forms.py | Python | mit | 218 | 0.004587 | from django import forms
class ExampleForm(forms.Form):
    """Minimal demo form: one required CharField with a placeholder hint."""
    non_blank_field = forms.CharField(max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': "Must not be blank!",
        }),
    )
| |
gam17/QAD | qad_highlight.py | Python | gpl-3.0 | 4,234 | 0.021749 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
QAD Quantum Aided Design plugin
Classe per gestire l'evidenziazione delle geometrie
-------------------
begin : 2015-12-12
copyright : iiiii
email : hhhhh
developers | : bbbbb aaaaa ggggg
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of th | e GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from qgis.PyQt.QtCore import *
from qgis.PyQt.QtGui import *
from qgis.core import *
from qgis.gui import *
from .qad_variables import QadVariables
#===============================================================================
# getQGISColorForHighlight
#===============================================================================
def getQGISColorForHighlight():
    """
    Read the QGIS digitizing line color from the application settings and
    return it as a QColor, alpha channel included.
    """
    settings = QSettings()
    red = int(settings.value("/qgis/digitizing/line_color_red", 1))
    green = int(settings.value("/qgis/digitizing/line_color_green", 1))
    blue = int(settings.value("/qgis/digitizing/line_color_blue", 1))
    color = QColor(red, green, blue)
    # Settings store alpha as 0-255; setAlphaF expects 0.0-1.0.
    alpha = float(int(settings.value("/qgis/digitizing/line_color_alpha", 200)) / 255.0)
    color.setAlphaF(alpha)
    return color
#===============================================================================
# createHighlight
#===============================================================================
def createHighlight(mapCanvas, geometry_feature, layer, borderColor = None, fillColor = None):
    """
    Create a QgsHighlight for <geometry_feature> of <layer> on <mapCanvas>.
    When <borderColor> is None the QGIS digitizing color is used; when
    <fillColor> is None the fill falls back to the border color.
    :return: the configured QgsHighlight (already parented to the canvas scene)
    """
    # The original also read "/qgis/digitizing/line_width" into an unused
    # local; dropped, as QgsHighlight never received it.
    hl = QgsHighlight(mapCanvas, geometry_feature, layer)
    if borderColor is None:
        borderColor = getQGISColorForHighlight()
    hl.setColor(borderColor)
    if fillColor is None:
        hl.setFillColor(borderColor)
    else:
        hl.setFillColor(fillColor)
    return hl
# Classe che gestisce l'evidenziazione delle geometrie
class QadHighlight():
    """Tracks a set of QgsHighlight items used to highlight geometries on the
    map canvas, with bulk show/hide/reset operations."""

    def __init__(self, mapCanvas, borderColor = None, fillColor = None):
        # NOTE(review): borderColor/fillColor are accepted but not stored;
        # colors are resolved per geometry in addGeometry - confirm intended.
        self.mapCanvas = mapCanvas
        self.__highlight = []

    def __del__(self):
        # reset() already hides every item, detaches it from the scene and
        # empties the list; the original duplicated that cleanup inline here.
        self.reset()

    def hide(self):
        """Hide every highlight item without removing it."""
        for highlight in self.__highlight:
            highlight.hide()

    def show(self):
        """Show every tracked highlight item."""
        for highlight in self.__highlight:
            highlight.show()

    def addGeometry(self, geom, layer, borderColor = None, fillColor = None):
        """Create, show and track a highlight for <geom> of <layer>."""
        highlight = createHighlight(self.mapCanvas, geom, layer, borderColor, fillColor)
        highlight.show()
        self.__highlight.append(highlight)

    def addGeometries(self, geoms, layer, borderColor = None, fillColor = None):
        """Add one highlight per geometry in <geoms>."""
        for g in geoms:
            self.addGeometry(g, layer, borderColor, fillColor)

    def reset(self):
        """Hide all highlights, detach them from the canvas scene and forget them."""
        self.hide()
        for highlight in self.__highlight:
            self.mapCanvas.scene().removeItem(highlight)
        del self.__highlight[:]
|
dlutxx/cement | releases/2014-06-24 22 59.py | Python | mit | 60,847 | 0.048787 | #!python
#-*- encoding=utf-8 -*-
import sys, sqlite3, logging, os, os.path
import wx, time, re, copy, webbrowser
import wx.grid, wx.html
import json, math
# cfg start #
class Config:
    """Application-wide settings: runtime environment flag, file-system
    layout, and the label/key metadata describing a delivery sheet."""
    def __init__(self):
        # ('production','dev')[1] selects 'dev'; use index 0 for release builds.
        self.environment = ('production','dev')[1]
        # NOTE(review): unit/meaning of 'expiration' is not evident here - confirm.
        self.expiration = 20
        # All data lives under this fixed Windows root directory.
        self.rootdir = r'C:\xux'
        self.datapath = os.path.join(self.rootdir, 'data', 'sqlite3.db')
        self.cachepath = os.path.join(self.rootdir, 'cache')
        self.logpath = os.path.join(self.rootdir, 'log', 'app.log')
        self.printtpl = ''
        # Filled in at runtime: current operator and the wx application object.
        self.user = None
        self.app = None
        # Grid column labels (Chinese) and the matching Sheet dict keys;
        # the two sequences are index-aligned.
        self.sheetLabel = (u'规格',u'销售方式',u'单价',u'提货方式',u'开票员',u'日期',u'运输工具及号码',u'编号',u'计量',u'品名',u'金额',u'承运方式',u'数量',u'购货单位',u'单号')
        self.sheetKey = 'spec,salemanner,price,fetchmanner,user,date,carno,no2,unit,cargo,totalprice,tspmanner,amount,client,no1'.split(',')
    def getTitle(self):
        # Placeholder - currently returns None.
        pass
config = Config()
# cfg end #
# util start #
def checkNotEmpty( win ):
    """Validate that the wx text control <win> holds a non-blank value.

    Success: write back the stripped value, restore the background color,
    return True.  Failure: alert the user, mark the control pink, focus it,
    return False.
    """
    stripped = win.GetValue().strip()
    if stripped:
        win.SetValue(stripped)
        win.SetBackgroundColour(wx.SystemSettings_GetColour(wx.SYS_COLOUR_WINDOW))
        win.Refresh()
        return True
    alert(u'数据不能为空', u'输入有误')
    win.SetBackgroundColour('pink')
    win.SetFocus()
    win.Refresh()
    return False
def validateFloat(tc):
    """Validate that the wx text control <tc> holds a finite float.

    Success: the control text is normalized via str(float(...)) and True is
    returned.  Failure: the user is warned, the control turns pink and gets
    focus, and False is returned.
    """
    # Dropping every 'n' turns 'nan'/'inf' into strings float() rejects,
    # so the IEEE special values are refused along with plain garbage.
    val = tc.GetValue().strip().lower().replace('n', '') #nan, inf
    if not val:
        wx.MessageBox(u'数据不能为空', u'输入有误')
        tc.SetFocus()
        tc.SetBackgroundColour("pink")
        tc.Refresh()
        return False
    try:
        flt = float(val)
        tc.SetBackgroundColour(wx.SystemSettings_GetColour(wx.SYS_COLOUR_WINDOW))
        tc.SetValue( str(flt) )
        tc.Refresh()
        return True
    except:  # NOTE(review): bare except also swallows KeyboardInterrupt
        wx.MessageBox(u'数据格式不正确', u'输入有误')
        tc.SetFocus()
        tc.SetBackgroundColour("pink")
        tc.Refresh()
        return False
def cnNumber( flt ):
    """Convert a non-negative number (int, float or numeric string) to
    Chinese financial numerals, e.g. 123 -> 壹佰贰拾叁, 0.5 -> 零点伍.

    Returns '' for the empty string and for integer parts longer than
    9 digits.  Negative numbers and values that str() renders in
    scientific notation are not supported.
    """
    if '' == flt:
        return ''
    NUMBERS = u'零壹贰叁肆伍陆柒捌玖'
    # UNITS[0] is a space placeholder for the ones digit; stripped at the end.
    UNITS = u' 拾佰仟万拾佰仟亿'
    ret = ''
    flt = float(flt)
    left, rite = str(flt).split('.')
    if not left and not rite or len(left) > 9:
        return ''
    # Walk the integer digits from the ones place upward.
    left = ''.join(reversed(left))
    zeros = {}
    for i in range(0, len(left)):
        c = left[i]
        if int(c):
            ret = NUMBERS[int(c)] + UNITS[i] + ret
        else:
            if not i % 4:
                # Group boundary (ones/万/亿): keep the unit placeholder.
                ret = UNITS[i] + ret
            else:
                # Collapse runs of zeros to at most one 零 per 4-digit group.
                if zeros.get(int(i / 4), False) or left[i - i % 4] == '0':
                    continue
                zeros[int(i / 4)] = True
                ret = NUMBERS[int(c)] + ret
    if flt > int(flt):
        # Bug fix: for values < 1 the integer part renders as ' ' (the ones
        # placeholder), not '', so the old `if not ret` test never produced
        # the leading 零 (0.5 came out as 点伍 instead of 零点伍).
        if not ret.replace(' ', ''):
            ret = NUMBERS[0]
        ret += u'点'
        for i in rite:
            ret += NUMBERS[int(i)]
    return ret.replace(' ', '')
def alert(msg, title=u'注意'):
    """Show a modal wx message box with <msg> under window title <title>."""
    wx.MessageBox(msg, title)
class XLog:
    """Wrapper that configures a `logging` logger with both a file handler
    and a console handler (level DEBUG in dev, ERROR otherwise)."""
    # Shared singleton instance created lazily by getDefaultLogger().
    defaultLogger = None
    @staticmethod
    def getDefaultLogger():
        # Returns the underlying logging.Logger, not the XLog wrapper.
        if None == XLog.defaultLogger:
            XLog.defaultLogger = XLog('xux', config.logpath)
        return XLog.defaultLogger.log
    def __init__(self, name='xux', file='app.log'):
        # NOTE(review): constructing XLog twice for the same name stacks
        # duplicate handlers on the shared logger - confirm single use.
        self.log = logging.getLogger( name )
        if config.environment == 'dev':
            self.log.setLevel( logging.DEBUG )
        else:
            self.log.setLevel( logging.ERROR )
        fmt = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
        #file logger
        fh = logging.FileHandler( file )
        fh.setLevel( logging.DEBUG )
        fh.setFormatter( fmt )
        #console logger
        ch = logging.StreamHandler()
        ch.setLevel( logging.DEBUG )
        ch.setFormatter( fmt )
        self.log.addHandler( fh )
        self.log.addHandler( ch )
def getPrintTpl():
    """Return the HTML template for a single delivery-sheet printout.

    Placeholders ({no1}, {client}, ...) are filled via str.format from a
    Sheet dict; loading the page triggers window.print() onload.
    NOTE(review): the template also expects a 'bigamount' key that is not
    among the Sheet fields - presumably injected at print time; confirm.
    """
    return u'''<html>
<head><meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>发货单{no1}</title></head>
<body align="center" onload="window.print();">
<table border="0" cellpadding="1" cellspacing="0" align="center" width="100%"><!-- style="width:17cm;height:6.8cm;"-->
    <tr><td colspan="7" align="center"><font size="4">共 和 县 金 河 水 泥 有 限 责 任 公 司</font></td></tr>
    <tr><td colspan="7" align="center"><b><font size="5">产 成 品 发 货 单</font></b></td></tr>
    <tr><td colspan="5"> </td> <td align="right">提货方式</td><td align="center">{fetchmanner}</td></tr>
    <tr>
        <td align="center">单号:</td><td align="center">{no1}</td>
        <td align="center">日期</td><td align="center">{date}</td>
        <td> </td><td> </td>
        <td align="center">编号:{no2}</td>
    <tr>
    <tr><td colspan="7">
        <table border="1" cellspacing="0" cellpadding="5" width="100%" class="containner">
        <tr><td>购货单位</td><td align="center" colspan="5">{client}</td><td align="center" colspan="2">提货人签字</td><td colspan="2"> </td></tr>
        <tr><td width="13%">品名</td><td width="13%">规格</td><td width="9%">计量</td><td width="10%">单价</td><td width="10%">数量</td><td width="10%">金额</td><td width="8%" rowspan="2" align="center">提货<br/>数量</td><td width="8%">大写</td><td width="20%">{bigamount}</td><td>{unit}</td></tr>
        <tr><td>{cargo}</td><td>{spec}</td><td>{unit}</td><td>{price}</td><td>{amount}</td><td>{totalprice}</td><td>小写</td><td>{amount}</td><td>{unit}</td></tr>
        <tr><td>承运单位</td><td colspan="5">{tspmanner}</td><td colspan="2">运输工具</td><td colspan="2">{carno}</td></tr>
        <tr><td>销售方式</td><td colspan="2">{salemanner}</td><td>开票员</td><td>{user}</td><td colspan="3">收货方盖章(签字)</td><td colspan="3"> </td></tr>
        </table>
    </td></tr>
    <tr><td colspan="7" align="center"><font size="2">1白色(存根联)2粉色(财务联)3黄色(仓库联)4浅绿(客户联)5浅蓝(运费结账联)6白色(客户签收返回联)</font></td></tr>
</table>
</body>
</html>'''
def printSheetList( sheets ):
    """Render <sheets> (iterable of Sheet entities) into an HTML table,
    write it to the cache directory and open it in the browser, whose
    onload handler triggers the print dialog."""
    tpl = u'''
    <html><head><title>产成品货单列表</title><meta http-equiv="Content-Type" content="text/html; charset=utf-8"></head>
    <body onload="window.print();"><table width="100%" border="1" cellspacing="0" cellpadding="1">
    <tr><th>规格</th>
    <th>销售方式</th>
    <th>单价</th>
    <th>提货方式</th>
    <th>开票员</th>
    <th>日期</th>
    <th>运输工具及号码</th>
    <th>编号</th>
    <th>计量</th>
    <th>品名</th>
    <th>金额</th>
    <th>承运方式</th>
    <th>数量</th>
    <th>购货单位</th>
    <th>单号</th></tr>
    {rows}
    </table></body></html>
    '''
    rowTpl = u'''<tr><td>{spec}</td><td>{salemanner}</td><td>{price}</td><td>{fetchmanner}</td><td>{user}</td><td>{date}</td><td>{carno}</td><td>{no2}</td><td>{unit}</td><td>{cargo}</td><td>{totalprice}</td><td>{tspmanner}</td><td>{amount}</td><td>{client}</td><td>{no1}</td></tr>'''
    rows = u''
    for r in sheets:
        rows += rowTpl.format( **r.getDict() )
    html = tpl.format( rows=rows )
    filepath = os.path.join(config.cachepath, 'list.html')
    # 'with' closes the handle even if the write fails (the original left
    # the file object open on error and shadowed the builtin 'file').
    with open(filepath, 'wb') as fh:
        fh.write(html.encode('utf-8'))
    webbrowser.open( filepath )
# util end #
# entity start #
class Entity:
    """Thin wrapper around a plain dict holding a record's fields.

    Subscript reads fall back to '' for missing keys; subscript writes are
    silently ignored unless the key already exists (the schema is fixed by
    the dict passed to __init__).
    """
    def __init__(self, data):
        self._data = data
    def __getitem__(self, key, default=''):
        return self._data.get(key, default)
    def __setitem__(self, key, val):
        # `in` instead of dict.has_key(): equivalent, and not Python-2-only.
        if key in self._data:
            self._data[key] = val
    def getDict(self):
        """Return a shallow copy of the underlying dict."""
        return copy.copy(self._data)
    def __str__(self):
        # JSON dump of the current field values.
        return json.dumps( self._data )
class Sheet(Entity):
'''
drop table if exists sheet;
create table sheet(id integer primary key autoincrement,
fetchmanner text,no1 text,date text,no2 text,client text,
amount real,price real,totalprice real,tspmanner text,carno text,
salemanner text, user text, valid integer);
'''
def __init__(self, data=dict()):
_data = dict()
_data['id'] = data.get('id', 0)
_data['fetchmanner'] = data.get('fetchmanner', '')
_data['no1'] = data.get('no1', '')
_data['date'] = data.get('date', '')
_data['no2'] = data.get('no2', '')
# _data['no2'] = data.get('no2', u'吨')
_data['client'] = data.get('client', '')
_data['cargo'] = data.get('cargo', '')
_data['spec'] = data.get('spec', '')
_data['unit'] = data.get('unit', '')
_data['amount'] = data.get('amount', '')
_data['price'] = data.get('price', '')
_data['totalprice'] = data.get('totalprice', '')
_data['tspmanner'] = data.get('tspmanner', '')
_data['carno'] = data.get('carno', '')
_data['salemanner'] = data.get('salemanner', '')
_data['user'] = data.get('user', '')
_data['title'] = data.get('title', config.getTitle())
_data['valid'] = data.get('valid', 1)
Entity.__init__(self, _data)
@staticmethod
def makeNew():
no2 = config.app.modelSheet.getLatest(1)
if no2:
no2 = no2[0]['no1']
try:
no2 = int(no2) + 1
except:
no2 = 1
else:
|
shashisp/blumix-webpy | app/gluon/storage.py | Python | mit | 8,573 | 0.001283 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Provides:
- List; like list but returns None instead of IndexOutOfBounds
- Storage; like dictionary allowing also for `obj.foo` for `obj['foo']`
"""
try:
import cPickle as pickle
except:
import pickle
import copy_reg
import gluon.portalocker as portalocker
__all__ = ['List', 'Storage', 'Settings', 'Messages',
'StorageList', 'load_storage', 'save_storage']
DEFAULT = lambda:0
class Storage(dict):
    """
    A Storage object is like a dictionary except `obj.foo` can be used
    in addition to `obj['foo']`, and setting obj.foo = None deletes item foo.
    Example::
        >>> o = Storage(a=1)
        >>> print o.a
        1
        >>> o['a']
        1
        >>> o.a = 2
        >>> print o['a']
        2
        >>> del o.a
        >>> print o.a
        None
    """
    # No per-instance __dict__: attribute access is routed to the dict itself.
    __slots__ = ()
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    # dict.get (not dict.__getitem__): missing keys yield None, never KeyError.
    __getitem__ = dict.get
    __getattr__ = dict.get
    # NOTE(review): getattr(dict, self) passes the instance where an attribute
    # *name* is expected; this would raise TypeError if __getnewargs__ were
    # ever invoked - confirm against upstream web2py.
    __getnewargs__ = lambda self: getattr(dict,self).__getnewargs__(self)
    __repr__ = lambda self: '<Storage %s>' % dict.__repr__(self)
    # http://stackoverflow.com/questions/5247250/why-does-pickle-getstate-accept-as-a-return-value-the-very-instance-it-requi
    __getstate__ = lambda self: None
    __copy__ = lambda self: Storage(self)
    def getlist(self, key):
        """
        Returns a Storage value as a list.
        If the value is a list it will be returned as-is.
        If object is None, an empty list will be returned.
        Otherwise, `[value]` will be returned.
        Example output for a query string of `?x=abc&y=abc&y=def`::
            >>> request = Storage()
            >>> request.vars = Storage()
            >>> request.vars.x = 'abc'
            >>> request.vars.y = ['abc', 'def']
            >>> request.vars.getlist('x')
            ['abc']
            >>> request.vars.getlist('y')
            ['abc', 'def']
            >>> request.vars.getlist('z')
            []
        """
        value = self.get(key, [])
        if value is None or isinstance(value, (list, tuple)):
            return value
        else:
            return [value]
    def getfirst(self, key, default=None):
        """
        Returns the first value of a list or the value itself when given a
        `request.vars` style key.
        If the value is a list, its first item will be returned;
        otherwise, the value will be returned as-is.
        Example output for a query string of `?x=abc&y=abc&y=def`::
            >>> request = Storage()
            >>> request.vars = Storage()
            >>> request.vars.x = 'abc'
            >>> request.vars.y = ['abc', 'def']
            >>> request.vars.getfirst('x')
            'abc'
            >>> request.vars.getfirst('y')
            'abc'
            >>> request.vars.getfirst('z')
        """
        values = self.getlist(key)
        return values[0] if values else default
    def getlast(self, key, default=None):
        """
        Returns the last value of a list or value itself when given a
        `request.vars` style key.
        If the value is a list, the last item will be returned;
        otherwise, the value will be returned as-is.
        Simulated output with a query string of `?x=abc&y=abc&y=def`::
            >>> request = Storage()
            >>> request.vars = Storage()
            >>> request.vars.x = 'abc'
            >>> request.vars.y = ['abc', 'def']
            >>> request.vars.getlast('x')
            'abc'
            >>> request.vars.getlast('y')
            'def'
            >>> request.vars.getlast('z')
        """
        values = self.getlist(key)
        return values[-1] if values else default
def pickle_storage(s):
    """Reduce a Storage to (Storage, (plain dict,)) so pickling does not
    trip over the attribute-style access."""
    return Storage, (dict(s),)
# Register the reducer so pickle serializes Storage via pickle_storage.
copy_reg.pickle(Storage, pickle_storage)
# Types considered safe to pickle (py2: includes `long`).
PICKABLE = (str, int, long, float, bool, list, dict, tuple, set)
class StorageList(Storage):
    """
    Storage variant whose missing entries default to a fresh, stored []
    instead of None.
    """
    def __getitem__(self, key):
        return self.__getattr__(key)

    def __getattr__(self, key):
        if key not in self:
            # Materialize and remember an empty list on first access.
            self[key] = []
        return self.get(key)
def load_storage(filename):
    """Return a Storage unpickled from *filename*, holding a file lock."""
    locked = portalocker.LockedFile(filename, 'rb')
    try:
        contents = pickle.load(locked)
    finally:
        locked.close()
    return Storage(contents)
def save_storage(storage, filename):
    """Pickle *storage* (as a plain dict) into *filename*, holding a file lock."""
    locked = portalocker.LockedFile(filename, 'wb')
    try:
        pickle.dump(dict(storage), locked)
    finally:
        locked.close()
class Settings(Storage):
    """Storage that can be frozen: set `lock_keys` to forbid creating new
    keys, `lock_values` to forbid any further assignment."""
    def __setattr__(self, key, value):
        # The lock flags themselves stay writable so they can be toggled.
        if key != 'lock_keys' and self['lock_keys'] and key not in self:
            raise SyntaxError('setting key \'%s\' does not exist' % key)
        if key != 'lock_values' and self['lock_values']:
            raise SyntaxError('setting value cannot be changed: %s' % key)
        self[key] = value
class Messages(Settings):
    """Settings whose plain-string values are translated through T on access."""
    def __init__(self, T):
        Storage.__init__(self, T=T)
    def __getattr__(self, key):
        value = self[key]
        # Translate strings lazily; non-string values pass through untouched.
        if isinstance(value, str):
            return self.T(value)
        return value
class FastStorage(dict):
    """
    Eventually this should replace class Storage but causes memory leak
    because of http://bugs.python.org/issue1469629
    >>> s = FastStorage()
    >>> s.a = 1
    >>> s.a
    1
    >>> s['a']
    1
    >>> s.b
    >>> s['b']
    >>> s['b']=2
    >>> s['b']
    2
    >>> s.b
    2
    >>> isinstance(s,dict)
    True
    >>> dict(s)
    {'a': 1, 'b': 2}
    >>> dict(FastStorage(s))
    {'a': 1, 'b': 2}
    >>> import pickle
    >>> s = pickle.loads(pickle.dumps(s))
    >>> dict(s)
    {'a': 1, 'b': 2}
    >>> del s.b
    >>> del s.a
    >>> s.a
    >>> s.b
    >>> s['a']
    >>> s['b']
    """
    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Alias __dict__ to the dict itself: attribute access becomes a
        # native dict lookup (this aliasing is also the source of the leak).
        self.__dict__ = self
    def __getattr__(self, key):
        # Only reached when normal attribute lookup fails, i.e. the key is
        # absent; effectively returns None for missing keys.
        return getattr(self, key) if key in self else None
    def __getitem__(self, key):
        return dict.get(self, key, None)
    def copy(self):
        # Temporarily detach __dict__ so the copy does not alias self.
        self.__dict__ = {}
        s = FastStorage(self)
        self.__dict__ = self
        return s
    def __repr__(self):
        return '<Storage %s>' % dict.__repr__(self)
    def __getstate__(self):
        # Pickle the plain mapping, not the self-referential __dict__.
        return dict(self)
    def __setstate__(self, sdict):
        dict.__init__(self, sdict)
        self.__dict__ = self
    def update(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self.__dict__ = self
class List(list):
"""
Like a regular python list but a[i] if i is out of bounds returns None
instead of `IndexOutOfBounds`
"""
def __call__(self, i, default=DEFAULT, cast=None, otherwise=None):
"""Allows to use a special syntax for fast-check of `request.args()`
validity
Args:
i: index
default: use this value if arg not found
cast: type cast
otherwise: can be:
- None: results in a 404
- str: redirect to this address
- callable: calls the function (nothing is passed)
Example:
You can use::
request.args(0,default=0,cast=int,otherwise='http://error_url')
request.args(0,default=0,cast=int,otherwise=lambda:...)
"""
n = len(self)
if 0 <= i < n or -n <= i < 0:
value = self[i]
elif default is DEFAULT:
value = None
else:
value, cast = default, False
if cast:
try:
value = cast(value)
except (ValueError, TypeError):
from http import HTTP, redirect
if other |
nfscan/fuzzy_cnpj_matcher | service/sequential_search.py | Python | mit | 2,341 | 0.003417 | import time
from fuzzyset import FuzzySet
__author__ = 'paulo.rodenas'
class SequentialFuzzyCnpjMatcher:
    """
    Class that performs fuzzy string matching on CNPJs sequentially. For small
    fuzzyset this class is the easiest way to get started. However if you going
    for a large fuzzyset we strongly recommend using LocalParallelFuzzyCnpjMatcher
    instead.
    """
    def __init__(self):
        """
        Default constructor
        :return: a SequentialFuzzyCnpjMatcher instance
        """
        # Pre-computed paths of the 100 CNPJ base files, e.g.
        # ../bulk/cnpjs_base_0000000.txt ... cnpjs_base_99000000.txt
        # (relative path: assumes the process runs one level below 'bulk').
        self.__cnpj_bases = []
        for x in xrange(0, 100):
            idx = x * 1000000
            self.__cnpj_bases.append('../bulk/cnpjs_base_' + str(idx).zfill(7) +
                                     '.txt')
        self.__fuzzy_matcher = None
    def match_cnpj(self, cnpj, debug=False):
        """
        Search the closest valid CNPJ given a invalid one
        :param cnpj: a invalid CNPJ
        :param debug: whether you want to see debugging logs or not
        :return: a list of the most similar valid CNPJs to the one you've provided
        """
        best_matches = []
        for cnpj_base_str in self.__cnpj_bases:
            with open(cnpj_base_str) as f:
                # temp variables
                start_time = time.time()
                # Searching: a fresh FuzzySet per base file keeps memory bounded.
                self.__log('Searching for %s on %s' % (cnpj, cnpj_base_str), debug)
                self.__fuzzy_matcher = FuzzySet(f.read().splitlines())
                match = self.__fuzzy_matcher.get(cnpj)
                elapsed_time = time.time() - start_time
                self.__log('Best match for this file is %s and it took %d seconds'
                           % (match, elapsed_time), debug)
                # Appending to the best matches so far (m is a (score, value) pair)
                if not match is None:
                    for m in match:
                        best_matches.append(m[1])
        # Performing Fuzzy string match on the best results of each cnpj base file
        self.__fuzzy_matcher = FuzzySet(best_matches)
        return self.__fuzzy_matcher.get(cnpj)[0]
    def __log(self, msg, debug=False):
        """
        Prints a message to console depending on debug variable
        :param msg: a message string
        :param debug: a boolean value
        :return:
        """
        if debug:
            print msg
stafur/pyTRUST | paypal-rest-api-sdk-python/samples/subscription/billing_agreements/suspend_and_re_activate.py | Python | apache-2.0 | 1,184 | 0.004223 | from paypalrestsdk import BillingAgreement
# Sample: suspend a PayPal billing agreement, then re-activate it, printing
# the agreement state after each transition.
from paypalrestsdk import ResourceNotFound
import logging

# ID of the billing agreement to suspend and re-activate.
BILLING_AGREEMENT_ID = "I-HT38K76XPMGJ"
try:
    billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
    print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
    suspend_note = {
        "note": "Suspending the agreement"
    }
    if billing_agreement.suspend(suspend_note):
        # Re-fetch: suspend() does not refresh the local object's state.
        billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
        print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
        reactivate_note = {
            "note": "Reactivating the agreement"
        }
        if billing_agreement.reactivate(reactivate_note):
            # Would expect state has changed back to Active
            billing_agreement = BillingAgreement.find(BILLING_AGREEMENT_ID)
            print("Billing Agreement [%s] has state %s" % (billing_agreement.id, billing_agreement.state))
        else:
            print(billing_agreement.error)
    else:
        print(billing_agreement.error)
except ResourceNotFound as error:
    # Bug fix: ResourceNotFound was referenced here but never imported, so
    # this handler itself raised NameError when the agreement was missing.
    print("Billing Agreement Not Found")
|
isaachenrion/jets | src/architectures/nmp/message_passing/__init__.py | Python | bsd-3-clause | 440 | 0.002273 | from .message_passing_layers import MP_LAYE | RS
#from .adjacency import construct_adjacency_matrix_layer
'''
This module implements the core message passing operations.
###adjacency.py <-- compute an adjacency matrix based on vertex data.
message_passing.py <-- run a single iteration of message passing.
message.py <-- compute a mes | sage, given a hidden state.
vertex_update.py <-- compute a vertex's new hidden state, given a message.
'''
|
ZackYovel/studybuddy | server/studybuddy/discussions/migrations/0006_auto_20150430_1648.py | Python | mit | 428 | 0 | # -*- coding: utf-8 -*-
from __future__ import unic | ode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: relaxes Post.replays_to to allow NULL.
    # NOTE(review): 'replays_to' looks like a typo of 'replies_to' in the
    # model itself; renaming it would require a separate migration.
    dependencies = [
        ('discussions', '0005_auto_20150430_1645'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='replays_to',
            field=models.ForeignKey(to='discussions.Post', null=True),
        ),
    ]
|
MariusWirtz/TM1py | TM1py/Objects/NativeView.py | Python | mit | 9,988 | 0.001902 | # -*- coding: utf-8 -*-
import json
from TM1py.Objects.Axis import ViewAxisSelection, ViewTitleSelection
from TM1py.Objects.Subset import Subset, AnonymousSubset
from TM1py.Objects.View import View
class NativeView(View):
""" Abstraction of TM1 NativeView (classic cube view)
:Notes:
Complete, functional and tested
"""
    def __init__(self,
                 cube_name,
                 view_name,
                 suppress_empty_columns=False,
                 suppress_empty_rows=False,
                 format_string="0.#########",
                 titles=None,
                 columns=None,
                 rows=None):
        # Axis lists default to None and are replaced with fresh lists here,
        # avoiding the shared-mutable-default pitfall.
        super().__init__(cube_name, view_name)
        self._suppress_empty_columns = suppress_empty_columns
        self._suppress_empty_rows = suppress_empty_rows
        self._format_string = format_string
        self._titles = titles if titles else []
        self._columns = columns if columns else []
        self._rows = rows if rows else []
    @property
    def body(self):
        # JSON body used when creating/updating the view through the REST API.
        return self._construct_body()
    @property
    def rows(self):
        return self._rows
    @property
    def columns(self):
        return self._columns
    @property
    def MDX(self):
        # Alias for as_MDX.
        return self.as_MDX
    @property
    def as_MDX(self):
        """ Build a valid MDX Query from an Existing cubeview.
            Takes Zero suppression into account.
            Throws an Exception when no elements are place on the columns.
            Subsets are referenced in the result-MDX through the TM1SubsetToSet Function
        :return: String, the MDX Query
        """
        mdx = 'SELECT '
        # i == 0 -> ROWS axis, i == 1 -> COLUMNS axis.
        # NOTE(review): the j == 0 branches mix suppress_empty_rows and
        # suppress_empty_columns regardless of which axis is being built;
        # this looks inconsistent - confirm against upstream TM1py.
        for i, axe in enumerate((self._rows, self._columns)):
            for j, axis_selection in enumerate(axe):
                subset = axis_selection.subset
                if isinstance(subset, AnonymousSubset):
                    if subset.expression is not None:
                        # Anonymous subset defined by an MDX expression.
                        if j == 0:
                            if self.suppress_empty_rows:
                                mdx += 'NON EMPTY '
                            mdx += subset.expression
                        else:
                            mdx += '*' + subset.expression
                    else:
                        # Anonymous subset defined by an explicit element list.
                        elements_as_unique_names = ['[' + axis_selection.dimension_name + '].[' + elem + ']'
                                                    for elem
                                                    in subset.elements]
                        mdx_set = '{' + ','.join(elements_as_unique_names) + '}'
                        if j == 0:
                            if self.suppress_empty_columns:
                                mdx += 'NON EMPTY '
                            mdx += mdx_set
                        else:
                            mdx += '*' + mdx_set
                else:
                    # Registered (named) subset: reference it via TM1SubsetToSet.
                    mdx_set = 'TM1SubsetToSet([{}],"{}")'.format(axis_selection.dimension_name, subset.name)
                    if j == 0:
                        if self.suppress_empty_columns:
                            mdx += 'NON EMPTY '
                        mdx += mdx_set
                    else:
                        mdx += '*' + mdx_set
            if i == 0:
                if len(self._rows) > 0:
                    mdx += ' on {}, '.format('ROWS')
            else:
                mdx += ' on {} '.format('COLUMNS')
        # append the FROM statement
        mdx += ' FROM [' + self._cube + '] '
        # iterate through titles - append the WHERE statement
        if len(self._titles) > 0:
            unique_names = []
            for title_selection in self._titles:
                dimension_name = title_selection.dimension_name
                selection = title_selection.selected
                unique_names.append('[' + dimension_name + '].[' + selection + ']')
            mdx += 'WHERE (' + ','.join(unique_names) + ') '
        return mdx
    @property
    def suppress_empty_cells(self):
        # True only when both axes suppress empty values.
        return self._suppress_empty_columns and self._suppress_empty_rows
    @property
    def suppress_empty_columns(self):
        return self._suppress_empty_columns
    @property
    def suppress_empty_rows(self):
        return self._suppress_empty_rows
    @property
    def format_string(self):
        return self._format_string
    @suppress_empty_cells.setter
    def suppress_empty_cells(self, value):
        # Fans out to both axis-specific setters.
        self.suppress_empty_columns = value
        self.suppress_empty_rows = value
    @suppress_empty_rows.setter
    def suppress_empty_rows(self, value):
        self._suppress_empty_rows = value
    @suppress_empty_columns.setter
    def suppress_empty_columns(self, value):
        self._suppress_empty_columns = value
    @format_string.setter
    def format_string(self, value):
        self._format_string = value
    def add_column(self, dimension_name, subset=None):
        """ Add Dimension or Subset to the column-axis
        :param dimension_name: name of the dimension
        :param subset: instance of TM1py.Subset. Can be None instead.
        :return:
        """
        # NOTE(review): subset=None appears to select the whole dimension -
        # confirm in ViewAxisSelection.
        view_axis_selection = ViewAxisSelection(dimension_name=dimension_name, subset=subset)
        self._columns.append(view_axis_selection)
def remove_column(self, dimension_name):
""" remove dimension from the column axis
:param dimension_name:
:return:
"""
for column in self._columns:
if column.dimension_name == dimension_name:
self._columns.remove(column)
    def add_row(self, dimension_name, subset=None):
        """ Add Dimension or Subset to the row-axis
        :param dimension_name: name of the dimension
        :param subset: instance of TM1py.Subset. Can be None instead.
        :return:
        """
        view_axis_selection = ViewAxisSelection(dimension_name=dimension_name, subset=subset)
        self._rows.append(view_axis_selection)
def remove_row(self, dimension_name):
""" remove dimension from the row axis
:param dimension_name:
:return:
"""
for row in self._rows:
if row.dimension_name == dimension_name:
self._rows.remove(row)
    def add_title(self, dimension_name, selection, subset=None):
        """ Add subset and element to the titles-axis
        :param dimension_name: name of the dimension.
        :param selection: name of an element.
        :param subset: instance of TM1py.Subset. Can be None instead.
        :return:
        """
        view_title_selection = ViewTitleSelection(dimension_name, subset, selection)
        self._titles.append(view_title_selection)
def remove_title(self, dimension_name):
""" remove dimension from the titles-axis
:param dimension_name: name of the dimension.
:return:
""" |
for title in self._titles:
if title.dimension_name == dimension_name:
self._titles.remove(title)
    @classmethod
    def from_json(cls, view_as_json, cube_name=None):
        """ Alternative constructor
        :Parameters:
            `view_as_json` : string, JSON
        :Returns:
            `View` : an instance of this class
        """
        # Decode, then delegate the actual construction to from_dict.
        view_as_dict = json.loads(view_as_json)
        return NativeView.from_dict(view_as_dict, cube_name)
@classmethod
def from_dict(cls, view_as_dict, cube_name=None):
titles, columns, rows = [], [], []
for selection in view_as_dict['Titles']:
if selection['Subset']['Name'] == '':
subset = AnonymousSubset.from_dict(selection['Subset'])
else:
subset = Subset.from_dict(selection['Subset'])
selected = selection['Selected']['Name']
titles.append(ViewTitleSelection(dimension_name=subset.dimension_name,
subset=subset, selected=selected))
for i, axe in enumerate([view_as_dict['Columns'], view_as_dict['Rows']]):
for selection in axe:
if selection['Subset']['Name'] == '':
subset = AnonymousSubset.from_dict(selection['Subset'])
|
fghaas/django-oscar-vat_moss | setup.py | Python | bsd-3-clause | 1,343 | 0 | #!/usr/bin/env python
from setuptools import setup, find_packages

from oscar_vat_moss import get_version


def _read(path):
    # Read a sibling file with a context manager; the original leaked the
    # open() handles for README.rst and LICENSE.
    with open(path) as f:
        return f.read()


setup(
    name='django-oscar-vat_moss',
    version=get_version(),
    url='https://github.com/hastexo/django-oscar-vat_moss',
    description=(
        "EU VATMOSS support for django-oscar"),
    long_description=_read('README.rst'),
    keywords="VATMOSS, Tax, Oscar",
    # NOTE(review): setuptools expects a short identifier (e.g. 'BSD') in
    # 'license', not the whole file text; kept as-is apart from closing the
    # file handle.
    license=_read('LICENSE'),
    platforms=['linux'],
    packages=find_packages(exclude=['sandbox*', 'tests*']),
    include_package_data=True,
    install_requires=[
        'requests>=1.0',
        'django-localflavor'],
    extras_require={
        'oscar': ["django-oscar>=1.1"]
    },
    # See http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Other/Nonlisted Topic'],
)
|
biothings/biothings_explorer | tests/test_apis/test_corddisease.py | Python | apache-2.0 | 4,836 | 0.000207 | import unittest
from biothings_explorer.registry import Registry
from biothings_explorer.user_query_dispatcher import SingleEdgeQueryDispatcher
from .utils import get_apis
reg = Registry()
class TestSingleHopQuery(unittest.TestCase):
def test_disease2protein(self):
"""Test gene-protein"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Protein",
input_cls="Disease",
input_id="DOID",
pred="related_to",
output_id="PR",
values="DOID:12143",
)
seqd.query()
self.assertTrue("PR:000007572" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["PR:000007572"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2genomicentity(self):
"""Test gene-protein"""
seqd = SingleEdgeQueryDispatcher(
output_cls="GenomicEntity",
input_cls="Disease",
pred="related_to",
input_id="DOID",
output_id="SO",
values="DOID:12143",
)
seqd.query()
self.assertTrue("SO:0000999" in seqd.G)
self.assertTrue("SO:0001853" in seqd.G)
def test_disease2chemicalsubstance(self):
"""Test gene-genomic entity"""
seqd = SingleEdgeQueryDispatcher(
output_cls="ChemicalSubstance",
input_cls="Disease",
input_id="DOID",
values="DOID:12143",
output_id="CHEBI",
)
seqd.query()
self.assertTrue("CHEBI:65349" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["CHEBI:65349"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2gene(self):
"""Test gene-gene"""
seqd = SingleEdgeQueryDispatcher(
output_cls="Gene", input_cls="Disease", input_id="DOID", values="DOID:12143"
)
seqd.query()
self.assertTrue("DHDDS" in seqd.G)
self.assertTrue("RPL3" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["DHDDS"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2anatomy(self):
"""Test gene-anatomy"""
seqd = SingleEdgeQueryDispatcher(
output_cls="AnatomicalEntity",
input_cls="Disease",
input_id="DOID",
output_id="UBERON",
values="DOID:12143",
)
seqd.query()
self.assertTrue("UBERON:0007023" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["UBERON:0007023"]
self.assertTrue("CORD Disease API" in get_apis(edges))
def test_disease2ma(self):
"""Test gene-molecular_activity"""
seqd = SingleEdgeQueryDispatcher(
output_cls="MolecularActivity",
input_cls="Disease",
input_id="DOID",
output_id="GO",
values="DOID:12143",
)
seqd.query()
self.assertTrue("GO:0004935" in seqd.G)
edges = seqd.G["DOID:DOID:12143"]["GO:0004935"]
self.assertTrue("CORD Disease API" in get_apis(edges))
    def test_disease2bp(self):
        """Disease -> BiologicalProcess: DOID:12143 should link to GO:0007605 via the CORD Disease API."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="BiologicalProcess",
            input_cls="Disease",
            input_id="DOID",
            values="DOID:12143",
            output_id="GO",
        )
        seqd.query()
        self.assertTrue("GO:0007605" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["GO:0007605"]
        self.assertTrue("CORD Disease API" in get_apis(edges))
    def test_disease2cc(self):
        """Disease -> CellularComponent: DOID:0001816 should link to GO:0030017 via the CORD Disease API."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="CellularComponent",
            input_cls="Disease",
            input_id="DOID",
            output_id="GO",
            values="DOID:0001816",
        )
        seqd.query()
        self.assertTrue("GO:0030017" in seqd.G)
        edges = seqd.G["DOID:DOID:0001816"]["GO:0030017"]
        self.assertTrue("CORD Disease API" in get_apis(edges))
    def test_disease2cell(self):
        """Disease -> Cell: DOID:12143 should link to CL:0000731."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="Cell",
            input_cls="Disease",
            input_id="DOID",
            output_id="CL",
            values="DOID:12143",
        )
        seqd.query()
        self.assertTrue("CL:0000731" in seqd.G)
    def test_disease2disease(self):
        """Disease -> Disease: DOID:12143 should link to DOID:225 via the CORD Disease API."""
        seqd = SingleEdgeQueryDispatcher(
            output_cls="Disease",
            input_cls="Disease",
            input_id="DOID",
            output_id="DOID",
            values="DOID:12143",
        )
        seqd.query()
        self.assertTrue("DOID:225" in seqd.G)
        edges = seqd.G["DOID:DOID:12143"]["DOID:225"]
        self.assertTrue("CORD Disease API" in get_apis(edges))
|
rodrigolucianocosta/ControleEstoque | rOne/Storage101/django-localflavor/django-localflavor-1.3/localflavor/mk/forms.py | Python | gpl-3.0 | 3,600 | 0.001667 | from __future__ import unicode_literals
import datetime
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, Select
from django.utils.translation import ugettext_lazy as _
from .mk_choices import MK_MUNICIPALITIES
class MKIdentityCardNumberField(RegexField):
    """A form field for Macedonian identity card numbers.

    Accepts both the old format (4 to 7 digits) and the new one
    (an uppercase letter followed by 7 digits).
    """
    default_error_messages = {
        'invalid': _('Identity card numbers must contain'
                     ' either 4 to 7 digits or an uppercase letter and 7 digits.'),
    }
    def __init__(self, *args, **kwargs):
        # New format: one capital letter + 7 digits; old format: 4-7 digits.
        id_card_pattern = r'(^[A-Z]{1}\d{7}$)|(^\d{4,7}$)'
        kwargs['max_length'] = 8
        kwargs['min_length'] = None
        super(MKIdentityCardNumberField, self).__init__(
            id_card_pattern, *args, **kwargs)
class MKMunicipalitySelect(Select):
    """A ``Select`` widget listing Macedonian municipalities.

    Each choice pairs a two-character municipality code (the value)
    with the municipality's name (the label).
    """
    def __init__(self, attrs=None):
        municipality_choices = MK_MUNICIPALITIES
        super(MKMunicipalitySelect, self).__init__(
            attrs, choices=municipality_choices)
class UMCNField(RegexField):
    """
    A form field that validates input as a unique master citizen
    number.
    The format of the unique master citizen number has been kept the same from
    Yugoslavia. It is still in use in other countries as well, it is not applicable
    solely in Macedonia. For more information see:
    https://secure.wikimedia.org/wikipedia/en/wiki/Unique_Master_Citizen_Number
    A value will pass validation if it complies to the following rules:
    * Consists of exactly 13 digits
    * The first 7 digits represent a valid past date in the format DDMMYYY
    * The last digit of the UMCN passes a checksum test
    """
    default_error_messages = {
        'invalid': _('This field should contain exactly 13 digits.'),
        'date': _('The first 7 digits of the UMCN must represent a valid past date.'),
        'checksum': _('The UMCN is not valid.'),
    }
    def __init__(self, *args, **kwargs):
        # Exactly 13 digits; the regex enforces format, clean() the semantics.
        kwargs['min_length'] = None
        kwargs['max_length'] = 13
        super(UMCNField, self).__init__(r'^\d{13}$', *args, **kwargs)
    def clean(self, value):
        # Let RegexField enforce "exactly 13 digits" first ('invalid' error),
        # then apply the date and checksum rules on top.
        value = super(UMCNField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        if not self._validate_date_part(value):
            raise ValidationError(self.error_messages['date'])
        if self._validate_checksum(value):
            return value
        else:
            raise ValidationError(self.error_messages['checksum'])
    def _validate_checksum(self, value):
        # Mod-11 control digit: digits a..l are weighted pairwise
        # (7,6,5,4,3,2 over positions i and i+6); K is the 13th (control)
        # digit. m == 10 marks an unassignable number, m == 11 means K
        # must be 0.
        a, b, c, d, e, f, g, h, i, j, k, l, K = [
            int(digit) for digit in value]
        m = 11 - ((7 * (a + g) + 6 * (b + h) + 5 * (
            c + i) + 4 * (d + j) + 3 * (e + k) + 2 * (f + l)) % 11)
        if (m >= 1 and m <= 9) and K == m:
            return True
        elif m == 11 and K == 0:
            return True
        else:
            return False
    def _validate_date_part(self, value):
        # Leading 7 digits are DDMMYYY; the 3-digit year is expanded to a
        # full year: >= 800 -> 1800s/1900s (+1000), otherwise 2000s (+2000).
        daypart, monthpart, yearpart = int(value[:2]), int(value[2:4]), int(value[4:7])
        if yearpart >= 800:
            yearpart += 1000
        else:
            yearpart += 2000
        try:
            date = datetime.datetime(year=yearpart, month=monthpart, day=daypart).date()
        except ValueError:
            return False
        # The birth date must lie strictly in the past.
        if date >= datetime.datetime.now().date():
            return False
        return True
|
smmribeiro/intellij-community | plugins/hg4idea/testData/bin/hgext/git/gitutil.py | Python | apache-2.0 | 1,109 | 0 | """utilities to assist in working with pygit2"""
from __future__ import absolute_import
from mercurial.node import bin, hex, sha1nodeconstants
from mercurial import pycompat
pygit2_module = None
def get_pygit2():
    """Lazily import and cache the ``pygit2`` module.

    Returns the module object, or ``None`` if pygit2 cannot be imported.
    """
    global pygit2_module
    if pygit2_module is None:
        try:
            import pygit2 as pygit2_module
            # Probe for InvalidSpecError, presumably to reject pygit2
            # versions that predate it. NOTE(review): on AttributeError the
            # imported module is still cached and returned -- confirm that
            # is intended.
            pygit2_module.InvalidSpecError
        except (ImportError, AttributeError):
            pass
    return pygit2_module
def pygit2_version():
    """Return a bytes blurb describing the available pygit2 version."""
    version_text = "N/A"
    pygit2_mod = get_pygit2()
    if pygit2_mod:
        # Equivalent to the try/except AttributeError fallback.
        version_text = getattr(pygit2_mod, "__version__", "N/A")
    return b"(pygit2 %s)" % version_text.encode("utf-8")
def togitnode(n):
    """Convert a 20-byte Mercurial binary node to a unicode hex node.

    pygit2 and sqlite both need nodes as strings, not bytes.
    """
    assert len(n) == 20
    hexlified = hex(n)
    return pycompat.sysstr(hexlified)
def fromgitnode(n):
    """Convert a 40-char hex node string back to binary (opposite of togitnode)."""
    assert len(n) == 40
    raw = n.encode('ascii') if pycompat.ispy3 else n
    return bin(raw)
# Hex-string form of the all-zeros SHA-1 node (the "null" revision).
nullgit = togitnode(sha1nodeconstants.nullid)
|
m038/superdesk-content-api | content_api/items/service.py | Python | agpl-3.0 | 17,471 | 0.000114 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import functools
import json
import logging
from datetime import datetime
from eve.utils import ParsedRequest
from flask import current_app as app
from flask import request
from content_api.errors import BadParameterValueError, UnexpectedParameterError
from content_api.items.resource import ItemsResource
from superdesk.services import BaseService
from superdesk.utc import utcnow
from urllib.parse import urljoin, urlparse, quote
from werkzeug.datastructures import MultiDict
from content_api.assets.util import url_for_media
from content_api.app.settings import ELASTIC_DATE_FORMAT
from superdesk.datalayer import InvalidSearchString
logger = logging.getLogger(__name__)
class ItemsService(BaseService):
"""
A service that knows how to perform CRUD operations on the `item`
content types.
Serves mainly as a proxy to the data layer.
"""
def find_one(self, req, **lookup):
"""Retrieve a specific item.
:param req: object representing the HTTP request
:type req: `eve.utils.ParsedRequest`
:param dict lookup: requested item lookup, contains its ID
:return: requested item (if found)
:rtype: dict or None
"""
if req is None:
req = ParsedRequest()
allowed_params = {'include_fields', 'exclude_fields'}
self._check_for_unknown_params(
req, whitelist=allowed_params, allow_filtering=False)
self._set_fields_filter(req) # Eve's "projection"
return super().find_one(req, **lookup)
    def get(self, req, lookup):
        """Retrieve a list of items that match the filter criteria (if any)
        passed along the HTTP request.
        :param req: object representing the HTTP request
        :type req: `eve.utils.ParsedRequest`
        :param dict lookup: sub-resource lookup from the endpoint URL
        :return: database results cursor object
        :rtype: `pymongo.cursor.Cursor`
        """
        if req is None:
            req = ParsedRequest()
        allowed_params = {
            'q', 'start_date', 'end_date',
            'include_fields', 'exclude_fields'
        }
        self._check_for_unknown_params(req, whitelist=allowed_params)
        # Rebuild req.args from scratch so only sanitized, known
        # parameters are forwarded to the data layer.
        request_params = req.args or {}
        req.args = {}
        # set the full-text search query
        if 'q' in request_params:
            req.args['q'] = request_params['q']
            req.args['default_operator'] = 'OR'
        # set the date range filter (defaults applied when absent)
        start_date, end_date = self._get_date_range(request_params)
        date_filter = self._create_date_range_filter(start_date, end_date)
        req.args['filter'] = json.dumps(date_filter)
        self._set_fields_filter(req)  # Eve's "projection"
        try:
            return super().get(req, lookup)
        except InvalidSearchString:
            # Surface a client error instead of a 500 for malformed queries.
            raise BadParameterValueError('invalid search text')
    def on_fetched_item(self, document):
        """Event handler invoked when a single item is retrieved from database.
        It triggers the post-processing of the fetched item (URI injection,
        removal of Eve-internal fields, media URL resolution).
        :param dict document: fetched MongoDB document representing the item
        """
        self._process_fetched_object(document)
    def on_fetched(self, result):
        """Event handler when a collection of items is retrieved from database.
        For each item in the fetched collection it triggers the post-processing
        of it.
        It also changes the default-generated HATEOAS "self" link so that it
        does not expose the internal DB query details, but instead reflects
        what the client has sent in request.
        :param dict result: dictionary containing the list of MongoDB documents
            (the fetched items) and some metadata, e.g. pagination info
        """
        for document in result['_items']:
            self._process_fetched_object(document)
        if '_links' in result:  # might not be present if HATEOAS disabled
            # Mirror the client's own request URL (path + query) instead of
            # the internally generated one.
            url_parts = urlparse(request.url)
            result['_links']['self']['href'] = '{}?{}'.format(
                url_parts.path[1:],  # relative path, remove opening slash
                url_parts.query
            )
    def _process_fetched_object(self, document):
        """Does some processing on the raw document fetched from database.
        It sets the item's `uri` field and removes all the fields added by the
        `Eve` framework that are not part of the NINJS standard (except for
        the HATEOAS `_links` object).
        It also sets the URLs for all externally referenced media content.
        :param dict document: MongoDB document to process
        """
        document['uri'] = self._get_uri(document)
        # Strip Eve-internal bookkeeping fields (not part of NINJS).
        for field_name in ('_id', '_etag', '_created', '_updated'):
            document.pop(field_name, None)
        if 'renditions' in document:
            # Resolve stored media identifiers into servable URLs.
            for _k, v in document['renditions'].items():
                if 'media' in v:
                    v['href'] = url_for_media(v['media'])
def _get_uri(self, document):
"""Return the given document's `uri`.
:param dict document: MongoDB document fetched from database
"""
if document.get('type') == 'composite':
endpoint_name = 'packages'
else:
endpoint_name = 'items'
resource_url = '{api_url}/{endpoint}/'.format(
api_url=app.config['PUBLICAPI_URL'],
endpoint=app.config['URLS'][endpoint_name]
)
return urljoin(resource_url, quote(document['_id']))
    def _check_for_unknown_params(
        self, request, whitelist, allow_filtering=True
    ):
        """Check if the request contains only allowed parameters.
        :param req: object representing the HTTP request
        :type req: `eve.utils.ParsedRequest`
        :param whitelist: iterable containing the names of allowed parameters.
        :param bool allow_filtering: whether or not the filtering parameter is
            allowed (True by default). Used for disallowing it when retrieving
            a single object.
        :raises UnexpectedParameterError:
            * if the request contains a parameter that is not whitelisted
            * if the request contains more than one value for any of the
              parameters
        """
        request_params = (request.args or MultiDict())
        if not allow_filtering:
            # Single-object retrieval: reject filtering parameters with a
            # tailored message before the generic whitelist check below.
            err_msg = ("Filtering{} is not supported when retrieving a "
                       "single object (the \"{param}\" parameter)")
            if 'q' in request_params.keys():
                desc = err_msg.format('', param='q')
                raise UnexpectedParameterError(desc=desc)
            if 'start_date' in request_params.keys():
                desc = err_msg.format(' by date range', param='start_date')
                raise UnexpectedParameterError(desc=desc)
            if 'end_date' in request_params.keys():
                desc = err_msg.format(' by date range', param='end_date')
                raise UnexpectedParameterError(desc=desc)
        for param_name in request_params.keys():
            if param_name not in whitelist:
                raise UnexpectedParameterError(
                    desc="Unexpected parameter ({})".format(param_name)
                )
            # MultiDict allows repeated keys; only a single value is accepted.
            if len(request_params.getlist(param_name)) > 1:
                desc = "Multiple values received for parameter ({})"
                raise UnexpectedParameterError(desc=desc.format(param_name))
def _get_date_range(self, request_params):
"""Extract the start and end date limits from request parameters.
If start and/or end date parameter is not present, a default value is
returned for the missing parameter(s).
:param dict request_params: request parameter names and their
corresponding values
:return: a (start_date, end_date) tu |
IvIePhisto/Ancestration | tests/adopt_into_b.py | Python | mit | 181 | 0.005525 | class ClassC(object):
FAMILY_INHERIT = {'a'}
def c(self):
| return 'family_b: C'
@classmethod
def super_family(cls):
return cls.module.super_famil | y
|
opencast/pyCA | pyca/ui/jsonapi.py | Python | lgpl-3.0 | 9,716 | 0 | # -*- coding: utf-8 -*-
from flask import jsonify, make_response, request
from pyca.config import config
from pyca.db import Service, ServiceStatus, UpcomingEvent, \
RecordedEvent, UpstreamState
from pyca.db import with_session, Status, ServiceStates
from pyca.ui import app
from pyca.ui.utils import requires_auth, jsonapi_mediatype
from pyca.ui.opencast_commands import schedule
from pyca.utils import get_service_status, ensurelist, timestamp
import logging
import os
import psutil
import shutil
import subprocess
logger = logging.getLogger(__name__)
def make_error_response(error, status=500):
    '''Build a response carrying a single jsonapi error object.'''
    error_object = {'status': status, 'title': error}
    payload = {'errors': [error_object]}
    return make_response(jsonify(payload), status)
def make_data_response(data, status=200):
    '''Build a response wrapping one or more jsonapi data objects.'''
    payload = {'data': ensurelist(data)}
    return make_response(jsonify(payload), status)
@app.route('/api/name')
@requires_auth
@jsonapi_mediatype
def get_name():
    '''Serve the name of the capture agent via json.
    '''
    return make_response(
        jsonify({'meta': {'name': config('agent', 'name')}}))
@app.route('/api/previews')
@requires_auth
@jsonapi_mediatype
def get_images():
    '''Serve the list of preview images via json.
    '''
    # Resolve configured preview paths ({{previewdir}} placeholder included)
    preview = config('capture', 'preview')
    previewdir = config('capture', 'preview_dir')
    preview = [p.replace('{{previewdir}}', previewdir) for p in preview]
    # Pair each path with its positional index so ids stay stable even
    # when some of the files are missing from disk.
    preview = zip(preview, range(len(preview)))
    # Keep only the previews whose file actually exists.
    preview = [{'attributes': {'id': p[1]}, 'id': str(p[1]), 'type': 'preview'}
               for p in preview if os.path.isfile(p[0])]
    return make_data_response(preview)
@app.route('/api/services')
@requires_auth
@jsonapi_mediatype
def internal_state():
    '''Serve a json representation of the internal agent state (one
    status string per pyCA service) as meta data.
    '''
    data = {'services': {
        'capture': ServiceStatus.str(get_service_status(Service.CAPTURE)),
        'ingest': ServiceStatus.str(get_service_status(Service.INGEST)),
        'schedule': ServiceStatus.str(get_service_status(Service.SCHEDULE)),
        'agentstate': ServiceStatus.str(get_service_status(Service.AGENTSTATE))
    }}
    return make_response(jsonify({'meta': data}))
@app.route('/api/events')
@requires_auth
@jsonapi_mediatype
@with_session
def events(db):
    '''Serve a JSON representation of events, split into upcoming and
    already recorded events (upcoming first, recorded newest-first).
    '''
    upcoming_events = db.query(UpcomingEvent)\
        .order_by(UpcomingEvent.start)
    recorded_events = db.query(RecordedEvent)\
        .order_by(RecordedEvent.start.desc())
    result = [event.serialize() for event in upcoming_events]
    result += [event.serialize() for event in recorded_events]
    return make_data_response(result)
@app.route('/api/events/<uid>')
@requires_auth
@jsonapi_mediatype
@with_session
def event(db, uid):
    '''Return a specific event's JSON, checking recorded events first and
    falling back to the upcoming-events cache. 404 if neither matches.
    '''
    event = db.query(RecordedEvent).filter(RecordedEvent.uid == uid).first() \
        or db.query(UpcomingEvent).filter(UpcomingEvent.uid == uid).first()
    if event:
        return make_data_response(event.serialize())
    return make_error_response('No event with specified uid', 404)
@app.route('/api/events/<uid>', methods=['DELETE'])
@requires_auth
@jsonapi_mediatype
@with_session
def delete_event(db, uid):
    '''Delete a specific event identified by its uid. Note that only recorded
    events can be deleted. Events in the buffer for upcoming events are
    regularly replaced anyway and a manual removal could have unpredictable
    effects.
    Use ?hard=true parameter to delete the recorded files on disk as well.
    Returns 204 if the action was successful.
    Returns 404 if event does not exist
    '''
    logger.info('deleting event %s via api', uid)
    events = db.query(RecordedEvent).filter(RecordedEvent.uid == uid)
    if not events.count():
        return make_error_response('No event with specified uid', 404)
    hard_delete = request.args.get('hard', 'false')
    if hard_delete == 'true':
        # Hard delete: also remove the recording directory from disk.
        logger.info('deleting recorded files at %s', events[0].directory())
        shutil.rmtree(events[0].directory())
    events.delete()
    db.commit()
    return make_response('', 204)
@app.route('/api/events/<uid>', methods=['PATCH'])
@requires_auth
@jsonapi_mediatype
@with_session
def modify_event(db, uid):
    '''Modify an event specified by its uid. The modifications for the event
    are expected as JSON with the content type correctly set in the request.
    Note that this method works for recorded events only. Upcoming events part
    of the scheduler cache cannot be modified.
    '''
    try:
        # jsonapi payload: exactly one {type: 'event', id: <uid>} object.
        data = request.get_json()['data']
        if len(data) != 1:
            return make_error_response('Invalid data', 400)
        data = data[0]
        if data['type'] != 'event' or data['id'] != uid:
            return make_error_response('Invalid data', 400)
        # Only status, start and end may be patched.
        for key in data['attributes'].keys():
            if key not in ('status', 'start', 'end'):
                return make_error_response('Invalid data', 400)
        # Map a human-readable status (e.g. "finished uploading") onto the
        # numeric Status constant; unknown names raise and yield a 400.
        new_status = data['attributes'].get('status')
        if new_status:
            new_status = new_status.upper().replace(' ', '_')
            data['attributes']['status'] = int(getattr(Status, new_status))
    except Exception:
        # Any malformed payload (missing keys, bad JSON, bad status name)
        # is reported uniformly as a client error.
        return make_error_response('Invalid data', 400)
    event = db.query(RecordedEvent).filter(RecordedEvent.uid == uid).first()
    if not event:
        return make_error_response('No event with specified uid', 404)
    event.start = data['attributes'].get('start', event.start)
    event.end = data['attributes'].get('end', event.end)
    event.status = data['attributes'].get('status', event.status)
    logger.debug('Updating event %s via api', uid)
    db.commit()
    return make_data_response(event.serialize())
@app.route('/api/metrics', methods=['GET'])
@requires_auth
@jsonapi_mediatype
@with_session
def metrics(dbs):
    '''Serve several metrics about the pyCA services and the machine via
    json.'''
    # Disk usage of the capture directory.
    # If the capture directory does not exist yet, use its parent instead.
    directory = config('capture', 'directory')
    if not os.path.exists(directory):
        directory = os.path.abspath(os.path.join(directory, os.pardir))
    total, used, free = shutil.disk_usage(directory)
    # System load averages (1/5/15 minutes).
    load_1m, load_5m, load_15m = os.getloadavg()
    # Memory usage snapshot.
    memory = psutil.virtual_memory()
    # Per-service status from the database.
    srvs = dbs.query(ServiceStates)
    services = []
    for srv in srvs:
        services.append({
            'name': Service.str(srv.type),
            'status': ServiceStatus.str(srv.status)
        })
    # Last successful synchronization with the configured Opencast server.
    state = dbs.query(UpstreamState).filter(
        UpstreamState.url == config('server', 'url')).first()
    last_synchronized = state.last_synced.isoformat() if state else None
    return make_response(jsonify(
        {'meta': {
            'services': services,
            'disk_usage_in_bytes': {
                'total': total,
                'used': used,
                'free': free,
            },
            'memory_usage_in_bytes': {
                'total': memory.total,
                'available': memory.available,
                'used': memory.used,
                'free': memory.free,
                'cached': memory.cached,
                'buffers': memory.buffers,
            },
            'load': {
                '1m': load_1m,
                '5m': load_5m,
                '15m': load_15m,
            },
            'upstream': {
                'last_synchronized': last_synchronized,
            }
        }}))
@app.route('/api/logs')
@requires_auth
@jsonapi_mediatype
def logs():
'''Serve a JSON representation of logs.
'''
cmd = config('ui', 'log_command')
if not cmd:
return make_error_response('Logs are disabled.', 4 |
BertrandBordage/django-terms | terms/tests/__init__.py | Python | bsd-3-clause | 41 | 0 | from . | html im | port *
from .terms import *
|
dbk138/ImageRegionRecognition-FrontEnd | app/python - Copy/Helpers.py | Python | mit | 4,339 | 0.021203 | __author__ = 'jhala'
import types
import os.path, time
import json
import logging
import logging.config
logging.config.fileConfig('logging.conf')
logger = logging.getLogger(__name__)
import re
appInfo='appinfo.json'
''' Helper Functions '''
''' get the file as an array of arrays ( header + rows and columns) '''
def fileInfo(fil):
    """Read a comma-separated file and return it as a list of rows.

    :param fil: path to the CSV-like file (header plus data rows)
    :return: list of rows, each row a list of fields from the stripped line
    """
    rows = []
    # Context manager closes the handle (the original bare open() leaked it).
    with open(fil) as fh:
        for line in fh:
            rows.append(line.strip().split(","))
    return rows
''' Return the header as an array '''
def getHeader(fil | eArr):
for rowOne in fileArr:
return rowOne
def fileLastTouchedTime(fileName):
    """Return the most recent of a file's modification and creation times.

    :param fileName: path to an existing file
    :return: whole seconds since the epoch (int)
    """
    mtime = int(os.path.getmtime(fileName))
    ctime = int(os.path.getctime(fileName))
    # max() replaces the original sort-then-take-last dance.
    return max(mtime, ctime)
def _loadAppInfo():
    """Parse the application config file, closing the handle afterwards.

    The original getters each called ``open()`` without ever closing the
    file, leaking a handle per call.
    """
    with open(appInfo, 'r') as f:
        return json.load(f)

def getImageLocation():
    """Root directory that holds the source images."""
    return _loadAppInfo()['imageLocation']

def getImageDataLocation():
    """Directory where per-image JSON data files are written."""
    return _loadAppInfo()['imageData']

def getMatLabFeatureExtractScript():
    """Path of the MATLAB feature-extraction script."""
    return _loadAppInfo()['matlabFeatureExtractScript']

def getMatLabSemanticElementsScript():
    """Path of the MATLAB semantic-elements script."""
    return _loadAppInfo()['matlabSemanticElementsScript']

def getMatlabSemanticElementsOutputFile():
    """Path of the semantic-elements output file produced by MATLAB."""
    return _loadAppInfo()['matlabSemanticElementsOutputFile']

def removeMatlabSemanticElementsOutputFile():
    """Delete the semantic-elements output file if it exists."""
    f = getMatlabSemanticElementsOutputFile()
    if os.path.isfile(f):
        os.remove(f)

def getMatlabFeatureOutputFile():
    """Path of the feature output file produced by MATLAB."""
    return _loadAppInfo()['matlabFeatureOutputFile']

def getTestImageName():
    """Path/name of the designated test image."""
    return _loadAppInfo()['testImage']

def removeMatlabFeatureOutputFile():
    """Delete the feature output file if it exists."""
    f = getMatlabFeatureOutputFile()
    if os.path.isfile(f):
        os.remove(f)
def checkFileNameExists(filName=str):
    """Return True when *filName* names an existing regular file.

    ``os.path.isfile`` already implies existence, so the original
    ``exists(...) and isfile(...)`` double check is collapsed.
    NOTE(review): the ``=str`` default was presumably meant as a type
    annotation; it is kept for interface compatibility.
    """
    return os.path.isfile(filName)
def getMainImageFileList():
    """Walk the image tree and describe every main image file found.

    For each main (non-LC) .jpg image, reports whether its JSON data file
    exists and is up to date, and whether an 'LC' companion image exists.
    :return: list of dicts with keys imageFile, dataFile, dataFileExists,
        dataFileRequiresUpdate, imageLastTouched, dataLastTouched,
        lcImageExists, lcImageName
    """
    fileList=[]
    # Sentinel timestamp used when no data file exists yet.
    epoch=time.mktime(time.strptime('1970','%Y'))
    for root, dirs, files in os.walk(getImageLocation()):
        #print root
        #print dirs
        for fil in files:
            thisFileName=os.path.join(root, fil)
            dataFileExists=False
            imageFileNewerThanDataFile=False
            dataFileRequiresUpdate=False
            if isMainImageFile(thisFileName) and checkFileNameExists(thisFileName):
                mainImageLastTouched=fileLastTouchedTime(thisFileName)
                # Data file name is "<parent-dir>_<image-name>.json" in the
                # image-data directory.
                expectedDataFileName = os.path.join(getImageDataLocation(), os.path.basename(root)+'_'+fil+'.json')
                if checkFileNameExists(expectedDataFileName ):
                    dataFileExists=True
                    dataFileLastTouched=fileLastTouchedTime(expectedDataFileName)
                else:
                    dataFileExists=False
                    dataFileLastTouched=epoch
                # The data file must be (re)generated when missing or stale.
                if dataFileExists and ( mainImageLastTouched > dataFileLastTouched) :
                    dataFileRequiresUpdate=True
                if not dataFileExists:
                    dataFileRequiresUpdate=True
                lcImageExists=False
                lcImageName = getLCImageName(thisFileName)
                if lcImageName != None:
                    lcImageExists=True
                fileList.append({ 'lcImageExists': lcImageExists , 'lcImageName' : lcImageName, 'dataFileRequiresUpdate' : dataFileRequiresUpdate, 'imageFile' : str(thisFileName), 'dataFile' : expectedDataFileName, 'imageLastTouched': mainImageLastTouched, 'dataLastTouched': dataFileLastTouched, 'dataFileExists' : dataFileExists} )
    return fileList
def isMainImageFile(fileName):
    """Return True for main .jpg images, excluding '*LC.jpg' companions.

    The original patterns used an unescaped dot, so names such as
    'photoXjpg' were wrongly accepted; the dots are now escaped.
    Matching remains case-insensitive.
    """
    if not re.search(r'\.jpg$', fileName, flags=re.IGNORECASE):
        return False
    return not re.search(r'LC\.jpg$', fileName, flags=re.IGNORECASE)
def getLCImageName(imageFileName):
    """Return the existing 'LC' companion image name for *imageFileName*.

    'foo.jpg' -> 'fooLC.jpg' when that file exists; otherwise None (an
    error is logged). The extension pattern now escapes the dot and is
    anchored at the end of the name, so inputs like 'fooXjpg' or
    'foo.jpg.bak' are rejected instead of producing bogus companion names.
    """
    r = re.match(r"(.*)(\.jpg)$", imageFileName, flags=re.IGNORECASE)
    if not r:
        logger.error("Invalid image file name given" + imageFileName)
        return None
    lcImageName = r.group(1) + "LC" + r.group(2)
    if checkFileNameExists(lcImageName):
        return lcImageName
    logger.error('Image file does not exist: ' + lcImageName)
    return None
|
bk1285/rpi_wordclock | wordclock_interfaces/web_interface.py | Python | gpl-3.0 | 11,610 | 0.002929 | from flask import Flask, render_template
import _thread
import logging
from flask_restx import Api, Resource, fields
import wordclock_tools.wordclock_colors as wcc
import wordclock_tools.wordclock_display as wcd
import datetime
class web_interface:
    """Flask/Flask-RESTX front end exposing the wordclock REST API.

    All state is class-level: a single Flask ``app``, the RESTX ``api``
    wrapper and the request/response models used by the endpoints
    registered below at module level.
    """
    app = Flask(__name__)
    api = Api(app,
              validate=True,
              version='4.3',
              title='Wordclock API',
              description='The API to access the raspberry wordclock',
              contact='Bernd',
              security=None,
              doc='/api',
              prefix='/api',
              default='API',
              default_label='Endpoints to access and control the wordclock',
              ordered=False)
    # Serialization model for a plugin (used by /plugins and /plugin).
    plugin_model = api.model('plugin', {
        'name': fields.String(description='Plugin name in a single word'),
        'pretty_name': fields.String(
            description='Pretty plugin name, which may hold capital + special characters or spaces'),
        'description': fields.String(description='Sentence, which describes the plugins functionality')
    })
    # Request model used to activate a plugin by name.
    plugin_name_model = api.model('plugin_name', {
        'name': fields.String(required=True,
                              example='time_default',
                              description='Plugin name in a single word')
    })
    button_model = api.model('button', {
        'button': fields.String(enum=['left', 'right', 'return'],
                                required=True,
                                example='return',
                                description='Name of a button, which will be triggered')
    })
    color_model = api.model('color', {
        'red': fields.Integer(min=0, max=255, example=50, required=True, description='Red value'),
        'green': fields.Integer(min=0, max=255, example=200, required=True, description='Green value'),
        'blue': fields.Integer(min=0, max=255, example=100, required=True, description='Blue value'),
        'type': fields.String(enum=['all', 'words', 'minutes', 'background'],
                              required=False,
                              example='all',
                              description='Set color only to specified parts of the wordclock. Defaults to all.')
    })
    brightness_model = api.model('brightness', {
        'brightness': fields.Integer(min=0, max=255, example=180, required=True, description='Brightness value')
    })
    color_temperature_model = api.model('color_temperature', {
        'color_temperature': fields.Integer(min=1000, max=40000, example=2000, required=True, description='Color temperature in Kelvin')
    })
    # NOTE(review): the 'scrolltime'/'scrolldate' descriptions below look
    # swapped (the time field described as a date and vice versa) -- confirm.
    scrolltext_model = api.model('scrolltext', {
        'scrollenable': fields.Boolean(required=True, description='Enable text to scroll'),
        'scrolltext': fields.String(required=True, description='Text to scroll'),
        'scrolltime': fields.String(required=True, description='Date to start scroll'),
        'scrolldate': fields.String(required=True, description='Time to start scroll'),
        'scrollrepeat': fields.Integer(required=True, description='Repeat of text to scroll')
    })
    def __init__(self, wordclock):
        """Attach the wordclock instance to the app and start serving."""
        self.app.wclk = wordclock
        self.app.debug = False
        # Serve from a background thread so the caller's loop keeps running.
        _thread.start_new_thread(self.threaded_app, ())
    def threaded_app(self):
        """Blocking Flask server loop; port 8080 in developer mode, else 80."""
        port = 8080 if self.app.wclk.developer_mode_active else 80
        self.app.run(host='0.0.0.0', port=port)
def scrolltext_task():
    """Placeholder scroll-text task hook; currently only announces itself on stdout."""
    label = "scrolltext_task"
    print(label)
@web_interface.app.route('/')
def index():
    """Serve the single-page web UI."""
    return render_template('app.html')
@web_interface.api.route('/plugins')
class Plugins(Resource):
    """Read-only collection endpoint listing every available plugin."""
    @web_interface.api.marshal_with(
        web_interface.plugin_model,
        envelope='plugins')
    @web_interface.api.doc(
        description='Returns a list of all available plugins',
        responses={
            200: 'Success',
            400: 'Bad request'})
    def get(self):
        """Return all loaded plugins, marshalled as `plugin` objects."""
        return web_interface.app.wclk.plugins
@web_interface.api.route('/plugin')
class Plugin(Resource):
    """Endpoint for reading and switching the currently active plugin."""
    @web_interface.api.marshal_with(
        web_interface.plugin_model,
        envelope='plugin')
    @web_interface.api.doc(
        description='Returns the currently active plugin',
        responses={
            200: 'Success',
            400: 'Bad request'})
    def get(self):
        """Return the plugin currently selected by the wordclock."""
        return web_interface.app.wclk.plugins[web_interface.app.wclk.plugin_index]
    @web_interface.api.doc(
        description='Takes a valid plugin name to make it the active plugin',
        responses={
            200: 'Success',
            400: 'Bad request',
            406: 'Bad plugin name supplied'})
    @web_interface.api.expect(web_interface.plugin_name_model)
    def post(self):
        """Activate the plugin named in the request payload (406 if unknown)."""
        name = web_interface.api.payload.get('name')
        plugin_list = web_interface.app.wclk.plugins
        # NOTE(review): the comprehension variable shadows plugin_list; it
        # works (each element is a plugin) but is confusing to read.
        try:
            pluginindex = [i for i, plugin_list in enumerate(plugin_list) if plugin_list.name == name][0]
        except IndexError:
            web_interface.api.abort(406, 'Request must contain a valid plugin name. Received ' + name)
        web_interface.app.wclk.runNext(pluginindex)
        return "Set current plugin to " + name
@web_interface.api.route('/button')
class Button(Resource):
    """Endpoint simulating a press of one of the wordclock's hardware buttons."""
    @web_interface.api.doc(
        description='Takes a name of the button, to be pressed: left, right, return',
        responses={
            200: 'Success',
            400: 'Bad request'})
    @web_interface.api.expect(web_interface.button_model)
    def post(self):
        """Translate the button name into its event and fire it."""
        button_type = web_interface.api.payload.get('button')
        event = web_interface.app.wclk.wci.BUTTONS.get(button_type)
        web_interface.app.wclk.wci.setEvent(event)
        return "Button " + button_type + " triggered"
@web_interface.api.route('/color')
class Color(Resource):
    """Endpoint for reading and setting the displayed time's RGB colors."""
    @web_interface.api.doc(
        description='Returns 8bit RGB color values of the displayed time',
        responses={
            200: 'Success',
            400: 'Bad request'})
    def get(self):
        """Return the default plugin's background/word/minute colors."""
        default_plugin = web_interface.app.wclk.plugins[web_interface.app.wclk.default_plugin]
        # Split a color object into its 8-bit RGB channels.
        channel_wise = lambda x: {'red': x.r, 'green': x.g, 'blue': x.b}
        return {
            'background': channel_wise(default_plugin.bg_color),
            'words': channel_wise(default_plugin.word_color),
            'minutes': channel_wise(default_plugin.minute_color)
        }
    @web_interface.api.doc(
        description='Takes 8bit RGB color values to display the time with',
        responses={
            200: 'Success',
            400: 'Bad request'})
    @web_interface.api.expect(web_interface.color_model)
    def post(self):
        """Apply the supplied color to the selected clock parts and redraw."""
        supplied_color = wcc.Color(web_interface.api.payload.get('red'),
                                   web_interface.api.payload.get('green'),
                                   web_interface.api.payload.get('blue'))
        supplied_type = web_interface.api.payload.get('type')
        supplied_type = 'all' if supplied_type is None else supplied_type
        # Switch back to the default (time) plugin before recoloring it.
        default_plugin_idx = web_interface.app.wclk.default_plugin
        web_interface.app.wclk.runNext(default_plugin_idx)
        default_plugin = web_interface.app.wclk.plugins[default_plugin_idx]
        if supplied_type == 'all':
            # 'all' resets the background to black and colors the foreground.
            default_plugin.bg_color = wcc.BLACK
            default_plugin.word_color = supplied_color
            default_plugin.minute_color = supplied_color
        elif supplied_type == 'words':
            default_plugin.word_color = supplied_color
        elif supplied_type == 'minutes':
            default_plugin.minute_color = supplied_color
        elif supplied_type == 'background':
            default_plugin.bg_color = supplied_color
        default_plugin.show_time(web_interface.app.wclk.wcd, web_interface.app.wclk.wci, animation=None)
        return "Wordclock color set to " + supplied_type
@web_interface.api.route('/brightness')
class Brightness(Resource):
@web_interface.api.doc(
description= |
JIC-CSB/jobarchitect | jobarchitect/agent.py | Python | mit | 1,898 | 0 | """Jobarchitect agent."""
import os
import argparse
import subprocess
from jobarchitect.utils import (
output_path_from_hash,
mkdir_parents
)
class Agent(object):
    """Runs an analysis tool against individual items of a dataset."""

    def __init__(self, tool_path, dataset_path, output_root="/tmp"):
        """Store the (absolute) tool path plus dataset/output locations."""
        self.tool_path = os.path.abspath(tool_path)
        self.dataset_path = dataset_path
        self.output_root = output_root

    def run_tool_on_identifier(self, identifier):
        """Run the tool on the dataset item named by *identifier*."""
        # Derive the per-item output directory and ensure it exists.
        output_path = output_path_from_hash(
            self.dataset_path, identifier, self.output_root)
        mkdir_parents(output_path)
        cmd = [
            "python", self.tool_path,
            "--dataset-path", self.dataset_path,
            "--identifier", identifier,
            "--output-directory", output_path,
        ]
        subprocess.call(cmd)
def analyse_by_identifiers(
        tool_path, dataset_path, output_root, identifiers):
    """Run the analysis tool over each identifier in turn.

    :param tool_path: path to tool
    :param dataset_path: path to input dataset
    :param output_root: path to output root
    :param identifiers: list of identifiers
    """
    runner = Agent(tool_path, dataset_path, output_root)
    for identifier in identifiers:
        runner.run_tool_on_identifier(identifier)
def cli():
"""Command line interface for _analyse_by_ids"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--tool_path', required=True)
parser.add_argument('--input_dataset_path', required=True)
parser.add_argument('--output_root', required=True)
parser.add_argument('identifiers', nargs=argparse.REMAINDER)
args = parser.parse_args()
analyse_by_identifiers(
args.tool_path,
args.input_dataset_path,
args.output_root,
args.identifiers)
|
caronc/newsreap | bin/nr.py | Python | gpl-3.0 | 11,500 | 0.001652 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# NewsReap Command Line Interface (CLI)
#
# Copyright (C) 2015-2016 Chris Caron <lead2gold@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE
# This is the easiest way to interact with NewsReap from the command line
# It handles most basic functionality and offers users the ability to
# create their own plugins to enhance it's functionality further.
#
# Type nr.py --help for command help.
#
# Successfully executed commands always return a zero (0) to the command
# line, otherwise a non-zero value is returned which may or may not
# identify details on the problem that occurred.
#
# Drop a configuration file in your home directory:
# mkdir -p ~/.config/newsreap
# cp config.yaml ~/.config/newsreap
#
# Now update the configuration to reflect your NNTP Setup
#
# To initialize your workable database safely you might do the following:
#
# # Initialize our database (only needs to be ran the first time)
# nr.py db init
#
# # Poll the configured NNTP Servers in your config.yaml file for
# # all of the usenet groups supported.
# nr.py update groups
#
# # You can list them all now by typing:
# nr.py group list --all
#
# # Aliases allow you to simplify group reference so you don't have to type
# # the whole name out each time. The below creates an alias called
# # 'a.b.test' and assciates it with the group alt.binaries.test.
# nr.py alias add test alt.binaries.test
#
# # You can list the aliases and what they're associated with by typing
# nr.py alias list
#
# # You can also associate more then one group with the same alias
# nr.py alias add test alt.binaries.testing alt.binaries.test.files
#
# # Index all the groups defined by our alias.
# # the below only looks back untl Jan 1st, 2014
# # Depending on how many articles are in the group, this can take a very
# # long time. The -vv at least lets you know something is going on.
#
# # consider setting a database like PostgreSQL or MySQL for speed
# # alternatively, if you insist on using the default SQLite database, then
# # consider setting up a RAM drive for faster processing of the table
# # writes.
# nr.py -vvv update search --date-from=2014 test
#
# # Index a from/to range if you want.
# nr.py -vvv update search --date-from=2013 --date-to=2014.02 test
#
# # Newsreap also maintains a watch list which is groups you just always
# # intend to index. It can be a conbination of aliases you set up, or
# # just a few individual groups. The below adds our 'test' group to
# # a watch list.
# nr.py group watch test
#
# # We can now update our search by typing this following:
# nr.py -vvv update search --watched
# # Index the entire group (the whole retention period you have to work with)
# nr.py group index test
#
# # Now that you've got content indexed, you can browse for things
# # You must specify an alias/group as your first argument
# # From there, you can specify your search arguments
# nr.py search test "keyword or string 1" "keyword or string 2" "etc..."
#
# # Getting to many hits? Filter them; the below only shows entries
# # that scored higher then 30
# nr.py search test --score=30 "keyword or string 1" \
# "keyword or string 2" "etc..."
#
# # You can do ranges too
# # The below only shows entries that have scored between -90 and 30
# nr.py search test --score=-90-30 "keyword or string 1" \
# "keyword or string 2" "etc..."
#
# # Want to elminate crap from your database that you know is just
# # taking up useless junk (and space, and thus speed because it's indexed):
# # use the delete option; it takes all the same parameters and options the
# # search function does (--score, etc).
# nr.py delete "keyword or string 1" "etc"
#
# If you setup you filters right, you can download content by group or
# single files. Just pay attention to the id on the left (in your search)
# This monkey patching must be done before anything else to prevent
# Warnings like this on exit:
# Exception KeyError: KeyError(139667991911952,) in \
# <module 'threading' from '/usr/lib64/python2.6/threading.pyc'> ignored
import gevent.monkey
gevent.monkey.patch_all()
import click
import sys
from os.path import abspath
from os.path import dirname
from os.path import basename
from os.path import isdir
from | os.path import isfile
# Path
try:
from newsreap.NNTPSettings import CLI_PLUGINS_MAPPING
except ImportError:
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from newsreap.NNTPSettings imp | ort CLI_PLUGINS_MAPPING
# Import our file based paths
from newsreap.NNTPSettings import DEFAULT_CLI_PLUGIN_DIRECTORIES
from newsreap.NNTPSettings import NNTPSettings
from newsreap.NNTPManager import NNTPManager
from newsreap.Utils import scan_pylib
from newsreap.Utils import load_pylib
# Logging
from newsreap.Logging import *
import logging
logger = logging.getLogger(NEWSREAP_CLI)
# General Options
@click.group()
@click.option('--config', '-c',
help='Specify configuration file.')
@click.option('--verbose', '-v', count=True,
help='Verbose mode.')
@click.option('--noprompt', '-y', is_flag=True,
help='Assume `yes` to all prompts.')
@click.pass_context
def cli(ctx, config, verbose, noprompt):
ctx.obj['verbose'] = verbose
# Add our handlers at the parent level
add_handler(logging.getLogger(SQLALCHEMY_LOGGER))
add_handler(logging.getLogger(NEWSREAP_LOGGER))
# Handle Verbosity
set_verbosity(verbose)
ctx.obj['noprompt'] = noprompt
if config is not None and not isfile(config):
logger.error(
"The YAML config file '%s' was not found." % config,
)
exit(1)
# NNTPSettings() for storing and retrieving settings
ctx.obj['NNTPSettings'] = NNTPSettings(cfg_file=config)
if not ctx.obj['NNTPSettings'].is_valid():
# our configuration was invalid
logger.error("No valid config.yaml file was found.")
exit(1)
# NNTPManager() for interacting with all configured NNTP Servers
ctx.obj['NNTPManager'] = NNTPManager(
settings=ctx.obj['NNTPSettings'],
)
# Dynamically Build CLI List; This is done by iterating through
# plugin directories and looking for CLI_PLUGINS_MAPPING
# which is expected to be a dictionary containing the mapping of
# the cli group (the key) to the function prefixes defined.
#
# If we can load it we'll save it here
plugins = scan_pylib(paths=[d for d in DEFAULT_CLI_PLUGIN_DIRECTORIES \
if isdir(d) is True])
# Now we iterate over the keys
for k, v in plugins.iteritems():
for _pyfile in v:
# Apply entry
obj = load_pylib('_nrcli_%s' % k, _pyfile)
if not hasattr(obj, CLI_PLUGINS_MAPPING):
continue
if isinstance(obj.NEWSREAP_CLI_PLUGINS, basestring):
# 1-1 mapping of a function
_click_func = getattr(obj, obj.NEWSREAP_CLI_PLUGINS, None)
if callable(_click_func):
cli.add_command(_click_func)
break
elif isinstance(obj.NEWSREAP_CLI_PLUGINS, dict):
# parse format:
# shorthand:function
for sf, _meta in obj.NEWSREAP_CLI_PLUGINS.iteritems():
# A flag used to track whether at least one command was added
# otherwise why bother store the entry.
store = False
# Default Action Description
group_desc = None
if isinstance(_meta, basestring):
fn_prefix = _meta
elif isinstance(_meta, dict):
# Support Dictionarie |
lmazuel/azure-sdk-for-python | azure-mgmt-web/azure/mgmt/web/operations/domains_operations.py | Python | mit | 52,494 | 0.002534 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class DomainsOperations(object):
"""DomainsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: API Version. Constant value: "2015-04-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2015-04-01"
self.config = config
def check_availability(
self, name=None, custom_headers=None, raw=False, **operation_config):
"""Check if a domain is available for registration.
Check if a domain is available for registration.
:param name: Name of the object.
:type name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DomainAvailablilityCheckResult or ClientRawResponse if
raw=true
:rtype: ~azure.mgmt.web.models.DomainAvailablilityCheckResult or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
identifier = models.NameIdentifier(name=name)
# Construct URL
url = self.check_availability.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(identifier, | 'NameIdentifier')
# Construct and send request
request = self._client.post(url, quer | y_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('DomainAvailablilityCheckResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
check_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/checkDomainAvailability'}
def list(
self, custom_headers=None, raw=False, **operation_config):
"""Get all domains in a subscription.
Get all domains in a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Domain
:rtype:
~azure.mgmt.web.models.DomainPaged[~azure.mgmt.web.models.Domain]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.DomainPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.DomainPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/domains'}
def get_control_center_sso_request(
self, custom_headers=None, raw=False, **operation_config):
"""Generate a single sign-on request for the domain management portal.
Generate a single sign-on request for the domain management portal.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: DomainControlCenterSsoRequest or ClientRawResponse if
raw=true
:rtype: ~azure.mgmt.web.models.DomainControlCenterSsoRequest or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get_control_center_sso_request.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.co |
NickPresta/sentry | src/sentry/coreapi.py | Python | bsd-3-clause | 10,660 | 0.00075 | """
sentry.coreapi
~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# TODO: We should make the API a class, and UDP/HTTP just inherit from it
# This will make it so we can more easily control logging with various
# metadata (rather than generic log messages which aren't useful).
from datetime import datetime, timedelta
import base64
import logging
import uuid
import zlib
from django.utils.encoding import smart_str
from sentry.app import env
from sentry.conf import settings
from sentry.exceptions import InvalidTimestamp
from sentry.models import Project, ProjectKey
from sentry.tasks.store import preprocess_event
from sentry.utils import is_float, json
from sentry.utils.auth import parse_auth_header
from sentry.utils.imports import import_string
from sentry.utils.strings import decompress, truncatechars
logger = logging.getLogger('sentry.coreapi.errors')
MAX_CULPRIT_LENGTH = 200
MAX_MESSAGE_LENGTH = 2048
INTERFACE_ALIASES = {
'exception': 'sentry.interfaces.Exception',
'request': 'sentry.interfaces.Http',
'user': 'sentry.interfaces.User',
'stacktrace': 'sentry.interfaces.Stacktrace',
'template': 'sentry.interfaces.Template',
}
RESERVED_FIELDS = (
'project',
'event_id',
'message',
'checksum',
'culprit',
'level',
'time_spent',
'logger',
'server_name',
'site',
'timestamp',
'extra',
'modules',
'tags',
'platform',
)
class APIError(Exception):
http_status = 400
msg = 'Invalid request'
def __init__(self, msg=None):
if msg:
self.msg = msg
def __str__(self):
return self.msg or ''
class APIUnauthorized(APIError):
http_status = 401
msg = 'Unauthorized'
class APIForbidden(APIError):
http_status = 403
class APITimestampExpired(APIError):
http_status = 410
def client_metadata(client=None, project=None, exception=None, tags=None, extra=None):
if not extra:
extra = {}
if not tags:
tags = {}
extra['client'] = client
extra['request'] = env.request
extra['tags'] = tags
if project:
extra['project_slug'] = project.slug
extra['project_id'] = project.id
if project.team:
extra['team_slug'] = project.team.slug
extra['team_id'] = project.team.id
tags['client'] = client
if exception:
tags['exc_type'] = type(exception).__name__
if project and project.team:
tags['project'] = '%s/%s' % (project.team.slug, project.slug)
result = {'extra': extra}
if exception:
result['exc_info'] = True
return result
def extract_auth_vars(request):
if request.META.get('HTTP_X_SENTRY_AUTH', '').startswith('Sentry'):
return parse_auth_header(request.META['HTTP_X_SENTRY_AUTH'])
elif request.META.get('HTTP_AUTHORIZATION', '').startswith('Sentry'):
return parse_auth_header(request.META['HTTP_AUTHORIZATION'])
else:
return dict(
(k, request.GET[k])
for k in request.GET.iterkeys()
if k.startswith('sentry_')
)
def project_from_auth_vars(auth_vars):
api_key = auth_vars.get('sentry_key')
if not api_key:
raise APIForbidden('Invalid api key')
try:
pk = ProjectKey.objects.get_from_cache(public_key=api_key)
except ProjectKey.DoesNotExist:
raise APIForbidden('Invalid api key')
if pk.secret_key != auth_vars.get('sentry_secret', pk.secret_key):
raise APIForbidden('Invalid api key')
project = Project.objects.get_from_cache(pk=pk.project_id)
return project, pk.user
def decode_and_decompress_data(encoded_data):
try:
try:
return decompress(encoded_data)
except zlib.error:
return base64.b64decode(encoded_data)
except Exception, e:
# This error should be caught as it suggests that there's a
# bug somewhere in the client's code.
logger.info(e, **client_metadata(exception=e))
raise APIForbidden('Bad data decoding request (%s, %s)' % (
e.__class__.__name__, e))
def safely_load_json_string(json_string):
try:
obj = json.loads(json_string)
except Exception, e:
# This error should be caught as it suggests that there's a
# bug somewhere in the client's code.
logger.info(e, **client_metadata(exception=e))
raise APIForbidden('Bad data reconstructing object (%s, %s)' % (
e.__class__.__name__, e))
# XXX: ensure keys are coerced to strings
return dict((smart_str(k), v) for k, v in obj.iteritems())
def ensure_valid_project_id(desired_project, data, client=None):
# Confirm they're using either the master key, or their specified project
# matches with the signed project.
if desired_project and data.get('project'):
if str(data.get('project')) not in [str(desired_project.id), desired_project.slug]:
logger.info(
'Project ID mismatch: %s != %s', desired_project.id, desired_project.slug,
**client_metadata(client))
raise APIForbidden('Invalid credentials')
data['project'] = desired_project.id
elif not desired_project:
data['project'] = 1
elif not data.get('project'):
data['project'] = desired_project.id
def process_data_timestamp(data):
if is_float(data['timestamp']):
try:
data['timestamp'] = datetime.fromtimestamp(float(data['timestamp']))
except Exception:
raise InvalidTimestamp('Invalid value for timestamp: %r' % data['timestamp'])
elif not isinstance(data['timestamp'], datetime):
if '.' in data['timestamp']:
| format = '%Y-%m-%dT%H:%M:%S.%f'
else:
format = '%Y-%m-%dT%H:%M:%S'
if | 'Z' in data['timestamp']:
# support UTC market, but not other timestamps
format += 'Z'
try:
data['timestamp'] = datetime.strptime(data['timestamp'], format)
except Exception:
raise InvalidTimestamp('Invalid value for timestamp: %r' % data['timestamp'])
if data['timestamp'] > datetime.now() + timedelta(minutes=1):
raise InvalidTimestamp('Invalid value for timestamp (in future): %r' % data['timestamp'])
return data
def validate_data(project, data, client=None):
ensure_valid_project_id(project, data, client=client)
if not data.get('message'):
data['message'] = '<no message value>'
elif not isinstance(data['message'], basestring):
raise APIError('Invalid value for message')
elif len(data['message']) > MAX_MESSAGE_LENGTH:
logger.info(
'Truncated value for message due to length (%d chars)',
len(data['message']), **client_metadata(client, project))
data['message'] = truncatechars(data['message'], MAX_MESSAGE_LENGTH)
if data.get('culprit') and len(data['culprit']) > MAX_CULPRIT_LENGTH:
logger.info(
'Truncated value for culprit due to length (%d chars)',
len(data['culprit']), **client_metadata(client, project))
data['culprit'] = truncatechars(data['culprit'], MAX_CULPRIT_LENGTH)
if not data.get('event_id'):
data['event_id'] = uuid.uuid4().hex
if len(data['event_id']) > 32:
logger.info(
'Discarded value for event_id due to length (%d chars)',
len(data['event_id']), **client_metadata(client, project))
data['event_id'] = uuid.uuid4().hex
if 'timestamp' in data:
try:
process_data_timestamp(data)
except InvalidTimestamp, e:
# Log the error, remove the timestamp, and continue
logger.info(
'Discarded invalid value for timestamp: %r', data['timestamp'],
**client_metadata(client, project, exception=e))
del data['timestamp']
if data.get('modules') and type(data['modules']) != dict:
logger.info(
'Discarded invalid type for modules: %s',
type(data['modules']), **client_m |
hawkowl/axiom | axiom/test/historic/test_parentHook2to3.py | Python | mit | 1,325 | 0.002264 |
"""
Test upgrading L{_SubSchedulerParentHook} from version 2 to 3.
"""
from axiom.test.historic.stubloader import StubbedTest
from axiom.scheduler import _SubSchedulerParentHook
from axiom.substore import SubStore
from axiom.dependency import _DependencyConnector
class SubSchedulerParentHookUpgradeTests(StubbedTest):
"""
Test upgrading L{_SubSchedulerParentHook} from version 2 to 3.
"""
def setUp(self):
d = StubbedTest.setUp(self)
def cbSetUp(ignored):
self.hook = self.store.findUnique(_SubSchedulerParentHook)
d.addCallback(cbSetUp)
return d
def test_attributesCopied(self):
"""
The only attribute of L{_SubSchedulerParentHook} which still exists at
the current version, version 4, C{subStore}, ought to have been
copied over.
"""
se | lf.assertIdentical(
self.hook.subStore, self.store.findUnique(SubStore))
def test_uninstalled(self):
"""
The record of the installation of L{_SubSchedulerParentHook} on the
store is deleted in the upgrad | e to schema version 4.
"""
self.assertEquals(
list(self.store.query(
_DependencyConnector,
_DependencyConnector.installee == self.hook)),
[])
|
theju/urlscript | urlscript/settings.py | Python | mit | 3,699 | 0.001081 | """
Django settings for urlscript project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+^d@j=txhj+yu39&c(!^#w177dj$-si2*lhtho-53)g-5l(w%p'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'core',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'urlscript.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
| 'django.con | trib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'urlscript.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
SITE_ID = 1
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# The path where the bwrap executable is located
BUBBLEWRAP_PATH = ""
# Custom args that can be sent to the bwrap executable e.g --dir /abc /abc etc
BWRAP_CUSTOM_OPTIONS = ""
# Preferably make the below a RAMfs to make it faster
SCRIPTS_TMP_DIR = ""
# The max time for the scripts to run
SCRIPT_TIMEOUT = 30
# A dictionary mappying the language extension with the executable
LANGUAGE_EXECUTABLE = {
'py': 'python3',
'js': 'node',
}
try:
from .local import *
except ImportError:
pass
|
eleftherioszisis/NeuroM | neurom/fst/_neuronfunc.py | Python | bsd-3-clause | 10,088 | 0.000991 | # Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Morphometrics functions for neurons or neuron populations'''
import math
import numpy as np
from neurom.geom import bounding_box
from neurom.core.types import NeuriteType
from neurom.core.types import tree_type_checker as is_type
from neurom.core.dataformat import COLS
from neurom.core._neuron import iter_neurites, iter_segments
from neurom import morphmath
def neuron_population(nrns):
'''Makes sure `nrns` behaves like a neuron population'''
return nrns.neurons if hasattr(nrns, 'neurons') else (nrns,)
def soma_volume(nrn):
'''Get the volume of a neuron's soma.'''
return nrn.soma.volume
def soma_volumes(nrn_pop):
'''Get the volume | of the somata in a population of neurons
Note:
If a single neuron is passed, a single element list with the volume
of its soma member is returned.
'''
nrns = neuron_population(nrn_pop)
return [soma_volume(n) for n in nrns]
def soma_surface_area(nrn, neurite_type=NeuriteType.soma):
'''Get the surface area of a neuron's soma.
Note:
The surface area is calculated by assuming the soma is spherical.
'''
assert neurite_type == NeuriteType. | soma, 'Neurite type must be soma'
return 4 * math.pi * nrn.soma.radius ** 2
def soma_surface_areas(nrn_pop, neurite_type=NeuriteType.soma):
'''Get the surface areas of the somata in a population of neurons
Note:
The surface area is calculated by assuming the soma is spherical.
Note:
If a single neuron is passed, a single element list with the surface
area of its soma member is returned.
'''
nrns = neuron_population(nrn_pop)
assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
return [soma_surface_area(n) for n in nrns]
def soma_radii(nrn_pop, neurite_type=NeuriteType.soma):
''' Get the radii of the somata of a population of neurons
Note:
If a single neuron is passed, a single element list with the
radius of its soma member is returned.
'''
assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
nrns = neuron_population(nrn_pop)
return [n.soma.radius for n in nrns]
def trunk_section_lengths(nrn, neurite_type=NeuriteType.all):
'''list of lengths of trunk sections of neurites in a neuron'''
neurite_filter = is_type(neurite_type)
return [morphmath.section_length(s.root_node.points)
for s in nrn.neurites if neurite_filter(s)]
def trunk_origin_radii(nrn, neurite_type=NeuriteType.all):
'''radii of the trunk sections of neurites in a neuron'''
neurite_filter = is_type(neurite_type)
return [s.root_node.points[0][COLS.R] for s in nrn.neurites if neurite_filter(s)]
def trunk_origin_azimuths(nrn, neurite_type=NeuriteType.all):
'''Get a list of all the trunk origin azimuths of a neuron or population
The azimuth is defined as Angle between x-axis and the vector
defined by (initial tree point - soma center) on the x-z plane.
The range of the azimuth angle [-pi, pi] radians
'''
neurite_filter = is_type(neurite_type)
nrns = neuron_population(nrn)
def _azimuth(section, soma):
'''Azimuth of a section'''
vector = morphmath.vector(section[0], soma.center)
return np.arctan2(vector[COLS.Z], vector[COLS.X])
return [_azimuth(s.root_node.points, n.soma)
for n in nrns
for s in n.neurites if neurite_filter(s)]
def trunk_origin_elevations(nrn, neurite_type=NeuriteType.all):
    '''Return the trunk origin elevations of a neuron or population.

    The elevation is the angle between the x-axis and the vector
    (initial tree point - soma center), measured on the x-y half-plane.
    Values lie in the range [-pi/2, pi/2] radians.

    Raises:
        ValueError: if a trunk origin coincides with the soma center.
    '''
    wanted = is_type(neurite_type)
    elevations = []
    for neuron in neuron_population(nrn):
        for neurite in neuron.neurites:
            if not wanted(neurite):
                continue
            vec = morphmath.vector(neurite.root_node.points[0],
                                   neuron.soma.center)
            norm = np.linalg.norm(vec)
            if norm < np.finfo(type(norm)).eps:
                raise ValueError("Norm of vector between soma center and section is almost zero.")
            elevations.append(np.arcsin(vec[COLS.Y] / norm))
    return elevations
def trunk_vectors(nrn, neurite_type=NeuriteType.all):
    '''Return an array of vectors from the soma center to each trunk origin.'''
    wanted = is_type(neurite_type)
    vectors = []
    for neuron in neuron_population(nrn):
        for neurite in neuron.neurites:
            if wanted(neurite):
                vectors.append(morphmath.vector(neurite.root_node.points[0],
                                                neuron.soma.center))
    return np.array(vectors)
def trunk_angles(nrn, neurite_type=NeuriteType.all):
    '''Calculates the angles between all the trunks of the neuron.

    The angles are defined on the x-y plane and the trees
    are sorted from the y axis and anticlock-wise.
    '''
    vectors = trunk_vectors(nrn, neurite_type=neurite_type)
    # In order to avoid the failure of the process in case the neurite_type does not exist
    if not vectors.size:
        return []
    def _sort_angle(p1, p2):
        """Angle between p1-p2 to sort vectors"""
        ang1 = np.arctan2(*p1[::-1])
        ang2 = np.arctan2(*p2[::-1])
        return (ang1 - ang2)
    # Sorting angles according to x-y plane
    # (each x-y projection is normalized first; [0, 1] is the y-axis reference)
    order = np.argsort(np.array([_sort_angle(i / np.linalg.norm(i), [0, 1])
                                 for i in vectors[:, 0:2]]))
    ordered_vectors = vectors[order][:, [COLS.X, COLS.Y]]
    # i - 1 wraps around to the last vector when i == 0, closing the circle
    return [morphmath.angle_between_vectors(ordered_vectors[i], ordered_vectors[i - 1])
            for i, _ in enumerate(ordered_vectors)]
def sholl_crossings(neurites, center, radii):
'''calculate crossings of neurites
Args:
nrn(morph): morphology on which to perform Sholl analysis
radii(iterable of floats): radii for which crossings will be counted
Returns:
Array of same length as radii, with a count of the number of crossings
for the respective radius
'''
def _count_crossings(neurite, radius):
'''count_crossings of segments in neurite with radius'''
r2 = radius ** 2
count = 0
for start, end in iter_segments(neurite):
start_dist2, end_dist2 = (morphmath.point_dist2(center, start),
|
release-monitoring/anitya | anitya/tests/lib/versions/test_base.py | Python | gpl-2.0 | 8,780 | 0.000456 | # -*- coding: utf-8 -*-
#
# Copyright © 2017-2020 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
from __future__ import unicode_literals
import unittest
import mock
from anitya.lib import exceptions
from anitya.lib.versions import base
class VersionTests(unittest.TestCase):
    """Tests for the :class:`anitya.lib.versions.Version` model."""
    def test_identity_string(self):
        """Assert the generic version constant is what we expect.

        .. note::
            If this test starts failing because the constant was modified, you
            *must* write a migration to change the type column on existing
            versions.
        """
        self.assertEqual("Generic Version", base.Version.name)
    def test_str(self):
        """Assert __str__ delegates to parse."""
        version = base.Version(version="v1.0.0")
        self.assertEqual("1.0.0", str(version))
    def test_str_parse_error(self):
        """Assert __str__ falls back to the raw version string when parse fails."""
        version = base.Version(version="v1.0.0")
        # force parse() to fail so __str__ takes its fallback path
        version.parse = mock.Mock(side_effect=exceptions.InvalidVersion("boop"))
        self.assertEqual("v1.0.0", str(version))
def test_parse_no_v(self):
"""Assert parsing a version sans leading 'v' works."""
version = base.Ve | rsion(version="1.0.0")
self.assertEqual("1.0.0", versi | on.parse())
def test_parse_leading_v(self):
"""Assert parsing a version with a leading 'v' works."""
version = base.Version(version="v1.0.0")
self.assertEqual("1.0.0", version.parse())
def test_parse_odd_version(self):
"""Assert parsing an odd version works."""
version = base.Version(version="release_1_0_0")
self.assertEqual("release_1_0_0", version.parse())
def test_parse_v_not_alone(self):
"""Assert leading 'v' isn't stripped if it's not followed by a number."""
version = base.Version(version="version1.0.0")
self.assertEqual("version1.0.0", version.parse())
def test_parse_prefix_whitespace(self):
"""Assert prefix is stripped together with any whitespace."""
version = base.Version(version="version 1.0.0", prefix="version")
self.assertEqual("1.0.0", version.parse())
def test_parse_with_prefix_no_v(self):
version = base.Version(version="release1.0.0", prefix="release")
self.assertEqual("1.0.0", version.parse())
def test_parse_with_prefix_with_v(self):
version = base.Version(version="release-v1.0.0", prefix="release-")
self.assertEqual("1.0.0", version.parse())
def test_parse_with_multiple_prefixes(self):
"""Assert parsing is working when multiple prefixes are provided."""
version = base.Version(version="release_db-1.2.3", prefix="release_db-;release")
self.assertEqual("1.2.3", version.parse())
def test_parse_with_multiple_prefixes_one_empty(self):
"""
Assert parsing is working when multiple prefixes are provided and one
is empty string.
"""
version = base.Version(version="release_db-1.2.3", prefix="release_db-; ")
self.assertEqual("1.2.3", version.parse())
def test_prerelease(self):
"""Assert prerelease is defined and returns False"""
version = base.Version(version="v1.0.0")
self.assertFalse(version.prerelease())
def test_postrelease(self):
"""Assert postrelease is defined and returns False"""
version = base.Version(version="v1.0.0")
self.assertFalse(version.postrelease())
def test_newer_single_version(self):
"""Assert newer is functional with a single instance of Version."""
version = base.Version(version="v1.0.0")
newer_version = base.Version(version="v2.0.0")
self.assertFalse(version.newer(newer_version))
self.assertTrue(newer_version.newer(version))
def test_newer_multiple_versions(self):
"""Assert newer is functional with multiple instances of Version."""
version = base.Version(version="v1.0.0")
version2 = base.Version(version="v1.1.0")
newer_version = base.Version(version="v2.0.0")
self.assertFalse(version.newer(newer_version))
self.assertTrue(newer_version.newer([version, version2]))
def test_newer_with_strings(self):
"""Assert newer handles string arguments."""
version = base.Version(version="v1.0.0")
self.assertFalse(version.newer("v2.0.0"))
    def test_lt(self):
        """Assert Version supports < comparison."""
        old_version = base.Version(version="v1.0.0")
        new_version = base.Version(version="v1.1.0")
        self.assertTrue(old_version < new_version)
        self.assertFalse(new_version < old_version)
    def test_lt_one_unparsable(self):
        """Assert unparsable versions sort lower than parsable ones."""
        unparsable_version = base.Version(version="blarg")
        # force parse() to fail so the comparison takes the unparsable path
        unparsable_version.parse = mock.Mock(
            side_effect=exceptions.InvalidVersion("blarg")
        )
        new_version = base.Version(version="v1.0.0")
        self.assertTrue(unparsable_version < new_version)
        self.assertFalse(new_version < unparsable_version)
    def test_lt_both_unparsable(self):
        """Assert unparsable versions resort to string sorting."""
        # both parse() calls fail, so ordering falls back to the raw strings
        alphabetically_lower = base.Version(version="arg")
        alphabetically_lower.parse = mock.Mock(
            side_effect=exceptions.InvalidVersion("arg")
        )
        alphabetically_higher = base.Version(version="blarg")
        alphabetically_higher.parse = mock.Mock(
            side_effect=exceptions.InvalidVersion("blarg")
        )
        self.assertTrue(alphabetically_lower < alphabetically_higher)
def test_le(self):
"""Assert Version supports <= comparison."""
old_version = base.Version(version="v1.0.0")
equally_old_version = base.Version(version="v1.0.0")
new_version = base.Version(version="v1.1.0")
self.assertTrue(old_version <= new_version)
self.assertTrue(old_version <= equally_old_version)
self.assertFalse(new_version <= old_version)
def test_gt(self):
"""Assert Version supports > comparison."""
old_version = base.Version(version="v1.0.0")
new_version = base.Version(version="v1.1.0")
self.assertTrue(new_version > old_version)
self.assertFalse(old_version > new_version)
def test_ge(self):
"""Assert Version supports >= comparison."""
old_version = base.Version(version="v1.0.0")
equally_new_version = base.Version(version="v1.1.0")
new_version = base.Version(version="v1.1.0")
self.assertFalse(old_version >= new_version)
self.assertTrue(new_version >= equally_new_version)
self.assertTrue(new_version >= old_version)
def test_eq(self):
"""Assert Version supports == comparison."""
v1 = base.Version(version="v1.0.0")
v2 = base.Version(version="v1.0.0")
self.assertTrue(v1 == v2)
def test_eq_one_with_v(self):
"""Assert Versions where one just has a v prefix are still equal"""
v1 = base.Version(version="1.0.0")
v2 = base.Version(version="v1.0.0")
self.assertTrue(v1 == v2)
def test_eq_one_with_prefix(self):
|
igemsoftware2017/USTC-Software-2017 | biohub/core/tasks/payload.py | Python | gpl-3.0 | 1,411 | 0 | from biohub.core.tasks.exceptions import TaskInstanceNotExists
from biohub.core.tasks.result import AsyncResult
class TaskPayload(object):
    """
    A task payload carries information related to a specific task instance,
    including task_id, arguments, options, etc.

    The payload round-trips through redis as the ``packed_data`` tuple.
    (This body also repairs stray dataset-corruption tokens that made the
    original ``from_packed_data`` definition syntactically invalid.)
    """

    __slots__ = ['task_name', 'task_id', 'args', 'kwargs', 'options',
                 'packed_data', '_async_result']

    def __init__(self, task_name, task_id, args, kwargs, options):
        self.task_name = task_name
        self.task_id = task_id
        self.args = args
        self.kwargs = kwargs or {}
        self.options = options
        # NOTE(review): packed_data keeps the *raw* kwargs (possibly None)
        # while self.kwargs is normalized to {} — presumably intentional so
        # the stored tuple round-trips unchanged; confirm before changing.
        self.packed_data = (task_name, task_id, args, kwargs, options)
        self._async_result = AsyncResult(task_id)

    def store(self):
        """
        Store the payload information into redis (via the async result).
        """
        self._async_result._set_payload(self.packed_data)

    @classmethod
    def from_packed_data(cls, packed_data):
        """
        A factory function to create a payload from a packed tuple.
        """
        return cls(*packed_data)

    @classmethod
    def from_task_id(cls, task_id):
        """
        A factory function to fetch a task payload through a given task_id.

        Raises:
            TaskInstanceNotExists: if no payload is stored for ``task_id``.
        """
        packed_data = AsyncResult(task_id).payload
        if packed_data is None:
            raise TaskInstanceNotExists(task_id)
        return cls.from_packed_data(packed_data)
XeCycle/indico | indico/web/menu.py | Python | gpl-3.0 | 7,214 | 0.001109 | # This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals, absolute_import
from flask import render_template, request
from operator import attrgetter
from indico.core import signals
from indico.util.signals import named_objects_from_signal
from indico.util.struct.iterables import group_list
from indico.util.string import return_ascii, format_repr
from indico.web.flask.util import url_for
class HeaderMenuEntry(object):
    """Defines a header menu entry.

    :param url: the url the menu item points to
    :param caption: the caption of the menu item
    :param parent: when used, all menu entries with the same parent
                   are shown in a dropdown with the parent name as its
                   caption
    """
    def __init__(self, url, caption, parent=None):
        self.url = url
        self.caption = caption
        self.parent = parent
    @return_ascii
    def __repr__(self):
        return '<HeaderMenuEntry({}, {}, {})>'.format(self.caption, self.parent, self.url)
    @classmethod
    def group(cls, entries):
        """Return the given entries grouped by parent, sorted by caption within each group."""
        return sorted(group_list(entries, key=attrgetter('parent'), sort_by=attrgetter('caption')).items())
class MenuItem(object):
    """Defines a generic menu item.

    :param title: the title of the item
    :param endpoint: shortcut to define a menu item that points to the
                     specified endpoint and is considered active only
                     on that endpoints. Cannot be combined with `url` or
                     `endpoints`.
    :param url: url of the menu item
    :param endpoints: set of endpoints on which this menu item is considered
                      active. Can also be a string if only one endpoint is
                      used.
    """
    def __init__(self, title, endpoint=None, url=None, endpoints=None):
        self.title = title
        self.url = url
        if endpoint is not None:
            # `endpoint` is an exclusive shortcut: derive both url and endpoints
            assert url is None and endpoints is None
            self.url = url_for(endpoint)
            self.endpoints = {endpoint}
        elif endpoints is None:
            self.endpoints = set()
        elif isinstance(endpoints, basestring):  # Python 2 file (see unicode_literals import)
            self.endpoints = {endpoints}
        else:
            self.endpoints = set(endpoints)
    @return_ascii
    def __repr__(self):
        return '<MenuItem({}, {})>'.format(self.title, self.url)
    @property
    def active(self):
        # active iff the current request's endpoint is one of ours
        return request.endpoint in self.endpoints
class SideMenuSection(object):
    """Defines a side menu section (item set).

    :param name: the unique name of the section
    :param title: the title of the section (displayed)
    :param weight: the "weight" (higher means it shows up first)
    :param active: whether the section should be shown expanded by default
    :param icon: icon that will be displayed next to the section title.
    """
    is_section = True
    def __init__(self, name, title, weight=-1, active=False, icon=None):
        self.name = name
        self.title = title
        self._active = active
        self._items = set()
        # `icon` is optional (defaults to None); guard against prefixing None,
        # mirroring SideMenuItem — the unguarded `'icon-' + icon` raised a
        # TypeError whenever no icon was given
        self.icon = ('icon-' + icon) if icon else None
        self.weight = weight
        self._sorted_items = None
    def add_item(self, item):
        # invalidate the sort cache so the new item is picked up on next access
        self._sorted_items = None
        self._items.add(item)
    @property
    def items(self):
        # lazily sorted: heaviest first, then alphabetically by title
        if self._sorted_items is None:
            self._sorted_items = sorted(self._items, key=lambda x: (-x.weight, x.title))
        return self._sorted_items
    @property
    def active(self):
        # a section is active if explicitly marked or if any of its items is
        return self._active or any(item.active for item in self._items)
    @return_ascii
    def __repr__(self):
        return format_repr(self, 'name', 'title', active=False)
class SideMenuItem(object):
    """Defines a side menu item.

    :param name: the unique name (within the menu) of the item
    :param title: the title of the menu item (displayed)
    :param url: the URL that the link will point to
    :param weight: the "weight" (higher means it shows up first)
    :param active: whether the item will be shown as active by default
    :param disabled: if `True`, the item will be displayed as disabled
    :param section: section the item will be put in
    :param icon: icon that will be displayed next to the item
    """
    is_section = False
    def __init__(self, name, title, url, weight=-1, active=False, disabled=False, section=None, icon=None):
        self.name = name
        self.title = title
        self.url = url
        self.active = active
        self.disabled = disabled
        self.section = section
        self.weight = weight
        # icon is optional; only apply the 'icon-' CSS prefix when one was given
        self.icon = ('icon-' + icon) if icon else None
    @return_ascii
    def __repr__(self):
        return format_repr(self, 'name', 'title', 'url', active=False, disabled=False)
def build_menu_structure(menu_id, active_item=None, **kwargs):
    """
    Build a menu (list of entries) with sections/items.

    Information is provided by specific signals and filtered
    by menu id.
    This can be used as a very thin framework for menu
    handling across the app.

    :param menu_id: menu_id used to filter out signal calls
    :param active_item: ID of currently active menu item
    :param kwargs: extra arguments passed to the signals
    :returns: properly sorted list (taking weights into account)
    """
    top_level = set()
    sections = {}
    # collect sections first so items can be attached to them below
    for id_, section in named_objects_from_signal(signals.menu.sections.send(menu_id, **kwargs)).iteritems():
        sections[id_] = section
        top_level.add(section)
    for id_, item in named_objects_from_signal(signals.menu.items.send(menu_id, **kwargs)).iteritems():
        if id_ == active_item:
            item.active = True
        # items without a section go straight to the top level
        if item.section is None:
            top_level.add(item)
        else:
            sections[item.section].add_item(item)
    # heaviest entries first, ties broken alphabetically by title
    return sorted(top_level, key=lambda x: (-x.weight, x.title))
def render_sidemenu(menu_id, active_item=None, old_style=False, **kwargs):
    """Render a sidemenu with sections/items.

    :param menu_id: The identifier of the menu.
    :param active_item: The name of the currently-active menu item.
    :param old_style: Whether the menu should be rendered using the
                      "old" menu style.
    :param kwargs: Additional arguments passed to the menu signals.
    :returns: the rendered ``side_menu.html`` template
    """
    items = build_menu_structure(menu_id, active_item=active_item, **kwargs)
    return render_template('side_menu.html', items=items, old_style=old_style, menu_id=menu_id)
|
ChristopherUC/cucmAxlWriter | setup.py | Python | gpl-3.0 | 494 | 0 | from distutils.core import setup
setup(name='cucmAxlWriter',
version='0.4',
description='cucm object writer using AXL',
author= | 'Christopher Phillips',
author_email='christopherphillips+cucmAxlWriter@gmail.com',
url='https://github.com/ChristopherUC/cucmAxlWriter',
license='GNU GENERAL PUBLIC LICENSE Version 3',
py_modules=['appConfig', 'ucAppConfig', 'configCreator', 'cucmAxlWriter',
'cucmJabberWr | iter', 'cupiRestWriter'],
)
|
evhub/coconut | coconut/command/util.py | Python | apache-2.0 | 20,011 | 0.001299 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------------------------------------------------
# INFO:
# -----------------------------------------------------------------------------------------------------------------------
"""
Authors: Evan Hubinger, Fred Buchanan
License: Apache 2.0
Description: Utility functions for the main command module.
"""
# -----------------------------------------------------------------------------------------------------------------------
# IMPORTS:
# -----------------------------------------------------------------------------------------------------------------------
from __future__ import print_function, absolute_import, unicode_literals, division
from coconut.root import * # NOQA
import sys
import os
import traceback
import subprocess
import shutil
from select import select
from contextlib import contextmanager
from functools import partial
if PY2:
import __builtin__ as builtins
else:
import builtins
from coconut.terminal import (
logger,
complain,
internal_assert,
)
from coconut.exceptions import (
CoconutException,
get_encoding,
)
from coconut.constants import (
WINDOWS,
PY34,
PY32,
fixpath,
base_dir,
main_prompt,
more_prompt,
default_style,
prompt_histfile,
prompt_multiline,
prompt_vi_mode,
prompt_wrap_lines,
prompt_history_search,
style_env_var,
mypy_path_env_var,
tutorial_url,
documentation_url,
reserved_vars,
minimum_recursion_limit,
oserror_retcode,
base_stub_dir,
installed_stub_dir,
interpreter_uses_auto_compilation,
interpreter_uses_coconut_breakpoint,
)
if PY26:
import imp
else:
import runpy
try:
# just importing readline improves built-in input()
import readline # NOQA
except ImportError:
pass
if PY34:
from importlib import reload
else:
from imp import reload
try:
import prompt_toolkit
try:
# prompt_toolkit v2
from prompt_toolkit.lexers.pygments import PygmentsLexer
from prompt_toolkit.styles.pygments import style_from_pygments_cls
except ImportError:
# prompt_toolkit v1
from prompt_toolkit.layout.lexers import PygmentsLexer
from prompt_toolkit.styles import style_from_pygments as style_from_pygments_cls
import pygments
import pygments.styles
from coconut.highlighter import CoconutLexer
except ImportError:
prompt_toolkit = None
except KeyError:
complain(
ImportError(
"detected outdated pygments version (run '{python} -m pip install --upgrade pygments' to fix)".format(python=sys.executable),
),
)
prompt_toolkit = None
# -----------------------------------------------------------------------------------------------------------------------
# UTILITIES:
# -----------------------------------------------------------------------------------------------------------------------
def writefile(openedfile, newcontents):
    """Replace the entire contents of an already-open, writable file object."""
    # Rewind and drop the old contents before writing the new ones.
    openedfile.seek(0)
    openedfile.truncate(0)
    openedfile.write(newcontents)
def readfile(openedfile):
    """Return the full contents of an already-open file object as a str."""
    # Rewind first so previous reads/writes don't truncate the result.
    openedfile.seek(0)
    contents = openedfile.read()
    return str(contents)
def launch_tutorial():
    """Open the Coconut tutorial in the user's web browser."""
    import webbrowser  # this is expensive, so only do it here
    # new=2 asks the browser to open a new tab if possible
    webbrowser.open(tutorial_url, 2)
def launch_documentation():
    """Open the Coconut documentation in the user's web browser."""
    import webbrowser  # this is expensive, so only do it here
    # new=2 asks the browser to open a new tab if possible
    webbrowser.open(documentation_url, 2)
def showpath(path):
    """Format a path for displaying.

    Verbose mode shows the absolute path; otherwise the path is shown
    relative to the current directory with any leading './' stripped.
    """
    if logger.verbose:
        return os.path.abspath(path)
    else:
        path = os.path.relpath(path)
        if path.startswith(os.curdir + os.sep):
            path = path[len(os.curdir + os.sep):]
        return path
def is_special_dir(dirname):
    """Determine whether *dirname* is one of the special directories '.' or '..'."""
    return dirname in (os.curdir, os.pardir)
def rem_encoding(code):
    """Remove encoding declarations from compiled code so it can be passed to exec.

    Python only honors a coding declaration on the first two lines, so only
    those lines are inspected; everything after them is kept untouched.
    """
    lines = code.splitlines()
    kept_head = [line for line in lines[:2]
                 if not (line.lstrip().startswith("#") and "coding" in line)]
    return "\n".join(kept_head + lines[2:])
def exec_func(code, glob_vars, loc_vars=None):
    """Wrapper around exec; locals default to the globals mapping."""
    exec(code, glob_vars, glob_vars if loc_vars is None else loc_vars)
def interpret(code, in_vars):
    """Try to evaluate the given code as an expression, otherwise execute it.

    A non-None evaluation result is printed as its ascii repr, mirroring
    the interactive interpreter.
    """
    try:
        result = eval(code, in_vars)
    except SyntaxError:
        pass  # exec code outside of exception context
    else:
        if result is not None:
            print(ascii(result))
        return  # don't also exec code
    exec_func(code, in_vars)
@contextmanager
def handling_broken_process_pool():
    """Context manager that handles BrokenProcessPool errors.

    BrokenProcessPool only exists on Python 3.3+, so earlier versions
    simply yield with no handling.
    """
    if sys.version_info < (3, 3):
        yield
    else:
        from concurrent.futures.process import BrokenProcessPool
        try:
            yield
        except BrokenProcessPool:
            logger.log_exc()
            # re-raised as KeyboardInterrupt — presumably so callers treat a
            # broken pool like a user abort and run their cleanup; confirm
            raise KeyboardInterrupt("broken process pool")
def kill_children():
    """Terminate all child processes of the current process.

    Requires psutil; without it a warning is logged and nothing is killed.
    """
    try:
        import psutil
    except ImportError:
        logger.warn(
            "missing psutil; --jobs may not properly terminate",
            extra="run '{python} -m pip install coconut[jobs]' to fix".format(python=sys.executable),
        )
    else:
        parent = psutil.Process()
        children = parent.children(recursive=True)
        # keep re-querying until no children remain, in case new ones
        # appear (or some survive) while we are terminating
        while children:
            for child in children:
                try:
                    child.terminate()
                except psutil.NoSuchProcess:
                    pass  # process is already dead, so do nothing
            children = parent.children(recursive=True)
def splitname(path):
    """Split a path into a directory, name, and extensions."""
    directory, basename = os.path.split(path)
    # split on the first extension separator only, so the complete extension
    # chain (e.g. "coco.py") ends up together in ``all_exts`` -- this is why
    # os.path.splitext (which only strips the last extension) isn't used
    bare_name, all_exts = basename.split(os.extsep, 1)
    return directory, bare_name, all_exts
def run_file(path):
    """Run a module from a path as __main__ and return its variables."""
    if PY26:
        # runpy.run_path doesn't exist on 2.6, so fall back to imp
        dirpath, name, _ = splitname(path)
        found = imp.find_module(name, [dirpath])
        module = imp.load_module("__main__", *found)
        return vars(module)
    else:
        return runpy.run_path(path, run_name="__main__")
def call_output(cmd, stdin=None, encoding_errors="replace", **kwargs):
    """Run command and read output.

    Returns (stdout_chunks, stderr_chunks, returncode) where the chunks are
    lists of decoded strings accumulated across communicate() calls.
    """
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    stdout, stderr, retcode = [], [], None
    # loop until the process has actually exited
    while retcode is None:
        if stdin is not None:
            logger.log_prefix("<0 ", stdin.rstrip())
        raw_out, raw_err = p.communicate(stdin)
        stdin = None  # stdin is only sent on the first pass
        out = raw_out.decode(get_encoding(sys.stdout), encoding_errors) if raw_out else ""
        if out:
            logger.log_prefix("1> ", out.rstrip())
        stdout.append(out)
        err = raw_err.decode(get_encoding(sys.stderr), encoding_errors) if raw_err else ""
        if err:
            logger.log_prefix("2> ", err.rstrip())
        stderr.append(err)
        retcode = p.poll()
    return stdout, stderr, retcode
def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs):
"""Run a console command.
When show_output=True, prints output and returns exit code, otherwise returns output.
When raise_errs=True, raises a subprocess.CalledProcessError if the command fails.
"""
internal_assert(cmd and isinstance(cmd, list), "console commands must be passed as non-empty lists")
if hasattr(shutil, "which"):
cmd[0] = shutil.which(cmd[0]) or cmd[0]
logger.log_cmd(cmd)
try:
if show_output and raise_errs:
return subprocess.check_c |
genome/flow-workflow | flow_workflow/entities/converge/future_nets.py | Python | agpl-3.0 | 1,639 | 0.006101 | from flow.petri_net.future import FutureAction
from flow_workflow.entities.converge.actions import ConvergeAction
from flow_workflow.future_nets import WorkflowNetBase
from flow_workflow.historian import actions
class ConvergeNet(WorkflowNetBase):
    """Petri-net fragment for a 'converge' workflow operation.

    Wires a single ConvergeAction transition between the inherited internal
    start and success transitions, and observes those transitions with
    historian status updates ('running' on start, 'done' on success).
    Reconstructed: the original lines contained stray dataset-corruption
    tokens inside two statements.
    """
    def __init__(self, operation_id, name, input_property_order,
            output_properties):
        WorkflowNetBase.__init__(self, operation_id=operation_id, name=name)
        # the action that performs the converge itself
        self.converge_action = FutureAction(cls=ConvergeAction,
                operation_id=operation_id,
                input_property_order=input_property_order,
                output_properties=output_properties)
        self.converge_transition = self.add_basic_transition(
                name='converge(%s)' % operation_id,
                action=self.converge_action)
        # start -> converge -> success
        self.starting_place = self.bridge_transitions(
                self.internal_start_transition,
                self.converge_transition,
                name='starting')
        self.succeeding_place = self.bridge_transitions(
                self.converge_transition,
                self.internal_success_transition,
                name='succeeding')
        # historian bookkeeping on either end of the operation
        self.observe_transition(self.internal_start_transition,
                FutureAction(actions.UpdateOperationStatus,
                    operation_id=operation_id, status='running',
                    calculate_start_time=True))
        self.observe_transition(self.internal_success_transition,
                FutureAction(actions.UpdateOperationStatus,
                    operation_id=operation_id, status='done',
                    calculate_end_time=True))
kawamon/hue | desktop/core/ext-py/docutils-0.14/test/functional/tests/standalone_rst_docutils_xml.py | Python | apache-2.0 | 461 | 0 | exec(open('functional/test | s/_standalone_rst_defaults.py').read())
# Source and destination file names.
test_source = "standalone_rst_docutils_xml.txt"
test_destination = "standalone_rst_docutils_xml.xml"
# Keyword parameters passed to publish_file.
writer_name = "docutils_xml"
# Settings
# enable INFO-level system messages in this test:
settings_overrides['report_level'] = 1
# format output with indents and newlines
settings_overri | des['indents'] = True
|
editxt/editxt | editxt/theme.py | Python | gpl-3.0 | 3,505 | 0 | # -*- coding: utf-8 -*-
# EditXT
# Copyright 2007-2015 Daniel Miller <millerdev@gmail.com>
#
# This file is part of EditXT, a programmer's text editor for Mac OS X,
# which can be found at http://editxt.org/.
#
# EditXT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EditXT is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EditXT. If not, see <http://www.gnu.org/licenses/>.
import logging
import AppKit as ak
from editxt.config import Color, NOT_SET
from editxt.util import get_color, hex_value, rgb2gray
log = logging.getLogger(__name__)
class Theme(object):
    """Lazy, cached view over the ``theme.*`` section of an EditXT config.

    Theme values are resolved on first attribute access (``__getattr__``),
    memoized on the instance, and flushed by ``reset()``. Reconstructed:
    the original lines contained stray dataset-corruption tokens in the
    ``derived`` docstring and in ``__init__``.
    """

    class derived:
        def selection_secondary_color(config):
            """Unused proof of concept

            Derived theme value that can be overridden in the config
            """
            value = config.get("theme.selection_secondary_color", Color(None))
            if value is None:
                color = hex_value(config["theme.selection_color"])
                value = get_color(rgb2gray(color))
            return value

    def __init__(self, config):
        self.config = config
        # names resolved via __getattr__ and memoized; cleared by reset()
        self.cached = set()
        self.reset()

    def reset(self):
        """Reload syntax settings and drop all memoized theme values."""
        self.syntax = self.config.lookup("theme.syntax", True)
        self.default = self.syntax.get("default", {})
        # self.default is mutated below (get_syntax_color caches into it),
        # so it must not alias the config's own mapping
        assert self.default is not self.config.get("theme.syntax.default"), \
            "{!r} will be mutated".format(self.default)
        for name in self.cached:
            delattr(self, name)
        self.cached.clear()

    def __getattr__(self, name):
        # config value wins; otherwise fall back to a derived computation
        try:
            value = self.config["theme." + name]
        except KeyError:
            value = getattr(self.derived, name)(self.config)
        # memoize on the instance so __getattr__ is not hit again
        self.cached.add(name)
        setattr(self, name, value)
        return value

    def __getitem__(self, name):
        return getattr(self, name)

    def get_syntax_color(self, name):
        """Resolve (and cache) the color for a "<lang> <token>" syntax name."""
        try:
            value = self.default[name]
        except KeyError:
            # NOTE(review): rsplit assumes `name` contains a space
            # ("<lang> <token>"); a bare name would raise ValueError — confirm
            # callers always pass the two-part form
            lang, token_name = name.rsplit(" ", 1)
            if token_name:
                value = self._get(lang, token_name)
                if value is None:
                    value = self._get("default", token_name)
                if value is not None:
                    try:
                        value = get_color(value)
                    except Exception:
                        log.warn("unknown color: %s -> %r", name, value)
                        value = None
                if value is None:
                    if "background" in name.lower():
                        value = self.background_color
                    else:
                        value = self.text_color
            else:
                value = self.text_color
            self.default[name] = value
        return value

    def _get(self, lang, name):
        # walk dotted token names from most to least specific
        # (e.g. "comment.line" -> "comment")
        data = self.syntax.get(lang)
        if data:
            while name:
                try:
                    return data[name]
                except KeyError:
                    pass
                name = name.rpartition(".")[0]
        return None
|
gov-cjwaszczuk/notifications-admin | tests/app/main/test_permissions.py | Python | mit | 3,760 | 0.00133 | import pytest
from app.utils import user_has_permissions
from app.main.views.index import index
from werkzeug.exceptions import Forbidden, Unauthorized
from flask import request
def _test_permissions(
    client,
    usr,
    permissions,
    service_id,
    will_succeed,
    any_=False,
    admin_override=False,
):
    """Decorate the index view with user_has_permissions and assert the outcome.

    When will_succeed is False, the decorated view must raise Forbidden or
    Unauthorized; any other outcome fails the test.
    """
    request.view_args.update({'service_id': service_id})
    if usr:
        client.login(usr)
    decorator = user_has_permissions(*permissions, any_=any_, admin_override=admin_override)
    decorated_index = decorator(index)
    if will_succeed:
        decorated_index()
    else:
        try:
            decorated_index()
            # pytest.fail raises Failed, which is not caught below,
            # so a missing exception correctly fails the test
            pytest.fail("Failed to throw a forbidden or unauthorised exception")
        except (Forbidden, Unauthorized):
            pass
def test_user_has_permissions_on_endpoint_fail(
client,
mocker,
):
user = _user_with_permissions()
mocker.patch('app.user_api_client.get_user', return_value=user)
_test_permissions(
client,
user,
['send_texts'],
'',
False)
def test_user_has_permissions_success(
    client,
    mocker,
):
    """A user holding the required permission can access the view."""
    user = _user_with_permissions()
    mocker.patch('app.user_api_client.get_user', return_value=user)
    # reconstructed: the original closing lines contained stray
    # dataset-corruption tokens
    _test_permissions(
        client,
        user,
        ['manage_users'],
        '',
        True)
def test_user_has_permissions_or(
    client,
    mocker,
):
    """With any_=True, holding any one of the listed permissions suffices."""
    user = _user_with_permissions()
    mocker.patch('app.user_api_client.get_user', return_value=user)
    _test_permissions(
        client,
        user,
        ['send_texts', 'manage_users'],
        '',
        True,
        any_=True)
def test_user_has_permissions_multiple(
client,
mocker,
):
user = _user_with_permissions()
mocker.patch('app.user_api_client.get_user', return_value=user)
_test_permissions(
client,
user,
['manage_templates', 'manage_users'],
'',
will_succeed=True)
def test_exact_permissions(
client,
mocker,
):
user = _user_with_permissions()
mocker.patch('app.user_api_client.get_user', return_value=user)
_test_permissions(
client,
user,
['manage_users', 'manage_templates', 'manage_settings'],
'',
True)
def test_platform_admin_user_can_access_page(
client,
platform_admin_user,
mocker,
):
mocker.patch('app.user_api_client.get_user', return_value=platform_admin_user)
_test_permissions(
client,
platform_admin_user,
[],
'',
will_succeed=True,
admin_override=True)
def test_platform_admin_user_can_not_access_page(
client,
platform_admin_user,
mocker,
):
mocker.patch('app.user_api_client.get_user', return_value=platform_admin_user)
_test_permissions(
client,
platform_admin_user,
[],
'',
will_succeed=False,
admin_override=False)
def test_no_user_returns_401_unauth(
    client
):
    """An anonymous session must be refused by the decorator."""
    from flask_login import current_user

    # Sanity check: nobody is logged in for this client fixture.
    assert not current_user.is_authenticated
    _test_permissions(
        client,
        None,
        [],
        '',
        will_succeed=False,
    )
def _user_with_permissions():
    """Build a User fixture that holds the three manage_* permissions."""
    from app.notify_client.user_api_client import User

    return User({
        'id': 999,
        'name': 'Test User',
        'password': 'somepassword',
        'email_address': 'test@user.gov.uk',
        'mobile_number': '+4412341234',
        'state': 'active',
        'failed_login_count': 0,
        # Keyed by service id; '' matches the service used in the tests above.
        'permissions': {'': ['manage_users', 'manage_templates', 'manage_settings']},
        'platform_admin': False,
    })
|
bosscha/alma-calibrator | notebooks/selecting_source/allskymap.py | Python | gpl-2.0 | 16,580 | 0.007419 | from __future__ import unicode_literals
"""
AllSkyMap is a subclass of Basemap, specialized for handling common plotting
tasks for celestial data.
It is essentially equivalent to using Basemap with full-sphere projections
(e.g., 'hammer' or 'moll') and the `celestial` keyword set to `True`, but
it adds a few new methods:
* label_meridians for, well, labeling me | ridians with their longitude values;
* geodesic, a replacement for Basemap.drawgreatcircle, that can correctly
handle geodesics that cross the limb of the map, and providing the user
easy control over clipping (which affects thick lines at or near the limb);
* tissot, which overrides Base | map.tissot, correctly handling geodesics that
cross the limb of the map.
Created Jan 2011 by Tom Loredo, based on Jeff Whitaker's code in Basemap's
__init__.py module.
"""
from numpy import *
import matplotlib.pyplot as pl
from matplotlib.pyplot import *
from mpl_toolkits.basemap import Basemap
import pyproj
from pyproj import Geod
__all__ = ['AllSkyMap']
def angle_symbol(angle, round_to=1.0):
    """
    Return a string representing an angle, rounded and with a degree symbol.

    This is adapted from code in mpl's projections.geo module.
    """
    # Explicit import: this module only star-imports numpy/pyplot, so the
    # name `np` was previously available only because `from matplotlib.pyplot
    # import *` happens to leak it.  Bind it deliberately here.
    import numpy as np

    value = np.round(angle / round_to) * round_to
    # NOTE(review): rcParam 'text.latex.unicode' was removed in matplotlib 3.0;
    # this branch assumes an older matplotlib -- confirm before upgrading.
    if pl.rcParams['text.usetex'] and not pl.rcParams['text.latex.unicode']:
        return r'$%0.0f^\circ$' % value
    else:
        return '%0.0f\N{DEGREE SIGN}' % value
class AllSkyMap(Basemap):
    """
    AllSkyMap is a subclass of Basemap, specialized for handling common plotting
    tasks for celestial data.
    It is essentially equivalent to using Basemap with full-sphere projections
    (e.g., 'hammer' or 'moll') and the `celestial` keyword set to `True`, but
    it adds a few new methods:
    * label_meridians for, well, labeling meridians with their longitude values;
    * geodesic, a replacement for Basemap.drawgreatcircle, that can correctly
      handle geodesics that cross the limb of the map, and providing the user
      easy control over clipping (which affects thick lines at or near the
      limb);
    * tissot, which overrides Basemap.tissot, correctly handling geodesics that
      cross the limb of the map.
    """

    # Longitudes corresponding to east and west edges, reflecting the
    # convention that 180 deg is the eastern edge, according to basemap's
    # underlying projections:
    # (west_lon carries a tiny epsilon so east/west tests at exactly 180 deg
    # are unambiguous.)
    east_lon = 180.
    west_lon = 180.+1.e-10
    def __init__(self,
                 projection='hammer',
                 lat_0=0., lon_0=0.,
                 suppress_ticks=True,
                 boundinglat=None,
                 fix_aspect=True,
                 anchor=str('C'),
                 ax=None):
        """
        Create an all-sky celestial map.

        Only the full-sphere 'hammer' and 'moll' projections are supported;
        every other Basemap parameter is pinned to a value appropriate for a
        unit-sphere, celestial-orientation (east-to-the-left) map.
        """
        if projection != 'hammer' and projection !='moll':
            raise ValueError('Only hammer and moll projections supported!')
        # Use Basemap's init, enforcing the values of many parameters that
        # aren't used or whose Basemap defaults would not be altered for all-sky
        # celestial maps.
        Basemap.__init__(self, llcrnrlon=None, llcrnrlat=None,
                         urcrnrlon=None, urcrnrlat=None,
                         llcrnrx=None, llcrnry=None,
                         urcrnrx=None, urcrnry=None,
                         width=None, height=None,
                         projection=projection, resolution=None,
                         area_thresh=None, rsphere=1.,
                         lat_ts=None,
                         lat_1=None, lat_2=None,
                         lat_0=lat_0, lon_0=lon_0,
                         suppress_ticks=suppress_ticks,
                         satellite_height=1.,
                         boundinglat=None,
                         fix_aspect=True,
                         anchor=anchor,
                         celestial=True,
                         ax=ax)
        # Keep a local ref to lon_0 for hemisphere checking.
        self._lon_0 = self.projparams['lon_0']
        # Map limb polygon; populated lazily by drawmapboundary() and used
        # for clipping geodesics.
        self._limb = None
    def drawmapboundary(self,color='k',linewidth=1.0,fill_color=None,\
        zorder=None,ax=None):
        """
        draw boundary around map projection region, optionally
        filling interior of region.
        .. tabularcolumns:: |l|L|
        ==============   ====================================================
        Keyword          Description
        ==============   ====================================================
        linewidth        line width for boundary (default 1.)
        color            color of boundary line (default black)
        fill_color       fill the map region background with this
                         color (default is no fill or fill with axis
                         background color).
        zorder           sets the zorder for filling map background
                         (default 0).
        ax               axes instance to use
                         (default None, use default axes instance).
        ==============   ====================================================
        returns matplotlib.collections.PatchCollection representing map boundary.
        """
        # Just call the base class version, but keep a copy of the limb
        # polygon for clipping (used by geodesic()/tissot()).
        self._limb = Basemap.drawmapboundary(self, color=color,
            linewidth=linewidth, fill_color=fill_color, zorder=zorder, ax=ax)
        return self._limb
    def label_meridians(self, lons, fontsize=10, valign='bottom', vnudge=0,
                        halign='center', hnudge=0, color='black'):
        """
        Label meridians with their longitude values in degrees.

        This labels meridians with negative longitude l with the value 360-l;
        for maps in celestial orientation, this means meridians to the right
        of the central meridian are labeled from 360 to 180 (left to right).

        `vnudge` and `hnudge` specify amounts in degrees to nudge the labels
        from their default placements, vertically and horizontally.  These
        values obey the map orientation, so to nudge to the right, use a
        negative `hnudge` value.
        """
        # Run through (lon, lat) pairs, with lat=0 in each pair.
        lats = len(lons)*[0.]
        for lon,lat in zip(lons, lats):
            x, y = self(lon+hnudge, lat+vnudge)
            # Map negative longitudes to the 180..360 range for display.
            if lon < 0:
                lon_lbl = 360 + lon
            else:
                lon_lbl = lon
            pl.text(x, y, angle_symbol(lon_lbl), fontsize=fontsize,
                    verticalalignment=valign,
                    horizontalalignment=halign,color=color)
def east_hem(self, lon):
"""
Return True if lon is in the eastern hemisphere of the map wrt lon_0.
"""
if (lon-self._lon_0) % 360. <= self.east_lon:
return True
else:
return False
def geodesic(self, lon1, lat1, lon2, lat2, del_s=.01, clip=True, **kwargs):
"""
Plot a geodesic curve from (lon1, lat1) to (lon2, lat2), with
points separated by arc length del_s. Return a list of Line2D
instances for the curves comprising the geodesic. If the geodesic does
not cross the map limb, there will be only a single curve; if it
crosses the limb, there will be two curves.
"""
# TODO: Perhaps return a single Line2D instance when there is only a
# single segment, and a list of segments only when there are two segs?
# TODO: Check the units of del_s.
# This is based on Basemap.drawgreatcircle (which draws an *arc* of a
# great circle), but addresses a limitation of that method, supporting
# geodesics that cross the map boundary by breaking them into two
# segments, one in the eastern hemisphere and the other in the western.
gc = pyproj.Geod(a=self.rmajor,b=self.rminor)
az12,az21,dist = gc.inv(lon1,lat1,lon2,lat2)
npoints = int((dist+0.5**del_s)/del_s)
# Calculate lon & lat for points on the arc.
lonlats = gc.npts(lon1,lat1,lon2,lat2,npoints)
lons = |
50thomatoes50/blender.io_mqo | io_scene_mqo/__init__.py | Python | gpl-2.0 | 8,539 | 0.01136 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Script copyright (C) Thomas PORTASSAU (50thomatoes50)
# Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone, Thomas Larsson, http://blender.stackexchange.com/users/185/adhi
# <pep8-80 compliant>
# Add-on metadata consumed by Blender's add-on manager (Edit > Preferences).
bl_info = {
    "name": "Metasequoia format (.mqo)",
    "author": "Thomas Portassau (50thomatoes50), sapper-trle@github, jacquesmn@github",
    "blender": (2, 80, 0),
    "version": (0, 2, 1),
    "location": "File > Import-Export",
    "description": "Import-Export MQO, UV's, "
                   "materials and textures",
    "warning": "Work In Progress, never use the exported file to overwrite original Metasequoia files",
    "wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"
                "Scripts/Import-Export/MQO",
    "tracker_url": "https://github.com/50thomatoes50/blender.io_mqo/issues",
    "category": "Import-Export"}
#http://wiki.blender.org/index.php/Dev:2.5/Py/Scripts/Cookbook/Code_snippets/Multi-File_packages#init_.py
# Support add-on reload ("Reload Scripts" in Blender): if bpy is already bound,
# this module is being re-executed, so re-import the submodules to pick up
# code changes.  importlib replaces the `imp` module, which has been
# deprecated since Python 3.4 (Blender 2.80 ships Python 3.7).
if "bpy" in locals():
    import importlib
    if "import_mqo" in locals():
        importlib.reload(import_mqo)
    if "export_mqo" in locals():
        importlib.reload(export_mqo)
import os
import bpy
from bpy.props import (BoolProperty,
FloatProperty,
StringProperty,
EnumProperty,
)
from bpy_extras.io_utils import (ExportHelper,
ImportHelper,
path_reference_mode,
axis_conversion,
)
class SCRIPT_OT_export_mqo(bpy.types.Operator, ExportHelper):
    """Export the scene's mesh objects to a Metasequoia (.mqo) file."""
    bl_idname = "script.export_mqo"
    bl_description = 'Export to Metasequoia file format (.mqo)'
    bl_label = "Export mqo"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"

    # From ExportHelper. Filter filenames.
    filename_ext = ".mqo"
    # Declared as an annotation (':') for Blender 2.80+ property registration,
    # consistent with every other property below and with SCRIPT_OT_import_mqo.
    filter_glob: StringProperty(default="*.mqo", options={'HIDDEN'})

    scale: bpy.props.FloatProperty(
        name = "Scale",
        description="Scale mesh. Value > 1 means bigger, value < 1 means smaller",
        default = 1, min = 0.001, max = 1000.0)
    rot90: bpy.props.BoolProperty(
        name = "Up axis correction",
        description="Blender up axis is Z but metasequoia up axis is Y\nExporter will invert value to be in the correcte direction",
        default = True)
    invert: bpy.props.BoolProperty(
        name = "Correction of inverted faces",
        description="Correction of inverted faces",
        default = True)
    edge: bpy.props.BoolProperty(
        name = "Export lost edge",
        description="Export edge with is not attached to a polygon",
        default = True)
    uv_exp: bpy.props.BoolProperty(
        name = "Export UV",
        description="Export UV",
        default = True)
    uv_cor: bpy.props.BoolProperty(
        name = "Convert UV",
        description="invert UV map to be in the direction has metasequoia",
        default = True)
    mat_exp: bpy.props.BoolProperty(
        name = "Export Materials",
        description="...",
        default = True)
    mod_exp: bpy.props.BoolProperty(
        name = "Export Modifier",
        description="Export modifier like mirror or/and subdivision surface",
        default = True)
    vcol_exp: bpy.props.BoolProperty(
        name = "Export Vertex Colors",
        description="Export vertex colors",
        default = True)

    def execute(self, context):
        """Run the export for every MESH object in the scene."""
        msg = ".mqo export: Executing"
        self.report({'INFO'}, msg)
        print(msg)
        if self.scale < 1:
            # Parenthesized: without them, '%' bound tighter than '/' and the
            # expression divided the formatted *string* by scale (TypeError).
            s = "%.0f times smaller" % (1.0 / self.scale)
        elif self.scale > 1:
            s = "%.0f times bigger" % self.scale
        else:
            s = "same size"
        msg = ".mqo export: Objects will be %s"%(s)
        print(msg)
        self.report({'INFO'}, msg)
        from . import export_mqo
        meshobjects = [ob for ob in context.scene.objects if ob.type == 'MESH']
        export_mqo.export_mqo(self,
            self.properties.filepath,
            meshobjects,
            self.rot90, self.invert, self.edge, self.uv_exp, self.uv_cor, self.mat_exp, self.mod_exp, self.vcol_exp,
            self.scale)
        return {'FINISHED'}

    def invoke(self, context, event):
        """Open the file selector, defaulting the name from the .blend file
        (or the first mesh object when the .blend has not been saved)."""
        meshobjects = [ob for ob in context.scene.objects if ob.type == 'MESH']
        if not meshobjects:
            msg = ".mqo export: Cancelled - No MESH objects to export."
            self.report({'ERROR'}, msg)
            print(msg,"\n")
            return {'CANCELLED'}
        pth, fn = os.path.split(bpy.data.filepath)
        nm, xtn = os.path.splitext(fn)
        if nm =="":
            nm = meshobjects[0].name
        self.properties.filepath = nm
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}
class SCRIPT_OT_import_mqo(bpy.types.Operator, ImportHelper):
    """Import meshes from a Metasequoia (.mqo) file."""
    bl_idname = "script.import_mqo"
    bl_description = 'Import from Metasequoia file format (.mqo)'
    bl_label = "Import mqo"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"

    # From ImportHelper. Filter filenames.
    filename_ext = ".mqo"
    filter_glob: StringProperty(default="*.mqo", options={'HIDDEN'})

    scale: bpy.props.FloatProperty(
        name = "Scale",
        description="Scale mesh. Value > 1 means bigger, value < 1 means smaller",
        default = 1, min = 0.001, max = 1000.0)
    rot90: bpy.props.BoolProperty(
        name = "Up axis correction",
        description="Blender up axis is Z but metasequoia up axis is Y\nExporter will invert value to be in the correcte direction",
        default = True)
    txtenc: bpy.props.EnumProperty(
        name="Text encoding", description="Set the text encoding used to write the file (ignored for 4.7+)",
        default='ascii', items=[
            ('ascii', "Ascii", ""),
            ('cp1252', "CP1252", "Code Page 1252 Western Europe"),
            ('shift_jis', "Shift JIS", "Shift Japanese Industrial Standards"),
            ('utf_8', "UTF8", ""),
            ])
    debug: bpy.props.BoolProperty(
        name = "Show debug text",
        description="Print debug text to console",
        default = False)

    def execute(self, context):
        """Parse the selected .mqo file and create the corresponding meshes."""
        msg = ".mqo import: Opening %s"% self.properties.filepath
        print(msg)
        self.report({'INFO'}, msg)
        if self.scale < 1:
            s = "%.0f times smaller" % (1.0/self.scale)
        elif self.scale > 1:
            s = "%.0f times bigger" % self.scale
        else:
            s = "same size"
        msg = ".mqo import: Objects will be %s"%(s)
        print(msg)
        self.report({'INFO'}, msg)
        from . import import_mqo
        # import_mqo returns a tuple whose first element is the operator
        # status string (e.g. 'FINISHED' / 'CANCELLED').
        r = import_mqo.import_mqo(self,
                self.properties.filepath,
                self.rot90,
                self.scale,
                self.txtenc,
                self.debug)
        return {r[0]}
def menu_func_import(self, context):
    # Menu entry appended to File > Import by register().
    self.layout.operator(SCRIPT_OT_import_mqo.bl_idname, text="Metasequoia (.mqo)")
def menu_func_export(self, context):
    # Menu entry appended to File > Export by register().
    self.layout.operator(SCRIPT_OT_export_mqo.bl_idname, text="Metasequoia (.mqo)")
# Operator classes (un)registered in bulk by register()/unregister().
classes = (SCRIPT_OT_import_mqo,
           SCRIPT_OT_export_mqo
           )
def register():
#bpy.utils.register_module(__name__)
for c in classes:
bpy.utils.register_class(c)
bpy.types.TOPBAR_MT_file_import.append(menu_func_import)
|
fosstp/fosstp | fosstp/views/news.py | Python | mit | 2,170 | 0.002765 | from pyramid.view import view_config
from pyramid.httpexceptions import HTTPFound
from pyramid_sqlalchemy import Session
from ..forms.news import NewsAddForm, NewsForm
from ..models.news import NewsModel
@view_config(route_name='news', renderer='templates/news.jinja2')
def news_view(request):
    """List every news item."""
    return {'news': Session.query(NewsModel).all()}
@view_config(route_name='news_add', renderer='templates/news_add.jinja2', request_method='GET', permission='admin')
def news_add_view_get(request):
    """Render an empty form for creating a news item."""
    return {'form': NewsAddForm()}
@view_config(route_name='news_add', renderer='templates/news_add.jinja2', request_method='POST', permission='admin')
def news_add_view_post(request):
    """Validate the submitted form and persist a new news item."""
    import transaction

    form = NewsAddForm(request.POST)
    if not form.validate():
        # Re-render the form with validation errors.
        return {'form': form}
    with transaction.manager:
        news = NewsModel()
        form.populate_obj(news)
        news.user_id = request.session['id']
        Session.add(news)
    raise HTTPFound(location=request.route_path('news'))
@view_config(route_name='news_show', renderer='templates/news_show.jinja2')
def news_show_view_get(request):
    """Display a single news item looked up by its URL id."""
    item_id = int(request.matchdict['id'])
    return {'news': Session.query(NewsModel).get(item_id)}
@view_config(route_name= | 'news_edit', renderer='templates/news_edit.jinja2', request_method='GET', permission='admin')
def news_edit_view_get(request):
news_id = int(request.matchdict['id'])
news = Session.query(NewsModel).get(news_id)
form = NewsForm(obj=news)
return {'form': form}
@view_config(route_name='news_edit', renderer='templates/news_edit.jinja2', request_method='POST', pe | rmission='admin')
def news_edit_view_post(request):
import transaction
form = NewsForm(request.POST)
if form.validate():
news_id = int(request.matchdict['id'])
with transaction.manager:
news = Session.query(NewsModel).get(news_id)
form.populate_obj(news)
Session.add(news)
raise HTTPFound(location=request.route_path('news_show', id=news_id))
else:
return {'form': form} |
tomaslaz/KLMC_Analysis | thirdparty/JPype-0.5.4.2/test/jpypetest/exc.py | Python | gpl-3.0 | 2,462 | 0.029651 | #*****************************************************************************
# Copyright 2004-2008 Steve Menard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#*****************************************************************************
from jpype import JException, java, JavaException, JProxy, JPackage
import unittest, common
import traceback
def throwIOException() :
    # Proxy callback: raises the Python-side wrapper of java.io.IOException.
    raise java.io.IOException.PYEXC("Test throw")
def throwByJavaException() :
    # Proxy callback: asks Java itself to throw an IOException.
    JPackage('jpype').exc.ExceptionTest.throwIOException()
def suite() :
    # unittest entry point: bundle all ExceptionTestCase tests into one suite.
    return unittest.makeSuite(ExceptionTestCase)
class ExceptionTestCase(common.JPypeTestCase) :
    """Exercises propagation of Java exceptions into Python (Python 2 era JPype)."""
    def testExceptionThrown(self) :
        # A RuntimeException raised in Java must surface as JavaException,
        # and its javaClass() must identify java.lang.RuntimeException.
        try :
            self.jpype.exc.ExceptionTest.throwRuntime()
            assert False
        except JavaException, ex :
            print 'Caught a Java exception ...'
            if ex.javaClass() is java.lang.RuntimeException :
                print "Caught the exception", ex.message()
                print ex.stacktrace()
            else:
                assert False
        except Exception, ex:
            # Wrong exception type: dump diagnostics before failing.
            print ex.__class__, isinstance(ex, JavaException)
            print ex.__class__.__bases__[0].__bases__[0].__bases__
            print JavaException
            assert False
        print 'if here, everything is fine'
    def testExceptionByJavaClass(self) :
        # JException(java.lang.RuntimeException) builds a Python exception
        # class that can appear directly in an `except` clause.
        try :
            self.jpype.exc.ExceptionTest.throwRuntime()
            assert False
        except JException(java.lang.RuntimeException), ex :
            print "Caught the exception", ex.message(), "->", ex.javaClass()
            print ex.stacktrace()
        except Exception, ex:
            print ex
            assert False
#    def testThrowException(self) :
#        d = {"throwIOException" : throwIOException, }
#        p = JProxy(self.jpype.exc.ExceptionThrower, dict=d)
#
#        assert self.jpype.exc.ExceptionTest.delegateThrow(p)
    def testThrowException3(self) :
        # A Python proxy whose callback triggers a Java-side throw must
        # round-trip the exception back through delegateThrow.
        d = {"throwIOException" : throwByJavaException, }
        p = JProxy(self.jpype.exc.ExceptionThrower, dict=d)
        assert self.jpype.exc.ExceptionTest.delegateThrow(p)
|
henriquebastos/itauscraper | itauscraper/converter.py | Python | lgpl-3.0 | 2,425 | 0.001249 | """Funções de conversão usada pelo scraper do Itaú."""
import datetime
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from decimal import Decimal
def date(s):
    """Convert a 'DD/MM' string into a ``datetime.date``.

    Months greater than the current month are assumed to belong to the
    previous year.
    """
    parsed = parse(s, dayfirst=True)
    if parsed.month > datetime.date.today().month:
        # The statement month is ahead of the current month, so the entry
        # must be from last year.
        parsed += relativedelta(years=-1)
    return parsed.date()
def decimal(s):
    """Convert strings such as '9.876,54-' into ``Decimal('-9876.54')``.

    >>> assert decimal('9.876,54-') == Decimal('-9876.54')
    >>> assert decimal('9.876,54 D') == Decimal('-9876.54')
    >>> assert decimal('9.876,54 C') == Decimal('9876.54')
    >>> assert decimal('R$ 9.876,54') == Decimal('9876.54')
    >>> assert decimal('R$ -9.876,54') == Decimal('-9876.54')
    """
    # Brazilian format: '.' is a thousands separator, ',' the decimal mark.
    text = s.replace('.', '').replace(',', '.')
    if text.startswith('R$ '):
        text = text[3:]
    if text.endswith('-'):
        # Trailing minus sign: move it to the front.
        text = '-' + text[:-1]
    elif text.endswith(' D'):
        # 'D' (debit) suffix means a negative amount.
        text = '-' + text[:-2]
    elif text.endswith(' C'):
        # 'C' (credit) suffix means a positive amount.
        text = text[:-2]
    return Decimal(text)
def is_balance(s):
    """Return True when *s* is a balance entry rather than a transaction."""
    balance_labels = {
        'S A L D O',
        '(-) SALDO A LIBERAR',
        'SALDO FINAL DISPONIVEL',
        'SALDO ANTERIOR',
    }
    return s in balance_labels
def statements(iterable):
    """Lazily convert raw statement rows into Python types, skipping balances.

    Input:  (('21/07', 'Lançamento', '9.876,54-'), ...)
    Output: ((datetime.date(2017, 7, 21), 'Lançamento', Decimal('-9876.54')), ...)
    """
    for day, description, amount in iterable:
        if not is_balance(description):
            yield date(day), description, decimal(amount)
def card_statements(iterable):
    """Lazily convert raw card statement rows into Python types.

    Input:  (('21/07', 'Lançamento', '9.876,54 D'), ...)
    Output: ((datetime.date(2017, 7, 21), 'Lançamento', Decimal('-9876.54')), ...)
    """
    for day, description, amount in iterable:
        yield date(day), description, decimal(amount)
def card_summary(iterable):
    """Lazily convert card summary rows into Python types.

    Input:  (('Item do Resumo', 'R$ -9.876,54'), ...)
    Output: (('Item do Resumo', Decimal('-9876.54')), ...)
    """
    for label, amount in iterable:
        yield label, decimal(amount)
|
gis-support/DIVI-QGIS-Plugin | widgets/ImageViewerQt.py | Python | gpl-2.0 | 9,390 | 0.005538 | # -*- coding: utf-8 -*-
""" ImageViewer.py: PyQt image viewer widget for a QPixmap in a QGraphicsView scene with mouse zooming and panning. """
import os.path
from PyQt5.QtCore import Qt, QRectF, pyqtSignal, QT_VERSION_STR
from PyQt5.QtGui import QImage, QPixmap, QPainterPath, QWheelEvent
from PyQt5.QtWidgets import QGraphicsView, QGraphicsScene, QFileDialog
__author__ = "Marcel Goldschen-Ohm <marcel.goldschen@gmail.com>"
__version__ = '0.9.0'
class ImageViewerQt(QGraphicsView):
    """ PyQt image viewer widget for a QPixmap in a QGraphicsView scene with mouse zooming and panning.
    Displays a QImage or QPixmap (QImage is internally converted to a QPixmap).
    To display any other image format, you must first convert it to a QImage or QPixmap.
    Some useful image format conversion utilities:
        qimage2ndarray: NumPy ndarray <==> QImage    (https://github.com/hmeine/qimage2ndarray)
        ImageQt: PIL Image <==> QImage  (https://github.com/python-pillow/Pillow/blob/master/PIL/ImageQt.py)
    Mouse interaction:
        Left mouse button drag: Pan image.
        Right mouse button drag: Zoom box.
        Right mouse button doubleclick: Zoom to show entire image.
    """

    # Mouse button signals emit image scene (x, y) coordinates.
    # !!! For image (row, column) matrix indexing, row = y and column = x.
    leftMouseButtonPressed = pyqtSignal(float, float)
    rightMouseButtonPressed = pyqtSignal(float, float)
    leftMouseButtonReleased = pyqtSignal(float, float)
    rightMouseButtonReleased = pyqtSignal(float, float)
    leftMouseButtonDoubleClicked = pyqtSignal(float, float)
    rightMouseButtonDoubleClicked = pyqtSignal(float, float)
    def __init__(self):
        """ Create an empty viewer with an attached QGraphicsScene. """
        QGraphicsView.__init__(self)

        # Image is displayed as a QPixmap in a QGraphicsScene attached to this QGraphicsView.
        self.scene = QGraphicsScene()
        self.setScene(self.scene)

        # Store a local handle to the scene's current image pixmap.
        self._pixmapHandle = None

        # Image aspect ratio mode.
        # !!! ONLY applies to full image. Aspect ratio is always ignored when zooming.
        #   Qt.IgnoreAspectRatio: Scale image to fit viewport.
        #   Qt.KeepAspectRatio: Scale image to fit inside viewport, preserving aspect ratio.
        #   Qt.KeepAspectRatioByExpanding: Scale image to fill the viewport, preserving aspect ratio.
        self.aspectRatioMode = Qt.KeepAspectRatio

        # Scroll bar behaviour.
        #   Qt.ScrollBarAlwaysOff: Never shows a scroll bar.
        #   Qt.ScrollBarAlwaysOn: Always shows a scroll bar.
        #   Qt.ScrollBarAsNeeded: Shows a scroll bar only when zoomed.
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)

        # Stack of QRectF zoom boxes in scene coordinates.
        self.zoomStack = []

        # Flags for enabling/disabling mouse interaction.
        self.canZoom = True
        self.canPan = True
    def hasImage(self):
        """ Returns whether or not the scene contains an image pixmap.
        :rtype: bool
        """
        return self._pixmapHandle is not None
def clearImage(self):
""" Removes the current image pixmap from the scene if it exists.
"""
if self.hasImage():
self.scene.removeItem(self._pixmapHandle)
self._pixmapHandle = None
def pixmap(self):
""" Returns the scene's current image pixmap as a QPixmap, or else None if no image exists.
:rtype: QPixmap | None
"""
if self.hasImage():
return self._pixmapHandle.pixmap()
return None
def image(self):
""" Returns the scene's current image pixmap as a QImage, or else None if no image exists.
:rtype: QImage | None
"""
if self.hasImage():
return self._pixmapHandle.pixmap().toImage()
return None
def setImage(self, image):
""" Set the scene's current image pixmap to the input QImage or QPixmap.
Raises a RuntimeError if the input image has type other than QImage or QPixmap.
:type image: QImage | QPixmap
"""
if type(image) is QPixmap:
pixmap = image
elif type(image) is QImage:
pixmap = QPixmap.fromImage(image)
elif image is None:
pixmap = QPixmap()
else:
raise RuntimeError("ImageViewer.setImage: Argument must be a QImage or QPixmap.")
if self.hasImage():
self._pixmapHandle.setPixmap(pixmap)
else:
self._pixmapHandle = self.scene.addPixmap(pixmap)
self.setSceneRect(QRectF(pixmap.rect())) # Set scene size to image size.
self.zoomStack = []
self.updateViewer()
    def loadImageFromFile(self, fileName=""):
        """ Load an image from file.
        Without any arguments, loadImageFromFile() will popup a file dialog to choose the image file.
        With a fileName argument, loadImageFromFile(fileName) will attempt to load the specified image file directly.
        """
        if len(fileName) == 0:
            # Qt4 returns a plain string; Qt5 returns (fileName, selectedFilter).
            if QT_VERSION_STR[0] == '4':
                fileName = QFileDialog.getOpenFileName(self, "Open image file.")
            elif QT_VERSION_STR[0] == '5':
                fileName, dummy = QFileDialog.getOpenFileName(self, "Open image file.")
        if len(fileName) and os.path.isfile(fileName):
            image = QImage(fileName)
            self.setImage(image)
    def updateViewer(self):
        """ Show current zoom (if showing entire image, apply current aspect ratio mode).
        """
        if not self.hasImage():
            return
        if len(self.zoomStack) and self.sceneRect().contains(self.zoomStack[-1]):
            # Top of the zoom stack is a valid sub-rect of the scene: show it.
            #self.fitInView(self.zoomStack[-1], Qt.IgnoreAspectRatio)  # Show zoomed rect (ignore aspect ratio).
            self.fitInView(self.zoomStack[-1], self.aspectRatioMode)  # Show zoomed rect (ignore aspect ratio).
        else:
            self.zoomStack = []  # Clear the zoom stack (in case we got here because of an invalid zoom).
            self.fitInView(self.sceneRect(), self.aspectRatioMode)  # Show entire image (use current aspect ratio mode).
    def resizeEvent(self, event):
        """ Maintain current zoom on resize.
        """
        # Re-fit the current view; Qt has already resized the widget.
        self.updateViewer()
    def mousePressEvent(self, event):
        """ Start mouse pan or zoom mode.
        """
        scenePos = self.mapToScene(event.pos())
        if event.button() == Qt.LeftButton:
            # Left button: pan via Qt's built-in scroll-hand drag mode.
            if self.canPan:
                self.setDragMode(QGraphicsView.ScrollHandDrag)
            self.leftMouseButtonPressed.emit(scenePos.x(), scenePos.y())
        elif event.button() == Qt.RightButton:
            # Right button: rubber-band selection used as a zoom box.
            if self.canZoom:
                self.setDragMode(QGraphicsView.RubberBandDrag)
            self.rightMouseButtonPressed.emit(scenePos.x(), scenePos.y())
        # Let the base class run its own press handling (starts the drag).
        QGraphicsView.mousePressEvent(self, event)
    def mouseReleaseEvent(self, event):
        """ Stop mouse pan or zoom mode (apply zoom if valid).
        """
        # Base class must run first so the rubber-band selection is finalized.
        QGraphicsView.mouseReleaseEvent(self, event)
        scenePos = self.mapToScene(event.pos())
        if event.button() == Qt.LeftButton:
            self.setDragMode(QGraphicsView.NoDrag)
            self.leftMouseButtonReleased.emit(scenePos.x(), scenePos.y())
        elif event.button() == Qt.RightButton:
            if self.canZoom:
                #viewBBox = self.zoomStack[-1] if len(self.zoomStack) else self.sceneRect()
                viewBBox = self.sceneRect()
                # Clamp the rubber-band box to the scene before zooming to it.
                selectionBBox = self.scene.selectionArea().boundingRect().intersected(viewBBox)
                self.scene.setSelectionArea(QPainterPath())  # Clear current selection area.
                if selectionBBox.isValid() and (selectionBBox != viewBBox):
                    self.zoomStack.append(selectionBBox)
                    self.updateViewer()
            self.setDragMode(QGraphicsView.NoDrag)
            self.rightMouseButtonReleased.emit(scenePos.x(), scenePos.y())
def mouseDoubleClickEvent(self, event):
""" Show entire image.
"""
scenePos = self.mapToScene(event.pos())
|
15klli/WeChat-Clone | main/plugins/weather.py | Python | mit | 4,482 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import app, celery
import hashlib
import hmac
import time
import requests
from . import wechat_custom
@celery.task
def get(openid):
    """Fetch the weather and air-quality forecast and push it to the user."""
    content = []
    current_hour = time.strftime('%H')
    # Air quality first; failure here is non-fatal (item simply omitted).
    try:
        pm_25_info = get_pm2_5_info()
    except Exception, e:
        app.logger.warning(u'PM2.5 API 超时出错:%s' % e)
    else:
        title_aqi = u'空气质量等级:%s\n%s点的平均PM2.5:%s' % (
            pm_25_info[0]['quality'], current_hour, pm_25_info[0]['pm2_5'])
        content.append({"title": title_aqi})
    # Weather forecast; failure here aborts the news message and sends a
    # plain-text apology instead.
    try:
        weather_info = get_weather_info()
    except Exception, e:
        app.logger.warning(u'天气 API 超时出错:%s' % e)
        content = u"天气查询超时\n请稍后重试"
        wechat_custom.send_text(openid, content)
    else:
        # One news card per forecast day (today / tomorrow / day after).
        for index, data in enumerate(weather_info):
            title_weather = u'%s %s℃\n%s %s ' % (
                day_of_week(offset=index),
                data['temp'],
                data['weather'],
                data['wind'])
            content.append({"title": title_weather, "picurl": data['img_url']})
        wechat_custom.send_news(openid, content)
def get_weather_info():
    """
    Query the weather forecast.

    API details: http://openweather.weather.com.cn/Home/Help/Product.html
    Weather icon downloads: http://openweather.weather.com.cn/Home/Help/icon/iid/10.html
    """
    private_key = app.config['WEATHER_PRIVATE_KEY']
    appid = app.config['WEATHER_APPID']
    appid_six = appid[:6]
    areaid = '101281601'  # Area code for Dongguan
    date = time.strftime('%Y%m%d%H%M')
    # Build the request URL per the API docs: the full URL (with the complete
    # appid) is HMAC-SHA1 signed with the private key, then the request is
    # sent with the truncated appid plus the base64 signature.
    public_key = 'http://open.weather.com.cn/data/?' +\
        'areaid=%s&type=forecast_v&date=%s&appid=%s' % (areaid, date, appid)
    key = hmac.new(private_key, public_key, hashlib.sha1).digest().encode(
        'base64').rstrip()
    url = 'http://open.weather.com.cn/data/?' +\
        'areaid=%s&type=forecast_v&date=%s&appid=%s&key=%s' % (
            areaid, date, appid_six, key)
    res = requests.get(url, timeout=7)
    weather_info = res.json()['f']['f1']
    # Convert the coded response into readable data.
    img_url = "http://gxgk-wechat.b0.upaiyun.com/weather/day/%s.jpeg"
    data = []
    for weather in weather_info:
        # At night the daytime ('fa'/'fc'/'fe') fields of the current day are
        # empty, so fall back to the night-time ('fb'/'fd'/'ff') fields.
        if weather['fa'] == u'':
            temp = weather['fd']
            weather_code = weather['fb']
            wind_code = weather['ff']
        else:
            temp = weather['fc']
            weather_code = weather['fa']
            wind_code = weather['fe']
        data.append({
            "temp": temp,
            "weather": weather_code_to_text(weather_code),
            "wind": wind_code_to_text(wind_code),
            "img_url": img_url % weather_code
        })
    return data
def get_pm2_5_info():
    """
    Query air quality (PM2.5).

    API details: http://www.pm25.in/api_doc
    """
    url = 'http://www.pm25.in/api/querys/pm2_5.json?' +\
        'city=dongguan&token=%s&stations=no' % app.config['PM2_5_TOKEN']
    res = requests.get(url, timeout=7)
    # Returns a list of station readings; callers use the first entry.
    return res.json()
def day_of_week(offset=0):
    """Return a Chinese label for today plus *offset* days (offset in 0..2)."""
    prefixes = (u'今天', u'明天', u'后天')
    # Nine entries so that weekday index + offset (max 6 + 2) stays in range.
    names = (u'周日', u'周一', u'周二', u'周三', u'周四', u'周五', u'周六',
             u'周日', u'周一')
    index = int(time.strftime('%w')) + offset
    return prefixes[offset] + names[index]
def weather_code_to_text(code):
    """Translate a numeric weather code (int or str) into its Chinese name."""
    weather_names = (u'晴', u'多云', u'阴', u'阵雨', u'雷阵雨', u'雷阵雨伴有冰雹',
                     u'雨夹雪', u'小雨', u'中雨', u'大雨', u'暴雨', u'大暴雨',
                     u'特大暴雨', u'阵雪', u'小雪', u'中雪', u'大雪', u'暴雪', u'雾',
                     u'冻雨', u'沙尘暴', u'小到中雨', u'中到大雨', u'大到暴雨',
                     u'暴雨到大暴雨', u'大暴雨到特大暴雨', u'小到中雪', u'中到大雪',
                     u'大到暴雪', u'浮尘', u'扬沙', u'强沙尘暴', u'霾', u'无')
    return weather_names[int(code)]
def wind_code_to_text(code):
    """Translate a numeric wind-direction code (int or str) into Chinese text."""
    wind_names = (u'微风', u'东北风', u'东风', u'东南风', u'南风', u'西南风',
                  u'西风', u'西北风', u'北风', u'旋转风')
    return wind_names[int(code)]
bohdon/maya-pulse | src/pulse/scripts/pulse/actions/joints/twist_joints_pulseaction.py | Python | mit | 3,957 | 0.004802 | import pymel.core as pm
import pulse.nodes
import pulse.utilnodes
from pulse.buildItems import BuildAction, BuildActionError
class TwistJointsAction(BuildAction):
    """Build action that drives a twist joint by blending its parent matrix
    with a version aligned to a reference (align) joint."""

    def validate(self):
        # Both joint references must be set before the action can run.
        if not self.twistJoint:
            raise BuildActionError('twistJoint must be set')
        if not self.alignJoint:
            raise BuildActionError('alignJoint must be set')
def run(self):
twist_blend = None
if self.twistControls:
# add attr to first control, then add proxy to the rest
for twist_ctl in self.twistControls:
if twist_blend is None:
twist_ctl.addAttr('twistBlend', at='double', min=0, max=1, keyable=True, defaultValue=1)
twist_blend = twist_ctl.attr('twistBlend')
else:
twist_ctl.addAttr('twistBlend', proxy=twist_blend)
else:
# add attr directly to joint, not usually desired because this can export with the joints as a curve
self.twistJoint.addAttr('twistBlend', at='double', min=0, max=1, keyable=True, defaultValue=1)
twist_blend = self.twistJoint.attr('twistBlend')
# get parent world matrix
parent_mtx = self.getParentMatrix(self.twistJoint)
if self.alignToRestPose:
# Use the current world matrix of the align joint to calculate and store the resting position as
# an offset applied to the joints parent node. The align joint's parentMatrix attr can't be used here,
# since it may be driven directly by a control, and would then be equal to the animated world matrix.
| # TODO: might want to expose an option for | selecting the parent node explicitly.
align_parent = self.alignJoint.getParent()
if align_parent:
# get align joint matrix relative to it's parent,
# don't trust the local matrix since inheritsTransform may not be used
offset_mtx = self.alignJoint.wm.get() * align_parent.wim.get()
align_tgt_mtx = pulse.utilnodes.multMatrix(offset_mtx, align_parent.wm)
pass
else:
# no parent node, just store the current 'resting' matrix in a multMatrix
align_tgt_mtx = pulse.utilnodes.multMatrix(self.alignJoint.wm.get())
else:
# use the align joint world matrix directly
align_tgt_mtx = self.alignJoint.wm
# create aligned version of the parent matrix
aligned_pm = pulse.utilnodes.alignMatrixToDirection(parent_mtx, self.forwardAxis, self.alignAxis,
self.alignAxis, align_tgt_mtx)
# blend aligned matrix with default parent matrix
blend_mtx = pulse.utilnodes.blendMatrix(parent_mtx, aligned_pm, twist_blend)
pulse.nodes.connectMatrix(blend_mtx, self.twistJoint, pulse.nodes.ConnectMatrixMethod.CREATE_OFFSET)
def getParentMatrix(self, node):
"""
Return the parent world matrix to use for a node, checking first for inputs to offsetParentMatrix,
then for a parent node if available. Does not support nodes that have a connection to offsetParentMatrix
while also having inheritsTransform set to True.
"""
# look for and use input to offsetParentMatrix if available
offset_mtx_inputs = node.offsetParentMatrix.inputs(plugs=True)
if offset_mtx_inputs:
if node.inheritsTransform.get():
raise BuildActionError(f"{node} cannot have an offsetParentMatrix connection "
"while also having inheritsTransform set to True")
return offset_mtx_inputs[0]
# get matrix from parent node
parent_node = node.getParent()
if parent_node:
return parent_node.wm
# no parent, use identity matrix
return pm.dt.Matrix()
|
tdyas/pants | src/python/pants/base/parse_context.py | Python | apache-2.0 | 3,532 | 0.003398 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import functools
import threading
class Storage(threading.local):
    """Thread-local registry of objects parsed from the current BUILD file.

    Tracks every registered object in insertion order, and additionally
    indexes named objects for lookup/deduplication.
    """

    def __init__(self, rel_path):
        self.clear(rel_path)

    def clear(self, rel_path):
        """Reset the registry for a new parse rooted at `rel_path`."""
        self.rel_path = rel_path
        self.objects_by_name = {}
        self.objects = []

    def add(self, obj, name=None):
        """Record `obj`, also indexing it by `name` when one is given."""
        if name is not None:
            # NB: `src/python/pants/engine/mapper.py` will detect an overwritten object later.
            self.objects_by_name[name] = obj
        self.objects.append(obj)

    def add_if_not_exists(self, name, obj_creator):
        """Return the object indexed under `name`, creating it lazily via `obj_creator`."""
        if name is None:
            raise ValueError("Method requires a `name`d object.")
        existing = self.objects_by_name.get(name)
        if existing is not None:
            return existing
        created = obj_creator()
        self.objects_by_name[name] = created
        return created
class ParseContext:
    """The build-file context that context aware objects - aka BUILD macros - operate against.

    Macros may mutate any field of the ParseContext, so fields should only be
    read inside a macro's `__call__` method, never captured in its `__init__`.
    """

    def __init__(self, rel_path, type_aliases):
        """Create a ParseContext.

        :param rel_path: The (build file) path that the parse is currently operating on: initially None.
        :param type_aliases: A dictionary of alias name strings or alias classes to a callable
                             constructor for the alias.
        """
        self._type_aliases = type_aliases
        self._storage = Storage(rel_path)

    def create_object(self, alias, *args, **kwargs):
        """Construct the type registered under `alias` with the given args and kwargs.

        NB: `alias` may be the aliased type itself when that type is known.

        :API: public
        :param alias: Either the type alias or the type itself.
        :type alias: string|type
        :param *args: Passed through to the underlying callable object.
        :param **kwargs: Passed through to the underlying callable object.
        :returns: The created object.
        """
        constructor = self._type_aliases.get(alias)
        if constructor is None:
            raise KeyError("There is no type registered for alias {0}".format(alias))
        return constructor(*args, **kwargs)

    def create_object_if_not_exists(self, alias, name=None, *args, **kwargs):
        """Construct the type registered under `alias`, unless `name` was already created.

        NB: `alias` may be the aliased type itself when that type is known.

        :API: public
        :param alias: Either the type alias or the type itself.
        :type alias: string|type
        :param *args: Passed through to the underlying callable object.
        :param **kwargs: Passed through to the underlying callable object.
        :returns: The created object, or an existing object with the same `name`.
        """
        if name is None:
            raise ValueError("Method requires an object `name`.")
        make = functools.partial(self.create_object, alias, *args, name=name, **kwargs)
        return self._storage.add_if_not_exists(name, make)

    @property
    def rel_path(self):
        """Relative path from the build root to the BUILD file the context aware object is called in.

        :API: public
        :rtype string
        """
        return self._storage.rel_path
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.