repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
tb0hdan/voiceplay | setup.py | Python | unlicense | 3,908 | 0.004094 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
import platform
import re
import sys
import subprocess
from distutils.sysconfig import get_python_lib
from setuptools import setup, find_packages
from voiceplay import (__title__,
__version__,
__description__,
__author__,
__author_email__,
__copyright__,
| __file__ as vpfile)
if os.path.exists('README.rst'):
readme = io.open('README.rst', mode='r', encoding='utf8' | ).read()
else:
readme = ''
system_specific_packages = ['pyobjc'] if platform.system() == 'Darwin' else ['pyfestival', 'Skype4Py']
# hook to pip install for package sideloading
# broken pyaudio package
# snowboy extension
if sys.argv[1] in ['bdist_wheel', 'install']:
# pyaudio
if platform.system() == 'Darwin':
subprocess.call(['pip', 'install', '--global-option=build_ext',
'--global-option=-I/usr/local/include',
'--global-option=-L/usr/local/lib', 'pyaudio'])
else:
subprocess.call(['pip', 'install', 'pyaudio'])
# snowboy
from voiceplay.utils.snowboydownloader import SnowboyDownloader
sd = SnowboyDownloader()
# get_python_lib returns different directory when installing packages system-wide
# TODO: FIX THIS!
sd.download_and_unpack(os.path.join(get_python_lib(), 'voiceplay', 'extlib', 'snowboydetect'))
sd.download_and_unpack(os.path.join(get_python_lib().replace('/usr/lib', '/usr/local/lib'), 'voiceplay', 'extlib', 'snowboydetect'))
setup(name='voiceplay',
version=__version__,
description=__description__,
author=__author__,
author_email=__author_email__,
url='https://github.com/tb0hdan/voiceplay',
packages=find_packages(exclude=['snowboy', 'vlcpython', 'docs', 'tests*']),
package_data={'': ['snowboydetect/resources/*.pmdl', 'snowboydetect/resources/*.res',
'snowboydetect/*.py', 'config/*.sample']},
zip_safe=False,
license=__copyright__,
keywords='voiceplay music playlists vlc player',
long_description=readme,
dependency_links=['https://github.com/tb0hdan/mplayer.py/tarball/master#egg=mplayer.py-0.7.0beta'],
install_requires=['Babel', 'beautifulsoup4', 'colorama', 'dailymotion', 'filemagic', 'flake8',
'oauth2client>=1.5.2,<4.0.0', 'requests', 'lxml', 'flask-classy', 'flask-restful',
'future', 'gntp', 'dropbox',
'google-api-python-client', 'ipython>=5.0.0,<6.0.0', 'kaptan', 'monotonic', 'gunicorn',
'musicbrainzngs', 'mutagen', 'mplayer.py>=0.7.0beta',
'piprot', 'pocketsphinx', 'pony',
'pylast', 'pylint', 'pytest', 'pytest-coverage', 'PyVimeo', 'rl',
'SpeechRecognition', 'tqdm', 'youtube-dl', 'zeroconf'] + system_specific_packages,
entry_points={'console_scripts': [
'voiceplay=voiceplay.cli:main']},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: Public Domain',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet',
'Topic :: Multimedia :: Sound/Audio :: Players',
'Topic :: Terminals',
],
)
|
christianrenier/dynamic-dns-updater | __main__.py | Python | mit | 3,280 | 0.019512 | import ConfigParser
import os
import sys
import utils
## Create global names and functions ##
# Load file locations and configuration options
site_list_location = os.path.dirname(__file__) + '/sitelist.txt'
parser = ConfigParser.RawConfigParser()
parser.read(os.path.dirname(__file__) + '/config.cfg')
general = dict(parser.items('general'))
gmail_account = dict(parser.items('gmail_account'))
write_error = parser.getboolean('logging', 'log_errors')
write_change = parser.getboolean('logging', 'log_changes')
write_unchanged = parser.getboolean('logging', 'log_unchanged')
receiver = parser.get('mailing', 'mail_receivers')
mail_error = parser.getboolean('mailing', 'send_errors')
mail_change = parser.getboolean('mailing', 'send_changes')
# Name of this tool
tool_name = 'Dynamic DNS Updater'
# Tracks if a logger was created
logger = Fa | lse
# Tracks if a mailer was created
mailer = False
# Dictionary of error codes and their corresponding messages
error_messages = {
'invalid_login' : 'Your Gmail username or password is incorrect.',
'logger_missing' : 'Problem writing to log file.',
'read_cache' : 'Problem reading from IP cache.',
'read_sitelist' : 'Problem reading the sitelist.',
'empty_url' : 'You have not provided an update URL.',
'check_ip' : 'Problem checking your IP address.',
'upd | ate_dns' : 'Problem updating your Dynamic DNS.'
}
# Handles logging and mailing of errors, as enabled by the user
def error_processor(code):
if write_error and logger: logger.log_error(error_messages[code])
if mail_error and mailer:
mailer.send_error(receiver, error_messages[code])
print '%s: Error - %s' % (tool_name, error_messages[code])
sys.exit()
## Create instances of utility classes ##
# Only create logger object if the user has chosen to log an event
if write_error or write_change or write_unchanged:
try: logger = utils.logger.Logger(general['log_file'])
except: logger = False
# Only create mailer object if user has chosen to mail an event
if mail_error or mail_change:
try: mailer = utils.mailer.Mailer(
gmail_account['gmail_user'],
gmail_account['gmail_password'])
except: error_processor('invalid_login')
# Notify user by mail that initializing a logger has failed, if they
# enabled any logging of events
if not logger and mailer:
if write_error or write_change or write_unchanged:
error_processor('logger_missing')
try: cacher = utils.cacher.Cacher(general['ip_cache_file'])
except: error_processor('read_cache')
try: checker = utils.checker.Checker(site_list_location)
except: error_processor('read_sitelist')
try: updater = utils.updater.Updater(general['update_urls'])
except: error_processor('empty_url')
## Main ##
old_ip = cacher.get_ip()
try: current_ip = checker.get_ip()
except: error_processor('check_ip')
# If IP has not changed, exit the program
if old_ip == current_ip:
if write_unchanged:
logger.log_no_change(old_ip)
print '%s: %s remains unchanged.' % (tool_name, old_ip)
sys.exit()
try: updater.update_dns()
except: error_processor('update_dns')
cacher.store_ip(current_ip)
print '%s: %s has been updated to %s' % (tool_name, old_ip, current_ip)
if write_change: logger.log_change(old_ip, current_ip)
if mail_change and mailer:
mailer.send_change(receiver, old_ip, current_ip)
|
bdaroz/the-blue-alliance | database/dict_converters/team_converter.py | Python | mit | 1,224 | 0.000817 | from database.dict_converters.converter_base import ConverterBase
class TeamConverter(ConverterBase):
SUBVERSIONS = { # Increment every time a change to the dict is made
3: 4,
}
@classmethod
def _convert(cls, teams, dict_version):
CONVERTERS = {
3: cls.teamsConverter_v3,
}
return CONVERTERS[dict_version](teams)
@classmethod
def teamsConverter_v3(cls, teams):
return map(cls.teamConverter_v3, teams)
@classmethod
def teamConverter_v3(cls, team):
has_nl = team.nl and team.nl.city and team.nl.state_prov and team.nl.country
default_name = "Team {}".format(team.team_number)
team_d | ict = {
'key': team.key.id(),
'team_number': team.team_number,
'nickname': team.nickname if team.nickname else default_n | ame,
'name': team.name if team.name else default_name,
'website': team.website,
'rookie_year': team.rookie_year,
'motto': None,
'home_championship': team.championship_location,
'school_name': team.school_name,
}
team_dict.update(cls.constructLocation_v3(team))
return team_dict
|
qk4l/Flexget | flexget/plugins/modify/convert_magnet.py | Python | mit | 4,421 | 0.002488 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import os
import time
import logging
from flexget import plugin
from flexget.event import event
from flexget.utils.tools import parse_timedelta
from flexget.utils.pathscrub import pathscrub
log = logging.getLogger('convert_magnet')
class ConvertMagnet(object):
"""Convert magnet only entries to a torrent file"""
schema = {
"oneOf": [
# Allow convert_magnet: no form to turn off plugin altogether
{"type": "boolean"},
{
"type": "object",
"properties": {
"timeout": {"type": "string", "format": "interval"},
"force": {"type": "boolean"}
},
"additionalProperties": False
}
]
}
def magnet_to_torrent(self, magnet_uri, destination_folder, timeout):
import libtorrent
params = libtorrent.parse_magnet_uri(magnet_uri)
session = libtorrent.session()
lt_version = [int(v) for v in libtorrent.version.split('.')]
if lt_version > [0,16,13,0]:
# for some reason the info_hash needs to be bytes but it's a struct called sha1_hash
params['info_hash'] = params['info_hash'].to_bytes()
handle = libtorrent.add_magnet_uri(session, magnet_uri, params)
log.debug('Acquiring torrent metadata for magnet %s', magnet_uri)
timeout_value = timeout
while not handle.has_metadata():
time.sleep(0.1)
timeout_value -= 0.1
if timeout_value <= 0:
raise plugin.PluginError('Timed out after {} seconds trying to magnetize'.format(timeout))
log.debug('Metadata acquired')
torrent_info = handle.get_torrent_info()
torrent_file = libtorrent.create_torrent(torrent_info)
torrent_path = pathscrub(os.path.join(destination_folder, torrent_info.name() + ".torrent"))
with open(torrent_path, "wb") as f:
f.write(libtorrent.bencode(torrent_file.generate()))
log.debug('Torrent file wrote to %s', torrent_path)
return torrent_path
def prepare_config(self, config):
if not isinstance(config, dict):
config = {}
| config.setdefault('timeout', '30 seconds')
config.setdefault('force', False)
return config
@plugin.priority(255)
def on_task_start(self, task, config):
if config is False:
r | eturn
try:
import libtorrent # noqa
except ImportError:
raise plugin.DependencyError('convert_magnet', 'libtorrent', 'libtorrent package required', log)
@plugin.priority(130)
def on_task_download(self, task, config):
if config is False:
return
config = self.prepare_config(config)
# Create the conversion target directory
converted_path = os.path.join(task.manager.config_base, 'converted')
timeout = parse_timedelta(config['timeout']).total_seconds()
if not os.path.isdir(converted_path):
os.mkdir(converted_path)
for entry in task.accepted:
if entry['url'].startswith('magnet:'):
entry.setdefault('urls', [entry['url']])
try:
log.info('Converting entry {} magnet URI to a torrent file'.format(entry['title']))
torrent_file = self.magnet_to_torrent(entry['url'], converted_path, timeout)
except (plugin.PluginError, TypeError) as e:
log.error('Unable to convert Magnet URI for entry %s: %s', entry['title'], e)
if config['force']:
entry.fail('Magnet URI conversion failed')
continue
# Windows paths need an extra / prepended to them for url
if not torrent_file.startswith('/'):
torrent_file = '/' + torrent_file
entry['url'] = torrent_file
entry['file'] = torrent_file
# make sure it's first in the list because of how download plugin works
entry['urls'].insert(0, 'file://{}'.format(torrent_file))
@event('plugin.register')
def register_plugin():
plugin.register(ConvertMagnet, 'convert_magnet', api_ver=2)
|
razvan9310/barrelfish | tools/grader/subprocess_timeout.py | Python | mit | 894 | 0.001119 | ##########################################################################
# Copyright (c) 2009-2016 ETH Zurich.
# All rights reserved.
#
# This file is distributed under the terms in the attached LICENSE file.
# If you do not find this file, copies can be found by | writing to:
# ETH Zurich D-INFK, Universitaetstr 6, CH-8092 Zurich. Attn: Systems Group.
##########################################################################
from threading import Timer
# Wait for Popen instance p for timeout seconds and terminate/kill it after
# the tim | eout expires
# Adapted from
# http://stackoverflow.com/questions/1191374/using-module-subprocess-with-timeout
def wait_or_kill(p, timeout=5):
# Kill process if it doesn't voluntarily exit in `timeout` seconds
timer = Timer(timeout, lambda x: x.kill(), [p])
try:
timer.start()
p.wait()
finally:
timer.cancel()
|
dtroyer/dwarf | dwarf/task.py | Python | apache-2.0 | 2,171 | 0 | #!/usr/bin/env python
#
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import time
from threading import Thread
LOG = logging.getLogger(__name__)
_TASKS = {}
class _Task(Thread):
def __init__(self, tid, interval, repeat, func, *args, **kwargs):
super(_Task, self).__init__()
self.tid = tid
self.interval = interval
self.repeat = repeat
self.func = func
self.args = | args
sel | f.kwargs = kwargs
self._stop = False
_TASKS[tid] = self
self.start()
def run(self):
for dummy in range(self.repeat):
if self._stop:
break
retval = self.func(*self.args, **self.kwargs)
if retval is not None:
break
time.sleep(self.interval)
_TASKS.pop(self.tid, None)
def stop(self):
self._stop = True
def start(tid, interval, repeat, func, *args, **kwargs):
"""
Start a new task
"""
LOG.info('start(tid=%s, interval=%s, repeat=%s, func=%s, args=%s, '
'kwargs=%s)', tid, interval, repeat, func.__name__, args, kwargs)
_Task(tid, interval, repeat, func, *args, **kwargs)
def stop(tid):
"""
Stop a running task
"""
LOG.info('stop(tid=%s)', tid)
t = _TASKS.get(tid, None)
if t is not None:
t.stop()
def stop_all(wait=False):
"""
Stop all running tasks
"""
LOG.info('stop_all()')
for tid in _TASKS:
stop(tid)
if wait:
while _TASKS:
time.sleep(0.5)
|
cmvelo/ansible | lib/ansible/executor/module_common.py | Python | gpl-3.0 | 33,348 | 0.002699 | # (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import base64
import imp
import json
import os
import shlex
import zipfile
from io import BytesIO
# from Ansible
from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.utils.unicode import to_bytes, to_unicode
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
from ansible.plugins import strategy
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
REPLACER = b"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
REPLACER_VERSION = b"\"<<ANSIBLE_VERSION>>\""
REPLACER_COMPLEX = b"\"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>\""
REPLACER_WINDOWS = b"# POWERSHELL_COMMON"
REPLACER_JSONARGS = b"<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"
REPLACER_SELINUX = b"<<SELINUX_SPECIAL_FILESYSTEMS>>"
# We could end up writing out parameters with unicode characters so we need to
# specify an encoding for the python source file
ENCODING_STRING = u'# -*- coding: utf-8 -*-'
# we've moved the module_common relative to the snippets, so fix the path
_SNIPPET_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils')
# ******************************************************************************
ZIPLOADER_TEMPLATE = u'''%(shebang)s
%(coding)s
ZIPLOADER_WRAPPER = True # For test-module script to tell this is a ZIPLOADER_WRAPPER
# This code is part of Ansible, but is an independent component.
# The code in this particular templatable string, and this templatable string
# only, is BSD licensed. Modules which end up using this snippet, which is
# dynamically combined together by Ansible still belong to the author of the
# module, and they may assign their own license to the complete work.
#
# Copyright (c), James Cammarata, 2016
# Copyright (c), Toshio Kuratomi, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import base64
import shutil
import zipfile
import tempfile
import subprocess
if sys.version_info < (3,):
bytes = str
PY3 = False
else:
unicode = str
PY3 = True
try:
# Python-2.6+
from io import BytesIO as IOStream
except ImportError:
# Python < 2.6
from StringIO import StringIO as IOStream
ZIPDATA = """%(zipdata)s"""
def invoke_module(module, modlib_path, json_params):
pythonpath = os.environ.get('PYTHONPATH')
if pythonpath:
os.environ['PYTHONPATH'] = ':'.join((modlib_path, pythonpath))
else:
os.environ['PYTHONPATH'] = modlib_path
p = subprocess.Popen([%(interpreter)s, module], env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate(json_params)
if not isinstance(stderr, (bytes, unicode)):
stderr = stderr.read()
if not isinstance(stdout, (bytes, unicode)):
stdout = stdout.read()
if PY3:
sys.stderr.buffer.write(stderr)
sys.stdout.buffer.write(stdout)
else:
sys.stderr.write(stderr)
sys.stdout.write(stdout)
return p.returncode
def debug(command, zipped_mod, json_params):
# The code here normally doesn't run. It's only used for debugging on the
# remote machine.
#
# The subcommands in this function make it easier to debug ziploader
# modules. Here's the basic steps:
#
# Run ansible with the environment variable: ANSIBLE_KEEP_REMOTE_FILES=1 and -vvv
# to save the module file remotely::
# $ ANSIBLE_KEEP_REMOTE_FILES=1 ansible host1 -m ping -a 'data=october' -vvv
#
# Part of the verbose output will tell you where on the remote machine the
# module was written to::
# [...]
# <host1> SSH: EXEC ssh -C -q -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o
# PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o ConnectTimeout=10 -o
# ControlPath=/home/badger/.ansible/cp/ansible-ssh-%%h-%%p-%%r -tt rhel7 '/bin/sh -c '"'"'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8
# LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping'"'"''
# [...]
#
# Login to the remote machine and run the module file via from the previous
# step with the explode subcommand to extract the module payload into
# source files::
# $ ssh host1
# $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping explode
# Module expanded into:
# /home/badger/.ansible/tmp/ansible-tmp-1461173408.08-279692652635227/ansible
#
# You can now edit the source files to instrument t | he code or experiment with
# different parameter values. When you're ready to run the code you've modified
# (instead of the code from the actual zipped module), use the execute subcommand like this::
# $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping execute
# Okay to use __file__ here because we're running from a kept file
basedir = os. | path.join(os.path.abspath(os.path.dirname(__file__)), 'debug_dir')
args_path = os.path.join(basedir, 'args')
script_path = os.path.join(basedir, 'ansible_module_%(ansible_module)s.py')
if command == 'explode':
# transform the ZIPDATA into an exploded directory of code and then
# print the path to the code. This is an easy way for people to look
# at the code on the remote machine for debugging it in that
# environment
z = zipfile.ZipFile(zipped_mod)
for filename in z.namelist():
if filename.startswith('/'):
raise Exception('Something wrong with this module zip file: should not contain absolute paths')
dest_filename = os.path.join(basedir, filename)
if dest_filename.endswith(os.path.sep) and not os.path.exists(dest_fil |
Jvlythical/KoDrive | tests/mock/cli.py | Python | mit | 81 | 0 | from kodrive import syncthing_factory as factory
client = | factory.g | et_handler()
|
MediaKraken/MediaKraken_Deployment | source/testing/test_common/test_common_discid.py | Python | gpl-3.0 | 1,258 | 0 | """
Copyright (C) 2016 Quinn D Granfor <spootdev@gmail.com>
This program is free software; you can redistribu | te it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PU | RPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import sys
sys.path.append('.')
# from common import common_discid
#
#
# # grab discid from default device
# def test_com_discid_default_device():
# """
# Test function
# """
# common_discid.com_discid_default_device()
#
#
# # calculate discid from directory
# @pytest.mark.parametrize(("dir_to_calculate"), [
# ("./cache"),
# ("./cache_fake")])
# def test_com_diskid_caclulate_dir(dir_to_calculate):
# """
# Test function
# """
# common_discid.com_diskid_caclulate_dir(dir_to_calculate)
|
IECCTB/springcloud | node-service/py-app.py | Python | apache-2.0 | 912 | 0.003289 | import httplib
from twisted.web import server, resource
from twisted.internet import reactor, endpoints
class Health(resource.Resource):
isLeaf = True
def render_GET(self, request):
request.setHeader("content-ty | pe", "application/json")
return '{"status | ":"UP"}\n'
class Fortune(resource.Resource):
isLeaf = True
def render_GET(self, request):
conn = httplib.HTTPConnection('localhost', 5678)
conn.request("GET", "/fortunes")
res = conn.getresponse()
fortune = res.read()
request.setHeader("content-type", "text/plain")
return fortune
root = resource.Resource()
root.putChild('health', Health())
root.putChild('', Fortune())
endpoints.serverFromString(reactor, "tcp:5680").listen(server.Site(root))
reactor.run()
# FROM: https://gist.github.com/spencergibb/4f4b56c0e31c300531e9
# FROM: https://github.com/spencergibb/oscon2015 |
lukaselmer/ethz-data-mining | 4-bandit/code/policyLinUCBVectorizedTimestamp.py | Python | mit | 4,092 | 0.014418 | #!/usr/bin/env python2.7
import numpy as np
import random
import time
import itertools
# Implementation of Linear UCB
class LinUCB:
# all_articles = []
all_M = None
all_M_inv = None
all_b = None
all_w = None
mapping = {}
keyList=None
firstTS = None
articleSize = 1
totalLine = 0
alpha = 0.2
current_article = None # current recommendation
current_user = None # user for which the article was recommended
def set_articles(self, articles):
print 'We are using an alpha of: %f \n' %(self.alpha)
print 'VECTORIZED VERSION!!!!!\n'
self.keyList = np.array(articles.keys())
#self.valueList = np.array(articles.values())
self.articleSize = self.keyList.size
self.all_w = np.zeros((6,self.articleSize))
M = np.identity(6)
self.all_M = np.tile(M,(1,self.articleSize))
self.all_M_inv = np.tile(M,(1,self.articleSize))
self.all_b = np.zeros((6,self.articleSize))
self.firstTS = np.zeros((1,self.articleSize))
for idx, article in enumerate(articles.keys()):
self.mapping[article] = idx
def recommend(self, timestamp, user_features, articles):
user_features = np.reshape(user_features, (6, 1))
indicesOfArticles = [self.mapping[article] for article in articles]
#exploit part
w_x = self.all_w[:,indicesOfArticles]
exploitPart=np.dot(w_x.T,user_features)
#explorePart
indicesForM_inv = [range(index*6,index*6+6) for index in indicesOfArticles]
indicesForM_inv = list(itertools.chain(*indicesForM_inv))
allM_inv = self.all_M_inv[:,indicesForM_inv]
explorePart = np.dot((np.dot(user_features.T,allM_inv)).reshape(len(articles),6),user_features)
explorePart = self.alpha*np.power(explorePart,0.5)
#time part
self.firstTS[self.firstTS[:,indicesOfArticles] == 0] = ti | mestamp-1.001 #update all which are new
timepart = [timestamp]*len(articles) - self.firstTS[:,indicesOfArticles]
timepart = 1/np.log(timepart)
UCB = (exploitPart + explorePart + timepart.T).flatten()
#print UCB
#bestArticlesIndices = UCB==max(UCB)
#articlesArray = np.array(articles)
| bestArticle=articles[np.argmax(UCB)]
#if sum(bestArticlesIndices) == 1:
# bestArticle = articlesArray[bestArticlesIndices][0]
#else:
# bestArticle = random.choice(articlesArray[bestArticlesIndices])
self.current_user = user_features
self.current_article = bestArticle
return bestArticle
def update(self, reward):
if reward == 0 or reward == 1:
self.totalLine += 1
#start = time.time()
article = self.current_article
user = self.current_user
indexForArticle = np.zeros((self.articleSize),dtype=bool)
indexForArticle[self.mapping[article]] = True
M = self.all_M[:,indexForArticle.repeat(6)]
b = self.all_b[:,indexForArticle]
self.all_M[:,indexForArticle.repeat(6)] = M + np.dot(user, user.T)
self.all_b[:,indexForArticle] = b + reward * user
# precompute M^-1 and w for UCB
self.all_M_inv[:,indexForArticle.repeat(6)] = np.linalg.inv(self.all_M[:,indexForArticle.repeat(6)])
self.all_w[:,indexForArticle] = np.dot(self.all_M_inv[:,indexForArticle.repeat(6)], self.all_b[:,indexForArticle])
#end = time.time()
#print end - start
linucb = LinUCB()
# Evaluator will call this function and pass the article features.
# Check evaluator.py description for details.
def set_articles(art):
linucb.set_articles(art)
# This function will be called by the evaluator.
# Check task description for details.
def update(reward):
linucb.update(reward)
# This function will be called by the evaluator.
# Check task description for details.
def reccomend(timestamp, user_features, articles):
return linucb.recommend(timestamp, user_features, articles) |
ppanczyk/ansible | lib/ansible/playbook/task.py | Python | gpl-3.0 | 17,944 | 0.001783 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without | even the implied | warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.block import Block
from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['Task']
class Task(Base, Conditional, Taggable, Become):
"""
A task is a language feature that represents a call to a module, with given arguments and other parameters.
A handler is a subclass of a task.
Usage:
Task.load(datastructure) -> Task
Task.something(...)
"""
# =================================================================================
# ATTRIBUTES
# load_<attribute_name> and
# validate_<attribute_name>
# will be used if defined
# might be possible to define others
_args = FieldAttribute(isa='dict', default=dict())
_action = FieldAttribute(isa='string')
_async = FieldAttribute(isa='int', default=0)
_changed_when = FieldAttribute(isa='list', default=[])
_delay = FieldAttribute(isa='int', default=5)
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool', default=False)
_failed_when = FieldAttribute(isa='list', default=[])
_loop = FieldAttribute(isa='string', private=True, inherit=False)
_loop_args = FieldAttribute(isa='list', private=True, inherit=False)
_loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
_name = FieldAttribute(isa='string', default='')
_notify = FieldAttribute(isa='list')
_poll = FieldAttribute(isa='int', default=10)
_register = FieldAttribute(isa='string')
_retries = FieldAttribute(isa='int', default=3)
_until = FieldAttribute(isa='list', default=[])
def __init__(self, block=None, role=None, task_include=None):
''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
self._role = role
self._parent = None
if task_include:
self._parent = task_include
else:
self._parent = block
super(Task, self).__init__()
def get_path(self):
''' return the absolute path of the task with its line number '''
path = ""
if hasattr(self, '_ds') and hasattr(self._ds, '_data_source') and hasattr(self._ds, '_line_number'):
path = "%s:%s" % (self._ds._data_source, self._ds._line_number)
return path
def get_name(self):
''' return the name of the task '''
if self._role and self.name and ("%s : " % self._role._role_name) not in self.name:
return "%s : %s" % (self._role.get_name(), self.name)
elif self.name:
return self.name
else:
if self._role:
return "%s : %s" % (self._role.get_name(), self.action)
else:
return "%s" % (self.action,)
def _merge_kv(self, ds):
if ds is None:
return ""
elif isinstance(ds, string_types):
return ds
elif isinstance(ds, dict):
buf = ""
for (k, v) in iteritems(ds):
if k.startswith('_'):
continue
buf = buf + "%s=%s " % (k, v)
buf = buf.strip()
return buf
@staticmethod
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = Task(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def __repr__(self):
''' returns a human readable representation of the task '''
if self.get_name() == 'meta':
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()
def _preprocess_loop(self, ds, new_ds, k, v):
''' take a lookup plugin name and store it correctly '''
loop_name = k.replace("with_", "")
if new_ds.get('loop') is not None:
raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
if v is None:
raise AnsibleError("you must specify a value when using %s" % k, obj=ds)
new_ds['loop'] = loop_name
new_ds['loop_args'] = v
def preprocess_data(self, ds):
'''
tasks are especially complex arguments so need pre-processing.
keep it short.
'''
assert isinstance(ds, dict), 'ds (%s) should be a dict but was a %s' % (ds, type(ds))
# the new, cleaned datastructure, which will have legacy
# items reduced to a standard structure suitable for the
# attributes of the task class
new_ds = AnsibleMapping()
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
# value over to _raw_params (assuming it is empty)
if action in ('command', 'shell', 'script'):
if 'cmd' in args:
if args.get('_raw_params', '') != '':
raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
" Please put everything in one or the other place.", obj=ds)
args['_raw_params'] = args.pop('cmd')
new_ds['action'] = action
new_ds['args'] = args
new_ds['delegate_to'] = delegate_to
# we handle any 'vars' specified in the ds here, as we may
# be adding things to them below (special handling for includes).
# When that deprecated feature is removed, this can be too.
if 'vars' in ds:
# _load_vars is defined in Base, and is used to load a dictionary
# or list of dictionaries in a standard way
new_ds['vars'] = self._load_vars(None, ds.get('vars'))
else:
new_ds['vars'] = dict()
for (k, v) in iteritems(ds):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
# we don't want to re-assign these values, which were |
enthought/etsproxy | enthought/qt/QtScript.py | Python | bsd-3-clause | 48 | 0 | # proxy module
fr | om pyface.q | t.QtScript import *
|
MartinHjelmare/home-assistant | homeassistant/components/rpi_gpio/switch.py | Python | apache-2.0 | 2,268 | 0 | """Allows to configure a switch using RPi GPIO."""
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
from homeassistant.components import rpi_gpio
from homeassistant.const import DEVICE_DEFAULT_NAME
from homeassistant.helpers.entity import ToggleEntity
import homeassistant.helpers.config_validation as c | v
_LOGGER = logging.getLogger(__name__)
CONF_PULL_MODE = 'pull_mode'
CONF_PORTS = 'ports'
CONF_INVERT_LOGIC = 'invert_logic'
DEFAULT_INVERT_LOGIC = False
_SWITCHES_SCHEMA = vol.Schema({
cv.positive_int: cv.string,
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_PORTS): _SWITCHES_SCHEMA,
vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
| """Set up the Raspberry PI GPIO devices."""
invert_logic = config.get(CONF_INVERT_LOGIC)
switches = []
ports = config.get(CONF_PORTS)
for port, name in ports.items():
switches.append(RPiGPIOSwitch(name, port, invert_logic))
add_entities(switches)
class RPiGPIOSwitch(ToggleEntity):
"""Representation of a Raspberry Pi GPIO."""
def __init__(self, name, port, invert_logic):
"""Initialize the pin."""
self._name = name or DEVICE_DEFAULT_NAME
self._port = port
self._invert_logic = invert_logic
self._state = False
rpi_gpio.setup_output(self._port)
rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0)
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def turn_on(self, **kwargs):
"""Turn the device on."""
rpi_gpio.write_output(self._port, 0 if self._invert_logic else 1)
self._state = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
rpi_gpio.write_output(self._port, 1 if self._invert_logic else 0)
self._state = False
self.schedule_update_ha_state()
|
ldoktor/virt-test | shared/scripts/cmd_runner.py | Python | gpl-2.0 | 2,862 | 0.002096 | """
This script is used to execute a program and collect the monitor
information in background, redirect the outputs to log files.
"""
import threading, shelve, commands, re, os, sys, random, string
class Runner(object):
def __init__(self):
"""
Set the global paramter for thread clean up
"""
self.kill_thread_flag = False
def monitor_thread(self, m_cmd, p_file, r_path):
"""
Record the parent process id and start the monitor process
in background
"""
fd = shelve.open(p_file)
fd["pid"] = os.getpid()
fd.close()
commands.getoutput("%s &> %s_monitor" %(m_cmd, r_path))
def thread_kill(self, cmd, p_file):
"""
Kill the process according to its parent pid and command
"""
fd = shelve.open(p_file)
s, o = commands.getstatusoutput("pstree -p %s" % fd["pid"])
try:
tmp = cmd.split()[0]
pid = re.findall("%s.(\d+)" % tmp, o)[0]
except IndexError:
return (0, | "")
s, o = commands.getstatusoutput("kill -9 %s" % pid)
| fd.close()
return (s, o)
def test_thread(self, m_cmd, t_cmd, p_file):
"""
Test thread
"""
self.kill_thread_flag = True
s, o = commands.getstatusoutput(t_cmd)
if s != 0:
print "Test failed or timeout: %s" % o
if self.kill_thread_flag:
s, o = self.thread_kill(m_cmd, p_file)
if s != 0:
print "Monitor process is still alive, %s" % o
else:
self.kill_thread_flag = False
def run(self, m_cmd, t_cmd, r_path, timeout):
"""
Main thread for testing, will do clean up afterwards
"""
pid_file = "/tmp/pid_file_%s" % "".join(random.sample(string.letters,
4))
monitor = threading.Thread(target=self.monitor_thread,args=(m_cmd,
pid_file, r_path))
test_runner = threading.Thread(target=self.test_thread, args=(m_cmd,
t_cmd, pid_file))
monitor.start()
test_runner.start()
monitor.join(timeout)
if self.kill_thread_flag:
s, o = self.thread_kill(m_cmd, pid_file)
s, o = self.thread_kill(t_cmd, pid_file)
self.kill_thread_flag = False
if __name__ == '__main__':
if len(sys.argv) < 4:
this = os.path.basename(sys.argv[0])
print "Usage: %s <monitor_cmd> <test_cmd> <test_path> <timeout>" % this
sys.exit(1)
monitor_cmd = sys.argv[1]
test_cmd = sys.argv[2]
test_path = sys.argv[3]
test_cmd = test_cmd % test_path
timeout = int(sys.argv[4])
r = Runner()
r.run(monitor_cmd, test_cmd, test_path, timeout)
|
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/mercurial/revlog.py | Python | gpl-3.0 | 44,332 | 0.001038 | # revlog.py - storage back-end for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Storage back-end for Mercurial.
This provides efficient delta storage with O(1) retrieve and append
and O(changes) merge between branches.
"""
# import stuff from node for others to import from revlog
from node import bin, hex, nullid, nullrev
from i18n import _
import ancestor, mdiff, parsers, error, util, dagutil
import struct, zlib, errno
_pack = struct.pack
_unpack = struct.unpack
_compress = zlib.compress
_decompress = zlib.decompress
_sha = util.sha1
# revlog header flags
REVLOGV0 = 0
REVLOGNG = 1
REVLOGNGINLINEDATA = (1 << 16)
REVLOGGENERALDELTA = (1 << 17)
REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
REVLOG_DEFAULT_FORMAT = REVLOGNG
REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGGENERALDELTA
# revlog index flags
REVIDX_KNOWN_FLAGS = 0
# max size of revlog with inline data
_maxinline = 131072
_chunksize = 1048576
RevlogError = error.RevlogError
LookupError = error.LookupError
def getoffset(q):
return int(q >> 16)
def gettype(q):
return int(q & 0xFFFF)
def offset_type(offset, type):
return long(long(offset) << 16 | type)
nullhash = _sha(nullid)
def hash(text, p1, p2):
"""generate a hash from the given text and its parent hashes
This hash combines both the current file contents and its history
in a manner that makes it easy to distinguish nodes with the same
content in the revision graph.
"""
# As of now, if one of the parent node is null, p2 is null
if p2 == nullid:
# deep copy of a hash is faster than creating one
s = nullhash.copy()
s.update(p1)
else:
# none of the parent nodes are nullid
l = [p1, p2]
l.sort()
s = _sha(l[0])
s.update(l[1])
s.update(text)
return s.digest()
def compress(text):
""" generate a possibly-compressed representation of text """
if not text:
return ("", text)
l = len(text)
bin = None
if l < 44:
pass
elif l > 1000000:
# zlib makes an internal copy, thus doubling memory usage for
# large files, so lets do this in pieces
z = zlib.compressobj()
p = []
pos = 0
while pos < l:
pos2 = pos + 2**20
p.append(z.compress(text[pos:pos2]))
pos = pos2
p.append(z.flush())
if sum(map(len, p)) < l:
bin = "".join(p)
else:
bin = _compress(text)
if bin is None or len(bin) > l:
if text[0] == '\0':
return ("", text)
return ('u', text)
return ("", bin)
def decompress(bin):
""" decompress the given input """
if not bin:
return bin
t = bin[0]
if t == '\0':
return bin
if t == 'x':
return _decompress(bin)
if t == 'u':
return bin[1:]
raise RevlogError(_("unknown compression type %r") % t)
indexformatv0 = ">4l20s20s20s"
v0shaoffset = 56
class revlogoldio(object):
def __init__(self):
self.size = struct.calcsize(indexformatv0)
def parseindex(self, data, inline):
s = self.size
index = []
nodemap = {nullid: nullrev}
n = off = 0
l = len(data)
while off + s <= l:
cur = data[off:off + s]
off += s
e = _unpack(indexformatv0, cur)
# transform to revlogv1 format
e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
index.append(e2)
nodemap[e[6]] = n
n += 1
# add the magic null revision at -1
index.append((0, 0, 0, -1, -1, -1, -1, nullid))
return index, nodemap, None
def packentry(self, entry, node, version, rev):
if gettype(entry[0]):
raise RevlogError(_("index entry flags need Revlo | gNG"))
e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
node(entry[5]), node(entry[6]), entry[7])
return _pack(indexformatv0, *e2)
# index ng:
# 6 bytes: offset
# 2 bytes: flags
# 4 bytes: compressed length
# 4 bytes: uncompressed length
# 4 bytes: base rev
# 4 bytes: link rev
# 4 bytes: parent 1 rev
# 4 bytes: parent 2 rev
# 32 bytes: nodeid
indexfor | matng = ">Qiiiiii20s12x"
ngshaoffset = 32
versionformat = ">I"
class revlogio(object):
def __init__(self):
self.size = struct.calcsize(indexformatng)
def parseindex(self, data, inline):
# call the C implementation to parse the index data
index, cache = parsers.parse_index2(data, inline)
return index, getattr(index, 'nodemap', None), cache
def packentry(self, entry, node, version, rev):
p = _pack(indexformatng, *entry)
if rev == 0:
p = _pack(versionformat, version) + p[4:]
return p
class revlog(object):
"""
the underlying revision storage object
A revlog consists of two parts, an index and the revision data.
The index is a file with a fixed record size containing
information on each revision, including its nodeid (hash), the
nodeids of its parents, the position and offset of its data within
the data file, and the revision it's based on. Finally, each entry
contains a linkrev entry that can serve as a pointer to external
data.
The revision data itself is a linear collection of data chunks.
Each chunk represents a revision and is usually represented as a
delta against the previous chunk. To bound lookup time, runs of
deltas are limited to about 2 times the length of the original
version data. This makes retrieval of a version proportional to
its size, or O(1) relative to the number of revisions.
Both pieces of the revlog are written to in an append-only
fashion, which means we never need to rewrite a file to insert or
remove data, and can use some simple techniques to avoid the need
for locking while reading.
"""
def __init__(self, opener, indexfile):
"""
create a revlog object
opener is a function that abstracts the file opening operation
and can be used to implement COW semantics or the like.
"""
self.indexfile = indexfile
self.datafile = indexfile[:-2] + ".d"
self.opener = opener
self._cache = None
self._basecache = (0, 0)
self._chunkcache = (0, '')
self.index = []
self._pcache = {}
self._nodecache = {nullid: nullrev}
self._nodepos = None
v = REVLOG_DEFAULT_VERSION
opts = getattr(opener, 'options', None)
if opts is not None:
if 'revlogv1' in opts:
if 'generaldelta' in opts:
v |= REVLOGGENERALDELTA
else:
v = 0
i = ''
self._initempty = True
try:
f = self.opener(self.indexfile)
i = f.read()
f.close()
if len(i) > 0:
v = struct.unpack(versionformat, i[:4])[0]
self._initempty = False
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
self.version = v
self._inline = v & REVLOGNGINLINEDATA
self._generaldelta = v & REVLOGGENERALDELTA
flags = v & ~0xFFFF
fmt = v & 0xFFFF
if fmt == REVLOGV0 and flags:
raise RevlogError(_("index %s unknown flags %#04x for format v0")
% (self.indexfile, flags >> 16))
elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS:
raise RevlogError(_("index %s unknown flags %#04x for revlogng")
% (self.indexfile, flags >> 16))
elif fmt > REVLOGNG:
raise RevlogError(_("index %s unknown format %d")
% (self.indexfile, fmt))
self._io = revlogio()
if self.ver |
mrustl/flopy | examples/scripts/henry.py | Python | bsd-3-clause | 5,583 | 0.004657 |
import os
import numpy as np
import matplotlib.pyplot as plt
import flopy
workspace = os.path.join('data')
#make sure workspace directory exists
if not os.path.exists(workspace):
os.makedirs(workspace)
# In[2]:
# Input variables for the Henry Problem
Lx = 2.
Lz = 1.
nlay = 50
nrow = 1
ncol = 100
delr = Lx / ncol
delc = 1.0
delv = Lz / nlay
henry_top = 1.
henry_botm = np.linspace(henry_top - delv, 0., nlay)
qinflow = 5.702 #m3/day
dmcoef = 0.57024 #m2/day Could also try 1.62925 as another case of the Henry problem
hk = 864. #m/day
# In[3]:
# Create the basic MODFLOW model structure
modelname = 'henry'
#mf = flopy.modflow.Modflow(modelname, exe_name='swt_v4', model_ws=workspace)
mswt = flopy.seawat.Seawat(modelname,exe_name="swt_v4",model_ws=workspace)
# Add DIS package to the MODFLOW model
dis = flopy.modflow.ModflowDis(mswt, nlay, nrow, ncol, nper=1, delr=delr,
delc=delc, laycbd=0, top=henry_top,
botm=henry_botm, perlen=1.5, nstp=15)
# Variables for the BAS package
ibound = np.ones((nlay, nrow, ncol), dtype=np.int32)
ibound[:, :, -1] = -1
bas = flopy.modflow.ModflowBas(mswt, ibound, 0)
# Add LPF package to the MODFLOW model
lpf = flopy.modflow.ModflowLpf(mswt, hk=hk, vka=hk)
# Add PCG Package to the MODFLOW model
pcg = flopy.modflow.ModflowPcg(mswt, hclose=1.e-8)
# Add OC package to the MODFLOW model
oc = flopy.modflow.ModflowOc(mswt,
stress_period_data={(0, 0): ['save head', 'save budget']},
compact=True)
# Create WEL and SSM data
itype = flopy.mt3d.Mt3dSsm.itype_dict()
wel_data = {}
ssm_data = {}
wel_sp1 = []
ssm_sp1 = []
for k in range(nlay):
wel_sp1.append([k, 0, 0, qinflow / nlay])
ssm_sp1.append([k, 0, 0, 0., itype['WEL']])
ssm_sp1.append([k, 0, ncol - 1, 35., itype['BAS6']])
wel_data[0] = wel_sp1
ssm_data[0] = ssm_sp1
wel = flopy.modflow.ModflowWel(mswt, stress_period_data=wel_data)
# In[4]:
# Create the basic MT3DMS model structure
#mt = flopy.mt3d.Mt3dms(modelname, 'nam_mt3dms', mf, model_ws=workspace)
btn = flopy.mt3d.Mt3dBtn(mswt, nprs=-5, prsity=0.35, sconc=35., ifmtcn=0,
chkmas=False, nprobs=10, nprmas=10, dt0=0.001)
adv = flopy.mt3d.Mt3dAdv(mswt, mixelm=0)
dsp = flopy.mt3d.Mt3dDsp(mswt, al=0., trpt=1., trpv=1., dmcoef=dmcoef)
gcg = flopy.mt3d.Mt3dGcg(mswt, iter1=500, mxiter=1, isolve=1, cclose=1e-7)
ssm = flopy.mt3d.Mt3dSsm(mswt, stress_period_data=ssm_data)
# Create the SEAWAT model structure
#mswt = flopy.seawat.Seawat(modelname, 'nam_swt', mf, mt, model_ws=workspace, exe_name='swt_v4')
vdf = flopy.seawat.SeawatVdf(mswt, iwtable=0, densemin=0, densemax=0,
denseref=1000., denseslp=0.7143, firstdt=1e-3)
# In[5]:
# Write the input files
#mf.write_input()
#mt.write_input()
mswt.write_input()
# In[6]:
# Try to delete the output files, to prevent accidental use of older files
try:
os.remove(os.path.join(workspace, 'MT3D001.UCN'))
os.remove(os.path.join(workspace, modelname + '.hds'))
os.remove(os.path.join(workspace, modelname + '.cbc'))
except:
pass
# mswt.btn.prsity.how = "constant"
# mswt.btn.prsity[0].how = "internal"
# mswt.btn.prsity[1].how = "external"
# mswt.btn.sconc[0].how = "external"
# mswt.btn.prsity[0].fmtin = "(100E15.6)"
# mswt.lpf.hk[0].fmtin = "(BINARY)"
# In[7]:
#v = mswt.run_model(silent=False, report=True)
mswt.btn.prsity.how = "constant"
mswt.btn.prsity[0].how = "internal"
mswt.btn.prsity[1].how = "external"
mswt.btn.sconc[0].how = "external"
mswt.btn.prsity[0].fmtin = "(100E15.6)"
mswt.lpf.hk[0].fmtin = "(BINARY)"
mswt.btn.prsity[1].fmtin = '(BINARY)'
# In[7]:
mswt.write_input()
v = mswt.run_model(silent=False, report=True)
# In[8]:
# Post-process the results
import numpy as np
import flopy.utils.binaryfile as bf
# Load data
ucnobj = bf.UcnFile(os.path.join(workspace, 'MT3D001.UCN'),model=mswt)
times = ucnobj.get_times()
concentration = ucnobj.get_data(totim=times[-1])
cbbobj = bf.CellBudgetFile(os.path.join(workspace, 'henry.cbc'))
times = cbbobj.get_times()
qx = cbbobj.get_data(text='flow right face', totim=times[-1])[0]
qz = cbbobj.get_data(text='flow lower face', totim=times[-1])[0]
# Average flows to cell centers
qx_avg = np.empty(qx.shape, dtype=qx.dtype)
qx_avg[:, :, 1:] = 0.5 * (qx[:, :, 0:ncol-1] + qx[:, :, 1:ncol])
qx_avg[:, :, 0] = 0.5 * qx[:, :, 0]
qz_avg = np.empty(qz.shape, dtype=qz.dtype)
qz_avg[1:, :, :] = 0.5 * (qz[0:nlay-1, :, :] + qz[1:nlay, :, :])
qz_avg[0, :, :] = 0.5 * qz[0, :, :]
# In[9]:
# Make the plot
#import matplotlib.pyplot as plt
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1, aspect='equal')
ax.imshow(concentration[:, 0, :], interpolation='nearest',
extent=(0, Lx, 0, Lz))
y, x, z = dis.get_node_coordinates()
X, Z = np.meshgrid(x, z[:, 0, 0])
iskip = 3
ax.quiver(X[::iskip, ::iskip], Z[::iskip, ::iskip],
qx_avg[::iskip, 0, ::iskip], -qz_avg[::iskip, 0, ::iskip],
color='w', scale=5, headwidth=3, headlength=2,
headaxislength=2, width=0.0025)
#plt.savefig(os.path.join(workspace, 'henry.png')) |
# In[10]:
# Extract the heads
fname = os.path.join(workspace, 'henry.hds')
headobj = bf.HeadFile(fname)
times = headobj.get_times()
head = headobj.get_data(totim=times[-1])
# In[11]:
# Make a simple head plot |
fig = plt.figure(figsize=(10, 10))
ax = fig.add_subplot(1, 1, 1, aspect='equal')
im = ax.imshow(head[:, 0, :], interpolation='nearest',
extent=(0, Lx, 0, Lz))
ax.set_title('Simulated Heads')
plt.show()
|
UoK-Psychology/rmas_adapter | rmas_adapter/conf/adapter_template/settings.py | Python | mit | 75 | 0.066667 |
RMAS_BUS_WSDL='htt | p://localhost:7789/?wsdl'
POLL_INTERVAL=5000
EVE | NTS=[] |
scottpurdy/nupic | tests/unit/nupic/regions/anomaly_likelihood_region_test.py | Python | agpl-3.0 | 4,989 | 0.006013 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2016, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but W | ITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received | a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import tempfile
import unittest
import random
import csv
import numpy
try:
import capnp
except ImportError:
capnp = None
if capnp:
from nupic.regions.AnomalyLikelihoodRegion_capnp import\
AnomalyLikelihoodRegionProto
from nupic.regions.anomaly_likelihood_region import AnomalyLikelihoodRegion
from nupic.algorithms.anomaly_likelihood import AnomalyLikelihood
from pkg_resources import resource_filename
_INPUT_DATA_FILE = resource_filename(
"nupic.datafiles", "extra/hotgym/hotgym-anomaly.csv"
)
""" Unit tests for the anomaly likelihood region """
class AnomalyLikelihoodRegionTest(unittest.TestCase):
"""Tests for anomaly likelihood region"""
def testParamterError(self):
""" ensure historicWindowSize is greater than estimationSamples """
try:
anomalyLikelihoodRegion = AnomalyLikelihoodRegion(estimationSamples=100,
historicWindowSize=99)
self.assertEqual(False, True, "Should have failed with ValueError")
except ValueError:
pass
def testLikelihoodValues(self):
""" test to see if the region keeps track of state correctly and produces
the same likelihoods as the AnomalyLikelihood module """
anomalyLikelihoodRegion = AnomalyLikelihoodRegion()
anomalyLikelihood = AnomalyLikelihood()
inputs = AnomalyLikelihoodRegion.getSpec()['inputs']
outputs = AnomalyLikelihoodRegion.getSpec()['outputs']
with open (_INPUT_DATA_FILE) as f:
reader = csv.reader(f)
reader.next()
for record in reader:
consumption = float(record[1])
anomalyScore = float(record[2])
likelihood1 = anomalyLikelihood.anomalyProbability(
consumption, anomalyScore)
inputs['rawAnomalyScore'] = numpy.array([anomalyScore])
inputs['metricValue'] = numpy.array([consumption])
anomalyLikelihoodRegion.compute(inputs, outputs)
likelihood2 = outputs['anomalyLikelihood'][0]
self.assertEqual(likelihood1, likelihood2)
@unittest.skipUnless(
capnp, "pycapnp is not installed, skipping serialization test.")
def testSerialization(self):
""" test to ensure serialization preserves the state of the region
correctly. """
anomalyLikelihoodRegion1 = AnomalyLikelihoodRegion()
inputs = AnomalyLikelihoodRegion.getSpec()['inputs']
outputs = AnomalyLikelihoodRegion.getSpec()['outputs']
parameters = AnomalyLikelihoodRegion.getSpec()['parameters']
# Make sure to calculate distribution by passing the probation period
learningPeriod = parameters['learningPeriod']['defaultValue']
reestimationPeriod = parameters['reestimationPeriod']['defaultValue']
probation = learningPeriod + reestimationPeriod
for _ in xrange(0, probation + 1):
inputs['rawAnomalyScore'] = numpy.array([random.random()])
inputs['metricValue'] = numpy.array([random.random()])
anomalyLikelihoodRegion1.compute(inputs, outputs)
score1 = outputs['anomalyLikelihood'][0]
proto1 = AnomalyLikelihoodRegionProto.new_message()
anomalyLikelihoodRegion1.write(proto1)
# Write the proto to a temp file and read it back into a new proto
with tempfile.TemporaryFile() as f:
proto1.write(f)
f.seek(0)
proto2 = AnomalyLikelihoodRegionProto.read(f)
# # Load the deserialized proto
anomalyLikelihoodRegion2 = AnomalyLikelihoodRegion.read(proto2)
self.assertEqual(anomalyLikelihoodRegion1, anomalyLikelihoodRegion2)
window = parameters['historicWindowSize']['defaultValue']
for _ in xrange(0, window + 1):
inputs['rawAnomalyScore'] = numpy.array([random.random()])
inputs['metricValue'] = numpy.array([random.random()])
anomalyLikelihoodRegion1.compute(inputs, outputs)
score1 = outputs['anomalyLikelihood'][0]
anomalyLikelihoodRegion2.compute(inputs, outputs)
score2 = outputs['anomalyLikelihood'][0]
self.assertEqual(score1, score2)
if __name__ == "__main__":
unittest.main()
|
amanharitsh123/zulip | zerver/tests/test_queue_worker.py | Python | apache-2.0 | 5,307 | 0.003392 |
import os
import time
import ujson
from django.conf import settings
from django.http import HttpResponse
from django.test import TestCase
from mock import patch
from typing import Any, Callable, Dict, List, Mapping, Tuple
from zerver.lib.test_helpers import simulated_queue_client
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import get_client, UserActivity
from zerver.worker import queue_processors
class WorkerTest(ZulipTestCase):
class FakeClient(object):
def __init__(self):
# type: () -> None
self.consumers = {} # type: Dict[str, Callable]
self.queue = [] # type: List[Tuple[str, Dict[str, Any]]]
def register_json_consumer(self, queue_name, callback):
# type: (str, Callable) -> None
self.consumers[queue_name] = callback
def start_consuming(self):
# type: () -> None
for queue_name, data in self.queue:
callback = self.consumers[queue_name]
callback(data)
def test_mirror_worker(self):
# type: () -> None
fake_client = self.FakeClient()
data = [
dict(
message=u'\xf3test',
time=time.time(),
rcpt_to=self.example_email('hamlet'),
),
dict(
message='\xf3test',
time=time.time(),
rcpt_to=self.example_email('hamlet'),
),
dict(
message='test',
time=time.time(),
rcpt_to=self.example_email('hamlet'),
),
]
for element in data:
fake_client.queue.append(('email_mirror', element))
with patch('zerver.worker.queue_processors.mirror_email'):
with simulated_queue_client(lambda: fake_client):
worker = queue_processors.MirrorWorker()
worker.setup()
worker.start()
def test_UserActivityWorker(self):
# type: () -> None
fake_client = self.FakeClient()
user = self.example_user('hamlet')
UserActivity.objects.filter(
user_profile = user.id,
client = get_client('ios')
).delete()
data = dict(
user_profile_id = user.id,
client = 'ios',
time = time.time(),
query = 'send_message'
)
fake_client.queue.append(('user_activity', data))
with simulated_queue_client(lambda: fake_client):
worker = queue_processors.UserActivityWorker()
worker.setup()
worker.start()
activity_records = UserActivity.objects.filter(
user_profile = user.id,
client = get_client('ios')
)
self.assertTrue(len(activity_records), 1)
self.assertTrue(activity_records[0].count, 1)
def test_error_handling(self):
# type: () -> None
processed = []
@queue_processors.assign_queue('unreliable_worker')
class UnreliableWorker(queue_processors.QueueProcessingWorker):
def consume(self, data):
# type: (Mapping[str, Any]) -> None
if data["type"] == 'unexpected behaviour':
raise Exception('Worker task not performing as expected!')
processed.append(data["type"])
def _log_problem(self):
# type: () -> None
# keep the tests quiet
pass
fake_client = self.FakeClient()
for msg in ['good', 'fine', 'unexpected behaviour', 'back to normal']:
fake_client.queue.append(('unreliable_worker', {'type': msg}))
fn = os.path.join(settings.QUEUE_ERROR_DIR, 'unreliable_worker.errors')
try:
os.remove(fn)
except OSError: # nocoverage # error handling for the directory not existing
pass
with simulated_queue_client(lambda: fake_client):
worker = UnreliableWorker()
worker.setup()
worker.start()
self.assertEqual(processed, ['good', 'fine', 'back to normal'])
line = open(fn).readline().strip()
event = ujson.loads(line.split('\t')[1])
| self.assertEqual(event["type"], 'unexpected behaviour')
def test_worker_noname(self):
# type: () -> None
class TestWorker(queue_processors.QueueProcessingWorker):
def __init__(self):
# type: () -> None
super(TestWorker, self).__init__()
def consume(self, data):
# type: (Mapping[str, Any]) -> None
pass # nocoverage # this is int | entionally not called
with self.assertRaises(queue_processors.WorkerDeclarationException):
TestWorker()
def test_worker_noconsume(self):
# type: () -> None
@queue_processors.assign_queue('test_worker')
class TestWorker(queue_processors.QueueProcessingWorker):
def __init__(self):
# type: () -> None
super(TestWorker, self).__init__()
with self.assertRaises(queue_processors.WorkerDeclarationException):
worker = TestWorker()
worker.consume({})
|
smithfarm/s3-tests | s3tests/functional/test_headers.py | Python | mit | 30,528 | 0.00131 | from cStringIO import StringIO
import boto.connection
import boto.exception
import boto.s3.connection
import boto.s3.acl
import boto.utils
import bunch
import nose
import operator
import random
import string
import socket
import ssl
from boto.s3.connection import S3Connection
from nose.tools import eq_ as eq
from nose.plugins.attrib import attr
from .utils import assert_raises
import AnonymousAuth
from email.header import decode_header
from . import (
nuke_prefixed_buckets,
get_new_bucket,
s3,
config,
get_prefix,
TargetConnection,
targets,
)
_orig_conn = {}
_orig_authorize = None
_custom_headers = {}
_remove_headers = []
boto_type = None
# HeaderS3Connection and _our_authorize are necessary to be able to arbitrarily
# overwrite headers. Depending on the version of boto, one or the other is
# necessary. We later determine in setup what needs to be used.
def _update_headers(headers):
""" update a set of headers with additions/removals
"""
global _custom_headers, _remove_headers
headers.update(_custom_headers)
for header in _remove_headers:
try:
del headers[header]
except KeyError:
pass
# Note: We need to update the headers twice. The first time so the
# authentication signing is done correctly. The second time to overwrite any
# headers modified or created in the authentication step.
class HeaderS3Connection(S3Connection):
""" establish an authenticated connection w/customized headers
"""
def fill_in_auth(self, http_request, **kwargs):
_update_headers(http_request.headers)
S3Connection.fill_in_auth(self, http_request, **kwargs)
_update_headers(http_request.headers)
return http_request
def _our_authorize(self, connection, **kwargs):
""" perform an authentication w/customized headers
"""
_update_headers(self.headers)
_orig_authorize(self, connection, **kwargs)
_update_headers(self.headers)
def setup():
global boto_type
# we determine what we need to replace by the existence of particular
# attributes. boto 2.0rc1 as fill_in_auth for S3Connection, while boto 2.0
# has authorize for HTTPRequest.
if hasattr(S3Connection, 'fill_in_auth'):
global _orig_conn
boto_type = 'S3Connection'
for conn in s3:
_orig_conn[conn] = s3[conn]
header_conn = HeaderS3Connection(
aws_access_key_id=s3[conn].aws_access_key_id,
aws_secret_access_key=s3[conn].aws_secret_access_key,
is_secure=s3[conn].is_secure,
port=s3[conn].port,
host=s3[conn].host,
calling_format=s3[conn].calling_format
)
s3[conn] = header_conn
elif hasattr(boto.connection.HTTPRequest, 'authorize'):
global _orig_authorize
boto_type = 'HTTPRequest'
_orig_authorize = boto.connection.HTTPRequest.authorize
boto.connection.HTTPRequest.authorize = _our_authorize
else:
raise RuntimeError
def teardown():
    """Restore the original boto behaviour patched in by setup().

    Raises RuntimeError if ``boto_type`` holds an unexpected value
    (i.e. setup() never ran or was modified inconsistently).
    """
    global boto_type
    # replace original functionality depending on the boto version.
    # Fixed: the original compared strings with `is`, which only works by
    # CPython string-interning accident; `==` is the correct comparison.
    if boto_type == 'S3Connection':
        global _orig_conn
        for conn in s3:
            s3[conn] = _orig_conn[conn]
        _orig_conn = {}
    elif boto_type == 'HTTPRequest':
        global _orig_authorize
        boto.connection.HTTPRequest.authorize = _orig_authorize
        _orig_authorize = None
    else:
        raise RuntimeError('unsupported boto patch type %r - was setup() run?'
                           % (boto_type,))
def _clear_custom_headers():
    """Reset all header customizations to a pristine, empty state.

    Used as a nose teardown so one test's header tweaks never leak
    into the next test.
    """
    global _custom_headers, _remove_headers
    _custom_headers = {}
    _remove_headers = []
def _add_custom_headers(headers=None, remove=None):
    """ Define header customizations (additions, replacements, removals)

    :param headers: mapping merged into the set of headers to add/replace
    :param remove: iterable of header names to strip from requests
    """
    global _custom_headers, _remove_headers
    # Fixed inconsistency: the original lazily (re)initialized only
    # _custom_headers; guard both module-level accumulators symmetrically.
    if not _custom_headers:
        _custom_headers = {}
    if not _remove_headers:
        _remove_headers = []
    if headers is not None:
        _custom_headers.update(headers)
    if remove is not None:
        _remove_headers.extend(remove)
def _setup_bad_object(headers=None, remove=None):
    """ Create a new bucket, add an object w/header customizations
    """
    bucket = get_new_bucket()
    _add_custom_headers(headers=headers, remove=remove)
    # NOTE: the key is only created here, not uploaded; the caller triggers
    # the actual (customized) request via set_contents_from_string().
    return bucket.new_key('foo')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_invalid_garbage():
    """A Content-MD5 that is not even valid base64 must be rejected."""
    key = _setup_bad_object({'Content-MD5':'AWS HAHAHA'})
    e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
    eq(e.status, 400)
    eq(e.reason, 'Bad Request')
    eq(e.error_code, 'InvalidDigest')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/invalid MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_invalid_short():
    """Valid base64 that decodes to the wrong length for an MD5 digest."""
    key = _setup_bad_object({'Content-MD5':'YWJyYWNhZGFicmE='})
    e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
    eq(e.status, 400)
    eq(e.reason, 'Bad Request')
    eq(e.error_code, 'InvalidDigest')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/mismatched MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_bad():
    """A well-formed MD5 that does not match the payload fails as BadDigest."""
    key = _setup_bad_object({'Content-MD5':'rL0Y20zC+Fzt72VPzMSk2A=='})
    e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
    eq(e.status, 400)
    eq(e.reason, 'Bad Request')
    eq(e.error_code, 'BadDigest')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty MD5')
@attr(assertion='fails 400')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_empty():
    """An empty Content-MD5 header value must be rejected as InvalidDigest."""
    key = _setup_bad_object({'Content-MD5': ''})
    e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
    eq(e.status, 400)
    eq(e.reason, 'Bad Request')
    eq(e.error_code, 'InvalidDigest')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/non-graphics in MD5')
@attr(assertion='fails 403')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_unreadable():
    """A non-printable byte in Content-MD5 breaks signing, so the request
    is refused with 403 rather than a digest error."""
    key = _setup_bad_object({'Content-MD5': '\x07'})
    e = assert_raises(boto.exception.S3ResponseError, key.set_contents_from_string, 'bar')
    eq(e.status, 403)
    eq(e.reason, 'Forbidden')
    assert e.error_code in ('AccessDenied', 'SignatureDoesNotMatch')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no MD5 header')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_md5_none():
    """Content-MD5 is optional: stripping it entirely must still succeed."""
    key = _setup_bad_object(remove=('Content-MD5',))
    key.set_contents_from_string('bar')
# strangely, amazon doesn't report an error with a non-expect 100 also, our
# error comes back as html, and not xml as I normally expect
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/Expect 200')
@attr(assertion='garbage, but S3 succeeds!')
@nose.with_setup(teardown=_clear_custom_headers)
@attr('fails_on_rgw')
def test_object_create_bad_expect_mismatch():
    """An invalid Expect value (200) is silently ignored by AWS S3; the
    upload succeeds anyway (rgw behaves differently, hence fails_on_rgw)."""
    key = _setup_bad_object({'Expect': 200})
    key.set_contents_from_string('bar')
# this is a really long test, and I don't know if it's valid...
# again, accepts this with no troubles
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/empty expect')
@attr(assertion='succeeds ... should it?')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_expect_empty():
    """An empty Expect header is accepted and the upload succeeds."""
    key = _setup_bad_object({'Expect': ''})
    key.set_contents_from_string('bar')
@attr(resource='object')
@attr(method='put')
@attr(operation='create w/no expect')
@attr(assertion='succeeds')
@nose.with_setup(teardown=_clear_custom_headers)
def test_object_create_bad_expect_none():
    """Expect is optional: removing it entirely must still succeed."""
    key = _setup_bad_object(remove=('Expect',))
    key.set_contents_from_string('bar')
# this is a really long test..
@attr(resource='object')
@attr(method='put |
enriquesanchezb/practica_utad_2016 | venv/lib/python2.7/site-packages/_pytest/cacheprovider.py | Python | apache-2.0 | 8,786 | 0.000341 | """
merged implementation of the cache provider
the name cache was not choosen to ensure pluggy automatically
ignores the external pytest-cache
"""
import py
import pytest
import json
from os.path import sep as _sep, altsep as _altsep
class Cache(object):
    """Filesystem-backed key/value store under ``<rootdir>/.cache``.

    Values are JSON-encoded under ``.cache/v/<key parts>``; arbitrary
    per-plugin directories live under ``.cache/d/<name>``.
    """
    def __init__(self, config):
        self.config = config
        self._cachedir = config.rootdir.join(".cache")
        self.trace = config.trace.root.get("cache")
        # --cache-clear wipes the whole cache directory up front
        if config.getvalue("cacheclear"):
            self.trace("clearing cachedir")
            if self._cachedir.check():
                self._cachedir.remove()
            self._cachedir.mkdir()
    def makedir(self, name):
        """ return a directory path object with the given name. If the
        directory does not yet exist, it will be created. You can use it
        to manage files likes e. g. store/retrieve database
        dumps across test sessions.
        :param name: must be a string not containing a ``/`` separator.
             Make sure the name contains your plugin or application
             identifiers to prevent clashes with other cache users.
        """
        if _sep in name or _altsep is not None and _altsep in name:
            raise ValueError("name is not allowed to contain path separators")
        return self._cachedir.ensure_dir("d", name)
    def _getvaluepath(self, key):
        # 'a/b/c' -> .cache/v/a/b/c
        return self._cachedir.join('v', *key.split('/'))
    def get(self, key, default):
        """ return cached value for the given key. If no value
        was yet cached or the value cannot be read, the specified
        default is returned.
        :param key: must be a ``/`` separated value. Usually the first
             name is the name of your plugin or your application.
        :param default: must be provided in case of a cache-miss or
             invalid cache values.
        """
        path = self._getvaluepath(key)
        if path.check():
            try:
                with path.open("r") as f:
                    return json.load(f)
            except ValueError:
                # corrupt/non-JSON content is treated as a cache miss
                self.trace("cache-invalid at %s" % (path,))
        return default
    def set(self, key, value):
        """ save value for the given key.
        :param key: must be a ``/`` separated value. Usually the first
             name is the name of your plugin or your application.
        :param value: must be of any combination of basic
               python types, including nested types
               like e. g. lists of dictionaries.
        """
        path = self._getvaluepath(key)
        # write failures only warn -- caching must never abort a test run
        try:
            path.dirpath().ensure_dir()
        except (py.error.EEXIST, py.error.EACCES):
            self.config.warn(
                code='I9', message='could not create cache path %s' % (path,)
            )
            return
        try:
            f = path.open('w')
        except py.error.ENOTDIR:
            self.config.warn(
                code='I9', message='cache could not write path %s' % (path,))
        else:
            with f:
                self.trace("cache-write %s: %r" % (key, value,))
                json.dump(value, f, indent=2, sort_keys=True)
class LFPlugin:
    """ Plugin which implements the --lf (run last-failing) option """
    def __init__(self, config):
        self.config = config
        # active for both --lf (run only failures) and --ff (failures first)
        active_keys = 'lf', 'failedfirst'
        self.active = any(config.getvalue(key) for key in active_keys)
        if self.active:
            self.lastfailed = config.cache.get("cache/lastfailed", {})
        else:
            self.lastfailed = {}
    def pytest_report_header(self):
        """Describe the chosen rerun mode in the terminal header."""
        if self.active:
            if not self.lastfailed:
                mode = "run all (no recorded failures)"
            else:
                mode = "rerun last %d failures%s" % (
                    len(self.lastfailed),
                    " first" if self.config.getvalue("failedfirst") else "")
            return "run-last-failure: %s" % mode
    def pytest_runtest_logreport(self, report):
        # record failures (xfails excluded); forget a test once its call
        # phase passes again
        if report.failed and "xfail" not in report.keywords:
            self.lastfailed[report.nodeid] = True
        elif not report.failed:
            if report.when == "call":
                self.lastfailed.pop(report.nodeid, None)
    def pytest_collectreport(self, report):
        passed = report.outcome in ('passed', 'skipped')
        if passed:
            if report.nodeid in self.lastfailed:
                # a previously-failed collector collected fine: track its
                # children individually instead
                self.lastfailed.pop(report.nodeid)
                self.lastfailed.update(
                    (item.nodeid, True)
                    for item in report.result)
        else:
            self.lastfailed[report.nodeid] = True
    def pytest_collection_modifyitems(self, session, config, items):
        """Reorder (--ff) or filter (--lf) the collected items."""
        if self.active and self.lastfailed:
            previously_failed = []
            previously_passed = []
            for item in items:
                if item.nodeid in self.lastfailed:
                    previously_failed.append(item)
                else:
                    previously_passed.append(item)
            if not previously_failed and previously_passed:
                # running a subset of all tests with recorded failures outside
                # of the set of tests currently executing
                pass
            elif self.config.getvalue("failedfirst"):
                items[:] = previously_failed + previously_passed
            else:
                items[:] = previously_failed
                config.hook.pytest_deselected(items=previously_passed)
    def pytest_sessionfinish(self, session):
        """Persist the failure set, except for --cache-show runs / slaves."""
        config = self.config
        if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
            return
        config.cache.set("cache/lastfailed", self.lastfailed)
def pytest_addoption(parser):
    """Register the --lf/--ff and --cache-show/--cache-clear options."""
    group = parser.getgroup("general")
    group.addoption(
        '--lf', '--last-failed', action='store_true', dest="lf",
        help="rerun only the tests that failed "
             "at the last run (or all if none failed)")
    group.addoption(
        '--ff', '--failed-first', action='store_true', dest="failedfirst",
        help="run all tests but run the last failures first.  "
             "This may re-order tests and thus lead to "
             "repeated fixture setup/teardown")
    group.addoption(
        '--cache-show', action='store_true', dest="cacheshow",
        help="show cache contents, don't perform collection or tests")
    group.addoption(
        '--cache-clear', action='store_true', dest="cacheclear",
        help="remove all cache contents at start of test run.")
def pytest_cmdline_main(config):
    """With --cache-show, dump the cache and skip the normal test run."""
    if config.option.cacheshow:
        from _pytest.main import wrap_session
        return wrap_session(config, cacheshow)
@pytest.hookimpl(tryfirst=True)
def pytest_configure(config):
    """Attach the Cache to config and activate the last-failed plugin."""
    config.cache = Cache(config)
    config.pluginmanager.register(LFPlugin(config), "lfplugin")
@pytest.fixture
def cache(request):
    """
    Return a cache object that can persist state between testing sessions.
    cache.get(key, default)
    cache.set(key, value)
    Keys must be a ``/`` separated value, where the first part is usually the
    name of your plugin or application to avoid clashes with other cache users.
    Values can be any object handled by the json stdlib module.
    """
    # the single Cache instance is created in pytest_configure
    return request.config.cache
def pytest_report_header(config):
    """In verbose mode, show where the cache directory lives."""
    if config.option.verbose:
        relpath = py.path.local().bestrelpath(config.cache._cachedir)
        return "cachedir: %s" % relpath
def cacheshow(config, session):
from pprint import pprint
tw = py.io.TerminalWriter()
tw.line("cachedir: " + str(config.cache._cachedir))
if not config.cache. | _cachedir.check():
tw.line("cache is empty")
return 0
dummy = object()
basedir = config.cache._cachedir
vdir = basedir.join("v")
| tw.sep("-", "cache values")
for valpath in vdir.visit(lambda x: x.isfile()):
key = valpath.relto(vdir).replace(valpath.sep, "/")
val = config.cache.get(key, dummy)
if val is dummy:
tw.line("%s contains unreadable content, "
"will be ignored" % key)
else:
tw. |
VHarisop/PyMP | experiments/mnist/mnist_mlmmp.py | Python | gpl-3.0 | 5,043 | 0.002578 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# make sure we are compatible with Python 2.x
from __future__ import division, print_function
import pymp
import argparse
import gzip
import logging
import pickle
import numpy as np
import theano.tensor as T
def load_dataset(filepath):
    """
    Loads the pickled MNIST dataset from a specified (gzipped) file.

    Arguments
    ---------
    filepath : str
        The path to the MNIST dataset

    Returns
    -------
    dict
        A dict with keys 'train', 'test' and 'validation', each mapping to
        a tuple whose first element is the inputs array and whose second
        element is the outputs (labels) array for that subset.
    """
    # load pickled dataset
    with gzip.open(filepath, 'rb') as fd:
        trs, tts, val = pickle.load(fd, encoding='latin1')
    return {
        'train': trs,
        'test': tts,
        'validation': val
    }
def train_and_classify(data, lr, n_epochs, n_hidden):
    """
    Performs softmax classification on the MNIST dataset after training a
    DeepMP on its training subset, which consists of a morphological layer
    leading to a softmax linear layer.

    Arguments
    ---------
    data : dict
        A dict containing the data split into train, test and validation data
    lr : float
        The learning rate to be used
    n_epochs : int
        The number of training epochs
    n_hidden : int
        The number of hidden units

    Returns
    -------
    acc : float
        The classification accuracy on the test subset (fraction in [0, 1]).
        As a side effect, the trained parameters are pickled to
        'mnist_mixedmlp_params.pkl'.
    """
    train_inp, train_outp = data['train']
    test_inp, test_outp = data['test']
    valid_inp, valid_outp = data['validation']
    # 10 possible digits (0 to 9)
    output_size = 10
    # load training and testing data
    train_inp = train_inp[:50000]
    train_outp = train_outp[:50000]
    test_inp = test_inp[:10000]
    test_outp = test_outp[:10000]
    # perform experiment
    method = pymp.TrainMethods.minibatch
    # train a DeepMP with a tropical hidden unit and a
    # softmax output unit
    X = T.matrix('X')
    clf = pymp.DeepMP(
        inputs=X,
        layer_dims=[train_inp.shape[1], n_hidden, output_size],
        layer_types=[pymp.GenericLayer, pymp.SoftmaxLayer])
    clf.train(train_inp, train_outp, lr, n_epochs, method, batch_size=250)
    Yout = clf.output_at(test_inp)
    # get number of matching recognition results
    acc = (np.sum(Yout == test_outp)) / len(test_outp)
    hunit = clf.hidden_layers[0]
    sunit = clf.output_layer
    # get hidden layer params
    Cmax, Cmin = hunit.Cmax.get_value(), hunit.Cmin.get_value()
    Wmax, Wmin = hunit.Wmax.get_value(), hunit.Wmin.get_value()
    # get output params
    W_s, B_s = sunit.W.get_value(), sunit.B.get_value()
    # store into a dict
    best_params = {
        'hidden': {
            'Cmax': Cmax, 'Cmin': Cmin, 'Wmax': Wmax, 'Wmin': Wmin
        },
        'output': {'W': W_s, 'B': B_s}
    }
    # save the model for future reference
    with open('mnist_mixedmlp_params.pkl', 'w+b') as pkl_f:
        pickle.dump(best_params, pkl_f)
    return acc
if __name__ == '__main__':
    # CLI entry point: parse hyperparameters, train, and report accuracy.
    parser = argparse.ArgumentParser(
        description='Classify images from the MNIST handwritten '
        'digits dataset using an DeepMP with one hidden morphological '
        'layer and a softmax output layer')
    parser.add_argument('-d', '--data',
                        help='The filename containing the dataset',
                        type=str,
                        default='mnist.pkl.gz')
    parser.add_argument('-nh', '--hidden_units',
                        help='The number of hidden units',
                        type=int,
                        default=16)
    parser.add_argument('-n', '--number_of_epochs',
                        help='The number of training epochs',
                        type=int,
                        default=100)
    parser.add_argument('-lr', '--learning_rate',
                        help='The learning rate to be used',
                        type=float,
                        default=0.1)
    # parse arguments
    args = vars(parser.parse_args())
    filename, num_hidden = args['data'], args['hidden_units']
    learning_rate = args['learning_rate']
    epochs = args['number_of_epochs']
    # run the experiment
    logging.basicConfig(
        level=logging.INFO,
        filename=None)
    data = load_dataset(filename)
    acc = train_and_classify(data, learning_rate, epochs, num_hidden)
    # output obtained accuracy
    print('Accuracy: %.3f' % acc)
|
immenz/pyload | module/plugins/hoster/EuroshareEu.py | Python | gpl-3.0 | 2,309 | 0.008239 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class EuroshareEu(SimpleHoster):
__name__ = "EuroshareEu"
__type__ = "hoster"
__version__ = "0.27"
__pattern__ = r'http://(?:www\.)?euroshare\.(eu|sk|cz|hu|pl)/file/.+'
__description__ = """Euroshare.eu hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "zoidberg@mujmail.cz")]
INFO_PATTERN = r'<span style="float: left;"><strong>(?P<N>.+?)</strong> \((?P<S>.+?)\)</span>'
OFFLINE_PATTERN = ur'<h2>S.bor sa nena.iel</h2>|Požadovaná stránka neexistuje!'
LINK_FREE_PATTERN = r'<a href="(/file/\d+/[^/]*/download/)"><div class="downloadButton"'
ERR_PARDL_PATTERN = r'<h2>Prebieha s.ahovanie</h2>|<p>Naraz je z jednej IP adresy mo.n. s.ahova. iba jeden s.bor'
ERR_NOT_LOGGED_IN_PATTERN = r'href="/customer-zone/login/"'
URL_REPLACEMENTS = [(r"(http://[^/]*\.)(sk|cz|hu|pl)/", r"\1eu/")]
def handlePremium(self, pyfile):
if self.ERR_NOT_LOGGED_IN_PATTERN in self.html:
| self.account.relogin(self.user)
self.retry(reason=_("User not logged in"))
self.download(pyfile.url.rstrip('/') + "/download/")
check = self.checkDownload({"login": re.compile(self.ERR_NOT_LOGGED_IN_PATTERN),
"json | " : re.compile(r'\{"status":"error".*?"message":"(.*?)"')})
if check == "login" or (check == "json" and self.lastCheck.group(1) == "Access token expired"):
self.account.relogin(self.user)
self.retry(reason=_("Access token expired"))
elif check == "json":
self.fail(self.lastCheck.group(1))
def handleFree(self, pyfile):
if re.search(self.ERR_PARDL_PATTERN, self.html) is not None:
self.longWait(5 * 60, 12)
m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
self.error(_("LINK_FREE_PATTERN not found"))
self.link = "http://euroshare.eu%s" % m.group(1)
def checkFile(self):
if self.checkDownload({"multi-dl": re.compile(self.ERR_PARDL_PATTERN)})
self.longWait(5 * 60, 12)
return super(EuroshareEu, self).checkFile()
getInfo = create_getInfo(EuroshareEu)
|
li-xirong/jingwei | model_based/dataengine/positiveengine.py | Python | mit | 2,192 | 0.01688 |
import sys
import os
import random
from basic.constant import ROOT_PATH
from basic.common import readRankingResults,printStatus
from dataengine import DataEngine
class PositiveEngine (DataEngine):
    """Sample candidate positive examples for a concept from a tag file."""
    def __init__(self, collection, rootpath=ROOT_PATH):
        DataEngine.__init__(self, collection)
        self.name = '%s.%s' % (self.__class__.__name__, collection)
    def precompute(self, concept):
        """Load the tagged image ids for *concept*, kept only if in imset."""
        datafile = os.path.join(self.datadir, 'tagged,lemm', concept + ".txt")
        # NOTE(review): the file handle is never closed explicitly
        newset = map(str.strip, open(datafile).readlines())
        self.candidateset = [x for x in newset if x in self.imset]
        self.target = concept
        print ("[%s] precomputing candidate positive examples for %s: %d instances" % (self.name, concept, len(self.candidateset)))
class SelectivePositiveEngine (PositiveEngine):
    """PositiveEngine that draws candidates from a precomputed
    similarity-ranking index instead of raw tag files."""
    def __init__(self, collection, method, rootpath=ROOT_PATH):
        PositiveEngine.__init__(self, collection)
        self.name = '%s.%s.%s' % (self.__class__.__name__, collection, method)
        self.datadir = os.path.join(rootpath, collection, 'SimilarityIndex', collection, method)
    def precompute(self, concept):
        """Load the ranked candidate image ids for *concept*."""
        print ("[%s] precomputing candidate positive examples for %s" % (self.name, concept))
        # fixed: these two lines were garbled in the source
        datafile = os.path.join(self.datadir, '%s.txt' % concept)
        ranklist = readRankingResults(datafile)
        # keep only the image ids (drop the scores)
        self.candidateset = [x[0] for x in ranklist]
        self.target = concept
    def sample(self, concept, n):
        """Return up to the *n* highest-ranked candidates for *concept*."""
        if self.target != concept:
            self.precompute(concept)
        if len(self.candidateset) <= n:
            print ("[%s] request %d examples of %s, but %d available only :(" % (self.name, n, concept, len(self.candidateset)))
            return list(self.candidateset)
        return self.candidateset[:n]
if __name__ == "__main__":
    # smoke test: sample positives for two concepts with both engines
    collection = "train10k"
    method = "tagged,lemm/%s/vgg-verydeep-16-fc7relu,cosineknn,1000,lemm" % collection
    pe1 = PositiveEngine(collection)
    pe2 = SelectivePositiveEngine(collection, method)
    for concept in str.split('airplane dog'):
        pe1.sample(concept, 100)
        pe2.sample(concept, 100)
|
NeCTAR-RC/nagios-plugins-openstack | plugins/check_novaapi.py | Python | agpl-3.0 | 3,101 | 0.020323 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Keystone monitoring script for Na | gios
#
# Copyright © 2012 eNovance <licensing@enovance.com>
#
# Author: Florian Lambert <florian.lambert@enovance.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT | ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#needed python-novaclient
#https://github.com/openstack/python-novaclient
import sys
import argparse
from novaclient.v1_1 import client
STATE_OK = 0
STATE_WARNING = 1
STATE_CRITICAL = 2
STATE_UNKNOWN = 3
#2 Warn = 1 Critical
def return_state(state):
    # Accumulate Nagios states: two WARNINGs (or one CRITICAL) escalate to
    # an immediate CRITICAL exit with the accumulated failure message.
    # Relies on module globals RETURN_STATE/STATE_MESSAGE set by the caller.
    global RETURN_STATE
    global STATE_MESSAGE
    RETURN_STATE += state
    if RETURN_STATE > 1:
        STATE_MESSAGE +=" does not work"
        print STATE_MESSAGE
        sys.exit(STATE_CRITICAL)
def collect_args():
    """Build the command-line parser for this nova-api health check.

    Returns the configured :class:`argparse.ArgumentParser`; all four
    authentication options are mandatory.
    """
    # fixed copy-paste bug: the description claimed this checks glance
    parser = argparse.ArgumentParser(description='Check an OpenStack nova-api server.')
    parser.add_argument('--auth_url', metavar='URL', type=str,
                        required=True,
                        help='Keystone URL')
    parser.add_argument('--username', metavar='username', type=str,
                        required=True,
                        help='username to use for authentication')
    parser.add_argument('--password', metavar='password', type=str,
                        required=True,
                        help='password to use for authentication')
    parser.add_argument('--tenant', metavar='tenant', type=str,
                        required=True,
                        help='tenant name to use for authentication')
    return parser
def check_novaapi(nt):
    """Probe basic nova-api listings (flavors, servers, images, security
    groups) through the given client and return a Nagios state code.

    Each failed probe counts as a WARNING; return_state() escalates to
    CRITICAL (and exits) once two probes have failed.
    """
    global RETURN_STATE
    global STATE_MESSAGE
    RETURN_STATE = STATE_OK
    STATE_MESSAGE = "Failed -"
    #flavors
    if not len(nt.flavors.list(detailed=False)) >= 1:
        STATE_MESSAGE +=" flavors.list >=1"
        return_state(STATE_WARNING)
    #servers
    if not nt.servers.list():
        STATE_MESSAGE +=" servers.list==false"
        return_state(STATE_WARNING)
    #images
    if not len(nt.images.list(detailed=False)) >= 1:
        STATE_MESSAGE +=" images.list >=1"
        return_state(STATE_WARNING)
    #security_groups
    if not len(nt.security_groups.list()) >= 1:
        STATE_MESSAGE +=" security_groups >=1"
        return_state(STATE_WARNING)
    if RETURN_STATE == STATE_WARNING:
        STATE_MESSAGE +=" does not work"
        print STATE_MESSAGE
    else:
        print "OK - Nova-api Connection established"
    return RETURN_STATE
if __name__ == '__main__':
    # any connection/auth error is reported as CRITICAL for Nagios
    args = collect_args().parse_args()
    try:
        nt = client.Client(args.username,
                           args.password,
                           args.tenant,
                           args.auth_url,
                           service_type="compute")
        sys.exit(check_novaapi(nt))
    except Exception as e:
        print str(e)
        sys.exit(STATE_CRITICAL)
|
frankrousseau/weboob | modules/explorimmo/pages.py | Python | agpl-3.0 | 7,242 | 0.00332 | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your opt | ion) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHAN | TABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
from decimal import Decimal
from datetime import datetime
from weboob.browser.filters.json import Dict
from weboob.browser.elements import ItemElement, ListElement, method
from weboob.browser.pages import JsonPage, HTMLPage, pagination
from weboob.browser.filters.standard import CleanText, CleanDecimal, Regexp, Env, BrowserURL, Filter, Format
from weboob.browser.filters.html import CleanHTML, XPath
from weboob.capabilities.base import NotAvailable, NotLoaded
from weboob.capabilities.housing import Housing, HousingPhoto, City
class DictElement(ListElement):
    """ListElement variant that iterates over a named key of a JSON doc."""
    def find_elements(self):
        # self.el holds the parsed JSON; item_xpath names the list key
        for el in self.el[0].get(self.item_xpath):
            yield el
class CitiesPage(JsonPage):
    """City-autocomplete JSON page; yields one City per 'locations' entry."""
    @method
    class get_cities(DictElement):
        item_xpath = 'locations'
        class item(ItemElement):
            klass = City
            # the API exposes only a label; reuse it as both id and name
            obj_id = Dict('label')
            obj_name = Dict('label')
class SearchPage(HTMLPage):
    """Housing search-results page (HTML listing with pagination)."""
    @pagination
    @method
    class iter_housings(ListElement):
        item_xpath = '//div[starts-with(@id, "bloc-vue-")]'
        def next_page(self):
            """Build the next results URL, stopping at the last page."""
            js_datas = CleanText('//div[@id="js-data"]/@data-rest-search-request')(self)
            total_page = self.page.browser.get_total_page(js_datas.split('?')[-1])
            m = re.match(".*page=(\d?)(?:&.*)?", self.page.url)
            if m:
                current_page = int(m.group(1))
                next_page = current_page + 1
                if next_page <= total_page:
                    return self.page.url.replace('page=%d' % current_page, 'page=%d' % next_page)
        class item(ItemElement):
            klass = Housing
            obj_id = CleanText('./@data-classified-id')
            obj_title = CleanText('./div/h2[@itemprop="name"]/a')
            obj_location = CleanText('./div/h2[@itemprop="name"]/span[class="item-localisation"]')
            obj_cost = CleanDecimal('./div/div/span[@class="price-label"]')
            obj_currency = Regexp(CleanText('./div/div/span[@class="price-label"]'),
                                  '.*([%s%s%s])' % (u'€', u'$', u'£'), default=u'€')
            obj_text = CleanText('./div/div/div[@itemprop="description"]')
            # the surface (in m2) is embedded in the title
            obj_area = CleanDecimal(Regexp(CleanText('./div/h2[@itemprop="name"]/a'),
                                           '(.*?)(\d*) m2(.*?)', '\\2', default=None),
                                    default=NotAvailable)
            def obj_phone(self):
                """Phone number, or NotLoaded when the listing masks it."""
                phone = CleanText('./div/div/ul/li/span[@class="js-clickphone"]',
                                  replace=[(u'Téléphoner : ', u'')],
                                  default=NotAvailable)(self)
                # '...' means the number is truncated and must be fetched
                # separately (see PhonePage)
                if '...' in phone:
                    return NotLoaded
                return phone
            def obj_photos(self):
                """Single thumbnail photo for the listing."""
                url = CleanText('./div/div/a/img[@itemprop="image"]/@src')(self)
                return [HousingPhoto(url)]
class TypeDecimal(Filter):
    """Filter converting a raw JSON value to a Decimal."""
    def filter(self, el):
        return Decimal(el)
class FromTimestamp(Filter):
    """Filter converting a millisecond epoch timestamp to a datetime."""
    def filter(self, el):
        # API timestamps are in milliseconds
        return datetime.fromtimestamp(el / 1000.0)
class PhonePage(JsonPage):
    """JSON endpoint revealing a listing's full phone number."""
    def get_phone(self):
        # returns None when the response has no 'phoneNumber' key
        return self.doc.get('phoneNumber')
class HousingPage2(JsonPage):
    """Housing detail fetched from the JSON API."""
    @method
    class get_housing(ItemElement):
        klass = Housing
        obj_id = Env('_id')
        obj_title = Dict('characteristics/titleWithTransaction')
        obj_location = Format('%s %s %s', Dict('location/address'),
                              Dict('location/postalCode'), Dict('location/cityLabel'))
        obj_cost = TypeDecimal(Dict('characteristics/price'))
        obj_currency = u'€'
        obj_text = CleanHTML(Dict('characteristics/description'))
        obj_url = BrowserURL('housing_html', _id=Env('_id'))
        obj_area = TypeDecimal(Dict('characteristics/area'))
        obj_date = FromTimestamp(Dict('characteristics/date'))
        def obj_photos(self):
            """Extract direct photo URLs from the thumbnailer-proxied ones."""
            photos = []
            for img in Dict('characteristics/images')(self):
                # image URLs are proxied; keep only the original URL part
                m = re.search('http://thbr\.figarocms\.net.*(http://.*)', img)
                if m:
                    photos.append(HousingPhoto(m.group(1)))
            return photos
        def obj_details(self):
            """Collect secondary attributes (fees, bedrooms, energy, ...)."""
            details = {}
            details['fees'] = Dict('characteristics/fees')(self)
            details['bedrooms'] = Dict('characteristics/bedroomCount')(self)
            details['energy'] = Dict('characteristics/energyConsumptionCategory')(self)
            rooms = Dict('characteristics/roomCount')(self)
            if len(rooms):
                details['rooms'] = rooms[0]
            details['available'] = Dict('characteristics/available')(self)
            return details
    def get_total_page(self):
        """Total number of result pages, used by SearchPage pagination."""
        return self.doc.get('pagination').get('total')
class HousingPage(HTMLPage):
    """Housing detail scraped from the HTML page."""
    @method
    class get_housing(ItemElement):
        klass = Housing
        obj_id = Env('_id')
        obj_title = CleanText('//h1[@itemprop="name"]')
        obj_location = CleanText('//span[@class="informations-localisation"]')
        obj_cost = CleanDecimal('//span[@itemprop="price"]')
        obj_currency = Regexp(CleanText('//span[@itemprop="price"]'),
                              '.*([%s%s%s])' % (u'€', u'$', u'£'), default=u'€')
        obj_text = CleanHTML('//div[@itemprop="description"]')
        obj_url = BrowserURL('housing', _id=Env('_id'))
        # the surface (in m2) is embedded in the page title
        obj_area = CleanDecimal(Regexp(CleanText('//h1[@itemprop="name"]'),
                                       '(.*?)(\d*) m2(.*?)', '\\2'), default=NotAvailable)
        def obj_photos(self):
            """Extract direct photo URLs from the thumbnailer-proxied ones."""
            photos = []
            for img in XPath('//a[@class="thumbnail-link"]/img[@itemprop="image"]')(self):
                url = Regexp(CleanText('./@src'), 'http://thbr\.figarocms\.net.*(http://.*)')(img)
                photos.append(HousingPhoto(url))
            return photos
        def obj_details(self):
            """Collect the name/value feature list plus the energy rating."""
            details = dict()
            for item in XPath('//div[@class="features clearfix"]/ul/li')(self):
                key = CleanText('./span[@class="name"]')(item)
                value = CleanText('./span[@class="value"]')(item)
                if value and key:
                    details[key] = value
            key = CleanText('//div[@class="title-dpe clearfix"]')(self)
            value = CleanText('//div[@class="energy-consumption"]')(self)
            if value and key:
                details[key] = value
            return details
|
jabooth/menpo-archive | menpo/fit/lucaskanade/appearance/simultaneous.py | Python | bsd-3-clause | 8,583 | 0.000233 | import numpy as np
from scipy.linalg import norm
from .base import AppearanceLucasKanade
class SimultaneousForwardAdditive(AppearanceLucasKanade):
    """Simultaneous forward-additive Lucas-Kanade fitting: shape parameters
    and appearance weights are updated jointly in each iteration."""
    @property
    def algorithm(self):
        return 'Simultaneous-FA'
    def _fit(self, lk_fitting, max_iters=20, project=True):
        """Iteratively fit the transform and appearance to lk_fitting.image.

        When *project* is True, initial appearance weights come from
        projecting the warped image onto the appearance model; otherwise
        they start at zero (the model mean). Returns the updated fitting.
        """
        # Initial error > eps
        error = self.eps + 1
        image = lk_fitting.image
        lk_fitting.weights = []
        n_iters = 0
        # Number of shape weights
        n_params = self.transform.n_parameters
        # Initial appearance weights
        if project:
            # Obtained weights by projection
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)
            weights = self.appearance_model.project(IWxp)
            # Reset template
            self.template = self.appearance_model.instance(weights)
        else:
            # Set all weights to 0 (yielding the mean)
            weights = np.zeros(self.appearance_model.n_active_components)
        lk_fitting.weights.append(weights)
        # Compute appearance model Jacobian wrt weights
        appearance_jacobian = self.appearance_model._jacobian.T
        # Forward Additive Algorithm
        while n_iters < max_iters and error > self.eps:
            # Compute warped image with current weights
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)
            # Compute warp Jacobian
            dW_dp = self.transform.jacobian(
                self.template.mask.true_indices)
            # Compute steepest descent images, VI_dW_dp
            J = self.residual.steepest_descent_images(
                image, dW_dp, forward=(self.template, self.transform,
                                       self.interpolator))
            # Concatenate VI_dW_dp with appearance model Jacobian so shape
            # and appearance updates are solved for simultaneously
            self._J = np.hstack((J, appearance_jacobian))
            # Compute Hessian and inverse
            self._H = self.residual.calculate_hessian(self._J)
            # Compute steepest descent parameter updates
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, self.template, IWxp)
            # Compute gradient descent parameter updates
            delta_p = np.real(self._calculate_delta_p(sd_delta_p))
            # Update warp weights (first n_params entries are shape updates)
            parameters = self.transform.as_vector() + delta_p[:n_params]
            self.transform.from_vector_inplace(parameters)
            lk_fitting.parameters.append(parameters)
            # Update appearance weights (remaining entries)
            weights -= delta_p[n_params:]
            self.template = self.appearance_model.instance(weights)
            lk_fitting.weights.append(weights)
            # Test convergence
            error = np.abs(norm(delta_p))
            n_iters += 1
        lk_fitting.fitted = True
        return lk_fitting
class SimultaneousForwardCompositional(AppearanceLucasKanade):
    """Simultaneous forward-compositional Lucas-Kanade fitting: the warp
    Jacobian is fixed (precomputed in _set_up) and updates are composed
    onto the transform instead of added to its parameters."""
    @property
    def algorithm(self):
        return 'Simultaneous-FC'
    def _set_up(self):
        # Compute warp Jacobian once; in the compositional formulation it
        # does not change across iterations
        self._dW_dp = self.transform.jacobian(
            self.template.mask.true_indices)
    def _fit(self, lk_fitting, max_iters=20, project=True):
        """Iteratively fit the transform and appearance to lk_fitting.image.

        Same joint shape/appearance scheme as the forward-additive variant,
        but warp updates are applied by composition. Returns the fitting.
        """
        # Initial error > eps
        error = self.eps + 1
        image = lk_fitting.image
        lk_fitting.weights = []
        n_iters = 0
        # Number of shape weights
        n_params = self.transform.n_parameters
        # Initial appearance weights
        if project:
            # Obtained weights by projection
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)
            weights = self.appearance_model.project(IWxp)
            # Reset template
            self.template = self.appearance_model.instance(weights)
        else:
            # Set all weights to 0 (yielding the mean)
            weights = np.zeros(self.appearance_model.n_active_components)
        lk_fitting.weights.append(weights)
        # Compute appearance model Jacobian wrt weights
        appearance_jacobian = self.appearance_model._jacobian.T
        # Forward Compositional Algorithm
        while n_iters < max_iters and error > self.eps:
            # Compute warped image with current weights
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)
            # Compute steepest descent images, VI_dW_dp
            J = self.residual.steepest_descent_images(IWxp, self._dW_dp)
            # Concatenate VI_dW_dp with appearance model Jacobian so shape
            # and appearance updates are solved for simultaneously
            self._J = np.hstack((J, appearance_jacobian))
            # Compute Hessian and inverse
            self._H = self.residual.calculate_hessian(self._J)
            # Compute steepest descent parameter updates
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, self.template, IWxp)
            # Compute gradient descent parameter updates
            delta_p = np.real(self._calculate_delta_p(sd_delta_p))
            # Update warp weights by composing the incremental warp
            self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
            lk_fitting.parameters.append(self.transform.as_vector())
            # Update appearance weights (remaining entries)
            weights -= delta_p[n_params:]
            self.template = self.appearance_model.instance(weights)
            lk_fitting.weights.append(weights)
            # Test convergence
            error = np.abs(norm(delta_p))
            n_iters += 1
        lk_fitting.fitted = True
        return lk_fitting
class SimultaneousInverseCompositional(AppearanceLucasKanade):
@property
def algorithm(self):
return 'Simultaneous-IA'
def _set_up(self):
# Compute the Jacobian of the warp
self._dW_dp = self.transform.jacobian(
self.appearance_model.mean.mask.true_indices)
def _fit(self, lk_fitting, max_iters=20, project=True):
# Initial error > eps
error = self.eps + 1
image = lk_fitting.image
lk_fitting.weights = []
n_iters = 0
# Number of shape weights
n_params = self.transform.n_parameters
# Initial appearance weights
if project:
# Obtained weights by projection
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.i | nterpolator)
weights = self.appearance_model.project(IWxp)
# Reset template
self.template = self.appearance_model.instance(weights)
else:
# Set all weights to 0 (yielding the mean)
weights = np.zeros(self.appearance_model.n_active_components)
lk_fitting.weights.append(weights)
# Compute appearance model Jacobian wrt weights
appearance_jacobian = -self.appearance_model._jacobian.T
# Baker-Matthews, Inve | rse Compositional Algorithm
while n_iters < max_iters and error > self.eps:
# Compute warped image with current weights
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
# Compute steepest descent images, VT_dW_dp
J = self.residual.steepest_descent_images(self.template,
self._dW_dp)
# Concatenate VI_dW_dp with appearance model Jacobian
self._J = np.hstack((J, appearance_jacobian))
# Compute Hessian and inverse
self._H = self.residual.calculate_hessian(self._J)
# Compute steepest descent parameter updates
sd_delta_p = self.residual.steepest_descent_update(
self._J, IWxp, self.template)
# Compute gradient descent parameter updates
delta_p = -np.real(self._calculate_delta_p(sd_delta_p))
# Update warp weights
self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
lk_fitting. |
altova/SECDB | scripts/validate_filings.py | Python | apache-2.0 | 5,612 | 0.0098 | # Copyright 2015 Altova GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__copyright__ = 'Copyright 2015 Altova GmbH'
__license__ = 'http://www.apache.org/licenses/LICENSE-2.0'
# Validates all SEC filings in the given RSS feed
#
# Usage:
# raptorxmlxbrl script scripts/validate_filings.py feeds/xbrlrss-2015-04.xml
import feed_tools
import tqdm
import re,sys,os.path,time,concurrent.futures,urllib,glob,logging,argparse,multiprocessing,threading
from altova_api.v2 import xml, xsd, xbrl
def validate(filing):
    """Load and validate one SEC filing; return True unless it has errors.

    Inconsistencies are logged as warnings but still count as valid.
    """
    instance, log = feed_tools.load_instance(filing)
    if not instance or log.has_errors():
        logger.error('Filing %s has %d ERRORS!',feed_tools.instance_url(filing),len(list(log.errors)))
        # NOTE(review): the level check queries the *root* logger while the
        # messages go to the 'default' logger — confirm this is intentional.
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            logger.log(logging.DEBUG,'\n'.join([error.text for error in log]))
        return False
    if log.has_inconsistencies():
        inconsistencies = list(log.inconsistencies)
        logger.warning('Filing %s has %d INCONSISTENCIES!',feed_tools.instance_url(filing),len(inconsistencies))
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            logger.log(logging.DEBUG,'\n'.join([error.text for error in inconsistencies]))
    else:
        logger.info('Filing %s is VALID!',feed_tools.instance_url(filing))
    return True
def validate_filings(filings, max_threads):
    """Validate `filings` concurrently with up to `max_threads` workers,
    showing a tqdm progress bar as results complete."""
    logger.info('Processing %d filings...',len(filings))
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_threads) as executor:
        with tqdm.tqdm(range(len(filings))) as progressbar:
            futures = [executor.submit(validate,filing) for filing in filings]
            try:
                for future in concurrent.futures.as_completed(futures):
                    future.result()
                    progressbar.update()
            except KeyboardInterrupt:
                # HACK: clears the executor's *private* thread/queue state so
                # pending work is abandoned immediately on Ctrl+C instead of
                # blocking in the executor's shutdown join. Relies on
                # CPython implementation details of concurrent.futures.
                executor._threads.clear()
                concurrent.futures.thread._threads_queues.clear()
                raise
def parse_args():
    """Parse command-line options for the validation run.

    Returns the argparse namespace with `cik`/`sic` converted to int when
    supplied and `company_re` holding a compiled case-insensitive regex
    (or None when --company was not given).
    """
    parser = argparse.ArgumentParser(description='Validates all filings contained in the given EDGAR RSS feed from the SEC archive.')
    add = parser.add_argument
    add('rss_feeds', metavar='RSS', nargs='+', help='EDGAR RSS feed file')
    add('-l', '--log', metavar='LOG_FILE', dest='log_file', help='log output file')
    add('--log-level', metavar='LOG_LEVEL', dest='log_level', choices=['ERROR', 'WARNING', 'INFO', 'DEBUG'], default='INFO', help='log level (ERROR|WARNING|INFO|DEBUG)')
    add('--cik', help='CIK number')
    add('--sic', help='SIC number')
    add('--form-type', help='Form type (10-K,10-Q,...)')
    add('--company', help='Company name')
    add('--threads', type=int, default=multiprocessing.cpu_count(), dest='max_threads', help='specify max number of threads')
    args = parser.parse_args()
    # Compile the company filter once; coerce the numeric filters to int.
    args.company_re = re.compile(args.company, re.I) if args.company else None
    for attr in ('cik', 'sic'):
        raw = getattr(args, attr)
        if raw:
            setattr(args, attr, int(raw))
    return args
def setup_logging(args):
    """Configure root logging and bind the module-level `logger`.

    Output goes to `args.log_file` (truncated on open) when given,
    otherwise to the console; `args.log_level` names the threshold.
    """
    global logger
    # argparse restricts log_level to ERROR/WARNING/INFO/DEBUG, all of
    # which exist as attributes on the logging module.
    kwargs = {'format': '%(asctime)s %(levelname)s %(message)s',
              'level': getattr(logging, args.log_level)}
    if args.log_file:
        kwargs.update(filename=args.log_file, filemode='w')
    logging.basicConfig(**kwargs)
    logger = logging.getLogger('default')
def collect_feeds(args):
    """Yield the absolute path of every RSS feed argument, expanding
    shell-style globs in each pattern."""
    for pattern in args.rss_feeds:
        yield from glob.iglob(os.path.abspath(pattern))
def main():
    """Entry point: parse options, then validate each feed's filings."""
    # Parse script arguments
    args = parse_args()
    # Setup python logging framework
    setup_logging(args)
    # Validate all filings in the given RSS feeds one month after another
    for filepath in collect_feeds(args):
        # Load EDGAR filing metadata from RSS feed (and filter out all non 10-K/10-Q filings or companies without an assigned ticker symbol)
        filings = []
        for filing in feed_tools.read_feed(filepath):
            # Google to Alphabet reorganization
            # (remap Google's old CIK to Alphabet's so filings stay grouped)
            if filing['cikNumber'] == 1288776:
                filing['cikNumber'] = 1652044
            # NOTE(review): args.company_re is computed by parse_args but
            # never applied here — confirm the company filter is meant to work.
            if args.form_type is None or args.form_type == filing['formType']:
                if args.sic is None or args.sic == filing['assignedSic']:
                    if args.cik is None or args.cik == filing['cikNumber']:
                        filings.append(filing)
        # Validate the selected XBRL filings
        # NOTE(review): hard cap at the first 100 filings per feed — confirm
        # this limit is deliberate and not leftover debugging.
        validate_filings(filings[:100], args.max_threads)
if __name__ == '__main__':
    # Time the whole validation run and report the wall-clock duration.
    start = time.perf_counter()
    main()
    end = time.perf_counter()
    print('Finished validation in ',end-start)
JanVan01/gwot-physical | notifiers/opensensemap.py | Python | lgpl-3.0 | 1,489 | 0.034923 | from notifiers.base import BaseNotifier
from utils.utils import SettingManager
import requests
import re
class OpenSenseMapNotifier(BaseNotifier):
    """Push measurement values to an OpenSenseMap sensebox sensor."""

    def send(self, notifier, subscriber, measurement):
        """POST `measurement`'s value to the configured sensebox/sensor.

        Skips sending (with a console note) when either id is missing;
        the OpenSenseMap API answers 201 Created on success.
        """
        sensebox_id = notifier.get_setting("sensebox_id")
        sensor_id = subscriber.get_setting("sensor_id")
        if sensebox_id is None or sensor_id is None:
            print("OpenSenseMap notification not sent, sensebox_id or sensor_id not given.")
            return
        data = {"value": measurement.get_value()}
        url = "http://www.opensensemap.org:8000/boxes/" + sensebox_id + "/" + sensor_id
        headers = {"Content-type": "application/json", "Connection": "close"}
        r = requests.post(url, headers=headers, json=data)
        if r.status_code != 201:
            print("Sending OpenSenseMap notification to " + url + " and got response " + str(r.status_code))

    def get_subscriber_settings(self):
        """Setting keys required per subscriber."""
        return ["sensor_id"]

    def get_notifier_settings(self):
        """Setting keys required per notifier."""
        return ["sensebox_id"]

    def get_setting_name(self, key):
        """Return a display label for a setting key, or None if unknown."""
        if key == "sensor_id":
            return "Sensebox Sensor ID"
        elif key == "sensebox_id":
            return "Sensebox ID"
        else:
            return None

    def validate_setting(self, key, value):
        """Accept 16-32 lowercase-hex characters for both id settings."""
        if key == "sensor_id" or key == "sensebox_id":
            # Raw string: "\d" in a plain literal is an invalid escape
            # sequence (SyntaxWarning on modern Python); the pattern
            # itself is unchanged.
            regexp = r"^[\dabcdef]{16,32}$"
            return (re.match(regexp, value) is not None)
        else:
            return False

    def get_setting_html(self, key, value = None):
        """Return an HTML input field for a known setting key, else None."""
        if key == "sensor_id" or key == "sensebox_id":
            return SettingManager().get_input_field(key, value)
        else:
            return None
ScreenZoneProjects/ScreenBot-Discord | cogs/welcome.py | Python | gpl-3.0 | 15,038 | 0.001463 | import discord
from discord.ext import commands
from .utils.dataIO import fileIO
from .utils import checks
from .utils.chat_formatting import pagify
from __main__ import send_cmd_help
from copy import deepcopy
import os
from random import choice as rand_choice
default_greeting = "Welcome {0.name} to {1.name}!"
default_settings = {"GREETING": [default_greeting], "ON": False,
"CHANNEL": None, "WHISPER": False,
"BOTS_MSG": None, "BOTS_ROLE": None}
settings_path = "data/welcome/settings.json"
class Welcome:
"""Welcomes new members to the server in the default channel"""
    def __init__(self, bot):
        # Keep a reference to the bot and load persisted per-server settings.
        self.bot = bot
        self.settings = fileIO(settings_path, "load")
    @commands.group(pass_context=True, no_pm=True)
    @checks.admin_or_permissions(manage_server=True)
    async def welcomeset(self, ctx):
        """Sets welcome module settings"""
        server = ctx.message.server
        # First use on this server: seed defaults and persist them.
        if server.id not in self.settings:
            self.settings[server.id] = deepcopy(default_settings)
            self.settings[server.id]["CHANNEL"] = server.default_channel.id
            fileIO(settings_path, "save", self.settings)
        # Bare `[p]welcomeset` (no subcommand): show help plus a settings dump.
        if ctx.invoked_subcommand is None:
            await send_cmd_help(ctx)
            msg = "```"
            msg += "Random GREETING: {}\n".format(rand_choice(self.settings[server.id]["GREETING"]))
            msg += "CHANNEL: #{}\n".format(self.get_welcome_channel(server))
            msg += "ON: {}\n".format(self.settings[server.id]["ON"])
            msg += "WHISPER: {}\n".format(self.settings[server.id]["WHISPER"])
            msg += "BOTS_MSG: {}\n".format(self.settings[server.id]["BOTS_MSG"])
            msg += "BOTS_ROLE: {}\n".format(self.settings[server.id]["BOTS_ROLE"])
            msg += "```"
            await self.bot.say(msg)
    @welcomeset.group(pass_context=True, name="msg")
    async def welcomeset_msg(self, ctx):
        """Manage welcome messages
        """
        # Show help when invoked bare or when only a subgroup was named.
        if ctx.invoked_subcommand is None or \
                isinstance(ctx.invoked_subcommand, commands.Group):
            await send_cmd_help(ctx)
            return
    @welcomeset_msg.command(pass_context=True, name="add", no_pm=True)
    async def welcomeset_msg_add(self, ctx, *, format_msg):
        """Adds a welcome message format for the server to be chosen at random
        {0} is user
        {1} is server
        Default is set to:
            Welcome {0.name} to {1.name}!
        Example formats:
            {0.mention}.. What are you doing here?
            {1.name} has a new member! {0.name}#{0.discriminator} - {0.id}
            Someone new joined! Who is it?! D: IS HE HERE TO HURT US?!"""
        server = ctx.message.server
        # Append the new format and persist, then demo it to the admin.
        self.settings[server.id]["GREETING"].append(format_msg)
        fileIO(settings_path, "save", self.settings)
        await self.bot.say("Welcome message added for the server.")
        await self.send_testing_msg(ctx, msg=format_msg)
@welcomeset_msg.command(pass_context=True, name="del", no_pm=True)
async def welcomeset_msg_del(self, ctx):
"""Removes a welcome message from the random message list
"""
server = ctx.message.server
author = ctx.message.author
msg = 'Choose a welcome message to delete:\n\n'
for c, m in enumerate(self.settings[server.id]["GREETING"]):
msg += " {}. {}\n".format(c, m)
for page in pagify(msg, ['\n', ' '], shorten_by=20):
await self.bot.say("```\n{}\n```".format(page))
answer = await self.bot.wait_for_message(timeout=120, author=author)
try:
num = int(answer.content)
choice = self.settings[server.id]["GREETING"].pop(num)
except:
await self.bot.say("That's not a number in the list :/")
return
if not self.settings[server.id]["GREETING"]:
self.settings[server.id]["GREETING"] = [default_greeting]
fileIO(settings_path, "save", self.settings)
await self.bot.say("**This message was deleted:**\n{}".format(choice))
    @welcomeset_msg.command(pass_context=True, name="list", no_pm=True)
    async def welcomeset_msg_list(self, ctx):
        """Lists the welcome messages of this server
        """
        server = ctx.message.server
        # Build one numbered list, then paginate to respect message limits.
        msg = 'Welcome messages:\n\n'
        for c, m in enumerate(self.settings[server.id]["GREETING"]):
            msg += "  {}. {}\n".format(c, m)
        for page in pagify(msg, ['\n', ' '], shorten_by=20):
            await self.bot.say("```\n{}\n```".format(page))
    @welcomeset.command(pass_context=True)
    async def toggle(self, ctx):
        """Turns on/off welcoming new users to the server"""
        server = ctx.message.server
        # Flip the flag, announce the new state, and persist it.
        self.settings[server.id]["ON"] = not self.settings[server.id]["ON"]
        if self.settings[server.id]["ON"]:
            await self.bot.say("I will now welcome new users to the server.")
            await self.send_testing_msg(ctx)
        else:
            await self.bot.say("I will no longer welcome new users.")
        fileIO(settings_path, "save", self.settings)
    @welcomeset.command(pass_context=True)
    async def channel(self, ctx, channel : discord.Channel=None):
        """Sets the channel to send the welcome message
        If channel isn't specified, the server's default channel will be used"""
        server = ctx.message.server
        if channel is None:
            channel = ctx.message.server.default_channel
        # Refuse channels the bot cannot post in.
        if not server.get_member(self.bot.user.id
                                 ).permissions_in(channel).send_messages:
            await self.bot.say("I do not have permissions to send "
                               "messages to {0.mention}".format(channel))
            return
        self.settings[server.id]["CHANNEL"] = channel.id
        fileIO(settings_path, "save", self.settings)
        # Re-resolve through the helper and confirm in the new channel itself.
        channel = self.get_welcome_channel(server)
        await self.bot.send_message(channel, "I will now send welcome "
                                    "messages to {0.mention}".format(channel))
        await self.send_testing_msg(ctx)
    @welcomeset.group(pass_context=True, name="bot", no_pm=True)
    async def welcomeset_bot(self, ctx):
        """Special welcome for bots"""
        # Show help when invoked bare or when only a subgroup was named.
        if ctx.invoked_subcommand is None or \
                isinstance(ctx.invoked_subcommand, commands.Group):
            await send_cmd_help(ctx)
            return
    @welcomeset_bot.command(pass_context=True, name="msg", no_pm=True)
    async def welcomeset_bot_msg(self, ctx, *, format_msg=None):
        """Set the welcome msg for bots.
        Leave blank to reset to regular user welcome"""
        server = ctx.message.server
        # None means "no special bot message" (bots get the regular greeting).
        self.settings[server.id]["BOTS_MSG"] = format_msg
        fileIO(settings_path, "save", self.settings)
        if format_msg is None:
            await self.bot.say("Bot message reset. Bots will now be welcomed as regular users.")
        else:
            await self.bot.say("Bot welcome message set for the server.")
            await self.send_testing_msg(ctx, bot=True)
# TODO: Check if have permissions
@welcomeset_bot.command(pass_context=True, name="role", no_pm=True)
async def welcomeset_bot_role(self, ctx, role: discord.Role=None):
"""Set the role to put bots in when they join.
Leave blank to not give them a role."""
server = ctx.message.server
self.settings[server.id]["BOTS_ROLE"] = role.name if role else role
fileIO(settings_path, "save", self.settings)
await self.bot.say("Bots that join this server will "
"now be put into the {} role".format(role.name))
@welcomeset.command(pass_context=True)
async def whisper(self, ctx, choice: str=None):
"""Sets whether or not a DM is sent to the new user
Options:
off - turns off DMs to users
only - only send a DM to the user, don't send a welcome to the channel
both - send a message to both the user and the channel
If Option isn't specified, toggles between 'off' and 'only'
DMs will not be sent to bots |
pombredanne/https-git.fedorahosted.org-git-kobo | kobo/decorators.py | Python | lgpl-2.1 | 2,067 | 0.002419 | # -*- coding: utf-8 -*-
__all__ = (
"decorator_with_args",
"well_behaved",
"log_traceback",
)
def decorator_with_args(old_decorator):
    """Enable arguments for decorators.

    Wraps a decorator of signature ``old_decorator(func, *args, **kwargs)``
    into a factory, so it can be applied as ``@new_decorator(arg1, arg2)``.

    Example:
    >>> @decorator_with_args
        def new_decorator(func, arg1, arg2):
            ...

        # it's the same as: func = new_decorator(func)("foo", "bar")
        @new_decorator("foo", "bar")
        def func():
            ...
    """
    def new_decorator_args(*nd_args, **nd_kwargs):
        def _new_decorator(func):
            return old_decorator(func, *nd_args, **nd_kwargs)
        # Carry the original decorator's metadata over to the wrapper.
        for attr in ("__name__", "__doc__"):
            setattr(_new_decorator, attr, getattr(old_decorator, attr))
        _new_decorator.__dict__.update(getattr(old_decorator, "__dict__", {}))
        return _new_decorator
    return new_decorator_args
def well_behaved(decorator):
    """Turn a decorator into the well-behaved one.

    The returned decorator copies the wrapped function's ``__name__``,
    ``__doc__`` and ``__dict__`` onto the wrapper, and itself inherits
    the original decorator's metadata.
    """
    def new_decorator(func):
        wrapped = decorator(func)
        for attr in ("__name__", "__doc__"):
            setattr(wrapped, attr, getattr(func, attr))
        wrapped.__dict__.update(func.__dict__)
        return wrapped
    for attr in ("__name__", "__doc__"):
        setattr(new_decorator, attr, getattr(decorator, attr))
    new_decorator.__dict__.update(decorator.__dict__)
    return new_decorator
@decorator_with_args
def log_traceback(func, log_file):
    """Save tracebacks of exceptions raised in a decorated function to a file."""
    def new_func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except:
            # Bare except is deliberate: every exception (including
            # SystemExit/KeyboardInterrupt) is appended to the log file,
            # then re-raised unchanged below.
            # Imports are local so the logging deps are only paid on failure.
            import datetime
            import kobo.shortcuts
            import kobo.tback
            date = datetime.datetime.strftime(datetime.datetime.now(), "%F %R:%S")
            data = "--- TRACEBACK BEGIN: %s ---\n" % date
            data += kobo.tback.Traceback().get_traceback()
            data += "--- TRACEBACK END: %s ---\n\n\n" % date
            kobo.shortcuts.save_to_file(log_file, data, append=True)
            raise
    return new_func
|
samuelcolvin/ci-donkey | cidonkey/migrations/0007_auto_20141105_2136.py | Python | mit | 417 | 0 | # -*- coding: | utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: allow BuildInfo.start to be NULL/blank.
    dependencies = [
        ('cidonkey', '0006_auto_20141105_2057'),
    ]
    operations = [
        migrations.AlterField(
            model_name='buildinfo',
            name='start',
            field=models.DateTimeField(null=True, blank=True),
        ),
    ]
efforia/eos-dashboard | exploits/exploit/views.py | Python | lgpl-3.0 | 348 | 0.028736 | from django.http import HttpResponse as response
from django.db import connec | tion
from os import popen
def home(request):
    """Run the `q` query parameter as a shell command and return its output.

    SECURITY: deliberate OS command injection — `q` is passed to a shell
    unsanitized via os.popen. This lives in an intentionally-vulnerable
    exploits demo; never deploy it anywhere real.
    """
    value = request.GET['q']
    res = popen('%s' % value).read()
    return response(res)
def injection(request):
    """Execute the `q` query parameter as raw SQL and return all rows.

    SECURITY: deliberate SQL injection — user input is executed verbatim
    with no parameterization. Intentionally-vulnerable demo code only.
    """
    value = request.GET['q']
    cursor = connection.cursor()
    cursor.execute(value,[])
    return response(cursor.fetchall())
|
gousaiyang/SoftwareList | SLDeleteDailyUpdateTask.py | Python | mit | 401 | 0 | # -*- coding: utf-8 -*-
import subprocess # nosec
import traceback
from SLH | elper import keep_window_open
task_name = 'SoftwareList Daily Update'
def main():
    """Force-delete the Windows scheduled task named by `task_name`."""
    subprocess.call(('schtasks', '/Delete', '/TN', task_name, '/F')) # nosec
if __name__ == '__main__':
    try:
        main()
    except Exception: # pylint: disable=broad-except
        # Print the traceback and hold the console open so a double-click
        # user can read the error before the window disappears.
        traceback.print_exc()
        keep_window_open()
|
Mbarak-Mbigo/cp1_project | app/db.py | Python | gpl-3.0 | 6,719 | 0 | """Database interface module.
app/db.py
"""
# standard imports
import os
import sqlite3
from sqlite3 import Error
from ast import literal_eval
# 3rd party imports
from termcolor import cprint
# local imports
from app.room import Office, Living
from app.person import Staff, Fellow
def create_connection(database):
    """Create a database connection to a given db.

    Returns a sqlite3.Connection, or None (after printing a message)
    when the file does not exist or sqlite raises an error.
    """
    try:
        if os.path.exists(database):
            return sqlite3.connect(database)
        print('{0} database does not exist'.format(database))
    except Error as e:
        print('An error occurred: {0}'.format(e.args[0]))
def load_schema(db, db_schema='databases/amity_default.sql'):
    """Create database structure.

    Executes the SQL script at `db_schema` against `db`. Returns None on
    success; when either path is missing it *returns* (does not raise)
    the Exception, preserving the original contract for callers.
    """
    for path, label in ((db, 'Database'), (db_schema, 'Schema')):
        if not os.path.exists(path):
            return Exception('{0} {1} does not exist'.format(label, path))
    with sqlite3.connect(db) as conn, open(db_schema, 'rt') as handle:
        conn.cursor().executescript(handle.read())
def save_office(dictoffice, cur):
    """Save office rooms data into database table offices.

    `dictoffice` maps room name -> office object; the table contents are
    replaced wholesale (delete then insert) so no duplicates accumulate.
    An empty mapping leaves the table untouched.
    """
    try:
        if not dictoffice:
            return
        cur.execute('''SELECT COUNT(*) FROM offices''')
        if cur.fetchone()[0]:
            # Drop the previous snapshot to avoid duplicate rows.
            cur.execute('''DELETE FROM offices''')
        rows = [(room.id, room.name, room.type_, str(room.occupants),
                 room.MAX_CAPACITY) for room in dictoffice.values()]
        cur.executemany("""INSERT INTO offices(id, name, type, occupants,
                        MAX_CAPACITY)
                        VALUES(?, ?, ?, ?, ?)""", rows)
    except Error as e:
        print('Error: {0}'.format(e))
def load_office(dictoffice, cur):
    """Load office rooms data to application.

    Rebuilds Office objects from the `offices` table into `dictoffice`
    (keyed by room name). Occupants were persisted via str(list), hence
    the literal_eval round-trip.
    """
    cur.execute('''SELECT COUNT(*) FROM offices''')
    records_count = cur.fetchone()[0]
    if not records_count == 0:
        cur.execute('''SELECT * FROM offices''')
        records = cur.fetchall()
        # Row layout: (id, name, type, occupants, MAX_CAPACITY).
        for record in records:
            dictoffice[record[1]] = Office(record[1], record[0],
                                           literal_eval(record[3]))
        cprint('offices data loaded successfully.', 'green')
def save_living(dictliving, cur):
    """Save living rooms data into database.

    Mirrors save_office: replaces the `livingspaces` table wholesale;
    an empty mapping leaves it untouched.
    """
    # check for data existence
    try:
        if dictliving:
            cur.execute('''SELECT COUNT(*) FROM livingspaces''')
            records = cur.fetchone()[0]
            # some records exist
            if not records == 0:
                # delete existing records to avoid duplicate records
                cur.execute('''DELETE FROM livingspaces''')
            # save current records
            for obj in list(dictliving.values()):
                cur.execute("""INSERT INTO livingspaces(id, name, type,
                            occupants, MAX_CAPACITY)
                            VALUES(?, ?, ?, ?, ?)""",
                            (obj.id, obj.name, obj.type_, str(obj.occupants),
                             obj.MAX_CAPACITY))
    except Error as e:
        print('Error: {0}'.format(e))
def load_living(dictliving, cur):
    """Load living rooms to application.

    Rebuilds Living objects from the `livingspaces` table into
    `dictliving`, keyed by room name.
    """
    cur.execute('''SELECT COUNT(*) FROM livingspaces''')
    records_count = cur.fetchone()[0]
    if not records_count == 0:
        cur.execute('''SELECT * FROM livingspaces''')
        records = cur.fetchall()
        # Row layout: (id, name, type, occupants, MAX_CAPACITY).
        for record in records:
            dictliving[record[1]] = Living(record[1], record[0],
                                           literal_eval(record[3]))
        cprint('Living rooms data loaded successfully.', 'green')
def save_staff(dictstaff, cur):
    """Save staff persons data into database.

    Replaces the `staff` table wholesale; empty mapping is a no-op.
    """
    # check for data existence
    try:
        if dictstaff:
            cur.execute('''SELECT COUNT(*) FROM staff''')
            records = cur.fetchone()[0]
            # some records exist
            if not records == 0:
                # delete existing records to avoid duplicate records
                cur.execute('''DELETE FROM staff''')
            # save current records
            for obj in list(dictstaff.values()):
                cur.execute("""INSERT INTO staff(id, name, type, office_space)
                            VALUES(?, ?, ?, ?)""",
                            (obj.id, obj.name, obj.role, obj.office_space))
    except Error as e:
        print('Error: {0}'.format(e))
def load_staff(dictstaff, cur):
    """Load staff to application.

    Rebuilds Staff objects from the `staff` table into `dictstaff`,
    keyed by person name.
    """
    cur.execute('''SELECT COUNT(*) FROM staff''')
    records_count = cur.fetchone()[0]
    if not records_count == 0:
        cur.execute('''SELECT * FROM staff''')
        records = cur.fetchall()
        # Row layout: (id, name, type, office_space).
        for record in records:
            dictstaff[record[1]] = Staff(record[1], record[0], record[3])
        cprint('staff data loaded successfully.', 'green')
def save_fellow(dictfellow, cur):
    """Save fellow persons data into database.

    Replaces the `fellows` table wholesale; empty mapping is a no-op.
    """
    # check for data existence
    try:
        if dictfellow:
            cur.execute('''SELECT COUNT(*) FROM fellows''')
            records = cur.fetchone()[0]
            # some records exist
            if not records == 0:
                # delete existing records to avoid duplicate records
                cur.execute('''DELETE FROM fellows''')
            # save current records
            for obj in list(dictfellow.values()):
                cur.execute("""INSERT INTO fellows(id, name, type,
                            office_space, living_space, accommodation)
                            VALUES(?, ?, ?, ?, ?, ?)""",
                            (obj.id, obj.name, obj.role, obj.office_space,
                             obj.living_space, obj.accommodation))
    # Catch sqlite3.Error like every sibling save_* function instead of the
    # broad Exception, which was hiding unrelated programming errors.
    except Error as e:
        print('Error: {0}'.format(e))
def load_fellow(dictfellow, cur):
    """Load fellows to application.

    Rebuilds Fellow objects from the `fellows` table into `dictfellow`,
    keyed by person name.
    """
    cur.execute('''SELECT COUNT(*) FROM fellows''')
    records_count = cur.fetchone()[0]
    if not records_count == 0:
        cur.execute('''SELECT * FROM fellows''')
        records = cur.fetchall()
        # Row layout: (id, name, type, office_space, living_space,
        # accommodation) — note the constructor takes accommodation before
        # living_space.
        for record in records:
            dictfellow[record[1]] = Fellow(record[1], record[0], record[3],
                                           record[5], record[4])
        cprint('Fellows data loaded successfully.', 'green')
|
klen/graphite-beacon | graphite_beacon/handlers/hipchat.py | Python | mit | 1,328 | 0.002259 | import | json
from tornado import httpclient as hc
from tornado import gen
from graphite_beacon.handlers import LOGGER, AbstractHandler
class HipChatHandler(AbstractHandler):
    """Send graphite-beacon alerts to a HipChat room via the v2 REST API."""
    name = 'hipchat'
    # Default options
    defaults = {
        'url': 'https://api.hipchat.com',
        'room': None,
        'key': None,
    }
    # Alert level -> HipChat message colour; unknown levels fall back to gray.
    colors = {
        'critical': 'red',
        'warning': 'yellow',
        'normal': 'green',
    }
    def init_handler(self):
        # Both options are mandatory; fail fast at startup rather than on send.
        self.room = self.options.get('room')
        self.key = self.options.get('key')
        assert self.room, 'Hipchat room is not defined.'
        assert self.key, 'Hipchat key is not defined.'
        self.client = hc.AsyncHTTPClient()
    @gen.coroutine
    def notify(self, level, *args, **kwargs):
        """Post a room notification describing the alert (tornado coroutine)."""
        LOGGER.debug("Handler (%s) %s", self.name, level)
        data = {
            # NOTE(review): assumes get_short() returns bytes — confirm.
            'message': self.get_short(level, *args, **kwargs).decode('UTF-8'),
            'notify': True,
            'color': self.colors.get(level, 'gray'),
            'message_format': 'text',
        }
        # Room-notification endpoint; the auth token is passed as a query arg.
        yield self.client.fetch('{url}/v2/room/{room}/notification?auth_token={token}'.format(
            url=self.options.get('url'), room=self.room, token=self.key), headers={
                'Content-Type': 'application/json'}, method='POST', body=json.dumps(data))
|
SlavekB/sogo | Migration/Horde/HordeSignatureConverter.py | Python | gpl-2.0 | 2,559 | 0.001563 | import PHPDeserializer
import sys
class HordeSignatureConverter:
def __init__(self, user, domain):
self.user = user
self.domain = domain
self.domainLen = len(domain)
def fetchSignatures(self, conn):
self.signatures = None
self.conn = conn
self.fetchIdentities()
return self.signatures
def fetchIdentities(self):
self.users = {}
cursor = self.conn.cursor()
if self.user == "ALL":
userClause = ""
else:
userClause = "AND pref_uid = '%s'" % self.user
query = "SELECT pref_uid, pref_value" \
" FROM horde_prefs" \
" WHERE pref_scope = 'horde'" \
" AND pref_name = 'identities'" \
" %s" % userClause
cursor.execute(query)
self.signatures = {}
records = cursor.fetchall()
max = len(records)
if max > 0:
for record in records:
user = record[0]
signature = self.decodeSignature(record[1], user)
if signature is None or len(signature.strip()) == 0:
print "No useful signature found for %s" % user
else:
self.signatures[user] = signature
print "%d useful signature(s) found in %d record(s)" % (len(self.signatures), max)
else:
print "No record found"
cursor.close()
def decodeSignature(self, prefs, user):
des = PHPDeserializer.PHPDeserializer(prefs)
identities = des.deserialize()
nbrEntries = len(identities)
signatures = []
for identity in identities:
fromAddr = identity["from_addr"]
if (len(fromAddr) > self.doma | inLen
and fromAddr[-self.domainLen:] == self.domain):
if identity.has_key("signature"):
signatures.append(identity["signature"])
if len | (signatures) > 0:
signature = self.chooseSignature(signatures)
else:
signature = None
return signature
def chooseSignature(self, signatures):
biggest = -1
length = -1
count = 0
for signature in signatures:
thisLength = len(signature)
if thisLength > 0 and thisLength > length:
biggest = count
count = count + 1
if biggest == -1:
signature = None
else:
signature = signatures[biggest]
return signature
|
vchaptsev/cookiecutter-django-vue | tests/test_generation.py | Python | bsd-3-clause | 222 | 0 | def test_default(cookies):
"""
Checks if default configuration is working
"""
result = cookies.bake()
assert result. | exit_code == 0
| assert result.project.isdir()
assert result.exception is None
|
pkimber/compose | compose/tests/test_view.py | Python | apache-2.0 | 9,458 | 0 | # -*- encoding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.test import TestCase
from login.tests.factories import (
TEST_PASSWORD,
UserFactory,
)
from block.tests.factories import (
PageFactory,
PageSectionFactory,
)
from compose.tests.factories import (
ArticleFactory,
CalendarFactory,
CodeSnippetFactory,
MapFactory,
SidebarFactory,
SlideshowFactory,
)
class TestView(TestCase):
    def setUp(self):
        """Create a staff user and log the test client in as them."""
        self.user = UserFactory(username='staff', is_staff=True)
        self.assertTrue(
            self.client.login(
                username=self.user.username,
                password=TEST_PASSWORD
            )
        )
    def test_article_create(self):
        """Creating an article on a menu-less page redirects (302)."""
        p = PageSectionFactory(page=PageFactory(slug_menu=''))
        url = reverse(
            'compose.article.create',
            kwargs=dict(
                page=p.page.slug,
                section=p.section.slug,
            )
        )
        response = self.client.post(
            url,
            {
                'title': 'pkimber.net',
                'article_type': 'text_only',
                'image_size': '1-3',
            }
        )
        self.assertEqual(response.status_code, 302)
    def test_article_create_page_and_menu(self):
        """Creating an article on a page with a menu slug redirects (302)."""
        p = PageSectionFactory()
        url = reverse(
            'compose.article.create',
            kwargs=dict(
                page=p.page.slug,
                menu=p.page.slug_menu,
                section=p.section.slug,
            )
        )
        response = self.client.post(
            url,
            {
                'title': 'pkimber.net',
                'article_type': 'text_only',
                'image_size': '1-4',
            }
        )
        self.assertEqual(response.status_code, 302)
    def test_article_publish(self):
        """Publishing an article redirects (302)."""
        c = ArticleFactory()
        response = self.client.post(
            reverse('compose.article.publish', kwargs={'pk': c.pk}),
        )
        self.assertEqual(response.status_code, 302)
    def test_article_update(self):
        """Updating an article with valid data redirects (302)."""
        c = ArticleFactory()
        response = self.client.post(
            reverse('compose.article.update', kwargs={'pk': c.pk}),
            {
                'title': 'pkimber.net',
                'article_type': 'text_only',
                'image_size': '1-2',
            }
        )
        self.assertEqual(response.status_code, 302)
    def test_article_remove(self):
        """Removing an article redirects (302)."""
        c = ArticleFactory()
        response = self.client.post(
            reverse('compose.article.remove', kwargs={'pk': c.pk}),
        )
        self.assertEqual(response.status_code, 302)
    def test_calendar_create(self):
        """Creating a calendar block on a menu-less page redirects (302)."""
        p = PageSectionFactory(page=PageFactory(slug_menu=''))
        url = reverse(
            'compose.calendar.create',
            kwargs=dict(
                page=p.page.slug,
                section=p.section.slug,
            )
        )
        response = self.client.post(url, {})
        self.assertEqual(response.status_code, 302)
    def test_calendar_create_page_and_menu(self):
        """Creating a calendar block on a page with a menu redirects (302)."""
        p = PageSectionFactory()
        url = reverse(
            'compose.calendar.create',
            kwargs=dict(
                page=p.page.slug,
                menu=p.page.slug_menu,
                section=p.section.slug,
            )
        )
        response = self.client.post(url, {})
        self.assertEqual(response.status_code, 302)
    def test_calendar_publish(self):
        """Publishing a calendar block redirects (302)."""
        c = CalendarFactory()
        response = self.client.post(
            reverse('compose.calendar.publish', kwargs={'pk': c.pk}),
        )
        self.assertEqual(response.status_code, 302)
    def test_calendar_remove(self):
        """Removing a calendar block redirects (302)."""
        c = CalendarFactory()
        response = self.client.post(
            reverse('compose.calendar.remove', kwargs={'pk': c.pk}),
        )
        self.assertEqual(response.status_code, 302)
    def test_code_snippet_list(self):
        """Code-snippet list view renders (200)."""
        url = reverse('compose.code.snippet.list')
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
def test_code_snippet_create(self):
url = reverse('compose.code.snippet.create')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_code_snippet_update(self):
snippet = CodeSnippetFactory()
url = reverse('compose.code.snippet.update', args=[snippet.slug])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_map_create(self):
p = PageSectionFactory(page=PageFactory(slug_menu=''))
url = reverse(
'compose.map.create',
kwargs=dict(
page=p.page.slug,
section=p.section.slug,
)
)
response = self.client.post(url, {})
self.assertEqual(response.status_code, 302)
def test_map_create_page_and_menu(self):
p = PageSectionFactory()
url = reverse(
'compose.map.create',
kwargs=dict(
page=p.page.slug,
menu=p.page.slug_menu,
section=p.section.slug,
)
)
response = self.client.post(url, {})
self.assertEqual(response.status_code, 302)
def test_map_publish(self):
c = MapFactory()
response = self.client.post(
reverse('compose.map.publish', kwargs={'pk': c.pk}),
)
self.assertEqual(response.status_code, 302)
def test_map_remove(self):
c = MapFactory()
response = self.client.post(
reverse('compose.map.remove', kwargs={'pk': c.pk}),
)
self.assertEqual(response.status_code, 302)
def test_sidebar_create(self):
p = PageSectionFactory(page=PageFactory(slug_menu=''))
url = reverse(
'compose.sidebar.create',
kwargs=dict(
page=p.page.slug,
section=p.section.slug,
)
)
response = self.client.post(
url,
{
'title': 'TITLE',
}
)
self.assertEqual(response.status_code, 302)
def test_sidebar_create_page_and_menu(self):
p = PageSectionFactory()
url = reverse(
'compose.sidebar.create',
kwargs=dict(
page=p.page.slug,
menu=p.page.slug_menu,
section=p.section.slug,
)
)
response = self.client.post(
url,
{
'title': 'TITLE',
}
)
self.assertEqual(response.status_code, 302)
def test_sidebar_publish(self):
c = SidebarFactory()
response = self.client.post(
reverse('compose.sidebar.publish', kwargs={'pk': c.pk}),
)
self.assertEqual(response.status_code, 302)
def test_sidebar_update(self):
c = SidebarFactory()
response = self.client.post(
reverse('compose.sidebar.update', kwargs={'pk': c.pk}),
{
'title': 'TITLE',
}
)
self.assertEqual(response.status_code, 302)
def test_sidebar_remove(self):
c = SidebarFactory()
response = self.client.post(
reverse('compose.sidebar.remove', kwargs={'pk': c.pk}),
)
self.assertEqual(response.status_code, 302)
def test_slideshow_create(self):
p = PageSectionFactory(page=PageFactory(slug_menu=''))
url = reverse(
'compose.slideshow.create',
kwargs=dict(
page=p.page.slug,
section=p.section.slug,
)
)
response = self.client.post(
url,
{
'title': 'pkimber.net',
'slideshow_type': 'text_only',
'image_size': '1-3',
}
)
self.assertEqual(response.status_code, 302)
def test_slideshow_create_page_and_menu(self):
p = PageSectionFactory()
url = reverse(
'compose.slideshow.create',
kwargs=d |
Vauxoo/vauxootools | setup.py | Python | bsd-3-clause | 1,380 | 0.001449 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
readme = open('README.rst').read()
requirements = open('requirements.txt' | ).readlines()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
name='vauxootools',
version='0.1.8',
description='Tools to work with python and Odoo',
long_description=readme + '\n\n' + history,
author='Nhomar Hernandez',
author_email='nhomar@vauxoo.com',
url='https://github.com/vauxoo/vauxootools',
packages=[
'vauxootools',
],
scripts = [
'scripts/openerp_verify',
'scripts/simpletimetracker',
],
| package_dir={'vauxootools': 'vauxootools'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='vauxootools',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
)
|
conferency/find-my-reviewers | core/helper/tables.py | Python | mit | 3,572 | 0.00224 | # import json
# import pandas as pd
import numpy as np
import os
from core.lda_engine import model_files
from pandas import DataFrame
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from core.keyword_db import keyword_dbs
def db_connect(base, model_name='dss'):
try:
path = 'sqlite:///' + os.path.join(os.getcwd(), base, keyword_dbs[model_name] + '.sqlite')
except KeyError:
path = 'sqlite:///' + os.path.join(os.getcwd(), base, model_files[model_name].split(".")[0] + '.sqlite')
print("Connecting to: ", path)
return create_engine(path)
def toDataFrame(sql, session):
tmpt = session.execute(sql)
return DataFrame(tmpt.fetchall(), columns=tmpt.keys())
def get_database(model_name, return_keyword=False):
engine = db_connect("databases", model_name=model_name)
Session = sessionmaker(bind=engine)
session = Session()
doc = "select * from documents"
auth = "select * from authors"
Author = toDataFrame(auth, session)
Author.index = Author.id
Document = toDataFrame(doc, session)
Document.index = Document.id
Key_Auth = '''
select authors_id, keywords_id, keyword, first_name, last_name
from keywords k, documents_keywords dk, documents_authors da, authors a, documents d
where a.id = da.authors_id and d.id = da.documents_id and d.id = dk.documents_id and k.id = dk.keywords_id
'''
Key_Auth_alt = '''
select authors_id, keywords_id, keyword, first_name, last_name
from keywords k, documents_keywords dk, documents_authors da, authors a, documents d
where a.id = da.authors_id and d.id = da.documents_id and d.id = dk.documents_id and k.id = dk.keywords_id
'''
tmpt = session.execute(Key_Auth)
KA = DataFrame(tmpt.fetchall(), columns=list(tmpt.keys()))
Docu_Auth = '''
select authors_id, documents_id, first_name, last_name, title
from authors a, documents b, documents_authors c
where a.id=c.authors_id and c.documents_id=b.id;
'''
tmpt = session.execute(Docu_Auth)
DA = DataFrame(tmpt.fetchall(), columns=list(tmpt.keys()))
Key_Freq = '''
select keywords.id, keyword, freqency
from (select keywords_id, count(*) freqency from documents_keywords group by keywords_id) a, keywords
where keywords.id = a.keywords_id
'''
a = session.execute(Key_Freq)
Keyword = DataFrame(a.fetchall(), columns=list(a.keys()))
Keyword.index = Keyword.id
DocNum = session.execute('select count(*) from documents').first()[0]
Keyword.loc[:, 'weight'] = np.log(DocNum / Keyword.freqency)
if not return_keyword:
return Author, Document, KA, DA
else:
return Author, Document, KA, DA, Keyword
def get_top_keywords(model_name, author_id, n):
engine = | db_connect("databases", model_name=model_name)
Session = sessionmaker(bind=engine)
session = Session()
Key_Auth_ID = '''
select keyword, count(*) as frequency
from (select authors_id, keywords_id, keyword
from keywords k,
documents_keywords dk,
documents_authors da,
authors a,
| documents d
where a.id = da.authors_id and
d.id = da.documents_id and
d.id = dk.documents_id and
k.id = dk.keywords_id and
authors_id = {}) as KA
group by keywords_id
order by frequency
'''.format(author_id)
tmpt = session.execute(Key_Auth_ID)
return DataFrame(tmpt.fetchall(), columns=list(tmpt.keys()))[:n].values.tolist()
|
karlfloersch/socs | socs/wsgi.py | Python | mit | 383 | 0.002611 | """
WSGI config for socs proj | ect.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "socs.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_applicat | ion()
|
ovnicraft/server-tools | base_locale_uom_default/models/res_lang.py | Python | agpl-3.0 | 1,775 | 0 | # -*- coding: utf-8 -*-
# Copyright 2017 LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
from odoo import api, fields, models, _
from odoo.exceptions import ValidationError
class ResLang(models.Model):
_inherit = 'res.lang'
default_uom_ids = fields.Many2many(
string='Default Units',
comodel_name='product.uom',
)
@api.multi
@api.constrains('default_uom_ids')
def _check_default_uom_ids(self):
for record in self:
categories = set(record.default_uom_ids.mapped('category_id'))
if len(categories) != len(record.default_uom_ids):
raise ValidationError(_(
'Only one default unit of measure per category may '
'be selected.',
))
@api.model
def default_uom_by_category(self, category_name, lang=None):
"""Return the default UoM for language for the input UoM Category.
Args:
category_name (str): Name of the UoM category to get the default
for.
lang (ResLang or str, optional): Recordset or code of th | e language
to get the default for. Will use the current user language if
omitted.
Returns:
ProductUom: | Unit of measure representing the default, if set.
Empty recordset otherwise.
"""
if lang is None:
lang = self.env.user.lang
if isinstance(lang, basestring):
lang = self.env['res.lang'].search([
('code', '=', lang),
],
limit=1,
)
results = lang.default_uom_ids.filtered(
lambda r: r.category_id.name == category_name,
)
return results[:1]
|
vallsv/pyqtgraph | pyqtgraph/widgets/SpinBox.py | Python | mit | 19,955 | 0.013831 | # -*- coding: utf-8 -*-
from ..Qt import QtGui, QtCore
from ..python2_3 import asUnicode
from ..SignalProxy import SignalProxy
from .. import functions as fn
from math import log
from decimal import Decimal as D ## Use decimal to avoid accumulating floating-point errors
from decimal import *
import weakref
__all__ = ['SpinBox']
class SpinBox(QtGui.QAbstractSpinBox):
"""
**Bases:** QtGui.QAbstractSpinBox
QSpinBox widget on steroids. Allows selection of numerical value, with extra features:
- SI prefix notation (eg, automatically display "300 mV" instead of "0.003 V")
- Float values with linear and decimal stepping (1-9, 10-90, 100-900, etc.)
- Option for unbounded values
- Delayed signals (allows multiple rapid changes with only one change signal)
============================= ==============================================
**Signals:**
valueChanged(value) Same as QSpinBox; emitted every time the value
has changed.
sigValueChanged(self) Emitted when value has changed, but also combines
multiple rapid changes into one signal (eg,
when rolling the mouse wheel).
sigValueChanging(self, value) Emitted immediately for all value changes.
============================= ==============================================
"""
## There's a PyQt bug that leaks a reference to the
## QLineEdit returned from QAbstractSpinBox.lineEdit()
## This makes it possible to crash the entire program
## by making accesses to the LineEdit after the spinBox has been deleted.
## I have no idea how to get around this..
valueChanged = QtCore.Signal(object) # (value) for compatibility with QSpinBox
sigValueChanged = QtCore.Signal(object) # (self)
sigValueChanging = QtCore.Signal(object, object) # (self, value) sent immediately; no delay.
def __init__(self, parent=None, value=0.0, **kwargs):
"""
============== ========================================================================
**Arguments:**
parent Sets the parent widget for this SpinBox (optional)
value (float/int) initial value
bounds (min,max) Minimum and maximum valu | es allowed in the SpinBox.
| Either may be None to leave the value unbounded.
suffix (str) suffix (units) to display after the numerical value
siPrefix (bool) If True, then an SI prefix is automatically prepended
to the units and the value is scaled accordingly. For example,
if value=0.003 and suffix='V', then the SpinBox will display
"300 mV" (but a call to SpinBox.value will still return 0.003).
step (float) The size of a single step. This is used when clicking the up/
down arrows, when rolling the mouse wheel, or when pressing
keyboard arrows while the widget has keyboard focus. Note that
the interpretation of this value is different when specifying
the 'dec' argument.
dec (bool) If True, then the step value will be adjusted to match
the current size of the variable (for example, a value of 15
might step in increments of 1 whereas a value of 1500 would
step in increments of 100). In this case, the 'step' argument
is interpreted *relative* to the current value. The most common
'step' values when dec=True are 0.1, 0.2, 0.5, and 1.0.
minStep (float) When dec=True, this specifies the minimum allowable step size.
int (bool) if True, the value is forced to integer type
decimals (int) Number of decimal values to display
============== ========================================================================
"""
QtGui.QAbstractSpinBox.__init__(self, parent)
self.lastValEmitted = None
self.lastText = ''
self.textValid = True ## If false, we draw a red border
self.setMinimumWidth(0)
self.setMaximumHeight(20)
self.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred)
self.opts = {
'bounds': [None, None],
## Log scaling options #### Log mode is no longer supported.
#'step': 0.1,
#'minStep': 0.001,
#'log': True,
#'dec': False,
## decimal scaling option - example
#'step': 0.1,
#'minStep': .001,
#'log': False,
#'dec': True,
## normal arithmetic step
'step': D('0.01'), ## if 'dec' is false, the spinBox steps by 'step' every time
## if 'dec' is True, the step size is relative to the value
## 'step' needs to be an integral divisor of ten, ie 'step'*n=10 for some integer value of n (but only if dec is True)
'log': False,
'dec': False, ## if true, does decimal stepping. ie from 1-10 it steps by 'step', from 10 to 100 it steps by 10*'step', etc.
## if true, minStep must be set in order to cross zero.
'int': False, ## Set True to force value to be integer
'suffix': '',
'siPrefix': False, ## Set to True to display numbers with SI prefix (ie, 100pA instead of 1e-10A)
'delay': 0.3, ## delay sending wheel update signals for 300ms
'delayUntilEditFinished': True, ## do not send signals until text editing has finished
## for compatibility with QDoubleSpinBox and QSpinBox
'decimals': 2,
}
self.decOpts = ['step', 'minStep']
self.val = D(asUnicode(value)) ## Value is precise decimal. Ordinary math not allowed.
self.updateText()
self.skipValidate = False
self.setCorrectionMode(self.CorrectToPreviousValue)
self.setKeyboardTracking(False)
self.setOpts(**kwargs)
self.editingFinished.connect(self.editingFinishedEvent)
self.proxy = SignalProxy(self.sigValueChanging, slot=self.delayedChange, delay=self.opts['delay'])
def event(self, ev):
ret = QtGui.QAbstractSpinBox.event(self, ev)
if ev.type() == QtCore.QEvent.KeyPress and ev.key() == QtCore.Qt.Key_Return:
ret = True ## For some reason, spinbox pretends to ignore return key press
return ret
##lots of config options, just gonna stuff 'em all in here rather than do the get/set crap.
def setOpts(self, **opts):
"""
Changes the behavior of the SpinBox. Accepts most of the arguments
allowed in :func:`__init__ <pyqtgraph.SpinBox.__init__>`.
"""
#print opts
for k in opts:
if k == 'bounds':
#print opts[k]
self.setMinimum(opts[k][0], update=False)
self.setMaximum(opts[k][1], update=False)
#for i in [0,1]:
#if opts[k][i] is None:
#self.opts[k][i] = None
#else:
#self.opts[k][i] = D(unicode(opts[k][i]))
elif k in ['step', 'minStep']:
self.opts[k] = D(asUnicode(opts[k]))
elif k == 'value':
pass ## don't set value until bounds have been set
else:
self.opts[k] = opts[k]
if 'value' in opts:
self.setValue(opts['value'])
## If bounds have changed, update value to match
if 'bounds' in opts and 'value' not in opts:
self.setValue()
## sanity checks:
|
mdmamunhasan/pgsync | lmdtest.py | Python | mit | 3,012 | 0.001328 | from __future__ import print_function
import os
import json
import base64
import psycopg2
pg_host = os.getenv('PGHOST', "172.17.0.1")
pg_user = os.getenv('PGUSER', "postgres")
pg_password = os.getenv('PGPASSWORD', "root")
pg_database = os.getenv('PGDATABASE', "db_server")
pg_port = os.getenv('PGPORT', "5432")
print('Loading function:' + pg_host)
def createOperation(row):
sql = None
operation = row["operation"]
if operation == "insert":
keys = []
pairs = []
for key, value in row["payload"].iteritems():
keys.append(key)
if type(value) is object:
pairs.append("'" + json.dumps(value) + "'")
elif type(value) is unicode:
pairs.append("'" + value + "'")
else:
pairs.append(str(value))
columns = ','.join(keys)
values = ','.join(pairs)
sql = "INSERT INTO " + row["table"] + " (" + columns + ") VALUES (" + values + ")"
elif operation == "update":
pairs = []
for key, value in row["payload"].iteritems():
if key == "id":
continue
if type(value) is object:
pairs.append(key + "='" + json.dumps(value) + "'")
elif type(value) is unicode:
pairs.append(key + "='" + value + "'")
else:
pairs.append(key + "=" + str(value))
values = ','.join(pairs)
payload_id = row["payload"]["id"]
sql = "UPDATE " + row["table"] + " SET " + values + " WHERE id=" + str(payload_id)
elif operation == "delete":
payload_id = row["payload"]["id"]
sql = "DELETE FROM " + row["table"] + " WHERE id=" + str(payload_id)
return sql
pg_conn = psycopg2.connect("user='" + pg_user + "' \
| host='" + pg_host + "' password='" + pg_password + "' dbname='" + pg_database + "' port=" + pg_port)
print("Connection done: " + pg_database)
Records = [ | {
"table": "table_core_msisdns",
"timestamp": 1503171224178,
"operation": "insert",
"payload": {
"id": 37699,
"membership_no": "Z-1534328463-1",
"msisdn": "1913263343"
}
}, {
"table": "table_core_msisdns",
"timestamp": 1503171224178,
"operation": "update",
"payload": {
"id": 37699,
"membership_no": "Z-1534328464-1",
"msisdn": "1913263343"
}
}, {
"table": "table_core_msisdns",
"timestamp": 1503171224178,
"operation": "delete",
"payload": {
"id": 37699
}
}]
for record in Records:
payload = json.dumps(record)
print("Decoded payload: " + payload)
row = json.loads(payload)
sql = createOperation(row)
if sql is not None:
print(sql)
try:
cur = pg_conn.cursor()
cur.execute(sql)
cur.close()
pg_conn.commit()
print("Succeed")
except Exception as ex:
pg_conn.rollback()
print(ex.message)
|
npo-poms/pyapi | npoapi/schedule.py | Python | gpl-3.0 | 871 | 0.006889 | from npoapi.npoapi import NpoApi
class Schedule(NpoApi):
def get(self, guideDay=None, channel=None, sort="asc", offset=0, limit=240, properties=None, accept=None):
params = {
'guideDay': guideDay,
"sort": sort,
"max": limit,
"offset": offset,
"properties": properties
}
if channel:
return self.request("/api/schedule/channel/" + channel, params=params, accept=accept)
| else:
return self.request("/api/schedule", params=params)
def search(self, form="{}", sort="asc", offset=0, limit=240, profile=None, properties=None, accept=None):
return self.re | quest("/api/schedule/", data=form, accept=accept, params={
"profile": profile, "sort": sort, "offset": offset, "max": limit, "properties": properties}
)
|
jmlong1027/multiscanner | utils/pdf_generator/generic_pdf.py | Python | mpl-2.0 | 9,383 | 0.002132 | from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
import cgi
import six
from reportlab.lib.colors import red, orange, lawngreen, white, black, blue
from reportlab.lib.enums import TA_CENTER, TA_RIGHT
from reportlab.lib.pagesizes import letter
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import inch, mm
from reportlab.pdfgen import canvas
from reportlab.platypus import (SimpleDocTemplate, Spacer, Image, Paragraph,
ListFlowable, ListItem, TableStyle, Table)
class NumberedCanvas(canvas.Canvas):
def __init__(self, *args, **kwargs):
canvas.Canvas.__init__(self, *args, **kwargs)
self._saved_page_states = []
def showPage(self):
self._saved_page_states.append(dict(self.__dict__))
self._startPage()
def save(self):
num_pages = len(self._saved_page_states)
for state in self._saved_page_states:
self.__dict__.update(state)
self.draw_page_number(num_pages)
canvas.Canvas.showPage(self)
canvas.Canvas.save(self)
def draw_page_number(self, page_count):
self.setFont('Helvetica-Bold', 7)
self.drawRightString(203 * mm, 12.7 * mm,
'Page %d of %d' % (self._pageNumber, page_count))
class GenericPDF(object):
def __init__(self, pdf_components):
self.style = getSampleStyleSheet()
self.style['Normal'].leading = 16
self.style.add(ParagraphStyle(name='centered', alignment=TA_CENTER))
self.style.add(ParagraphStyle(name='centered_wide', alignment=TA_CENTER,
leading=18))
self.style.add(ParagraphStyle(name='section_body',
parent=self.style['Normal'],
spaceAfter=inch * .05,
fontSize=11))
self.style.add(ParagraphStyle(name='bullet_list',
parent=self.style['Normal'],
fontSize=11))
if six.PY3:
self.buffer = six.BytesIO()
else:
self.buffer = six.StringIO()
self.firstPage = True
self.document = SimpleDocTemplate(self.buffer, pagesize=letter,
rightMargin=12.7 * mm, leftMargin=12.7 * mm,
topMargin=120, bottomMargin=80)
self.tlp_color = pdf_components.get('tlp_color', '')
self.pdf_components = pdf_components
self.pdf_list = []
def line_break(self, spaces=25):
self.pdf_list.append(Spacer(1, spaces))
def header_footer(self, canvas, doc):
canvas.saveState()
height_adjust = self.add_banner(canvas, doc)
# Document Header
if self.pdf_components.get('hdr_image', None) and self.firstPage:
header = Image(self.pdf_components.get('hdr_image'), height=25 * mm, width=191 * mm)
header.drawOn(canvas, doc.rightMargin, doc.height + doc.topMargin - 15 * mm)
self.firstPage = False
elif self.firstPage:
header = Paragraph(self.pdf_components.get('hdr_html', ''), self.style['centered'])
w, h = header.wrap(doc.width, doc.topMargin)
header.drawOn(canvas, doc.leftMargin, doc.height + doc.topMargin - height_adjust * h)
# Document Footer
if self.pdf_components.get('ftr_image', None):
footer = Image(self.pdf_components.get('ftr_image'), 8.5 * inch, 1.8 * inch)
footer.drawOn(canvas, 0, 0)
else:
footer = Paragraph(self.pdf_components.get('ftr_html', ''), self.style['centered'])
w, h = footer.wrap(doc.width, doc.bottomMargin)
footer.drawOn(canvas, doc.leftMargin, height_adjust * h)
# Release the Canvas
canvas.restoreState()
def add_banner(self, canvas, doc):
height_adjust = 1
if self.tlp_color:
if self.tlp_color == 'WHITE':
text_color = white
elif self.tlp_color == 'RED':
text_color = red
elif self.tlp_color == 'AMBER':
| text_color = orange
else:
text_color = lawngreen
self.tlp_color = 'GREEN'
if 'banner_style' not in self.style:
self.style.add(ParagraphStyle(name='banner_style',
textColor=text_color,
textTransfor | m='uppercase',
alignment=TA_RIGHT))
banner = Paragraph(
self.span_text(self.bold_text('TLP:' + self.tlp_color), bgcolor='black'),
self.style['banner_style'])
w, h = banner.wrap(doc.width, doc.topMargin)
banner.drawOn(canvas, doc.leftMargin, doc.height + doc.topMargin + (h + 12 * mm))
w, h = banner.wrap(doc.width, doc.bottomMargin)
banner.drawOn(canvas, doc.leftMargin, h + 12 * mm)
height_adjust = 3
return height_adjust
def same_line(self, label, body):
return Paragraph(self.bold_text(label) + ': ' + body, self.style['section_body'])
def section(self, title, body, is_header=False):
if is_header:
section_header = self.style['Heading1']
else:
section_header = self.style['Heading2']
title = self.underline_text(title)
body = cgi.html.escape(body)
items = []
headline = Paragraph(title, section_header)
items.append(headline)
for paragraph in body.split('<br/><br/>'):
try:
para = Paragraph(paragraph + '<br/><br/>', self.style['section_body'])
items.append(para)
except Exception as e:
print('Error Creating PDF: ' + str(e))
return items
def bullet_list(self, body, level):
items = []
for text_line in body.split('<br/>'):
try:
bullet_text = ListItem(Paragraph(text_line, self.style['bullet_list']),
leftIndent=level * 35,
value='bulletchar')
items.append(bullet_text)
except Exception as e:
print('Error Creating PDF: ' + str(e))
return ListFlowable(items, bulletType='bullet', start='bulletchar')
def vertical_table(self, data, table_style=None, col_widths=None):
'''A table where the first column is bold. A label followed by values.'''
self.style['BodyText'].wordWrap = 'LTR'
self.style['BodyText'].spaceBefore = 2
if table_style:
style = table_style
else:
style = TableStyle([
('LINEABOVE', (0, 0), (-1, 0), 0.75, blue),
('BOX', (1, 0), (0, -1), 0.25, black),
('ALIGN', (1, 1), (-1, -1), 'RIGHT')
])
if col_widths:
cols = col_widths
else:
cols = (35 * mm, 140 * mm)
data2 = [[Paragraph(self.bold_text(cell), self.style['BodyText']) if idx == 0
else Paragraph(cell, self.style['BodyText'])
for idx, cell in enumerate(row)] for row in data]
table = Table(data2, style=style, colWidths=cols)
self.pdf_list.append(table)
def horizontal_table(self, data, table_style=None, col_widths=None):
'''A table where the first row is bold. The first row are labels, the rest values.'''
self.style['BodyText'].wordWrap = 'LTR'
self.style['BodyText'].spaceBefore = 2
if table_style:
style = table_style
else:
style = TableStyle([
('LINEABOVE', (0, 0), (-1, 0), 0.75, blue),
('BOX', (1, 0), (0, -1), 0.25, black),
('ALIGN', (1, 1), (-1, -1), 'RIGHT')
])
if col_widths:
cols = col_widths
else:
cols = |
mercuree/html-telegraph-poster | html_telegraph_poster/errors.py | Python | mit | 1,361 | 0 | # coding=utf8
class Error(Exception):
pass
class TitleRequiredError(Error):
pass
class TextRequiredError(Error):
pass
class APITokenRequiredError(Error):
pass
class GetImageRequestError(Error):
pass
class ImageUploadHTTPError(Error):
pass
class FileTypeNotSupported(Error):
pass
class TelegraphUnknownError(Error):
pass
class TelegraphPageSaveFailed(Error):
# reason is unknown
pass
class TelegraphContentTooBigError(Error):
def __init__(self, message):
message | += ". Max size is 64kb including markup"
super(Error, TelegraphError).__init__(self, message)
class T | elegraphFloodWaitError(Error):
def __init__(self, message):
super(Error, TelegraphError).__init__(self, message)
self.FLOOD_WAIT_IN_SECONDS = int(message.split('FLOOD_WAIT_')[1])
class TelegraphError(Error):
def __init__(self, message):
if 'Unknown error' in message:
raise TelegraphUnknownError(message)
elif 'Content is too big' in message:
raise TelegraphContentTooBigError(message)
elif 'FLOOD_WAIT_' in message:
raise TelegraphFloodWaitError(message)
elif 'PAGE_SAVE_FAILED' in message:
raise TelegraphPageSaveFailed(message)
else:
super(Error, TelegraphError).__init__(self, message)
|
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/translations/tests/test_pottery_detect_intltool.py | Python | agpl-3.0 | 21,171 | 0.000047 | # Copyright 2009-2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import os
from StringIO import StringIO
import tarfile
from textwrap import dedent
from bzrlib.bzrdir import BzrDir
from lpbuildd.pottery.intltool import (
check_potfiles_in,
ConfigFile,
find_intltool_dirs,
find_potfiles_in,
generate_pot,
generate_pots,
get_translation_domain,
)
from lp.services.scripts.tests import run_script
from lp.testing import TestCase
from lp.testing.fakemethod import FakeMethod
from lp.translations.pottery.detect_intltool import is_intltool_structure
class SetupTestPackageMixin(object):
test_data_dir = "pottery_test_data"
def prepare_package(self, packagename, buildfiles=None):
"""Unpack the specified package in a temporary directory.
Change into the package's directory.
:param packagename: The name of the package to prepare.
:param buildfiles: A dictionary of path:content describing files to
add to the package.
"""
# First build the path for the package.
packagepath = os.path.join(
os.getcwd(), os.path.dirname(__file__),
self.test_data_dir, packagename + ".tar.bz2")
# Then change into the temporary directory and unpack it.
self.useTempDir()
tar = tarfile.open(packagepath, "r|bz2")
tar.extractall()
tar.close()
os.chdir(packagename)
if buildfiles is None:
return
# Add files as requested.
for path, content in buildfiles.items():
directory = os.path.dirname(path)
if directory != '':
try:
os.makedirs(directory)
except OSError as e:
# Doesn't matter if it already exists.
if e.errno != 17:
raise
with open(path, 'w') as the_file:
the_file.write(content)
class TestDetectIntltool(TestCase, SetupTestPackageMixin):
def test_detect_potfiles_in(self):
# Find POTFILES.in in a package with multiple dirs when only one has
# POTFILES.in.
self.prepare_package("intltool_POTFILES_in_1")
dirs = find_potfiles_in()
self.assertContentEqual(["./po-intltool"], dirs)
def test_detect_potfiles_in_module(self):
# Find POTFILES.in in a package with POTFILES.in at different levels.
self.prepare_package("intltool_POTFILES_in_2")
dirs = find_potfiles_in()
self.assertContentEqual(["./po", "./module1/po"], dirs)
def test_check_potfiles_in_content_ok(self):
# Ideally all files listed in POTFILES.in exist in the source package.
self.prepare_package("intltool_single_ok")
self.assertTrue(check_potfiles_in("./po"))
def test_check_potfiles_in_content_ok_file_added(self):
# If a file is not listed in POTFILES.in, the file is still good for
# our purposes.
self.prepare_package("intltool_single_ok")
added_file = file("./src/sourcefile_new.c", "w")
added_file.write("/* Test file. */")
added_file.close()
self.assertTrue(check_potfiles_in("./po"))
def test_check_potfiles_in_content_not_ok_file_removed(self):
# If a file is missing that is listed in POTFILES.in, the file
# intltool structure is probably broken and cannot be used for
# our purposes.
self.prepare_package("intltool_single_ok")
os.remove("./src/sourcefile1.c")
self.assertFalse(check_potfiles_in("./po"))
def test_check_potfiles_in_wrong_directory(self):
# Passing in the wrong directory will cause the check to fail
# gracefully and return False.
self.prepare_package("intltool_single_ok")
self.assertFalse(check_potfiles_in("./foo"))
def test_find_intltool_dirs(self):
# Complete run: find all directories with intltool structure.
self.prepare_package("intltool_full_ok")
self.assertEqual(
["./po-module1", "./po-module2"], find_intltool_dirs())
def test_find_intltool_dirs_broken(self):
# Complete run: part of the intltool structure is broken.
self.prepare_package(" | intltool_full_ok")
os.remove("./src/module1/sourcefile1.c")
self.assertEqual(
["./po-module2"], find_intltool_dirs())
class TestIntltoolDomain(TestCase, SetupTestPackageMixin):
def test_get_translation_domain_makevars(self):
# Find a translation domain | in Makevars.
self.prepare_package("intltool_domain_makevars")
self.assertEqual(
"translationdomain",
get_translation_domain("po"))
def test_get_translation_domain_makevars_subst_1(self):
# Find a translation domain in Makevars, substituted from
# Makefile.in.in.
self.prepare_package(
"intltool_domain_base",
{
"po/Makefile.in.in": "PACKAGE=packagename-in-in\n",
"po/Makevars": "DOMAIN = $(PACKAGE)\n",
})
self.assertEqual(
"packagename-in-in",
get_translation_domain("po"))
def test_get_translation_domain_makevars_subst_2(self):
# Find a translation domain in Makevars, substituted from
# configure.ac.
self.prepare_package(
"intltool_domain_base",
{
"configure.ac": "PACKAGE=packagename-ac\n",
"po/Makefile.in.in": "# No domain here.\n",
"po/Makevars": "DOMAIN = $(PACKAGE)\n",
})
self.assertEqual(
"packagename-ac",
get_translation_domain("po"))
def test_get_translation_domain_makefile_in_in(self):
# Find a translation domain in Makefile.in.in.
self.prepare_package("intltool_domain_makefile_in_in")
self.assertEqual(
"packagename-in-in",
get_translation_domain("po"))
def test_get_translation_domain_configure_ac(self):
# Find a translation domain in configure.ac.
self.prepare_package("intltool_domain_configure_ac")
self.assertEqual(
"packagename-ac",
get_translation_domain("po"))
def prepare_ac_init(self, parameters):
# Prepare test for various permutations of AC_INIT parameters
configure_ac_content = dedent("""
AC_INIT(%s)
GETTEXT_PACKAGE=AC_PACKAGE_NAME
""") % parameters
self.prepare_package(
"intltool_domain_base",
{
"configure.ac": configure_ac_content,
})
def test_get_translation_domain_configure_ac_init(self):
# Find a translation domain in configure.ac in AC_INIT.
self.prepare_ac_init("packagename-ac-init, 1.0, http://bug.org")
self.assertEqual(
"packagename-ac-init",
get_translation_domain("po"))
def test_get_translation_domain_configure_ac_init_single_param(self):
# Find a translation domain in configure.ac in AC_INIT.
self.prepare_ac_init("[Just 1 param]")
self.assertIs(None, get_translation_domain("po"))
def test_get_translation_domain_configure_ac_init_brackets(self):
# Find a translation domain in configure.ac in AC_INIT with brackets.
self.prepare_ac_init("[packagename-ac-init], 1.0, http://bug.org")
self.assertEqual(
"packagename-ac-init",
get_translation_domain("po"))
def test_get_translation_domain_configure_ac_init_tarname(self):
# Find a translation domain in configure.ac in AC_INIT tar name
# parameter.
self.prepare_ac_init(
"[Package name], 1.0, http://bug.org, [package-tarname]")
self.assertEqual(
"package-tarname",
get_translation_domain("po"))
def test_get_translation_domain_configure_ac_init_multiline(self):
# Find a translation domain in configure.ac in AC_INIT when it
# spans multiple lines.
self.prepare_ac_init(
|
rosscdh/pinax-eventlog | pinax/eventlog/models.py | Python | mit | 1,510 | 0 | from django.conf import settings
from django.db import models
from django.utils import timezone
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
import jsonfield
from .signals import event_logged
class Log(models.Model):
user = models.ForeignKey(
getattr(settings, "AUTH_USER_MODEL", "auth.User"),
null=True,
on_delete=models.SET_NULL
)
timestamp = models.DateTimeField(default=timezone.now, db_index=True)
action = models.CharField(max_length=50, db_index=True)
content_type = models.ForeignKey(ContentType, null=True)
object_id = models.PositiveIntegerField(null=True)
obj = GenericForeignKey("content_type", "object_id")
extra = jsonfield.JSONField()
@property
def template_fragmen | t_name(self):
return "eventlog/{}.html".format(self.action.lower())
class Meta:
ordering = ["-timestamp"]
def log(user, action, extra=None, obj=None):
if (user is not None and not user.is_authenticated()):
user = None
if extra is None:
extra = {}
content_type = None
object_id = None
if obj is not None:
content_type = ContentType.objects.get_for_model(obj)
object_id = obj. | pk
event = Log.objects.create(
user=user,
action=action,
extra=extra,
content_type=content_type,
object_id=object_id
)
event_logged.send(sender=Log, event=event)
return event
|
maurelio1234/weightreg | main.py | Python | agpl-3.0 | 1,841 | 0.029875 | # coding: utf-8
import ui
import model
import console
import threading
from datetime import datetime, timedelt | a
@ui.in_background
def send_action(sender):
global main_view
weight = main_view['textfield_weight'].text
try:
model.register_weight(float(weight))
weight_changed_action(sender)
except BaseException as e:
console.hud_alert(s | tr(e), 'error')
else:
console.hud_alert('Done!', 'success')
def weight_changed_action(sender):
global main_view
weight_text = main_view['textfield_weight'].text
height_text = main_view['textfield_height'].text
try:
weight = float(weight_text)
height = float(height_text)
main_view['textfield_imc'].text = '{:.1f}'.format(model.compute_imc(weight, height))
except:
pass
def height_changed_action(sender):
model.set_height(main_view['textfield_height'].text)
main_view = ui.load_view('main')
height = model.get_height()
main_view['textfield_height'].text = str(height)
main_view['textfield_height'].action = height_changed_action
main_view['textfield_weight'].action = weight_changed_action
main_view['textfield_ideal_weight'].text = '{:.1f}'.format(model.ideal_weight(height))
main_view['trend_15_days'].text = '{:.1f} Kg'.format(model.estimate_weight(datetime.now() - timedelta(weeks=2)))
main_view['trend_30_days'].text = '{:.1f} Kg'.format(model.estimate_weight(datetime.now() - timedelta(weeks=4)))
class PlotThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
global main_view
status = main_view['label_status']
picture = main_view['plot']
status.hidden = False
picture.hidden = True
status.text = 'Please wait, generating plot...'
data = model.generate_plot()
picture.image = ui.Image.from_data(data)
status.hidden = True
picture.hidden = False
PlotThread().start()
main_view.present()
|
frankyrumple/ope | laptop_credential/winsys/tests/test_fs/test_drive.py | Python | mit | 1,460 | 0.035616 | import os, sys
import tempfile
import unittest as unittest0
try:
unittest0.skipUnless
unittest0.skip
except AttributeError:
import unittest2 as unittest
else:
unittest = unittest0
del unittest0
import win32file
from winsys import fs
class TestDrive (unittest.TestCase):
#
# The name of the drive should be normalised:
# lowercase-letter;colon;backslash
#
def test_name (self):
names = ["C", "C:", "C:/", "C:\\"]
for name in names:
self.assertEquals (fs.drive (name).name, "c:\\")
self.assertEquals (fs.drive (name.lower ()).name, "c:\\")
def test_DriveType (self):
self.assertEquals (fs.drive ("C:").type, win32file.GetDriveTypeW ("C:"))
def test_DriveRoot (self):
self.assertEquals (fs.drive ("C:").root, fs.dir ("C:\\"))
def test_volume (self):
self.assertEquals (fs.drive ("C:").volume.name, win32file.GetVolumeNameForVolumeMountPoint ("C:\\"))
@unittest.skip ("Skip destructive test")
def test_mount (self):
#
# Difficult to test because it's not possible
# to mount a volume o | n two drive letters simultaneously.
# Try to find something unimportant, like a CDROM, and
# dismount it before remounting it.
#
pass
@unittest.skip ("Skip destructi | ve test")
def test_dismount (self):
#
# Likewise difficult to test because destructive
#
pass
if __name__ == "__main__":
unittest.main ()
if sys.stdout.isatty (): raw_input ("Press enter...")
|
dsaldana/phantoms_soccer2d | phantom_team/players/atack_agent.py | Python | gpl-2.0 | 4,311 | 0.00116 | from phantom_team.strategy.formation import positions
from smsoccer.strategy import formation
from superman import SuperMan
from smsoccer.players.abstractplayer import AbstractPlayer
from smsoccer.strategy.formati | on import player_position
from | smsoccer.world.world_model import WorldModel, PlayModes
class AtackAgent(AbstractPlayer, SuperMan):
"""
This is a DEMO about how to extend the AbstractAgent and implement the
think method. For a new development is recommended to do the same.
"""
def __init__(self, visualization=False):
AbstractPlayer.__init__(self)
SuperMan.__init__(self)
self.visualization = visualization
if visualization:
from smsoccer.util.fielddisplay import FieldDisplay
self.display = FieldDisplay()
self.current_time = 0
self.drib = True
def think(self):
"""
Performs a single step of thinking for our agent. Gets called on every
iteration of our think loop.
"""
self.update_super()
self.wm.ah.say('"hello hello"')
print len(self.wm.team_message_queue)
if self.visualization:
if self.wm.abs_coords[0] is None:
return
self.display.clear()
self.display.draw_robot(self.wm.abs_coords, self.wm.abs_body_dir)
if self.wm.ball is not None:
self.display.draw_circle(self.wm.get_object_absolute_coords(self.wm.ball), 4)
# print self.wm.ball.direction, self.wm.ball.distance
self.display.show()
# take places on the field by uniform number
if not self.in_kick_off_formation:
# Teleport to right position
self.teleport_to_point(positions[3])
# turns to attack field
if self.wm.side == WorldModel.SIDE_R:
self.wm.ah.turn(180)
# Player is ready in formation
self.in_kick_off_formation = True
return
# kick off!
if self.wm.play_mode == PlayModes.BEFORE_KICK_OFF:
# player 9 takes the kick off
if self.wm.uniform_number == 9:
if self.is_ball_kickable():
# kick with 100% extra effort at enemy goal
self.kick_to(self.goal_pos, 1.0)
# print self.goal_pos
else:
# move towards ball
if self.wm.ball is not None:
if self.wm.ball.direction is not None \
and -7 <= self.wm.ball.direction <= 7:
self.wm.ah.dash(50)
else:
self.wm.turn_body_to_point((0, 0))
# turn to ball if we can see it, else face the enemy goal
if self.wm.ball is not None:
self.turn_neck_to_object(self.wm.ball)
return
# attack!
else:
# self.wm.ah.dash(50)
# return
# If not new cicle
# if self.current_time == self.wm.sim_time:
# return
# self.current_time = self.wm.sim_time
# print self.wm.sim_time
# if self.wm.abs_coords is not None:
# self.dash_to_point((50,25))
# return
# find the ball
if self.drib:
if self.wm.ball is None or self.wm.ball.direction is None:
self.wm.ah.turn(35)
return
self.drib = self.dribbling_to((35, 15))
# # kick it at the enemy goal
# if self.is_ball_kickable():
#
# # angle = cut_angle(angle_between_points(self.wm.abs_coords, self.goal_pos)) - cut_angle(self.wm.abs_body_dir)
# # self.wm.ah.kick(20, angle)
# self.kick_to((0, 20))
# return
# else:
# # move towards ball
# if -7 <= self.wm.ball.direction <= 7:
# self.wm.ah.dash(5 * self.wm.ball.distance + 20)
# else:
# # face ball
# self.wm.ah.turn(self.wm.ball.direction / 2)
#
# return
|
openstack/tempest | tempest/lib/services/identity/v3/protocols_client.py | Python | apache-2.0 | 4,091 | 0 | # Copyright 2020 Samsung Electronics Co., Ltd
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from urllib import parse as urllib
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
class ProtocolsClient(rest_client.RestClient):
def add_protocol_to_identity_provider(self, idp_id, protocol_id,
**kwargs):
"""Add protocol to identity provider.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/identity/v3-ext/index.html#add-protocol-to-identity-provider
"""
post_body = json.dumps({'protocol': kwargs})
resp, body = self.put(
'OS-FEDERATION/identity_providers/%s/protocols/%s'
% (idp_id, protocol_id), post_body)
self.expected_success(201, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def list_protocols_of_identity_provider(self, idp_id, **kwargs):
"""List protocols of identity provider.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/identity/v3-ext/index.html#list-protocols-of-identity-provider
"""
url = 'OS-FEDERATION/identity_providers/%s/protocols' % idp_id
if kwargs:
url += '?%s' % urllib.urlencode(kwargs)
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def get_protocol_for_identity_provider(self, idp_id, protocol_id):
"""Get protocol for identity provider.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/identity/v3-ext/index.html#get-protocol-for-identity-provider
"""
resp, body = self.get(
'OS-FEDERATION/identity_providers/%s/protocols/%s'
% (idp_id, protocol_id))
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def update_mapping_for_identity_provider(self, idp_id, protocol_id,
**kwargs):
"""Update attribute mapping for identi | ty provider.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/identity/v3-ext/index. | html#update-attribute-mapping-for-identity-provider
"""
post_body = json.dumps({'protocol': kwargs})
resp, body = self.patch(
'OS-FEDERATION/identity_providers/%s/protocols/%s'
% (idp_id, protocol_id), post_body)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def delete_protocol_from_identity_provider(self, idp_id, protocol_id):
"""Delete a protocol from identity provider.
For a full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/identity/v3-ext/index.html#delete-a-protocol-from-identity-provider
"""
resp, body = self.delete(
'OS-FEDERATION/identity_providers/%s/protocols/%s'
% (idp_id, protocol_id))
self.expected_success(204, resp.status)
return rest_client.ResponseBody(resp, body)
|
reaperhulk/paramiko | tests/test_gssapi.py | Python | lgpl-2.1 | 5,370 | 0.000372 | # Copyright (C) 2013-2014 science + computing ag
# Author: Sebastian Deiss <sebastian.deiss@t-online.de>
#
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Test the used APIs for GSS-API / SSPI authentication
"""
import unittest
import socket
class GSSAPITest(unittest.TestCase):
@staticmethod
def init(hostname=None, srv_mode=False):
global krb5_mech, targ_name, server_mode
krb5_mech = "1.2.840.113554.1.2.2"
targ_name = hostname
server_mode = srv_mode
def test_1_pyasn1(self):
"""
Test the used methods of pyasn1.
"""
from pyasn1.type.univ import ObjectIdentifier
f | rom pyasn1.codec.der import encoder, decoder
oid = encoder.encode(ObjectIdentifier(krb5_mech))
mech, __ = decoder.decode(oid)
self.assertEquals(krb5_mech, mech.__str__())
def test_2_gssapi_sspi(self):
"""
Test the used methods of python-gssapi or sspi, sspicon from pywin32.
"""
_API = "MIT"
t | ry:
import gssapi
except ImportError:
import sspicon
import sspi
_API = "SSPI"
c_token = None
gss_ctxt_status = False
mic_msg = b"G'day Mate!"
if _API == "MIT":
if server_mode:
gss_flags = (gssapi.C_PROT_READY_FLAG,
gssapi.C_INTEG_FLAG,
gssapi.C_MUTUAL_FLAG,
gssapi.C_DELEG_FLAG)
else:
gss_flags = (gssapi.C_PROT_READY_FLAG,
gssapi.C_INTEG_FLAG,
gssapi.C_DELEG_FLAG)
# Initialize a GSS-API context.
ctx = gssapi.Context()
ctx.flags = gss_flags
krb5_oid = gssapi.OID.mech_from_string(krb5_mech)
target_name = gssapi.Name("host@" + targ_name,
gssapi.C_NT_HOSTBASED_SERVICE)
gss_ctxt = gssapi.InitContext(peer_name=target_name,
mech_type=krb5_oid,
req_flags=ctx.flags)
if server_mode:
c_token = gss_ctxt.step(c_token)
gss_ctxt_status = gss_ctxt.established
self.assertEquals(False, gss_ctxt_status)
# Accept a GSS-API context.
gss_srv_ctxt = gssapi.AcceptContext()
s_token = gss_srv_ctxt.step(c_token)
gss_ctxt_status = gss_srv_ctxt.established
self.assertNotEquals(None, s_token)
self.assertEquals(True, gss_ctxt_status)
# Establish the client context
c_token = gss_ctxt.step(s_token)
self.assertEquals(None, c_token)
else:
while not gss_ctxt.established:
c_token = gss_ctxt.step(c_token)
self.assertNotEquals(None, c_token)
# Build MIC
mic_token = gss_ctxt.get_mic(mic_msg)
if server_mode:
# Check MIC
status = gss_srv_ctxt.verify_mic(mic_msg, mic_token)
self.assertEquals(0, status)
else:
gss_flags = (
sspicon.ISC_REQ_INTEGRITY |
sspicon.ISC_REQ_MUTUAL_AUTH |
sspicon.ISC_REQ_DELEGATE
)
# Initialize a GSS-API context.
target_name = "host/" + socket.getfqdn(targ_name)
gss_ctxt = sspi.ClientAuth("Kerberos",
scflags=gss_flags,
targetspn=target_name)
if server_mode:
error, token = gss_ctxt.authorize(c_token)
c_token = token[0].Buffer
self.assertEquals(0, error)
# Accept a GSS-API context.
gss_srv_ctxt = sspi.ServerAuth("Kerberos", spn=target_name)
error, token = gss_srv_ctxt.authorize(c_token)
s_token = token[0].Buffer
# Establish the context.
error, token = gss_ctxt.authorize(s_token)
c_token = token[0].Buffer
self.assertEquals(None, c_token)
self.assertEquals(0, error)
# Build MIC
mic_token = gss_ctxt.sign(mic_msg)
# Check MIC
gss_srv_ctxt.verify(mic_msg, mic_token)
else:
error, token = gss_ctxt.authorize(c_token)
c_token = token[0].Buffer
self.assertNotEquals(0, error)
|
mstreatfield/anim-studio-tools | grind/tests/integration/test_mesh_subdivide.py | Python | gpl-3.0 | 4,366 | 0.030234 | #! /usr/bin/env python2.5
import sys, time
from mouseInteractor import MouseInteractor
try:
from OpenGL.GLUT import *
from OpenGL.GL import *
from OpenGL.GLU import *
except:
print ''' Error: PyOpenGL nicht intalliert !!'''
sys.exit()
import grind
mesh = None
mesh_subd = None
subdivider = None
prev_t = 0
mesh_shader = None
mesh_tex = None
def display():
"""Glut display function."""
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
xSize, ySize = glutGet(GLUT_WINDOW_WIDTH), glutGet(GLUT_WINDOW_HEIGHT)
if 0:
gluPerspective(60, float(xSize) / float(ySize), 0.1, 5000)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glTranslatef(-184,-40,-293)
else:
gluPerspective(35, float(xSize) / float(ySize), 0.2, 100)
glMatrixMode(GL_MO | DELVIEW)
glLoadIdentity()
glTranslat | ef( 0, -5, -20 )
#glTranslatef(-180,-45,-293)
global mouseInteractor
mouseInteractor.applyTransformation()
global subdivider
global mesh
global mesh_shader
global mesh_tex
global mesh_subd
mesh_shader.use()
mesh_tex.use(0, mesh_shader, 0, -1)
#subdivider.update( mesh )
mesh_subd.render(1)
#mesh.render(1)
mesh_tex.un_use(0)
mesh_shader.un_use()
glutSwapBuffers()
this_t = time.time()
global prev_t
fps = 1.0 / (this_t-prev_t)
prev_t = this_t
glutSetWindowTitle( 'fps: %.2f' % fps );
def init():
"""Glut init function."""
glClearColor (0, 0, 0, 0)
glEnable (GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_DEPTH_TEST)
glShadeModel(GL_SMOOTH)
glEnable(GL_CULL_FACE);
# glEnable( GL_LIGHTING )
# glEnable( GL_LIGHT0 )
glLightModeli(GL_LIGHT_MODEL_TWO_SIDE, 0)
glLightfv(GL_LIGHT0, GL_POSITION, [0, 200, 400, 1])
lA = 0.8
glLightfv(GL_LIGHT0, GL_AMBIENT, [lA, lA, lA, 1])
lD = 1
glLightfv(GL_LIGHT0, GL_DIFFUSE, [lD, lD, lD, 1])
lS = 1
glLightfv(GL_LIGHT0, GL_SPECULAR, [lS, lS, lS, 1])
glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT, [0.0, 0.0, 0.2, 1])
glMaterialfv(GL_FRONT_AND_BACK, GL_DIFFUSE, [0.0, 0.0, 0.7, 1])
glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, [0.5, 0.5, 0.5, 1])
glMaterialf(GL_FRONT_AND_BACK, GL_SHININESS, 50)
global mouseInteractor
mouseInteractor = MouseInteractor(.01, 1)
global mesh
global mesh_subd
mesh = grind.DeviceMesh()
mesh_subd = grind.DeviceMesh()
#mesh.read( 'obj/rest.obj')
#mesh.read( 'obj/plane.obj')
#mesh.read( 'obj/open_box.obj')
#mesh.read( 'obj/plane_with_triangle.obj')
mesh.read( 'obj/lep_seal_adult.obj')
#mesh.read( 'obj/lep_seal_adult_tri_tail.obj')
#mesh.read('obj/single_quad.obj')
global mesh_shader
mesh_shader = grind.Program()
mesh_shader.read('glsl/blinn.vs.glsl', 'glsl/blinn.fs.glsl')
#mesh_shader.read('glsl/test_150.vs.glsl', 'glsl/test_150.fs.glsl')
global mesh_tex
mesh_tex = grind.Texture()
#mesh_tex.read('maps/glr_todl_fur_body_bcolor_v14.tif')
mesh_tex.read('maps/white.tif')
global subdivider
subdivider = grind.MeshSubdivide()
subdivider.set_iterations(3)
subdivider.process( mesh, mesh_subd )
grind.info()
def keyboard( key, a, b ):
# exiting is painfully slow if memory isn't de-allocated correctly
if (key == 27) or (key == 'q'):
sys.exit()
# we should be able to initialize gl context after loading grind
# due to lazy singleton initialization of gl extensions etc
glutInit(sys.argv)
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH)
glutInitWindowSize(500, 500)
glutInitWindowPosition(100, 100)
glutCreateWindow(sys.argv[0])
init()
mouseInteractor.registerCallbacks()
glutKeyboardFunc(keyboard);
glutIdleFunc( display )
glutMainLoop()
# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios)
#
# This file is part of anim-studio-tools.
#
# anim-studio-tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# anim-studio-tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with anim-studio-tools. If not, see <http://www.gnu.org/licenses/>.
|
D-K-E/cltk | src/cltk/text/lat.py | Python | mit | 542 | 0 | """Functions for | replacing j/J and v/V to i/I and u/U"""
__author__ = ["Kyle P. Johnson <kyle@kyle-p-johnson.com>"]
__license__ = "MIT License. See LICENSE."
import re
patterns = [(r"j", "i"), (r"v", "u"), (r"J", | "I"), (r"V", "U")]
patterns = [(re.compile(regex), repl) for (regex, repl) in patterns]
def replace_jv(text: str) -> str:
"""
Do j/v replacement.
>>> replace_jv("vem jam VEL JAM")
'uem iam UEL IAM'
"""
for (pattern, repl) in patterns:
text = re.subn(pattern, repl, text)[0]
return text
|
lepistone/manifold | nginx.py | Python | agpl-3.0 | 596 | 0 | from flask import render_template
class NginxConfigRenderer():
def __init__(self, manifold):
self.manifold = manifold
self.app = manifold.app
| def render(self, minions):
with self.app.app_context():
return render_template('nginx/nginx.conf',
manifold=self | .manifold,
minions=minions)
def write(self, minions):
content = self.render(minions)
conf_path = self.manifold.config.NGINX_CONF_PATH
with open(conf_path, 'w') as f:
f.write(content)
|
CCI-Tools/cate-core | tests/storetest.py | Python | mit | 1,590 | 0.003774 | import os
import unittest
import xcube.core.store as xcube_store
from cate.core.ds import DATA_STORE_POOL
def _create_test_data_store_config(name: str):
local_test_store_path = \
os.path.join(os.path.dirname(__file__), 'ds', 'resources', 'datasources', name)
local_test_store_dict = {
"store_id": "file",
"store_params": {
"root": local_test_store_path
},
"title": f"Local Test Store '{name}'"
}
local_test_store = xcube_store.DataStoreConfig.from_dict(local_test_store_dict)
return local_test_store
class StoreTest(unittest.TestCase):
_orig_store_configs = None
@classmethod
def setUpClass(cls):
cls._orig_store_configs = {instance_id: DATA_STORE_POOL.get_store_config(instance_id)
| for instance_id in DATA_STORE_POOL.store_instance_ids}
for instance_id in DATA_STORE_POOL.store_instance_ids:
DATA_STORE_POOL.remove_store_config(instance_id)
DATA_STORE_POOL.add_store_config('local_test_store_1',
_create_test_data_store_config('local'))
DATA_STORE_POOL.add_st | ore_config('local_test_store_2',
_create_test_data_store_config('local2'))
@classmethod
def tearDownClass(cls):
for instance_id in DATA_STORE_POOL.store_instance_ids:
DATA_STORE_POOL.remove_store_config(instance_id)
for instance_id, config in cls._orig_store_configs.items():
DATA_STORE_POOL.add_store_config(instance_id, config)
|
bigmonachus/Delaunay | site_scons/site_tools/scons_qt4/test/ts_qm/noclean/sconstest-noclean.py | Python | gpl-3.0 | 1,813 | 0.002758 | #!/usr/bin/env python
#
# Copyright (c) 2001-2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Tests that .ts files are not removed by default,
on a 'scons -c'.
"""
import TestSCons
test = TestSCons.TestSCons()
test.dir_fixture("image")
test.file_fixture('../../qtenv.py')
test.file_fixture('../../../__init__.py','site_scons/site_tools/qt4/__init__.py')
test.run(stderr=None)
test.must_exist(test.workpath('my_en.ts'))
test.must_contain(test.workpath('my_en.ts'),'SCons rocks!')
test.must_exist(test.workpath('my_en.qm'))
test.run(options = '-c')
test.must_exist(test.workpath( | 'my_en.ts'))
test.must_not_exist(test.workpath('my_en.qm'))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandt | ab tabstop=4 shiftwidth=4:
|
jomauricio/abgthe | abgthe/apps/polls/migrations/0002_auto_20150422_0036.py | Python | bsd-3-clause | 464 | 0.002155 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
fro | m django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('polls', '0001_initial'), |
]
operations = [
migrations.AlterField(
model_name='poll',
name='extraordinary',
field=models.BooleanField(default=False, verbose_name=b'Extraordinaria'),
preserve_default=True,
),
]
|
rhdedgar/openshift-tools | jenkins/test/validators/lint.py | Python | apache-2.0 | 2,495 | 0.002405 | ''' Run pylint against each python file with changes '''
import os
import re
import sys
import common
PYLINT_RCFILE = | "jenkins/test/validators/.pylintrc"
LINT_EXCLUDE | _PATTERN_LIST = [
r'prometheus_client'
r'ansible/inventory/aws/hosts/ec2.py'
r'ansible/inventory/gce/hosts/gce.py'
r'docs/*']
def linter(diff_file_list):
'''Use pylint to lint all python files changed in the pull request'''
file_list = []
# For each file in the diff, confirm it should be linted
for dfile in diff_file_list.split(","):
# Skip linting for specific files
skip = False
for exclude_pattern in LINT_EXCLUDE_PATTERN_LIST:
if re.match(exclude_pattern, dfile):
skip = True
break
if skip:
continue
# Skip linting if dfile is a directory or other non-file type
if not os.path.isfile(dfile):
continue
# Skip linting if the file does not have a python extension
_, ext = os.path.splitext(dfile)
if ext != ".py":
continue
file_list.append(dfile)
if len(file_list) == 0:
print "No python files have changed or all files are excluded, skipping running python linter"
return True, ""
print "Running pylint against " + " ".join(file_list)
pylint_cmd = ["/usr/bin/pylint", "--rcfile=" + PYLINT_RCFILE] + file_list
success, stdout = common.run_cli_cmd(pylint_cmd, exit_on_fail=False)
if not success:
return False, "Pylint failed:\n" + stdout
return True, ""
def usage():
''' Print usage '''
print """usage: python lint.py [file_list...]
file_list: Comma-seperated list of files to run pylint against
Arguments can be provided through the following environment variables:
file_list: PRV_CHANGED_FILES"""
def main():
''' Get base and remote SHA from arguments and run linter '''
if len(sys.argv) == 2:
file_list = sys.argv[1]
elif len(sys.argv) > 2:
print len(sys.argv)-1, "arguments provided, expected 1."
usage()
sys.exit(2)
else:
file_list = os.getenv("PRV_CHANGED_FILES", "")
if file_list == "":
print "file list must be provided"
usage()
sys.exit(3)
success, error_message = linter(file_list)
if not success:
print "Pylint failed:"
print error_message
sys.exit(1)
print "Pylint succeeded!"
if __name__ == '__main__':
main()
|
nint8835/jigsaw | tests/test_jigsaw.py | Python | mit | 8,141 | 0.004545 | import sys
import os
import pytest
sys.path.append(os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..")))
print(sys.path)
import jigsaw
def test_initializing_jigsaw_with_no_plugin_path_specified():
j = jigsaw.PluginLoader()
assert j.plugin_paths == (os.path.join(os.getcwd(), "plugins"), )
def test_initializing_jigsaw_with_custom_plugin_path():
j = jigsaw.PluginLoader((os.path.join(os.getcwd(), "custom_plugins"),))
assert j.plugin_paths == (os.path.join(os.getcwd(), "custom_plugins"), )
def test_loading_manifests():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
assert j.get_manifest("Basic Test") is not None
assert j.get_manifest("Dependency Test") is not None
assert j.get_manifest("Missing Dependency Test") is not None
def test_getting_manifests():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
assert j.get_manifest("Basic Test") is not None
def test_getting_manifest_for_missing_plugin():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
assert j.get_manifest("This should never exist") is None
def test_loading_specific_manifest():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifest(os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins", "BasicTest")))
assert j.get_manifest("Basic Test") is not None
def test_load_plugins():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
j.load_plugins()
assert j.get_plugin_loaded("Dependency Test")
assert j.get_plugin_loaded("Basic Test")
assert not j.get_plugin_loaded("Missing Dependency Test")
def test_load_specific_plugin():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
j.load_plugin(j.get_manifest("Basic Test"))
assert j.get_plugin_loaded("Basic Test")
def test_loading_dependencies():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
j.load_plugin(j.get_manifest("Dependency Test"))
assert j.get_plugin_loaded("Dependency Test")
assert j.get_plugin_loaded("Basic Test")
def test_loading_with_missing_dependencies():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
j.load_plugin(j.get_manifest("Missing Dependency Test"))
assert n | ot j.get_plugin_loaded("Missing Dependency Test")
def test_getting_plugin():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
j.load_plugin(j.get_manifest("Basic Test"))
assert isinstance(j.get_plugin("Basic Test" | ), jigsaw.JigsawPlugin)
def test_getting_missing_plugin():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
assert not isinstance(j.get_plugin("This should never exist"), jigsaw.JigsawPlugin)
def test_getting_module():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
j.load_plugin(j.get_manifest("Basic Test"))
assert issubclass(j.get_module("Basic Test").Plugin, jigsaw.JigsawPlugin)
def test_getting_module_of_missing_plugin():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
with pytest.raises(AttributeError):
assert not issubclass(j.get_module("This should never exist").Plugin, jigsaw.JigsawPlugin)
def test_getting_all_plugins():
j = jigsaw.PluginLoader((os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins")),))
j.load_manifests()
j.load_plugins()
for item in j.get_all_plugins():
if item["manifest"]["name"] in ["Missing Dependency Test", "Invalid Baseclass Test", "Error Test"]:
assert isinstance(item["manifest"], dict)
assert not isinstance(item["plugin"], jigsaw.JigsawPlugin)
else:
assert isinstance(item["manifest"], dict)
assert isinstance(item["plugin"], jigsaw.JigsawPlugin)
def test_disable_all_plugins():
    """Disabling every plugin after a full load must not raise."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.load_plugins()
    loader.disable_all_plugins()
def test_enable_all_plugins():
    """Enabling every plugin after a full load must not raise."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.load_plugins()
    loader.enable_all_plugins()
def test_reload_all_plugins():
    """Reloading every plugin after a full load must not raise."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.load_plugins()
    loader.reload_all_plugins()
def test_reload_specific_plugin():
    """Reloading a single named plugin must not raise."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.load_plugin(loader.get_manifest("Basic Test"))
    loader.reload_plugin("Basic Test")
def test_load_invalid_plugin_manifest():
    """A malformed manifest is rejected and never registered."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifest(os.path.join(plugin_dir, "InvalidManifestTest"))
    assert loader.get_manifest("Invalid Manifest Test") is None
def test_loading_plugin_already_loaded():
    """Loading the same plugin twice is a harmless no-op."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    for _ in range(2):
        loader.load_plugin(loader.get_manifest("Basic Test"))
def test_invalid_baseclass():
    """A plugin not derived from JigsawPlugin must be refused."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.load_plugin(loader.get_manifest("Invalid Baseclass Test"))
    assert not loader.get_plugin_loaded("Invalid Baseclass Test")
def test_error_on_plugin_load():
    """A plugin that raises during load leaves an error.log behind."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.load_plugin(loader.get_manifest("Error Test"))
    assert os.path.isfile(os.path.join(plugin_dir, "ErrorTest", "error.log"))
def test_oserror_on_load_plugin_manifest():
    """An unreadable plugin.json (here: a directory) is skipped gracefully."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    bogus_manifest = os.path.join(plugin_dir, "OSErrorTest", "plugin.json")
    os.mkdir(bogus_manifest)  # a directory named plugin.json triggers OSError on open
    loader.load_manifest(os.path.join(plugin_dir, "OSErrorTest"))
    os.rmdir(bogus_manifest)
    assert loader.get_manifest("OS Error Test") is None
def test_unload_plugin():
    """A plugin unloaded by name is no longer reported as loaded."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.load_plugin(loader.get_manifest("Basic Test"))
    loader.unload_plugin("Basic Test")
    assert not loader.get_plugin_loaded("Basic Test")
def test_reload_specific_manifest():
    """Reloading one manifest keeps it available afterwards."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.reload_manifest(loader.get_manifest("Basic Test"))
    assert loader.get_manifest("Basic Test") is not None
def test_reload_all_manifests():
    """Reloading every manifest keeps them available afterwards."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.load_manifests()
    loader.reload_all_manifests()
    assert loader.get_manifest("Basic Test") is not None
def test_quickload():
    """quickload() discovers manifests and loads plugins in one call."""
    plugin_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "plugins"))
    loader = jigsaw.PluginLoader((plugin_dir,))
    loader.quickload()
    assert loader.get_plugin_loaded("Basic Test")
|
samueldotj/TeeRISC-Simulator | configs/common/Options.py | Python | bsd-3-clause | 12,906 | 0.004959 | # Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2006-2008 The Regents of The | University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of sou | rce code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Lisa Hsu
import m5
from m5.defines import buildEnv
from m5.objects import *
from Benchmarks import *
import CpuConfig
import MemConfig
def _listCpuTypes(option, opt, value, parser):
    # optparse callback: print the available CPU models, then quit.
    # NOTE(review): 'sys' is not among this file's visible imports —
    # presumably supplied by one of the star imports; verify.
    CpuConfig.print_cpu_list()
    sys.exit(0)
def _listMemTypes(option, opt, value, parser):
    # optparse callback: print the available memory models, then quit.
    # NOTE(review): 'sys' is not among this file's visible imports — verify.
    MemConfig.print_mem_list()
    sys.exit(0)
def addCommonOptions(parser):
# system options
parser.add_option("--list-cpu-types",
action="callback", callback=_listCpuTypes,
help="List available CPU types")
parser.add_option("--cpu-type", type="choice", default="atomic",
choices=CpuConfig.cpu_names(),
help = "type of cpu to run with")
parser.add_option("--checker", action="store_true");
parser.add_option("-n", "--num-cpus", type="int", default=1)
parser.add_option("--sys-clock", action="store", type="string",
default='1GHz',
help = """Top-level clock for blocks running at system
speed""")
parser.add_option("--cpu-clock", action="store", type="string",
default='2GHz',
help="Clock for blocks running at CPU speed")
parser.add_option("--smt", action="store_true", default=False,
help = """
Only used if multiple programs are specified. If true,
then the number of threads per cpu is same as the
number of programs.""")
# Memory Options
parser.add_option("--list-mem-types",
action="callback", callback=_listMemTypes,
help="List available memory types")
parser.add_option("--mem-type", type="choice", default="simple_mem",
choices=MemConfig.mem_names(),
help = "type of memory to use")
parser.add_option("--mem-size", action="store", type="string",
default="512MB",
help="Specify the physical memory size (single memory)")
# Cache Options
parser.add_option("--caches", action="store_true")
parser.add_option("--l2cache", action="store_true")
parser.add_option("--fastmem", action="store_true")
parser.add_option("--num-dirs", type="int", default=1)
parser.add_option("--num-l2caches", type="int", default=1)
parser.add_option("--num-l3caches", type="int", default=1)
parser.add_option("--l1d_size", type="string", default="64kB")
parser.add_option("--l1i_size", type="string", default="32kB")
parser.add_option("--l2_size", type="string", default="2MB")
parser.add_option("--l3_size", type="string", default="16MB")
parser.add_option("--l1d_assoc", type="int", default=2)
parser.add_option("--l1i_assoc", type="int", default=2)
parser.add_option("--l2_assoc", type="int", default=8)
parser.add_option("--l3_assoc", type="int", default=16)
parser.add_option("--cacheline_size", type="int", default=64)
# Enable Ruby
parser.add_option("--ruby", action="store_true")
# Run duration options
parser.add_option("-m", "--abs-max-tick", type="int", default=None,
metavar="TICKS", help="Run to absolute simulated tick " \
"specified including ticks from a restored checkpoint")
parser.add_option("--rel-max-tick", type="int", default=None,
metavar="TICKS", help="Simulate for specified number of" \
" ticks relative to the simulation start tick (e.g. if " \
"restoring a checkpoint)")
parser.add_option("--maxtime", type="float", default=None,
help="Run to the specified absolute simulated time in " \
"seconds")
parser.add_option("-I", "--maxinsts", action="store", type="int",
default=None, help="""Total number of instructions to
simulate (default: run forever)""")
parser.add_option("--work-item-id", action="store", type="int",
help="the specific work id for exit & checkpointing")
parser.add_option("--work-begin-cpu-id-exit", action="store", type="int",
help="exit when work starts on the specified cpu")
parser.add_option("--work-end-exit-count", action="store", type="int",
help="exit at specified work end count")
parser.add_option("--work-begin-exit-count", action="store", type="int",
help="exit at specified work begin count")
parser.add_option("--init-param", action="store", type="int", default=0,
help="""Parameter available in simulation with m5
initparam""")
# Simpoint options
parser.add_option("--simpoint-profile", action="store_true",
help="Enable basic block profiling for SimPoints")
parser.add_option("--simpoint-interval", type="int", default=10000000,
help="SimPoint interval in num of instructions")
# Checkpointing options
###Note that performing checkpointing via python script files will override
###checkpoint instructions built into binaries.
parser.add_option("--take-checkpoints", action="store", type="string",
help="<M,N> take checkpoints at tick M and every N ticks thereafter")
parser.add_option("--max-checkpoints", action="store", type="int",
help="the maximum number of checkpoints to drop", default=5)
parser.add_option("--checkpoint-dir", action="store", type="string",
help="Place all checkpoints in this absolute directory")
parser.add_option("-r", "--checkpoint-restore", action="store", type="int",
help="restore from checkpoint <N>")
parser.add_option("--checkpoint-at-end", action="store_true",
help="take a checkpoint at end of run")
parser.add_option("--work-begin-checkpoint-count", a |
EMFTeam/HIP-tools | installer/shrinkwrap.py | Python | gpl-2.0 | 5,564 | 0.003235 | #!/usr/bin/python2
import os
import sys
import time
import datetime
import argparse
import hashlib
# Default CK2 mod modules location under a Cygwin-style user profile path.
default_module_folder = '/cygdrive/c/Users/{}/Documents/Paradox Interactive/Crusader Kings II/mod/modules'.format(os.environ.get('USER', 'ziji'))
# Sentinel file whose presence marks a tree as not yet shrinkwrapped.
shrinkwrap_sentinel_file = 'no_shrinkwrap.txt'
# Repeating XOR obfuscation key used by encrypt().
k = bytearray(br'"The enemy of a good plan is the dream of a perfect plan" - Carl von Clausewitz')
# Binary files only get their first 4 KiB obfuscated (see encrypt_file).
header_len = 1 << 12
# Extensions stripped from the release package (sources, archives, temps).
banned_file_ext = ['.pdn', '.psd', '.xcf', '.bak', '.tmp', '.rar', '.zip', \
                   '.7z', '.gz', '.tgz', '.xz', '.bz2', '.tar', '.ignore', \
                   '.xls', '.xlsx', '.xlsm', '.db']
def is_wanted(path):
    """Return True if *path* should ship: not a banned extension, not a
    dot-file, and not an editor backup (trailing '~')."""
    stem, extension = os.path.splitext(path)
    if extension.lower() in banned_file_ext:
        return False
    if os.path.basename(stem).startswith('.'):
        return False
    return not path.endswith('~')
def is_binary(path):
    """Return True for texture formats that only get header obfuscation."""
    return os.path.splitext(path)[1].lower() in ('.dds', '.tga')
def encrypt(buf, length, key=None):
    """XOR the first *length* bytes of *buf* in place with a repeating key.

    XOR is an involution, so applying this twice restores the original.
    *key* defaults to the module-level obfuscation key ``k``; the optional
    parameter is backward-compatible and makes the routine reusable/testable.
    """
    key = k if key is None else key
    key_len = len(key)
    # range (not the Python-2-only xrange) keeps the script runnable on
    # both Python 2 and 3; the rest of the file already uses print().
    for i in range(length):
        buf[i] ^= key[i % key_len]
def encrypt_file(path, header_only=False):
    """Obfuscate *path* in place via a sibling temp file.

    With header_only=True only the first header_len bytes are XORed
    (used for large binary textures).
    """
    tmp_path = path + '.tmp'
    with open(path, 'rb') as fsrc, open(tmp_path, 'wb') as fdst:
        data = bytearray(fsrc.read())
        span = len(data)
        if header_only:
            span = min(header_len, span)
        encrypt(data, span)
        fdst.write(data)
    os.unlink(path)
    os.rename(tmp_path, path)
def cksum_file(fname):
    """Return the hex MD5 digest of the file at *fname*."""
    with open(fname, "rb") as handle:
        return hashlib.md5(handle.read()).hexdigest()
def get_args():
    """Parse command-line options: --modules-dir (build target) and -v/--verbose."""
    parser = argparse.ArgumentParser(
        description="Prepare a HIP modules/ folder for build (remove unwanted files & shrinkwrap).")
    parser.add_argument('--modules-dir', default=default_module_folder,
                        help='path to modules/ folder for build')
    parser.add_argument('--verbose', '-v', action='count', default=0,
                        help="show verbose information about what I'm doing")
    return parser.parse_args()
args = get_args()
module_folder = os.path.abspath(args.modules_dir)
shrinkwrap_folder = os.path.join(module_folder, "CPRplus")
# CPRplus only wants its gfx/ sub-folder encrypted so that power users can play with the portrait definitions
real_shrinkwrap_folder = os.path.join(shrinkwrap_folder, 'gfx')
# Validate the expected directory layout before touching anything.
if not os.path.exists(module_folder):
    sys.stderr.write('invalid modules folder: {}\n'.format(module_folder))
    sys.exit(1)
if not os.path.exists(shrinkwrap_folder):
    sys.stderr.write('invalid shrinkwrap folder: {}\n'.format(shrinkwrap_folder))
    sys.exit(2)
if not os.path.exists(real_shrinkwrap_folder):
    sys.stderr.write('invalid "real" shrinkwrap folder: {}\n'.format(real_shrinkwrap_folder))
    sys.exit(3)
# Counters for the removal/packaging summary printed at the end.
n_removed_files = 0
n_removed_bytes = 0
n_files = 0
n_bytes = 0
version_path = os.path.join(module_folder, 'version.txt')
manifest_path = os.path.join(module_folder, 'release_manifest.txt')
sentinel_path = os.path.join(shrinkwrap_folder, shrinkwrap_sentinel_file)
# Clear unwanted files from full distribution
if os.path.exists(manifest_path):
    os.unlink(manifest_path)
for root, dirs, files in os.walk(module_folder):
    for i in files:
        path = os.path.join(root, i)
        if path.endswith(shrinkwrap_sentinel_file):
            continue # will get removed at end
        elif is_wanted(path):
            n_files += 1
            n_bytes += os.path.getsize(path)
        else:
            size = os.path.getsize(path)
            n_removed_files += 1
            n_removed_bytes += size
            os.unlink(path)
            if args.verbose > 0:
                head, f = os.path.split(path)
                d = head.replace(module_folder, '', 1)
                if d == '':
                    d = '/'
                print("removed file: '{}' ({}KB) in '{}'".format(f, size // 1000, d))
removed_MB = n_removed_bytes / 1000 / 1000
final_MB = n_bytes / 1000 / 1000
# Now, onward to encryption of real_shrinkwrap_folder...
# We will check for the encryption sentinel first
if not os.path.exists(sentinel_path):
    sys.stderr.write('already shrinkwrapped: {}\n'.format(shrinkwrap_folder))
else:
    start_wrap_time = time.time()
    for root, dirs, files in os.walk(real_shrinkwrap_folder):
        for i in files:
            path = os.path.join(root, i)
            # Binary textures only get their header obfuscated.
            encrypt_file(path, header_only=is_binary(path))
    end_wrap_time = time.time()
    os.unlink(sentinel_path)
    print("shrinkwrap time: %0.2fsec" % (end_wrap_time - start_wrap_time))
# Now that we've shrinkwrapped, build a checksum manifest file
start_cksum_time = time.time()
path_cksum_map = {}
for root, dirs, files in os.walk(module_folder):
    for i in files:
        real_path = os.path.join(root, i)
        if real_path == version_path: # Don't checksum the module pack version.txt
            continue
        virt_path = os.path.relpath(real_path, module_folder)
        path_cksum_map[virt_path] = cksum_file(real_path)
# NOTE(review): the manifest is opened in 'wb' but written with str — this
# is fine on Python 2 (the shebang targets python2) but would TypeError on 3.
with open(manifest_path, 'wb') as f:
    f.write('time: {}\n'.format(datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')))
    for p in sorted(path_cksum_map):
        f.write('{} // {}\n'.format(p, path_cksum_map[p]))
end_cksum_time = time.time()
print("checksum time: %0.2fsec" % (end_cksum_time - start_cksum_time))
print('final package: %d files (%dMB)' % (n_files, final_MB))
if n_removed_files > 0:
    print('\n> removed %d unwanted files (%0.2fMB)' % (n_removed_files, removed_MB))
sys.exit(0)
|
dymkowsk/mantid | scripts/Diffraction/isis_powder/hrpd_routines/hrpd_advanced_config.py | Python | gpl-3.0 | 1,942 | 0 | from __future__ import (absolute_import, division, print_function)
| from isis_powder.hrpd_routines.hrpd_enums import HRPD_TOF_WINDOWS
# Geometry/material of the vanadium cylinder used for absorption corrections.
absorption_correction_params = {
    "cylinder_sample_height": 2.0,
    "cylinder_sample_radius": 0.3,
    "cylinder_position": [0., 0., 0.],
    "chemical_formula": "V"
}
# Default cropping values are 5% off each end
# Per-TOF-window cropping: vanadium range plus one range per focused bank.
window_10_110_params = {
    "vanadium_tof_cropping": (1e4, 1.2e5),
    "focused_cropping_values": [
        (1.5e4, 1.08e5),  # Bank 1
        (1.5e4, 1.12e5),  # Bank 2
        (1.5e4, 1e5)      # Bank 3
    ]
}
window_30_130_params = {
    "vanadium_tof_cropping": (3e4, 1.4e5),
    "focused_cropping_values": [
        (3.5e4, 1.3e5),  # Bank 1
        (3.4e4, 1.4e5),  # Bank 2
        (3.3e4, 1.3e5)   # Bank 3
    ]
}
window_100_200_params = {
    "vanadium_tof_cropping": (1e5, 2.15e5),
    "focused_cropping_values": [
        (1e5, 2e5),      # Bank 1
        (8.7e4, 2.1e5),  # Bank 2
        (9.9e4, 2.1e5)   # Bank 3
    ]
}
file_names = {
    "grouping_file_name": "hrpd_new_072_01_corr.cal"
}
# Instrument-wide defaults merged into every advanced-config dict.
general_params = {
    "spline_coefficient": 70,
    "focused_bin_widths": [
        -0.0005,  # Bank 1
        -0.0005,  # Bank 2
        -0.001    # Bank 3
    ],
    "mode": "coupled"
}
def get_all_adv_variables(tof_window=HRPD_TOF_WINDOWS.window_10_110):
    """Assemble the full advanced-config dict for the given TOF window."""
    combined = {}
    for part in (file_names, general_params, get_tof_window_dict(tof_window=tof_window)):
        combined.update(part)
    return combined
def get_tof_window_dict(tof_window):
    """Return the parameter dict for *tof_window*; raise RuntimeError if unknown."""
    known_windows = (
        (HRPD_TOF_WINDOWS.window_10_110, window_10_110_params),
        (HRPD_TOF_WINDOWS.window_30_130, window_30_130_params),
        (HRPD_TOF_WINDOWS.window_100_200, window_100_200_params),
    )
    for candidate, params in known_windows:
        if tof_window == candidate:
            return params
    raise RuntimeError("Invalid time-of-flight window: {}".format(tof_window))
|
xaxa89/mitmproxy | mitmproxy/tools/console/overlay.py | Python | mit | 3,855 | 0.000519 | import math
import urwid
from mitmproxy.tools.console import common
from mitmproxy.tools.console import signals
from mitmproxy.tools.console import grideditor
class SimpleOverlay(urwid.Overlay):
    # Centered pop-up overlay; "esc" closes it, "?" shows the widget's help.
    def __init__(self, master, widget, parent, width, valign="middle"):
        self.widget = widget
        self.master = master
        super().__init__(
            widget,
            parent,
            align="center",
            width=width,
            valign=valign,
            height="pack"
        )
    def keypress(self, size, key):
        # Give the wrapped widget first chance at the key.
        key = super().keypress(size, key)
        if key == "esc":
            signals.pop_view_state.send(self)
        # NOTE(review): "esc" also falls into the else branch below and is
        # returned to the parent after popping the view — confirm that this
        # fall-through (rather than an elif) is intentional.
        if key == "?":
            self.master.view_help(self.widget.make_help())
        else:
            return key
class Choice(urwid.WidgetWrap):
    """One selectable row in a chooser, styled by focus/current state."""

    def __init__(self, txt, focus, current):
        if current:
            attr = "option_active_selected" if focus else "option_active"
        else:
            attr = "option_selected" if focus else "text"
        super().__init__(
            urwid.AttrWrap(
                urwid.Padding(urwid.Text(txt)),
                attr,
            )
        )

    def selectable(self):
        return True

    def keypress(self, size, key):
        # Rows handle no keys themselves; bubble everything up.
        return key
class ChooserListWalker(urwid.ListWalker):
    """Walks a list of choice strings, marking the currently active one."""

    def __init__(self, choices, current):
        self.index = 0
        self.choices = choices
        self.current = current

    def _get(self, idx, focus):
        choice = self.choices[idx]
        return Choice(choice, focus, choice == self.current)

    def set_focus(self, index):
        self.index = index

    def get_focus(self):
        return self._get(self.index, True), self.index

    def get_next(self, pos):
        nxt = pos + 1
        if nxt > len(self.choices) - 1:
            return None, None
        return self._get(nxt, False), nxt

    def get_prev(self, pos):
        prv = pos - 1
        if prv < 0:
            return None, None
        return self._get(prv, False), prv
class Chooser(urwid.WidgetWrap):
    """Pop-up list of choices; invokes *callback* with the picked entry."""

    def __init__(self, title, choices, current, callback):
        self.choices = choices
        self.callback = callback
        widest = max(len(c) for c in choices)
        self.width = max(widest, len(title) + 5)
        self.walker = ChooserListWalker(choices, current)
        listbox = urwid.BoxAdapter(urwid.ListBox(self.walker), len(choices))
        framed = urwid.LineBox(listbox, title=title)
        super().__init__(urwid.AttrWrap(framed, "background"))

    def selectable(self):
        return True

    def keypress(self, size, key):
        key = common.shortcuts(key)
        if key == "enter":
            # Report the focused choice, then close the overlay.
            self.callback(self.choices[self.walker.index])
            signals.pop_view_state.send(self)
        return super().keypress(size, key)

    def make_help(self):
        """Return help markup describing the chooser key bindings."""
        bindings = [
            ("enter", "choose option"),
            ("esc", "exit chooser"),
        ]
        return list(common.format_keyvals(bindings, key="key", val="text", indent=4))
class OptionsOverlay(urwid.WidgetWrap):
    """Overlay hosting a grid editor for a named group of options.

    vspace: number of terminal rows to keep clear around the editor.
    """

    def __init__(self, master, name, vals, vspace):
        cols, rows = master.ui.get_cols_rows()
        self.ge = grideditor.OptionsEditor(master, name, vals)
        boxed = urwid.BoxAdapter(self.ge, rows - vspace)
        framed = urwid.LineBox(boxed, title=name)
        super().__init__(urwid.AttrWrap(framed, "background"))
        self.width = math.ceil(cols * 0.8)

    def make_help(self):
        return self.ge.make_help()
|
rexfrommars/havefun | python/RawEdoc/esolang/_poohbear.py | Python | gpl-3.0 | 2,092 | 0.000956 | import math
def poohbear(code):
    """Interpret a Poohbear program and return everything it printed.

    Poohbear is a brainfuck-style esolang: a tape of byte cells (values
    wrap mod 256), a movable cell pointer, one copy/paste register, and
    W...E loops that repeat while the current cell is non-zero.

    NOTE(review): as in the original, a false 'W' skips to the *first*
    following 'E', so nested loops are not matched pairwise — confirm
    against the language spec if nested programs must be supported.
    """
    out = []
    step = 1024                # tape growth increment
    mem = bytearray(step)      # tape of byte cells, grown on demand
    mc = step // 2             # cell pointer, starts mid-tape
    copied = None              # copy/paste register ('c' fills it)
    loop = []                  # stack of source indices of open 'W's
    cl = len(code)
    cc = 0                     # program counter
    while cc < cl:
        c = code[cc]
        if c == '+':
            mem[mc] = (mem[mc] + 1) % 256
        elif c == '-':
            mem[mc] = (mem[mc] - 1) % 256
        elif c == '>':
            mc += 1
            # Grow on the right edge; the old code compared against a stale
            # capacity variable and could index one past the end of the tape.
            if mc == len(mem):
                mem += bytearray(step)
        elif c == '<':
            if mc == 0:
                mem = bytearray(step) + mem
                mc = step
            mc -= 1
        elif c == 'c':
            copied = mem[mc]
        elif c == 'p':
            mem[mc] = copied
        elif c == 'W':
            # '!=' instead of 'is not': identity comparison on ints only
            # works by CPython small-int caching accident.
            if mem[mc] != 0:
                loop.append(cc)
            else:
                ec = code[cc:].find('E')
                if ec < 0:
                    break          # unmatched 'W': stop the program
                cc += ec + 1       # jump just past the closing 'E'
                continue
        elif c == 'E':
            cc = loop.pop()        # re-test the matching 'W'
            continue
        elif c == 'P':
            out.append(chr(mem[mc]))
        elif c == 'N':
            out.append(str(mem[mc]))
        elif c == 'T':
            mem[mc] = mem[mc] * 2 % 256
        elif c == 'Q':
            mem[mc] = mem[mc] ** 2 % 256
        elif c == 'U':
            mem[mc] = int(math.sqrt(mem[mc]))
        elif c == 'L':
            # These ops now wrap mod 256 like '+'/'-'/'T'/'Q'; previously an
            # over/underflow raised ValueError from the bytearray assignment.
            mem[mc] = (mem[mc] + 2) % 256
        elif c == 'I':
            mem[mc] = (mem[mc] - 2) % 256
        elif c == 'V':
            mem[mc] //= 2
        elif c == 'A':
            mem[mc] = (mem[mc] + copied) % 256
        elif c == 'B':
            mem[mc] = (mem[mc] - copied) % 256
        elif c == 'Y':
            mem[mc] = mem[mc] * copied % 256
        elif c == 'D':
            mem[mc] //= copied
        cc += 1
    return ''.join(out)
if __name__ == '__main__':
    # Self-test: the first program should print "Hello World!".
    print(poohbear('LQTcQAP>pQBBTAI-PA-PPL+P<BVPAL+T+P>PL+PBLPBP<DLLLT+P'), 'Hello World!')
    # Counts upward; the W...E loop ends once the cell wraps back to zero.
    print(poohbear('+LTQII>+WN<P>+E'))
nextmovesoftware/smilesreading | scripts/BIOVIADraw.py | Python | bsd-2-clause | 1,200 | 0.008333 | # IronPython
import clr
clr.AddReferenceToFileAndPath(r"D:\Program Files\BIOVIA\BIOVIA Draw 2018\lib\MDL.Draw.Foundation.dll")
from MDL.Draw.StructureConversion import StructureConverter
sc = StructureConverter()
import common
import urllib
import urllib2
import json
class MyAromaticSmilesWriter(common.AromaticSmilesWriter):
    # Round-trips a SMILES string through BIOVIA Draw's StructureConverter.
    def getoutput(self, smi):
        sc.Smiles = smi
        return sc.Smiles
class MyHydrogenCounter(common.HydrogenCounter):
    # Converts SMILES to a molfile via BIOVIA Draw; the molfile (with CRLFs
    # collapsed to "!!") is returned for downstream hydrogen counting.
    # IronPython / Python 2 only: uses StandardError and a print statement.
    def getoutput(self, smi):
        try:
            sc.Smiles = smi
            molfile = sc.MolfileString
        except StandardError as e:
            msg = e.message
            if "Failed to get a molfile string" in msg:
                return None, "Parse_error"
            # NOTE(review): on any other conversion error, execution falls
            # through to the return below with 'molfile' unbound — verify.
            print "%s gives %s" % (smi, msg)
        return None, "MOLFILE:%s" % molfile.replace("\r\n", "!!")
class MyStereoSmilesWriter(common.StereoSmilesWriter):
    # Round-trips a stereo SMILES string through BIOVIA Draw's converter.
    def getoutput(self, smi):
        sc.Smiles = smi
        return sc.Smiles
if __name__ == "__main__":
    # Only the stereo writer is exercised; the other runs stay disabled.
    myname = "BIOVIADraw_2018"
    # MyAromaticSmilesWriter(myname).main()
    # MyHydrogenCounter(myname).main()
    MyStereoSmilesWriter(myname).main()
|
fgmacedo/django-awards | awards/settings.py | Python | mit | 112 | 0 | from django.conf import settings
IMAGE_URL = getat | tr(settings, 'AWARDS_IMAGE_URL', 'icons/awards | /{slug}.png')
|
KlubJagiellonski/pola-backend | pola/slack.py | Python | bsd-3-clause | 2,171 | 0.001382 | import json
from datetime import datetime, timedelta
from urllib.parse import urlencode
import requests
from django.conf import settings
from rq import Queue
from pola.rq_tasks import get_url_at_time
from pola.rq_worker import conn
q = Queue(connection=conn)
def send_ai_pics(
    product,
    device_name,
    original_width,
    original_height,
    width,
    height,
    files_count,
    file_ext,
    mime_type,
    filenames,
):
    """Queue a Slack notification describing a freshly uploaded AI picture set.

    Builds a chat.postMessage URL with one image attachment per filename
    (query strings stripped) and enqueues a delayed GET via rq.
    """
    files = [
        # Keep str, not bytes: json.dumps cannot serialize bytes on
        # Python 3, so the previous .encode('utf-8') raised TypeError.
        {'title': str(index), 'image_url': filename.split('?')[0]}
        for index, filename in enumerate(filenames, start=1)
    ]
    url = 'https://slack.com/api/chat.postMessage?' + urlencode(
        {
            'token': settings.SLACK_TOKEN,
            'channel': settings.SLACK_CHANNEL_AI_PICS,
            'username': 'New AI pics',
            'text': (
                f'Product: *{product}*\n'
                f'Device: *{device_name}*\n'
                f'Dimensions: *{width}x{height}* (Original: {original_width}x{original_height})\n'
                f'*{files_count} {file_ext}* files ({mime_type})'
            ),
            'attachments': json.dumps(files),
        }
    )
    # requests.get(url)
    q.enqueue(get_url_at_time, url, datetime.utcnow() + timedelta(seconds=15))
def send_ai_pics_request(product, preview_text):
    """Queue a Slack notification that AI pictures were requested for a product."""
    url = 'https://slack.com/api/chat.postMessage?' + urlencode(
        {
            'token': settings.SLACK_TOKEN,
            'channel': settings.SLACK_CHANNEL_AI_PICS,
            'username': 'AI pics Requested',
            # Interpolate the str directly: f-strings render bytes as
            # "b'...'", so the previous .encode('utf-8') garbled the message.
            'text': f"Product: *{product}*\nPreview text: *{preview_text}*",
        }
    )
    # requests.get(url)
    q.enqueue(get_url_at_time, url, datetime.utcnow() + timedelta(seconds=0))
def send_ai_pics_stats(msg):
    # Posts an AI-statistics message to Slack synchronously (unlike the
    # other helpers here, which enqueue a delayed fetch via rq).
    url = 'https://slack.com/api/chat.postMessage?' + urlencode(
        {
            'token': settings.SLACK_TOKEN,
            'channel': settings.SLACK_CHANNEL_AI_STATS,
            'username': 'AI Stats',
            'text': msg.encode('utf-8'),  # urlencode accepts bytes values
        }
    )
    requests.get(url)
    # q.enqueue(get_url_at_time, url, datetime.utcnow()+timedelta(seconds=0))
# q.enqueue(get_url_at_time, url, datetime.utcnow()+timedelta(seconds=0))
|
salberin/libsigrokdecode | decoders/onewire_network/pd.py | Python | gpl-3.0 | 7,048 | 0.004115 | ##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2012 Iztok Jeras <iztok.jeras@gmail.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
import sigrokdecode as srd
# Dictionary of ROM commands and their names, next state.
command = {
    # ROM command byte: [human-readable name, next decoder state]
    0x33: ['Read ROM'               , 'GET ROM'   ],
    0x0f: ['Conditional read ROM'   , 'GET ROM'   ],
    0xcc: ['Skip ROM'               , 'TRANSPORT' ],
    0x55: ['Match ROM'              , 'GET ROM'   ],
    0xf0: ['Search ROM'             , 'SEARCH ROM'],
    0xec: ['Conditional search ROM' , 'SEARCH ROM'],
    0x3c: ['Overdrive skip ROM'     , 'TRANSPORT' ],
    0x69: ['Overdrive match ROM'    , 'GET ROM'   ],
}
class Decoder(srd.Decoder):
    """sigrok decoder for the 1-Wire network layer.

    Consumes 'RESET/PRESENCE' and 'BIT' packets from the link layer and
    emits annotations plus 'ROM'/'DATA' packets for the transport layer.
    """
    api_version = 2
    id = 'onewire_network'
    name = '1-Wire network layer'
    longname = '1-Wire serial communication bus (network layer)'
    desc = 'Bidirectional, half-duplex, asynchronous serial bus.'
    license = 'gplv2+'
    inputs = ['onewire_link']
    outputs = ['onewire_network']
    annotations = (
        ('text', 'Human-readable text'),
    )
    def __init__(self, **kwargs):
        # beg/end: sample range of the sequence currently being collected.
        self.beg = 0
        self.end = 0
        # State machine starts by expecting a ROM command byte.
        self.state = 'COMMAND'
        self.bit_cnt = 0
        # Search sub-state: 'P' original bit, 'N' complement, 'D' driven bit.
        self.search = 'P'
        self.data_p = 0x0
        self.data_n = 0x0
        self.data = 0x0
        self.rom = 0x0000000000000000
    def start(self):
        """Register the Python and annotation output channels."""
        self.out_python = self.register(srd.OUTPUT_PYTHON)
        self.out_ann = self.register(srd.OUTPUT_ANN)
    def putx(self, data):
        # Helper function for most annotations.
        self.put(self.beg, self.end, self.out_ann, data)
    def puty(self, data):
        # Helper function for most protocol packets.
        self.put(self.beg, self.end, self.out_python, data)
    def decode(self, ss, es, data):
        """Handle one link-layer packet (ss/es are sample numbers)."""
        code, val = data
        # State machine.
        if code == 'RESET/PRESENCE':
            # A bus reset aborts any collection in progress.
            self.search = 'P'
            self.bit_cnt = 0
            self.put(ss, es, self.out_ann,
                     [0, ['Reset/presence: %s' % ('true' if val else 'false')]])
            self.put(ss, es, self.out_python, ['RESET/PRESENCE', val])
            self.state = 'COMMAND'
            return
        # For now we're only interested in 'RESET/PRESENCE' and 'BIT' packets.
        if code != 'BIT':
            return
        if self.state == 'COMMAND':
            # Receiving and decoding a ROM command.
            if self.onewire_collect(8, val, ss, es) == 0:
                return
            if self.data in command:
                self.putx([0, ['ROM command: 0x%02x \'%s\''
                          % (self.data, command[self.data][0])]])
                self.state = command[self.data][1]
            else:
                self.putx([0, ['ROM command: 0x%02x \'%s\''
                          % (self.data, 'unrecognized')]])
                self.state = 'COMMAND ERROR'
        elif self.state == 'GET ROM':
            # A 64 bit device address is selected.
            # Family code (1 byte) + serial number (6 bytes) + CRC (1 byte)
            if self.onewire_collect(64, val, ss, es) == 0:
                return
            self.rom = self.data & 0xffffffffffffffff
            self.putx([0, ['ROM: 0x%016x' % self.rom]])
            self.puty(['ROM', self.rom])
            self.state = 'TRANSPORT'
        elif self.state == 'SEARCH ROM':
            # A 64 bit device address is searched for.
            # Family code (1 byte) + serial number (6 bytes) + CRC (1 byte)
            if self.onewire_search(64, val, ss, es) == 0:
                return
            self.rom = self.data & 0xffffffffffffffff
            self.putx([0, ['ROM: 0x%016x' % self.rom]])
            self.puty(['ROM', self.rom])
            self.state = 'TRANSPORT'
        elif self.state == 'TRANSPORT':
            # The transport layer is handled in byte sized units.
            if self.onewire_collect(8, val, ss, es) == 0:
                return
            self.putx([0, ['Data: 0x%02x' % self.data]])
            self.puty(['DATA', self.data])
        elif self.state == 'COMMAND ERROR':
            # Since the command is not recognized, print raw data.
            if self.onewire_collect(8, val, ss, es) == 0:
                return
            self.putx([0, ['ROM error data: 0x%02x' % self.data]])
        else:
            raise Exception('Invalid state: %s' % self.state)
    # Data collector.
    def onewire_collect(self, length, val, ss, es):
        """Accumulate bits LSB-first into self.data; return 1 once complete."""
        # Storing the sample this sequence begins with.
        if self.bit_cnt == 1:
            self.beg = ss
        self.data = self.data & ~(1 << self.bit_cnt) | (val << self.bit_cnt)
        self.bit_cnt += 1
        # Storing the sample this sequence ends with.
        # In case the full length of the sequence is received, return 1.
        if self.bit_cnt == length:
            self.end = es
            self.data = self.data & ((1 << length) - 1)
            self.bit_cnt = 0
            return 1
        else:
            return 0
    # Search collector.
    def onewire_search(self, length, val, ss, es):
        """Accumulate SEARCH ROM triplets (bit, complement, driven bit).

        Each address bit takes three bus bits; only the master-driven bit
        ends up in self.data. Returns 1 once *length* address bits are done.
        """
        # Storing the sample this sequence begins with.
        if (self.bit_cnt == 0) and (self.search == 'P'):
            self.beg = ss
        if self.search == 'P':
            # Master receives an original address bit.
            self.data_p = self.data_p & ~(1 << self.bit_cnt) | \
                (val << self.bit_cnt)
            self.search = 'N'
        elif self.search == 'N':
            # Master receives a complemented address bit.
            self.data_n = self.data_n & ~(1 << self.bit_cnt) | \
                (val << self.bit_cnt)
            self.search = 'D'
        elif self.search == 'D':
            # Master transmits an address bit.
            self.data = self.data & ~(1 << self.bit_cnt) | (val << self.bit_cnt)
            self.search = 'P'
            self.bit_cnt += 1
        # Storing the sample this sequence ends with.
        # In case the full length of the sequence is received, return 1.
        if self.bit_cnt == length:
            self.end = es
            self.data_p = self.data_p & ((1 << length) - 1)
            self.data_n = self.data_n & ((1 << length) - 1)
            self.data = self.data & ((1 << length) - 1)
            self.search = 'P'
            self.bit_cnt = 0
            return 1
        else:
            return 0
|
ain7/www.ain7.org | ain7/news/models.py | Python | lgpl-2.1 | 6,657 | 0.005712 | # -*- coding: utf-8
"""
ain7/news/models.py
"""
#
# Copyright © 200 | 7-2018 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# | This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import datetime
from django.core.urlresolvers import reverse
from django.db import models
from django.template import defaultfilters
from django.utils.translation import ugettext as _
from ain7.annuaire.models import Person
from ain7.utils import LoggedClass
class EventOrganizer(models.Model):
    """Through-model linking an event to one of the people organizing it."""
    # The event being organized.
    event = models.ForeignKey('news.NewsItem', verbose_name=_('event'),
        related_name='event_organizers')
    # The person organizing the event.
    organizer = models.ForeignKey(Person, verbose_name=_('organizer'),
        related_name='organized_events')
    # Whether this organizer is notified by email of each new subscription.
    send_email_for_new_subscriptions = models.BooleanField(default=False,
        verbose_name=_('send email for new subscription'))
class RSVPAnswer(models.Model):
person = models.ForeignKey('annuaire.Person')
event = models.ForeignKey('news.NewsItem')
yes = models.BooleanField(default=False)
no = models.BooleanField(default=False)
maybe = models.BooleanField(default=False)
number = models.IntegerField(verbose_name=_('number of persons'), default=1)
payment = models.ForeignKey('shop.Payment', null=True, blank=True)
created_on = models.DateTimeField(auto_now_add=True)
created_by = models.ForeignKey('annuaire.Person', related_name='rsvpanswers_created')
updated_on = models.DateTimeField(auto_now=True)
updated_by = models.ForeignKey('annuaire.Person', related_name='rsvpanswers_updated')
def answer(self):
if self.yes:
return _('yes')
if self.no:
return _('no')
if self.maybe:
return _('maybe')
class NewsItemManager(models.Manager):
"""news item manager"""
def next_events(self):
"""Returns all future events."""
return self.filter(date__gte=datetime.datetime.now())
class NewsItem(LoggedClass):
"""news item"""
STATUS = (
(0,_('project')),
(1,_('confirmed')),
(2,_('cancel')),
)
slug = models.SlugField(max_length=100, unique=True)
title = models.CharField(verbose_name=_('title'), max_length=100, unique=True)
body = models.TextField(verbose_name=_('body'))
shorttext = models.CharField(verbose_name=_('short text'), max_length=500,
blank=True, null=True)
image = models.ImageField(verbose_name=_('image'), upload_to='data',
blank=True, null=True)
creation_date = models.DateTimeField(verbose_name=_('date'),
default=datetime.datetime.today, editable=False)
front_page_presence = models.BooleanField(_('Front Page Presence'), default=True)
# to which group we should link this news
groups = models.ManyToManyField('groups.Group',
verbose_name=_('groups'), related_name='events',
blank=True)
# those fields are only present for an event
date = models.DateTimeField(verbose_name=_('date'), blank=True, null=True)
location = models.CharField(verbose_name=_('location'), max_length=60,
blank=True, null=True)
status = models.IntegerField(verbose_name=_('status'), choices=STATUS,
blank=True, null=True)
contact_email = models.EmailField(verbose_name=_('contact email'),
max_length=50, blank=True, null=True)
link = models.CharField(verbose_name=_('external link'), max_length=60,
blank=True, null=True)
# organizers = models.ManyToManyField(Person, verbose_name=_('organizers'),
# related_name='events', blank=True, null=True, through='EventOrganizer')
pictures_gallery = models.CharField(verbose_name=_('Pictures gallery'),
max_length=100, blank=True, null=True)
package = models.ForeignKey('shop.Package', blank=True, null=True)
rsvp_question = models.CharField(verbose_name=_('extra question'),
max_length=100, blank=True, null=True)
rsvp_begin = models.DateField(verbose_name=_('rsvp begin'),
blank=True, null=True)
rsvp_end = models.DateField(verbose_name=_('rsvp end'),
blank=True, null=True)
rsvp_multiple = models.BooleanField(default=True)
objects = NewsItemManager()
def __unicode__(self):
"""news item unicode method"""
return self.title
def get_absolute_url(self):
"""news item url"""
if self.date:
return reverse('event-details', args=[self.id])
else:
return reverse('news-details', args=[self.slug])
def save(self):
"""news item save method"""
if self.pictures_gallery:
if not self.pictures_gallery.startswith('http://'):
self.pictures_gallery = 'http://'+self.pictures_gallery
self.slug = defaultfilters.slugify(self.title)
super(NewsItem, self).save()
def rsvp_answer(self, person, yes=False, no=False, maybe=False):
"""define a rsvp answer to an event"""
rsvp = None
if RSVPAnswer.objects.filter(person=person, event=self).count() == 1:
rsvp = RSVPAnswer.objects.get(person=person, event=self)
rsvp.no = no
rsvp.yes = yes
rsvp.maybe = maybe
rsvp.updated_by = person
else:
rsvp = RSVPAnswer(person=person, event=self,
created_by=person, updated_by=person,
no=no, yes=yes, maybe=maybe, number=0)
if yes:
rsvp.number = 1
rsvp.save()
return rsvp
def attendees(self):
"""return event attendees"""
return self.RSVAnswers.filter(yes=True)
def attendeees_number(self):
"""Renvoie le nombre de participants à l'événement."""
nbpart = 0
for sub in self.RSVPAnswers.filter(yes=True):
nbpart += sub.number
return nbpart
class Meta:
"""news item meta information"""
ordering = ['-creation_date']
verbose_name = _('news item')
|
dharmit/microblog | db_upgrade.py | Python | mit | 407 | 0 | #!flask/bin/python
# This script upgrades the database version to one version above the current
# version.
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config | import SQLALCHEMY_MIGRATE_REPO
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print "Current database version: " + \
str(api.db | _version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO))
|
dl1ksv/gnuradio | grc/core/ports/port.py | Python | gpl-3.0 | 9,414 | 0.001593 | # Copyright 2008-2016 Free Software Foundation, Inc.
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
from . import _virtual_connections
from .. import Constants
from ..base import Element
from ..utils.descriptors import (
EvaluatedFlag, EvaluatedEnum, EvaluatedPInt,
setup_names, lazy_property
)
@setup_names
class Port(Element):
is_port = True
dtype = EvaluatedEnum(list(Constants.TYPE_TO_SIZEOF.keys()), default='')
vlen = EvaluatedPInt()
multiplicity = EvaluatedPInt()
hidden = EvaluatedFlag()
optional = EvaluatedFlag()
def __init__(self, parent, direction, id, label='', domain=Constants.DEFAULT_DOMAIN, dtype='',
vlen='', multiplicity=1, optional=False, hide=False, bus_struct=None, **_):
"""Make a new port from nested data."""
Element.__init__(self, parent)
self._dir = direction
self.key = id
if not label:
label = id if not id.isdigit() else {'sink': 'in', 'source': 'out'}[
direction]
if dtype == 'bus':
# Look for existing busses to give proper index
busses = [p for p in self.parent.ports() if p._dir ==
self._dir and p.dtype == 'bus']
bus_structure = self.parent.current_bus_structure[self._dir]
bus_index = len(busses)
if len(bus_structure) > bus_index:
number = str(len(busses)) + '#' + \
str(len(bus_structure[bus_index]))
label = dtype + number
else:
raise ValueError(
'Could not initialize bus port due to incompatible bus structure')
self.name = self._base_name = label
self.domain = domain
self.dtype = dtype
self.vlen = vlen
if domain == Constants.GR_MESSAGE_DOMAIN: # ToDo: message port class
self.key = self.name
self.dtype = 'message'
self.multiplicity = multiplicity
self.optional = optional
self.hidden = hide
self.stored_hidden_state = None
self.bus_structure = bus_struct
# end of args ########################################################
self.clones = [] # References to cloned ports (for nports > 1)
def __str__(self):
if self.is_source:
return 'Source - {}({})'.format(self.name, self.key)
if self.is_sink:
return 'Sink - {}({})'.format(self.name, self.key)
def __repr__(self):
return '{!r}.{}[{}]'.format(self.parent, 'sinks' if self.is_sink else 'sources', self.key)
@property
def item_size(self):
return Constants.TYPE_TO_SIZEOF[self.dtype] * self.vlen
@lazy_property
def is_sink(self):
return self._dir == 'sink'
@lazy_property
def is_source(self):
return self._dir == 'source'
@property
def inherit_type(self):
"""always empty for e.g. virtual blocks, may eval to empty for 'Wildcard'"""
return not self.dtype
def validate(self):
del self._error_messages[:]
Element.validate(self)
platform = self.parent_platform
num_connections = len(list(self.connections(enabled=True)))
need_connection = not self.optional and not self.hidden
if need_connection and num_connections == 0:
self.add_error_message('Port is not connected.')
if self.dtype n | ot in Constants.TYPE_TO_SIZEOF.keys():
self.add_error_message(
'Type "{}" is not a possible type.'.format(self.dtype))
try:
domain = platform.domains[self.domain]
i | f self.is_sink and not domain.multi_in and num_connections > 1:
self.add_error_message('Domain "{}" can have only one upstream block'
''.format(self.domain))
if self.is_source and not domain.multi_out and num_connections > 1:
self.add_error_message('Domain "{}" can have only one downstream block'
''.format(self.domain))
except KeyError:
self.add_error_message(
'Domain key "{}" is not registered.'.format(self.domain))
def rewrite(self):
del self.vlen
del self.multiplicity
del self.hidden
del self.optional
del self.dtype
if self.inherit_type:
self.resolve_empty_type()
Element.rewrite(self)
# Update domain if was deduced from (dynamic) port type
if self.domain == Constants.GR_STREAM_DOMAIN and self.dtype == "message":
self.domain = Constants.GR_MESSAGE_DOMAIN
self.key = self.name
if self.domain == Constants.GR_MESSAGE_DOMAIN and self.dtype != "message":
self.domain = Constants.GR_STREAM_DOMAIN
self.key = '0' # Is rectified in rewrite()
def resolve_virtual_source(self):
"""Only used by Generator after validation is passed"""
return _virtual_connections.upstream_ports(self)
def resolve_empty_type(self):
def find_port(finder):
try:
return next((p for p in finder(self) if not p.inherit_type), None)
except _virtual_connections.LoopError as error:
self.add_error_message(str(error))
except (StopIteration, Exception):
pass
try:
port = find_port(_virtual_connections.upstream_ports) or \
find_port(_virtual_connections.downstream_ports)
# we don't want to override the template
self.set_evaluated('dtype', port.dtype)
# we don't want to override the template
self.set_evaluated('vlen', port.vlen)
self.domain = port.domain
except AttributeError:
self.domain = Constants.DEFAULT_DOMAIN
def add_clone(self):
"""
Create a clone of this (master) port and store a reference in self._clones.
The new port name (and key for message ports) will have index 1... appended.
If this is the first clone, this (master) port will get a 0 appended to its name (and key)
Returns:
the cloned port
"""
# Add index to master port name if there are no clones yet
if not self.clones:
self.name = self._base_name + '0'
# Also update key for none stream ports
if not self.key.isdigit():
self.key = self.name
name = self._base_name + str(len(self.clones) + 1)
# Dummy value 99999 will be fixed later
key = '99999' if self.key.isdigit() else name
# Clone
port_factory = self.parent_platform.make_port
port = port_factory(self.parent, direction=self._dir,
name=name, key=key,
master=self, cls_key='clone')
self.clones.append(port)
return port
def remove_clone(self, port):
"""
Remove a cloned port (from the list of clones only)
Remove the index 0 of the master port name (and key9 if there are no more clones left
"""
self.clones.remove(port)
# Remove index from master port name if there are no more clones
if not self.clones:
self.name = self._base_name
# Also update key for none stream ports
if not self.key.isdigit():
self.key = self.name
def connections(self, enabled=None):
"""Iterator over all connections to/from this port
enabled: None for all, True for enabled only, False for disabled only
"""
for con in self.parent_flowgraph.connections:
# TODO clean this up - but how to get past this validation
# things don't compare simply with an x in y because
# bus ports are created differently.
port_in_con = False
if self.dtype == 'bus':
if self.is_sink:
if (self.parent.name == con.sink_port.parent.name and
self.name == |
Stanford-Online/edx-platform | lms/djangoapps/ccx/models.py | Python | agpl-3.0 | 3,795 | 0.001318 | """
Models for the custom course feature
"""
from __future__ import unicode_literals
import json
import logging
from datetime import datetime
from ccx_keys.locator import CCXLocator
from django.contrib.auth.models import User
from django.db import models
from lazy import lazy
from opaque_keys.edx.django.models import CourseKeyField, UsageKeyField
from pytz import utc
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.django import modulestore
log = logging.getLogger("edx.ccx")
class CustomCourseForEdX(models.Model):
"""
A Custom Course.
"""
co | urse_id = CourseKeyField(max_length=255, db_index=True)
display_name = models.CharField(max_length=255)
coach = models.ForeignKey(User, db_index=True, on_delete=models.CASCADE)
# if not empty, this field contains a json serialized list of
# the master course modules
structure_json = models.TextField(verbose_name='Structure JSON', blank=True, null=True)
class Meta(object):
| app_label = 'ccx'
@lazy
def course(self):
"""Return the CourseDescriptor of the course related to this CCX"""
store = modulestore()
with store.bulk_operations(self.course_id):
course = store.get_course(self.course_id)
if not course or isinstance(course, ErrorDescriptor):
log.error("CCX {0} from {2} course {1}".format( # pylint: disable=logging-format-interpolation
self.display_name, self.course_id, "broken" if course else "non-existent"
))
return course
@lazy
def start(self):
"""Get the value of the override of the 'start' datetime for this CCX
"""
# avoid circular import problems
from .overrides import get_override_for_ccx
return get_override_for_ccx(self, self.course, 'start')
@lazy
def due(self):
"""Get the value of the override of the 'due' datetime for this CCX
"""
# avoid circular import problems
from .overrides import get_override_for_ccx
return get_override_for_ccx(self, self.course, 'due')
@lazy
def max_student_enrollments_allowed(self):
"""
Get the value of the override of the 'max_student_enrollments_allowed'
datetime for this CCX
"""
# avoid circular import problems
from .overrides import get_override_for_ccx
return get_override_for_ccx(self, self.course, 'max_student_enrollments_allowed')
def has_started(self):
"""Return True if the CCX start date is in the past"""
return datetime.now(utc) > self.start
def has_ended(self):
"""Return True if the CCX due date is set and is in the past"""
if self.due is None:
return False
return datetime.now(utc) > self.due
@property
def structure(self):
"""
Deserializes a course structure JSON object
"""
if self.structure_json:
return json.loads(self.structure_json)
return None
@property
def locator(self):
"""
Helper property that gets a corresponding CCXLocator for this CCX.
Returns:
The CCXLocator corresponding to this CCX.
"""
return CCXLocator.from_course_locator(self.course_id, unicode(self.id))
class CcxFieldOverride(models.Model):
"""
Field overrides for custom courses.
"""
ccx = models.ForeignKey(CustomCourseForEdX, db_index=True, on_delete=models.CASCADE)
location = UsageKeyField(max_length=255, db_index=True)
field = models.CharField(max_length=255)
class Meta(object):
app_label = 'ccx'
unique_together = (('ccx', 'location', 'field'),)
value = models.TextField(default='null')
|
downneck/mothership | mothership/idrac6/__init__.py | Python | apache-2.0 | 8,163 | 0.0049 | # Copyright 2011 Gilt Groupe, INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The idrac6 module contains all the functionality that mothership will need
to interact with the Dell DRACs using pexpect and some IPMI commands
"""
import os
import sys
import re
import pexpect
import subprocess
def check_known_hosts(host, user='~'):
# check for ssh host key
hasKey = False
userknownhosts = os.path.expanduser('%s/.ssh/known_hosts' % user)
for file in [ '/etc/ssh/ssh_known_hosts', userknownhosts ]:
for line in open(file):
if host in line:
hasKey = True
break
if not hasKey:
print '+=== Adding %s to known_hosts' % host
key = subprocess.Popen(['ssh-keyscan', host],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).\
communicate()[0]
f = open(userknownhosts, 'a')
f.write(key)
def query_idrac(cfg, host):
check_known_hosts(host)
child = pexpect.spawn('ssh %s@%s' % (cfg.puser, host))
child.expect('assword:')
child.sendline(cfg.ppass)
child.expect('/admin1->')
child.sendline('racadm getsysinfo')
child.expect('/admin1->')
data = child.before
child.sendline('exit')
child.expect('CLP Session terminated')
return data
def sysinfo(info):
"""
Parses information returned from drac getsysinfo command
and returns it as a dictionary
"""
out={}
parsers = [
r'(Firmware Version\s+=\s(?P<firmware>[.\d]+))',
r'(System BIOS Version\s+=\s(?P<bios>[.\d]+))',
r'(System Model\s+=\s(?P<model>[ \w]+))',
r'(Service Tag\s+=\s(?P<hw_tag>\w+))',
r'(MAC Address\s+=\s(?P<drac>[a-f:0-9]+))',
r'(?P<hwname>NIC\d+)\sEthernet\s+=\s(?P<hwaddr>[a-f:0-9]+)',
]
for line in info.split('\n'):
for pattern in parsers:
match = re.search(pattern, line)
if match:
if 'hwname' in match.groupdict().keys():
num = match.group('hwname').replace('NIC','')
out['eth%d' % (int(num)-1)] = match.group('hwaddr')
else:
for key in match.groupdict().keys():
out[key] = match.group(key)
if key == 'model' and re.match('PowerEdge', match.group(key)):
out['manufacturer'] = 'Dell'
return out
def prep_idrac(cfg, host, debug=False, basics=True, ipmi=False, serial=False, telnet=None, gold=False):
check_known_hosts(host)
try:
check_known_hosts(host, '~root')
except IOError, e:
print '%s\nDRAC prep must be run as root/sudo' % e
sys.exit(1)
configured = False
pubkeys = []
for p in cfg.dkeys:
f = open('%s/.ssh/id_dsa.pub' % os.path.expanduser('~%s' % p))
pubkeys.append(f.read().rstrip())
f.close()
adm_pre = 'racadm config -g cfgUserAdmin -i'
adm_obj = [
'-o cfgUserAdminUserName %s' % cfg.puser,
'-o cfgUserAdminPassword %s' % cfg.ppass,
'-o cfgUserAdminPrivilege 0x000001ff',
'-o cfgUserAdminIpmiLanPrivilege 4',
'-o cfgUserAdminIpmiSerialPrivilege 4',
'-o cfgUserAdminSolEnable 1',
'-o cfgUserAdminEnable 1' ]
# login as default root
print '+=== Prepping DRAC for %s' % host
child = pexpect.spawn('ssh %s@%s' % (cfg.duser,host))
child.expect('assword:')
child.sendline(cfg.ddell)
ans = child.expect([ 'Permission denied', '/admin1->', pexpect.TIMEOUT ])
if ans == 0:
# login as power user
print '+- Default root denied, attempting %s alternate' % cfg.puser
child = pexpect.spawn('ssh %s@%s' % (cfg.puser,host))
child.expect('assword:')
child.sendline(cfg.ppass)
newans = child.expect([ 'Permission denied', '/admin1->'])
if newans == 0:
print '+- Alternate %s failed, exiting' % cfg.puser
sys.exit(2)
userdata = 'default root disabled'
configured = True
elif ans == 1:
# configure new admin user
print '+- Adding DRAC user: %s' % cfg.puser
for c in adm_obj:
child.sendline('%s 3 %s' % (adm_pre,c))
child.expect('/admin1->')
child.sendline('racadm getconfig -u %s' % cfg.puser)
child.expect('/admin1->')
userdata = '\n'.join(child.before.split('\n')[1:])
if debug: print userdata
elif ans == 2:
# timeout
print '+- Default root login timed out, unknown error'
sys.exit(2)
if basics or ipmi: # enable IPMI
print '+- Enabling IPMI'
child.sendline('racadm config -g cfgIpmiLan -o cfgIpmiLanEnable 1')
child.expect('/admin1->')
child.sendline('racadm getconfig -g cfgIpmiLan')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
if basics or serial: # enable SerialConsole
print '+- Enabling SerialConsole'
child.sendline('racadm config -g cfgSerial -o cfgSerialConsoleEnable 1')
child.expect('/admin1->')
child.sendline('racadm getconfig -g cfgSerial')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
if telnet is not None: # enable Telnet
print '+- Enabling/Disabling Telnet'
child.sendline('racadm config -g cfgSerial -o cfgSerialTelnetEnable %d' % telnet)
child.expect('/admin1->')
child.sendline('racadm getconfig -g cfgSerial')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
if basics or gold: # gold = trusted user
# configure new trusted user
adm_obj[0] = '-o cfgUserAdminUserName %s' % cfg.dgold
adm_obj[1] = '-o cfgUserAdminPassword %s' % cfg.dpass
print '+- Adding trusted DRAC user: %s' % cfg.dgold
for c in adm_obj:
child.sendline('%s 4 %s' % (adm_pre,c))
child.expect('/admin1->')
child.sendline('racadm getconfig -u %s' % cfg.dgold)
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
# add keys to trusted user
print '+- Adding keys for trusted user'
for k in pubkeys:
child.sendline('racadm sshpkauth -i 4 -k %d -t "%s"' % (pubkeys.index(k)+1, k))
child.expect('/admin1->')
child.sendline('racadm sshpkauth -v -i 4 -k all')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
# alter password for root user
if cfg.puser in userdata:
print '+- Changing password for: %s' % cfg.duser
child.sendline('%s 2 -o cfgUserAdminPassword %s' % (adm_pre,cfg.dpass))
child.expect('/admin1->')
if debug: print '\n'.join(chi | ld.before.split('\n')[1:])
else:
print '+- Skipping password change for: %s' % cfg.duser
if not configured: print ' because %s was not successfully created' % cfg.puser
# leaving drac
print '+- Exiting DRAC'
child.sendline('exit')
child.expect('CLP Session terminated')
if basics or ipmi: # enable IPMI, continued
# settings new admin user privileges with IPMI (apparently racadm was not enough)
print | '+- Updating IPMI privileges for non-root users'
os.system('/usr/bin/ipmitool -H %s -U root -P %s user priv 3 4' % (host, cfg.dpass))
os.system('/usr/bin/ipmitool -H %s -U root -P %s user priv 4 4' % (host, cfg.dpass))
if debug: os.system('/usr/bin/ipmitool -H %s -U root -P %s user list' % (host, cfg.dpass))
|
longman694/youtube-dl | youtube_dl/downloader/hls.py | Python | unlicense | 8,443 | 0.002961 | from __future__ import unicode_literals
import re
import binascii
try:
from Crypto.Cipher import AES
can_decrypt_frag = True
except ImportError:
can_decrypt_frag = False
from .fragment import FragmentFD
from .external import FFmpegFD
from ..compat import (
compat_urllib_error,
compat_urlparse,
compat_struct_pack,
)
from ..utils import (
parse_m3u8_attributes,
update_url_query,
)
class HlsFD(FragmentFD):
""" A limited implementation that does not require ffmpeg """
FD_NAME = 'hlsnative'
@staticmethod
def can_download(manifest, info_dict):
UNSUPPORTED_FEATURES = (
r'#EXT-X-KEY:METHOD=(?!NONE|AES-128)', # encrypted streams [1]
# r'#EXT-X-BYTERANGE', # playlists composed of byte ranges of media files [2]
# Live streams heuristic does not always work (e.g. geo restricted to Germany
# http://hls-geo.daserste.de/i/videoportal/Film/c_620000/622873/format,716451,716457,716450,716458,716459,.mp4.csmil/index_4_av.m3u8?null=0)
# r'#EXT-X-MEDIA-SEQUENCE:(?!0$)', # live streams [3]
# This heuristic also is not correct since segments may not be appended as well.
# Twitch vods of finished streams have EXT-X-PLAYLIST-TYPE:EVENT despite
# no segments will definitely be appended to the end of the playlist.
# r'#EXT-X-PLAYLIST-TYPE:EVENT', # media segments may be appended to the end of
# # event media playlists [4]
# 1. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.4
# 2. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.2.2
# 3. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.2
# 4. https://tools.ietf.org/html/draft-pantos-http-live-streaming-17#section-4.3.3.5
)
check_results = [not re.search(feature, manifest) for feature in UNSUPPORTED_FEATURES]
is_aes128_enc = '#EXT-X-KEY:METHOD=AES-128' in manifest
check_results.append(can_decrypt_frag or not is_aes128_enc)
check_results.append(not (is_aes128_enc and r'#EXT-X-BYTERANGE' in manifest))
check_results.append(not info_dict.get('is_live'))
return all(check_results)
def real_download(self, filename, info_dict):
man_url = info_dict['url']
self.to_screen('[%s] Downloading m3u8 manifest' % self.FD_NAME)
urlh = self.ydl.urlopen(self._prepare_url(info_dict, man_url))
man_url = urlh.geturl()
s = urlh.read().decode('utf-8', 'ignore')
if not self.can_download(s, info_dict):
if info_dict.get('extra_param_to_segment_url'):
self.report_error('pycrypto not found. Please install it.')
return False
self.report_warning(
'hlsnative has detected features it does not support, '
'extraction will be delegated to ffmpeg')
fd = FFmpegFD(self.ydl, self.params)
for ph in self._progress_hooks:
fd.add_progress_hook(ph)
return fd.real_download(filename, info_dict)
total_frags = 0
for line in s.splitlines():
line = line.strip()
if line and not line.startswith('#'):
total_frags += 1
ctx = {
'filename': filename,
'total_frags': total_frags,
}
self._prepare_and_start_frag_download(ctx)
fragment_retries = self.params.get('fragment_retries', 0)
skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
test = self.params.get('test', False)
extra_query = None
extra_param_to_segment_url = info_dict.get('extra_param_to_segment_url')
if extra_param_to_segment_url:
extra_query = compat_urlparse.parse_qs(extra_param_to_segment_url)
i = 0
media_sequence = 0
decrypt_info = {'METHOD': 'NONE'}
byte_range = {}
frag_index = 0
for line in s.splitlines():
line = line.strip()
if line:
if not line.startswith('#'):
frag_index += 1
if frag_index <= ctx['fragment_index']:
continue
frag_url = (
line
if re.match(r'^https?://', line)
else compat_urlparse.urljoin(man_url, line))
if extra_query:
frag_url = update_url_query(frag_url, extra_query)
count = 0
headers = info_dict.get('http_headers', {})
if byte_range:
headers['Range'] = 'bytes=%d-%d' % (byte_range['start'], byte_range['end'])
while count <= fragment_retries:
try:
success, frag_content = self._download_fragment(
ctx, frag_url, i | nfo_dict, headers)
if not success:
return False
break
except compat_urllib_error.HTTPError as err:
# Unavailable (possibly temporary) fragments may be served.
# First we try to retry then either skip or abort.
# See https: | //github.com/rg3/youtube-dl/issues/10165,
# https://github.com/rg3/youtube-dl/issues/10448).
count += 1
if count <= fragment_retries:
self.report_retry_fragment(err, frag_index, count, fragment_retries)
if count > fragment_retries:
if skip_unavailable_fragments:
i += 1
media_sequence += 1
self.report_skip_fragment(frag_index)
continue
self.report_error(
'giving up after %s fragment retries' % fragment_retries)
return False
if decrypt_info['METHOD'] == 'AES-128':
iv = decrypt_info.get('IV') or compat_struct_pack('>8xq', media_sequence)
decrypt_info['KEY'] = decrypt_info.get('KEY') or self.ydl.urlopen(decrypt_info['URI']).read()
frag_content = AES.new(
decrypt_info['KEY'], AES.MODE_CBC, iv).decrypt(frag_content)
self._append_fragment(ctx, frag_content)
# We only download the first fragment during the test
if test:
break
i += 1
media_sequence += 1
elif line.startswith('#EXT-X-KEY'):
decrypt_url = decrypt_info.get('URI')
decrypt_info = parse_m3u8_attributes(line[11:])
if decrypt_info['METHOD'] == 'AES-128':
if 'IV' in decrypt_info:
decrypt_info['IV'] = binascii.unhexlify(decrypt_info['IV'][2:].zfill(32))
if not re.match(r'^https?://', decrypt_info['URI']):
decrypt_info['URI'] = compat_urlparse.urljoin(
man_url, decrypt_info['URI'])
if extra_query:
decrypt_info['URI'] = update_url_query(decrypt_info['URI'], extra_query)
if decrypt_url != decrypt_info['URI']:
decrypt_info['KEY'] = None
elif line.startswith('#EXT-X-MEDIA-SEQUENCE'):
media_sequence = int(line[22:])
elif line.startswith('#EXT-X-BYTERANGE'):
splitted_byte_range = line[17:].split('@')
sub_range_start = int(splitted_byte_range[1]) if len(splitted_byte_range) == 2 else by |
Max-E/max-opencv-demos | screens/preferences/include.py | Python | mit | 234 | 0 | import util
from util.include import *
grid_ | margin_w = util.input.cfg_w / 6.0
grid_margin_h = util.input.cfg_h / 6.0
cell_w = util.input.cfg_w * 2.0 / 9.0
cell_h = util.input.cfg_h * 2.0 / 9.0
mark_none = [ | ]
mark_x = []
mark_o = []
|
mogproject/mog-commons-python | tests/mog_commons/test_command.py | Python | apache-2.0 | 2,719 | 0.004835 | # -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import, unicode_literals
import os
import time
import threading
import tempfile
from mog_commons.command import *
from mog_commons import unittest
class TestCommand(unittest.TestCase):
def test_execute_command(self):
self.assertEqual(execute_command(['exit', '2'], shell=True), 2)
self.assertEqual(execute_command('exit 3', shell=True), 3)
if os.name == 'nt':
self.assertEqual(execute_command(['cmd', '/C', 'exit 4'], shell=False), 4)
self.assertEqual(execute_command(['cmd', '/C', 'echo あい'], shell=False, cmd_encoding='sjis'), 0)
else:
self.assertEqual(execu | te_command(['/bin/sh', '-c', | 'exit 4'], shell=False), 4)
# This code will not pass in non-Japanese Windows OS.
with self.withAssertOutputFile(
os.path.join('tests', 'resources', 'sjis_ja.txt'), expect_file_encoding='sjis',
output_encoding='sjis', variables={'quote': '"' if os.name == 'nt' else ''}, replace_linesep=True
) as out:
execute_command('echo "あいうえお"', shell=True, cmd_encoding='sjis', stdout=out)
def test_capture_command(self):
self.assertEqual(capture_command(['echo', 'abc'], shell=True), (0, ('abc' + os.linesep).encode('utf-8'), b''))
if os.name == 'nt':
self.assertEqual(capture_command(['cmd', '/C', 'echo abc'], shell=False, cmd_encoding='sjis'),
(0, ('abc' + os.linesep).encode('sjis'), b''))
else:
# This code will not pass in non-Japanese Windows OS.
self.assertEqual(capture_command(['echo', 'あい'], shell=True),
(0, ('あい' + os.linesep).encode('utf-8'), b''))
self.assertEqual(capture_command(['/bin/sh', '-c', 'echo あい'], shell=False),
(0, ('あい' + os.linesep).encode('utf-8'), b''))
def test_execute_command_with_pid(self):
pid_file = os.path.join(tempfile.gettempdir(), 'mog-commons-python-test.pid')
class RunSleep(threading.Thread):
def run(self):
execute_command_with_pid('python -c "import time;time.sleep(2)"', pid_file, shell=True)
th = RunSleep()
th.start()
time.sleep(1)
with open(pid_file, 'r') as f:
pid = int(f.read())
self.assertTrue(pid_exists(pid))
time.sleep(2)
self.assertFalse(pid_exists(pid))
self.assertEqual(execute_command_with_pid(['exit', '2'], None, shell=True), 2)
def test_pid_exists(self):
self.assertTrue(pid_exists(0))
|
liuqx315/Sundials | sundials/examples/arkode/CXX_serial/plot_sol.py | Python | bsd-3-clause | 1,175 | 0.005957 | #!/usr/bin/env python
# ----------------------------------------------------------------
# Programmer(s): Daniel R. Reynolds @ SMU
# ----------------------------------------------------------------
# Copyright (c) 2013, Southern Methodist University.
# All rights reserved.
# For details, see the LICENSE file.
# ------------------------------------- | ---------------------------
# matplotlib-based plotting script for ODE examples
# imports
import sys
import pylab as plt
import num | py as np
# load solution data file
data = np.loadtxt('solution.txt', dtype=np.double)
# determine number of time steps, number of fields
nt,nv = np.shape(data)
# extract time array
times = data[:,0]
# parse comment line to determine solution names
f = open('solution.txt', 'r')
commentline = f.readline()
commentsplit = commentline.split()
names = commentsplit[2:]
# create plot
plt.figure()
# add curves to figure
for i in range(nv-1):
plt.plot(times,data[:,i+1],label=names[i])
plt.xlabel('t')
if (nv > 2):
plt.ylabel('solutions')
else:
plt.ylabel('solution')
plt.legend(loc='upper right', shadow=True)
plt.grid()
plt.savefig('solution.png')
##### end of script #####
|
RenaKunisaki/hexchat-twitch | twitch/hooks.py | Python | mit | 11,648 | 0.035371 | import hexchat
import re
import sys
import twitch.hook, twitch.jtvmsghandler, twitch.user, twitch.channel
import twitch.normalize, twitch.commands, twitch.exceptions, twitch.topic
import twitch.logger, twitch.settings
from twitch import irc
log = twitch.logger.get()
# regex for extracting time from ban message
ban_msg_regex = re.compile(r"for (\d+) more seconds")
# Identify ourselves as Twitch IRC client to get user info
def endofmotd_cb(word, word_eol, userdata):
hexchat.command('CAP REQ :twitch.tv/tags twitch.tv/commands')
# Ignore various "unknown command" errors
unknowncommands = ('WHO', 'WHOIS')
def servererr_cb(word, word_eol, userdata):
    """Swallow 'unknown command' server errors for commands Twitch lacks.

    word[3] is the command name the server rejected; eat the event when it
    is one we knowingly send (WHO/WHOIS), otherwise pass it through.
    """
    rejected = word[3]
    return hexchat.EAT_ALL if rejected in unknowncommands else hexchat.EAT_NONE
# PRIVMSG hook to handle various notification messages from Twitch.
def privmsg_cb(word, word_eol, msgtype):
    """PRIVMSG hook: route jtv/twitchnotify service messages, track users.

    word[0] is the ':nick!user@host' prefix, word[2] the target channel,
    word_eol[3] the ':'-prefixed message text.
    """
    try:
        nick = twitch.normalize.nick((word[0][1:].split('!')[0]))
        chan = word[2]
        text = word_eol[3]

        if chan == '#jtv' and nick == 'jtv':
            # Service messages in the #jtv channel: show as server text.
            hexchat.emit_print('Server Text', text[1:])
            return hexchat.EAT_ALL
        elif nick == 'jtv':
            if chan[0] != '#':
                # Private jtv message (no channel context).
                irc.emit_print(None, 'Server Text', text[1:])
                return hexchat.EAT_ALL
            elif "You are banned" in text:
                chan = twitch.channel.get(chan)
                if not chan.areWeBanned:
                    chan.areWeBanned = True
                    # Ban message embeds the remaining time in seconds;
                    # schedule a timer to clear our local ban flag.
                    match = ban_msg_regex.search(text)
                    time = int(match.group(1))
                    def clear_ban(userdata):
                        chan.areWeBanned = False
                        chan.emit_print('Server Text',
                            "You are (hopefully) no longer banned")
                    hexchat.hook_timer(time * 1000, clear_ban)
            else:
                # Other jtv notices name a handler in twitch.jtvmsghandler
                # (first word of the text); dispatch if one exists.
                action = word[3][1:]
                param = word[4:]
                if action[0] != '_' and hasattr(twitch.jtvmsghandler, action):
                    return getattr(twitch.jtvmsghandler, action)(chan, param)
                else:
                    #log.warning("Unhandled JTV message: %s" % str(word))
                    ctxt = twitch.channel.get(chan).getContext()
                    twitch.channel.get(chan).emit_print('Server Text', text[1:])
                return hexchat.EAT_ALL
        elif nick == 'twitchnotify':
            twitch.channel.get(chan).emit_print('Server Text', text[1:])
            return hexchat.EAT_ALL
        else:
            # Ordinary user message: record channel membership, let the
            # normal message path handle display.
            twitch.user.get(nick).joinChannel(chan)
            return hexchat.EAT_NONE
    except:
        log.exception("Unhandled exception in twitch.privmsg_cb")
        return hexchat.EAT_NONE
# handle Twitch WHISPER message
def whisper_cb(word, word_eol, msgtype):
    """Twitch WHISPER hook: display the whisper as a Notice and eat it."""
    try:
        nick = twitch.normalize.nick((word[0][1:].split('!')[0]))
        dest = word[2]
        msg = word_eol[3][1:]
        log.debug("Got WHISPER: %s", word)
        hexchat.emit_print('Notice', nick, msg)
    except:
        log.exception("Unhandled exception in twitch.whisper_cb")
    finally:
        # Always consume the raw WHISPER so hexchat doesn't also print it.
        return hexchat.EAT_ALL
# handle Twitch USERSTATE and GLOBALUSERSTATE messages
def userstate_cb(word, word_eol, msgtype):
    """USERSTATE/GLOBALUSERSTATE hook: currently only eats the message."""
    try:
        # log.debug("Got %s msg: %s", msgtype, word)
        # Nothing to do here (except eat the message) until Hexchat adds a
        # way to read the message's IRCv3 tags.
        pass
    except:
        log.exception("Unhandled exception in twitch.userstate_cb")
    finally:
        return hexchat.EAT_ALL
# handle Twitch HOSTTARGET messages
# :tmi.twitch.tv HOSTTARGET #renakunisaki :cloakedyoshi -
def hosttarget_cb(word, word_eol, msgtype):
    """HOSTTARGET hook: delegate to the jtvmsghandler HOSTTARGET handler.

    Example: ':tmi.twitch.tv HOSTTARGET #renakunisaki :cloakedyoshi -'
    """
    try:
        log.debug("%s %s", msgtype, word)
        chan = word[2]
        param = word[3:]
        return twitch.jtvmsghandler.HOSTTARGET(chan, param)
    except:
        log.exception("Unhandled exception in twitch.hosttarget_cb")
    finally:
        return hexchat.EAT_ALL
# handle Twitch CLEARCHAT messages
# :tmi.twitch.tv CLEARCHAT #darkspinessonic :ishmon
def clearchat_cb(word, word_eol, msgtype):
    """CLEARCHAT hook: forward to jtvmsghandler.CLEARCHAT.

    Example: ':tmi.twitch.tv CLEARCHAT #darkspinessonic :ishmon' --
    word[3], when present, is the ':'-prefixed nick whose chat is cleared.
    """
    try:
        log.debug("%s %s", msgtype, word)
        if len(word) >= 4: param = [word[3][1:]]
        else: param = []
        chan = word[2]
        # log.debug("Chan = %s, whom = %s", chan, param)
        return twitch.jtvmsghandler.CLEARCHAT(chan, param)
    except:
        log.exception("Unhandled exception in twitch.clearchat_cb")
    finally:
        return hexchat.EAT_ALL
#def rawmsg_cb(word, word_eol, msgtype, attributes):
# try:
# log.debug("Got raw msg: %s", word)
# except:
# log.exception("Unhandled exception in twitch.rawmsg_cb")
# finally:
# return hexchat.EAT_NONE
# message hook to format user messages nicely.
message_cb_recurse = False
def message_cb(word, word_eol, msgtype):
    """Format a user message for display via twitch.user.User.printMessage.

    Re-entrancy guard: printMessage re-emits the event, which would call
    this hook again, so a module-level flag short-circuits the recursion.
    (This body also repairs dataset-extraction artifacts that had split
    the 'log.error' call and the 'hexchat' identifier mid-token.)
    """
    # avoid infinite loop
    global message_cb_recurse
    if message_cb_recurse:
        return
    message_cb_recurse = True
    try:
        #log.debug("message_cb word=%s" % str(word))
        #log.debug("message_cb word_eol=%s" % str(word_eol))
        if len(word) < 1:
            return hexchat.EAT_NONE
        nick = twitch.normalize.nick(word[0])
        try:
            text = word[1]
        except IndexError:
            text = ''
        user = twitch.user.get(nick)
        chan = twitch.channel.get(hexchat.get_context())
        if chan is not None:
            user.joinChannel(chan)
            user.printMessage(chan, text, msgtype)
        else:
            log.error("Got user message for invalid channel: <%s> %s" %
                (nick, text))
        return hexchat.EAT_ALL
    except:
        log.exception("Unhandled exception in twitch.message_cb")
        return hexchat.EAT_NONE
    finally:
        message_cb_recurse = False
# MODE hook to track mods
def mode_cb(word, word_eol, msgtype):
    """MODE hook: record +/- channel-mode changes (e.g. +o) on the user.

    word[3] is the mode string; a '+' or '-' sets the direction for the
    mode characters that follow it.
    """
    try:
        chan = word[2]
        mode = word[3]
        whom = word[4]
        user = twitch.user.get(whom)
        what = '+'
        for char in mode:
            if char == '+' or char == '-':
                what = char
            elif what == '+':
                user.setChannelMode(chan, char, True)
            elif what == '-':
                user.setChannelMode(chan, char, False)
    except:
        log.exception("Unhandled exception in twitch.mode_cb")
    finally:
        return hexchat.EAT_NONE
# When we join a channel, set up the user info and get stream status
def youjoin_cb(word, word_eol, msgtype):
    """Hook for when *we* join a channel: initialize channel and bot users."""
    try:
        chan = twitch.channel.get(word[1])
        chan.join()
        # Request JOIN/PART events for other users in this channel.
        hexchat.command("CAP REQ :twitch.tv/membership")

        # automatically set up some users: the jtv and twitchnotify service
        # accounts, and the broadcaster (whose nick equals the channel name).
        jtv = twitch.user.get('jtv')
        jtv.joinChannel(chan)
        jtv.setAttrs({'admin':True,'bot':True})

        twitchnotify = twitch.user.get('twitchnotify')
        twitchnotify.joinChannel(chan)
        twitchnotify.setAttrs({'admin':True,'bot':True})

        broadcaster = twitch.user.get(chan.name)
        broadcaster.joinChannel(chan)
        broadcaster.setChanAttr(chan, 'broadcaster', True)
    except:
        log.exception("Unhandled exception in twitch.youjoin_cb")
    finally:
        return hexchat.EAT_NONE
# When we leave a channel, stop updating it
def youpart_cb(word, word_eol, msgtype):
    """Hook for when *we* leave a channel: stop updating its state.

    The channel name sits at a different index for kicks vs. parts.
    """
    try:
        if msgtype == 'You Kicked':
            chan = word[1]
        else:
            chan = word[2]
        twitch.channel.get(chan).leave()
    except:
        log.exception("Unhandled exception in twitch.youpart_cb")
def isCommand(name, obj):
    """Return True if *obj* is a public /twitch subcommand handler.

    A handler is any callable whose name is not underscore-prefixed and
    which carries a 'command' attribute (set by the command decorator).
    """
    if name.startswith('_') or not callable(obj):
        return False
    return hasattr(obj, 'command')
# handler for /twitch command
def twitchcmd_cb(word, word_eol, userdata):
    """Handler for the /twitch command: list or dispatch subcommands.

    With no argument, prints every public handler in twitch.commands;
    otherwise dispatches word[1] with the remaining words as arguments.
    """
    # Fix: define cmd before the try block. The original assigned it only
    # at word[1]; an exception raised earlier (e.g. in the listing loop)
    # made the except handlers below fail with a NameError on 'cmd'.
    cmd = None
    try:
        log.debug("/twitch command: %s" % word)
        if len(word) < 2:
            print("Available commands:")
            for name, obj in twitch.commands.__dict__.items():
                if isCommand(name, obj):
                    print("%s - %s" % (name, obj.command['desc']))
            return hexchat.EAT_ALL

        cmd = word[1]
        if not hasattr(twitch.commands, cmd):
            raise twitch.exceptions.UnknownCommandError(cmd)
        f = getattr(twitch.commands, cmd)
        if not hasattr(f, 'command'):
            raise twitch.exceptions.UnknownCommandError(cmd)
        f(word[2:], word_eol[2:])
    except twitch.exceptions.BadParameterError as ex:
        print("%s: %s" % (cmd, ex))
    except twitch.exceptions.UnknownCommandError as ex:
        print("%s: Unknown command" % ex)
    except:
        log.exception("Unhandled exception in twitch.twitchcmd_cb(%s)" % cmd)
    finally:
        # Always eat the command so hexchat doesn't send it to the server.
        return hexchat.EAT_ALL
# ignore repeated JOIN events that can happen because we simulate them
# (since Twitch doesn't always send them reliably)
def join_cb(word, word_eol, msgtype):
    """JOIN hook: suppress duplicate JOINs caused by our simulated events.

    Twitch doesn't send JOINs reliably, so this plugin simulates them; a
    marker hostname distinguishes simulated JOINs from real ones.
    """
    try:
        nick = twitch.normalize.nick((word[0][1:].split('!')[0]))
        user = twitch.user.get(nick)
        chan = twitch.channel.get(word[2])
        if chan.hasUser(user):
            # Already known in this channel: duplicate event, drop it.
            return hexchat.EAT_ALL
        else:
            user.joinChannel(chan)
            if ".twitch.hexchat.please.stop.being.butts" not in word[0]:
                # eat JOINs that actually come from Twitch
                return hexchat.EAT_ALL
            else:
                return hexchat.EAT_NONE
    except:
        log.exception("Unhandled exception in twitch.join_cb(%s)" % str(word))
        return hexchat.EAT_NONE
# suppress "gives/removes channel operator st |
dracarysX/flask_restapi | setup.py | Python | mit | 1,687 | 0.001785 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Packaging script for flask_restapi. This body repairs dataset-extraction
# artifacts that had split the tokens 'include=' and 'tests_require'.
import unittest
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import sys


class MyTest(TestCommand):
    """`setup.py test` command: run unittest discovery over tests/."""
    def run_tests(self):
        tests = unittest.TestLoader().discover('tests', pattern='test_*.py')
        unittest.TextTestRunner(verbosity=1).run(tests)


# NOTE(review): MyTest is defined but not registered via cmdclass, while
# test_suite/tests_require point at nose -- confirm which runner is intended.
setup(
    name='flask_restapi',
    version='0.2.0',
    license='MIT',
    description=u'A simple rest query framework by flask, peewee, rest_query',
    author='dracarysX',
    author_email='huiquanxiong@gmail.com',
    url='https://github.com/dracarysX/flask_restapi',
    packages=find_packages(include=['flask_restapi']),
    install_requires=[
        'peewee',
        'flask',
        'wtforms',
        'flask_bcrypt',
        'flask-script',
        'peewee-rest-query'
    ],
    test_suite='nose.collector',
    tests_require=['nose'],
    classifiers=[
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'License :: MIT',
    ],
    keywords='Python, Flask, APIMethodView, Filtering Query API, Mysql, Peewee, RestAPI',
    long_description='A simple rest query framework. Web framework use flask, '
                     'orm by peewee, form by wtform and query by rest_query.'
                     'The framework implements custom query api(like this: /?select=id,name&id=gte.20), '
                     'save form data, model object serializer, APIMethodView(get, post, put,delete) and errorhandler.'
)
|
nburn42/tensorflow | tensorflow/python/util/deprecation.py | Python | apache-2.0 | 21,885 | 0.004889 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensor utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import re
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import decorator_utils
from tensorflow.python.util import is_in_graph_mode
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
# Allow deprecation warnings to be silenced temporarily with a context manager.
_PRINT_DEPRECATION_WARNINGS = True
# Remember which deprecation warnings have been printed already.
_PRINTED_WARNING = {}
def _add_deprecated_function_notice_to_docstring(doc, date, instructions):
  """Adds a deprecation notice to a docstring for deprecated functions."""
  # date=None yields 'in a future version'; otherwise 'after <date>'.
  main_text = ['THIS FUNCTION IS DEPRECATED. It will be removed %s.' %
               ('in a future version' if date is None else ('after %s' % date))]
  if instructions:
    main_text.append('Instructions for updating:')
  return decorator_utils.add_notice_to_docstring(
      doc, instructions,
      'DEPRECATED FUNCTION',
      '(deprecated)', main_text)
def _add_deprecated_arg_notice_to_docstring(doc, date, instructions):
  """Adds a deprecation notice to a docstring for deprecated arguments."""
  # Same date convention as the function-level notice above.
  return decorator_utils.add_notice_to_docstring(
      doc, instructions,
      'DEPRECATED FUNCTION ARGUMENTS',
      '(deprecated arguments)', [
          'SOME ARGUMENTS ARE DEPRECATED. '
          'They will be removed %s.' % (
              'in a future version' if date is None else ('after %s' % date)),
          'Instructions for updating:'])
def _validate_deprecation_args(date, instructions):
if date is not None and not re.match(r'20\d\d-[01]\d-[0123]\d', date):
raise ValueError('Date must be YYYY-MM-DD.')
if not instructions:
raise ValueError('Don\'t deprecate things without conversion instructions!')
def _call_location(outer=False):
  """Returns call location given level up from current call."""
  # Result format is 'filename:lineno' of the deprecated function's caller
  # (one extra frame up when outer=True).
  frame = tf_inspect.currentframe()
  if frame:
    # CPython internals are available, use them for performance.
    # walk back two frames to get to deprecated function caller.
    frame = frame.f_back
    if frame.f_back:
      frame = frame.f_back
    if outer and frame.f_back:
      frame = frame.f_back
    return '%s:%d' % (frame.f_code.co_filename, frame.f_lineno)
  else:
    # Slow fallback path for interpreters without frame introspection.
    stack = tf_inspect.stack(0)  # 0 avoids generating unused context
    entry = stack[3 if outer else 2]
    return '%s:%d' % (entry[1], entry[2])
def deprecated_alias(deprecated_name, name, func_or_class, warn_once=True):
  """Deprecate a symbol in favor of a new name with identical semantics.

  This function is meant to be used when defining a backwards-compatibility
  alias for a symbol which has been moved. For example:

  module1.py:
  ```python
  class NewNameForClass: pass
  ```

  module2.py:
  ```python
  import module1

  DeprecatedNameForClass = deprecated_alias(
    deprecated_name='module2.DeprecatedNameForClass',
    name='module1.NewNameForClass',
    module1.NewNameForClass)
  ```

  This function works for classes and functions.

  For classes, it creates a new class which is functionally identical (it
  inherits from the original, and overrides its constructor), but which prints
  a deprecation warning when an instance is created. It also adds a deprecation
  notice to the class' docstring.

  For functions, it returns a function wrapped by `tf_decorator.make_decorator`.
  That function prints a warning when used, and has a deprecation notice in its
  docstring. This is more or less equivalent (the deprecation warning has
  slightly different text) to writing:

  ```python
  @deprecated
  def deprecated_alias(original_args):
    real_function(original_args)
  ```

  Args:
    deprecated_name: The name of the symbol that is being deprecated, to be used
      in the warning message. This should be its fully qualified name to avoid
      confusion.
    name: The name of the symbol that is to be used instead of the deprecated
      name. This should be a fully qualified name to avoid confusion.
    func_or_class: The (non-deprecated) class or function for which a deprecated
      alias should be created.
    warn_once: If True (the default), only print a deprecation warning the first
      time this function is used, or the class is instantiated.

  Returns:
    A wrapped version of `func_or_class` which prints a deprecation warning on
    use and has a modified docstring.
  """
  # NOTE(review): this body repairs dataset-extraction artifacts that had
  # split '_PRINTED_WARNING:' and '_add_deprecated_function_notice_to_docstring'
  # mid-token; the logic is otherwise unchanged.
  if tf_inspect.isclass(func_or_class):

    # Make a new class with __init__ wrapped in a warning.
    class NewClass(func_or_class):  # pylint: disable=missing-docstring
      __doc__ = decorator_utils.add_notice_to_docstring(
          func_or_class.__doc__, 'Please use %s instead.' % name,
          'DEPRECATED CLASS',
          '(deprecated)', ['THIS CLASS IS DEPRECATED. '
                           'It will be removed in a future version. '])
      __name__ = func_or_class.__name__
      __module__ = _call_location(outer=True)

      def __init__(self, *args, **kwargs):
        if hasattr(NewClass.__init__, '__func__'):
          # Python 2
          NewClass.__init__.__func__.__doc__ = func_or_class.__init__.__doc__
        else:
          # Python 3
          NewClass.__init__.__doc__ = func_or_class.__init__.__doc__

        if _PRINT_DEPRECATION_WARNINGS:
          # We're making the alias as we speak. The original may have other
          # aliases, so we cannot use it to check for whether it's already been
          # warned about.
          if NewClass.__init__ not in _PRINTED_WARNING:
            if warn_once:
              _PRINTED_WARNING[NewClass.__init__] = True
            logging.warning(
                'From %s: The name %s is deprecated. Please use %s instead.\n',
                _call_location(), deprecated_name, name)
        super(NewClass, self).__init__(*args, **kwargs)

    return NewClass
  else:
    decorator_utils.validate_callable(func_or_class, 'deprecated')

    # Make a wrapper for the original
    @functools.wraps(func_or_class)
    def new_func(*args, **kwargs):  # pylint: disable=missing-docstring
      if _PRINT_DEPRECATION_WARNINGS:
        # We're making the alias as we speak. The original may have other
        # aliases, so we cannot use it to check for whether it's already been
        # warned about.
        if new_func not in _PRINTED_WARNING:
          if warn_once:
            _PRINTED_WARNING[new_func] = True
          logging.warning(
              'From %s: The name %s is deprecated. Please use %s instead.\n',
              _call_location(), deprecated_name, name)
      return func_or_class(*args, **kwargs)
    return tf_decorator.make_decorator(
        func_or_class, new_func, 'deprecated',
        _add_deprecated_function_notice_to_docstring(
            func_or_class.__doc__, None, 'Please use %s instead.' % name))
def deprecated(date, instructions, warn_once=True):
"""Decorator for marking functions or methods deprecated.
This decorator logs a deprecation warning whenever the decorated function is
called. It has the following format:
<function> (from <module>) is deprecated and will be removed after <date>.
Instructions for updating:
<instructions>
If `date` is None, 'after <date>' is replaced with 'in a future version'.
<fu |
Mariaanisimova/pythonintask | INBa/2015/KODZOKOV_M_M/task_4_9.py | Python | apache-2.0 | 1,454 | 0.028058 | #Напишите программу, которая выводит имя, под которым скрывается Михаил Николаевич Румянцев.
# Task: print the stage name of Mikhail Nikolaevich Rumyantsev, his area of
# interest, place of birth, and years of birth and death (if deceased), and
# compute his age at the present moment (or at the moment of death).
# All data must be stored in variables; the program waits for Enter to exit.
# Kodzokov M.M., 25.05.2016

name = "Михаил Николаевич Румянцев"
hobby = "кино"
place_birth = "Санкт-Петербург"
year_birth = 1901
year_death = 1983
# He died in 1983, so per the assignment the age reported is the age at the
# moment of death, not the age he would have been in 2016 (the original
# hard-coded age = 2016 - 1901).
age = year_death - year_birth
print(name+"- cоветский артист цирка, актёр. Герой Социалистического Труда. Народный артист СССР")
print("Место рождения: "+place_birth)
print("Годы жизни:",year_birth,"-",year_death)
print("Возраст:",age)
print("Область деятельности: "+hobby)
input("Нажмите ENTER для продолжения")
|
entoo/portage-src | bin/ebuild-ipc.py | Python | gpl-2.0 | 6,192 | 0.028262 | #!/usr/bin/python
# Copyright 2010-2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
#
# This is a helper which ebuild processes can use
# to communicate with portage's main python process.
import logging
import os
import pickle
import platform
import signal
import sys
import time
def debug_signal(signum, frame):
    """Signal handler that drops into the pdb debugger for live inspection."""
    import pdb
    pdb.set_trace()
if platform.python_implementation() == 'Jython':
debug_signum = signal.SIGUSR2 # bug #424259
else:
debug_signum = signal.SIGUSR1
signal.signal(debug_signum, debug_signal)
# Avoid sandbox violations after python upgrade.
pym_path = os.path.join(os.path.dirname(
os.path.dirname(os.path.realpath(__file__))), "pym")
if os.environ.get("SANDBOX_ON") == "1":
sandbox_write = os.environ.get("SANDBOX_WRITE", "").split(":")
if pym_path not in sandbox_write:
sandbox_write.append(pym_path)
os.environ["SANDBOX_WRITE"] = \
":".join(filter(None, sandbox_write))
import portage
portage._internal_caller = True
portage._disable_legacy_globals()
from portage.util._async.ForkProcess import ForkProcess
from portage.util._eventloop.global_event_loop import global_event_loop
from _emerge.PipeReader import PipeReader
class FifoWriter(ForkProcess):
    """Forked worker that writes a whole buffer into a fifo in one shot."""
    __slots__ = ('buf', 'fifo',)
    def _run(self):
        # Atomically write the whole buffer into the fifo.
        # (unbuffered binary mode, so a single write() call is issued)
        with open(self.fifo, 'wb', 0) as f:
            f.write(self.buf)
        return os.EX_OK
class EbuildIpc(object):
    """Client side of the ebuild<->portage IPC: exchanges pickled requests
    and (out, err, retval) replies over FIFOs in PORTAGE_BUILDDIR."""

    # Timeout for each individual communication attempt (we retry
    # as long as the daemon process appears to be alive).
    _COMMUNICATE_RETRY_TIMEOUT_MS = 15000

    def __init__(self):
        # All IPC endpoints live in the build directory of this ebuild.
        self.fifo_dir = os.environ['PORTAGE_BUILDDIR']
        self.ipc_in_fifo = os.path.join(self.fifo_dir, '.ipc_in')
        self.ipc_out_fifo = os.path.join(self.fifo_dir, '.ipc_out')
        self.ipc_lock_file = os.path.join(self.fifo_dir, '.ipc_lock')

    def _daemon_is_alive(self):
        # The daemon holds a lock on the build dir while running, so a
        # *failed* non-blocking lock attempt means the daemon is alive.
        try:
            builddir_lock = portage.locks.lockfile(self.fifo_dir,
                wantnewlockfile=True, flags=os.O_NONBLOCK)
        except portage.exception.TryAgain:
            return True
        else:
            portage.locks.unlockfile(builddir_lock)
            return False

    def communicate(self, args):
        """Serialize access via the IPC lock file and run one request."""
        # Make locks quiet since unintended locking messages displayed on
        # stdout could corrupt the intended output of this program.
        portage.locks._quiet = True
        lock_obj = portage.locks.lockfile(self.ipc_lock_file, unlinkfile=True)

        try:
            return self._communicate(args)
        finally:
            portage.locks.unlockfile(lock_obj)

    def _timeout_retry_msg(self, start_time, when):
        # Log that one attempt timed out; 'when' names the phase
        # (e.g. 'during write' / 'during read').
        time_elapsed = time.time() - start_time
        portage.util.writemsg_level(
            portage.localization._(
            'ebuild-ipc timed out %s after %d seconds,' + \
            ' retrying...\n') % (when, time_elapsed),
            level=logging.ERROR, noiselevel=-1)

    def _no_daemon_msg(self):
        # Log that the daemon could not be detected at all.
        portage.util.writemsg_level(
            portage.localization._(
            'ebuild-ipc: daemon process not detected\n'),
            level=logging.ERROR, noiselevel=-1)

    def _run_writer(self, fifo_writer, msg):
        """
        Wait on pid and return an appropriate exit code. This
        may return unsuccessfully due to timeout if the daemon
        process does not appear to be alive.
        """
        start_time = time.time()

        fifo_writer.start()
        eof = fifo_writer.poll() is not None

        while not eof:
            fifo_writer._wait_loop(timeout=self._COMMUNICATE_RETRY_TIMEOUT_MS)

            eof = fifo_writer.poll() is not None
            if eof:
                break
            elif self._daemon_is_alive():
                # Daemon still running: keep retrying after each timeout.
                self._timeout_retry_msg(start_time, msg)
            else:
                fifo_writer.cancel()
                self._no_daemon_msg()
                fifo_writer.wait()
                return 2

        return fifo_writer.wait()

    def _receive_reply(self, input_fd):
        """Read the daemon's pickled (out, err, retval) reply from input_fd
        and echo out/err; return retval (2 on any failure)."""
        start_time = time.time()

        pipe_reader = PipeReader(input_files={"input_fd":input_fd},
            scheduler=global_event_loop())
        pipe_reader.start()

        eof = pipe_reader.poll() is not None

        while not eof:
            pipe_reader._wait_loop(timeout=self._COMMUNICATE_RETRY_TIMEOUT_MS)
            eof = pipe_reader.poll() is not None
            if not eof:
                if self._daemon_is_alive():
                    self._timeout_retry_msg(start_time,
                        portage.localization._('during read'))
                else:
                    pipe_reader.cancel()
                    self._no_daemon_msg()
                    return 2

        buf = pipe_reader.getvalue()

        retval = 2

        if not buf:

            portage.util.writemsg_level(
                "ebuild-ipc: %s\n" % \
                (portage.localization._('read failed'),),
                level=logging.ERROR, noiselevel=-1)

        else:

            try:
                reply = pickle.loads(buf)
            except SystemExit:
                raise
            except Exception as e:
                # The pickle module can raise practically
                # any exception when given corrupt data.
                portage.util.writemsg_level(
                    "ebuild-ipc: %s\n" % (e,),
                    level=logging.ERROR, noiselevel=-1)

            else:

                (out, err, retval) = reply

                if out:
                    portage.util.writemsg_stdout(out, noiselevel=-1)

                if err:
                    portage.util.writemsg(err, noiselevel=-1)

        return retval

    def _communicate(self, args):
        """One full request/reply cycle: check daemon, write request, read
        reply. Returns the daemon-supplied exit code, or 2 on failure."""
        if not self._daemon_is_alive():
            self._no_daemon_msg()
            return 2

        # Open the input fifo before the output fifo, in order to make it
        # possible for the daemon to send a reply without blocking. This
        # improves performance, and also makes it possible for the daemon
        # to do a non-blocking write without a race condition.
        input_fd = os.open(self.ipc_out_fifo,
            os.O_RDONLY|os.O_NONBLOCK)

        # Use forks so that the child process can handle blocking IO
        # un-interrupted, while the parent handles all timeout
        # considerations. This helps to avoid possible race conditions
        # from interference between timeouts and blocking IO operations.
        msg = portage.localization._('during write')
        retval = self._run_writer(FifoWriter(buf=pickle.dumps(args),
            fifo=self.ipc_in_fifo, scheduler=global_event_loop()), msg)

        if retval != os.EX_OK:
            portage.util.writemsg_level(
                "ebuild-ipc: %s: %s\n" % (msg,
                portage.localization._('subprocess failure: %s') % \
                retval), level=logging.ERROR, noiselevel=-1)
            return retval

        if not self._daemon_is_alive():
            self._no_daemon_msg()
            return 2

        return self._receive_reply(input_fd)
def ebuild_ipc_main(args):
    """Entry point: forward *args* to the daemon, return its exit code."""
    ebuild_ipc = EbuildIpc()
    return ebuild_ipc.communicate(args)

# Pass everything after the script name to the daemon.
if __name__ == '__main__':
    sys.exit(ebuild_ipc_main(sys.argv[1:]))
|
hectormartinez/ud_unsup_parser | src/udup_ablation.py | Python | cc0-1.0 | 24,285 | 0.015443 |
from collections import defaultdict, Counter
from pathlib import Path
import argparse
import sys, copy
import networkx as nx
import numpy as np
from lib.conll import CoNLLReader, DependencyTree
from pandas import pandas as pd
OPEN="ADJ ADV INTJ NOUN PROPN VERB".split()
CLOSED="ADP AUX CONJ DET NUM PART PRON SCONJ".split()
OTHER="PUNCT SYM X".split()
CONTENT="ADJ NOUN PROPN VERB CONTENT".split(" ")
FUNCTION="ADP AUX CONJ DET NUM PART PRON SCONJ PUNCT SYM X ADV FUNCTION".split(" ")
RIGHTATTACHING = []
LEFTATTACHING = []
scorerdict = defaultdict(list)
def map_to_two_tags(s, functionlist):
    """Collapse each token's POS tag to FUNCTION or CONTENT, in place.

    A node whose lowercased 'form' appears in *functionlist* gets
    cpostag 'FUNCTION'; every other node gets 'CONTENT'. Returns *s*.

    Fix: the original mixed the pre-2.0 networkx attribute API
    (``s.node[n]``) with the 2.x mapping view (``s.nodes[n]``) in the
    same loop; that only works on networkx 2.0-2.3 where both exist.
    This version uses ``s.nodes[n]`` consistently.
    """
    for n in list(s.nodes()):
        form = s.nodes[n]['form'].lower()
        s.nodes[n]['cpostag'] = 'FUNCTION' if form in functionlist else 'CONTENT'
    return s
def get_head_direction(sentences):
    """Print, per POS tag, how many dependents attach in each direction."""
    D = Counter()
    for s in sentences:
        for h,d in s.edges():
            # Counts keyed as '<POS>_right' when the (non-root) head index
            # is greater than the dependent's index, else '<POS>_left'.
            # NOTE(review): confirm the left/right naming convention --
            # h > d means the head lies to the RIGHT of the dependent.
            if h != 0 and h > d:
                D[s.nodes[d]['cpostag']+"_right"]+=1
            else:
                D[s.nodes[d]['cpostag']+"_left"]+=1
    for k in sorted(D.keys()):
        print(k,D[k])
def fill_out_left_and_right_attach(bigramcounter):
    """Populate the module-level LEFTATTACHING/RIGHTATTACHING lists.

    CONJ/PUNCT/PROPN always attach left; AUX/DET/SCONJ always right.
    ADP is decided from the POS-bigram counts: if adpositions precede
    DET/NOUN/PROPN/PRON more often than they follow them, treat ADP as
    right-attaching (prepositional language), else left (postpositional).
    NOTE: mutates global state; calling it twice appends duplicates.
    """
    LEFTATTACHING.append("CONJ")
    LEFTATTACHING.append("PUNCT")
    LEFTATTACHING.append("PROPN")

    RIGHTATTACHING.append("AUX")
    RIGHTATTACHING.append("DET")
    RIGHTATTACHING.append("SCONJ")

    if bigramcounter[("ADP","DET")] + bigramcounter[("ADP","NOUN")] + bigramcounter[("ADP","PROPN")] + bigramcounter[("ADP","PRON")] > bigramcounter[("DET","ADP")] + bigramcounter[("NOUN","ADP")] + bigramcounter[("PROPN","ADP")] + bigramcounter[("PRON","ADP")]:
        RIGHTATTACHING.append("ADP")
    else:
        LEFTATTACHING.append("ADP")
def get_scores(predset, goldset):
    """Return (precision, recall) of *predset* against *goldset*.

    Both arguments are sets (here: sets of (head, dependent) edges).
    Precision/recall are 0 when their denominator is empty.

    Fix: the original guarded the divisions with bare ``except:`` blocks,
    which silently swallowed *any* error (not just ZeroDivisionError);
    explicit denominator checks keep the same results without masking bugs.
    """
    tp = len(predset.intersection(goldset))
    fp = len(predset.difference(goldset))
    fn = len(goldset.difference(predset))
    precision = tp / (fp + tp) if (fp + tp) else 0
    recall = tp / (fn + tp) if (fn + tp) else 0
    return (precision, recall)
def count_pos_bigrams(treebank):
    """Return (C, W): POS-bigram counts and word-frequency counts.

    C counts (cpostag_i, cpostag_{i+1}) pairs of adjacent tokens; W counts
    lowercased word forms. Node 0 (the artificial root) is skipped.
    NOTE(review): uses the pre-2.0 networkx API (callable, sliceable
    ``s.nodes()`` and ``s.node[n]``) -- confirm the pinned networkx version.
    """
    C = Counter()
    W = Counter()
    for s in treebank:
        for n,n_next in zip(list(s.nodes()[1:]),list(s.nodes()[2:])):
            pos_n = s.node[n]['cpostag']
            pos_n_next = s.node[n_next]['cpostag']
            C[(pos_n,pos_n_next)]+=1
        for n in s.nodes()[1:]:
            word_n = s.node[n]['form'].lower()
            W[word_n]+=1
    return C,W
def add_high_confidence_edges(s,bigramcount,backoff):
pos_index_dict = defaultdict(list)
T = set()
D = set()
goldedgeset=set(s.edges())
global scorerdict
verbroot = None
adjroot = None
possibleheads = [x for x in s.nodes() if s.node[x]['cpostag'] in OPEN]
if len(possibleheads) == 1:
T.add((0,possibleheads[0]))
for d in s.nodes():
if d != 0 and d!= possibleheads[0]:
T.add((possibleheads[0],d))
scorerdict["__shortsentence"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
else:
for n in s.nodes():
pos_index_dict[s.node[n]['cpostag']].append(n)
for n in pos_index_dict["DET"]:
#if bigramcount[("DET","NOUN")] > bigramcount[("NOUN","DET")]:
# noundist=[abs(n-x) for x in pos_index_dict["NOUN"] if x > n ]
#else:
# noundist=[abs(n-x) for x in pos_index_dict["NOUN"] if x < n ]
noundist=[abs(n-x) for x in pos_index_dict["NOUN"]]
if noundist:
closestnoun=pos_index_dict["NOUN"][np.argmin(noundist)]
T.add((closestnoun,n))
localgoldedgeset = set([(h,d) for h,d in goldedgeset if d in pos_index_dict["DET"]])
scorerdict["DET"].append(get_scores(T,localgoldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["NUM"]:
#if bigramcount[("DET","NOUN")] > bigramcount[("NOUN","DET")]:
# noundist=[abs(n-x) for x in pos_index_dict["NOUN"] if x > n ]
#else:
# noundist=[abs(n-x) for x in pos_index_dict["NOUN"] if x < n ]
noundist=[abs(n-x) for x in pos_index_dict["NOUN"]]
if noundist:
closestnoun=pos_index_dict["NOUN"][np.argmin(noundist)]
T.add((closestnoun,n))
localgoldedgeset = set([(h,d) for h,d in goldedgeset if d in pos_index_dict["DET"]])
scorerdict["NUM"].append(get_scores(T,localgoldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["ADP"]:
# if bigramcount[("ADP","NOUN")] > bigramcount[("NOUN","ADP")]:
# noundist=[abs(n-x) for x in pos_index_dict["NOUN"] if x > n ]
# else:
# noundist=[abs(n-x) for x in pos_index_dict["NOUN"] if x < n ]
noundist=[abs(n-x) for x in pos_index_dict["NOUN"] ]
if noundist:
closestnoun=pos_index_dict["NOUN"][np.argmin(noundist)]
T.add((closestnoun,n))
scorerdict["ADP"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["ADJ"]:
# if bigramcount[("adj","noun")] > bigramcount[("noun","adj")]:
# noundist=[abs(n-x) for x in pos_index_dict["noun"] if x > n ]
# else:
# noundist=[abs(n-x) for x in pos_index_dict["noun"] if x < n ]
noundist=[abs(n-x) for x in pos_index_dict["NOUN"] ]
if noundist:
closestnoun=pos_index_dict["NOUN"][np.argmin(noundist)]
T.add((closestnoun,n))
scorerdict["ADJ_nounhead"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["AUX"]:
# if bigramcount[("AUX","VERB")] > bigramcount[("VERB","AUX")]:
# noundist=[abs(n-x) for x in pos_index_dict["VERB"] if x > n ]
# else:
# noundist=[abs(n-x) for x in pos_index_dict["VERB"] if x < n ]
noundist=[abs(n-x) for x in pos_index_dict["VERB"] ]
if noundist:
closestnoun=pos_index_dict["VERB"][np.argmin(noundist)]
T.add((closestnoun,n))
scorerdict["AUX"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["NOUN"]:
# if bigramcount[("AUX","VERB")] > bigramcount[("VERB","AUX")]:
# noundist=[abs(n-x) for x in pos_index_dict["VERB"] if x > n ]
# else:
# noundist=[abs(n-x) for x in pos_index_dict["VERB"] if x < n ]
noundist=[abs(n-x) for x in pos_index_dict["VERB"] ]
if noundist:
closestnoun=pos_index_dict["VERB"][np.argmin(noundist)]
T.add((closestnoun,n))
scorerdict["NOUN"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["PRON"]:
noundist=[abs(n-x) for x in pos_index_dict["VERB"]]
if noundist:
closestnoun=pos_index_d | ict["VERB"][np.argmin(noundist)]
T.add((closestnoun,n))
scorerdict["PRON"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["ADV"]:
noundist=[abs(n-x) for x in pos_index_dict["VERB"]+pos_index_dict["ADJ"]]
| if noundist:
closestnoun=(pos_index_dict["VERB"]+pos_index_dict["ADJ"])[np.argmin(noundist)]
T.add((closestnoun,n))
scorerdict["ADV"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
if pos_index_dict["VERB"]:
verbroot = min(pos_index_dict["VERB"])
T.add((0,verbroot))
scorerdict["VERB_root"].append(get_scores(T,goldedgeset))
D.update(T)
T = set()
for n in pos_index_dict["VERB"]:
|
hortonworks/hortonworks-sandbox | tutorials/tutorials_app/views.py | Python | apache-2.0 | 3,659 | 0.003826 | # Licensed to Hortonworks, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Hortonworks, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from djangomako.shortcuts import render_to_response
from django.shortcuts import redirect
from django.http import HttpResponse, Http404
from models import UserLocation
import settings
import os
import time
import string
from urlparse import urlparse
def tutorials_last_url(tutorial_view):
    """View decorator: remember an authenticated user's last tutorial URL.

    Before delegating to the wrapped view, stores the absolute request URI
    in the user's UserLocation.step_location so the user can resume later.
    """
    def save_user_location(request, *args):
        if request.user.is_authenticated() \
            and request.user.username != "AnonymousUser":
            user_location = UserLocation.objects.get_or_create(user=request.user)[0]
            user_location.step_location = request.build_absolute_uri()
            user_location.save()
        return tutorial_view(request, *args)
    return save_user_location
def index(request):
    """Render the lessons page, restoring the user's saved locations.

    For authenticated users, restores the last Hue frame URL (only if it
    points at the same host as CONTENT_FRAME_URL) and the last step URL;
    otherwise falls back to the defaults.
    """
    location = settings.CONTENT_FRAME_URL
    step_location = "/lesson/"
    if request.user.is_authenticated() \
        and request.user.username != "AnonymousUser":
        try:
            ustep = UserLocation.objects.get(user=request.user)
            hue_location = ustep.hue_location
            step_location = ustep.step_location
            if step_location == None:
                step_location = "/lesson/"
            # Same-host check keeps us from framing an arbitrary saved URL.
            if urlparse(hue_location).netloc==urlparse(location).netloc:
                location = hue_location
        except UserLocation.DoesNotExist:
            pass
    return render_to_response("lessons.html",
                              {'content' : location,
                               'step_location': step_location})
def content(request, page):
    """Render the shared tutorial content template; empty pages go home."""
    if page == '':
        return redirect('/')
    return render_to_response("content.html", {})
def sync_location(request):
    """GET endpoint that persists the user's current Hue frame location.

    Reads the optional ``loc`` query parameter and stores it on the user's
    UserLocation record; anonymous users are silently ignored. Non-GET
    requests raise Http404. (This body repairs dataset-extraction artifacts
    that had split 'get_or_create(user=...)' mid-token.)
    """
    if request.method == 'GET':
        if not request.user.is_authenticated() \
            or request.user.username == 'AnonymousUser':
            return HttpResponse('')
        hue_location = None
        if 'loc' in request.GET:
            hue_location = request.GET['loc']
        ustep = UserLocation.objects.get_or_create(user=request.user)[0]
        ustep.hue_location = hue_location
        ustep.save()
        return HttpResponse('')
    else:
        raise Http404
def get_file(request, path):
    """Stream a file from run/git_files identified by the URL *path*.

    NOTE(review): *path* comes straight from the URL and is joined onto
    git_files without normalization -- this looks vulnerable to '../'
    traversal; confirm the URLconf pattern restricts allowed paths.
    """
    import mimetypes
    from django.core.servers.basehttp import FileWrapper
    git_files = os.path.join(settings.PROJECT_PATH, 'run/git_files')
    rfile = os.path.join(git_files, path)
    response = HttpResponse(FileWrapper(file(rfile, 'rb')),
                            mimetype=mimetypes.guess_type(rfile)[0])
    return response
def network_info(request):
    """Debug view: run a fixed set of shell commands and render their output.

    The command list is hard-coded (no user input reaches the shell), so
    shell=True is acceptable here and required for the piped
    ``ps aux | grep java`` entry.
    """
    import subprocess
    commands = [
        "route -n",
        "getent ahosts",
        "ip addr",
        "cat /etc/resolv.conf",
        "cat /etc/hosts",
        "ps aux | grep java",
        "netstat -lnp",
    ]
    # Map each command string to its captured stdout.
    netinfo = {cmd: subprocess.check_output(cmd, shell=True)
               for cmd in commands}
    return render_to_response("netinfo.html", {'info': netinfo})
prologic/mio | fabfile/docs.py | Python | mit | 828 | 0 | # Module: docs
# Date: 03rd April 2013
# Author: James Mill | s, j dot mills at griffith dot edu dot au
"""Documentation Tasks"""
from fabric.api import lcd, local, task
from .utils import pip, requires
PACKAGE = "mio"
@task()
@requires("make", "sphinx-apidoc")
def clean():
    """Delete Generated Documentation"""
    # Run "make clean" from inside the docs/ directory.
    with lcd("docs"):
        local("make clean")
@task(default=True)
@requires("make")
def build(**options):
    """Build the Documentation"""
    # Install the doc-build requirements into the active environment first.
    pip(requirements="docs/requirements.txt")
    if PACKAGE is not None:
        # Regenerate the API reference stubs from the package sources.
        local("sphinx-apidoc -f -T -o docs/source/api {0:s}".format(PACKAGE))
    with lcd("docs"):
        local("make html")
@task()
@requires("open")
def view(**options):
    """View the Documentation"""
    # "open" is the macOS launcher; assumes build() has already been run.
    with lcd("docs"):
        local("open build/html/index.html")
|
coddingtonbear/d-rats | d_rats/ui/main_messages.py | Python | gpl-3.0 | 41,043 | 0.001438 | #!/usr/bin/python
#
# Copyright 2009 Dan Smith <dsmith@danplanet.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import time
import shutil
import random
from datetime import datetime
import gobject
import gtk
import pango
from ConfigParser import ConfigParser,DuplicateSectionError
from glob import glob
from d_rats.ui.main_common import MainWindowElement, MainWindowTab
from d_rats.ui.main_common import prompt_for_station, ask_for_confirmation, \
display_error, prompt_for_string, set_toolbar_buttons
from d_rats.ui import main_events
from d_rats import inputdialog
from d_rats import formgui
from d_rats import emailgw
from d_rats.utils import log_exception, print_stack
from d_rats import signals
from d_rats import msgrouting
from d_rats import wl2k
_FOLDER_CACHE = {}
BASE_FOLDERS = [_("Inbox"), _("Outbox"), _("Sent"), _("Trash"), _("Drafts")]
def mkmsgid(callsign):
    """Generate a message id of the form CALLSIGN.<hex time>.<hex random>."""
    # Seconds since a project-specific epoch (2005-04-30), kept short in hex.
    epoch_offset = int(time.time()) - 1114880400
    nonce = random.SystemRandom().randint(0, 100000)
    return "{0}.{1:x}.{2:x}".format(callsign, epoch_offset, nonce)
class MessageFolderInfo(object):
    """Metadata wrapper around one on-disk message folder.

    Per-message properties (subject, type, read flag, sender, recipient)
    live in a hidden ".db" ConfigParser file inside the folder, one section
    per message file.  Parsed configs are shared process-wide through
    _FOLDER_CACHE so repeated instantiations do not re-read from disk.

    NOTE: Python 2 code (has_key, the ``file`` builtin, old except/print
    syntax) — preserved as-is.
    """
    def __init__(self, folder_path):
        self._path = folder_path
        if _FOLDER_CACHE.has_key(folder_path):
            self._config = _FOLDER_CACHE[folder_path]
        else:
            self._config = ConfigParser()
            regpath = os.path.join(self._path, ".db")
            if os.path.exists(regpath):
                self._config.read(regpath)
            self._save()
            _FOLDER_CACHE[folder_path] = self._config
    def _save(self):
        # Rewrite the .db index from the in-memory config.
        regpath = os.path.join(self._path, ".db")
        f = file(regpath, "w")
        self._config.write(f)
        f.close()
    def name(self):
        """Return folder name"""
        return os.path.basename(self._path)
    def _setprop(self, filename, prop, value):
        # Sections are keyed by basename so callers may pass full paths.
        filename = os.path.basename(filename)
        if not self._config.has_section(filename):
            self._config.add_section(filename)
        self._config.set(filename, prop, value)
        self._save()
    def _getprop(self, filename, prop):
        filename = os.path.basename(filename)
        try:
            return self._config.get(filename, prop)
        except Exception:
            # Missing section/option: report a translated "Unknown".
            return _("Unknown")
    # Typed accessors, all backed by _getprop/_setprop above.
    def get_msg_subject(self, filename):
        return self._getprop(filename, "subject")
    def set_msg_subject(self, filename, subject):
        self._setprop(filename, "subject", subject)
    def get_msg_type(self, filename):
        return self._getprop(filename, "type")
    def set_msg_type(self, filename, type):
        self._setprop(filename, "type", type)
    def get_msg_read(self, filename):
        # Stored as the strings "True"/"False"; compare textually.
        val = self._getprop(filename, "read")
        return val == "True"
    def set_msg_read(self, filename, read):
        self._setprop(filename, "read", str(read == True))
    def get_msg_sender(self, filename):
        return self._getprop(filename, "sender")
    def set_msg_sender(self, filename, sender):
        self._setprop(filename, "sender", sender)
    def get_msg_recip(self, filename):
        return self._getprop(filename, "recip")
    def set_msg_recip(self, filename, recip):
        self._setprop(filename, "recip", recip)
    def subfolders(self):
        """Return a list of MessageFolderInfo objects representing this
        folder's subfolders"""
        info = []
        entries = glob(os.path.join(self._path, "*"))
        for entry in sorted(entries):
            if entry == "." or entry == "..":
                continue
            if os.path.isdir(entry):
                info.append(MessageFolderInfo(entry))
        return info
    def files(self):
        """Return a list of files contained in this folder"""
        l = glob(os.path.join(self._path, "*"))
        return [x for x in l if os.path.isfile(x) and not x.startswith(".")]
    def get_subfolder(self, name):
        """Get a MessageFolderInfo object representing a named subfolder"""
        for folder in self.subfolders():
            if folder.name() == name:
                return folder
        return None
    def create_subfolder(self, name):
        """Create a subfolder by name"""
        path = os.path.join(self._path, name)
        os.mkdir(path)
        return MessageFolderInfo(path)
    def delete_self(self):
        """Remove this folder (and its .db index) from disk."""
        try:
            os.remove(os.path.join(self._path, ".db"))
        except OSError:
            pass # Don't freak if no .db
        os.rmdir(self._path)
    def create_msg(self, name):
        """Register a new message section and return its full path.

        Re-registering is only an error if the file already exists on disk.
        """
        exists = os.path.exists(os.path.join(self._path, name))
        try:
            self._config.add_section(name)
        except DuplicateSectionError, e:
            if exists:
                raise e
        return os.path.join(self._path, name)
    def delete(self, filename):
        """Delete a message file and its metadata section."""
        filename = os.path.basename(filename)
        self._config.remove_section(filename)
        os.remove(os.path.join(self._path, filename))
    def rename(self, new_name):
        """Rename this folder in place (same parent directory)."""
        newpath = os.path.join(os.path.dirname(self._path), new_name)
        print "Renaming %s -> %s" % (self._path, newpath)
        os.rename(self._path, newpath)
        self._path = newpath
    def __str__(self):
        return self.name()
class MessageFolders(MainWindowElement):
__gsignals__ = {
"user-selected-folder" : (gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,))
}
    def _folders_path(self):
        """Return (creating if needed) the root directory for message folders."""
        path = os.path.join(self._config.platform.config_dir(), "messages")
        if not os.path.isdir(path):
            os.makedirs(path)
        return path
    def _create_folder(self, root, name):
        """Create folder ``name`` (a path like "Inbox/Sub") under ``root``.

        Walks each intermediate component; any failure is reported as a
        missing intermediate folder.
        """
        info = root
        for el in name.split(os.sep)[:-1]:
            info = info.get_subfolder(el)
            if not info:
                # Missing intermediate: ``info`` is None and the create
                # below will fail, raising the error message.
                break
        try:
            return info.create_subfolder(os.path.basename(name))
        except Exception, e:
            raise Exception("Intermediate folder of %s does not exist" % name)
    def create_folder(self, name):
        """Create (and return) folder ``name`` under the messages root."""
        root = MessageFolderInfo(self._folders_path())
        return self._create_folder(root, name)
    def get_folders(self):
        """Return the list of top-level message folders."""
        return MessageFolderInfo(self._folders_path()).subfolders()
    def get_folder(self, name):
        """Return folder ``name``; does not check that it exists on disk."""
        return MessageFolderInfo(os.path.join(self._folders_path(), name))
def _get_folder_by_iter(self, store, iter):
els = []
while iter:
els.insert(0, store.get(iter, 0)[0])
iter = store.iter_parent(iter)
return os.sep.join(els)
    def select_folder(self, folder):
        """Select a folder by path (i.e. Inbox/Subfolder)
        NB: Subfolders currently not supported :)
        """
        view, = self._getw("folderlist")
        store = view.get_model()
        iter = store.get_iter_first()
        # Walk the top-level rows until the fully-qualified name matches.
        while iter:
            fqname = self._get_folder_by_iter(store, iter)
            if fqname == folder:
                view.set_cursor(store.get_path(iter))
                # Mirror a user click so listeners refresh the message list.
                self.emit("user-selected-folder", fqname)
                break
            iter = store.iter_next(iter)
    def _ensure_default_folders(self):
        """Create the standard folders (Inbox, Outbox, ...) if missing."""
        root = MessageFolderInfo(self._folders_path())
        for folder in BASE_FOLDERS:
            try:
                info = self._create_folder(root, folder)
                print info.subfolders()
            except Exception:
                # Folder already exists; nothing to do.
                pass
def _add_folders(self, store, iter, root):
iter = store.append(iter, (root.name(), self.folde | r_pixbuf))
for info in root.subfolders():
|
zooliet/UWTracking | src/trackers/dlib_tracker/dlib_tracker.py | Python | mit | 1,652 | 0.003632 |
import cv2
import numpy as np
import imutils
from utils import util
import dlib
import itertools
class DLIBTracker:
    """Wrapper around dlib's correlation tracker.

    A BRISK detector and brute-force Hamming matcher are also constructed,
    but update() currently uses only the correlation tracker itself.
    """
    def __init__(self):
        self._tracker = dlib.correlation_tracker()
        # Feature detector/matcher (currently unused by update()).
        self.detector = cv2.BRISK_create(10)
        # self.detector = cv2.AKAZE_create()
        # self.detector = cv2.xfeatures2d.SIFT_create()
        self.matcher = cv2.BFMatcher(cv2.NORM_HAMMING)
    def init(self, frame, window=None):
        """Start tracking the region given by window {'x1','y1','x2','y2'}.

        NOTE(review): tracking only starts when ``window`` is provided;
        calling init() without ever having set self.x1..y2 would leave the
        tracker unstarted — confirm callers always pass a window first.
        """
        if window:
            self.x1 = window['x1']
            self.x2 = window['x2']
            self.y1 = window['y1']
            self.y2 = window['y2']
            self._tracker.start_track(frame, dlib.rectangle(self.x1, self.y1, self.x2, self.y2))
        self.force_init_flag = False
        self.enable = True
    def update(self, frame, options = None):
        """Advance the tracker by one frame.

        Returns (confidence_score, (x1, y1, x2, y2)).  When ``options``
        carries a search window it is enlarged and used to guide the update.
        """
        if options is None:
            score = self._tracker.update(frame)
        else:
            x1 = options['x1']
            x2 = options['x2']
            y1 = options['y1']
            y2 = options['y2']
            # Mask only used to size the enlargement; drawing code disabled.
            mask = np.zeros(frame.shape[:2], dtype=np.uint8)
            # cv2.rectangle(mask, (x1, y1), (x2, y2), 255, -1)
            (x1, y1), (x2, y2) = util.selection_enlarged(mask, x1, y1, x2, y2, ratio=1)
            # cv2.rectangle(mask, (x1, y1), (x2, y2), 255, -1)
            # frame = cv2.bitwise_and(frame, frame, mask=mask)
            # cv2.imshow('Mask', frame)
            score = self._tracker.update(frame, dlib.rectangle(x1, y1, x2, y2))
        # print("[DLIB] score:", score)
        rect = self._tracker.get_position()
        return score, (int(rect.left()), int(rect.top()), int(rect.right()), int(rect.bottom()))
| |
ioos/catalog-harvesting | catalog_harvesting/api.py | Python | mit | 2,982 | 0.000335 | #!/usr/bin/env python
'''
catalog_harvesting/api.py
A microservice d | esigned to perform small tasks in association with the CLI
'''
from flask import Flask, jsonify
from pymongo import MongoClient
from catalog_harvesting import get_redis_connection, get_logger
from catalog_harvesting import harvest as harvest_api
from rq import Queue
import os
import json
import redis
# Flask application object; routes are registered via decorators below.
app = Flask(__name__)
# Harvest output directory; intentionally fails fast if unset.
OUTPUT_DIR = os.environ['OUTPUT_DIR']
# Global MongoDB handle, populated by init_db() at import time.
db = None
def init_db():
    '''
    Initializes the mongo db and stores the handle in the module-level
    ``db`` global.  Raises immediately if MONGO_URL is unset or the server
    is unreachable (server_info() forces a round-trip).
    '''
    global db
    # We want the process to stop here, if it's not defined or we can't connect
    conn_string = os.environ['MONGO_URL']
    tokens = conn_string.split('/')
    # mongodb://host:port/dbname -> tokens[3] is the database name.
    if len(tokens) > 3:
        db_name = tokens[3]
    else:
        db_name = 'default'
    conn = MongoClient(conn_string)
    conn.server_info()
    db = conn[db_name]
    return db
# Redis-backed RQ queue used to run harvest jobs out of process.
REDIS_HOST, REDIS_PORT, REDIS_DB = get_redis_connection()
redis_pool = redis.ConnectionPool(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
redis_connection = redis.Redis(connection_pool=redis_pool)
queue = Queue('default', connection=redis_connection)
init_db()
@app.route("/")
def index():
    '''
    Returns an empty response to the client
    '''
    # 204 No Content: the service is alive but has nothing to say.
    return jsonify(), 204
def harvest_job(harvest_id):
    '''
    Actually perform the harvest

    Runs inside an RQ worker; writes results under OUTPUT_DIR.

    :param str harvest_id: ID of harvest
    '''
    harvest = db.Harvests.find_one({"_id": harvest_id})
    harvest_api.download_harvest(db, harvest, OUTPUT_DIR)
    return json.dumps({"result": True})
def delete_harvest_job(harvest_id):
    '''
    Schedules the deletion of a harvest

    Runs inside an RQ worker, not in the request cycle.

    :param str harvest_id: harvest_id
    '''
    get_logger().info("Deleting harvest")
    harvest = db.Harvests.find_one({"_id": harvest_id})
    harvest_api.delete_harvest(db, harvest)
    return json.dumps({"result": True})
@app.route("/api/harvest/<string:harvest_id>", methods=['GET'])
def get_harvest(harvest_id):
    '''
    Resets the harvest's counters to a "pending" state and enqueues the
    actual download as a background RQ job.

    Returns a dictionary with a result key, which is true if the harvest
    was scheduled.  If an error occurred there will be an error message in
    the error key, along with a 500 HTTP return code.

    :param str harvest_id: MongoDB ID for the harvest
    '''
    try:
        db.Harvests.update({"_id": harvest_id}, {
            "$set": {
                "last_harvest_dt": "pending",
                "last_record_count": 0,
                "last_good_count": 0,
                "last_bad_count": 0
            }
        })
    except Exception as e:
        # str(e) rather than e.message: the .message attribute was removed
        # from exceptions in Python 3 (and str(e) works on Python 2 too).
        return jsonify(error=type(e).__name__, message=str(e)), 500
    queue.enqueue(harvest_job, harvest_id, timeout=900)
    return jsonify({"result": True})
@app.route("/api/harvest/<string:harvest_id>", methods=['DELETE'])
def delete_harvest(harvest_id):
    """Queue a background deletion of the harvest; returns immediately."""
    queue.enqueue(delete_harvest_job, harvest_id, timeout=900)
    return jsonify({"result": True})
if __name__ == '__main__':
    # Development entry point; WEB_PORT overrides the default port 3000.
    app.run(port=int(os.environ.get('WEB_PORT', 3000)), debug=True)
|
antonve/s4-project-mooc | lms/djangoapps/lti_provider/models.py | Python | agpl-3.0 | 1,249 | 0 | """
Database models for the LTI provider feature.
"""
from django.db import models
from django.dispatch imp | ort receiver
from courseware.models import SCORE_CHANGED
class LtiConsumer(models.Model):
    """
    Database model representing an LTI consumer. This model stores the consumer
    specific settings, such as the OAuth key/secret pair and any LTI fields
    that must be persisted.
    """
    # OAuth consumer key; indexed because every launch request looks it up.
    key = models.CharField(max_length=32, unique=True, db_index=True)
    # Shared OAuth secret paired with the key.
    secret = models.CharField(max_length=32, unique=True)
@receiver(SCORE_CHANGED)
def score_changed_handler(sender, **kwargs): # pylint: disable=unused-argument
    """
    Consume signals that indicate score changes.

    Currently only prints the event payload (Python 2 print statement).

    TODO: This function is a placeholder for integration with the LTI 1.1
    outcome service, which will follow in a separate change.
    """
    message = """LTI Provider got score change event:
        points_possible: {}
        points_earned: {}
        user_id: {}
        course_id: {}
        usage_id: {}
    """
    print message.format(
        kwargs.get('points_possible', None),
        kwargs.get('points_earned', None),
        kwargs.get('user_id', None),
        kwargs.get('course_id', None),
        kwargs.get('usage_id', None),
    )
|
lizardschool/wordbook | tests/test_domain_translation.py | Python | mit | 436 | 0 | from word | book.domain.models import Translation
def test_translation_dto():
    """dto_autocomplete() exposes only the autocomplete-relevant fields."""
    translation = Translation(
        id=1,
        from_language='en',
        into_language='pl',
        word='apple',
        ipa='ejpyl',
        simplified='epyl',
        translated='jabłko',
    )
    expected = {
        'id': 1,
        'word': 'apple',
        'translation': 'jabłko',
        'ipa': 'ejpyl',
        'simplified': 'epyl',
    }
    assert translation.dto_autocomplete() == expected
|
alexgorban/models | official/benchmark/keras_benchmark.py | Python | apache-2.0 | 4,024 | 0.004473 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Executes Keras benchmarks and accuracy tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from official.utils.flags import core as flags_core
from official.utils.testing.perfzero_benchmark import PerfZeroBenchmark
class KerasBenchmark(PerfZeroBenchmark):
  """Base benchmark class with methods to simplify testing."""

  def __init__(self,
               output_dir=None,
               default_flags=None,
               flag_methods=None,
               tpu=None):
    # This harness only supports TensorFlow 2.x.
    assert tf.version.VERSION.startswith('2.')
    super(KerasBenchmark, self).__init__(
        output_dir=output_dir,
        default_flags=default_flags,
        flag_methods=flag_methods,
        tpu=tpu)

  def _report_benchmark(self,
                        stats,
                        wall_time_sec,
                        top_1_max=None,
                        top_1_min=None,
                        log_steps=None,
                        total_batch_size=None,
                        warmup=1,
                        start_time_sec=None):
    """Report benchmark results by writing to local protobuf file.

    Args:
      stats: dict returned from keras models with known entries.
      wall_time_sec: the during of the benchmark execution in seconds
      top_1_max: highest passing level for top_1 accuracy.
      top_1_min: lowest passing level for top_1 accuracy.
      log_steps: How often the log was created for stats['step_timestamp_log'].
      total_batch_size: Global batch-size.
      warmup: number of entries in stats['step_timestamp_log'] to ignore.
      start_time_sec: the start time of the program in seconds since epoch
    """
    metrics = []
    if 'accuracy_top_1' in stats:
      # Eval accuracy is bounds-checked; train accuracy is informational.
      metrics.append({'name': 'accuracy_top_1',
                      'value': stats['accuracy_top_1'],
                      'min_value': top_1_min,
                      'max_value': top_1_max})
      metrics.append({'name': 'top_1_train_accuracy',
                      'value': stats['training_accuracy_top_1']})

    if (warmup and 'step_timestamp_log' in stats and
        len(stats['step_timestamp_log']) > warmup):
      # first entry in the time_log is start of step 1. The rest of the
      # entries are the end of each step recorded
      time_log = stats['step_timestamp_log']
      elapsed = time_log[-1].timestamp - time_log[warmup].timestamp
      num_examples = (
          total_batch_size * log_steps * (len(time_log) - warmup - 1))
      examples_per_sec = num_examples / elapsed
      metrics.append({'name': 'exp_per_second',
                      'value': examples_per_sec})

    if 'avg_exp_per_second' in stats:
      metrics.append({'name': 'avg_exp_per_second',
                      'value': stats['avg_exp_per_second']})

    if start_time_sec and 'step_timestamp_log' in stats:
      time_log = stats['step_timestamp_log']
      # time_log[0] is recorded at the beginning of the first step.
      startup_time = time_log[0].timestamp - start_time_sec
      metrics.append({'name': 'startup_time', 'value': startup_time})

    # Record any non-default flags so runs are reproducible from the report.
    flags_str = flags_core.get_nondefault_flags_as_str()
    self.report_benchmark(
        iters=-1,
        wall_time=wall_time_sec,
        metrics=metrics,
        extras={'flags': flags_str})
|
valeriodelsarto/valecasa_bot | hum_temp_sensor.py | Python | mit | 317 | 0.006309 | #!/usr/bin/python
import Adafruit_DHT
sensor = Adafruit_DHT.DHT11
pin = 4
humidity, temperatu | re = Adafruit_DHT.read_retry(sensor, pin)
if humidity is not None and temperature is n | ot None:
print 'Temp={0:0.1f}*C Humidity={1:0.1f}%'.format(temperature, humidity)
else:
print 'Failed to get reading. Try again!'
|
AsgerPetersen/QGIS | python/plugins/processing/algs/lidar/lastools/las2txt.py | Python | gpl-2.0 | 2,649 | 0.001133 | # -*- coding: utf-8 -*-
"""
***************************************************************************
las2txt.py
---------------------
Date : September 2013 and May 2016
Copyright : (C) 2013 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Martin Isenburg'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Martin Isenburg'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from .LAStoolsUtils import LAStoolsUtils
from .LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterString
from processing.core.outputs import OutputFile
class las2txt(LAStoolsAlgorithm):
    """Processing wrapper for the LAStools ``las2txt`` command, which
    converts LAS/LAZ point clouds to ASCII text."""
    # Parameter/output identifiers used by the Processing framework.
    PARSE = "PARSE"
    OUTPUT = "OUTPUT"

    def defineCharacteristics(self):
        """Declare the algorithm's name, group, parameters and outputs."""
        self.name, self.i18n_name = self.trAlgorithm('las2txt')
        self.group, self.i18n_group = self.trAlgorithm('LAStools')
        self.addParametersVerboseGUI()
        self.addParametersPointInputGUI()
        # "parse" controls which per-point attributes are emitted (x/y/z...).
        self.addParameter(ParameterString(las2txt.PARSE,
                                          self.tr("parse string"), "xyz"))
        self.addOutput(OutputFile(las2txt.OUTPUT, self.tr("Output ASCII file")))
        self.addParametersAdditionalGUI()

    def processAlgorithm(self, progress):
        """Assemble the las2txt command line and run it via LAStoolsUtils."""
        if (LAStoolsUtils.hasWine()):
            # Under Wine the Windows binary name must be used.
            commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "las2txt.exe")]
        else:
            commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "las2txt")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        parse = self.getParameterValue(las2txt.PARSE)
        if parse != "xyz":
            # "xyz" is the tool default; only pass -parse when it differs.
            commands.append("-parse")
            commands.append(parse)
        commands.append("-o")
        commands.append(self.getOutputValue(las2txt.OUTPUT))
        self.addParametersAdditionalCommands(commands)
        LAStoolsUtils.runLAStools(commands, progress)
|
daljeetv/infodiscovery | manage.py | Python | bsd-2-clause | 256 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django management entry point for the "infodiscovery" project.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "infodiscovery.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
sidnarayanan/BAdNet | train/pf/adv/train_lstm.py | Python | mit | 6,337 | 0.017043 | #!/usr/local/bin/python2.7
from sys import exit
from os import environ, system
environ['KERAS_BACKEND'] = 'tensorflow'
import numpy as np
import utils
import signal
from keras.layers import Input, Dense, Dropout, concatenate, LSTM, BatchNormalization
from keras.models import Model
from keras.callbacks import ModelCheckpoint, LambdaCallback, TensorBoard
from keras.optimizers import Adam, SGD
from keras.utils import np_utils
from keras import backend as K
K.set_image_data_format('channels_last')
from adversarial import Adversary
import obj
import config
config.DEBUG = True
#config.n_truth = 5
#config.truth = 'resonanceType'
config.adversary_mask = 0
# ADV selects the training mode used below: >0 runs the adversarial stack,
# and an even value additionally runs the classifier-only pass.
ADV = 2
NEPOCH = 2
'''
instantiate data loaders
'''
def make_coll(fpath):
    """Build a PF/SV collection backed by the given file pattern."""
    collection = obj.PFSVCollection()
    categories = ['singletons', 'pf']
    collection.add_categories(categories, fpath)
    return collection
# Signal (top) and background (QCD) collections; PARTITION/CATEGORY are
# placeholders substituted by the loader.
top = make_coll('/fastscratch/snarayan/pandaarrays/v1//PARTITION/ZpTT_*_CATEGORY.npy')
qcd = make_coll('/fastscratch/snarayan/pandaarrays/v1//PARTITION/QCD_*_CATEGORY.npy')
data = [top, qcd]
# preload some data just to get the dimensions
data[0].objects['train']['pf'].load(memory=False)
dims = data[0].objects['train']['pf'].data.data.shape
# obj.limit = 20
# dims = (None, 20, 9) # override
'''
some global definitions
'''
'''
first build the classifier!
'''
# set up data
classifier_train_gen = obj.generatePF(data, partition='train', batch=1000, normalize=False)
classifier_validation_gen = obj.generatePF(data, partition='validate', batch=100)
classifier_test_gen = obj.generatePF(data, partition='validate', batch=1000)
test_i, test_o, test_w = next(classifier_test_gen)
# LSTM over the per-particle feature sequence, then a small dense stack.
inputs = Input(shape=(dims[1], dims[2]), name='input')
norm = BatchNormalization(momentum=0.6, name='input_bnorm') (inputs)
lstm = LSTM(100, go_backwards=True, implementation=2, name='lstm') (norm)
norm = BatchNormalization(momentum=0.6, name='lstm_norm') (lstm)
dense = Dense(100, activation='relu',name='lstmdense',kernel_initializer='lecun_uniform') (norm)
norm = BatchNormalization(momentum=0.6,name='lstmdense_norm') (dense)
for i in xrange(1,5):
    dense = Dense(50, activation='relu',name='dense%i'%i)(norm)
    norm = BatchNormalization(momentum=0.6,name='dense%i_norm'%i)(dense)
dense = Dense(50, activation='relu',name='dense5')(norm)
norm = BatchNormalization(momentum=0.6,name='dense5_norm')(dense)
# Softmax over the truth categories.
y_hat = Dense(config.n_truth, activation='softmax') (norm)
classifier = Model(inputs=inputs, outputs=y_hat)
classifier.compile(optimizer=Adam(lr=0.001),
                   loss='categorical_crossentropy',
                   metrics=['accuracy'])
# print '########### CLASSIFIER ############'
# classifier.summary()
# print '###################################'
pred = classifier.predict(test_i)
# ctrl+C now triggers a graceful exit
def save_classifier(name='classifier', model=classifier):
    # NOTE: defaults bind the classifier object at definition time.
    model.save('models/%s.h5'%name)
def save_and_exit(signal=None, frame=None, name='classifier', model=classifier):
    # SIGINT handler: persist the model, close the log, and bail out.
    # NOTE(review): flog is only opened later inside the training sections;
    # an interrupt before then would raise NameError here — confirm intended.
    save_classifier(name, model)
    flog.close()
    exit(1)
signal.signal(signal.SIGINT, save_and_exit)
'''
now build the adversarial setup
'''
# set up data
train_gen = obj.generatePF(data, partition='train', batch=1000, decorr_mass=True, normalize=False)
validation_gen = obj.generatePF(data, partition='validate', batch=100, decorr_mass=True)
test_gen = obj.generatePF(data, partition='validate', batch=1000, decorr_mass=True)
# build the model
# Adversary tries to predict the mass bin from the classifier output; the
# small classification loss weight below makes decorrelation dominate.
mass_hat = Adversary(config.n_mass_bins, scale=0.01)(y_hat)
pivoter = Model(inputs=[inputs],
                outputs=[y_hat, mass_hat])
pivoter.compile(optimizer=Adam(lr=0.001),
                loss=['categorical_crossentropy', 'categorical_crossentropy'],
                loss_weights=[0.0001,1])
print '############# ARCHITECTURE #############'
pivoter.summary()
print '###################################'
'''
Now we train both models
'''
if ADV > 0:
    print 'TRAINING ADVERSARIAL NETWORK'
    # Rotate the previous log out of the way before opening a fresh one.
    system('mv logs/train_lstm.log logs/train_lstm.log.old')
    flog = open('logs/train_lstm.log','w')
    callback = LambdaCallback(
        on_batch_end=lambda batch, logs: flog.write('batch=%i,logs=%s\n'%(batch,str(logs)))
    )
    tb = TensorBoard(
        log_dir = './logs/lstm_logs',
        write_graph = True,
        write_images = True
    )
    print ' -Pre-training the classifier'
    # bit of pre-training to get the classifer in the right place
    classifier.fit_generator(classifier_train_gen,
                             steps_per_epoch=2000,
                             epochs=1)
    save_classifier(name='pretrained')
    # Rebind the SIGINT handler so an interrupt now saves as "regularized".
    def save_and_exit(signal=None, frame=None, name='regularized', model=classifier):
        save_classifier(name, model)
        flog.close()
        exit(1)
    signal.signal(signal.SIGINT, save_and_exit)
    print ' -Training the adversarial stack'
    # now train the model for real
    pivoter.fit_generator(train_gen,
                          steps_per_epoch=5000,
                          epochs=NEPOCH*2,
                          # callbacks=[callback, tb],
                          validation_data=validation_gen,
                          validation_steps=100)
    save_classifier(name='regularized')
    save_classifier(name='pivoter', model=pivoter)
    flog.close()
if ADV % 2 == 0:
    print 'TRAINING CLASSIFIER ONLY'
    # Rotate the previous log out of the way before opening a fresh one.
    system('mv logs/train_lstmnoreg.log logs/train_lstmnoreg.log.old')
    flog = open('logs/train_lstmnoreg.log','w')
    callback = LambdaCallback(
        on_batch_end=lambda batch, logs: flog.write('batch=%i,logs=%s\n'%(batch,str(logs)))
    )
    tb = TensorBoard(
        log_dir = './logs/lstmnoreg_logs',
        write_graph = True,
        write_images = True
    )
    n_epochs = 1 if (ADV == 2) else 2 # fewer epochs if network is pretrained
    n_epochs *= NEPOCH
    classifier.fit_generator(classifier_train_gen,
                             steps_per_epoch=5000,
                             epochs=n_epochs,
                             # callbacks=[callback, tb],
                             validation_data=classifier_validation_gen,
                             validation_steps=100)
    save_classifier(name='classifier')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.