repo_name: stringlengths 5-100 | path: stringlengths 4-231 | language: stringclasses 1 value | license: stringclasses 15 values | size: int64 6-947k | score: float64 0-0.34 | prefix: stringlengths 0-8.16k | middle: stringlengths 3-512 | suffix: stringlengths 0-8.17k
|---|---|---|---|---|---|---|---|---|
Videoclases/videoclases | quality_control/views/control_views.py | Python | gpl-3.0 | 1,076 | 0.001859 |
from django.contrib.auth.decorators import user_passes_test
from django.utils.decorators import method_decorator
from django.views.generic.base import TemplateView
def in_students_group(user):
if user:
return user.groups.filter(name='Alumnos').exists()
return False
def in_teachers_group(user):
if user:
return user.groups.filter(name='Profesores').exists()
return False
class NewControlView(TemplateView):
template_name = 'new_control.html'
def get_context_data(self, **kwargs):
        context = super(NewControlView, self).get_context_data(**kwargs)
        # form = CrearTareaForm()
        # context['crear_homework_form'] = form
        # context['courses'] = self.request.user.teacher.courses.filter(year=timezone.now().year)
# context['homeworks'] = Homework.objects.filter(course__in=context['courses'])
return context
@method_decorator(user_passes_test(in_teachers_group, login_url='/'))
def dispatch(self, *args, **kwargs):
return super(NewControlView, self).dispatch(*args, **kwargs)
javrasya/watchdog | tests/__init__.py | Python | apache-2.0 | 951 | 0.001052 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sys import version_info
from os import name as OS_NAME
__all__= ['unittest', 'skipIfNtMove']
if version_info < (2, 7):
    import unittest2 as unittest
else:
import unittest
skipIfNtMove = unittest.skipIf(OS_NAME == 'nt', "windows can not detect moves")
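# A hypothetical usage sketch (not part of the original file): the decorator
# defined above skips move-detection tests when running on Windows, e.g.
#
#     class MoveTests(unittest.TestCase):
#         @skipIfNtMove
#         def test_detects_move(self):
#             ...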
krujos/strava-private-to-public | private-to-public.py | Python | apache-2.0 | 3,601 | 0.002499 |
"""
Really could have implemented this all in javascript on the client side...
"""
from __future__ import print_function
import requests
from flask import Flask, redirect, url_for, request, session, abort, jsonify
import os
import sys
import logging
import json
STRAVA_CLIENT_ID = 1367
Flask.get = lambda self, path: self.route(path, methods=['get'])
Flask.put = lambda self, path: self.route(path, methods=['put'])
Flask.post = lambda self, path: self.route(path, methods=['post'])
Flask.delete = lambda self, path: self.route(path, methods=['delete'])
app = Flask(__name__)
if not os.environ.get("CLIENT_SECRET"):
print("ERROR: CLIENT_SECRET is not defined", file=sys.stderr)
exit(1)
client_secret = os.environ.get("CLIENT_SECRET")
Flask.secret_key = client_secret
app.secret_key = client_secret
redirect_url = "http://127.0.0.1:5000"
@app.get('/')
def index():
"""Return static content, index.html only, or handle callbacks."""
#Call back from Strava for token exchange.
if request.args.get('code'):
code = request.args.get('code')
session.permanent = True
session['CODE'] = code
app.logger.debug("Code = %s " % code)
get_token(request.args.get('code'))
return redirect(url_for('static', filename='loggedin.html'))
return redirect(url_for('static', filename='index.html'))
def get_token(code):
data = {"client_id": STRAVA_CLIENT_ID,
"client_secret": client_secret,
"code": code}
url = 'https://www.strava.com/oauth/token'
app.logger.debug("Post URL = %s" % url)
    response = requests.post(url, data=data)
app.logger.info("Login post returned %d" % response.status_code)
app.logger.debug(response.json())
session['token'] = response.json()['access_token']
athlete = response.json()['athlete']
session['athlete_id'] = athlete['id']
session['athlete_name'] = athlete['firstname'] + " " + athlete['lastname']
@app.get('/athlete')
def get_current_user():
try:
return jsonify({"id": session['athlete_id'],
|
"name": session['athlete_name']})
except KeyError:
abort(404)
@app.get('/login')
def login():
return "https://www.strava.com/oauth/authorize?client_id=%s&response_type=code&redirect_uri=%s&scope=view_private,write" \
% (STRAVA_CLIENT_ID, redirect_url)
@app.get('/rides/<page>')
@app.get('/rides')
def get_rides(page=1):
"""Attempt to get all of a users rides so we can filter out the private ones"""
url = "https://www.strava.com/api/v3/athlete/activities"
data = {"per_page": 50, "page": page, "access_token": session['token']}
response = requests.get(url, data=data)
app.logger.debug("Strava return code = %d" % response.status_code)
app.logger.debug(response.json())
return json.dumps(response.json())#there has to be a better way.
@app.put('/ride/<ride_id>')
def update_ride(ride_id):
ride = request.json
app.logger.debug(ride)
if int(ride['id']) != int(ride_id):
abort(400)
app.logger.debug("Updating ride " + ride_id)
data = {"access_token": session['token']}
params = {"private": int(ride['private']), "trainer": int(ride['trainer'])}
url = "https://www.strava.com/api/v3/activities/" + ride_id
response = requests.put(url, data=data, params=params)
app.logger.debug(response.status_code)
return json.dumps(response.json())
if __name__ == '__main__':
app.logger.setLevel(logging.INFO)
file_handler = logging.FileHandler('strava.log')
app.logger.addHandler(file_handler)
app.run()
wieden-kennedy/django-haikus | setup.py | Python | bsd-3-clause | 1,122 | 0.008021 |
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
ROOT_DIR = os.path.dirname(__file__)
SOURCE_DIR = os.path.join(ROOT_DIR)
setup(
name="django_haikus",
description="Some classes for finding haik
|
us in text",
author="Grant Thomas",
author_email="grant.thomas@wk.com",
url="https://github.com/wieden-kennedy/django_haikus",
version="0.0.1",
dependency_links = ['http://github.com/wieden-kennedy/haikus/tarball/master#egg=haikus'],
install_requires=["nltk","django>=1.3.1","redis","requests","elementtree", "django-tagging
|
", "django-picklefield"],
packages=['django_haikus'],
zip_safe=False,
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
eayunstack/ceilometer | ceilometer/api/app.py | Python | apache-2.0 | 3,764 | 0 |
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from oslo_config import cfg
from oslo_log import log
from paste import deploy
import pecan
from werkzeug import serving
from ceilometer.api import hooks
from ceilometer.api import middleware
from ceilometer.i18n import _LI, _LW
LOG = log.getLogger(__name__)
CONF = cfg.CONF
OPTS = [
cfg.StrOpt('api_paste_config',
default="api_paste.ini",
help="Configuration file for WSGI definition of API."
),
]
API_OPTS = [
cfg.BoolOpt('pecan_debug',
default=False,
help='Toggle Pecan Debug Middleware.'),
cfg.IntOpt('default_api_return_limit',
min=1,
default=100,
help='Default maximum number of items returned by API request.'
),
]
CONF.register_opts(OPTS)
CONF.register_opts(API_OPTS, group='api')
def setup_app(pecan_config=None):
# FIXME: Replace DBHook with a hooks.TransactionHook
app_hooks = [hooks.ConfigHook(),
hooks.DBHook(),
hooks.NotifierHook(),
hooks.TranslationHook()]
pecan_config = pecan_config or {
"app": {
            'root': 'ceilometer.api.controllers.root.RootController',
'modules': ['ceilometer.api'],
}
}
pecan.configuration.set_config(dict(pecan_config), overwrite=True)
# NOTE(sileht): pecan debug won't work in multi-process environment
pecan_debug = CONF.api.pecan_debug
if CONF.api.workers and CONF.api.workers != 1 and pecan_debug:
pecan_debug = False
LOG.warning(_LW('pecan_debug cannot be enabled, if workers is > 1, '
'the value is overrided with False'))
app = pecan.make_app(
pecan_config['app']['root'],
debug=pecan_debug,
hooks=app_hooks,
wrap_app=middleware.ParsableErrorMiddleware,
guess_content_type_from_ext=False
)
return app
def load_app():
# Build the WSGI app
cfg_file = None
cfg_path = cfg.CONF.api_paste_config
if not os.path.isabs(cfg_path):
cfg_file = CONF.find_file(cfg_path)
elif os.path.exists(cfg_path):
cfg_file = cfg_path
if not cfg_file:
raise cfg.ConfigFilesNotFoundError([cfg.CONF.api_paste_config])
LOG.info("Full WSGI config used: %s" % cfg_file)
return deploy.loadapp("config:" + cfg_file)
def build_server():
app = load_app()
# Create the WSGI server and start it
host, port = cfg.CONF.api.host, cfg.CONF.api.port
LOG.info(_LI('Starting server in PID %s') % os.getpid())
LOG.info(_LI("Configuration:"))
cfg.CONF.log_opt_values(LOG, logging.INFO)
if host == '0.0.0.0':
LOG.info(_LI(
'serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%(vport)s')
% ({'sport': port, 'vport': port}))
else:
LOG.info(_LI("serving on http://%(host)s:%(port)s") % (
{'host': host, 'port': port}))
serving.run_simple(cfg.CONF.api.host, cfg.CONF.api.port,
app, processes=CONF.api.workers)
def app_factory(global_config, **local_conf):
return setup_app()
Koala-Kaolin/pyweb | src/__main__.py | Python | gpl-3.0 | 42 | 0 |
#!/usr/bin/python3
import gui

gui.main()
chapman-phys227-2016s/hw-1-seama107 | adaptive_trapezint.py | Python | mit | 1,279 | 0.013292 |
#!/usr/bin/python
import math
def trapezint(f, a, b, n):
    """
    Just for testing - uses trapezoidal approximation on f from a to b with
    n trapezoids
    """
output = 0.0
for i in range(int(n)):
f_output_lower = f( a + i * (b - a) / n )
f_output_upper = f( a + (i + 1) * (b - a) / n )
output += (f_output_lower + f_output_upper) * ((b-a)/n) / 2
return output
def second_derivative_approximation(f, x, h = .001):
"""
Approximates the second derivative with h (dx) = .001
"""
return (f(x + h) - 2 * f(x) + f(x - h))/h**2
def adaptive_trapezint(f, a, b, eps=1E-5):
"""
Uses trapazoidal approximation on f from a to b with an error value
of less than epsilon, to calculate the number of trapazoids
"""
max_second_derivative = 0
for i in range(10000):
i_second_d = abs(second_derivative_approximation(f, a + i * (b - a)/10000))
if( i_second_d > max_second_derivative):
max_second_derivative = i_second_d
h = math.sqrt(12 * eps / ((b - a) * max_second_derivative))
#There is a clear problem here, as if the second derivative is zero,
#h will become too large and there will be no approximation
n = (b - a)/h
return trapezint(f, a, b, n)
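# A minimal usage sketch (not part of the original file, added for
# illustration): integrate math.sin over [0, pi], whose exact value is 2.0.
if __name__ == '__main__':
    print(adaptive_trapezint(math.sin, 0, math.pi))  # expect a value close to 2.0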
imphil/fusesoc | tests/test_capi1.py | Python | gpl-3.0 | 10,310 | 0.012512 |
import difflib
import os
import pytest
from fusesoc.core import Core
def compare_fileset(fileset, name, files):
assert name == fileset.name
for i in range(len(files)):
assert files[i] == fileset.file[i].name
def test_core_info():
tests_dir = os.path.dirname(__file__)
cores_root = os.path.join(tests_dir, 'cores')
for core_name in ['sockit', 'mor1kx-generic']:
core = Core(os.path.join(cores_root, core_name, core_name+'.core'))
gen_info = '\n'.join([x for x in core.info().split('\n') if not 'Core root' in x])
with open(os.path.join(tests_dir, __name__, core_name+".info")) as f:
assert f.read() == gen_info, core_name
def test_core_parsing():
from fusesoc.vlnv import Vlnv
cores_root = os.path.join(os.path.dirname(__file__), 'cores', 'misc')
core = Core(os.path.join(cores_root, 'nomain.core'))
assert core.name == Vlnv("::nomain:0")
import sys
if sys.version_info[0] > 2:
with pytest.raises(SyntaxError) as e:
core = Core(os.path.join(cores_root, "duplicateoptions.core"))
assert "option 'file_type' in section 'fileset dummy' already exists" in str(e.value)
def test_capi1_get_parameters():
tests_dir = os.path.join(os.path.dirname(__file__),
__name__)
with pytest.raises(SyntaxError) as e:
core = Core(os.path.join(tests_dir, 'parameters_nodatatype.core'))
assert "Invalid datatype '' for parameter" in str(e.value)
with pytest.raises(SyntaxError) as e:
core = Core(os.path.join(tests_dir, 'parameters_invaliddatatype.core'))
assert "Invalid datatype 'badtype' for parameter" in str(e.value)
with pytest.raises(SyntaxError) as e:
core = Core(os.path.join(tests_dir, 'parameters_noparamtype.core'))
assert "Invalid paramtype '' for parameter" in str(e.value)
with pytest.raises(SyntaxError) as e:
core = Core(os.path.join(tests_dir, 'parameters_invalidparamtype.core'))
assert "Invalid paramtype 'badtype' for parameter" in str(e.value)
def test_get_scripts():
flag_combos = [{'target' : 'sim' , 'is_toplevel' : False},
{'target' : 'sim' , 'is_toplevel' : True},
{'target' : 'synth', 'is_toplevel' : False},
{'target' : 'synth', 'is_toplevel' : True},
]
filename = os.path.join(os.path.dirname(__file__), 'cores', 'misc', 'scriptscore.core')
core = Core(filename, '', 'dummy_build_root')
for flags in flag_combos:
env = {
            'BUILD_ROOT' : 'dummy_build_root',
            'FILES_ROOT' : 'dummyroot'
}
result = core.get_scripts("dummyroot", flags)
expected = {}
if flags['target'] == 'sim':
sections = ['post_run', 'pre_build', 'pre_run']
else:
if flags['is_toplevel']:
env['SYSTEM_ROOT'] = core.files_root
sections = ['pre_build', 'post_build']
else:
sections = []
for section in sections:
_name = flags['target']+section+'_scripts{}'
expected[section] = [{'cmd' : ['sh', os.path.join('dummyroot', _name.format(i))],
'name' : _name.format(i),
'env' : env} for i in range(2)]
assert expected == result
def test_get_tool():
cores_root = os.path.join(os.path.dirname(__file__), 'cores')
core = Core(os.path.join(cores_root, 'atlys', 'atlys.core'))
assert None == core.get_tool({'target' : 'sim', 'tool' : None})
assert 'icarus' == core.get_tool({'target' : 'sim', 'tool' : 'icarus'})
assert 'ise' == core.get_tool({'target' : 'synth', 'tool' : None})
assert 'vivado' == core.get_tool({'target' : 'synth', 'tool' : 'vivado'})
core = Core(os.path.join(cores_root, 'sockit', 'sockit.core'))
assert 'icarus' == core.get_tool({'target' : 'sim', 'tool' : None})
assert 'icarus' == core.get_tool({'target' : 'sim', 'tool' : 'icarus'})
del core.main.backend
assert None == core.get_tool({'target' : 'synth', 'tool' : None})
assert 'vivado' == core.get_tool({'target' : 'synth', 'tool' : 'vivado'})
core.main.backend = 'quartus'
def test_get_tool_options():
cores_root = os.path.join(os.path.dirname(__file__), 'cores')
core = Core(os.path.join(cores_root, 'mor1kx-generic', 'mor1kx-generic.core'))
assert {'iverilog_options' : ['-DSIM']} == core.get_tool_options({'is_toplevel' : True, 'tool' : 'icarus'})
assert {} == core.get_tool_options({'is_toplevel' : True, 'tool' : 'modelsim'})
assert {'fuse_options' : ['some','isim','options']} == core.get_tool_options({'is_toplevel' : True, 'tool' : 'isim'})
expected = {'xelab_options' : ['--timescale 1ps/1ps', '--debug typical',
'dummy', 'options', 'for', 'xelab']}
assert expected == core.get_tool_options({'is_toplevel' : True, 'tool' : 'xsim'})
assert {} == core.get_tool_options({'is_toplevel' : False, 'tool' : 'icarus'})
core = Core(os.path.join(cores_root, 'elf-loader', 'elf-loader.core'))
assert {'libs' : ['-lelf']} == core.get_tool_options({'is_toplevel' : False, 'tool' : 'verilator'})
assert {} == core.get_tool_options({'is_toplevel' : True, 'tool' : 'invalid'})
def test_get_toplevel():
filename = os.path.join(os.path.dirname(__file__),
__name__,
"atlys.core")
core = Core(filename)
assert 'orpsoc_tb' == core.get_toplevel({'tool' : 'icarus'})
assert 'orpsoc_tb' == core.get_toplevel({'tool' : 'icarus', 'testbench' : None})
assert 'tb' == core.get_toplevel({'tool' : 'icarus', 'testbench' : 'tb'})
assert 'orpsoc_top' == core.get_toplevel({'tool' : 'vivado'})
filename = os.path.join(os.path.dirname(__file__),
__name__,
"sockit.core")
core = Core(filename)
assert 'dummy_tb' == core.get_toplevel({'tool' : 'icarus'})
assert 'dummy_tb' == core.get_toplevel({'tool' : 'icarus', 'testbench' : None})
assert 'tb' == core.get_toplevel({'tool' : 'icarus', 'testbench' : 'tb'})
assert 'orpsoc_top' == core.get_toplevel({'tool' : 'vivado'})
def test_icestorm():
filename = os.path.join(os.path.dirname(__file__),
__name__,
"c3demo.core")
core = Core(filename)
assert len(core.file_sets) == 3
compare_fileset(core.file_sets[0], 'rtl_files', ['c3demo.v', 'ledpanel.v','picorv32.v'])
compare_fileset(core.file_sets[1], 'tb_files' , ['firmware.hex', '$YOSYS_DAT_DIR/ice40/cells_sim.v', 'testbench.v'])
#Check that backend files are converted to fileset properly
compare_fileset(core.file_sets[2], 'backend_files', ['c3demo.pcf'])
assert core.file_sets[2].file[0].file_type == 'PCF'
assert core.icestorm.export_files == []
assert core.icestorm.arachne_pnr_options == ['-s', '1', '-d', '8k']
assert core.icestorm.top_module == 'c3demo'
assert core.icestorm.warnings == []
def test_ise():
filename = os.path.join(os.path.dirname(__file__),
__name__,
"atlys.core")
core = Core(filename)
#Check filesets
assert len(core.file_sets) == 4
assert core.file_sets[0].name == 'verilog_src_files'
assert core.file_sets[1].name == 'verilog_tb_src_files'
assert core.file_sets[2].name == 'verilog_tb_private_src_files'
#Check that backend files are converted to fileset properly
compare_fileset(core.file_sets[3], 'backend_files', ['data/atlys.ucf'])
assert core.file_sets[3].file[0].file_type == 'UCF'
#Check backend section
assert core.ise.export_files == []
assert core.ise.family == 'spartan6'
assert core.ise.device == 'xc6slx45'
assert core.ise.package == 'csg324'
assert core.ise.speed == '-2'
assert core.ise.top_module == 'orpsoc_top'
assert core.ise.warnings == []
def test_quartus():
filename = os.path.join(os.path.dirname(__file__),
__name__,
prefetchnta/questlab | bin/x64bin/python/37/Lib/distutils/version.py | Python | lgpl-2.1 | 12,688 | 0.001497 |
#
# distutils/version.py
#
# Implements multiple version numbering conventions for the
# Python Module Distribution Utilities.
#
# $Id$
#
"""Provides classes to represent module version numbers (one class for
each style of version numbering). There are currently two such classes
implemented: StrictVersion and LooseVersion.
Every version number class implements the following interface:
* the 'parse' method takes a string and parses it to some internal
representation; if the string is an invalid version number,
'parse' raises a ValueError exception
* the class constructor takes an optional string argument which,
if supplied, is passed to 'parse'
* __str__ reconstructs the string that was passed to 'parse' (or
an equivalent string -- ie. one that will generate an equivalent
version number instance)
* __repr__ generates Python code to recreate the version number instance
* _cmp compares the current instance with either another instance
of the same class or a string (which will be parsed to an instance
of the same class, thus must follow the same rules)
"""
import re
class Version:
"""Abstract base class for version numbering classes. Just provides
constructor (__init__) and reproducer (__repr__), because those
seem to be the same for all version numbering classes; and route
rich comparisons to _cmp.
"""
def __init__ (self, vstring=None):
if vstring:
self.parse(vstring)
def __repr__ (self):
return "%s ('%s')" % (self.__class__.__name__, str(self))
def __eq__(self, other):
c = self._cmp(other)
if c is NotImplemented:
return c
return c == 0
def __lt__(self, other):
c = self._cmp(other)
if c is NotImplemented:
return c
return c < 0
def __le__(self, other):
c = self._cmp(other)
if c is NotImplemented:
return c
return c <= 0
def __gt__(self, other):
c = self._cmp(other)
if c is NotImplemented:
return c
return c > 0
def __ge__(self, other):
c = self._cmp(other)
if c is NotImplemented:
return c
return c >= 0
# Interface for version-number classes -- must be implemented
# by the following classes (the concrete ones -- Version should
# be treated as an abstract class).
# __init__ (string) - create and take same action as 'parse'
# (string parameter is optional)
# parse (string) - convert a string representation to whatever
# internal representation is appropriate for
# this style of version numbering
# __str__ (self) - convert back to a string; should be very similar
# (if not identical to) the string supplied to parse
# __repr__ (self) - generate Python code to recreate
# the instance
# _cmp (self, other) - compare two version numbers ('other' may
# be an unparsed version string, or another
# instance of your version class)
class StrictVersion (Version):
"""Version numbering for anal retentives and software idealists.
Implements the standard interface for version number classes as
described above. A version number consists of two or three
dot-separated numeric components, with an optional "pre-release" tag
on the end. The pre-release tag consists of the letter 'a' or 'b'
followed by a number. If the numeric components of two version
numbers are equal, then one with a pre-release tag will always
be deemed earlier (lesser) than one without.
The following are valid version numbers (shown in the order that
would be obtained by sorting according to the supplied cmp function):
0.4 0.4.0 (these two are equivalent)
0.4.1
0.5a1
0.5b3
0.5
0.9.6
1.0
1.0.4a3
1.0.4b1
1.0.4
The following are examples of invalid version numbers:
        1
        2.7.2.2
1.3.a4
1.3pl1
1.3c4
The rationale for this version numbering system will be explained
in the distutils documentation.
"""
version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
re.VERBOSE | re.ASCII)
def parse (self, vstring):
        match = self.version_re.match(vstring)
if not match:
raise ValueError("invalid version number '%s'" % vstring)
(major, minor, patch, prerelease, prerelease_num) = \
match.group(1, 2, 4, 5, 6)
if patch:
self.version = tuple(map(int, [major, minor, patch]))
else:
self.version = tuple(map(int, [major, minor])) + (0,)
if prerelease:
self.prerelease = (prerelease[0], int(prerelease_num))
else:
self.prerelease = None
def __str__ (self):
if self.version[2] == 0:
vstring = '.'.join(map(str, self.version[0:2]))
else:
vstring = '.'.join(map(str, self.version))
if self.prerelease:
vstring = vstring + self.prerelease[0] + str(self.prerelease[1])
return vstring
def _cmp (self, other):
if isinstance(other, str):
other = StrictVersion(other)
if self.version != other.version:
# numeric versions don't match
# prerelease stuff doesn't matter
if self.version < other.version:
return -1
else:
return 1
# have to compare prerelease
# case 1: neither has prerelease; they're equal
# case 2: self has prerelease, other doesn't; other is greater
# case 3: self doesn't have prerelease, other does: self is greater
# case 4: both have prerelease: must compare them!
if (not self.prerelease and not other.prerelease):
return 0
elif (self.prerelease and not other.prerelease):
return -1
elif (not self.prerelease and other.prerelease):
return 1
elif (self.prerelease and other.prerelease):
if self.prerelease == other.prerelease:
return 0
elif self.prerelease < other.prerelease:
return -1
else:
return 1
else:
assert False, "never get here"
# end class StrictVersion
# The rules according to Greg Stein:
# 1) a version number has 1 or more numbers separated by a period or by
# sequences of letters. If only periods, then these are compared
# left-to-right to determine an ordering.
# 2) sequences of letters are part of the tuple for comparison and are
# compared lexicographically
# 3) recognize the numeric components may have leading zeroes
#
# The LooseVersion class below implements these rules: a version number
# string is split up into a tuple of integer and string components, and
# comparison is a simple tuple comparison. This means that version
# numbers behave in a predictable and obvious way, but a way that might
# not necessarily be how people *want* version numbers to behave. There
# wouldn't be a problem if people could stick to purely numeric version
# numbers: just split on period and compare the numbers as tuples.
# However, people insist on putting letters into their version numbers;
# the most common purpose seems to be:
# - indicating a "pre-release" version
# ('alpha', 'beta', 'a', 'b', 'pre', 'p')
# - indicating a post-release patch ('p', 'pl', 'patch')
# but of course this can't cover all version number schemes, and there's
# no way to know what a programmer means without asking him.
#
# The problem is what to do with letters (and other non-numeric
# characters) in a version number. The current implementation does the
# o
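# A minimal sketch (not part of the original file) of the component-splitting
# idea the comment above describes, using the LooseVersion class this module
# defines:
#
#     >>> LooseVersion('1.5.1b2').version
#     [1, 5, 1, 'b', 2]
#
# Comparison is then a plain element-wise comparison of these lists.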
mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/django/core/management/utils.py | Python | mit | 3,739 | 0.001337 |
from __future__ import unicode_literals
import os
import sys
from subprocess import PIPE, Popen
from django.apps import apps as installed_apps
from django.utils import six
from django.utils.crypto import get_random_string
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_text
from .base import CommandError
def popen_wrapper(args, os_err_exc_type=CommandError, stdout_encoding='utf-8'):
"""
Friendly wrapper around Popen.
Returns stdout output, stderr output and OS status code.
"""
try:
p = Popen(args, shell=False, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt')
except OSError as e:
strerror = force_text(e.strerror, DEFAULT_LOCALE_ENCODING, strings_only=True)
six.reraise(os_err_exc_type, os_err_exc_type('Error executing %s: %s' %
(args[0], strerror)), sys.exc_info()[2])
output, errors = p.communicate()
return (
force_text(output, stdout_encoding, strings_only=True, errors='strict'),
force_text(errors, DEFAULT_LOCALE_ENCODING, strings_only=True, errors='replace'),
p.returncode
)
def handle_extensions(extensions):
"""
Organizes multiple extensions that are separated with commas or passed by
using --extension/-e multiple times.
For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
would result in an extension list: ['.js', '.txt', '.xhtml']
>>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
{'.html', '.js', '.py'}
>>> handle_extensions(['.html, txt,.tpl'])
{'.html', '.tpl', '.txt'}
"""
ext_list = []
for ext in extensions:
ext_list.extend(ext.replace(' ', '').split(','))
for i, ext in enumerate(ext_list):
if not ext.startswith('.'):
ext_list[i] = '.%s' % ext_list[i]
return set(ext_list)
def find_command(cmd, path=None, pathext=None):
if path is None:
path = os.environ.get('PATH', '').split(os.pathsep)
if isinstance(path, six.string_types):
path = [path]
# check if there are funny path extensions for executables, e.g. Windows
if pathext is None:
pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
# don't use extensions if the command ends with one of them
for ext in pathext:
if cmd.endswith(ext):
pathext = ['']
break
# check if we find the command on PATH
for p in path:
f = os.path.join(p, cmd)
if os.path.isfile(f):
return f
for ext in pathext:
fext = f + ext
if os.path.isfile(fext):
return fext
return None
def get_random_secret_key():
"""
    Return a 50 character random string usable as a SECRET_KEY setting value.
"""
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
return get_random_string(50, chars)
def parse_apps_and_model_labels(labels):
"""
Parse a list of "app_label.ModelName" or "app_label" strings into actual
objects and return a two-element tuple:
(set of model classes, set of app_configs).
Raise a CommandError if some specified models or apps don't exist.
"""
apps = set()
models = set()
for label in labels:
if '.' in label:
try:
model = installed_apps.get_model(label)
except LookupError:
raise CommandError('Unknown model: %s' % label)
models.add(model)
else:
try:
app_config = installed_apps.get_app_config(label)
except LookupError as e:
raise CommandError(str(e))
apps.add(app_config)
return models, apps
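# A hypothetical usage sketch (not part of the original module), assuming the
# default contrib apps are installed and app registries are loaded:
#
#     >>> models, apps = parse_apps_and_model_labels(['auth.User', 'admin'])
#
# 'auth.User' resolves to a model class, 'admin' to an AppConfig.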
akhileshpillai/treeherder | treeherder/etl/pulse_consumer.py | Python | mpl-2.0 | 4,116 | 0 |
import logging
from django.conf import settings
from kombu import (Exchange,
Queue)
from kombu.mixins import ConsumerMixin
from treeherder.etl.common import fetch_json
from treeherder.etl.tasks.pulse_tasks import (store_pulse_jobs,
store_pulse_resultsets)
logger = logging.getLogger(__name__)
class PulseConsumer(ConsumerMixin):
"""
Consume jobs from Pulse exchanges
"""
def __init__(self, connection, queue_suffix):
self.connection = connection
        self.consumers = []
self.queue = None
config = settings.PULSE_DATA_INGESTION_CONFIG
if not config:
raise ValueError("PULSE_DATA_INGESTION_CONFIG is required for the "
"JobConsumer c
|
lass.")
self.queue_name = "queue/{}/{}".format(config.username, queue_suffix)
def get_consumers(self, Consumer, channel):
return [
Consumer(**c) for c in self.consumers
]
def bind_to(self, exchange, routing_key):
if not self.queue:
self.queue = Queue(
name=self.queue_name,
channel=self.connection.channel(),
exchange=exchange,
routing_key=routing_key,
durable=settings.PULSE_DATA_INGESTION_QUEUES_DURABLE,
auto_delete=settings.PULSE_DATA_INGESTION_QUEUES_AUTO_DELETE
)
self.consumers.append(dict(queues=self.queue,
callbacks=[self.on_message]))
# just in case the queue does not already exist on Pulse
self.queue.declare()
else:
self.queue.bind_to(exchange=exchange, routing_key=routing_key)
def unbind_from(self, exchange, routing_key):
self.queue.unbind_from(exchange, routing_key)
def close(self):
self.connection.release()
def prune_bindings(self, new_bindings):
# get the existing bindings for the queue
bindings = []
try:
bindings = self.get_bindings(self.queue_name)["bindings"]
except Exception:
logger.error("Unable to fetch existing bindings for {}".format(
self.queue_name))
logger.error("Data ingestion may proceed, "
"but no bindings will be pruned")
# Now prune any bindings from the queue that were not
# established above.
# This indicates that they are no longer in the config, and should
# therefore be removed from the durable queue bindings list.
for binding in bindings:
if binding["source"]:
binding_str = self.get_binding_str(binding["source"],
binding["routing_key"])
if binding_str not in new_bindings:
self.unbind_from(Exchange(binding["source"]),
binding["routing_key"])
logger.info("Unbound from: {}".format(binding_str))
def get_binding_str(self, exchange, routing_key):
"""Use consistent string format for binding comparisons"""
return "{} {}".format(exchange, routing_key)
def get_bindings(self, queue_name):
"""Get list of bindings from the pulse API"""
return fetch_json("{}queue/{}/bindings".format(
settings.PULSE_GUARDIAN_URL, queue_name))
class JobConsumer(PulseConsumer):
def on_message(self, body, message):
store_pulse_jobs.apply_async(
args=[body,
message.delivery_info["exchange"],
message.delivery_info["routing_key"]],
routing_key='store_pulse_jobs'
)
message.ack()
class ResultsetConsumer(PulseConsumer):
def on_message(self, body, message):
store_pulse_resultsets.apply_async(
args=[body,
message.delivery_info["exchange"],
message.delivery_info["routing_key"]],
routing_key='store_pulse_resultsets'
)
message.ack()
cbeauvais/zAWygzxkeSjUBGGVsgMGTF56xvR | survox_api/resources/library/sample_dnc.py | Python | mit | 5,130 | 0.002534 |
import os
import json
from ...resources.base import SurvoxAPIBase
from ...resources.exception import SurvoxAPIRuntime, SurvoxAPINotFound
from ...resources.valid import valid_url_field
class SurvoxAPIDncList(SurvoxAPIBase):
"""
Class to manage DNC lists.
"""
def __init__(self, base_url=None, headers=None, verbose=True):
super(SurvoxAPIDncList, self).__init__(base_url, headers, verbose)
self.url = '/sample/dnc/'
def list(self):
"""
Fetch a list of available DNC lists
:return: list of DNC lists
"""
return self.api_get(endpoint=self.url)
def create(self, name, description, dnc_type, account, filename=None, exists_okay=False):
"""
Create a new DNC list
:param name: new DNC list name
:param dnc_type: DNC list type
:param description: DNC description
:param account: Survox runtime account to put the DNC list into
:param filename: csv file containing dnc information
:param exists_okay: return existing list if True, else raise exception
:return: dnc list information
"""
valid, msg = valid_url_field('Do-Not-Contact', name, 1, 256)
if not valid:
raise SurvoxAPIRuntime(msg)
valid_dnc_types = ['phone', 'prefix', 'email']
if dnc_type not in valid_dnc_types:
            raise SurvoxAPIRuntime('Unknown DNC type "{type}". Must be one of {opts}'.format(
                type=dnc_type, opts=json.dumps(valid_dnc_types)))
try:
s = self.api_get(endpoint='{base}{name}/'.format(base=self.url, name=name))
if not exists_okay:
                raise SurvoxAPIRuntime('Do-Not-Contact already exists: {name}'.format(name=name))
except SurvoxAPINotFound:
s = self.api_post(endpoint=self.url, data={
'name': name,
'dnc_type': dnc_type,
'description': description,
'account': account
})
if s and filename:
if not os.path.isfile(filename):
                raise SurvoxAPIRuntime('No such filename for Do-Not-Contact: {name}'.format(name=filename))
x = SurvoxAPIDnc(name, base_url=self.base_url, headers=self.auth_headers, verbose=self.verbose)
upload = x.upload(filename)
s['upload_result'] = upload
return s
def delete(self):
"""
delete all DNC lists
:return: {}
"""
return self.api_delete(endpoint=self.url)
class SurvoxAPIDnc(SurvoxAPIBase):
"""
Class for working with a specific DNC list
"""
def __init__(self, name, base_url=None, headers=None, verbose=True):
super(SurvoxAPIDnc, self).__init__(base_url, headers, verbose)
self.name = name
self.url = '/sample/dnc/{name}/'.format(name=name)
self.upload_url = "{base}upload/".format(base=self.url)
self.download_url = "{base}download/".format(base=self.url)
def get(self):
try:
return self.api_get(endpoint=self.url)
except SurvoxAPINotFound:
return None
def set(self, description=None, realtime=None):
"""
update a DNC entry
:param description: new description for DNC list
:param realtime: if True, set DNC list as realtime, unset as realtime if False
:return: return the DNC list properties
"""
dnc = self.get()
if not dnc:
            raise SurvoxAPIRuntime('No DNC available named: {name}'.format(name=self.name))
if not description and not realtime:
            raise SurvoxAPIRuntime('No properties passed to set for DNC named: {name}'.format(name=self.name))
changes = {}
if description and description != dnc['description']:
changes['description'] = description
if realtime and realtime != dnc['realtime']:
changes['realtime'] = realtime
if changes:
return self.api_put(endpoint=self.url, data=changes)
else:
return dnc
def delete(self):
"""
Delete the specified DNC list
:return:
"""
return self.api_delete(endpoint=self.url)
def upload(self, filename, block_size=None):
"""
Upload records into DNC list
:param filename: file to upload
:param block_size: block size of upload
:return:
"""
return self.api_upload(self.upload_url, filename, block_size=block_size)
def download(self, filename):
"""
Download a dnc file in csv format
:param filename: file to save as
:return:
"""
download_location = self.api_get(self.download_url)
if not download_location:
raise SurvoxAPIRuntime('No DNC available for download: {name}'.format(name=self.name))
return self.api_download(download_location, filename)
dgraph-io/pydgraph | pydgraph/__init__.py | Python | apache-2.0 | 849 | 0.001178 |
# Copyright 2018 Dgraph Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pydgraph.proto.api_pb2 import Operation, Payload, Request, Response, Mutation, TxnContext,\
Check, Version, NQuad, Value, Facet, Latency
from pydgraph.client_stub import *
from pydgraph.client import *
from pydgraph.txn import *
from pydgraph.errors import *
shirishgoyal/crowdsource-platform | crowdsourcing/migrations/0093_merge.py | Python | mit | 334 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-06-09 22:16
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
        ('crowdsourcing', '0092_merge'),
        ('crowdsourcing', '0092_auto_20160608_0236'),
]
operations = [
]
rodricios/crawl-to-the-future | crawlers/Way-Back/waybacktrack.py | Python | gpl-2.0 | 7,577 | 0.006071 |
"""waybacktrack.py
Use this to extract Way Back Machine's
url-archives of any given domain!
TODO: reiterate entire design!
"""
import time
import os
import urllib2
import random
from math import ceil
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
from lxml import html
from lxml.html import clean
ARCHIVE_DOMAIN = "http://web.archive.org"
CURR_DIR = os.path.dirname(__file__)
DATASET_DIR = os.path.join(CURR_DIR, '../../dataset/')
def archive_domain(domain, year, dir_path=DATASET_DIR,
percent=0, debug=False, throttle=1):
"""
domain
@type domain: string
@param domain: the domain of the website ie. www.nytimes.com
@type year: int
@param year: the year to extract archives from
@type dir_path: string
@param dir_path: the directory path to store archive, if
empty, directory will automatically be created
TODO: Think of better solution to storing
downloaded archives
@type percent: int
@param percent: the percentage of Way Back archives to crawl
@rtype:
@return: Returns a list of archived sites
"""
# TODO: Improve this for module portability
# WARNING: Module will likely break if used outside of
# crawl-to-the-future project
# automatically find or eventually create directory
# based off domain name
# Found way to check if file is being ran in crawl-to-the-future
# super "hacky" though
# TODO: Find better way to check if module is getting ran in
# in crawl-to-the-future project
if os.path.split(
os.path.abspath(os.path.join(__file__, os.pardir)))[1] != "Way-Back":
raise Exception("Please manually
|
specify 'dir_name' value")
if dir_path is DATASET_DIR:
dir_path = os.path.join(dir_path, domain + '/')
if not os.path.exists(dir_path):
#raise IOError("[Errno 2] No such file or directory: '" + dir_path + "'")
# this part is shady
os.makedirs(dir_path)
if not isinstance(dir_path, basestring):
        raise Exception(
            "Directory - third arg. - path must be a string.")
ia_year_url = ARCHIVE_DOMAIN + "/web/" + str(year) + \
"*/http://" + domain + "/"
ia_parsed = html.parse(ia_year_url)
domain_snapshots = list(set(ia_parsed.xpath('//*[starts-with(@id,"' +
str(year) + '-")]//a/@href')))
#snapshot_age_span is a percentage of total snapshots to process from
#the given year
#ie. if percent is 100, and there are a total of 50 snapshots for
#www.cnn.com, we will crawl (to a depth of 1 atm) all 50 snapshots
snapshot_age_span = 1 if percent <= 0 \
else len(domain_snapshots) - 1 \
if percent >= 100 \
else int(percent*len(domain_snapshots)/100)
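    # Worked example (added for illustration): percent=50 with 40 snapshots
    # gives int(50 * 40 / 100) == 20 snapshots to process.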
if debug:
print "Extracting links from: ", domain
# http://margerytech.blogspot.com/2011/06/python-get-last-directory-name-in-path.html
print "Current directory: ", os.path.split(
os.path.abspath(os.path.join(__file__, os.pardir)))[1]
print "Storing files in: ", os.path.abspath(dir_path)
print "Number of domain snapshots: ", len(domain_snapshots)
print "Number of domain snapshots to process: ", snapshot_age_span + 1
random.shuffle(domain_snapshots)
forward_links = []
#for snapshot in domain_snapshots[:snapshot_age_span]:
for snapshot in domain_snapshots[:3]:
curr_snapshot_flinks = get_forwardlink_snapshots(snapshot)
forward_links.extend(curr_snapshot_flinks)
if debug:
print "snapshot url: ", snapshot
print "forward link count: ", len(curr_snapshot_flinks)
random.shuffle(forward_links)
if debug:
print "total number of foward links to download: ", len(forward_links)
random.shuffle(forward_links)
# archive forward links
archived_links = []
duds = []
for forwardlink in forward_links:
if archive(forwardlink, year, dir_path, debug, throttle):
archived_links.append(forwardlink)
else:
duds.append(forwardlink)
if debug:
print "Number of archived forward links: ", len(archived_links)
print "Number of duds: ", len(duds)
return archived_links, duds
# I know I'm breaking so many rules by not separating concerns
def archive(page, year, dir_path, debug=False, throttle=1):
"""
Check to see if downloaded forward link
satisfies the archival year specification
ie. (2000, 2005, 2010)
"""
#files = [f for f in os.listdir(dir_path) if os.path.isfile(f)]
if debug:
print "requesting ", page
page_file = page.rsplit('/web/')[1].replace('http://', '').replace('-','_')
page_file = page_file.replace('/', '_').replace(':', '_').replace('&','_')
page_file = page_file.replace('?', '_').replace('*','_').replace('=','_')
file_path = dir_path + page_file
if os.path.isfile(file_path):
if debug:
print "Already saved: ", page_file
print
return False
try:
html_file = urllib2.urlopen(ARCHIVE_DOMAIN + page)
except IOError:
if debug:
print "Failed to open request for ", ARCHIVE_DOMAIN + page
print
return False
if html_file.getcode() == 302:
if debug:
print "Got HTTP 302 response for ", ARCHIVE_DOMAIN + page
print
return False
html_string = str(html_file.read())
if html_string.find("HTTP 302 response") != -1:
if debug:
print "Got HTTP 302 response for ", ARCHIVE_DOMAIN + page
print
return False
archival_year_spec = ARCHIVE_DOMAIN + '/web/' + str(year)
page_url = html_file.geturl()
if page_url.startswith(archival_year_spec):
if debug:
print "saving ", page_url
print
try:
with open(file_path, 'wb') as f:
f.write(BytesIO(html_string).read())
time.sleep(throttle)
except IOError as e:
if debug:
print "Got error: ", e
return False
return True
else:
return False
def get_forwardlink_snapshots(parent_site):
"""
@type index: string
@param index: the index.html page from which to extract forward links
@type year: int
@param year: the year to extract archives from
"""
try:
parsed_parent_site = html.parse(ARCHIVE_DOMAIN+parent_site)
except IOError:
print "Did not get extract links in ", ARCHIVE_DOMAIN+parent_site
return []
#cleaner = html.clean.Cleaner(scripts=True, javascript=True,style=True, kill_tags = ["img"])
cleaner = clean.Cleaner(scripts=True, javascript=True, comments=True,
style=True, meta=True, processing_instructions=True, embedded=True,
frames=True, forms=True, kill_tags=["noscript", "iframe", "img"])
parsed_parent_site = cleaner.clean_html(parsed_parent_site)
# spec archival year
# check to see if the archival year of a forward link
# is that of the parent (ie. 2000|2005|2010)
all_forwardlinks = parsed_parent_site.xpath('//a[starts-with(@href,"' +
parent_site[:9] +'")]/@href')
return all_forwardlinks
OpenBeta/beta | tests/test_api_routes.py | Python | gpl-3.0 | 1,343 | 0.001489 |
import json
from apiserver.model import Route
import utils
def test_post(app, apiusers, db, default_headers, post_geojson):
with app.test_client() as client:
res = client.get('/routes?api_key=' + apiusers['valid'].api_key, default_headers['get'])
assert res.status_code == 200
ret = post_geojson('ying_yang_routes.geojson')
rows = db.session.query(Route).all()
assert len(rows) > 0
expected_data = ret['expected']
assert len(rows) == len(expected_data)
def test_query_by_radius(app, db, good_key, default_headers, post_geojson):
ret2 = post_geojson('caustic_cock.geojson')
assert ret2['response'].status_code == 200
with app.test_client() as client:
latlng = utils.geojson_to_lat_lng(ret2['expected'])
radius = 50 # caustic cock should be about 1km from yin yang
query_str = '&
|
latlng={}&r={}'.format(latlng, radius)
res = client.get('/routes?api_key=' + good_key + query_str, default_headers['get'])
assert res.status_code == 200
actual_json = json.loads(res.data)
assert len(actual_json['features']) > 0, "
|
Expect 1 route"
actual_route = Route(actual_json['features'][0])
expected = Route(ret2['expected']['features'][0])
assert actual_route == expected, "Expect route to be equal"
Endika/manufacture | mrp_sale_info/__init__.py | Python | agpl-3.0 | 165 | 0 |
# -*- coding: utf-8 -*-
# © 2016 Antiun Ingenieria S.L. - Javier Iniesta
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import models
opendatateam/udata | udata/tests/api/test_swagger.py | Python | agpl-3.0 | 763 | 0 |
import json
from flask import url_for
from flask_restplus import schemas
from udata.tests.helpers import assert200
class SwaggerBlueprintTest:
modules = []
def test_swagger_resource_type(self, api):
        response = api.get(url_for('api.specs'))
assert200(response)
swagger = json.loads(response.data)
expected = swagger['paths']['/datasets/{dataset}/resources/']
expected = expected['put']['responses']['200']['schema']['type']
assert expected == 'array'
    def test_swagger_specs_validate(self, api):
response = api.get(url_for('api.specs'))
try:
schemas.validate(response.json)
except schemas.SchemaValidationError as e:
print(e.errors)
raise
tomhenderson/ns-3-dev-git | src/topology-read/bindings/modulegen__gcc_LP64.py | Python | gpl-2.0 | 251,206 | 0.014602 |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.topology_read', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterat
|
or*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
module.add_class('Mac8Address', import_from_module='ns.network')
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::ItemType [enumeration]
module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagDat
tjcsl/cslbot | cslbot/commands/botspam.py | Python | gpl-2.0 | 1,606 | 0.001868 |
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from random import choice
from ..helpers.command import Command
from ..helpers.misc import get_fortune
from ..helpers.textutils import gen_lenny
from ..helpers.web import get_urban
def gen_fortune(send):
for line in get_fortune('-o').splitlines():
send(line)
def gen_urban(send, session, key):
defn, url = get_urban("", session, key)
send(defn)
if url:
send("See full definition at %s" % url)
@Command('botspam', ['config', 'db'])
def cmd(send, _, args):
"""Abuses the bot.
Syntax: {command}
"""
def lenny_send(msg):
send(gen_lenny(msg))
key = args['config']['api']['bitlykey']
cmds = [lambda: gen_fortune(lenny_send), lambda: gen_urban(lenny_send, args['db'], key)]
choice(cmds)()
MartinHjelmare/home-assistant | homeassistant/components/nanoleaf/light.py | Python | apache-2.0 | 6,951 | 0 |
"""Support for Nanoleaf Lights."""
import logging
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR,
ATTR_TRANSITION, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS,
SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT,
SUPPORT_TRANSITION, Light)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN
import homeassistant.helpers.config_validation as cv
from homeassistant.util import color as color_util
from homeassistant.util.color import \
color_temperature_mired_to_kelvin as mired_to_kelvin
from homeassistant.util.json import load_json, save_json
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'Nanoleaf'
DATA_NANOLEAF = 'nanoleaf'
CONFIG_FILE = '.nanoleaf.conf'
ICON = 'mdi:triangle-outline'
SUPPORT_NANOLEAF = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT |
SUPPORT_COLOR | SUPPORT_TRANSITION)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Nanoleaf light."""
from pynanoleaf import Nanoleaf, Unavailable
if DATA_NANOLEAF not in hass.data:
hass.data[DATA_NANOLEAF] = dict()
token = ''
if discovery_info is not None:
host = discovery_info['host']
name = discovery_info['hostname']
# if device already exists via config, skip discovery setup
if host in hass.data[DATA_NANOLEAF]:
return
_LOGGER.info("Discovered a new Nanoleaf: %s", discovery_info)
conf = load_json(hass.config.path(CONFIG_FILE))
if conf.get(host, {}).get('token'):
token = conf[host]['token']
else:
host = config[CONF_HOST]
name = config[CONF_NAME]
token = config[CONF_TOKEN]
nanoleaf_light = Nanoleaf(host)
if not token:
token = nanoleaf_light.request_token()
if not token:
_LOGGER.error("Could not generate the auth token, did you press "
"and hold the power button on %s"
"for 5-7 seconds?", name)
return
conf = load_json(hass.config.path(CONFIG_FILE))
conf[host] = {'token': token}
save_json(hass.config.path(CONFIG_FILE), conf)
nanoleaf_light.token = token
try:
nanoleaf_light.available
except Unavailable:
_LOGGER.error(
"Could not connect to Nanoleaf Light: %s on %s", name, host)
return
hass.data[DATA_NANOLEAF][host] = nanoleaf_light
add_entities([NanoleafLight(nanoleaf_light, name)], True)
class NanoleafLight(Light):
"""Representation of a Nanoleaf Light."""
def __init__(self, light, name):
"""Initialize an Nanoleaf light."""
self._available = True
self._brightness = None
self._color_temp = None
self._effect = None
self._effects_list = None
self._light = light
self._name = name
self._hs_color = None
self._state = None
@property
def available(self):
"""Return availability."""
return self._available
@property
def brightness(self):
"""Return the brightness of the light."""
if self._brightness is not None:
return int(self._brightness * 2.55)
return None
@property
def color_temp(self):
"""Return the current color temperature."""
if self._color_temp is not None:
return color_util.color_temperature_kelvin_to_mired(
self._color_temp)
return None
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._effects_list
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return 154
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return 833
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def icon(self):
"""Return
|
the icon to use in the frontend, if any."""
return ICON
@property
def is_on(self):
"""Return true if light is on."""
return self._state
@property
def hs_color(self):
"""Return the color in HS."""
return self._hs_color
@property
    def supported_features(self):
"""Flag supported features."""
return SUPPORT_NANOLEAF
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
brightness = kwargs.get(ATTR_BRIGHTNESS)
hs_color = kwargs.get(ATTR_HS_COLOR)
color_temp_mired = kwargs.get(ATTR_COLOR_TEMP)
effect = kwargs.get(ATTR_EFFECT)
transition = kwargs.get(ATTR_TRANSITION)
if hs_color:
hue, saturation = hs_color
self._light.hue = int(hue)
self._light.saturation = int(saturation)
if color_temp_mired:
self._light.color_temperature = mired_to_kelvin(color_temp_mired)
if transition:
if brightness: # tune to the required brightness in n seconds
self._light.brightness_transition(
int(brightness / 2.55), int(transition))
else: # If brightness is not specified, assume full brightness
self._light.brightness_transition(100, int(transition))
else: # If no transition is occurring, turn on the light
self._light.on = True
if brightness:
self._light.brightness = int(brightness / 2.55)
if effect:
self._light.effect = effect
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
transition = kwargs.get(ATTR_TRANSITION)
if transition:
self._light.brightness_transition(0, int(transition))
else:
self._light.on = False
def update(self):
"""Fetch new state data for this light."""
from pynanoleaf import Unavailable
try:
self._available = self._light.available
self._brightness = self._light.brightness
self._color_temp = self._light.color_temperature
self._effect = self._light.effect
self._effects_list = self._light.effects
self._hs_color = self._light.hue, self._light.saturation
self._state = self._light.on
except Unavailable as err:
_LOGGER.error("Could not update status for %s (%s)",
self.name, err)
self._available = False
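# Editor's sketch (not part of the original file): the two unit conversions
# the entity above relies on -- Home Assistant's 0-255 brightness versus
# Nanoleaf's 0-100 scale, and the mired/kelvin relationship that
# color_temperature_mired_to_kelvin implements. Sample values are made up.
def _ha_to_nanoleaf_brightness(value):
    # Home Assistant uses 0-255, Nanoleaf uses 0-100.
    return int(value / 2.55)
def _mired_to_kelvin(mired):
    # Kelvin and mireds are reciprocal: K = 1,000,000 / mired.
    return 1000000.0 / mired
assert _ha_to_nanoleaf_brightness(255) == 100
assert _mired_to_kelvin(250) == 4000.0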
|
riverloopsec/beekeeperwids
|
beekeeperwids/drone/daemon.py
|
Python
|
gpl-2.0
| 8,678
| 0.005646
|
#!/usr/bin/python
import sys
import plugins
import flask
import argparse
import os
import urllib2, urllib
import threading
import time
import socket
import subprocess
import random
import json
import signal
import traceback
from uuid import uuid4 as generateUUID
from killerbee import kbutils
from beekeeperwids.utils.errors import ErrorCodes as ec
from beekeeperwids.utils import KBLogUtil, KBInterface
from beekeeperwids.drone.plugins.capture import CapturePlugin
class DroneDaemon:
def __init__(self, name, port):
signal.signal(signal.SIGINT, self.SIGINT)
self.port = port
self.name = name
self.logutil = KBLogUtil(self.name, 'Daemon', os.getpid())
self.interfaces = {}
self.plugins = {}
self.pid = os.getpid()
def SIGINT(self, s, f):
        #TODO find a cleaner way to only handle signals from the parent process ?
if self.pid == os.getpid():
self.logutil.log("SIGINT")
signal.signal(signal.SIGINT, signal.SIG_IGN)
self.shutdown = True
self.shutdownDaemon()
def handleException(self):
etb = traceback.format_exc()
print(etb)
self.logutil.trace(etb)
return json.dumps({'success':False, 'data':str(etb)})
def runChecks(self):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', self.port))
s.close()
except socket.error:
print("Error Starting Drone:")
print("Socket TCP {0} already bound".format(self.port))
sys.exit()
def startDaemon(self):
self.runChecks()
self.logutil.writePID()
self.logutil.startlog()
self.logutil.log("Starting DroneDaemon")
self.enumerateInterfaces()
self.startRestServer()
def shutdownDaemon(self):
self.logutil.log('Initiating shutdown')
self.stopRunningPlugins()
self.logutil.log('Completed shutdown')
self.logutil.cleanup()
# TODO: verify that all subprocess have been terminated
sys.exit()
def startRestServer(self):
self.logutil.log('Starting REST Server on port {0}'.format(self.port))
app = flask.Flask(__name__)
app.add_url_rule('/task', None, self.processTaskRequest, methods=['POST'])
app.add_url_rule('/detask', None, self.processDetaskRequest, methods=['POST'])
app.add_url_rule('/status', None, self.processStatusGetRequest, methods=['POST'])
app.run(port=self.port, threaded=True)
def handleUnknownException(self):
etb = traceback.format_exc()
self.logutil.trace(etb)
        return self.formatResponse(error=ec.ERROR_UnknownException, data=str(etb))
def formatResponse(self, error, data):
return json.dumps({'error':error, 'data':data})
def processTaskRequest(self):
self.logutil.log('Processing Task Request')
try:
data = json.loads(flask.request.data)
uuid = data.get('uuid')
plugin = data.get('plugin')
channel = data.get('channel')
parameters = data.get('parameters')
self.logutil.log('Processing Task Request: {0} ({1})'.format(uuid, plugin))
(error,data) = self.taskPlugin(plugin, channel, uuid, parameters)
return self.formatResponse(error,data)
except Exception:
return self.handleUnknownException()
def processDetaskRequest(self):
self.logutil.log('Processing Detask Request')
try:
data = json.loads(flask.request.data)
uuid = data.get('uuid')
(error,data) = self.detaskPlugin(uuid)
return self.formatResponse(error,None)
except Exception:
return self.handleUnknownException()
def processStatusGetRequest(self):
self.logutil.log('Processing Status Get Request')
try:
status = {}
status['config'] = {}
status['config']['pid'] = self.pid
status['config']['name'] = self.name
status['interfaces'] = list((interface.info() for interface in self.interfaces.values()))
status['plugins'] = list((plugin.info() for plugin in self.plugins.values()))
return self.formatResponse(None, status)
except Exception:
self.handleUnknownException()
def loadPluginClass(self, plugin):
if plugin == 'CapturePlugin':
return CapturePlugin
def taskPlugin(self, plugin, channel, uuid, parameters):
self.logutil.debug('Tasking Plugin ({0},{1})'.format(plugin,channel))
pluginObject = self.plugins.get((plugin,channel), None)
if pluginObject == None:
self.logutil.log('No Instance of ({0},{1}) Found - Starting New one'.format(plugin, channel))
            (error,data) = self.startPlugin(plugin,channel)
            if error == None:
pluginObject = data
else:
return (error,data)
try:
self.logutil.log('Tasking Plugin: ({0}, ch.{1}) with Task {2}'.format(plugin, channel, uuid))
success = pluginObject.task(uuid, parameters)
if success == False:
error = ec.ERROR_DRONE_UnknownTaskingFailure
else:
error = None
return (error,None)
except Exception:
self.handleException()
def startPlugin(self, plugin, channel):
self.logutil.debug('Starting Plugin ({0},{1})'.format(plugin,channel))
try:
interface = self.getAvailableInterface()
if interface == None:
                self.logutil.log('Failed to Start Plugin - No Available Interfaces')
error = ec.ERROR_DRONE_UnavailableInterface
return (error, None)
pluginClass = self.loadPluginClass(plugin)
if pluginClass == None:
self.logutil.log('Failed to Start Plugin - Plugin Module: {0} does not exist'.format(plugin))
error = ec.ERROR_DroneFailedToLoadPlugin
return (error,plugin)
self.logutil.log('Acquired Interface: {0}'.format(interface.device))
self.logutil.log('Loaded Plugin Class: {0}'.format(pluginClass))
pluginObject = pluginClass([interface], channel, self.name)
self.plugins[(plugin,channel)] = pluginObject
self.logutil.log('Successfully Started Plugin')
time.sleep(0.5)
error = None
data = pluginObject
return (error,data)
except Exception:
self.handleException()
def detaskPlugin(self, uuid):
self.logutil.log('Processing Detask Request for {0}'.format(uuid))
try:
for pluginKey,pluginObject in self.plugins.items():
for task_uuid in pluginObject.tasks.keys():
if task_uuid == uuid:
detask_success = pluginObject.detask(uuid)
if detask_success == False:
error = ec.ERROR_DroneUnknownDetaskingFailure
return (error,None)
time.sleep(2)
if pluginObject.active == False:
del(self.plugins[pluginKey])
                    self.logutil.log('Successfully detasked {0} from {1}'.format(uuid, str(pluginObject.desc)))
return (None,None)
except Exception:
return self.handleException()
def stopRunningPlugins(self):
self.logutil.log('Stopping Running Plugins')
for plugin in self.plugins.values():
if plugin.active == True:
self.logutil.log("Stopping Plugin: {0}".format(plugin.desc))
plugin.shutdown()
if plugin.active:
print("had a problem shutting down plugin")
self.logutil.log('Running plugins have been terminated')
def getAvailableInterface(self):
for interface in self.interfaces.values():
            if not interface.active:
                return interface
        return None
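# Editor's sketch of a matching client for the REST routes registered in
# startRestServer() above. The host and port here are hypothetical; only the
# POST /status, /task and /detask routes and the {'error': ..., 'data': ...}
# envelope built by formatResponse() come from the daemon code itself.
import json
import urllib2
def query_drone_status(host='127.0.0.1', port=9999):
    request = urllib2.Request('http://{0}:{1}/status'.format(host, port),
                              data=json.dumps({}),
                              headers={'Content-Type': 'application/json'})
    reply = json.loads(urllib2.urlopen(request).read())
    if reply['error'] is not None:
        raise RuntimeError('drone returned error {0}'.format(reply['error']))
    return reply['data']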
|
BirkbeckCTP/janeway
|
src/proofing/models.py
|
Python
|
agpl-3.0
| 9,317
| 0.001717
|
__copyright__ = "Copyright 2017 Birkbeck, University of London"
__author__ = "Martin Paul Eve & Andy Byers"
__license__ = "AGPL v3"
__maintainer__ = "Birkbeck Centre for Technology and Publishing"
from django.db import models
from django.utils import timezone
from events import logic as event_logic
from utils import setting_handler
class ProofingAssignment(models.Model):
article = models.OneToOneField('submission.Article')
proofing_manager = models.ForeignKey('core.Account', null=True, on_delete=models.SET_NULL)
editor = models.ForeignKey('core.Account', null=True, related_name='proofing_editor')
assigned = models.DateTimeField(default=timezone.now)
notified = models.BooleanField(default=False)
completed = models.DateTimeField(blank=True, null=True)
class Meta:
unique_together = ('article', 'proofing_manager')
@property
def current_proofing_round_number(self):
try:
return self.proofinground_set.all().order_by('-number')[0].number
except IndexError:
return 0
def current_proofing_round(self):
try:
return self.proofinground_set.all().order_by('-number')[0]
except IndexError:
return None
def add_new_proofing_round(self):
new_round_number = self.current_proofing_round_number + 1
return ProofingRound.objects.create(assignment=self,
number=new_round_number)
def user_is_manager(self, user):
if user == self.proofing_manager:
return True
return False
def __str__(self):
return 'Proofing Assignment {pk}'.format(pk=self.pk)
class ProofingRound(models.Model):
assignment = models.ForeignKey(ProofingAssignment)
number = models.PositiveIntegerField(default=1)
date_started = models.DateTimeField(default=timezone.now)
class Meta:
ordering = ('-number',)
def __str__(self):
return "Round #{0} for Article {1}".format(self.number, self.assignment.article.title)
@property
def has_active_tasks(self):
if self.proofingtask_set.filter(completed__isnull=True):
return True
else:
return False
@property
def active_proofreaders(self):
return [task.proofreader for task in self.proofingtask_set.all()]
@property
def typeset_tasks(self):
typeset_tasks = list()
for p_task in self.proofingtask_set.all():
for t_task in p_task.typesetterproofingtask_set.all():
typeset_tasks.append(t_task)
return typeset_tasks
def delete_round_relations(self, request, article, tasks, corrections):
for task in tasks:
if not task.completed:
kwargs = {
'article': article,
'proofing_task': task,
'request': request,
}
event_logic.Events.raise_event(
event_logic.Events.ON_CANCEL_PROOFING_TASK,
task_object=article,
**kwargs,
)
task.delete()
for correction in corrections:
if not correction.completed and not correction.cancelled:
kwargs = {
'article': article,
'correction': correction,
'request': request,
}
event_logic.Events.raise_event(
event_logic.Events.ON_CORRECTIONS_CANCELLED,
task_object=article,
**kwargs,
)
correction.delete()
    def can_add_another_proofreader(self, journal):
"""
Checks if this round can have another proofreader.
:param journal: Journal object
:return: Boolean, True or False
"""
limit = setting_handler.get_setting(
'general',
'max_proofreaders',
journal,
).processed_value
if not limit == 0:
current_num_proofers = ProofingTask.objects.filter(
round=self,
            ).count()
if current_num_proofers >= limit:
return False
return True
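    # Editor's note: a limit of 0 is treated as "unlimited" here -- the count
    # query only runs when max_proofreaders is non-zero, so e.g. limit == 3
    # with three ProofingTask rows already on this round returns False.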
class ProofingTask(models.Model):
round = models.ForeignKey(ProofingRound)
proofreader = models.ForeignKey('core.Account', null=True, on_delete=models.SET_NULL)
assigned = models.DateTimeField(default=timezone.now)
notified = models.BooleanField(default=False)
due = models.DateTimeField(default=None, verbose_name="Date Due")
accepted = models.DateTimeField(blank=True, null=True)
completed = models.DateTimeField(blank=True, null=True)
cancelled = models.BooleanField(default=False)
acknowledged = models.DateTimeField(blank=True, null=True)
task = models.TextField(verbose_name="Proofing Task")
galleys_for_proofing = models.ManyToManyField('core.Galley')
proofed_files = models.ManyToManyField('core.File')
notes = models.ManyToManyField('proofing.Note')
def __str__(self):
return "{0} proofing {1} in round {2}".format(self.proofreader.full_name(),
self.round.assignment.article.title,
self.round.number)
@property
def assignment(self):
return self.round.assignment
def typesetter_tasks(self):
return self.typesetterproofingtask_set.all()
def status(self):
if self.cancelled:
return {'slug': 'cancelled', 'friendly': 'Task cancelled'}
elif self.assigned and not self.accepted and not self.completed:
return {'slug': 'assigned', 'friendly': 'Awaiting response'}
elif self.assigned and self.accepted and not self.completed:
return {'slug': 'accepted', 'friendly': 'Task accepted, underway'}
elif self.assigned and not self.accepted and self.completed:
return {'slug': 'declined', 'friendly': 'Task declined'}
elif self.completed:
return {'slug': 'completed', 'friendly': 'Task completed'}
def galley_files(self):
return [galley.file for galley in self.galleys_for_proofing.all()]
def actor(self):
return self.proofreader
def review_comments(self):
comment_text = ''
for note in self.notes.all().order_by('galley'):
comment_text = comment_text + "Comment by: {0} for Galley {1}<br>{2}<br>".format(note.creator.full_name(),
note.galley,
note.text)
return comment_text
def reset(self):
self.completed = None
self.cancelled = False
self.accepted = None
self.save()
class TypesetterProofingTask(models.Model):
proofing_task = models.ForeignKey(ProofingTask)
typesetter = models.ForeignKey('core.Account', null=True, on_delete=models.SET_NULL)
assigned = models.DateTimeField(default=timezone.now)
notified = models.BooleanField(default=False)
due = models.DateTimeField(blank=True, null=True)
accepted = models.DateTimeField(blank=True, null=True)
completed = models.DateTimeField(blank=True, null=True)
cancelled = models.BooleanField(default=False)
acknowledged = models.DateTimeField(blank=True, null=True)
task = models.TextField(verbose_name="Typesetter Task")
galleys = models.ManyToManyField('core.Galley')
files = models.ManyToManyField('core.File')
notes = models.TextField(verbose_name="Correction Note", blank=True, null=True)
class Meta:
verbose_name = 'Correction Task'
def __str__(self):
return "Correction Task Proof ID: {0}, Proofreader {1}, Due: {2}".format(self.proofing_task.pk,
self.typesetter.full_name(),
self.due)
def status(s
|
IBMStreams/streamsx.health
|
samples/HealthcareJupyterDemo/package/healthdemo/utils.py
|
Python
|
apache-2.0
| 2,562
| 0.015613
|
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016, 2017
class DataAlreadyExistsError(RuntimeError):
def __init__(self, label):
self.message = str("Data with label '%s' already exists and cannot be added" % (label))
def get_patient_id(d):
return d['patient']['identifier']
def get_index_by_label(d, label):
for idx in range(len(d['data'])):
if d['data'][idx]['label'] == label:
return idx
return None
def get_sampled_data_values(d, label):
idx = get_index_by_label(d, label)
return d['data'][idx]['valueSampledData']['values']
def get_coordinate_data_values(d, label):
idx = get_index_by_label(d, label)
return d['data'][idx]['valueCoordinateData']['values']
def get_period_value(d, label):
idx = get_index_by_label(d, label)
return d['data'][idx]['valueSampledData']['period']['value']
def get_sampled_data_unit(d, label):
idx = get_index_by_label(d, label)
return d['data'][idx]['valueSampledData']['unit']
def get_period_unit(d, label):
idx = get_index_by_label(d, label)
return d['data'][idx]['valueSampledData']['period']['unit']
def get_gain(d, label):
idx = get_index_by_label(d, label)
return d['data'][idx]['valueSampledData']['gain']
def get_initValue(d, label):
idx = get_index_by_label(d, label)
return d['data'][idx]['valueSampledData']['initVal']
def get_patient_ID(d):
return d['patient']['identifier']
def add_sampled_data(d, label, sampled_data, period_value, period_unit, update_if_exists=False):
# check if label already exists
data_idx = get_index_by_label(d, label)
if data_idx is not None:
if update_if_exists == True:
            v = {'label' : label, 'valueSampledData' : { 'values' : sampled_data, 'period' : { 'value' : period_value, 'unit' : period_unit }}}
d['data'][data_idx] = v
else:
raise DataAlreadyExistsError(label=label)
else:
        v = {'label' : label, 'valueSampledData' : { 'values' : sampled_data, 'period' : { 'value' : period_value, 'unit' : period_unit }}}
d['data'].append(v)
def add_coordinate_data(d, label, coords, replace_if_exists=False):
data_idx = get_index_by_label(d, label)
if data_idx is not None:
if replace_if_exists == True:
            v = {'label' : label, 'valueCoordinateData' : {'values' : coords}}
d['data'][data_idx] = v
else:
            raise DataAlreadyExistsError(label=label)
else:
        v = {'label' : label, 'valueCoordinateData' : {'values' : coords}}
d['data'].append(v)
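# Editor's usage sketch: the minimal dict shape these helpers expect, with
# made-up sample values (the field names come from the getters above).
if __name__ == '__main__':
    d = {'patient': {'identifier': 'patient-001'}, 'data': []}
    add_sampled_data(d, 'ECG Lead II', [0.10, 0.25, 0.15], 8, 'ms')
    assert get_patient_id(d) == 'patient-001'
    assert get_sampled_data_values(d, 'ECG Lead II') == [0.10, 0.25, 0.15]
    assert get_period_value(d, 'ECG Lead II') == 8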
|
liber118/pyFRED
|
src/fred_rules.py
|
Python
|
apache-2.0
| 11,958
| 0.005101
|
#!/usr/bin/env python
# encoding: utf-8
## Python impl of JFRED, developed by Robby Garner and Paco Nathan
## See: http://www.robitron.com/JFRED.php
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import fred_fuzzy
import random
import re
import sys
######################################################################
## rule classes
######################################################################
class ParseError (Exception):
def __init__ (self, value):
self.value = value
def __str__ (self):
return repr(self.value)
class Rules (object):
def __init__ (self, lang, rule_dict, first_action, fuzzy_dict):
self.lang = lang
self.rule_dict = rule_dict
self.first_action = first_action
# 1. create an inverted index for the fuzzy sets
self.fuzzy_sets = {}
for (name, r) in fuzzy_dict.items():
self.fuzzy_sets[name] = map(lambda x: (self.rule_dict[r.members[x]], r.weights[x]), range(0, len(r.members)))
# 2. randomly shuffle the order of responses within all the
# action rules, and establish priority rankings (later)
self.action_rules = [r for r in self.rule_dict.values() if isinstance(r, ActionRule)]
# 3. randomly shuffle the intro rule(s)
self.intro_rules = [r for r in self.rule_dict.values() if isinstance(r, IntroRule)]
# 4. create an inverted index for the regex phrases
self.regex_phrases = {}
for r in self.rule_dict.values():
if isinstance(r, RegexRule):
try:
invoked = set(map(lambda x: self.rule_dict[x], r.invokes.split(" ")))
for phrase in r.vector:
phrase_tuple = tuple(self.lang.parse(phrase))
self.regex_phrases[phrase_tuple] = invoked
except KeyError, e:
print "ERROR: references unknown action rule", e
sys.exit(1)
def choose_first (self):
return self.first_action.fire()
@staticmethod
def find_sublist (sub, bigger):
# kudos to nosklo
# http://stackoverflow.com/questions/2250633/python-find-a-list-within-members-of-another-listin-order
if not bigger:
return -1
if not sub:
return 0
first, remainder = sub[0], sub[1:]
pos = 0
try:
while True:
pos = bigger.index(first, pos) + 1
if not remainder or bigger[pos:pos+len(remainder)] == remainder:
return pos
except ValueError:
return -1
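    # Editor's note: find_sublist returns the index just *past* the first
    # matched element (bigger.index(first) + 1), not the 0-based start of the
    # match; e.g. Rules.find_sublist([2, 3], [1, 2, 3, 4]) == 2, an empty sub
    # yields 0, and no match yields -1. choose_rule() below only tests >= 0.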
def choose_rule (self, utterance):
stimulus = self.lang.parse(utterance)
fuzzy_union = fred_fuzzy.FuzzyUnion()
# 1. select an optional introduction (p <= 0.03)
response = ""
        if random.random() < 0.03:
            response = random.choice(self.intro_rules).fire()
# 2. "Fred.chooseReply()"
# based on key words from the input stream
# 2.1 regex matches => invoked action rules r=200
for (phrase, rules) in self.regex_phrases.items():
if Rules.find_sublist(phrase, stimulus) >= 0:
for rule in rules:
fuzzy_union.add_rule(rule, 2.0)
# 2.2 fuzzy rules => invoked action rules
for (fuzzy_term, members) in self.fuzzy_sets.items():
if fuzzy_term in stimulus:
for rule, weight in members:
fuzzy_union.add_rule(rule, weight)
# 2.3 action rules r=100
if fuzzy_union.is_empty():
for rule in self.action_rules:
if rule.repeat or rule.count < 1:
fuzzy_union.add_rule(rule, 1.0)
# select an action rule to use for a response template
selected_rule, weight = fuzzy_union.select_rule()
response_template = selected_rule.fire()
# 3. test for "bind" points in the selected response template
if selected_rule.bind and response_template.find("[]") > 0:
pos = stimulus.index(selected_rule.bind) + 1
fragment = stimulus[pos:]
# 3.1 invert the verb tense, possessives, contractions, negations...
# NB: some kind of context-free grammar might work better here
replacement = " ".join(self.lang.invert(fragment))
response_template = response_template.replace("[]", replacement)
response += response_template
# 4. decide whether the current query differs from the
# previous one...
# 5. "Fred.logChat()" keep track of what's been said
return response, selected_rule, weight
class Rule (object):
rule_pat = re.compile("(\S+)\:\s+(\S+)")
def __init__ (self):
self.name = None
self.vector = None
self.count = 0
def parse (self, name, vector, attrib):
self.name = name.lower()
self.vector = vector
return self
def fire (self):
self.count += 1
return random.choice(self.vector)
@staticmethod
def parse_lines (rule_lines):
"""
parse the raw text lines for one JFRED rule
"""
first_line = rule_lines.pop(0)
m = Rule.rule_pat.match(first_line)
if not m:
raise ParseError("unrecognized rule format: " + first_line)
(kind, name) = m.group(1).lower().strip(), m.group(2).lower().strip()
if not kind in ["intro", "action", "response", "regex", "fuzzy"]:
raise ParseError("bad rule type: " + kind)
vector = []
attrib = {}
for line in rule_lines:
m = Rule.rule_pat.match(line)
if m:
(elem, value) = m.group(1).lower().strip(), m.group(2).strip()
if not elem in ["priority", "requires", "equals", "bind", "invokes", "url", "next", "repeat", "expect"]:
raise ParseError("bad rule elem: " + elem)
else:
attrib[elem] = value
else:
vector.append(line)
rule = None
if kind == "intro":
rule = IntroRule().parse(name, vector, attrib)
elif kind == "action":
rule = ActionRule().parse(name, vector, attrib)
elif kind == "response":
rule = ResponseRule().parse(name, vector, attrib)
elif kind == "regex":
rule = RegexRule().parse(name, vector, attrib)
elif kind == "fuzzy":
rule = FuzzyRule().parse(name, vector, attrib)
return rule
@staticmethod
def parse_file (lang, filename):
"""
read a JFRED rule file, return a Rules object
"""
rule_dict = {}
first_action = None
fuzzy_dict = {}
with open(filename, "r") as f:
rule_lines = []
for line in f:
line = line.strip()
if line.startswith("#"):
pass
elif len(line) == 0:
if len(rule_lines) > 0:
try:
rule = Rule.parse_lines(rule_lines)
except ParseError:
print "ERROR: cannot parse rule description", rule_lines
sys.exit(1)
else:
if isinstance(rule, FuzzyRule):
fuzzy_dict[rule.name] = rule
else:
rule_dict[rule.name] = rule
|
zyoohv/zyoohv.github.io
|
code_repository/tencent_ad_contest/tencent_contest/model/main.py
|
Python
|
mit
| 1,329
| 0.000752
|
#! /usr/bin/python3
def main():
try:
while True:
line1 = input().strip().split(' ')
n = int(line1[0])
name_list = []
num_list = [0]
for i in range(1, len(line1)):
if i % 2 == 1:
name_list.append(line1[i])
else:
num_list.append(int(line1[i]))
            ans = [0 for _ in range(len(num_list))]
m = int(input())
for i in range(len(num_list) - 1, 0, -1):
ans[i] = m % num_list[i]
m = int(m / num_list[i])
ans[0] = m
add = 0
if ans[1] * 2 >= num_list[1]:
add = 1
print("{} {}".format(ans[0] + add, name_list[0]))
add = 0
if n > 2 and ans[2] * 2 >= num_list[2]:
add = 1
if ans[1] + add >= num_list[1]:
print("{} {} {} {}".format(ans[0] + 1, name_list[0], 0,
name_list[1]))
else:
print("{} {} {} {}".format(ans[0], name_list[0], ans[1] +
add, name_list[1]))
except EOFError:
pass
if __name__ == '__main__':
main()
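# Editor's note -- the inner loop of main() is a mixed-radix decomposition.
# A standalone sketch with hypothetical bases (hours/minutes/seconds),
# mirroring the code above where bases[0] is effectively unbounded:
def mixed_radix(m, bases):
    digits = [0] * len(bases)
    for i in range(len(bases) - 1, 0, -1):
        digits[i] = m % bases[i]
        m = int(m / bases[i])
    digits[0] = m
    return digits
assert mixed_radix(3725, [0, 60, 60]) == [1, 2, 5]  # 3725 s == 1 h 2 min 5 s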
|
Royal-Society-of-New-Zealand/NZ-ORCID-Hub
|
orcid_api/models/contributor_orcid.py
|
Python
|
mit
| 3,922
| 0.00051
|
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ContributorOrcid(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, uri=None, path=None, host=None):
"""
ContributorOrcid - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'uri': 'str',
'path': 'str',
'host': 'str'
}
self.attribute_map = {
'uri': 'uri',
'path': 'path',
'host': 'host'
}
self._uri = uri
self._path = path
self._host = host
@property
def uri(self):
"""
Gets the uri of this ContributorOrcid.
:return: The uri of this ContributorOrcid.
:rtype: str
"""
return self._uri
@uri.setter
def uri(self, uri):
"""
Sets the uri of this ContributorOrcid.
:param uri: The uri of this ContributorOrcid.
:type: str
"""
self._uri = uri
@property
def path(self):
"""
Gets the path of this ContributorOrcid.
:return: The path of this ContributorOrcid.
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""
Sets the path of this ContributorOrcid.
:param path: The path of this ContributorOrcid.
:type: str
"""
self._path = path
@property
def host(self):
"""
Gets the host of this ContributorOrcid.
:return: The host of this ContributorOrcid.
:rtype: str
"""
return self._host
@host.setter
def host(self, host):
"""
Sets the host of this ContributorOrcid.
:param host: The host of this ContributorOrcid.
:type: str
"""
self._host = host
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
    def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, ContributorOrcid):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
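# Editor's usage sketch (the ORCID iD below is the well-known example iD):
if __name__ == '__main__':
    orcid = ContributorOrcid(uri='https://orcid.org/0000-0002-1825-0097',
                             path='0000-0002-1825-0097', host='orcid.org')
    assert orcid.to_dict() == {'uri': 'https://orcid.org/0000-0002-1825-0097',
                               'path': '0000-0002-1825-0097',
                               'host': 'orcid.org'}
    # Round-trip: to_dict() output can rebuild an equal model instance.
    assert orcid == ContributorOrcid(**orcid.to_dict())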
|
obmarg/toolz
|
toolz/itertoolz/recipes.py
|
Python
|
bsd-3-clause
| 1,295
| 0
|
import itertools
from .core import frequencies
from ..compatibility import map
def countby(func, seq):
""" Count elements of a collection by a key function
>>> countby(len, ['cat', 'mouse', 'dog'])
{3: 2, 5: 1}
>>> def iseven(x): return x % 2 == 0
>>> countby(iseven, [1, 2, 3]) # doctest:+SKIP
{True: 1, False: 2}
See Also:
        groupby
"""
return frequencies(map(func, seq))
def partitionby(func, seq):
""" Partition a sequence according to a function
    Partition `seq` into a sequence of tuples such that, when traversing
    `seq`, every time the output of `func` changes a new tuple is started
    and that item and all subsequent items are collected into it.
>>> is_space = lambda c: c == " "
>>> list(partitionby(is_space, "I have space"))
[('I',), (' ',), ('h', 'a', 'v', 'e'), (' ',), ('s', 'p', 'a', 'c', 'e')]
>>> is_large = lambda x: x > 10
>>> list(partitionby(is_large, [1, 2, 1, 99, 88, 33, 99, -1, 5]))
[(1, 2, 1), (99, 88, 33, 99), (-1, 5)]
See also:
partition
groupby
itertools.groupby
"""
    # Note: applying `tuple` seems to be required to stay both Python 2 and 3
    # compatible (Python 3 works without it).
return (tuple(v) for k, v in itertools.groupby(seq, key=func))
|
cheungpat/sqlalchemy-utils
|
sqlalchemy_utils/primitives/weekdays.py
|
Python
|
bsd-3-clause
| 1,866
| 0
|
import six
from sqlalchemy_utils.utils import str_coercible
from .weekday import WeekDay
@str_coercible
class WeekDays(object):
def __init__(self, bit_string_or_week_days):
if isinstance(bit_string_or_week_days, six.string_types):
self._days = set()
if len(bit_string_or_week_days) != WeekDay.NUM_WEEK_DAYS:
raise ValueError(
                    'Bit string must be {0} characters long.'.format(
WeekDay.NUM_WEEK_DAYS
)
)
for index, bit in enumerate(bit_string_or_week_days):
if bit not in '01':
raise ValueError(
'Bit string may only contain zeroes and ones.'
)
if bit == '1':
self._days.add(WeekDay(index))
elif isinstance(bit_string_or_week_days, WeekDays):
self._days = bit_string_or_week_days._days
else:
self._days = set(bit_string_or_week_days)
def __eq__(self, other):
if isinstance(other, WeekDays):
return self._days == other._days
elif isinstance(other, six.string_types):
return self.as_bit_string() == other
else:
return NotImplemented
def __iter__(self):
for day in sorted(self._days):
yield day
def __contains__(self, value):
return value in self._days
def __repr__(self):
return '%s(%r)' % (
self.__class__.__name__,
self.as_bit_string()
)
def __unicode__(self):
return u', '.join(six.text_type(day) for day in self)
def as_bit_string(self):
return ''.join(
'1' if WeekDay(index) in self._days else '0'
for index in six.moves.xrange(WeekDay.NUM_WEEK_DAYS)
)
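# Editor's usage sketch; assumes WeekDay.NUM_WEEK_DAYS == 7 with WeekDay(0)
# as the first day of the week, as the bit-string logic above implies.
if __name__ == '__main__':
    working_days = WeekDays('1111100')
    assert WeekDay(0) in working_days
    assert WeekDay(6) not in working_days
    assert working_days.as_bit_string() == '1111100'
    assert working_days == WeekDays(working_days)   # WeekDays-copy branch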
|
joefutrelle/pocean-core
|
pocean/tests/dsg/trajectoryProfile/test_trajectoryProfile_cr.py
|
Python
|
mit
| 4,960
| 0.000605
|
import os
import unittest
from dateutil.parser import parse as dtparse
import numpy as np
from pocean.dsg import ContiguousRaggedTrajectoryProfile
import logging
from pocean import logger
logger.level = logging.INFO
logger.handlers = [logging.StreamHandler()]
class TestContinousRaggedTrajectoryProfile(unittest.TestCase):
def setUp(self):
self.single = os.path.join(os.path.dirname(__file__), 'resources', 'cr-single.nc')
self.multi = os.path.join(os.path.dirname(__file__), 'resources', 'cr-multiple.nc')
self.missing_time = os.path.join(os.path.dirname(__file__), 'resources', 'cr-missing-time.nc')
def test_crtp_load(self):
ContiguousRaggedTrajectoryProfile(self.single).close()
ContiguousRaggedTrajectoryProfile(self.multi).close()
        ContiguousRaggedTrajectoryProfile(self.missing_time).close()
def test_crtp_dataframe(self):
with ContiguousRaggedTrajectoryProfile(self.single) as s:
s.to_dataframe()
with ContiguousRaggedTrajectoryProfile(self.multi) as m:
m.to_dataframe()
with ContiguousRaggedTrajectoryProfile(self.missing_time) as t:
t.to_dataframe()
def test_crtp_calculated_metadata(self):
with ContiguousRaggedTrajectoryProfile(self.single) as st:
s = st.calculated_metadata()
assert s.min_t == dtparse('2014-11-25 18:57:30')
assert s.max_t == dtparse('2014-11-27 07:10:30')
assert len(s.trajectories) == 1
traj = s.trajectories["sp025-20141125T1730"]
assert traj.min_z == 0
assert np.isclose(traj.max_z, 504.37827)
assert traj.min_t == dtparse('2014-11-25 18:57:30')
assert traj.max_t == dtparse('2014-11-27 07:10:30')
assert np.isclose(traj.first_loc.x, -119.79025)
assert np.isclose(traj.first_loc.y, 34.30818)
assert len(traj.profiles) == 17
with ContiguousRaggedTrajectoryProfile(self.multi) as mt:
m = mt.calculated_metadata()
assert m.min_t == dtparse('1990-01-01 00:00:00')
assert m.max_t == dtparse('1990-01-03 02:00:00')
assert len(m.trajectories) == 5
# First trajectory
traj0 = m.trajectories[0]
assert traj0.min_z == 0
assert traj0.max_z == 43
assert traj0.min_t == dtparse('1990-01-02 05:00:00')
assert traj0.max_t == dtparse('1990-01-03 01:00:00')
assert traj0.first_loc.x == -60
assert traj0.first_loc.y == 53
assert len(traj0.profiles) == 4
assert traj0.profiles[0].t == dtparse('1990-01-03 01:00:00')
assert traj0.profiles[0].x == -60
assert traj0.profiles[0].y == 49
# Last trajectory
traj4 = m.trajectories[4]
assert traj4.min_z == 0
assert traj4.max_z == 38
assert traj4.min_t == dtparse('1990-01-02 14:00:00')
assert traj4.max_t == dtparse('1990-01-02 15:00:00')
assert traj4.first_loc.x == -67
assert traj4.first_loc.y == 47
assert len(traj4.profiles) == 4
assert traj4.profiles[19].t == dtparse('1990-01-02 14:00:00')
assert traj4.profiles[19].x == -44
assert traj4.profiles[19].y == 47
with ContiguousRaggedTrajectoryProfile(self.missing_time) as mmt:
t = mmt.calculated_metadata()
assert t.min_t == dtparse('2014-11-16 21:32:29.952500')
assert t.max_t == dtparse('2014-11-17 07:59:08.398500')
assert len(t.trajectories) == 1
traj = t.trajectories["UW157-20141116T211809"]
assert np.isclose(traj.min_z, 0.47928014)
assert np.isclose(traj.max_z, 529.68005)
assert traj.min_t == dtparse('2014-11-16 21:32:29.952500')
assert traj.max_t == dtparse('2014-11-17 07:59:08.398500')
assert np.isclose(traj.first_loc.x, -124.681526638573)
assert np.isclose(traj.first_loc.y, 43.5022166666667)
assert len(traj.profiles) == 13
def test_just_missing_time(self):
with ContiguousRaggedTrajectoryProfile(self.missing_time) as mmt:
t = mmt.calculated_metadata()
assert t.min_t == dtparse('2014-11-16 21:32:29.952500')
assert t.max_t == dtparse('2014-11-17 07:59:08.398500')
assert len(t.trajectories) == 1
traj = t.trajectories["UW157-20141116T211809"]
assert np.isclose(traj.min_z, 0.47928014)
assert np.isclose(traj.max_z, 529.68005)
assert traj.min_t == dtparse('2014-11-16 21:32:29.952500')
assert traj.max_t == dtparse('2014-11-17 07:59:08.398500')
assert np.isclose(traj.first_loc.x, -124.681526638573)
assert np.isclose(traj.first_loc.y, 43.5022166666667)
assert len(traj.profiles) == 13
|
NemesisRE/ACE3
|
tools/deploy.py
|
Python
|
gpl-2.0
| 1,499
| 0.002668
|
#!/usr/bin/env python3
####################################
# ACE3 automatic deployment script #
# ================================ #
# This is not meant to be run #
# directly! #
####################################
import os
import sys
import shutil
import traceback
import subprocess as sp
from pygithub3 import Github
TRANSLATIONISSUE = 367
TRANSLATIONBODY = """**How to translate ACE3:**
https://github.com/acemod/ACE3/blob/master/documentation/development/how-to-translate-ace3.md
{}
"""
REPOUSER = "acemod"
REPONAME = "ACE3"
REPOPATH = "{}/{}".format(REPOUSER,REPONAME)
USERNAME = "ACE3 Travis"
USEREMAIL = "travis@ace3mod.com"
def update_translations(token):
diag = sp.check_output(["python3", "tools/stringtablediag.py", "--markdown"])
diag = str(diag, "utf-8")
repo = Github(token).get_repo(REPOPATH)
issue = repo.get_issue(TRANSLATIONISSUE)
issue.edit(body=TRANSLATIONBODY.format(diag))
def main():
print("Obtaining token ...")
try:
token = os.environ["GH_TOKEN"]
except:
print("Could not obtain token.")
print(traceback.format_exc())
return 1
else:
print("done.")
print("\nUpdating translation issue ...")
try:
update_translations(token)
except:
print("Failed to update translation issue.")
print(traceback.format_exc())
return 1
else:
print("done.")
return 0
if __name__ == "__main__":
sys.exit(main())
|
kottenator/django-compressor-toolkit
|
tests/integration_tests/test_views.py
|
Python
|
mit
| 3,738
| 0.002943
|
import re
from django.core.urlresolvers import reverse
def test_view_with_scss_file(client, precompiled):
"""
Test view that renders *SCSS file* that *imports SCSS file from another Django app*.
:param client: ``pytest-django`` fixture: Django test client
:param precompiled: custom fixture that asserts pre-compiled content
"""
response = client.get(reverse('scss-file'))
assert response.status_code == 200
assert precompiled('app/layout.scss', 'css').strip() == \
'.title {\n font: bold 30px Arial, sans-serif;\n}'
def test_view_with_inline_scss(client):
"""
Test view that renders *inline SCSS* that *imports SCSS file from another Django app*.
:param client: ``pytest-django`` fixture: Django test client
"""
response = client.get(reverse('scss-inline'))
assert response.status_code == 200
assert re.search(
r'<style type="text/css">.title \{\n\s*font: bold 30px Arial, sans-serif;\n\}\s*</style>',
response.content.decode('utf8')
)
def test_view_with_es6_file(client, precompiled):
"""
    Test view that renders *ES6 file* into *ES5 file*.
:param client: ``pytest-django`` fixture: Django test client
:param precompiled: custom fixture that asserts pre-compiled content
"""
response = client.get(reverse('es6-file'))
assert response.status_code == 200
assert precompiled('app/scripts.js', 'js') == (
'(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=='
'"function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f='
'new Error("Cannot find module \'"+o+"\'");throw f.code="MODULE_NOT_FOUND",f}'
'var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];'
'return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=='
'"function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:['
'function(require,module,exports){\n'
'\'use strict\';\n'
'\n'
'var _framework = require(\'base/framework\');\n'
'\n'
'var _framework2 = _interopRequireDefault(_framework);\n'
'\n'
'function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : '
'{ default: obj }; }\n'
'\n'
'new _framework2.default();\n'
'new _framework2.default(\'1.0.1\');\n'
'\n'
'},{"base/framework":2}],2:[function(require,module,exports){\n'
'\'use strict\';\n'
'\n'
'Object.defineProperty(exports, "__esModule", {\n'
' value: true\n'
'});\n'
'\n'
'function _classCallCheck(instance, Constructor) {'
' if (!(instance instanceof Constructor)) {'
' throw new TypeError("Cannot call a class as a function"); } }\n'
'\n'
'var version = exports.version = \'1.0\';\n'
'\n'
'var _class = function _class(customVersion) {\n'
' _classCallCheck(this, _class);\n'
'\n'
' console.log(\'Framework v\' + (customVersion || version) + \' initialized\');\n'
'};\n'
'\n'
'exports.default = _class;\n'
'\n'
'},{}]},{},[1]);\n'
)
def test_view_with_inline_es6(client):
"""
Test view that renders *inline ES6* into *inline ES5*.
:param client: ``pytest-django`` fixture: Django test client
"""
response = client.get(reverse('es6-inline'))
assert response.status_code == 200
assert b'"use strict";\n' \
b'\n' \
b'var square = function square(x) {\n' \
b' return x * x;\n' \
b'};\n'\
b'console.log("Square of 2:", square(2));' in response.content
|
amahabal/PySeqsee
|
farg/core/ui/gui/__init__.py
|
Python
|
gpl-3.0
| 7,857
| 0.005982
|
# Copyright (C) 2011, 2012 Abhijit Mahabal
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this
# program. If not, see <http://www.gnu.org/licenses/>
"""Defines the base GUI for the GUI run-mode."""
import logging
import threading
from tkinter import Button, Frame, Label, StringVar, Tk
from tkinter.constants import LEFT
from tkinter.messagebox import askyesno, showinfo
from farg.core.exceptions import AnswerFoundException
from farg.core.ltm.manager import LTMManager
from farg.core.question.question import BooleanQuestion
from farg.core.ui.gui.central_pane import CentralPane
import farg.flags as farg_flags
class RunForNSteps(threading.Thread):
"""Runs controller for up
|
to n steps.
  This does not update the GUI directly; it does, however, change the state
  of the "controller" attribute that it holds. The controller is shared with
  the GUI, which uses it to update itself.
Before each step, checks that we have not been asked to pause.
"""
def __init__(self, *, controller, gui, num_steps=1000):
threading.Thread.__init__(self)
#: Controller for the whole app.
self.controller = controller
#: Number of steps taken so far.
self.num_steps = num_steps
#: GUI being displayed. We need this to communicate some state (such as "we have found
#: an answer and can now quit.").
self.gui = gui
def run(self): # Name stipulated by Thread. pylint: disable=C0103
try:
self.controller.RunUptoNSteps(self.num_steps)
except AnswerFoundException:
# We should exit.
self.gui.quitting_called_from_thread = True
return
class GUI:
"""Base-class of GUI for an application.
Provides a :py:mod:`tkinter` based interface to display various components
such as the workspace,
and for interacting with the user (such as asking questions).
**Supported Views**
The central part of the window---everything except the row of buttons at the
top---is controlled by
an instance of the class
:py:class:`~farg.core.ui.gui.central_pane.CentralPane` (see which for
further details).The top-left corner of the window allows switching between
different views.
**Key Bindings**
The UI allows running the app at various speeds---full steam ahead,
step-by-step, or with long
strides. These keyboard bindings are provided:
* 'q' for Quit
* 'c' for Continue (full-steam ahead!)
* 'p' for Pause while running
* 's' for Step (run one codelet)
* 'l' for taking a 10-codelet stride
* 'k' for taking a 100-codelet stride.
"""
#: Size and location of the window.
geometry = '1280x980-0+0' # Not a const. pylint: disable=C6409
#: Class handling the central part of the display.
central_pane_class = CentralPane # Not a const. pylint: disable=C6409
def __init__(self, *, controller_class, stopping_condition_fn=None):
self.run_state_lock = threading.Lock()
self.pause_stepping = False
self.quitting = False
self.quitting_called_from_thread = False
self.stepping_thread = None
#: Button pane.
self.buttons_pane = None # Set up later.
#: Central pane (a canvas).
self.central_pane = None # Set up later.
#: A Tk variable tracking codelet count.
self.codelet_count_var = None # Set up later.
self.controller = controller_class(
ui=self, controller_depth=0, stopping_condition=stopping_condition_fn)
self.mw = mw = Tk()
# mw.geometry(self.geometry)
self.mw.bind('<KeyPress-q>', lambda e: self.Quit())
self.mw.bind('<KeyPress-s>', lambda e: self.StepsInAnotherThread(1))
self.mw.bind('<KeyPress-l>', lambda e: self.StepsInAnotherThread(10))
self.mw.bind('<KeyPress-k>', lambda e: self.StepsInAnotherThread(100))
self.mw.bind('<KeyPress-c>', lambda e: self.StartThreaded())
self.mw.bind('<KeyPress-p>', lambda e: self.Pause())
self.items_to_refresh = []
self.SetupWindows()
self.RegisterQuestionHandlers()
def UpdateDisplay(self):
"""Refresh the display. Erases everything and draws it again."""
if self.quitting_called_from_thread:
self.Quit()
for item in self.items_to_refresh:
try:
item.ReDraw()
except RuntimeError as error:
# This may occur because the object being updated may have changed. Log a warning
# and continue.
logging.warn('Runtime error while updating: %s', error)
self.codelet_count_var.set('%d' % self.controller.steps_taken)
def SetupWindows(self):
"""Sets up frames in the GUI."""
self.buttons_pane = Frame(self.mw)
self.PopulateButtonPane(self.buttons_pane)
self.buttons_pane.grid(row=0, column=0, columnspan=2)
self.PopulateCentralPane()
def StepsInAnotherThread(self, num_steps):
with self.run_state_lock:
if self.quitting:
return
if self.stepping_thread:
if self.stepping_thread.is_alive():
return
else:
self.stepping_thread = None
self.stepping_thread = RunForNSteps(
controller=self.controller, num_steps=num_steps, gui=self)
self.pause_stepping = False
self.stepping_thread.start()
def StartThreaded(self):
self.StepsInAnotherThread(10000)
def Pause(self):
with self.run_state_lock:
self.pause_stepping = True
if self.stepping_thread:
self.stepping_thread.join()
self.stepping_thread = None
def Quit(self):
"""Called when quitting.
Ensures that all threads have exited, and LTMs saved.
"""
with self.run_state_lock:
self.quitting = True
self.pause_stepping = True
self.Pause()
self.mw.quit()
LTMManager.SaveAllOpenLTMS()
def PopulateButtonPane(self, frame):
"""Adds buttons to the top row."""
Button(frame, text='Start', command=self.StartThreaded).pack(side=LEFT)
Button(frame, text='Pause', command=self.Pause).pack(side=LEFT)
Button(frame, text='Quit', command=self.Quit).pack(side=LEFT)
self.codelet_count_var = StringVar()
self.codelet_count_var.set('0')
Label(
frame,
textvariable=self.codelet_count_var,
font=('Helvetica', 28, 'bold')).pack(side=LEFT)
def PopulateCentralPane(self):
"""Sets up the display in the central part.
If an item must be refreshed, add it to items_to_refresh.
"""
height = farg_flags.FargFlags.gui_canvas_height
width = farg_flags.FargFlags.gui_canvas_width
canvas = self.central_pane_class(
self.mw,
self.controller,
height=int(height),
width=int(width),
background='#EEFFFF')
canvas.grid(row=1, column=0)
self.central_pane = canvas
self.items_to_refresh.append(canvas)
canvas.ReDraw()
def PopulateInteractionPane(self):
"""Sets up the interaction pane at the bottom."""
pass
def AskQuestion(self, question):
"""Asks the question (by delegating to the Ask method of the question)."""
return question.Ask(self)
def RegisterQuestionHandlers(self): # Needs to be a method. pylint: disable=R0201
"""Registers how to ask a given type of question."""
def BooleanQuestionHandler(question, ui): # pylint: disable=W0613
return askyesno('', question.question_string)
BooleanQuestion.Ask = BooleanQuestionHandler
def DisplayMessage(self, message): # Needs to be a method. pylint: disable=R0201
showinfo('', message)
|
opencobra/cobrapy
|
src/cobra/test/test_io/test_annotation_format.py
|
Python
|
gpl-2.0
| 944
| 0
|
from os.path import join
import pytest
from cobra.io import load_json_model, write_sbml_model
def test_load_json_model_valid(data_directory, tmp_path):
"""Test loading a valid annotation from JSON."""
path_to_file = join(data_directory, "valid_annotation_format.json")
    model = load_json_model(path_to_file)
expected = {
"bigg.reaction": [["is", "PFK26"]],
"kegg.reaction": [["is", "R02732"]],
"rhea": [["is", "15656"]],
}
for metabolite in model.metabolites:
assert metabolite.annotation == expected
path_to_output = join(str(tmp_path), "valid_annotation_output.xml")
write_sbml_model(model, path_to_output)
def test_load_json_model_invalid(data_directory):
"""Test that loading an invalid annotation from JSON raises Typ
|
eError"""
path = join(data_directory, "invalid_annotation_format.json")
with pytest.raises(TypeError):
model = load_json_model(path)
|
Tesora-Release/tesora-trove
|
trove/tests/scenario/runners/database_actions_runners.py
|
Python
|
apache-2.0
| 9,910
| 0
|
# Copyright 2015 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from proboscis import SkipTest
from trove.common import exception
from trove.common.utils import poll_until
from trove.tests.scenario.runners.test_runners import TestRunner
from troveclient.compat import exceptions
class DatabaseActionsRunner(TestRunner):
def __init__(self):
super(DatabaseActionsRunner, self).__init__()
self.db_defs = []
@property
def first_db_def(self):
if self.db_defs:
return self.db_defs[0]
raise SkipTest("No valid database definitions provided.")
@property
def non_existing_db_def(self):
db_def = self.test_helper.get_non_existing_database_definition()
if db_def:
return db_def
raise SkipTest("No valid database definitions provided.")
def run_databases_create(self, expected_http_code=202):
databases = self.test_helper.get_valid_database_definitions()
if databases:
self.db_defs = self.assert_databases_create(
self.instance_info.id, databases, expected_http_code)
else:
raise SkipTest("No valid database definitions provided.")
def assert_databases_create(self, instance_id, serial_databases_def,
expected_http_code):
self.auth_client.databases.create(instance_id, serial_databases_def)
self.assert_client_code(expected_http_code)
self._wait_for_database_create(instance_id, serial_databases_def)
return serial_databases_def
def run_databases_list(self, expected_http_code=200):
self.assert_databases_list(
self.instance_info.id, self.db_defs, expected_http_code)
def assert_databases_list(self, instance_id, expected_database_defs,
expected_http_code, limit=2):
full_list = self.auth_client.databases.list(instance_id)
self.assert_client_code(expected_http_code)
listed_databases = {database.name: database for database in full_list}
self.assert_is_none(full_list.next,
"Unexpected pagination in the list.")
for database_def in expected_database_defs:
database_name = database_def['name']
self.assert_true(
database_name in listed_databases,
"Database not included in the 'database-list' output: %s" %
database_name)
# Check that the system (ignored) databases are not included in the
# output.
system_databases = self.get_system_databases()
self.assert_false(
any(name in listed_databases for name in system_databases),
"System databases should not be included in the 'database-list' "
"output.")
# Test list pagination.
list_page = self.auth_client.databases.list(instance_id, limit=limit)
self.assert_client_code(expected_http_code)
self.assert_true(len(list_page) <= limit)
if len(full_list) > limit:
self.assert_is_not_none(list_page.next, "List page is missing.")
else:
self.assert_is_none(list_page.next, "An extra page in the list.")
marker = list_page.next
self.assert_pagination_match(list_page, full_list, 0, limit)
if marker:
last_database = list_page[-1]
expected_marker = last_database.name
self.assert_equal(expected_marker, marker,
"Pagination marker should be the last element "
"in the page.")
list_page = self.auth_client.databases.list(
instance_id, marker=marker)
self.assert_client_code(expected_http_code)
self.assert_pagination_match(
list_page, full_list, limit, len(full_list))
def _wait_for_database_create(self, instance_id, expected_database_defs):
expected_db_names = {db_def['name']
for db_def in expected_database_defs}
self.report.log("Waiting for all created databases to appear in the "
"listing: %s" % expected_db_names)
def _all_exist():
all_dbs = self._get_db_names(instance_id)
return all(db in all_dbs for db in expected_db_names)
try:
poll_until(_all_exist, time_out=self.GUEST_CAST_WAIT_TIMEOUT_SEC)
self.report.log("All databases now exist on the instance.")
except exception.PollTimeOut:
self.fail("Some databases were not created within the poll "
"timeout: %ds" % self.GUEST_CAST_WAIT_TIMEOUT_SEC)
def _get_db_names(self, instance_id):
full_list = self.auth_client.databases.list(instance_id)
return {database.name: database for database in full_list}
def run_database_create_with_no_attributes(
self, expected_exception=exceptions.BadRequest,
expected_http_code=400):
self.assert_databases_create_failure(
self.instance_info.id, {}, expected_exception, expected_http_code)
def run_database_create_with_blank_name(
self, expected_exception=exceptions.BadRequest,
expected_http_code=400):
self.assert_databases_create_failure(
self.instance_info.id, {'name': ''},
expected_exception, expected_http_code)
def run_existing_database_create(
self, expected_exception=exceptions.BadRequest,
expected_http_code=400):
        self.assert_databases_create_failure(
self.instance_info.id, self.first_db_def,
expected_exception, expected_http_code)
def assert_databases_create_failure(
self, instance_id, serial_databases_def,
expected_exception, expected_http_code):
self.assert_raises(
expected_exception,
expected_http_code,
self.auth_client.databases.create,
instance_id,
serial_databases_def)
def run_system_database_create(
self, expected_exception=exceptions.BadRequest,
expected_http_code=400):
# TODO(pmalik): Actions on system users and databases should probably
# return Forbidden 403 instead. The current error messages are
# confusing (talking about a malformed request).
system_databases = self.get_system_databases()
database_defs = [{'name': name} for name in system_databases]
if system_databases:
self.assert_databases_create_failure(
self.instance_info.id, database_defs,
expected_exception, expected_http_code)
def run_database_delete(self, expected_http_code=202):
for database_def in self.db_defs:
self.assert_database_delete(
self.instance_info.id, database_def['name'],
expected_http_code)
def assert_database_delete(
self,
instance_id,
database_name,
expected_http_code):
self.auth_client.databases.delete(instance_id, database_name)
self.assert_client_code(expected_http_code)
self._wait_for_database_delete(instance_id, database_name)
def _wait_for_database_delete(self, instance_id, deleted_database_name):
self.report.log("Waiting for deleted database to disappear from the "
"listing: %s" % deleted_database_name)
def _db_is_gone():
all_dbs = self._get_db_names(instance_id)
            return deleted_database_name not in all_dbs
|
nikesh-mahalka/cinder
|
cinder/volume/drivers/dell/dell_storagecenter_iscsi.py
|
Python
|
apache-2.0
| 7,849
| 0
|
# Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Volume driver for Dell Storage Center."""
from oslo_log import log as logging
from oslo_utils import excutils
from cinder import exception
from cinder.i18n import _, _LE, _LI
from cinder.volume import driver
from cinder.volume.drivers.dell import dell_storagecenter_common
LOG = logging.getLogger(__name__)
class DellStorageCenterISCSIDriver(dell_storagecenter_common.DellCommonDriver,
driver.ISCSIDriver):
"""Implements commands for Dell StorageCenter ISCSI management.
To enable the driver add the following line to the cinder configuration:
volume_driver=cinder.volume.drivers.dell.DellStorageCenterISCSIDriver
Version history:
1.0.0 - Initial driver
1.1.0 - Added extra spec support for Storage Profile selection
1.2.0 - Added consistency group support.
2.0.0 - Switched to inheriting functional objects rather than volume
driver.
2.1.0 - Added support for ManageableVD.
2.2.0 - Driver retype support for switching volume's Storage Profile.
Added API 2.2 support.
2.3.0 - Added Legacy Port Mode Support
2.3.1 - Updated error handling.
"""
VERSION = '2.3.1'
def __init__(self, *args, **kwargs):
super(DellStorageCenterISCSIDriver, self).__init__(*args, **kwargs)
self.backend_name = (
self.configuration.safe_get('volume_backend_name')
or 'Dell-iSCSI')
def initialize_connection(self, volume, connector):
# Initialize_connection will find or create a server identified by the
# connector on the Dell backend. It will then map the volume to it
        # and return the properties as follows:
# {'driver_volume_type': 'iscsi',
# data = {'target_discovered': False,
# 'target_iqn': preferred iqn,
# 'target_iqns': all iqns,
# 'target_portal': preferred portal,
# 'target_portals': all portals,
# 'target_lun': preferred lun,
# 'target_luns': all luns,
# 'access_mode': access_mode
# }
        # We use the volume id as the volume name, since it is a
        # known unique name.
volume_name = volume.get('id')
initiator_name = connector.get('initiator')
multipath = connector.get('multipath', False)
        LOG.info(_LI('initialize_connection: %(vol)s:%(initiator)s'),
{'vol': volume_name,
'initiator': initiator_name})
with self._client.open_connection() as api:
try:
# Find our server.
server = api.find_server(initiator_name)
# No? Create it.
if server is None:
server = api.create_server(initiator_name)
# Find the volume on the storage center.
scvolume = api.find_volume(volume_name)
# if we have a server and a volume lets bring them together.
if server is not None and scvolume is not None:
mapping = api.map_volume(scvolume,
server)
if mapping is not None:
# Since we just mapped our volume we had best update
# our sc volume object.
scvolume = api.find_volume(volume_name)
# Our return.
iscsiprops = {}
ip = None
port = None
if not multipath:
# We want to make sure we point to the specified
# ip address for our target_portal return. This
# isn't an issue with multipath since it should
                        # try all the alternate portals.
ip = self.configuration.iscsi_ip_address
port = self.configuration.iscsi_port
# Three cases that should all be satisfied with the
# same return of Target_Portal and Target_Portals.
# 1. Nova is calling us so we need to return the
# Target_Portal stuff. It should ignore the
# Target_Portals stuff.
# 2. OS brick is calling us in multipath mode so we
# want to return Target_Portals. It will ignore
# the Target_Portal stuff.
# 3. OS brick is calling us in single path mode so
# we want to return Target_Portal and
# Target_Portals as alternates.
iscsiprops = (api.find_iscsi_properties(scvolume,
ip,
port))
# Return our iscsi properties.
return {'driver_volume_type': 'iscsi',
'data': iscsiprops}
# Re-raise any backend exception.
except exception.VolumeBackendAPIException:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Failed to initialize connection'))
                # If there is a data structure issue then detail the exception
# and bail with a Backend Exception.
except Exception as error:
LOG.error(error)
raise exception.VolumeBackendAPIException(error)
# We get here because our mapping is none or we have no valid iqn to
# return so blow up.
raise exception.VolumeBackendAPIException(
_('Unable to map volume'))
def terminate_connection(self, volume, connector, force=False, **kwargs):
# Grab some initial info.
initiator_name = connector.get('initiator')
volume_name = volume.get('id')
LOG.debug('Terminate connection: %(vol)s:%(initiator)s',
{'vol': volume_name,
'initiator': initiator_name})
with self._client.open_connection() as api:
try:
scserver = api.find_server(initiator_name)
# Find the volume on the storage center.
scvolume = api.find_volume(volume_name)
# If we have a server and a volume lets pull them apart.
if (scserver is not None and
scvolume is not None and
api.unmap_volume(scvolume, scserver) is True):
LOG.debug('Connection terminated')
return
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE('Failed to terminate connection '
'%(initiator)s %(vol)s'),
{'initiator': initiator_name,
'vol': volume_name})
raise exception.VolumeBackendAPIException(
_('Terminate connection failed'))
|
rayhu-osu/vcube
|
valet/migrations/0004_auto_20170801_1622.py
|
Python
|
mit
| 418
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-01 20:22
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('valet', '0003_sequence_driver'),
]
operations = [
migrations.RenameModel(
old_name='Sequence',
new_name='StoreSequence',
),
]
| |
acevest/acecode
|
learn/python/try.py
|
Python
|
gpl-2.0
| 456
| 0.013158
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------
# File Name: try.py
# Author: Zhao Yanbai
# Wed Dec 28 21:41:17 2011
# Description: none
# --------------------------------------------------------------------------
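# Python 2 example: input() eval()s whatever is typed, so a bare word such
# as foo raises NameError (caught below), while a quoted string like 'abc'
# survives input() but makes int() raise ValueError.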
try:
s = input("Enter an integer: ")
n = int(s)
print "valid integer entered: ", n
except NameError as nerr:
print nerr
except ValueError as verr:
print verr
|
fnaum/rez
|
src/rez/vendor/attr/_funcs.py
|
Python
|
lgpl-3.0
| 9,725
| 0
|
from __future__ import absolute_import, division, print_function
import copy
from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError
def asdict(
inst,
recurse=True,
filter=None,
dict_factory=dict,
retain_collection_types=False,
):
"""
Return the ``attrs`` attribute values of *inst* as a dict.
Optionally recurse into other ``attrs``-decorated classes.
:param inst: Instance of an ``attrs``-decorated class.
:param bool recurse: Recurse into classes that are also
``attrs``-decorated.
:param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the :class:`attr.Attribute` as the first argument and the
value as the second argument.
:param callable dict_factory: A callable to produce dictionaries from. For
example, to produce ordered dictionaries instead of normal Python
dictionaries, pass in ``collections.OrderedDict``.
:param bool retain_collection_types: Do not convert to ``list`` when
encountering an attribute whose type is ``tuple`` or ``set``. Only
meaningful if ``recurse`` is ``True``.
:rtype: return type of *dict_factory*
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 16.0.0 *dict_factory*
.. versionadded:: 16.1.0 *retain_collection_types*
"""
attrs = fields(inst.__class__)
rv = dict_factory()
for a in attrs:
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
if recurse is True:
if has(v.__class__):
rv[a.name] = asdict(
v, True, filter, dict_factory, retain_collection_types
)
elif isinstance(v, (tuple, list, set)):
cf = v.__class__ if retain_collection_types is True else list
rv[a.name] = cf(
[
_asdict_anything(
i, filter, dict_factory, retain_collection_types
)
for i in v
]
)
elif isinstance(v, dict):
df = dict_factory
rv[a.name] = df(
(
_asdict_anything(
kk, filter, df, retain_collection_types
),
_asdict_anything(
vv, filter, df, retain_collection_types
),
)
for kk, vv in iteritems(v)
)
else:
rv[a.name] = v
else:
rv[a.name] = v
return rv
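# Illustrative usage (hypothetical class, not part of this module): for an
# attrs-decorated class C with fields x and y, asdict(C(x=1, y=[2, 3]))
# returns {'x': 1, 'y': [2, 3]}, recursing into any nested attrs instances.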
def _asdict_anything(val, filter, dict_factory, retain_collection_types):
"""
    ``asdict`` only works on attrs instances; this works on anything.
"""
if getattr(val.__class__, "__attrs_attrs__", None) is not None:
# Attrs class.
rv = asdict(val, True, filter, dict_factory, retain_collection_types)
elif isinstance(val, (tuple, list, set)):
cf = val.__class__ if retain_collection_types is True else list
rv = cf(
[
_asdict_anything(
i, filter, dict_factory, retain_collection_types
)
for i in val
]
)
elif isinstance(val, dict):
df = dict_factory
rv = df(
(
_asdict_anything(kk, filter, df, retain_collection_types),
_asdict_anything(vv, filter, df, retain_collection_types),
)
for kk, vv in iteritems(val)
)
else:
rv = val
return rv
def astuple(
inst,
recurse=True,
filter=None,
tuple_factory=tuple,
retain_collection_types=False,
):
"""
Return the ``attrs`` attribute values of *inst* as a tuple.
Optionally recurse into other ``attrs``-decorated classes.
:param inst: Instance of an ``attrs``-decorated class.
:param bool recurse: Recurse into classes that are also
``attrs``-decorated.
:param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the :class:`attr.Attribute` as the first argument and the
value as the second argument.
:param callable tuple_factory: A callable to produce tuples from. For
example, to produce lists instead of tuples.
:param bool retain_collection_types: Do not convert to ``list``
or ``dict`` when encountering an attribute which type is
``tuple``, ``dict`` or ``set``. Only meaningful if
``recurse`` is
``True``.
:rtype: return type of *tuple_factory*
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
.. versionadded:: 16.2.0
"""
attrs = fields(inst.__class__)
rv = []
retain = retain_collection_types # Very long. :/
for a in attrs:
v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
continue
if recurse is True:
if has(v.__class__):
rv.append(
astuple(
v,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
)
elif isinstance(v, (tuple, list, set)):
cf = v.__class__ if retain is True else list
rv.append(
cf(
[
astuple(
j,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(j.__class__)
else j
for j in v
]
)
)
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
rv.append(
df(
(
astuple(
kk,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(kk.__class__)
else kk,
astuple(
vv,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(vv.__class__)
else vv,
)
for kk, vv in iteritems(v)
)
)
else:
rv.append(v)
else:
rv.append(v)
return rv if tuple_factory is list else tuple_factory(rv)
def has(cls):
"""
Check whether *cls* is a class with ``attrs`` attributes.
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
:rtype: :class:`bool`
"""
return getattr(cls, "__attrs_attrs__", None) is not None
def assoc(inst, **changes):
"""
Copy *inst* and apply *changes*.
:param inst: Instance of a class with ``attrs`` attributes.
:param changes: Keyword changes in the new copy.
:return: A copy of inst with *changes* incorporated.
:raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
be found on *cls*.
:raise attr.exceptions.NotAnAttrsClassError: If
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/veteran_reward/shared_antidecay.py
|
Python
|
mit
| 459
| 0.045752
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/veteran_reward/shared_antidecay.iff"
result.attribute_template_id = -1
result.stfName("item_n","veteran_reward_antidecay")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
jorisvandenbossche/pandas
|
pandas/core/arrays/boolean.py
|
Python
|
bsd-3-clause
| 23,248
| 0.000559
|
from __future__ import annotations
import numbers
from typing import (
TYPE_CHECKING,
overload,
)
import warnings
import numpy as np
from pandas._libs import (
lib,
missing as libmissing,
)
from pandas._typing import (
ArrayLike,
AstypeArg,
Dtype,
DtypeObj,
npt,
type_t,
)
from pandas.compat.numpy import function as nv
from pandas.core.dtypes.common import (
is_bool_dtype,
is_float,
is_float_dtype,
is_integer_dtype,
is_list_like,
is_numeric_dtype,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import (
ExtensionDtype,
register_extension_dtype,
)
from pandas.core.dtypes.missing import isna
from pandas.core import ops
from pandas.core.arrays import ExtensionArray
from pandas.core.arrays.masked import (
BaseMaskedArray,
BaseMaskedDtype,
)
if TYPE_CHECKING:
import pyarrow
@register_extension_dtype
class BooleanDtype(BaseMaskedDtype):
"""
Extension dtype for boolean data.
.. versionadded:: 1.0.0
.. warning::
BooleanDtype is considered experimental. The implementation and
parts of the API may change without warning.
Attributes
----------
None
Methods
-------
None
Examples
--------
>>> pd.BooleanDtype()
BooleanDtype
"""
name = "boolean"
# https://github.com/python/mypy/issues/4125
# error: Signature of "type" incompatible with supertype "BaseMaskedDtype"
@property
def type(self) -> type: # type: ignore[override]
return np.bool_
@property
def kind(self) -> str:
return "b"
@property
def numpy_dtype(self) -> np.dtype:
return np.dtype("bool")
@classmethod
def construct_array_type(cls) -> type_t[BooleanArray]:
"""
Return the array type associated with this dtype.
Returns
-------
type
"""
return BooleanArray
def __repr__(self) -> str:
return "BooleanDtype"
@property
def _is_boolean(self) -> bool:
return True
@property
def _is_numeric(self) -> bool:
return True
def __from_arrow__(
self, array: pyarrow.Array | pyarrow.ChunkedArray
) -> BooleanArray:
"""
Construct BooleanArray from pyarrow Array/ChunkedArray.
"""
import pyarrow
if array.type != pyarrow.bool_():
raise TypeError(f"Expected array of boolean type, got {array.type} instead")
if isinstance(array, pyarrow.Array):
chunks = [array]
else:
# pyarrow.ChunkedArray
chunks = array.chunks
results = []
for arr in chunks:
buflist = arr.buffers()
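            # pyarrow BooleanArray buffers: index 0 is the validity bitmap,
            # index 1 the packed values. Rebuilding from the value buffer
            # alone (validity=None) decodes the data independently of nulls.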
data = pyarrow.BooleanArray.from_buffers(
                arr.type, len(arr), [None, buflist[1]], offset=arr.offset
).to_numpy(zero_copy_only=False)
if arr.null_count != 0:
mask = pyarrow.BooleanArray.from_buffers(
arr.type, len(arr), [None, buflist[0]], offset=arr.offset
).to_numpy(zero_copy_only=False)
mask = ~mask
else:
mask = np.zeros(len(arr), dtype=bool)
bool_arr = BooleanArray(data, mask)
results.append(bool_arr)
if not results:
return BooleanArray(
np.array([], dtype=np.bool_), np.array([], dtype=np.bool_)
)
else:
return BooleanArray._concat_same_type(results)
def _get_common_dtype(self, dtypes: list[DtypeObj]) -> DtypeObj | None:
# Handle only boolean + np.bool_ -> boolean, since other cases like
# Int64 + boolean -> Int64 will be handled by the other type
if all(
isinstance(t, BooleanDtype)
or (isinstance(t, np.dtype) and (np.issubdtype(t, np.bool_)))
for t in dtypes
):
return BooleanDtype()
else:
return None
def coerce_to_array(
values, mask=None, copy: bool = False
) -> tuple[np.ndarray, np.ndarray]:
"""
Coerce the input values array to numpy arrays with a mask.
Parameters
----------
values : 1D list-like
mask : bool 1D array, optional
copy : bool, default False
if True, copy the input
Returns
-------
tuple of (values, mask)
"""
if isinstance(values, BooleanArray):
if mask is not None:
raise ValueError("cannot pass mask for BooleanArray input")
values, mask = values._data, values._mask
if copy:
values = values.copy()
mask = mask.copy()
return values, mask
mask_values = None
if isinstance(values, np.ndarray) and values.dtype == np.bool_:
if copy:
values = values.copy()
elif isinstance(values, np.ndarray) and is_numeric_dtype(values.dtype):
mask_values = isna(values)
values_bool = np.zeros(len(values), dtype=bool)
values_bool[~mask_values] = values[~mask_values].astype(bool)
if not np.all(
values_bool[~mask_values].astype(values.dtype) == values[~mask_values]
):
raise TypeError("Need to pass bool-like values")
values = values_bool
else:
values_object = np.asarray(values, dtype=object)
inferred_dtype = lib.infer_dtype(values_object, skipna=True)
integer_like = ("floating", "integer", "mixed-integer-float")
if inferred_dtype not in ("boolean", "empty") + integer_like:
raise TypeError("Need to pass bool-like values")
mask_values = isna(values_object)
values = np.zeros(len(values), dtype=bool)
values[~mask_values] = values_object[~mask_values].astype(bool)
# if the values were integer-like, validate it were actually 0/1's
if (inferred_dtype in integer_like) and not (
np.all(
values[~mask_values].astype(float)
== values_object[~mask_values].astype(float)
)
):
raise TypeError("Need to pass bool-like values")
if mask is None and mask_values is None:
mask = np.zeros(len(values), dtype=bool)
elif mask is None:
mask = mask_values
else:
if isinstance(mask, np.ndarray) and mask.dtype == np.bool_:
if mask_values is not None:
mask = mask | mask_values
else:
if copy:
mask = mask.copy()
else:
mask = np.array(mask, dtype=bool)
if mask_values is not None:
mask = mask | mask_values
if values.shape != mask.shape:
raise ValueError("values.shape and mask.shape must match")
return values, mask
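# Illustrative behaviour (worked example, not part of the original source):
# coerce_to_array([True, False, None]) yields values [True, False, False]
# and mask [False, False, True] -- the None slot is zero-filled in the data
# and flagged as missing in the mask.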
class BooleanArray(BaseMaskedArray):
"""
Array of boolean (True/False) data with missing values.
This is a pandas Extension array for boolean data, under the hood
represented by 2 numpy arrays: a boolean array with the data and
a boolean array with the mask (True indicating missing).
BooleanArray implements Kleene logic (sometimes called three-value
logic) for logical operations. See :ref:`boolean.kleene` for more.
To construct an BooleanArray from generic array-like input, use
:func:`pandas.array` specifying ``dtype="boolean"`` (see examples
below).
.. versionadded:: 1.0.0
.. warning::
BooleanArray is considered experimental. The implementation and
parts of the API may change without warning.
Parameters
----------
values : numpy.ndarray
A 1-d boolean-dtype array with the data.
mask : numpy.ndarray
A 1-d boolean-dtype array indicating missing values (True
indicates missing).
copy : bool, default False
Whether to copy the `values` and `mask` arrays.
Attributes
----------
None
Methods
-------
None
Returns
-------
BooleanArray
Examples
--------
Create an BooleanArray with :func:`pandas.array`:
>>> pd.array([True, False, None], dtype="boolean")
<BooleanArray>
[True, False, <NA>]
    Length: 3, dtype: boolean
|
andymckay/addons-server
|
src/olympia/zadmin/tests/test_views.py
|
Python
|
bsd-3-clause
| 77,618
| 0.000013
|
# -*- coding: utf-8 -*-
import csv
import json
from cStringIO import StringIO
from datetime import datetime
from django.conf import settings
from django.core import mail
from django.core.cache import cache
import mock
from pyquery import PyQuery as pq
from olympia import amo
from olympia.amo.tests import TestCase
from olympia.amo.tests import formset, initial
from olympia.access.models import Group, GroupUser
from olympia.addons.models import Addon, CompatOverride, CompatOverrideRange
from olympia.amo.urlresolvers import reverse
from olympia.amo.tests.test_helpers import get_image_path
from olympia.amo.utils import urlparams
from olympia.applications.models import AppVersion
from olympia.bandwagon.models import FeaturedCollection, MonthlyPick
from olympia.compat.cron import compatibility_report
from olympia.compat.models import CompatReport
from olympia.constants.base import VALIDATOR_SKELETON_RESULTS
from olympia.devhub.models import ActivityLog
from olympia.files.models import File, FileUpload
from olympia.stats.models import UpdateCount
from olympia.users.models import UserProfile
from olympia.users.utils import get_task_user
from olympia.versions.models import ApplicationsVersions, Version
from olympia.zadmin import forms, tasks
from olympia.zadmin.forms import DevMailerForm
from olympia.zadmin.models import (
EmailPreviewTopic, ValidationJob, ValidationResult)
from olympia.zadmin.tasks import updated_versions
from olympia.zadmin.views import find_files
class TestSiteEvents(TestCase):
fixtures = ['base/users', 'zadmin/tests/siteevents']
def setUp(self):
super(TestSiteEvents, self).setUp()
self.client.login(username='admin@mozilla.com', password='password')
def test_get(self):
url = reverse('zadmin.site_events')
response = self.client.get(url)
assert response.status_code == 200
events = response.context['events']
assert len(events) == 1
def test_add(self):
url = reverse('zadmin.site_events')
new_event = {
'event_type': 2,
'start': '2012-01-01',
'description': 'foo',
}
response = self.client.post(url, new_event, follow=True)
assert response.status_code == 200
events = response.context['events']
assert len(events) == 2
def test_edit(self):
url = reverse('zadmin.site_events', args=[1])
modified_event = {
'event_type': 2,
'start': '2012-01-01',
'description': 'bar',
}
response = self.client.post(url, modified_event, follow=True)
assert response.status_code == 200
events = response.context['events']
assert events[0].description == 'bar'
def test_delete(self):
url = reverse('zadmin.site_events.delete', args=[1])
response = self.client.get(url, follow=True)
assert response.status_code == 200
events = response.context['events']
assert len(events) == 0
class BulkValidationTest(TestCase):
fixtures = ['base/addon_3615', 'base/appversion', 'base/users']
def setUp(self):
super(BulkValidationTest, self).setUp()
assert self.client.login(username='admin@mozilla.com',
password='password')
self.addon = Addon.objects.get(pk=3615)
self.creator = UserProfile.objects.get(username='editor')
self.version = self.addon.get_version()
ApplicationsVersions.objects.filter(
            application=amo.FIREFOX.id, version=self.version).update(
max=AppVersion.objects.get(application=1, version='3.7a1pre'))
self.application_version = self.version.apps.all()[0]
self.application = self.application_version.application
self.min = self.application_version.min
self.max = self.application_version.max
self.curr_max = self.appversion('3.7a1pre')
self.counter = 0
self.old_task_user = settings.TASK_USER_ID
settings.TASK_USER_ID = self.creator.id
def tearDown(self):
settings.TASK_USER_ID = self.old_task_user
super(BulkValidationTest, self).tearDown()
def appversion(self, version, application=amo.FIREFOX.id):
return AppVersion.objects.get(application=application,
version=version)
def create_job(self, **kwargs):
kw = dict(application=amo.FIREFOX.id,
curr_max_version=kwargs.pop('current', self.curr_max),
target_version=kwargs.pop('target',
self.appversion('3.7a3')),
creator=self.creator)
kw.update(kwargs)
return ValidationJob.objects.create(**kw)
def create_file(self, version=None, platform=amo.PLATFORM_ALL.id):
if not version:
version = self.version
return File.objects.create(version=version,
filename='file-%s' % self.counter,
platform=platform,
status=amo.STATUS_PUBLIC)
def create_result(self, job, f, **kwargs):
self.counter += 1
kw = dict(file=f,
validation='{}',
errors=0,
warnings=0,
notices=0,
validation_job=job,
task_error=None,
valid=0,
completed=datetime.now())
kw.update(kwargs)
return ValidationResult.objects.create(**kw)
def start_validation(self, new_max='3.7a3'):
self.new_max = self.appversion(new_max)
r = self.client.post(reverse('zadmin.start_validation'),
{'application': amo.FIREFOX.id,
'curr_max_version': self.curr_max.id,
'target_version': self.new_max.id,
'finish_email': 'fliggy@mozilla.com'},
follow=True)
assert r.status_code == 200
class TestBulkValidation(BulkValidationTest):
@mock.patch('olympia.zadmin.tasks.bulk_validate_file')
def test_start(self, bulk_validate_file):
new_max = self.appversion('3.7a3')
r = self.client.post(reverse('zadmin.start_validation'),
{'application': amo.FIREFOX.id,
'curr_max_version': self.curr_max.id,
'target_version': new_max.id,
'finish_email': 'fliggy@mozilla.com'},
follow=True)
self.assertNoFormErrors(r)
self.assert3xx(r, reverse('zadmin.validation'))
job = ValidationJob.objects.get()
assert job.application == amo.FIREFOX.id
assert job.curr_max_version.version == self.curr_max.version
assert job.target_version.version == new_max.version
assert job.finish_email == 'fliggy@mozilla.com'
assert job.completed is None
assert job.result_set.all().count() == len(self.version.all_files)
assert bulk_validate_file.delay.called
@mock.patch('olympia.zadmin.tasks.bulk_validate_file')
def test_ignore_user_disabled_addons(self, bulk_validate_file):
self.addon.update(disabled_by_user=True)
r = self.client.post(reverse('zadmin.start_validation'),
{'application': amo.FIREFOX.id,
'curr_max_version': self.curr_max.id,
'target_version': self.appversion('3.7a3').id,
'finish_email': 'fliggy@mozilla.com'},
follow=True)
self.assertNoFormErrors(r)
self.assert3xx(r, reverse('zadmin.validation'))
assert not bulk_validate_file.delay.called
@mock.patch('olympia.zadmin.tasks.bulk_validate_file')
def test_ignore_non_public_addons(self, bulk_validate_file):
target_ver = self.appversion('3.7a3').id
for status in (amo.STATUS_DISABLED, amo.STATUS_NULL,
amo.STATUS_DELETED):
self.addon.update(
|
OLAPLINE/TM1py
|
TM1py/Utils/MDXUtils.py
|
Python
|
mit
| 10,016
| 0.002895
|
import warnings
class DimensionSelection:
""" Instances of this class to be passed to construct_mdx function
"""
SUBSET = 1
EXPRESSION = 2
ITERABLE = 3
def __init__(self, dimension_name, elements=None, subset=None, expression=None):
warnings.warn(
f"class DimensionSelection will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
self.dimension_name = dimension_name
self.selection_type = self.determine_selection_type(elements, subset, expression)
if self.selection_type == self.SUBSET:
self.expression = curly_braces(expression="Tm1SubsetToSet([{dimension}], '{subset}')".format(
dimension=dimension_name,
subset=subset))
elif self.selection_type == self.EXPRESSION:
self.expression = curly_braces(expression=expression)
elif self.selection_type == self.ITERABLE:
self.expression = curly_braces(expression=",".join(["[{}].[{}]".format(dimension_name, element)
for element
in elements]))
elif not self.selection_type:
self.expression = curly_braces(expression="TM1SubsetAll([{dimension}])".format(dimension=dimension_name))
@staticmethod
def determine_selection_type(elements=None, subset=None, expression=None):
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
if elements is not None and subset is None and expression is None:
return DimensionSelection.ITERABLE
elif elements is None and subset is not None and expression is None:
return DimensionSelection.SUBSET
elif elements is None and subset is None and expression is not None:
return DimensionSelection.EXPRESSION
elif elements is None and subset is None and expression is None:
return None
else:
raise ValueError("DimensionSelection constructor takes one type of selection only: "
"elements, subset or expression")
def construct_mdx_axis(dim_selections):
""" Construct MDX for one Axis (Row or Column).
Can have multiple dimensions stacked.
:param dim_selections: instances of TM1py.Utils.MDXUtils.DimensionSelection
:return: a valid MDX for an Axis
"""
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
return "*".join(selection.expression
for selection
in dim_selections)
def construct_mdx(cube_name, rows, columns, contexts=None, suppress=None):
""" Method to construct MDX Query from different dimension selection
:param cube_name: Name of the Cube
:param rows: List of DimensionSelections
:param columns: List of DimensionSelections
:param contexts: Dictionary of Dimensions and Elements
:param suppress: "Both", "Rows", "Columns" or None
:return: Generated MDX Query
"""
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
# MDX Skeleton
mdx_template = "SELECT {}{} ON ROWS, {}{} ON COLUMNS FROM [{}] {}"
# Suppression
mdx_rows_suppress = "NON EMPTY " if suppress and suppress.upper() in ["ROWS", "BOTH"] else ""
mdx_columns_suppress = "NON EMPTY " if suppress and suppress.upper() in ["COLUMNS", "BOTH"] else ""
# Rows and Columns
mdx_rows = construct_mdx_axis(rows)
mdx_columns = construct_mdx_axis(columns)
# Context filter (where statement)
mdx_where = ""
if contexts:
mdx_where_parts = ["[{}].[{}]".format(dim, elem)
for dim, elem
in contexts.items()]
mdx_where = "".join(["WHERE (",
",".join(mdx_where_parts),
")"])
# Return Full MDX
return mdx_template.format(mdx_rows_suppress, mdx_rows, mdx_columns_suppress, mdx_columns, cube_name, mdx_where)
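# Illustrative sketch (hypothetical cube and dimension names): construct_mdx(
#     'Sales', rows=[DimensionSelection('Product', elements=['P1'])],
#     columns=[DimensionSelection('Period', subset='All')],
#     contexts={'Version': 'Actual'}, suppress='Rows')
# returns (a single line, wrapped here for readability):
#   SELECT NON EMPTY {[Product].[P1]} ON ROWS,
#   {Tm1SubsetToSet([Period], 'All')} ON COLUMNS FROM [Sales]
#   WHERE ([Version].[Actual])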
def curly_braces(expression):
""" Put curly braces around a string
:param expression:
:return:
"""
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
return "".join(["{" if not expression.startswith("{") else "",
expression,
"}" if not expression.endswith("}") else ""])
def read_cube_name_from_mdx(mdx):
""" Read the cube name from a valid MDX Query
:param mdx: The MDX Query as String
:return: String, name of a cube
"""
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
cube, _, _, _ = read_dimension_composition_from_mdx(mdx)
return cube
def read_dimension_composition_from_mdx(mdx):
""" Parse a valid MDX Query and return the name of the cube and a list of dimensions for each axis
:param mdx:
:return:
"""
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
mdx_rows, mdx_columns, mdx_from, mdx_where = split_mdx(mdx)
cube = mdx_from[1:-1]
rows = read_dimension_composition_from_mdx_set_or_tuple(mdx_rows)
    columns = read_dimension_composition_from_mdx_set_or_tuple(mdx_columns)
titles = read_dimension_composition_from_mdx_set_or_tuple(mdx_where)
return cube, rows, columns, titles
def read_dimension_composition_from_mdx_set_or_tuple(mdx):
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
mdx_without_spaces = ''.join(mdx.split())
    # case for mdx statement with no where statement
if len(mdx_without_spaces) == 0:
return []
# case for tuples mdx statement on rows or columns
if mdx_without_spaces[1] == '(' and mdx_without_spaces[-2] == ')':
return read_dimension_composition_from_mdx_tuple(mdx)
# case for where mdx statement
elif mdx_without_spaces[0] == '(' and mdx_without_spaces[-1] == ')':
return read_dimension_composition_from_mdx_tuple(mdx)
# case for set mdx statement on rows or columns
else:
return read_dimension_composition_from_mdx_set(mdx)
def read_dimension_composition_from_mdx_set(mdx):
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
dimensions = []
mdx_without_spaces = ''.join(mdx.split())
for sub_mdx in mdx_without_spaces.split("}*{"):
pos_start, pos_end = sub_mdx.find("["), sub_mdx.find("]")
dimension_name = sub_mdx[pos_start + 1:pos_end]
dimensions.append(dimension_name)
return dimensions
def read_dimension_composition_from_mdx_tuple(mdx):
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
dimensions = []
for unique_member_name in mdx.split(","):
pos_start, pos_end = unique_member_name.find("["), unique_member_name.find("]")
dimension_name = unique_member_name[pos_start + 1:pos_end]
# only parse through first tuple of potentially many tuples
if dimension_name in dimensions:
return dimensions
dimensions.append(dimension_name)
return dimensions
def split_mdx(mdx):
warnings.warn(
f"Module MdxUtils will be deprecated. Use https://github.com/cubewise-code/mdxpy instead",
DeprecationWarning,
stacklevel=2)
|
fclesio/learning-space
|
Python/textract_extraction.py
|
Python
|
gpl-2.0
| 1,358
| 0.002209
|
import boto3
import numpy as np
import time
import json
import os
import pandas as pd
name = 'Flavio C.'
root_dir = '/document/'
file_name = 'augmented-data.png'
# Get all files in directory
meine_id_kartes = os.listdir(root_dir)
# get the results
client = boto3.client(
service_name='textract',
region_name='eu-west-1',
endpoint_url='https://textract.eu-west-1.amazonaws.com',
)
meine_id_karte_card_info = []
# For every card get all info
for meine_id_karte in meine_id_kartes:
time.sleep(5)
with open(root_dir + meine_id_karte, 'rb') as file:
img_test = file.read()
bytes_test = bytearray(img_test)
print('Image loaded', root_dir + meine_id_karte)
try:
# Process using image bytes
response = client.analyze_document(Document={'Bytes': bytes_test}, FeatureTypes=['FORMS'])
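        # analyze_document returns a list of Blocks (PAGE, LINE, WORD,
        # KEY_VALUE_SET, ...); only blocks carrying a 'Text' field are kept.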
# Get the text blocks
blocks = response['Blocks']
meine_id_karte_text = []
for i in blocks:
i = json.dumps(i)
i = json.loads(i)
try:
meine_id_karte_text.append(i['Text'])
except:
pass
        meine_id_karte_card_info.append((meine_id_karte, meine_id_karte_text))
except:
pass
df_legible = pd.DataFrame(meine_id_karte_card_info)
df_legible.to_csv('normal-karte.csv')
print(df_legible)
|
BetterCollective/thumbor
|
tests/loaders/test_http_loader.py
|
Python
|
mit
| 7,414
| 0.00054
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
from os.path import abspath, join, dirname
from preggy import expect
import mock
# from tornado.concurrent import Future
import tornado.web
from tests.base import PythonTestCase, TestCase
from tornado.concurrent import Future
import thumbor.loaders.http_loader as loader
from thumbor.context import Context
from thumbor.config import Config
from thumbor.loaders import LoaderResult
def fixture_for(filename):
return abspath(join(dirname(__file__), 'fixtures', filename))
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write('Hello')
class EchoUserAgentHandler(tornado.web.RequestHandler):
def get(self):
self.write(self.request.headers['User-Agent'])
class HandlerMock(object):
def __init__(self, headers):
self.request = RequestMock(headers)
class RequestMock(object):
def __init__(self, headers):
self.headers = headers
class ResponseMock:
def __init__(self, error=None, content_type=None, body=None, code=None):
self.error = error
self.code = code
self.time_info = None
self.headers = {
'Content-Type': 'image/jpeg'
}
if content_type:
self.headers['Content-Type'] = content_type
self.body = body
class ReturnContentTestCase(PythonTestCase):
def test_return_none_on_error(self):
response_mock = ResponseMock(error='Error', code=599)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
        expect(result.buffer).to_be_null()
expect(result.successful).to_be_false()
def test_return_body_if_valid(self):
response_mock = ResponseMock(body='body', code=200)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('body')
def test_return_upstream_error_on_body_none(self):
response_mock = ResponseMock(body=None, code=200)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_be_null()
expect(result.successful).to_be_false()
expect(result.error).to_equal(LoaderResult.ERROR_UPSTREAM)
def test_return_upstream_error_on_body_empty(self):
response_mock = ResponseMock(body='', code=200)
callback_mock = mock.Mock()
ctx = Context(None, None, None)
loader.return_contents(response_mock, 'some-url', callback_mock, ctx)
result = callback_mock.call_args[0][0]
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_be_null()
expect(result.successful).to_be_false()
expect(result.error).to_equal(LoaderResult.ERROR_UPSTREAM)
class ValidateUrlTestCase(PythonTestCase):
def test_with_allowed_sources(self):
config = Config()
config.ALLOWED_SOURCES = ['s.glbimg.com']
ctx = Context(None, config, None)
expect(
loader.validate(
ctx,
'http://www.google.com/logo.jpg'
)
).to_be_false()
expect(
loader.validate(
ctx,
'http://s2.glbimg.com/logo.jpg'
)
).to_be_false()
expect(
loader.validate(
ctx,
'/glob=:sfoir%20%20%3Co-pmb%20%20%20%20_%20%20%20%200%20%20g.-%3E%3Ca%20hplass='
)
).to_be_false()
expect(
loader.validate(ctx, 'http://s.glbimg.com/logo.jpg')).to_be_true()
def test_without_allowed_sources(self):
config = Config()
config.ALLOWED_SOURCES = []
ctx = Context(None, config, None)
is_valid = loader.validate(ctx, 'http://www.google.com/logo.jpg')
expect(is_valid).to_be_true()
class NormalizeUrlTestCase(PythonTestCase):
def test_should_normalize_url(self):
for url in ['http://some.url', 'some.url']:
expect(loader._normalize_url(url)).to_equal('http://some.url')
def test_should_normalize_quoted_url(self):
url = 'https%3A//www.google.ca/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png'
expected = 'https://www.google.ca/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png'
result = loader._normalize_url(url)
expect(result).to_equal(expected)
class HttpLoaderTestCase(TestCase):
def get_app(self):
application = tornado.web.Application([
(r"/", MainHandler),
])
return application
def test_load_with_callback(self):
url = self.get_url('/')
config = Config()
ctx = Context(None, config, None)
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('Hello')
expect(result.successful).to_be_true()
def test_load_with_curl(self):
url = self.get_url('/')
config = Config()
config.HTTP_LOADER_CURL_ASYNC_HTTP_CLIENT = True
ctx = Context(None, config, None)
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('Hello')
expect(result.successful).to_be_true()
def test_should_return_a_future(self):
url = self.get_url('/')
config = Config()
ctx = Context(None, config, None)
future = loader.load(ctx, url)
expect(isinstance(future, Future)).to_be_true()
class HttpLoaderWithUserAgentForwardingTestCase(TestCase):
def get_app(self):
application = tornado.web.Application([
(r"/", EchoUserAgentHandler),
])
return application
def test_load_with_user_agent(self):
url = self.get_url('/')
config = Config()
config.HTTP_LOADER_FORWARD_USER_AGENT = True
ctx = Context(None, config, None, HandlerMock({"User-Agent": "test-user-agent"}))
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('test-user-agent')
def test_load_with_default_user_agent(self):
url = self.get_url('/')
config = Config()
config.HTTP_LOADER_FORWARD_USER_AGENT = True
config.HTTP_LOADER_DEFAULT_USER_AGENT = "DEFAULT_USER_AGENT"
ctx = Context(None, config, None, HandlerMock({}))
loader.load(ctx, url, self.stop)
result = self.wait()
expect(result).to_be_instance_of(LoaderResult)
expect(result.buffer).to_equal('DEFAULT_USER_AGENT')
|
jakubbrindza/gtg
|
GTG/gtk/browser/browser.py
|
Python
|
gpl-3.0
| 61,395
| 0.000016
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
""" The main window for GTG, listing tags, and open and closed tasks """
from webbrowser import open as openurl
import threading
from gi.repository import GObject, Gtk, Gdk
from GTG import info
from GTG.backends.backendsignals import BackendSignals
from GTG.core.dirs import ICONS_DIR
from GTG.core.search import parse_search_query, SEARCH_COMMANDS, InvalidQuery
from GTG.core.tag import SEARCH_TAG, ALLTASKS_TAG
from GTG.core.task import Task
from GTG.core.translations import _, ngettext
from GTG.gtk.browser import GnomeConfig
from GTG.gtk.browser.custominfobar import CustomInfoBar
from GTG.gtk.browser.modifytags_dialog import ModifyTagsDialog
from GTG.gtk.browser.tag_context_menu import TagContextMenu
from GTG.gtk.browser.treeview_factory import TreeviewFactory
from GTG.gtk.editor.calendar import GTGCalendar
from GTG.gtk.tag_completion import TagCompletion
from GTG.tools.dates import Date
from GTG.tools.logger import Log
class TaskBrowser(GObject.GObject):
""" The UI for browsing open and closed tasks,
and listing tags in a tree """
__string_signal__ = (GObject.SignalFlags.RUN_FIRST, None, (str, ))
__none_signal__ = (GObject.SignalFlags.RUN_FIRST, None, tuple())
__gsignals__ = {'task-added-via-quick-add': __string_signal__,
'visibility-toggled': __none_signal__,
}
def __init__(self, requester, vmanager):
GObject.GObject.__init__(self)
# Object prime variables
self.req = requester
self.vmanager = vmanager
self.config = self.req.get_config('browser')
self.tag_active = False
self.applied_tags = []
# Treeviews handlers
self.vtree_panes = {}
self.tv_factory = TreeviewFactory(self.req, self.config)
# Active Tasks
self.activetree = self.req.get_tasks_tree(name='active', refresh=False)
self.activetree.apply_filter('active', refresh=False)
self.vtree_panes['active'] = \
self.tv_factory.active_tasks_treeview(self.activetree)
# Workview Tasks
self.workview_tree = \
self.req.get_tasks_tree(name='workview', refresh=False)
self.workview_tree.apply_filter('workview', refresh=False)
self.vtree_panes['workview'] = \
self.tv_factory.active_tasks_treeview(self.workview_tree)
# Closed Tasks
self.closedtree = \
self.req.get_tasks_tree(name='closed', refresh=False)
self.closedtree.apply_filter('closed', refresh=False)
self.vtree_panes['closed'] = \
self.tv_factory.closed_tasks_treeview(self.closedtree)
# YOU CAN DEFINE YOUR INTERNAL MECHANICS VARIABLES BELOW
# Setup GTG icon theme
self._init_icon_theme()
# Tags
self.tagtree = None
self.tagtreeview = None
# Load window tree
self.builder = Gtk.Builder()
self.builder.add_from_file(GnomeConfig.BROWSER_UI_FILE)
# Define aliases for specific widgets
self._init_widget_aliases()
# Init non-GtkBuilder widgets
self._init_ui_widget()
# Initialize tooltip for GtkEntry button
self._init_toolbar_tooltips()
# Initialize "About" dialog
self._init_about_dialog()
# Create our dictionary and connect it
self._init_signal_connections()
# Define accelerator keys
self._init_accelerators()
# Initialize search completion
self._init_search_completion()
self.restore_state_from_conf()
self.on_select_tag()
self.browser_shown = False
# Update the title when a task change
self.activetree.register_cllbck('node-added-inview',
self._update_window_title)
self.activetree.register_cllbck('node-deleted-inview',
self._update_window_title)
self._update_window_title()
vmanager.timer.connect('refresh', self.refresh_all_views)
# INIT HELPER FUNCTIONS #######################################################
def _init_icon_theme(self):
"""
        sets the default icon theme and its search directory
"""
# TODO(izidor): Add icon dirs on app level
Gtk.IconTheme.get_default().prepend_search_path(ICONS_DIR)
# TODO(izidor): Set it outside browser as it applies to every window
Gtk.Window.set_default_icon_name("gtg")
def _init_widget_aliases(self):
"""
        defines aliases for UI elements found in the GtkBuilder file
"""
self.window = self.builder.get_object("MainWindow")
self.taskpopup = self.builder.get_object("task_context_menu")
self.defertopopup = self.builder.get_object("defer_to_context_menu")
self.ctaskpopup = self.builder.get_object("closed_task_context_menu")
self.about = self.builder.get_object("about_dialog")
self.main_pane = self.builder.get_object("main_pane")
self.workview_pane = self.builder.get_object("workview_pane")
self.closed_pane = self.builder.get_object("closed_pane")
self.menu_view_workview = self.builder.get_object("view_workview")
self.toggle_workview = self.builder.get_object("workview_toggle")
self.quickadd_entry = self.builder.get_object("quickadd_field")
self.quickadd_pane = self.builder.get_object("quickadd_pane")
self.sidebar = self.builder.get_object("sidebar_vbox")
self.sidebar_container = self.builder.get_object("sidebar-scroll")
self.sidebar_notebook = self.builder.get_object("sidebar_notebook")
self.main_notebook = self.builder.get_object("main_notebook")
self.accessory_notebook = self.builder.get_object("accessory_notebook")
self.vbox_toolbars = self.builder.get_object("vbox_toolbars")
self.tagpopup = TagContextMenu(self.req, self.vmanager)
def _init_ui_widget(self):
""" Sets the main pane with three trees for active tasks,
actionable tasks (workview), closed tasks and creates
ModifyTagsDialog & Calendar """
# Tasks treeviews
self.main_pane.add(self.vtree_panes['active'])
self.workview_pane.add(self.vtree_panes['workview'])
self.closed_pane.add(self.vtree_panes['closed'])
tag_completion = TagCompletion(self.req.get_tag_tree())
        self.modifytags_dialog = ModifyTagsDialog(tag_completion, self.req)
self.calendar = GTGCalendar()
self.calendar.set_transient_for(self.window)
self.calendar.connect("date-changed", self.on_date_changed)
def init_tags_sidebar(self):
"""
initializes the tagtree (left area with tags and searches)
"""
# The tags treeview
self.tagtree = self.req.get_tag_tree()
        self.tagtreeview = self.tv_factory.tags_treeview(self.tagtree)
# Tags treeview
self.tagtreeview.get_selection().connect('changed',
self.on_select_tag)
self.tagtreeview.connect('button-press-event',
self.on_tag_treeview_button_press_event)
self.tagtreeview.connect('key-press-event',
|
tkasp/osmose-backend
|
analysers/Analyser_Osmosis.py
|
Python
|
gpl-3.0
| 28,447
| 0.005765
|
#-*- coding: utf-8 -*-
###########################################################################
## ##
## Copyrights Frederic Rodrigo 2011 ##
## ##
## This program is free software: you can redistribute it and/or modify ##
## it under the terms of the GNU General Public License as published by ##
## the Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, ##
## but WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ##
## GNU General Public License for more details. ##
## ##
## You should have received a copy of the GNU General Public License ##
## along with this program. If not, see <http://www.gnu.org/licenses/>. ##
## ##
###########################################################################
from .Analyser import Analyser
import os
import psycopg2
import psycopg2.extensions
import re
from modules import DictCursorUnicode
from collections import defaultdict
from inspect import getframeinfo, stack
class Analyser_Osmosis(Analyser):
sql_create_highways = """
CREATE UNLOGGED TABLE {0}.highways AS
SELECT
id,
nodes,
tags,
tags->'highway' AS highway,
linestring,
ST_Transform(linestring, {1}) AS linestring_proj,
is_polygon,
tags->'highway' LIKE '%_link' AS is_link,
(tags?'junction' AND tags->'junction' = 'roundabout') AS is_roundabout,
(tags?'oneway' AND tags->'oneway' IN ('yes', 'true', '1', '-1')) AS is_oneway,
(tags?'area' AND tags->'area' != 'no') AS is_area,
tags->'highway' IN ('planned', 'proposed', 'construction') AS is_construction,
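            -- Rough importance ranking: 1 = motorway/trunk/primary down to
            -- 5 = living_street/track/service; NULL for unranked highway values.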
CASE tags->'highway'
WHEN 'motorway' THEN 1
WHEN 'primary' THEN 1
WHEN 'trunk' THEN 1
WHEN 'motorway_link' THEN 2
WHEN 'primary_link' THEN 2
WHEN 'trunk_link' THEN 2
WHEN 'secondary' THEN 2
WHEN 'secondary_link' THEN 2
WHEN 'tertiary' THEN 3
WHEN 'tertiary_link' THEN 3
WHEN 'unclassified' THEN 4
WHEN 'unclassified_link' THEN 4
WHEN 'residential' THEN 4
WHEN 'residential_link' THEN 4
WHEN 'living_street' THEN 5
WHEN 'track' THEN 5
WHEN 'cycleway' THEN 5
WHEN 'service' THEN 5
WHEN 'road' THEN 5
ELSE NULL
END AS level
FROM
ways
WHERE
tags != ''::hstore AND
tags?'highway' AND
tags->'highway' NOT IN ('services', 'rest_area', 'razed', 'no') AND
ST_NPoints(linestring) >= 2
;
CREATE INDEX idx_highways_linestring ON {0}.highways USING gist(linestring);
CREATE INDEX idx_highways_linestring_proj ON {0}.highways USING gist(linestring_proj);
CREATE INDEX idx_highways_id ON {0}.highways(id);
CREATE INDEX idx_highways_highway ON {0}.highways(highway);
ANALYZE {0}.highways;
"""
sql_create_highway_ends = """
CREATE UNLOGGED TABLE {0}.highway_ends AS
SELECT
id,
nodes,
linestring,
highway,
is_link,
is_roundabout,
(ends_geom(nodes, linestring)).id AS nid,
(ends_geom(nodes, linestring)).geom AS geom,
level
FROM
highways
WHERE
NOT is_area AND
NOT is_construction
;
ANALYZE {0}.highway_ends;
"""
sql_create_buildings = """
CREATE UNLOGGED TABLE {0}.buildings AS
SELECT
*,
CASE WHEN polygon_proj IS NOT NULL AND wall THEN ST_Area(polygon_proj) ELSE NULL END AS area
FROM (
SELECT DISTINCT ON (id)
id,
tags,
linestring,
CASE WHEN ST_IsValid(linestring) = 't' AND ST_IsSimple(linestring) = 't' AND ST_IsValid(ST_MakePolygon(ST_Transform(linestring, {1}))) THEN ST_MakePolygon(ST_Transform(linestring, {1})) ELSE NULL END AS polygon_proj,
(NOT tags?'wall' OR tags->'wall' != 'no') AND tags->'building' != 'roof' AS wall,
tags?'layer' AS layer,
ST_NPoints(linestring) AS npoints,
relation_members.relation_id IS NOT NULL AS relation
FROM
ways
LEFT JOIN relation_members ON
relation_members.member_type = 'W' AND
relation_members.member_id = ways.id
WHERE
tags != ''::hstore AND
tags?'building' AND
tags->'building' != 'no' AND
is_polygon
) AS t
;
CREATE INDEX idx_buildings_linestring ON {0}.buildings USING GIST(linestring);
CREATE INDEX idx_buildings_linestring_wall ON {0}.buildings USING GIST(linestring) WHERE wall;
CREATE INDEX idx_buildings_polygon_proj ON {0}.buildings USING gist(polygon_proj);
ANALYZE {0}.buildings;
"""
def __init__(self, config, logger = None):
Analyser.__init__(self, config, logger)
self.classs = {}
self.classs_change = {}
self.explain_sql = False
self.FixTypeTable = {
self.node:"node", self.node_full:"node", self.node_new:"node", self.node_position:"node",
self.way:"way", self.way_full:"way",
self.relation:"relation", self.relation_full:"relation",
}
self.typeMapping = {'N': self.node_full, 'W': self.way_full, 'R': self.relation_full}
self.resume_from_timestamp = None
self.already_issued_objects = None
if hasattr(config, "verbose") and config.verbose:
self.explain_sql = True
def __enter__(self):
Analyser.__enter__(self)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
# open database connections + output file
self.apiconn = self.config.osmosis_manager.osmosis()
self.gisconn = self.apiconn.conn()
self.giscurs = self.gisconn.cursor(cursor_factory=DictCursorUnicode.DictCursorUnicode50)
return self
def __exit__(self, exc_type, exc_value, traceback):
# close database connections + output file
self.config.osmosis_manager.osmosis_close()
Analyser.__exit__(self, exc_type, exc_value, traceback)
def timestamp(self):
return self.apiconn.timestamp()
def analyser(self):
self.init_analyser()
if self.classs != {} or self.classs_change != {}:
self.logger.log(u"run osmosis all analyser {0}".format(self.__class__.__name__))
self.error_file.analyser(self.timestamp(), self.analyser_version())
if hasattr(self, 'requires_tables_common'):
self.requires_tables_build(self.requires_tables_common)
if hasattr(self, 'requires_tables_full'):
self.requires_tables_build(self.requires_tables_full)
self.dump_class(self.classs)
self.dump_class(self.classs_change)
try:
self.analyser_osmosis_common()
self.analyser_osmosis_full()
finally:
self.error_file.analyser_end()
def analyser_deferred_clean(self):
if hasattr(self, 'requires_tables_common'):
self.requires_tables_clean(self.requires_tables_common)
if hasattr(self, 'requires_tables_full'):
self.requires_tables_clean(self.requires_tables_full)
def analyser_change(self):
self.init_analyser()
if self.classs != {}:
self.logger.log(u"run osmosis base analyser {0}".format(self.__class__.__name__))
self.error_file.analyser(self.timestamp(), self.analyser_version())
if hasattr(self, 'requires_tables_common'):
self.requires_tables_build(self.requires_tables_common)
            self.dump_class(self.classs)
try:
self.analyser_osmosis_common()
finally:
self.error_file.analyser_end()
if self.classs_change != {}:
self.logger.log(u"run osmosis change an
|
hzlf/openbroadcast
|
website/tools/suit/watch_less.py
|
Python
|
gpl-3.0
| 1,306
| 0
|
import sys
import os
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, FileModifiedEvent
class LessCompiler(FileSystemEventHandler):
def __init__(self, source):
self.source = source
FileSystemEventHandler.__init__(self)
def compile_css(self):
if len(sys.argv) < 3:
destination = self.source.replace('less', 'css')
else:
destination = sys.argv[2]
        cmd = 'lessc %s > %s -x' % (self.source, os.path.abspath(destination))
print(cmd)
os.system(cmd)
def on_any_event(self, event):
if '__' not in event.src_path and isinstance(event, FileModifiedEvent):
self.compile_css()
if __name__ == "__main__":
if len(sys.argv) < 2:
sys.stderr.write(
'Usage: %s source [destination=../css/$1.css]\n' % sys.argv[0])
        sys.exit(1)
source = os.path.abspath(sys.argv[1])
event_handler = LessCompiler(source)
# Run once at startup
event_handler.compile_css()
observer = Observer()
observer.schedule(event_handler, os.path.dirname(source), recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
mortada/tensorflow
|
tensorflow/python/ops/rnn.py
|
Python
|
apache-2.0
| 44,560
| 0.004129
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RNN helpers for TensorFlow models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.util import nest
# pylint: disable=protected-access
_concat = rnn_cell_impl._concat
_like_rnncell = rnn_cell_impl._like_rnncell
# pylint: enable=protected-access
def _transpose_batch_time(x):
"""Transpose the batch and time dimensions of a Tensor.
Retains as much of the static shape information as possible.
Args:
x: A tensor of rank 2 or higher.
Returns:
x transposed along the first two dimensions.
Raises:
ValueError: if `x` is rank 1 or lower.
"""
x_static_shape = x.get_shape()
if x_static_shape.ndims is not None and x_static_shape.ndims < 2:
raise ValueError(
"Expected input tensor %s to have rank at least 2, but saw shape: %s" %
(x, x_static_shape))
x_rank = array_ops.rank(x)
x_t = array_ops.transpose(
x, array_ops.concat(
([1, 0], math_ops.range(2, x_rank)), axis=0))
x_t.set_shape(
tensor_shape.TensorShape([
x_static_shape[1].value, x_static_shape[0].value
]).concatenate(x_static_shape[2:]))
return x_t
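# A minimal sketch of what _transpose_batch_time computes (shapes are
# illustrative): a batch-major [batch, time, ...] tensor becomes time-major
# [time, batch, ...], with any trailing dimensions preserved.
#   x = array_ops.zeros([32, 10, 64])   # [batch, time, depth]
#   y = _transpose_batch_time(x)        # static shape [10, 32, 64]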
def _infer_state_dtype(explicit_dtype, state):
"""Infer the dtype of an RNN state.
Args:
explicit_dtype: explicitly declared dtype or None.
state: RNN's hidden state. Must be a Tensor or a nested iterable containing
Tensors.
Returns:
dtype: inferred dtype of hidden state.
Raises:
ValueError: if `state` has heterogeneous dtypes or is empty.
"""
if explicit_dtype is not None:
return explicit_dtype
elif nest.is_sequence(state):
inferred_dtypes = [element.dtype for element in nest.flatten(state)]
if not inferred_dtypes:
raise ValueError("Unable to infer dtype from empty state.")
all_same = all([x == inferred_dtypes[0] for x in inferred_dtypes])
if not all_same:
raise ValueError(
"State has tensors of different inferred_dtypes. Unable to infer a "
"single representative dtype.")
return inferred_dtypes[0]
else:
return state.dtype
def _on_device(fn, device):
"""Build the subgraph defined by lambda `fn` on `device` if it's not None."""
if device:
with ops.device(device):
return fn()
else:
return fn()
# pylint: disable=unused-argument
def _rnn_step(
time, sequence_length, min_sequence_length, max_sequence_length,
zero_output, state, call_cell, state_size, skip_conditionals=False):
"""Calculate one step of a dynamic RNN minibatch.
Returns an (output, state) pair conditioned on the sequence_lengths.
When skip_conditionals=False, the pseudocode is something like:
if t >= max_sequence_length:
return (zero_output, state)
if t < min_sequence_length:
return call_cell()
# Selectively output zeros or output, old state or new state depending
# on if we've finished calculating each row.
new_output, new_state = call_cell()
final_output = np.vstack([
zero_output if time >= sequence_lengths[r] else new_output_r
for r, new_output_r in enumerate(new_output)
])
final_state = np.vstack([
state[r] if time >= sequence_lengths[r] else new_state_r
for r, new_state_r in enumerate(new_state)
])
return (final_output, final_state)
Args:
time: Python int, the current time step
sequence_length: int32 `Tensor` vector of size [batch_size]
min_sequence_length: int32 `Tensor` scalar, min of sequence_length
max_sequence_length: int32 `Tensor` scalar, max of sequence_length
zero_output: `Tensor` vector of shape [output_size]
state: Either a single `Tensor` matrix of shape `[batch_size, state_size]`,
or a list/tuple of such tensors.
call_cell: lambda returning tuple of (new_output, new_state) where
new_output is a `Tensor` matrix of shape `[batch_size, output_size]`.
new_state is a `Tensor` matrix of shape `[batch_size, state_size]`.
state_size: The `cell.state_size` associated with the state.
skip_conditionals: Python bool, whether to skip using the conditional
calculations. This is useful for `dynamic_rnn`, where the input tensor
matches `max_sequence_length`, and using conditionals just slows
everything down.
Returns:
A tuple of (`final_output`, `final_state`) as given by the pseudocode above:
final_output is a `Tensor` matrix of shape [batch_size, output_size]
final_state is either a single `Tensor` matrix, or a tuple of such
matrices (matching length and shapes of input `state`).
Raises:
ValueError: If the cell returns a state tuple whose length does not match
that returned by `state_size`.
"""
# Convert state to a list for ease of use
flat_state = nest.flatten(state)
flat_zero_output = nest.flatten(zero_output)
def _copy_one_through(output, new_output):
copy_cond = (time >= sequence_length)
return _on_device(
lambda: array_ops.where(copy_cond, output, new_output),
device=new_output.op.device)
def _copy_some_through(flat_new_output, flat_new_state):
# Use broadcasting select to determine which values should get
# the previous state & zero output, and which values should get
# a calculated state & output.
flat_new_output = [
_copy_one_through(zero_output, new_output)
for zero_output, new_output in zip(flat_zero_output, flat_new_output)]
flat_new_state = [
_copy_one_through(state, new_state)
for state, new_state in zip(flat_state, flat_new_state)]
return flat_new_output + flat_new_state
def _maybe_copy_some_through():
"""Run RNN step. Pass through either no or some past state."""
new_output, new_state = call_cell()
nest.assert_same_structure(state, new_state)
flat_new_state = nest.flatten(new_state)
flat_new_output = nest.flatten(new_output)
return control_flow_ops.cond(
# if t < min_seq_len: calculate and return everything
time < min_sequence_length, lambda: flat_new_output + flat_new_state,
# else copy some of it through
lambda: _copy_some_through(flat_new_output, flat_new_state))
# TODO(ebrevdo): skipping these conditionals may cause a slowdown,
# but benefits from removing cond() and its gradient. We should
# profile with and without this switch here.
if skip_conditionals:
# Instead of using conditionals, perform the selective copy at all time
# steps. This is faster when max_seq_len is equal to the number of unrolls
# (which is typical for dynamic_rnn).
new_output, new_state = call_cell()
nest.assert_same_structure(state, new_state)
new_state = nest.flatten(new_state)
new_output = nest.flatten(new_output)
final_output_and_state = _copy_some_through(new_output, new_state)
else:
empty_update = lambda: flat_zero_output + flat_state
final_output_and_state = control_flow_ops.cond(
# if t >= max_seq_len: copy all state throug
|
ondrejkajinek/pyGrim
|
example/server.py
|
Python
|
mit
| 1,308
| 0.000765
|
# coding: utf8
from pygrim import Server as WebServer
from routes import Routes
from test_iface import Test
from uwsgidecorators import postfork as postfork_decorator
# from pygrim.components.session import FileSessionStorage
# to create custom session handler, view, etc:
"""
class MySessionClass(SessionStorage):
pass
from pygrim import register_session_handler
register_session_handler("myhandler", MySessionClass)
"""
inheritance = (
WebServer,
Test,
Routes
)
def __init__(self, *args, **kwargs):
WebServer.__init__(self, *args, **kwargs)
def postfork(self):
# for all interfaces call postfork to ensure all will be called
for cls in inheritance:
pfork = getattr(cls, "postfork", None)
if pfork:
pfork(self)
# Dynamically creating the type.
# It allows me to do the trick with inheritance in postfork without
# using inspect
Server = type("Server", inheritance, {
"__init__": __init__,
"postfork": postfork
})
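# Equivalently (sketch), the dynamic type above could have been written as
#   class Server(WebServer, Test, Routes):
#       ...  # same __init__ and postfork
# building it via type() just keeps the `inheritance` tuple as the single
# source of truth for both the bases and the postfork loop.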
# naming instance of Server as application
# can be seen in the config file in section uwsgi->module=server:application
# server is filename and application is method (uwsgi will do __call__ on
# this object on every request)
application = Server()
@postfork_decorator
def postfork():
application.do_postfork()
|
betoesquivel/PLYpractice
|
testingParser.py
|
Python
|
mit
| 1,412
| 0.003541
|
import parser
import logging
def test(code):
log = logging.getLogger()
parser.parser.parse(code, tracking=True)
print "Programa con 1 var y 1 asignacion bien: "
s = "program id; var beto: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con 1 var mal: "
s = "program ; var beto: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa sin vars bien: "
s = "program id; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; var beto int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; var beto: int { id = 1234; }"
test(s);
print "Original: \n{0}".format(s)
print "\n"
print "Programa con var mal: "
s = "program id; beto
|
: int; { id = 1234; }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque vacio bien: "
s = "program id; var beto: int; { }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque lleno y estatuto mal: "
s = "program id; var beto: int; { id = 1234; id2 = 12345 }"
test(s)
print "Original: \n{0}".format(s)
print "\n"
print "Programa con bloque lleno y condicion mal: "
s = "program id; var beto: int; { id = 1234; if ( 8 > 3 ) { id3 = 34234; } else { } }"
test(s)
print "\n"
print "Original: \n{0}".format(s)
|
masiqi/douquan
|
member/urls.py
|
Python
|
mit
| 478
| 0.002092
|
from django.conf.urls.defaults import *
urlpatterns = patterns('member.views',
url(r'^$', 'login', name='passport_index'),
url(r'^register/$', 'register', name='passport_register'),
url(r'^login/$', 'login', name='passport_login'),
url(r'^logout/$', 'logout', name='passport_logout'),
url(r'^active/$', 'active', name='passport_active'),
url(r'^forget/$', 'forget', name='passport_forget'),
url(r'^profile/$', 'profile', name='passport_profile'),
)
|
mvaled/sentry
|
tests/sentry/api/validators/sentry_apps/test_image.py
|
Python
|
bsd-3-clause
| 966
| 0
|
from __future__ import absolute_import
from sentry.testutils import TestCase
from .util import invalid_schema
from sentry.api.validators.sentry_apps.schema import validate_component
class TestImageSchemaValidation(TestCase):
def setUp(self):
self.schema = {
"type": "image",
"url": "https://example.com/image.gif",
"alt": "example video",
}
def test_valid_schema(self):
validate_component(self.schema)
@invalid_schema
def test_missing_url(self):
del self.schema["url"]
validate_component(self.schema)
@invalid_schema
def test_invalid_url(self):
self.schema["url"] = "not-a-url"
validate_component(self.schema)
def test_missing_alt(self):
del self.schema["alt"]
validate_component(self.schema)
@invalid_schema
def test_invalid_alt_type(self):
self.schema["alt"] = 1
validate_component(self.schema)
|
sashakames/COG
|
cog/forms/forms_project.py
|
Python
|
bsd-3-clause
| 9,756
| 0.005638
|
from cog.models import *
from django.forms import ModelForm, ModelMultipleChoiceField, NullBooleanSelect
from django.db import models
from django.contrib.admin.widgets import FilteredSelectMultiple
from django import forms
from django.forms import ModelForm, Textarea, TextInput, Select, SelectMultiple, FileInput, CheckboxSelectMultiple
from django.core.exceptions import ObjectDoesNotExist
from os.path import basename
import re
from cog.utils import *
from django.db.models import Q
from cog.forms.forms_image import ImageForm
from cog.utils import hasText
#note parent and peer formatting is in forms_other.py
class ProjectForm(ModelForm):
# define the widget for parent/peer selection so we can set the styling. The class is set to .selectfilter and its
# styles are controlled in cogstyle.css
parents = forms.ModelMultipleChoiceField("parents", required=False,
widget=forms.SelectMultiple(attrs={'size': '20',
'class': 'selectprojects'}))
peers = forms.ModelMultipleChoiceField("peers", required=False,
widget=forms.SelectMultiple(attrs={'size': '20',
'class': 'selectprojects'}))
# filtering of what is seen in the form is done down below.
# ERROR: FilteredSelectMultiple does not exist in the module but choosing widget=SelectMultiple throws an error.
# FilteredSelectMultiple throws an error in IE.
# extra field not present in model, used for deletion of previously uploaded logo
delete_logo = forms.BooleanField(required=False)
# specify size of logo_url text field
logo_url = forms.CharField(required=False, widget=TextInput(attrs={'size': '80'}))
# extra fields to manage folder state
#folders = ModelMultipleChoiceField(queryset=Folder.objects.all(), required=False, widget=CheckboxSelectMultiple)
# override __init__ method to change the querysets for 'parent' and 'peers'
def __init__(self, *args, **kwargs):
super(ProjectForm, self).__init__(*args, **kwargs)
current_site = Site.objects.get_current()
queryset2 = Q(site__id=current_site.id) | Q(site__peersite__enabled=True)
if 'instance' in kwargs:
# peer and parent query-set options: exclude the project itself, projects from disabled peer nodes
instance = kwargs.get('instance')
queryset1 = ~Q(id=instance.id)
self.fields['parents'].queryset = \
Project.objects.filter(queryset1).filter(queryset2).distinct().\
extra(select={'snl': 'lower(short_name)'}, order_by=['snl'])
self.fields['peers'].queryset = \
Project.objects.filter(queryset1).filter(queryset2).distinct().\
extra(select={'snl': 'lower(short_name)'}, order_by=['snl'])
else:
# peer and parent query-set options: exclude projects from disabled peer nodes
self.fields['parents'].queryset = \
Project.objects.filter(queryset2).distinct().extra(select={'snl': 'lower(short_name)'},
order_by=['snl'])
self.fields['peers'].queryset = \
Project.objects.filter(queryset2).distinct().extra(select={'snl': 'lower(short_name)'},
order_by=['snl'])
# overridden validation method for project short name
def clean_short_name(self):
short_name = self.cleaned_data['short_name']
# must not start with any of the URL matching patterns
if short_name in ('admin', 'project', 'news', 'post', 'doc', 'signal'):
raise forms.ValidationError("Sorry, '%s' "
"is a reserved URL keyword - it cannot be used as project short name"
% short_name)
# only allows letters, numbers, '-' and '_'
if re.search("[^a-zA-Z0-9_\-]", short_name):
raise forms.ValidationError("Project short name contains invalid characters")
# do not allow new projects to have the same short name as existing ones, regardless to case
if self.instance.id is None: # new projects only
try:
p = Project.objects.get(short_name__iexact=short_name)
raise forms.ValidationError("The new project short name conflicts with an existing project: %s"
% p.short_name)
except Project.DoesNotExist:
pass
return short_name
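# e.g. "my-project_2" passes both checks above, while "admin" (reserved) or
# "my project!" (invalid characters) would raise a ValidationError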
def clean_long_name(self):
long_name = self.cleaned_data['long_name']
# do not allow quotation characters in long name (causes problems in browser widget)
if '\"' in long_name:
raise forms.ValidationError("Quotation characters are not allowed in project long name")
# check for non-ascii characters
try:
long_name.decode('ascii')
except (UnicodeDecodeError, UnicodeEncodeError):
raise forms.ValidationError("Project long name contains invalid non-ASCII characters")
return long_name
class Meta:
model = Project
fields = ('short_name', 'long_name', 'author', 'description',
'parents', 'peers', 'logo', 'logo_url', 'active', 'private', 'shared',
'dataSearchEnabled', 'nodesWidgetEnabled',
'site', 'maxUploadSize')
class ContactusForm(ModelForm):
# overridden validation method for project short name
def clean_projectContacts(self):
value = self.cleaned_data['projectContacts']
if not hasText(value):
raise forms.ValidationError("Project Contacts cannot be empty")
return value
class Meta:
model = Project
fields = ('projectContacts', 'technicalSupport', 'meetingSupport', 'getInvolved')
widgets = {'projectContacts': Textarea(attrs={'rows': 4}),
'technicalSupport': Textarea(attrs={'rows': 4}),
'meetingSupport': Textarea(attrs={'rows': 4}),
'getInvolved': Textarea(attrs={'rows': 4}), }
class DevelopmentOverviewForm(ModelForm):
class Meta:
model = Project
widgets = {'developmentOverview': Textarea(attrs={'rows': 8})}
fields = ('developmentOverview',)
class SoftwareForm(ModelForm):
class Meta:
model = Project
widgets = {'software_features': Textarea(attrs={'rows': 8}),
'system_requirements': Textarea(attrs={'rows': 8}),
'license': Textarea(attrs={'rows': 1}),
'implementationLanguage': Textarea(attrs={'rows': 1}),
'bindingLanguage': Textarea(attrs={'rows': 1}),
'supportedPlatforms': Textarea(attrs={'rows': 8}),
'externalDependencies': Textarea(attrs={'rows': 8}),
}
fields = ('software_features', 'system_requirements', 'license',
'implementationLanguage', 'bindingLanguage', 'supportedPlatforms', 'externalDependencies')
def clean(self):
features = self.cleaned_data.get('software_features')
if not hasText(features):
self._errors["software_features"] = self.error_class(["'SoftwareFeatures' must not be empty."])
print 'error'
return self.cleaned_data
class UsersForm(ModelForm):
class Meta:
model = Project
widgets = {'getting_started': Textarea(attrs={'rows': 12}), }
fields = ('getting_started', )
class ProjectTagForm(ModelForm):
# since this is the base form, we don't have access to the project's specific tags. The form is initialized in the
# form constructor in views_project.py
# field['tags'] is the list of preexisting tags
tags = forms.ModelMultipleChoiceField("tags", required=False,
|
nisavid/bedframe
|
bedframe/auth/http/_basic/_connectors.py
|
Python
|
lgpl-3.0
| 1,567
| 0.000638
|
"""Connectors"""
__copyright__ = "Copyright (C) 2014 Ivan D Vasin"
__docformat__ = "restructuredtext"
import abc as _abc
import re as _re
from ... import plain as _plain
from .. import _std as _std_http
_BASIC_USER_TOKENS = ('user', 'password')
class HttpBasicClerk(_std_http.HttpStandardClerk):
"""An authentication clerk for HTTP Basic authentication"""
__metaclass__ = _abc.ABCMeta
_BASIC_USER_TOKENS = _BASIC_USER_TOKENS
def _inputs(self, upstream_affordances, downstream_affordances):
return ((),)
def _append_response_auth_challenge(self, realm, input=None,
affordances=None):
self._append_response_auth_challenge_header('Basic realm="{}"'
.format(realm))
def _outputs(self, upstream_affordances, downstream_affordances):
return (_BASIC_USER_TOKENS,)
def _provisionsets(self, upstream_affordances, downstream_affordances):
return (_plain.PlainAuth.PROVISIONS,)
class HttpBasicScanner(_std_http.HttpStandardScanner):
"""An authentication scanner for HTTP Basic authentication"""
__metaclass__ = _abc.ABCMeta
_AUTHORIZATION_HEADER_RE = \
_re.compile(r'\s*Basic\s*(?P<creds_base64>[^\s]*)')
_BASIC_USER_TOKENS = _BASIC_USER_TOKENS
def _outputs(self, upstream_affordances, downstream_affordances):
return (self._BASIC_USER_TOKENS,)
def _provisionsets(self, upstream_affordances, downstream_affordances):
return (_plain.PlainAuth.PROVISIONS,)
|
pyfa-org/Pyfa
|
gui/builtinContextMenus/droneSplitStack.py
|
Python
|
gpl-3.0
| 3,081
| 0.002597
|
import re
# noinspection PyPackageRequirements
import wx
import gui.fitCommands as cmd
import gui.mainFrame
from gui.contextMenu import ContextMenuSingle
from service.fit import Fit
_t = wx.GetTranslation
class DroneSplitStack(ContextMenuSingle):
def __init__(self):
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
def display(self, callingWindow, srcContext, mainItem):
if srcContext != "droneItem":
return False
if mainItem is None:
return False
return mainItem.amount > 1
def getText(self, callingWindow, itmContext, mainItem):
return _t("Split {} Stack").format(itmContext)
def activate(self, callingWindow, fullContext, mainItem, i):
with DroneStackSplit(self.mainFrame, mainItem.amount) as dlg:
if dlg.ShowModal() == wx.ID_OK:
if dlg.input.GetLineText(0).strip() == '':
return
fitID = self.mainFrame.getActiveFit()
fit = Fit.getInstance().getFit(fitID)
cleanInput = re.sub(r'[^0-9.]', '', dlg.input.GetLineText(0).strip())
if mainItem in fit.drones:
position = fit.drones.index(mainItem)
self.mainFrame.command.Submit(cmd.GuiSplitLocalDroneStackCommand(
fitID=fitID, position=position, amount=int(cleanInput)))
DroneSplitStack.register()
class DroneStackSplit(wx.Dialog):
def __init__(self, parent, value):
super().__init__(parent, title="Split Drone Stack", style=wx.DEFAULT_DIALOG_STYLE)
self.SetMinSize((346, 156))
bSizer1 = wx.BoxSizer(wx.VERTICAL)
bSizer2 = wx.BoxSizer(wx.VERTICAL)
text = wx.StaticText(self, wx.ID_ANY, "New Amount:")
bSizer2.Add(text, 0)
bSizer1.Add(bSizer2, 0, wx.ALL, 10)
self.input = wx.TextCtrl(self, wx.ID_ANY, style=wx.TE_PROCESS_ENTER)
self.input.SetValue(str(value))
self.input.SelectAll()
bSizer1.Add(self.input, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 15)
bSizer3 = wx.BoxSizer(wx.VERTICAL)
bSizer3.Add(wx.StaticLine(self, wx.ID_ANY), 0, wx.BOTTOM | wx.EXPAND, 15)
bSizer3.Add(self.CreateStdDialogButtonSizer(wx.OK | wx.CANCEL), 0, wx.EXPAND)
bSizer1.Add(bSizer3, 0, wx.ALL | wx.EXPAND, 10)
self.input.SetFocus()
self.input.Bind(wx.EVT_CHAR, self.onChar)
self.input.Bind(wx.EVT_TEXT_ENTER, self.processEnter)
self.SetSizer(bSizer1)
self.CenterOnParent()
self.Fit()
def processEnter(self, evt):
self.EndModal(wx.ID_OK)
# checks to make sure it's a valid number
@staticmethod
def onChar(event):
key = event.GetKeyCode()
acceptable_characters = "1234567890"
acceptable_keycode = [3, 22, 13, 8, 127] # modifiers like delete, copy, paste
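# keycode notes (assumption, standard ASCII control codes): 3 = Ctrl+C,
# 22 = Ctrl+V, 13 = Enter, 8 = Backspace, 127 = Delete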
if key in acceptable_keycode or key >= 255 or (key < 255 and chr(key) in acceptable_characters):
event.Skip()
return
else:
return False
|
c22n/ion-channel-ABC
|
docs/examples/hl1/data/ikr/data_ikr.py
|
Python
|
gpl-3.0
| 6,863
| 0.009617
|
import numpy as np
### Digitised data for HL-1 i_Kr channel.
# I-V curves.
def IV_Toyoda():
"""Data points in IV curve for i_Kr.
Data from Figure 1E from Toyoda 2010. Reported as mean \pm SEM from
10 cells.
"""
x = [-80, -70, -60, -50, -40, -30, -20, -10, 0, 10, 20, 30, 40]
y = np.asarray([0.015652640252213246, 0.017761353533590096, 0.12218171120781562,
0.26069293359766377, 1.0809824000541504, 4.458125770428815,
9.301137273329594, 14.007824367613809, 16.191748422695483,
15.307494653370496, 11.423322889562002, 6.891346595664643,
4.0639525877947165])
N = 10
sem = np.asarray([0.2201587783000427, 0.22230654182737197, 0.32668784925564154,
0.6016015807539574, 1.6605271002198556, 5.753539579376788,
11.142043967973596, 16.666872765186966, 19.498562300111555,
18.989347092904445, 14.321046390390517, 8.561798966730493,
5.666184179183357])
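# the digitised curve apparently stores (mean - SEM) points: subtracting
# from y and taking abs recovers SEM, and SD = SEM * sqrt(N) for N cells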
sem = np.abs(y-sem)
sd = np.sqrt(N) * sem
return x, y.tolist(), sd.tolist()
### Activation curves.
def Act_Toyoda():
"""Data points from activation curve for i_Kr.
Data from Figure 2B in Toyoda 2010. Reported as mean \pm SEM for
10 cells.
"""
x = [-80, -70, -60, -50, -40, -30, -20, -10, 0, 10, 20, 30, 40, 50]
y = np.asarray([9.784726905999186E-4, 0.002037953817909388, 0.006032853017018169,
0.01100804532103461, 0.0649050517407257, 0.24600350793837167,
0.5190802174666932, 0.7735641254593133, 0.9331361824637671,
0.9860547161928584, 1.0057060886487157, 1.0018732063230271,
0.9970627615138931, 0.9991016255389565])
N = 10
sem = np.asarray([0.008805344008242733, 0.007907879754353253, 0.010926126677170744,
0.016877971257478475, 0.08251847037867321, 0.2802500521093595,
0.5631114885436777, 0.8058537242491018, 0.952704726068606,
0.9929031148199019, 1.0135338701735128, 1.0087225151572248,
1.005869925936444, 1.0069294070637538])
sem = np.abs(y-sem)
sd = np.sqrt(N) * sem
return x, y.tolist(), sd.tolist()
### Activation kinetics.
def ActKin_Toyoda():
"""Data points for activation time constants for i_Kr.
Data from Figure 3C in Toyoda 2010. Reported as mean \pm SEM for
10 cells.
"""
x = [-30, -20, -10, 0, 10, 20, 30, 40]
y = np.asarray([457.75075987841944, 259.87841945288756, 184.1945288753799,
116.71732522796344, 77.50759878419456, 48.32826747720367,
49.24012158054711, 36.474164133738554])
N = 10
sem = np.asarray([350.15197568389056, 234.34650455927044, 164.13373860182367,
99.39209726443767, 64.741641337386, 39.209726443769,
40.121580547112444, 27.355623100303887])
sem = np.abs(y-sem)
sd = np.sqrt(N) * sem
return x, y.tolist(), sd.tolist()
### Deactivation kinetics.
def DeactKinFast_Toyoda():
"""Data points for fast deactivation time constant for i_Kr.
Data from Figure 3C in Toyoda 2010. Reported as mean \pm SEM for
10 cells.
"""
x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30]
y = np.asarray([4.430675707271462, 6.125789104512478, 7.817005689346161,
3.1291403631829553, 13.027043878107747, 15.63011456628476,
30.095082222741894, 45.460213545320016, 64.47665809367948,
69.81918790429427])
N = 10
sem = np.asarray([15.372924947393017, 17.068038344634147, 18.759254929467716,
14.979346894240507, 23.05743901488586, 26.572363806406315,
40.121580547112444, 56.40246278544157, 79.0663237471748,
88.0562699711636])
sem = np.abs(y-sem)
sd = np.sqrt(N) * sem
return x, y.tolist(), sd.tolist()
def DeactKinSlow_Toyoda():
"""Data points for slow deactivation time constant for i_Kr.
Data from Figure 3C in Toyoda 2010. Reported as mean \pm SEM for
10 cells.
"""
x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30]
y = np.asarray([55.49840230691302, 57.18961889174648, 60.700646870859714,
47.80999142701285, 55.88418673525064, 86.75473462707498,
246.20060790273556, 438.46933208635335, 551.4067492790898,
565.8678201231394])
N = 10
sem = np.asarray([45.46411035772735, 47.15922375496848, 44.291169823084715,
30.48086665107951, 35.819499649286854, 61.22281973345798,
165.04169589275978, 310.8097576182683, 474.81490141064614,
467.3875769620451])
sem = np.abs(y-sem)
sd = np.sqrt(N) * sem
return x, y.tolist(), sd.tolist()
def DeactKinRelAmp_Toyoda():
"""Data points for relative amplitude of fast to slow component.
Data from Figure 3D in Toyoda 2010. Reported as mean \pm SEM for 10 cells.
"""
x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30]
y = np.asarray([0.9269882659713168, 0.908735332464146, 0.8370273794002607,
0.7131681877444589, 0.7001303780964798, 0.7561929595827901,
0.740547588005215, 0.7053455019556714, 0.6453715775749673,
0.5567144719687093])
N = 10
sem = np.asarray([0.9113428943937418, 0.894393741851369, 0.8161668839634941,
0.681877444589309, 0.620599739243807, 0.741851368970013,
0.7014341590612776, 0.6792698826597132, 0.6062581486310299,
0.49543676662320724])
sem = np.abs(y-sem)
sd = np.sqrt(N) * sem
return x, y.tolist(), sd.tolist()
### Inactivation.
def Inact_Toyoda():
"""Data points for steady state inactivation.
Data from Figure 6B in Toyoda 2010. Reported for a single cell.
"""
x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30, -20, -10, 0, 10]
y = [1.0021276595744677, 0.9654255319148934, 0.9670212765957444,
0.9877659574468083, 0.9382978723404253, 0.8393617021276594,
0.7452127659574466, 0.5920212765957444, 0.4835106382978722,
0.3829787234042552, 0.27606382978723376, 0.17074468085106376,
0.13563829787234027, 0.0494680851063829]
return x, y, None
def InactKin_Toyoda():
"""Data points for inactivation kinetics.
Data from Figure 5B in Toyoda 2010.
"""
x = [-120, -110, -100, -90, -80, -70, -60, -50, -40, -30, -20, -10]
y = np.asarray([1.0140488976949733, 1.0888568124153863, 1.235334725970283,
1.310268157326135, 2.318292256520138, 3.972601511578908,
5.626910766637675, 6.599037108096718, 8.253346363155487,
9.692269071804478, 12.136328997032429, 10.59573781367952])
N = 4
sem = np.asarray([0.7710716681472896, 0.8342368149101667, 1.0050945482894313,
1.0683322153126547, 2.0360788295229995, 3.5638631542687254,
5.3285711695705, 6.037891836031692, 7.630364233007597,
8.770418985804161, 10.29823957068006, 9.736787715067898])
sem = np.abs(y-sem)
sd = np.sqrt(N) * sem
return x, y.tolist(), sd.tolist()
|
jackemoore/cfclient-gps-2-ebx-io
|
lib/cflib/crazyflie/ablock.py
|
Python
|
gpl-2.0
| 17,198
| 0.005815
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Crazyflie ablock is used to receive characters sent using ablock
from the firmware.
"""
__author__ = 'Bitcraze AB'
__all__ = ['Ablock']
import time
import struct
from cflib.utils.callbacks import Caller
from cflib.crtp.crtpstack import CRTPPacket, CRTPPort
from PyQt4.QtCore import pyqtSlot, pyqtSignal
import urllib2
import datetime
import math
class Ablock:
"""
Crazyflie ablock is used to receive characters sent using ablock
from the firmware.
"""
receivedChar = Caller()
def __init__(self, crazyflie):
"""
Initialize the ablock and register it to receive data from the copter.
"""
self.cf = crazyflie
self.cf.add_port_callback(CRTPPort.ABLOCK, self.incoming)
self.init_gps = 0
self.active = False
self.taccs = 60.0
self.lat = 33.767440
self.lon = -117.500734
self.altmsl = 382.0
self.gh = -32.5
self.pacc = 150.0
def incoming(self, packet):
"""
Callback for data received from the firmware
<S> Begin AssistNow input
<n> Ready for Next Line input
<r> Repeat Last Line input
<R> Repeat Last Block input
<E> End of AssistNow input
<G> Go Ahead output
<E> End of Data output
<X> Abort AssistNow output
"""
# This might be done prettier ;-)
ablock_text = "%s" % struct.unpack("%is" % len(packet.data),
packet.data)
self.receivedChar.call(ablock_text)
# size = len(ablock_text)
# print ("insize %d" % size)
print ("indata %s" % ablock_text)
"""Begin AssistNow Transfers"""
if ablock_text == "<S>\n":
if self.init_gps == 0 :
self.receivedChar.call("Reset Gps\n")
self.rst_hex(False)
if self.init_gps == 1 :
self.receivedChar.call("Nav_Pvt Enable\n")
self.pvt_hex(False)
if self.init_gps == 2 :
self.receivedChar.call("NMEA Disable\n")
self.ebx_hex(False)
if self.init_gps == 3 :
self.receivedChar.call("Time_UTC\n")
self.utc_hex(self.taccs, False)
if self.init_gps == 4 :
self.receivedChar.call("Pos_LLH\n")
self.llh_hex(self.lat, self.lon, self.altmsl, self.gh, self.pacc, False)
if self.init_gps == 5 :
self.receivedChar.call("Alm\n")
msg = "B5620611020008002191"
self.fileFormat(msg, False)
self.sv_alm_hex(True)
if self.init_gps > 5 :
if self.init_gps == 99 :
self.receivedChar.call("AssistNow Aborted\n")
else:
self.receivedChar.call("Finished\n")
self.outgoing("<E>\n")
self.init_gps = 0
else:
self.init_gps += 1
self.loadlines()
self.endBlock = True
self.lineNbr = -1
self.active = True
self.outgoing("<G>\n")
elif (self.active):
if (ablock_text == "<n>\n"):
self.lineNbr +=1
line = self.lines[self.lineNbr]
if (self.endBlock):
self.blockNbr = self.lineNbr
self.endBlock = False
size = len(line)
if (size == 0):
self.active = False
self.outgoing("<E>\n")
self.receivedChar.call("EOM\n")
elif (size > 29):
self.active = False
self.init_gps = 99
self.outgoing("<X>\n")
self.receivedChar.call("EOM\n")
elif ((line[size-2] == "<") and (line[size-1] == ">")):
self.endBlock = True
self.outgoing("%s\n" % line)
else:
print ("line %d" %self.lineNbr)
self.outgoing("%s\n" % line)
elif (ablock_text == "<r>\n"):
print "<r>\n"
line = self.lines[self.lineNbr]
self.outgoing("%s\n" % line)
elif (ablock_text == "<R>\n"):
print "<R>\n"
self.endBlock = False
self.lineNbr = self.blockNbr
line = self.lines[self.lineNbr]
self.outgoing("%s\n" % line)
elif (ablock_text == "<X>\n"):
print "<X>\n"
self.active = False
self.receivedChar.call("EOM\n")
self.init_gps = 99
elif (ablock_text == "<E>\n"):
self.active = False
self.receivedChar.call("EOM\n")
else:
self.active = False
self.init_gps = 99
self.outgoing("<X>\n")
def loadlines(self):
with open("a-block.txt", "r") as mfile:
data = mfile.read()
mfile.closed
self.lines = data.splitlines()
self.lines.append("");
def outgoing(self, p):
time.sleep(100.0 / 1000.0)
pk = CRTPPacket()
pk.port = CRTPPort.ABLOCK
pk.data = p
self.cf.send_packet(pk)
def putFile(self, data, add):
if add :
with open("a-block.txt", "a") as mfile:
mfile.write(data)
mfile.closed
else:
with open("a-block.txt", "w") as mfile:
mfile.write(data)
mfile.closed
def fileFormat(self, data, add):
block = ""
lineLen = 28
dataLen = len(data)
nbrFull = dataLen / lineLen
lenLast = dataLen % lineLen
if lenLast > lineLen - 2 :
nbrFull -= 1
iData = 0
while nbrFull > 0 :
i = 0
while i < lineLen :
block = block + data[iData]
iData += 1
i += 1
block = block + "\n"
nbrFull -= 1
lenNext = 0
if lenLast > lineLen - 2 :
lenNext = lineLen - 2
i = 0
while i < lenNext :
block = block + data[iData]
iData += 1
i += 1
if lenNext > 0 :
block = block + "\n"
lenLast -= lenNext
i = 0
while i < lenLast :
block = block + data[iData]
iData += 1
i += 1
block = block + "<>\n"
print len(block)
self.putFile(block, add)
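# sketch: a 40-char payload yields one full 28-char line plus a 12-char
# remainder, terminated by "<>" so the receiver can detect the block end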
def get_int_len(self, val, base):
value = val
if value < 0:
value = - value
l = 1
while value > base - 1:
l += 1
value /= base
return l
def itoa(self, decimal, base, p
|
OmniaGM/spark-training
|
quiz/quiz1/quiz.py
|
Python
|
mit
| 628
| 0.007962
|
#!/usr/bin/env python
old_new_salaries = [
# (old_salary, new_salary)
(2401, 2507), (2172, 2883), (2463, 2867), (2462, 3325), (2949, 2974),
(2713, 3109), (2778, 3771), (2596, 3045), (2819, 2848), (2974, 3322),
(2539, 2790), (2440, 3051), (2526, 3240), (2869, 3635), (2341, 2495),
(2197, 2897), (2706, 2782), (2712, 3056), (2666, 2959), (2149, 2377)
]
def is_high_raise(r):
return r > 500
raises = map(lambda ss: ss[1] - ss[0], old_new_salaries)
high_raises = filter(is_high_raise, raises)
total_high_raises = reduce(lambda a,b: a + b, high_raises)
print "total high raises: %s" % total_high_raises
|
1kastner/analyse_weather_data
|
interpolation/interpolator/prepare/neural_network_single_group_filtered.py
|
Python
|
agpl-3.0
| 3,132
| 0.002554
|
"""
prepare prediction:
filtered pws -> filtered pws
Uses:
PROCESSED_DATA_DIR/neural_networks/training_data_filtered.csv
"""
import os
import random
import logging
import platform
import pandas
from filter_weather_data.filters import StationRepository
from filter_weather_data import get_repository_parameters
from filter_weather_data import RepositoryParameter
from filter_weather_data import PROCESSED_DATA_DIR
from interpolation.interpolator.prepare.neural_network_single_group import load_eddh
from interpolation.interpolator.prepare.neural_network_single_group import fill_missing_eddh_values
if platform.uname()[1].startswith("ccblade"): # the output files can turn several gigabyte so better not store them
# on a network drive
PROCESSED_DATA_DIR = "/export/scratch/1kastner"
def join_to_big_vector(output_csv_file, station_dicts, eddh_df):
"""
:param station_dicts: The stations to use
:param output_csv_file: Where to save the joined data to
:return:
"""
joined_stations = []
while len(station_dicts):
station_dict = station_dicts.pop()
logging.debug("work on %s" % station_dict["name"])
station_df = station_dict["data_frame"]
for attribute in station_df.columns:
if attribute not in ["temperature", "humidity", "dewpoint"]:
station_df.drop(attribute, axis=1, inplace=True)
position = station_dict["meta_d
|
ata"]["position"]
station_df['lat'] = position["lat"]
station_df['lon'] = position["lon"]
joined_stations.append(station_df.join(eddh_df, how="left"))
common_df = pandas.concat(joined_stations)
common_df.sort_index(inplace=True)
common_df = fill_missing_eddh_values(common_df)
common_df.to_csv(output_csv_file)
def run():
start_date = "2016-01-01T00:00"
end_date = "2016-12-31T23:59"
eddh_df = load_eddh(start_date, end_date)
station_repository = StationRepository(*get_repository_parameters(
RepositoryParameter.ONLY_OUTDOOR_AND_SHADED
))
station_dicts = station_repository.load_all_stations(
start_date,
end_date,
# limit=5, # for testing purposes
limit_to_temperature=False
)
random.shuffle(station_dicts)
split_point = int(len(station_dicts) * .7)
training_dicts, evaluation_dicts = station_dicts[:split_point], station_dicts[split_point:]
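# 70/30 split over stations (not over time), so evaluation stations are
# never seen during training and the metric reflects spatial generalisation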
logging.info("training stations: %s" % [station["name"] for station in training_dicts])
logging.info("evaluation stations: %s" % [station["name"] for station in evaluation_dicts])
training_csv_file = os.path.join(
PROCESSED_DATA_DIR,
"neural_networks",
"training_data_filtered.csv"
)
join_to_big_vector(training_csv_file, training_dicts, eddh_df)
evaluation_csv_file = os.path.join(
PROCESSED_DATA_DIR,
"neural_networks",
"evaluation_data_filtered.csv"
)
join_to_big_vector(evaluation_csv_file, evaluation_dicts, eddh_df)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
run()
|
gljohn/meterd
|
meterd/parser/currentcost.py
|
Python
|
gpl-3.0
| 1,012
| 0.003953
|
''' Module '''
import re
import logging
class CurrentCost:
''' Class '''
def __init__(self, data=None, logger=None):
''' Method '''
self._data = data
self.logger = logger or logging.getLogger(__name__)
self.time = None
self.uid = None
self.value = None
def parse_data(self):
''' Method '''
try:
# regex patterns via http://www.marcus-povey.co.uk
uidregex = re.compile('<id>([0-9]+)</id>')
valueregex = re.compile('<watts>([0-9]+)</watts>')
timeregex = re.compile('<time>([0-9\.\:]+)</time>')
self.value = str(int(valueregex.findall(self._data)[0]))
self.time = timeregex.findall(self._data)[0]
self.uid = uidregex.findall(self._data)[0]
self.logger.info('Parsed data successfully!')
except Exception:
self.logger.error('Could not get details from device',
exc_info=True)
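# Usage sketch (XML payload is illustrative of the CurrentCost meter format):
#   cc = CurrentCost('<msg><time>12:34:56</time><id>02723</id><watts>00345</watts></msg>')
#   cc.parse_data()
#   # cc.uid == '02723', cc.time == '12:34:56', cc.value == '345'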
|
openhatch/new-mini-tasks
|
vendor/packages/Django/tests/regressiontests/admin_scripts/complex_app/models/foo.py
|
Python
|
apache-2.0
| 148
| 0.006757
|
from django.db import models
class Foo(models.Model):
name = models.CharField(max_length=5)
class Meta:
app_label = 'complex_app'
|
|
lahwaacz/qutebrowser
|
scripts/dev/misc_checks.py
|
Python
|
gpl-3.0
| 5,504
| 0.000182
|
#!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Various small code checkers."""
import os
import re
import sys
import os.path
import argparse
import subprocess
import tokenize
import traceback
import collections
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir))
from scripts import utils
def _get_files(only_py=False):
"""Iterate over all python files and yield filenames."""
for (dirpath, _dirnames, filenames) in os.walk('.'):
parts = dirpath.split(os.sep)
if len(parts) >= 2:
rootdir = parts[1]
if rootdir.startswith('.') or rootdir == 'htmlcov':
# ignore hidden dirs and htmlcov
continue
if only_py:
endings = {'.py'}
else:
endings = {'.py', '.asciidoc', '.js', '.feature'}
files = (e for e in filenames if os.path.splitext(e)[1] in endings)
for name in files:
yield os.path.join(dirpath, name)
def check_git():
"""Check for uncommitted git files.."""
if not os.path.isdir(".git"):
print("No .git dir, ignoring")
print()
return False
untracked = []
gitst = subprocess.check_output(['git', 'status', '--porcelain'])
gitst = gitst.decode('UTF-8').strip()
for line in gitst.splitlines():
s, name = line.split(maxsplit=1)
if s == '??' and name != '.venv/':
untracked.append(name)
status = True
if untracked:
status = False
utils.print_col("Untracked files:", 'red')
print('\n'.join(untracked))
print()
return status
def check_spelling():
"""Check commonly misspelled words."""
# Words which I often misspell
words = {'[Bb]ehaviour', '[Qq]uitted', '[Ll]ikelyhood', '[Ss]ucessfully',
'[Oo]ccur[^rs .]', '[Ss]eperator', '[Ee]xplicitely',
'[Aa]uxillary', '[Aa]ccidentaly', '[Aa]mbigious', '[Ll]oosly',
'[Ii]nitialis', '[Cc]onvienence', '[Ss]imiliar', '[Uu]ncommited',
'[Rr]eproducable', '[Aa]n [Uu]ser', '[Cc]onvienience',
'[Ww]ether', '[Pp]rogramatically', '[Ss]plitted', '[Ee]xitted',
'[Mm]ininum', '[Rr]esett?ed', '[Rr]ecieved', '[Rr]egularily',
'[Uu]nderlaying', '[Ii]nexistant', '[Ee]lipsis', 'commiting',
'existant', '[Rr]esetted'}
# Words which look better when split, but might need some fine tuning.
words |= {'[Ww]ebelements', '[Mm]ouseevent', '[Kk]eysequence',
'[Nn]ormalmode', '[Ee]ventloops', '[Ss]izehint',
'[Ss]tatemachine', '[Mm]etaobject', '[Ll]ogrecord',
'[Ff]iletype'}
# Files which should be ignored, e.g. because they come from another
# package
ignored = [
os.path.join('.', 'scripts', 'dev', 'misc_checks.py'),
os.path.join('.', 'qutebrowser', '3rdparty', 'pdfjs'),
os.path.join('.', 'tests', 'end2end', 'data', 'hints', 'ace',
'ace.js'),
]
seen = collections.defaultdict(list)
try:
ok = True
for fn in _get_files():
with tokenize.open(fn) as f:
if any(fn.startswith(i) for i in ignored):
continue
for line in f:
for w in words:
if (re.search(w, line) and
fn not in seen[w] and
'# pragma: no spellcheck' not in line):
print('Found "{}" in {}!'.format(w, fn))
seen[w].append(fn)
ok = False
print()
return ok
except Exception:
traceback.print_exc()
return None
def check_vcs_conflict():
"""Check VCS conflict markers."""
try:
ok = True
for fn in _get_files(only_py=True):
with tokenize.open(fn) as f:
for line in f:
if any(line.startswith(c * 7) for c in '<>=|'):
print("Found conflict marker
|
in {}".format(fn))
ok = False
print()
return ok
except Exception:
traceback.print_exc()
return None
def main():
parser = argparse.ArgumentParser()
parser.add_argument('checker', choices=('git', 'vcs', 'spelling'),
help="Which checker to run.")
args = parser.parse_args()
if args.checker == 'git':
ok = check_git()
elif args.checker == 'vcs':
ok = check_vcs_conflict()
elif args.checker == 'spelling':
ok = check_spelling()
return 0 if ok else 1
if __name__ == '__main__':
sys.exit(main())
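# Usage sketch:
#   python3 scripts/dev/misc_checks.py spelling
# the exit status is 0 only when the selected checker reports no problems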
|
fanglinfang/myuw
|
myuw/test/dao/canvas.py
|
Python
|
apache-2.0
| 1,890
| 0
|
from django.test import TestCase
from django.test.client import RequestFactory
from myuw.dao.canvas import get_indexed_data_for_regid
from myuw.dao.canvas import get_indexed_by_decrosslisted
from myuw.dao.schedule import _get_schedule
from myuw.dao.term import get_current_quarter
FDAO_SWS = 'restclients.dao_implementation.sws.File'
class TestCanvas(TestCase):
def test_crosslinks(self):
with self.settings(RESTCLIENTS_SWS_DAO_CLASS=FDAO_SWS):
data = get_indexed_data_for_regid(
"12345678901234567890123456789012")
physics = data['2013,spring,PHYS,121/A']
self.assertEquals(physics.course_url,
'https://canvas.uw.edu/courses/149650')
has_section_a = '2013,spring,TRAIN,100/A' in data
self.assertFalse(has_section_a)
train = data['2013,spring,TRAIN,100/B']
self.assertEquals(train.course_url,
'https://canvas.uw.edu/courses/249650')
def test_crosslinks_lookup(self):
with self.settings(RESTCLIENTS_SWS_DAO_CLASS=FDAO_SWS):
data = get_indexed_data_for_regid(
"12345678901234567890123456789012")
now_request = RequestFactory().get("/")
now_request.session = {}
term = get_current_quarter(now_request)
schedule = _get_schedule("12345678901234567890123456789012", term)
canvas_data_by_course_id = get_indexed_by_decrosslisted(
data, schedule.sections)
physics = data['2013,spring,PHYS,121/A']
self.assertEquals(physics.course_url,
'https://canvas.uw.edu/courses/149650')
train = data['2013,spring,TRAIN,100/A']
self.assertEquals(train.course_url,
'https://canvas.uw.edu/courses/249650')
|
adereis/avocado
|
avocado/utils/archive.py
|
Python
|
gpl-2.0
| 7,118
| 0.00014
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: Red Hat Inc. 2014
# Author: Ruda Moura <rmoura@redhat.com>
"""
Module to help extract and create compressed archives.
"""
import logging
import os
import platform
import stat
import tarfile
import zipfile
LOG = logging.getLogger(__name__)
try:
import lzma
LZMA_CAPABLE = True
except ImportError:
LZMA_CAPABLE = False
class ArchiveException(Exception):
"""
Base exception for all archive errors.
"""
pass
class _WrapLZMA(object):
""" wraps tar.xz for python 2.7's tarfile """
def __init__(self, filename, mode):
"""
Creates an instance of :class:`ArchiveFile`.
:param filename: the archive file name.
:param mode: file mode, `r` read, `w` write.
"""
self._engine = tarfile.open(fileobj=lzma.LZMAFile(filename, mode),
mode=mode)
methods = dir(self._engine)
for meth in dir(self):
try:
methods.remove(meth)
except ValueError:
pass
for method in methods:
setattr(self, method, getattr(self._engine, method))
@classmethod
def open(cls, filename, mode='r'):
"""
Creates an instance of :class:`_WrapLZMA`.
:param filename: the archive file name.
:param mode: file mode, `r` read, `w` write.
"""
return cls(filename, mode)
class ArchiveFile(object):
"""
Class that represents an Archive file.
Archives are ZIP files or Tarballs.
"""
# extension info: is_zip, is_tar, zipfile|tarfile, +mode
_extension_table = {
'.zip': (True, False, zipfile.ZipFile, ''),
'.tar': (False, True, tarfile.open, ''),
'.tar.gz': (False, True, tarfile.open, ':gz'),
'.tgz': (False, True, tarfile.open, ':gz'),
'.tar.bz2': (False, True, tarfile.open, ':bz2'),
'.tbz2': (False, True, tarfile.open, ':bz2')}
if LZMA_CAPABLE:
_extension_table['.xz'] = (False, True, _WrapLZMA.open, '')
def __init__(self, filename, mode='r'):
"""
Creates an instance of :class:`ArchiveFile`.
:param filename: the archive file name.
:param mode: file mode, `r` read, `w` write.
"""
self.filename = filename
self.mode = mode
engine = None
for ext in ArchiveFile._extension_table:
if filename.endswith(ext):
(self.is_zip,
self.is_tar,
engine,
extra_mode) = ArchiveFile._extension_table[ext]
if engine is not None:
self.mode += extra_mode
self._engine = engine(self.filename, self.mode)
else:
raise ArchiveException('file is not an archive')
def __repr__(self):
return "ArchiveFile('%s', '%s')" % (self.filename, self.mode)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
if self._engine is not None:
self.close()
@classmethod
def open(cls, filename, mode='r'):
"""
Creates an instance of :class:`ArchiveFile`.
:param filename: the archive file name.
:param mode: file mode, `r` read, `w` write.
"""
return cls(filename, mode)
def add(self, filename, arcname=None):
"""
Add file to the archive.
:param filename: file to archive.
:param arcname: alternative name for the file in the archive.
"""
if self.is_zip:
self._engine.write(filename, arcname, zipfile.ZIP_DEFLATED)
else:
self._engine.add(filename, arcname)
def list(self):
"""
List files to the standard output.
"""
if self.is_zip:
self._engine.printdir()
else:
self._engine.list()
def extract(self, path='.'):
"""
Extract all files from the archive.
:param path: destination path.
"""
self._engine.extractall(path)
if self.is_zip:
self._update_zip_extra_attrs(path)
def _update_zip_extra_attrs(self, dst_dir):
if platform.system() != "Linux":
LOG.warn("Attr handling in zip files only supported on Linux.")
return
# Walk all files and re-create files as symlinks
for path, info in self._engine.NameToInfo.iteritems():
dst = os.path.join(dst_dir, path)
if not os.path.exists(dst):
LOG.warn("One or more files in the ZIP archive '%s' could "
"not be found after extraction. Their paths are "
"probably stored in unsupported format and their "
"attributes are not going to be updated",
self.filename)
return
attr = info.external_attr >> 16
if attr & stat.S_IFLNK == stat.S_IFLNK:
dst = os.path.join(dst_dir, path)
src = open(dst, 'r').read()
os.remove(dst)
os.symlink(src, dst)
continue # Don't override any other attributes on links
mode = attr & 511 # Mask only permissions
if mode and mode != 436:  # If mode is stored and is not default
os.chmod(dst, mode)
def close(self):
"""
Close archive.
"""
self._engine.close()
def is_archive(filename):
"""
Test if a given file is an archive.
:param filename: file to test.
:return: `True` if it is an archive.
"""
return zipfile.is_zipfile(filename) or tarfile.is_tarfile(filename)
def compress(filename, path):
"""
Compress files in an archive.
:param filename: archive file name.
:param path: origin directory path to files to compress. No
individual files allowed.
"""
with ArchiveFile.open(filename, 'w') as x:
if os.path.isdir(path):
for root, _, files in os.walk(path):
for name in files:
newroot = root.replace(path, '')
x.add(os.path.join(root, name),
os.path.join(newroot, name))
elif os.path.isfile(path):
x.add(path, os.path.basename(path))
def uncompress(filename, path):
"""
Extract files from an archive.
:param filename: archive file name.
:param path: destination path to extract to.
"""
with ArchiveFile.open(filename) as x:
x.extract(path)
# Some aliases
create = compress
extract = uncompress
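# Usage sketch (paths are illustrative):
#   compress('logs.tar.gz', '/var/log/myapp')   # pack a directory tree
#   uncompress('logs.tar.gz', '/tmp/restore')   # extract it elsewhere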
|
PXke/invenio
|
invenio/legacy/bibcatalog/templates.py
|
Python
|
gpl-2.0
| 3,365
| 0.009807
|
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio BibCatalog HTML generator."""
from invenio.legacy.bibcatalog.api import bibcatalog_system
from invenio.base.i18n import wash_language, gettext_set_language
from invenio.config import CFG_SITE_LANG
from invenio.legacy.webstyle.templates import Template as DefaultTemplate
class Template(DefaultTemplate):
""" HTML generators for BibCatalog """
SHOW_MAX_TICKETS = 25
def tmpl_your_tickets(self, uid, ln=CFG_SITE_LANG, start=1):
""" make a pretty html body of tickets that belong to the user given as param """
ln = wash_language(ln)
_ = gettext_set_language(ln)
if bibcatalog_system is None:
return _("Error: No BibCatalog system configured.")
#errors? tell what happened and get out
bibcat_probs = bibcatalog_system.check_system(uid)
if bibcat_probs:
return _("Error")+" "+bibcat_probs
tickets = bibcatalog_system.ticket_search(uid, owner=uid) #get ticket id's
lines = "" #put result here
i = 1
lines += (_("You have %(x_num)i tickets.", x_num=len(tickets))) + "<br/>"
#make a prev link if needed
if (start > 1):
newstart = start - self.SHOW_MAX_TICKETS
if (newstart < 1):
newstart = 1
lines += '<a href="/yourtickets/display?start='+str(newstart)+'">'+_("Previous")+'</a>'
lines += """<table border="1">"""
lastshown = len(tickets) #what was the number of the last shown ticket?
for ticket in tickets:
#get info and show only for those that within the show range
if (i >= start) and (i < start+self.SHOW_MAX_TICKETS):
ticket_info = bibcatalog_system.ticket_get_info(uid, ticket)
subject = ticket_info['subject']
status = ticket_info['status']
text = ""
if 'text' in ticket_info:
text = ticket_info['text']
display = '<a href="'+ticket_info['url_display']+'">'+_("show")+'</a>'
close = '<a href="'+ticket_info['url_close']+'">'+_("close")+'</a>'
lines += "<tr><td>"+str(ticket)+"</td><td>"+subject+" "+text+"</td><td>"+status+"</td><td>"+display+"</td><td>"+close+"</td></tr>\n"
lastshown = i
i = i+1
lines += "</table>"
#make next link if needed
if (len(tickets) > lastshown):
newstart = lastshown+1
lines += '<a href="/yourtickets/display?start='+str(newstart)+'">'+_("Next")+'</a>'
return lines
|
Cue/greplin-twisted-utils
|
src/greplin/net/dnsCache.py
|
Python
|
apache-2.0
| 2,900
| 0.01069
|
# Copyright 2011 The greplin-twisted-utils Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DNS resolver that uses a short lived local cache to improve performance."""
from greplin.defer import lazymap
from twisted.internet import defer, interfaces
from twisted.python.failure import Failure
from zope.interface import implements
import collections
import time
class CachingDNS(object):
"""DNS resolver that uses a short lived
|
local cache to improve performance."""
implements(interfaces.IResolverSimple)
def __init__(self, original, timeout = 60, useFallback = True):
self._original = original
self._timeout = timeout
self._fallback = {} if useFallback else None
self._cache = lazymap.DeferredMap(self.__fetchHost)
self._stats = {
'miss': collections.defaultdict(int),
'hit': collections.defaultdict(int),
'error': collections.defaultdict(int),
'fallback': collections.defaultdict(int),
}
def __fetchHost(self, args):
"""Actually fetches the host name."""
return self._original.getHostByName(*args).addCallback(lambda x: (x, time.time()))
def __fallback(self, err, key):
"""Returns the fallback for the given key."""
try:
result = self._fallback[key]
self._stats['fallback'][str(key)] += 1
return result
except KeyError:
self._stats['error'][str(key)] += 1
return err
def getStats(self):
"""Gets stats about hits / misses / failures."""
return self._stats
def getHostByName(self, name, *args):
"""Gets a host by name."""
key = (name,) + args
if key in self._cache:
# If we failed last time, try again
if isinstance(self._cache[key], Failure):
del self._cache[key]
# Check for a cache hit.
elif time.time() >= self._cache[key][1] + self._timeout:
# Ensure the item hasn't expired.
if self._fallback is not None:
self._fallback[key] = self._cache[key][0]
del self._cache[key]
else:
# If the item is in cache and not expired, return it immediately.
self._stats['hit'][str(key)] += 1
return defer.succeed(self._cache[key][0])
# If it wasn't already in the cache, this always returns a deferred.
result = self._cache[key].addCallback(lambda x: x[0]).addErrback(self.__fallback, key)
self._stats['miss'][str(key)] += 1
return result
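# Usage sketch (assumes an existing IResolverSimple as base_resolver):
#   resolver = CachingDNS(base_resolver, timeout=30)
#   d = resolver.getHostByName('example.com')
#   d.addCallback(lambda ip: ...)   # repeat lookups within 30s hit the cache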
|
SRLKilling/sigma-backend
|
data-server/django_app/sigma_core/admin.py
|
Python
|
agpl-3.0
| 3,213
| 0.008092
|
from django.contrib import admin
from django.contrib.auth.models import Group as AuthGroup
from sigma_core.models.user import User
from sigma_core.models.group import Group
from sigma_core.models.group_member import GroupMember
from sigma_core.models.group_field import GroupField
from sigma_core.models.group_field_value import GroupFieldValue
from sigma_core.models.group_invitation import GroupInvitation
from sigma_core.models.participation import Participation
from sigma_core.models.publication import Publication
from sigma_core.models.event import Event
from sigma_core.models.shared_publication import SharedPublication
admin.site.unregister(AuthGroup)
from sigma_core.models.acknowledgment import Acknowledgment
from sigma_core.models.acknowledgment_invitation import AcknowledgmentInvitation
admin.site.register(Acknowledgment)
admin.site.register(AcknowledgmentInvitation)
admin.site.register(GroupMember)
#admin.site.register(GroupInvitation)
#admin.site.register(SharedPublication)
#admin.site.register(Participation)
admin.site.register(GroupField)
admin.site.register(GroupFieldValue)
class ParticipationInline(admin.TabularInline):
model = Participation
extra = 0
class EventAdmin(admin.ModelAdmin):
list_display = ['name', 'date_start', 'date_end', 'place_name']
list_filter = ['date_start', 'date_end']
search_fields = ['name', 'place_name']
inlines = [ParticipationInline]
admin.site.register(Event, EventAdmin)
class SharedInline(admin.TabularInline):
model = SharedPublication
extra = 0
class PublicationAdmin(admin.ModelAdmin):
inlines = [SharedInline]
list_display = ['title', 'group', 'author', 'related_event', 'internal']
list_filter = ['group', 'author', 'internal']
admin.site.register(Publication, PublicationAdmin)
class GroupsInline(admin.TabularInline):
model = GroupMember
extra = 0
class InvitationsInline(admin.TabularInline):
model = GroupInvitation
extra = 0
class UserAdmin(admin.ModelAdmin):
list_display = ['firstname', 'lastname', 'email', 'is_active', 'is_superuser']
list_filter = ['is_active', 'is_superuser']
search_fields = ['firstname', 'lastname', 'email']
inlines = [GroupsInline, InvitationsInline]
admin.site.register(User, UserAdmin)
class MembersInline(admin.TabularInline):
model = GroupMember
extra = 0
class ParentsInline(admin.TabularInline):
model = Acknowledgment
extra = 0
fk_name = "acknowledged"
class ChildrenInline(admin.TabularInline):
model = Acknowledgment
extra = 0
fk_name = "acknowledged_by"
class GroupAdmin(admin.ModelAdmin):
list_display = ['name', 'is_protected', 'can_anyone_ask', 'need_validation_to_join', 'members_visibility', 'group_visibility']
list_filter = ['is_protected', 'can_anyone_ask', 'need_validation_to_join']
search_fields = ['name', 'description']
inlines = [MembersInline, InvitationsInline, ParentsInline, ChildrenInline]
admin.site.register(Group, GroupAdmin)
from sigma_core.models.tag import Tag
from sigma_core.models.like import Like
from sigma_core.models.comment import Comment
admin.site.register(Tag)
admin.site.register(Like)
admin.site.register(Comment)
|
boris-savic/python-furs-fiscal
|
demos/invoice_demo.py
|
Python
|
mit
| 1,878
| 0.004792
|
import pytz
from datetime import datetime
from decimal import Decimal
from furs_fiscal.api import FURSInvoiceAPI
# Path to our .p12 cert file
P12_CERT_PATH = 'demo_podjetje.p12'
# Password for out .p12 cert file
P12_CERT_PASS = 'Geslo123#'
class InvoiceDemo():
def demo_zoi(self):
"""
Obtaining Invoice ZOI - Protective Mark of the Invoice Issuer
Our Invoice Number on the Receipt is:
11/BP101/B1
Where:
* 11 - Invoice Number
* BP101 - Business premise ID
* B1 - Electronic Register ID
"""
# First we'll need to initialize FURSInvoice APi - so that it loads all the certs
api = FURSInvoiceAPI(p12_path=P12_CERT_PATH,
p12_password=P12_CERT_PASS,
production=False,
request_timeout=1.0)
date_issued = datetime.now(tz=pytz.UTC)
# let's get that ZOI
zoi = api.calculate_zoi(tax_number=10039856, # Issuer Tax Number
issued_date=date_issued, # DateTime of the Invoice
invoice_number='11', # Invoice Number - Sequential
business_premise_id='BP101', # Business premise ID
electronic_device_id='B1', # Electronic Device ID
invoice_amount=Decimal('19.15')) # Invoice Amount
print("ZOI: " + zoi)
# Let's obtain data for Code128/QR/PDF417 that should be placed at the bottom of the Invoice
print_data = api.prepare_printable(tax_number=10039856,
zoi=zoi,
issued_date=date_issued)
print("QR/Code128/PDF417 Data: " + print_data)
if __name__ == "__main__":
demo = InvoiceDemo()
demo.demo_zoi()
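# Illustrative breakdown of the invoice identifier format described in the
# demo_zoi docstring (variable names here are ours, not part of the API):
#   invoice_id = '11/BP101/B1'
#   number, premise, device = invoice_id.split('/')  # '11', 'BP101', 'B1'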
|
toomanyjoes/mrperfcs386m
|
test/gen.py
|
Python
|
mit
| 26,543
| 0.029951
|
#!/usr/bin/python
import xml.dom.minidom
import sys
from optparse import OptionParser
import random
from hadoop_conf import *
chunk_size = []
def xml_children(node, children_name):
"""return list of node's children nodes with name of children_name"""
return node.getElementsByTagName(children_name)
def xml_text(node):
return node.childNodes[0].nodeValue
def xml_child_text(node, child_name):
"""probably encoded in utf-8, be careful."""
return xml_text(xml_children(node, child_name)[0])
class empty_t:
pass
class hnode_t:
"""HDFS node for a HDFS tree.
5-level hierarchy: rack_group (multiple identical racks), rack, node_group
(multiple identical nodes), node, and disk.
    disk should be initiated with a capacity. Other nodes' capacities are
    calculated by summing up their children's capacities."""
def __init__(self, parent, capacity=None, num=1):
self.parent = parent
self._capacity = capacity
self._num = num
self._children = []
self.used = 0
self.end = None
self.reserved = None
if parent <> None:
self.index_stack = parent.index_stack[:] + [len(parent.children())]
parent.children().append(self)
if parent._capacity <> None:
parent._capacity = None
else:
self.index_stack = []
def clone(self, parent=None):
'''clone a node from self, and append it to parent's children'''
if parent == None:
parent = self.parent
node = hnode_t(parent, self._capacity)
node._children = []
if self._children <> []:
for child in self._children:
#print self, self.parent, self._children
child.clone(node)
#node._children.append(child.clone(node)) ## wrong!!!
node.used = 0
node.reserved = self.reserved
return node
def capacity(self):
if self._capacity <> None:
return self._capacity
else :
assert self._children <> []
self._capacity = 0
for child in self._children:
self._capacity += child.capacity()
return self._capacity
def children(self):
return self._children;
def add_chunk(self):
if self.used >= self.capacity():
            print 'error: node full ' + str(self.index_stack)
self.used += chunk_size
parent = self.parent
if parent != None:
parent.add_chunk()
def name(self):
if len(self.index_stack) == 5: #disk
return 'd_rg%d_%d_ng%d_%d_disk%d' % tuple(self.index_stack)
elif len(self.index_stack) == 4: #node
return 'n_rg%d_%d_ng%d_%d' % tuple(self.index_stack)
elif len(self.index_stack) == 3: #node group template
return 'n_rg%d_%d_ng%d' % tuple(self.index_stack)
elif len(self.index_stack) == 2: #rack
return 'r_rg%d_%d' % tuple(self.index_stack)
elif len(self.index_stack) == 1: #rack_group
return 'rg_rg%d' % tuple(self.index_stack)
else:
            print 'error: request name for unknown node type. (' \
                + str(self.index_stack) + ')'
def dump(self, level=0):
if options.verbose == False:
return
print self.index_stack, self.used, self._capacity, len(self.children())
node = self
if node.children() <> []:
for child in node.children():
child.dump()
def prev_node(self):
if self.index_stack == []:
return None
myindex = self.index_stack[-1]
if myindex == 0:
return self.parent.prev_node()
siblings = self.parent.children()
return siblings[myindex-1]
def global_end(self):
'''global index at the end of a node'''
if self.end <> None:
return self.end
# end should be previous node's end + self.capacity()
prev = self.prev_node()
if prev <> None:
self.end = prev.global_end() + self.capacity()
else:
# Otherwise, this is a first node
self.end = self.capacity()
return self.end
def choose_disk(self):
'''when a node is chosen for replication, it needs to choose a disk to put the data.'''
if self.used >= self.capacity():
return None
disk_id = random.randrange(len(self.children()))
disk = self.children()[disk_id]
if disk.used < disk.capacity():
return disk
else:
return self.choose_disk()
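# Minimal usage sketch (illustrative; capacities are in MB, matching the
# machine_type parser below):
#   root = hnode_t(None)
#   node = hnode_t(root)
#   hnode_t(node, capacity=500*1024)  # disk 0
#   hnode_t(node, capacity=500*1024)  # disk 1
#   assert node.capacity() == 1000*1024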
class machine_type_t:
def __init__(self, mt):
disk = xml_children(mt, u'disk')[0]
self.disk = empty_t()
self.disk.type = str(xml_child_text(disk, u'type'))
self.disk.capacity = int(xml_child_text(disk, u'capa'))*1024 # in MB
self.disk.num = int(xml_child_text(disk, u'num'))
cpu = xml_children(mt, u'cpu')[0]
self.cpu = empty_t()
self.cpu.type = str(xml_child_text(cpu, u'type'))
self.cpu.cores = int(xml_child_text(cpu, u'number_of_cores'))
self.cpu.num = int(xml_child_text(cpu, u'num'))
mem = xml_children(mt, u'mem')[0]
self.mem = empty_t()
self.mem.type = str(xml_child_text(mem, u'type'))
self.mem.capacity = str(xml_child_text(mem, u'capa')) # in MB
# TODO: other parts of machine_type
class topology_t:
def __init__(self, topo_xml):
root = xml.dom.minidom.parse(topo_xml)
self.htree = hnode_t(None)
self.dmt = {} # dict of machine type
topo = root.getElementsByTagName(u"topo")[0]
# populate dict of machine type
list_machine_type = topo.getElementsByTagName(u'machine_type')
for mt_node in list_machine_type:
name = str(xml_child_text(mt_node, u'name'))
self.dmt[name] = machine_type_t(mt_node)
# topology
for rack_group in xml_children(topo, u"rack_group"):
rg_node = hnode_t(self.htree)
# rgname not in use currently. maybe a name-node map is needed.
rg_node.rgname = str(xml_child_text(rack_group, u'name'))
num_rack = len(xml_children(rack_group, u"rack_index"))
self.racks = num_rack
rack_node = hnode_t(rg_node)
# populate the first rack_node
for node_group in xml_children(rack_group, u"compute_node_group"):
ng_node = hnode_t(rack_node)
# machine type and disk
mt_name = str(xml_child_text(node_group, u'machine_type_name'))
mt = self.dmt[mt_name]
ng_node.reserved = mt
num_node = len(xml_children(node_group, u'node_index'))
self.nodes = num_node
node_node = hnode_t(ng_node)
# populate the first node_node
for i in range(mt.disk.num):
disk_node = hnode_t(node_node, mt.disk.capacity)
#self.htree.dump()
# clone other node_nodes
for i in range(num_node-1):
new_node_node = node_node.clone()
#self.htree.dump()
# clone other rack_nodes
for i in range(num_rack-1):
new_rack_node = rack_node.clone()
#self.htree.dump()
self.routers = []
for router in xml_children(topo, u'router'):
rt = empty_t()
rt.connect_to_groups = []
for connect_to_group in xml_children(router, u'connect_to_group'):
rgname = str(xml_child_text(connect_to_group, u'rack_group_name'))
switch = empty_t()
switch.rg = self.find_hnode(tuple([int(rgname[5:])]))
switch.index = int(xml_child_text(connect_to_group, u'switch_index'))
rt.connect_to_groups.append(switch)
rt.name = str(xml_child_text(router, u'name'))
self.routers.append(rt)
self.data_nodes = int(xml_child_text(topo, u'data_nodes'))
self.job_tracker = str(xml_child_text(topo, u'job_tracker'))
topology = xml_children(topo, u'topology')
if len(topology) > 0 :
self.topology = str(xml_text(topology[0]))
else:
self.topology = None
def find_hnode(self, index_stack):
if len(index_stack) > 5:
            print 'Wrong index stack ' + str(index_stack)
return None
node = self.htree
for i in index_stack:
children = node.children()
node = children[i]
return node
def totcl(self, topo_tcl):
f = open(topo_tcl, 'w')
f.write('set int_bw %s\n' % (int_bw))
f.write('set int_latency %s\n' % (int_latency))
num_of_nodes = 0
if self.topology == 'dcell':
# special case, assume everything is symmetric
# take the first ng to get mt (machine type)
# number of nodes in a rack matters,
# number of racks does not matter.
rg = self.htree.children()[0]
racks = len(rg.children())
r = rg.children()[0]
ng = r.children()[0]
nodes = len(ng.children())
mt = ng.reserved
f.write('set cpu_freq %f\n' % (freq_table[mt.cpu.type]))
f.write('set cpu_cores %d\n' % (mt.cpu.cores * mt.cpu.num))
f.write('set rbw %f\n' % (read_bw_table[mt.disk.type]))
f.write('set wbw %f\n' % (write_bw_table[mt.disk.type]))
f.write("\nset num_of_nodes %d\n" % (self.data_nodes))
f.write('setup_2level_dcell %d\n' % (nodes))
f.write('\n')
f.write('set jt
|
antoinecarme/pyaf
|
tests/artificial/transf_None/trend_Lag1Trend/cycle_5/ar_/test_artificial_128_None_Lag1Trend_5__100.py
|
Python
|
bsd-3-clause
| 260
| 0.088462
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 5, transform = "None", sigma = 0.0, exog_count = 100, ar_order = 0);
|
rhyolight/nupic.son
|
app/soc/views/user.py
|
Python
|
apache-2.0
| 4,262
| 0.005397
|
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for the User related pages."""
import os
from django.conf.urls import url as django_url
from soc.logic import accounts
from soc.logic import cleaning
from soc.models.user import User
from soc.views import base
from soc.views.forms import ModelForm
class UserCreateForm(ModelForm):
"""Django form for the User profile.
"""
class Meta:
model = User
fields = ['link_id', 'name']
clean_link_id = cleaning.clean_user_not_exist('link_id')
def templatePath(self):
# TODO: This needs a more generic form.
return 'modules/gsoc/_form.html'
class UserEditForm(ModelForm):
"""Django form to edit a User profile.
"""
class Meta:
model = User
fields = ['name']
def templatePath(self):
# TODO: This needs a more generic form.
return 'modules/gsoc/_form.html'
class CreateUserPage(base.RequestHandler):
"""View for creating the user profile.
"""
def djangoURLPatterns(self):
return [
django_url(r'^user/create$', self, name='create_user'),
]
def checkAccess(self, data, check, mutator):
"""Ensures that the user is logged in and does not have a User profile."""
check.isNotUser()
def templatePath(self):
# TODO: make this specific to the current active program
return 'soc/user/base.html'
def context(self, data, check, mutator):
# TODO: program specific in core module, needs to be avoided
from soc.modules.gsoc.views.forms import GSoCBoundField
form = UserCreateForm(GSoCBoundField, data=data.POST or None)
return {
'base_layout': 'modules/gsoc/base.html',
'app_version': os.environ.get('CURRENT_VERSION_ID', '').split('.')[0],
'page_name': 'Create User profile',
'forms': [form],
}
def post(self, data, check, mutator):
"""Handler for HTTP POST request."""
from soc.modules.gsoc.views.forms import GSoCBoundField
form = UserCreateForm(GSoCBoundField, data=data.POST)
if not form.is_valid():
# TODO(nathaniel): problematic self-call.
return self.get(data, check, mutator)
cleaned_data = form.cleaned_data
norm_account = accounts.normalizeAccount(data.gae_user)
cleaned_data['account'] = norm_account
cleaned_data['account_id'] = data.gae_user.user_id()
form.create(key_name=cleaned_data['link_id'])
return data.redirect.to('edit_user', validated=True)
class EditUserPage(base.RequestHandler):
"""View to edit the user profile."""
def djangoURLPatterns(self):
return [
django_url(r'^user/edit', self, name='edit_user'),
]
def checkAccess(self, data, check, mutator):
check.isUser()
def templatePath(self):
# TODO: make this specific to the current active program
return 'soc/user/base.html'
def context(self, data, check, mutator):
# TODO: program specific in core module
from soc.modules.gsoc.views.forms import GSoCBoundField
form = UserEditForm(
GSoCBoundField, data=data.POST or None, instance=data.user)
return {
'base_layout': 'modules/gsoc/base.html',
'app_version': os.environ.get('CURRENT_VERSION_ID', '').split('.')[0],
'page_name': 'Edit User profile',
'forms': [form],
}
def post(self, data, check, mutator):
"""Handler for HTTP POST request."""
from soc.modules.gsoc.views.forms import GSoCBoundField
form = UserEditForm(
GSoCBoundField, data=data.POST, instance=data.user)
if not form.is_valid():
# TODO(nathaniel): problematic self-call.
return self.get(data, check, mutator)
form.save()
# TODO(nathaniel): redirection to same page.
return data.redirect.to('edit_user', validated=True)
|
f-apolinario/BFTStorageService
|
server/StorageService.py
|
Python
|
mit
| 750
| 0.025333
|
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
import json
import KVSHandler as handler
with open('config.json') as d:
config = json.load(d)
ip = config['ip']
port = int(config['port'])
def write(key, value):
global handler
return handler.write(key,value)
def delete(key):
global handler
return handler.delete(key)
def list():
global handler
return handler.list()
def read(key):
global handler
return handler.read(key)
server = SimpleXMLRPCServer((ip, port), allow_none=True)
print "Listening on port " + str(port) + "..."
server.register_function(write, "write")
server.register_function(delete, "delete")
server.register_function(list, "list")
server.register_function(read, "read")
server.serve_forever()
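# Client-side sketch (illustrative): the service can be exercised with the
# standard xmlrpclib proxy, e.g.
#   proxy = xmlrpclib.ServerProxy('http://%s:%d' % (ip, port), allow_none=True)
#   proxy.write('key', 'value')
#   print proxy.read('key')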
|
plotly/python-api
|
packages/python/plotly/plotly/tests/test_core/test_px/test_px_functions.py
|
Python
|
mit
| 11,286
| 0.00124
|
import plotly.express as px
import plotly.graph_objects as go
from numpy.testing import assert_array_equal
import numpy as np
import pandas as pd
import pytest
def _compare_figures(go_trace, px_fig):
"""Compare a figure created with a go trace and a figure created with
a px function call. Check that all values inside the go Figure are the
same in the px figure (which sets more parameters).
"""
go_fig = go.Figure(go_trace)
go_fig = go_fig.to_plotly_json()
px_fig = px_fig.to_plotly_json()
del go_fig["layout"]["template"]
del px_fig["layout"]["template"]
for key in go_fig["data"][0]:
assert_array_equal(go_fig["data"][0][key], px_fig["data"][0][key])
for key in go_fig["layout"]:
assert go_fig["layout"][key] == px_fig["layout"][key]
def test_pie_like_px():
# Pie
labels = ["Oxygen", "Hydrogen", "Carbon_Dioxide", "Nitrogen"]
values = [4500, 2500, 1053, 500]
fig = px.pie(names=labels, values=values)
trace = go.Pie(labels=labels, values=values)
_compare_figures(trace, fig)
labels = ["Eve", "Cain", "Seth", "Enos", "Noam", "Abel", "Awan", "Enoch", "Azura"]
parents = ["", "Eve", "Eve", "Seth", "Seth", "Eve", "Eve", "Awan", "Eve"]
values = [10, 14, 12, 10, 2, 6, 6, 4, 4]
# Sunburst
fig = px.sunburst(names=labels, parents=parents, values=values)
trace = go.Sunburst(labels=labels, parents=parents, values=values)
_compare_figures(trace, fig)
# Treemap
fig = px.treemap(names=labels, parents=parents, values=values)
trace = go.Treemap(labels=labels, parents=parents, values=values)
_compare_figures(trace, fig)
# Funnel
x = ["A", "B", "C"]
y = [3, 2, 1]
fig = px.funnel(y=y, x=x)
trace = go.Funnel(y=y, x=x)
_compare_figures(trace, fig)
# Funnelarea
fig = px.funnel_area(values=y, names=x)
trace = go.Funnelarea(values=y, labels=x)
_compare_figures(trace, fig)
def test_sunburst_treemap_colorscales():
labels = ["Eve", "Cain", "Seth", "Enos", "Noam", "Abel", "Awan", "Enoch", "Azura"]
parents = ["", "Eve", "Eve", "Seth", "Seth", "Eve", "Eve", "Awan", "Eve"]
values = [10, 14, 12, 10, 2, 6, 6, 4, 4]
for func, colorway in zip(
[px.sunburst, px.treemap], ["sunburstcolorway", "treemapcolorway"]
):
# Continuous colorscale
fig = func(
names=labels,
parents=parents,
values=values,
color=values,
color_continuous_scale="Viridis",
range_color=(5, 15),
)
        assert (fig.layout.coloraxis.cmin, fig.layout.coloraxis.cmax) == (5, 15)
# Discrete colorscale, color arg passed
color_seq = px.colors.sequential.Reds
fig = func(
names=labels,
parents=parents,
values=values,
color=labels,
color_discrete_sequence=color_seq,
)
assert np.all([col in color_seq for col in fig.data[0].marker.colors])
# Numerical color arg passed, fall back to continuous
fig = func(names=labels, parents=parents, values=values, color=values,)
assert [
el[0] == px.colors.sequential.Viridis
for i, el in enumerate(fig.layout.coloraxis.colorscale)
]
# Numerical color arg passed, continuous colorscale
# even if color_discrete_sequence if passed
fig = func(
names=labels,
parents=parents,
values=values,
color=values,
color_discrete_sequence=color_seq,
)
assert [
el[0] == px.colors.sequential.Viridis
for i, el in enumerate(fig.layout.coloraxis.colorscale)
]
# Discrete colorscale, no color arg passed
color_seq = px.colors.sequential.Reds
fig = func(
names=labels,
parents=parents,
values=values,
color_discrete_sequence=color_seq,
)
assert list(fig.layout[colorway]) == color_seq
def test_sunburst_treemap_with_path():
vendors = ["A", "B", "C", "D", "E", "F", "G", "H"]
sectors = [
"Tech",
"Tech",
"Finance",
"Finance",
"Tech",
"Tech",
"Finance",
"Finance",
]
regions = ["North", "North", "North", "North", "South", "South", "South", "South"]
values = [1, 3, 2, 4, 2, 2, 1, 4]
total = ["total",] * 8
df = pd.DataFrame(
dict(
vendors=vendors,
sectors=sectors,
regions=regions,
values=values,
total=total,
)
)
path = ["total", "regions", "sectors", "vendors"]
# No values
fig = px.sunburst(df, path=path)
assert fig.data[0].branchvalues == "total"
    # Values passed
    fig = px.sunburst(df, path=path, values="values")
    assert fig.data[0].branchvalues == "total"
    assert fig.data[0].values[-1] == np.sum(values)
# Continuous colorscale
fig = px.sunburst(df, path=path, values="values", color="values")
assert "coloraxis" in fig.data[0].marker
assert np.all(np.array(fig.data[0].marker.colors) == np.array(fig.data[0].values))
# Error when values cannot be converted to numerical data type
df["values"] = ["1 000", "3 000", "2", "4", "2", "2", "1 000", "4 000"]
msg = "Column `values` of `df` could not be converted to a numerical data type."
with pytest.raises(ValueError, match=msg):
fig = px.sunburst(df, path=path, values="values")
# path is a mixture of column names and array-like
path = [df.total, "regions", df.sectors, "vendors"]
fig = px.sunburst(df, path=path)
assert fig.data[0].branchvalues == "total"
def test_sunburst_treemap_with_path_and_hover():
df = px.data.tips()
fig = px.sunburst(
df, path=["sex", "day", "time", "smoker"], color="smoker", hover_data=["smoker"]
)
assert "smoker" in fig.data[0].hovertemplate
def test_sunburst_treemap_with_path_color():
vendors = ["A", "B", "C", "D", "E", "F", "G", "H"]
sectors = [
"Tech",
"Tech",
"Finance",
"Finance",
"Tech",
"Tech",
"Finance",
"Finance",
]
regions = ["North", "North", "North", "North", "South", "South", "South", "South"]
values = [1, 3, 2, 4, 2, 2, 1, 4]
calls = [8, 2, 1, 3, 2, 2, 4, 1]
total = ["total",] * 8
df = pd.DataFrame(
dict(
vendors=vendors,
sectors=sectors,
regions=regions,
values=values,
total=total,
calls=calls,
)
)
path = ["total", "regions", "sectors", "vendors"]
fig = px.sunburst(df, path=path, values="values", color="calls")
colors = fig.data[0].marker.colors
assert np.all(np.array(colors[:8]) == np.array(calls))
fig = px.sunburst(df, path=path, color="calls")
colors = fig.data[0].marker.colors
assert np.all(np.array(colors[:8]) == np.array(calls))
# Hover info
df["hover"] = [el.lower() for el in vendors]
fig = px.sunburst(df, path=path, color="calls", hover_data=["hover"])
custom = fig.data[0].customdata
assert np.all(custom[:8, 0] == df["hover"])
assert np.all(custom[8:, 0] == "(?)")
assert np.all(custom[:8, 1] == df["calls"])
# Discrete color
fig = px.sunburst(df, path=path, color="vendors")
assert len(np.unique(fig.data[0].marker.colors)) == 9
# Discrete color and color_discrete_map
cmap = {"Tech": "yellow", "Finance": "magenta", "(?)": "black"}
fig = px.sunburst(df, path=path, color="sectors", color_discrete_map=cmap)
assert np.all(np.in1d(fig.data[0].marker.colors, list(cmap.values())))
# Numerical column in path
df["regions"] = df["regions"].map({"North": 1, "South": 2})
path = ["total", "regions", "sectors", "vendors"]
fig = px.sunburst(df, path=path, values="values", color="calls")
colors = fig.data[0].marker.colors
assert np.all(np.array(c
|
nouiz/pydy
|
examples/Kane1985/Chapter5/Ex9.3.py
|
Python
|
bsd-3-clause
| 3,590
| 0.002237
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.3 from Kane 1985."""
from __future__ import division
from sympy import cos, diff, expand, pi, solve, symbols
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dot, dynamicsymbols
from util import msprint, subs, partial_velocities
from util import generalized_active_forces, potential_energy
g, m, Px, Py, Pz, R, t = symbols('g m Px Py Pz R t')
q = dynamicsymbols('q1:6')
qd = dynamicsymbols('q1:6', level=1)
u = dynamicsymbols('u1:6')
## --- Define ReferenceFrames ---
A = ReferenceFrame('A')
B_prime = A.orientnew('B_prime', 'Axis', [q[0], A.z])
B = B_prime.orientnew('B', 'Axis', [pi/2 - q[1], B_prime.x])
C = B.orientnew('C', 'Axis', [q[2], B.z])
## --- Define Points and their velocities ---
pO = Point('O')
pO.set_vel(A, 0)
# R is the point in plane H that comes into contact with disk C.
pR = pO.locatenew('R', q[3]*A.x + q[4]*A.y)
pR.set_vel(A, pR.pos_from(pO).diff(t, A))
pR.set_vel(B, 0)
# C^ is the point in disk C that comes into contact with plane H.
pC_hat = pR.locatenew('C^', 0)
pC_hat.set_vel(C, 0)
# C* is the point at the center of disk C.
pCs = pC_hat.locatenew('C*', R*B.y)
pCs.set_vel(C, 0)
pCs.set_vel(B, 0)
# calculate velocities in A
pCs.v2pt_theory(pR, A, B)
pC_hat.v2pt_theory(pCs, A, C)
## --- Expressions for generalized speeds u1, u2, u3, u4, u5 ---
u_expr = map(lambda x: dot(C.ang_vel_in(A), x), B)
u_expr += qd[3:]
kde = [u_i - u_ex for u_i, u_ex in zip(u, u_expr)]
kde_map = solve(kde, qd)
## --- Define forces on each point in the system ---
R_C_hat = Px*A.x + Py*A.y + Pz*A.z
R_Cs = -m*g*A.z
forces = [(pC_hat, R_C_hat), (pCs, R_Cs)]
## --- Calculate generalized active forces ---
partials = partial_velocities([pC_hat, pCs], u, A, kde_map)
Fr, _ = generalized_active_forces(partials, forces)
# Impose the condition that disk C is rolling without slipping
u_indep = u[:3]
u_dep = u[3:]
vc = map(lambda x: dot(pC_hat.vel(A), x), [A.x, A.y])
vc_map = solve(subs(vc, kde_map), u_dep)
partials_tilde = partial_velocities([pC_hat, pCs], u_indep, A, kde_map, vc_map)
Fr_tilde, _ = generalized_active_forces(partials_tilde, forces)
Fr_tilde = map(expand, Fr_tilde)
# solve for ∂V/∂qs using 5.1.9
V_gamma = m * g * R * cos(q[1])
print(('\nVerify V_γ = {0} is a potential energy '.format(V_gamma) +
'contribution of γ for C.'))
V_gamma_dot = -sum(fr * ur for fr, ur in
zip(*generalized_active_forces(partials_tilde,
forces[1:])))
if V_gamma_dot == V_gamma.diff(t).subs(kde_map):
print('d/dt(V_γ) == -sum(Fr_γ * ur).')
else:
print('d/dt(V_γ) != -sum(Fr_γ * ur).')
print('d/dt(V_γ) = {0}'.format(msprint(V_gamma.diff(t))))
print('-sum(Fr_γ * ur) = {0}'.format(msprint(V_gamma_dot)))
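# The check above rests on Kane & Levinson eq. 5.1.9: if V is a potential
# energy contribution of a force set, then dV/dt == -sum(Fr * ur) taken
# over the generalized speeds.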
#print('\nFinding a potential energy function V while C is rolling '
# 'without slip.')
#V = potential_energy(Fr_tilde, q, u_indep, kde_map, vc_map)
#if V is not None:
# print('V = {0}'.format(V))
print('\nFinding a potential energy function V while C is rolling with slip.')
V = potential_energy(Fr, q, u, kde_map)
if V is not None:
print('V = {0}'.format(V))
print('\nFinding a potential energy function V while C is rolling with slip '
'without friction.')
V = potential_energy(subs(Fr, {Px: 0, Py: 0}), q, u, kde_map)
if V is not None:
print('Define a2, C as functions of t such that the respective '
'contributing potential terms go to zero.')
print('V = {0}'.format(V.subs(dict(zip(symbols('C α2'), [0, pi/2])))))
|
manfredmacx/django-convo
|
convo/Convo.py
|
Python
|
mit
| 2,261
| 0.037152
|
"""
Util class
"""
from django.forms import ModelForm, CharField, URLField, BooleanField
from django.db import models
from models import Entry
def getForm(user):
""" If no form is passed in to new/edit views, use this one """
class _Form(ModelForm):
class Meta:
model = Entry
fields = ('title', 'body',)
def save(self, force_insert=False, force_update=False, commit=True):
m = super(ModelForm, self).save(commit=False)
import bleach
TAGS = ['b', 'em', 'i', 'strong', 'br', 'li', 'ul', 'ol', 'p', 'span']
m.title = bleach.clean(self.cleaned_data['title'])
            m.body = bleach.clean(self.cleaned_data['body'], tags=TAGS)
if commit:
m.save()
return m
class _AdminForm(ModelForm):
published = BooleanField(required = False, initial = False)
class Meta:
model = Entry
fields = ('title', 'body', 'published')
class _AnonForm(ModelForm):
owner_if_anonymous = CharField(max_length = 150, label="Name")
url_if_anonymous = URLField(max_length=1000, label="URL", required=False)
class Meta:
model = Entry
fields = ('title', 'owner_if_anonymous', 'url_if_anonymous', 'body')
def save(self, force_insert=False, force_update=False, commit=True):
m = super(ModelForm, self).save(commit=False)
import bleach
TAGS = ['b', 'em', 'i', 'strong', 'br', 'li', 'ul', 'ol', 'p', 'span']
m.title = bleach.clean(self.cleaned_data['title'])
m.body = bleach.clean(self.cleaned_data['body'], tags=TAGS)
if commit:
m.save()
return m
if user.is_staff:
return _AdminForm
if user.is_authenticated():
return _Form
return _AnonForm
def getConvo(entry):
s, t = getConvoWithTitle(entry)
return s
def getConvoWithTitle(entry):
""" return list containing a sorted Entry thread """
sorted = []
original = entry.getOriginal()
if original:
if original.published == True:
sorted.append(original)
else:
return None, None
sorted.extend(__sortConvo(Entry.objects.filter(published = True).filter(parent=original)))
return sorted, original.title
def __sortConvo(children):
""" Private function: Sorts a queryset (or list) of Entries """
sorted = []
for c in children:
sorted.append(c)
sorted.extend(__sortConvo(Entry.objects.filter(published = True).filter(parent=c)))
return sorted
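# __sortConvo walks replies depth-first, so a thread shaped like
#   original -> [reply1 -> [reply1a], reply2]
# flattens to [original, reply1, reply1a, reply2] (illustrative).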
|
dragondjf/musicplayertest
|
constant.py
|
Python
|
gpl-2.0
| 1,831
| 0.004369
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Deepin, Inc.
# 2011 Hou Shaohui
#
# Author: Hou Shaohui <houshao55@gmail.com>
# Maintainer: Hou ShaoHui <houshao55@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from nls import _
CONFIG_FILENAME = "config"
PROGRAM_NAME = "deepin-music-player"
PROGRAM_VERSION = "2.0"
PROGRAM_NAME_LONG = _("Deepin Music")
PROGRAM_NAME_SHORT = _("DMusic")
DEFAULT_TIMEOUT = 10
DEFAULT_FONT_SIZE = 9
AUTOSAVE_TIMEOUT = 1000 * 60 * 5 # 5min
# Lyric mode
LRC_DESKTOP_MODE = 1
LRC_WINDOW_MODE = 2
PREDEFINE_COLORS = {
"fresh_green" : ["#e4dcb9", "#ffea93", "#ffd631", "#efede6", "#b3fc9c", "#77d035"],
"playful_pink" : ["#ffffff", "#70b8e5", "#3788c0", "#ffe0ee", "#ffa1ca", "#ff2586"],
"cool_blue" : ["#f8f8f8", "#dadada", "#bdbdbd", "#ffffff", "#60c0ff", "#19a1ff"],
"vitality_yellow" : ["#e4dcb9", "#ffea93", "#ffd631", "#f7f4ea", "#77d1ff", "#4199d5"],
}
FULL_DEFAULT_WIDTH = 886
FULL_DEFAULT_HEIGHT = 625
SIMPLE_DEFAULT_WIDTH = 322
SIMPLE_DEFAULT_HEIGHT = 625
HIDE_PLAYLIST_WIDTH = 210
LIST_WIDTH = 312
CATEGROYLIST_WIDTH = 104
PLAYLIST_WIDTH = 198
TAB_LOCAL = 1
TAB_WEBCAST = 2
TAB_RADIO = 3
EMPTY_WEBCAST_ITEM = 1
EMPTY_RADIO_ITEM = 2
|
TeppieC/M-ords
|
mords_backend/mords_backend/wsgi.py
|
Python
|
mit
| 404
| 0
|
"""
WSGI config for mords_backend project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mords_backend.settings")
application = get_wsgi_application()
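# Deployment sketch (illustrative): any WSGI server can serve this callable,
# e.g.
#   gunicorn mords_backend.wsgi:application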
|
montanier/pandora
|
docs/tutorials/01_src/tutorial_pyPandora.py
|
Python
|
lgpl-3.0
| 2,012
| 0.011928
|
#!/usr/bin/python3
import os, sys, random
pandoraPath = os.getenv('PANDORAPATH', '/usr/local/pandora')
sys.path.append(pandoraPath+'/bin')
sys.path.append(pandoraPath+'/lib')
from pyPandora import Config, World, Agent, SizeInt
class MyAgent(Agent):
gatheredResources = 0
def __init__(self, id):
Agent.__init__( self, id)
        print('constructing agent: ',self.id)
def updateState(self):
print('updating state of: ',self.id)
newPosition = self.position
newPosition._x = newPosition._x + random.randint(-1,1)
newPosition._y = newPosition._y + random.randint(-1,1)
if self.getWorld().checkPosition(newPosition):
self.position = newPosition
self.gatheredResources = self.gatheredResources + self.getWorld().getValue('resources', self.position)
self.getWorld().setValue('resources', self.position, 0)
def registerAttributes(self):
self.registerIntAttribute('resources')
def serialize(self):
print('serializing MyAgent: ',self.id)
self.serializeIntAttribute('resources', self.gatheredResources)
class MyWorld(World):
def __init__(self, config):
World.__init__( self, config)
print('constructing MyWorld')
def createRasters(self):
print('creating rasters')
self.registerDynamicRaster("resources", 1)
self.getDynamicRaster("resources").setInitValues(0, 10, 0)
return
def createAgents(self):
print('creating agents')
for i in range (0, 10):
newAgent = MyAgent('MyAgent_'+str(i))
self.addAgent(newAgent)
newAgent.setRandomPosition()
def main():
print('getting started with pyPandora')
numTimeSteps = 10
worldSize = SizeInt(64,64)
myConfig = Config(worldSize, numTimeSteps)
myWorld = MyWorld(myConfig)
myWorld.initialize()
myWorld.run()
print('simulation finished')
if __name__ == "__main__":
main()
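# Run sketch (illustrative): with pyPandora installed under PANDORAPATH,
#   python3 tutorial_pyPandora.py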
|
svn2github/pyopt
|
pyOpt/pyGCMMA/__init__.py
|
Python
|
gpl-3.0
| 112
| 0.017857
|
#!/usr/bin/env python
try:
from pyGCMMA import GCMMA
__all__ = ['GCMMA']
except:
__all__ = []
#end
|
ingenieroariel/geonode
|
geonode/groups/tests.py
|
Python
|
gpl-3.0
| 22,844
| 0.000175
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.conf import settings
from guardian.shortcuts import get_anonymous_user
from geonode.groups.models import GroupProfile, GroupInvitation, GroupCategory
from geonode.documents.models import Document
from geonode.layers.models import Layer
from geonode.maps.models import Map
from geonode.base.populate_test_data import create_models
from geonode.security.views import _perms_info_json
class SmokeTest(TestCase):
"""
Basic checks to make sure pages load, etc.
"""
fixtures = ['initial_data.json', "group_test_data"]
def setUp(self):
create_models(type='layer')
create_models(type='map')
create_models(type='document')
self.norman = get_user_model().objects.get(username="norman")
self.norman.groups.add(Group.objects.get(name='anonymous'))
self.test_user = get_user_model().objects.get(username='test_user')
self.test_user.groups.add(Group.objects.get(name='anonymous'))
self.bar = GroupProfile.objects.get(slug='bar')
self.anonymous_user = get_anonymous_user()
def test_group_permissions_extend_to_user(self):
"""
Ensures that when a user is in a group, the group permissions
extend to the user.
"""
layer = Layer.objects.all()[0]
# Set the default permissions
layer.set_default_permissions()
# Test that the anonymous user can read
self.assertTrue(
self.anonymous_user.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# Test that the default perms give Norman view permissions but not
# write permissions
self.assertTrue(
self.norman.has_perm(
'view_resourcebase',
layer.get_self_resource()))
self.assertFalse(
self.norman.has_perm(
'change_resourcebase',
layer.get_self_resource()))
# Make sure Norman is not in the bar group.
self.assertFalse(self.bar.user_is_member(self.norman))
# Add norman to the bar group.
self.bar.join(self.norman)
# Ensure Norman is in the bar group.
self.assertTrue(self.bar.user_is_member(self.norman))
# Give the bar group permissions to change the layer.
permissions = {
'groups': {
'bar': [
'view_resourcebase',
'change_resourcebase']}}
layer.set_permissions(permissions)
self.assertTrue(
self.norman.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# check that now norman can change the layer
self.assertTrue(
self.norman.has_perm(
'change_resourcebase',
layer.get_self_resource()))
# Test adding a new user to the group after setting permissions on the layer.
# Make sure Test User is not in the bar group.
self.assertFalse(self.bar.user_is_member(self.test_user))
self.assertFalse(
self.test_user.has_perm(
'change_resourcebase',
layer.get_self_resource()))
self.bar.join(self.test_user)
self.assertTrue(
self.test_user.has_perm(
'change_resourcebase',
layer.get_self_resource()))
def test_group_resource(self):
"""
Tests the resources method on a Group object.
"""
layer = Layer.objects.all()[0]
map = Map.objects.all()[0]
perm_spec = {'groups': {'bar': ['change_resourcebase']}}
# Give the self.bar group write perms on the layer
layer.set_permissions(perm_spec)
map.set_permissions(perm_spec)
# Ensure the layer is returned in the group's resources
self.assertTrue(layer.get_self_resource() in self.bar.resources())
self.assertTrue(map.get_self_resource() in self.bar.resources())
# Test the resource filter
        self.assertTrue(
            layer.get_self_resource() in self.bar.resources(
resource_type='layer'))
self.assertTrue(
map.get_self_resource() not in self.bar.resources(
resource_type='layer'))
# Revoke permissions on the layer from the self.bar group
layer.set_permissions("{}")
# Ensure the layer is no longer returned in the groups resources
self.assertFalse(layer.get_self_resource() in self.bar.resources())
def test_perms_info(self):
"""
Tests the perms_info function (which passes permissions to the response context).
"""
# Add test to test perms being sent to the front end.
layer = Layer.objects.all()[0]
layer.set_default_permissions()
perms_info = layer.get_all_level_info()
# Ensure there is only one group 'anonymous' by default
self.assertEqual(len(perms_info['groups'].keys()), 1)
# Add the foo group to the layer object groups
layer.set_permissions({'groups': {'bar': ['view_resourcebase']}})
perms_info = _perms_info_json(layer)
# Ensure foo is in the perms_info output
self.assertItemsEqual(
json.loads(perms_info)['groups'], {
'bar': ['view_resourcebase']})
def test_resource_permissions(self):
"""
Tests that the client can get and set group permissions through the test_resource_permissions view.
"""
self.assertTrue(self.client.login(username="admin", password="admin"))
layer = Layer.objects.all()[0]
document = Document.objects.all()[0]
map_obj = Map.objects.all()[0]
layer.set_default_permissions()
document.set_default_permissions()
map_obj.set_default_permissions()
objects = layer, document, map_obj
for obj in objects:
response = self.client.get(
reverse(
'resource_permissions',
kwargs=dict(
resource_id=obj.id)))
self.assertEqual(response.status_code, 200)
js = json.loads(response.content)
permissions = js.get('permissions', dict())
if isinstance(permissions, unicode) or isinstance(
permissions, str):
permissions = json.loads(permissions)
# Ensure the groups value is empty by default
expected_permissions = {}
if settings.DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION:
expected_permissions.setdefault(
u'anonymous', []).append(u'download_resourcebase')
if settings.DEFAULT_ANONYMOUS_VIEW_PERMISSION:
expected_permissions.setdefault(
u'anonymous', []).append(u'view_resourcebase')
self.assertItemsEqual(
permissions.get('groups'),
expected_permissions)
permissions = {
'groups': {
'bar': ['change_resourcebase']
},
'users': {
|
alanjw/GreenOpenERP-Win-X86
|
python/Lib/site-packages/_xmlplus/dom/html/HTMLTextAreaElement.py
|
Python
|
agpl-3.0
| 4,989
| 0.005813
|
########################################################################
#
# File Name: HTMLTextAreaElement
#
#
### This file is automatically generated by GenerateHtml.py.
### DO NOT EDIT!
"""
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from xml.dom import Node
from xml.dom.html.HTMLElement import HTMLElement
class HTMLTextAreaElement(HTMLElement):
def __init__(self, ownerDocument, nodeName="TEXTAREA"):
        HTMLElement.__init__(self, ownerDocument, nodeName)
### Attribute Methods ###
def _get_accessKey(self):
        return self.getAttribute("ACCESSKEY")
def _set_accessKey(self, value):
self.setAttribute("ACCESSKEY", value)
def _get_cols(self):
value = self.getAttribute("COLS")
if value:
return int(value)
return 0
def _set_cols(self, value):
self.setAttribute("COLS", str(value))
def _get_defaultValue(self):
if not self.firstChild:
return
if self.firstChild == self.lastChild:
return self.firstChild.data
self.normalize()
text = filter(lambda x: x.nodeType == Node.TEXT_NODE, self.childNodes)
return text[0].data
def _set_defaultValue(self, value):
text = None
for node in self.childNodes:
if not text and node.nodeType == Node.TEXT_NODE:
text = node
else:
self.removeChild(node)
if text:
text.data = value
else:
text = self.ownerDocument.createTextNode(value)
self.appendChild(text)
def _get_disabled(self):
return self.hasAttribute("DISABLED")
def _set_disabled(self, value):
if value:
self.setAttribute("DISABLED", "DISABLED")
else:
self.removeAttribute("DISABLED")
def _get_form(self):
parent = self.parentNode
while parent:
if parent.nodeName == "FORM":
return parent
parent = parent.parentNode
return None
def _get_name(self):
return self.getAttribute("NAME")
def _set_name(self, value):
self.setAttribute("NAME", value)
def _get_readonly(self):
return self.hasAttribute("READONLY")
def _set_readonly(self, value):
if value:
self.setAttribute("READONLY", "READONLY")
else:
self.removeAttribute("READONLY")
def _get_rows(self):
value = self.getAttribute("ROWS")
if value:
return int(value)
return 0
def _set_rows(self, value):
self.setAttribute("ROWS", str(value))
def _get_tabIndex(self):
value = self.getAttribute("TABINDEX")
if value:
return int(value)
return 0
def _set_tabIndex(self, value):
self.setAttribute("TABINDEX", str(value))
def _get_type(self):
return "textarea"
def _get_value(self):
if not self.firstChild:
return
if self.firstChild == self.lastChild:
return self.firstChild.data
self.normalize()
text = filter(lambda x: x.nodeType == Node.TEXT_NODE, self.childNodes)
return text[0].data
def _set_value(self, value):
text = None
for node in self.childNodes:
if not text and node.nodeType == Node.TEXT_NODE:
text = node
else:
self.removeChild(node)
if text:
text.data = value
else:
text = self.ownerDocument.createTextNode(value)
self.appendChild(text)
### Methods ###
def blur(self):
pass
def focus(self):
pass
def select(self):
pass
### Attribute Access Mappings ###
_readComputedAttrs = HTMLElement._readComputedAttrs.copy()
_readComputedAttrs.update({
"accessKey" : _get_accessKey,
"cols" : _get_cols,
"defaultValue" : _get_defaultValue,
"disabled" : _get_disabled,
"form" : _get_form,
"name" : _get_name,
"readonly" : _get_readonly,
"rows" : _get_rows,
"tabIndex" : _get_tabIndex,
"type" : _get_type,
"value" : _get_value
})
_writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
_writeComputedAttrs.update({
"accessKey" : _set_accessKey,
"cols" : _set_cols,
"defaultValue" : _set_defaultValue,
"disabled" : _set_disabled,
"name" : _set_name,
"readonly" : _set_readonly,
"rows" : _set_rows,
"tabIndex" : _set_tabIndex,
"value" : _set_value
})
_readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())
|
gramps-project/addons-source
|
GenealogyTree/gt_ancestor.py
|
Python
|
gpl-2.0
| 6,685
| 0.003141
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2017-2018 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
LaTeX Genealogy Tree ancestor report
"""
#------------------------------------------------------------------------
#
# python modules
#
#------------------------------------------------------------------------
from functools import partial
#------------------------------------------------------------------------
#
# Set up logging
#
#------------------------------------------------------------------------
import logging
LOG = logging.getLogger(".Tree")
#------------------------------------------------------------------------
#
# Gramps module
#
#------------------------------------------------------------------------
from gramps.gen.errors import ReportError
from gramps.gen.plug.report import Report
from gramps.gen.plug.report import MenuReportOptions
from gramps.gen.plug.report import stdoptions
from gramps.gen.plug.menu import PersonOption, NumberOption, BooleanOption
#------------------------------------------------------------------------
#
# Internationalisation
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
try:
_trans = glocale.get_addon_translator(__file__)
except ValueError:
_trans = glocale.translation
_ = _trans.gettext
#------------------------------------------------------------------------
#
# AncestorTree
#
#------------------------------------------------------------------------
class AncestorTree(Report):
""" Ancestor Tree report """
def __init__(self, database, options, user):
"""
Create LaTeX Genealogy Tree ancestor report.
"""
Report.__init__(self, database, options, user)
menu = options.menu
get_option_by_name = menu.get_option_by_name
get_value = lambda name: get_option_by_name(name).get_value()
self._db = self.database
self._pid = get_value('pid')
self.max_generations = menu.get_option_by_name('maxgen').get_value()
self.include_images = menu.get_option_by_name('images').get_value()
self.set_locale(menu.get_option_by_name('trans').get_value())
def write_report(self):
"""
Inherited method; called by report() in _ReportDialog.py
"""
if self._pid:
person = self._db.get_person_from_gramps_id(self._pid)
if person is None:
raise ReportError(_("Person %s is not in the Database") %
self._pid)
family_handle = person.get_main_parents_family_handle()
if family_handle:
options = ['pref code={\\underline{#1}}',
'list separators hang',
'place text={\\newline}{}']
if self.include_images:
images = ('if image defined={'
'add to width=25mm,right=25mm,\n'
'underlay={\\begin{tcbclipinterior}'
'\\path[fill overzoom image=\\gtrDBimage]\n'
'([xshift=-24mm]interior.south east) '
'rectangle (interior.north east);\n'
'\\end{tcbclipinterior}},\n'
'}{},')
box = 'box={halign=left,\\gtrDBsex,%s\n}' % images
else:
box = 'box={halign=left,\\gtrDBsex}'
options.append(box)
self.doc.start_tree(options)
self.write_subgraph(0, 'parent', family_handle, person.handle)
self.doc.end_tree()
def write_subgraph(self, level, subgraph_type, family_handle, ghandle):
if level > self.max_generations:
return
family = self._db.get_family_from_handle(family_handle)
self.doc.start_subgraph(level, subgraph_type, family)
for handle in (family.get_father_handle(), family.get_mother_handle()):
if handle:
parent = self._db.get_person_from_handle(handle)
family_handle = parent.get_main_parents_family_handle()
if family_handle:
self.write_subgraph(level+1, 'parent', family_handle,
handle)
else:
self.doc.write_node(self._db, level+1, 'p', parent, True)
for childref in family.get_child_ref_list():
child = self._db.get_person_from_handle(childref.ref)
if childref.ref == ghandle:
self.doc.write_node(self._db, level+1, 'g', child, True)
else:
self.doc.write_node(self._db, level+1, 'c', child, False)
self.doc.end_subgraph(level)
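    # Output shape (rough sketch; the exact markup comes from self.doc):
    # write_subgraph nests genealogytree-style 'parent' subgraphs and
    # writes 'p' (parent), 'g' (proband) and 'c' (child) nodes per level.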
#------------------------------------------------------------------------
#
# AncestorTreeOptions
#
#------------------------------------------------------------------------
class AncestorTreeOptions(MenuReportOptions):
"""
Defines all of the controls necessary
to configure the Ancestor Tree report.
"""
def __init__(self, name, dbase):
self.__pid = None
MenuReportOptions.__init__(self, name, dbase)
def add_menu_options(self, menu):
category_name = _("Report Options")
self.__pid = PersonOption(_("Center Person"))
self.__pid.set_help(_("The center person for the report"))
menu.add_option(category_name, "pid", self.__pid)
maxgen = NumberOption(_("Generations"), 10, 1, 100)
maxgen.set_help(_("The number of generations to include in the tree"))
menu.add_option(category_name, "maxgen", maxgen)
images = BooleanOption(_("Include images"), False)
images.set_help(_("Include images of people in the nodes."))
menu.add_option(category_name, "images", images)
locale_opt = stdoptions.add_localization_option(menu, category_name)
|
eifuentes/kaggle_whats_cooking
|
train_word2vec_rf.py
|
Python
|
mit
| 1,909
| 0.002095
|
"""
train supervised classifier with what's cooking recipe data
objective - determine recipe type, a categorical value with 20 classes
"""
import time
from features_bow import *
from features_word2vec import *
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.cross_validation import cross_val_score
""" main entry method """
def main(use_idf=False, random_state=None, std=False, n_jobs=-1, verbose=2):
wc_idf_map = None
if use_idf:
# ingredients inverse document frequencies
wc_components = build_tfidf_wc(verbose=(verbose > 0))
wc_idf = wc_components['model'].idf_
wc_idf_words = wc_components['model'].get_feature_names()
wc_idf_map = dict(zip(wc_idf_words, wc_idf))
# word2vec recipe feature vectors
wc_components = build_word2vec_wc(feature_vec_size=120, avg=True, idf=wc_idf_map, verbose=(verbose > 0))
y_train = wc_components['train']['df']['cuisine_code'].as_matrix()
    X_train = wc_components['train']['features_matrix']
# standardize features aka mean ~ 0, std ~ 1
if std:
scaler = StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
    # random forest supervised classifier
time_0 = time.time()
clf = RandomForestClassifier(n_estimators=100, max_depth=None,
n_jobs=n_jobs, random_state=random_state, verbose=verbose)
# perform cross validation
cv_n_fold = 8
print 'cross validating %s ways...' % cv_n_fold
scores_cv = cross_val_score(clf, X_train, y_train, cv=cv_n_fold, n_jobs=-1)
print 'accuracy: %0.5f (+/- %0.5f)' % (scores_cv.mean(), scores_cv.std() * 2)
time_1 = time.time()
elapsed_time = time_1 - time_0
print 'cross validation took %.3f seconds' % elapsed_time
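    # After cross validation one would typically fit on the full training
    # set, e.g. clf.fit(X_train, y_train), before predicting (sketch).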
if __name__ == '__main__':
main()
|
memphis-iis/GLUDB
|
tests/testpkg/module.py
|
Python
|
apache-2.0
| 123
| 0
|
from gludb.simple import DBObject, Field
@DBObject(table_name='TopData')
class TopData(object):
name = Field('name')
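# Usage sketch (illustrative; assumes gludb's simple-API persistence
# helpers on decorated classes):
#   t = TopData(name='example')
#   t.save()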
|
google/ghost-userspace
|
experiments/scripts/shenango.py
|
Python
|
apache-2.0
| 4,128
| 0.009205
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs the RocksDB Shenango experiments.
This script runs the RocksDB Shenango experiments on ghOSt and on CFS. In these
experiments, RocksDB is co-located with an Antagonist. Specifically, the
dispatcher and worker threads are co-located with the Antagonist threads while
the load generator is isolated on its own CPU (to ensure that the load we think
we are generating is the load we are actually generating). For ghOSt, the
Antagonist threads are preempted to allow RocksDB threads to run. For CFS, this
preemption is left to CFS to figure out. Furthermore, for the CFS experiments,
the worker threads sleep on a futex when they do not have work rather than spin
so that CFS gives the Antagonist threads a chance to run.
"""
from typing import Sequence
from absl import app
from experiments.scripts.options import CfsWaitType
from experiments.scripts.options import CheckSchedulers
from experiments.scripts.options import GetAntagonistOptions
from experiments.scripts.options import GetGhostOptions
from experiments.scripts.options import GetRocksDBOptions
from experiments.scripts.options import Scheduler
from experiments.scripts.run import Experiment
from experiments.scripts.run import Run
_NUM_CPUS = 8
_NUM_CFS_WORKERS = _NUM_CPUS - 2
_NUM_GHOST_WORKERS = 11
# Subtract 1 for the Antagonist since the Antagonist does not run a thread on
# the same CPU as the load generator.
_NUM_ANTAGONIST_CPUS = _NUM_CPUS - 1
def RunCfs():
"""Runs the CFS (Linux Completely Fair Scheduler) experiment."""
e: Experiment = Experiment()
# Run throughputs 10000, 20000, 30000, ... 60000.
  e.throughputs = list(i for i in range(10000, 61000, 10000))
# Toward the end, run throughputs 70000, 71000, 72000, ..., 120000.
e.throughputs.extend(list(i for i in range(70000, 121000, 1000)))
e.rocksdb = GetRocksDBOptions(Scheduler.CFS, _NUM_CPUS, _NUM_CFS_WORKERS)
e.rocksdb.cfs_wait_type = CfsWaitType.FUTEX
e.rocksdb.get_exponential_mean = '1us'
e.antagonist = GetAntagonistOptions(Scheduler.CFS, _NUM_ANTAGONIST_CPUS)
e.ghost = None
Run(e)
def RunGhost():
"""Runs the ghOSt experiment."""
e: Experiment = Experiment()
# Run throughputs 10000, 20000, 30000, ..., 380000.
e.throughputs = list(i for i in range(10000, 381000, 10000))
# Toward the end, run throughputs 390000, 391000, 392000, ..., 450000.
e.throughputs.extend(list(i for i in range(390000, 451000, 1000)))
e.rocksdb = GetRocksDBOptions(Scheduler.GHOST, _NUM_CPUS, _NUM_GHOST_WORKERS)
e.rocksdb.get_exponential_mean = '1us'
e.rocksdb.ghost_qos = 2
e.antagonist = GetAntagonistOptions(Scheduler.GHOST, _NUM_ANTAGONIST_CPUS)
e.antagonist.ghost_qos = 1
e.ghost = GetGhostOptions(_NUM_CPUS)
# There is no time-based preemption for Shenango, so set the preemption time
# slice to infinity.
e.ghost.preemption_time_slice = 'inf'
Run(e)
def main(argv: Sequence[str]):
if len(argv) > 3:
raise app.UsageError('Too many command-line arguments.')
elif len(argv) == 1:
raise app.UsageError(
'No experiment specified. Pass `cfs` and/or `ghost` as arguments.')
# First check that all of the command line arguments are valid.
if not CheckSchedulers(argv[1:]):
raise ValueError('Invalid scheduler specified.')
# Run the experiments.
for i in range(1, len(argv)):
scheduler = Scheduler(argv[i])
if scheduler == Scheduler.CFS:
RunCfs()
else:
if scheduler != Scheduler.GHOST:
raise ValueError(f'Unknown scheduler {scheduler}.')
RunGhost()
if __name__ == '__main__':
app.run(main)
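# Run sketch (illustrative), matching the argument handling in main():
#   python shenango.py cfs ghost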
|
Knapsacks/power-pi-v2
|
facebook-messenger-bot/app.py
|
Python
|
mit
| 5,321
| 0.004326
|
# -*- coding: utf-8 -*-
from flask import Flask, request
from fbmq import Page, QuickReply, Attachment, Template
import requests, records, re, json
from flask_restful import Resource, Api
token = '<auth token here>'
metricsData = {}
macid = 111111111111
pg = Page(token)
import time
db = records.Database('mysql://<user>:<password>@<url>:3306/db')
app = Flask(__name__)
api = Api(app)
class deviceMetrics(Resource):
def get(self):
return {"energy": metricsData["energy"], "money_saved": metricsData["savings"], "days": metricsData["days"], "charging_status": "charging"}
@app.route('/')
def index():
# return str(macid)
return '^_^'
def date_handler(obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
raise TypeError
@app.route('/device/dbase')
def dbaser():
data = db.query('select * from client where mac = %s LIMIT 1' % macid)
time.sleep(1)
return json.dumps(data.as_dict(), default=date_handler)
@app.route('/hook', methods=['POST'])
def hook():
db = records.Database('mysql://<user>:<password>@<url>:3306/db')
pg.greeting("Welcome, get started below!")
# pg.show_starting_button("startpay")
pg.handle_webhook(request.get_data(as_text=True))
return 'ok'
@pg.handle_message
def mhandle(event):
sender_id = event.sender_id.decode('utf-8')
pg.typing_on(sender_id)
# print event.message # debugging
quick_replies = [
{'title': 'Charging status', 'payload': 'charge_stat'},
{'title': 'Last saved', 'payload': 'l_saved'},
{'title': 'Total saving', 'payload': 'c_saved'},
]
message = event.message_text
rec = db.query("select * from user where fbid = %s" % sender_id)
time.sleep(.5)
if len(rec.as_dict()) == 0:
user = pg.get_user_profile(sender_id)[u'first_name']
message = ''.join(re.findall('\d+', message))
if (len(str(message)) != 12):
pg.send(sender_id, "Kindly enter your 12 digit MAC ID")
else:
db.query("insert into user values(DEFAULT, %s, %s)" % (sender_id, str(message)))
pg.send(sender_id, "Registration successful!")
else:
pg.send(sender_id, "What do you want to know?", quick_replies=quick_replies)
@pg.callback(['startpay'])
def start_callback(payload, event):
sender_id = event.sender_id.decode('utf-8')
pg.typing_on(sender_id)
db = records.Database('mysql://<user>:<password>@<url>:3306/db')
rec = db.query("select * from user where fbid = %s" % sender_id)
if len(rec.as_dict()) == 0:
user = pg.get_user_profile(sender_id)[u'first_name']
        pg.send(sender_id, "Hey %s, please send me your MAC ID" % user)
else:
pg.typing_on(sender_id)
quick_replies = [
{'title': 'Charging status', 'payload': 'charge_stat'},
            {'title': 'Last saved', 'payload': 'l_saved'},
            {'title': 'Total saving', 'payload': 'c_saved'},
]
pg.send(sender_id, "What do you want to know?", quick_replies=quick_replies)
@pg.callback(['charge_stat', 'l_saved', 'c_saved'])
def doer(payl, event):
global macid
global metricsData
sender_id = event.sender_id
pg.typing_on(sender_id)
quick_replies = [
{'title': 'Charging status', 'payload': 'charge_stat'},
{'title': 'Last saved', 'payload': 'l_saved'},
{'title': 'Total saving', 'payload': 'c_saved'},
]
if payl == 'charge_stat':
pg.send(sender_id, "Charging status: Charging", quick_replies=quick_replies)
elif payl == 'l_saved':
pg.send(sender_id, "Last savings: ₹ 131!", quick_replies=quick_replies)
elif payl == 'c_saved':
macid = db.query("select mac from user where fbid = %s" % sender_id)
macid = macid[0].as_dict()["mac"]
data = db.query('select * from client where mac = %s' % macid)
row = data.as_dict()[::-1]
# fav_rows = {}
fav_rows = []
maxi = 1
start = 0
total_hrs = list()
for r in row:
if (r['status'] == 0):
maxi += 1
if start == 0:
sTime = r['timestamp']
start += 1
else:
eTime = r['timestamp']
# print eTime
else:
if r['strength']>96:
# fav_rows[maxi] = [sTime, eTime, r['strength']]
# fav_rows[maxi] = [sTime, eTime]
fav_rows.append(sTime - eTime)
maxi = 0
start = 0
days = sum([x.days for x in fav_rows])
fav_rows = sum([x.total_seconds()/60 for x in fav_rows])
# total_hrs = sum(total_hrs)
power = .5 # in watt
price = 5 # per KWh
energy = ((fav_rows*power)/1000)*days
# print fav_rows, days
pg.send(sender_id, "You've saved total %d KWh of energy so a total of"\
" %d ₹ of savings in last %d days!" % (energy, energy*price,\
days), quick_replies=quick_replies)
metricsData["energy"] = energy
metricsData["savings"] = energy*price
metricsData["days"] = days
api.add_resource(deviceMetrics, '/device/metrics')
if __name__ == '__main__':
app.run(debug=True, use_reloader=True)
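# Client sketch (assumption: the app is running locally on port 5000, and the
# 'c_saved' quick reply has been triggered at least once so metricsData is
# populated; until then deviceMetrics.get() would raise a KeyError):
#     import requests
#     print(requests.get('http://127.0.0.1:5000/device/metrics').json())
#     # -> {"energy": ..., "money_saved": ..., "days": ..., "charging_status": "charging"}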
|
gustavofonseca/packtools
|
tests/test_schematron_1_3.py
|
Python
|
bsd-2-clause
| 181,171
| 0.000746
|
# coding: utf-8
from __future__ import unicode_literals
import unittest
import io
from lxml import isoschematron, etree
from packtools.catalogs import SCHEMAS
SCH = etree.parse(SCHEMAS['sps-1.3'])
def TestPhase(phase_name, cache):
"""Factory of parsed Schematron phases.
:param phase_name: the phase name
:param cache: mapping type
"""
if phase_name not in cache:
phase = isoschematron.Schematron(SCH, phase=phase_name)
cache[phase_name] = phase
return cache[phase_name]
class PhaseBasedTestCase(unittest.TestCase):
cache = {}
def _run_validation(self, sample):
schematron = TestPhase(self.sch_phase, self.cache)
return schematron.validate(etree.parse(sample))
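# Standalone sketch of the same validation flow outside the TestCase machinery
# (uses only the lxml APIs imported above; the phase name is one defined in the
# sps-1.3 schema, and the sample document is a minimal illustration):
#     phase = isoschematron.Schematron(SCH, phase='phase.journal-id')
#     doc = etree.parse(io.BytesIO(b'<article><front><journal-meta/></front></article>'))
#     phase.validate(doc)   # -> True or False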
class JournalIdTests(PhaseBasedTestCase):
"""Tests for article/front/journal-meta/journal-id elements.
Ticket #14 makes @journal-id-type="publisher-id" mandatory.
Ref: https://github.com/scieloorg/scielo_publishing_schema/issues/14
"""
sch_phase = 'phase.journal-id'
def test_case1(self):
"""
presence(@nlm-ta) is True
presence(@publisher-id) is True
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="nlm-ta">
Rev Saude Publica
</journal-id>
<journal-id journal-id-type="publisher-id">
RSP
</journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
presence(@nlm-ta) is True
presence(@publisher-id) is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="nlm-ta">
Rev Saude Publica
</journal-id>
                      </journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case3(self):
"""
presence(@nlm-ta) is False
presence(@publisher-id) is True
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">
RSP
</journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case4(self):
"""
presence(@nlm-ta) is False
presence(@publisher-id) is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type='doi'>
123.plin
</journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_publisher_id_cannot_be_empty(self):
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id"></journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class JournalTitleGroupTests(PhaseBasedTestCase):
"""Tests for article/front/journal-meta/journal-title-group elements.
"""
sch_phase = 'phase.journal-title-group'
def test_journal_title_group_is_absent(self):
sample = u"""<article>
<front>
<journal-meta>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case1(self):
"""
A: presence(journal-title) is True
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is True
A ^ B is True
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>
Revista de Saude Publica
</journal-title>
<abbrev-journal-title abbrev-type='publisher'>
Rev. Saude Publica
</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
A: presence(journal-title) is True
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is False
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>
Revista de Saude Publica
</journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case3(self):
"""
A: presence(journal-title) is False
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is True
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<abbrev-journal-title abbrev-type='publisher'>
Rev. Saude Publica
</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case4(self):
"""
A: presence(journal-title) is False
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is False
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_journal_title(self):
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title></journal-title>
<abbrev-journal-title abbrev-type='publisher'>Rev. Saude Publica</abbrev-journal-title>
</journal-title-group>
</journal-meta>
|
abhinavjain241/acousticbrainz-server
|
db/test/test_data.py
|
Python
|
gpl-2.0
| 14,021
| 0.003994
|
from db.testing import DatabaseTestCase, TEST_DATA_PATH
import db.exceptions
import db.data
import os.path
import json
import mock
import copy
class DataDBTestCase(DatabaseTestCase):
def setUp(self):
super(DataDBTestCase, self).setUp()
self.test_mbid = "0dad432b-16cc-4bf0-8961-fd31d124b01b"
self.test_lowlevel_data_json = open(os.path.join(TEST_DATA_PATH, self.test_mbid + '.json')).read()
self.test_lowlevel_data = json.loads(self.test_lowlevel_data_json)
@mock.patch("db.data.sanity_check_data")
@mock.patch("db.data.write_low_level")
@mock.patch("db.data.clean_metadata")
def test_submit_low_level_data(self, clean, write, sanity):
"""Submission with valid data"""
clean.side_effect = lambda x: x
sanity.return_value = None
db.data.submit_low_level_data(self.test_mbid, self.test_lowlevel_data)
write.assert_called_with(self.test_mbid, self.test_lowlevel_data)
@mock.patch("db.data.sanity_check_data")
@mock.patch("db.data.write_low_level")
@mock.patch("db.data.clean_metadata")
def test_submit_low_level_data_rewrite_keys(self, clean, write, sanity):
"""submit rewrites trackid -> recordingid, and sets lossless to a boolean"""
clean.side_effect = lambda x: x
sanity.return_value = None
input = {"metadata": {"tags": {"musicbrainz_trackid": [self.test_mbid]}, "audio_properties": {"lossless": 1}}}
output = {"metadata": {"tags": {"musicbrainz_recordingid": [self.test_mbid]}, "audio_properties": {"lossless": True}}}
db.data.submit_low_level_data(self.test_mbid, input)
write.assert_called_with(self.test_mbid, output)
@mock.patch("db.data.sanity_check_data")
@mock.patch("db.data.write_low_level")
@mock.patch("db.data.clean_metadata")
def test_submit_low_level_data_bad_mbid(self, clean, write, sanity):
"""Check that hl write raises an error if the provided mbid is different to what is in the metadata"""
clean.side_effect = lambda x: x
sanity.return_value = None
input = {"metadata": {"tags": {"musicbrainz_recordingid": ["not-the-recording-mbid"]}, "audio_properties": {"lossless": False}}}
with self.assertRaises(db.exceptions.BadDataException):
db.data.submit_low_level_data(self.test_mbid, input)
    @mock.patch("db.data.sanity_check_data")
@mock.patch("db.data.write_low_level")
@mock.patch("d
|
b.data.clean_metadata")
def test_submit_low_level_data_missing_keys(self, clean, write, sanity):
"""Check that hl write raises an error if some required keys are missing"""
clean.side_effect = lambda x: x
sanity.return_value = ["missing", "key"]
with self.assertRaises(db.exceptions.BadDataException):
db.data.submit_low_level_data(self.test_mbid, self.test_lowlevel_data)
def test_write_load_low_level(self):
"""Writing and loading a dict returns the same data"""
one = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
db.data.write_low_level(self.test_mbid, one)
self.assertEqual(one, db.data.load_low_level(self.test_mbid))
def test_load_low_level_offset(self):
"""If two items with the same mbid are added, you can select between them with offset"""
one = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
two = {"data": "two", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
db.data.write_low_level(self.test_mbid, one)
db.data.write_low_level(self.test_mbid, two)
self.assertEqual(one, db.data.load_low_level(self.test_mbid))
self.assertEqual(one, db.data.load_low_level(self.test_mbid, 0))
self.assertEqual(two, db.data.load_low_level(self.test_mbid, 1))
def test_load_low_level_none(self):
"""If no lowlevel data is loaded, or offset is too high, an exception is raised"""
with self.assertRaises(db.exceptions.NoDataFoundException):
db.data.load_low_level(self.test_mbid)
one = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
db.data.write_low_level(self.test_mbid, one)
with self.assertRaises(db.exceptions.NoDataFoundException):
db.data.load_low_level(self.test_mbid, 1)
def _get_ll_id_from_mbid(self, mbid):
with db.engine.connect() as connection:
ret = []
result = connection.execute("select id from lowlevel where mbid = %s", (mbid, ))
for row in result:
ret.append(row[0])
return ret
def test_write_load_high_level(self):
"""Writing and loading a dict returns the same data"""
ll = {"data": "one", "metadata": {"audio_properties": {"lossless": True}, "version": {"essentia_build_sha": "x"}}}
ver = {"hlversion": "123", "models_essentia_git_sha": "v1"}
hl = {"highlevel": {"model1": {"x": "y"}, "model2": {"a": "b"}},
"metadata": {"meta": "here",
"version": {"highlevel": ver}
}
}
db.data.add_model("model1", "v1", "show")
db.data.add_model("model2", "v1", "show")
build_sha = "test"
db.data.write_low_level(self.test_mbid, ll)
ll_id = self._get_ll_id_from_mbid(self.test_mbid)[0]
db.data.write_high_level(self.test_mbid, ll_id, hl, build_sha)
hl_expected = copy.deepcopy(hl)
for mname in ["model1", "model2"]:
hl_expected["highlevel"][mname]["version"] = ver
self.assertEqual(hl_expected, db.data.load_high_level(self.test_mbid))
def test_load_high_level_offset(self):
# If there are two lowlevel items, but only one highlevel, we should raise NoDataFound
second_data = copy.deepcopy(self.test_lowlevel_data)
second_data["metadata"]["tags"]["album"] = ["Another album"]
db.data.write_low_level(self.test_mbid, self.test_lowlevel_data)
db.data.write_low_level(self.test_mbid, second_data)
ll_id1, ll_id2 = self._get_ll_id_from_mbid(self.test_mbid)
db.data.add_model("model1", "v1", "show")
db.data.add_model("model2", "v1", "show")
build_sha = "sha"
ver = {"hlversion": "123", "models_essentia_git_sha": "v1"}
hl1 = {"highlevel": {"model1": {"x": "y"}, "model2": {"a": "b"}},
"metadata": {"meta": "here",
"version": {"highlevel": ver}
}
}
hl2 = {"highlevel": {"model1": {"1": "2"}, "model2": {"3": "3"}},
"metadata": {"meta": "for hl2",
"version": {"highlevel": ver}
}
}
db.data.write_high_level(self.test_mbid, ll_id1, hl1, build_sha)
hl1_expected = copy.deepcopy(hl1)
hl2_expected = copy.deepcopy(hl2)
for mname in ["model1", "model2"]:
hl1_expected["highlevel"][mname]["version"] = ver
hl2_expected["highlevel"][mname]["version"] = ver
# First highlevel item
self.assertEqual(hl1_expected, db.data.load_high_level(self.test_mbid))
self.assertEqual(hl1_expected, db.data.load_high_level(self.test_mbid, offset=0))
# second has a ll, but no hl => exception
with self.assertRaises(db.exceptions.NoDataFoundException):
db.data.load_high_level(self.test_mbid, offset=1)
# after adding the hl, no error
db.data.write_high_level(self.test_mbid, ll_id2, hl2, build_sha)
self.assertEqual(hl2_expected, db.data.load_high_level(self.test_mbid, offset=1))
def test_load_high_level_offset_reverse(self):
# If hl are added in a different order to ll, offset should return ll order
second_data = copy.deepcopy(self.test_lowlevel_data)
second_data["metadata"]["tags"]["album"] = ["Another album"]
db.data.write_low_level(self.
|
rickmer/rephone
|
tests/test_views.py
|
Python
|
agpl-3.0
| 1,721
| 0.001162
|
from . import RephoneTest
from re import match
class TestViews(RephoneTest):
def test_index(self):
with self.client:
response = self.client.get('/')
assert response.status_code == 303
def test_outbound(self):
with self.client:
response = self.client.post('/outbound/1')
assert response.status_code == 200
assert match(r'.*<Dial><Number>\+33388175572</Number></Dial>.*', str(response.get_data()))
def test_outbound_test_number(self):
with self.client:
            self.app.config['TWILIO_TEST_NUMBER'] = '+4940123456789'
response = self.client.post('/outbound/1')
assert response.status_code == 200
            assert match(r'.*<Dial><Number>\+4940123456789</Number></Dial>.*', str(response.get_data()))
def test_bias_alteration_audience_1(self):
index_0_before = self.app.random[1][0]
index_1_before = self.app.random[1][1]
self.app.random.add_sample(audience_id=1, respondent_id=1)
index_0_after = self.app.random[1][0]
index_1_after = self.app.random[1][1]
assert index_0_before == index_0_after
assert index_1_before == index_1_after - 1
def test_bias_alteration_audience_2(self):
index_0_before = self.app.random[2][0]
index_1_before = self.app.random[2][1]
self.app.random.add_sample(audience_id=2, respondent_id=751)
index_0_after = self.app.random[2][0]
index_1_after = self.app.random[2][1]
print(index_0_before, index_0_after)
print(index_1_before, index_1_after)
assert index_0_before == index_0_after
assert index_1_before == index_1_after - 1
|
linuxdeepin/deepin-media-player
|
src/widget/playlistview.py
|
Python
|
gpl-3.0
| 11,382
| 0.00541
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2012 ~ 2013 Deepin, Inc.
# 2012 ~ 2013 Hailong Qiu
#
# Author: Hailong Qiu <356752238@qq.com>
# Maintainer: Hailong Qiu <356752238@qq.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from dtk.ui.theme import ui_theme
from dtk.ui.scrolled_window import ScrolledWindow
from dtk.ui.utils import propagate_expose
from dtk.ui.draw import draw_vlinear
from skin import app_theme
from listview import ListView
from listview_base import Text
from treeview_base import TreeViewBase
from net_search import Search
from notebook import NoteBook
from color import alpha_color_hex_to_cairo
from utils import get_text_size
from draw import draw_text, draw_pixbuf
import gtk
class PlayListView(object):
def __init__(self):
self.one_close = app_theme.get_pixbuf("treeview/1-close.png")
self.one_open = app_theme.get_pixbuf("treeview/1-open.png")
self.two_close = app_theme.get_pixbuf("treeview/2-close.png")
self.two_open = app_theme.get_pixbuf("treeview/2-open.png")
self.three_close = app_theme.get_pixbuf("treeview/3-close.png")
self.three_open = app_theme.get_pixbuf("treeview/3-open.png")
#
self.tree_view_open = app_theme.get_pixbuf("treeview/open.png")
self.tree_view_close = app_theme.get_pixbuf("treeview/close.png")
self.tree_view_right = app_theme.get_pixbuf("treeview/right.png")
self.tree_view_bottom = app_theme.get_pixbuf("treeview/bottom.png")
#
self.listview_color = ui_theme.get_color("scrolledbar")
self.play_list_vbox = gtk.VBox()
#
self.list_view_vbox = gtk.VBox()
self.list_scroll_win = ScrolledWindow(0, 0)
self.list_scroll_win.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
self.list_view = ListView()
#
self.play_list_con = PlayListControl()
#
self.list_view_vbox.pack_start(self.list_scroll_win, True, True)
self.list_view_vbox.pack_start(self.play_list_con, False, False)
        # Network list and search box.
self.tree_scroll_win = ScrolledWindow(0, 0)
self.tree_scroll_win.set_policy(gtk.POLICY_NEVER, gtk.POLICY_ALWAYS)
self.tree_view_vbox = gtk.VBox()
self.tree_view = TreeViewBase()
self.search_ali = gtk.Alignment(0, 0, 1, 1)
self.search = Search()
self.search_ali.add(self.search)
#
self.search_ali.set_padding(7, 5, 12, 12)
self.tree_view_vbox.pack_start(self.search_ali, False, False)
self.tree_view_vbox.pack_start(self.tree_scroll_win, True, True)
self.search_ali.connect("expose-event", self.search_ali_expose_event)
#
self.note_book = NoteBook()
#
self.list_view.on_draw_sub_item = self.__listview_on_draw_sub_item
self.list_view.columns.add_range(["filename", "time"])
self.list_view.columns[0].width = 120
self.list_view.columns[1].width = 95
#
self.note_book.hide_title()
self.tree_view.paint_nodes_event = self.__treeview_paint_nodes_event
#
self.list_scroll_win.add_with_viewport(self.list_view)
self.tree_scroll_win.add_with_viewport(self.tree_view)
#self.note_book.add_layout1(self.list_scroll_win)
self.note_book.add_layout1(self.list_view_vbox)
self.note_book.add_layout2(self.tree_view_vbox)
#self.play_list_vbox.pack_start(self.scroll_win, True, True)
self.play_list_vbox.pack_start(self.note_book, True, True)
def __listview_on_draw_sub_item(self, e):
color = self.listview_color.get_color()
if e.double_items == e.item:
e.text_color = "#000000"
text_size=9
color_info = [(0, (color, 0.8)), (1, (color, 0.8))]
draw_vlinear(e.cr,
e.x, e.y, e.w, e.h,
color_info
)
elif e.item in e.single_items:
e.text_color = "#FFFFFF"
text_size=9
color_info = [(0, (color, 0.5)), (1, (color, 0.5))]
            draw_vlinear(e.cr,
e.x, e.y, e.w, e.h,
color_info
)
elif e.motion_items == e.item:
e.text_color = "#FFFFFF"
text_size=9
            color_info = [(0, (color, 0.2)), (1, (color, 0.2))]
draw_vlinear(e.cr,
e.x, e.y, e.w, e.h,
color_info
)
else:
e.text_color = "#FFFFFF"
text_size=9
#
text = e.text.decode("utf-8")
one_width = self.list_view.columns[0].width
two_width = self.list_view.columns[1].width
        #if e.w == one_width: # The first column, which shows the playing file's name.
if e.column_index == 0:
#
t_width = 0
t_index = 0
add_point = False
for t in text:
t_width += get_text_size(t, text_size=text_size)[0]
if t_width > one_width - 20:
add_point = True
break
t_index += 1
if add_point:
text = text[:t_index] + "..."
#
alignment = Text.LEFT
x = e.x + 15
elif e.w == two_width:
alignment = Text.RIGHT
x = e.x - 15
e.draw_text(e.cr,
str(text),
x, e.y, e.w, e.h,
text_color=e.text_color,
text_size=text_size,
alignment=alignment)
def __treeview_paint_nodes_event(self, node_event):
color = self.listview_color.get_color()
text_color = "#FFFFFF"
        # Single-click, hover (motion) and double-click states.
if node_event.node in node_event.single_items:
color_info = [(0, (color, 0.45)), (1, (color, 0.45))]
draw_vlinear(node_event.cr,
node_event.x, node_event.y, node_event.w, node_event.h,
color_info
)
#text_color = "#000000"
elif node_event.node in node_event.motion_items:
color_info = [(0, (color, 0.75)), (1, (color, 0.75))]
draw_vlinear(node_event.cr,
node_event.x, node_event.y, node_event.w, node_event.h,
color_info
)
#
        x_padding = 12 # To line up with the search box.
        if 0 == node_event.node.leave: # Root nodes, e.g. "Watched", "Youku video", "PPS".
if node_event.node.is_expanded:
pixbuf = self.one_open.get_pixbuf()
else:
pixbuf = self.one_close.get_pixbuf()
elif 1 == node_event.node.leave: #
if node_event.node.is_expanded:
pixbuf = self.two_open.get_pixbuf()
else:
pixbuf = self.two_close.get_pixbuf()
else:
if node_event.node.is_expanded:
pixbuf = self.three_open.get_pixbuf()
else:
pixbuf = self.three_close.get_pixbuf()
#
icon_x = node_event.x + x_padding
icon_y = node_event.y + node_event.h/2 - pixbuf.get_height()/2 + 1
if node_event.node.leave > 1:
icon_x += (node_event.node.leave - 1) * pixbuf.get_width()
if node_event.node.leave > 0:
text_color = "#a8a8a8"
##########
        # Draw the node icon.
if node_event.node.nodes != []:
draw_pixbuf(node_event.cr,
pixbu
|
Data2Semantics/linkitup
|
linkitup/linkedlifedata/plugin.py
|
Python
|
mit
| 2,673
| 0.014964
|
'''
Created on 26 Mar 2013
@author: hoekstra
'''
from flask.ext.login import login_required
import requests
from linkitup import app
from linkitup.util.baseplugin import plugin
from linkitup.util.provenance import provenance
LLD_AUTOCOMPLETE_URL = "http://linkedlifedata.com/autocomplete.json"
@app.route('/linkedlifedata', methods=['POST'])
@login_required
@plugin(fields=[('tags','id','name'),('categories','id','name')], link='mapping')
@provenance()
def link_to_lld(*args, **kwargs):
# Retrieve the article from the wrapper
article_id = kwargs['article']['id']
app.logger.debug("Running LinkedLifeData.com plugin for article {}".format(article_id))
match_items = kwargs['inputs']
search_parameters = {'limit': '2'}
matches = {}
for item in match_items :
search_parameters['q'] = item['label']
original_id = item['id']
response = requests.get(LLD_AUTOCOMPLETE_URL, params=search_parameters)
hits = response.json()['results']
for h in hits:
app.logger.debug(h)
match_uri = h['uri']['namespace'] + h['uri']['localName']
web_uri = match_uri
display_uri = h['label']
id_base = h['uri']['localName']
if 'types' in h:
if len(h['types']) > 0 :
types = ", ".join(h['types'])
else :
types = None
elif 'type' in h:
types = h['type']
else :
types = None
            if h.get('definition') is not None and h['definition'].strip() != "":
                description = h['definition']
            else:
                description = None
score = "Score: {}".format(h['score'])
# Create the match dictionary
match = {'type': "mapping",
'uri': match_uri,
'web': web_uri,
'show': display_uri,
'short': id_base,
'description': description,
'extra': types,
'subscript': score,
'original':original_id}
# Append it to all matches
matches[match_uri] = match
# Return the matches
return matches
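# Raw-lookup sketch (assumption: linkedlifedata.com is reachable, and 'aspirin'
# is just a sample label); this is the same request the plugin issues per input:
#     r = requests.get(LLD_AUTOCOMPLETE_URL, params={'q': 'aspirin', 'limit': '2'})
#     for h in r.json()['results']:
#         print(h['uri']['namespace'] + h['uri']['localName'], '-', h['label'])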
|
onshape-public/onshape-clients
|
python/onshape_client/oas/models/btp_conversion_function1362.py
|
Python
|
mit
| 14,003
| 0.000428
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import btp_annotation231
except ImportError:
btp_annotation231 = sys.modules["onshape_client.oas.models.btp_annotation231"]
try:
from onshape_client.oas.models import btp_argument_declaration232
except ImportError:
btp_argument_declaration232 = sys.modules[
"onshape_client.oas.models.btp_argument_declaration232"
]
try:
from onshape_client.oas.models import btp_conversion_function1362_all_of
except ImportError:
btp_conversion_function1362_all_of = sys.modules[
"onshape_client.oas.models.btp_conversion_function1362_all_of"
]
try:
from onshape_client.oas.models import btp_identifier8
except ImportError:
btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"]
try:
from onshape_client.oas.models import btp_literal_number258
except ImportError:
btp_literal_number258 = sys.modules[
"onshape_client.oas.models.btp_literal_number258"
]
try:
from onshape_client.oas.models import btp_procedure_declaration_base266
except ImportError:
btp_procedure_declaration_base266 = sys.modules[
"onshape_client.oas.models.btp_procedure_declaration_base266"
]
try:
from onshape_client.oas.models import btp_space10
except ImportError:
btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"]
try:
from onshape_client.oas.models import btp_statement269
except ImportError:
btp_statement269 = sys.modules["onshape_client.oas.models.btp_statement269"]
try:
from onshape_client.oas.models import btp_statement_block271
except ImportError:
btp_statement_block271 = sys.modules[
"onshape_client.oas.models.btp_statement_block271"
]
try:
from onshape_client.oas.models import btp_type_name290
except ImportError:
btp_type_name290 = sys.modules["onshape_client.oas.models.btp_type_name290"]
class BTPConversionFunction1362(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
        attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("documentation_type",): {
"FUNCTION": "FUNCTION",
"PREDICATE": "PREDICATE",
"CONSTANT": "CONSTANT",
"ENUM": "ENUM",
"USER_TYPE": "USER_TYPE",
"FEATURE_DEFINITION": "FEATURE_DEFINITION",
"FILE_HEADER": "FILE_HEADER",
"UNDOCUMENTABLE": "UNDOCUMENTABLE",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"_from": (btp_literal_number258.BTPLiteralNumber258,), # noqa: E501
"space_after_type": (btp_space10.BTPSpace10,), # noqa: E501
"to": (btp_literal_number258.BTPLiteralNumber258,), # noqa: E501
"type_name": (btp_identifier8.BTPIdentifier8,), # noqa: E501
"atomic": (bool,), # noqa: E501
"documentation_type": (str,), # noqa: E501
"end_source_location": (int,), # noqa: E501
"node_id": (str,), # noqa: E501
"short_descriptor": (str,), # noqa: E501
"space_after": (btp_space10.BTPSpace10,), # noqa: E501
"space_before": (btp_space10.BTPSpace10,), # noqa: E501
"space_default": (bool,), # noqa: E501
"start_source_location": (int,), # noqa: E501
"annotation": (btp_annotation231.BTPAnnotation231,), # noqa: E501
"arguments_to_document": (
[btp_argument_declaration232.BTPArgumentDeclaration232],
), # noqa: E501
"deprecated": (bool,), # noqa: E501
"deprecated_explanation": (str,), # noqa: E501
"for_export": (bool,), # noqa: E501
"space_after_export": (btp_space10.BTPSpace10,), # noqa: E501
"symbol_name": (btp_identifier8.BTPIdentifier8,), # noqa: E501
"arguments": (
[btp_argument_declaration232.BTPArgumentDeclaration232],
), # noqa: E501
"body": (btp_statement_block271.BTPStatementBlock271,), # noqa: E501
"precondition": (btp_statement269.BTPStatement269,), # noqa: E501
"return_type": (btp_type_name290.BTPTypeName290,), # noqa: E501
"space_after_arglist": (btp_space10.BTPSpace10,), # noqa: E501
"space_in_empty_list": (btp_space10.BTPSpace10,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"bt_type": "btType", # noqa: E501
"_from": "from", # noqa: E501
"space_after_type": "spaceAfterType", # noqa: E501
"to": "to", # noqa: E501
"type_name": "typeName", # noqa: E501
"atomic": "atomic", # noqa: E501
"documentation_type": "documentationType", # noqa: E501
"end_source_location": "endSourceLocation", # noqa: E501
"node_id": "nodeId", # noqa: E501
"short_descriptor": "shortDescriptor", # noqa: E501
"space_after": "spaceAfter", # noqa: E501
"space_before": "spaceBefore", # noqa: E501
"space_default": "spaceDefault", # noqa: E501
"start_source_location": "startSourceLocation", # noqa: E501
"annotation": "annotation", # noqa: E501
"arguments_to_document": "argumentsToDocument", # noqa: E501
"deprecated": "deprecated", # noqa: E501
"deprecated_explanation": "deprecatedExplanation", # noqa: E501
"for_export": "forExport", # noqa: E501
"space_after_export": "spaceAfterExport", # noqa: E501
"symbol_name": "symbolName", # noqa: E501
"arguments": "arguments", # noqa: E501
"body": "body", # noqa: E501
"precondition": "precondition", # noqa: E501
"return_type": "returnType", # noqa: E501
"space_after_arglist": "spaceAfterArglist", # noqa: E501
"space_in_empty_list": "spaceInEmptyList", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
|
bgribble/mfp
|
mfp/builtins/oscutils.py
|
Python
|
gpl-2.0
| 3,258
| 0.012277
|
#! /usr/bin/env python
'''
oscutils.py -- Open Sound Control builtins for MFP
Copyright (c) 2013 Bill Gribble <grib@billgribble.com>
'''
from ..processor import Processor
from ..mfp_app import MFPApp
from ..bang import Uninit
class OSCPacket(object):
def __init__(self, payload):
self.payload = payload
class OSCIn (Processor):
doc_tooltip_obj = "Open Sound Control message receive"
doc_tooltip_inlet = ["Config input"]
doc_tooltip_outlet = ["OSC data output"]
def __init__(self, init_type, init_args, patch, scope, name):
self.path = None
self.types = None
self.handler = None
Processor.__init__(self, 1, 1, init_type, init_args, patch, scope, name)
initargs, kwargs = self.parse_args(init_args)
if len(initargs) > 0:
self.path = initargs[0]
if len(initargs) > 1:
self.types = initargs[1]
def trigger(self):
need_update = False
if isinstance(self.inlets[0], OSCPacket):
self.outlets[0] = self.inlets[0].payload
self.inlets[0] = Uninit
elif isinstance(self.inlets[0], dict):
path = self.inlets[0].get("path")
if path:
self.path = path
need_update = True
types = self.inlets[0].get("types")
if types:
self.types = types
need_update = True
if need_update:
if self.handler is not None:
MFPApp().osc_mgr.del_method(self.handler, self.types)
self.handler = None
self.handler = MFPApp().osc_mgr.add_method(self.path, self.types, self._handler)
class OSCOut (Processor):
doc_tooltip_obj = "Open Sound Control message send"
doc_tooltip_inlet = ["Message data",
"Destination host:port (UDP) (default: initarg 0)",
"OSC path (default: initarg 1)" ]
def __init__(self, init_type, init_args, patch, scope, name):
self.host = None
self.port = None
self.path = None
Processor.__init__(self, 3, 0, init_type, init_args, patch, scope, name)
initargs, kwargs = self.parse_args(init_args)
if len(initargs) > 0:
parts = initargs[0].split(":")
self.host = parts[0]
if len(parts) > 1:
self.port = int(parts[1])
if len(initargs) > 1:
self.path = initargs[1]
def trigger(self):
if self.inlets[2] is not Uninit:
self.path = self.inlets[2]
self.inlets[2] = Uninit
        if self.inlets[1] is not Uninit:
if isinstance(self.inlets[1], str):
parts = self.inlets[1].split(":")
self.host = parts[0]
if len(parts) > 1:
self.port = int(parts[1])
elif isinstance(self.inlets[1], (float, int)):
self.port = int(self.inlets[1])
self.inlets[1] = Uninit
MFPApp().osc_mgr.send((self.host, self.port), self.path, self.inlets[0])
self.inlets[0] = Uninit
def register():
MFPApp().register("osc_in", OSCIn)
MFPApp().register("osc_out", OSCOut)
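# Usage sketch (hypothetical object handle; mirrors the trigger() logic above):
#     osc_in.inlets[0] = {"path": "/gain", "types": "f"}   # config dict on inlet 0
#     osc_in.trigger()   # re-registers the handler via MFPApp().osc_mgr.add_method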
|
andreaso/ansible
|
lib/ansible/modules/database/mysql/mysql_replication.py
|
Python
|
gpl-3.0
| 13,039
| 0.001534
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage mysql replication
(c) 2013, Balazs Pocze <banyek@gawker.com>
Certain parts are taken from Mark Theunissen's mysqldb module
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: mysql_replication
short_description: Manage MySQL replication
description:
- Manages MySQL server replication, slave, master status get and change master host.
version_added: "1.3"
author: "Balazs Pocze (@banyek)"
options:
mode:
description:
- module operating mode. Could be getslave (SHOW SLAVE STATUS), getmaster (SHOW MASTER STATUS), changemaster (CHANGE MASTER TO), startslave
(START SLAVE), stopslave (STOP SLAVE), resetslave (RESET SLAVE), resetslaveall (RESET SLAVE ALL)
required: False
choices:
- getslave
- getmaster
- changemaster
- stopslave
- startslave
- resetslave
- resetslaveall
default: getslave
master_host:
description:
- same as mysql variable
master_user:
description:
- same as mysql variable
master_password:
description:
- same as mysql variable
master_port:
description:
- same as mysql variable
master_connect_retry:
description:
- same as mysql variable
master_log_file:
description:
- same as mysql variable
master_log_pos:
description:
- same as mysql variable
relay_log_file:
description:
- same as mysql variable
relay_log_pos:
description:
- same as mysql variable
master_ssl:
description:
- same as mysql variable
choices: [ 0, 1 ]
master_ssl_ca:
description:
- same as mysql variable
master_ssl_capath:
description:
- same as mysql variable
master_ssl_cert:
description:
- same as mysql variable
master_ssl_key:
description:
- same as mysql variable
master_ssl_cipher:
description:
- same as mysql variable
master_auto_position:
description:
- does the host uses GTID based replication or not
required: false
default: null
version_added: "2.0"
extends_documentation_fragment: mysql
'''
EXAMPLES = '''
# Stop mysql slave thread
- mysql_replication:
mode: stopslave
# Get master binlog file name and binlog position
- mysql_replication:
mode: getmaster
# Change master to master server 192.0.2.1 and use binary log 'mysql-bin.000009' with position 4578
- mysql_replication:
mode: changemaster
master_host: 192.0.2.1
master_log_file: mysql-bin.000009
master_log_pos: 4578
# Check slave status using port 3308
- mysql_replication:
mode: getslave
login_host: ansible.example.com
login_port: 3308
'''
import os
import warnings
try:
import MySQLdb
except ImportError:
mysqldb_found = False
else:
mysqldb_found = True
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.mysql import mysql_connect
from ansible.module_utils.pycompat24 import get_exception
def get_master_status(cursor):
cursor.execute("SHOW MASTER STATUS")
    masterstatus = cursor.fetchone()
return masterstatus
def get_slave_status(cursor):
cursor.execute("SHOW SLAVE STATUS")
slavestatus = cursor.fetchone()
return slavestatus
def stop_slave(cursor):
try:
cursor.execute("STOP SLAVE")
stopped = True
except:
stopped = False
return stopped
def reset_slave(cursor):
try:
cursor.execute("RESET SLAVE")
reset = True
except:
reset = False
return reset
def reset_slave_all(cursor):
try:
cursor.execute("RESET SLAVE ALL")
reset = True
except:
reset = False
return reset
def start_slave(cursor):
try:
cursor.execute("START SLAVE")
started = True
except:
started = False
return started
def changemaster(cursor, chm, chm_params):
sql_param = ",".join(chm)
query = 'CHANGE MASTER TO %s' % sql_param
cursor.execute(query, chm_params)
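# Illustrative call (hypothetical values; callers assemble placeholder pairs so
# MySQLdb's pyformat substitution quotes the values):
#     changemaster(cursor,
#                  ["MASTER_HOST=%(master_host)s", "MASTER_LOG_POS=%(master_log_pos)s"],
#                  {"master_host": "192.0.2.1", "master_log_pos": 4578})
#     # executes: CHANGE MASTER TO MASTER_HOST='192.0.2.1',MASTER_LOG_POS=4578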
def main():
module = AnsibleModule(
argument_spec=dict(
login_user=dict(default=None),
login_password=dict(default=None, no_log=True),
login_host=dict(default="localhost"),
login_port=dict(default=3306, type='int'),
login_unix_socket=dict(default=None),
mode=dict(default="getslave", choices=["getmaster", "getslave", "changemaster", "stopslave", "startslave", "resetslave", "resetslaveall"]),
master_auto_position=dict(default=False, type='bool'),
master_host=dict(default=None),
master_user=dict(default=None),
master_password=dict(default=None, no_log=True),
master_port=dict(default=None, type='int'),
master_connect_retry=dict(default=None, type='int'),
master_log_file=dict(default=None),
master_log_pos=dict(default=None, type='int'),
relay_log_file=dict(default=None),
relay_log_pos=dict(default=None, type='int'),
master_ssl=dict(default=False, type='bool'),
master_ssl_ca=dict(default=None),
master_ssl_capath=dict(default=None),
master_ssl_cert=dict(default=None),
master_ssl_key=dict(default=None),
master_ssl_cipher=dict(default=None),
connect_timeout=dict(default=30, type='int'),
config_file=dict(default="~/.my.cnf", type='path'),
ssl_cert=dict(default=None),
ssl_key=dict(default=None),
ssl_ca=dict(default=None),
)
)
mode = module.params["mode"]
master_host = module.params["master_host"]
master_user = module.params["master_user"]
master_password = module.params["master_password"]
master_port = module.params["master_port"]
master_connect_retry = module.params["master_connect_retry"]
master_log_file = module.params["master_log_file"]
master_log_pos = module.params["master_log_pos"]
relay_log_file = module.params["relay_log_file"]
relay_log_pos = module.params["relay_log_pos"]
master_ssl = module.params["master_ssl"]
master_ssl_ca = module.params["master_ssl_ca"]
master_ssl_capath = module.params["master_ssl_capath"]
master_ssl_cert = module.params["master_ssl_cert"]
master_ssl_key = module.params["master_ssl_key"]
master_ssl_cipher = module.params["master_ssl_cipher"]
master_auto_position = module.params["master_auto_position"]
ssl_cert = module.params["ssl_cert"]
ssl_key = module.params["ssl_key"]
ssl_ca = module.params["ssl_ca"]
connect_timeout = module.params['connect_timeout']
config_file = module.params['config_file']
if not mysqldb_found:
module.fail_json(msg="the python mysqldb module is required")
else:
warnings.filterwarnings('error', category=MySQLdb.Warning)
login_password = module.params["login_password"]
login_user = module.params["login_user"]
try:
cursor = mysql_connect(module, login_user, login_password, config_file, ssl_cert, ssl_key, ssl_ca, None, 'MySQLdb.cursors.DictCursor
|
castelao/CoTeDe
|
tests/fuzzy/test_fuzzyfy.py
|
Python
|
bsd-3-clause
| 2,666
| 0.001125
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
"""
import numpy as np
from numpy.testing import assert_allclose
from cotede.fuzzy import fuzzyfy
CFG = {
"output": {
"low": {"type": "trimf", "params": [0.0, 0.225, 0.45]},
"medium": {"type": "trimf", "params": [0.275, 0.5, 0.725]},
"high": {"type": "smf", "params": [0.55, 0.775]},
},
"features": {
"f1": {
"weight": 1,
"low": {"type": "zmf", "params": [0.07, 0.2]},
"medi
|
um": {"type": "trapmf", "params": [0.07, 0.2, 2, 6]},
"high": {"type": "smf", "params": [2, 6]},
},
"f2": {
"weight": 1,
"low": {"type": "zmf", "params": [3, 4]},
"medium": {"type": "trapmf", "params": [3, 4, 5, 6]},
"high": {"type": "smf", "params": [5, 6]},
},
"f3": {
"weight": 1,
"low": {"type": "zmf", "params": [0.5,
|
1.5]},
"medium": {"type": "trapmf", "params": [0.5, 1.5, 3, 4]},
"high": {"type": "smf", "params": [3, 4]},
},
},
}
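# Reading the config above: each feature gets Z-shaped (zmf), trapezoidal
# (trapmf) and S-shaped (smf) membership functions for its low/medium/high sets,
# which fuzzyfy() combines into the output rules. A quick interactive sketch,
# using the same point as test_fuzzyfy below:
#     fuzzyfy({"f1": np.array([1.0]), "f2": np.array([5.2]), "f3": np.array([0.9])}, **CFG)
#     # -> {"low": ..., "medium": ..., "high": ...} (see test_fuzzyfy for the values)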
def test_fuzzyfy():
features = {"f1": np.array([1.0]), "f2": np.array([5.2]), "f3": np.array([0.9])}
rules = fuzzyfy(features, **CFG)
assert_allclose(rules["low"], [0.226666666])
assert_allclose(rules["medium"], [0.733333333])
assert_allclose(rules["high"], [0.08000000])
def test_fuzzyfy_with_nan():
features = {
"f1": np.array([1.0, np.nan, 1.0, 1.0, np.nan]),
"f2": np.array([5.2, 5.2, np.nan, 5.2, np.nan]),
"f3": np.array([0.9, 0.9, 0.9, np.nan, np.nan]),
}
rules = fuzzyfy(features, **CFG)
assert_allclose(rules["low"], [0.22666667, np.nan, np.nan, np.nan, np.nan])
assert_allclose(rules["medium"], [0.733333333, np.nan, np.nan, np.nan, np.nan])
assert_allclose(rules["high"], [0.08000000, np.nan, np.nan, np.nan, np.nan])
rules = fuzzyfy(features, **CFG, require="any")
assert_allclose(rules["low"], [0.22666667, 0.34, 0.34, 0, np.nan])
assert_allclose(rules["medium"], [0.733333333, 0.6, 0.7, 0.9, np.nan])
assert_allclose(rules["high"], [0.08, 0.08, 0, 0.08, np.nan])
def test_fuzzyfy_all_nan():
features = {
"f1": np.array([np.nan]),
"f2": np.array([np.nan]),
"f3": np.array([np.nan]),
}
rules = fuzzyfy(features, **CFG)
assert_allclose(rules["low"], [np.nan])
assert_allclose(rules["medium"], [np.nan])
assert_allclose(rules["high"], [np.nan])
"""
# FIXME: If there is only one feature, it will return 1 value
# instead of an array with N values.
"""
|
jacobajit/ion
|
intranet/apps/feedback/models.py
|
Python
|
gpl-2.0
| 374
| 0
|
# -*- coding: utf-8 -*-
from django.db import models
from ..users.models import User
class Feedback(models.Model):
user = models.ForeignKey(User)
comments = models.CharField(max_length=50000)
date = models.DateTimeField(auto_now=True)
class Meta:
ordering = ["-date"]
def __str__(self):
return "{} - {}".format(self.user, s
|
elf.date)
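# Usage sketch (assumes an existing User instance bound to some_user):
#     Feedback.objects.create(user=some_user, comments="Great site!")
#     Feedback.objects.first()   # newest entry first, per ordering = ["-date"]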
|
swiftstack/swift
|
test/unit/account/test_server.py
|
Python
|
apache-2.0
| 135,836
| 0
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import os
import mock
import posix
import unittest
from tempfile import mkdtemp
from shutil import rmtree
from test.unit import FakeLogger
import itertools
import random
from io import BytesIO
import json
from six import StringIO
from six.moves.urllib.parse import quote
import xml.dom.minidom
from swift import __version__ as swift_version
from swift.common.swob import (Request, WsgiBytesIO, HTTPNoContent)
from swift.common.constraints import ACCOUNT_LISTING_LIMIT
from swift.account.backend import AccountBroker
from swift.account.server import AccountController
from swift.common.utils import (normalize_timestamp, replication, public,
mkdirs, storage_directory, Timestamp)
from swift.common.request_helpers import get_sys_meta_prefix, get_reserved_name
from test.unit import patch_policies, debug_logger, mock_check_drive, \
make_timestamp_iter
from swift.common.storage_policy import StoragePolicy, POLICIES
@patch_policies
class TestAccountController(unittest.TestCase):
"""Test swift.account.server.AccountController"""
def setUp(self):
"""Set up for testing swift.account.server.AccountController"""
self.testdir_base = mkdtemp()
self.testdir = os.path.join(self.testdir_base, 'account_server')
mkdirs(os.path.join(self.testdir, 'sda1'))
self.logger = debug_logger()
self.controller = AccountController(
{'devices': self.testdir, 'mount_check': 'false'},
            logger=self.logger)
self.ts = make_timestamp_iter()
def tearDown(self):
"""Tear down for testing swift.account.server.AccountController"""
try:
rmtree(self.testdir_base)
except OSError as err:
if err.errno != errno.ENOENT:
raise
def test_init(self):
        conf = {
            'devices': self.testdir,
'mount_check': 'false',
}
AccountController(conf, logger=self.logger)
self.assertEqual(self.logger.get_lines_for_level('warning'), [])
conf['auto_create_account_prefix'] = '-'
AccountController(conf, logger=self.logger)
self.assertEqual(self.logger.get_lines_for_level('warning'), [
'Option auto_create_account_prefix is deprecated. '
'Configure auto_create_account_prefix under the '
'swift-constraints section of swift.conf. This option '
'will be ignored in a future release.'
])
def test_OPTIONS(self):
server_handler = AccountController(
{'devices': self.testdir, 'mount_check': 'false'})
req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = server_handler.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE'.split():
self.assertIn(verb, resp.headers['Allow'].split(', '))
self.assertEqual(len(resp.headers['Allow'].split(', ')), 7)
self.assertEqual(resp.headers['Server'],
(server_handler.server_type + '/' + swift_version))
def test_insufficient_storage_mount_check_true(self):
conf = {'devices': self.testdir, 'mount_check': 'true'}
account_controller = AccountController(conf)
self.assertTrue(account_controller.mount_check)
for method in account_controller.allowed_methods:
if method == 'OPTIONS':
continue
req = Request.blank('/sda1/p/a-or-suff', method=method,
headers={'x-timestamp': '1'})
with mock_check_drive() as mocks:
try:
resp = req.get_response(account_controller)
self.assertEqual(resp.status_int, 507)
mocks['ismount'].return_value = True
resp = req.get_response(account_controller)
self.assertNotEqual(resp.status_int, 507)
# feel free to rip out this last assertion...
expected = 2 if method == 'PUT' else 4
self.assertEqual(resp.status_int // 100, expected)
except AssertionError as e:
self.fail('%s for %s' % (e, method))
def test_insufficient_storage_mount_check_false(self):
conf = {'devices': self.testdir, 'mount_check': 'false'}
account_controller = AccountController(conf)
self.assertFalse(account_controller.mount_check)
for method in account_controller.allowed_methods:
if method == 'OPTIONS':
continue
req = Request.blank('/sda1/p/a-or-suff', method=method,
headers={'x-timestamp': '1'})
with mock_check_drive() as mocks:
try:
resp = req.get_response(account_controller)
self.assertEqual(resp.status_int, 507)
mocks['isdir'].return_value = True
resp = req.get_response(account_controller)
self.assertNotEqual(resp.status_int, 507)
# feel free to rip out this last assertion...
expected = 2 if method == 'PUT' else 4
self.assertEqual(resp.status_int // 100, expected)
except AssertionError as e:
self.fail('%s for %s' % (e, method))
def test_DELETE_not_found(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertNotIn('X-Account-Status', resp.headers)
def test_DELETE_empty(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_DELETE_not_empty(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
# We now allow deleting non-empty accounts
self.assertEqual(resp.status_int, 204)
self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_DELETE_now_empty(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/
|
harej/requestoid
|
authentication.py
|
Python
|
mit
| 819
| 0
|
from . import config
from django.shortcuts import render
from mwoauth import ConsumerToken, Handshaker, tokens
def requests_handshaker():
consumer_key = config.OAUTH_CONSUMER_KEY
consumer_secret = config.OAUTH_CONSUMER_SECRET
consumer_token = ConsumerToken(consumer_key, consumer_secret)
return Handshaker("https://meta.wikimedia.org/w/index.php",
consumer_token)
def get_username(request):
handshaker = requests_handshaker()
if 'access_token_key' in request.session:
        access_key = request.session['access_token_key'].encode('utf-8')
access_secret = request.session['access_token_secret'].encode('utf-8')
access_token = tokens.AccessToken(key=access_key, secret=access_secret)
return handshaker.identify(access_token)['username']
else:
return None
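# Sketch of the surrounding handshake that produces those session keys
# (standard mwoauth Handshaker API; the view wiring and response_query_string
# variable are assumptions, not part of this file):
#     handshaker = requests_handshaker()
#     redirect_url, request_token = handshaker.initiate()   # send the user here
#     access_token = handshaker.complete(request_token, response_query_string)
#     # then store access_token.key / access_token.secret in request.session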
|
TrampolineRTOS/trampoline
|
goil/build/libpm/python-makefiles/default_build_options.py
|
Python
|
gpl-2.0
| 3,374
| 0.02786
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#----------------------------------------------------------------------------------------------------------------------*
#
# Options for all compilers
#
#----------------------------------------------------------------------------------------------------------------------*
def allCompilerOptions (platformOptions):
result = platformOptions
result.append ("-Wall")
result.append ("-Werror")
result.append ("-Wreturn-type")
result.append ("-Wformat")
result.append ("-Wsign-compare")
result.append ("-Wpointer-arith")
#--- Options added for GALGAS 1.9.0
result.append ("-ansi")
result.append ("-W")
result.append ("-Wshadow")
# result.append ("-Wcast-qual")
result.append ("-Wwrite-strings")
result.append ("-ffunction-sections")
result.append ("-fdata-sections")
return result
#----------------------------------------------------------------------------------------------------------------------*
#
# Release options
#
#----------------------------------------------------------------------------------------------------------------------*
def compilerReleaseOptions (platformOptions):
result = platformOptions
result.append ("-DDO_NOT_GENERATE_CHECKINGS")
result.append ("-Wunused-variable")
return result
#----------------------------------------------------------------------------------------------------------------------*
#
# Debug options
#
#----------------------------------------------------------------------------------------------------------------------*
def compilerDebugOptions (platformOptions):
result = platformOptions
result.append ("-g")
return result
#----------------------------------------------------------------------------------------------------------------------*
#
# C compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def C_CompilerOptions (platformOptions):
result = platformOptions
result.append ("-std=c99")
return result
#----------------------------------------------------------------------------------------------------------------------*
#
# C++ compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def Cpp_CompilerOptions (platformOptions):
result = platformOptions
result.append ("-std=c++14")
result.append ("-Woverloaded-virtual")
return result
#----------------------------------------------------------------------------------------------------------------------*
#
# Objective C compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def ObjectiveC_CompilerOptions (platformOptions):
  result = platformOptions
return result
#----------------------------------------------------------------------------------------------------------------------*
#
#   Objective C++ compiler options
#
#----------------------------------------------------------------------------------------------------------------------*
def ObjectiveCpp_CompilerOptions (platformOptions):
result = platformOptions
return result
#----------------------------------------------------------------------------------------------------------------------*
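# Composition sketch (a debug C++ build starting from empty platform options;
# each helper above appends to the list it is given):
#     flags = Cpp_CompilerOptions (compilerDebugOptions (allCompilerOptions ([])))
#     # -> ["-Wall", "-Werror", ..., "-g", "-std=c++14", "-Woverloaded-virtual"]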
|