repo_name (string, 5–100 chars) | path (string, 4–231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, 0–8.16k chars) | middle (string, 3–512 chars) | suffix (string, 0–8.17k chars)

Each record below is shown as repo_name | path | language | license | size | score, followed by the file content (prefix, middle and suffix joined).
nielsbuwen/ilastik | ilastik/applets/counting/opCountingDataExport.py | Python | gpl-3.0 | 3,184 | 0.007538
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
from lazyflow.graph import InputSlot
from ilastik.applets.dataExport.opDataExport import OpDataExport
from ilastik.applets.base.applet import DatasetConstraintError
class OpCountingDataExport( OpDataExport ):
# Add these additional input slots, to be used by the GUI.
PmapColors = InputSlot()
LabelNames = InputSlot()
UpperBound = InputSlot()
ConstraintDataset = InputSlot() # Any dataset from the training workflow, which we'll use for
# comparison purposes when checking dataset constraints.
def __init__(self,*args,**kwargs):
super(OpCountingDataExport, self).__init__(*args, **kwargs)
self.ConstraintDataset.notifyReady(self._checkDataConstraint)
self.RawData.notifyReady(self._checkDataConstraint)
def _checkDataConstraint(self, *args):
"""
The batch workflow uses the same classifier as the training workflow,
and therefore the batch datasets must be compatible with the training datasets in certain respects.
This function tests those constraints by comparing the batch input against an (arbitrary) training dataset,
and raises a DatasetConstraintError if something is incorrect.
"""
if not self.ConstraintDataset.ready() or not self.RawData.ready():
return
dataTrain = self.ConstraintDataset.meta
dataBatch = self.RawData.meta
# Must have same dimensionality (but not necessarily the same shape)
if len(dataTrain.shape) != len(dataBatch.shape):
raise DatasetConstraintError("Batch Prediction Input","Batch input must have the same dimension as training input.")
# Must have same number of channels
if dataTrain.getTaggedShape()['c'] != dataBatch.getTaggedShape()['c']:
raise DatasetConstraintError("Batch Prediction Input","Batch input must have the same number of channels as training input.")
# Must have same set of axes (but not necessarily in the same order)
if set(dataTrain.getAxisKeys()) != set(dataBatch.getAxisKeys()):
raise DatasetConstraintError("Batch Prediction Input","Batch input axis must fit axis of training input.")
capergroup/bayou | tool_files/acceptpy_1_3_0/accept.py | Python | apache-2.0 | 9,670 | 0.003619
# Copyright (c) 2017 rmbar
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import json
import os
import subprocess
import sys
#
# Constant string prefixes and suffix for printing colors to the console.
#
HEADER = '\033[95m'
OK_GREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
END_COLOR = '\033[0m'
def run_test_from_python_test_file(test_file_path: str):
"""Runs the given python file and returns True if the exit code is 0 and False otherwise.
test_file_path -- the path to the python file to run
"""
print(HEADER + "[running: " + test_file_path + "]" + END_COLOR)
test_file_parent_path = os.path.abspath(os.path.join(test_file_path, os.pardir))
test_file_name = os.path.basename(test_file_path)
return run_shell_command("python3 " + test_file_name, test_file_parent_path, 0)
def run_test_from_json_test_file(test_file_path: str):
"""Parses the given json test file and runs the defined test returning True if the tests passes and False otherwise.
Example json:
{
"test_type": "shell command",
"command" : "echo dog",
"expect_exit": 0,
"expect_stdout": "dog\n"
}
test_file_path -- the path to the json test file
"""
print(HEADER + "[running: " + test_file_path + "]" + END_COLOR)
#
# Read and parse test_file_path as a JSON file.
#
with open(test_file_path, "r") as test_file:
test_file_content = test_file.read()
try:
test = json.loads(test_file_content)
except json.JSONDecodeError:
print(WARNING + "non-json test file: " + test_file_path + END_COLOR)
return False
#
# Determine the kind of test to run and run it.
#
test_type = test.get('test type', None)
if test_type is None:
test_type = test.get('test_type', None) # support legacy key name
if test_type is None:
print(WARNING + "missing test_type in test file: " + test_file_path + END_COLOR)
return False
if test_type == "shell command":
command = test.get('command', None)
if command is None:
print(WARNING + "missing command in test file: " + test_file_path + END_COLOR)
return False
expect_exit = test.get('expect exit', None)
if expect_exit is None:
expect_exit = test.get('expect_exit', None) # support legacy key name
expect_stdout = test.get('expect stdout', None)
if expect_stdout is None:
expect_stdout = test.get('expect_stdout', None) # support legacy key name
expect_stdout_contains = test.get('expect stdout contains', None)
print(HEADER + "shell command: " + command + END_COLOR)
test_file_parent_path = os.path.abspath(os.path.join(test_file_path, os.pardir))
return run_shell_command(command, test_file_parent_path, expect_exit, expect_stdout, expect_stdout_contains)
else:
print(WARNING + "unknown test type in test file: " + test_file_path + END_COLOR)
return False
def run_shell_command(command: str, working_directory: str, expected_exit: int, expected_stdout: str = None,
expect_stdout_contains: str = None):
"""Runs the given command string as a shell command in a new subprocess and returns whether the command
met the given expectations.
command -- the shell command to run e.g. "ls -l"
working_directory -- the working directory of the launched shell
expected_exit -- the expected exit code of the shell command or None for no expectation
expected_stdout -- the expected standard out characters printed by the shell command or None for no expectation
expect_stdout_contains -- an expected substring of standard out characters printed by the shell command or None for no
expectation
"""
#
# Run the given shell command and report standard out.
#
completed_process = subprocess.run(command, shell=True, stdout=subprocess.PIPE, cwd=working_directory)
std_out = completed_process.stdout.decode('utf-8') # take stdout bytes and assume UTF-8 text
if len(std_out) > 0:
print(HEADER + "<begin stdout>" + END_COLOR + completed_process.stdout.decode('utf-8') +
HEADER + "<end stdout>" + END_COLOR)
#
# Check if the exit code and standard out match expectations if specified.
#
test_passed = True
if expected_stdout is not None and expected_stdout != std_out:
test_passed = False
print(FAIL + "<expected out>" + END_COLOR + expected_stdout + FAIL + "<end expected out>" + END_COLOR)
# n.b. we use the string "<expected out>" instead of "<expected stdout>" so same char length as "<begin stdout>"
# and thus lines up visually.
if expect_stdout_contains is not None and expect_stdout_contains not in std_out:
test_passed = False
print(FAIL + "<expected stdout to contain>" + END_COLOR + expect_stdout_contains + FAIL +
"<end expected stdout to contain>" + END_COLOR)
if expected_exit is not None and expected_exit != completed_process.returncode:
test_passed = False
print(FAIL + "<expected error code " + str(expected_exit) + " but found " + str(completed_process.returncode) +
">" + END_COLOR)
return test_passed
def include_file(file_path: str, ignore_py: bool):
"""Returns whether the given file path should be considered a test file for this execution.
file_path -- the path of the file in the tests path
ignore_py -- whether files ending in .py should be ignored rather than treated as test files
"""
return file_path.endswith(".test") or (file_path.endswith(".py") and not ignore_py)
if __name__ == "__main__":
print(HEADER)
print("#########################")
print(" Welcome to AcceptPy ")
print("#########################")
print(END_COLOR)
#
# Determine the tests directory path from the command line arguments.
#
parser = argparse.ArgumentParser()
parser.add_argument('tests_path', type=str, help='the directory containing tests to run')
parser.add_argument('--ignore_py', dest='ignore_py', action='store_const', const=True, default=False,
help='do not treat found .py files in the tests path as tests')
args = parser.parse_args()
tests_path = args.tests_path
#
# Find all test files.
#
print("searching for tests...")
print("")
test_file_paths = []
if os.path.isfile(tests_path) and include_file(tests_path, args.ignore_py):
test_file_paths.append(tests_path)
print(tests_path)
else:
for root, dirs, files in os.walk(tests_path):
for file in files:
if include_file(file, args.ignore_py):
f = os.path.join(root, file)
print(f)
test_file_paths.append(f)
print("")
print("found " + str(len(test_file_
ox1omon/movement_fefu | movement_app/views.py | Python | mit | 5,870 | 0.00233
import json
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import logout
from django.shortcuts import render
from django.http import HttpResponseRedirect, HttpResponse
from movement_app.models import *
from movement_app.logic import find_move
def index(request):
if request.user.is_anonymous():
return render(request, "movement_app/unlogined.html", dict())
social_user = request.user.social_auth.filter(provider='vk-oauth2').first()
# Check what state the move is currently in.
# If it is active (searching), offer to change the data, making it inactive first.
# If it succeeded, offer to make another one.
# If it is neither active nor successful, return the form right away.
q = UserInfo.objects.filter(userfield=request.user)
if not q:
succeed, active = False, False
else:
q = q[0]
succeed = q.is_succeed
active = q.is_active
ctx = dict(succeed=succeed, active=active, social_user=social_user)
return render(request, "movement_app/logined.html", ctx)
@login_required(login_url='/')
def registration(request):
user = request.user
ctx = {}
if request.method == 'GET':
# TODO: handle the cases analogous to those covered in index. Pass the data into the form and fill it via
# javascript
# If the form is active, notify the user about it.
# If the form is not active, allow editing right away (using the existing data).
userinfo = UserInfo.objects.filter(userfield=user)
ctx.update(dict(is_active=False))
if userinfo:
userinfo = userinfo.first()
ctx.update(dict(is_active=userinfo.is_active))  # If active, the user will get a warning
# Fill the form with data from the model
form = UserInfoForm({
'is_female': userinfo.is_female,
'now': userinfo.now,
'want': userinfo.want,
'misc': userinfo.misc,
'user_from': userinfo.user_from,
'user_to': userinfo.user_to,
'phone_number': userinfo.phone_number,
})
else:
form = UserInfoForm()  # The DB has no record; give the user a blank form.
ctx.update(dict(form=form))
return render(request, "movement_app/registration.html", ctx)
form = UserInfoForm(request.POST)
if form.is_valid(): # TODO Проверь джаваскриптовую парсилку инфы с карты
try:
userinfo = UserInfo.objects.get(userfield=request.user, )
except UserInfo.DoesNotExist:
userinfo = UserInfo(userfield=request.user)
for (key, val) in form.cleaned_data.items():
setattr(userinfo, key, val)
userinfo.is_active = True
userinfo.is_succeed = False
userinfo.save() # TODO
code = find_move(request.user)
if code == 0:
return HttpResponse(json.dumps({'code': 0, 'text': 'We have a movement, congrats!'}))
if code == 1:
return HttpResponse(json.dumps({'code': 0, 'text': 'No movement for now, sorry :('}))
return HttpResponse(json.dumps({
'code': 1,
'text': 'form is wrong, denied :C' + str([(k, v[0]) for k, v in form.errors.items()]),
'errors': [(k, v[0]) for k, v in form.errors.items()]
}))
def result(request):
""" Наша задача проверить, есть ли переезд для юзера и выдать участников если он есть
И если нет, то запустить поиск. Выдать грусть, если ничего не нашлось,
редиректнуть его сюда еще раз, если нашлось.
"""
users_list = []
if not request.user.is_anonymous():
user_last_move = MovePartisTable.objects.filter(user=request.user).order_by('id').last()
# The case where the user has no moves
if not user_last_move:
return render(request, "movement_app/result.html")
user_last_move = user_last_move.move
if user_last_move.rejected:
r = find_move(request.user)
if r == 0:
return HttpResponseRedirect('/result/')
return render(request, "movement_app/result.html")
else:
r = user_last_move
if r != 1:
users_list = MovePartisTable.objects.filter(move=r)
users_list = [i.user for i in users_list]
users_list = UserInfo.objects.filter(userfield__in=users_list)
return render(request, "movement_app/result.html", dict(users_list=users_list))
@login_required
def reject(request):
if request.method == 'POST':
conformity = MovePartisTable.objects.get(userfield=request.user)
move_q = MovePartisTable.objects.filter(move=conformity.move)
move_u = [x.user for x in move_q]
user_info = UserInfo.objects.filter(userfield__in=move_u)
for i in user_info:
i.is_active = True
i.save()
move = conformity.move
move.rejected = True
move.reject_reason = request.POST['reject_reason']
move.save()
return HttpResponse(json.dumps({'code': 0, 'text': 'Successfully rejected.'}))
drtuxwang/system-config | bin/swell_foop_.py | Python | gpl-2.0 | 1,717 | 0
#!/usr/bin/env python3
"""
Sandbox for "swell-foop" command
"""
import glob
import os
import signal
import sys
import network_mod
import subtask_mod
class Main:
"""
Main class
"""
def __init__(self) -> None:
try:
self.config()
sys.exit(self.run())
except (EOFError, KeyboardInterrupt):
sys.exit(114)
except SystemExit as exception:
sys.exit(exception)
@staticmethod
def config() -> None:
"""
Configure program
"""
if hasattr(signal, 'SIGPIPE'):
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
if os.name == 'nt':
argv = []
for arg in sys.argv:
files = glob.glob(arg) # Fixes Windows globbing bug
if files:
argv.extend(files)
else:
argv.append(arg)
sys.argv = argv
@staticmethod
def run() -> int:
"""
Start program
"""
command = network_mod.Sandbox(
'/usr/games/swell-foop',
args=sys.argv[1:],
errors='stop'
)
# Start slow for very large history (.local/share/swell-foop/)
if not os.path.isfile(command.get_file() + '.py'):
configs = [
'/dev/dri',
f'/run/user/{os.getuid()}/dconf',
os.path.join(os.getenv('HOME', '/'), '.config/dconf/user'),
]
command.sandbox(configs)
subtask_mod.Background(command.get_cmdline()).run()
return 0
if __name__ == '__main__':
if '--pydoc' in sys.argv:
help(__name__)
else:
Main()
khalim19/gimp-plugin-export-layers | export_layers/pygimplib/__init__.py | Python | gpl-3.0 | 11,842 | 0.016298
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2019 khalim19
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
import os
import sys
import traceback
PYGIMPLIB_DIRPATH = os.path.dirname(inspect.getfile(inspect.currentframe()))
try:
import gimp
except ImportError:
_gimp_dependent_modules_imported = False
else:
_gimp_dependent_modules_imported = True
from . import logging
if _gimp_dependent_modules_imported:
# Enable logging as early as possible to capture any unexpected errors (such
# as missing modules) before pygimplib is fully initialized.
logging.log_output(
log_mode="exceptions",
log_dirpaths=[os.path.dirname(PYGIMPLIB_DIRPATH), PYGIMPLIB_DIRPATH],
log_stdout_filename=None,
log_stderr_filename="error.log",
log_header_title="pygimplib")
if _gimp_dependent_modules_imported:
from . import _gui_messages
_gui_messages.set_gui_excepthook(title=None, app_name=None)
def _setup_import_of_external_lib_modules(dirpath):
"""
Add directory paths containing external libraries for pygimplib to `sys.path`
so that modules from these external libraries can be imported as system
modules (i.e. without using absolute or explicit relative imports).
Modules with the same name that are already installed system-wide override the
external library modules from `pygimplib`.
"""
for filename in os.listdir(dirpath):
external_libs_dirpath = os.path.join(dirpath, filename)
if os.path.isdir(external_libs_dirpath) and external_libs_dirpath not in sys.path:
sys.path.append(external_libs_dirpath)
_setup_import_of_external_lib_modules(os.path.join(PYGIMPLIB_DIRPATH, "_lib"))
from future.builtins import (
ascii, bytes, chr, dict, filter, hex, input, int, list, map, next, object,
oct, open, pow, range, round, str, super, zip)
import __builtin__
import collections
import gettext
from .constants import *
from . import utils
from . import version
if _gimp_dependent_modules_imported:
import gimpenums
import gimpui
from . import fileformats
from . import invocation
from . import gui
from . import itemtree
from . import objectfilter
from . import operations
from . import overwrite
from . import path
from . import pdbutils
from . import progress
from . import setting
from gimp import pdb
from .setting import SettingTypes
__all__ = [
# Modules
"logging",
"utils",
"version",
# Global elements imported to or defined in this module
"config",
"init",
]
if _gimp_dependent_modules_imported:
__all__.extend([
# Modules
"fileformats",
"invocation",
"gui",
"itemtree",
"objectfilter",
"operations",
"overwrite",
"path",
"pdbutils",
"progress",
"setting",
# Global elements imported to or defined in this module
"pdb",
"procedure",
"main",
"SettingTypes",
])
config = None
class _Config(object):
def __init__(self):
super().__setattr__("_config", {})
def __setattr__(self, name, value):
self._config[name] = value
def __getattr__(self, name):
if name not in self._config:
raise AttributeError("configuration entry '{}' not found".format(name))
attr = self._config[name]
if callable(attr):
return attr()
else:
return attr
def __hasattr__(self, name):
return name in self._config
def _init_config():
global config
if config is not None:
return
def _get_domain_name():
if config.PLUGIN_NAME == config._DEFAULT_PLUGIN_NAME:
return "gimp20-python"
else:
return "gimp-plugin-" + config.PLUGIN_NAME.replace("_", "-")
config = _Config()
config._DEFAULT_PLUGIN_NAME = "gimp_plugin"
config.PLUGIN_NAME = config._DEFAULT_PLUGIN_NAME
config.PLUGIN_TITLE = lambda: config.PLUGIN_NAME
config.PLUGIN_VERSION = "1.0"
config.LOCALE_DIRPATH = (
lambda: os.path.join(config.PLUGINS_DIRPATH, config.PLUGIN_NAME, "locale"))
config.DOMAIN_NAME = _get_domain_name
config.BUG_REPORT_URL_LIST = []
config.PLUGINS_DIRPATH = os.path.dirname(os.path.dirname(PYGIMPLIB_DIRPATH))
if _gimp_dependent_modules_imported:
config.LOG_MODE = "exceptions"
else:
config.LOG_MODE = "none"
_init_config_builtin(config)
_init_config_from_file()
_init_config_builtin_delayed(config)
def _init_config_builtin(config):
config.DEFAULT_LOGS_DIRPATH = os.path.dirname(PYGIMPLIB_DIRPATH)
config.PLUGINS_LOG_DIRPATHS = []
config.PLUGINS_LOG_DIRPATHS.append(config.DEFAULT_LOGS_DIRPATH)
if _gimp_dependent_modules_imported:
plugins_dirpath_alternate = os.path.join(gimp.directory, "plug-ins")
if plugins_dirpath_alternate != config.DEFAULT_LOGS_DIRPATH:
# Add `[user directory]/[GIMP directory]/plug-ins` as another log path in
# case the plug-in was installed system-wide and there is no permission to
# create log files there.
config.PLUGINS_LOG_DIRPATHS.append(plugins_dirpath_alternate)
config.PLUGINS_LOG_STDOUT_DIRPATH = config.DEFAULT_LOGS_DIRPATH
config.PLUGINS_LOG_STDERR_DIRPATH = config.DEFAULT_LOGS_DIRPATH
config.PLUGINS_LOG_STDOUT_FILENAME = "output.log"
config.PLUGINS_LOG_STDERR_FILENAME = "error.log"
config.GIMP_CONSOLE_MESSAGE_DELAY_MILLISECONDS = 50
def _init_config_from_file():
orig_builtin_c = None
if hasattr(__builtin__, "c"):
orig_builtin_c = __builtin__.c
__builtin__.c = config
try:
from .. import config as plugin_config
except ImportError:
pass
if orig_builtin_c is None:
del __builtin__.c
else:
__builtin__.c = orig_builtin_c
def _init_config_builtin_delayed(config):
def _get_setting_source_name():
if config.PLUGIN_NAME.startswith("plug_in"):
return config.PLUGIN_NAME
else:
return "plug_in_" + config.PLUGIN_NAME
if _gimp_dependent_modules_imported:
config.SOURCE_NAME = _get_setting_source_name()
config.SESSION_SOURCE = setting.SessionSource(config.SOURCE_NAME)
config.PERSISTENT_SOURCE = setting.PersistentSource(config.SOURCE_NAME)
gettext.install(config.DOMAIN_NAME, config.LOCALE_DIRPATH, unicode=True)
if _gimp_dependent_modules_imported or config.LOG_MODE != "gimp_console":
logging.log_output(
config.LOG_MODE, config.PLUGINS_LOG_DIRPATHS,
config.PLUGINS_LOG_STDOUT_FILENAME, config.PLUGINS_LOG_STDERR_FILENAME,
config.PLUGIN_TITLE, config.GIMP_CONSOLE_MESSAGE_DELAY_MILLISECONDS)
_init_config()
if _gimp_dependent_modules_imported:
_procedures = collections.OrderedDict()
_procedures_names = collections.OrderedDict()
def procedure(**kwargs):
"""
This function is a decorator that installs the wrapped function as a GIMP
procedure. The procedure can then be accessed via the GIMP procedural
database (PDB) and optionally from the GIMP user interface.
The function name is used as the procedure name as found in the GIMP PDB.
The following keyword arguments are accepted:
* `blurb` - Short description of the procedure.
* `description` - More detailed information about the procedure.
* `author` - Author of the plug-in.
* `copyright_holder` - Copyright holder of the plug-in.
* `date` - Dates (usually years) at which the plug-in development was
active.
* `menu_name` - Name of the menu entry in the GIMP user interface.
* `menu_path` - Path of the menu entry in the GIMP user interface.
* `image_types` - Image types to which the procedure applies (e.g. RGB or
indexed). Default
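The `_Config` class defined earlier in this sample lets configuration entries be callables that are evaluated on attribute access, so derived settings such as `PLUGIN_TITLE` track later changes to `PLUGIN_NAME`. A standalone sketch of that pattern (the class and values here are illustrative, not part of pygimplib):

```python
class LazyConfig(object):
    """Illustrative re-implementation of the lazy-attribute config pattern."""

    def __init__(self):
        super(LazyConfig, self).__setattr__("_config", {})

    def __setattr__(self, name, value):
        self._config[name] = value

    def __getattr__(self, name):
        if name not in self._config:
            raise AttributeError("configuration entry '{}' not found".format(name))
        attr = self._config[name]
        # Callables are evaluated on access, so derived entries stay current.
        return attr() if callable(attr) else attr


cfg = LazyConfig()
cfg.PLUGIN_NAME = "my_plugin"
cfg.PLUGIN_TITLE = lambda: cfg.PLUGIN_NAME.replace("_", " ").title()
print(cfg.PLUGIN_TITLE)  # -> "My Plugin", recomputed from the current PLUGIN_NAME
```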
ladder1984/backbonejs_todos_with_Django | backbonejs_todos_with_Django/wsgi.py | Python | mit | 431 | 0.00464
"""
WSGI config for backbonejs_todos_with_Django project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backbonejs_todos_with_Django.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
nens/dpnetcdf | dpnetcdf/utils.py | Python | gpl-3.0 | 1,037 | 0
# (c) Nelen & Schuurmans. GPL licensed, see LICENSE.rst.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import re
DATASET_NAME_PATTERN = re.compile(
r"""^(?P<time_zero>\d{12}) # e.g. 200506010000
_
(?P<program>DP[a-zA-Z]*) # e.g. DPR_rijn, DPRD
(_(?P<strategy>S[0-9]v[0-9]))? # e.g. S0v1
_
(_(?P<scenario>[A-Z]{0,2}))? # scenario is optional
_
(?P<calculation_facility>[A-Za-z0-9]*).nc$ # e.g. RF1p0p3
""", re.X)
year = re.compile(
r"""
_(?P<year>\d{4}[A-Z]?)_ # year can have a one character appended
""", re.X)
scenario = re.compile(
r"""
_(?P<scenario>S\dv\d)_ # scenario is optional
""", re.X)
def parse_dataset_name(name):
"""Test the regex extensively. See tests file for specific tests."""
d = year.search(name).groupdict()
scenario_search = scenario.search(name)
if scenario_search:
d.update(scenario_search.groupdict())
return d
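A hypothetical call to `parse_dataset_name` with a filename shaped so that both the `year` and `scenario` patterns above match (the name itself is made up):

```python
# Illustrative filename only.
print(parse_dataset_name("DPR_2015_S0v1_RF1p0p3.nc"))
# -> {'year': '2015', 'scenario': 'S0v1'}
```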
cellofellow/symphony | library/migrations/0009_score_type_to_choices.py | Python | agpl-3.0 | 1,288 | 0.000776
# Generated by Django 2.0.2 on 2018-03-23 22:29
from django.db import migrations, models
SCORE_TYPE_MAP = {1: 'NO_SCORE', 2: 'FULL', 3: 'PIANO', 4: 'CONDENSED'}
FORWARDS_QUERY = """
UPDATE library_piece
SET "score_type" = %s
WHERE "score_id" = %s
"""
FORWARDS = [(FORWARDS_QUERY, [v, k]) for k, v in SCORE_TYPE_MAP.items()]
REVERSE_QUERY = """
UPDATE library_piece
SET "score_id" = %s
WHERE "score_type" = %s
"""
REVERSE = [(REVERSE_QUERY, [k, v]) for k, v in SCORE_TYPE_MAP.items()]
class Migration(migrations.Migration):
dependencies = [
('library', '0008_auto_20180323_2228'),
]
operations = [
migrations.AddField(
model_name='piece',
name='score_type',
field=models.CharField(blank=True, choices=[('NO_SCORE', 'No Score'), ('CONDENSED', 'Condensed'), ('PIANO', 'Piano'), ('FULL', 'Full')], max_length=12, null=True, verbose_name='Score Type'),
),
migrations.RunSQL(FORWARDS, REVERSE),
migrations.RemoveField(
model_name='piece',
name='score',
),
migrations.DeleteModel(
name='ScoreType',
),
]
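For reference, `migrations.RunSQL` accepts a list of `(sql, params)` pairs, which is exactly what the comprehensions above build. The first forward pair binds the mapped name to score id 1 (assuming insertion-ordered dicts, i.e. CPython 3.7+):

```python
# First (query, params) pair produced by the FORWARDS comprehension above.
first_sql, first_params = FORWARDS[0]
assert first_params == ['NO_SCORE', 1]
```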
forslund/mycroft-core | mycroft/metrics/__init__.py | Python | apache-2.0 | 6,083 | 0
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from queue import Queue, Empty
import threading
import time
import requests
from mycroft.api import DeviceApi, is_paired
from mycroft.configuration import Configuration
from mycroft.session import SessionManager
from mycroft.util.log import LOG
from mycroft.version import CORE_VERSION_STR
from copy import copy
class _MetricSender(threading.Thread):
"""Thread responsible for sending metrics data."""
def __init__(self):
super().__init__()
self.queue = Queue()
self.daemon = True
self.start()
def run(self):
while True:
time.sleep(30)
try:
while True:  # Try to read the queue until it fails
report_metric(*self.queue.get_nowait())
time.sleep(0.5)
except Empty:
pass # If the queue is empty just continue the loop
except Exception as e:
LOG.error('Could not send Metrics: {}'.format(repr(e)))
_metric_uploader = _MetricSender()
def report_metric(name, data):
"""
Report a general metric to the Mycroft servers
Args:
name (str): Name of metric. Must use only letters and hyphens
data (dict): JSON dictionary to report. Must be valid JSON
"""
try:
if is_paired() and Configuration().get()['opt_in']:
DeviceApi().report_metric(name, data)
except requests.RequestException as e:
LOG.error('Metric couldn\'t be uploaded, due to a network error ({})'
.format(e))
def report_timing(ident, system, timing, additional_data=None):
"""Create standardized message for reporting timing.
Args:
ident (str): identifier of user interaction
system (str): system that generated the report
timing (stopwatch): Stopwatch object with recorded timing
additional_data (dict): dictionary with related data
"""
additional_data = additional_data or {}
report = copy(additional_data)
report['id'] = ident
report['system'] = system
report['start_time'] = timing.timestamp
report['time'] = timing.time
_metric_uploader.queue.put(('timing', report))
class Stopwatch:
"""
Simple time measuring class.
"""
def __init__(self):
self.timestamp = None
self.time = None
def start(self):
"""
Start a time measurement
"""
self.timestamp = time.time()
def lap(self):
cur_time = time.time()
start_time = self.timestamp
self.timestamp = cur_time
return cur_time - start_time
def stop(self):
"""
Stop a running time measurement. returns the measured time
"""
cur_time = time.time()
start_time = self.timestamp
self.time = cur_time - start_time
return self.time
def __enter__(self):
"""
Start stopwatch when entering with-block.
"""
self.start()
def __exit__(self, tpe, value, tb):
"""
Stop stopwatch when exiting with-block.
"""
self.stop()
def __str__(self):
cur_time = time.time()
if self.timestamp:
return str(self.time or cur_time - self.timestamp)
else:
return 'Not started'
class MetricsAggregator:
"""
MetricsAggregator is not threadsafe, and multiple clients writing the
same metric "concurrently" may result in data loss.
"""
def __init__(self):
self._counters = {}
self._timers = {}
self._levels = {}
self._attributes = {}
self.attr("version", CORE_VERSION_STR)
def increment(self, name, value=1):
cur = self._counters.get(name, 0)
self._counters[name] = cur + value
def timer(self, name, value):
cur = self._timers.get(name)
if not cur:
cur = self._timers[name] = []
cur.append(value)
def level(self, name, value):
self._levels[name] = value
def clear(self):
self._counters = {}
self._timers = {}
self._levels = {}
self._attributes = {}
self.attr("version", CORE_VERSION_STR)
def attr(self, name, value):
self._attributes[name] = value
def flush(self):
publisher = MetricsPublisher()
payload = {
'counters': self._counters,
'timers': self._timers,
'levels': self._levels,
'attributes': self._attributes
}
self.clear()
count = (len(payload['counters']) + len(payload['timers']) +
len(payload['levels']))
if count > 0:
# LOG.debug(json.dumps(payload))
def publish():
publisher.publish(payload)
threading.Thread(target=publish).start()
class MetricsPublisher:
def __init__(self, url=None, enabled=False):
conf = Configuration().get()['server']
self.url = url or conf['url']
self.enabled = enabled or conf['metrics']
def publish(self, events):
if 'session_id' not in events:
session_id = SessionManager.get().session_id
events['session_id'] = session_id
if self.enabled:
requests.post(
self.url,
headers={'Content-Type': 'application/json'},
data=json.dumps(events), verify=False)
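A short usage sketch of the `Stopwatch` context manager defined above; note that `__enter__` returns `None`, so the instance must be created before the `with` block:

```python
import time

sw = Stopwatch()
with sw:                 # start() on entry, stop() on exit
    time.sleep(0.1)      # the code being measured
print(sw)                # str() reports the measured time in seconds
```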
kyubifire/softlayer-python | SoftLayer/CLI/vpn/ipsec/translation/add.py | Python | mit | 1,435 | 0
"""Add an address translation to an IPSEC tunnel context."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
# from SoftLayer.CLI.exceptions import ArgumentError
# from SoftLayer.CLI.exceptions import CLIHalt
@click.command()
@click.argument('context_id', type=int)
@click.option('-s',
'--static-ip',
required=True,
help='Static IP address value')
@click.option('-r',
'--remote-ip',
required=True,
help='Remote IP address value')
@click.option('-n',
'--note',
default=None,
help='Note value')
@environment.pass_env
def cli(env, context_id, static_ip, remote_ip, note):
"""Add an address translation to an IPSEC tunnel context.
A separate configuration request should be made to realize changes on
network devices.
"""
manager = SoftLayer.IPSECManager(env.client)
# ensure context can be retrieved by given id
manager.get_tunnel_context(context_id)
translation = manager.create_translation(context_id,
static_ip=static_ip,
remote_ip=remote_ip,
notes=note)
env.out('Created translation from {} to {} #{}'
.format(static_ip, remote_ip, translation['id']))
czhengsci/pymatgen | pymatgen/analysis/wulff.py | Python | mit | 21,287 | 0.000235
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module defines a WulffShape class to generate the Wulff shape from
a lattice, a list of indices and their corresponding surface energies.
The total area and volume of the Wulff shape, the weighted surface energy,
the anisotropy and the shape factor can also be calculated. Plotting from
a given view in terms of the Miller index is supported.
The lattice is from the conventional unit cell, and (hkil) for hexagonal
lattices.
If you use this code extensively, consider citing the following:
Tran, R.; Xu, Z.; Radhakrishnan, B.; Winston, D.; Persson, K. A.; Ong, S. P.
(2016). Surface energies of elemental crystals. Scientific Data.
"""
from __future__ import division, unicode_literals
from pymatgen.core.structure import Structure
from pymatgen.core.surface import get_recp_symmetry_operation
from pymatgen.util.coord import get_angle
import numpy as np
import scipy as sp
from scipy.spatial import ConvexHull
import logging
__author__ = 'Zihan Xu, Richard Tran, Shyue Ping Ong'
__copyright__ = 'Copyright 2013, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Zihan Xu'
__email__ = 'zix009@eng.ucsd.edu'
__date__ = 'May 5 2016'
logger = logging.getLogger(__name__)
def hkl_tuple_to_str(hkl):
"""
Prepare for display on plots
"(hkl)" for surfaces
Args:
hkl: in the form of [h, k, l] or (h, k, l)
"""
str_format = '($'
for x in hkl:
if x < 0:
str_format += '\\overline{' + str(-x) + '}'
else:
str_format += str(x)
str_format += '$)'
return str_format
def get_tri_area(pts):
"""
Given a list of coords for 3 points,
Compute the area of this triangle.
Args:
pts: [a, b, c] three points
"""
a, b, c = pts[0], pts[1], pts[2]
v1 = np.array(b) - np.array(a)
v2 = np.array(c) - np.array(a)
area_tri = abs(sp.linalg.norm(sp.cross(v1, v2)) / 2)
return area_tri
class WulffFacet(object):
"""
Helper container for each Wulff plane.
"""
def __init__(self, normal, e_surf, normal_pt, dual_pt, index, m_ind_orig,
             miller):
self.normal = normal
self.e_surf = e_surf
self.normal_pt = normal_pt
self.dual_pt = dual_pt
self.index = index
self.m_ind_orig = m_ind_orig
self.miller = miller
self.points = []
self.outer_lines = []
class WulffShape(object):
"""
Generate Wulff Shape from list of miller index and surface energies,
with given conventional unit cell.
surface energy (Jm^2) is the length of normal.
Wulff shape is the convex hull.
Based on:
http://scipy.github.io/devdocs/generated/scipy.spatial.ConvexHull.html
Process:
1. get wulff simplices
2. label with color
3. get wulff_area and other properties
.. attribute:: debug (bool)
.. attribute:: alpha
transparency
.. attribute:: color_set
.. attribute:: grid_off (bool)
.. attribute:: axis_off (bool)
.. attribute:: show_area
.. attribute:: off_color
color of facets off wulff
.. attribute:: structure
Structure object, input conventional unit cell (with H ) from lattice
.. attribute:: miller_list
list of input miller index, for hcp in the form of hkil
.. attribute:: hkl_list
hkil converted to hkl, in the same order as input_miller
.. attribute:: e_surf_list
list of input surface energies, in the same order as input_miller
.. attribute:: lattice
Lattice object, the input lattice for the conventional unit cell
.. attribute:: facets
[WulffFacet] for all facets considering symm
.. attribute:: dual_cv_simp
simplices from the dual convex hull (dual_pt)
.. attribute:: wulff_pt_list
.. attribute:: wulff_cv_simp
simplices from the convex hull of wulff_pt_list
.. attribute:: on_wulff
list for all input_miller, True is on wulff.
.. attribute:: color_area
list for all input_miller, total area on wulff, off_wulff = 0.
.. attribute:: miller_area
($hkl$): area for all input_miller
"""
def __init__(self, lattice, miller_list, e_surf_list, symprec=1e-5):
"""
Args:
lattice: Lattice object of the conventional unit cell
miller_list ([(hkl), ...]): list of hkl or hkil for hcp
e_surf_list ([float]): list of corresponding surface energies
symprec (float): for recp_operation, default is 1e-5.
"""
self.color_ind = list(range(len(miller_list)))
self.input_miller_fig = [hkl_tuple_to_str(x) for x in miller_list]
# store input data
self.structure = Structure(lattice, ["H"], [[0, 0, 0]])
self.miller_list = tuple([tuple(x) for x in miller_list])
self.hkl_list = tuple([(x[0], x[1], x[-1]) for x in miller_list])
self.e_surf_list = tuple(e_surf_list)
self.lattice = lattice
self.symprec = symprec
# 2. get all the data for wulff construction
# get all the surface normal from get_all_miller_e()
self.facets = self._get_all_miller_e()
logger.debug(len(self.facets))
# 3. consider the dual condition
dual_pts = [x.dual_pt for x in self.facets]
dual_convex = ConvexHull(dual_pts)
dual_cv_simp = dual_convex.simplices
# simplices (ndarray of ints, shape (nfacet, ndim))
# list of [i, j, k] , ndim = 3
# i, j, k: ind for normal_e_m
# recalculate the dual of dual, get the wulff shape.
# corner <-> surface
# get cross point from the simplices of the dual convex hull
wulff_pt_list = [self._get_cross_pt_dual_simp(dual_simp)
for dual_simp in dual_cv_simp]
wulff_convex = ConvexHull(wulff_pt_list)
wulff_cv_simp = wulff_convex.simplices
logger.debug(", ".join([str(len(x)) for x in wulff_cv_simp]))
# store simplices and convex
self.dual_cv_simp = dual_cv_simp
self.wulff_pt_list = wulff_pt_list
self.wulff_cv_simp = wulff_cv_simp
self.wulff_convex = wulff_convex
self.on_wulff, self.color_area = self._get_simpx_plane()
miller_area = []
for m, in_mill_fig in enumerate(self.input_miller_fig):
miller_area.append(
in_mill_fig + ' : ' + str(round(self.color_area[m], 4)))
self.miller_area = miller_area
def _get_all_miller_e(self):
"""
from self:
get miller_list(unique_miller), e_surf_list and symmetry
operations(symmops) according to lattice
apply symmops to get all the miller index, then get normal,
get all the facets functions for wulff shape calculation:
|normal| = 1, e_surf is plane's distance to (0, 0, 0),
normal[0]x + normal[1]y + normal[2]z = e_surf
return:
[WulffFacet]
"""
all_hkl = []
color_ind = self.color_ind
planes = []
recp = self.structure.lattice.reciprocal_lattice_crystallographic
recp_symmops = get_recp_symmetry_operation(self.structure, self.symprec)
for i, (hkl, energy) in enumerate(zip(self.hkl_list,
self.e_surf_list)):
for op in recp_symmops:
miller = tuple([int(x) for x in op.operate(hkl)])
if miller not in all_hkl:
all_hkl.append(miller)
normal = recp.get_cartesian_coords(miller)
normal /= sp.linalg.norm(normal)
normal_pt = [x * energy for x in normal]
dual_pt = [x / energy for x in normal]
color_plane = color_ind[divmod(i, len(color_ind))[1]]
planes.append(WulffFacet(normal, energy, normal_pt,
dual_pt, color_plane, i, hkl))
# sort by e_surf
planes.sort(key=lambda x: x.e_surf)
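A small numeric illustration of the facet bookkeeping described in the docstring above: with `|normal| = 1` and `e_surf` the plane's distance to the origin, `normal_pt` lies on the Wulff plane and `dual_pt` is its dual point (values are illustrative):

```python
import numpy as np

normal = np.array([0.0, 0.0, 1.0])  # unit normal, |normal| == 1
e_surf = 1.5                        # surface energy = plane distance to (0, 0, 0)
normal_pt = normal * e_surf         # point on the plane z = 1.5
dual_pt = normal / e_surf           # dual point at z = 2/3
```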
wfxiang08/Nuitka | nuitka/codegen/templates/CodeTemplatesFrames.py | Python | apache-2.0 | 8,340 | 0.001079
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Code templates for frames of all kinds.
"""
template_frame_guard_cache_decl = """\
static PyFrameObject *cache_%(frame_identifier)s = NULL;
"""
template_frame_guard_frame_decl = """\
PyFrameObject *%(frame_identifier)s;
"""
# Frame in a function
template_frame_guard_full_block = """\
MAKE_OR_REUSE_FRAME( cache_%(frame_identifier)s, %(code_identifier)s, %(module_identifier)s );
%(frame_identifier)s = cache_%(frame_identifier)s;
// Push the new frame as the currently active one.
pushFrameStack( %(frame_identifier)s );
// Mark the frame object as in use, ref count 1 will be up for reuse.
Py_INCREF( %(frame_identifier)s );
assert( Py_REFCNT( %(frame_identifier)s ) == 2 ); // Frame stack
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing += 1;
#endif
// Framed code:
%(codes)s
#if %(needs_preserve)d
RESTORE_FRAME_EXCEPTION( %(frame_identifier)s );
#endif
// Put the previous frame back on top.
popFrameStack();
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing -= 1;
#endif
Py_DECREF( %(frame_identifier)s );
goto %(no_exception_exit)s;
"""
template_frame_guard_full_return_handler = """\
%(frame_return_exit)s:;
#if %(needs_preserve)d
RESTORE_FRAME_EXCEPTION( %(frame_identifier)s );
#endif
popFrameStack();
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing -= 1;
#endif
Py_DECREF( %(frame_identifier)s );
goto %(return_exit)s;
"""
template_frame_guard_full_exception_handler = """\
%(frame_exception_exit)s:;
#if %(needs_preserve)d
RESTORE_FRAME_EXCEPTION( %(frame_identifier)s );
#endif
{
bool needs_detach = false;
if ( exception_tb == NULL )
{
exception_tb = %(tb_making)s;
needs_detach = true;
}
else if ( exception_lineno != -1 )
{
PyTracebackObject *traceback_new = MAKE_TRACEBACK( %(frame_identifier)s, exception_lineno );
traceback_new->tb_next = exception_tb;
exception_tb = traceback_new;
needs_detach = true;
}
if (needs_detach)
{
%(store_frame_locals)s
detachFrame( exception_tb, %(frame_locals_name)s );
}
}
popFrameStack();
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing -= 1;
#endif
Py_DECREF( %(frame_identifier)s );
// Return the error.
goto %(parent_exception_exit)s;
"""
# Frame for a module. TODO: Use it for functions called only once.
# TODO: The once guard need not take a reference count in its frame class.
template_frame_guard_once = """\
// Frame without reuse.
%(frame_identifier)s = MAKE_FRAME( %(code_identifier)s, %(module_identifier)s );
// Push the new frame as the currently active one, and we should be exclusively
// owning it.
pushFrameStack( %(frame_identifier)s );
assert( Py_REFCNT( %(frame_identifier)s ) == 1 );
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing += 1;
#endif
// Framed code:
%(codes)s
// Restore frame exception if necessary.
#if %(needs_preserve)d
RESTORE_FRAME_EXCEPTION( %(frame_identifier)s );
#endif
popFrameStack();
assertFrameObject( %(frame_identifier)s );
Py_DECREF( %(frame_identifier)s );
goto %(no_exception_exit)s;
%(frame_exception_exit)s:;
#if %(needs_preserve)d
RESTORE_FRAME_EXCEPTION( %(frame_identifier)s );
#endif
if ( exception_tb == NULL )
{
exception_tb = %(tb_making)s;
}
else if ( exception_tb->tb_frame != %(frame_identifier)s )
{
PyTracebackObject *traceback_new = MAKE_TRACEBACK( %(frame_identifier)s, exception_lineno );
traceback_new->tb_next = exception_tb;
exception_tb = traceback_new;
}
// Put the previous frame back on top.
popFrameStack();
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing -= 1;
#endif
Py_DECREF( %(frame_identifier)s );
// Return the error.
goto %(parent_exception_exit)s;
%(no_exception_exit)s:;"""
template_generator_initial_throw = """\
// Throwing into not started generators is possible. As they don't stand any
// chance to deal with them, we might as well create traceback on the
// outside.
if ( generator->m_exception_type )
{
generator->m_yielded = NULL;
exception_type = generator->m_exception_type;
generator->m_exception_type = NULL;
exception_value = generator->m_exception_value;
generator->m_exception_value = NULL;
exception_tb = generator->m_exception_tb;
generator->m_exception_tb = NULL;
if (exception_tb == NULL)
{
goto %(frame_exception_exit)s;
}
else
{
goto function_exception_exit;
}
}
"""
# Frame in a generator
template_frame_guard_generator = """\
MAKE_OR_REUSE_FRAME( cache_%(frame_identifier)s, %(code_identifier)s, %(module_identifier)s );
%(frame_identifier)s = cache_%(frame_identifier)s;
Py_INCREF( %(frame_identifier)s );
generator->m_frame = %(frame_identifier)s;
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_gen = (PyObject *)generator;
#endif
Py_CLEAR( generator->m_frame->f_back );
generator->m_frame->f_back = PyThreadState_GET()->frame;
Py_INCREF( generator->m_frame->f_back );
PyThreadState_GET()->frame = generator->m_frame;
Py_INCREF( generator->m_frame );
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing += 1;
#endif
#if PYTHON_VERSION >= 300
// Accept currently existing exception as the one to publish again when we
// yield or yield from.
PyThreadState *thread_state = PyThreadState_GET();
%(frame_identifier)s->f_exc_type = thread_state->exc_type;
if ( %(frame_identifier)s->f_exc_type == Py_None ) %(frame_identifier)s->f_exc_type = NULL;
Py_XINCREF( %(frame_identifier)s->f_exc_type );
%(frame_identifier)s->f_exc_value = thread_state->exc_value;
Py_XINCREF( %(frame_identifier)s->f_exc_value );
%(frame_identifier)s->f_exc_traceback = thread_state->exc_traceback;
Py_XINCREF( %(frame_identifier)s->f_exc_traceback );
#endif
// Framed code:
%(codes)s
#if PYTHON_VERSION >= 340
%(frame_identifier)s->f_executing -= 1;
#endif
#if PYTHON_VERSION >= 300
Py_CLEAR( %(frame_identifier)s->f_exc_type );
Py_CLEAR( %(frame_identifier)s->f_exc_value );
Py_CLEAR( %(frame_identifier)s->f_exc_traceback );
#endif
Py_DECREF( %(frame_identifier)s );
goto %(no_exception_exit)s;
"""
# TODO: This cannot happen, can it?
template_frame_guard_generator_return_handler = """\
%(frame_return_exit)s:;
#if PYTHON_VERSION >= 300
Py_CLEAR( %(frame_identifier)s->f_exc_type );
Py_CLEAR( %(frame_identifier)s->f_exc_value );
Py_CLEAR( %(frame_identifier)s->f_exc_traceback );
#endif
Py_DECREF( %(frame_identifier)s );
goto %(return_exit)s;
"""
template_frame_guard_generator_exception_handler = """\
%(frame_exception_exit)s:;
// If it's not an exit exception, consider and create a traceback for it.
if ( !EXCEPTION_MATCH_GENERATOR( exception_type ) )
{
int needs_detach = false;
if ( exception_tb == NULL )
{
exception_tb = %(tb_making)s;
needs_detach = true;
}
else if ( exception_tb->tb_frame != %(frame_identifier)s )
{
PyTracebackObject *traceback_new = MAKE_TRACEBACK( %(frame_identifier)s, exception_lineno );
traceback_new->tb_next = exception_tb;
exception_tb = traceback_new;
needs_detach = true;
}
if (needs_detach)
{
%(store_frame_locals)s
detachFrame( exception_tb, %(frame_locals_name)s );
}
}
#if PYTHON_VERSION >= 300
Py_CLEAR( %(frame_identifier)s->f_exc_type );
Py_CLEAR( %(frame_identifier)s->f_exc_value );
Py_CLEAR( %(frame_identifier)s->f_exc_traceback );
#endif
Py_DECREF( %(frame_identifier)s );
// Return the
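These templates are plain Python %-format strings; instantiating one with a mapping of identifiers shows how the generated C code is produced (the identifier value here is illustrative):

```python
# Illustrative instantiation of one of the templates above.
print(template_frame_guard_frame_decl % {"frame_identifier": "frame_module"})
# -> PyFrameObject *frame_module;
```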
unreal666/outwiker | src/test/wikiparser/test_wikicommandattachlist.py | Python | gpl-3.0 | 11,139 | 0.000187
# -*- coding: UTF-8 -*-
import os
import os.path
import unittest
from tempfile import mkdtemp
from outwiker.core.tree import WikiDocument
from outwiker.core.application import Application
from outwiker.core.attachment import Attachment
from outwiker.pages.wiki.parser.commands.attachlist import AttachListCommand
from outwiker.pages.wiki.parserfactory import ParserFactory
from outwiker.pages.wiki.thumbnails import Thumbnails
from outwiker.pages.wiki.wikipage import WikiPageFactory
from test.utils import removeDir
class WikiAttachListCommandTest (unittest.TestCase):
def setUp(self):
self.encoding = "utf8"
self.__createWiki()
self.testPage = self.wikiroot["Страница 1"]
factory = ParserFactory()
self.parser = factory.make(self.testPage, Application.config)
filesPath = "../test/samplefiles/"
self.files = [
"image.jpg",
"add.png",
"anchor.png",
"файл с пробелами.tmp",
"dir",
"for_sort"]
self.fullFilesPath = [
os.path.join(
filesPath,
fname) for fname in self.files]
def _attachFiles(self):
attach = Attachment(self.testPage)
attach.attach(self.fullFilesPath)
def __createWiki(self):
# The wiki will be created here
self.path = mkdtemp(prefix='Абырвалг абыр')
self.wikiroot = WikiDocument.create(self.path)
WikiPageFactory().create(self.wikiroot, "Страница 1", [])
def tearDown(self):
removeDir(self.path)
def _compareResult(self, titles, names, result):
attachdir = "__attach"
template = '<a href="{path}">{title}</a>\n'
result_right = "".join([template.format(path=os.path.join(attachdir, name).replace("\\", "/"), title=title)
for (name, title) in zip(names, titles)]).rstrip()
self.assertEqual(result_right, result)
def testCommand1(self):
self._attachFiles()
cmd = AttachListCommand(self.parser)
result = cmd.execute("", "")
titles = [
"[dir]",
"[for_sort]",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
names = [
"dir",
"for_sort",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
self._compareResult(titles, names, result)
def testParse1(self):
self._attachFiles()
text = "(:attachlist:)"
result = self.parser.toHtml(text)
titles = [
"[dir]",
"[for_sort]",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
names = [
"dir",
"for_sort",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
self._compareResult(titles, names, result)
def testParse2(self):
self._attachFiles()
# Test that the __thumb directory is ignored
thumb = Thumbnails(self.testPage)
thumb.getThumbPath(True)
text = "(:attachlist:)"
result = self.parser.toHtml(text)
titles = [
"[dir]",
"[for_sort]",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
names = [
"dir",
"for_sort",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
self._compareResult(titles, names, result)
def testCommandSortByName(self):
self._attachFiles()
cmd = AttachListCommand(self.parser)
result = cmd.execute("sort=name", "")
titles = [
"[dir]",
"[for_sort]",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
names = [
"dir",
"for_sort",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
self._compareResult(titles, names, result)
def testParseSortByName(self):
self._attachFiles()
text = "(:attachlist sort=name:)"
result = self.parser.toHtml(text)
titles = [
"[dir]",
"[for_sort]",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
names = [
"dir",
"for_sort",
"add.png",
"anchor.png",
"image.jpg",
"файл с пробелами.tmp"]
self._compareResult(titles, names, result)
def testCommandSortDescendName(self):
self._attachFiles()
cmd = AttachListCommand(self.parser)
result = cmd.execute("sort=descendname", "")
titles = [
"[for_sort]",
"[dir]",
"файл с пробелами.tmp",
"image.jpg",
"anchor.png",
"add.png"]
names = [
"for_sort",
"dir",
"файл с пробелами.tmp",
"image.jpg",
"anchor.png",
"add.png"]
self._compareResult(titles, names, result)
def testParseSortDescendName(self):
self._attachFiles()
text = "(:attachlist sort=descendname:)"
result = self.parser.toHtml(text)
titles = [
"[for_sort]",
"[dir]",
"
|
файл с пробелами.tmp",
"image.jpg",
"anchor.png",
"add.png"]
names = [
"for_sort",
"dir",
"файл с пробелами.tmp",
"image.jpg",
"anchor.png",
"add.png"]
self._compareResult(titles, names, result)
def testParseSortByExt(self):
self._attachFiles()
text = "(:attachlist sort=ext:)"
result = self.parser.toHtml(text)
titles = [
"[dir]",
"[for_sort]",
"image.jpg",
"add.png",
"anchor.png",
"файл с пробелами.tmp"]
names = [
"dir",
"for_sort",
"image.jpg",
"add.png",
"anchor.png",
"файл с пробелами.tmp"]
self._compareResult(titles, names, result)
def testParseSortDescendExt(self):
self._attachFiles()
text = "(:attachlist sort=descendext:)"
result = self.parser.toHtml(text)
titles = [
"[for_sort]",
"[dir]",
"файл с пробелами.tmp",
"anchor.png",
"add.png",
"image.jpg"]
names = [
"for_sort",
"dir",
"файл с пробелами.tmp",
"anchor.png",
"add.png",
"image.jpg"]
self._compareResult(titles, names, result)
def testParseSortBySize(self):
self._attachFiles()
text = "(:attachlist sort=size:)"
result = self.parser.toHtml(text)
titles = [
"[dir]",
"[for_sort]",
"файл с пробелами.tmp",
"anchor.png",
"add.png",
"image.jpg"]
names = [
"dir",
"for_sort",
"файл с пробелами.tmp",
"anchor.png",
"add.png",
"image.jpg"]
self._compareResult(titles, names, result)
def testParseSortDescendSize(self):
self._attachFiles()
text = "(:attachlist sort=descendsize:)"
result = self.parser.toHtml(text)
titles = [
"[for_sort]",
"[dir]",
"image.jpg",
"add.png",
"anchor.png",
"файл с пробелами.tmp"]
names = [
"for_sort",
"dir",
"image.jpg",
"add.png",
"anchor.png",
"файл с пробелами.tmp"]
self._compareResult(titles, names, result)
def testParseSortByDate(self):
piotrdubiel/scribeserver | app/recognition/admin.py | Python | mit | 220 | 0
from flask.ext.admin.contrib.mongoengine import ModelView
from .models import Prediction
from app.management.mixins import AdminMixin
class PredictionView(AdminMixin, ModelView):
column_searchable_list = ('text',)
wesleykendall/django-issue | issue/tests/model_tests.py | Python | mit | 22,256 | 0.002696
from datetime import datetime, timedelta
import json
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from django_dynamic_fixture import G, N
from freezegun import freeze_time
from mock import patch
from issue.models import (
Assertion, ExtendedEnum, Issue, IssueAction, IssueStatus, ModelAssertion, ModelIssue, Responder, ResponderAction,
load_function,
)
from issue.tests.models import TestModel
def function_to_load(self, *args, **kwargs):
pass
def is_even_number(record):
return ((int(record.name) % 2) == 0, {})
class LoadFunctionTests(TestCase):
def test_load_class_instance(self):
func = load_function('issue.tests.model_tests.function_to_load')
self.assertEqual(func, function_to_load)
class ExtendedEnumTests(TestCase):
class TestEnum(ExtendedEnum):
red = 1
blue = 3
green = 2
def test_name_to_value(self):
self.assertEqual(2, ExtendedEnumTests.TestEnum.name_to_value('green'))
def test_choices(self):
self.assertEqual(
set([(1, 'red'), (2, 'green'), (3, 'blue')]), set(ExtendedEnumTests.TestEnum.choices()))
class IssueManagerTests(TestCase):
def test_get_open_issues(self):
i = G(Issue)
G(Issue, status=IssueStatus.Resolved.value)
i3 = G(Issue)
self.assertEqual(set(Issue.objects.get_open_issues()), set([i, i3]))
def test_reopen_issue(self):
mi = G(Issue, status=IssueStatus.Resolved.value)
Issue.objects.reopen_issue(name=mi.name)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(pk=mi.pk).status)
def test_is_wont_fix(self):
mi = G(Issue, status=IssueStatus.Wont_fix.value)
self.assertTrue(Issue.objects.is_wont_fix(name=mi.name))
def test_maybe_open_issue_when_none_exists(self):
"""
Verify that maybe_open_issue will create a new Issue when none like it exists.
"""
(issue, created) = Issue.objects.maybe_open_issue(name='falafel')
self.assertTrue(created)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(name=issue.name).status)
def test_maybe_open_issue_when_it_is_marked_as_wont_fix(self):
"""
Verify that maybe_open_issue will not create or return an Issue when it exists and
is marked as WONT_FIX.
"""
issue = G(Issue, status=IssueStatus.Wont_fix.value)
self.assertEqual((None, False), Issue.objects.maybe_open_issue(name=issue.name))
self.assertEqual(IssueStatus.Wont_fix.value, Issue.objects.get(pk=issue.pk).status)
self.assertEqual(1, Issue.objects.filter(name=issue.name).count())
def test_maybe_open_issue_returns_already_open_issue(self):
"""
Verify that maybe_open_issue will return the extant Issue of the provided name
when it is open.
"""
issue = G(Issue, status=IssueStatus.Open.value)
(issue2, created) = Issue.objects.maybe_open_issue(name=issue.name)
self.assertFalse(created)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(pk=issue.pk).status)
self.assertEqual(1, Issue.objects.filter(name=issue.name).count())
def maybe_open_issue_when_it_is_marked_as_resolved(self):
"""
Verify that maybe_open_issue will create a new issue when a Resolved one
exists with the same name.
"""
issue = G(Issue, status=IssueStatus.Resolved.value)
(issue2, created) = Issue.objects.maybe_open_issue(name=issue.name)
self.assertTrue(created)
self.assertEqual(IssueStatus.Open.value, Issue.objects.get(pk=issue2.pk).status)
self.assertEqual(2, Issue.objects.get(name=issue2.name))
def test_resolve_open_issue(self):
a = G(Assertion)
issue = G(Issue, name=a.name, status=IssueStatus.Open.value)
a._resolve_open_issue()
self.assertEqual(IssueStatus.Resolved.value, Issue.objects.get(pk=issue.pk).status)
class ModelIssueManagerTests(TestCase):
def test_replace_record_with_content_type(self):
record = N(TestModel)
kwargs = {
'record': record,
}
expected_kwargs = {
'record_id': record.id,
'record_type': ContentType.objects.get_for_model(record),
}
self.assertEqual(
expected_kwargs, ModelIssue.objects._replace_record_with_content_type(kwargs))
def test_replace_record_with_content_type_with_no_record(self):
self.assertEqual({}, ModelIssue.objects._replace_record_with_content_type({}))
def test_reopen_issue(self):
record = G(TestModel)
mi = G(
ModelIssue, record_id=record.id, record_type=ContentType.objects.get_for_model(record),
status=IssueStatus.Resolved.value)
ModelIssue.objects.reopen_issue(name=mi.name, record=mi.record)
self.assertEqual(IssueStatus.Open.value, ModelIssue.objects.get(pk=mi.pk).status)
def test_is_wont_fix(self):
record = G(TestModel)
mi = G(
ModelIssue, record_id=record.id, record_type=ContentType.objects.get_for_model(record),
status=IssueStatus.Wont_fix.value)
self.assertTrue(ModelIssue.objects.is_wont_fix(name=mi.name, record=mi.record))
class IssueTests(TestCase):
def test__str__(self):
i = Issue(name='an-issue', status=IssueStatus.Resolved.value)
self.assertEqual('Issue: an-issue - IssueStatus.Resolved', str(i))
def test__is_open(self):
i = N(Issue, status=IssueStatus.Open.value)
self.assertTrue(i.is_open)
self.assertFalse(i.is_resolved)
self.assertFalse(i.is_wont_fix)
def test__is_resolved(self):
i = N(Issue, status=IssueStatus.Resolved.value)
self.assertTrue(i.is_resolved)
self.assertFalse(i.is_open)
self.assertFalse(i.is_wont_fix)
def test__is_wont_fix(self):
i = N(Issue, status=IssueStatus.Wont_fix.value)
self.assertTrue(i.is_wont_fix)
self.assertFalse(i.is_resolved)
self.assertFalse(i.is_open)
class IssueActionTests(TestCase):
def test__str__(self):
ia = N(IssueAction)
self.assertEqual(
'IssueResponse: {self.issue.name} - {self.responder_action} - '
'{self.success} at {self.execution_time}'.format(self=ia),
str(ia)
)
class ResponderTests(TestCase):
def test__str__(self):
self.assertEqual(
'Responder: error-.*',
str(Responder(watch_pattern='error-.*'))
)
@patch('issue.models.load_function', spec_set=True)
def test_respond(self, load_function):
# Setup the scenario
target_function = 'do'
issue = G(Issue, name='error-42')
responder = G(Responder, issue=issue, watch_pattern='error-\d+')
G(ResponderAction, responder=responder, target_function=target_function, delay_sec=0)
# Run the code
r = responder.respond(issue)
# Verify expectations
self.assertTrue(r)
load_function.assert_called_with(target_function)
@patch('issue.models.load_function', spec_set=True)
    def test_respond_ignores_non_watching_pattern(self, load_function):
# Setup the scenario
issue = G(Issue, name='success')
responder = G(Responder, issue=issue, watch_pattern='error-\d+')
G(ResponderAction, responder=responder, target_function='do')
# Run the code
r = responder.respond(issue)
# Verify expectations
self.assertFalse(r)
self.assertFalse(load_function.called)
def test__match(self):
r = Responder(watch_pattern='error-.*')
self.assertTrue(r._match('error-42'))
self.assertFalse(r._match('success'))
def test__get_pending_actions_for_issue(self):
# Setup the scenario
now = datetime(2014, 8, 11, 15, 0, 0)
delta = timedelta(minutes=30)
r = G(Responder)
ra = G(ResponderAction, responder=r, delay_sec=delta.total_seconds())
issue = G(Issue, creation_time=now - (delta * 2))
# Run the code
|
jperla/spark-advancers
|
compareResult.py
|
Python
|
bsd-3-clause
| 358
| 0.002793
|
#!/usr/bin/env python
import math
f_truth = open('lr_good_weights.txt', 'r')
f_result = open('ALRresult.log', 'r')
frlines = f_result.readlines()
ftlines = f_truth.readlines()
c = 0
margin = 0.001
for i, line in enumerate(ftlines):
    if math.fabs(float(line) - float(frlines[i])) > margin:
c = c + 1
print c
f_truth.close()
f_result.close()
|
jokuf/hack-blog
|
users/forms.py
|
Python
|
mit
| 1,762
| 0
|
from django import forms
from .models import Author
class RegisterNewAuthorForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput())
repeated_password = forms.CharField(widget=forms.PasswordInput())
class Meta:
model = Author
fields = ['email', 'first_name', 'last_name']
def __init__(self, *args, **kwargs):
super(RegisterNewAuthorForm, self).__init__(*args, **kwargs)
for visible in self.visible_fields():
visible.field.widget.attrs['class'] = 'form-control'
def clean(self):
cleaned_data = super(RegisterNewAuthorForm, self).clean()
password = cleaned_data.get("password")
repeated_password = cleaned_data.get("repeated_password")
if password == repeated_password:
return cleaned_data
raise forms.ValidationError("Passwords does no
|
t match! ")
class LoginForm(forms.Form):
email = forms.EmailField()
password = forms.CharField(widget=forms.PasswordInput())
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
        for visible in self.visible_fields():
visible.field.widget.attrs['class'] = 'form-control'
def clean(self):
cleaned_data = super(LoginForm, self).clean()
email = cleaned_data.get("email")
password = cleaned_data.get("password")
try:
author = Author.objects.get(email=email)
if author.check_password(password):
return cleaned_data
raise forms.ValidationError("Wrong password")
except Author.DoesNotExist:
error_msg = "User with email {} does not exist".format(email)
raise forms.ValidationError(error_msg)
|
F483/bikesurf.org
|
apps/page/models.py
|
Python
|
mit
| 1,607
| 0.011201
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Fabian Barkhau <fabian.barkhau@gmail.com>
# License: MIT (see LICENSE.TXT file)
from django.db import models
from django.utils.translation import ugettext as _
from django.core.validators import RegexValidator
from sanitizer.models import SanitizedCharField
from config.settings import SANITIZER_ALLOWED_TAGS, SANITIZER_ALLOWED_ATTRIBUTES
from apps.account.models import Account
class Page(models.Model):
team = models.ForeignKey("team.Team", related_name="pages")
link = models.SlugField(unique=True)
name = models.CharField(max_length=1024)
content = SanitizedCharField(
max_length=50000, allowed_tags=SANITIZER_ALLOWED_TAGS,
allowed_attributes=SANITIZER_ALLOWED_ATTRIBUTES, strip=False
)
order = models.IntegerField(blank=True, null=True)
# meta
created_by = models.ForeignKey(Account, related_name="pages_created")
created_on = models.DateTimeField(auto_now_add=True)
updated_by = models.ForeignKey(Account, related_name="pages_updated")
updated_on = models.DateTimeField(auto_now=True)
def __unicode__(self):
return u"%s: %s" % (self.t
|
eam.name, self.name)
class Meta:
unique_together = (("team", "name"), ("team", "link"))
        ordering = ["order", "name"]
|
bheinzerling/bpemb
|
bpemb/__init__.py
|
Python
|
mit
| 25
| 0
|
from .bpemb import BPEmb
|
sencha/chromium-spacewalk
|
tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback_viaheader.py
|
Python
|
bsd-3-clause
| 701
| 0.00428
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class FallbackViaHeaderPage(page_module.Page):
def __init__(self, url, page_set):
super(FallbackViaHeaderPage, self).__init__(url=url, page_set=page_set)
class FallbackViaHeaderPageSet(page_set_module.PageSet):
""" Chrome proxy test
|
sites """
def __init__(self):
super(FallbackViaHeaderPageSet, self).__init__()
self.AddPage(FallbackViaHeaderPage(
        'http://chromeproxy-test.appspot.com/default', self))
|
awakenting/gif_fitting
|
bbp_analysis/bluebrain_data_io.py
|
Python
|
gpl-3.0
| 9,533
| 0.004196
|
# coding: utf-8
import os
import urllib
import numpy as np
import pickle
from Experiment import Experiment
ROOT_PATH = './full_dataset/article_4_data/grouped_ephys'
ZIPFILE_PATH = './full_dataset/article_4_data'
EXPM_PATH = './results/experiments/'
URL = 'http://microcircuits.epfl.ch/data/released_data/'
if not os.path.exists(EXPM_PATH):
os.makedirs(EXPM_PATH)
if not os.path.exists(ROOT_PATH):
print('It seems that the directory of the raw data does not exist. It is expected to be at: ' + ROOT_PATH)
if not os.path.exists(ZIPFILE_PATH):
    print('It seems that the directory with the zip files does not exist. It is expected to be at: ' + ZIPFILE_PATH)
# ==============================================================================
# General io function
# ==============================================================================
def download_info_from_url(url):
"""
    Download content from url and return it.
"""
r = urllib.request.urlopen(url)
data = r.read()
    data = data.decode(encoding='UTF-8')
return data
def get_genetic_cell_infos(filepath):
"""
Downloads genetic information from cells in the directory at filepath.
"""
filelist = os.listdir(filepath)
raw_names = [name[0:-4] for name in filelist]
cell_names = []
for name in raw_names:
# if name.rfind('ET') == -1:
cell_names.append(name)
infos = {}
for cell in cell_names:
url_complete = URL + cell + '.txt'
try:
infos[cell] = download_info_from_url(url_complete)
except Exception:
next
return infos
def save_filtered_cell_infos(filtername, criterion1='SOM:1', criterion2='PV:0', criterion3='VIP:0'):
"""
Gets genetic information from all cells in ZIPFILE_PATH directory, filters them by the given
criterions and saves the filtered list with pickle.
"""
infos = get_genetic_cell_infos(ZIPFILE_PATH)
desired_cells = {}
for cell in infos.keys():
if criterion1 in infos[cell] and criterion2 in infos[cell] and criterion3 in infos[cell]:
desired_cells[cell] = infos[cell]
with open(filtername + '_infos.pkl', 'wb') as f:
pickle.dump(desired_cells, f)
def save_all_cell_infos(filepath):
"""
Saves genetic information from all cells in ZIPFILE_PATH directory in one list with pickle.
"""
infos = get_genetic_cell_infos(filepath)
with open('cell_infos_full.pkl', 'wb') as f:
pickle.dump(infos, f)
def open_filtered_cell_info_list(filtername):
"""
Opens the list that was saved with save_filtered_cell_infos with the given filtername.
"""
with open(filtername + '_infos.pkl', 'rb') as f:
filtered_list = pickle.load(f)
return filtered_list
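# Typical round-trip with the two helpers above (illustrative only; it assumes
# the zip files expected under ZIPFILE_PATH are in place):
#
#     save_filtered_cell_infos('som_cells')                  # writes som_cells_infos.pkl
#     som_infos = open_filtered_cell_info_list('som_cells')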
def create_experiments_from_list(cells, cell_type, verbose=True):
"""
Creates Experiment objects for cells in cells, adds all existing traces and saves them.
Params:
    - cells: List with cell names or dictionary where the keys are the cell names.
"""
if type(cells) is dict:
cell_names = list(cells.keys())
else:
cell_names = cells
ncells = len(cell_names)
for i in range(ncells):
PATH = os.path.join(ROOT_PATH, cell_names[i])
animal_files = sorted(os.listdir(PATH))
ntraces = int(len(animal_files) / 2)
current_exp = Experiment('Cell_' + cell_names[i] + '_single_traces', cell_type=cell_type)
exp_merged_traces = Experiment('Cell_' + cell_names[i] + '_merged_idrest_traces', cell_type=cell_type)
nincluded_idrest_traces = 0
for j in np.arange(ntraces):
# files end with 'recordingType_recordingNumber.ibw'
file_split = str.split(animal_files[j][0:-4], '_')
file_identifier = file_split[-2] + '_' + file_split[-1] + '.ibw'
current_recording_type = file_split[-2]
# find indeces of matching files in folder (current file always comes first because it's always Ch0)
file_idc = [i for i, elem in enumerate(animal_files) if file_identifier in elem]
current_file = animal_files[file_idc[0]]
voltage_file = animal_files[file_idc[1]]
current_exp.add_trainingset_trace(os.path.join(PATH, voltage_file), 10 ** -3,
os.path.join(PATH, current_file), 10 ** -12, FILETYPE='Igor',
verbose=verbose)
tr = current_exp.trainingset_traces[j]
tr.recording_type = current_recording_type
tr.estimate_input_amp()
if current_recording_type == 'IDRest':
exp_merged_traces.add_trainingset_trace(os.path.join(PATH, voltage_file), 10 ** -3,
os.path.join(PATH, current_file), 10 ** -12, FILETYPE='Igor',
verbose=verbose)
            tr = exp_merged_traces.trainingset_traces[nincluded_idrest_traces]
tr.recording_type = current_recording_type
tr.estimate_input_amp()
nincluded_idrest_traces += 1
if not len(exp_merged_traces.trainingset_traces) < 3:
exp_merged_traces.mergeTrainingTraces()
exp_merged_traces.save(os.path.join(EXPM_PATH), verbose=verbose)
current_exp.save(os.path.join(EXPM_PATH), verbose=verbose)
def load_merged_traces_experiments_from_list(cells, verbose=True):
"""
Load experiments where IDRest traces have been merged.
This function will try to load an experiment with merged IDRest traces for all cells
in the list and just skip the ones for which it is not found. If no experiments were
found, None is returned.
Params:
    - cells: List with cell names or dictionary where the keys are the cell names.
See also:
load_single_traces_experiments_from_list()
"""
if type(cells) is dict:
cell_names = list(cells.keys())
else:
cell_names = cells
expms = []
for i in range(len(cell_names)):
current_expm_name = 'Experiment_Cell_' + cell_names[i] + '_merged_idrest_traces.pkl'
current_expm_path = os.path.join(EXPM_PATH, current_expm_name)
try:
current_expm = Experiment.load(current_expm_path, verbose=verbose)
expms.append(current_expm)
except:
pass
if not len(expms) == 0:
return expms
else:
return None
def load_single_traces_experiments_from_list(cells, verbose=True):
"""
Load experiments where traces have been added separately.
Params:
    - cells: List with cell names or dictionary where the keys are the cell names.
See also:
load_merged_traces_experiments_from_list()
"""
if type(cells) is dict:
cell_names = list(cells.keys())
else:
cell_names = cells
expms = []
for i in range(len(cell_names)):
current_expm_name = 'Experiment_Cell_' + cell_names[i] + '_single_traces.pkl'
current_expm_path = os.path.join(EXPM_PATH, current_expm_name)
try:
current_expm = Experiment.load(current_expm_path, verbose=verbose)
expms.append(current_expm)
except:
pass
if not len(expms) == 0:
return expms
else:
return None
# ==============================================================================
# From here on it's interneuron-specific functions
# ==============================================================================
def create_interneuron_specific_experiments(verbose=True):
"""
Filters cell infos for SOM, PV and VIP neurons, loads them and creates
Experiment objects.
"""
# create and save filtered info lists for SOM, PV and VIP neurons
save_filtered_cell_infos('som_cells', criterion1='SOM:1', criterion2='PV:0', criterion3='VIP:0')
save_filtered_cell_infos('pv_cells', criterion1='SOM:0', criterion2='PV:1', criterion3='VIP:0')
    save_filtered_cell_infos('vip_cells', criterion1='SOM:0', criterion2='PV:0', criterion3='VIP:1')
|
jundongl/scikit-feast
|
skfeature/example/test_CMIM.py
|
Python
|
gpl-2.0
| 1,485
| 0.003367
|
import scipy.io
from sklearn.metrics import accuracy_score
from sklearn import cross_validation
from sklearn import svm
from skfeature.function.information_theoretical_based import CMIM
def main():
# load data
mat = scipy.io.loadmat('../data/colon.mat')
X = mat['X'] # data
X = X.astype(float)
y = mat['Y'] # label
y = y[:, 0]
n_samples, n_features = X.shape # number of samples and number of features
# split data into 10 folds
ss = cross_validation.KFold(n_samples, n_folds=10, shuffle=True)
    # perform evaluation on classification task
num_fea = 10 # number of selected features
clf = svm.LinearSVC() # linear SVM
correct = 0
for train, test in ss:
# obtain the index of each feature on the training set
idx,_,_ = CMIM.cmim(X[train], y[train], n_selected_features=num_fea)
# obtain the dataset on the selected features
features = X[:, idx[0:num_fea]]
# train a classification model with the selected features on the training dataset
clf.fit(features[train], y[train])
# predict the class labels of test data
y_predict = clf.predict(features[test])
# obtain the classification accuracy on the test data
acc = accuracy_score(y[test], y_predict)
correct = correct + acc
# output the average classification accuracy over all 10 folds
print 'Accuracy:', float(correct)/10
if __name__ == '__main__':
main()
|
ecolitan/fatics
|
venv/lib/python2.7/site-packages/MySQLdb/cursors.py
|
Python
|
agpl-3.0
| 17,253
| 0.00284
|
"""MySQLdb Cursors
This module implements Cursors of various types for MySQLdb. By
default, MySQLdb uses the Cursor class.
"""
import re
import sys
try:
from types import ListType, TupleType, UnicodeType
except ImportError:
# Python 3
ListType = list
TupleType = tuple
UnicodeType = str
restr = r"""
\s
values
\s*
(
\(
[^()']*
(?:
(?:
(?:\(
            # ( - editor highlighting helper
[^)]*
\))
|
'
[^\\']*
(?:\\.[^\\']*)*
'
)
[^()']*
)*
\)
)
"""
insert_values = re.compile(restr, re.S | re.I | re.X)
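# Illustrative example of what the pattern above captures; group 1 is the
# parenthesised VALUES clause that executemany() re-renders once per row:
#
#     m = insert_values.search("INSERT INTO t (a, b) VALUES (%s, %s)")
#     m.group(1)  # -> "(%s, %s)"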
from _mysql_exceptions import Warning, Error, InterfaceError, DataError, \
DatabaseError, OperationalError, IntegrityError, InternalError, \
NotSupportedError, ProgrammingError
class BaseCursor(object):
"""A base for Cursor classes. Useful attributes:
description
A tuple of DB API 7-tuples describing the columns in
the last executed query; see PEP-249 for details.
description_flags
Tuple of column flags for last query, one entry per column
in the result set. Values correspond to those in
MySQLdb.constants.FLAG. See MySQL documentation (C API)
for more information. Non-standard extension.
arraysize
default number of rows fetchmany() will fetch
"""
from _mysql_exceptions import MySQLError, Warning, Error, InterfaceError, \
DatabaseError, DataError, OperationalError, IntegrityError, \
InternalError, ProgrammingError, NotSupportedError
_defer_warnings = False
def __init__(self, connection):
from weakref import proxy
self.connection = proxy(connection)
self.description = None
self.description_flags = None
self.rowcount = -1
self.arraysize = 1
self._executed = None
self.lastrowid = None
self.messages = []
self.errorhandler = connection.errorhandler
self._result = None
self._warnings = 0
self._info = None
self.rownumber = None
def __del__(self):
self.close()
self.errorhandler = None
self._result = None
def close(self):
"""Close the cursor. No further queries will be possible."""
if not self.connection: return
while self.nextset(): pass
self.connection = None
def _check_executed(self):
if not self._executed:
self.errorhandler(self, ProgrammingError, "execute() first")
def _warning_check(self):
from warnings import warn
if self._warnings:
warnings = self._get_db().show_warnings()
if warnings:
# This is done in two loops in case
# Warnings are set to raise exceptions.
for w in warnings:
self.messages.append((self.Warning, w))
for w in warnings:
warn(w[-1], self.Warning, 3)
        elif self._info:
            self.messages.append((self.Warning, self._info))
            warn(self._info, self.Warning, 3)
def nextset(self):
"""Advance to the next result set.
Returns None if there are no more result sets.
"""
if self._executed:
self.fetchall()
del self.messages[:]
db = self._get_db()
nr = db.next_result()
if nr == -1:
return None
self._do_get_result()
self._post_get_result()
self._warning_check()
return 1
def _post_get_result(self): pass
def _do_get_result(self):
db = self._get_db()
self._result = self._get_result()
self.rowcount = db.affected_rows()
self.rownumber = 0
self.description = self._result and self._result.describe() or None
self.description_flags = self._result and self._result.field_flags() or None
self.lastrowid = db.insert_id()
self._warnings = db.warning_count()
self._info = db.info()
def setinputsizes(self, *args):
"""Does nothing, required by DB API."""
def setoutputsizes(self, *args):
"""Does nothing, required by DB API."""
def _get_db(self):
if not self.connection:
self.errorhandler(self, ProgrammingError, "cursor closed")
return self.connection
def execute(self, query, args=None):
"""Execute a query.
query -- string, query to execute on server
args -- optional sequence or mapping, parameters to use with query.
Note: If args is a sequence, then %s must be used as the
parameter placeholder in the query. If a mapping is used,
%(key)s must be used as the placeholder.
Returns long integer rows affected, if any
"""
del self.messages[:]
db = self._get_db()
if isinstance(query, unicode):
query = query.encode(db.unicode_literal.charset)
if args is not None:
query = query % db.literal(args)
try:
r = None
r = self._query(query)
except TypeError, m:
if m.args[0] in ("not enough arguments for format string",
"not all arguments converted"):
self.messages.append((ProgrammingError, m.args[0]))
self.errorhandler(self, ProgrammingError, m.args[0])
else:
self.messages.append((TypeError, m))
self.errorhandler(self, TypeError, m)
except (SystemExit, KeyboardInterrupt):
raise
except:
exc, value, tb = sys.exc_info()
del tb
self.messages.append((exc, value))
self.errorhandler(self, exc, value)
self._executed = query
if not self._defer_warnings: self._warning_check()
return r
def executemany(self, query, args):
"""Execute a multi-row query.
query -- string, query to execute on server
args
Sequence of sequences or mappings, parameters to use with
query.
Returns long integer rows affected, if any.
This method improves performance on multiple-row INSERT and
REPLACE. Otherwise it is equivalent to looping over args with
execute().
"""
del self.messages[:]
db = self._get_db()
if not args: return
if isinstance(query, unicode):
query = query.encode(db.unicode_literal.charset)
m = insert_values.search(query)
if not m:
r = 0
for a in args:
r = r + self.execute(query, a)
return r
p = m.start(1)
e = m.end(1)
qv = m.group(1)
try:
q = [ qv % db.literal(a) for a in args ]
except TypeError, msg:
if msg.args[0] in ("not enough arguments for format string",
"not all arguments converted"):
self.errorhandler(self, ProgrammingError, msg.args[0])
else:
self.errorhandler(self, TypeError, msg)
except (SystemExit, KeyboardInterrupt):
raise
except:
exc, value, tb = sys.exc_info()
del tb
self.errorhandler(self, exc, value)
r = self._query('\n'.join([query[:p], ',\n'.join(q), query[e:]]))
if not self._defer_warnings: self._warning_check()
return r
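    # Illustrative use of executemany() (hypothetical table and values, shown
    # only to make the VALUES rewriting above concrete):
    #
    #     cur.executemany("INSERT INTO t (a, b) VALUES (%s, %s)",
    #                     [(1, 2), (3, 4)])
    #
    # This sends one multi-row INSERT instead of two separate round trips.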
def callproc(self, procname, args=()):
"""Execute stored procedure procname with args
procname -- string, name of procedure to execute on server
args -- Sequence of parameters to use with procedure
Returns the original args.
Compatibility warning: PEP-249 specifies that a
|
dimchenkoAlexey/python_training
|
fixture/contact.py
|
Python
|
apache-2.0
| 3,347
| 0.001793
|
__author__ = 'admin'
class ContactHelper:
def __init__(self, app):
self.app = app
def create(self, contact):
self.fill_contact_fields(contact)
def modify(self, contact):
# modify contact
self.click_edit_button()
self.fill_contact_fields(contact)
def set_field_value(self, field_name, value):
if value is not None:
wd = self.app.wd
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(value)
def select_list_item(self, list_id, value):
if value is not None:
wd = self.app.wd
xpath = "//div[@id='content']/form/select[" + list_id + "]//option[" + value + "]"
if not wd.find_element_by_xpath(xpath).is_selected():
wd.find_element_by_xpath(xpath).click()
def fill_contact_fields(self, contact):
wd = self.app.wd
wd.find_element_by_link_text("add new").click()
self.set_field_value("firstname", contact.firstname)
self.set_field_value("middlename", contact.middlename)
self.set_field_value("lastname", contact.lastname)
self.set_field_value("nickname", contact.nickname)
self.set_field_value("title", contact.title)
self.set_field_value("company", contact.company)
self.set_field_value("address", contact.address)
self.set_field_value("home", contact.phone_home)
self.set_field_value("mobile", contact.phone_mobile)
self.set_field_value("work", contact.phone_work)
self.set_field_value("fax", contact.fax)
self.set_field_value("email", contact.email_first)
self.set_field_value("email2", contact.email_second)
self.set_field_value("email3", contact.email_third)
self.set_field_value("homepage", contact.homepage)
self.set_field_value("homepage", contact.homepage)
self.select_list_item("1", contact.birth_day_list_item)
self.select_list_item("2", contact.birth_month_list_item)
self.set_field_value("byear", contact.birth_year)
self.select_list_item("3", contact.anniversary_day_list_item)
self.select_list_item("4", contact.anniversary_month_list_item)
        self.set_field_value("ayear", contact.anniversary_year)
self.set_field_value("address2", contact.second_address)
self.set_field_value("phone2", contact.second_phone)
self.set_field_value("notes", contact.notes)
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
def delete_first_contact(self):
wd = self.app.wd
wd.find_element_by_name("se
|
lected[]").click()
wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
def delete_all_contacts(self):
wd = self.app.wd
mass_checkbox = wd.find_element_by_id("MassCB")
if not mass_checkbox.is_selected():
mass_checkbox.click()
wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
def click_edit_button(self):
wd = self.app.wd
wd.find_element_by_css_selector("img[alt=\"Edit\"]").click()
def count(self):
wd = self.app.wd
return len(wd.find_elements_by_name("selected[]"))
|
wood-galaxy/FreeCAD
|
src/Mod/Plot/Plot.py
|
Python
|
lgpl-2.1
| 13,577
| 0.002652
|
#***************************************************************************
#* *
#* Copyright (c) 2011, 2012 *
#* Jose Luis Cercos Pita <jlcercos@gmail.com> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import FreeCAD
import PySide
from PySide import QtCore, QtGui
from distutils.version import LooseVersion as V
try:
import matplotlib
matplotlib.use('Qt4Agg')
matplotlib.rcParams['backend.qt4']='PySide'
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
if V(matplotlib.__version__) < V("1.4.0"):
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar
else:
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
except ImportError:
msg = PySide.QtGui.QApplication.translate(
"plot_console",
"matplotlib not found, so Plot module can not be loaded",
None,
PySide.QtGui.QApplication.UnicodeUTF8)
FreeCAD.Console.PrintMessage(msg + '\n')
raise ImportError("matplotlib not installed")
def getMainWindow():
""" Return the FreeCAD main window. """
toplevel = PySide.QtGui.QApplication.topLevelWidgets()
for i in toplevel:
if i.metaObject().className() == "Gui::MainWindow":
return i
return None
def getMdiArea():
""" Return FreeCAD MdiArea. """
mw = getMainWindow()
if not mw:
return None
childs = mw.children()
for c in childs:
if isinstance(c, PySide.QtGui.QMdiArea):
return c
return None
def getPlot():
""" Return the selected Plot document if exist. """
# Get active tab
mdi = getMdiArea()
if not mdi:
return None
sub = mdi.activeSubWindow()
if not sub:
return None
# Explore childrens looking for Plot class
for i in sub.children():
if i.metaObject().className() == "Plot":
return i
return None
def closePlot():
""" closePlot(): Close the active plot window. """
# Get active tab
mdi = getMdiArea()
if not mdi:
return None
sub = mdi.activeSubWindow()
if not sub:
return None
# Explore childrens looking for Plot class
for i in sub.children():
if i.metaObject().className() == "Plot":
sub.close()
def figure(winTitle="plot"):
"""Create a new plot subwindow/tab.
Keyword arguments:
winTitle -- Plot tab title.
"""
mdi = getMdiArea()
if not mdi:
return None
win = Plot(winTitle)
sub = mdi.addSubWindow(win)
sub.show()
return win
def plot(x, y, name=None):
"""Plots a new serie (as line plot)
Keyword arguments:
x -- X values
y -- Y values
name -- Data serie name (for legend).
"""
    # Get active plot, or create a new one if none exists
plt = getPlot()
if not plt:
plt = figure()
# Call to plot
    return plt.plot(x, y, name)
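# Minimal interactive use of figure() and plot() above (illustrative; it
# assumes a running FreeCAD GUI so that an MDI area exists):
#
#     import Plot
#     Plot.figure("demo")
#     Plot.plot([0.0, 1.0, 2.0], [0.0, 1.0, 4.0], name="parabola")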
def series():
"""Return all the lines from a selected p
|
lot."""
plt = getPlot()
if not plt:
return []
return plt.series
def removeSerie(index):
"""Remove a data serie from the active plot.
Keyword arguments:
index -- Index of the serie to remove.
"""
# Get active series
plt = getPlot()
if not plt:
return
plots = plt.series
if not plots:
return
# Remove line from plot
axes = plots[index].axes
axes.lines.pop(plots[index].lid)
# Remove serie from list
del plt.series[index]
# Update GUI
plt.update()
def legend(status=True, pos=None, fontsize=None):
"""Show/Hide the legend from the active plot.
Keyword arguments:
status -- True if legend must be shown, False otherwise.
pos -- Legend position.
fontsize -- Font size
"""
plt = getPlot()
if not plt:
return
plt.legend = status
if fontsize:
plt.legSiz = fontsize
# Hide all legends
for axes in plt.axesList:
axes.legend_ = None
# Legend must be activated on last axes
axes = plt.axesList[-1]
if status:
# Setup legend handles and names
lines = series()
handles = []
names = []
for l in lines:
if l.name is not None:
handles.append(l.line)
names.append(l.name)
# Show the legend (at selected position or at best)
if pos:
l = axes.legend(handles, names, bbox_to_anchor=pos)
plt.legPos = pos
else:
l = axes.legend(handles, names, loc='best')
# Update canvas in order to compute legend data
plt.canvas.draw()
# Get resultant position
try:
fax = axes.get_frame().get_extents()
except:
fax = axes.patch.get_extents()
fl = l.get_frame()
plt.legPos = (
(fl._x + fl._width - fax.x0) / fax.width,
(fl._y + fl._height - fax.y0) / fax.height)
# Set fontsize
for t in l.get_texts():
t.set_fontsize(plt.legSiz)
plt.update()
def grid(status=True):
"""Show/Hide the grid from the active plot.
Keyword arguments:
status -- True if grid must be shown, False otherwise.
"""
plt = getPlot()
if not plt:
return
plt.grid = status
axes = plt.axes
axes.grid(status)
plt.update()
def title(string):
"""Setup the plot title.
Keyword arguments:
string -- Plot title.
"""
plt = getPlot()
if not plt:
return
axes = plt.axes
axes.set_title(string)
plt.update()
def xlabel(string):
"""Setup the x label.
Keyword arguments:
string -- Title to set.
"""
plt = getPlot()
if not plt:
return
axes = plt.axes
axes.set_xlabel(string)
plt.update()
def ylabel(string):
"""Setup the y label.
Keyword arguments:
string -- Title to set.
"""
plt = getPlot()
if not plt:
return
axes = plt.axes
axes.set_ylabel(string)
plt.update()
def axesList():
"""Return the plot axes sets list. """
plt = getPlot()
if not plt:
return []
return plt.axesList
def axes():
"""Return the active plot axes."""
plt = getPlot()
if not plt:
return None
return plt.axes
def addNewAxes(rect=None, frameon=True, patchcolor='none'):
"""Add new axes to plot, setting it as the active one.
Keyword arguments:
rect -- Axes area, None to copy from the last axes data.
    frameon -- True to show frame, False otherwise.
|
chrislee35/pypassivedns
|
pypassivedns/pypassivedns.py
|
Python
|
bsd-3-clause
| 1,538
| 0.006502
|
# -*- coding: utf-8 -*-
import threading
import datetime
class PDNSResult:
def __init__(self, source, response_time, query, answer, rrtype, ttl, firstseen, lastseen, count):
self.source = source
self.response_time = response_time
self.query = query
self.answer = answer
self.rrtype = rrtype
self.ttl = ttl
self.firstseen = firstseen
self.lastseen = lastseen
self.count = count
def __str__(self):
return "%s,%0.2f,%s,%s,%s,%d,%s,%s,%d" % (
self.source,
self.response_time,
self.query,
self.answer,
self.rrtype or 'A',
self.ttl or 0,
PDNSResult.datetime_utc_str(self.firstseen),
PDNSResult.datetime_utc_str(self.lastseen),
self.count or 0
)
@staticmethod
def datetime_utc_str(dt):
if dt:
return datetime.datetime.utcfromtimestamp(dt.timestamp())
return ''
class Client:
def __init__(self, providers):
self.providers = providers
    def query(self, item, limit=None):
        threads = []
        results = []
        for provider in self.providers:
            t = threading.Thread(target=_query, args=(provider, item, limit, results))
            threads.append(t)
            t.start()
        # Thread.join() returns None, so results are collected through a
        # shared list instead (list.append is atomic in CPython).
        for thread in threads:
            thread.join()
        return results
def _query(provider, item, limit=None, results=None):
    res = provider.query(item, limit)
    if results is not None:
        results.append(res)
    return res
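# Illustrative use (hypothetical provider object; any object exposing a
# query(item, limit) method that returns PDNSResult objects would do):
#
#     client = Client([some_provider])
#     results = client.query("example.com")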
|
pythonistas-tw/academy
|
web-api/maomao/usecase.py
|
Python
|
gpl-2.0
| 313
| 0
|
from models import DBUse
test1 = DBUse()
test1.f_create(account="maomao
|
@gmail.com", password="123321")
test1.f_create(account="hahaha@gmail.com", password="abcd")
user = test1.f_read(uid=2)
test1.f_update(user, account="amy111@gmail.com")
ul = test1.f_read()
for i in ul:
test1.f_delete(i)
test1.f_read()
|
unlessbamboo/django
|
unusebamboo/settings.py
|
Python
|
gpl-3.0
| 3,094
| 0
|
"""
Django settings for unusebamboo project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '29e3ohmh+=s^udbct$yg^2p+9n+fpdpp%+9-^or(g+o%@2oosm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'accounts',
'polls',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'unusebamboo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'unusebamboo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'idjango',
'USER': 'root',
'PASSWORD': 'root',
'HOST': '127.0.0.1',
'PORT': '3306',
'OPTIONS': {'charset': 'utf8mb4'},
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = 'static'
# EMAIL
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.163.com'
EMAIL_HOST_USER = 'ilifediary2@163.com'
EMAIL_HOST_PASSWORD = 'a297413'
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = 'ilifediary2@163.com'
|
HIPS/Kayak
|
kayak/regularizers.py
|
Python
|
mit
| 2,109
| 0.006164
|
# Authors: Harvard Intelligent Probabilistic Systems (HIPS) Group
# http://hips.seas.harvard.edu
# Ryan Adams, David Duvenaud, Scott Linderman,
# Dougal Maclaurin, Jasper Snoek, and others
# Copyright 2014, The President and Fellows of Harvard University
# Distributed under an MIT license. See license.txt file.
import numpy as np
from . import Differentiable
class Regularizer(Differentiable):
__slots__ = ['X', 'weight']
def __init__(self, X, weight):
super(Regularizer, self).__init__([X])
self.X = X
self.weight = weight
class L2Norm(Regularizer):
__slots__ = []
def __init__(self, X, weight=1.0):
super(L2Norm, self).__init__(X, weight)
def _compute_value(self):
        return self.weight * np.sum(self.X.value**2)
def _local_grad(self, parent, d_out_d_self):
        return self.weight * 2.0 * self.X.value * d_out_d_self
class L1Norm(Regularizer):
__slots__ = []
def __init__(self, X, weight=1.0):
super(L1Norm, self).__init__(X, weight)
def _compute_value(self):
return self.weight * np.sum(np.abs(self.X.value))
def _local_grad(self, parent, d_out_d_self):
return self.weight * np.sign(self.X.value) * d_out_d_self
class Horseshoe(Regularizer):
__slots__ = []
def __init__(self, X, weight=1.0):
super(Horseshoe, self).__init__(X, weight)
def _compute_value(self):
return -self.weight * np.sum(np.log(np.log(1.0 + self.X.value**(-2))))
def _local_grad(self, parent, d_out_d_self):
return -(self.weight * d_out_d_self * (1 / (np.log(1.0 + self.X.value**(-2))))
* (1.0/(1 + self.X.value**(-2))) * (-2*self.X.value**(-3)))
class NExp(Regularizer):
__slots__ = []
def __init__(self, X, weight=1.0):
super(NExp, self).__init__(X, weight)
def _compute_value(self):
return self.weight * np.sum(1.0 - np.exp(-np.abs(self.X.value)))
def _local_grad(self, parent, d_out_d_self):
return self.weight * d_out_d_self * np.exp(-np.abs(self.X.value)) * np.sign(self.X.value)
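# Quick numeric sanity check for L2Norm above (illustrative; assumes X.value
# behaves like a NumPy array, as elsewhere in Kayak):
#
#     with X.value == np.array([1.0, 2.0]) and weight == 0.5:
#         value      == 0.5 * (1.0 + 4.0)     == 2.5
#         local grad == 0.5 * 2.0 * X.value   == [1.0, 2.0]  (times d_out_d_self)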
|
sighill/shade_app
|
primus/migrations/0007_auto_20160418_0959.py
|
Python
|
mit
| 457
| 0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-18 07:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('primus', '0006_auto_20160418_0959'),
]
    operations = [
migrations.AlterField(
model_name='building',
name='modified',
field=models.DateTimeField(auto_now=True),
),
]
|
pombredanne/numba
|
numba/hsa/hsaimpl.py
|
Python
|
bsd-2-clause
| 10,106
| 0.000396
|
from __future__ import print_function, absolute_import, division
import operator
from functools import reduce
from llvmlite.llvmpy.core import Type
import llvmlite.llvmpy.core as lc
import llvmlite.binding as ll
from numba.targets.imputils import implement, Registry
from numba import cgutils
from numba import types
from numba.itanium_mangler import mangle_c, mangle, mangle_type
from . import target
from . import stubs
from . import hlc
from . import enums
registry = Registry()
register = registry.register
_void_value = lc.Constant.null(lc.Type.pointer(lc.Type.int(8)))
# -----------------------------------------------------------------------------
def _declare_function(context, builder, name, sig, cargs,
mangler=mangle_c):
"""Insert declaration for a opencl builtin function.
Uses the Itanium mangler.
Args
----
context: target context
builder: llvm builder
name: str
symbol name
sig: signature
function signature of the symbol being declared
cargs: sequence of str
C type names for the arguments
mangler: a mangler function
function to use to mangle the symbol
"""
mod = builder.module
if sig.return_type == types.void:
llretty = lc.Type.void()
else:
llretty = context.get_value_type(sig.return_type)
llargs = [context.get_value_type(t) for t in sig.args]
fnty = Type.function(llretty, llargs)
mangled = mangler(name, cargs)
fn = mod.get_or_insert_function(fnty, mangled)
fn.calling_convention = target.CC_SPIR_FUNC
return fn
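# For reference, the Itanium mangling used above turns a C-style declaration
# such as get_global_id(unsigned int) into "_Z13get_global_idj": "_Z", the
# name length, the name, then "j" for unsigned int. That is the symbol
# mangle_c('get_global_id', ['unsigned int']) is expected to produce.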
@register
@implement(stubs.get_global_id, types.uint32)
def get_global_id_impl(context, builder, sig, args):
[dim] = args
get_global_id = _declare_function(context, builder, 'get_global_id', sig,
['unsigned int'])
res = builder.call(get_global_id, [dim])
return context.cast(builder, res, types.uintp, types.intp)
@register
@implement(stubs.get_local_id, types.uint32)
def get_local_id_impl(context, builder, sig, args):
[dim] = args
get_local_id = _declare_function(context, builder, 'get_local_id', sig,
['unsigned int'])
res = builder.call(get_local_id, [dim])
return context.cast(builder, res, types.uintp, types.intp)
@register
@implement(stubs.get_group_id, types.uint32)
def get_group_id_impl(context, builder, sig, args):
[dim] = args
get_group_id = _declare_function(context, builder, 'get_group_id', sig,
['unsigned int'])
res = builder.call(get_group_id, [dim])
return context.cast(builder, res, types.uintp, types.intp)
@register
@implement(stubs.get_num_groups, types.uint32)
def get_num_groups_impl(context, builder, sig, args):
[dim] = args
get_num_groups = _declare_function(context, builder, 'get_num_groups', sig,
['unsigned int'])
res = builder.call(get_num_groups, [dim])
return context.cast(builder, res, types.uintp, types.intp)
@register
@implement(stubs.get_work_dim)
def get_work_dim_impl(context, builder, sig, args):
get_work_dim = _declare_function(context, builder, 'get_work_dim', sig,
["void"])
res = builder.call(get_work_dim, [])
return res
@register
@implement(stubs.get_global_size, types.uint32)
def get_global_size_impl(context, builder, sig, args):
[dim] = args
get_global_size = _declare_function(context, builder, 'get_global_size',
sig, ['unsigned int'])
res = builder.call(get_global_size, [dim])
return context.cast(builder, res, types.uintp, types.intp)
@register
@implement(stubs.get_local_size, types.uint32)
def get_local_size_impl(context, builder, sig, args):
[dim] = args
get_local_size = _declare_function(context, builder, 'get_local_size',
sig, ['unsigned int'])
res = builder.call(get_local_size, [dim])
return context.cast(builder, res, types.uintp, types.intp)
@register
@implement(stubs.barrier, types.uint32)
def barrier_one_arg_impl(context, builder, sig, args):
[flags] = args
barrier = _declare_function(context, builder, 'barrier', sig,
['unsigned int'])
builder.call(barrier, [flags])
return _void_value
@register
@implement(stubs.barrier)
def barrier_no_arg_impl(context, builder, sig, args):
assert not args
sig = types.void(types.uint32)
barrier = _declare_function(context, builder, 'barrier', sig,
['unsigned int'])
flags = context.get_constant(types.uint32, enums.CLK_GLOBAL_MEM_FENCE)
builder.call(barrier, [flags])
return _void_value
@register
@implement(stubs.mem_fence, types.uint32)
def mem_fence_impl(context, builder, sig, args):
[flags] = args
mem_fence = _declare_function(context, builder, 'mem_fence', sig,
['unsigned int'])
builder.call(mem_fence, [flags])
return _void_value
@register
@implement(stubs.wavebarrier)
def wavebarrier_impl(context, builder, sig, args):
assert not args
fnty = Type.function(Type.void(), [])
fn = builder.module.get_or_insert_function(fnty, name="__hsail_wavebarrier")
fn.calling_convention = target.CC_SPIR_FUNC
builder.call(fn, [])
return _void_value
@register
@implement(stubs.activelanepermute_wavewidth,
types.Any, types.uint32, types.Any, types.bool_)
def activelanepermute_wavewidth_impl(context, builder, sig, args):
[src, laneid, identity, use_ident] = args
assert sig.args[0] == sig.args[2]
elem_type = sig.args[0]
bitwidth = elem_type.bitwidth
intbitwidth = Type.int(bitwidth)
i32 = Type.int(32)
i1 = Type.int(1)
name = "__hsail_activelanepermute_wavewidth_b{0}".format(bitwidth)
fnty = Type.function(intbitwidth, [intbitwidth, i32, intbitwidth, i1])
fn = builder.module.get_or_insert_function(fnty, name=name)
fn.calling_convention = target.CC_SPIR_FUNC
def cast(val):
return builder.bitcast(val, intbitwidth)
result = builder.call(fn, [cast(src), laneid, cast(identity), use_ident])
return builder.bitcast(result, context.get_value_type(elem_type))
@register
@implement(stubs.atomic.add, types.Kind(types.Array), types.intp, types.Any)
@implement(stubs.atomic.add, types.Kind(types.Array),
types.Kind(types.UniTuple), types.Any)
@implement(stubs.atomic.add, types.Kind(types.Array), types.Kind(types.Tuple),
types.Any)
def hsail_atomic_add_tuple(context, builder, sig, args):
aryty, indty, valty = sig.args
ary, inds, val = args
dtype = aryty.dtype
if indty == types.intp:
indices = [inds] # just a single integer
indty = [indty]
else:
indices = cgutils.unpack_tuple(builder, inds, count=len(indty))
indices = [context.cast(builder, i, t, types.intp)
for t, i in zip(indty, indices)]
if dtype != valty:
raise TypeError("expecting %s but got %s" % (dtype, valty))
if aryty.ndim != len(indty):
raise TypeError("indexing %d-D array with %d-D index" %
(aryty.ndim, len(indty)))
lary = context.make_array(aryty)(context, builder, ary)
ptr = cgutils.get_item_pointer(builder, aryty, lary, indices)
return builder.atomic_rmw("add", ptr, val, ordering='monotonic')
@register
@implement('hsail.smem.alloc', types.Kind(types.UniTuple), types.Any)
def hsail_smem_alloc_array(context, builder, sig, args):
shape, dtype = args
return _generic_array(context, builder, shape=shape, dtype=dtype,
                          symbol_name='_hsapy_smem',
addrspace=target.SPIR_LOCAL_ADDRSPACE)
def _generic_array(context, builder, shape, dtype, symbol_name, addrspace):
elemcount = reduce(operator.mul, shape)
lldtype = context.get_data_type(dtype)
laryty = Type.array(lldtype, elemcount)
if addrspace == target.SPIR_LOCAL_ADDRSPACE:
lmod = builder.module
# Create global variable in the reques
|
andir/ganeti
|
test/py/cmdlib/cluster_unittest.py
|
Python
|
bsd-2-clause
| 96,615
| 0.005248
|
#!/usr/bin/python
#
# Copyright (C) 2008, 2011, 2012, 2013 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for LUCluster*
"""
import OpenSSL
import copy
import unittest
import re
import shutil
import os
from ganeti.cmdlib import cluster
from ganeti.cmdlib.cluster import verify
from ganeti import constants
from ganeti import errors
from ganeti import netutils
from ganeti import objects
from ganeti import opcodes
from ganeti import utils
from ganeti import pathutils
from ganeti import query
from ganeti.hypervisor import hv_xen
from testsupport import *
import testutils
class TestClusterVerifySsh(unittest.TestCase):
def testMultipleGroups(self):
fn = verify.LUClusterVerifyGroup._SelectSshCheckNodes
mygroupnodes = [
objects.Node(name="node20", group="my", offline=False,
master_candidate=True),
objects.Node(name="node21", group="my", offline=False,
master_candidate=True),
objects.Node(name="node22", group="my", offline=False,
master_candidate=False),
objects.Node(name="node23", group="my", offline=False,
master_candidate=True),
objects.Node(name="node24", group="my", offline=False,
master_candidate=True),
objects.Node(name="node25", group="my", offline=False,
master_candidate=False),
objects.Node(name="node26", group="my", offline=True,
master_candidate=True),
]
nodes = [
objects.Node(name="node1", group="g1", offline=True,
master_candidate=True),
objects.Node(name="node2", group="g1", offline=False,
master_candidate=False),
objects.Node(name="node3", group="g1", offline=False,
master_candidate=True),
objects.Node(name="node4", group="g1", offline=True,
master_candidate=True),
objects.Node(name="node5", group="g1", offline=False,
master_candidate=True),
objects.Node(name="node10", group="xyz", offline=False,
master_candidate=True),
objects.Node(name="node11", group="xyz", offline=False,
master_candidate=True),
objects.Node(name="node40", group="alloff", offline=True,
master_candidate=True),
objects.Node(name="node41", group="alloff", offline=True,
master_candidate=True),
objects.Node(name="node50", group="aaa", offline=False,
master_candidate=True),
] + mygroupnodes
assert not utils.FindDuplicates(n.name for n in nodes)
(online, perhost, _) = fn(mygroupnodes, "my", nodes)
self.assertEqual(online, ["node%s" % i for i in range(20, 26)])
self.assertEqual(set(perhost.keys()), set(online))
self.assertEqual(perhost, {
"node20": ["node10", "node2", "node50"],
"node21": ["node11", "node3", "node50"],
"node22": ["node10", "node5", "node50"],
"node23": ["node11", "node2", "node50"],
"node24": ["node10", "node3", "node50"],
"node25": ["node11", "node5", "node50"],
})
def testSingleGroup(self):
fn = verify.LUClusterVerifyGroup._SelectSshCheckNodes
nodes = [
objects.Node(name="node1", group="default", offline=True,
master_candidate=True),
objects.Node(name="node2", group="default", offline=False,
master_candidate=True),
objects.Node(name="node3", group="default", offline=False,
master_candidate=True),
objects.Node(name="node4", group="default", offline=True,
master_candidate=True),
]
assert not utils.FindDuplicates(n.name for n in nodes)
(online, perhost, _) = fn(nodes, "default", nodes)
self.assertEqual(online, ["node2", "node3"])
self.assertEqual(set(perhost.keys()), set(online))
self.assertEqual(perhost, {
"node2": [],
"node3": [],
})
class TestLUClusterActivateMasterIp(CmdlibTestCase):
def testSuccess(self):
op = opcodes.OpClusterActivateMasterIp()
self.rpc.call_node_activate_master_ip.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master)
self.ExecOpCode(op)
self.rpc.call_node_activate_master_ip.assert_called_once_with(
self.master_uuid, self.cfg.GetMasterNetworkParameters(), False)
def testFailure(self):
op = opcodes.OpClusterActivateMasterIp()
self.rpc.call_node_activate_master_ip.return_value = \
self.RpcResultsBuilder() \
        .CreateFailedNodeResult(self.master)
self.ExecOpCodeExpectOpExecError(op)
class TestLUClusterDeactivateMasterIp(CmdlibTestCase):
def testSuccess(self):
op = opcodes.OpClusterDeactivateMasterIp()
self.rpc.call_node_deactivate_master_ip.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master)
self.ExecOpCode(op)
self.rpc.call_node_deactivate_master_ip.assert_called_once_with(
self.master_uuid, self.cfg.GetMasterNetworkParameters(), False)
def testFailure(self):
op = opcodes.OpClusterDeactivateMasterIp()
self.rpc.call_node_deactivate_master_ip.return_value = \
self.RpcResultsBuilder() \
        .CreateFailedNodeResult(self.master)
self.ExecOpCodeExpectOpExecError(op)
class TestLUClusterConfigQuery(CmdlibTestCase):
def testInvalidField(self):
op = opcodes.OpClusterConfigQuery(output_fields=["pinky_bunny"])
self.ExecOpCodeExpectOpPrereqError(op, "pinky_bunny")
def testAllFields(self):
op = opcodes.OpClusterConfigQuery(output_fields=query.CLUSTER_FIELDS.keys())
self.rpc.call_get_watcher_pause.return_value = \
self.RpcResultsBuilder() \
.CreateSuccessfulNodeResult(self.master, -1)
ret = self.ExecOpCode(op)
self.assertEqual(1, self.rpc.call_get_watcher_pause.call_count)
self.assertEqual(len(ret), len(query.CLUSTER_FIELDS))
  def testEmptyFields(self):
op = opcodes.OpClusterConfigQuery(output_fields=[])
self.ExecOpCode(op)
self.assertFalse(self.rpc.call_get_watcher_pause.called)
class TestLUClusterDestroy(CmdlibTestCase):
def testExistingNodes(self):
    op = opcodes.OpClusterDestroy()
self.cfg.AddNewNode()
self.cfg.AddNewNode()
self.ExecOpCodeExpectOpPrereqError(op, "still 2 node\(s\)")
def testExistingInstances(self):
op = opcodes.OpClusterDestroy()
self.cfg.AddNewInstance()
self.cfg.AddNewInstance()
self.ExecOpCodeExpectOpPrereqError(op, "still 2 instance\(s\)")
def testEmptyCluster(self):
op = opcodes.OpClusterDestroy()
self.ExecOpCode(op)
    self.assertHooksCall([self.master.uuid], constants.GLOBAL_HOOKS_DIR,
constants.HOOKS_PHASE_PRE, index=0)
self.assertHooksCall([self.master.uuid], "cluster-destroy",
constants.HOOKS_PHASE_POST,
|
nwiizo/workspace_2017
|
pipng/multiplexer2.py
|
Python
|
mit
| 4,364
| 0.00298
|
#!/usr/bin/env python3
# Copyright © 2012-13 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import collections
import random
random.seed(917) # Not truly random for ease of regression testing
def main():
totalCounter = Counter()
carCounter = Counter("cars")
commercialCounter = Counter("trucks", "vans")
multiplexer = Multiplexer()
for eventName, callback in (("cars", carCounter),
("vans", commercialCounter), ("trucks", commercialCounter)):
multiplexer.connect(eventName, callback)
multiplexer.connect(eventName, totalCounter)
for event in generate_random_events(100):
multiplexer.send(event)
print("After 100 active events: cars={} vans={} trucks={} total={}"
.format(carCounter.cars, commercialCounter.vans,
commercialCounter.trucks, totalCounter.count))
multiplexer.state = Multiplexer.DORMANT
for event in generate_random_events(100):
multiplexer.send(event)
print("After 100 dormant events: cars={} vans={} trucks={} total={}"
.format(carCounter.cars, commercialCounter.vans,
commercialCounter.trucks, totalCounter.count))
multiplexer.state = Multiplexer.ACTIVE
for event in generate_random_events(100):
multiplexer.send(event)
print("After 100 active events: cars={} vans={} trucks={} total={}"
.format(carCounter.cars, commercialCounter.vans,
                commercialCounter.trucks, totalCounter.count))
def generate_random_events(count):
vehicles = (("cars",) * 11) + (("vans",) * 3) + ("trucks",)
for _ in range(count):
yield Event(random.choice(vehicles), random.randint(1, 3))
class Counter:
def __init__(self, *names):
self.anonymous = not bool(names)
if self.anonymous:
self.count = 0
else:
for name in names:
if not name.isidentifier():
raise ValueError("names must be valid identifiers")
setattr(self, name, 0)
def __call__(self, event):
if self.anonymous:
self.count += event.count
else:
count = getattr(self, event.name)
setattr(self, event.name, count + event.count)
class Event:
def __init__(self, name, count=1):
if not name.isidentifier():
raise ValueError("names must be valid identifiers")
self.name = name
self.count = count
class Multiplexer:
ACTIVE, DORMANT = ("ACTIVE", "DORMANT")
def __init__(self):
self.callbacksForEvent = collections.defaultdict(list)
self.state = Multiplexer.ACTIVE
@property
def state(self):
return (Multiplexer.ACTIVE if self.send == self.__active_send
else Multiplexer.DORMANT)
@state.setter
def state(self, state):
if state == Multiplexer.ACTIVE:
self.connect = self.__active_connect
self.disconnect = self.__active_disconnect
self.send = self.__active_send
else:
self.connect = lambda *args: None
self.disconnect = lambda *args: None
self.send = lambda *args: None
def __active_connect(self, eventName, callback):
self.callbacksForEvent[eventName].append(callback)
def __active_disconnect(self, eventName, callback=None):
if callback is None:
del self.callbacksForEvent[eventName]
else:
self.callbacksForEvent[eventName].remove(callback)
def __active_send(self, event):
for callback in self.callbacksForEvent.get(event.name, ()):
callback(event)
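# Illustrative effect of the state switch above: while DORMANT, connect,
# disconnect and send are all replaced by no-op lambdas, so events are
# silently dropped:
#
#     m = Multiplexer()
#     m.state = Multiplexer.DORMANT
#     m.send(Event("cars"))    # does nothing
#     m.state = Multiplexer.ACTIVE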
if __name__ == "__main__":
main()
|
Alternhuman/marcopolo
|
marcopolo/tests/test_polo_binding.py
|
Python
|
mpl-2.0
| 1,960
| 0.015816
|
import sys
sys.path.append('/opt/marcopolo/')
from bindings.polo import polo
from twisted.trial import unittest
from mock import MagicMock, patch
class TestRegisterService(unittest.TestCase):
def setUp(self):
self.polo = polo.Polo()
self.polo.polo_socket.sendto = MagicMock(return_value=0)
def test_register_success(self):
#self.polo.polo_socket.recv = MagicMock(return_value = bytes("{\"Return\":\"OK\", \"Args\":\"Registered\"}"))
self.polo.polo_socket.recv = MagicMock(return_value = bytes("{\"OK\":\"dummy\"}"))
self.assertEqual(self.polo.publish_service("dummy"), "dummy")
def test_register_fail(self):
self.polo.polo_socket.recv = MagicMock(return_value = bytes("{\"Error\":\"Error\", \"Args\":\"Service already exists\"}"))
self.assertRaises(polo.PoloException, self.polo.publish_service, "dummy")
def test_wrong_json(self):
self.polo.polo_socket.recv = MagicMock(return_value = bytes("[{\"Return\":\"OK\", \"Args\":\"Registered\"}]"))
self.assertRaises(polo.PoloInternalException, self.polo.publish_service, "dummy")
def test_malformed_json(self):
self.polo.polo_socket.recv = MagicMock(return_value = bytes("[{\"Return\":\"OK\""))
self.assertRaises(polo.PoloInternalException, self.polo.publish_service, "dummy")
def test_connection_fail(self):
self.polo.polo_socket.sendto = MagicMock(return_value = -1)
self.assertRaises(polo.PoloInternalException, self.polo.publish_service, "dummy")
# class TestRemoveService(unittest.TestCase):
#     def setUp(self):
# self.polo = polo.Polo()
# def test_remove_success(self):
# self.assertFalse(self.polo.unpublish_service("dummy"))
# def test_remove_failure(self):
# self.assertFalse(self.polo.unpublish("dummy"))
# def test_have_service(self):
# pass
|
tomhaoye/LetsPython
|
practice/practice50.py
|
Python
|
mit
| 87
| 0
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import random
print random.uniform(10, 30)
|
siosio/intellij-community
|
python/helpers/profiler/vmprof_profiler.py
|
Python
|
apache-2.0
| 2,691
| 0.001115
|
import os
import shutil
import tempfile
import vmprof
import prof_six as six
from _prof_imports import TreeStats, CallTreeStat
class VmProfProfile(object):
""" Wrapper class that represents VmProf Python profiling backend with API matching
the cProfile.
"""
def __init__(self):
self.stats = None
self.basepath = None
self.file = None
self.is_enabled = False
def runcall(self, func, *args, **kw):
self.enable()
try:
return func(*args, **kw)
finally:
self.disable()
def enable(self):
if not self.is_enabled:
if not os.path.exists(self.basepath):
os.makedirs(self.basepath)
self.file = tempfile.NamedTemporaryFile(delete=False, dir=self.basepath)
try:
vmprof.enable(self.file.fileno(), lines=True)
except:
vmprof.enable(self.file.fileno())
self.is_enabled = True
def disable(self):
if self.is_enabled:
vmprof.disable()
self.file.close()
self.is_enabled = False
def create_stats(self):
return None
def getstats(self):
self.create_stats()
return self.stats
def dump_stats(self, file):
shutil.copyfile(self.file.name, file)
    def _walk_tree(self, parent, node, callback):
        tree = callback(parent, node)
        for c in six.itervalues(node.children):
            self._walk_tree(node, c, callback)
        return tree
    def tree_stats_to_response(self, filename, response):
        tree_stats_to_response(filename, response)
    def snapshot_extension(self):
return '.prof'
def _walk_tree(parent, node, callback):
if node is None:
return None
tree = callback(parent, node)
for c in six.itervalues(node.children):
_walk_tree(tree, c, callback)
return tree
def tree_stats_to_response(filename, response):
stats = vmprof.read_profile(filename)
response.tree_stats = TreeStats()
response.tree_stats.sampling_interval = vmprof.DEFAULT_PERIOD
try:
tree = stats.get_tree()
except vmprof.stats.EmptyProfileFile:
tree = None
def convert(parent, node):
tstats = CallTreeStat()
tstats.name = node.name
tstats.count = node.count
tstats.children = []
tstats.line_count = getattr(node, 'lines', {})
if parent is not None:
if parent.children is None:
parent.children = []
parent.children.append(tstats)
return tstats
response.tree_stats.call_tree = _walk_tree(None, tree, convert)
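# --- Hedged usage sketch (assumed standalone workflow; normally the IDE
# drives this class). `basepath` must be set before enable() runs, and the
# output filename below is illustrative only.
def _demo_profile_run():
    profiler = VmProfProfile()
    profiler.basepath = tempfile.mkdtemp()        # sample files land here
    profiler.runcall(sum, range(10 ** 6))         # profile any callable
    profiler.dump_stats(os.path.join(profiler.basepath, 'run.prof'))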
|
Opentrons/labware
|
api/tests/opentrons/data/testosaur.py
|
Python
|
apache-2.0
| 745
| 0
|
from opentrons import instruments, containers
metadata = {
'protocolName': 'Testosaur',
'author': 'Opentrons <engineering@opentrons.com>',
'description': 'A variant on "Dinosaur" for testing',
    'source': 'Opentrons Repository'
}
p200rack = containers.load('tiprack-200ul', '5', 'tiprack')
# create a p300 pipette on the right mount
p300 = instruments.P300_Single(
mount="right",
tip_racks=[p200rack])
p300.pick_up_tip()
conts = [
containers.load('96-PCR-flat', slot)
for slot
in ('8', '11')
]
# Uncomment these to test precision
# p300.move_to(robot.deck['11'])
# p300.move_to(robot.deck['6'])
for container in conts:
p300.aspirate(10, container[0]).dispense(10, container[-1].top(5))
p300.drop_tip()
|
dubourg/openturns
|
python/test/t_NegativeBinomial_std.py
|
Python
|
gpl-3.0
| 2,540
| 0.000787
|
#! /usr/bin/env python
from __future__ import print_function
from openturns import *
TESTPREAMBLE()
RandomGenerator.SetSeed(0)
try:
    # Instantiate one distribution object
distribution = NegativeBinomial(4.5, 0.7)
print("Distribution ", repr(distribution))
print("Distribution ", distribution)
# Is this distribution elliptical ?
print("Elliptical = ", distribution.isElliptical())
# Is this distribution continuous ?
print("Continuous = ", distribution.isContinuous())
# Test for realization of distribution
oneRealization = distribution.getRealization()
print("oneRealization=", repr(oneRealization))
# Test for sampling
size = 10000
oneSample = distribution.getSample(size)
print("oneSample first=", repr(oneSample[0]), " last=", repr(oneSample[1]))
print("mean=", repr(oneSample.computeMean()))
print("covariance=", repr(oneSample.computeCovariance()))
# Define a point
point = NumericalPoint(distribution.getDimension(), 5.0)
print("Point= ", repr(point))
# Show PDF and CDF of point
eps = 1e-5
# PDF value
PDF = distribution.computePDF(point)
print("pdf =%.6f" % PDF)
# by the finite difference technique from CDF
print("pdf (FD)=%.6f" % (distribution.computeCDF(
point + NumericalPoint(1, 0)) - distribution.computeCDF(point + NumericalPoint(1, -1))))
    # derivative of the PDF with respect to the parameters of the distribution
CDF = distribution.computeCDF(point)
print("cdf=%.6f" % CDF)
# quantile
quantile = distribution.computeQuantile(0.95)
    print("quantile=", repr(quantile))
print("cdf(quantile)=%.6f" % distribution.computeCDF(quantile))
mean = distribution.getMean()
print("mean=", repr(mean))
standardDeviation = distribution.getStandardDeviation()
print("standard deviation=", repr(standardDeviation))
skewness = distribution.getSkewness()
print("skewness=", repr(skewness))
kurtosis = distribution.getKurtosis()
print("kurtosis=", repr(kurtosis))
covariance = distribution.getCovariance()
print("covariance=", repr(covariance))
parameters = distribution.getParametersCollection()
print("parameters=", repr(parameters))
for i in range(6):
print("standard moment n=", i, " value=",
distribution.getStandardMoment(i))
print("Standard representative=", distribution.getStandardRepresentative())
except:
import sys
print("t_NegativeBinomial_std.py", sys.exc_info()[0], sys.exc_info()[1])
|
treasure-data/td-client-python
|
tdclient/test/dtypes_and_converters_test.py
|
Python
|
apache-2.0
| 7,897
| 0.001773
|
#!/usr/bin/env python
"""Tests for the dtypes and converters arguments to CSV import.
"""
import pytest
from io import BytesIO
from unittest import mock
from tdclient import api, Client
from tdclient.util import read_csv_records
from tdclient.test.test_helper import gunzipb
from tdclient.test.test_helper import make_response
from tdclient.test.test_helper import msgunpackb
DEFAULT_DATA = [
{
"time": "100",
"col1": "0001",
"col2": "10",
"col3": "1.0",
"col4": "abcd",
"col5": "true",
"col6": "none",
},
{
"time": "200",
"col1": "0002",
"col2": "20",
"col3": "2.0",
"col4": "efgh",
"col5": "false",
"col6": "",
},
]
def sample_reader(data=DEFAULT_DATA):
"""A very simple emulation of the actual CSV readers.
"""
for item in data:
yield item
def test_basic_read_csv_records():
"""The base test of read_csv_records - no customisation.
"""
reader = sample_reader()
result = list(read_csv_records(reader))
assert result == [
{
"time": 100,
"col1": 1,
"col2": 10,
"col3": 1.0,
"col4": "abcd",
"col5": True,
"col6": None,
},
{
"time": 200,
"col1": 2,
"col2": 20,
"col3": 2.0,
"col4": "efgh",
"col5": False,
"col6": None,
},
]
def test_unsupported_dtype_gives_error():
reader = sample_reader()
with pytest.raises(ValueError) as excinfo:
# Remember, it won't yield anything if we don't "next" it
next(read_csv_records(reader, dtypes={"something": "no-such-dtype"}))
assert "Unrecognized dtype 'no-such-dtype'" in str(excinfo.value)
def test_guess_dtype_gives_default_result():
reader = sample_reader()
result = list(
read_csv_records(
reader,
dtypes={
"time": "guess",
"col1": "guess",
"col2": "guess",
"col3": "guess",
"col4": "guess",
"col5": "guess",
"col6": "guess",
},
)
)
assert result == [
{
"time": 100,
"col1": 1,
"col2": 10,
"col3": 1.0,
"col4": "abcd",
"col5": True,
"col6": None,
},
{
"time": 200,
"col1": 2,
"col2": 20,
"col3": 2.0,
"col4": "efgh",
"col5": False,
"col6": None,
},
]
def test_dtypes_change_parsing():
reader = sample_reader()
result = list(
read_csv_records(
reader, dtypes={"col1": "str", "col2": "float", "col6": "str",}
)
)
assert result == [
{
"time": 100,
"col1": "0001",
"col2": 10.0,
"col3": 1.0,
"col4": "abcd",
"col5": True,
"col6": "none",
},
{
"time": 200,
"col1": "0002",
"col2": 20.0,
"col3": 2.0,
"col4": "efgh",
"col5": False,
"col6": "",
},
]
def test_converters_change_parsing():
reader = sample_reader()
result = list(
read_csv_records(reader, converters={"col1": str, "col2": float, "col6": str,})
)
assert result == [
{
"time": 100,
"col1": "0001",
"col2": 10.0,
"col3": 1.0,
"col4": "abcd",
"col5": True,
"col6": "none",
},
{
"time": 200,
"col1": "0002",
"col2": 20.0,
"col3": 2.0,
"col4": "efgh",
"col5": False,
"col6": "",
},
]
def test_dtypes_plus_converters_change_parsing():
reader = sample_reader()
result = list(
read_csv_records(
reader, dtypes={"col1": "str", "col6": "str",}, converters={"col2": float,}
)
)
assert result == [
{
"time": 100,
"col1": "0001",
"col2": 10.0,
"col3": 1.0,
"col4": "abcd",
"col5": True,
"col6": "none",
},
{
"time": 200,
"col1": "0002",
"col2": 20.0,
"col3": 2.0,
"col4": "efgh",
"col5": False,
"col6": "",
},
]
def test_dtypes_overridden_by_converters():
reader = sample_reader()
result = list(
read_csv_records(
reader,
dtypes={
"time": "bool", # overridden by converters
"col1": "str",
"col2": "int", # overridden by converters
"col6": "str",
},
converters={"time": int, "col2": float, "col5": str,},
)
)
assert result == [
{
"time": 100,
"col1": "0001",
"col2": 10.0,
"col3": 1.0,
"col4": "abcd",
"col5": "true",
"col6": "none",
},
{
"time": 200,
"col1": "0002",
"col2": 20.0,
"col3": 2.0,
"col4": "efgh",
"col5": "false",
"col6": "",
},
]
DEFAULT_HEADER_BYTE_CSV = (
b"time,col1,col2,col3,col4\n" b"100,0001,10,1.0,abcd\n" b"200,0002,20,2.0,efgh\n"
)
def test_import_file_supports_dtypes_and_converters():
def import_data(db, table, format, stream, size, unique_id=None):
data = stream.read(size)
assert msgunpackb(gunzipb(data)) == [
{"time": 100, "col1": "0001", "col2": 10.0, "col3": 1.0, "col4": "abcd"},
{"time": 200, "col1": "0002", "col2": 20.0, "col3": 2.0, "col4": "efgh"},
]
td = api.API("APIKEY")
td.import_data = import_data
td.import_file(
"db",
"table",
"csv",
BytesIO(DEFAULT_HEADER_BYTE_CSV),
dtypes={"col1": "str", "col6": "str"},
converters={"col2": float},
)
def test_bulk_import_upload_file_supports_dtypes_and_converters():
def bulk_import_upload_part(name, part_name, stream, size):
data = stream.read(size)
assert msgunpackb(gunzipb(data)) == [
{"time": 100, "col1": "0001", "col2": 10.0, "col3": 1.0, "col4": "abcd"},
{"time": 200, "col1": "0002", "col2": 20.0, "col3": 2.0, "col4": "efgh"},
]
td = api.API("APIKEY")
td.bulk_import_upload_part = bulk_import_upload_part
td.bulk_import_upload_file(
"name",
"part-name",
"csv",
BytesIO(DEFAULT_HEADER_BYTE_CSV),
dtypes={"col1": "str", "col6": "str"},
converters={"col2": float},
)
def test_bulk_import_dot_upload_file_supports_dtypes_and_converters():
def bulk_import_upload_part(name, part_name, stream, size):
data = stream.read(size)
assert msgunpackb(gunzipb(data)) == [
{"time": 100, "col1": "0001", "col2": 10.0, "col3": 1.0, "col4": "abcd"},
{"time": 200, "col1": "0002", "col2": 20.0, "col3": 2.0, "col4": "efgh"},
]
with Client("APIKEY") as td:
td.api.post = mock.MagicMock(return_value=make_response(200, b""))
td.api.bulk_import_upload_part = bulk_import_upload_part
bulk_import = td.create_bulk_import("session-name", "mydb", "mytbl")
bulk_import.update = mock.MagicMock()
bulk_import.upload_file(
"part-name",
"csv",
BytesIO(DEFAULT_HEADER_BYTE_CSV),
dtypes={"col1": "str", "col6": "str"},
converters={"col2": float},
)
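# Illustrative follow-up (not part of the original suite): when dtypes and
# converters name the same column, the converter wins, as
# test_dtypes_overridden_by_converters above demonstrates.
def _demo_converter_precedence():
    rows = list(
        read_csv_records(
            sample_reader(), dtypes={"col2": "int"}, converters={"col2": float}
        )
    )
    assert rows[0]["col2"] == 10.0  # float converter overrode the int dtype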
|
angea/corkami
|
src/java/java.py
|
Python
|
bsd-2-clause
| 1,502
| 0.009987
|
# simple class generator helper
#Ange Albertini, BSD licence 2011
import struct
def make_utf8(s):
return "".join(["\x01", struct.pack(">H", len(s)), s])
def make_class(i):
return "".join(["\x07", struct.pack(">H", i)])
def make_nat(name, type):
return "".join(["\x0C", struct.pack(">H", name), struct.pack(">H", type)])
def make_fieldref(field, ref):
return "".join(["\x09", struct.pack(">H", field), struct.pack(">H", ref)])
def make_methodref(method, ref):
return "".join(["\x0A", struct.pack(">H", method), struct.pac
|
k(">H", ref)])
def make_string(utf):
return "".join(["\x08", struct.pack(">H", utf)])
def u4length(s):
return "".join([struct.pack(">L", len(s)), s])
def u2larray(l):
return "".join([struct.pack(">H", l
|
en(l)), "".join(l)])
GETSTATIC = "\xb2"
LDC = "\x12"
INVOKEVIRTUAL = "\xb6"
RETURN = "\xb1"
def make_classfile(
magic, minor_version, major_version, pool, access_flags, this_class, super_class, interfaces,
fields, methods, attributes):
return "".join([
magic,
struct.pack(">H", minor_version),
struct.pack(">H", major_version),
u2larray(pool),
struct.pack(">H", access_flags),
struct.pack(">H", this_class),
struct.pack(">H", super_class),
u2larray(interfaces),
u2larray(fields),
u2larray(methods),
u2larray(attributes)
])
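# --- Hedged sketch (assumed usage; argument order follows the helpers above,
# and indices are 1-based as in the JVM constant pool) ---
demo_pool = [
    make_utf8("java/lang/Object"),  # 1: CONSTANT_Utf8
    make_class(1),                  # 2: CONSTANT_Class, name at #1
    make_utf8("<init>"),            # 3
    make_utf8("()V"),               # 4
    make_nat(3, 4),                 # 5: NameAndType(<init>, ()V)
    make_methodref(2, 5),           # 6: Methodref(class #2, name-and-type #5)
]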
|
sussexstudent/falmer
|
manage.py
|
Python
|
mit
| 1,026
| 0
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local')
try:
from django.core.management import execute_from_command_line
    except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django # noqa
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
# This allows easy placement of apps within the interior
# falmer directory.
current_path = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(current_path, 'falmer'))
execute_from_command_line(sys.argv)
|
Parisson/TimeSide
|
tests/unit_timeside.py
|
Python
|
agpl-3.0
| 5,327
| 0.000188
|
# -*- coding: utf-8 -*-
import unittest
import doctest
import sys
import time
import timeside.core
class _TextTestResult(unittest.TestResult):
"""A test result class that can print formatted text results to a stream.
Used by TextTestRunner.
"""
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
unittest.TestResult.__init__(self)
self.stream = stream
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.descriptions = descriptions
self.currentTestCase = None
def getDescription(self, test):
if self.descriptions:
return test.shortDescription() or str(test)
else:
return str(test)
def startTest(self, test):
unittest.TestResult.startTest(self, test)
if self.showAll:
if self.currentTestCase != test.__class__:
self.currentTestCase = test.__class__
self.stream.writeln()
self.stream.writeln("[%s]" % self.currentTestCase.__name__)
self.stream.write(" " + self.getDescription(test))
self.stream.write(" ... ")
def addSuccess(self, test):
unittest.TestResult.addSuccess(self, test)
if self.showAll:
self.stream.writeln("ok")
elif self.dots:
self.stream.write('.')
def addError(self, test, err):
unittest.TestResult.addError(self, test, err)
if self.showAll:
self.stream.writeln("ERROR")
elif self.dots:
self.stream.write('E')
def addFailure(self, test, err):
unittest.TestResult.addFailure(self, test, err)
if self.showAll:
self.stream.writeln("FAIL")
elif self.dots:
self.stream.write('F')
def addSkip(self, test, reason):
unittest.TestResult.addSkip(self, test, reason)
        if self.showAll:
            self.stream.writeln("SKIP : " + reason)
elif self.dots:
self.stream.write('S')
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: [%s] --> %s "
% (flavour,
test.__class__.__name__,
self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
class _WritelnDecorator:
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self, stream):
self.stream = stream
def __getattr__(self, attr):
return getattr(self.stream, attr)
def writeln(self, arg=None):
if arg:
self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
class TestRunner:
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they
occur, and a summary of the results at the end of the test run.
"""
def __init__(self, stream=sys.stderr, descriptions=1, verbosity=2):
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
def _makeResult(self):
return _TextTestResult(self.stream, self.descriptions, self.verbosity)
def run(self, test):
"Run the given test case or test suite."
result = self._makeResult()
startTime = time.time()
test(result)
stopTime = time.time()
timeTaken = stopTime - startTime
result.printErrors()
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
if not result.wasSuccessful():
self.stream.write("FAILED (")
failed, errored = map(len, (result.failures, result.errors))
if failed:
self.stream.write("failures=%d" % failed)
if errored:
if failed:
self.stream.write(", ")
self.stream.write("errors=%d" % errored)
self.stream.writeln(")")
else:
self.stream.writeln("OK")
return result
def run_test_module(test_modules_list=None, test_prefix=None):
suite = unittest.TestSuite()
finder = doctest.DocTestFinder(exclude_empty=False) # finder for doctest
if test_prefix:
unittest.TestLoader.testMethodPrefix = test_prefix
if not test_modules_list:
test_modules_list = []
elif not isinstance(test_modules_list, list):
test_modules_list = [test_modules_list]
test_modules_list.append('__main__')
for test in test_modules_list:
# Doctest
suite.addTest(doctest.DocTestSuite(test, test_finder=finder))
# unittest
suite.addTest(unittest.loader.TestLoader().loadTestsFromModule(test))
TestRunner().run(suite)
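# Minimal entry-point sketch: run_test_module() always appends '__main__' to
# the module list, so calling it with no arguments collects the doctests and
# unittest cases of the calling module itself.
if __name__ == '__main__':
    run_test_module()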
|
areitz/pants
|
src/python/pants/backend/core/targets/resources.py
|
Python
|
apache-2.0
| 1,552
| 0.005799
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.payload import Payload
from pants.base.target import Target
class Resources(Target):
"""A set of files accessible as resources from the JVM classpath.
    Looking for loose files in your application bundle? Those are
`bundle <#bundle>`_\s.
Resources are Java-style resources accessible via the ``Class.getResource``
and friends API. In the ``jar`` goal, the resource files are placed in the resulting `.jar`.
"""
def __init__(self, address=None, payload=None, sources=None, **kwargs):
"""
:param sources: Files to "include". Paths are relative to the
BUILD file's directory.
:type sources: ``Fileset`` or list of strings
"""
payload = payload or Payload()
payload.add_fields({
'sources': self.create_sources_field(sources,
sources_rel_path=address.spec_path, key_arg='sources'),
})
super(Resources, self).__init__(address=address, payload=payload, **kwargs)
def has_sources(self, extension=None):
"""``Resources`` never own sources of any particular native type, like for example
``JavaLibrary``.
"""
# TODO(John Sirois): track down the reason for this hack and kill or explain better.
return extension is None
|
JasonThomasData/payslip_code_test
|
app/models/db_connector.py
|
Python
|
mit
| 498
| 0.002008
|
import os
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class DBConnector():
'''
    where every row holds the details of what one employee was paid for an entire month.
'''
@classmethod
def get_session(cls):
database_path = os.environ["SQL_DATABASE"]
engine = create_engine(database_path)
session = sessionmaker(bind=engine)()
return session
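# --- Hedged usage sketch (the sqlite URL is an assumption; SQL_DATABASE can
# hold any SQLAlchemy connection string) ---
if __name__ == '__main__':
    os.environ.setdefault("SQL_DATABASE", "sqlite:///payslips.db")
    session = DBConnector.get_session()
    session.close()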
|
pixeltasim/IRCBot-Pixeltasim
|
plugins/tale.py
|
Python
|
unlicense
| 3,149
| 0.039695
|
from whiffle import wikidotapi
from util import hook
import re
import time,threading
@hook.command
def tale(inp): #this is for WL use, easily adaptable to SCP
".tale <Article Name> -- Will return first page containing exact match to Article Name"
if firstrefresh == 0: #make sure the cache actually exists
return "Cache has not yet updated, please wait a minute and search again."
api = wikidotapi.connection() #creates API connection
api.Site = "wanderers-library"
pages = api.refresh_pages() #refresh page list provided by the API, is only a list of strings
line = re.sub("[ ,']",'-',inp) #removes spaces and apostrophes and replaces them with dashes, per wikidot's standards
for page in pages:
        for item in pagecache: #iterates through every attribute in the pagecache, similar to .author
if line.lower() in page: #check for first match to input
if api.page_exists(page.lower()): #only api call in .tale, verification of page existence
try: #must do error handling as the key will be wrong for most of the items
if "entry" in item[page]["tags"]: #check for tag
rating = item[page]["rating"]
if rating < 0:
ratesign = "-"
if rating >= 0:
ratesign = "+" #adds + or minus sign in front of rating
ratestring = "Rating["+ratesign+str(rating)+"]"
author = item[page]["created_by"]
authorstring = "Written by "+author
title = item[page]["title"]
sepstring = ", "
return "nonick::"+title+" ("+ratestring+sepstring+authorstring+") - http://wanderers-library.wikidot.com/"+page.lower() #returns the string, nonick:: means that the caller's nick isn't prefixed
except KeyError:
pass
else:
return "nonick::Match found but page does not exist, please consult pixeltasim for error."
for page in pages:
        for item in pagecache: #iterates through every attribute in the pagecache, similar to .author
try:
if inp.lower() in item[page]["title"].lower(): #check for first match to input
print item[page]["title"].lower()
if api.page_exists(page.lower()): #only api call in .tale, verification of page existence
#must do error handling as the key will be wrong for most of the items
if "entry" in item[page]["tags"]: #check for tag
rating = item[page]["rating"]
if rating < 0:
ratesign = "-"
if rating >= 0:
ratesign = "+" #adds + or minus sign in front of rating
ratestring = "Rating["+ratesign+str(rating)+"]"
author = item[page]["created_by"]
authorstring = "Written by "+author
title = item[page]["title"]
sepstring = ", "
return "nonick::"+title+" ("+ratestring+sepstring+authorstring+") - http://wanderers-library.wikidot.com/"+page.lower() #returns the string, nonick:: means that the caller's nick isn't prefixed
else:
return "nonick::Page was found but it is either untagged or an administrative page."
else:
return "nonick::Match found but page does not exist, please consult pixeltasim for error."
except KeyError:
pass
return "nonick::Page not found"
|
cemarchi/biosphere
|
Src/BioAnalyzer/CrossCutting/Contracts/GenePriotization/GlobalDifferentialDnaMethylationSampleRepositoryBase.py
|
Python
|
bsd-3-clause
| 1,566
| 0.012133
|
from abc import ABCMeta, abstractmethod
from typing import Dict
from Src.BioAnalyzer.CrossCutting.DTOs.GenePrioritization.GlobalDifferentialSampleDto import GlobalDifferentialSampleDto
from Src.BioAnalyzer.CrossCutting.Filters.GenePrioritization.FeSingleGlobalDifferentialSample import \
FeSingleGlobalDifferentialSample
from Src.Core.Data.MongoRepositoryBase import MongoRepositoryBase
class GlobalDifferentialDnaMethylationSampleRepositoryBase(MongoRepositoryBase, metaclass=ABCMeta):
"""
    Class responsible for manipulating the gene annotation file
"""
def __init__(self, db):
"""
Constructor.
:param db:
"""
super().__init__(db, 'global_diff_dna_methylation_samples')
@abstractmethod
def add_one(self, diff_dna_methylation_sample: GlobalDifferentialSampleDto):
"""
        :param diff_dna_methylation_sample:
:return:
"""
pass
@abstractmethod
def get_one(self, fe_diff_dna_methylation_sample: FeSingleGlobalDifferentialSample, dto_class=None,
include_or_exclude_fields: Dict[str, int] = None) -> FeSingleGlobalDifferentialSample:
"""
:param fe_diff_dna_methylation_sample:
:param dto_class:
:param include_or_exclude_fields:
:return:
"""
pass
@abstractmethod
def replace_one(self, diff_dna_methylation_sample: GlobalDifferentialSampleDto):
"""
:param diff_dna_methylation_sample:
:return:
"""
pass
|
nyov/mooshell
|
models.py
|
Python
|
mit
| 14,529
| 0.001445
|
from datetime import timedelta, datetime
import os
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.contrib.auth.models import User
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
from managers import JSDependencyManager, JSLibraryManager, PastieManager, \
ShellManager, DraftManager
def next_week():
return datetime.now() + timedelta(days=7)
class JSLibraryGroup(models.Model):
"""
Main library to load - MooTools core, jQuery, Prototype, etc.
"""
name = models.CharField('Name', max_length=100, unique=True)
description = models.TextField(blank=True, null=True)
# TODO: check if selected is used at all
selected = models.BooleanField(blank=True, default=False)
def __unicode__(self):
return self.name
class Admin:
pass
class JSLibraryWrap(models.Model):
"""
how to wrap the code in specific library
"""
name = models.CharField(max_length=255)
code_start = models.TextField()
code_end = models.TextField()
def __unicode__(self):
return self.name
class Admin:
pass
class Meta:
verbose_name_plural = "JS Library Code Wrappers"
class JSLibrary(models.Model):
"""
Version of the library - Mootools 1.2.4, etc.
"""
library_group = models.ForeignKey(JSLibraryGroup, related_name="libs")
version = models.CharField(max_length=30, null=True, blank=True)
href = models.CharField('URL to the core library file', max_length=255,
unique=True)
selected = models.BooleanField(blank=True, default=False)
wrap_d = models.ForeignKey(JSLibraryWrap, related_name='lib_for_domready')
wrap_l = models.ForeignKey(JSLibraryWrap, related_name='lib_for_load')
active = models.BooleanField(default=True, blank=True)
objects = JSLibraryManager()
def get_name(self):
return ' '.join((self.library_group.name, self.version))
def __unicode__(self):
return '%s / %s' % (self.get_name(),
'active' if self.active else '-')
class Admin:
pass
class Meta:
verbose_name_plural = "JS Library versions"
ordering = ['-active', 'library_group', '-version']
class JSDependency(models.Model):
"""
Additional library file - MooTools more, Scriptaculous, etc.
"""
library = models.ForeignKey(JSLibrary)
name = models.CharField(max_length=150)
url = models.CharField('URL to the library file', max_length=255)
description = models.TextField(blank=True, null=True)
selected = models.BooleanField(blank=True, default=False)
ord = models.IntegerField("Order", default=0, blank=True, null=True)
active = models.BooleanField(default=True, blank=True)
objects = JSDependencyManager()
def __unicode__(self):
return self.name
class Admin:
pass
class Meta:
verbose_name_plural = "JS Dependencies"
# highest number on top
ordering = ['-active', '-library', '-ord']
class ExternalResource(models.Model):
url = models.CharField('URL to the resource file', max_length=255,
unique=True)
class Admin:
pass
def __unicode__(self):
return self.filename
def __str__(self):
return self.filename
class Meta:
ordering = ["id"]
@property
def filename(self):
if not hasattr(self, '_filename'):
self._filename = ExternalResource.get_filename(self.url)
return self._filename
@property
def extension(self):
if not hasattr(self, '_extension'):
self._extension = ExternalResource.get_extension(self.url)
return self._extension
@staticmethod
def get_filename(url):
return url.split('/')[-1]
@staticmethod
def get_extension(url):
return os.path.splitext(ExternalResource.get_filename(url))[1][1:]
WRAPCHOICE = (
('h', 'no Wrap (HEAD)'),
('b', 'no Wrap (BODY)'),
('d', 'onDomready'),
('l', 'onLoad'),
)
class DocType(models.Model):
"""
DocString to choose from
"""
name = models.CharField(max_length=255, unique=True)
code = models.TextField(blank=True, null=True)
type = models.CharField(max_length=100, default='html', blank=True)
template = models.CharField(max_length=100, default='xhtml.1.0.strict.html',
blank=True)
selected = models.BooleanField(default=False, blank=True)
def __unicode__(self):
return self.code
class Admin:
pass
class Pastie(models.Model):
"""
default metadata
"""
slug = models.CharField(max_length=255, unique=True, blank=True)
created_at = models.DateTimeField(default=datetime.now)
author = models.ForeignKey(User, null=True, blank=True)
example = models.BooleanField(default=False, blank=True)
favourite = models.ForeignKey('Shell', null=True, blank=True,
related_name='favs')
objects = PastieManager()
def set_slug(self):
from random import choice
allowed_chars = 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'
check_slug = True
# repeat until the slug will be unique
while check_slug:
self.slug = ''.join([choice(allowed_chars) \
for i in range(settings.MOOSHELL_SLUG_LENGTH)])
try:
check_slug = Pastie.objects.get(slug=self.slug)
except ObjectDoesNotExist:
check_slug = False
def __unicode__(self):
return self.slug
def get_latest(self):
shells = Shell.objects.filter(
pastie__id=self.id).order_by('-version')
if shells:
return shells[0]
else:
return []
def get_absolute_url(self):
return self.favourite.get_absolute_url() \
if self.favourite else reverse('pastie',args=[self.slug])
def get_delete_url(self):
return reverse('pastie_delete', args=[self.slug])
def get_delete_confirmation_url(self):
return reverse('pastie_delete_confirmation', args=[self.slug])
def get_title(self):
return self.favourite.title
class Admin:
pass
class Meta:
verbose_name_plural = "Pasties"
ordering = ['-example', '-created_at']
def make_slug_on_create(instance, **kwargs):
if kwargs.get('raw', False):
return
if not instance.id and not instance.slug:
instance.set_slug()
pre_save.connect(make_slug_on_create, sender=Pastie)
class Draft(models.Model):
"""
Saves the draft (only one per user)
"""
author = models.ForeignKey(User, unique=True, related_name='draft')
html = models.TextField()
    objects = DraftManager()
LANG_HTML = ((0, 'HTML'),)
LANG_CSS = ((0, 'CSS'),
(1, 'SCSS'))
LANG_JS = ((0, 'JavaScript'),
(1, 'CoffeeScript'),
(2, 'JavaScript 1.7'))
class Shell(models.Model):
"""
Holds shell data
"""
    PANEL_HTML = [i[1] for i in LANG_HTML]
PANEL_CSS = [i[1] for i in LANG_CSS]
PANEL_JS = [i[1] for i in LANG_JS]
pastie = models.ForeignKey(Pastie, related_name='shells')
version = models.IntegerField(default=0, blank=True)
revision = models.IntegerField(default=0, blank=True, null=True)
# authoring
author = models.ForeignKey(User, null=True, blank=True)
private = models.BooleanField(default=False, blank=True)
# meta
title = models.CharField(max_length=255, null=True, blank=True)
description = models.TextField(null=True, blank=True)
# STATISTICS (a bit)
displayed = models.PositiveIntegerField(default=1, null=True, blank=True)
# is the shell private (do not list in search)
# how long author she should be hold by the system ?
valid_until = models.DateTimeField('Valid until', default=None,
null=True, blank=True)
# editors
    code_css = models.TextField('CSS', null=True, blank=True)
|
savi-dev/heat
|
heat/engine/parameters.py
|
Python
|
apache-2.0
| 14,053
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import json
import re
from heat.common import exception
PARAMETER_KEYS = (
TYPE, DEFAULT, NO_ECHO, ALLOWED_VALUES, ALLOWED_PATTERN,
MAX_LENGTH, MIN_LENGTH, MAX_VALUE, MIN_VALUE,
DESCRIPTION, CONSTRAINT_DESCRIPTION
) = (
'Type', 'Default', 'NoEcho', 'AllowedValues', 'AllowedPattern',
'MaxLength', 'MinLength', 'MaxValue', 'MinValue',
'Description', 'ConstraintDescription'
)
PARAMETER_TYPES = (
    STRING, NUMBER, COMMA_DELIMITED_LIST, JSON
) = (
'String', 'Number', 'CommaDelimitedList', 'Json'
)
PSEUDO_PARAMETERS = (
PARAM_STACK_ID, PARAM_STACK_NAME, PARAM_REGION
) = (
'AWS::StackId', 'AWS::StackName', 'AWS::Region'
)
class ParamSchema(dict):
'''Parameter schema.'''
def __init__(self, schema):
super(ParamSchema, self).__init__(schema)
def do_check(self, name, value, keys):
for k in keys:
check = self.check(k)
const = self.get(k)
if check is None or const is None:
continue
check(name, value, const)
def constraints(self):
ptype = self[TYPE]
keys = {
STRING: [ALLOWED_VALUES, ALLOWED_PATTERN, MAX_LENGTH, MIN_LENGTH],
NUMBER: [ALLOWED_VALUES, MAX_VALUE, MIN_VALUE],
JSON: [MAX_LENGTH, MIN_LENGTH]
}.get(ptype)
list_keys = {
COMMA_DELIMITED_LIST: [ALLOWED_VALUES],
JSON: [ALLOWED_VALUES]
}.get(ptype)
return (keys, list_keys)
def validate(self, name, value):
(keys, list_keys) = self.constraints()
if keys:
self.do_check(name, value, keys)
if list_keys:
values = value
for value in values:
self.do_check(name, value, list_keys)
def raise_error(self, name, message, desc=True):
if desc:
message = self.get(CONSTRAINT_DESCRIPTION) or message
raise ValueError('%s %s' % (name, message))
def check_allowed_values(self, name, val, const, desc=None):
vals = list(const)
if val not in vals:
err = '"%s" not in %s "%s"' % (val, ALLOWED_VALUES, vals)
self.raise_error(name, desc or err)
def check_allowed_pattern(self, name, val, p, desc=None):
m = re.match(p, val)
if m is None or m.end() != len(val):
err = '"%s" does not match %s "%s"' % (val, ALLOWED_PATTERN, p)
self.raise_error(name, desc or err)
def check_max_length(self, name, val, const, desc=None):
max_len = int(const)
val_len = len(val)
if val_len > max_len:
err = 'length (%d) overflows %s (%d)' % (val_len,
MAX_LENGTH, max_len)
self.raise_error(name, desc or err)
def check_min_length(self, name, val, const, desc=None):
min_len = int(const)
val_len = len(val)
if val_len < min_len:
err = 'length (%d) underflows %s (%d)' % (val_len,
MIN_LENGTH, min_len)
self.raise_error(name, desc or err)
def check_max_value(self, name, val, const, desc=None):
max_val = float(const)
val = float(val)
if val > max_val:
err = '%d overflows %s %d' % (val, MAX_VALUE, max_val)
self.raise_error(name, desc or err)
def check_min_value(self, name, val, const, desc=None):
min_val = float(const)
val = float(val)
if val < min_val:
err = '%d underflows %s %d' % (val, MIN_VALUE, min_val)
self.raise_error(name, desc or err)
def check(self, const_key):
return {ALLOWED_VALUES: self.check_allowed_values,
ALLOWED_PATTERN: self.check_allowed_pattern,
MAX_LENGTH: self.check_max_length,
MIN_LENGTH: self.check_min_length,
MAX_VALUE: self.check_max_value,
MIN_VALUE: self.check_min_value}.get(const_key)
class Parameter(object):
'''A template parameter.'''
def __new__(cls, name, schema, value=None, validate_value=True):
'''Create a new Parameter of the appropriate type.'''
if cls is not Parameter:
return super(Parameter, cls).__new__(cls)
param_type = schema[TYPE]
if param_type == STRING:
ParamClass = StringParam
elif param_type == NUMBER:
ParamClass = NumberParam
elif param_type == COMMA_DELIMITED_LIST:
ParamClass = CommaDelimitedListParam
elif param_type == JSON:
ParamClass = JsonParam
else:
raise ValueError(_('Invalid Parameter type "%s"') % param_type)
return ParamClass(name, schema, value, validate_value)
def __init__(self, name, schema, value=None, validate_value=True):
'''
Initialise the Parameter with a name, schema and optional user-supplied
value.
'''
self.name = name
self.schema = schema
self.user_value = value
if validate_value:
if self.has_default():
self.validate(self.default())
if self.user_value is not None:
self.validate(self.user_value)
elif not self.has_default():
raise exception.UserParameterMissing(key=self.name)
def value(self):
'''Get the parameter value, optionally sanitising it for output.'''
if self.user_value is not None:
return self.user_value
if self.has_default():
return self.default()
raise KeyError(_('Missing parameter %s') % self.name)
def no_echo(self):
'''
Return whether the parameter should be sanitised in any output to
the user.
'''
return str(self.schema.get(NO_ECHO, 'false')).lower() == 'true'
def description(self):
'''Return the description of the parameter.'''
return self.schema.get(DESCRIPTION, '')
def has_default(self):
'''Return whether the parameter has a default value.'''
return DEFAULT in self.schema
def default(self):
'''Return the default value of the parameter.'''
return self.schema.get(DEFAULT)
def __str__(self):
'''Return a string representation of the parameter'''
value = self.value()
if self.no_echo():
return '******'
else:
return str(value)
class NumberParam(Parameter):
'''A template parameter of type "Number".'''
def __int__(self):
'''Return an integer representation of the parameter'''
return int(super(NumberParam, self).value())
def __float__(self):
'''Return a float representation of the parameter'''
return float(super(NumberParam, self).value())
def validate(self, val):
self.schema.validate(self.name, val)
def value(self):
try:
return int(self)
except ValueError:
return float(self)
class StringParam(Parameter):
'''A template parameter of type "String".'''
def validate(self, val):
self.schema.validate(self.name, val)
class CommaDelimitedListParam(Parameter, collections.Sequence):
'''A template parameter of type "CommaDelimitedList".'''
def __init__(self, name, schema, value=None, validate_value=True):
super(CommaDelimitedListParam, self).__init__(name, schema, value,
validate_value)
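# --- Illustrative sketch (values assumed): a plain dict wrapped in
# ParamSchema drives both the type dispatch in Parameter.__new__ and the
# constraint checks in validate(). ---
def _demo_parameter():
    schema = ParamSchema({
        TYPE: STRING,
        DEFAULT: 'm1.small',
        ALLOWED_VALUES: ['m1.small', 'm1.large'],
    })
    param = Parameter('InstanceType', schema, value='m1.large')
    assert str(param) == 'm1.large'   # NoEcho is unset, so the value is shown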
|
sameersingh/uci-statnlp
|
hw2/lm.py
|
Python
|
apache-2.0
| 3,681
| 0.002173
|
#!/bin/python
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import itertools
from math import log
import sys
# Python 3 backwards compatibility tricks
if sys.version_info.major > 2:
def xrange(*args, **kwargs):
return iter(range(*args, **kwargs))
def unicode(*args, **kwargs):
return str(*args, **kwargs)
class LangModel:
def fit_corpus(self, corpus):
"""Learn the language model for the whole corpus.
The corpus consists of a list of sentences."""
for s in corpus:
self.fit_sentence(s)
self.norm()
    def perplexity(self, corpus):
"""Computes the perplexity of the corpus by the model.
Assumes the model uses an EOS symbol at the end of each sentence.
"""
numOOV = self.get_num_oov(corpus)
return pow(2.0, self.entropy(corpus, numOOV))
def get_num_oov(self, corpus):
vocab_set = set(self.vocab())
words_set = set(itertools.chain(*corpus))
        numOOV = len(words_set - vocab_set)
return numOOV
def entropy(self, corpus, numOOV):
num_words = 0.0
sum_logprob = 0.0
for s in corpus:
num_words += len(s) + 1 # for EOS
sum_logprob += self.logprob_sentence(s, numOOV)
return -(1.0/num_words)*(sum_logprob)
def logprob_sentence(self, sentence, numOOV):
p = 0.0
for i in xrange(len(sentence)):
p += self.cond_logprob(sentence[i], sentence[:i], numOOV)
p += self.cond_logprob('END_OF_SENTENCE', sentence, numOOV)
return p
# required, update the model when a sentence is observed
def fit_sentence(self, sentence): pass
# optional, if there are any post-training steps (such as normalizing probabilities)
def norm(self): pass
# required, return the log2 of the conditional prob of word, given previous words
def cond_logprob(self, word, previous, numOOV): pass
    # required, the list of words the language model supports (including EOS)
def vocab(self): pass
class Unigram(LangModel):
def __init__(self, unk_prob=0.0001):
self.model = dict()
self.lunk_prob = log(unk_prob, 2)
def inc_word(self, w):
if w in self.model:
self.model[w] += 1.0
else:
self.model[w] = 1.0
def fit_sentence(self, sentence):
for w in sentence:
self.inc_word(w)
self.inc_word('END_OF_SENTENCE')
def norm(self):
"""Normalize and convert to log2-probs."""
tot = 0.0
for word in self.model:
tot += self.model[word]
ltot = log(tot, 2)
for word in self.model:
self.model[word] = log(self.model[word], 2) - ltot
def cond_logprob(self, word, previous, numOOV):
if word in self.model:
return self.model[word]
else:
return self.lunk_prob-log(numOOV, 2)
def vocab(self):
return self.model.keys()
class Ngram(LangModel):
def __init__(self, ngram_size): pass
# required, update the model when a sentence is observed
def fit_sentence(self, sentence): pass
# optional, if there are any post-training steps (such as normalizing probabilities)
def norm(self): pass
# required, return the log2 of the conditional prob of word, given previous words
def cond_logprob(self, word, previous, numOOV): pass
    # required, the list of words the language model supports (including EOS)
def vocab(self): pass
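# --- Illustrative sketch only (hypothetical; NOT the solution to the Ngram
# stub above). Shows one way the interface fits together as an add-one
# smoothed bigram; the padding symbol and smoothing choice are assumptions.
class BigramSketch(LangModel):
    def __init__(self):
        self.counts = dict()   # previous word -> {word: count}
        self.words = set()
    def fit_sentence(self, sentence):
        prev = '*'             # assumed start-of-sentence pad
        for w in list(sentence) + ['END_OF_SENTENCE']:
            hist = self.counts.setdefault(prev, dict())
            hist[w] = hist.get(w, 0.0) + 1.0
            self.words.add(w)
            prev = w
    def cond_logprob(self, word, previous, numOOV):
        prev = previous[-1] if previous else '*'
        hist = self.counts.get(prev, dict())
        vocab_size = len(self.words) + numOOV
        num = hist.get(word, 0.0) + 1.0    # add-one smoothing
        return log(num / (sum(hist.values()) + vocab_size), 2)
    def vocab(self):
        return list(self.words)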
|
o-zander/django-filer
|
filer/admin/folderadmin.py
|
Python
|
bsd-3-clause
| 53,477
| 0.002711
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
import itertools
import os
import re
from django import forms
from django.conf import settings as django_settings
from django.contrib import messages
from django.contrib.admin import helpers
from django.contrib.admin.util import quote, unquote, capfirst
from django.core.exceptions import ValidationError
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.db import router, models
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render, get_object_or_404
try:
from django.utils.encoding import force_text
except ImportError:
# Django < 1.5
from django.utils.encoding import force_unicode as force_text
from django.utils.html import escape
from django.utils.http import urlquote
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext, ugettext_lazy
from filer import settings
from filer.admin.forms import (CopyFilesAndFoldersForm, ResizeImagesForm,
RenameFilesForm)
from filer.admin.permissions import PrimitivePermissionAwareModelAdmin
from filer.admin.patched.admin_utils import get_deleted_objects
from filer.admin.tools import (userperms_for_request,
check_folder_edit_permissions,
check_files_edit_permissions,
check_files_read_permissions,
check_folder_read_permissions,
admin_each_context)
from filer.models import (Folder, FolderRoot, UnfiledImages, File, tools,
ImagesWithMissingData, FolderPermission, Image)
from filer.settings import FILER_STATICMEDIA_PREFIX, FILER_PAGINATE_BY
from filer.thumbnail_processors import normalize_subject_location
from filer.utils.compatibility import get_delete_permission
from filer.utils.filer_easy_thumbnails import FilerActionThumbnailer
from filer.views import (popup_status, popup_param, selectfolder_status,
selectfolder_param)
class AddFolderPopupForm(forms.ModelForm):
folder = forms.HiddenInput()
class Meta:
model = Folder
fields = ('name',)
class FolderAdmin(PrimitivePermissionAwareModelAdmin):
list_display = ('name',)
exclude = ('parent',)
list_per_page = 20
list_filter = ('owner',)
search_fields = ['name', 'files__name']
raw_id_fields = ('owner',)
save_as = True # see ImageAdmin
actions = ['move_to_clipboard', 'files_set_public', 'files_set_private',
'delete_files_or_folders', 'move_files_and_folders',
'copy_files_and_folders', 'resize_images', 'rename_files']
directory_listing_template = 'admin/filer/folder/directory_listing.html'
order_by_file_fields = ('_file_size', 'original_filename', 'name', 'owner',
'uploaded_at', 'modified_at')
def get_form(self, request, obj=None, **kwargs):
"""
Returns a Form class for use in the admin add view. This is used by
add_view and change_view.
"""
parent_id = request.GET.get('parent_id', None)
if not parent_id:
parent_id = request.POST.get('parent_id', None)
if parent_id:
return AddFolderPopupForm
else:
folder_form = super(FolderAdmin, self).get_form(
request, obj=None, **kwargs)
def folder_form_clean(form_obj):
cleaned_data = form_obj.cleaned_data
folders_with_same_name = Folder.objects.filter(
parent=form_obj.instance.parent,
name=cleaned_data['name'])
if form_obj.instance.pk:
folders_with_same_name = folders_with_same_name.exclude(
pk=form_obj.instance.pk)
if folders_with_same_name.exists():
raise ValidationError('Folder with this name already exists.')
return cleaned_data
# attach clean to the default form rather than defining a new form class
folder_form.clean = folder_form_clean
return folder_form
def save_form(self, request, form, change):
"""
Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added.
"""
r = form.save(commit=False)
parent_id = request.GET.get('parent_id', None)
if not parent_id:
parent_id = request.POST.get('parent_id', None)
if parent_id:
parent = Folder.objects.get(id=parent_id)
r.parent = parent
return r
def response_change(self, request, obj):
"""
Overrides the default to be able to forward to the directory listing
instead of the default change_list_view
"""
r = super(FolderAdmin, self).response_change(request, obj)
## Code borrowed from django ModelAdmin to determine changelist on the fly
if r['Location']:
# it was a successful save
if (r['Location'] in ['../'] or
r['Location'] == self._get_post_url(obj)):
if obj.parent:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': obj.parent.id})
else:
url = reverse('admin:filer-directory_listing-root')
url = "%s%s%s" % (url,popup_param(request),
selectfolder_param(request,"&"))
return HttpResponseRedirect(url)
else:
# this means it probably was a save_and_continue_editing
pass
return r
def render_change_form(self, request, context, add=False, change=False,
form_url='', obj=None):
extra_context = {'show_delete': True,
'is_popup': popup_status(request),
'select_folder': selectfolder_status(request),}
context.update(extra_context)
return super(FolderAdmin, self).render_change_form(
request=request, context=context, add=False,
change=False, form_url=form_url, obj=obj)
def delete_view(self, request, object_id, extra_context=None):
"""
Overrides the default to enable redirecting to the directory view after
deletion of a folder.
we need to fetch the object and find out who the parent is
before super, because super will delete the object and make it
impossible to find out the parent folder to redirect to.
"""
parent_folder = None
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
parent_folder = obj.parent
except self.model.DoesNotExist:
obj = None
r = super(FolderAdmin, self).delete_view(
request=request, object_id=object_id,
extra_context=extra_context)
url = r.get("Location", None)
if url in ["../../../../", "../../"] or url == self._get_post_url(obj):
if parent_folder:
url = reverse('admin:filer-directory_listing',
kwargs={'folder_id': parent_folder.id})
else:
url = reverse('admin:filer-directory_listing-root')
url = "%s%s%s" % (url, popup_param(request),
selectfolder_param(request,"&"))
return HttpResponseRedirect(url)
return r
def icon_img(self, xs):
return mark_safe(('<img src="%simg/icons/plainfolder_32x32.png" ' + \
'alt="Folder Icon" />') % FILER_STATICMEDIA_PREFIX)
icon_img.allow_tags = True
def get_urls(self):
from django.conf.urls import patterns, url
urls = super(FolderAdmin, self).get_urls()
from filer i
|
piqueserver/piqueserver
|
piqueserver/scheduler.py
|
Python
|
gpl-3.0
| 1,666
| 0
|
# Copyright (c) Mathias Kaerlev 2012.
# This file is part of pyspades.
# pyspades is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pyspades is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with pyspades. If not, see <http://www.gnu.org/licenses/>.
from weakref import WeakSet
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
class Scheduler:
def __init__(self, protocol):
self.protocol = protocol
self.calls = WeakSet()
self.loops = WeakSet()
def call_later(self, *arg, **kw):
call = reactor.callLater(*arg, **kw)
self.calls.add(call)
return call
def call_end(self, *arg, **kw):
call = self.protocol.call_end(*arg, **kw)
self.calls.add(call)
return call
def loop_call(self, delay, func, *arg, **kw):
loop = LoopingCall(func, *arg, **kw)
loop.start(delay, False)
self.loops.add(loop)
return loop
def reset(self):
for call in self.calls:
if call.active():
call.cancel()
for loop in self.loops:
if loop.running:
loop.stop()
self.calls = WeakSet()
self.loops = WeakSet()
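# --- Hedged usage sketch (the protocol object and callback names are
# assumed; call_end is only relevant when the protocol provides it) ---
#   scheduler = Scheduler(protocol)
#   scheduler.call_later(5.0, announce_map)      # one-shot via the reactor
#   scheduler.loop_call(1.0, broadcast_scores)   # repeating LoopingCall
#   scheduler.reset()                            # cancel all pending work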
|
anksp21/Community-Zenpacks
|
ZenPacks.community.DistributedCollectors/setup.py
|
Python
|
gpl-2.0
| 2,617
| 0.010317
|
################################
# These variables are overwritten by Zenoss when the ZenPack is exported
# or saved. Do not modify them directly here.
# NB: PACKAGES is deprecated
NAME = 'ZenPacks.community.DistributedCollectors'
VERSION = '1.7'
AUTHOR = 'Egor Puzanov'
LICENSE = ''
NAMESPACE_PACKAGES = ['ZenPacks', 'ZenPacks.community']
PACKAGES = ['ZenPacks', 'ZenPacks.community', 'ZenPacks.community.DistributedCollectors']
INSTALL_REQUIRES = []
COMPAT_ZENOSS_VERS = '>=2.5'
PREV_ZENPACK_NAME = ''
# STOP_REPLACEMENTS
################################
# Zenoss will not overwrite any changes you make below here.
from setuptools import setup, find_packages
setup(
# This ZenPack metadata should usually be edited with the Zenoss
# ZenPack edit page. Whenever the edit page is submitted it will
# overwrite the values below (the ones it knows about) with new values.
name = NAME,
version = VERSION,
author = AUTHOR,
license = LICENSE,
# This is the version spec which indicates what version
|
s of Zenoss
# this ZenPack is c
|
ompatible with
compatZenossVers = COMPAT_ZENOSS_VERS,
# previousZenPackName is a facility for telling Zenoss that the name
# of this ZenPack has changed. If no ZenPack with the current name is
# installed then a zenpack of this name if installed will be upgraded.
prevZenPackName = PREV_ZENPACK_NAME,
# Indicate to setuptools which namespace packages the zenpack
# participates in
namespace_packages = NAMESPACE_PACKAGES,
# Tell setuptools what packages this zenpack provides.
packages = find_packages(),
# Tell setuptools to figure out for itself which files to include
# in the binary egg when it is built.
include_package_data = True,
# The MANIFEST.in file is the recommended way of including additional files
# in your ZenPack. package_data is another.
#package_data = {}
# Indicate dependencies on other python modules or ZenPacks. This line
# is modified by zenoss when the ZenPack edit page is submitted. Zenoss
# tries to put add/delete the names it manages at the beginning of this
# list, so any manual additions should be added to the end. Things will
# go poorly if this line is broken into multiple lines or modified to
# dramatically.
install_requires = INSTALL_REQUIRES,
# Every ZenPack egg must define exactly one zenoss.zenpacks entry point
# of this form.
entry_points = {
'zenoss.zenpacks': '%s = %s' % (NAME, NAME),
},
# All ZenPack eggs must be installed in unzipped form.
zip_safe = False,
)
|
apache/incubator-airflow
|
tests/providers/google/cloud/operators/test_gcs_system_helper.py
|
Python
|
apache-2.0
| 2,215
| 0
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from airflow.providers.google.cloud.example_dags.example_gcs import (
BUCKET_1,
BUCKET_2,
PATH_TO_SAVED_FILE,
PATH_TO_TRANSFORM_SCRIPT,
PATH_TO_UPLOAD_FILE,
)
from tests.test_utils.logging_command_executor import CommandExecutor
class GcsSystemTestHelper(CommandExecutor):
@staticmethod
def create_test_file():
# Create test file for upload
with open(PATH_TO_UPLOAD_FILE, "w+") as file:
file.writelines(["This is a test file"])
# Create script for transform operator
with open(PATH_TO_TRANSFORM_SCRIPT, "w+") as file:
file.write(
"""import sys
source = sys.argv[1]
destination = sys.argv[2]
print('running script')
with open(source, "r") as src, open(destination, "w+") as dest:
lines = [l.upper() for l in src.readlines()]
    print(lines)
    dest.writelines(lines)
"""
)
@staticmethod
def remove_test_files():
if os.path.exists(PATH_TO_UPLOAD_FILE):
os.remove(PATH_TO_UPLOAD_FILE)
if os.path.exists(PATH_TO_SAVED_FILE):
os.remove(PATH_TO_SAVED_FILE)
if os.path.exists(PATH_TO_TRANSFORM_SCRIPT):
os.remove(PATH_TO_TRANSFORM_SCRIPT)
def remove_bucket(self):
self.execute_cmd(["gsutil", "rm", "-r", f"gs://{BUCKET_1}"])
self.execute_cmd(["gsutil", "rm", "-r", f"gs://{BUCKET_2}"])
|
nlsynth/iroha
|
py/iroha/__init__.py
|
Python
|
bsd-3-clause
| 73
| 0
|
'''
IROHA Python package.
'''
__all__ = ["iroha", "design_tool", "axi"]
|
tipabu/swift
|
test/unit/common/test_memcached.py
|
Python
|
apache-2.0
| 37,888
| 0
|
# -*- coding:utf-8 -*-
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for swift.common.utils"""
from collections import defaultdict
import errno
from hashlib import md5
import io
import logging
import six
import socket
import time
import unittest
from uuid import uuid4
import os
import mock
from eventlet import GreenPool, sleep, Queue
from eventlet.pools import Pool
from swift.common import memcached
from mock import patch, MagicMock
from test.unit import debug_logger
class MockedMemcachePool(memcached.MemcacheConnPool):
def __init__(self, mocks):
Pool.__init__(self, max_size=2)
self.mocks = mocks
# setting this for the eventlet workaround in the MemcacheConnPool
self._parent_class_getter = super(memcached.MemcacheConnPool, self).get
def create(self):
return self.mocks.pop(0)
class ExplodingMockMemcached(object):
should_explode = True
exploded = False
def sendall(self, string):
if self.should_explode:
self.exploded = True
raise socket.error(errno.EPIPE, os.strerror(errno.EPIPE))
def readline(self):
if self.should_explode:
self.exploded = True
raise socket.error(errno.EPIPE, os.strerror(errno.EPIPE))
def read(self, size):
if self.should_explode:
self.exploded = True
raise socket.error(errno.EPIPE, os.strerror(errno.EPIPE))
def close(self):
pass
class MockMemcached(object):
# See https://github.com/memcached/memcached/blob/master/doc/protocol.txt
# In particular, the "Storage commands" section may be interesting.
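    # For orientation (an illustrative sketch from that doc, not captured
    # traffic): a storage request looks like
    #   set <key> <flags> <exptime> <bytes>\r\n<data block>\r\n
    # and the server answers "STORED\r\n", which is what handle_set does.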
def __init__(self):
self.inbuf = b''
self.outbuf = b''
self.cache = {}
self.down = False
self.exc_on_delete = False
self.read_return_none = False
self.read_return_empty_str = False
self.close_called = False
def sendall(self, string):
if self.down:
raise Exception('mock is down')
self.inbuf += string
while b'\n' in self.inbuf:
cmd, self.inbuf = self.inbuf.split(b'\n', 1)
parts = cmd.split()
cmd_name = parts[0].decode('ascii').lower()
handler = getattr(self, 'handle_%s' % cmd_name, None)
if handler:
handler(*parts[1:])
else:
raise ValueError('Unhandled command: %s' % parts[0])
def handle_set(self, key, flags, exptime, num_bytes, noreply=b''):
self.cache[key] = flags, exptime, self.inbuf[:int(num_bytes)]
self.inbuf = self.inbuf[int(num_bytes) + 2:]
if noreply != b'noreply':
self.outbuf += b'STORED\r\n'
def handle_add(self, key, flags, exptime, num_bytes, noreply=b''):
value = self.inbuf[:int(num_bytes)]
self.inbuf = self.inbuf[int(num_bytes) + 2:]
if key in self.cache:
if noreply != b'noreply':
self.outbuf += b'NOT_STORED\r\n'
else:
self.cache[key] = flags, exptime, value
if noreply != b'noreply':
self.outbuf += b'STORED\r\n'
def handle_delete(self, key, noreply=b''):
if self.exc_on_delete:
            raise Exception('mock has exc_on_delete set')
if key in self.cache:
del self.cache[key]
if noreply != b'noreply':
self.outbuf += b'DELETED\r\n'
elif noreply != b'noreply':
self.outbuf += b'NOT_FOUND\r\n'
def handle_get(self, *keys):
for key in keys:
if key in self.cache:
val = self.cache[key]
self.outbuf += b' '.join([
b'VALUE',
key,
val[0],
str(len(val[2])).encode('ascii')
]) + b'\r\n'
self.outbuf += val[2] + b'\r\n'
self.outbuf += b'END\r\n'
def handle_incr(self, key, value, noreply=b''):
if key in self.cache:
current = self.cache[key][2]
new_val = str(int(current) + int(value)).encode('ascii')
self.cache[key] = self.cache[key][:2] + (new_val, )
self.outbuf += new_val + b'\r\n'
else:
self.outbuf += b'NOT_FOUND\r\n'
def handle_decr(self, key, value, noreply=b''):
if key in self.cache:
current = self.cache[key][2]
new_val = str(int(current) - int(value)).encode('ascii')
if new_val[:1] == b'-': # ie, val is negative
new_val = b'0'
self.cache[key] = self.cache[key][:2] + (new_val, )
self.outbuf += new_val + b'\r\n'
else:
self.outbuf += b'NOT_FOUND\r\n'
def readline(self):
if self.read_return_empty_str:
return b''
if self.read_return_none:
return None
if self.down:
raise Exception('mock is down')
if b'\n' in self.outbuf:
response, self.outbuf = self.outbuf.split(b'\n', 1)
return response + b'\n'
def read(self, size):
if self.down:
raise Exception('mock is down')
if len(self.outbuf) >= size:
response = self.outbuf[:size]
self.outbuf = self.outbuf[size:]
return response
def close(self):
self.close_called = True
pass
class TestMemcached(unittest.TestCase):
"""Tests for swift.common.memcached"""
def setUp(self):
self.logger = debug_logger()
def test_logger_kwarg(self):
server_socket = '%s:%s' % ('[::1]', 11211)
client = memcached.MemcacheRing([server_socket])
self.assertIs(client.logger, logging.getLogger())
client = memcached.MemcacheRing([server_socket], logger=self.logger)
self.assertIs(client.logger, self.logger)
def test_get_conns(self):
sock1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock1.bind(('127.0.0.1', 0))
sock1.listen(1)
sock1ipport = '%s:%s' % sock1.getsockname()
sock2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock2.bind(('127.0.0.1', 0))
sock2.listen(1)
orig_port = memcached.DEFAULT_MEMCACHED_PORT
try:
sock2ip, memcached.DEFAULT_MEMCACHED_PORT = sock2.getsockname()
sock2ipport = '%s:%s' % (sock2ip, memcached.DEFAULT_MEMCACHED_PORT)
# We're deliberately using sock2ip (no port) here to test that the
# default port is used.
memcache_client = memcached.MemcacheRing([sock1ipport, sock2ip],
logger=self.logger)
one = two = True
while one or two: # Run until we match hosts one and two
key = uuid4().hex.encode('ascii')
for conn in memcache_client._get_conns(key):
if 'b' not in getattr(conn[1], 'mode', ''):
self.assertIsInstance(conn[1], (
io.RawIOBase, io.BufferedIOBase))
peeripport = '%s:%s' % conn[2].getpeername()
self.assertTrue(peeripport in (sock1ipport, sock2ipport))
if peeripport == sock1ipport:
one = False
if peeripport == sock2ipport:
two = False
self.assertEqual(len(memcache_client._errors[sock1ipport]), 0)
self.assertEqual(len(memcache_client._errors[sock2ip]), 0)
finally:
memcached.DEFAULT_MEMCACHED_PORT = orig_port
def t
tinloaf/home-assistant | homeassistant/components/media_player/yamaha.py | Python | apache-2.0 | 13,441 | 0
"""
Support for Yamaha Receivers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.yamaha/
"""
import logging
import requests
import voluptuous as vol
from homeassistant.components.media_player import (
DOMAIN, MEDIA_PLAYER_SCHEMA, MEDIA_TYPE_MUSIC, PLATFORM_SCHEMA,
SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY, SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK, SUPPORT_SELECT_SOURCE, SUPPORT_STOP,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
SUPPORT_SELECT_SOUND_MODE, MediaPlayerDevice)
from homeassistant.const import (
ATTR_ENTITY_ID, CONF_HOST, CONF_NAME, STATE_IDLE, STATE_OFF, STATE_ON,
STATE_PLAYING)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['rxv==0.5.1']
_LOGGER = logging.getLogger(__name__)
ATTR_ENABLED = 'enabled'
ATTR_PORT = 'port'
CONF_SOURCE_IGNORE = 'source_ignore'
CONF_SOURCE_NAMES = 'source_names'
CONF_ZONE_IGNORE = 'zone_ignore'
CONF_ZONE_NAMES = 'zone_names'
DATA_YAMAHA = 'yamaha_known_receivers'
DEFAULT_NAME = "Yamaha Receiver"
ENABLE_OUTPUT_SCHEMA = MEDIA_PLAYER_SCHEMA.extend({
vol.Required(ATTR_ENABLED): cv.boolean,
vol.Required(ATTR_PORT): cv.string,
})
SERVICE_ENABLE_OUTPUT = 'yamaha_enable_output'
SUPPORT_YAMAHA = SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE | SUPPORT_PLAY \
| SUPPORT_SELECT_SOUND_MODE
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_SOURCE_IGNORE, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_ZONE_IGNORE, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_SOURCE_NAMES, default={}): {cv.string: cv.string},
vol.Optional(CONF_ZONE_NAMES, default={}): {cv.string: cv.string},
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Yamaha platform."""
import rxv
# Keep track of configured receivers so that we don't end up
# discovering a receiver dynamically that we have static config
# for. Map each device from its zone_id to an instance since
# YamahaDevice is not hashable (thus not possible to add to a set).
if hass.data.get(DATA_YAMAHA) is None:
hass.data[DATA_YAMAHA] = {}
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
source_ignore = config.get(CONF_SOURCE_IGNORE)
source_names = config.get(CONF_SOURCE_NAMES)
zone_ignore = config.get(CONF_ZONE_IGNORE)
zone_names = config.get(CONF_ZONE_NAMES)
if discovery_info is not None:
name = discovery_info.get('name')
model = discovery_info.get('model_name')
ctrl_url = discovery_info.get('control_url')
desc_url = discovery_info.get('description_url')
receivers = rxv.RXV(
ctrl_url, model_name=model, friendly_name=name,
unit_desc_url=desc_url).zone_controllers()
_LOGGER.debug("Receivers: %s", receivers)
# when we are dynamically discovered config is empty
zone_ignore = []
elif host is None:
receivers = []
for recv in rxv.find():
receivers.extend(recv.zone_controllers())
else:
ctrl_url = "http://{}:80/YamahaRemoteControl/ctrl".format(host)
receivers = rxv.RXV(ctrl_url, name).zone_controllers()
devices = []
for receiver in receivers:
if receiver.zone in zone_ignore:
continue
device = YamahaDevice(
name, receiver, source_ignore, source_names, zone_names)
# Only add device if it's not already added
if device.zone_id not in hass.data[DATA_YAMAHA]:
hass.data[DATA_YAMAHA][device.zone_id] = device
devices.append(device)
else:
_LOGGER.debug("Ignoring duplicate receiver: %s", name)
def service_handler(service):
"""Handle for services."""
entity_ids = service.data.get(ATTR_ENTITY_ID)
devices = [device for device in hass.data[DATA_YAMAHA].values()
if not entity_ids or device.entity_id in entity_ids]
for device in devices:
port = service.data[ATTR_PORT]
enabled = service.data[ATTR_ENABLED]
device.enable_output(port, enabled)
device.schedule_update_ha_state(True)
hass.services.register(
DOMAIN, SERVICE_ENABLE_OUTPUT, service_handler,
schema=ENABLE_OUTPUT_SCHEMA)
add_entities(devices)
class YamahaDevice(MediaPlayerDevice):
"""Representation of a Yamaha device."""
def __init__(
self, name, receiver, source_ignore, source_names, zone_names):
"""Initialize the Yamaha Receiver."""
self.receiver = receiver
self._muted = False
self._volume = 0
self._pwstate = STATE_OFF
self._current_source = None
self._sound_mode = None
self._sound_mode_list = None
self._source_list = None
self._source_ignore = source_ignore or []
self._source_names = source_names or {}
self._zone_names = zone_names or {}
self._reverse_mapping = None
self._playback_support = None
self._is_playback_supported = False
self._play_status = None
self._name = name
self._zone = receiver.zone
def update(self):
"""Get the latest details from the device."""
try:
self._play_status = self.receiver.play_status()
except requests.exceptions.ConnectionError:
_LOGGER.info("Receiver is offline: %s", self._name)
return
if self.receiver.on:
if self._play_status is None:
self._pwstate = STATE_ON
elif self._play_status.playing:
self._pwstate = STATE_PLAYING
else:
self._pwstate = STATE_IDLE
else:
self._pwstate = STATE_OFF
self._muted = self.receiver.mute
self._volume = (self.receiver.volume / 100) + 1
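        # rxv reports volume in dB (roughly -100.0 .. 0.0), so the line above
        # maps it onto the 0..1 scale Home Assistant expects (an inferred
        # note, not a comment from the original source).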
if self.source_list is None:
self.build_source_list()
current_source = self.receiver.input
self._current_source = self._source_names.get(
current_source, current_source)
self._playback_support = self.receiver.get_playback_support()
        self._is_playback_supported = self.receiver.is_playback_supported(
            self._current_source)
surround_programs = self.receiver.surround_programs()
if surround_programs:
self._sound_mode = self.receiver.surround_program
self._sound_mode_list = surround_programs
else:
self._sound_mode = None
self._sound_mode_list = None
def build_source_list(self):
"""Build the source list."""
        self._reverse_mapping = {alias: source for source, alias in
                                 self._source_names.items()}
self._source_list = sorted(
self._source_names.get(source, source) for source in
self.receiver.inputs()
if source not in self._source_ignore)
@property
def name(self):
"""Return the name of the device."""
name = self._name
zone_name = self._zone_names.get(self._zone, self._zone)
if zone_name != "Main_Zone":
# Zone will be one of Main_Zone, Zone_2, Zone_3
name += " " + zone_name.replace('_', ' ')
return name
@property
def state(self):
"""Return the state of the device."""
return self._pwstate
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def source(self):
"""Return the current input source."""
return self._current_source
@property
def sound_mode(self):
"""Return the current sound mode."""
        return self._sound_mode
oxc/Flexget | flexget/plugins/cli/t411.py | Python | mit | 4,537 | 0.004408
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
from flexget import options, plugin
from flexget.event import event
from flexget.terminal import console
from flexget.manager import Session
try:
from flexget.plugins.internal.api_t411 import (T411Proxy)
except ImportError:
raise plugin.DependencyError(issued_by='cli_series', missing='api_t411',
message='Torrent411 commandline interface not loaded')
def do_cli(manager, options):
"""
    Dispatch CLI action
:param manager:
:param options:
:return:
"""
if options.t411_action == 'list-cats':
print_categories(parent_category_name=options.category)
elif options.t411_action == 'add-auth':
add_credential(username=options.username, password=options.password)
elif options.t411_action == 'list-auth':
pass
elif options.t411_action == 'list-terms':
print_terms(category_name=options.category, term_type_name=options.type)
def add_credential(username, password):
"""
Add (or update) credential into database
:param username:
:param password:
:return:
"""
proxy = T411Proxy()
is_new = proxy.add_credential(username=username, password=password)
if is_new:
console('Credential successfully added')
else:
console('Credential successfully updated')
def print_terms(category_name=None, term_type_name=None):
proxy = T411Proxy()
proxy.set_credential()
    formatting_main = '%-60s %-5s %-5s'
formatting_sub = ' %-55s %-5s %-5s'
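    # %-60s / %-5s left-justify each column to a fixed width, so the
    # Name/PID/ID rows printed below line up as a table.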
console(formatting_main % ('Name', 'PID', 'ID'))
if term_type_name:
console("Not yet implemented !")
else:
with Session() as session:
categories = proxy.find_categories(category_name=category_name, is_sub_category=True, session=session)
for category in categories:
                console(formatting_main % (category.name, category.parent_id, category.id))
for term_type in category.term_types:
console(formatting_main % (term_type.name, '', term_type.id))
for term in term_type.terms:
console(formatting_sub % (term.name, term_type.id, term.id))
def print_categories(parent_category_name=None):
"""
Print category and its sub-categories
:param parent_category_name: if None, all categories will be displayed
:return:
"""
proxy = T411Proxy()
proxy.set_credential()
with Session() as session:
if parent_category_name is None:
categories = proxy.main_categories(session=session)
else:
categories = proxy.find_categories(parent_category_name, session=session)
formatting_main = '%-30s %-5s %-5s'
formatting_sub = ' %-25s %-5s %-5s'
console(formatting_main % ('Category name', 'PID', 'ID'))
for category in categories:
console(formatting_main % (category.name, category.parent_id, category.id))
for sub_category in category.sub_categories:
console(formatting_sub % (sub_category.name, sub_category.parent_id, sub_category.id))
@event('options.register')
def register_parser_arguments():
# Register the command
parser = options.register_command('t411', do_cli, help='view and manipulate the Torrent411 plugin database')
# Set up our subparsers
action_parsers = parser.add_subparsers(title='actions', metavar='<action>', dest='t411_action')
auth_parser = action_parsers.add_parser('add-auth', help='authorize Flexget to access your Torrent411 account')
auth_parser.add_argument('username', metavar='<username>', help='Your t411 username')
auth_parser.add_argument('password', metavar='<password>', help='Your t411 password')
list_categories_parser = action_parsers.add_parser('list-cats', help='list available categories on Torrent411')
list_categories_parser.add_argument('category',
nargs='?',
metavar='<category>',
help='limit list to all, main or sub categories (default: %(default)s)')
list_terms = action_parsers.add_parser('list-terms', help='list available terms usable on Torrent411')
list_terms.add_argument('--category', help='show terms only for this category')
list_terms.add_argument('--type', help='show terms only for this term type')
ric2b/Vivaldi-browser | chromium/ios/build/bots/scripts/xcode_util_test.py | Python | bsd-3-clause | 20,961 | 0.004389
#!/usr/bin/env vpython
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for xcode_util.py."""
import logging
import mock
import os
import unittest
import test_runner_errors
import test_runner_test
import xcode_util
_XCODEBUILD_VERSION_OUTPUT_12 = b"""Xcode 12.4
Build version 12D4e
"""
_XCODEBUILD_VERSION_OUTPUT_13 = b"""Xcode 13.0
Build version 13A5155e
"""
class XcodeUtilTest(test_runner_test.TestCase):
"""Test class for xcode_util functions."""
def setUp(self):
super(XcodeUtilTest, self).setUp()
@mock.patch(
'subprocess.check_output', return_value=_XCODEBUILD_VERSION_OUTPUT_13)
def test_version(self, _):
"""Tests xcode_util.version()"""
version, build_version = xcode_util.version()
self.assertEqual(version, '13.0')
self.assertEqual(build_version, '13a5155e')
@mock.patch(
'subprocess.check_output', return_value=_XCODEBUILD_VERSION_OUTPUT_12)
def test_using_xcode_12(self, _):
"""Tests xcode_util.using_xcode_11_or_higher"""
self.assertTrue(xcode_util.using_xcode_11_or_higher())
self.assertFalse(xcode_util.using_xcode_13_or_higher())
@mock.patch(
'subprocess.check_output', return_value=_XCODEBUILD_VERSION_OUTPUT_13)
def test_using_xcode_13(self, _):
"""Tests xcode_util.using_xcode_13_or_higher"""
self.assertTrue(xcode_util.using_xcode_11_or_higher())
self.assertTrue(xcode_util.using_xcode_13_or_higher())
class InstallTest(XcodeUtilTest):
"""Test class for xcode_util.install function."""
def setUp(self):
super(InstallTest, self).setUp()
self.mac_toolchain = 'mac_toolchain'
self.xcode_build_version = 'TestXcodeVersion'
self.xcode_app_path = 'test/path/Xcode.app'
self.runtime_cache_folder = 'test/path/Runtime'
self.ios_version = '14.4'
@mock.patch('xcode_util.move_runtime', autospec=True)
@mock.patch('xcode_util._install_runtime', autospec=True)
@mock.patch('xcode_util._install_xcode', autospec=True)
def test_legacy_mactoolchain_new_xcode(self, mock_install_xcode,
mock_install_runtime,
mock_move_runtime):
self.mock(xcode_util, '_using_new_mac_toolchain', lambda cmd: False)
self.mock(xcode_util, '_is_legacy_xcode_package', lambda path: False)
with self.assertRaises(test_runner_errors.XcodeMacToolchainMismatchError):
is_legacy_xcode = xcode_util.install(self.mac_toolchain,
self.xcode_build_version,
self.xcode_app_path)
self.assertTrue(is_legacy_xcode, 'install should return true')
mock_install_xcode.assert_called_with('mac_toolchain', 'TestXcodeVersion',
'test/path/Xcode.app', False)
self.assertFalse(mock_install_runtime.called,
'_install_runtime shouldn\'t be called')
self.assertFalse(mock_move_runtime.called,
'move_runtime shouldn\'t be called')
@mock.patch('xcode_util.move_runtime', autospec=True)
@mock.patch('xcode_util._install_runtime', autospec=True)
@mock.patch('xcode_util._install_xcode', autospec=True)
def test_legacy_mactoolchain_legacy_xcode(self, mock_install_xcode,
mock_install_runtime,
mock_move_runtime):
self.mock(xcode_util, '_using_new_mac_toolchain', lambda cmd: False)
self.mock(xcode_util, '_is_legacy_xcode_package', lambda path: True)
is_legacy_xcode = xcode_util.install(self.mac_toolchain,
self.xcode_build_version,
self.xcode_app_path)
    self.assertTrue(is_legacy_xcode, 'install should return true')
mock_install_xcode.assert_called_with('mac_toolchain', 'TestXcodeVersion',
'test/path/Xcode.app', False)
self.assertFalse(mock_install_runtime.called,
'_install_runtime shouldn\'t be called')
self.assertFalse(mock_move_runtime.called,
'move_runtime shouldn\'t be called')
  @mock.patch('xcode_util.move_runtime', autospec=True)
@mock.patch('xcode_util._install_runtime', autospec=True)
@mock.patch('xcode_util._install_xcode', autospec=True)
def test_new_mactoolchain_legacy_xcode(self, mock_install_xcode,
                                         mock_install_runtime,
                                         mock_move_runtime):
self.mock(xcode_util, '_using_new_mac_toolchain', lambda cmd: True)
self.mock(xcode_util, '_is_legacy_xcode_package', lambda path: True)
is_legacy_xcode = xcode_util.install(self.mac_toolchain,
self.xcode_build_version,
self.xcode_app_path)
self.assertTrue(is_legacy_xcode, 'install should return true')
mock_install_xcode.assert_called_with('mac_toolchain', 'TestXcodeVersion',
'test/path/Xcode.app', True)
self.assertFalse(mock_install_runtime.called,
'_install_runtime shouldn\'t be called')
self.assertFalse(mock_move_runtime.called,
'move_runtime shouldn\'t be called')
@mock.patch('xcode_util.move_runtime', autospec=True)
@mock.patch('xcode_util._install_runtime')
@mock.patch('xcode_util._install_xcode')
def test_new_mactoolchain_new_xcode(self, mock_install_xcode,
mock_install_runtime, mock_move_runtime):
self.mock(xcode_util, '_using_new_mac_toolchain', lambda cmd: True)
self.mock(xcode_util, '_is_legacy_xcode_package', lambda path: False)
is_legacy_xcode = xcode_util.install(
self.mac_toolchain,
self.xcode_build_version,
self.xcode_app_path,
runtime_cache_folder=self.runtime_cache_folder,
ios_version=self.ios_version)
self.assertFalse(is_legacy_xcode, 'install should return False')
mock_install_xcode.assert_called_with('mac_toolchain', 'TestXcodeVersion',
'test/path/Xcode.app', True)
mock_install_runtime.assert_called_with('mac_toolchain',
'test/path/Runtime',
'TestXcodeVersion', '14.4')
mock_move_runtime.assert_called_with('test/path/Runtime',
'test/path/Xcode.app', True)
@mock.patch('xcode_util.move_runtime', autospec=True)
@mock.patch('xcode_util._install_runtime')
@mock.patch('xcode_util._install_xcode')
def test_new_mactoolchain_new_xcode_no_runtime(self, mock_install_xcode,
mock_install_runtime,
mock_move_runtime):
self.mock(xcode_util, '_using_new_mac_toolchain', lambda cmd: True)
self.mock(xcode_util, '_is_legacy_xcode_package', lambda path: False)
is_legacy_xcode = xcode_util.install(
self.mac_toolchain,
self.xcode_build_version,
self.xcode_app_path,
runtime_cache_folder=None,
ios_version=None)
self.assertFalse(is_legacy_xcode, 'install should return False')
mock_install_xcode.assert_called_with('mac_toolchain', 'TestXcodeVersion',
'test/path/Xcode.app', True)
self.assertFalse(mock_install_runtime.called)
self.assertFalse(mock_move_runtime.called)
class HelperFunctionTests(XcodeUtilTest):
"""Test class for xcode_util misc util functions."""
def setUp(self):
super(HelperFunctionTests, self).setUp()
self.xcode_runtime_dir_rel_path = (
'Contents/Developer/'
'Platforms/iPhoneOS.platform/Library/Developer/'
'CoreSimulator/Profiles/Runtimes')
self.xcode_runtime_rel_path = (
'Contents/Developer/'
'Platforms/iPhoneOS.platform/Library/Developer/'
'CoreSimulator/Profiles/Runtimes/iOS.simruntime')
@mock.patch('su
jeremiahyan/odoo | addons/account_test/__manifest__.py | Python | gpl-3.0 | 1,166 | 0.005146
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2011 CCI Connect asbl (http://www.cciconnect.be) All Rights Reserved.
# Philmer <philmer@cciconnect.be>
{
'name': 'Accounting Consistency Tests',
'version': '1.0',
'category': 'Accounting/Accounting',
'description': """
Asserts on accounting.
======================
With this module you can manually check consistencies and inconsistencies of accounting module from menu Reporting/Accounting/Accounting Tests.
You can write a query in order to create Consistency Test and you will get the result of the test
in PDF format which can be accessed by Menu Reporting -> Accounting Tests, then select the test
and print the report from Print button in header area.
""",
'depends': ['account'],
    'data': [
'security/ir.model.access.csv',
'views/accounting_assert_test_views.xml',
'report/accounting_assert_test_reports.xml',
'data/accounting_assert_test_data.xml',
'report/report_account_test_templates.xml',
],
    'installable': True,
'license': 'LGPL-3',
}
letolab/airy | airy/core/db.py | Python | bsd-2-clause | 116 | 0.008621
from airy.core.conf import settings
from mongoengine import *
connect(getattr(settings, 'database_name', 'airy'))
robsco-git/spreadsheet_server | example_client.py | Python | gpl-2.0 | 2,456 | 0.004072
# Copyright (C) 2016 Robert Scott
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import shutil, os
from client import SpreadsheetClient
if __name__ == "__main__":
"""This script shows how the differnet functions exposed by client.py can be
used."""
EXAMPLE_SPREADSHEET = "example.ods"
# Copy the example spreadsheet from the tests directory into the spreadsheets
# directory
shutil.copyfile(
os.path.join("tests", EXAMPLE_SPREADSHEET),
os.path.join("spreadsheets", EXAMPLE_SPREADSHEET)
)
SHEET_NAME = "Sheet1"
print("Waiting for the example spreadsheet to be scanned and loaded into LibreOffice.")
sc = SpreadsheetClient(EXAMPLE_SPREADSHEET)
# Get sheet names
sheet_names = sc.get_sheet_names()
print(sheet_names)
# Set a cell value
sc.set_cells(SHEET_NAME, "A1", 5)
# Retrieve a cell value.
cell_value = sc.get_cells(SHEET_NAME, "C3")
print(cell_value)
# Set a one dimensional cell range.
# Cells are set using the format: [A1, A2, A3]
cell_values = [1, 2, 3]
sc.set_cells(SHEET_NAME, "A1:A3", cell_va
|
lues)
# Retrieve one dimensional cell range.
cell_values = sc.get_cells(SHEET_NAME, "C1:C3")
print(cell_values)
# Set a two dimensional cell range.
# Cells are set using the format: [[A1, B1, C1], [A2, B2, C2], [A3, B3, C3]]
cell_values = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
sc.set_cells(SHEET_NAME, "A1:C3", cell_values)
    # Retrieve a two dimensional cell range.
cell_values = sc.get_cells(SHEET_NAME, "A1:C3")
print(cell_values)
# Save a spreadsheet - it will save into ./saved_spreadsheets
sc.save_spreadsheet(EXAMPLE_SPREADSHEET)
sc.disconnect()
os.remove(os.path.join("spreadsheets", EXAMPLE_SPREADSHEET))
BitPrepared/morso | 12.04/10.04/morso/PreferencesMorsoDialog.py | Python | gpl-2.0 | 3,829 | 0.00235
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# This file is in the public domain
### END LICENSE
from desktopcouch.records.server import CouchDatabase
from desktopcouch.records.record import Record
import gtk
from morso.helpers import get_builder
import gettext
from gettext import gettext as _
gettext.textdomain('morso')
class PreferencesMorsoDialog(gtk.Dialog):
__gtype_name__ = "PreferencesMorsoDialog"
preferences = {}
def __new__(cls):
"""Special static method that's automatically called by Python when
constructing a new instance of this class.
Returns a fully instantiated PreferencesMorsoDialog object.
"""
builder = get_builder('PreferencesMorsoDialog')
new_object = builder.get_object("preferences_morso_dialog")
new_object.finish_initializing(builder)
return new_object
def finish_initializing(self, builder):
"""Called while initializing this instance in __new__
        finish_initializing should be called after parsing the ui definition
        and creating a PreferencesMorsoDialog object with it in order to
        finish initializing the start of the new PreferencesMorsoDialog
instance.
Put your initialization code in here and leave __init__ undefined.
"""
# Get a reference to the builder and set up the signals.
self.builder = builder
self.builder.connect_signals(self)
# Set up couchdb and the preference info.
self._db_name = "morso"
self._database = CouchDatabase(self._db_name, create=True)
self._preferences = None
self._key = None
# Set the record type and then initalize the preferences.
self._record_type = (
"http://wiki.ubuntu.com/Quickly/RecordTypes/Morso/"
"Preferences")
self._preferences = self.get_preferences()
# TODO: code for other initialization actions should be added here
def get_preferences(self):
"""Return a dict of preferences for morso.
Creates a couchdb record if necessary.
"
|
""
        if self._preferences is None:
            # The dialog is initializing.
            self._load_preferences()
        # If there were no saved preferences, the defaults set up in
        # _load_preferences are returned.
return self._preferences
def _load_preferences(self):
# TODO: add preferences to the self._preferences dict default
# preferences that will be overwritten if some are saved
self._preferences = {"record_type": self._record_type}
results = self._database.get_records(
record_type=self._record_type, create_view=True)
if len(results.rows) == 0:
# No preferences have ever been saved, save them before returning.
self._key = self._database.put_record(Record(self._preferences))
else:
self._preferences = results.rows[0].value
del self._preferences['_rev']
self._key = results.rows[0].value["_id"]
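            # _key is the couchdb document id; _save_preferences() uses it
            # to update this same record in place.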
def _save_preferences(self):
self._database.update_fields(self._key, self._preferences)
def ok(self, widget, data=None):
"""The user has elected to save the changes.
        Called before the dialog returns gtk.RESPONSE_OK from run().
"""
# Make any updates to self._preferences here. e.g.
#self._preferences["preference1"] = "value2"
self._save_preferences()
def cancel(self, widget, data=None):
"""The user has elected cancel changes.
Called before the dialog returns gtk.RESPONSE_CANCEL for run()
"""
# Restore any changes to self._preferences here.
pass
if __name__ == "__main__":
dialog = PreferencesMorsoDialog()
dialog.show()
gtk.main()
Anatoscope/sofa | applications/plugins/SofaPython/doc/SofaDays_oct2013/3_OneParticle/controller.py | Python | lgpl-2.1 | 195 | 0.030769
import Sofa
import particle
class Tuto3(Sofa.PythonScriptController):
    # optionally, the script can create a graph...
def createGraph(self,node):
particle.oneParticleSample(node)
        return 0
calpeyser/google-cloud-python | vision/tests/unit/test_client.py | Python | apache-2.0 | 26,626 | 0
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import unittest
import mock
IMAGE_CONTENT = b'/9j/4QNURXhpZgAASUkq'
IMAGE_SOURCE = 'gs://some/image.jpg'
PROJECT = 'PROJECT'
B64_IMAGE_CONTENT = base64.b64encode(IMAGE_CONTENT).decode('ascii')
def _make_credentials():
import google.auth.credentials
return mock.Mock(spec=google.auth.credentials.Credentials)
class TestClient(unittest.TestCase):
@staticmethod
def _get_target_class():
from google.cloud.vision.client import Client
return Client
def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_ctor(self):
creds = _make_credentials()
client = self._make_one(project=PROJECT, credentials=creds)
self.assertEqual(client.project, PROJECT)
def test_annotate_with_preset_api(self):
credentials = _make_credentials()
client = self._make_one(project=PROJECT, credentials=credentials)
vision_api = client._vision_api
vision_api._connection = _Connection()
annotate = mock.Mock(return_value=mock.sentinel.annotated, spec=[])
api = mock.Mock(annotate=annotate, spec=['annotate'])
client._vision_api_internal = api
client._vision_api.annotate()
annotate.assert_called_once_with()
def test_make_gax_client(self):
from google.cloud.vision._gax import _GAPICVisionAPI
credentials = _make_credentials()
client = self._make_one(
project=PROJECT, credentials=credentials, _use_grpc=None)
vision_api = client._vision_api
vision_api._connection = _Connection()
with mock.patch('google.cloud.vision.client._GAPICVisionAPI',
spec=True):
self.assertIsInstance(client._vision_api, _GAPICVisionAPI)
def test_make_http_client(self):
from google.cloud.vision._http import _HTTPVisionAPI
credentials = _make_credentials()
client = self._make_one(
project=PROJECT, credentials=credentials, _use_grpc=False)
self.assertIsInstance(client._vision_api, _HTTPVisionAPI)
def test_face_annotation(self):
from google.cloud.vision.annotations import Annotations
from google.cloud.vision.feature import Feature, FeatureTypes
from tests.unit._fixtures import FACE_DETECTION_RESPONSE
returned = FACE_DETECTION_RESPONSE
request = {
"requests": [
{
"image": {
"content": B64_IMAGE_CONTENT,
},
"features": [
{
"maxResults": 3,
"type": "FACE_DETECTION",
},
],
},
],
}
credentials = _make_credentials()
client = self._make_one(
project=PROJECT, credentials=credentials, _use_grpc=False)
vision_api = client._vision_api
connection = _Connection(returned)
vision_api._connection = connection
features = [
            Feature(feature_type=FeatureTypes.FACE_DETECTION, max_results=3),
]
image = client.image(content=IMAGE_CONTENT)
images = ((image, features),)
api_response = client._vision_api.annotate(images)
self.assertEqual(len(api_response), 1)
response = api_response[0]
self.assertEqual(
request, connection._requested[0]['data'])
self.assertIsInstance(response, Annotations)
def test_image_with_client_gcs_source(self):
from google.cloud.vision.image import Image
credentials = _make_credentials()
client = self._make_one(project=PROJECT, credentials=credentials)
gcs_image = client.image(source_uri=IMAGE_SOURCE)
self.assertIsInstance(gcs_image, Image)
self.assertEqual(gcs_image.source, IMAGE_SOURCE)
def test_image_with_client_raw_content(self):
from google.cloud.vision.image import Image
credentials = _make_credentials()
client = self._make_one(
project=PROJECT, credentials=credentials, _use_grpc=False)
raw_image = client.image(content=IMAGE_CONTENT)
self.assertIsInstance(raw_image, Image)
self.assertEqual(raw_image.content, IMAGE_CONTENT)
def test_image_with_client_filename(self):
from mock import mock_open
from mock import patch
from google.cloud.vision.image import Image
credentials = _make_credentials()
client = self._make_one(
project=PROJECT, credentials=credentials, _use_grpc=False)
with patch('google.cloud.vision.image.open',
mock_open(read_data=IMAGE_CONTENT)) as m:
file_image = client.image(filename='my_image.jpg')
m.assert_called_once_with('my_image.jpg', 'rb')
self.assertIsInstance(file_image, Image)
self.assertEqual(file_image.content, IMAGE_CONTENT)
def test_multiple_detection_from_content(self):
import copy
from google.cloud.vision.feature import Feature
from google.cloud.vision.feature import FeatureTypes
from tests.unit._fixtures import LABEL_DETECTION_RESPONSE
from tests.unit._fixtures import LOGO_DETECTION_RESPONSE
returned = copy.deepcopy(LABEL_DETECTION_RESPONSE)
logos = copy.deepcopy(LOGO_DETECTION_RESPONSE['responses'][0])
returned['responses'][0]['logoAnnotations'] = logos['logoAnnotations']
credentials = _make_credentials()
client = self._make_one(project=PROJECT, credentials=credentials,
_use_grpc=False)
vision_api = client._vision_api
connection = _Connection(returned)
vision_api._connection = connection
limit = 2
label_feature = Feature(FeatureTypes.LABEL_DETECTION, limit)
logo_feature = Feature(FeatureTypes.LOGO_DETECTION, limit)
features = [label_feature, logo_feature]
image = client.image(content=IMAGE_CONTENT)
detected_items = image.detect(features)
self.assertEqual(len(detected_items), 1)
items = detected_items[0]
self.assertEqual(len(items.logos), 2)
self.assertEqual(len(items.labels), 3)
first_logo = items.logos[0]
second_logo = items.logos[1]
self.assertEqual(first_logo.description, 'Brand1')
self.assertEqual(first_logo.score, 0.63192177)
self.assertEqual(second_logo.description, 'Brand2')
self.assertEqual(second_logo.score, 0.5492993)
first_label = items.labels[0]
second_label = items.labels[1]
third_label = items.labels[2]
self.assertEqual(first_label.description, 'automobile')
self.assertEqual(first_label.score, 0.9776855)
self.assertEqual(second_label.description, 'vehicle')
self.assertEqual(second_label.score, 0.947987)
self.assertEqual(third_label.description, 'truck')
self.assertEqual(third_label.score, 0.88429511)
requested = connection._requested
requests = requested[0]['data']['requests']
image_request = requests[0]
label_request = image_request['features'][0]
logo_request = image_request['features'][1]
self.assertEqual(B64_IMAGE_CONTENT,
image_request['image']['content'])
self.assertEqual(label_request['maxResults'], 2)
self.assertEqual(label_request['type'], 'LABEL_DETECTION')
self.assertEqual(logo_request['maxResults'], 2)
lavizhao/shopping | python/create_trans_table.py | Python | gpl-2.0 | 2,625 | 0.013415
#coding: utf-8
import csv
from read_conf import config
import MySQLdb as mysql
conn = mysql.connect(host='localhost',user='root',passwd='111111111',port=3306)
cur = conn.cursor()
def create_db():
count = cur.execute('create database if not exists shopping;')
print "create database",count
result = cur.fetchmany(count)
print result
conn.commit()
# cid is the customer id
def create_trans_table():
conn.select_db('shopping')
cur = conn.cursor()
count = cur.execute('create table trans (trans_id int primary key,cid varchar(40),chain varchar(40),dept varchar(40),category varchar(40),company varchar(40),brand varchar(40),date varchar(40),productsize varchar(40),productmeasure varchar(40),purchasequantity varchar(40),purchaseamount varchar(40)) ENGINE = MyISAM ')
print "create table train",count
result = cur.fetchmany(count)
print result
conn.commit()
def insert_trans(conf):
conn.select_db('shopping')
f = open(conf["reduction_trans_dir"])
reader = csv.reader(f)
a = 0
for line in reader:
row_string = '"'+str(a)+'","'+'","'.join(line)+'"'
cur.execute('insert into trans values(%s);'%(row_string))
a += 1
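        # Report progress (and commit) every 10000 rows while loading the CSV.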
if a % 10000 == 0 :
conn.commit()
print a
conn.commit()
def drop_table():
conn.select_db('shopping')
cur = conn.cursor()
count = cur.execute('drop table trans')
print "drop table train",count
result = cur.fetchmany(count)
print result
conn.commit()
# Build the index: create index cindex using btree on trans(cid);
def search_table(cid):
conn.select_db('shopping')
cur = conn.cursor()
count = cur.execute('select * from trans where cid = "%s"'%(cid))
result = cur.fetchmany(count)
return result
def search_table_with_ccb(category,company,brand):
conn.select_db('shopping')
cur = conn.cursor()
    count = cur.execute('select productmeasure from trans where category = "%s" and \
company = "%s" and brand = "%s"'%(category,company,brand))
result = cur.fetchone()
return result[0]
def build_index():
conn.select_db('shopping')
cur = conn.cursor()
    count = cur.execute('create index cindex using btree on trans(cid);')
result = cur.fetchmany(count)
return result
if __name__ == '__main__':
print "hello"
data_position_conf = config("../conf/data_position.conf")
drop_table()
create_db()
create_trans_table()
insert_trans(data_position_conf)
build_index()
#result = search_table('86246')
#print result[0]
quintusdias/glymur | tests/test_callbacks.py | Python | mit | 3,032 | 0
"""
Test suite for openjpeg's callback functions.
"""
# Standard library imports ...
from io import StringIO
import warnings
import unittest
from unittest.mock import patch
# Local imports ...
import glymur
from . import fixtures
@unittest.skipIf(
fixtures.OPENJPEG_NOT_AVAILABLE, fixtures.OPENJPEG_NOT_AVAILABLE_MSG
)
class TestSuite(fixtures.TestCommon):
"""Test suite for callbacks."""
def test_info_callback_on_write_backwards_compatibility(self):
"""
SCENARIO: write to a J2K file while in verbose mode
EXPECTED RESULT: verify messages from the library
"""
j = glymur.Jp2k(self.jp2file)
with warnings.catch_warnings():
# Ignore a library warning.
warnings.simplefilter('ignore')
tiledata = j.read(tile=0)
with patch('sys.stdout', new=StringIO()) as fake_out:
glymur.Jp2k(self.temp_j2k_filename, data=tiledata, verbose=True)
actual = fake_out.getvalue().strip()
expected = '[INFO] tile number 1 / 1'
self.assertEqual(actual, expected)
def test_info_callback_on_write(self):
"""
SCENARIO: write to a JP2 file while in verbose mode
EXPECTED RESULT: verify messages from the library
"""
j = glymur.Jp2k(self.jp2file)
tiledata = j[:]
        with patch('sys.stdout', new=StringIO()) as fake_out:
            glymur.Jp2k(self.temp_jp2_filename, data=tiledata, verbose=True)
actual = fake_out.getvalue().strip()
expected = '[INFO] tile number 1 / 1'
self.assertEqual(actual, expected)
def test_info_callbacks_on_read(self):
"""
SCENARIO: the verbose attribute is set to True
EXPECTED RESULT: The info callback handler should be enabled. There
should be [INFO] output present in sys.stdout.
"""
jp2 = glymur.Jp2k(self.j2kfile)
with patch('sys.stdout', new=StringIO()) as fake_out:
jp2.verbose = True
jp2[::2, ::2]
actual = fake_out.getvalue().strip()
self.assertIn('[INFO]', actual)
@unittest.skipIf(
not fixtures.HAVE_SCIKIT_IMAGE, fixtures.HAVE_SCIKIT_IMAGE_MSG
)
def test_info_callbacks_on_writing_tiles(self):
"""
SCENARIO: the verbose attribute is set to True
EXPECTED RESULT: The info callback handler should be enabled. There
should be [INFO] output present in sys.stdout.
"""
jp2_data = fixtures.skimage.data.moon()
shape = jp2_data.shape[0] * 3, jp2_data.shape[1] * 2
tilesize = (jp2_data.shape[0], jp2_data.shape[1])
j = glymur.Jp2k(
self.temp_jp2_filename, shape=shape, tilesize=tilesize,
verbose=True
)
with patch('sys.stdout', new=StringIO()) as fake_out:
for tw in j.get_tilewriters():
tw[:] = jp2_data
actual = fake_out.getvalue().strip()
self.assertIn('[INFO] tile number', actual)
davidzchen/tensorflow | tensorflow/python/data/util/sparse_test.py | Python | apache-2.0 | 12,352 | 0.002995
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for utilities working with arbitrarily nested structures."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.util import nest
from tensorflow.python.data.util import sparse
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
class SparseTest(test.TestCase):
def testAnySparse(self):
test_cases = (
{
"classes": (),
"expected": False
},
{
"classes": (ops.Tensor),
"expected": False
},
{
"classes": (((ops.Tensor))),
"expected": False
},
{
"classes": (ops.Tensor, ops.Tensor),
"expected": False
},
{
"classes": (ops.Tensor, sparse_tensor.SparseTensor),
"expected": True
},
{
"classes": (sparse_tensor.SparseTensor, sparse_tensor.SparseTensor),
"expected":
True
},
{
"classes": (sparse_tensor.SparseTensor, ops.Tensor),
"expected": True
},
{
"classes": (((sparse_tensor.SparseTensor))),
"expected": True
},
)
for test_case in test_cases:
self.assertEqual(
sparse.any_sparse(test_case["classes"]), test_case["expected"])
def assertShapesEqual(self, a, b):
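    # a and b are deliberately re-bound by the loop below; zip() evaluates
    # its arguments before the first iteration, so the rebinding is safe.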
for a, b in zip(nest.flatten(a), nest.flatten(b)):
self.assertEqual(a.ndims, b.ndims)
if a.ndims is None:
continue
for c, d in zip(a.as_list(), b.as_list()):
self.assertEqual(c, d)
def testAsDenseShapes(self):
test_cases = (
{
"types": (),
"classes": (),
"expected": ()
},
{
"types": tensor_shape.TensorShape([]),
"classes": ops.Tensor,
"expected": tensor_shape.TensorShape([])
},
{
"types": tensor_shape.TensorShape([]),
"classes": sparse_tensor.SparseTensor,
"expected": tensor_shape.unknown_shape()
},
{
"types": (tensor_shape.TensorShape([])),
"classes": (ops.Tensor),
"expected": (tensor_shape.TensorShape([]))
},
{
"types": (tensor_shape.TensorShape([])),
"classes": (sparse_tensor.SparseTensor),
"expected": (tensor_shape.unknown_shape())
},
{
"types": (tensor_shape.TensorShape([]), ()),
"classes": (ops.Tensor, ()),
"expected": (tensor_shape.TensorShape([]), ())
},
{
"types": ((), tensor_shape.TensorShape([])),
"classes": ((), ops.Tensor),
"expected": ((), tensor_shape.TensorShape([]))
},
{
"types": (tensor_shape.TensorShape([]), ()),
"classes": (sparse_tensor.SparseTensor, ()),
"expected": (tensor_shape.unknown_shape(), ())
},
{
"types": ((), tensor_shape.TensorShape([])),
"classes": ((), sparse_tensor.SparseTensor),
"expected": ((), tensor_shape.unknown_shape())
},
{
"types": (tensor_shape.TensorShape([]), (),
tensor_shape.TensorShape([])),
"classes": (ops.Tensor, (), ops.Tensor),
"expected": (tensor_shape.TensorShape([]), (),
tensor_shape.TensorShape([]))
},
{
"types": (tensor_shape.TensorShape([]), (),
tensor_shape.TensorShape([])),
"classes":
(sparse_tensor.SparseTensor, (), sparse_tensor.SparseTensor),
"expected": (tensor_shape.unknown_shape(), (),
tensor_shape.unknown_shape())
},
{
"types": ((), tensor_shape.TensorShape([]), ()),
"classes": ((), ops.Tensor, ()),
"expected": ((), tensor_shape.TensorShape([]), ())
},
{
"types": ((), tensor_shape.TensorShape([]), ()),
"classes": ((), sparse_tensor.SparseTensor, ()),
"expected": ((), tensor_shape.unknown_shape(), ())
},
)
for test_case in test_cases:
self.assertShapesEqual(
sparse.as_dense_shapes(test_case["types"], test_case["classes"]),
test_case["expected"])
def testAsDenseTypes(self):
test_cases = (
{
"types": (),
"classes": (),
"expected": ()
},
{
"types": dtypes.int32,
"classes": ops.Tensor,
"expected": dtypes.int32
},
{
"types": dtypes.int32,
"classes": sparse_tensor.SparseTensor,
"expected": dtypes.variant
},
{
"types": (dtypes.int32),
"classes": (ops.Tensor),
"expected": (dtypes.int32)
},
{
"types": (dtypes.int32),
"classes": (sparse_tensor.SparseTensor),
"expected": (dtypes.variant)
},
{
"types": (dtypes.int32, ()),
"classes": (ops.Tensor, ()),
"expected": (dtypes.int32, ())
},
{
"types": ((), dtypes.int32),
"classes": ((), ops.Tensor),
"expected": ((), dtypes.int32)
},
{
"types": (dtypes.int32, ()),
"classes": (sparse_tensor.SparseTensor, ()),
"expected": (dtypes.variant, ())
},
{
"types": ((), dtypes.int32),
"classes": ((), sparse_tensor.SparseTensor),
"expected": ((), dtypes.variant)
},
{
"types": (dtypes.int32, (), dtypes.int32),
"classes": (ops.Tensor, (), ops.Tensor),
"expected": (dtypes.int32, (), dtypes.int32)
},
{
"types": (dtypes.int32, (), dtypes.int32),
"classes": (sparse_tensor.SparseTensor, (),
sparse_tensor.SparseTensor),
"expected": (dtypes.variant, (), dtypes.variant)
},
{
"types":
|
((), dtypes.int32, ()),
|
"classes": ((), ops.Tensor, ()),
"expected": ((), dtypes.int32, ())
},
{
"types": ((), dtypes.int32, ()),
"classes": ((), sparse_tensor.SparseTensor, ()),
"expected": ((), dtypes.variant, ())
},
)
for test_case in test_cases:
self.assertEqual(
sparse.as_dense_types(test_case["types"], test_case["classes"]),
test_case["expected"])
def testGetClasses(self):
s = sparse_tensor.SparseTensor(indices=[[0]], values=[1], dense_shape=[1])
d = ops.Tensor
t = sparse_tensor.SparseTensor
test_cases = (
{
"classes": (),
"expected": ()
},
{
"classes": s,
"expected": t
},
{
"classes": constant_op.constant([1]),
"expected": d
},
{
"classes": (s),
"expected": (t)
xiexiao/zzz | zzz.py | Python | mit | 1,198 | 0.013356
'''
zzz
'''
import os
import mako.lookup
import tornado.options
import tornado.httpserver
import tornado.web
import tornado.template
from tornado.options import define, options
CURRENT_PATH = os.path.dirname(__file__)
define('port', default=8081, help="run on the given port", type=int)
define('debug', default=False, help="run on debug mode", type=bool)
from utils import route, route_add
import config
import handlers
import admins
def main():
'''main'''
tornado.options.parse_command_line()
routes = route.get_routes()
    template_path = os.path.join(CURRENT_PATH, 'templates')
settings = dict(
template_path=template_path,
static_path=os.path.join(CURRENT_PATH, 'static'),
xsrf_cookies=True,
cookie_secret=config.cookie_secret,
debug=options.debug,
)
app = tornado.web.Application(
routes,
**settings
)
    # template lookup
app.lookup = mako.lookup.TemplateLookup(
directories = template_path,
input_encoding = 'utf-8',
output_encoding = 'utf-8'
)
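    # Handlers are presumably expected to render through app.lookup (mako)
    # rather than tornado's built-in template loader.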
app.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
plotly/plotly.py | packages/python/plotly/plotly/validators/histogram2d/_colorscale.py | Python | mit | 495 | 0.00404
import _plotly_utils.basevalidators
class ColorscaleValidator(_plotly_utils.basevalidators.ColorscaleValidator):
def __init__(self, plotly_name="colorscale", parent_name="histogram2d", **kwargs):
super(ColorscaleValidator, self).__init__(
plotly_name=plotly_name,
            parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
implied_edits=kwargs.pop("implied_edits", {"autocolorscale": False}),
**kwargs
)
ribacq/utaxc | items/Monster.py | Python | gpl-3.0 | 519 | 0.04817
#!/bin/python3
# -*- coding: utf-8 -*-
import curses;
from .Creature import Creature;
from random import choice;
"""A module containing the Monster class"""
class Monster(Creature):
"""The Monster class"""
def __init__(self, env, y, x, char):
"""Constructor"""
super(Monster, self).__init__(env, y, x, char, 7, 1);
self.possible_running_actions.extend(['move']);
        self.add_running_action('move');
def ra_move(self):
"""Move running action for the monster"""
        self.move(choice(('left', 'right')));
helloztq/helloztq.github.io | _drafts/encrypt.py | Python | mit | 2,578 | 0.009697
#coding:utf-8
import os, sys, io
import shutil
import struct
_DELTA = 0x9E3779B9
def _long2str(v, w):
n = (len(v) - 1) << 2
if w:
m = v[-1]
if (m < n - 3) or (m > n): return ''
n = m
s = struct.pack('<%iL' % len(v), *v)
if w :
return s[0:n]
else :
return s
def _str2long(s, w):
n = len(s)
m = (4 - (n & 3) & 3) + n
s = s.ljust(m, "\0")
v = list(struct.unpack('<%iL' % (m >> 2), s))
if w: v.append(n)
return v
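# _str2long packs a byte string into little-endian 32-bit words (padding with
# NULs), optionally appending the original length; _long2str reverses this,
# using that stored length to strip the padding again.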
def encrypt(str, key):
if str == '': return str
v = _str2long(str, True)
k = _str2long(key.ljust(16, "\0"), False)
n = len(v) - 1
z = v[n]
y = v[0]
sum = 0
q = 6 + 52 // (n + 1)
while q > 0:
sum = (sum + _DELTA) & 0xffffffff
e = sum >> 2 & 3
for p in xrange(n):
y = v[p + 1]
v[p] = (v[p] + ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z))) & 0xffffffff
z = v[p]
y = v[0]
v[n] = (v[n] + ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[n & 3 ^ e] ^ z))) & 0xffffffff
z = v[n]
q -= 1
return _long2str(v, False)
def decrypt(str, key):
if str == '': return str
v = _str2long(str, False)
    k = _str2long(key.ljust(16, "\0"), False)
n = len(v) - 1
z = v[n]
y = v[0]
q = 6 + 52 // (n + 1)
sum = (q * _DELTA) & 0xffffffff
while (sum != 0):
e = sum >> 2 & 3
for p in xrange(n, 0, -1):
z = v[p - 1]
v[p] = (v[p] - ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z))) & 0xffffffff
y = v[p]
z = v[n]
v[0] = (v[0] - ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[0 & 3 ^ e] ^ z))) & 0xffffffff
y = v[0]
sum = (sum - _DELTA) & 0xffffffff
return _long2str(v, True)
key = "f!3CvF@*5T72afcM"
sign= "UPVIsndj"
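# A minimal round-trip sketch (illustrative, not part of the original file;
# Python 2 strings are bytes here, matching the struct-based helpers above):
#   ciphertext = encrypt("hello world", key)
#   assert decrypt(ciphertext, key) == "hello world"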
def encrypt_file(filepath, encryptname = ""):
f = io.open(filepath, "rb")
s = f.read()
f.close()
if s[:len(sign)] == sign:
print("[ERROR:]" + filepath)
else:
f = io.open(filepath + encryptname, "wb")
f.write(sign + encrypt(s, key))
f.close()
def decrypt_file(filepath):
f = io.open(filepath, "rb")
s = f.read()
f.close()
if s[:len(sign)] == sign:
f = io.open(filepath + ".unencrypt", "wb")
f.write(decrypt(s[len(sign):], key))
f.close()
else:
print("not encrypt file!!")
if __name__ == '__main__':
# traverse(dst_path)
print("------- over --------")
jay-lau/magnum | magnum/db/sqlalchemy/alembic/versions/e647f5931da8_add_insecure_registry_to_baymodel.py | Python | apache-2.0 | 1,007 | 0.001986
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add insecure_registry to baymodel
Revision ID: e647f5931da8
Revises: 049f81f6f584
Create Date: 2016-03-28 09:08:07.467102
"""
# revision identifiers, used by Alembic.
revision = 'e647f5931da8'
down_revision = '049f81f6f584'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('baymodel', sa.Column('insecure_registry',
sa.String(length=255), nullable=True))
mhoffma/micropython | tests/basics/unpack1.py | Python | mit | 1,794 | 0.036232
# unpack sequences
a, = 1, ; print(a)
a, b = 2, 3 ; print(a, b)
a, b, c = 1, 2, 3; print(a, b, c)
a, = range(1); print(a)
a, b = range(2); print(a, b)
a, b, c = range(3); print(a, b, c)
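# note: in the next line (a) is plain parentheses, not a 1-tuple, so a is
# bound to the range object itself rather than unpacked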
(a) = range(1); print(a)
(a,) = range(1); print(a)
(a, b) = range(2); print(a, b)
(a, b, c) = range(3); print(a, b, c)
(a, (b, c)) = [-1, range(2)]; print(a, b, c)
# lists
[] = []
[a] = range(1); print(a)
[a, b] = range(2); print(a, b)
[a, b, c] = range(3); print(a, b, c)
# with star
*a, = () ; print(a)
*a, = 4, ; print(a)
*a, = 5, 6 ; print(a)
*a, b = 7, ; print(a, b)
*a, b = 8, 9 ; print(a, b)
*a, b = 10, 11, 12 ; print(a, b)
a, *b = 13, ; print(a, b)
a, *b = 14, 15 ; print(a, b)
a, *b = 16, 17, 18 ; print(a, b)
a, *b, c = 19, 20 ; print(a, b)
a, *b, c = 21, 22, 23 ; print(a, b)
a, *b, c = 24, 25, 26, 27 ; print(a, b)
a = [28, 29]
*b, = a
print(a, b, a == b)
[*a] = [1, 2, 3]
print(a)
try:
a, *b, c = (30,)
except ValueError:
print("ValueError")
# with star and generic iterator
*a, = range(5) ; print(a)
*a, b = range(5) ; print(a, b)
*a, b, c = range(5) ; print(a, b, c)
a, *b = range(5) ; print(a, b)
a, *b, c = range(5) ; print(a, b, c)
a, *b, c, d = range(5) ; print(a, b, c, d)
a, b, *c = range(5) ; print(a, b, c)
a, b, *c, d = range(5) ; print(a, b, c, d)
a, b, *c, d, e = range(5) ; print(a, b, c, d, e)
*a, = [x * 2 for x in [1, 2, 3, 4]] ; print(a)
*a, b = [x * 2 for x in [1, 2, 3, 4]] ; print(a, b)
a, *b = [x * 2 for x in [1, 2, 3, 4]] ; print(a, b)
a, *b, c = [x * 2 for x in [1, 2, 3, 4]]; print(a, b, c)
try:
a, *b, c = range(0)
except ValueError:
print("ValueError")
try:
a, *b, c = range(1)
except ValueError:
print("ValueError")
alex/warehouse | tests/unit/accounts/test_views.py | Python | apache-2.0 | 12,295 | 0
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import uuid
import freezegun
import pretend
import pytest
from pyramid.httpexceptions import HTTPMovedPermanently, HTTPSeeOther
from warehouse.accounts import views
from warehouse.accounts.interfaces import IUserService, TooManyFailedLogins
from ...common.db.accounts import UserFactory
class TestFailedLoginView:
exc = TooManyFailedLogins(resets_in=datetime.timedelta(seconds=600))
request = pretend.stub()
resp = views.failed_logins(exc, request)
assert resp.status == "429 Too Many Failed Login Attempts"
assert resp.detail == (
"There have been too many unsuccessful login attempts. Please try "
"again later."
)
assert dict(resp.headers).get("Retry-After") == "600"
class TestUserProfile:
def test_user_redirects_username(self, db_request):
user = UserFactory.create()
if user.username.upper() != user.username:
username = user.username.upper()
else:
username = user.username.lower()
db_request.current_route_path = pretend.call_recorder(
lambda username: "/user/the-redirect/"
)
db_request.matchdict = {"username": username}
result = views.profile(user, db_request)
assert isinstance(result, HTTPMovedPermanently)
assert result.headers["Location"] == "/user/the-redirect/"
assert db_request.current_route_path.calls == [
pretend.call(username=user.username),
]
def test_returns_user(self, db_request):
user = UserFactory.create()
assert views.profile(user, db_request) == {
"user": user,
"projects": [],
}
class TestLogin:
@pytest.mark.parametrize("next_url", [None, "/foo/bar/", "/wat/"])
def test_get_returns_form(self, pyramid_request, next_url):
user_service = pretend.stub()
pyramid_request.find_service = pretend.call_recorder(
lambda iface, context: user_service
)
form_obj = pretend.stub()
form_class = pretend.call_recorder(lambda d, user_service: form_obj)
if next_url is not None:
pyramid_request.GET["next"] = next_url
result = views.login(pyramid_request, _form_class=form_class)
assert result == {
"form": form_obj,
"redirect": {"field": "next", "data": next_url},
}
assert pyramid_request.find_service.calls == [
pretend.call(IUserService, context=None),
]
assert form_class.calls == [
pretend.call(pyramid_request.POST, user_service=user_service),
]
@pytest.mark.parametrize("next_url", [None, "/foo/bar/", "/wat/"])
def test_post_invalid_returns_form(self, pyramid_request, next_url):
user_service = pretend.stub()
pyramid_request.find_service = pretend.call_recorder(
lambda iface, context: user_service
)
pyramid_request.method = "POST"
if next_url is not None:
pyramid_request.POST["next"] = next_url
form_obj = pretend.stub(validate=pretend.call_recorder(lambda: False))
form_class = pretend.call_recorder(lambda d, user_service: form_obj)
result = views.login(pyramid_request, _form_class=form_class)
assert result == {
"form": form_obj,
"redirect": {"field": "next", "data": next_url},
}
assert pyramid_request.find_service.calls == [
pretend.call(IUserService, context=None),
]
assert form_class.calls == [
pretend.call(pyramid_request.POST, user_service=user_service),
]
assert form_obj.validate.calls == [pretend.call()]
@pytest.mark.parametrize("with_user", [True, False])
def test_post_validate_redirects(self, monkeypatch, pyramid_request,
with_user):
remember = pretend.call_recorder(
lambda request, user_id: [("foo", "bar")]
)
monkeypatch.setattr(views, "remember", remember)
new_session = {}
user_id = uuid.uuid4()
user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda username: user_id),
update_user=pretend.call_recorder(lambda *a, **kw: None),
)
pyramid_request.find_service = pretend.call_recorder(
lambda iface, context: user_service
)
pyramid_request.method = "POST"
pyramid_request.session = pretend.stub(
items=lambda: [("a", "b"), ("foo", "bar")],
update=new_session.update,
invalidate=pretend.call_recorder(lambda: None),
new_csrf_token=pretend.call_recorder(lambda: None),
)
        pyramid_request.set_property(
lambda r: str(uuid.uuid4()) if with_user else None,
name="unauthenticated_userid",
)
form_obj = pretend.stub(
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data="theuser"),
)
form_class = pretend.call_recorder(lambda d, user_service: form_obj)
now = datetime.datetime.utcnow()
with freezegun.freeze_time(now):
            result = views.login(pyramid_request, _form_class=form_class)
assert isinstance(result, HTTPSeeOther)
assert result.headers["Location"] == "/"
assert result.headers["foo"] == "bar"
assert form_class.calls == [
pretend.call(pyramid_request.POST, user_service=user_service),
]
assert form_obj.validate.calls == [pretend.call()]
assert user_service.find_userid.calls == [pretend.call("theuser")]
assert user_service.update_user.calls == [
pretend.call(user_id, last_login=now),
]
if with_user:
assert new_session == {}
else:
assert new_session == {"a": "b", "foo": "bar"}
assert remember.calls == [pretend.call(pyramid_request, str(user_id))]
assert pyramid_request.session.invalidate.calls == [pretend.call()]
assert pyramid_request.find_service.calls == [
pretend.call(IUserService, context=None),
pretend.call(IUserService, context=None),
]
assert pyramid_request.session.new_csrf_token.calls == [pretend.call()]
@pytest.mark.parametrize(
# The set of all possible next URLs. Since this set is infinite, we
# test only a finite set of reasonable URLs.
("expected_next_url, observed_next_url"),
[
("/security/", "/security/"),
("http://example.com", "/"),
],
)
def test_post_validate_no_redirects(self, pyramid_request,
expected_next_url, observed_next_url):
user_service = pretend.stub(
find_userid=pretend.call_recorder(lambda username: 1),
update_user=lambda *a, **k: None,
)
pyramid_request.find_service = pretend.call_recorder(
lambda iface, context: user_service
)
pyramid_request.method = "POST"
pyramid_request.POST["next"] = expected_next_url
form_obj = pretend.stub(
validate=pretend.call_recorder(lambda: True),
username=pretend.stub(data="theuser"),
)
form_class = pretend.call_recorder(lambda d, user_service: form_obj)
result = views.login(pyramid_request, _form_class=form_class)
assert isinstance(result, HTTPSeeOther)
assert result.headers["Location"] == observed_next_url
class TestLogout:
@pytest.mark.param
|
dianshen/python_day
|
s12day10/rabbitMQ_rpc_serverl.py
|
Python
|
apache-2.0
| 951
| 0.010515
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import pika
import time
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='rpc_queue')
def fib(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fib(n-1) + fib(n-2)
def on_request(ch, method, props, body):
n = int(body)
print(" [.] fib(%s)" % n)
response = fib(n)
ch.basic_publish(exchange='',
routing_key=props.reply_to,
                     properties=pika.BasicProperties(correlation_id = \
                                                         props.correlation_id),
body=str(response))
ch.basic_ack(delivery_tag = method.delivery_tag)
channel.basic_qos(prefetch_count=1)
channel.basic_consume(on_request, queue='rpc_queue')
print(" [x] Awaiting RPC requests")
channel.start_consuming()
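# --- Editorial addition: a minimal sketch of the matching RPC client (the
# classic RabbitMQ tutorial pattern), shown as comments because
# start_consuming() above blocks and this belongs in a separate script.
# The queue name 'rpc_queue' comes from the server above; the class name and
# callback queue are illustrative. Assumes the same pika 0.x API used above.
#
# import uuid
#
# class FibRpcClient(object):
#     def __init__(self):
#         self.connection = pika.BlockingConnection(
#             pika.ConnectionParameters(host='localhost'))
#         self.channel = self.connection.channel()
#         # exclusive=True: a private, auto-named queue for replies
#         result = self.channel.queue_declare(exclusive=True)
#         self.callback_queue = result.method.queue
#         self.channel.basic_consume(self.on_response, no_ack=True,
#                                    queue=self.callback_queue)
#
#     def on_response(self, ch, method, props, body):
#         # only accept the reply that matches our request
#         if self.corr_id == props.correlation_id:
#             self.response = body
#
#     def call(self, n):
#         self.response = None
#         self.corr_id = str(uuid.uuid4())
#         self.channel.basic_publish(exchange='',
#                                    routing_key='rpc_queue',
#                                    properties=pika.BasicProperties(
#                                        reply_to=self.callback_queue,
#                                        correlation_id=self.corr_id),
#                                    body=str(n))
#         while self.response is None:
#             self.connection.process_data_events()
#         return int(self.response)
#
# # usage: print(FibRpcClient().call(10))  # -> 55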
|
csmtco10/SRshare
|
Reyn/AI/Generic/crypto_fitness.py
|
Python
|
gpl-2.0
| 4,583
| 0.057822
|
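# Editorial note (inferred from context, not in the original file): this module
# scores candidate substitution-cipher keys for a genetic algorithm. fitness()
# decodes crypto.txt with the candidate alphabet and scores it by how closely
# each decoded word matches entries in per-length dictionary files
# (1.txt .. 15.txt, long.txt); flip() applies the substitution; convert()
# turns a gene of integers 1..26 into the letters A..Z.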
def fitness(string, length) :
strSum = 0.0
newAlpha = convert(string.gene)
fhCrypt = open("crypto.txt")
flippedCrypt = ""
flippedCrypt = flip(fhCrypt.readline(),newAlpha)
flippedCrypt = flippedCrypt.split()
for word in flippedCrypt :
#print "Word: %s" % word
if len(word) == 1 :
fhDict = open("1.txt")
elif len(word) == 2 :
fhDict = open("2.txt")
elif len(word) == 3 :
fhDict = open("3.txt")
elif len(word) == 4 :
fhDict = open("4.txt")
elif len(word) == 5 :
fhDict = open("5.txt")
elif len(word) == 6 :
fhDict = open("6.txt")
elif len(word) == 7 :
fhDict = open("7.txt")
elif len(word) == 8 :
fhDict = open("8.txt")
elif len(word) == 9 :
fhDict = open("9.txt")
elif len(word) == 10 :
fhDict = open("10.txt")
elif len(word) == 11 :
fhDict = open("11.txt")
elif len(word) == 12 :
fhDict = open("12.txt")
elif len(word) == 13 :
fhDict = open("13.txt")
elif len(word) == 14 :
fhDict = open("14.txt")
elif len(word) == 15 :
fhDict = open("15.txt")
else :
fhDict = open("long.txt")
for line in fhDict :
count = 0.0
highest = 0.0
if len(line)-1 == len(word) :
for n in range(0,len(word)) :
if line[n].upper() == word[n] :
count += 1
highest = count / len(word)
                if highest == 1 :
print "Match: %s" % line
break
strSum += highest
fhDict.close()
string.result = strSum / len(flippedCrypt)
return string.result
def flip (string, alpha) :
temp = []
for i in range(0,len(string)) :
if string[i] == "a" or string[i] == "A" :
temp.append(alpha[0])
elif string[i] == "b" or string[i] == "B" :
temp.append(alpha[1])
        elif string[i] == "c" or string[i] == "C" :
temp.append(alpha[2])
elif string[i] == "d" or string[i] == "D" :
temp.append(alpha[3])
elif string[i] == "e" or string[i] == "E" :
temp.append(alpha[4])
elif string[i] == "f" or string[i] == "F" :
temp.append(alpha[5])
elif string[i] == "g" or string[i] == "G" :
temp.append(alpha[6])
elif string[i] == "h" or string[i] == "H" :
temp.append(alpha[7])
elif string[i] == "i" or string[i] == "I" :
temp.append(alpha[8])
elif string[i] == "j" or string[i] == "J" :
temp.append(alpha[9])
elif string[i] == "k" or string[i] == "K" :
temp.append(alpha[10])
elif string[i] == "l" or string[i] == "L" :
temp.append(alpha[11])
elif string[i] == "m" or string[i] == "M" :
temp.append(alpha[12])
elif string[i] == "n" or string[i] == "N" :
temp.append(alpha[13])
elif string[i] == "o" or string[i] == "O" :
temp.append(alpha[14])
elif string[i] == "p" or string[i] == "P" :
temp.append(alpha[15])
elif string[i] == "q" or string[i] == "Q" :
temp.append(alpha[16])
elif string[i] == "r" or string[i] == "R" :
temp.append(alpha[17])
elif string[i] == "s" or string[i] == "S" :
temp.append(alpha[18])
elif string[i] == "t" or string[i] == "T" :
temp.append(alpha[19])
elif string[i] == "u" or string[i] == "U" :
temp.append(alpha[20])
elif string[i] == "v" or string[i] == "V" :
temp.append(alpha[21])
elif string[i] == "w" or string[i] == "W" :
temp.append(alpha[22])
elif string[i] == "x" or string[i] == "X" :
temp.append(alpha[23])
elif string[i] == "y" or string[i] == "Y" :
temp.append(alpha[24])
elif string[i] == "z" or string[i] == "Z" :
temp.append(alpha[25])
else :
temp.append(string[i])
return ''.join(temp)
def convert (string) :
newAlpha = []
for i in string :
if i == 1 :
newAlpha.append("A")
elif i == 2 :
newAlpha.append("B")
elif i == 3 :
newAlpha.append("C")
elif i == 4 :
newAlpha.append("D")
elif i == 5 :
newAlpha.append("E")
elif i == 6 :
newAlpha.append("F")
elif i == 7 :
newAlpha.append("G")
elif i == 8 :
newAlpha.append("H")
elif i == 9 :
newAlpha.append("I")
elif i == 10 :
newAlpha.append("J")
elif i == 11 :
newAlpha.append("K")
elif i == 12 :
newAlpha.append("L")
elif i == 13 :
newAlpha.append("M")
elif i == 14 :
newAlpha.append("N")
elif i == 15 :
newAlpha.append("O")
elif i == 16 :
newAlpha.append("P")
elif i == 17 :
newAlpha.append("Q")
elif i == 18 :
newAlpha.append("R")
elif i == 19 :
newAlpha.append("S")
elif i == 20 :
newAlpha.append("T")
elif i == 21 :
newAlpha.append("U")
elif i == 22 :
newAlpha.append("V")
elif i == 23 :
newAlpha.append("W")
elif i == 24 :
newAlpha.append("X")
elif i == 25 :
newAlpha.append("Y")
elif i == 26 :
newAlpha.append("Z")
return newAlpha
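# Editorial note: the long elif chains in flip() and convert() are equivalent
# to arithmetic on character codes; e.g. convert() could be written as
#     newAlpha = [chr(ord('A') + i - 1) for i in string if 1 <= i <= 26]
# The explicit chains are kept above as in the original source.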
|
django-danceschool/django-danceschool
|
danceschool/vouchers/migrations/0004_customergroupvoucher.py
|
Python
|
bsd-3-clause
| 1,085
| 0.002765
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-08-09 02:47
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0020_auto_20180808_2247'),
('vouchers', '0003_auto_20170724_2103'),
]
operations = [
migrations.CreateModel(
name='CustomerGroupVoucher',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.CustomerGroup', verbose_name='Customer group')),
('voucher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='vouchers.Voucher', verbose_name='Voucher')),
],
options={
'verbose_name': 'Group-specific voucher restriction',
'verbose_name_plural': 'Group-specific voucher restrictions',
},
),
]
|
ahoarau/m3meka
|
python/scripts/m3qa/calibrate_head_s2r2.py
|
Python
|
mit
| 4,572
| 0.042651
|
#Copyright 2008, Meka Robotics
#All rights reserved.
#http://mekabot.com
#Redistribution and use in source and binary forms, with or without
#modification, are permitted.
#THIS SOFTWARE IS PROVIDED BY THE Copyright HOLDERS AND CONTRIBUTORS
#"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
#LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
#FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
#Copyright OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
#INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES INCLUDING,
#BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
#LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
#ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#POSSIBILITY OF SUCH DAMAGE.
import time
import numpy.numarray as na
#import Numeric as nu
import math
import os
import sys
import yaml
import m3.unit_conversion as m3u
from m3qa.calibrate import *
from m3qa.calibrate_sensors import *
from m3qa.calibrate_actuator_ec_r2 import *
import m3.actuator_ec_pb2 as aec
import m3qa.config_head_s2r2 as s2r2
# ######################################## Default S2 ############################################################
config_default_s2_j0={
'calib':s2r2.config_head_s2r2_actuator_j0['calib'],
'param':s2r2.config_head_s2r2_actuator_j0['param'],
'param_internal':
{
'joint_limits': [-36.0,19.0],
}
}
config_default_s2_j1={
'calib':s2r2.config_head_s2r2_actuator_j1['calib'],
'param':s2r2.config_head_s2r2_actuator_j1['param'],
'param_internal':
{
'joint_limits': [-60.0,60.0],
}
}
config_default_s2_j2={
'calib':s2r2.config_head_s2r2_actuator_j2['calib'],
'param':s2r2.config_head_s2r2_actuator_j2['param'],
'param_internal':
{
'joint_limits': [-20.0,20.0],
}
}
config_default_s2_j3={
'calib':s2r2.config_head_s2r2_actuator_j3['calib'],
    'param':s2r2.config_head_s2r2_actuator_j3['param'],
'param_internal':
{
'joint_limits': [-7.5,13.0],
}
}
config_default_s2_j4={
'calib':s2r2.config_head_s2r2_actuator_j4['calib'],
'param':s2r2.config_head_s2r2_actuator_j4['param'],
'param_internal':
{
'joint_limits': [-65.0,65.0],
}
}
config_default_s2_j5={
'calib':s2r2.config_head_s2r2_actuator_j5['calib'],
'param':s2r2.config_head_s2r2_actuator_j5['param'],
'param_internal':
{
'joint_limits': [-36.0,36.0],
}
}
config_default_s2_j6={
'calib':s2r2.config_head_s2r2_actuator_j6['calib'],
'param':s2r2.config_head_s2r2_actuator_j6['param'],
'param_internal':
{
'joint_limits': [-40.0,32.0],
}
}
# ######################################## ENS S2 ############################################################
config_default_s2_j7={
'calib':s2r2.config_head_s2r2_actuator_j7_ens_eyelids['calib'],
'param':s2r2.config_head_s2r2_actuator_j7_ens_eyelids['param'],
'param_internal':
{
'joint_limits': [0.0,191.0],
'pwm_theta': [-800,800]
}
}
# ###########################################################################
class M3Calibrate_Head_S2R2(M3CalibrateActuatorEcR2):
def __init__(self):
M3CalibrateActuatorEcR2.__init__(self)
def do_task(self,ct):
if ct=='tt':
self.reset_sensor('theta')
if self.jid>=7:
self.calibrate_theta(use_pwm=True)
else:
self.calibrate_theta()
self.write_config()
return True
if M3CalibrateActuatorEcR2.do_task(self,ct):
return True
return False
def print_tasks(self):
M3Calibrate.print_tasks(self)
print 'et: ext_temp'
print 'at: amp_temp'
print 'sa: sensor analyze'
print 'tt: calibrate theta'
print 'zt: zero theta'
def start(self,ctype):
self.joint_names=['Neck Tilt J0',
'Neck Pan J1',
'Head Roll J2',
'Head Tilt J3',
'Eye Tilt J4',
'Eye Pan Right J5',
'Eye Pan Left J6',
'Eyelids J7']
self.config_default=[
config_default_s2_j0,
config_default_s2_j1,
config_default_s2_j2,
config_default_s2_j3,
config_default_s2_j4,
config_default_s2_j5,
config_default_s2_j6,
config_default_s2_j7]
if not M3CalibrateActuatorEcR2.start(self,ctype):
return False
self.jid=int(self.comp_ec.name[self.comp_ec.name.find('_j')+2:])
self.calib_default=self.config_default[self.jid]['calib']
self.param_default=self.config_default[self.jid]['param']
self.param_internal=self.config_default[self.jid]['param_internal']
print 'Calibrating joint',self.joint_names[self.jid]
return True
|
YetAnotherNerd/requests-cache
|
requests_cache/backends/base.py
|
Python
|
bsd-2-clause
| 8,344
| 0.001558
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
requests_cache.backends.base
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Contains BaseCache class which can be used as in-memory cache backend or
extended to support persistence.
"""
from datetime import datetime
import hashlib
from copy import copy
from io import BytesIO
import requests
from ..compat import is_py2, urlencode, urlparse, urlunparse, parse_qsl
_DEFAULT_HEADERS = requests.utils.default_headers()
class BaseCache(object):
""" Base class for cache implementations, can be used as in-memory cache.
To extend it you can provide dictionary-like objects for
:attr:`keys_map` and :attr:`responses` or override public methods.
"""
def __init__(self, *args, **kwargs):
#: `key` -> `key_in_responses` mapping
self.keys_map = {}
#: `key_in_cache` -> `response` mapping
self.responses = {}
self._include_get_headers = kwargs.get("include_get_headers", False)
self._ignored_parameters = set(kwargs.get("ignored_parameters") or [])
def save_response(self, key, response):
""" Save response to cache
:param key: key for this response
:param response: response to save
.. note:: Response is reduced before saving (with :meth:`reduce_response`)
to make it picklable
"""
self.responses[key] = self.reduce_response(response), datetime.utcnow()
def add_key_mapping(self, new_key, key_to_response):
"""
Adds mapping of `new_key` to `key_to_response` to make it possible to
associate many keys with single response
:param new_key: new key (e.g. url from redirect)
:param key_to_response: key which can be found in :attr:`responses`
:return:
"""
self.keys_map[new_key] = key_to_response
def get_response_and_time(self, key, default=(None, None)):
""" Retrieves response and timestamp for `key` if it's stored in cache,
otherwise returns `default`
:param key: key of resource
:param default: return this if `key` not found in cache
:returns: tuple (response, datetime)
.. note:: Response is restored after unpickling with :meth:`restore_response`
"""
try:
if key not in self.responses:
key = self.keys_map[key]
response, timestamp = self.responses[key]
except KeyError:
return default
return self.restore_response(response), timestamp
def delete(self, key):
""" Delete `key` from cache. Also deletes all responses from response history
"""
try:
if key in self.responses:
response, _ = self.responses[key]
del self.responses[key]
else:
response, _ = self.responses[self.keys_map[key]]
del self.keys_map[key]
for r in response.history:
del self.keys_map[self.create_key(r.request)]
except KeyError:
pass
def delete_url(self, url):
""" Delete response associated with `url` from cache.
Also deletes all responses from response history. Works only for GET requests
"""
self.delete(self._url_to_key(url))
def clear(self):
""" Clear cache
"""
self.responses.clear()
self.keys_map.clear()
def has_key(self, key):
""" Returns `True` if cache has `key`, `False` otherwise
"""
return key in self.responses or key in self.keys_map
def has_url(self, url):
""" Returns `True` if cache has `url`, `False` otherwise.
Works only for GET request urls
"""
return self.has_key(self._url_to_key(url))
def _url_to_key(self, url):
session = requests.Session()
return self.create_key(session.prepare_request(requests.Request('GET', url)))
_response_attrs = ['_content', 'url', 'status_code', 'cookies',
'headers', 'encoding', 'request', 'reason', 'raw']
_raw_response_attrs = ['_original_response', 'decode_content', 'headers',
'reason', 'status', 'strict', 'version']
def reduce_response(self, response, seen=None):
""" Reduce response object to make it compatible with ``pickle``
"""
if seen is None:
seen = {}
try:
return seen[id(response)]
except KeyError:
pass
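        # `seen` maps id(original) -> reduced copy so that a response that
        # appears more than once in a redirect history is reduced only once
        # and shared, and reference cycles cannot recurse forever.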
result = _Store()
# prefetch
response.content
for field in self._response_attrs:
setattr(result, field, self._picklable_field(response, field))
seen[id(response)] = result
result.history = tuple(self.reduce_response(r, seen) for r in response.history)
return result
def _picklable_field(self, response, name):
value = getattr(response, name)
if name == 'request':
value = copy(value)
value.hooks = []
elif name == 'raw':
result = _RawStore()
for field in self._raw_response_attrs:
setattr(result, field, getattr(value, field, None))
if result._original_response is not None:
setattr(result._original_response, "fp", None) # _io.BufferedReader is not picklable
value = result
return value
def restore_response(self, response, seen=None):
""" Restore response object after unpickling
"""
        if seen is None:
seen = {}
try:
return seen[id(response)]
except KeyError:
pass
result = requests.Response()
for field in self._response_attrs:
setattr(result, field, getattr(response, field, None))
        result.raw._cached_content_ = result.content
seen[id(response)] = result
result.history = tuple(self.restore_response(r, seen) for r in response.history)
return result
def _remove_ignored_parameters(self, request):
def filter_ignored_parameters(data):
return [(k, v) for k, v in data if k not in self._ignored_parameters]
url = urlparse(request.url)
query = parse_qsl(url.query)
query = filter_ignored_parameters(query)
query = urlencode(query)
url = urlunparse((url.scheme, url.netloc, url.path, url.params, query, url.fragment))
body = request.body
content_type = request.headers.get('content-type')
if body and content_type:
if content_type == 'application/x-www-form-urlencoded':
body = parse_qsl(body)
body = filter_ignored_parameters(body)
body = urlencode(body)
elif content_type == 'application/json':
import json
body = json.loads(body)
body = filter_ignored_parameters(sorted(body.items()))
body = json.dumps(body)
return url, body
def create_key(self, request):
if self._ignored_parameters:
url, body = self._remove_ignored_parameters(request)
else:
url, body = request.url, request.body
key = hashlib.sha256()
key.update(_to_bytes(request.method.upper()))
key.update(_to_bytes(url))
if request.body:
key.update(_to_bytes(body))
else:
if self._include_get_headers and request.headers != _DEFAULT_HEADERS:
for name, value in sorted(request.headers.items()):
key.update(_to_bytes(name))
key.update(_to_bytes(value))
return key.hexdigest()
def __str__(self):
return 'keys: %s\nresponses: %s' % (self.keys_map, self.responses)
# used for saving response attributes
class _Store(object):
pass
class _RawStore(object):
# noop for cached response
def release_conn(self):
pass
# for streaming requests support
def read(self, chunk_size=1):
if not hasattr(self, "_io_with_content_"):
self._io_with_content_ = BytesIO(self._cached_content_)
        return self._io_with_content_.read(chunk_size)
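# --- Editorial addition (illustrative, not in the original module): using
# BaseCache as an in-memory cache key builder. Shown as comments because this
# record is truncated above: the `_to_bytes` helper referenced by create_key()
# is defined later in the original source, so this assumes the full module.
#
# if __name__ == '__main__':
#     cache = BaseCache(ignored_parameters=['token'])
#     session = requests.Session()
#     req = session.prepare_request(requests.Request(
#         'GET', 'http://example.com/search?q=1&token=secret'))
#     # 'token' is stripped before hashing, so the key is stable across tokens
#     print(cache.create_key(req))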
|
PXke/invenio
|
invenio/legacy/bibauthorid/merge.py
|
Python
|
gpl-2.0
| 15,747
| 0.004953
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from operator import itemgetter
from itertools import groupby, chain, imap, izip
from invenio.legacy.bibauthorid.general_utils import update_status \
, update_status_final
from invenio.legacy.bibauthorid.matrix_optimization import maximized_mapping
from invenio.legacy.bibauthorid.backinterface import update_personID_canonical_names
from invenio.legacy.bibauthorid.backinterface import get_existing_result_clusters
from invenio.legacy.bibauthorid.backinterface import get_lastname_results
from invenio.legacy.bibauthorid.backinterface import personid_name_from_signature
from invenio.legacy.bibauthorid.backinterface import personid_from_signature
from invenio.legacy.bibauthorid.backinterface import move_signature
from invenio.legacy.bibauthorid.backinterface import get_claimed_papers
from invenio.legacy.bibauthorid.backinterface import get_new_personid
from invenio.legacy.bibauthorid.backinterface import find_conflicts
from invenio.legacy.bibauthorid.backinterface import get_free_pids as backinterface_get_free_pids
from invenio.legacy.bibauthorid.backinterface import get_signature_info
from invenio.legacy.bibauthorid.backinterface import delete_empty_persons
from invenio.legacy.bibauthorid.backinterface import get_bibrefrec_to_pid_flag_mapping
def merge_static_classy():
'''
This function merges aidPERSONIDPAPERS with aidRESULTS.
Use it after tortoise.
This function is static: if aid* tables are changed while it's running,
probably everything will crash and a black hole will open, eating all your data.
NOTE: this is more elegant that merge_static but much slower. Will have to be improved
before it can replace it.
'''
class Sig(object):
def __init__(self, bibrefrec, pid_flag):
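            # pid_flag holds (person_id, flag) pairs: flag <= -2 marks a
            # rejected attribution, -2 < flag < 2 an algorithmic assignment,
            # and flag >= 2 a user claim (mirroring the filters below).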
self.rejected = dict(filter(lambda p: p[1] <= -2, pid_flag))
self.assigned = filter(lambda p:-2 < p[1] and p[1] < 2, pid_flag)
self.claimed = filter(lambda p: 2 <= p[1], pid_flag)
self.bibrefrec = bibrefrec
assert self.invariant()
def invariant(self):
return len(self.assigned) + len(self.claimed) <= 1
def empty(self):
            return not self.isclaimed() and not self.isassigned()
def isclaimed(self):
return len(self.claimed) == 1
def get_claimed(self):
return self.claimed[0][0]
def get_assigned(self):
return self.assigned[0][0]
def isassigned(self):
return len(self.assigned) == 1
def isrejected(self, pid):
return pid in self.rejected
def change_pid(self, pid):
assert self.invariant()
assert self.isassigned()
self.assigned = [(pid, 0)]
move_signature(self.bibrefrec, pid)
class Cluster(object):
def __init__(self, pid, sigs):
self.pid = pid
self.sigs = dict((sig.bibrefrec[2], sig) for sig in sigs if not sig.empty())
def send_sig(self, other, sig):
paper = sig.bibrefrec[2]
assert paper in self.sigs and paper not in other.sigs
del self.sigs[paper]
other.sigs[paper] = sig
if sig.isassigned():
sig.change_pid(other.pid)
last_names = frozenset(name[0].split('.')[0] for name in get_existing_result_clusters())
personid = get_bibrefrec_to_pid_flag_mapping()
free_pids = backinterface_get_free_pids()
for idx, last in enumerate(last_names):
update_status(float(idx) / len(last_names), "Merging, %d/%d current: %s" % (idx, len(last_names), last))
results = ((int(row[0].split(".")[1]), row[1:4]) for row in get_lastname_results(last))
# [(last name number, [bibrefrecs])]
results = [(k, map(itemgetter(1), d)) for k, d in groupby(sorted(results, key=itemgetter(0)), key=itemgetter(0))]
# List of dictionaries.
# [{new_pid -> N}]
matr = []
# Set of all old pids.
old_pids = set()
for k, ds in results:
pids = []
for d in ds:
pid_flag = filter(lambda x: x[1] > -2, personid.get(d, []))
if pid_flag:
assert len(pid_flag) == 1
pid = pid_flag[0][0]
pids.append(pid)
old_pids.add(pid)
matr.append(dict((k, len(list(d))) for k, d in groupby(sorted(pids))))
old_pids = list(old_pids)
best_match = maximized_mapping([[row.get(old, 0) for old in old_pids] for row in matr])
# [[bibrefrecs] -> pid]
matched_clusters = [(results[new_idx][1], old_pids[old_idx]) for new_idx, old_idx, _ in best_match]
not_matched_clusters = frozenset(xrange(len(results))) - frozenset(imap(itemgetter(0), best_match))
not_matched_clusters = izip((results[i][1] for i in not_matched_clusters), free_pids)
# pid -> Cluster
clusters = dict((pid, Cluster(pid, [Sig(bib, personid.get(bib, [])) for bib in sigs]))
for sigs, pid in chain(matched_clusters, not_matched_clusters))
todo = clusters.items()
for pid, clus in todo:
assert clus.pid == pid
for paper, sig in clus.sigs.items():
if sig.isclaimed():
if sig.get_claimed() != pid:
target_clus = clusters[sig.get_claimed()]
if paper in target_clus.sigs:
new_clus = Cluster(free_pids.next(), [])
                        target_clus.send_sig(new_clus, target_clus.sigs[paper])
todo.append(new_clus)
clusters[new_clus.pid] = new_clus
assert paper not in target_clus.sigs
clus.send_sig(target_clus, sig)
elif sig.get_assigned() != pid:
if not sig.isrejected(pid):
move_signature(sig.bibrefrec, pid)
else:
move_signature(sig.bibrefrec, free_pids.next())
else:
assert not sig.isrejected(pid)
update_status_final("Merging done.")
update_status_final()
delete_empty_persons()
update_personID_canonical_names()
def merge_static():
'''
This function merges aidPERSONIDPAPERS with aidRESULTS.
Use it after tortoise.
This function is static: if aid* tables are changed while it's running,
probably everything will crash and a black hole will open, eating all your data.
'''
last_names = frozenset(name[0].split('.')[0] for name in get_existing_result_clusters())
def get_free_pids():
while True:
yield get_new_personid()
free_pids = get_free_pids()
current_mapping = get_bibrefrec_to_pid_flag_mapping()
def move_sig_and_update_mapping(sig, old_pid_flag, new_pid_flag):
move_signature(sig, new_pid_flag[0])
current_mapping[sig].remove(old_pid_flag)
current_mapping[sig].append(new_pid_flag)
def try_move_signature(sig, target_pid):
"""
"""
paps = current_mapping[sig]
rejected = filter(lambda p: p[1] <= -2, paps)
assigned = filter(lambda p:-2 < p[1] and
|
SICOM/rlib
|
src/examples/python/xml.py
|
Python
|
gpl-2.0
| 388
| 0.002577
|
#!/usr/bin/python
# -*- coding: ISO-8859-15 -*-
import rlib
myreport = rlib.Rlib()
print rlib.version
myreport.add_datasource_xml("local_xml")
myreport.add_query_as("local_xml", "data.xml", "data")
myreport.add_report("array.xml")
myreport.set_output_format_from_text("pdf")
myreport.execute()
print myreport.get_content_type_as_text()
open('xml.pdf','wb').write(myreport.get_output())
|
HarkonenBade/jadepunk-chargen
|
jadepunk/assets.py
|
Python
|
mit
| 15,523
| 0.001611
|
import enum
import math
from .attrs import AttrTypes
class AssetTypes(enum.Enum):
DEVICE = "Device"
TECH = "Technique"
ALLY = "Ally"
class Asset(object):
class Prop(object):
allowed_types = []
def __init__(self):
self.master = None
self.extra_flaw = False
self.max_ranks = math.inf
def set_master(self, master):
self.master = master
def name(self):
return type(self).__name__
def err_txt(self, txt, *args):
return self.master.err_txt("Property ({}): {}", self.name(), txt.format(*args))
def _type_restrict(self, val):
if len(self.allowed_types) > 0 and self.master.type not in self.allowed_types:
val.err(self.err_txt("Can only be applied to {}",
" and ".join([t.value for t in self.allowed_types])))
def _req_situational(self, val):
if not self.master.has_prop(Asset.Situational):
val.err(self.err_txt("Requires Situational"))
def _check_max_ranks(self, val):
if hasattr(self, "ranks"):
if self.ranks > self.max_ranks:
val.err(self.err_txt("Cannot have more than {} ranks.",
self.max_ranks))
def render(self, engine):
if hasattr(self, "ranks"):
if hasattr(self, "desc"):
return "{} {} ({})".format(engine.italics(self.name()),
self.ranks,
self.desc(engine))
else:
return "{} {}".format(engine.italics(self.name()),
self.ranks)
else:
if hasattr(self, "desc"):
return "{} ({})".format(engine.italics(self.name()),
                                            self.desc(engine))
else:
return "{}".format(engine.italics(self.name()))
def validate(self, val):
self._type_restrict(val)
self._check_max_ranks(val)
def additional_flaw(self):
return self.extra_flaw
def cost(self):
            if hasattr(self, "ranks"):
return self.ranks
return 1
class Feature(Prop):
pass
class Flaw(Prop):
pass
class Aspect(Feature):
allowed_types = [AssetTypes.ALLY, AssetTypes.DEVICE]
def __init__(self, aspect):
super().__init__()
self.aspect = aspect
def desc(self, engine):
return engine.boldit(self.aspect)
def validate(self, val):
super().validate(val)
if self.aspect == "":
val.err(self.err_txt("Missing details for Aspect."))
class Exceptional(Feature):
def __init__(self, txt):
super().__init__()
self.txt = txt
self.extra_flaw = True
def desc(self, _):
return self.txt
def cost(self):
return 2
class Flexible(Feature):
allowed_types = [AssetTypes.DEVICE, AssetTypes.TECH]
def __init__(self, replacing, replaced):
super().__init__()
self.replacing = replacing
self.replaced = replaced
def desc(self, _):
return "Use {} instead of {}".format(self.replacing.value, self.replaced.value)
def validate(self, val):
super().validate(val)
if self.replacing == self.replaced:
val.err(self.err_txt("Replacing and Replaced must be different"))
self._req_situational(val)
def cost(self):
return 2
class Focus(Feature):
allowed_types = [AssetTypes.DEVICE, AssetTypes.TECH]
def __init__(self, attr, ranks=1):
super().__init__()
self.attr = attr
self.ranks = ranks
def desc(self, _):
return "{} +{}".format(self.attr.value, self.ranks)
def validate(self, val):
super().validate(val)
self._req_situational(val)
class Harmful(Feature):
allowed_types = [AssetTypes.DEVICE, AssetTypes.TECH]
def __init__(self, ranks):
super().__init__()
self.ranks = ranks
class Independent(Feature):
allowed_types = [AssetTypes.ALLY]
class Numerous(Feature):
allowed_types = [AssetTypes.ALLY, AssetTypes.DEVICE]
def __init__(self, ranks):
super().__init__()
self.ranks = ranks
def desc(self, _):
return "{} copies".format(2**self.ranks)
class Professional(Feature):
allowed_types = [AssetTypes.ALLY]
def __init__(self, ranks, avg=None, fair=None):
super().__init__()
self.ranks = ranks
self.avg = avg
self.fair = fair
self.max_ranks = 3
def desc(self, _):
if self.ranks == 1:
return "{} +1".format(self.avg.value)
else:
if self.ranks == 2:
avg = self.avg.value
else:
avg = "+1, ".join([a.value for a in self.avg])
return "{} +2, {} +1".format(self.fair.value, avg)
def validate(self, val):
super().validate(val)
if self.ranks == 1 and (self.fair is not None or
self.avg is None or
not isinstance(self.avg, AttrTypes)):
val.err(self.err_txt("Professional 1 must have a single profession at average (+1)"))
if self.ranks == 2 and (self.fair is None or
self.avg is None or
not isinstance(self.avg, AttrTypes) or
not isinstance(self.fair, AttrTypes)):
val.err(self.err_txt("Professional 2 must have a single profession at fair (+2) "
"and a single at average (+1)"))
if self.ranks == 3 and (self.fair is None or
self.avg is None or
not isinstance(self.avg, list) or
not isinstance(self.fair, AttrTypes) or
len(self.avg) != 3):
val.err(self.err_txt("Professional 3 must have a single profession at fair (+2) "
"and three at average (+1)"))
def cost(self):
return self.ranks-1
class Protective(Feature):
allowed_types = [AssetTypes.DEVICE, AssetTypes.TECH]
def __init__(self, ranks):
super().__init__()
self.ranks = ranks
        def set_master(self, master):
            super().set_master(master)
            # self.master is only known once set_master runs, not in __init__
            self.extra_flaw = self.master.type == AssetTypes.TECH
            if self.master.type == AssetTypes.DEVICE:
                self.max_ranks = 2
def cost(self):
return self.ranks*2
class Resilient(Feature):
allowed_types = [AssetTypes.ALLY]
def __init__(self, ranks):
super().__init__()
self.ranks = ranks
self.max_ranks = 2
def cost(self):
return self.ranks-1
class Sturdy(Feature):
allowed_types = [AssetTypes.ALLY, AssetTypes.DEVICE]
def __init__(self, ranks):
super().__init__()
self.ranks = ranks
def set_master(self, master):
super().set_master(master)
if self.master.type == AssetTypes.DEVICE:
self.max_ranks = 2
elif self.master.type == AssetTypes.ALLY:
self.max_ranks = 3
def cost(self):
return self.ranks-1 if self.master.type == AssetTypes.ALLY else self.ranks
class Talented(Feature):
allowed_types = [AssetTypes.ALLY]
def __init__(self, a_type, prop):
super().__
|
ceball/param
|
tests/API0/testdynamicparams.py
|
Python
|
bsd-3-clause
| 9,003
| 0.003443
|
"""
Unit test for dynamic parameters.
Tests __get__, __set__ and that inspect_value() and
get_value_generator() work.
Originally implemented as doctests in Topographica in the file
testDynamicParameter.txt
"""
import copy
import unittest
import param
import numbergen
class TestDynamicParameters(unittest.TestCase):
def setUp(self):
param.Dynamic.time_dependent = False
class TestPO1(param.Parameterized):
x = param.Dynamic(default=numbergen.UniformRandom(lbound=-1,ubound=1,seed=1),
doc="nothing")
y = param.Dynamic(default=1)
class TestPO2(param.Parameterized):
x = param.Dynamic(default=numbergen.UniformRandom(lbound=-1,ubound=1,seed=30))
y = param.Dynamic(default=1.0)
self.TestPO2 = TestPO2
self.TestPO1 = TestPO1
self.t1 = self.TestPO1()
self.t2 = self.TestPO1(x=numbergen.UniformRandom(lbound=-1,ubound=1,seed=10))
self.t3 = self.TestPO1(x=numbergen.UniformRandom(lbound=-1,ubound=1,seed=10))
self.t2.set_dynamic_time_fn(None)
self.t3.set_dynamic_time_fn(None)
self.t6 = self.TestPO2()
self.t7 = self.TestPO2()
class TestDynamicParameterBasics(TestDynamicParameters):
def test_set_dynamic_time_fn_x(self):
self.t1.set_dynamic_time_fn(None)
self.assertEqual(
self.t1.params()['x']._value_is_dynamic(self.t1), True)
def test_set_dynamic_time_fn_y(self):
self.assertEqual(
self.t1.params()['y']._value_is_dynamic(self.t1), False)
def test_inspect_x(self):
"no value generated yet"
self.assertEqual(self.t1.inspect_value('x'), None)
def test_inspect_y(self):
self.assertEqual(self.t1.inspect_value('y'), 1)
def test_inspect_y_set(self):
self.t1.y = 2
self.assertEqual(self.t1.inspect_value('y'), 2)
def test_set_dynamic_numbergen(self):
is_numbergen = isinstance(self.t2.get_value_generator('x'),
numbergen.UniformRandom)
self.assertEqual(is_numbergen, True)
def test_matching_numbergen_streams(self):
"check that t2 and t3 have identical streams"
self.assertEqual(self.t2.x, self.t3.x)
def test_numbergen_objects_distinct(self):
"check t2 and t3 do not share UniformRandom objects"
self.t2.x
self.assertNotEqual(self.t2.inspect_value('x'),
self.t3.inspect_value('x'))
def test_numbergen_inspect(self):
" inspect_value() should return last generated value "
self.t2.x # Call 1
self.t2.x # Call 2
t2_last_value = self.t2.x # advance t2 beyond t3
self.assertEqual(self.t2.inspect_value('x'),
t2_last_value)
# ensure last_value is not shared
self.assertNotEqual(self.t3.inspect_value('x'), t2_last_value)
def test_dynamic_value_instantiated(self):
t6_first_value = self.t6.x
self.assertNotEqual(self.t7.inspect_value('x'),
t6_first_value)
def test_non_dynamic_value_not_instantiated(self):
" non-dynamic value not instantiated"
self.TestPO2.y = 4
self.assertEqual(self.t6.y, 4)
self.assertEqual(self.t7.y, 4)
def test_dynamic_value_setting(self):
self.t6.y = numbergen.UniformRandom()
t8 = self.TestPO2()
self.TestPO2.y = 10
# t6 got a dynamic value, but shouldn't have changed Parameter's instantiate
self.assertEqual(t8.y, 10)
def test_setting_y_param_numbergen(self):
self.TestPO2.y=numbergen.UniformRandom() # now the Parameter instantiate should be true
t9 = self.TestPO2()
self.assertEqual('_y_param_value' in t9.__dict__, True)
def test_shared_numbergen(self):
"""
Instances of TestPO2 that don't have their own value for the
parameter share one UniformRandom object
"""
self.TestPO2.y=numbergen.UniformRandom() # now the Parameter instantiate should be true
self.assertEqual(self.t7.get_value_generator('y') is self.TestPO2().params()['y'].default, True)
self.assertEqual(self.TestPO2().params()['y'].default.__class__.__name__, 'UniformRandom')
def test_copy_match(self):
"check a copy is the same"
t9 = copy.deepcopy(self.t7)
self.assertEqual(t9.get_value_generator('y') is self.TestPO2().params()['y'].default, True)
class TestDynamicTimeDependent(TestDynamicParameters):
def setUp(self):
super(TestDynamicTimeDependent, self).setUp()
param.Dynamic.time_dependent = True
class TestPO3(param.Parameterized):
x = param.Dynamic(default=numbergen.UniformRandom(name='xgen',
time_dependent=True))
class TestPO4(self.TestPO1):
"Nested parameterized objects"
z = param.Parameter(default=self.TestPO1())
self.TestPO3 = TestPO3
self.TestPO4 = TestPO4
self.t10 = self.TestPO1()
self.t11 = TestPO3()
def test_dynamic_values_unchanged_dependent(self):
param.Dynamic.time_dependent = True
call_1 = self.t10.x
call_2 = self.t10.x
call_3 = self.t10.x
self.assertEqual(call_1, call_2)
self.assertEqual(call_2, call_3)
def test_dynamic_values_changed_independent(self):
param.Dynamic.time_dependent = False
call_1 = self.t10.x
call_2 = self.t10.x
call_3 = self.t10.x
self.assertNotEqual(call_1, call_2)
self.assertNotEqual(call_2, call_3)
def test_dynamic_values_change(self):
param.Dynamic.time_dependent = True
with param.Dynamic.time_fn as t:
t(0)
call_1 = self.t10.x
t += 1
call_2 = self.t10.x
t(0)
call_3 = self.t10.x
self.assertNotEqual(call_1, call_2)
self.assertNotEqual(call_1, call_3)
def test_dynamic_values_time_dependent(self):
param.Dynamic.time_dependent = True
with param.Dynamic.time_fn as t:
t(0)
call_1 = self.t11.x
t += 1
call_2 = self.t11.x
t(0)
call_3 = self.t11.x
self.assertNotEqual(call_1, call_2)
self.assertEqual(call_1, call_3)
def test_class_dynamic_values_change(self):
call_1 = self.TestPO3.x
call_2 = self.TestPO3.x
self.assertEqual(call_1, call_2)
with param.Dynamic.time_fn as t:
t += 1
call_3 = self.TestPO3.x
self.assertNotEqual(call_2, call_3)
def test_dynamic_value_change_independent(self):
t12 = self.TestPO1()
t12.set_dynamic_time_fn(None)
self.assertNotEqual(t12.x, t12.x)
self.assertEqual(t12.y, t12.y)
def test_dynamic_value_change_disabled(self):
" time_fn set on the UniformRandom() when t13.y was set"
t13 = self.TestPO1()
t13.set_dynamic_time_fn(None)
t13.y = numbergen.UniformRandom()
self.assertNotEqual(t13.y, t13.y)
def test_dynamic_value_change_enabled(self):
" time_fn set on the UniformRandom() when t13.y was set"
t14 = self.TestPO1()
t14.y = numbergen.UniformRandom()
self.assertEqual(t14.y, t14.y)
def test_dynamic_time_fn_not_inherited(self):
" time_fn not inherited"
t15 = self.TestPO4()
t15.set_dynamic_time_fn(None)
with param.Dynamic.time_fn as t:
call_1 = t15.z.x
t += 1
call_2 = t15.z.x
self.assertNotEqual(call_1, call_2)
class TestDynamicSharedNumbergen(TestDynamicParameters):
"Check shared generator"
def setUp(self):
super(TestDynamicSharedNumbergen, self).setUp()
self.shared = numbergen.UniformRandom(lbound=-1,ubound=1,seed=20)
def test_dynamic_shared_numbergen(self):
param.Dynamic.time_dependent = True
t11 = self.TestPO1(x=self.shared)
t12 = self.TestPO1(x=self.shared)
with param.Dynamic.time_fn as t:
t += 1
|
fparrel/regepe
|
vps/regepe_flask_server.py
|
Python
|
gpl-3.0
| 30,280
| 0.024935
|
# -*- coding: utf-8 -*-
from flask import Flask,render_template,send_file,Response,flash,request,redirect,session
from werkzeug.utils import secure_filename
import json
import os.path
import os
import gzip
import urllib
from db import DbGetListOfDates,DbGet,DbGetComments,DbGetMulitple,DbGetNearbyPoints,DbPut,DbPutWithoutPassword,DbSearchWord,DbGetMapsOfUser,DbGetAllMaps,DbAddComment,CheckValidMapId,CheckValidFreetext,DbDelMap,DbChkPwd
import anydbm
import traceback
from progress import GetProgress,SetProgress
from users import CheckSession,Login,ActivateUser,SendActivationMail,ReserveUser,GetUserFromUserOrEmail,SendForgotPasswordMail
import sys
from orchestrator import BuildMap,ProcessTrkSegWithProgress,BuildMapFromTrack
from searchparser import SearchQueryParser
from sets import Set
from textutils import remove_accents
from log import Log
from mapparser import ParseMap
from model import Track
from options import options_default
from dem import GetEleFromLatLon
from computeprofile import ComputeProfile
from demize import Demize
from generate_id import uniqid
from config import keysnpwds, config
from flask_babel import Babel, gettext
from thumbnail import selectPointsForThumbnail, thumbnailUrlMapbox
# Create flask application
application = Flask(__name__)
application.config['UPLOAD_FOLDER'] = 'uploads'
application.secret_key = keysnpwds['secret_key']
## Internationalization (i18n)
babel = Babel(application)
LANGUAGES = {
'en': 'English',
'fr': 'Francais',
'es': 'Español'
}
@babel.localeselector
def get_locale():
# Uncomment for testing a specific language
#return 'es'
#return 'fr'
# Check if there is a lang in session
if session.has_key('lang'):
return session['lang']
# Else guess the lang from browser request
return request.accept_languages.best_match(LANGUAGES.keys())
@application.route('/i18n.js/<item>')
def i18n_js(item):
""" Translation strings for javascript """
assert(item in ('header','map','prepare')) #basic security check
return render_template('i18n_%s.js'%item)
@application.route('/<lang>/testi18n.js')
def test_i18n_js(lang):
""" To test i18n for javascript because js escaping is not well handled by jinja2 """
session['lang']=lang
return '<html><head></head><body>Press Ctrl+Maj+K and check no errors in console<script>'+render_template('i18n_header.js')+render_template('i18n_map.js')+'</script>'
## Index page
@application.route('/',defaults={'lang':None,'limit':10})
@application.route('/indexall',defaults={'lang':None,'limit':-1})
@application.route('/<lang>/',defaults={'limit':10})
@application.route('/<lang>/indexall',defaults={'limit':10})
def index(lang,limit):
if lang!=None:
session['lang']=lang
maplist = DbGetListOfDates()
cptr = 0
mapsout = []
for date in sorted(maplist.iterkeys(),reverse=True):
maps = maplist[date]
        for mapid in maps:
(lat,lon) = DbGet(mapid,'startpoint').split(',')
trackdesc = DbGet(mapid,'trackdesc')
trackuser = DbGet(mapid,'trackuser')
desc=trackdesc.decode('utf8')
mapsout.append({'mapid':mapid,'lat':lat,'lon':lon,'user':trackuser,'desc':desc,'date':date})
cptr += 1
if(limit>-1) and (cptr>limit):
                break
if(limit>-1) and (cptr>limit):
break
return render_template('index.html',limit=limit,maps=mapsout,GMapsApiKey=keysnpwds['GMapsApiKey'])
## GPX Export
@application.route('/togpx/<mapid>')
def togpx(mapid):
# Read map data
f=gzip.open('data/mapdata/%s.json.gz'%mapid,'rb')
mapdata=json.load(f)
f.close()
return '<?xml version="1.0" encoding="UTF-8"?>\n<gpx version="1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/0" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd"><trk><trkseg>' + ''.join(map(lambda p:'<trkpt lat="%f" lon="%f"></trkpt>'%(p[0],p[1]),mapdata['points'])) + '</trkseg></trk></gpx>'
## Thumbnails
if not os.path.isdir('data'):
os.mkdir('data')
if not os.path.isdir('data/thumbnail_cache'):
os.mkdir('data/thumbnail_cache')
@application.route('/thumbnail/<mapid>')
@application.route('/thumbnail.php',defaults={'mapid':None})
def thumbnail(mapid):
if mapid==None:
mapid = request.args.get('mapid')
filename = 'data/thumbnail_cache/%s.png'%mapid
if os.path.isfile(filename):
# Return image in cache
return send_file(filename, mimetype='image/png')
else:
ptlist = selectPointsForThumbnail(mapid)
# Build map image url
url = thumbnailUrlMapbox(ptlist)
furl = open('data/thumbnail_cache/%s.url'%(mapid),'w')
furl.write(url)
furl.close()
# Download png, put it in cache and send it
f = urllib.urlopen(url)
fcache = open(filename,'wb')
contents = f.read()
fcache.write(contents)
fcache.close()
f.close()
return contents
## Show map
@application.route('/<lang>/showmap/<mapid>', defaults={'map_type': None})
@application.route('/<lang>/showmap/<mapid>/<map_type>')
@application.route('/<lang>/showmap-flot.php',defaults={'mapid':None,'map_type': None})
@application.route('/<lang>/showmap.php',defaults={'mapid':None,'map_type': None})
@application.route('/showmap/<mapid>', defaults={'lang':None,'map_type': None})
@application.route('/showmap/<mapid>/<map_type>',defaults={'lang':None})
@application.route('/showmap-flot.php',defaults={'lang':None,'mapid':None,'map_type': None})
@application.route('/showmap.php',defaults={'lang':None,'mapid':None,'map_type': None})
def showmap(lang,mapid,map_type):
if lang!=None:
session['lang']=lang
if mapid==None:
mapid=request.args.get('mapid')
# Read map data
f=gzip.open('data/mapdata/%s.json.gz'%mapid,'rb')
mapdata=json.load(f)
f.close()
# Read map db
mapdb = anydbm.open('data/maps/%s.db'%mapid, 'r')
if map_type==None:
map_type = mapdata['type']
# Render
_mapdb={}
for key in mapdb:
_mapdb[key] = mapdb[key].decode('utf-8') # We must convert each utf8 string into unicode for jinja2
out = render_template('showmap.html',domain=config['domain'],mapid=mapid,type=map_type,mapdb=_mapdb,mapdata=mapdata,GMapsApiKey=keysnpwds['GMapsApiKey'],GeoPortalApiKey=keysnpwds['GeoPortalApiKey'])
mapdb.close()
return out
@application.route('/mapdata/<mapid>')
def mapdata(mapid):
# Read map data
f=gzip.open('data/mapdata/%s.json.gz'%mapid,'rb')
mapfromfile=json.load(f)
f.close()
return Response(render_template('mapdata.js',mapdata=mapfromfile,chartdata=json.dumps(mapfromfile['chartdata'])), mimetype='text/javascript')
@application.route('/comments/<mapid>')
def comments(mapid):
comments = DbGetComments(mapid)
return Response('<?xml version="1.0" encoding="UTF-8"?><result>%s</result>' % ''.join(map(lambda comment: '<comment user="%s" date="%s">%s</comment>' % (comment[1],comment[0],comment[2]),comments)), mimetype='text/xml')
@application.route('/sendcomment/<mapid>/<comment>')
def sendcomment(mapid,comment):
try:
user = 'unknown'
if request.form.has_key('user'):
user = request.form.getvalue('user')
if not CheckValidUserName(user):
raise Exception('Invalid user name')
sess = request.form.getvalue('sess')
if CheckSession(user,sess):
pass
else:
raise Exception(gettext('Invalid session, please re-login'))
else:
user = request.remote_addr
if not CheckValidMapId(mapid):
raise Exception(gettext('Invalid map id'))
if not CheckValidFreetext(comment):
raise Exception(gettext('Invalid map id'))
DbAddComment(mapid,user,comment)
result = 'OK'
except Exception, e:
result = str(e)
out = '<?xml version="1.0" encoding="UTF-8"?>\n<result>%s</result>'%result
return Response(out, mimetype='text/xml')
@application.route('/nearmaps/<mapid>')
def nearmaps(mapi
|
ytjia/coding-practice
|
algorithms/python/leetcode/tests/test_BinaryTreeMaximumPathSum.py
|
Python
|
mit
| 446
| 0.002242
|
# -*- coding: utf-8 -*-
# Authors: Y. Jia <ytjia.zju@gmail.com>
import unittest
from common.BinaryTree import BinaryTree
from .. import BinaryTreeMaximumPathSum
class test_BinaryTreeMaximumPathSum(unittest.TestCase):
solution = BinaryTreeMaximumPathSum.Solution()
def test_maxPathSum(self):
self.assertEqual(self.solution.maxPathSum(BinaryTree.create_tree([1, 2, 3])[0]), 6)
if __name__ == '__main__':
unittest.main()
|
botswana-harvard/edc-appointment
|
edc_appointment/exceptions.py
|
Python
|
gpl-2.0
| 161
| 0
|
class AppointmentStatusError(Exception):
pass
class AppointmentCreateError(Exception):
pass
class AppointmentSmsReminderError(Exception):
pass
|
lmazuel/azure-sdk-for-python
|
azure-batch/azure/batch/models/task_id_range.py
|
Python
|
mit
| 1,362
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TaskIdRange(Model):
"""A range of task IDs that a task can depend on. All tasks with IDs in the
range must complete successfully before the dependent task can be
scheduled.
The start and end of the range are inclusive. For example, if a range has
start 9 and end 12, then it represents tasks '9', '10', '11' and '12'.
:param start: The first task ID in the range.
:type start: int
:param end: The last task ID in the range.
:type end: int
"""
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'start', 'type': 'int'},
'end': {'key': 'end', 'type': 'int'},
}
def __init__(self, start, end):
super(TaskIdRange, self).__init__()
self.start = start
self.end = end
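# Editorial sketch (not part of the generated SDK file): constructing the
# example from the docstring above, a range that makes a task depend on
# tasks '9' through '12' inclusive.
if __name__ == '__main__':
    dep_range = TaskIdRange(start=9, end=12)
    assert (dep_range.start, dep_range.end) == (9, 12)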
|
Kiddinglife/kidding-engine
|
python/Doc/includes/mp_pool.py
|
Python
|
lgpl-3.0
| 3,892
| 0.002055
|
import multiprocessing
import time
import random
import sys
#
# Functions used by test code
#
def calculate(func, args):
result = func(*args)
return '%s says that %s%s = %s' % (
multiprocessing.current_process().name,
func.__name__, args, result
)
def calculatestar(args):
return calculate(*args)
def mul(a, b):
time.sleep(0.5 * random.random())
return a * b
def plus(a, b):
time.sleep(0.5 * random.random())
return a + b
def f(x):
return 1.0 / (x - 5.0)
def pow3(x):
return x ** 3
def noop(x):
pass
#
# Test code
#
def test():
PROCESSES = 4
print('Creating pool with %d processes\n' % PROCESSES)
with multiprocessing.Pool(PROCESSES) as pool:
#
# Tests
#
TASKS = [(mul, (i, 7)) for i in range(10)] + \
[(plus, (i, 8)) for i in range(10)]
results = [pool.apply_async(calculate, t) for t in TASKS]
imap_it = pool.imap(calculatestar, TASKS)
imap_unordered_it = pool.imap_unordered(calculatestar, TASKS)
print('Ordered results using pool.apply_async():')
for r in results:
print('\t', r.get())
print()
print('Ordered results using pool.imap():')
for x in imap_it:
print('\t', x)
print()
print('Unordered results using pool.imap_unordered():')
for x in imap_unordered_it:
print('\t', x)
print()
print('Ordered results using pool.map() --- will block till complete:')
for x in pool.map(calculatestar, TASKS):
print('\t', x)
print()
#
# Test error handling
#
print('Testing error handling:')
try:
print(pool.apply(f, (5,)))
except ZeroDivisionError:
print('\tGot ZeroDivisionError as expected from pool.apply()')
else:
raise AssertionError('expected ZeroDivisionError')
try:
print(pool.map(f, list(range(10))))
except ZeroDivisionError:
print('\tGot ZeroDivisionError as expected from pool.map()')
else:
raise AssertionError('expected ZeroDivisionError')
try:
print(list(pool.imap(f, list(range(10)))))
except ZeroDivisionError:
print('\tGot ZeroDivisionError as expected from list(pool.imap())')
else:
raise AssertionError('expected ZeroDivisionError')
it = pool.imap(f, list(range(10)))
for i in range(10):
try:
x = next(it)
except ZeroDivisionError:
if i == 5:
pass
except StopIteration:
break
else:
                if i == 5:
raise AssertionError('expected ZeroDivisionError')
assert i == 9
print('\tGot ZeroDivisionError as expected from IMapIterator.next()')
print()
#
# Testing timeouts
#
print('Testing ApplyResult.get() with timeout:', end=' ')
res = pool.apply_async(calculate, TASKS[0])
while 1:
sys.stdout.flush()
try:
sys.stdout.write('\n\t%s' % res.get(0.02))
break
except multiprocessing.TimeoutError:
sys.stdout.write('.')
print()
print()
print('Testing IMapIterator.next() with timeout:', end=' ')
it = pool.imap(calculatestar, TASKS)
while 1:
sys.stdout.flush()
try:
sys.stdout.write('\n\t%s' % it.next(0.02))
except StopIteration:
break
except multiprocessing.TimeoutError:
sys.stdout.write('.')
print()
print()
if __name__ == '__main__':
multiprocessing.freeze_support()
test()
|
adriennemcd/neighborhood-change-index
|
nc-pt1.py
|
Python
|
mit
| 19,716
| 0.005833
|
"""
THIS SCRIPT CREATES AN INDEX OF NEIGHBORHOOD CHANGE USING USER SPECIFIED INDICATORS:
1. CALCULATE Z-SCORE FOR EACH INDICATOR
2. CALCULATE NEGATIVE Z-SCORE FOR INDICATORS THAT SHOULD DETRACT FROM SCORE (I.E. VACANCY RATE)
3. ADD INDICATORS TOGETHER FOR RAW SCORE
4. DEFINE CLASSIFICATION METHOD
5. ASSIGN EACH RECORD AN INDEX SCORE FROM 1-6
FOR COMPARATIVE PURPOSES, THIS SCRIPT IS SET UP SO THAT MULTIPLE YEARS OF DATA CAN BE
IN THE SAME SHAPEFILE; IN OTHER WORDS, ONE SHAPEFILE CAN BE RUN THROUGH THE TOOL MULTIPLE TIMES
ON DIFFERENT YEARS OF VARIABLES WITHOUT ERRORS.
To create an ArcToolbox tool with which to execute this script, do the following.
1 In ArcMap > Catalog > Toolboxes > My Toolboxes, either select an existing toolbox
or right-click on My Toolboxes and use New > Toolbox to create (then rename) a new one.
2 Drag (or use ArcToolbox > Add Toolbox to add) this toolbox to ArcToolbox.
3 Right-click on the toolbox in ArcToolbox, and use Add > Script to open a dialog box.
4 In this Add Script dialog box, use Label to name the tool being created, and press Next.
5 In a new dialog box, browse to the .py file to be invoked by this tool, and press Next.
6 In the next dialog box, specify the following inputs (using dropdown menus wherever possible)
before pressing OK or Finish.
DISPLAY NAME DATA TYPE PROPERTY>DIRECTION>VALUE
Input Shapefile Shapefile Input
Fields used as indicators in index Field Input > MultiValue: Yes > Obtained from Input Shapefile
2-digit date of variable data String Input > Default '10'
Output Shapefile Shapefile Output
Indicators that should subtract from index Field Input > Type: Optional > MultiValue: Yes > Obtained from Input Shapefile
score if high (ex. high vacancy rates)
Choose Index Classification Method String Input > Filter: Value List (Quantile, Equal Interval)
Choose number of classes Double Input
To later revise any of this, right-click to the tool's name and select Properties.
"""
# Import necessary modules
import sys, os, string, math, arcpy, traceback
# Allow output file to overwrite any existing file of the same name
arcpy.env.overwriteOutput = True
try:
# Request user inputs, name variables
nameOfInputShapefile = arcpy.GetParameterAsText(0)
varFields = arcpy.GetParameterAsText(1)
yearOfData = arcpy.GetParameterAsText(2)
nameOfOutputShapefile = arcpy.GetParameterAsText(3)
negVariables = arcpy.GetParameterAsText(4)
classificationMethod = arcpy.GetParameterAsText(5)
classNumber = arcpy.GetParameterAsText(6)
varList = varFields.split(";") # a list of all variables for index
negList = negVariables.split(";") # a list of the variables from varList that should be multiplied by -1 to detract from raw score (ie vacancy rate)
zScoreList = [] # a list of the variable fields that will count towards raw score (combination of z-scores and some z-scores * -1)
nameOfOutputShapefileTemp = nameOfOutputShapefile[:-4] + "_temp.shp"
# Report input and output files
arcpy.AddMessage('\n' + "The input shapefile name is " + nameOfInputShapefile)
arcpy.AddMessage("The output shapefile name is " + nameOfOutputShapefile)
arcpy.AddMessage("This is an index for year '" + yearOfData)
arcpy.AddMessage("The variables used as indicators in the index are " + str(varList) + "\n")
# Create function to calculate mean (source: http://arcpy.wordpress.com/2012/02/01/calculate-a-mean-value-from-a-field/)
def calculate_mean_value(table, field):
stats_table = r"in_memory\stats"
arcpy.Statistics_analysis(table, stats_table, [[field, "MEAN"]])
mean_field = "MEAN_{0}".format(field)
cursor = arcpy.SearchCursor(stats_table, "", "", mean_field)
row = cursor.next()
mean_value = row.getValue(mean_field)
del cursor
return mean_value
# Create function to calculate standard deviation
def calculate_STD_value(table, field):
stats_table = r"in_memory\stats"
arcpy.Statistics_analysis(table, stats_table, [[field, "STD"]])
STD_field = "STD_{0}".format(field)
cursor = arcpy.SearchCursor(stats_table, "", "", STD_field)
row = cursor.next()
STD_value = row.getValue(STD_field)
del cursor
return STD_value
# Create function to calculate range
def calculate_range_value(table, field):
stats_table = r"in_memory\stats"
arcpy.Statistics_analysis(table, stats_table, [[field, "RANGE"]])
RNG_field = "RANGE_{0}".format(field)
cursor = arcpy.SearchCursor(stats_table, "", "", RNG_field)
row = cursor.next()
RNG_value = row.getValue(RNG_field)
del cursor
return RNG_value
# Create function to calculate minimum value
def calculate_MIN_value(table, field):
stats_table = r"in_memory\stats"
arcpy.Statistics_analysis(table, stats_table, [[field, "MIN"]])
MIN_field = "MIN_{0}".format(field)
cursor = arcpy.SearchCursor(stats_table, "", "", MIN_field)
row = cursor.next()
MIN_value = row.getValue(MIN_field)
del cursor
return MIN_value
# Create function to calculate count of items
def calculate_COUNT_value(table, field):
stats_table = r"in_memory\stats"
arcpy.Statistics_analysis(table, stats_table, [[field, "COUNT"]])
COUNT_field = "COUNT_{0}".format(field)
cursor = arcpy.SearchCursor(stats_table, "", "", COUNT_field)
row = cursor.next()
COUNT_value = row.getValue(COUNT_field)
del cursor
return COUNT_value
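# Editor's note: the five helpers above differ only in the statistic keyword
# ("MEAN", "STD", "RANGE", "MIN", "COUNT") and the derived field name. A single
# generic version, sketched against the same arcpy calls used above:
def calculate_stat_value(table, field, stat):
    stats_table = r"in_memory\stats"
    arcpy.Statistics_analysis(table, stats_table, [[field, stat]])
    stat_field = "{0}_{1}".format(stat, field)
    cursor = arcpy.SearchCursor(stats_table, "", "", stat_field)
    row = cursor.next()
    value = row.getValue(stat_field)
    del cursor
    return value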
# Replicate the input shapefile
arcpy.Copy_management(nameOfInputShapefile, nameOfOutputShapefileTemp)
""" STEP ONE: CALCULATE Z-SCORE OF EACH INDICATOR FIELD """
# Process each variable in the user-defined variable list
for variable in varList:
arcpy.AddMessage("Processing: " + variable)
# Concatenate the list order number to the field name and add a new field called "MEAN"
meanName = ("MEAN" + str(varList.index(variable)) + str("_" + yearOfData))
arcpy.AddField_management(nameOfOutputShapefileTemp, meanName, "FLOAT", 20, 10)
# Concatenate the list order number to the field name and add a new field called "STDDEV"
stdDevName = ("STDV" + str(varList.index(variable)) + str("_" + yearOfData))
arcpy.AddField_management(nameOfOutputShapefileTemp, stdDevName, "FLOAT", 20, 10)
# Concatenate the list order number to the field name and add another new field called "ZSCORE"
zName = ("ZSCR" + str(varList.index(variable)) + str("_" + yearOfData))
arcpy.AddField_management(nameOfOutputShapefileTemp, zName, "FLOAT", 20, 10)
# Create an enumeration of updatable records from the shapefile's attribute table
enumerationOfRecords = arcpy.UpdateCursor(nameOfOutputShapefileTemp)
# Loop through that enumeration, creating mean field column
for nextRecord in enumerationOfRecords:
# Calculate sample mean
mean = (calculate_mean_value(nameOfOutputShapefileTemp, variable))
nextRecord.setValue(meanName,mean)
enumerationOfRecords.updateRow(nextRecord)
# Calculate standard deviation
standardDev = (calculate_STD_value(nameOfOutputShapefileTemp, variable))
nextRecord.setValue(stdDevName,standardDev)
enumerationOfRecords.updateRow(nextRecord)
# Retrieve the row value, mean, and standard deviation
nextVar = nextRecord.getValue(variable)
nextMean = nextRecord.getValue(meanName)
nextStdDev = nextRecord.getValue(stdDevName)
# Calculate and record z-score
zSco
tensorflow/tfx | tfx/components/schema_gen/component_test.py | Python | apache-2.0 | 2,491 | 0.001204
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.components.schema_gen.component."""
import tensorflow as tf
from tfx.components.schema_gen import component
from tfx.orchestration import data_types
from tfx.types import artifact_utils
from tfx.types import channel_utils
from tfx.types import standard_artifacts
from tfx.types import standard_component_specs
class SchemaGenTest(tf.test.TestCase):
def testConstruct(self):
statistics_artifact = standard_artifacts.ExampleStatistics()
statistics_artifact.split_names = artifact_utils.encode_split_names(
['train', 'eval'])
exclude_splits = ['eval']
schema_gen = component.SchemaGen(
statistics=channel_utils.as_channel([statistics_artifact]),
exclude_splits=exclude_splits)
self.assertEqual(
standard_artifacts.Schema.TYPE_NAME,
schema_gen.outputs[standard_component_specs.SCHEMA_KEY].type_name)
self.assertTrue(schema_gen.spec.exec_properties[
standard_component_specs.INFER_FEATURE_SHAPE_KEY])
self.assertEqual(
schema_gen.spec.exec_properties[
standard_component_specs.EXCLUDE_SPLITS_KEY], '["eval"]')
def testConstructWithParameter(self):
statistics_artifact = standard_artifacts.ExampleStatistics()
statistics_artifact.split_names = artifact_utils.encode_split_names(
['train'])
infer_shape = data_types.RuntimeParameter(name='infer-shape', ptype=int)
schema_gen = component.SchemaGen(
statistics=channel_utils.as_channel([statistics_artifact]),
infer_feature_shape=infer_shape)
self.assertEqual(
standard_artifacts.Schema.TYPE_NAME,
schema_gen.outputs[standard_component_specs.SCHEMA_KEY].type_name)
self.assertJsonEqual(
str(schema_gen.spec.exec_properties[
standard_component_specs.INFER_FEATURE_SHAPE_KEY]),
str(infer_shape))
if __name__ == '__main__':
tf.test.main()
mitya57/debian-buildbot | buildbot/process/builder.py | Python | gpl-2.0 | 25,892 | 0.000657
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import weakref
from twisted.application import internet
from twisted.application import service
from twisted.internet import defer
from twisted.python import failure
from twisted.python import log
from twisted.spread import pb
from zope.interface import implements
from buildbot import config
from buildbot import interfaces
from buildbot.process import buildrequest
from buildbot.process import slavebuilder
from buildbot.process.build import Build
from buildbot.process.properties import Properties
from buildbot.process.slavebuilder import BUILDING
from buildbot.status.builder import RETRY
from buildbot.status.buildrequest import BuildRequestStatus
from buildbot.status.progress import Expectations
def enforceChosenSlave(bldr, slavebuilder, breq):
if 'slavename' in breq.properties:
slavename = breq.properties['slavename']
if isinstance(slavename, basestring):
return slavename == slavebuilder.slave.slavename
return True
class Builder(config.ReconfigurableServiceMixin,
pb.Referenceable,
service.MultiService):
# reconfigure builders before slaves
reconfig_priority = 196
def __init__(self, name, _addServices=True):
service.MultiService.__init__(self)
self.name = name
# this is created the first time we get a good build
self.expectations = None
# build/wannabuild slots: Build objects move along this sequence
self.building = []
# old_building holds active builds that were stolen from a predecessor
self.old_building = weakref.WeakKeyDictionary()
# buildslaves which have connected but which are not yet available.
# These are always in the ATTACHING state.
self.attaching_slaves = []
# buildslaves at our disposal. Each SlaveBuilder instance has a
# .state that is IDLE, PINGING, or BUILDING. "PINGING" is used when a
# Build is about to start, to make sure that they're still alive.
self.slaves = []
self.config = None
self.builder_status = None
if _addServices:
self.reclaim_svc = internet.TimerService(10 * 60,
self.reclaimAllBuilds)
self.reclaim_svc.setServiceParent(self)
# update big status every 30 minutes, working around #1980
self.updateStatusService = internet.TimerService(30 * 60,
self.updateBigStatus)
self.updateStatusService.setServiceParent(self)
def reconfigService(self, new_config):
# find this builder in the config
for builder_config in new_config.builders:
if builder_config.name == self.name:
break
else:
assert 0, "no config found for builder '%s'" % self.name
# set up a builder status object on the first reconfig
if not self.builder_status:
self.builder_status = self.master.status.builderAdded(
builder_config.name,
builder_config.builddir,
builder_config.category,
builder_config.description)
self.config = builder_config
self.builder_status.setDescription(builder_config.description)
self.builder_status.setCategory(builder_config.category)
self.builder_status.setSlavenames(self.config.slavenames)
self.builder_status.setCacheSize(new_config.caches['Builds'])
# if we have any slavebuilders attached which are no longer configured,
# drop them.
new_slavenames = set(builder_config.slavenames)
self.slaves = [s for s in self.slaves
if s.slave.slavename in new_slavenames]
return defer.succeed(None)
def stopService(self):
d = defer.maybeDeferred(lambda:
service.MultiService.stopService(self))
return d
def __repr__(self):
return "<Builder '%r' at %d>" % (self.name, id(self))
@defer.inlineCallbacks
def getOldestRequestTime(self):
"""Returns the submitted_at of the oldest unclaimed build request for
this builder, or None if there are no build requests.
@returns: datetime instance or None, via Deferred
"""
unclaimed = yield self.master.db.buildrequests.getBuildRequests(
buildername=self.name, claimed=False)
if unclaimed:
unclaimed = sorted([brd['submitted_at'] for brd in unclaimed])
defer.returnValue(unclaimed[0])
else:
defer.returnValue(None)
def reclaimAllBuilds(self):
brids = set()
for b in self.building:
brids.update([br.id for br in b.requests])
for b in self.old_building:
brids.update([br.id for br in b.requests])
if not brids:
return defer.succeed(None)
d = self.master.db.buildrequests.reclaimBuildRequests(brids)
d.addErrback(log.err, 'while re-claiming running BuildRequests')
return d
def getBuild(self, number):
for b in self.building:
if b.build_status and b.build_status.number == number:
return b
for b in self.old_building.keys():
if b.build_status and b.build_status.number == number:
return b
return None
def addLatentSlave(self, slave):
assert interfaces.ILatentBuildSlave.providedBy(slave)
for s in self.slaves:
if s == slave:
break
else:
sb = slavebuilder.LatentSlaveBuilder(slave, self)
self.builder_status.addPointEvent(
['added', 'latent', slave.slavename])
self.slaves.append(sb)
self.botmaster.maybeStartBuildsForBuilder(self.name)
def attached(self, slave, remote, commands):
"""This is invoked by the BuildSlave when the self.slavename bot
registers their builder.
@type slave: L{buildbot.buildslave.BuildSlave}
@param slave: the BuildSlave that represents the buildslave as a whole
@type remote: L{twisted.spread.pb.RemoteReference}
@param remote: a reference to the L{buildbot.slave.bot.SlaveBuilder}
@type commands: dict: string -> string, or None
@param commands: provides the slave's version of each RemoteCommand
@rtype: L{twisted.internet.defer.Deferred}
@return: a Deferred that fires (with 'self') when the slave-side
builder is fully attached and ready to accept commands.
"""
for s in self.attaching_slaves + self.slaves:
if s.slave == slave:
# already attached to them. This is fairly common, since
# attached() gets called each time we receive the builder
# list from the slave, and we ask for it each time we add or
# remove a builder. So if the slave is hosting builders
# A,B,C, and the config file changes A, we'll remove A and
# re-add it, triggering two builder-list requests, getting
# two redundant calls to attached() for B, and another two
# for C.
#
# Therefore, when we see that we're already attached, we can
# just ignore it.
return defer.succeed(self)
sb = sl
shaunduncan/breezeminder | breezeminder/views/reminder.py | Python | mit | 5,296 | 0.002455
from flask import (flash,
redirect,
render_template,
request,
url_for)
from flask.ext.login import (current_user,
fresh_login_required)
from breezeminder.app import app
from breezeminder.forms.reminder import ReminderForm, ReminderType
from breezeminder.models.reminder import Reminder, ReminderHistory
from breezeminder.util.pagination import Paginator
from breezeminder.util.views import nocache
@fresh_login_required
@nocache
def reminders():
ctx = {
'title': 'Manage Your Reminders',
'description': 'Manage reminders for your MARTA Breeze cards',
'reminders': Reminder.objects.filter(owner=current_user._get_current_object())
}
return render_template('user/reminders/index.html', **ctx)
@fresh_login_required
@nocache
def delete_reminder(pk_hash):
reminder = Reminder.objects.get_or_404(pk_hash=pk_hash,
owner=current_user._get_current_object())
reminder.delete()
flash('Reminder deleted successfully', 'success')
return redirect(url_for('user.reminders'))
@fresh_login_required
@nocache
def reminder_history(pk_hash):
reminder = Reminder.objects.get_or_404(pk_hash=pk_hash,
owner=current_user._get_current_object())
history = ReminderHistory.objects.filter(reminder=reminder)
# Paginate
try:
history = Paginator(history, page=int(request.args.get('page', 1)))
except (IndexError, ValueError):
return redirect(url_for('user.reminders.history', pk_hash=reminder.pk_hash))
context = {
'title': 'Reminder History',
'description': 'View Reminder Sent History',
'reminder': reminder,
'history': history
}
return render_template('user/reminders/history/index.html', **context)
@fresh_login_required
@nocache
def view_history(pk_hash):
record = ReminderHistory.objects.get_or_404(pk_hash=pk_hash,
owner=current_user._get_current_object())
context = {
'title': 'Reminder History Details',
'description': 'Full reminder details',
'record': record
}
return render_template('user/reminders/history/view.html', **context)
@fresh_login_required
@nocache
def edit_reminder(pk_hash):
reminder = Reminder.objects.get_or_404(pk_hash=pk_hash,
owner=current_user._get_current_object())
form = ReminderForm(request.form)
if request.method == 'POST':
if form.validate():
try:
was_changed = form.is_changed_from(reminder)
reminder = form.populate_reminder(reminder)
reminder.save()
# We need to check for new reminders only if changed
if was_changed:
reminder.check_all_cards(force=True)
flash('Reminder saved successfully', 'success')
except ReminderType.DoesNotExist:
flash('We currently do not support the type of reminder you are trying to create', 'error')
return redirect(url_for('user.reminders'))
else:
form = ReminderForm.from_reminder(reminder)
# Get cards that have been reminded
reminder_log = reminder.last_reminded()
context = {
'title': 'Edit Reminder',
'description': 'Edit Reminder for your MARTA Breeze Card',
'form': form,
'type_help': ReminderType.objects.all_tuples(field='description'),
'reminder': reminder,
'reminder_log': reminder_log
}
return render_template('user/reminders/edit.html', **context)
@fresh_login_required
@nocache
def add_reminder():
form = ReminderForm(request.form)
if request.method == 'POST':
if form.validate():
try:
reminder = form.populate_reminder(Reminder())
reminder.owner = current_user._get_current_object()
reminder.save()
# We need to check for new reminders
reminder.check_all_cards(force=True)
flash('Reminder created successfully', 'success')
except ReminderType.DoesNotExist:
flash('We currently do not support the type of reminder you are trying to create', 'error')
return redirect(url_for('user.reminders'))
context = {
'title': 'Add Reminder',
'description': 'Add a new reminder for your MARTA Breeze Card',
'form': form,
'type_help': ReminderType.objects.all_tuples(field='description'),
}
return render_template('user/reminders/add.html', **context)
app.add_url_rule('/reminders/', 'user.reminders', reminders)
app.add_url_rule('/reminders/delete/<pk_hash>', 'user.reminders.delete', delete_reminder)
app.add_url_rule('/reminders/history/<pk_hash>', 'user.reminders.history', reminder_history)
app.add_url_rule('/reminders/history/details/<pk_hash>', 'user.reminders.history.view', view_history)
app.add_url_rule('/reminders/edit/<pk_hash>', 'user.reminders.edit', edit_reminder, methods=['GET', 'POST'])
app.add_url_rule('/reminders/add/', 'user.reminders.add', add_reminder, methods=['GET', 'POST'])
rebaltina/DAT210x | Module2/assignment3.py | Python | mit | 1,075 | 0.015814
import pandas as pd
# TODO: Load up the dataset
# Ensuring you set the appropriate header column names
#
df = pd.read_csv('c:/Users/User/workspace/DAT210x/MOdule2/Datasets/servo.data', names = ['motor', 'screw', 'pgain', 'vgain', 'class'])
df.head()
# TODO: Create a slice that contains all entries
# having a vgain equal to 5. Then print the
# length of (# of samples in) that slice:
#
print(len(df[df['vgain']==5]))
# TODO: Create a slice that contains all entries
# having a motor equal to E and screw equal
# to E. Then print the length of (# of samples in) that slice:
print(len(df[(df.motor == 'E') & (df.screw == 'E')]))
# TODO: Create a slice that contains all entries
# having a pgain equal to 4. Use one of the
# various methods of finding the mean vgain
# value for the samples in that slice. Once
# you've found it, print it:
#
mf = df[df['pgain']==4]
print(mf['vgain'].mean())
# TODO: (Bonus) See what happens when you run
# the .dtypes method on your dataframe!
df.dtypes
#motor object
#screw object
#pgain int64
#vgain int64
#class float64
#dtype: object
bit-trade-one/SoundModuleAP | lib-src/portmidi/pm_python/pyportmidi/__init__.py | Python | gpl-2.0 | 25 | 0.04
from .midi import *
feureau/Small-Scripts | Blender/Blender config/2.91/scripts/addons/XNALaraMesh/write_ascii_xps.py | Python | gpl-3.0 | 5,428 | 0.000553
# <pep8 compliant>
import io
import operator
from . import read_ascii_xps
from . import xps_const
from mathutils import Vector
def writeBones(xpsSettings, bones):
bonesString = io.StringIO()
if bones:
bonesString.write('{:d} # bones\n'.format(len(bones)))
for bone in bones:
name = bone.name
parentId = bone.parentId
co = bone.co
if parentId is None:
parentId = -1
bonesString.write('{}\n'.format(name))
bonesString.write('{:d} # parent index\n'.format(parentId))
bonesString.write('{:.7G} {:.7G} {:.7G}\n'.format(*co))
bonesString.seek(0)
return bonesString
def writeMeshes(xpsSettings, meshes):
meshesString = io.StringIO()
meshesString.write('{:d} # meshes\n'.format(len(meshes)))
sortedMeshes = sorted(meshes, key=operator.attrgetter('name'))
for mesh in sortedMeshes:
# Name
meshesString.write(mesh.name + '\n')
# uv Count
meshesString.write('{:d} # uv layers\n'.format(mesh.uvCount))
# Textures
meshesString.write('{:d} # textures\n'.format(len(mesh.textures)))
for texture in mesh.textures:
meshesString.write(texture.file + '\n')
meshesString.write(
'{:d} # uv layer index\n'.format(texture.uvLayer))
# Vertices
meshesString.write('{:d} # vertices\n'.format(len(mesh.vertices)))
for vertex in mesh.vertices:
meshesString.write(
'{:.7G} {:.7G} {:.7G} # Coords\n'.format(*vertex.co))
meshesString.write('{:.7G} {:.7G} {:.7G}\n'.format(*vertex.norm))
meshesString.write('{:d} {:d} {:d} {:d}\n'.format(*vertex.vColor))
for uv in vertex.uv:
meshesString.write('{:.7G} {:.7G}\n'.format(*uv))
# if ????
# tangent????
# meshesString.write(write4float(xxx))
length = len(vertex.boneWeights)
idFormatString = ' '.join(['{:d}', ] * length)
weightFormatString = ' '.join(['{:.7G}', ] * length)
# Sort first the biggest weights
boneWeights = sorted(
vertex.boneWeights,
key=lambda bw: bw.weight,
reverse=True)
meshesString.write(
(idFormatString + '\n').format(*[bw.id for bw in boneWeights]))
meshesString.write(
(weightFormatString + '\n').format(*[bw.weight for bw in boneWeights]))
# Faces
meshesString.write('{:d} # faces\n'.format(len(mesh.faces)))
for face in mesh.faces:
meshesString.write('{:d} {:d} {:d}\n'.format(*face))
meshesString.seek(0)
return meshesString
def writePose(xpsData):
poseString = io.StringIO()
sortedPose = sorted(xpsData.items(), key=operator.itemgetter(0))
for boneData in sortedPose:
xpsBoneData = boneData[1]
boneName = xpsBoneData.boneName
rotDelta = roundRot(xpsBoneData.rotDelta)
coordDelta = roundTrans(xpsBoneData.coordDelta)
scale = roundScale(xpsBoneData.scale)
x1 = '{}: '.format(boneName)
x2 = '{:G} {:G} {:G} '.format(*rotDelta)
x3 = '{:G} {:G} {:G} '.format(*coordDelta)
x4 = '{:G} {:G} {:G} '.format(*scale)
poseString.write(x1)
poseString.write(x2)
poseString.write(x3)
poseString.write(x4)
poseString.write('\n')
poseString.seek(0)
return poseString
def writeXpsPose(filename, xpsData):
ioStream = io.StringIO()
print('Export Pose')
ioStream.write(writePose(xpsData).read())
ioStream.seek(0)
writeIoStream(filename, ioStream)
def roundRot(vector):
x = round(vector.x, 1) + 0
y = round(vector.y, 1) + 0
z = round(vector.z, 1) + 0
return Vector((x, y, z))
def roundTrans(vector):
x = round(vector.x, 4) + 0
y = round(vector.y, 4) + 0
z = round(vector.z, 4) + 0
return Vector((x, y, z))
def roundScale(vector):
x = round(vector.x, 3) + 0
y = round(vector.y, 3) + 0
z = round(vector.z, 3) + 0
return Vector((x, y, z))
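# Editor's sketch: the three rounding helpers above differ only in precision.
# A generic equivalent (keeping the `+ 0` trick, which normalizes -0.0 to 0.0):
def roundVector(vector, ndigits):
    return Vector(tuple(round(component, ndigits) + 0 for component in vector))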
def writeIoStream(filename, ioStream):
with open(filename, "w", encoding=xps_const.ENCODING_WRITE) as a_file:
a_file.write(ioStream.read())
def writeBoneDict(filename, boneDictList):
ioStream = io.StringIO()
ioStream.write(boneDictList)
ioStream.seek(0)
writeIoStream(filename, ioStream)
def writeXpsModel(xpsSettings, filename, xpsData):
ioStream = io.StringIO()
print('Writing Bones')
ioStream.write(writeBones(xpsSettings, xpsData.bones).read())
print('Writing Meshes')
ioStream.write(writeMeshes(xpsSettings, xpsData.meshes).read())
ioStream.seek(0)
writeIoStream(filename, ioStream)
if __name__ == "__main__":
readfilename = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING\Alice Returns - Mods\Alice 001 Fetish Cat\generic_item2.mesh.ascii'
writefilename = r'G:\3DModeling\XNALara\XNALara_XPS\data\TESTING\Alice Returns - Mods\Alice 001 Fetish Cat\generic_item3.mesh.ascii'
# Simulate XPS Data
# from . import mock_xps_data
# xpsData = mock_xps_data.mockData()
# import XPS File
xpsData = read_ascii_xps.readXpsModel(readfilename)
print('----WRITE START----')
writeXpsModel(None, writefilename, xpsData)  # editor's fix: the signature is (xpsSettings, filename, xpsData); None is a placeholder since the ASCII writers above never read xpsSettings
print('----WRITE END----')
umitproject/openmonitor-aggregator | registration/forms.py | Python | agpl-3.0 | 6,034 | 0.0058
"""
Forms and validation code for user registration.
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from registration.models import RegistrationProfile
# I put this on all required fields, because it's easier to pick up
# on them with CSS or JavaScript if they have a class of "required"
# in the HTML. Your mileage may vary. If/when Django ticket #3515
# lands in trunk, this will no longer be necessary.
attrs_dict = { 'class': 'required' }
class RegistrationForm(forms.Form):
"""
Form for registering a new user account.
Validates that the requested username is not already in use, and
requires the password to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they
need, but should either preserve the base ``save()`` or implement
a ``save()`` which accepts the ``profile_callback`` keyword
argument and passes it through to
``RegistrationProfile.objects.create_inactive_user()``.
"""
username = forms.RegexField(regex=r'^\w+$',
max_length=30,
widget=forms.TextInput(attrs=attrs_dict),
label=_(u'username'))
email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict,
maxlength=75)),
label=_(u'email address'))
password1 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_(u'password'))
password2 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_(u'password (again)'))
def clean_username(self):
"""
Validate that the username is alphanumeric and is not already
in use.
"""
try:
user = User.objects.get(username__iexact=self.cleaned_data['username'])
except User.DoesNotExist:
return self.cleaned_data['username']
raise forms.ValidationError(_(u'This username is already taken. Please choose another.'))
def clean(self):
"""
Verify that the values entered into the two password fields
match. Note that an error here will end up in
``non_field_errors()`` because it doesn't apply to a single
field.
"""
if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
if self.cleaned_data['password1'] != self.cleaned_data['password2']:
raise forms.ValidationError(_(u'You must type the same password each time'))
return self.cleaned_data
def save(self, request, profile_callback=None):
"""
Create the new ``User`` and ``RegistrationProfile``, and
returns the ``User``.
This is essentially a light wrapper around
``RegistrationProfile.objects.create_inactive_user()``,
feeding it the form data and a profile callback (see the
documentation on ``create_inactive_user()`` for details) if
supplied.
"""
new_user = RegistrationProfile.objects.create_inactive_user(request,
username=self.cleaned_data['username'],
password=self.cleaned_data['password1'],
email=self.cleaned_data['email'],
profile_callback=profile_callback)
return new_user
class RegistrationFormTermsOfService(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which adds a required checkbox
for agreeing to a site's Terms of Service.
"""
tos = forms.BooleanField(widget=forms.CheckboxInput(attrs=attrs_dict),
label=_(u'I have read and agree to the Terms of Service'))
def clean_tos(self):
"""
Validate that the user accepted the Terms of Service.
"""
if self.cleaned_data.get('tos', False):
return self.cleaned_data['tos']
raise forms.ValidationError(_(u'You must agree to the terms to register'))
class RegistrationFormUniqueEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which enforces uniqueness of
email addresses.
"""
def clean_email(self):
"""
Validate that the supplied email address is unique for the
site.
"""
if User.objects.filter(email__iexact=self.cleaned_data['email']):
raise forms.ValidationError(_(u'This email address is already in use. Please supply a different email address.'))
return self.cleaned_data['email']
class RegistrationFormNoFreeEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which disallows registration with
email addresses from popular free webmail services; moderately
useful for preventing automated spam registrations.
To change the list of banned domains, subclass this form and
override the attribute ``bad_domains``.
"""
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
'googlemail.com', 'hotmail.com', 'hushmail.com',
'msn.com', 'mail.ru', 'mailinator.com', 'live.com']
def clean_email(self):
"""
Check the supplied email address against a list of known free
webmail domains.
"""
email_domain = self.cleaned_data['email'].split('@')[1]
if email_domain in self.bad_domains:
raise forms.ValidationError(_(u'Registration using free email addresses is prohibited. Please supply a different email address.'))
return self.cleaned_data['email']
polegithub/shopping_web_python | shopping_web/manage.py | Python | mit | 246 | 0
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cms.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
sigmunau/nav | python/nav/mibs/etherlike_mib.py | Python | gpl-2.0 | 1,422 | 0
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Implements a EtherLike-MIB MibRetriever and associated functionality."""
from __future__ import absolute_import
from twisted.internet import defer
from . import mibretriever
class EtherLikeMib(mibretriever.MibRetriever):
"""MibRetriever for EtherLike-MIB"""
from nav.smidumps.etherlike_mib import MIB as mib
@defer.deferredGenerator
def get_duplex(self):
"""Get a mapping of ifindexes->duplex status."""
dw = defer.waitForDeferred(
self.retrieve_columns(('dot3StatsDuplexStatus',)))
yield dw
duplex = self.translate_result(dw.getResult())
result = dict([(index[0], row['dot3StatsDuplexStatus'])
for index, row in duplex.items()])
yield result
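# Editor's sketch (assumed equivalent in the newer inlineCallbacks style; kept
# as a comment so the original deferredGenerator version stays authoritative):
#
# @defer.inlineCallbacks
# def get_duplex(self):
#     """Get a mapping of ifindexes->duplex status."""
#     response = yield self.retrieve_columns(('dot3StatsDuplexStatus',))
#     duplex = self.translate_result(response)
#     defer.returnValue(dict((index[0], row['dot3StatsDuplexStatus'])
#                            for index, row in duplex.items()))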
inachen/cs171-hw4-chen-ina | ProblemGeoUSA/data_wrangle_total.py | Python | mit | 1,779 | 0.014615
import json
from pprint import pprint
import time
import io
# from http://www.codigomanso.com/en/2011/05/trucomanso-transformar-el-tiempo-en-formato-24h-a-formato-12h-python/
def ampmformat (hhmmss):
"""
This method converts time in 24h format to 12h format
Example: "00:32" is "12:32 AM"
"13:33" is "01:33 PM"
"""
ampm = hhmmss.split (":")
if (len(ampm) == 0) or (len(ampm) > 3):
return hhmmss
# is AM? from [00:00, 12:00[
hour = int(ampm[0]) % 24
isam = (hour >= 0) and (hour < 12)
# 00:32 should be 12:32 AM not 00:32
if isam:
ampm[0] = ('12' if (hour == 0) else "%02d" % (hour))
else:
ampm[0] = ('12' if (hour == 12) else "%02d" % (hour-12))
return ':'.join(ampm) + (' AM' if isam else ' PM')
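# Quick sanity check of the docstring examples (editor's addition):
assert ampmformat("00:32") == "12:32 AM"
assert ampmformat("13:33") == "01:33 PM"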
json_data=open('allData2003_2004.json')
data = json.load(json_data)
json_data.close()
# k ='690150'
# print data['690150']
output = {}
for k in data.keys():
for d in data[k]:
date = time.strptime(d['date'], "%b %d, %Y %I:%M:%S %p")
if k in output:
t = ampmformat('%02d:%02d:%02d' % (date.tm_hour, date.tm_min, date.tm_sec))
h = date.tm_hour
output[k]['sum'] += d['value']
output[k]['hourly'][h] += d['value']
else:
output[k] = { "sum": 0,
"hourly": [0]*24
}
t = ampmformat('%02d:%02d:%02d' % (date.tm_hour, date.tm_min, date.tm_sec))
h = date.tm_hour
output[k]['sum'] += d['value']
output[k]['hourly'][h] += d['value']
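# Editor's note: the two branches above duplicate the accumulation (and compute
# an unused `t`). An equivalent sketch using dict.setdefault:
#
# entry = output.setdefault(k, {"sum": 0, "hourly": [0] * 24})
# entry["sum"] += d["value"]
# entry["hourly"][date.tm_hour] += d["value"]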
f = io.open('data.json', 'w', encoding='utf-8')
f.write(unicode(json.dumps(output, ensure_ascii=False)))
f.close()
json_output=open('data.json')
output_data = json.load(json_output)
pprint(output_data)
json_output.close()
hbussell/pinax-tracker | apps/milestones/views.py | Python | mit | 10,170 | 0.003441
from datetime import date, datetime, timedelta
from itertools import chain
from operator import attrgetter
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.db.models import Q, get_app
from django.http import HttpResponse, HttpResponseRedirect, Http404, HttpResponseForbidden
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.utils.translation import ugettext
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from pinax.utils.importlib import import_module
# Only import dpaste Snippet Model if it's activated
if "dpaste" in getattr(settings, "INSTALLED_APPS"):
from dpaste.models import Snippet
else:
Snippet = False
if "notification" in getattr(settings, "INSTALLED_APPS"):
from notification import models as notification
else:
notification = None
from tagging.models import Tag
from .models import Milestone
from .filters import MilestoneFilter
from .forms import MilestoneForm, EditMilestoneForm
def milestones(request, group_slug=None, template_name="milestones/milestone_list.html", bridge=None):
if bridge:
try:
group = bridge.get_group(group_slug)
except ObjectDoesNotExist:
raise Http404
else:
group = None
if not request.user.is_authenticated():
is_member = False
else:
if group:
is_member = group.user_is_member(request.user)
else:
is_member = True
group_by = request.GET.get("group_by")
if group:
milestones = group.content_objects(Milestone)
group_base = bridge.group_base_template()
else:
milestones = Milestone.objects.filter(object_id=None)
group_base = None
# default filtering
state_keys = dict(Milestone.STATE_CHOICES).keys()
default_states = set(state_keys).difference(
# don"t show these states
set(["2"])
)
filter_data = {"state": list(default_states)}
filter_data.update(request.GET)
milestone_filter = MilestoneFilter(filter_data, queryset=milestones)
group_by_querydict = request.GET.copy()
group_by_querydict.pop("group_by", None)
group_by_querystring = group_by_querydict.urlencode()
return render_to_response(template_name, {
"group": group,
"group_by": group_by,
"gbqs": group_by_querystring,
"is_member": is_member,
"group_base": group_base,
"milestone_filter": milestone_filter,
"milestones": milestone_filter.qs,
"querystring": request.GET.urlencode(),
}, context_instance=RequestContext(request))
def add_milestone(request, group_slug=None, secret_id=None,
form_class=MilestoneForm,
template_name="milestones/add.html", bridge=None):
if bridge:
try:
group = bridge.get_group(group_slug)
except ObjectDoesNotExist:
raise Http404
else:
group = None
if group:
group_base = bridge.group_base_template()
else:
group_base = None
if not request.user.is_authenticated():
is_member = False
else:
if group:
is_member = group.user_is_member(request.user)
else:
is_member = True
initial = {}
if request.method == "POST":
if request.user.is_authenticated():
milestone_form = form_class(request.user, group, request.POST)
if milestone_form.is_valid():
milestone = milestone_form.save(commit=False)
milestone.creator = request.user
milestone.group = group
#if hasattr(workflow, "initial_state"):
# milestone.state = workflow.initial_state(milestone, user)
milestone.save()
#milestone.save_history()
messages.add_message(request, messages.SUCCESS,
ugettext("added milestone '%s'") % milestone.title
)
if notification:
if group:
notify_list = group.member_queryset()
else:
notify_list = User.objects.all() # @@@
notify_list = notify_list.exclude(id__exact=request.user.id)
notification.send(notify_list, "milestones_new",
{"creator": request.user,
"milestone": milestone, "group": group})
if request.POST.has_key("add-another-milestone"):
if group:
redirect_to = bridge.reverse("milestone_add", group)
else:
redirect_to = reverse("milestone_add")
return HttpResponseRedirect(redirect_to)
if group:
redirect_to = bridge.reverse("milestone_list", group)
else:
redirect_to = reverse("milestone_list")
return HttpResponseRedirect(redirect_to)
else:
milestone_form = form_class(request.user, group, initial=initial)
return render_to_response(template_name, {
"group": group,
"is_member": is_member,
"milestone_form": milestone_form,
"group_base": group_base,
}, context_instance=RequestContext(request))
def milestone_detail(request, id, group_slug=None,
template_name="milestones/milestone.html", bridge=None):
if bridge:
try:
group = bridge.get_group(group_slug)
except ObjectDoesNotExist:
raise Http404
else:
group = None
if group:
milestones = group.content_objects(Milestone)
group_base = bridge.group_base_template()
else:
milestones = Milestone.objects.filter(object_id=None)
group_base = None
milestone = get_object_or_404(milestones, id=id)
if group:
notify_list = group.member_queryset()
else:
notify_list = User.objects.all()
notify_list = notify_list.exclude(id__exact=request.user.id)
if not request.user.is_authenticated():
is_member = False
else:
if group:
is_member = group.user_is_member(request.user)
else:
is_member = True
## milestone tasks
from tasks.models import Task
from tasks import workflow
from tasks.filters import TaskFilter
group_by = request.GET.get("group_by")
if group:
tasks = group.content_objects(Task)
group_base = bridge.group_base_template()
else:
tasks = Task.objects.filter(object_id=None)
group_base = None
tasks = tasks.select_related("assignee")
# default filtering
state_keys = dict(workflow.STATE_CHOICES).keys()
default_states = set(state_keys)
filter_data = {"state": list(default_states)}
filter_data.update(request.GET)
task_filter = TaskFilter(filter_data, queryset=tasks)
group_by_querydict = request.GET.copy()
group_by_querydict.pop("group_by", None)
group_by_querystring = group_by_querydict.urlencode()
return render_to_response(template_name, {
"group": group,
"milestone": milestone,
"is_member": is_member,
"group_base": group_base,
"group_by": group_by,
"gbqs": group_by_querystring,
"is_member": is_member,
"task_filter": task_filter,
"tasks": task_filter.qs.filter(milestone=milestone),
"querystring": request.GET.urlencode(),
}, context_instance=RequestContext(request))
@login_required
def milestone_edit(request, id, group_slug=None,
template_name="milestones/edit.html", bridge=None):
if bridge:
try:
group = b