code stringlengths 3 1.05M | repo_name stringlengths 5 104 | path stringlengths 4 251 | language stringclasses 1 value | license stringclasses 15 values | size int64 3 1.05M |
|---|---|---|---|---|---|
from time import time
from os import remove
from matplotlib.image import imread
import json
import subprocess
import numpy as np
import matplotlib.pyplot as plt
def time_a_function(program, args):
    """Run *program* once with the single argument *args* and return the
    elapsed wall-clock time in seconds as a float."""
    started_at = time()
    subprocess.call([program, args])
    finished_at = time()
    return float(finished_at - started_at)
def clean(programs):
    """Remove every built binary listed in *programs* from disk."""
    for binary_path in programs:
        remove(binary_path)
def plot_results(times, programs, images):
    """Plot mean runtime (with std-dev error bars) against image size.

    `times` maps each program name to a list of (mean, std) tuples, one
    per image in *images*; the x axis is the pixel count of each image.
    """
    shapes = [imread(path)[:, :, 0].shape for path in images]
    tick_labels = [str(shape) for shape in shapes]
    pixel_counts = [np.prod(shape) for shape in shapes]
    for program in programs:
        means, stds = zip(*times[program])
        plt.errorbar(pixel_counts, means, yerr=stds, fmt='o')
    plt.xticks(pixel_counts, tick_labels)
    plt.xlabel('Image size')
    plt.ylabel('Time (s)')
    plt.show()
def print_results(times, programs, images):
    # Print a per-program table of image name, pixel count and runtime stats.
    # `times` maps program name -> list of (mean, std) tuples, one per image.
    sizes = [imread(img)[:,:,0].size for img in images]
    for p in programs:
        print '\n{}'.format(p)
        # Transpose the list of (mean, std) pairs into two parallel tuples.
        mean_t, std_t = zip(*times[p])
        print 'Image'.rjust(13), 'Size'.rjust(8), 'Avg. time'.rjust(10), 'Std. time'.rjust(10)
        for img, size, m, s in zip(images, sizes, mean_t, std_t):
            print '{:13} {:8d} {:10.5f} {:10.5f}'.format(img, size, m, s)
def main():
    # Build the CUDA and sequential grayscale binaries before timing them.
    print 'Running make...'
    subprocess.call(['make', '-j8'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    programs = ['./grayscale.out', './grayscale-seq.out']
    images = ['img/emma{}.png'.format(i) for i in range(1, 6)]
    n = 20  # timing runs per (program, image) pair
    times = {}
    try:
        # Reuse cached timings from a previous run when available.
        print 'Loading times.json...'
        time_file = open('times.json', 'r')
        times = json.load(time_file)
    except IOError:
        # No cache: time every program on every image n times each.
        print 'Failed, calculating times'
        for p in programs:
            times[p] = []
            for img in images:
                t = []
                # Trailing comma keeps the summary on the same line (Python 2 print).
                print 'Running {} with {} {} times...'.format(p, img, n),
                for _ in range(n):
                    t.append(time_a_function(p, img))
                mean_t = np.mean(t)
                std_t = np.std(t)
                print '({} +- {})s on average'.format(mean_t, std_t)
                times[p].append((mean_t, std_t))
        # Persist the timings so subsequent runs can skip the benchmark.
        time_file = open('times.json', 'w')
        print 'Writing times.json...'
        json.dump(times, time_file)
        time_file.close()
    print_results(times, programs, images)
    plot_results(times, programs, images)
    # Remove the built binaries once results are reported.
    clean(programs)
if __name__ == '__main__':
    main()
| sebasvega95/HPC-assignments | CUDA/grayscale/timing.py | Python | mit | 2,425 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Early initialization and main entry point.
qutebrowser's initialization process roughly looks like this:
- This file gets imported, either via the setuptools entry point or
__main__.py.
- At import time, we check for the correct Python version and show an error if
it's too old.
- The main() function in this file gets invoked
- Argument parsing takes place
- earlyinit.early_init() gets invoked to do various low-level initialization
and checks whether all dependencies are met.
- app.run() gets called, which takes over.
See the docstring of app.py for details.
"""
import sys
import json
import qutebrowser
# Import the Python-version check, coping with being run either as an
# installed package or from the source tree, and fail gracefully when the
# script is executed directly instead of via `python3 -m qutebrowser`.
try:
    from qutebrowser.misc.checkpyver import check_python_version
except ImportError:
    try:
        # python2
        from .misc.checkpyver import check_python_version
    except (SystemError, ValueError):
        # Import without module - SystemError on Python3, ValueError (?!?) on
        # Python2
        sys.stderr.write("Please don't run this script directly, do something "
                         "like python3 -m qutebrowser instead.\n")
        sys.stderr.flush()
        sys.exit(100)
check_python_version()
import argparse # pylint: disable=wrong-import-order
from qutebrowser.misc import earlyinit
def get_argparser():
    """Get the argparse parser.

    Returns:
        An argparse.ArgumentParser configured with all public qutebrowser
        options plus a 'debug arguments' group.
    """
    parser = argparse.ArgumentParser(prog='qutebrowser',
                                     description=qutebrowser.__description__)
    parser.add_argument('-B', '--basedir', help="Base directory for all "
                        "storage.")
    parser.add_argument('-C', '--config-py', help="Path to config.py.",
                        metavar='CONFIG')
    parser.add_argument('-V', '--version', help="Show version and quit.",
                        action='store_true')
    parser.add_argument('-s', '--set', help="Set a temporary setting for "
                        "this session.", nargs=2, action='append',
                        dest='temp_settings', default=[],
                        metavar=('OPTION', 'VALUE'))
    parser.add_argument('-r', '--restore', help="Restore a named session.",
                        dest='session')
    parser.add_argument('-R', '--override-restore', help="Don't restore a "
                        "session even if one would be restored.",
                        action='store_true')
    parser.add_argument('--target', choices=['auto', 'tab', 'tab-bg',
                                             'tab-silent', 'tab-bg-silent',
                                             'window'],
                        help="How URLs should be opened if there is already a "
                        "qutebrowser instance running.")
    parser.add_argument('--backend', choices=['webkit', 'webengine'],
                        help="Which backend to use.")
    parser.add_argument('--enable-webengine-inspector', action='store_true',
                        help="Enable the web inspector for QtWebEngine. Note "
                        "that this is a SECURITY RISK and you should not "
                        "visit untrusted websites with the inspector turned "
                        "on. See https://bugreports.qt.io/browse/QTBUG-50725 "
                        "for more details. This is not needed anymore since "
                        "Qt 5.11 where the inspector is always enabled and "
                        "secure.")
    # Internal options used when restarting; hidden from --help output.
    parser.add_argument('--json-args', help=argparse.SUPPRESS)
    parser.add_argument('--temp-basedir-restarted', help=argparse.SUPPRESS)

    debug = parser.add_argument_group('debug arguments')
    debug.add_argument('-l', '--loglevel', dest='loglevel',
                       help="Set loglevel", default='info',
                       choices=['critical', 'error', 'warning', 'info',
                                'debug', 'vdebug'])
    # logfilter_error / debug_flag_error (defined below) validate the values.
    debug.add_argument('--logfilter', type=logfilter_error,
                       help="Comma-separated list of things to be logged "
                       "to the debug log on stdout.")
    debug.add_argument('--loglines',
                       help="How many lines of the debug log to keep in RAM "
                       "(-1: unlimited).",
                       default=2000, type=int)
    debug.add_argument('-d', '--debug', help="Turn on debugging options.",
                       action='store_true')
    debug.add_argument('--json-logging', action='store_true', help="Output log"
                       " lines in JSON format (one object per line).")
    debug.add_argument('--nocolor', help="Turn off colored logging.",
                       action='store_false', dest='color')
    debug.add_argument('--force-color', help="Force colored logging",
                       action='store_true')
    debug.add_argument('--nowindow', action='store_true', help="Don't show "
                       "the main window.")
    debug.add_argument('-T', '--temp-basedir', action='store_true', help="Use "
                       "a temporary basedir.")
    debug.add_argument('--no-err-windows', action='store_true', help="Don't "
                       "show any error windows (used for tests/smoke.py).")
    debug.add_argument('--qt-arg', help="Pass an argument with a value to Qt. "
                       "For example, you can do "
                       "`--qt-arg geometry 650x555+200+300` to set the window "
                       "geometry.", nargs=2, metavar=('NAME', 'VALUE'),
                       action='append')
    debug.add_argument('--qt-flag', help="Pass an argument to Qt as flag.",
                       nargs=1, action='append')
    debug.add_argument('-D', '--debug-flag', type=debug_flag_error,
                       default=[], help="Pass name of debugging feature to be"
                       " turned on.", action='append', dest='debug_flags')
    parser.add_argument('command', nargs='*', help="Commands to execute on "
                        "startup.", metavar=':command')
    # URLs will actually be in command
    parser.add_argument('url', nargs='*', help="URLs to open on startup "
                        "(empty as a window separator).")
    return parser
def directory(arg):
    """argparse ``type`` callback validating a directory argument.

    An argparse type callable must return the converted value; the previous
    implementation fell through and returned None for every non-empty value,
    silently discarding the argument.

    Args:
        arg: The raw command-line value.

    Returns:
        The argument unchanged.

    Raises:
        argparse.ArgumentTypeError: If *arg* is empty.
    """
    if not arg:
        raise argparse.ArgumentTypeError("Invalid empty value")
    return arg
def logfilter_error(logfilter):
    """Validate logger names passed to --logfilter.

    Args:
        logfilter: A comma separated list of logger names, optionally
            prefixed with '!' to invert the filter.

    Returns:
        The unchanged value when every name is a known logger.

    Raises:
        argparse.ArgumentTypeError: If any name is not a known logger.
    """
    from qutebrowser.utils import log
    requested = set(logfilter.lstrip('!').split(','))
    if not requested.issubset(log.LOGGER_NAMES):
        raise argparse.ArgumentTypeError(
            "filters: Invalid value {} - expected a list of: {}".format(
                logfilter, ', '.join(log.LOGGER_NAMES)))
    return logfilter
def debug_flag_error(flag):
    """Validate flags passed to --debug-flag.

    Available flags:
        debug-exit: Turn on debugging of late exit.
        pdb-postmortem: Drop into pdb on exceptions.
        no-sql-history: Don't store history items.
        no-scroll-filtering: Process all scrolling updates.
        log-requests: Log all network requests.
        log-scroll-pos: Log all scrolling changes.
        stack: Enable Chromium stack logging.
        chromium: Enable Chromium logging.
        werror: Turn Python warnings into errors.
    """
    # Tuple order matters: it is echoed verbatim in the error message.
    valid_flags = ('debug-exit', 'pdb-postmortem', 'no-sql-history',
                   'no-scroll-filtering', 'log-requests', 'lost-focusproxy',
                   'log-scroll-pos', 'stack', 'chromium', 'werror')
    if flag not in valid_flags:
        raise argparse.ArgumentTypeError("Invalid debug flag - valid flags: {}"
                                         .format(', '.join(valid_flags)))
    return flag
def main():
    """Parse the command line, run early init, and hand control to the app."""
    args = get_argparser().parse_args(sys.argv[1:])
    if args.json_args is not None:
        # Restoring after a restart: the previous instance serialized its
        # argparse namespace to JSON, so rebuild a "fake" Namespace from the
        # loaded data.
        args = argparse.Namespace(**json.loads(args.json_args))
    earlyinit.early_init(args)
    # This import must happen late, as earlyinit needs to run first (because
    # of version checking and other early initialization).
    from qutebrowser import app
    return app.run(args)
| t-wissmann/qutebrowser | qutebrowser/qutebrowser.py | Python | gpl-3.0 | 9,154 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# __init__.py
# ccorp_account
# First author: Carlos Vásquez <carlos.vasquez@clearcorp.co.cr> (ClearCorp S.A.)
# Copyright (c) 2010-TODAY ClearCorp S.A. (http://clearcorp.co.cr). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of ClearCorp S.A..
#
##############################################################################
import invoice
| ClearCorp-dev/odoo-clearcorp | TODO-6.1/ccorp_account/report/__init__.py | Python | agpl-3.0 | 1,991 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test runner for TensorFlow tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shlex
import sys
from google.protobuf import text_format
from tensorflow.core.util import test_log_pb2
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.tools.test import run_and_gather_logs_lib
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
# pylint: disable=unused-import
# Note: cpuinfo and psutil are not installed for you in the TensorFlow
# OSS tree. They are installable via pip.
try:
import cpuinfo
import psutil
except ImportError as e:
tf_logging.error("\n\n\nERROR: Unable to import necessary library: {}. "
"Issuing a soft exit.\n\n\n".format(e))
sys.exit(0)
# pylint: enable=g-bad-import-order
# pylint: enable=unused-import
FLAGS = flags.FLAGS

# Command-line flags: what benchmark target to run, where to write its log
# output, and which build configuration to record alongside the results.
flags.DEFINE_string("name", "", """Benchmark target identifier.""")
flags.DEFINE_string("test_name", "", """Test target to run.""")
flags.DEFINE_string("test_args", "", """Test arguments, space separated.""")
flags.DEFINE_string("test_log_output", "", """Filename to write logs.""")
flags.DEFINE_bool("test_log_output_use_tmpdir", False,
                  """Store the log output into tmpdir?.""")
flags.DEFINE_string("compilation_mode", "",
                    """Mode used during this build (e.g. opt, dbg).""")
flags.DEFINE_string("cc_flags", "", """CC flags used during this build.""")
def gather_build_configuration():
    """Build a BuildConfiguration proto from the compilation-mode flags."""
    config = test_log_pb2.BuildConfiguration()
    config.mode = FLAGS.compilation_mode
    # Record every CC flag except include directives.
    config.cc_flags.extend(
        flag for flag in shlex.split(FLAGS.cc_flags)
        if not flag.startswith("-i"))
    return config
def main(unused_args):
    # Read the benchmark target description from command-line flags.
    name = FLAGS.name
    test_name = FLAGS.test_name
    test_args = FLAGS.test_args
    # Run the test binary and gather its benchmark log entries.
    test_results, _ = run_and_gather_logs_lib.run_and_gather_logs(name, test_name,
                                                                  test_args)
    # Additional bits we receive from bazel
    test_results.build_configuration.CopyFrom(gather_build_configuration())
    serialized_test_results = text_format.MessageToString(test_results)
    if not FLAGS.test_log_output:
        # No output file requested: dump the text proto to stdout instead.
        print(serialized_test_results)
        return
    if FLAGS.test_log_output_use_tmpdir:
        tmpdir = test.get_temp_dir()
        output_path = os.path.join(tmpdir, FLAGS.test_log_output)
    else:
        output_path = os.path.abspath(FLAGS.test_log_output)
    gfile.GFile(output_path, "w").write(serialized_test_results)
    tf_logging.info("Test results written to: %s" % output_path)
if __name__ == "__main__":
    app.run()
| krikru/tensorflow-opencl | tensorflow/tools/test/run_and_gather_logs.py | Python | apache-2.0 | 3,586 |
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core import validators
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
from taiga.base.api import serializers
import re
class BaseRegisterSerializer(serializers.Serializer):
    """Common validation for the user-registration endpoints.

    Validates full name, email, username and password; subclasses add
    token handling for private registration.
    """
    full_name = serializers.CharField(max_length=256)
    email = serializers.EmailField(max_length=255)
    username = serializers.CharField(max_length=255)
    password = serializers.CharField(min_length=4)

    def validate_username(self, attrs, source):
        """Reject usernames containing anything but word chars, dots and dashes.

        Returns the full *attrs* dict unchanged on success; raises
        serializers.ValidationError otherwise.
        """
        value = attrs[source]
        # Raw string so the \w escape reaches the regex engine untouched
        # (a plain '...\w...' literal is a deprecated invalid escape).
        validator = validators.RegexValidator(re.compile(r'^[\w.-]+$'),
                                              _("invalid username"), "invalid")
        try:
            validator(value)
        except ValidationError:
            raise serializers.ValidationError(_("Required. 255 characters or fewer. Letters, numbers "
                                                "and /./-/_ characters'"))
        return attrs
class PublicRegisterSerializer(BaseRegisterSerializer):
    """Registration via the public endpoint; no extra fields needed."""
    pass
class PrivateRegisterForNewUserSerializer(BaseRegisterSerializer):
    """Private (invitation-based) registration for a new user; requires a token."""
    token = serializers.CharField(max_length=255, required=True)
class PrivateRegisterForExistingUserSerializer(serializers.Serializer):
    """Private (invitation-based) registration for an existing account.

    Only credentials plus the invitation token are needed, so this does not
    inherit the full-name/email fields from BaseRegisterSerializer.
    """
    username = serializers.CharField(max_length=255)
    password = serializers.CharField(min_length=4)
    token = serializers.CharField(max_length=255, required=True)
| Rademade/taiga-back | taiga/auth/serializers.py | Python | agpl-3.0 | 2,267 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Test QiBuild Find """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import qibuild.config
from qibuild import find
from qibuild.test.conftest import QiBuildAction
from qitoolchain.test.conftest import QiToolchainAction
def test_find_target_in_project_cmake(qibuild_action, record_messages):
    """ Test Find Target In Project CMake """
    # "hello" depends on "world"; after configuring, `qibuild find --cmake`
    # should resolve world's CMake variables from the sibling project.
    qibuild_action.add_test_project("world")
    qibuild_action.add_test_project("hello")
    qibuild_action("configure", "hello")
    record_messages.reset()
    qibuild_action("find", "--cmake", "hello", "world")
    assert record_messages.find("WORLD_LIBRARIES")
def test_find_target_in_toolchain_package_cmake(cd_to_tmpdir, record_messages):
    """ Test Find Target In Toolchain Package CMake """
    qibuild_action = QiBuildAction()
    qitoolchain_action = QiToolchainAction()
    build_worktree = qibuild_action.build_worktree
    qibuild_action.add_test_project("world")
    qibuild_action.add_test_project("hello")
    # Package "world", put it in a fresh "foo" toolchain, then remove the
    # source project so the find must go through the toolchain package.
    world_package = qibuild_action("package", "world")
    qitoolchain_action("create", "foo")
    qibuild.config.add_build_config("foo", toolchain="foo")
    qitoolchain_action("add-package", "-c", "foo", world_package)
    build_worktree.worktree.remove_project("world", from_disk=True)
    record_messages.reset()
    qibuild_action.chdir("hello")
    qibuild_action("configure", "-c", "foo")
    qibuild_action("find", "--cmake", "world", "-c", "foo")
    assert record_messages.find("WORLD_LIBRARIES")
def test_find_target_in_build_dir(qibuild_action, record_messages):
    """ Test Find Target In Build Dir """
    qibuild_action.add_test_project("world")
    qibuild_action.add_test_project("hello")
    qibuild_action("configure", "hello")
    qibuild_action("make", "hello")
    record_messages.reset()
    qibuild_action("find", "hello", "world")
    # The built library should be found under its platform-specific name.
    assert record_messages.find(find.library_name("world"))
    # A wrong target name must make the command fail with exit code 1.
    rc = qibuild_action("find", "hello", "libworld", retcode=True)
    assert rc == 1
def test_find_target_in_toolchain_package(cd_to_tmpdir, record_messages):
    """ Test Find Target In Toolchain Package """
    qibuild_action = QiBuildAction()
    qitoolchain_action = QiToolchainAction()
    qibuild_action.add_test_project("world")
    qibuild_action.add_test_project("hello")
    # Ship "world" as a package inside the "foo" toolchain.
    world_package = qibuild_action("package", "world")
    qitoolchain_action("create", "foo")
    qibuild.config.add_build_config("foo", toolchain="foo")
    qitoolchain_action("add-package", "-c", "foo", world_package)
    qibuild_action.chdir("hello")
    qibuild_action("configure", "-c", "foo")
    qibuild_action("make", "-c", "foo")
    # The library should be located in the toolchain package...
    record_messages.reset()
    qibuild_action("find", "world", "-c", "foo")
    assert record_messages.find(find.library_name("world"))
    # ...and the binary in the build dir.
    record_messages.reset()
    qibuild_action("find", "hello", "-c", "foo")
    assert record_messages.find(find.binary_name("hello"))
    # Unknown targets must make the command fail with exit code 1.
    rc = qibuild_action("find", "libeggs", "-c", "foo", retcode=True)
    assert rc == 1
| aldebaran/qibuild | python/qibuild/test/test_qibuild_find.py | Python | bsd-3-clause | 3,244 |
#!/usr/bin/env python3
import urllib.request
import json
import argparse
import time
# Command-line interface: where to fetch issues from (GitHub API), what to
# fetch, retry behavior, and where to write the resulting JSON.
parser = argparse.ArgumentParser()
parser.add_argument('-B', '--base-url',
                    default = 'https://api.github.com',
                    help = 'base URL for issues API')
parser.add_argument('-O', '--owner',
                    default = 'StanfordLegion',
                    help = 'repository owner')
parser.add_argument('-r', '--repo',
                    default = 'legion',
                    help = 'repository name')
parser.add_argument('-s', '--state',
                    default = 'open', choices = ['open' ,'closed', 'all'],
                    help = 'state of issues to fetch')
parser.add_argument('-p', '--pull-requests', action='store_true',
                    help = 'include pull requests as well as issues')
parser.add_argument('-v', '--verbose', action='store_true',
                    help = 'verbose progress information')
parser.add_argument('-q', '--quiet', action='store_true',
                    help = 'suppress all messages to stdout')
parser.add_argument('-c', '--count', type=int,
                    default = 100,
                    help = 'issues to request per page')
parser.add_argument('-R', '--max-retries', type=int,
                    default = 3,
                    help = 'maximum retries for a single request')
parser.add_argument('-d', '--retry-delay', type=int,
                    default = 3,
                    help = 'delay (in seconds) between retries')
parser.add_argument('-t', '--tokenfile', type=str,
                    help = 'file containing API authentication token')
parser.add_argument('--partial', action='store_true',
                    help = 'write partial issue list in case of errors')
parser.add_argument('output', type=str,
                    help = 'output file location')
args = parser.parse_args()
# Fetched issues keyed by issue number.
issues = {}
# Optional auth header built from the token file.
headers = {}
if args.tokenfile:
    token = open(args.tokenfile, 'r').read().strip()
    headers['Authorization'] = 'token ' + token
# Page through the issues API until an empty page is returned.
for page in range(1, 1000):
    url = '{}/repos/{}/{}/issues?state={}&count={}&page={}'.format(args.base_url,
                                                                   args.owner,
                                                                   args.repo,
                                                                   args.state,
                                                                   args.count,
                                                                   page)
    if args.verbose:
        print('fetching: {}'.format(url))
    retry_count = 0
    # Retry the request up to --max-retries times before giving up.
    while True:
        try:
            req = urllib.request.Request(url, headers=headers)
            r = urllib.request.urlopen(req)
            j = json.loads(r.read().decode('utf-8'))
            break
        except KeyboardInterrupt:
            exit(1)
        except Exception as e:
            if retry_count >= args.max_retries:
                if args.partial:
                    # --partial: pretend the page was empty so the issues
                    # gathered so far still get written out below.
                    j = []
                    break
                raise
            if not args.quiet:
                print('error: {}'.format(e))
            retry_count += 1
            if args.retry_delay > 0:
                time.sleep(args.retry_delay)
    if args.verbose:
        print('{} issues read'.format(len(j)))
    # an empty list suggests we're at the end
    if len(j) == 0:
        break
    for issue in j:
        num = issue['number']
        # The API mixes pull requests into the issue list; skip them unless
        # --pull-requests was given.
        if ('pull_request' in issue) and not(args.pull_requests):
            continue
        issues[num] = issue
if not args.quiet:
    print('writing {} issues to \'{}\''.format(len(issues),
                                               args.output))
# write data out in hopefully-human-readable json
with open(args.output, 'w') as f:
    # include information used to fetch the issues
    data = { 'base_url': args.base_url,
             'owner': args.owner,
             'repo': args.repo,
             'issues': issues }
    json.dump(data, f, sort_keys=True, indent=4)
| StanfordLegion/legion | tools/fetch_github_issues.py | Python | apache-2.0 | 4,029 |
import ConfigParser
import os
# Path of the global configuration file, resolved relative to this module.
global_conf_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'global_conf.cfg')
## Defining the parser
conf_parser = ConfigParser.SafeConfigParser()
conf_parser.read(global_conf_file)
## main project folder
project_folder = conf_parser.get('folder_conf','project_folder')
auto_folder_name = conf_parser.get('folder_conf','auto_folder')
adhoc_folder_name = conf_parser.get('folder_conf','adhoc_folder')
## auto dalla
auto_folder = project_folder + '/'+ auto_folder_name
## adhoc dalla
adhoc_folder = project_folder + '/'+ adhoc_folder_name
## input folder locations under the "auto" tree
input_folder = auto_folder + '/input'
hazard_kelas_folder = auto_folder + '/config/kelas_dampak'
input_boundary_folder = input_folder + '/boundary'
input_sql_folder = input_folder + '/sql'
input_exposure_folder = input_folder + '/exposure'
input_exposure_shapefile_folder = input_exposure_folder + '/shapefile'
## auto output
auto_output_folder = auto_folder + '/output'
## adhoc output
adhoc_output_folder = adhoc_folder + '/output'
| frzdian/jaksafe-engine | jaksafe/jaksafe/jakservice/config_folder.py | Python | gpl-2.0 | 1,043 |
#! /usr/bin/python
# Usage:
# ./emulecollector.py COLLECTION_FILE
# a quick&dirty GUI for `ed2k -l` and `ed2k`.
import pygtk
import sys
import subprocess
#pygtk.require("2.0")
import gtk
import gobject
class Interface:
    """Main GTK window: shows the ed2k links extracted from a collection
    file (via `ed2k -l`) and forwards the selected ones to aMule by invoking
    the `ed2k` CLI on each link."""

    def __init__(self):
        self.linklist = []
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_title("Emule collector")
        self.window.connect("delete_event", self.delete_event)
        send_list = gtk.Button("Send to Amule")
        send_list.connect("clicked", self.sendFilesToAmule)
        quit_button = gtk.Button("Quit")
        # Cache the button height; used later to size the window.
        self.ht = quit_button.size_request()[1]
        quit_button.connect("clicked", lambda w: gtk.main_quit())
        buttons = gtk.HBox()
        buttons.add(send_list)
        buttons.add(quit_button)
        # Model columns: 0 = selected flag, 1 = ed2k link text.
        self.list_store = gtk.ListStore(gobject.TYPE_BOOLEAN, gobject.TYPE_STRING)
        self.the_list = gtk.TreeView(model=self.list_store)
        sel_rend = gtk.CellRendererToggle()
        col0 = gtk.TreeViewColumn("Selected", sel_rend, active=0)
        self.the_list.append_column(col0)
        link_rend = gtk.CellRendererText()
        col1 = gtk.TreeViewColumn("Link", link_rend, text=1)
        self.the_list.append_column(col1)
        self.the_list.get_selection().connect("changed", self.change_me)
        scrolla = gtk.ScrolledWindow()
        scrolla.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
        scrolla.add_with_viewport(self.the_list)
        container = gtk.VBox()
        container.add(scrolla)
        container.pack_start(buttons, expand=False, fill=False)
        self.window.add(container)

    def list_render(self):
        # NOTE(review): `titles` is not defined anywhere in this file, so this
        # method would raise NameError if called -- it appears to be dead
        # leftover code; confirm before relying on it.
        for t in titles:
            cell = gtk.CellRendererText()
            col = gtk.TreeViewColumn(t, cell)
            self.the_list.append_column(col)

    def addLinksFrom(self, theFile):
        # Extract the ed2k links from the collection file and show them.
        self.linklist = subprocess.check_output(["ed2k", "-l", theFile]).split("\n")
        self.add_links()

    def add_links(self):
        # Append every extracted link to the model, initially unselected.
        for a_link in self.linklist:
            v = self.list_store.append()
            self.list_store.set_value(v, 0, False)
            self.list_store.set_value(v, 1, a_link)

    def delete_event(self, widget, event, data=None):
        # Window close handler: quit the GTK main loop.
        gtk.main_quit()
        return False

    def sendFilesToAmule(self, widget, data=None):
        # Collect the links whose "Selected" flag is set...
        selected_list = []
        self.list_store.foreach(lambda m, p, i, u: selected_list.append(m[i][1]) if m[i][0] else False, 0)
        # ...and pass each one to the `ed2k` CLI, stopping on first failure.
        for lnk in selected_list:
            r = subprocess.call(["ed2k", lnk])
            if r != 0:
                break
        #if r != 0:
        # warning cannot send link...
        msg = ""
        if r == 0:
            msg_type = gtk.MESSAGE_INFO
            msg = "Everything should be alright"
        else:
            msg = "Something went wrong"
            msg_type = gtk.MESSAGE_ERROR
        # Report the outcome in a modal dialog.
        dia = gtk.MessageDialog(self.window, type=msg_type, buttons=gtk.BUTTONS_OK)
        dia.set_markup(msg)
        dia.set_default_response(gtk.RESPONSE_OK)
        dia.run()
        dia.destroy()

    def change_me(self, selection):
        # Toggle the "Selected" flag of the row the user clicked.
        (model, it) = selection.get_selected()
        model.set_value(it, 0, not model[it][0])

    def open_up(self):
        # Size the window relative to the screen and the list's request.
        s = self.the_list.size_request()
        scr = self.window.get_screen()
        ps = [scr.get_width(), scr.get_height()]
        self.window.set_geometry_hints(None, 300, 10*self.ht, ps[0], ps[1], -1, -1, -1, -1, -1, -1)
        self.window.set_default_size(s[0], 25*self.ht)
        self.window.show_all();
def main():
    # Enter the GTK main loop; returns 0 once the loop quits.
    gtk.main()
    return 0
if __name__ == "__main__":
    # The first CLI argument is the emule collection file to load.
    if len(sys.argv) > 1:
        o = Interface()
        o.addLinksFrom(sys.argv[1])
        o.open_up()
        main()
    else:
        print "first argument missing"
| shintakezou/miscellanea | emulecollector/emulecollector.py | Python | cc0-1.0 | 3,850 |
# Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.racing.DistributedProjectile
from panda3d.core import NodePath
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from direct.gui.DirectGui import *
from direct.fsm import FSM
from direct.distributed.DistributedSmoothNode import DistributedSmoothNode
from otp.avatar.ShadowCaster import ShadowCaster
class DistributedProjectile(DistributedSmoothNode, ShadowCaster, NodePath):
    """Client-side racing projectile: a smoothed distributed node rendered
    as the 'smiley' model and reparented to render on creation."""

    def __init__(self, cr):
        ShadowCaster.__init__(self)
        DistributedSmoothNode.__init__(self, cr)
        NodePath.__init__(self, 'Projectile')

    # NOTE(review): announceGenerate and generate perform the same setup
    # (names, model load, startSmooth, reparent) -- presumably intentional so
    # the node is set up whichever hook fires first; confirm before merging.
    def announceGenerate(self):
        DistributedSmoothNode.announceGenerate(self)
        self.name = self.uniqueName('projectile')
        self.posHprBroadcastName = self.uniqueName('projectileBroadcast')
        geom = loader.loadModel('models/smiley')
        self.geom = geom
        self.geom.reparentTo(self)
        self.startSmooth()
        self.reparentTo(render)

    def generate(self):
        DistributedSmoothNode.generate(self)
        self.name = self.uniqueName('projectile')
        self.posHprBroadcastName = self.uniqueName('projectileBroadcast')
        geom = loader.loadModel('models/smiley')
        self.geom = geom
        self.geom.reparentTo(self)
        self.startSmooth()
        self.reparentTo(render)

    def setAvId(self, avId):
        # Distributed field: id of the avatar that fired this projectile.
        self.avId = avId

    def delete(self):
        DistributedSmoothNode.delete(self)
'''
Example of how to use Multilayer Perceptrons.
'''
import sys, os, time
import gzip, pickle
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
# from matplotlib.backends.backend_pdf import PdfPages
# %cd C:/Users/g1rxf01/Downloads/New folder/simpleml/examples
# %cd M:/Libraries/Documents/Code/Python/simpleml/examples
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from simpleml.neural import MultilayerPerceptron
from simpleml.transform import to_dummies
# Load data: pickled dict of (inputs, labels) per split; add one-hot labels.
with gzip.open('../data/mnist.gz', 'rb') as f:
    data = pickle.load(f)
data = {key: (val[0], val[1], to_dummies(val[1])) for key, val in data.items()}
# Setup estimator. num_inputs includes +1 for the bias term.
num_hidden_nodes = [101]
mlp = MultilayerPerceptron(
    num_inputs=data['train'][0].shape[1]+1,
    num_outputs=data['train'][2].shape[1],
    num_hidden_layers=len(num_hidden_nodes), num_hidden_nodes=num_hidden_nodes,
    learn_rate=.5, momentum=.1, seed=23456
)
# Estimate multilayer perceptron and report test error.
start = time.perf_counter()
mlp.fit(data['train'][0], data['train'][2],
        epochnum=10, verbose=1)
pred = mlp.classify(data['test'][0], max_ind=True)
print("Time: {:5.2f}, Error: {:5.4f}".format(
    time.perf_counter() - start,
    1 - np.mean(pred == data['test'][1])
))
# Visualize first hidden layer: each node's input weights as a 28x28 image
# (skipping the bias row, hence the [1:] slice and the -1 node counts).
fig1 = plt.figure(figsize=(10, 10))
for i in range(num_hidden_nodes[0]-1):
    side = np.sqrt(num_hidden_nodes[0]-1)
    ax = fig1.add_subplot(side, side, i+1)
    ax.imshow(mlp.layers[0].weights[1:, i].reshape([28, 28]), cmap=cm.Greys_r)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
fig1.savefig('ex_mlp_mnist.pdf')
| rogerfan/simpleml | examples/multilayer_perceptron_mnist.py | Python | mpl-2.0 | 1,641 |
# -*- coding: utf-8 -*-
#
# Copyright Kevin Deldycke <kevin@deldycke.com> and contributors.
# All Rights Reserved.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os
from pathlib import Path
from shutil import which
from textwrap import indent
from pelican.contents import Static
from pynpm import NPMPackage
from . import logger, PLUMAGE_ROOT
def setup_webassets(conf):
    """Set up the pelican-webassets plugin configuration.

    Ensures a PostCSS binary is available (installing Plumage's Node.js
    dependencies on demand via npm) and registers it, along with the
    autoprefixer plugin, in ``conf["WEBASSETS_CONFIG"]`` unless the user
    already configured those keys.

    Returns the (mutated) Pelican configuration dict.

    Raises FileNotFoundError if the npm CLI itself is missing.
    """
    if not conf.get("WEBASSETS_CONFIG"):
        conf["WEBASSETS_CONFIG"] = []
    # Keys already configured by the user take precedence over our defaults.
    webassets_conf_keys = {i[0] for i in conf.get("WEBASSETS_CONFIG")}

    # Search for PostCSS binary location.
    cli_name = "postcss"

    # The dependency definition file relative to Plumage's install path takes precedence.
    node_deps_file = PLUMAGE_ROOT.joinpath("package.json").resolve()
    node_bin_path = node_deps_file.parent / "node_modules" / ".bin"
    cli_search_path = [
        str(node_bin_path),
    ]

    # Check if the path exist in any of the environment locations.
    # Use os.pathsep (":" on POSIX, ";" on Windows) instead of a hard-coded
    # ":", and default PATH to "" — os.getenv("PATH") may return None, which
    # would make str.join() raise TypeError.
    env_path = os.pathsep.join(cli_search_path + [os.getenv("PATH", "")])
    postcss_bin = which(cli_name, path=env_path)

    if not postcss_bin:
        logger.warning(f"{cli_name} CLI not found.")

        # Install Node dependencies.
        logger.info(
            f"Install Plumage's Node.js dependencies from {node_deps_file}:\n"
            f"{indent(node_deps_file.read_text(), ' ' * 2)}"
        )
        pkg = NPMPackage(node_deps_file)
        try:
            pkg.install()
        except FileNotFoundError:
            logger.error("npm CLI not found.")
            raise
        # Retry the lookup now that node_modules/.bin should exist.
        postcss_bin = which(cli_name, path=env_path)
        assert postcss_bin

    # Register PostCSS to webassets plugin.
    logger.info(f"{cli_name} CLI found at {postcss_bin}")
    if "POSTCSS_BIN" not in webassets_conf_keys:
        conf["WEBASSETS_CONFIG"].append(
            ("POSTCSS_BIN", postcss_bin),
        )
    # Force usage of autoprefixer via PostCSS.
    if "POSTCSS_EXTRA_ARGS" not in webassets_conf_keys:
        conf["WEBASSETS_CONFIG"].append(
            ("POSTCSS_EXTRA_ARGS", ["--use", "autoprefixer"]),
        )
    return conf
| kdeldycke/plumage | plumage/webassets.py | Python | gpl-2.0 | 2,808 |
#!/usr/bin/python
"""
This is a tool to verify checksum hashes produced by LOCKSS against hashes
provided by a BagIt manifest document.
Invoke with -h for usage help.
Written by Stephen Eisenhauer
At University of North Texas Libraries
On 2013-04-17
Notes:
* The LOCKSS hash list will have more entries than we actually care about
(checksums for Apache directory listing pages, etc.), so we should just
go down the list of bag manifest entries and ensure that everything
there is also present (and identical) in the LOCKSS list.
"""
import argparse
import os
import re
import urllib
def load_lockss_hashes(hashcus_path):
    """Parse a LOCKSS HashCUS.txt file into a dict mapping path -> MD5 digest.

    LOCKSS digests are 32 uppercase hex characters followed by a space and
    a URL.  NOTE(review): the first matching line is consumed only to
    compute the common URL-prefix length (its own hash is NOT stored) --
    presumably it is the AU's base-URL entry; confirm against a sample
    HashCUS file.
    """
    prefix = None
    hashes = dict()
    f = open(hashcus_path, 'r')
    for line in f:
        m = re.match('[0-9A-F]{32} (.+)', line)
        if m:
            if not prefix:
                # First entry: remember the common URL prefix length
                # (+1 for the trailing slash) so later keys are relative paths.
                prefix = len(m.group(1)) + 1
                continue
            # Key: URL with the common prefix stripped; value: uppercase MD5.
            hashes[m.group(1)[prefix:]] = line[:32]
    f.close()
    print "Found %d hashes in HashCUS file" % len(hashes)
    return hashes
def compare_manifest_hashes(manifest_path, hashes):
    """Check every entry of a BagIt manifest against the LOCKSS hash dict.

    `hashes` maps URL-quoted relative paths to uppercase MD5 digests (as
    produced by load_lockss_hashes).  Prints one line per missing or
    mismatched entry and a final summary of records compared and errors.
    """
    records = 0
    errors = 0
    f = open(manifest_path, 'r')
    for line in f:
        # BagIt manifests use lowercase hex digests: "<md5> <path>".
        m = re.match('[0-9a-f]{32} (.+)', line)
        if m:
            records += 1
            # Quote the manifest path the same way LOCKSS stores its URLs.
            path = urllib.quote(m.group(1), safe="%/:=&?~#+!$,;'@()*[]")
            if not path in hashes:
                print "No LOCKSS hash found for path: %s" % path
                errors += 1
            elif line[:32].upper() != hashes[path]:
                # LOCKSS digests are uppercase; normalise before comparing.
                print "Hash mismatch: %s != %s for path %s" % (line[:32], hashes[path], path)
                errors += 1
    f.close()
    print "Compared %d records, encountered %d errors." % (records, errors)
def _make_arg_parser():
parser = argparse.ArgumentParser(
description='Compare a LOCKSS hash list to a bag manifest.')
parser.add_argument('HashCUS',
help="path to the HashCUS.txt file downloaded from LOCKSS")
parser.add_argument('manifest',
help="path to the bag manifest (e.g. mybag/manifest-md5.txt")
return parser
if __name__ == "__main__":
    # Parse CLI arguments, load the LOCKSS hash list, then compare the
    # BagIt manifest against it.
    parser = _make_arg_parser()
    args = parser.parse_args()
    # (sic) "hascus_path" -- pre-existing local-variable name typo, kept as-is.
    hascus_path = os.path.abspath(args.HashCUS)
    manifest_path = os.path.abspath(args.manifest)
    hashes = load_lockss_hashes(hascus_path)
    compare_manifest_hashes(manifest_path, hashes)
| MetaArchive/metaarchive-qa-tools | lockss-manifest-validate/lockss-manifest-validate.py | Python | bsd-3-clause | 2,391 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2016 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""OpenShift integration testing"""
from unittest import TestCase
import os
from weblate.openshiftlib import get_openshift_secret_key, import_env_vars
class FakeStorage(object):
    """Bare attribute container for import_env_vars() to set attributes on in tests."""
    pass
class OpenShiftTest(TestCase):
    """Tests for the OpenShift helpers in weblate.openshiftlib.

    All environment-variable changes are registered with ``addCleanup`` so
    they are undone even when an assertion fails; the original restored
    variables inline, which was skipped on failure and leaked state into
    later tests.
    """

    def _set_env(self, name, value):
        """Set ``name`` in os.environ and restore its previous state on cleanup."""
        previous = os.environ.get(name)
        os.environ[name] = value
        if previous is None:
            self.addCleanup(os.environ.pop, name, None)
        else:
            self.addCleanup(os.environ.__setitem__, name, previous)

    def _del_env(self, name):
        """Remove ``name`` from os.environ (if set) and restore it on cleanup."""
        if name in os.environ:
            self.addCleanup(os.environ.__setitem__, name, os.environ[name])
            del os.environ[name]

    def test_key_missing(self):
        # Without any OpenShift variables a secret key cannot be derived.
        cleanup_vars = (
            'OPENSHIFT_APP_NAME',
            'OPENSHIFT_APP_UUID',
            'OPENSHIFT_SECRET_TOKEN'
        )
        for var in cleanup_vars:
            self._del_env(var)
        self.assertRaises(ValueError, get_openshift_secret_key)

    def test_key_stored(self):
        # An explicitly provided token is returned verbatim.
        self._set_env('OPENSHIFT_SECRET_TOKEN', 'TEST TOKEN')
        self.assertEqual(get_openshift_secret_key(), 'TEST TOKEN')

    def test_key_calc(self):
        # Without a token, the key is derived from app name + UUID.
        self._set_env('OPENSHIFT_APP_NAME', 'TOKEN')
        self._set_env('OPENSHIFT_APP_UUID', 'TEST')
        self.assertEqual(
            get_openshift_secret_key(),
            '9cafcbef936068980e0ddefad417dcaea8c21020c68116bb74e3705ce3b62de4'
        )

    def test_import_env_string(self):
        # Quoted values are parsed as Python string literals.
        storage = FakeStorage()
        import_env_vars({'WEBLATE_FOO': '"bar"'}, storage)
        self.assertEqual(storage.FOO, 'bar')

    def test_import_env_int(self):
        storage = FakeStorage()
        import_env_vars({'WEBLATE_FOO': '1234'}, storage)
        self.assertEqual(storage.FOO, 1234)

    def test_import_env_tuple(self):
        storage = FakeStorage()
        import_env_vars({'WEBLATE_FOO': '(1, 2)'}, storage)
        self.assertEqual(storage.FOO, (1, 2))

    def test_import_env_env(self):
        # "$VAR" references are resolved against the supplied environment.
        storage = FakeStorage()
        import_env_vars({'WEBLATE_FOO': '"$BAR"', 'BAR': 'baz'}, storage)
        self.assertEqual(storage.FOO, 'baz')

    def test_import_env_raw(self):
        # "$$" escapes a literal dollar sign.
        storage = FakeStorage()
        import_env_vars({'WEBLATE_FOO': '(r"/project/(.*)$$",)'}, storage)
        self.assertEqual(storage.FOO, ('/project/(.*)$',))
| jitka/weblate | weblate/test_openshift.py | Python | gpl-3.0 | 2,837 |
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import Gaffer
import GafferTest
import GafferScene
import GafferSceneTest
class SceneSwitchTest( GafferSceneTest.SceneTestCase ) :
    """Unit tests for the GafferScene.SceneSwitch node."""

    def testDefaultName( self ) :
        node = GafferScene.SceneSwitch()
        self.assertEqual( node.getName(), "SceneSwitch" )

    def testEnabledPlug( self ) :
        node = GafferScene.SceneSwitch()
        # The enabled plug must be a BoolPlug, reported via enabledPlug(),
        # and must not be mistaken for an indexed input.
        self.assertTrue( isinstance( node["enabled"], Gaffer.BoolPlug ) )
        self.assertTrue( node["enabled"].isSame( node.enabledPlug() ) )
        self.assertFalse( "enabled1" in node )

    def testAffects( self ) :
        planeNode = GafferScene.Plane()
        sphereNode = GafferScene.Sphere()
        switchNode = GafferScene.SceneSwitch()
        switchNode["in"].setInput( planeNode["out"] )
        switchNode["in1"].setInput( sphereNode["out"] )

        # Each child of an input plug affects exactly the matching output child.
        for inputPlug in ( switchNode["in"], switchNode["in1"] ) :
            for childName in inputPlug.keys() :
                affected = switchNode.affects( inputPlug[childName] )
                self.assertEqual( len( affected ), 1 )
                self.assertTrue( affected[0].isSame( switchNode["out"][childName] ) )

        # The enabled and index plugs each affect the whole output.
        self.assertEqual( set( switchNode.affects( switchNode["enabled"] ) ), set( switchNode["out"].children() ) )
        self.assertEqual( set( switchNode.affects( switchNode["index"] ) ), set( switchNode["out"].children() ) )

    def testSwitching( self ) :
        planeNode = GafferScene.Plane()
        sphereNode = GafferScene.Sphere()
        switchNode = GafferScene.SceneSwitch()
        switchNode["in"].setInput( planeNode["out"] )
        switchNode["in1"].setInput( sphereNode["out"] )

        # Index 0 (the default) selects the first input.
        self.assertScenesEqual( switchNode["out"], planeNode["out"] )
        self.assertSceneHashesEqual( switchNode["out"], planeNode["out"] )

        # Index 1 selects the second input.
        switchNode["index"].setValue( 1 )
        self.assertScenesEqual( switchNode["out"], sphereNode["out"] )
        self.assertSceneHashesEqual( switchNode["out"], sphereNode["out"] )

        # Disabling falls back to the first input regardless of index.
        switchNode["enabled"].setValue( False )
        self.assertScenesEqual( switchNode["out"], planeNode["out"] )
        self.assertSceneHashesEqual( switchNode["out"], planeNode["out"] )

    def testSerialisation( self ) :
        script = Gaffer.ScriptNode()
        script["switch"] = GafferScene.SceneSwitch()
        script["plane"] = GafferScene.Plane()
        script["sphere"] = GafferScene.Sphere()
        script["switch"]["in"].setInput( script["plane"]["out"] )
        script["switch"]["in1"].setInput( script["sphere"]["out"] )

        # Round-trip through serialisation and check the connections survive.
        rebuilt = Gaffer.ScriptNode()
        rebuilt.execute( script.serialise() )
        self.assertTrue( rebuilt["switch"]["in"].getInput().isSame( rebuilt["plane"]["out"] ) )
        self.assertTrue( rebuilt["switch"]["in1"].getInput().isSame( rebuilt["sphere"]["out"] ) )
        self.assertTrue( rebuilt["switch"]["in2"].getInput() is None )
if __name__ == "__main__":
	# Run the tests when this module is executed directly.
	unittest.main()
| goddardl/gaffer | python/GafferSceneTest/SceneSwitchTest.py | Python | bsd-3-clause | 4,249 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from marionette.wait import Wait
class TelephonyTestCommon(object):
    """Shared Marionette helpers for Firefox OS telephony certification tests.

    Each method injects JavaScript into the device via Marionette to drive
    the mozTelephony WebAPI (dial, answer, hold, resume, hang up, mute,
    speaker), then polls flags the injected event handlers set on
    ``window.wrappedJSObject`` to verify the expected call events fired.
    """

    # JS helper installed on the page: snapshots mozTelephony.calls into a
    # plain object (number/state per call) that can safely be returned
    # across the Marionette boundary.
    returnable_calls = """
        window.wrappedJSObject.get_returnable_calls = function() {
            let calls = {};
            for (let i in window.navigator.mozTelephony.calls) {
                let call = {
                    number: window.navigator.mozTelephony.calls[i].number,
                    state: window.navigator.mozTelephony.calls[i].state
                }
                calls[i] = call;
            }
            calls['length'] = window.navigator.mozTelephony.calls['length'];
            return calls;
        }
    """

    def __init__(self):
        # Snapshots ({number, state} dicts) of calls answered so far.
        self.active_call_list = []

    def setup_incoming_call(self):
        """Install 'incoming'/'callschanged' listeners and wait for the
        Telephony API to report ready."""
        self.marionette.execute_script(self.returnable_calls)
        # listen for and answer incoming call
        self.marionette.execute_async_script("""
        var telephony = window.navigator.mozTelephony;
        window.wrappedJSObject.received_incoming = false;
        telephony.onincoming = function onincoming(event) {
            log("Received 'incoming' call event.");
            window.wrappedJSObject.received_incoming = true;
            window.wrappedJSObject.incoming_call = event.call;
            window.wrappedJSObject.returnable_incoming_call = {
                number: event.call.number,
                state: event.call.state
            };
            window.wrappedJSObject.calls = telephony.calls;
        };
        window.wrappedJSObject.received_callschanged = false;
        telephony.oncallschanged = function oncallschanged(event) {
            log("Received Telephony 'oncallschanged' event.");
            window.wrappedJSObject.received_callschanged = true;
        };
        window.wrappedJSObject.received_ready = false;
        telephony.ready.then(
            function() {
                console.log("Telephony got ready");
                window.wrappedJSObject.received_ready = true;
            },
            function() {
                console.log("Telephony not ready");
                window.wrappedJSObject.received_ready = false;
            }
        );
        marionetteScriptFinished(1);
        """, special_powers=True)
        wait = Wait(self.marionette, timeout=90, interval=0.5)
        try:
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_ready"))
        except:
            self.fail("Telephony.oncallschanged event not found, but should have been "
                      "since initiated incoming call to firefox OS device")

    def verify_incoming_call(self):
        """Assert the 'incoming' event fired and the call state is 'incoming';
        always detaches the onincoming handler afterwards."""
        try:
            received = self.marionette.execute_script("return window.wrappedJSObject.received_incoming")
            self.assertTrue(received, "Incoming call not received (Telephony.onincoming event not found)")
            self.incoming_call = self.marionette.execute_script("return window.wrappedJSObject.returnable_incoming_call")
            self.assertEqual(self.incoming_call['state'], "incoming", "Call state should be 'incoming'")
        finally:
            self.marionette.execute_script("window.navigator.mozTelephony.onincoming = null;")

    def answer_call(self, incoming=True):
        """Answer the pending call and wait for connect events.

        Incoming calls are answered via the WebAPI; for outgoing calls the
        tester answers on the remote phone.  The connected call's snapshot
        is appended to ``self.active_call_list``.
        """
        # answer incoming call via the webapi; have user answer outgoing call on target
        self.marionette.execute_async_script("""
        let incoming = arguments[0];
        if (incoming) {
            var call_to_answer = window.wrappedJSObject.incoming_call;
        } else {
            var call_to_answer = window.wrappedJSObject.outgoing_call;
        };
        window.wrappedJSObject.connecting_call_ok = false;
        call_to_answer.onconnecting = function onconnecting(event) {
            log("Received 'onconnecting' call event.");
            if (event.call.state == "connecting") {
                window.wrappedJSObject.connecting_call_ok = true;
            };
        };
        window.wrappedJSObject.received_statechange = false;
        call_to_answer.onstatechange = function onstatechange(event) {
            log("Received TelephonyCall 'onstatechange' event.");
            if (event.call.state == "connected") {
                window.wrappedJSObject.received_statechange = true;
            };
        };
        window.wrappedJSObject.connected_call_ok = false;
        call_to_answer.onconnected = function onconnected(event) {
            log("Received 'onconnected' call event.");
            if (event.call.state == "connected") {
                window.wrappedJSObject.active_call = window.navigator.mozTelephony.active;
                window.wrappedJSObject.returnable_active_call = {
                    state: window.navigator.mozTelephony.active.state,
                    number: window.navigator.mozTelephony.active.number
                };
                window.wrappedJSObject.connected_call_ok = true;
            };
        };
        // answer incoming call via webapi; outgoing will be by user interaction
        if (incoming) {
            call_to_answer.answer();
        };
        marionetteScriptFinished(1);
        """, script_args=[incoming], special_powers=True)

        # answer outgoing call via user answering on target
        if not incoming:
            self.instruct("Please answer the call on the target phone, then click 'OK'")

        # should have received both events associated with answering a call
        wait = Wait(self.marionette, timeout=90, interval=0.5)
        try:
            if incoming: # only receive 'onconnecting' for incoming call
                wait.until(lambda x: x.execute_script("return window.wrappedJSObject.connecting_call_ok"))
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.connected_call_ok"))
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_statechange"))
        except:
            self.fail("Failed to answer call")

        # append new call to the active call list
        self.active_call_list.append(self.marionette.execute_script("return window.wrappedJSObject.returnable_active_call"))

    def user_guided_incoming_call(self):
        """Full incoming-call flow: install listeners, prompt the tester to
        call the device, then verify the incoming event."""
        # ask user to call the device; answer and verify via webapi
        self.setup_incoming_call()
        self.instruct("From a different phone, call the Firefox OS device, and when you \
                    hear the ringing signal click 'OK'")
        self.verify_incoming_call()

    def hangup_call(self, call_type="Active", remote_hangup=False, active_call_selected=0):
        """Hang up an Incoming/Outgoing/Active call and verify disconnect events.

        remote_hangup=True means the tester ends the call from the other
        phone (so no 'disconnecting' event is expected locally).  For
        call_type "Active", active_call_selected indexes telephony.calls and
        the matching entry is removed from ``self.active_call_list``.
        """
        # hangup the active/incoming call, verify
        self.marionette.execute_async_script("""
        var call_type = arguments[0];
        var remote_hangup = arguments[1];
        var active_call_selected = arguments[2];
        window.wrappedJSObject.rcvd_error = false;
        if (call_type == "Incoming") {
            var call_to_hangup = window.wrappedJSObject.incoming_call;
        } else if (call_type == "Outgoing") {
            var call_to_hangup = window.wrappedJSObject.outgoing_call;
        } else {
            if (active_call_selected >=0 && active_call_selected < window.wrappedJSObject.calls.length) {
                var call_to_hangup = window.wrappedJSObject.calls[active_call_selected];
            } else {
                window.wrappedJSObject.rcvd_error = true;
                marionetteScriptFinished(0);
            }
        };
        window.wrappedJSObject.disconnecting_call_ok = false;
        call_to_hangup.ondisconnecting = function ondisconnecting(event) {
            log("Received 'ondisconnecting' call event.");
            if (event.call.state == "disconnecting") {
                window.wrappedJSObject.disconnecting_call_ok = true;
            };
        };
        window.wrappedJSObject.received_statechange = false;
        call_to_hangup.onstatechange = function onstatechange(event) {
            log("Received TelephonyCall 'onstatechange' event.");
            if (event.call.state == "disconnected") {
                window.wrappedJSObject.received_statechange = true;
            };
        };
        window.wrappedJSObject.disconnected_call_ok = false;
        call_to_hangup.ondisconnected = function ondisconnected(event) {
            log("Received 'ondisconnected' call event.");
            if (event.call.state == "disconnected") {
                window.wrappedJSObject.disconnected_call_ok = true;
            };
        };
        if (!remote_hangup) {
            call_to_hangup.hangUp();
        }
        if (window.wrappedJSObject.calls.length > 0) {
            window.wrappedJSObject.calls[0].resume();
        }
        marionetteScriptFinished(1);
        """, script_args=[call_type, remote_hangup, active_call_selected], special_powers=True)

        if remote_hangup == False:
            if self.marionette.execute_script("return window.wrappedJSObject.rcvd_error;"):
                self.fail("Received invalid value for active_call_selected")
            # should have received both events associated with a active call hangup
            wait = Wait(self.marionette, timeout=90, interval=0.5)
            try:
                wait.until(lambda x: x.execute_script("return window.wrappedJSObject.disconnecting_call_ok"))
                wait.until(lambda x: x.execute_script("return window.wrappedJSObject.disconnected_call_ok"))
            except:
                # failed to hangup
                self.fail("Failed to hangup call")
        else:
            self.instruct("Hangup the call from secondary phone and press 'OK'")
            # should have received only disconnected event associated with a active call hangup
            wait = Wait(self.marionette, timeout=90, interval=0.5)
            try:
                wait.until(lambda x: x.execute_script("return window.wrappedJSObject.disconnected_call_ok"))
            except:
                # failed to hangup
                self.fail("Failed to hangup call")
            # verify that the call disconnected from phone which is not the device under test
            disconnecting = self.marionette.execute_script("return window.wrappedJSObject.disconnecting_call_ok")
            self.assertFalse(disconnecting, "Telephony.ondisconnecting event found, but should not have been "
                                            "since the call was terminated remotely")

        # should have received events associated with a state and calls change for with or without remote hangup
        wait = Wait(self.marionette, timeout=90, interval=0.5)
        try:
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_statechange"))
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_callschanged"))
        except:
            self.fail("Failed to receive either statechange or callschanged events")

        # remove the call from list
        if call_type == "Active":
            self.active_call_list.pop(active_call_selected)

    def hold_active_call(self, user_initiate_hold=True):
        """Put the active call on hold (via WebAPI when user_initiate_hold,
        otherwise only install the listeners) and verify hold events."""
        self.marionette.execute_async_script("""
        let active = window.wrappedJSObject.active_call;
        var user_initiate_hold = arguments[0];
        window.wrappedJSObject.onholding_call_ok = false;
        active.onholding = function ondisconnecting(event) {
            log("Received 'onholding' call event.");
            if (event.call.state == "holding") {
                window.wrappedJSObject.onholding_call_ok = true;
            };
        };
        window.wrappedJSObject.received_statechange = false;
        active.onstatechange = function onstatechange(event) {
            log("Received TelephonyCall 'onstatechange' event.");
            if (event.call.state == "held") {
                window.wrappedJSObject.received_statechange = true;
            };
        };
        window.wrappedJSObject.onheld_call_ok = false;
        active.onheld = function ondisconnected(event) {
            log("Received 'onheld' call event.");
            if (event.call.state == "held") {
                window.wrappedJSObject.onheld_call_ok = true;
            };
        };
        if (user_initiate_hold) {
            active.hold();
        }
        marionetteScriptFinished(1);
        """, script_args=[user_initiate_hold], special_powers=True)

        if user_initiate_hold == True:
            # should have received both events associated with a call on hold
            wait = Wait(self.marionette, timeout=90, interval=0.5)
            try:
                wait.until(lambda x: x.execute_script("return window.wrappedJSObject.onholding_call_ok"))
                wait.until(lambda x: x.execute_script("return window.wrappedJSObject.onheld_call_ok"))
                wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_statechange"))
            except:
                # failed to hold
                self.fail("Failed to put call on hold initiated by user")

    def resume_held_call(self):
        """Resume the held active call via the WebAPI and verify events."""
        self.marionette.execute_async_script("""
        let active = window.wrappedJSObject.active_call;
        window.wrappedJSObject.received_statechange = false;
        active.onstatechange = function onstatechange(event) {
            log("Received TelephonyCall 'onstatechange' event.");
            if (event.call.state == "resuming") {
                window.wrappedJSObject.received_statechange = true;
            };
        };
        window.wrappedJSObject.onresuming_call_ok = false;
        active.onresuming = function onresuming(event) {
            log("Received 'onresuming' call event.");
            if (event.call.state == "resuming") {
                window.wrappedJSObject.onresuming_call_ok = true;
            };
        };
        active.resume();
        marionetteScriptFinished(1);
        """, special_powers=True)

        # should have received event associated with a resumed call
        wait = Wait(self.marionette, timeout=90, interval=0.5)
        try:
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.onresuming_call_ok"))
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_statechange"))
        except:
            # failed to resume
            self.fail("Failed to resume the held call")

    def initiate_outgoing_call(self, destination):
        """Dial ``destination`` via mozTelephony and wait for the dialing ->
        alerting transition; fails distinctly on a busy signal."""
        self.marionette.execute_script(self.returnable_calls)
        # use the webapi to initiate a call to the specified number
        self.marionette.execute_async_script("""
        var telephony = window.navigator.mozTelephony;
        var destination = arguments[0]
        telephony.dial(destination).then(out_call => {
            window.wrappedJSObject.received_dialing = false;
            if (out_call.state == "dialing") {
                window.wrappedJSObject.received_dialing = true;
            };
            window.wrappedJSObject.received_statechange = false;
            out_call.onstatechange = function onstatechange(event) {
                log("Received TelephonyCall 'onstatechange' event.");
                if (event.call.state == "alerting") {
                    window.wrappedJSObject.received_statechange = true;
                };
            };
            window.wrappedJSObject.received_alerting = false;
            out_call.onalerting = function onalerting(event) {
                log("Received TelephonyCall 'onalerting' event.");
                if (event.call.state == "alerting") {
                    window.wrappedJSObject.received_alerting = true;
                    window.wrappedJSObject.outgoing_call = out_call;
                    window.wrappedJSObject.returnable_outgoing_call = {
                        number: out_call.number,
                        state: out_call.state
                    };
                    window.wrappedJSObject.calls = telephony.calls;
                };
            };
            window.wrappedJSObject.received_callschanged = false;
            telephony.oncallschanged = function oncallschanged(event) {
                log("Received Telephony 'oncallschanged' event.");
                window.wrappedJSObject.received_callschanged = true;
            };
            window.wrappedJSObject.received_busy = false;
            out_call.onerror = function onerror(event) {
                log("Received TelephonyCall 'onerror' event.");
                if (event.call.error.name == "BusyError") {
                    window.wrappedJSObject.received_busy = true;
                };
            };
        });
        window.wrappedJSObject.received_ready = false;
        telephony.ready.then(
            function() {
                console.log("Telephony got ready");
                window.wrappedJSObject.received_ready = true;
            },
            function() {
                console.log("Telephony not ready");
                window.wrappedJSObject.received_ready = false;
            }
        );
        marionetteScriptFinished(1);
        """, script_args=[destination], special_powers=True)

        # should have received all events associated with an outgoing call
        wait = Wait(self.marionette, timeout=30, interval=0.5)
        try:
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_dialing"))
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_statechange"))
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_alerting"))
            wait.until(lambda x: x.execute_script("return window.wrappedJSObject.received_ready"))
        except:
            # failed to initiate call; check if the destination phone's line was busy
            busy = self.marionette.execute_script("return window.wrappedJSObject.received_busy")
            self.assertFalse(busy, "Received busy signal; ensure target phone is available and try again")
            self.fail("Failed to initiate call; mozTelephony.dial is broken -or- there is no network signal. Try again")

        # verify outgoing call state to be 'alerting'
        self.outgoing_call = self.marionette.execute_script("return window.wrappedJSObject.returnable_outgoing_call")
        self.assertEqual(self.outgoing_call['state'], "alerting", "Call state should be 'alerting'")

    def user_guided_outgoing_call(self):
        """Prompt the tester for a destination number, confirm it, then dial."""
        # ask user to input destination phone number
        destination = self.prompt("Please enter a destination phone number (not the Firefox OS device) which will receive a test call")

        # can't check format as different around the world, just ensure not empty
        if destination is None:
            self.fail("Must enter a destination phone number")

        destination = destination.strip()
        self.assertGreater(len(destination), 3, "Destination phone number entered is incomplete")

        # ask user to confirm destination number
        self.confirm('Warning: A test call will be made from the Firefox OS device to "%s" is this number correct?' % destination)

        # make the call via webapi
        self.initiate_outgoing_call(destination)

    def disable_dialer(self):
        """Stop the system dialer agent so it doesn't steal incoming/outgoing
        calls away from the certtest app; restores the previous frame."""
        # disable system dialer agent so it doesn't steal the
        # incoming/outgoing calls away from the certest app
        cur_frame = self.marionette.get_active_frame()
        self.marionette.switch_to_frame() # system app
        try:
            self.marionette.execute_async_script("""
            log("disabling system dialer agent");
            window.wrappedJSObject.dialerAgent.stop();
            marionetteScriptFinished(1);
            """, special_powers=True)
        except:
            self.fail("failed to disable dialer agent")
        finally:
            self.marionette.switch_to_frame(cur_frame)

    def enable_dialer(self):
        """Restart the system dialer agent (undoes disable_dialer)."""
        # enable system dialer agent to handle calls
        cur_frame = self.marionette.get_active_frame()
        self.marionette.switch_to_frame() # system app
        try:
            self.marionette.execute_async_script("""
            log("enabling system dialer agent");
            window.wrappedJSObject.dialerAgent.start();
            marionetteScriptFinished(1);
            """, special_powers=True)
        except:
            self.fail("failed to enable dialer agent")
        finally:
            self.marionette.switch_to_frame(cur_frame)

    def mute_call(self, enable=True):
        """Toggle telephony.muted on the device."""
        self.marionette.execute_script("""
        var enable = arguments[0];
        var telephony = window.navigator.mozTelephony;
        if (enable) {
            log("enabling mute");
            telephony.muted = true;
        } else {
            log("disabling mute");
            telephony.muted = false;
        }
        """, script_args=[enable], special_powers=True)

    def set_speaker(self, enable=True):
        """Toggle telephony.speakerEnabled on the device."""
        self.marionette.execute_script("""
        var enable = arguments[0];
        var telephony = window.navigator.mozTelephony;
        if (enable) {
            log("enabling speaker");
            telephony.speakerEnabled = true;
        } else {
            log("disabling speaker");
            telephony.speakerEnabled = false;
        }
        """, script_args=[enable], special_powers=True)
| cr/fxos-certsuite | webapi_tests/telephony/telephony_test.py | Python | mpl-2.0 | 21,066 |
import random
from itertools import combinations
class Person:
    """An agent in the corruption simulation.

    Class attributes act as shared simulation parameters:
      mu, sigma      -- Gaussian parameters used to draw stoicity (the
                        honesty tendency) for agents created without one;
      initial_amount -- starting money for every agent;
      max_money      -- running maximum of money seen across ALL agents
                        (updated by update_money, read by get_children);
      max_children   -- offspring cap for the richest agent.
    """
    mu = 0.5
    sigma = 0.2
    initial_amount = 100
    max_money = 100
    max_children = 5

    def __init__(self, police=False, criminal=False, stoicity=None):
        """Create an agent; stoicity defaults to a Gauss(mu, sigma) draw."""
        if stoicity is None:
            self.stoicity = random.gauss(Person.mu, Person.sigma)
        else:
            self.stoicity = stoicity
        self.police = police
        self.criminal = criminal
        self.money = Person.initial_amount

    def update_money(self, change):
        """Apply a (possibly negative) money change, clamped at zero.

        Also keeps the class-wide Person.max_money watermark up to date.
        """
        self.money += change
        if self.money < 0:
            self.money = 0
        if self.money > Person.max_money:
            Person.max_money = self.money

    def get_children(self):
        """Return new agents inheriting this agent's role and stoicity.

        The number of children is proportional to this agent's wealth
        relative to the richest agent ever seen (Person.max_money).
        """
        # float() guards against integer floor division under Python 2,
        # which would give every non-richest agent zero children.
        fraction = float(self.money) / Person.max_money
        children_needed = int(fraction * Person.max_children)
        return [Person(self.police, self.criminal, self.stoicity)
                for _ in range(children_needed)]

    def __update_stoicity(self, increase):
        """Drift stoicity up (honest act) or down (dishonest act) by a
        random fraction of its current value."""
        fraction = self.stoicity * random.random()
        if increase:
            self.stoicity += fraction
        else:
            self.stoicity -= fraction

    def __act_honestly(self):
        """Draw a random mark; return True when the agent acts honestly.

        Acting reinforces the behaviour: honesty raises stoicity and
        dishonesty lowers it.  Shared by give_bribe and take_bribe, which
        previously duplicated this logic verbatim.
        """
        honest = random.random() < self.stoicity
        self.__update_stoicity(honest)
        return honest

    def give_bribe(self):
        """Return True when this agent decides to offer a bribe."""
        return not self.__act_honestly()

    def take_bribe(self):
        """Return True when this officer decides to accept a bribe."""
        return not self.__act_honestly()
class Society:
    """A population of Person agents interacting in round-robin rounds.

    Money flows between agents each round; every encounter involving a
    police officer is a potential bribe situation.  The population
    reproduces every `reproduction_step` rounds with offspring counts
    proportional to wealth (see Person.get_children).
    """

    def __init__(self,
                 pop_size,
                 criminal_fraction,
                 police_fraction,
                 reproduction_step,
                 criminal_fine,
                 police_reward,
                 bribe_fine,
                 growth_rate
                 ):
        """Store simulation parameters and build the initial population.

        criminal_fine -- deducted from a criminal who does not offer a bribe
        police_reward -- paid to an officer who books a criminal or refuses a bribe
        bribe_fine    -- applied to participants of a bribe attempt
        growth_rate   -- cap on population growth per reproduction step
        """
        self.pop_size = pop_size
        self.criminal_fraction = criminal_fraction
        self.police_fraction = police_fraction
        self.reproduction_step = reproduction_step
        self.criminal_fine = criminal_fine
        self.police_reward = police_reward
        self.bribe_fine = bribe_fine
        self.growth_rate = growth_rate
        # datastructures
        self.population = self.__generate_population()
        self.__initiate_counters()
        self.time = 0

    def run(self):
        """Run 40 iterations, printing one pipe-separated stats row each."""
        self.time = 0
        # Header matches the 8 columns emitted by __record_state_of_population
        # (the original header listed 10 columns for 8 printed values).
        print('time|pop|pol|cri|sit|of_p|br_ac|inter')
        for i in range(40):
            self.__iteration()

    def __initiate_counters(self):
        """Reset the per-iteration event counters."""
        self.bribe_situations = 0
        self.bribe_offers = 0
        self.bribe_accepted = 0
        self.interactions = 0

    def __record_state_of_population(self):
        """Print one stats row matching the header emitted by run()."""
        time = self.time
        pop = self.pop_size
        pol = sum(1 for i in self.population if i.police)
        crim = sum(1 for i in self.population if i.criminal)
        sit = self.bribe_situations
        of_p = self.bribe_offers
        br_ac = self.bribe_accepted
        inter = self.interactions
        print('%d|' * 8 % (time, pop, pol, crim, sit, of_p, br_ac, inter))

    def __determine_give_take(self, org1, org2):
        "Who has to give to who"
        # A non-police agent always gives to a police agent; when both (or
        # neither) are police the direction is random (shuffled pair).
        pair = [org1, org2]
        random.shuffle(pair)
        if org1.police:
            if org2.police:
                giver, taker = pair
            else:
                giver, taker = org2, org1
        else:
            if org2.police:
                giver, taker = org1, org2
            else:
                giver, taker = pair
        return giver, taker

    def __calculate_transaction(self, bribe_given, bribe_accepted, briber_criminal):
        """Return (briber_update, police_update) money deltas for a police
        encounter.

        BUGFIX: the original ignored `bribe_accepted` entirely -- the
        "bribe corrupt officer" assignment was a dead store, unconditionally
        overwritten by the "bribe honest officer" one.  The branch now
        distinguishes the two outcomes.
        """
        briber_update, police_update = 0, 0
        if not bribe_given:
            if briber_criminal:
                # Criminal caught without offering a bribe: fined; officer rewarded.
                briber_update, police_update = -self.criminal_fine, self.police_reward
        elif bribe_accepted:
            # bribe corrupt officer
            briber_update, police_update = self.bribe_fine, self.bribe_fine
        else:
            # bribe honest officer
            briber_update, police_update = self.bribe_fine, self.police_reward
        return briber_update, police_update

    def __iteration(self):
        """Run one full round-robin round, record stats, and reproduce the
        population when the reproduction step is due."""
        self.__initiate_counters()
        # Round robin tournament
        for org1, org2 in combinations(self.population, 2):
            self.interactions += 1
            giver, taker = self.__determine_give_take(org1, org2)
            if org1.police or org2.police:
                # Police encounter: a bribe may be offered and/or accepted.
                self.bribe_situations += 1
                briber, police = giver, taker
                bribe_given, bribe_accepted = False, False
                if briber.give_bribe():
                    bribe_given = True
                    self.bribe_offers += 1
                    if police.take_bribe():
                        bribe_accepted = True
                        self.bribe_accepted += 1
                briber_update, police_update = self.__calculate_transaction(
                    bribe_given, bribe_accepted, briber.criminal)
                briber.update_money(briber_update)
                police.update_money(police_update)
            else:
                # Ordinary trade: up to 30% of the giver's money changes hands.
                transaction_amount = random.random() * giver.money * 0.3
                giver.update_money(-transaction_amount)
                taker.update_money(transaction_amount)
        self.__record_state_of_population()
        if self.time % self.reproduction_step == 0:
            self.__reproduce_population()
        self.time += 1

    def __reproduce_population(self):
        """Replace the population with wealth-proportional offspring.

        Random deaths cap the new population at growth_rate * pop_size,
        then the cached size/fraction statistics are recomputed.
        """
        new_pop = []
        for org in self.population:
            new_pop.extend(org.get_children())
        # simulate the random deaths
        random.shuffle(new_pop)
        mark = int(self.growth_rate * self.pop_size)
        self.population = new_pop[:mark]
        # generate data
        self.pop_size = len(self.population)
        criminals = sum(1 for i in self.population if i.criminal)
        police = sum(1 for i in self.population if i.police)
        try:
            self.criminal_fraction = float(criminals) / self.pop_size
            self.police_fraction = float(police) / self.pop_size
        except ZeroDivisionError:
            # Population extinct: stop the simulation.
            import sys
            sys.exit()

    def __generate_population(self):
        """Create the initial population with the configured role fractions."""
        population = []
        for i in range(self.pop_size):
            # Two independent draws, in the same order as the original code:
            # first decides criminality, second decides police role.
            criminal = random.random() < self.criminal_fraction
            police = random.random() < self.police_fraction
            population.append(Person(police, criminal))
        return population
if __name__ == '__main__':
    # Earlier experiment configurations, kept for reference:
    # soc = Society(100, 0.1, 0.1, 1, 5, 4, 6, 1.1)
    # soc.run()
    # soc = Society(100, 0.5, 0.2, 1, 5, 4, 6, 1.1)
    # soc.run()
    # soc = Society(100, 0.5, 0.2, 1, 5, 5, 5, 1.1)
    # soc.run()
    # NOTE(review): positional arguments are presumably (pop_size,
    # criminal_fraction, police_fraction, fines/rewards..., growth_rate) --
    # confirm against the Society constructor before changing them.
    soc = Society(100, 0.5, 0.2, 1, 0, 5, 0, 1.1)
    soc.run()
| theSage21/corruption-simulation | source/society.py | Python | gpl-2.0 | 7,272 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Bring in all of the public TensorFlow interface into this
# module.
# pylint: disable=wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import *
| HaebinShin/tensorflow | tensorflow/__init__.py | Python | apache-2.0 | 936 |
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
# Each demo page is a static template; the admin and (in DEBUG only) the
# debug toolbar are mounted alongside them.
urlpatterns = [
    url(r'^$', TemplateView.as_view(template_name='index.html')),
    url(r'^jquery/$', TemplateView.as_view(template_name='jquery/index.html')),
    url(r'^mootools/$', TemplateView.as_view(template_name='mootools/index.html')),
    url(r'^prototype/$', TemplateView.as_view(template_name='prototype/index.html')),
    url(r'^admin/', include(admin.site.urls)),
]

if settings.DEBUG:
    # expose django-debug-toolbar routes in development only
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
| pevzi/django-debug-toolbar | example/urls.py | Python | bsd-3-clause | 666 |
import json
from uuid import uuid4
import pytest
import transaction
from osgeo import gdal, ogr
from nextgisweb.auth import User
from nextgisweb.models import DBSession
from nextgisweb.spatial_ref_sys import SRS
from nextgisweb.vector_layer import VectorLayer
from nextgisweb.wfsserver.model import Service as WFSService, Layer as WFSLayer
# WFS protocol versions exercised by the tests in this module.
TEST_WFS_VERSIONS = ('2.0.2', '2.0.0', '1.1.0', '1.0.0', )
@pytest.fixture(scope='module')
def vlayer_id(ngw_resource_group):
    """Create a vector layer whose attribute names are deliberately hostile
    to WFS (cyrillic letters, characters illegal in XML element names).

    Yields the layer id; deletes the layer on teardown.
    """
    with transaction.manager:
        res_vl = VectorLayer(
            parent_id=ngw_resource_group, display_name='test_cyrillic',
            owner_user=User.by_keyname('administrator'),
            srs=SRS.filter_by(id=3857).one(),
            tbl_uuid=uuid4().hex,
        ).persist()

        # two points with non-ASCII / XML-unsafe field names
        geojson = {
            'type': 'FeatureCollection',
            'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:EPSG::3857'}},
            'features': [{
                'type': 'Feature',
                'properties': {'field1': 1, 'поле2': 'значение1', '!field3': '!@#$%^&*()значение1'},
                'geometry': {'type': 'Point', 'coordinates': [0, 0]}
            }, {
                'type': 'Feature',
                'properties': {'field1': 2, 'поле2': 'значение2', '!field3': '!@#$%^&*()значение2'},
                'geometry': {'type': 'Point', 'coordinates': [10, 10]}
            }]
        }

        # feed the GeoJSON through OGR to initialise and fill the layer
        dsource = ogr.Open(json.dumps(geojson))
        layer = dsource.GetLayer(0)

        res_vl.setup_from_ogr(layer)
        res_vl.load_from_ogr(layer)

        DBSession.flush()
        DBSession.expunge(res_vl)

    yield res_vl.id

    # teardown: drop the layer created above
    with transaction.manager:
        DBSession.delete(VectorLayer.filter_by(id=res_vl.id).one())
@pytest.fixture(scope='module')
def service_id(vlayer_id, ngw_resource_group):
    """Publish the test vector layer through a WFS service.

    Yields the service id; deletes the service on teardown.
    """
    with transaction.manager:
        res_wfs = WFSService(
            parent_id=ngw_resource_group, display_name='test_cyrillic_service',
            owner_user=User.by_keyname('administrator'),
        ).persist()
        # expose the layer under the keyname 'test'
        res_wfs.layers.append(WFSLayer(
            resource_id=vlayer_id, keyname='test',
            display_name='test', maxfeatures=1000,
        ))

        DBSession.flush()
        DBSession.expunge(res_wfs)

    yield res_wfs.id

    # teardown: drop the service created above
    with transaction.manager:
        DBSession.delete(WFSService.filter_by(id=res_wfs.id).one())
def test_cyrillic(service_id, vlayer_id, ngw_httptest_app, ngw_auth_administrator):
    """Non-ASCII and XML-unsafe field names must survive a WFS round trip."""
    wfs_driver = ogr.GetDriverByName('WFS')
    datasource = wfs_driver.Open('WFS:{}/api/resource/{}/wfs'.format(
        ngw_httptest_app.base_url, service_id), True)
    assert datasource is not None, gdal.GetLastErrorMsg()

    wfs_layer = datasource.GetLayer(0)
    layer_defn = wfs_layer.GetLayerDefn()
    assert layer_defn.GetFieldCount() == 4

    # Knock out the fields addressable by their original names; exactly one
    # field -- the XML-unsafe one -- must remain, under a mangled name.
    remaining = list(range(layer_defn.GetFieldCount()))
    remaining.remove(layer_defn.GetGeomFieldIndex('geom'))
    remaining.remove(layer_defn.GetFieldIndex('field1'))
    remaining.remove(layer_defn.GetFieldIndex('поле2'))
    assert len(remaining) == 1

    mangled_defn = layer_defn.GetFieldDefn(remaining[0])
    mangled_name = mangled_defn.GetName()
    assert mangled_name.startswith('wfsfld_')

    # Write through WFS using the mangled name...
    feat = wfs_layer.GetFeature(1)
    new_value = 'test value!'
    feat.SetField(mangled_name, new_value)
    status = wfs_layer.SetFeature(feat)
    assert status == 0, gdal.GetLastErrorMsg()

    # ...and verify via the REST API that the original field was updated.
    fetched = ngw_httptest_app.get('/api/resource/%s/feature/1' % vlayer_id).json()
    assert fetched['fields']['!field3'] == new_value
| nextgis/nextgisweb | nextgisweb/wfsserver/test/test_data.py | Python | gpl-3.0 | 3,485 |
#############################################################################
# read_parameter_cfg.py
# this file is part of GEOCUBIT #
# #
# Created by Emanuele Casarotti #
# Copyright (c) 2008 Istituto Nazionale di Geofisica e Vulcanologia #
# #
#############################################################################
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., #
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #
# #
#############################################################################
import os
def readcfg(filename=None, importmenu=False, mpiflag=False):
    """
    read the configuration file, filename is defined in the
    command line arguments (see menu.py)

    Returns an attrdict (a dict whose keys are also attributes) with every
    default filled in, overridden by the values found in the cfg file.
    """
    # Resolve where the cfg name and run flags come from: the interactive
    # menu module, an explicit filename, or fail.
    if importmenu:
        import menu as menu
        cfgname = menu.cfg_name
        id_proc = menu.id_proc
        create_plane = menu.create_plane
        menusurface = menu.surface
        single = menu.single
    elif filename:
        cfgname = filename
        id_proc = 0
        menu = False
        create_plane = False
        menusurface = False
        single = False
    else:
        print 'error: no configuration file'
        import sys
        sys.exit()
    #
    # here I can use pyproj but I prefere to include a function in pure python
    # in order to avoid an additional installation
    from utilities import geo2utm, get_cubit_version
    #
    #
    import ConfigParser
    config = ConfigParser.ConfigParser()
    #
    #

    def converter(s):
        # Turn a raw cfg string into bool/None/number/list as appropriate.
        if s == 'True':
            value = True
        elif s == 'False':
            value = False
        elif s == 'None':
            value = None
        else:
            # comma-separated values become a list (empty tokens dropped)
            if s.count(',') != 0:
                value = s.split(',')
                while value.count(''):
                    value.remove('')
            else:
                value = s
        # best-effort numeric conversion; non-numeric strings pass through
        try:
            if type(value).__name__ == 'str':
                if str(value).count('.') != 0:
                    value = float(value)
                else:
                    value = int(value)
            else:
                if str(value).count('.') != 0:
                    value = map(float, value)
                else:
                    value = map(int, value)
        except:
            pass
        return value

    def section_dict(section):
        # Read one cfg section into a dict; unparsable options become None.
        dict_o = {}
        options = config.options(section)
        for option in options:
            try:
                value = converter(config.get(section, option))
                dict_o[option] = value
            except:
                dict_o[option] = None
        return dict_o

    class attrdict(dict):
        # dict whose items double as attributes (cfg.key == cfg['key'])
        def __init__(self, *args, **kwargs):
            dict.__init__(self, *args, **kwargs)
            self.__dict__ = self

        def __str__(self):
            # debugging aid: dump keys sorted, blank line between initials
            names = []
            values = []
            for name, value in self.items():
                names.append(name)
                values.append(value)
            print names, values
            a = zip(names, values)
            a.sort()
            arc = ''
            for o in a:
                if o[0][0] != arc:
                    print
                print o[0], ' -> ', o[1]
                arc = o[0][0]
            print __name__
            return '____'
    #
    dcfg = {}
    #
    # CONSTANTS
    dcfg['osystem'] = 'linux'
    dcfg['debug_cfg'] = False
    dcfg['version_cubit'] = get_cubit_version()
    dcfg['checkbound'] = False
    dcfg['top_partitioner'] = 10000
    # if n is the vertical component of the normal at a surface pointing
    # horizontally, when -tres < n < tres then the surface is vertical
    dcfg['tres'] = 0.3
    dcfg['precision'] = 0.02  # precision for the boundary check (0.02 m)
    #
    # INIT -- defaults for every recognised option; the cfg file overrides
    dcfg['debug'] = True
    dcfg['cubit_info'] = "on"
    dcfg['echo_info'] = "on"
    dcfg['jou_info'] = "on"
    dcfg['jer_info'] = "on"
    dcfg['monitored_cpu'] = 0
    dcfg['parallel_import'] = True
    dcfg['save_geometry_cubit'] = True
    dcfg['save_surface_cubit'] = False
    dcfg['save_geometry_paraview'] = False  # not implemented
    dcfg['save_geometry_ACIS'] = False  # not implemented
    dcfg['export_exodus_mesh'] = False
    dcfg['manual_adj'] = False
    dcfg['play_adj'] = False
    dcfg['no_adj'] = False
    dcfg['nx'] = False
    dcfg['ny'] = False
    dcfg['nstep'] = False
    dcfg['localdir_is_globaldir'] = True
    dcfg['refinement_depth'] = []
    dcfg['scratchdir'] = None
    dcfg['map_meshing_type'] = 'regularmap'
    dcfg['4sideparallel'] = True
    dcfg["outlinebasin_curve"] = False
    dcfg["transition_curve"] = False
    dcfg["faulttrace_curve"] = False
    dcfg['geological_imprint'] = False
    dcfg['number_processor_xi'] = 1
    dcfg['number_processor_eta'] = 1
    dcfg['filename'] = None
    dcfg['actual_vertical_interval_top_layer'] = 1
    dcfg['coarsening_top_layer'] = False
    dcfg['refineinsidevol'] = False
    dcfg['sea'] = False
    dcfg['seaup'] = False
    dcfg['sea_level'] = False
    dcfg['sea_threshold'] = False
    dcfg['subduction'] = False
    dcfg['subduction_thres'] = 500
    # if true it creates only the surface not the lofted volumes
    dcfg['debugsurface'] = False
    dcfg['lat_orientation'] = False
    dcfg['irregulargridded_surf'] = False
    dcfg['chktop'] = False
    dcfg['smoothing'] = False
    dcfg['ntripl'] = 0
    dcfg['debug_geometry'] = False
    dcfg['topflat'] = False
    # CUBIT >= 13.1 chooses the volume-creation method itself
    if float(dcfg['version_cubit']) >= 13.1:
        dcfg['volumecreation_method'] = None
    else:
        dcfg['volumecreation_method'] = 'loft'
    dcfg['nsurf'] = None
    if cfgname:
        # merge every known section of the cfg file over the defaults
        config.read(cfgname)
        sections = ['cubit.options', 'simulation.cpu_parameters',
                    'geometry.surfaces', 'geometry.volumes',
                    'geometry.volumes.layercake', 'geometry.volumes.flatcake',
                    'geometry.volumes.partitioner', 'geometry.partitioner',
                    'meshing']
        for section in sections:
            try:
                d = section_dict(section)
                dcfg.update(d)
            except:
                pass
        # print dcfg
        if dcfg['nsurf']:
            # collect the per-surface sections surface<i>.parameters
            surface_name = []
            num_x = []
            num_y = []
            xstep = []
            ystep = []
            step = []
            directionx = []
            directiony = []
            unit2 = []
            surf_type = []
            delimiter = []
            nsurf = int(dcfg['nsurf'])
            for i in range(1, nsurf + 1):
                section = 'surface' + str(i) + '.parameters'
                d = section_dict(section)
                surface_name.append(d['name'])
                surf_type.append(d['surf_type'])
                unit2.append(d['unit_surf'])
                if d['surf_type'] == 'regular_grid':
                    xstep.append(d['step_along_x'])
                    ystep.append(d['step_along_y'])
                    num_x.append(d['number_point_along_x'])
                    num_y.append(d['number_point_along_y'])
                elif d['surf_type'] == 'skin':
                    step.append(d['step'])
                    try:
                        delimiter.append(d['delimiter'])
                    except:
                        pass
                    directionx.append(d['directionx'])
                    directiony.append(d['directiony'])
            dcfg['surface_name'] = surface_name
            dcfg['num_x'] = num_x
            dcfg['num_y'] = num_y
            dcfg['xstep'] = xstep
            dcfg['ystep'] = ystep
            dcfg['step'] = step
            dcfg['directionx'] = directionx
            dcfg['directiony'] = directiony
            dcfg['unit2'] = unit2
            dcfg['surf_type'] = surf_type
            dcfg['delimiter'] = delimiter
    # derive the UTM bounding box corners from the lon/lat extent; silently
    # skipped when the extent keys are absent from the cfg
    try:
        tres = 0
        xmin, ymin = geo2utm(dcfg['longitude_min'], dcfg[
                             'latitude_min'], dcfg['unit'])
        xmax, ymax = geo2utm(dcfg['longitude_max'], dcfg[
                             'latitude_max'], dcfg['unit'])
        dcfg['xmin'] = xmin
        dcfg['ymin'] = ymin
        dcfg['xmax'] = xmax
        dcfg['ymax'] = ymax
        x1, y1 = geo2utm(dcfg['longitude_min'], dcfg[
                         'latitude_min'], dcfg['unit'])
        x2, y2 = geo2utm(dcfg['longitude_max'], dcfg[
                         'latitude_min'], dcfg['unit'])
        x3, y3 = geo2utm(dcfg['longitude_max'], dcfg[
                         'latitude_max'], dcfg['unit'])
        x4, y4 = geo2utm(dcfg['longitude_min'], dcfg[
                         'latitude_max'], dcfg['unit'])
        dcfg['x1_box'] = x1
        dcfg['y1_box'] = y1
        dcfg['x2_box'] = x2
        dcfg['y2_box'] = y2
        dcfg['x3_box'] = x3
        dcfg['y3_box'] = y3
        dcfg['x4_box'] = x4
        dcfg['y4_box'] = y4
        dcfg['tres_boundarydetection'] = tres
    except:
        pass
    if dcfg['irregulargridded_surf']:
        print 'test'
        # irregular grids keep the raw lon/lat extent as the bounding box
        dcfg['xmin'] = dcfg['longitude_min']
        dcfg['ymin'] = dcfg['latitude_min']
        dcfg['xmax'] = dcfg['longitude_max']
        dcfg['ymax'] = dcfg['latitude_max']
    if dcfg['sea']:
        # fill sea defaults and force the top-layer coarsening options
        if not dcfg['sea_level']:
            dcfg['sea_level'] = 0
        if not dcfg['sea_threshold']:
            dcfg['sea_threshold'] = -200
        dcfg['actual_vertical_interval_top_layer'] = 1
        dcfg['coarsening_top_layer'] = True
    dcfg['optionsea'] = {'sea': dcfg['sea'],
                         'seaup': dcfg['seaup'],
                         'sealevel': dcfg['sea_level'],
                         'seathres': dcfg['sea_threshold']}
    cfg = attrdict(dcfg)
    if menu:
        # normalise the directories coming from the menu module
        try:
            if cfg.working_dir[-1] == '/':
                cfg.working_dir = cfg.working_dir[:-1]
            if cfg.working_dir[0] != '/':
                cfg.working_dir = './' + cfg.working_dir
        except:
            cfg.working_dir = os.getcwd()
        try:
            if cfg.output_dir[-1] == '/':
                cfg.output_dir = cfg.output_dir[:-1]
            if cfg.output_dir[0] != '/':
                cfg.output_dir = './' + cfg.output_dir
        except:
            cfg.output_dir = os.getcwd()
        try:
            if cfg.SPECFEM3D_output_dir[-1] == '/':
                cfg.SPECFEM3D_output_dir = cfg.SPECFEM3D_output_dir[:-1]
            if cfg.SPECFEM3D_output_dir[0] != '/':
                cfg.SPECFEM3D_output_dir = './' + cfg.SPECFEM3D_output_dir
        except:
            cfg.SPECFEM3D_output_dir = os.getcwd()
        cfg.single = single
        if menusurface:
            # single-surface mode driven entirely from the menu
            cfg.nsurf = 1
            cfg.name = [menu.surface_name]
            cfg.num_x = [menu.num_x]
            # NOTE(review): num_y is filled from menu.num_x -- looks like a
            # typo for menu.num_y; confirm against menu.py before changing.
            cfg.num_y = [menu.num_x]
            cfg.unit = [menu.unit]
            cfg.surf_type = [menu.surf_type]
            try:
                cfg.delimiter = [menu.delimiter]
            except:
                cfg.delimiter = [' ']
            cfg.directionx = [menu.directionx]
            cfg.directiony = [menu.directiony]
    else:
        cfg.SPECFEM3D_output_dir = os.getcwd()
    # derive the processor grid from the node count when not given explicitly
    if not cfg.number_processor_eta and cfg.nodes:
        cfg.number_processor_xi, cfg.number_processor_eta = split(cfg.nodes)
    if isinstance(cfg.filename, str):
        cfg.filename = [cfg.filename]
    # aliases kept for backward compatibility with older option names
    try:
        cfg.nproc_eta = cfg.number_processor_eta
        cfg.nproc_xi = cfg.number_processor_xi
        cfg.cpuy = cfg.number_processor_eta
        cfg.cpux = cfg.number_processor_xi
    except:
        pass
    if create_plane:
        cfg.x1 = map(float, menu.x1.split(','))
        cfg.x2 = map(float, menu.x2.split(','))
        cfg.x3 = map(float, menu.x3.split(','))
        cfg.x4 = map(float, menu.x4.split(','))
        cfg.unit = menu.unit
    #
    if menu:
        cfg.id_proc = menu.id_proc
    else:
        cfg.id_proc = id_proc
    #
    # promote scalar options that downstream code iterates over to lists
    try:
        if isinstance(cfg.tripl, int):
            cfg.tripl = [cfg.tripl]
    except:
        pass
    try:
        if isinstance(cfg.iv_interval, int):
            cfg.iv_interval = [cfg.iv_interval]
    except:
        pass
    try:
        if isinstance(cfg.refinement_depth, int):
            cfg.refinement_depth = [cfg.refinement_depth]
    except:
        pass
    return cfg
class getparameter(dict):
    """dict whose items are also reachable as attributes (d.key == d['key'])."""
    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # alias the attribute namespace to the mapping itself
        self.__dict__ = self
def split(x):
    """Return the factor pair (c, x // c) of x with c closest to (and at
    least) sqrt(x).

    Used to split a node count into a near-square processor grid
    (number_processor_xi, number_processor_eta). Uses integer `%` and `//`
    instead of math.fmod and `/` so the result stays an exact integer pair
    on both Python 2 and Python 3.
    """
    import math
    c = int(math.sqrt(x))
    if c < 1:
        c = 1  # guard: keeps x == 0 from crashing the modulo below
    while x % c:
        c = c + 1
    return c, x // c
| casarotti/GEOCUBIT--experimental | geocubitlib/read_parameter_cfg.py | Python | gpl-3.0 | 14,030 |
# Functional test that boots a Leon3 machine and checks its serial console.
#
# Copyright (c) Philippe Mathieu-Daudé <f4bug@amsat.org>
#
# This work is licensed under the terms of the GNU GPL, version 2 or
# later. See the COPYING file in the top-level directory.
from avocado_qemu import Test
from avocado_qemu import wait_for_console_pattern
from avocado import skip
class Leon3Machine(Test):
    """Boot-test the leon3_generic machine with a HelenOS uImage kernel."""
    # generous limit: includes downloading the kernel asset on first run
    timeout = 60

    @skip("Test currently broken")
    # A Window Underflow exception occurs before booting the kernel,
    # and QEMU exit calling cpu_abort(), which makes this test to fail.
    def test_leon3_helenos_uimage(self):
        """
        :avocado: tags=arch:sparc
        :avocado: tags=machine:leon3_generic
        :avocado: tags=binfmt:uimage
        """
        kernel_url = ('http://www.helenos.org/releases/'
                      'HelenOS-0.6.0-sparc32-leon3.bin')
        kernel_hash = 'a88c9cfdb8430c66650e5290a08765f9bf049a30'
        # fetch_asset caches the download and verifies the sha1 hash
        kernel_path = self.fetch_asset(kernel_url, asset_hash=kernel_hash)

        self.vm.set_console()
        self.vm.add_args('-kernel', kernel_path)
        self.vm.launch()

        # boot banner, then the hand-off to the kernel
        wait_for_console_pattern(self, 'Copyright (c) 2001-2014 HelenOS project')
        wait_for_console_pattern(self, 'Booting the kernel ...')
| dslutz/qemu | tests/acceptance/machine_sparc_leon3.py | Python | gpl-2.0 | 1,283 |
#!/usr/bin/env python3
"""
This script has two arguments. A sitemap url and a api profile.
It will download the entire profile from the NPO Front end api, and it will also download the entire sitemap.
Then, it compares the found URL's in both. They should represent the same set.
If there are URL in the API which are not in the Sitemap, which are indeed not existing (give 404's) then the script
supposes this is an error and deletes the object from the API.
If objects are in the API but not in the sitemap, then we suppose the sitemap is outdated.
If objects are in the sitemap but not in the API then there are two possibilities
- The object is in the API, but not in the profile
- The object does not existing in the API at all
In both cases the object needs the be reindexed from the CMS.
"""
import json
import os
import pickle
import requests
from npoapi import Pages
from npoapi import PagesBackend
import urllib.parse
api = Pages().command_line_client()
backend = PagesBackend(env=api.actualenv).configured_login()
api.add_argument('profile', type=str, nargs='?', help='profile')
args = api.parse_args()
profile = args.profile
# NOTE(review): shadows the builtin filter() at module scope and is never
# called in this script -- presumably a placeholder for result filtering.
def filter(item):
    return True
def get_urls_from_api_search(max=None) -> set:
    """Collect page URLs for the configured profile from the NPO pages API.

    Pages through the search endpoint (240 items per request) until a
    request yields no items. A URL is collected when it uses the plain
    http scheme, or when its page carries a 'crid://vpro/media/vpro/' crid.

    :param max: optional soft cap; paging stops once more than `max` URLs
        have been collected. (Name kept for backward compatibility even
        though it shadows the builtin `max`.)
    :return: set of collected URLs.
    """
    offset = 0
    new_urls = set()
    total = None
    while True:
        if total is not None:  # idiom fix: was `total != None`
            api.logger.info("API: Found %s/%s/%s urls for profile %s",
                            len(new_urls), offset, total, profile)
        result = api.search(profile=profile, offset=offset, limit=240, form="{}")
        json_object = json.loads(result)
        items = json_object['items']
        total = json_object['total']
        grow = 0
        for item in items:
            url = item['result']['url']
            if url.startswith("http:"):
                print(url)
                new_urls.add(url)
            for crid in item['result']['crids']:
                if crid.startswith("crid://vpro/media/vpro/"):
                    new_urls.add(url)
            offset += 1
            grow += 1
        if grow == 0 or (max is not None and len(new_urls) > max):
            break
    return new_urls
def main():
    """Delete every collected profile URL from the pages backend and from
    the local Elasticsearch index.

    The URL set is cached in /tmp/<profile>.p between runs, so a partially
    completed run can be resumed without re-querying the API.
    """
    url_file = "/tmp/" + profile + ".p"
    if os.path.exists(url_file):
        api_urls = pickle.load(open(url_file, "rb"))
    else:
        api_urls = get_urls_from_api_search()
        pickle.dump(api_urls, open(url_file, "wb"))

    for api_url in api_urls:
        backend.delete(api_url)
        # BUG FIX: quote() lives in urllib.parse (which is what this module
        # imports); the old call went through urllib.request, which is never
        # imported here and only resolved by accident via other imports.
        api_url_encoded = urllib.parse.quote(api_url, safe='')
        print(api_url + "-> " + api_url_encoded)
        es_url = 'http://localhost:9208/apipages/page/' + api_url_encoded
        print(es_url)
        requests.delete(es_url)
    print(api_urls)
# script entry point
if __name__ == "__main__":
    main()
| npo-poms/scripts | python/clean_from_page_api.py | Python | gpl-2.0 | 2,736 |
from .template_render import render_template
from .template_render import load_data
def load_parameters():
    """Return the template parameters read from the bundled parameters.json."""
    parameters_file = 'parameters.json'
    return load_data(parameters_file)
| Statoil/libres | python/res/fm/templating/__init__.py | Python | gpl-3.0 | 148 |
""" Thermo Scientific 'Gallery'
"""
from lims import bikaMessageFactory as _
from lims.utils import t
from lims.exportimport.instruments.resultsimport import \
AnalysisResultsImporter, InstrumentCSVResultsFileParser
class ThermoGalleryTSVParser(InstrumentCSVResultsFileParser):
    """Line parser for Thermo Scientific 'Gallery' TSV result exports.

    The file starts with free-form header lines (Date/Time/lab info), then a
    'Sample/ctrl ID ...' caption line, then one tab-separated result row per
    analysis.
    """

    def __init__(self, tsv):
        InstrumentCSVResultsFileParser.__init__(self, tsv)
        # True once the column-caption line has been consumed
        self._end_header = False
        # column captions, in file order
        self._columns = []

    def _parseline(self, line):
        # strip quoting, then route to header or result parsing
        sline = line.replace('"', '').strip()
        if self._end_header == False:
            return self.parse_headerline(sline)
        else:
            return self.parse_resultline(sline)

    def splitLine(self, line):
        # tab-separated tokens, each stripped of surrounding whitespace
        return [token.strip() for token in line.split('\t')]

    def parse_headerline(self, line):
        """ Parses header lines

            Header example:
            Date 2012/11/15 User anonymous
            Time 06:07:08PM Software version: 4.0
            Example laboratory
            Arizona
        """
        if line.startswith('Date'):
            splitted = self.splitLine(line)
            if len(splitted) > 1:
                self._header['Date'] = splitted[1]
                # the Date line may also carry the User column
                if len(splitted) > 2 and splitted[2] == 'User':
                    self._header['Date'] = splitted[1]
                    self._header['User'] = splitted[3] \
                        if len(splitted) > 3 else ''
                else:
                    self.warn("Unexpected header format", numline=self._numline)
            else:
                self.warn("Unexpected header format", numline=self._numline)
            return 0

        if line.startswith('Time'):
            splitted = self.splitLine(line)
            if len(splitted) > 1:
                self._header['Time'] = splitted[1]
            else:
                self.warn("Unexpected header format", numline=self._numline)
            return 0

        if line.startswith('Sample/ctrl'):
            # Sample/ctrl ID  Pat/Ctr/cAl  Test name  Test type
            if len(self._header) == 0:
                self.warn("No header found", numline=self._numline)
                return -1
            #Grab column names
            self._end_header = True
            self._columns = self.splitLine(line)
            return 1

    def parse_resultline(self, line):
        # Sample/ctrl ID  Pat/Ctr/cAl  Test name  Test type
        if not line.strip():
            return 0
        # map each value to its column caption
        rawdict = {}
        splitted = self.splitLine(line)
        for idx, result in enumerate(splitted):
            if len(self._columns) <= idx:
                self.err("Orphan value in column ${index}",
                         mapping={"index":str(idx + 1)},
                         numline=self._numline)
                break
            rawdict[self._columns[idx]] = result

        # analysis code and sample id are mandatory for a usable row
        acode = rawdict.get('Test name', '')
        if not acode:
            self.err("No Analysis Code defined",
                     numline=self._numline)
            return 0
        rid = rawdict.get('Sample/ctrl ID')
        if not rid:
            self.err("No Sample ID defined",
                     numline=self._numline)
            return 0

        # fold Errors/Notes columns into a single Remarks string
        errors = rawdict.get('Errors', '')
        errors = "Errors: %s" % errors if errors else ''
        notes = rawdict.get('Notes', '')
        notes = "Notes: %s" % notes if notes else ''
        rawdict[acode]=rawdict['Result']
        rawdict['DefaultResult'] = acode
        rawdict['Remarks'] = ' '.join([errors, notes])

        # merge into any result already collected for this sample id
        rawres = self.getRawResults().get(rid, [])
        raw = rawres[0] if len(rawres) > 0 else {}
        raw[acode] = rawdict
        self._addRawResult(rid, raw, True)
        return 0
return 0
class ThermoGalleryImporter(AnalysisResultsImporter):
    """Importer binding the Gallery TSV parser to the generic results importer.

    No Gallery-specific behaviour; everything is delegated to the base class.
    """

    def __init__(self, parser, context, idsearchcriteria, override,
                 allowed_ar_states=None, allowed_analysis_states=None,
                 instrument_uid=None):
        AnalysisResultsImporter.__init__(self, parser, context,
                                         idsearchcriteria, override,
                                         allowed_ar_states,
                                         allowed_analysis_states,
                                         instrument_uid)
| sciCloud/OLiMS | lims/exportimport/instruments/thermoscientific/gallery/__init__.py | Python | agpl-3.0 | 4,280 |
import math
import numpy as np
from math import sin, cos, pi
def baumstamm(radius, texture, koords, height):
    """Build the 12 side faces of a regular prism (a tree trunk).

    BUG FIX: `radius` was accepted but never applied -- every trunk came out
    with radius 1 regardless of the argument; the x/y components are now
    scaled by it (behaviour is unchanged for radius == 1). A dead statement
    computing `x/6.*pi` and discarding the result was also removed.

    NOTE(review): Polygon3D is not imported in this module -- confirm where
    it is expected to come from; as written the module-level demo call fails
    with a NameError.

    :param radius:  trunk radius
    :param texture: texture name passed through to Polygon3D
    :param koords:  np.array offset added to every vertex
    :param height:  trunk height along z
    :return: list of 12 Polygon3D quads (closed vertex rings)
    """
    polygons = []
    for i in range(12):
        a0 = i / 6. * pi
        a1 = (i + 1) / 6. * pi
        # one quad per 30-degree segment; the first vertex is repeated to
        # close the ring
        verts = [np.array([radius * sin(a0), radius * cos(a0), 0]),
                 np.array([radius * sin(a1), radius * cos(a1), 0]),
                 np.array([radius * sin(a1), radius * cos(a1), height]),
                 np.array([radius * sin(a0), radius * cos(a0), height]),
                 np.array([radius * sin(a0), radius * cos(a0), 0])]
        verts = list(np.array(verts) + koords)
        faces = range(len(verts))
        polygons.append(Polygon3D(verts, faces, texture))
    return polygons
# Demo: a unit-radius trunk at x=10, 15 units tall ("braun" = brown).
r=1
texture="braun"
koords=np.array([10,0,0])
height=15
print baumstamm(r, texture, koords, height)
| bradparks/procedural_city_generation | procedural_city_generation/building_generation/Baeume.py | Python | mpl-2.0 | 632 |
"""Computes the average Jenson-Shannon Divergence between attention heads."""
import argparse
import numpy as np
import utils
def main():
    """Accumulate Jensen-Shannon divergences between all attention heads
    over a corpus of extracted attention maps and pickle the result."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--attn-data-file", required=True,
        help="Pickle file containing extracted attention maps.")
    parser.add_argument("--outfile", required=True,
                        help="Where to write out the distances between heads.")
    args = parser.parse_args()

    print("Loading attention data")
    data = utils.load_pickle(args.attn_data_file)

    print("Computing head distances")
    # 144 = 12 layers x 12 heads (BERT-base). NOTE(review): hard-coded --
    # confirm the attention maps were extracted from a 12x12-head model.
    js_distances = np.zeros([144, 144])
    for doc in utils.logged_loop(data, n_steps=None):
        if "attns" not in doc:
            continue
        tokens, attns = doc["tokens"], np.array(doc["attns"])
        # flatten (layers, heads, seq, seq) -> (144, seq, seq)
        attns_flat = attns.reshape([144, attns.shape[2], attns.shape[3]])
        for head in range(144):
            head_attns = np.expand_dims(attns_flat[head], 0)
            # mix in a uniform 0.001 mass so the logs below are well-defined
            head_attns_smoothed = (0.001 / head_attns.shape[1]) + (head_attns * 0.999)
            attns_flat_smoothed = (0.001 / attns_flat.shape[1]) + (attns_flat * 0.999)
            # JSD(P, Q) = (KL(P||M) + KL(Q||M)) / 2 with M the midpoint
            m = (head_attns_smoothed + attns_flat_smoothed) / 2
            js = -head_attns_smoothed * np.log(m / head_attns_smoothed)
            js += -attns_flat_smoothed * np.log(m / attns_flat_smoothed)
            js /= 2
            # sum over both attention axes -> one divergence per other head
            js = js.sum(-1).sum(-1)
            js_distances[head] += js

    utils.write_pickle(js_distances, args.outfile)
# script entry point
if __name__ == "__main__":
    main()
| clarkkev/attention-analysis | head_distances.py | Python | mit | 1,460 |
#! /usr/bin/python
"""
@fileoverview Ping message sending script
@author David Parlevliet
@version 20130305
@preserve Copyright 2013 David Parlevliet.
Pinger
======
Sends ping messages to all known Elastic Firewall servers
"""
import os
import sys
import json
import socket
import re
import time
from ext.encryption import Encrypt
log_path = '/var/log/elastic-firewall/pinger.log'
debug = True
def log(output):
output = "[%s] %s" % (time.ctime(), output)
if debug:
print output
open(log_path, 'a').write("\n%s" % output)
def ping(ip, port, salt, api):
    """Send one encrypted ping message to an Elastic Firewall server.

    Failures are logged and swallowed so one dead server does not stop the
    sweep over the remaining servers.

    FIXES: the bare `except:` now catches Exception (so Ctrl-C still works),
    the log line reports the actual error instead of always claiming
    "Connection refused", and the socket is closed on the failure path too
    (it used to leak whenever connect/send raised).
    """
    log("Pinging: %s:%s" % (ip, port))
    command = {
        "api_key": api,
        "area": "ping"
    }
    client_sock = socket.socket()
    try:
        client_sock.connect((ip, port))
        client_sock.send(Encrypt(json.dumps(command), salt))
    except Exception as e:
        log("Connection refused: %s" % e)
    finally:
        client_sock.close()
def main():
    """Load the firewall config, resolve the server list through the cloud
    API and ping every configured peer. Returns 0 on success, 1 on error."""
    try:
        config = json.loads(open('/usr/local/share/elastic-firewall/config.json').read())
    except Exception, e:
        log("Unable to load config: %s" % e)
        return 1

    # I hate exec. Keep an eye out for better solutions to this
    # NOTE(review): exec on a config-derived string executes arbitrary code
    # if the config file is attacker-writable -- consider importlib instead.
    exec "from api.%s import Api" % config['server_group']
    try:
        api = Api()
        # copy the provider-specific settings onto the Api instance
        for key in config[config['server_group']]:
            setattr(api, key, config[config['server_group']][key])
        api.grab_servers()
    except Exception, e:
        log("Error: %s" % e)
        return 1

    hostname = socket.gethostname()
    # find the rule block whose hostname pattern matches this machine
    for c_hostname in config['hostnames']:
        if not re.match(c_hostname, hostname):
            continue
        log('Config found at: %s' % c_hostname)
        server_rules = config['hostnames'][c_hostname]
        if 'ping' in server_rules:
            for server in server_rules['ping']:
                # NOTE(review): this inner loop reuses the name c_hostname,
                # shadowing the outer loop variable -- intentional? confirm.
                for c_hostname in config['hostnames']:
                    if not re.match(c_hostname, server):
                        continue
                    receiver_rules = config['hostnames'][c_hostname]
                    # ping every resolved IP with the receiver's credentials
                    for ip in api.get_servers(server):
                        ping(ip, server_rules['server_port'], receiver_rules['bsalt'], receiver_rules['api_key'])
    return 0
# script entry point; exit status comes from main()
if __name__ == '__main__':
    sys.exit(main())
| dparlevliet/elastic-firewall | pinger.py | Python | mit | 2,076 |
from validate_app import validateApp
import os
from distutils import spawn
import sys
from parse_files import parseOutHTseq, bringTogether
from bashSub import bashSub
def checkPreprocessApplications():
    """Verify that samtools and htseq-count are on PATH; on a missing tool,
    print a download hint to stderr and exit.

    BUG FIX: the manual index used to advance with `i += 0`, so every
    missing tool pointed at the samtools download URL; the application and
    source lists are now paired with zip().
    """
    applications = ["samtools", "htseq-count"]
    source = ["http://samtools.sourceforge.net/",
              "http://www-huber.embl.de/users/anders/HTSeq/doc/install.html"]
    for app, url in zip(applications, source):
        if spawn.find_executable(app) is None:
            sys.stderr.write("It doesn't look like you have app - " + app + "\n")
            sys.stderr.write("Download it here - " + url + "\n")
            exit(0)
        else:
            sys.stderr.write(app + " found\n")
def returnReads(dictSampleSeqFiles):
    """Split read-file entries into comma-joined SE, PE1 and PE2 strings.

    Iterates over the given container (for a dict, its keys); a length-1
    entry is a single-end file, anything longer is a paired-end pair whose
    first two elements are PE1 and PE2.

    :return: [SE, PE1, PE2] comma-joined strings ('' when a group is empty)
    """
    single_reads = []
    pe1_reads = []
    pe2_reads = []
    for entry in dictSampleSeqFiles:
        if len(entry) == 1:
            single_reads.append(entry[0])
        else:
            pe1_reads.append(entry[0])
            pe2_reads.append(entry[1])
    return [",".join(single_reads), ",".join(pe1_reads), ",".join(pe2_reads)]
def check_dir(Dir):
    """Create directory Dir if it does not exist yet (single level, like
    os.mkdir; the parent must already exist)."""
    if not os.path.exists(Dir):
        os.mkdir(Dir)
class htseqCMD:
    """Driver that name-sorts each sample's BAM, strips secondary
    alignments, and runs htseq-count against a GTF annotation."""

    def __init__(self):
        # sub-folder name used for run metadata
        self.metaDataFolder = "MetaData"

    def index(self, ref):
        # the GTF annotation is mandatory for htseq-count
        if not os.path.exists(ref):
            print "Would you mind adding a gtf file? (-R) Thank you."
            exit(1)

    def execute(self, args):
        """Run the whole per-sample pipeline and write a combined summary.

        NOTE(review): `keys` below are (input_dir, output_dir) path pairs
        coming from validateSampleSheetHTSeq -- confirm against validate_app.
        """
        time = 0
        checkPreprocessApplications();
        logFiles = []
        # checkPreprocessApplications()
        validate = validateApp()
        validate.setValidation(True)
        dictSampleSeqFiles = validate.validateSampleSheetHTSeq(args.readFolder, args.finalDir, args.samplesFile, args.force)
        self.index(args.refGTF)

        for keys in dictSampleSeqFiles.keys():
            check_dir(args.finalDir)
            check_dir(keys[1])
            # per-sample file names derived from the directory basename
            bamFile = os.path.join(keys[0], keys[0].split("/")[-1]) + ".bam"
            outFile = os.path.join(keys[1], keys[0].split("/")[-1]) + ".out"
            countFile = os.path.join(keys[1], keys[0].split("/")[-1]) + ".counts"

            # sort alignments by read id (htseq-count needs name order)
            # runSortByName = bashSub("samtools view -bF 0x100", [bamFile], [''], "| samtools sort -n - " + os.path.join(keys[1], keys[1].split('/')[-1] + ".byreadid"), '/dev/null')
            runSortByName = bashSub("samtools sort -n", [bamFile], [''], os.path.join(keys[1], keys[1].split('/')[-1] + ".byreadid"), '/dev/null')
            print runSortByName.getCommand()
            runSortByName.runCmd("")

            # NOTE(review): the byreadid output is sorted a second time here
            # -- looks redundant; confirm whether this re-sort is needed.
            runSortByName = bashSub("samtools sort -n ", [os.path.join(keys[1], keys[1].split('/')[-1] + ".byreadid")], [''], os.path.join(keys[1], keys[1].split('/')[-1]) + ".byreadid" , '/dev/null')
            print runSortByName.getCommand()
            runSortByName.runCmd("")

            # drop secondary alignments (flag 0x100) and convert to SAM
            runView = bashSub("samtools view -F 0x100 ", [os.path.join(keys[1], keys[1].split('/')[-1] + ".byreadid.bam")], [''], "> " + os.path.join(keys[1], keys[1].split('/')[-1] + ".byreadid.sam"), '/dev/null')
            print runView.getCommand()
            runView.runCmd("")

            # count reads per feature; stderr -> .out, counts -> .counts
            cmdString = "htseq-count -f sam -s " + args.stranded + " -a " + args.minQual + " -t " + args.type + " -i " + args.idattr + " -m " + args.mode + " " + os.path.join(keys[1], keys[1].split('/')[-1] + ".byreadid.sam ") + args.refGTF + " 2>" + outFile + " >" + countFile
            htseqCmd = bashSub(cmdString, [''], [''], '', '')
            print htseqCmd.getCommand()
            htseqCmd.runCmd("")

            sys.stderr.flush()
            time += runSortByName.returnTime() + runView.returnTime() + htseqCmd.returnTime()
            logFiles.append(parseOutHTseq(keys[1], keys[1].split("/")[-1]))

        # merge the per-sample logs into one summary
        bringTogether(logFiles, os.path.join(args.finalDir, "Counts_Summary.log"))
        print "Total amount of seconds to run all samples"
        print "Seconds: " + str(time)
| msettles/expHTS | expHTS/htseqcountCMD.py | Python | apache-2.0 | 4,188 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Auto-generated by South: make registration_date nullable.
        # Changing field 'TNLCompletionTrack.registration_date'
        db.alter_column('tnl_completion_track', 'registration_date', self.gf('django.db.models.fields.DateTimeField')(null=True))
def backwards(self, orm):
# Changing field 'TNLCompletionTrack.registration_date'
db.alter_column('tnl_completion_track', 'registration_date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2015, 11, 9, 0, 0)))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.district': {
'Meta': {'object_name': 'District', 'db_table': "'district'"},
'code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'state': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.State']", 'on_delete': 'models.PROTECT'})
},
'student.state': {
'Meta': {'object_name': 'State', 'db_table': "'state'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'so': ('django.db.models.fields.IntegerField', [], {})
},
'tnl_integration.tnlcompletiontrack': {
'Meta': {'object_name': 'TNLCompletionTrack', 'db_table': "'tnl_completion_track'"},
'course': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tnl_integration.TNLCourses']"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'registered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'registration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': "orm['auth.User']"})
},
'tnl_integration.tnlcourses': {
'Meta': {'object_name': 'TNLCourses', 'db_table': "'tnl_courses'"},
'course': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'registered': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'registration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'section_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'tnl_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'tnl_integration.tnldistricts': {
'Meta': {'object_name': 'TNLDistricts', 'db_table': "'tnl_districts'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'district': ('django.db.models.fields.related.ForeignKey', [], {'default': '0', 'to': "orm['student.District']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['tnl_integration'] | EduPepperPDTesting/pepper2013-testing | lms/djangoapps/tnl_integration/migrations/0004_auto__chg_field_tnlcompletiontrack_registration_date.py | Python | agpl-3.0 | 6,772 |
# -*- coding: utf-8 -*-
import unittest
import six
from w3lib.encoding import resolve_encoding
from scrapy.http import (Request, Response, TextResponse, HtmlResponse,
XmlResponse, Headers)
from scrapy.selector import Selector
from scrapy.utils.python import to_native_str
from scrapy.exceptions import NotSupported
from scrapy.link import Link
from tests import get_testdata
class BaseResponseTest(unittest.TestCase):
    """Test suite for the plain scrapy Response class.

    Subclasses re-run the whole suite against other response types by
    overriding ``response_class`` (see TextResponseTest and friends).
    """
    # Overridden by subclasses (TextResponse, HtmlResponse, XmlResponse).
    response_class = Response
    def test_init(self):
        # Response requires url in the constructor
        self.assertRaises(Exception, self.response_class)
        self.assertTrue(isinstance(self.response_class('http://example.com/'), self.response_class))
        if not six.PY2:
            # On Python 3 a bytes url must be rejected.
            self.assertRaises(TypeError, self.response_class, b"http://example.com")
        # body can be str or None
        self.assertTrue(isinstance(self.response_class('http://example.com/', body=b''), self.response_class))
        self.assertTrue(isinstance(self.response_class('http://example.com/', body=b'body'), self.response_class))
        # test presence of all optional parameters
        self.assertTrue(isinstance(self.response_class('http://example.com/', body=b'', headers={}, status=200), self.response_class))
        r = self.response_class("http://www.example.com")
        assert isinstance(r.url, str)
        self.assertEqual(r.url, "http://www.example.com")
        # Default status is 200 and headers default to an empty Headers object.
        self.assertEqual(r.status, 200)
        assert isinstance(r.headers, Headers)
        self.assertEqual(r.headers, {})
        headers = {"foo": "bar"}
        body = b"a body"
        r = self.response_class("http://www.example.com", headers=headers, body=body)
        # The headers dict passed in must be copied, not stored by reference.
        assert r.headers is not headers
        self.assertEqual(r.headers[b"foo"], b"bar")
        r = self.response_class("http://www.example.com", status=301)
        self.assertEqual(r.status, 301)
        # A numeric string status is coerced to int; garbage raises ValueError.
        r = self.response_class("http://www.example.com", status='301')
        self.assertEqual(r.status, 301)
        self.assertRaises(ValueError, self.response_class, "http://example.com", status='lala200')
    def test_copy(self):
        """Test Response copy"""
        r1 = self.response_class("http://www.example.com", body=b"Some body")
        r1.flags.append('cached')
        r2 = r1.copy()
        self.assertEqual(r1.status, r2.status)
        self.assertEqual(r1.body, r2.body)
        # make sure flags list is shallow copied
        assert r1.flags is not r2.flags, "flags must be a shallow copy, not identical"
        self.assertEqual(r1.flags, r2.flags)
        # make sure headers attribute is shallow copied
        assert r1.headers is not r2.headers, "headers must be a shallow copy, not identical"
        self.assertEqual(r1.headers, r2.headers)
    def test_copy_meta(self):
        # Response.meta must proxy the meta dict of the originating request.
        req = Request("http://www.example.com")
        req.meta['foo'] = 'bar'
        r1 = self.response_class("http://www.example.com", body=b"Some body", request=req)
        assert r1.meta is req.meta
    def test_copy_inherited_classes(self):
        """Test Response children copies preserve their class"""
        class CustomResponse(self.response_class):
            pass
        r1 = CustomResponse('http://www.example.com')
        r2 = r1.copy()
        assert type(r2) is CustomResponse
    def test_replace(self):
        """Test Response.replace() method"""
        hdrs = Headers({"key": "value"})
        r1 = self.response_class("http://www.example.com")
        r2 = r1.replace(status=301, body=b"New body", headers=hdrs)
        # replace() must return a new object and leave the original untouched.
        assert r1.body == b''
        self.assertEqual(r1.url, r2.url)
        self.assertEqual((r1.status, r2.status), (200, 301))
        self.assertEqual((r1.body, r2.body), (b'', b"New body"))
        self.assertEqual((r1.headers, r2.headers), ({}, hdrs))
        # Empty attributes (which may fail if not compared properly)
        r3 = self.response_class("http://www.example.com", flags=['cached'])
        r4 = r3.replace(body=b'', flags=[])
        self.assertEqual(r4.body, b'')
        self.assertEqual(r4.flags, [])
    def _assert_response_values(self, response, encoding, body):
        # Helper: check body/text round-trip for a given expected encoding.
        # `body` may be given either as unicode or as encoded bytes.
        if isinstance(body, six.text_type):
            body_unicode = body
            body_bytes = body.encode(encoding)
        else:
            body_unicode = body.decode(encoding)
            body_bytes = body
        assert isinstance(response.body, bytes)
        assert isinstance(response.text, six.text_type)
        self._assert_response_encoding(response, encoding)
        self.assertEqual(response.body, body_bytes)
        self.assertEqual(response.body_as_unicode(), body_unicode)
        self.assertEqual(response.text, body_unicode)
    def _assert_response_encoding(self, response, encoding):
        # Helper: compare against the normalized codec name.
        self.assertEqual(response.encoding, resolve_encoding(encoding))
    def test_immutable_attributes(self):
        r = self.response_class("http://example.com")
        self.assertRaises(AttributeError, setattr, r, 'url', 'http://example2.com')
        self.assertRaises(AttributeError, setattr, r, 'body', 'xxx')
    def test_urljoin(self):
        """Test urljoin shortcut (only for existence, since behavior equals urljoin)"""
        joined = self.response_class('http://www.example.com').urljoin('/test')
        absolute = 'http://www.example.com/test'
        self.assertEqual(joined, absolute)
    def test_shortcut_attributes(self):
        r = self.response_class("http://example.com", body=b'hello')
        if self.response_class == Response:
            # The base Response has no text/selector support by design.
            msg = "Response content isn't text"
            self.assertRaisesRegexp(AttributeError, msg, getattr, r, 'text')
            self.assertRaisesRegexp(NotSupported, msg, r.css, 'body')
            self.assertRaisesRegexp(NotSupported, msg, r.xpath, '//body')
        else:
            r.text
            r.css('body')
            r.xpath('//body')
    def test_follow_url_absolute(self):
        self._assert_followed_url('http://foo.example.com',
                                  'http://foo.example.com')
    def test_follow_url_relative(self):
        self._assert_followed_url('foo',
                                  'http://example.com/foo')
    def test_follow_link(self):
        self._assert_followed_url(Link('http://example.com/foo'),
                                  'http://example.com/foo')
    def test_follow_whitespace_url(self):
        # Trailing whitespace in hrefs must be percent-encoded, not stripped.
        self._assert_followed_url('foo ',
                                  'http://example.com/foo%20')
    def test_follow_whitespace_link(self):
        self._assert_followed_url(Link('http://example.com/foo '),
                                  'http://example.com/foo%20')
    def _assert_followed_url(self, follow_obj, target_url, response=None):
        # Helper: follow() on `follow_obj` must produce a request to target_url.
        if response is None:
            response = self._links_response()
        req = response.follow(follow_obj)
        self.assertEqual(req.url, target_url)
        return req
    def _links_response(self):
        # Helper: a canned response with a known set of <a>/<img> links.
        body = get_testdata('link_extractor', 'sgml_linkextractor.html')
        resp = self.response_class('http://example.com/index', body=body)
        return resp
class TextResponseTest(BaseResponseTest):
    """Runs the base Response suite against TextResponse, plus tests for
    encoding detection/declaration, unicode handling, selector shortcuts
    and link following."""
    response_class = TextResponse
    def test_replace(self):
        super(TextResponseTest, self).test_replace()
        # replace() must preserve the original declared encoding unless a
        # new one is passed explicitly.
        r1 = self.response_class("http://www.example.com", body="hello", encoding="cp852")
        r2 = r1.replace(url="http://www.example.com/other")
        r3 = r1.replace(url="http://www.example.com/other", encoding="latin1")
        assert isinstance(r2, self.response_class)
        self.assertEqual(r2.url, "http://www.example.com/other")
        self._assert_response_encoding(r2, "cp852")
        self.assertEqual(r3.url, "http://www.example.com/other")
        self.assertEqual(r3._declared_encoding(), "latin1")
    def test_unicode_url(self):
        # instantiate with unicode url without encoding (should set default encoding)
        resp = self.response_class(u"http://www.example.com/")
        self._assert_response_encoding(resp, self.response_class._DEFAULT_ENCODING)
        # make sure urls are converted to str
        resp = self.response_class(url=u"http://www.example.com/", encoding='utf-8')
        assert isinstance(resp.url, str)
        # Non-ASCII url characters must be percent-encoded using the
        # response encoding (explicit or taken from Content-Type).
        resp = self.response_class(url=u"http://www.example.com/price/\xa3", encoding='utf-8')
        self.assertEqual(resp.url, to_native_str(b'http://www.example.com/price/\xc2\xa3'))
        resp = self.response_class(url=u"http://www.example.com/price/\xa3", encoding='latin-1')
        self.assertEqual(resp.url, 'http://www.example.com/price/\xa3')
        resp = self.response_class(u"http://www.example.com/price/\xa3", headers={"Content-type": ["text/html; charset=utf-8"]})
        self.assertEqual(resp.url, to_native_str(b'http://www.example.com/price/\xc2\xa3'))
        resp = self.response_class(u"http://www.example.com/price/\xa3", headers={"Content-type": ["text/html; charset=iso-8859-1"]})
        self.assertEqual(resp.url, 'http://www.example.com/price/\xa3')
    def test_unicode_body(self):
        unicode_string = u'\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0447\u0435\u0441\u043a\u0438\u0439 \u0442\u0435\u043a\u0441\u0442'
        # A unicode body without an explicit encoding must be rejected.
        self.assertRaises(TypeError, self.response_class, 'http://www.example.com', body=u'unicode body')
        original_string = unicode_string.encode('cp1251')
        r1 = self.response_class('http://www.example.com', body=original_string, encoding='cp1251')
        # check body_as_unicode
        self.assertTrue(isinstance(r1.body_as_unicode(), six.text_type))
        self.assertEqual(r1.body_as_unicode(), unicode_string)
        # check response.text
        self.assertTrue(isinstance(r1.text, six.text_type))
        self.assertEqual(r1.text, unicode_string)
    def test_encoding(self):
        # Precedence checks: explicit encoding > Content-Type header >
        # body inference.
        r1 = self.response_class("http://www.example.com", headers={"Content-type": ["text/html; charset=utf-8"]}, body=b"\xc2\xa3")
        r2 = self.response_class("http://www.example.com", encoding='utf-8', body=u"\xa3")
        r3 = self.response_class("http://www.example.com", headers={"Content-type": ["text/html; charset=iso-8859-1"]}, body=b"\xa3")
        r4 = self.response_class("http://www.example.com", body=b"\xa2\xa3")
        r5 = self.response_class("http://www.example.com", headers={"Content-type": ["text/html; charset=None"]}, body=b"\xc2\xa3")
        r6 = self.response_class("http://www.example.com", headers={"Content-type": ["text/html; charset=gb2312"]}, body=b"\xa8D")
        r7 = self.response_class("http://www.example.com", headers={"Content-type": ["text/html; charset=gbk"]}, body=b"\xa8D")
        self.assertEqual(r1._headers_encoding(), "utf-8")
        self.assertEqual(r2._headers_encoding(), None)
        self.assertEqual(r2._declared_encoding(), 'utf-8')
        self._assert_response_encoding(r2, 'utf-8')
        # iso-8859-1 is normalized to the cp1252 superset by w3lib.
        self.assertEqual(r3._headers_encoding(), "cp1252")
        self.assertEqual(r3._declared_encoding(), "cp1252")
        self.assertEqual(r4._headers_encoding(), None)
        self.assertEqual(r5._headers_encoding(), None)
        self._assert_response_encoding(r5, "utf-8")
        assert r4._body_inferred_encoding() is not None and r4._body_inferred_encoding() != 'ascii'
        self._assert_response_values(r1, 'utf-8', u"\xa3")
        self._assert_response_values(r2, 'utf-8', u"\xa3")
        self._assert_response_values(r3, 'iso-8859-1', u"\xa3")
        self._assert_response_values(r6, 'gb18030', u"\u2015")
        self._assert_response_values(r7, 'gb18030', u"\u2015")
        # TextResponse (and subclasses) must be passed an encoding when instantiating with unicode bodies
        self.assertRaises(TypeError, self.response_class, "http://www.example.com", body=u"\xa3")
    def test_declared_encoding_invalid(self):
        """Check that unknown declared encodings are ignored"""
        r = self.response_class("http://www.example.com",
                                headers={"Content-type": ["text/html; charset=UKNOWN"]},
                                body=b"\xc2\xa3")
        self.assertEqual(r._declared_encoding(), None)
        self._assert_response_values(r, 'utf-8', u"\xa3")
    def test_utf16(self):
        """Test utf-16 because UnicodeDammit is known to have problems with"""
        r = self.response_class("http://www.example.com",
                                body=b'\xff\xfeh\x00i\x00',
                                encoding='utf-16')
        self._assert_response_values(r, 'utf-16', u"hi")
    def test_invalid_utf8_encoded_body_with_valid_utf8_BOM(self):
        # Invalid trailing bytes must become replacement characters, and
        # the BOM must not leak into the decoded text.
        r6 = self.response_class("http://www.example.com",
                                 headers={"Content-type": ["text/html; charset=utf-8"]},
                                 body=b"\xef\xbb\xbfWORD\xe3\xab")
        self.assertEqual(r6.encoding, 'utf-8')
        self.assertEqual(r6.text, u'WORD\ufffd\ufffd')
    def test_bom_is_removed_from_body(self):
        # Inferring encoding from body also caches the decoded body as a
        # side effect; this test tries to ensure that calling
        # response.encoding and response.text in either order doesn't
        # affect final values for encoding and decoded body.
        url = 'http://example.com'
        body = b"\xef\xbb\xbfWORD"
        headers = {"Content-type": ["text/html; charset=utf-8"]}
        # Test response without content-type and BOM encoding
        response = self.response_class(url, body=body)
        self.assertEqual(response.encoding, 'utf-8')
        self.assertEqual(response.text, u'WORD')
        response = self.response_class(url, body=body)
        self.assertEqual(response.text, u'WORD')
        self.assertEqual(response.encoding, 'utf-8')
        # Body-caching side effect isn't triggered when encoding is declared
        # in content-type header, but BOM still needs to be removed from the
        # decoded body
        response = self.response_class(url, headers=headers, body=body)
        self.assertEqual(response.encoding, 'utf-8')
        self.assertEqual(response.text, u'WORD')
        response = self.response_class(url, headers=headers, body=body)
        self.assertEqual(response.text, u'WORD')
        self.assertEqual(response.encoding, 'utf-8')
    def test_replace_wrong_encoding(self):
        """Test invalid chars are replaced properly"""
        r = self.response_class("http://www.example.com", encoding='utf-8', body=b'PREFIX\xe3\xabSUFFIX')
        # XXX: Policy for replacing invalid chars may suffer minor variations
        # but it should always contain the unicode replacement char (u'\ufffd')
        assert u'\ufffd' in r.text, repr(r.text)
        assert u'PREFIX' in r.text, repr(r.text)
        assert u'SUFFIX' in r.text, repr(r.text)
        # Do not destroy html tags due to encoding bugs
        r = self.response_class("http://example.com", encoding='utf-8', \
            body=b'\xf0<span>value</span>')
        assert u'<span>value</span>' in r.text, repr(r.text)
        # FIXME: This test should pass once we stop using BeautifulSoup's UnicodeDammit in TextResponse
        #r = self.response_class("http://www.example.com", body=b'PREFIX\xe3\xabSUFFIX')
        #assert u'\ufffd' in r.text, repr(r.text)
    def test_selector(self):
        body = b"<html><head><title>Some page</title><body></body></html>"
        response = self.response_class("http://www.example.com", body=body)
        self.assertIsInstance(response.selector, Selector)
        self.assertEqual(response.selector.type, 'html')
        self.assertIs(response.selector, response.selector)  # property is cached
        self.assertIs(response.selector.response, response)
        self.assertEqual(
            response.selector.xpath("//title/text()").extract(),
            [u'Some page']
        )
        self.assertEqual(
            response.selector.css("title::text").extract(),
            [u'Some page']
        )
        self.assertEqual(
            response.selector.re("Some (.*)</title>"),
            [u'page']
        )
    def test_selector_shortcuts(self):
        # response.xpath/.css must delegate to response.selector.
        body = b"<html><head><title>Some page</title><body></body></html>"
        response = self.response_class("http://www.example.com", body=body)
        self.assertEqual(
            response.xpath("//title/text()").extract(),
            response.selector.xpath("//title/text()").extract(),
        )
        self.assertEqual(
            response.css("title::text").extract(),
            response.selector.css("title::text").extract(),
        )
    def test_selector_shortcuts_kwargs(self):
        # XPath variable substitution ($pclass/$pcount) via keyword args.
        body = b"<html><head><title>Some page</title><body><p class=\"content\">A nice paragraph.</p></body></html>"
        response = self.response_class("http://www.example.com", body=body)
        self.assertEqual(
            response.xpath("normalize-space(//p[@class=$pclass])", pclass="content").extract(),
            response.xpath("normalize-space(//p[@class=\"content\"])").extract(),
        )
        self.assertEqual(
            response.xpath("//title[count(following::p[@class=$pclass])=$pcount]/text()",
                           pclass="content", pcount=1).extract(),
            response.xpath("//title[count(following::p[@class=\"content\"])=1]/text()").extract(),
        )
    def test_urljoin_with_base_url(self):
        """Test urljoin shortcut which also evaluates base-url through get_base_url()."""
        body = b'<html><body><base href="https://example.net"></body></html>'
        joined = self.response_class('http://www.example.com', body=body).urljoin('/test')
        absolute = 'https://example.net/test'
        self.assertEqual(joined, absolute)
        body = b'<html><body><base href="/elsewhere"></body></html>'
        joined = self.response_class('http://www.example.com', body=body).urljoin('test')
        absolute = 'http://www.example.com/test'
        self.assertEqual(joined, absolute)
        body = b'<html><body><base href="/elsewhere/"></body></html>'
        joined = self.response_class('http://www.example.com', body=body).urljoin('test')
        absolute = 'http://www.example.com/elsewhere/test'
        self.assertEqual(joined, absolute)
    def test_follow_selector(self):
        resp = self._links_response()
        urls = [
            'http://example.com/sample2.html',
            'http://example.com/sample3.html',
            'http://example.com/sample3.html',
            'http://example.com/sample3.html#foo',
            'http://www.google.com/something',
            'http://example.com/innertag.html'
        ]
        # select <a> elements
        for sellist in [resp.css('a'), resp.xpath('//a')]:
            for sel, url in zip(sellist, urls):
                self._assert_followed_url(sel, url, response=resp)
        # select <link> elements
        self._assert_followed_url(
            Selector(text='<link href="foo"></link>').css('link')[0],
            'http://example.com/foo',
            response=resp
        )
        # href attributes should work
        for sellist in [resp.css('a::attr(href)'), resp.xpath('//a/@href')]:
            for sel, url in zip(sellist, urls):
                self._assert_followed_url(sel, url, response=resp)
        # non-a elements are not supported
        self.assertRaises(ValueError, resp.follow, resp.css('div')[0])
    def test_follow_selector_list(self):
        # follow() accepts a single selector, not a whole SelectorList.
        resp = self._links_response()
        self.assertRaisesRegexp(ValueError, 'SelectorList',
                                resp.follow, resp.css('a'))
    def test_follow_selector_invalid(self):
        # count() yields a scalar result selector -- not followable.
        resp = self._links_response()
        self.assertRaisesRegexp(ValueError, 'Unsupported',
                                resp.follow, resp.xpath('count(//div)')[0])
    def test_follow_selector_attribute(self):
        resp = self._links_response()
        for src in resp.css('img::attr(src)'):
            self._assert_followed_url(src, 'http://example.com/sample2.jpg')
    def test_follow_selector_no_href(self):
        resp = self.response_class(
            url='http://example.com',
            body=b'<html><body><a name=123>click me</a></body></html>',
        )
        self.assertRaisesRegexp(ValueError, 'no href',
                                resp.follow, resp.css('a')[0])
    def test_follow_whitespace_selector(self):
        # Surrounding whitespace inside href must be stripped before joining.
        resp = self.response_class(
            'http://example.com',
            body=b'''<html><body><a href=" foo\n">click me</a></body></html>'''
        )
        self._assert_followed_url(resp.css('a')[0],
                                  'http://example.com/foo',
                                  response=resp)
        self._assert_followed_url(resp.css('a::attr(href)')[0],
                                  'http://example.com/foo',
                                  response=resp)
    def test_follow_encoding(self):
        # The generated request must percent-encode the href using the
        # response's encoding, and inherit that encoding.
        resp1 = self.response_class(
            'http://example.com',
            encoding='utf8',
            body=u'<html><body><a href="foo?привет">click me</a></body></html>'.encode('utf8')
        )
        req = self._assert_followed_url(
            resp1.css('a')[0],
            'http://example.com/foo?%D0%BF%D1%80%D0%B8%D0%B2%D0%B5%D1%82',
            response=resp1,
        )
        self.assertEqual(req.encoding, 'utf8')
        resp2 = self.response_class(
            'http://example.com',
            encoding='cp1251',
            body=u'<html><body><a href="foo?привет">click me</a></body></html>'.encode('cp1251')
        )
        req = self._assert_followed_url(
            resp2.css('a')[0],
            'http://example.com/foo?%EF%F0%E8%E2%E5%F2',
            response=resp2,
        )
        self.assertEqual(req.encoding, 'cp1251')
class HtmlResponseTest(TextResponseTest):
    """Re-runs the TextResponse suite against HtmlResponse and adds
    HTML-specific encoding detection tests (meta http-equiv, XML prolog,
    HTML5 <meta charset>)."""
    response_class = HtmlResponse
    def test_html_encoding(self):
        # Encoding declared in a <meta http-equiv="Content-Type"> tag.
        body = b"""<html><head><title>Some page</title><meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
        </head><body>Price: \xa3100</body></html>'
        """
        r1 = self.response_class("http://www.example.com", body=body)
        self._assert_response_values(r1, 'iso-8859-1', body)
        # Encoding declared in an XML prolog (XHTML-style document).
        body = b"""<?xml version="1.0" encoding="iso-8859-1"?>
        <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
        Price: \xa3100
        """
        r2 = self.response_class("http://www.example.com", body=body)
        self._assert_response_values(r2, 'iso-8859-1', body)
        # for conflicting declarations headers must take precedence
        body = b"""<html><head><title>Some page</title><meta http-equiv="Content-Type" content="text/html; charset=utf-8">
        </head><body>Price: \xa3100</body></html>'
        """
        r3 = self.response_class("http://www.example.com", headers={"Content-type": ["text/html; charset=iso-8859-1"]}, body=body)
        self._assert_response_values(r3, 'iso-8859-1', body)
        # make sure replace() preserves the encoding of the original response
        body = b"New body \xa3"
        r4 = r3.replace(body=body)
        self._assert_response_values(r4, 'iso-8859-1', body)
    def test_html5_meta_charset(self):
        # HTML5-style <meta charset="..."> must also be honored.
        body = b"""<html><head><meta charset="gb2312" /><title>Some page</title><body>bla bla</body>"""
        r1 = self.response_class("http://www.example.com", body=body)
        self._assert_response_values(r1, 'gb2312', body)
class XmlResponseTest(TextResponseTest):
    """Re-runs the TextResponse suite against XmlResponse and adds
    XML-specific encoding (prolog declaration) and selector tests."""
    response_class = XmlResponse
    def test_xml_encoding(self):
        # No declaration anywhere: fall back to the class default encoding.
        body = b"<xml></xml>"
        r1 = self.response_class("http://www.example.com", body=body)
        self._assert_response_values(r1, self.response_class._DEFAULT_ENCODING, body)
        # Encoding declared in the XML prolog must be used.
        body = b"""<?xml version="1.0" encoding="iso-8859-1"?><xml></xml>"""
        r2 = self.response_class("http://www.example.com", body=body)
        self._assert_response_values(r2, 'iso-8859-1', body)
        # make sure replace() preserves the explicit encoding passed in the constructor
        body = b"""<?xml version="1.0" encoding="iso-8859-1"?><xml></xml>"""
        r3 = self.response_class("http://www.example.com", body=body, encoding='utf-8')
        body2 = b"New body"
        r4 = r3.replace(body=body2)
        self._assert_response_values(r4, 'utf-8', body2)
    def test_replace_encoding(self):
        # make sure replace() keeps the previous encoding unless overridden explicitly
        body = b"""<?xml version="1.0" encoding="iso-8859-1"?><xml></xml>"""
        body2 = b"""<?xml version="1.0" encoding="utf-8"?><xml></xml>"""
        r5 = self.response_class("http://www.example.com", body=body)
        r6 = r5.replace(body=body2)
        r7 = r5.replace(body=body2, encoding='utf-8')
        self._assert_response_values(r5, 'iso-8859-1', body)
        self._assert_response_values(r6, 'iso-8859-1', body2)
        self._assert_response_values(r7, 'utf-8', body2)
    def test_selector(self):
        body = b'<?xml version="1.0" encoding="utf-8"?><xml><elem>value</elem></xml>'
        response = self.response_class("http://www.example.com", body=body)
        self.assertIsInstance(response.selector, Selector)
        self.assertEqual(response.selector.type, 'xml')
        self.assertIs(response.selector, response.selector)  # property is cached
        self.assertIs(response.selector.response, response)
        self.assertEqual(
            response.selector.xpath("//elem/text()").extract(),
            [u'value']
        )
    def test_selector_shortcuts(self):
        body = b'<?xml version="1.0" encoding="utf-8"?><xml><elem>value</elem></xml>'
        response = self.response_class("http://www.example.com", body=body)
        self.assertEqual(
            response.xpath("//elem/text()").extract(),
            response.selector.xpath("//elem/text()").extract(),
        )
    def test_selector_shortcuts_kwargs(self):
        # Namespaces can be passed per-call or registered on the selector.
        body = b'''<?xml version="1.0" encoding="utf-8"?>
        <xml xmlns:somens="http://scrapy.org">
        <somens:elem>value</somens:elem>
        </xml>'''
        response = self.response_class("http://www.example.com", body=body)
        self.assertEqual(
            response.xpath("//s:elem/text()", namespaces={'s': 'http://scrapy.org'}).extract(),
            response.selector.xpath("//s:elem/text()", namespaces={'s': 'http://scrapy.org'}).extract(),
        )
        response.selector.register_namespace('s2', 'http://scrapy.org')
        self.assertEqual(
            response.xpath("//s1:elem/text()", namespaces={'s1': 'http://scrapy.org'}).extract(),
            response.selector.xpath("//s2:elem/text()").extract(),
        )
| Parlin-Galanodel/scrapy | tests/test_http_response.py | Python | bsd-3-clause | 26,643 |
import logging
import os
import commands
from autotest.client.shared import error
from virttest import libvirt_vm, virsh, data_dir
from virttest.libvirt_xml import xcepts, vm_xml
from virttest.libvirt_xml.devices import disk
class MFError(Exception):
    """Base exception for multifunction-disk test failures."""
    pass


class MFCheckDiskError(MFError):
    """Raised when verifying an attached disk inside the guest fails."""

    def __init__(self, output):
        # Keep the raw command/check output for later inspection.
        MFError.__init__(self, output)
        self.output = output

    def __str__(self):
        return "Check disk in vm failed:\n%s" % self.output
def cleanup_vm(vm_name=None, disk_removed=None):
    """
    Cleanup the vm with its disk deleted.

    :param vm_name: name of the VM to undefine, or None to skip undefining.
    :param disk_removed: path of the disk image to delete, or None to skip.

    Failures are swallowed on purpose: cleanup is best-effort.
    """
    try:
        if vm_name is not None:
            virsh.undefine(vm_name)
    except error.CmdError:
        pass
    try:
        if disk_removed is not None:
            os.remove(disk_removed)
    except (IOError, OSError):
        # Bug fix: os.remove() raises OSError (not IOError) when the path
        # is missing or cannot be removed; catch both so cleanup never
        # propagates a removal failure.
        pass
def _parse_metric(raw_value):
    """Parse a slot/function increase metric to int; 0 (no step) if illegal."""
    try:
        return int(raw_value)
    except ValueError as detail:  # illegal metric
        logging.warning(detail)
        return 0


def prepare_disk_params(target_list, params):
    """
    Prepare params lists for creating disk xml.

    :param target_list: devices which need disk xml.
    :param params: base slot/func value in config file.
    :return: dict mapping each target device name to its disk-address
             params (multifunction, type, domain, bus, slot, function).
    """
    addr_multifunction = params.get("mf_addr_multifunction")
    addr_type = params.get("mf_addr_type")
    base_domain = params.get("mf_addr_domain", "0x0000")
    base_bus = params.get("mf_addr_bus", "0x00")
    base_slot = params.get("mf_addr_slot", "0x0a")
    base_function = params.get("mf_addr_function", "0x0")
    # slot_metric: the step by which slot will increase per device.
    # func_metric: the step by which function will increase per device.
    slot_metric = _parse_metric(params.get("mf_slot_metric", 0))
    func_metric = _parse_metric(params.get("mf_func_metric", 0))
    disk_params_dict = {}
    for target_dev in target_list:
        disk_params = {
            'addr_multifunction': addr_multifunction,
            'addr_type': addr_type,
            # Do not support increased metric of domain and bus yet
            'addr_domain': base_domain,
            'addr_bus': base_bus,
            'addr_slot': base_slot,
            'addr_function': base_function,
        }
        # Convert string hex to number for arithmetic below
        try:
            base_slot = int(base_slot, 16)
            base_function = int(base_function, 16)
        except ValueError:
            pass  # Can not convert, use original string
        # Increase slot/func for next target_dev
        if slot_metric:
            try:
                base_slot += slot_metric
            except TypeError as detail:
                logging.warning(detail)
        if func_metric:
            try:
                base_function += func_metric
            except TypeError as detail:
                logging.warning(detail)
        # Convert number back to string hex if necessary
        try:
            base_slot = hex(base_slot)
            base_function = hex(base_function)
        except TypeError:
            pass  # Can not convert, directly pass
        disk_params_dict[target_dev] = disk_params
    return disk_params_dict
def create_disk_xml(params):
    """
    Create a disk configuration file and return its path.

    :param params: a dict contains values of disk
        {'device_type': "file",
         'source_file': ...,
         'target_dev': ...,
         'target_bus': "virtio",
         'addr_type': ...,
         'addr_domain': ...,
         'addr_bus': ...,
         'addr_slot': ...,
         'addr_function': ...,
         'addr_multifunction': ...}
    """
    # Attributes for the disk's <address> element.
    addr_type = params.get("addr_type", "pci")
    addr_attr = {
        'domain': params.get("addr_domain", "0x0000"),
        'bus': params.get("addr_bus", "0x00"),
        'slot': params.get("addr_slot", "0x0a"),
        'function': params.get("addr_function", "0x0"),
    }
    multifunction = params.get("addr_multifunction")
    if multifunction is not None:
        addr_attr['multifunction'] = multifunction
    # Build the disk element itself.
    diskxml = disk.Disk(params.get("type_name", "file"))
    diskxml.device = params.get("device_type", "disk")
    diskxml.source = diskxml.new_disk_source(
        attrs={'file': params.get("source_file")})
    diskxml.target = {'dev': params.get("target_dev", "vdb"),
                      'bus': params.get("target_bus", "virtio")}
    diskxml.address = diskxml.new_disk_address(addr_type, attrs=addr_attr)
    logging.debug("Disk XML:\n%s", str(diskxml))
    return diskxml.xml
def device_exists(vm, target_dev):
    """
    Check if given target device exists on vm.

    :param vm: VM object providing get_blk_devices().
    :param target_dev: target device name inside the guest, e.g. "vdb".
    :return: True if the device is present, False otherwise.
    """
    # Membership on the dict itself is equivalent to (and cheaper than)
    # materializing .keys(); the if/return-True/return-False chain was
    # a redundant spelling of the boolean expression.
    return target_dev in vm.get_blk_devices()
def attach_additional_device(vm_name, disksize, targetdev, params):
    """
    Create a disk with disksize, then attach it to given vm.

    :param vm_name: Libvirt VM name.
    :param disksize: size of attached disk (qemu-img size string, e.g. "50M").
    :param targetdev: target of disk device (e.g. "vdb").
    :param params: dict of disk values passed on to create_disk_xml();
        'source_file' and 'target_dev' are overwritten here.
    :return: result of virsh.attach_device on success; (False, output)
        if qemu-img fails.  NOTE(review): mixed return types -- callers
        read .exit_status, which the tuple does not have; confirm the
        qemu-img failure path is handled upstream.
    """
    logging.info("Attaching disk...")
    # Back the new disk with a file in the tmp dir named after the target dev.
    disk_path = os.path.join(data_dir.get_tmp_dir(), targetdev)
    cmd = "qemu-img create %s %s" % (disk_path, disksize)
    status, output = commands.getstatusoutput(cmd)
    if status:
        return (False, output)
    # Update params for source file
    params['source_file'] = disk_path
    params['target_dev'] = targetdev
    # Create a file of device
    xmlfile = create_disk_xml(params)
    # To confirm attached device do not exist.
    virsh.detach_disk(vm_name, targetdev, extra="--config")
    return virsh.attach_device(domain_opt=vm_name, file_opt=xmlfile,
                               flagstr="--config", debug=True)
def define_new_vm(vm_name, new_name):
    """
    Define a new vm from the dumped XML of an existing one.

    The UUID is dropped so libvirt assigns a fresh one to the copy.

    :param vm_name: name of the existing VM to copy.
    :param new_name: name for the newly defined VM.
    :return: True on success, False if defining raised LibvirtXMLError.
    """
    try:
        vmxml = vm_xml.VMXML.new_from_dumpxml(vm_name)
        vmxml.vm_name = new_name
        del vmxml.uuid
        vmxml.define()
        return True
    # `except E as v` replaces the Python-2-only `except E, v` spelling;
    # it works on Python 2.6+ and is required on Python 3.
    except xcepts.LibvirtXMLError as detail:
        logging.error(detail)
        return False
def check_disk(vm, target_dev, part_size):
    """
    Check disk on vm.
    Create a new partition and mount it.

    :param vm: VM object; started and logged into if not already running.
    :param target_dev: device name inside the guest, e.g. "vdb".
    :param part_size: partition end position handed to parted, e.g. "50M".
    :raises MFCheckDiskError: if the device is missing or partitioning,
        formatting or mounting fails.  Returns silently (skip) when the
        guest has no `parted` binary.
    """
    if not vm.is_alive():
        vm.start()
    session = vm.wait_for_login()
    device = "/dev/%s" % target_dev
    if session.cmd_status("ls %s" % device):
        raise MFCheckDiskError("Can not find '%s' in guest." % device)
    else:
        if session.cmd_status("which parted"):
            logging.error("Did not find command 'parted' in guest, SKIP...")
            return
        # Label the disk, then create one primary partition starting at 1M.
        ret1, output1 = session.cmd_status_output("parted %s \"mklabel msdos\""
                                                  % device, timeout=5)
        ret2, output2 = session.cmd_status_output("parted %s \"mkpart p 1M %s\""
                                                  % (device, part_size), timeout=5)
        logging.debug("Create part:\n:%s\n%s", output1, output2)
        if ret1 or ret2:
            raise MFCheckDiskError("Create partition for '%s' failed." % device)
        # Format and mount the first (and only) partition, e.g. /dev/vdb1.
        if session.cmd_status("mkfs.ext3 %s1" % device):
            raise MFCheckDiskError("Format created partition failed.")
        if session.cmd_status("mount %s1 /mnt" % device):
            raise MFCheckDiskError("Can not mount '%s' to /mnt." % device)
def run(test, params, env):
    """
    Test multi function of vm devices.

    :param test: test object (unused directly here).
    :param params: test parameters; keys read here: main_vm,
        mf_updated_new_vm, mf_added_devices_count, mf_added_devices_size,
        status_error, mf_check_disk_error.
    :param env: test environment holding the VM objects.
    """
    vm_name = params.get("main_vm")
    vm = env.get_vm(vm_name)
    # To avoid dirty after starting new vm
    if vm.is_alive():
        vm.destroy()
    new_vm_name = params.get("mf_updated_new_vm")
    define_new_vm(vm_name, new_vm_name)
    # Create a new vm object for convenience
    new_vm = libvirt_vm.VM(new_vm_name, vm.params, vm.root_dir,
                           vm.address_cache)
    try:
        # Get parameters
        disk_count = int(params.get("mf_added_devices_count", 1))
        disk_size = params.get("mf_added_devices_size", "50M")
        status_error = "yes" == params.get("status_error", "no")
        check_disk_error = "yes" == params.get("mf_check_disk_error", "no")
        target_list = []
        index = 0
        # Collect the first disk_count target names (vda, vdb, ...) that
        # are not already present on the new vm.
        while len(target_list) < disk_count:
            target_dev = "vd%s" % chr(ord('a') + index)
            if not device_exists(new_vm, target_dev):
                target_list.append(target_dev)
            index += 1
        disk_params_dict = prepare_disk_params(target_list, params)
        # To record failed attach
        fail_info = []
        for target_dev in target_list:
            result = attach_additional_device(new_vm_name, disk_size,
                                              target_dev,
                                              disk_params_dict[target_dev])
            if result.exit_status:
                if status_error:
                    # Attach fail is expected.
                    # TODO: check output of fail info
                    logging.info("Failed as expected.")
                    return
                else:
                    raise error.TestFail("Attach device %s failed."
                                         % target_dev)
            else:
                if status_error and not check_disk_error:
                    fail_info.append("Attach %s successfully "
                                     "but not expected." % target_dev)
        if len(fail_info):
            raise error.TestFail(fail_info)
        logging.debug("New VM XML:\n%s", new_vm.get_xml())
        # Login to check attached devices
        for target_dev in target_list:
            try:
                check_disk(new_vm, target_dev, disk_size)
            except MFCheckDiskError, detail:
                if check_disk_error:
                    logging.debug("Check disk failed as expected:\n%s", detail)
                    return
                else:
                    raise
        if check_disk_error:
            raise error.TestFail("Check disk didn't fail as expected.")
    finally:
        # Always tear the temporary VM down, even when the test failed.
        if new_vm.is_alive():
            new_vm.destroy()
        cleanup_vm(new_vm_name)
| PandaWei/tp-libvirt | libvirt/tests/src/multifunction.py | Python | gpl-2.0 | 10,299 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-02-11 20:52
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated schema changes for namubufferiapp (2017-02-11)."""
    dependencies = [
        ('namubufferiapp', '0006_auto_20170129_1639'),
    ]
    operations = [
        migrations.AlterField(
            model_name='account',
            name='magic_token_ttl',
            # NOTE(review): this default is a timestamp frozen at
            # makemigrations time -- the usual Django artifact when the model
            # default was evaluated eagerly; harmless in a historical migration.
            field=models.DateTimeField(default=datetime.datetime(2017, 2, 11, 21, 7, 31, 934487, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='product',
            name='name',
            field=models.CharField(max_length=128, unique=True),
        ),
    ]
| oangervuori/namubufferi | namubufferiapp/migrations/0007_auto_20170211_2052.py | Python | mit | 746 |
from django.db import models
from django.conf import settings
from products.models import Variation
from django.core.urlresolvers import reverse
from django.db.models.signals import pre_save, post_save,post_delete
from decimal import Decimal
class CartItem(models.Model):
    """One Variation in a Cart together with its quantity and line total."""
    cart=models.ForeignKey("Cart")
    item=models.ForeignKey(Variation)
    quantity=models.PositiveIntegerField(default=1)
    # Recomputed in cart_item_pre_save_receiver as quantity * unit price.
    line_item_total=models.DecimalField(max_digits=10,decimal_places=2)
    # line_total=models.DecimalField(max_digits=10,decimal_places=2)
    def __str__(self):
        return self.item.title
    def remove(self):
        # Delegate to the variation's own remove-from-cart handling.
        return self.item.remove_from_cart()
    def get_title(self):
        """Return "<product title> - <variation title>" for display."""
        return "%s - %s" %(self.item.product.title, self.item.title)
def cart_item_pre_save_receiver(sender,instance,*args,**kwargs):
    """pre_save hook: keep line_item_total = quantity * unit price."""
    qty=instance.quantity
    # Only recompute for a sane quantity; qty < 1 leaves the field untouched.
    if int(qty) >=1:
        price=instance.item.get_price()
        line_item_total=Decimal(qty)*Decimal(price)
        instance.line_item_total=line_item_total
pre_save.connect(cart_item_pre_save_receiver,sender=CartItem)
def cart_item_post_save_receiver(sender,instance,*args,**kwargs):
    """post_save/post_delete hook: re-total the parent cart."""
    instance.cart.update_subtotal()
post_save.connect(cart_item_post_save_receiver,sender=CartItem)
# Also fires on deletion, so removing an item updates the cart subtotal.
post_delete.connect(cart_item_post_save_receiver, sender=CartItem)
# Create your models here.
class Cart(models.Model):
    """A shopping cart: a set of Variations (via CartItem) plus totals."""
    user=models.ForeignKey(settings.AUTH_USER_MODEL,null=True,blank=True)
    items=models.ManyToManyField(Variation,through=CartItem)
    timestamp=models.DateTimeField(auto_now_add=True,auto_now=False)
    updated=models.DateTimeField(auto_now_add=False,auto_now=True)
    # Totals are recalculated by update_subtotal() and the pre_save receiver
    # below; the 25.00 defaults only apply before the first recalculation.
    subtotal=models.DecimalField(max_digits=50,decimal_places=2,null=True,default=25.00)
    tax_total=models.DecimalField(max_digits=50,decimal_places=2,null=True,default=25.00)
    total=models.DecimalField(max_digits=50,decimal_places=2,null=True,default=25.00)
    def __str__(self):
        return str(self.id)
    def update_subtotal(self):
        """Sum line_item_total over all cart items and save the cart."""
        subtotal=0
        items=self.cartitem_set.all()
        for item in items:
            subtotal+=item.line_item_total
        self.subtotal=subtotal
        # Saving triggers pre_save, which derives tax_total and total.
        self.save()
def do_tax_and_total_receiver(sender, instance, *args, **kwargs):
    """pre_save hook: derive tax_total and total from the cart's subtotal.

    The tax rate is built from the string '0.085' so the Decimal is exact;
    the previous Decimal(0.085) carried binary floating-point error
    (0.08499999999...) into the arithmetic, defeating the point of Decimal.
    """
    subtotal = instance.subtotal
    tax_rate = Decimal('0.085')  # 8.5% sales tax
    tax_total = round(Decimal(subtotal) * tax_rate, 2)
    total = round(Decimal(subtotal) + Decimal(tax_total), 2)
    instance.tax_total = tax_total
    instance.total = total
pre_save.connect(do_tax_and_total_receiver,sender=Cart) | abhijitbangera/ecommerce | src/carts/models.py | Python | mit | 2,330 |
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404, redirect
import numpy
from .admin import RoundReportResource, TeamResource, CompetitionResource
from .forms import TeamForm, RoundReportForm, CompetitionForm, CompetitionSelectForm
from .models import Team, RoundReport, Competition
def index_view(request):
    """Render the landing page."""
    return render(request, "index.html")
def leaderboard_avg_score_view(request):
    """Leaderboard of all teams, highest average alliance score first."""
    ranked = sorted(Team.objects.all(),
                    key=lambda team: team.get_average_score(),
                    reverse=True)
    return render(request, "leaderboards/avg_alliance_score.html",
                  context={"teams": ranked,
                           "thing": "Average alliance score"})
def leaderboard_max_low_boulders(request):
    """Leaderboard of all teams, highest max low-goal boulder count first."""
    ranked = sorted(Team.objects.all(),
                    key=lambda team: team.get_max_boulders_scored_low(),
                    reverse=True)
    return render(request, "leaderboards/max_low_boulders.html",
                  context={"teams": ranked,
                           "thing": "Max boulders scored in low goal"})
def competition_add_view(request):
    """Create a Competition via CompetitionForm; redirect home on success."""
    if request.method == "POST":
        form = CompetitionForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('scattergun-index')
    else:
        form = CompetitionForm()
    # Falls through with a bound (invalid) or fresh form.
    return render(request, "generic_form.html", context={"form": form, "thing": "competition"})
def competition_list_view(request):
    """List all competitions."""
    comps = Competition.objects.all()
    return render(request, "competition_list.html", context={"comps": comps})
def competition_show_view(request, comp_number):
    """Show one competition: its round reports and the teams that played,
    ordered by average alliance score (descending).

    :param comp_number: Competition primary key from the URL.
    """
    comp = get_object_or_404(Competition, pk=comp_number)
    reports = RoundReport.objects.all().filter(competition=comp)
    # Deduplicate FIRST, then sort.  The previous set(sorted(...)) threw the
    # sort order away, because sets are unordered.
    teams = {report.team for report in reports}
    sort = sorted(teams, key=lambda team: -team.get_average_score())
    return render(request, "competition.html", context={"reports": reports, "teams": sort})
def roundreport_add_view(request):
    """Create a RoundReport; redirect to the list on success."""
    if request.method == "POST":
        form = RoundReportForm(request.POST)
        if form.is_valid():
            form.save()
            return redirect('scattergun-roundreport-list')
    else:
        form = RoundReportForm()
    # Falls through with a bound (invalid) or fresh form.
    return render(request, "roundreport_add.html", context={"form": form})
def roundreport_list_view(request):
    """List all round reports."""
    reports = RoundReport.objects.all()
    return render(request, "roundreport_list.html", context={"reports": reports})
def roundreport_show_view(request, report):
    """Show one RoundReport; *report* arrives as its primary key from the URL."""
    report = get_object_or_404(RoundReport, pk=report)
    return render(request, "roundreport.html", context={"report": report})
def team_list_view(request):
    """List all teams."""
    teams = Team.objects.all()
    return render(request, "team_list.html", context={"teams": teams})
def team_add_view(request):
    """Create a Team via TeamForm; redirect to its detail page on success."""
    if request.method == "POST":
        form = TeamForm(request.POST)
        if form.is_valid():
            team = form.save()
            return redirect('scattergun-team-show', team_number=team.number)
    else:
        form = TeamForm()
    # Falls through with a bound (invalid) or fresh form.
    return render(request, "generic_form.html", context={"form": form, "thing": "team"})
def team_view(request, team_number):
    """Render one team's detail page: per-match charts, defense-ability
    averages and technical-issue comments for a chosen competition.

    The latest competition (by date) is shown unless the user POSTs a valid
    CompetitionSelectForm choosing another one.

    :param team_number: Team number from the URL.
    """
    # Assume that competition we want to view is the latest one by default.
    competition = Competition.objects.latest('date')
    if request.method == "POST":
        form = CompetitionSelectForm(request.POST)
        if form.is_valid():
            competition = form.cleaned_data["competition"]
    form = CompetitionSelectForm()
    team = get_object_or_404(Team, number=team_number)
    reports = RoundReport.objects.filter(team=team, competition=competition)
    comments = []

    def _mean_ability(slot, defense_name):
        # Average of "<slot>_defense_ability" over this team's reports that
        # faced the given defense in that slot (NaN when there are none).
        lookup = {"{0}_defense".format(slot): defense_name}
        rows = RoundReport.objects.filter(team=team, **lookup).values()
        return numpy.mean(
            [row["{0}_defense_ability".format(slot)] for row in rows])

    # Previously eight near-identical query expressions (plus a leftover
    # debug print); factored into _mean_ability above.
    abilities = {
        "portcullis": _mean_ability("a", "Portcullis"),
        "cheval_de_frise": _mean_ability("a", "Cheval de Frise"),
        "ramparts": _mean_ability("b", "Ramparts"),
        "moats": _mean_ability("b", "Moats"),
        "drawbridge": _mean_ability("c", "Drawbridge"),
        "sally_port": _mean_ability("c", "Sally Port"),
        "rock_wall": _mean_ability("d", "Rock Wall"),
        "rough_terrain": _mean_ability("d", "Rough Terrain"),
    }
    pointsdataset = {
        "color": "blue",
        "name": "{0}_{1}".format(team.number, "points"),
        "xy": [],
    }
    rankdataset = {
        "color": "blue",
        "name": "{0}_{1}".format(team.number, "rank"),
        "xy": [],
    }
    boulder_dataset = [
        {
            "color": "light-blue",
            "name": "{0}_{1}".format(team.number, "low"),
            "xy": [],
        },
        {
            "color": "blue",
            "name": "{0}_{1}".format(team.number, "high"),
            "xy": [],
        },
    ]
    for report in reports:
        pointsdataset["xy"].append({'x': report.match_number, 'y': report.friendly_alliance_score})
        rankdataset["xy"].append({'x': report.match_number, 'y': report.friendly_alliance_rank_points})
        boulder_dataset[0]["xy"].append({'x': report.match_number, 'y': report.boulders_scored_in_low})
        boulder_dataset[1]["xy"].append({'x': report.match_number, 'y': report.boulders_scored_in_high})
        if not report.tech_issues_comment == "":
            comments.append(report.tech_issues_comment)
    # The charts expect the points in match-number order.
    _by_match = lambda point: point["x"]
    pointsdataset["xy"].sort(key=_by_match)
    rankdataset["xy"].sort(key=_by_match)
    boulder_dataset[0]["xy"].sort(key=_by_match)
    boulder_dataset[1]["xy"].sort(key=_by_match)
    context = {
        "team": team,
        "reports": reports,
        "pointsdataset": [pointsdataset],
        "rankdataset": [rankdataset],
        "boulders": boulder_dataset,
        "comments": comments,
        "abilities": abilities,
        "form": form,
    }
    return render(request, "team.html", context=context)
def export_roundreport_view(request, target):
    """Export all round reports in the requested format.

    :param target: "csv" or "xlsx"; any other value yields a 500 response.
    """
    # Renamed from `all`, which shadowed the builtin; also dropped the
    # leftover debug print and dead None pre-assignments.
    exported = RoundReportResource().export()
    if target == "csv":
        data = exported.csv
        content_type = "text/plain"
    elif target == "xlsx":
        data = exported.xlsx
        content_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    else:
        return HttpResponse("Unrecognized format", status=500)
    return HttpResponse(data, content_type=content_type)
def export_team_view(request, target):
    """Export all teams in the requested format.

    :param target: "csv" or "xlsx"; any other value yields a 500 response.
    """
    # Renamed from `all`, which shadowed the builtin; also dropped the
    # leftover debug print and dead None pre-assignments.
    exported = TeamResource().export()
    if target == "csv":
        data = exported.csv
        content_type = "text/plain"
    elif target == "xlsx":
        data = exported.xlsx
        content_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    else:
        return HttpResponse("Unrecognized format", status=500)
    return HttpResponse(data, content_type=content_type)
def export_competition_view(request, target):
    """Export all competitions in the requested format.

    :param target: "csv" or "xlsx"; any other value yields a 500 response.
    """
    # Renamed from `all`, which shadowed the builtin; also dropped the
    # leftover debug print and dead None pre-assignments.
    exported = CompetitionResource().export()
    if target == "csv":
        data = exported.csv
        content_type = "text/plain"
    elif target == "xlsx":
        data = exported.xlsx
        content_type = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    else:
        return HttpResponse("Unrecognized format", status=500)
    return HttpResponse(data, content_type=content_type)
def export_index_view(request):
    """Render the export landing page."""
    return render(request, "export.html")
| Team4761/Scattergun | scattergun/scattergun_coreapp/views.py | Python | mit | 8,417 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 - Frank Lin
from skrutil import string_utils
from skrutil.string_utils import indent
# Blank-line separator written between generated sections.
_JNI_BR = '\n\n'
# Single-space unit; NOTE(review): unused in this module as far as visible.
_JNI_SPACE = ' '
class JniClass:
    """JNI part generator, responsible for generating JNI source code for Object and ObjectManager.

    The generate_* methods write their output under build/jni/ and return
    None (earlier docstrings wrongly claimed they returned strings).  Each
    now closes its output file so the content is flushed to disk.
    """

    def __init__(self, group_name, class_name, jni_variable_list, jni_manager_or_none):
        """Init JniClass with necessary parameters.

        Args:
            group_name: A string which is the C++ folder name.
            class_name: A string which is the C++ class name.
            jni_variable_list: List of <JniVariable>.
            jni_manager_or_none: <JniManager> or None when there is no manager.
        """
        self.__group_name = group_name
        self.__class_name = class_name
        self.__jni_var_list = jni_variable_list
        self.__jni_manager_or_none = jni_manager_or_none
        if self.__jni_manager_or_none is not None:
            self.__jni_manager_or_none.set_object_name(class_name, class_name + 's')
            self.__jni_manager_or_none.set_jni_variable_list(jni_variable_list)
            self.__jni_manager_or_none.set_group_name(group_name)
        # Boilerplate extern "C" wrappers used by every generated file.
        self.__def_cpp = '#ifdef __cplusplus\nextern "C" {\n#endif'
        self.__end_def_cpp = '#ifdef __cplusplus\n}\n#endif'

    def generate_header(self):
        """Writes the JNI object header to build/jni/.

        It is not required, so since 5.0 we don't use this method anymore.
        """
        file_name = 'com_lesschat_core_{0}_{1}.h'.format(self.__group_name, self.__class_name)
        file_path = 'build/jni/' + file_name
        output_header = open(file_path, 'w')
        def_guard = '#ifndef _Included_com_lesschat_core_{0}_{1}\n#define _Included_com_lesschat_core_{0}_{1}'.format(
            self.__group_name, self.__class_name)
        end_def_guard = '#endif'
        output_header.write('#include <jni.h>')
        output_header.write(_JNI_BR)
        output_header.write(def_guard + '\n')
        output_header.write(self.__def_cpp + _JNI_BR)
        # release method
        output_header.write(self.__release())
        output_header.write(_JNI_BR)
        for jni_var in self.__jni_var_list:
            output_header.write(jni_var.getter())
            output_header.write(_JNI_BR)
        output_header.write(_JNI_BR)
        output_header.write(self.__end_def_cpp + '\n')
        output_header.write(end_def_guard + '\n')
        output_header.close()  # was leaked; close so content is flushed

    def generate_implementation(self):
        """Writes the JNI object implementation (used before 4.0) to build/jni/."""
        file_name = 'com_lesschat_core_{0}_{1}.cc'.format(self.__group_name, self.__class_name)
        file_path = 'build/jni/' + file_name
        output_header = open(file_path, 'w')
        style_class_name = string_utils.cpp_class_name_to_cpp_file_name(self.__class_name)
        header_name = 'com_lesschat_core_{0}_{1}.h'.format(self.__group_name, self.__class_name)
        cpp_name = '{0}/{1}.h'.format(self.__group_name, style_class_name)
        output_header.write('#include "{0}"'.format(header_name) + '\n')
        output_header.write('#include "{0}"'.format(cpp_name) + '\n')
        output_header.write('#include "utils/android/jni_helper.h"')
        output_header.write(_JNI_BR)
        output_header.write(self.__def_cpp + _JNI_BR)
        # release method
        output_header.write(self.__release_impl())
        output_header.write(_JNI_BR)
        for jni_var in self.__jni_var_list:
            output_header.write(jni_var.getter_impl())
            output_header.write(_JNI_BR)
        output_header.write(self.__end_def_cpp)
        output_header.close()  # was leaked; close so content is flushed

    def generate_jni_helper_implementation(self, config):
        """Writes the JNI helper converting method declaration & implementation
        snippets (to be copied into jni_helper.h/.cc) to build/jni/."""
        file_name = 'jni_helper_{0}.cc'.format(self.__class_name)
        file_path = 'build/jni/' + file_name
        output_cc = open(file_path, 'w')
        impl = '// Copy belows to core/utils/android/jni_helper.h\n\n\n'
        impl += '{0}\n\n'.format(self.__jni_get_jobject_by_core_object_declaration())
        impl += self.__jni_get_jobjects_array_by_core_objects_declaration() + '\n'
        impl += '\n\n\n'
        impl += '// Copy belows to core/utils/android/jni_helper.cc\n\n\n'
        impl += self.__jni_get_jobject_by_core_object_implementation(config)
        impl += '\n\n'
        impl += self.__jni_get_jobjects_array_by_core_objects_implementation()
        impl += '\n'
        output_cc.write(impl)
        output_cc.close()  # was leaked; close so content is flushed

    def generate_manager_header(self):
        """Writes the JNI object manager header to build/jni/.

        It is not required, so since 5.0 we don't use this method anymore.
        No-op when the class has no manager.
        """
        if self.__jni_manager_or_none is None:
            return
        jni_manager = self.__jni_manager_or_none
        file_name = 'com_lesschat_core_{0}_{1}Manager.h'.format(self.__group_name, self.__class_name)
        file_path = 'build/jni/' + file_name
        output_header = open(file_path, 'w')
        def_header = '#ifndef _Included_com_lesschat_core_{0}_{1}Manager\n' \
                     '#define _Included_com_lesschat_core_{0}_{1}Manager'
        def_guard = def_header.format(self.__group_name, self.__class_name)
        end_def_guard = '#endif'
        output_header.write('#include <jni.h>' + _JNI_BR)
        output_header.write(def_guard + '\n')
        output_header.write(self.__def_cpp + _JNI_BR)
        output_header.write(jni_manager.generate_fetch_declarations())
        output_header.write(jni_manager.generate_http_function_declarations())
        output_header.write(self.__end_def_cpp + '\n')
        output_header.write(end_def_guard + '\n')
        output_header.close()  # was leaked; close so content is flushed

    def generate_manager_implementation(self, version, config):
        """Writes the JNI object manager implementation to build/jni/.

        Args:
            version: A float version number of <JniModelXmlParser>.
            config: A <Config> object describes user-defined names.
        No-op when the class has no manager.
        """
        if self.__jni_manager_or_none is None:
            return
        jni_manager = self.__jni_manager_or_none
        file_name = '{2}_{0}_{1}Manager.cc'.format(self.__group_name, self.__class_name, config.jni_package_path)
        file_path = 'build/jni/' + file_name
        output_header = open(file_path, 'w')
        header_name = '#include "{2}_{0}_{1}Manager.h"\n'.format(self.__group_name,
                                                                 self.__class_name,
                                                                 config.jni_package_path)
        cpp_name = '#include "{0}/{1}_manager.h"\n'\
            .format(self.__group_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
        output_header.write(header_name)
        output_header.write(cpp_name)
        output_header.write('#include "utils/android/jni_helper.h"')
        output_header.write(_JNI_BR)
        output_header.write(self.__def_cpp)
        output_header.write(_JNI_BR)
        output_header.write(jni_manager.generate_fetch_implementations(version, config))
        output_header.write(jni_manager.generate_http_function_implementations(config))
        output_header.write(self.__end_def_cpp + '\n')
        output_header.close()  # was leaked; close so content is flushed

    def __release(self):
        # Declaration of the native release method.
        return self.__release_method_name() + '\n' + ' (JNIEnv *, jobject, jlong);'

    def __release_method_name(self):
        return 'JNIEXPORT void JNICALL Java_com_lesschat_core_{0}_{1}_nativeRelease{1}'.\
            format(self.__group_name, self.__class_name)

    def __release_impl(self):
        # Implementation of the native release method: cast the handle back
        # to the C++ object and delete it.
        method_name = self.__release_method_name()
        para_name = ' (JNIEnv *env, jobject thiz, jlong handler)'
        step_1 = 'lesschat::{0}* {1} = reinterpret_cast<lesschat::{0}*>(handler);'\
            .format(self.__class_name, string_utils.first_char_to_lower(self.__class_name))
        step_2 = 'LCC_SAFE_DELETE({0});'.format(string_utils.first_char_to_lower(self.__class_name))
        return method_name + '\n' + para_name + '{{\n  {0}\n  {1}\n}}'.format(step_1, step_2)

    def __jni_get_jobject_by_core_object_declaration(self):
        return 'static jobject GetJ{0}ByCore{0}(const {0}& {1});'.format(
            self.__class_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))

    def __jni_get_jobject_by_core_object_implementation(self, config):
        impl = 'jobject JniHelper::GetJ{0}ByCore{0}(const {0}& {1}) {{\n'.format(
            self.__class_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
        impl += indent(2) + 'JNIEnv* env = GetJniEnv();\n'
        impl += indent(2) + 'if (!env) {\n'
        impl += indent(4) + 'sakura::log_error("Failed to get JNIEnv");\n'
        impl += indent(4) + 'return nullptr;\n'
        impl += indent(2) + '}\n\n'
        impl += indent(2) + 'jclass {0}Jclass = JniReferenceCache::SharedCache()->{1}_jclass();\n'.format(
            string_utils.first_char_to_lower(self.__class_name),
            string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))
        impl += indent(2) + 'jmethodID {0}ConstructorMethodID = env->GetMethodID({0}Jclass, "<init>", "('.format(
            string_utils.first_char_to_lower(self.__class_name))
        for jni_var in self.__jni_var_list:
            impl += jni_var.var_type.to_jni_signature()
        impl += ')V");\n\n'
        for jni_var in self.__jni_var_list:
            impl += indent(2) + jni_var.jni_var_assignment_by_cpp_variable(config) + '\n'
        impl += '\n'
        constructor_fst_line = indent(2) + 'jobject j{0}Object = env->NewObject('.format(self.__class_name)
        # Align the wrapped constructor arguments under the opening paren.
        num_constructor_indent = len(constructor_fst_line)
        impl += constructor_fst_line
        parameters = []
        jclass_instance_name = '{0}Jclass'.format(string_utils.first_char_to_lower(self.__class_name))
        constructor_method_id = '{0}ConstructorMethodID'.format(string_utils.first_char_to_lower(self.__class_name))
        parameters.append(constructor_method_id)
        for jni_var in self.__jni_var_list:
            parameters.append('j{0}'.format(string_utils.to_title_style_name(jni_var.name)))
        impl += jclass_instance_name + ',\n'
        for parameter in parameters:
            impl += indent(num_constructor_indent) + parameter + ',\n'
        impl = impl[:-2]
        impl += ');'
        impl += '\n'
        for jni_var in self.__jni_var_list:
            delete_method = jni_var.jni_delete_local_ref()
            if delete_method != '':
                impl += indent(2) + delete_method + '\n'
        impl += '\n'
        impl += indent(2) + 'return j{0}Object;'.format(self.__class_name)
        impl += '\n'
        impl += '}\n'
        impl += '\n'
        return impl

    def __jni_get_jobjects_array_by_core_objects_declaration(self):
        return 'static jobjectArray GetJ{0}sArrayByCore{0}s(const std::vector<std::unique_ptr<{0}>>& {1}s);'.format(
            self.__class_name, string_utils.cpp_class_name_to_cpp_file_name(self.__class_name))

    def __jni_get_jobjects_array_by_core_objects_implementation(self):
        object_name = string_utils.cpp_class_name_to_cpp_file_name(self.__class_name)
        impl = 'jobjectArray JniHelper::GetJ{0}sArrayByCore{0}s(const std::vector<std::unique_ptr<{0}>>& {1}s) {{'.format(
            self.__class_name, object_name)
        impl += '\n'
        impl += indent(2) + 'jclass {0}Jclass = JniReferenceCache::SharedCache()->{1}_jclass();\n'.format(
            string_utils.first_char_to_lower(self.__class_name),
            object_name)
        impl += indent(2) + 'JNIEnv* env = GetJniEnv();\n'
        impl += indent(2) + 'if (!env) {\n'
        impl += indent(4) + 'return env->NewObjectArray(0, {0}Jclass, NULL);\n'.format(
            string_utils.first_char_to_lower(self.__class_name))
        impl += indent(2) + '}\n\n'
        impl += indent(2) + 'jobjectArray jobjs = env->NewObjectArray({0}s.size(), {1}Jclass, NULL);\n\n'.format(
            object_name,
            string_utils.first_char_to_lower(self.__class_name))
        impl += indent(2) + 'jsize i = 0;\n'
        impl += indent(2) + 'for (auto it = {0}s.begin(); it != {0}s.end(); ++it) {{\n'.format(object_name)
        impl += indent(4) + 'jobject j{0} = GetJ{0}ByCore{0}(**it);\n'.format(self.__class_name)
        impl += indent(4) + 'env->SetObjectArrayElement(jobjs, i, j{0});\n'.format(self.__class_name)
        impl += indent(4) + 'env->DeleteLocalRef(j{0});\n'.format(self.__class_name)
        impl += indent(4) + '++i;\n'
        impl += indent(2) + '}\n'
        impl += indent(2) + 'return jobjs;\n'
        impl += '}'
        return impl
| DaYeSquad/worktilerwdemo | model-builder/skr_jni_builder/jni_class.py | Python | mit | 13,021 |
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import asyncio
import os
from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates import CertificateContentType, CertificatePolicy, WellKnownIssuerNames
from azure.keyvault.certificates.aio import CertificateClient
# ----------------------------------------------------------------------------------------------------------
# Prerequisites:
# 1. An Azure Key Vault (https://docs.microsoft.com/en-us/azure/key-vault/quick-create-cli)
#
# 2. azure-keyvault-certificates and azure-identity packages (pip install these)
#
# 3. Set up your environment to use azure-identity's DefaultAzureCredential. To authenticate a service principal with
# environment variables, set AZURE_CLIENT_ID, AZURE_CLIENT_SECRET, and AZURE_TENANT_ID
# (See https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/keyvault/azure-keyvault-administration#authenticate-the-client)
#
# 4. A PFX certificate on your machine. Set an environment variable, PFX_CERT_PATH, with the path to this certificate.
#
# 5. A PEM-formatted certificate on your machine. Set an environment variable, PEM_CERT_PATH, with the path to this
# certificate.
#
# ----------------------------------------------------------------------------------------------------------
# Sample - demonstrates importing a PFX and PEM-formatted certificate into Azure Key Vault
#
# 1. Import an existing PFX certificate (import_certificate)
#
# 2. Import an existing PEM-formatted certificate (import_certificate)
#
# ----------------------------------------------------------------------------------------------------------
async def run_sample():
    """Import a PFX and then a PEM certificate into the Key Vault named by
    the VAULT_URL environment variable (see the header notes for setup)."""
    # Instantiate a certificate client that will be used to call the service.
    # Here we use the DefaultAzureCredential, but any azure-identity credential can be used.
    VAULT_URL = os.environ["VAULT_URL"]
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url=VAULT_URL, credential=credential)
    # Let's import a PFX certificate first.
    # Assuming you already have a PFX containing your key pair, you can import it into Key Vault.
    # You can do this without setting a policy, but the policy is needed if you want the private key to be exportable
    # or to configure actions when a certificate is close to expiration.
    pfx_cert_name = "pfxCert"
    with open(os.environ["PFX_CERT_PATH"], "rb") as f:
        pfx_cert_bytes = f.read()
    imported_pfx_cert = await client.import_certificate(
        certificate_name=pfx_cert_name, certificate_bytes=pfx_cert_bytes
    )
    print("PFX certificate '{}' imported successfully.".format(imported_pfx_cert.name))
    # Now let's import a PEM-formatted certificate.
    # To import a PEM-formatted certificate, you must provide a CertificatePolicy that sets the content_type to
    # CertificateContentType.pem or the certificate will fail to import (the default content type is PFX).
    pem_cert_name = "pemCert"
    with open(os.environ["PEM_CERT_PATH"], "rb") as f:
        pem_cert_bytes = f.read()
    pem_cert_policy = CertificatePolicy(issuer_name=WellKnownIssuerNames.self, content_type=CertificateContentType.pem)
    imported_pem_cert = await client.import_certificate(
        certificate_name=pem_cert_name, certificate_bytes=pem_cert_bytes, policy=pem_cert_policy
    )
    print("PEM-formatted certificate '{}' imported successfully.".format(imported_pem_cert.name))
    # Async credentials/clients hold aiohttp sessions; close them explicitly.
    await credential.close()
    await client.close()
if __name__ == "__main__":
    # Drive the async sample to completion on an event loop.
    # NOTE(review): asyncio.run(run_sample()) is the modern (3.7+) equivalent.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run_sample())
    loop.close()
| Azure/azure-sdk-for-python | sdk/keyvault/azure-keyvault-certificates/samples/import_certificate_async.py | Python | mit | 3,725 |
from lcapy import R, C, L
# Draw the s-domain model of the pickup network -- (R=1 in series with L=2),
# in parallel with C=3 -- and save the schematic to pickup-s.png.
((R(1) + L(2)) | C(3)).s_model().draw('pickup-s.png')
| mph-/lcapy | doc/examples/networks/pickup-s.py | Python | lgpl-2.1 | 81 |
#!/usr/bin/env python -t
# -*- coding: utf-8 -*-
# Copyright (C) 2017 Jonathan Delvaux <pyshell@djoproject.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pyshell.arg.accessor.abstract import AbstractAccessor
| djo938/supershell | pyshell/arg/accessor/test/abstract_test.py | Python | gpl-3.0 | 790 |
# -*- coding: utf-8 -*-
from gitdh.modules import Module
class DatabaseSource(Module):
	"""Module feeding queued commits from the database backend into cron runs."""

	def isEnabled(self, action):
		# `is not None` replaces the non-idiomatic `not ... is None` (E714).
		# Active only for cron runs with a configured database backend.
		return action == 'cron' and self.dbBe is not None

	def source(self):
		"""Return the commits currently queued in the database backend."""
		return self.dbBe.getQueuedCommits()
| seoester/Git-Deployment-Handler | gitdh/modules/databasesource.py | Python | mit | 229 |
#!/usr/local/bin/python3
""" Demonstrates an opportunity for refactoring. """
def list_multiply(LIST_A, LIST_B):
    """ Sums two lists of integers and multiplies them together

    >>> list_multiply([3,4],[3,4])
    49
    >>> list_multiply([1,2,3,4],[10,20])
    300
    """
    # The built-in sum() replaces both the manual accumulator loop and the
    # manual while/counter loop (which also had redundant break logic).
    return sum(LIST_A) * sum(LIST_B)
def _test():
    """Run this module's doctests (see the list_multiply examples)."""
    import doctest, refactor
    return doctest.testmod(refactor)
if __name__ == "__main__":
    _test()
| ceeblet/OST_PythonCertificationTrack | Python1/python1/refactor.py | Python | mit | 675 |
# -*- coding: utf-8 -*-
# English (en_EN) UI strings for the game.
TITLE = "SUUUUPPPEEERRR Paper Plane v01"
# --- Home screen / main menu ---
HOME_OPTION_PLAY = "Play !"
HOME_OPTION_PLAY_LVL = "Level"
HOME_OPTION_PLAY_SHORT_LVL = "LVL"
HOME_OPTION_OPTIONS = "Options"
# Plane color choices
HOME_OPTION_OPTIONS_COLOR = "Color"
HOME_OPTION_OPTIONS_COLOR_BLUE = "Blue"
HOME_OPTION_OPTIONS_COLOR_BLACK = "Black"
HOME_OPTION_OPTIONS_COLOR_BROWN = "Brown"
HOME_OPTION_OPTIONS_COLOR_RED = "Red"
HOME_OPTION_OPTIONS_COLOR_ORANGE = "Orange"
# Display mode choices
HOME_OPTION_OPTIONS_DISPLAY = "Display"
HOME_OPTION_OPTIONS_DISPLAY_WINDOWED = "Windowed"
HOME_OPTION_OPTIONS_DISPLAY_FULLSCREEN = "Fullscreen"
HOME_OPTION_OPTIONS_LANG = "Language"
HOME_OPTION_QUIT = "Quit"
# --- In-game messages ---
GAME_CRASHED_NONHIGHSCORE = "You're a loooooser !"
GAME_CRASHED_NEWHIGHSCORE = "IS THE NEW BEST SCORE"
GAME_CRASHED_BUTTON_ESCAPE = "<ESCAPE> Main menu"
GAME_CRASHED_BUTTON_RETRY = "<ENTER> Retry"
GAME_PAUSE = "Pause"
GAME_PAUSE_BUTTON_CONTINUE = "Press <p> to continue the game looser !"
DONE = "Done !"
RESTART_REQUIRED = "You'll need to restart the app" | ghighi-du63000/Paper-Plane-Projekt | Source/lang/en_EN.py | Python | mit | 1,092 |
# -*- coding: latin1 -*-
################################################################################################
#
#
import snap, datetime, sys, time, json, os, os.path, shutil, time, random, math
import numpy as np
from math import*
# Script auxiliar para cálculos matemáticos que deve estar no mesmo diretório deste aqui.
import calc
# Script auxiliar para gerar histogramas
import histogram
import networkx as nx
reload(sys)
sys.setdefaultencoding('utf-8')
######################################################################################################################################################################
## Status - Versão 1 - Script para gerar coeficiente de clustering por ego. Nesse cálculo a média que é armazenada já é a média do modelo.
##
## ERRRO DE ALOCAÇÃO DE MEMÓRIA!!!!!
######################################################################################################################################################################
######################################################################################################################################################################
#
# Armazenar as propriedades do dataset
#
######################################################################################################################################################################
def net_structure(dataset_dir,output_dir,net,IsDir, weight):
    """Compute the per-ego-network average clustering coefficient and store
    aggregated statistics.

    dataset_dir -- directory holding one edge-list file per ego network
    output_dir  -- destination of <net>_clustering_coef.{json,txt}
    net         -- network label used in the output file names
    IsDir       -- True: load as directed graph (snap.PNGraph),
                   False: undirected (snap.PUNGraph)
    weight      -- not used inside this function; kept for the caller's
                   interface -- NOTE(review): confirm it is intentional
    """
    print("\n######################################################################\n")
    # Skip the whole computation if the JSON summary already exists.
    if os.path.isfile(str(output_dir)+str(net)+"_clustering_coef.json"):
        print ("Arquivo já existe: "+str(output_dir)+str(net)+"_clustering_coef.json")
    else:
        print ("Dataset clustering coefficient - " +str(dataset_dir))
        cf = []   # average clustering coefficient per ego network
        gcf = []  # averages from the "global" option (currently commented out)
        n = []    # vertex counts, one entry per ego network
        e = []    # edge counts, one entry per ego network
        i = 0
        for file in os.listdir(dataset_dir):
            i+=1
            print (str(output_dir)+str(net)+"/"+str(file)+" - Calculando propriedades para o ego "+str(i)+": "+str(file))
            if IsDir is True:
                G = snap.LoadEdgeList(snap.PNGraph, dataset_dir+file, 0, 1)  # load from a text file - may need a separator: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            else:
                G = snap.LoadEdgeList(snap.PUNGraph, dataset_dir+file, 0, 1)  # load from a text file - may need a separator: snap.LoadEdgeList(snap.PNGraph, file, 0, 1, '\t')
            # G.Dump()
            # time.sleep(5)
            #####################################################################################
            n.append(G.GetNodes())  # number of vertices
            e.append(G.GetEdges())  # number of edges
            n_nodes = G.GetNodes()
            n_edges = G.GetEdges()
            #####################################################################################
            # Local (per-node) option - returns the same result as the global one.
            if n_edges == 0:
                # Edge-less ego network: define its coefficient as 0.
                a = 0
                cf.append(a)
                print ("Nenhuma aresta encontrada para a rede-ego "+str(i)+" - ("+str(file))
            else:
                NIdCCfH = snap.TIntFltH()
                snap.GetNodeClustCf(G, NIdCCfH)
                _cf = []
                for item in NIdCCfH:
                    _cf.append(NIdCCfH[item])  # per-node clustering coefficient
                result = calc.calcular(_cf)  # helper: mean/variance/stddev of the list
                cf.append(result['media'])
                print ("Clustering Coef para o ego "+str(i)+" ("+str(file)+"): "+str(result['media']))
                print
        #####################################################################################
        # Global option - returns the same result as the local one.
        #
        # if n_edges == 0:
        #     a = 0
        #     gcf.append(a)
        # else:
        #     GraphClustCoeff = snap.GetClustCf (G)
        #     gcf.append(GraphClustCoeff)
        #     print "Clustering coefficient: %f" % GraphClustCoeff
        #     print
        #####################################################################################
        # Aggregate the per-ego averages over the whole dataset and persist.
        CF = calc.calcular_full(cf)
        overview = {}
        overview['ClusteringCoefficient'] = CF
        with open(str(output_dir)+str(net)+"_clustering_coef.json", 'w') as f:
            f.write(json.dumps(overview))
        with open(str(output_dir)+str(net)+"_clustering_coef.txt", 'w') as f:
            f.write("\n######################################################################\n")
            f.write ("Clustering Coef: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f \n"% (CF['media'],CF['variancia'],CF['desvio_padrao']))
            f.write("\n######################################################################\n")
        print ("\n######################################################################\n")
        print ("Clustering Coef: Média: %5.3f -- Var:%5.3f -- Des. Padrão: %5.3f \n"% (CF['media'],CF['variancia'],CF['desvio_padrao']))
        print ("\n######################################################################\n")
    print("\n######################################################################\n")
######################################################################################################################################################################
######################################################################################################################################################################
#
# Método principal do programa.
#
######################################################################################################################################################################
######################################################################################################################################################################
def main():
os.system('clear')
print "################################################################################"
print" "
print" Script para cálculo do coeficiente de agrupamento do dataset (rede-ego) "
print" "
print"#################################################################################"
print
print" 1 - Follow"
print" 9 - Follwowers"
print" 2 - Retweets"
print" 3 - Likes"
print" 3 - Mentions"
print " "
print" 5 - Co-Follow"
print" 10 - Co-Followers"
print" 6 - Co-Retweets"
print" 7 - Co-Likes"
print" 8 - Co-Mentions"
print
op = int(raw_input("Escolha uma opção acima: "))
if op in (5,6,7,8,10): # Testar se é um grafo direcionado ou não
isdir = False
elif op in (1,2,3,4,9):
isdir = True
else:
print("Opção inválida! Saindo...")
sys.exit()
if op == 1 or op == 9: # Testar se é um grafo direcionado ou não
weight = False
else:
weight = True
######################################################################
net = "n"+str(op)
######################################################################
######################################################################
dataset_dir = "/home/amaury/graphs_hashmap/"+str(net)+"/graphs_with_ego/" ############### Arquivo contendo arquivos com a lista de arestas das redes-ego
if not os.path.isdir(dataset_dir):
print("Diretório dos grafos não encontrado: "+str(dataset_dir))
else:
output_dir = "/home/amaury/Dropbox/net_structure_hashmap/clustering_coefficient_global/graphs_with_ego/"
if not os.path.exists(output_dir):
os.makedirs(output_dir)
net_structure(dataset_dir,output_dir,net,isdir,weight) # Inicia os cálculos...
######################################################################
######################################################################
dataset_dir2 = "/home/amaury/graphs_hashmap/"+str(net)+"/graphs_without_ego/" ############### Arquivo contendo arquivos com a lista de arestas das redes-ego
if not os.path.isdir(dataset_dir2):
print("Diretório dos grafos não encontrado: "+str(dataset_dir2))
else:
output_dir2 = "/home/amaury/Dropbox/net_structure_hashmap/clustering_coefficient_global/graphs_without_ego/"
if not os.path.exists(output_dir2):
os.makedirs(output_dir2)
net_structure(dataset_dir2,output_dir2,net,isdir,weight) # Inicia os cálculos...
######################################################################
######################################################################
print("\n######################################################################\n")
print("Script finalizado!")
print("\n######################################################################\n")
######################################################################################################################################################################
#
# INÍCIO DO PROGRAMA
#
######################################################################################################################################################################
#Executa o método main
# Entry point: run the interactive main().
if __name__ == "__main__": main()
# coding=utf-8
__author__ = 'stasstels'
import cv2
import sys
# CLI: python show.py <image> <targets-file>
image = sys.argv[1]    # path of the image to annotate
targets = sys.argv[2]  # text file; each line: "<label> <x> <y>"

# Load the image in color (BGR).
img = cv2.imread(image, cv2.IMREAD_COLOR)

with open(targets, "r") as f:
    for line in f:
        print line
        (_, x, y) = line.split()
        # Draw a filled magenta circle of radius 20 at each target position.
        cv2.circle(img, (int(x), int(y)), 20, (255, 0, 255), -1)

# Show the annotated image until a key is pressed.
cv2.namedWindow("image", cv2.WINDOW_NORMAL)
cv2.imshow('image', img)
cv2.waitKey(0)
cv2.destroyAllWindows()
| OsipovStas/ayc-2013 | ayc/show.py | Python | gpl-2.0 | 455 |
from .fangraphs_stats_base import FangraphsStatsBase
class FangraphsFieldingStats(FangraphsStatsBase):
    """Column-id constants for Fangraphs fielding leaderboard data.

    Each value is the string id of a stat column; long-name aliases point at
    the same id (e.g. ``FIELDING_PCT`` is ``FP``).  Ids marked ``# ?`` have
    unverified column meanings.
    """
    COMMON = 'c'
    LINE_BREAK = '-1'
    NAME = '0'
    TEAM = '1'
    SEASON = '2'
    POS = '3'
    POSITION = POS
    G = '4'
    GAMES = G
    GS = '5'
    GAMES_STARTED = GS
    INN = '6'
    INNINGS = INN
    PO = '7'
    PUT_OUTS = PO
    A = '8'
    ASSISTS = A
    E = '9'
    ERRORS = E
    FE = '10'
    FORCED_ERRORS = FE
    TE = '11'  # ?
    DP = '12'  # ?
    DPS = '13'  # ?
    DPT = '14'  # ?
    DPF = '15'  # ?
    SCP = '16'  # ?
    SB = '17'  # ?
    CS = '18'  # ?
    PB = '19'  # ?
    WP = '20'  # ?
    FP = '21'
    FIELDING_PCT = FP
    TZ = '22'  # ?
    RSB = '23'  # ?
    RGDP = '24'  # ?
    RARM = '25'  # ?
    RGFP = '26'  # ?
    RPM = '27'  # ?
    DRS = '28'  # ?
    BIZ = '29'  # ?
    PLAYS = '30'
    RZR = '31'  # ?
    OOZ = '32'  # ?
    TZL = '33'  # ?
    FSR = '34'  # ?
    ARM = '35'  # ?
    DPR = '36'  # ?
    RNGR = '37'  # ?
    ERRR = '38'  # ?
    UZR = '39'
    ULTIMATE_ZONE_RATING = UZR
    UZR_150 = '40'  # UZR/150
    ULTIMATE_ZONE_RATING_PER_150 = UZR_150
    CPP = '41'  # ?
    RPP = '42'  # ?
    DEF = '43'  # ?
    # Buckets of defensive plays by estimated out probability (e.g. "0%",
    # "1-10%"): MADE_* ids hold the made percentage, PLAYS_* the attempt count.
    MADE_ZERO_PCT = '44'  # 0%
    ZERO_PCT = MADE_ZERO_PCT
    PLAYS_ZERO_PCT = '45'  # '# 0%'
    NUMBER_ZERO_PCT = PLAYS_ZERO_PCT
    MADE_ONE_TO_TEN_PCT = '46'  # 1-10%
    ONE_TO_TEN_PCT = MADE_ONE_TO_TEN_PCT
    PLAYS_ONE_TO_TEN_PCT = '47'  # '# 1-10%'
    NUMBER_ONE_TO_TEN_PCT = PLAYS_ONE_TO_TEN_PCT
    MADE_TEN_TO_FORTY_PCT = '48'  # 10-40%
    TEN_TO_FORTY_PCT = MADE_TEN_TO_FORTY_PCT
    PLAYS_TEN_TO_FORTY_PCT = '49'  # '# 10-40%'
    NUMBER_TEN_TO_FORTY_PCT = PLAYS_TEN_TO_FORTY_PCT
    MADE_FORTY_TO_SIXTY_PCT = '50'  # 40-60%
    FORTY_TO_SIXTY_PCT = MADE_FORTY_TO_SIXTY_PCT
    PLAYS_FORTY_TO_SIXTY_PCT = '51'  # '# 40-60%'
    NUMBER_FORTY_TO_SIXTY_PCT = PLAYS_FORTY_TO_SIXTY_PCT
    MADE_SIXTY_TO_NINETY_PCT = '52'  # 60-90%
    SIXTY_TO_NINETY_PCT = MADE_SIXTY_TO_NINETY_PCT
    PLAYS_SIXTY_TO_NINETY_PCT = '53'  # '# 60-90%'
    NUMBER_SIXTY_TO_NINETY_PCT = PLAYS_SIXTY_TO_NINETY_PCT
    MADE_NINETY_TO_ONE_HUNDRED_PCT = '54'  # 90-100%
    NINETY_TO_ONE_HUNDRED_PCT = MADE_NINETY_TO_ONE_HUNDRED_PCT
    PLAYS_NINETY_TO_ONE_HUNDRED_PCT = '55'  # '# 90-100%'
    NUMBER_NINETY_TO_ONE_HUNDRED_PCT = PLAYS_NINETY_TO_ONE_HUNDRED_PCT
    RSZ = '56'  # ?
    RCERA = '57'  # ?
    RTS = '58'  # ?
    FRAMING = '59'
    FRM = FRAMING
| jldbc/pybaseball | pybaseball/enums/fangraphs/fielding_data_enum.py | Python | mit | 4,344 |
from os.path import exists, join
import shutil
import tempfile
import time
from lwr.managers.queued import QueueManager
from lwr.managers.stateful import StatefulManagerProxy
from lwr.tools.authorization import get_authorizer
from .test_utils import TestDependencyManager
from galaxy.util.bunch import Bunch
from galaxy.jobs.metrics import NULL_JOB_INSTRUMENTER
def test_persistence():
    """
    Tests persistence of a manager's jobs: a job queued while no worker
    threads run must survive a manager restart and execute afterwards.
    """
    staging_directory = tempfile.mkdtemp()
    queue2 = None  # BUGFIX: defined up front so cleanup cannot hit a NameError.
    try:
        app = Bunch(staging_directory=staging_directory,
                    persistence_directory=staging_directory,
                    authorizer=get_authorizer(None),
                    dependency_manager=TestDependencyManager(),
                    job_metrics=Bunch(default_job_instrumenter=NULL_JOB_INSTRUMENTER),
                    )
        assert not exists(join(staging_directory, "queued_jobs"))
        # With zero worker threads the job gets persisted but never run.
        queue1 = StatefulManagerProxy(QueueManager('test', app, num_concurrent_jobs=0))
        job_id = queue1.setup_job('4', 'tool1', '1.0.0')
        touch_file = join(staging_directory, 'ran')
        queue1.launch(job_id, 'touch %s' % touch_file)
        time.sleep(.4)
        assert not exists(touch_file)
        queue1.shutdown()
        # A fresh manager with a worker thread must pick the job up again.
        queue2 = StatefulManagerProxy(QueueManager('test', app, num_concurrent_jobs=1))
        time.sleep(1)
        assert exists(touch_file)
    finally:
        shutil.rmtree(staging_directory)
        if queue2 is not None:
            try:
                queue2.shutdown()
            except Exception:
                # BUGFIX: was a bare 'except:' that also swallowed
                # SystemExit/KeyboardInterrupt; best-effort shutdown only.
                pass
| jmchilton/lwr | test/persistence_test.py | Python | apache-2.0 | 1,526 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.ops.linalg_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
def _random_pd_matrix(n, rng):
"""Random positive definite matrix."""
temp = rng.randn(n, n)
return temp.dot(temp.T)
class CholeskySolveTest(test.TestCase):
  """Checks linalg_ops.cholesky_solve on random positive definite systems:
  solving A x = rhs via the Cholesky factor must reproduce rhs as A x."""

  _use_gpu = False  # overridden to True by CholeskySolveGpuTest below

  def setUp(self):
    # Fixed seed keeps the random matrices reproducible across runs.
    self.rng = np.random.RandomState(0)

  def test_works_with_five_different_random_pos_def_matrices(self):
    with self.test_session():
      for n in range(1, 6):
        # Looser tolerance for single precision.
        for np_type, atol in [(np.float32, 0.05), (np.float64, 1e-5)]:
          # Create 2 x n x n matrix
          array = np.array(
              [_random_pd_matrix(n, self.rng), _random_pd_matrix(n, self.rng)
              ]).astype(np_type)
          chol = linalg_ops.cholesky(array)
          for k in range(1, 3):
            rhs = self.rng.randn(2, n, k).astype(np_type)
            x = linalg_ops.cholesky_solve(chol, rhs)
            # A @ x should round-trip back to the right-hand side.
            self.assertAllClose(
                rhs, math_ops.matmul(array, x).eval(), atol=atol)
class CholeskySolveGpuTest(CholeskySolveTest):
  # Re-runs every CholeskySolveTest case with the GPU flag enabled.
  _use_gpu = True
class EyeTest(test.TestCase):
  """Tests for linalg_ops.eye: static and placeholder-fed (dynamic) shape
  arguments, rectangular and empty matrices, and batch shapes.

  Refactored: the original had one near-identical body per test case; the
  shared logic now lives in two private helpers, one per shape mode.
  """

  def _check_static(self, num_rows, num_columns=None, batch_shape=None,
                    dtype=np.float32):
    """Build eye() with statically known shape args; compare to np.eye."""
    num_columns_eff = num_rows if num_columns is None else num_columns
    np_eye = np.eye(num_rows, num_columns_eff).astype(dtype)
    kwargs = {'dtype': dtype}
    if num_columns is not None:
      kwargs['num_columns'] = num_columns
    if batch_shape is not None:
      kwargs['batch_shape'] = batch_shape
    with self.test_session():
      eye = linalg_ops.eye(num_rows, **kwargs)
      # Static shape must be known without evaluation.
      expected_shape = (batch_shape or []) + [num_rows, num_columns_eff]
      self.assertAllEqual(expected_shape, eye.get_shape())
      eye_v = eye.eval()
      if batch_shape is None:
        self.assertAllEqual(np_eye, eye_v)
      else:
        # Every batch member must equal the same identity block.
        for i in range(batch_shape[0]):
          for j in range(batch_shape[1]):
            self.assertAllEqual(np_eye, eye_v[i, j, :, :])

  def _check_dynamic(self, num_rows, num_columns=None, batch_shape=None,
                     dtype=np.float32):
    """Feed all shape arguments through placeholders; compare to np.eye."""
    num_columns_eff = num_rows if num_columns is None else num_columns
    np_eye = np.eye(num_rows, num_columns_eff).astype(dtype)
    with self.test_session():
      num_rows_ph = array_ops.placeholder(dtypes.int32)
      batch_shape_ph = array_ops.placeholder(dtypes.int32)
      feed_dict = {num_rows_ph: num_rows, batch_shape_ph: batch_shape}
      kwargs = {'dtype': dtype, 'batch_shape': batch_shape_ph}
      if num_columns is not None:
        num_columns_ph = array_ops.placeholder(dtypes.int32)
        kwargs['num_columns'] = num_columns_ph
        feed_dict[num_columns_ph] = num_columns
      eye = linalg_ops.eye(num_rows_ph, **kwargs)
      eye_v = eye.eval(feed_dict=feed_dict)
      for i in range(batch_shape[0]):
        for j in range(batch_shape[1]):
          self.assertAllEqual(np_eye, eye_v[i, j, :, :])

  def test_non_batch_2x2(self):
    self._check_static(2)

  def test_non_batch_2x3(self):
    self._check_static(2, num_columns=3)

  def test_1x3_batch_4x4(self):
    self._check_static(4, batch_shape=[1, 3])

  def test_1x3_batch_4x4_dynamic(self):
    self._check_dynamic(4, batch_shape=[1, 3])

  def test_1x3_batch_5x4(self):
    self._check_static(5, num_columns=4, batch_shape=[1, 3])

  def test_1x3_batch_5x4_dynamic(self):
    self._check_dynamic(5, num_columns=4, batch_shape=[1, 3])

  def test_non_batch_0x0(self):
    self._check_static(0, dtype=np.int64)

  def test_non_batch_2x0(self):
    self._check_static(2, num_columns=0, dtype=np.int64)

  def test_non_batch_0x2(self):
    self._check_static(0, num_columns=2, dtype=np.int64)

  def test_1x3_batch_0x0(self):
    self._check_static(0, batch_shape=[1, 3])

  def test_1x3_batch_2x0(self):
    self._check_static(2, num_columns=0, batch_shape=[1, 3])

  def test_1x3_batch_0x2(self):
    self._check_static(0, num_columns=2, batch_shape=[1, 3])
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
| sandeepgupta2k4/tensorflow | tensorflow/python/kernel_tests/linalg_ops_test.py | Python | apache-2.0 | 8,265 |
# -*- coding: utf-8 -*-
# This file is part of periscope3.
# Copyright (c) 2013 Roman Hudec <black3r@klikni.cz>
#
# This file contains parts of code from periscope.
# Copyright (c) 2008-2011 Patrick Dessalle <patrick@dessalle.be>
#
# periscope is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# periscope is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with emesene; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#from SubtitleSource import SubtitleSource # require a key in the config file
## Working plug-ins:
from .OpenSubtitles import OpenSubtitles
from .Subtitulos import Subtitulos
from .TheSubDB import TheSubDB
from .SubsWiki import SubsWiki
from .Addic7ed import Addic7ed
from .Podnapisi2 import Podnapisi # not really sure if working, needs proper testing
## Currently not working (untested yet)
#from .SubScene import SubScene
## Currently not working (code broken / unfinished)
#from .Podnapisi import Podnapisi # Podnapisi plug-in not working currently (on-site changes)
#from .TvSubtitles import TvSubtitles # Unfinished/Not working plug-in
#from .SubDivX import SubDivX # Broken & Not worth it (only espanol subtitles and can't really search well)
## Currently not working (site faults)
#from .LegendasTV import LegendasTV # LegendasTV plug-in not working currently (site offline + requires username/password)
#from .BierDopje import BierDopje # BierDopje plug-in not working currently (need API key)
| black3r/periscope3 | periscope/plugins/__init__.py | Python | gpl-2.0 | 1,976 |
'''Module to calculate combinations of elements
This module contains a function to calculate all subsets of a given length of
a set of n elements.
E.g. All subsets of length 2 from a set of 5 elements.
There exist 10 subsets:
>>> ncombinations(5, 2)
10
And the subsets are:
>>> list(combinations(5, 2))
[[0, 1], [0, 2], [0, 3], [0, 4], [1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4]]
subsets() is a convenience function that yields the actual subsets from a list.
Based on: Kenneth H. Rosen, Discrete Mathematics and Its Applications, 2nd edition (NY: McGraw-Hill, 1991), pp. 284-286
But also interesting:
http://code.activestate.com/recipes/190465-generator-for-permutations-combinations-selections
# License: GNU General Public License, see http://www.clips.ua.ac.be/~vincent/scripts/LICENSE.txt
'''
__date__ = 'July 2013'
__author__ = 'Vincent Van Asch'
__version__ = '1.2.0'
### Helper functions #################################################
def fact(number, bound=1):
    '''Return the falling product: number*(number-1)*(number-2)*...*bound

    Raises ValueError when number < bound.
    '''
    if number < bound:
        raise ValueError('number should be equal or greater than bound')
    # Iterative form: the original recursion exhausted the interpreter's
    # recursion limit for large number-bound spans.
    result = bound
    for k in range(bound + 1, number + 1):
        result *= k
    return result
def _next(a, numLeft, n, r, total):
'''Calculate next step'''
if numLeft == total:
return a, numLeft-1
else:
i=r-1
while a[i] == n-r+i:
i=i-1
a[i] = a[i] + 1
for j in xrange(i+1, r):
a[j] = a[i] + j - i
return a, numLeft-1
### Main function ####################################################
def ncombinations(n, r):
    """
    Returns the total number of unique subsets of length r
    you can take from n elements (the binomial coefficient C(n, r)).

    n is the number of elements.
    r is the length of the subsets.
    """
    # Check
    if n < 0: raise ValueError('n should be positive.')
    if r < 0: raise ValueError('r should be positive.')
    if r == 0: return 1
    # Multiplicative formula with integer arithmetic throughout: the old
    # fact()/fact() expression used '/', which returns a float on Python 3,
    # and computing full factorials risked very large intermediates.
    numerator = 1
    denominator = 1
    for i in range(r):
        numerator *= n - i
        denominator *= i + 1
    return numerator // denominator
def combinations(n, r):
    """
    Yields all unique subsets of length r (as lists, in lexicographic
    order) that you can take from n elements.

    n is the number of elements.
    r is the length of the subsets.
    """
    # Check (performed on first iteration, as with the original generator)
    if n < 0: raise ValueError('n should be positive.')
    if r < 0: raise ValueError('r should be positive.')
    if r == 0:
        # Historical behaviour: a single empty tuple.
        yield ()
        return  # PEP 479 fix: 'raise StopIteration' is a RuntimeError on py3.7+
    if r > n or n < 1:
        return
    # Delegate the enumeration to the standard library; it produces the
    # same lexicographic order the hand-rolled _next() loop did.
    import itertools
    for comb in itertools.combinations(range(n), r):
        yield list(comb)
def subsets(l, r):
    '''Takes a list with elements and yields all
    unique subsets of length r.

    l: a list
    r: an integer (length of the subset)
    '''
    n = len(l)
    for index_set in combinations(n, r):
        members = [l[i] for i in index_set]
        yield tuple(members)
def subcombinations(*sizes):
    '''Return all element combinations (the Cartesian product of ranges).

    For example:

    >>> subcombinations(3, 2)
    [[0, 0], [0, 1], [1, 0], [1, 1], [2, 0], [2, 1]]

    Thus, each element of range(3) is combined with each element of
    range(2), yielding 3*2 element combinations.

    The number of arguments is free.
    '''
    # Plain loop instead of reduce(): 'reduce' is no longer a builtin on
    # Python 3 (it moved to functools), and this also handles zero sizes.
    total = 1
    for size in sizes:
        total *= size

    limit = 10000000
    if total > limit:
        raise ValueError('The number of combinations would exceed the limit %d' % limit)

    # Breadth-first expansion: extend every partial combination by each
    # value of the next range.
    data = [[]]
    for size in sizes:
        cache = []
        for part in data:
            for i in range(size):
                cache.append(part + [i])
        data = cache[:]

    assert len(data) == total
    return data
| mikekestemont/ruzicka | code/ruzicka/combinations.py | Python | mit | 4,154 |
import _plotly_utils.basevalidators
class HoverinfosrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``hoverinfosrc`` property of ``funnel`` traces
    (a ``SrcValidator`` subclass; auto-generated boilerplate)."""

    def __init__(self, plotly_name="hoverinfosrc", parent_name="funnel", **kwargs):
        # Callers may override edit_type/role; otherwise the defaults below apply.
        super(HoverinfosrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "none"),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
| plotly/python-api | packages/python/plotly/plotly/validators/funnel/_hoverinfosrc.py | Python | mit | 452 |
# This file is part of pybliographer
#
# Copyright (C) 1998-2004 Frederic GOBRY
# Email : gobry@pybliographer.org
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
""" This module is a formatter for HTML output """
from Pyblio import Formatter, Autoload
import string
class HTML (Formatter.Formatter):
    """Bibliography formatter that renders entries as an HTML
    definition list."""

    coding = 'HTML'

    # Logical text style -> HTML tag used to render it.
    _STYLE_TAGS = {'bold': 'b', 'italic': 'i', 'slanted': 'i', 'emph': 'em'}

    def write (self, text, style = None):
        tag = self._STYLE_TAGS.get(style)
        if tag is None:
            self.out.write(text)
        else:
            self.out.write('<%s>%s</%s>' % (tag, text, tag))

    def start_group (self, id, table = None):
        self.out.write('<dl>\n')
        return

    def end_group (self):
        self.out.write('</dl>\n')
        self.out.write('<p align="right"><small>')
        self.out.write('Generated by Pybliographer')
        self.out.write('</small></p>\n')
        return

    def start (self, key, entry):
        # Fall back to an automatically generated key when none is given.
        if key is None:
            key = self.next_key()
        self.out.write('<dt>[%s]<dd>' % key)
        return

    def separator (self):
        self.out.write(" ")
        return
# Make the HTML formatter discoverable through the plug-in registry.
Autoload.register ('output', 'HTML', HTML)
| matthew-brett/pyblio | Pyblio/Output/html.py | Python | gpl-2.0 | 1,954 |
# -*- coding: utf-8 -*-
# Copyright 2018 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, models
from openerp.addons.hr_attendance.hr_attendance import hr_attendance
@api.multi
def _altern_si_so(self):
    # Replacement for hr_attendance._altern_si_so (see _register_hook in this
    # file): always reports the sign-in/sign-out sequence as valid, i.e. the
    # alternation constraint never fails.
    # NOTE(review): confirm that disabling the check is intended.
    return True
class HrAttendanceMonkeypatch(models.TransientModel):
    """Transient model whose sole purpose is to monkey-patch
    ``hr_attendance._altern_si_so`` at registry load time."""
    _name = "hr.attendance_monkeypatch"

    def _register_hook(self, cr):
        # Swap in the module-level replacement defined in this file, then
        # continue with the normal hook registration.
        hr_attendance._altern_si_so = _altern_si_so
        _super = super(HrAttendanceMonkeypatch, self)
        return _super._register_hook(cr)
| open-synergy/opnsynid-hr | hr_attendance_monkeypatch/models/hr_attendance_monkeypatch.py | Python | agpl-3.0 | 561 |
from __future__ import absolute_import
import re
from copy import copy
import numpy as nm
from sfepy.base.base import assert_, Struct
import six
from six.moves import range
# Pre-compiled extractor of region names from selectors such as
# 'r.Omega +v r.Left'.  Raw string fixes the invalid '\.' escape that
# triggers a DeprecationWarning/SyntaxWarning on modern Python.
_depends = re.compile(r'r\.([a-zA-Z_\-0-9.]+)').findall

def get_parents(selector):
    """
    Given a region selector, return names of regions it is based on.
    """
    parents = _depends(selector)

    return parents
def get_dependency_graph(region_defs):
    """
    Return a dependency graph and a name-sort name mapping for given
    region definitions.
    """
    graph = {}
    name_to_sort_name = {}
    for sort_name, rdef in six.iteritems(region_defs):
        name, sel = rdef.name, rdef.select
        # Region names must be unique across definitions.
        if name in name_to_sort_name:
            msg = 'region %s/%s already defined!' % (sort_name, name)
            raise ValueError(msg)
        name_to_sort_name[name] = sort_name
        if name not in graph:
            # Slot 0 is a 'done' flag consumed by sort_by_dependency();
            # the remaining slots list the names this region depends on.
            graph[name] = [0]
        for parent in get_parents(sel):
            graph[name].append(parent)
        # An explicitly declared parent region is a dependency too.
        if rdef.get('parent', None) is not None:
            graph[name].append(rdef.parent)
    return graph, name_to_sort_name
def sort_by_dependency(graph):
    # Topological sort of the dependency graph built by
    # get_dependency_graph(): graph[name][0] is a 'done' flag, the rest of
    # the list holds names of regions that must be processed first.
    # Raises ValueError on unknown or circular dependencies.
    out = []

    n_nod = len(graph)
    idone = 0
    idone0 = -1
    while idone < n_nod:

        dep_removed = 0
        for node, deps in six.iteritems(graph):

            if (len(deps) == 1) and not deps[0]:
                # No remaining dependencies -> emit the node and mark done.
                out.append(node)
                deps[0] = 1
                idone += 1

            elif not deps[0]:
                for ii, dep in enumerate(deps[1:]):
                    if not dep in graph:
                        msg = 'dependency %s of region %s does not exist!'
                        raise ValueError(msg % (dep, node))

                    if graph[dep][0]:
                        # Dependency already emitted -> drop it from the list.
                        ir = deps.index(dep)
                        deps.pop(ir)
                        dep_removed += 1

        # No progress in a full pass means the remaining nodes form a cycle.
        if (idone <= idone0) and not dep_removed:
            raise ValueError('circular dependency')
        idone0 = idone

    return out
def are_disjoint(r1, r2):
    """
    Check if the regions `r1` and `r2` are disjoint.

    Uses vertices for the check - `*_only` regions not allowed.
    """
    common = nm.intersect1d(r1.vertices, r2.vertices, assume_unique=True)
    return len(common) == 0
def _join(def1, op, def2):
return '(' + def1 + ' ' + op + ' ' + def2 + ')'
class Region(Struct):
"""
Region defines a subset of a FE domain.
Region kinds:
- cell_only, facet_only, face_only, edge_only, vertex_only - only the
specified entities are included, others are empty sets (so that the
operators are still defined)
- cell, facet, face, edge, vertex - entities of higher dimension are not
included
The 'cell' kind is the most general and it is the default.
Region set-like operators: + (union), - (difference), * (intersection),
followed by one of ('v', 'e', 'f', 'c', and 's') for vertices, edges,
faces, cells, and facets.
Created: 31.10.2005
"""
__can = {
'cell' : (1, 1, 1, 1),
'face' : (1, 1, 1, 0),
'edge' : (1, 1, 0, 0),
'vertex' : (1, 0, 0, 0),
'cell_only' : (0, 0, 0, 1),
'face_only' : (0, 0, 1, 0),
'edge_only' : (0, 1, 0, 0),
'vertex_only' : (1, 0, 0, 0),
}
__facet_kinds = {
1 : {'facet' : 'vertex', 'facet_only' : 'vertex_only'},
2 : {'facet' : 'edge', 'facet_only' : 'edge_only'},
3 : {'facet' : 'face', 'facet_only' : 'face_only'},
}
__op_to_fun = {
'+' : nm.union1d,
'-' : nm.setdiff1d,
'*' : nm.intersect1d,
}
@staticmethod
def from_vertices(vertices, domain, name='region', kind='cell'):
"""
Create a new region containing given vertices.
Parameters
----------
vertices : array
The array of vertices.
domain : Domain instance
The domain containing the vertices.
name : str, optional
The name of the region.
kind : str, optional
The kind of the region.
Returns
-------
obj : Region instance
The new region.
"""
obj = Region(name, 'given vertices', domain, '', kind=kind)
obj.vertices = vertices
return obj
@staticmethod
def from_facets(facets, domain, name='region', kind='facet', parent=None):
"""
Create a new region containing given facets.
Parameters
----------
facets : array
The array with indices to unique facets.
domain : Domain instance
The domain containing the facets.
name : str, optional
The name of the region.
kind : str, optional
The kind of the region.
parent : str, optional
The name of the parent region.
Returns
-------
obj : Region instance
The new region.
"""
obj = Region(name, 'given faces', domain, '', kind=kind, parent=parent)
obj.facets = facets
return obj
@staticmethod
def from_cells(cells, domain, name='region', kind='cell', parent=None):
"""
Create a new region containing given cells.
Parameters
----------
cells : array
The array of cells.
domain : Domain instance
The domain containing the facets.
name : str, optional
The name of the region.
kind : str, optional
The kind of the region.
parent : str, optional
The name of the parent region.
Returns
-------
obj : Region instance
The new region.
"""
obj = Region(name, 'given cells', domain, '', kind=kind, parent=parent)
obj.cells = cells
return obj
    def __init__(self, name, definition, domain, parse_def, kind='cell',
                 parent=None):
        """
        Create region instance.
        Parameters
        ----------
        name : str
            The region name, either given, or automatic for intermediate
            regions.
        definition : str
            The region selector definition.
        domain : Domain instance
            The domain of the region.
        parse_def : str
            The parsed definition of the region.
        kind : str
            The region kind - one of 'cell', 'facet', 'face', 'edge', 'vertex',
            'cell_only', ..., 'vertex_only'.
        parent : str, optional
            The name of the parent region.
        """
        tdim = domain.shape.tdim
        # entities[i] caches the region entities of topological dimension i;
        # None marks "not computed yet" - the properties fill them lazily.
        Struct.__init__(self,
                        name=name, definition=definition,
                        domain=domain, parse_def=parse_def,
                        n_v_max=domain.shape.n_nod, dim=domain.shape.dim,
                        tdim=tdim, kind_tdim=None,
                        entities=[None] * (tdim + 1),
                        kind=None, parent=parent, shape=None,
                        mirror_region=None, is_empty=False)
        self.set_kind(kind)
    def set_kind(self, kind):
        """Set the region kind and update the entity-availability flags."""
        if kind == self.kind: return
        self.kind = kind
        # Translate the dimension-independent 'facet'/'facet_only' kinds to
        # the concrete kind for this topological dimension.
        if 'facet' in kind:
            self.true_kind = self.__facet_kinds[self.tdim][kind]
        else:
            self.true_kind = kind
        # (vertices, edges, faces, cells) flags as defined for a 3D mesh.
        can = [bool(ii) for ii in self.__can[self.true_kind]]
        self.can_vertices = can[0]
        # NOTE(review): for tdim 1/2 the flags mix two conventions -
        # self.can takes the cell slot from can[3] while can_cells reads
        # can[1]/can[2] (entities of dimension tdim); confirm this is the
        # intended behaviour for the '*_only' kinds.
        if self.tdim == 1:
            self.can = (can[0], can[3])
            self.can_cells = can[1]
        elif self.tdim == 2:
            self.can = (can[0], can[1], can[3])
            self.can_edges = can[1]
            self.can_cells = can[2]
        else:
            self.can = can
            self.can_edges = can[1]
            self.can_faces = can[2]
            self.can_cells = can[3]
        # Entity sets the kind cannot hold are pinned to empty arrays.
        for ii, ican in enumerate(self.can):
            if not ican:
                self.entities[ii] = nm.empty(0, dtype=nm.uint32)
        self.set_kind_tdim()
def set_kind_tdim(self):
if 'vertex' in self.true_kind:
self.kind_tdim = 0
elif 'edge' in self.true_kind:
self.kind_tdim = 1
elif 'face' in self.true_kind:
self.kind_tdim = 2
elif 'cell' in self.true_kind:
self.kind_tdim = self.tdim
    @property
    def vertices(self):
        # Lazy evaluation: on first access derive the vertices from the
        # highest-dimensional entities already available.
        if self.entities[0] is None:
            self._access(1)
            self.setup_from_highest(0)
        return self.entities[0]
    @vertices.setter
    def vertices(self, vals):
        # Reject assignment for kinds that cannot hold vertices.
        if self.can_vertices:
            self.entities[0] = nm.asarray(vals, dtype=nm.uint32)
        else:
            raise ValueError('region "%s" cannot have vertices!' % self.name)
    @property
    def edges(self):
        if self.tdim <= 1:
            raise AttributeError('1D region has no edges!')
        # Lazy evaluation: for edge kinds complete the edges from vertices,
        # otherwise derive them from the higher-dimensional entities.
        if self.entities[1] is None:
            if 'edge' in self.true_kind:
                self.setup_from_vertices(1)
            else:
                self._access(2)
                self.setup_from_highest(1)
        return self.entities[1]
    @edges.setter
    def edges(self, vals):
        # Reject assignment for kinds that cannot hold edges.
        if self.can_edges:
            self.entities[1] = nm.asarray(vals, dtype=nm.uint32)
        else:
            raise ValueError('region "%s" cannot have edges!' % self.name)
    @property
    def faces(self):
        if self.tdim <= 2:
            raise AttributeError('1D or 2D region has no faces!')
        # Lazy evaluation: for face kinds complete the faces from vertices,
        # otherwise derive them from the cells.
        if self.entities[2] is None:
            if 'face' in self.true_kind:
                self.setup_from_vertices(2)
            else:
                self._access(3)
                self.setup_from_highest(2)
        return self.entities[2]
    @faces.setter
    def faces(self, vals):
        # Reject assignment for kinds that cannot hold faces.
        if self.can_faces:
            self.entities[2] = nm.asarray(vals, dtype=nm.uint32)
        else:
            raise ValueError('region "%s" cannot have faces!' % self.name)
@property
def facets(self):
if self.tdim == 3:
return self.faces
elif self.tdim == 2:
return self.edges
else:
return self.vertices
@facets.setter
def facets(self, vals):
if self.tdim == 3:
self.faces = vals
elif self.tdim == 2:
self.edges = vals
else:
self.vertices = vals
    @property
    def cells(self):
        # Lazy evaluation: cells are always completed from the vertices.
        if self.entities[self.tdim] is None:
            self.setup_from_vertices(self.tdim)
        return self.entities[self.tdim]
    @cells.setter
    def cells(self, vals):
        # Reject assignment for kinds that cannot hold cells.
        if self.can_cells:
            self.entities[self.tdim] = nm.asarray(vals, dtype=nm.uint32)
        else:
            raise ValueError('region "%s" cannot have cells!' % self.name)
    def _access(self, dim):
        """
        Helper to access region entities of dimension `dim`.
        """
        # Touching a property triggers its lazy computation; the property
        # chosen depends on how `dim` relates to the mesh dimension.
        if dim == 0:
            self.vertices
        elif dim == 1:
            # In a 1D mesh the dimension 1 entities are the cells.
            if self.tdim == 1:
                self.cells
            else:
                self.edges
        elif dim == 2:
            # Faces exist only in 3D; in 2D the dimension 2 entities are cells.
            if self.tdim == 3:
                self.faces
            else:
                self.cells
        else:
            self.cells
    def setup_from_highest(self, dim, allow_lower=True, allow_empty=False):
        """
        Setup entities of topological dimension `dim` using the available
        entities of the highest topological dimension.
        """
        if not self.can[dim]: return
        # Find the highest dimension with a non-empty entity set.
        for idim in range(self.tdim, -1, -1):
            if self.entities[idim] is not None:
                if self.entities[idim].shape[0] > 0:
                    break
        else:
            # No non-empty entity set exists at all.
            if not (allow_empty or self.is_empty):
                msg = 'region "%s" has no entities!'
                raise ValueError(msg % self.name)
            if self.entities[dim] is None:
                self.entities[dim] = nm.empty(0, dtype=nm.uint32)
                self.is_empty = True
            return
        cmesh = self.domain.cmesh
        if idim <= dim:
            # Only lower- (or equal-) dimensional entities are available.
            if not (allow_lower or allow_empty):
                msg = 'setup_from_highest() can be used only with dim < %d'
                raise ValueError(msg % idim)
            if allow_lower:
                # Complete the `dim` entities from the lower-dimensional ones.
                cmesh.setup_connectivity(dim, idim)
                ents = self.get_entities(idim)
                self.entities[dim] = cmesh.get_complete(dim, ents, idim)
            else:
                # Mark everything below the kind dimension empty.
                for idim in range(self.kind_tdim - 1, -1, -1):
                    self.entities[idim] = nm.empty(0, dtype=nm.uint32)
                self.is_empty = True
        else:
            # Collect all `dim` entities incident to the found entities.
            cmesh.setup_connectivity(idim, dim)
            incident = cmesh.get_incident(dim, self.entities[idim], idim)
            self.entities[dim] = nm.unique(incident)
    def setup_from_vertices(self, dim):
        """
        Setup entities of topological dimension `dim` using the region
        vertices.
        """
        if not self.can[dim]: return
        cmesh = self.domain.cmesh
        cmesh.setup_connectivity(dim, 0)
        vv = self.vertices
        # Entities having all their vertices in the region.
        self.entities[dim] = cmesh.get_complete(dim, vv, 0)
    def finalize(self, allow_empty=False):
        """
        Initialize the entities corresponding to the region kind and regenerate
        all already existing (accessed) entities of lower topological dimension
        from the kind entities.
        """
        self._access(self.kind_tdim)
        if allow_empty and ('cell' in self.true_kind):
            self.is_empty = self.cells.shape[0] == 0
        # Re-derive already computed lower-dimensional entities so that they
        # stay consistent with the kind entities.
        for idim in range(self.kind_tdim - 1, -1, -1):
            if self.can[idim] and self.entities[idim] is not None:
                try:
                    self.setup_from_highest(idim, allow_lower=False,
                                            allow_empty=allow_empty)
                except ValueError as exc:
                    msg = '\n'.join((str(exc),
                                     'fix region kind? (region: %s, kind: %s)'
                                     % (self.name, self.kind)))
                    raise ValueError(msg)
def eval_op_vertices(self, other, op):
parse_def = _join(self.parse_def, '%sv' % op, other.parse_def)
tmp = self.light_copy('op', parse_def)
tmp.vertices = self.__op_to_fun[op](self.vertices, other.vertices)
return tmp
def eval_op_edges(self, other, op):
parse_def = _join(self.parse_def, '%se' % op, other.parse_def)
tmp = self.light_copy('op', parse_def)
tmp.edges = self.__op_to_fun[op](self.edges, other.edges)
return tmp
def eval_op_faces(self, other, op):
parse_def = _join(self.parse_def, '%sf' % op, other.parse_def)
tmp = self.light_copy('op', parse_def)
tmp.faces = self.__op_to_fun[op](self.faces, other.faces)
return tmp
def eval_op_facets(self, other, op):
parse_def = _join(self.parse_def, '%ss' % op, other.parse_def)
tmp = self.light_copy('op', parse_def)
tmp.facets = self.__op_to_fun[op](self.facets, other.facets)
return tmp
def eval_op_cells(self, other, op):
parse_def = _join(self.parse_def, '%sc' % op, other.parse_def)
tmp = self.light_copy('op', parse_def)
tmp.cells = self.__op_to_fun[op](self.cells, other.cells)
return tmp
def light_copy(self, name, parse_def):
return Region(name, self.definition, self.domain, parse_def,
kind=self.kind)
def copy(self):
"""
Vertices-based copy.
"""
tmp = self.light_copy('copy', self.parse_def)
tmp.vertices = copy(self.vertices)
return tmp
    def delete_zero_faces(self, eps=1e-14):
        # Placeholder: removal of degenerate (zero area) faces is not
        # implemented; `eps` is the intended area tolerance.
        raise NotImplementedError
def update_shape(self):
"""
Update shape of each group according to region vertices, edges,
faces and cells.
"""
n_vertex = self.vertices.shape[0]
n_cell = self.cells.shape[0]
n_edge = self.edges.shape[0] if self.tdim > 1 else 0
n_face = self.faces.shape[0] if self.tdim == 3 else 0
n_facet = self.facets.shape[0] if self.tdim > 1 else 0
self.shape = Struct(n_vertex=n_vertex,
n_edge=n_edge,
n_face=n_face,
n_facet=n_facet,
n_cell=n_cell)
def get_entities(self, dim):
"""
Return mesh entities of dimension `dim`.
"""
if dim <= self.tdim:
self._access(dim)
out = self.entities[dim]
else:
out = nm.empty(0, dtype=nm.uint32)
return out
    def get_cells(self, true_cells_only=True):
        """
        Get cells of the region.
        Raises ValueError if `true_cells_only` is True and the region kind does
        not allow cells. For `true_cells_only` equal to False, cells incident
        to facets are returned if the region itself contains no cells. Obeys
        parent region, if given.
        """
        if self.cells.shape[0] == 0:
            if true_cells_only:
                msg = 'region %s has not true cells! (has kind: %s)' \
                      % (self.name, self.kind)
                raise ValueError(msg)
            else:
                # Has to be consistent with get_facet_indices()!
                cmesh = self.domain.cmesh
                cmesh.setup_connectivity(self.tdim - 1, self.tdim)
                out = cmesh.get_incident(self.tdim, self.facets, self.tdim - 1)
                if self.parent is not None:
                    # Keep only the incident cells lying in the parent region.
                    pcells = self.domain.regions[self.parent].cells
                    ip = nm.in1d(out, pcells, assume_unique=False)
                    out = out[ip]
        else:
            out = self.cells
        return out
    def get_cell_indices(self, cells, true_cells_only=True):
        """
        Return indices of `cells` in the region cells.
        Raises ValueError if `true_cells_only` is True and the region kind does
        not allow cells. For `true_cells_only` equal to False, cells incident
        to facets are returned if the region itself contains no cells.
        Notes
        -----
        If the number of unique values in `cells` is smaller or equal to the
        number of cells in the region, all `cells` has to be also the region
        cells (`self` is a superset of `cells`). The region cells are
        considered depending on `true_cells_only`.
        Otherwise, indices of all cells in `self` that are in `cells` are
        returned.
        """
        fcells = self.get_cells(true_cells_only=true_cells_only)
        if len(nm.unique(cells)) <= len(nm.unique(fcells)):
            # self is a superset of cells.
            ii = nm.searchsorted(fcells, cells)
            assert_((fcells[ii] == cells).all())
        else:
            # Indices of the region cells that occur in `cells`.
            aux = nm.searchsorted(cells, fcells)
            ii = nm.where(nm.take(cells, aux, mode='clip') == fcells)[0]
        return ii
    def get_facet_indices(self):
        """
        Return an array (per group) of (iel, ifa) for each facet. A facet can
        be in 1 (surface) or 2 (inner) cells.
        """
        cmesh = self.domain.cmesh
        cmesh.setup_connectivity(self.tdim - 1, self.tdim)
        facets = self.facets
        cells, offs = cmesh.get_incident(self.tdim, facets, self.tdim - 1,
                                         ret_offsets=True)
        if self.parent is not None:
            # Restrict the incident cells to the parent region and rebuild
            # the per-facet offsets accordingly.
            pcells = self.domain.regions[self.parent].cells
            ip = nm.in1d(cells, pcells, assume_unique=False)
            cells = cells[ip]
            counts = nm.diff(offs).astype(nm.int32)
            pos = nm.repeat(nm.arange(facets.shape[0], dtype=nm.int32), counts)
            new_counts = nm.bincount(pos, weights=ip).astype(nm.uint32)
            offs = nm.cumsum(nm.r_[0, new_counts], dtype=nm.uint32)
        ii = cmesh.get_local_ids(facets, self.tdim - 1, cells, offs, self.tdim)
        fis = nm.c_[cells, ii]
        return fis
    def setup_mirror_region(self):
        """
        Find the corresponding mirror region, set up element mapping.
        """
        regions = self.domain.regions
        for reg in regions:
            # Only regions whose parent region exists are mirror candidates.
            mirror_parent = regions.find(reg.parent)
            if mirror_parent is None: continue
            # NOTE(review): a candidate matches when its vertices coincide
            # with ours - presumably the two regions lie on opposite sides
            # of an interface sharing vertices; confirm against callers.
            if ((reg is not self)
                and nm.all(self.vertices == reg.vertices)):
                mirror_region = reg
                break
        else:
            raise ValueError('cannot find mirror region! (%s)' % self.name)
        self.mirror_region = mirror_region
    def get_mirror_region(self):
        """Return the mirror region set up by setup_mirror_region()."""
        return self.mirror_region
def get_n_cells(self, is_surface=False):
"""
Get number of region cells.
Parameters
----------
is_surface : bool
If True, number of edges or faces according to domain
dimension is returned instead.
Returns
-------
n_cells : int
The number of cells.
"""
if is_surface:
return self.shape.n_facet
else:
return self.shape.n_cell
def has_cells(self):
return self.cells.size > 0
def contains(self, other):
"""
Return True in the region contains the `other` region.
The check is performed using entities corresponding to the other region
kind.
"""
tdim = other.kind_tdim
se = self.get_entities(tdim)
oe = other.entities[tdim]
return len(nm.intersect1d(se, oe))
def get_charfun(self, by_cell=False, val_by_id=False):
"""
Return the characteristic function of the region as a vector of values
defined either in the mesh vertices (by_cell == False) or cells. The
values are either 1 (val_by_id == False) or sequential id + 1.
"""
if by_cell:
chf = nm.zeros((self.domain.shape.n_el,), dtype=nm.float64)
if val_by_id:
chf[self.cells] = self.cells + 1
else:
chf[self.cells] = 1.0
else:
chf = nm.zeros((self.domain.shape.n_nod,), dtype=nm.float64)
if val_by_id:
chf[self.vertices] = self.vertices + 1
else:
chf[self.vertices] = 1.0
return chf
    def get_edge_graph(self):
        """
        Return the graph of region edges as a sparse matrix having uid(k) + 1
        at (i, j) if vertex[i] is connected with vertex[j] by the edge k.
        Degenerate edges are ignored.
        """
        from scipy.sparse import csr_matrix
        cmesh = self.domain.cmesh
        # Vertices incident to the region edges, two per edge.
        e_verts = cmesh.get_incident(0, self.edges, 1)
        e_verts.shape = (e_verts.shape[0] // 2, 2)
        # Drop degenerate edges connecting a vertex to itself.
        ii = nm.where(e_verts[:, 0] != e_verts[:, 1])[0]
        edges = self.edges[ii]
        e_verts = e_verts[ii]
        # Store uid + 1 so that edge 0 does not vanish in the sparse matrix.
        vals = edges + 1
        rows = e_verts[:, 0]
        cols = e_verts[:, 1]
        num = self.vertices.max() + 1
        graph = csr_matrix((vals, (rows, cols)), shape=(num, num))
        nnz = graph.nnz
        # Symmetrize.
        graph = graph + graph.T
        assert_(graph.nnz == 2 * nnz)
        return graph
| lokik/sfepy | sfepy/discrete/common/region.py | Python | bsd-3-clause | 23,393 |
import os
from collections import namedtuple
# Immutable message records exchanged between proxies and the dispatcher.
Request = namedtuple('Request', ['id', 'proxy_id', 'function', 'args', 'kwargs'])
Response = namedtuple('Response', ['id', 'exception', 'return_value'])
ProxyHandle = namedtuple('ProxyHandle', ['id', 'obj_type', 'exposed'])
DispatcherState = namedtuple('DispatcherState', ['state'])
def generate_id(obj=None):
    """Return obj.id when available, else a fresh 256-bit random token."""
    if obj:
        try:
            return obj.id
        except AttributeError:
            pass
    return os.urandom(32)  # 32 bytes == 256 bits of randomness
| ekarulf/pymp | src/pymp/messages.py | Python | mit | 435 |
"""replace"""
from runtime import *
def main():
    """Exercise str.replace on single- and repeated-match inputs."""
    cases = (('abc', 'Abc'), ('aaa', 'AAA'))
    for source, expected in cases:
        assert source.replace('a', 'A') == expected
main()  # run the assertions on module execution; extraction residue removed
"""Helper methods to handle the time in Home Assistant."""
import datetime as dt
import re
from typing import Any, Union, Optional, Tuple, List, cast, Dict
import pytz
import pytz.exceptions as pytzexceptions
import pytz.tzinfo as pytzinfo
from homeassistant.const import MATCH_ALL
DATE_STR_FORMAT = "%Y-%m-%d"
UTC = pytz.utc
# Module-wide default zone; replaced at runtime via set_default_time_zone().
DEFAULT_TIME_ZONE: dt.tzinfo = pytz.utc
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
# Matches ISO-8601-like datetimes with optional seconds, microseconds and a
# 'Z' or +-HH[:MM] offset; consumed by parse_datetime() below.
DATETIME_RE = re.compile(
    r"(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
    r"[T ](?P<hour>\d{1,2}):(?P<minute>\d{1,2})"
    r"(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?"
    r"(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$"
)
def set_default_time_zone(time_zone: dt.tzinfo) -> None:
    """Set a default time zone to be used when none is specified.
    Async friendly.
    """
    # Mutates the module-level default consumed by now(), as_utc() etc.
    global DEFAULT_TIME_ZONE
    # NOTE: Remove in the future in favour of typing
    assert isinstance(time_zone, dt.tzinfo)
    DEFAULT_TIME_ZONE = time_zone
def get_time_zone(time_zone_str: str) -> Optional[dt.tzinfo]:
    """Get time zone from string. Return None if unable to determine.
    Async friendly.
    """
    # Unknown zone names yield None rather than propagating the error.
    try:
        return pytz.timezone(time_zone_str)
    except pytzexceptions.UnknownTimeZoneError:
        return None
def utcnow() -> dt.datetime:
    """Get now in UTC time."""
    # Returns an aware datetime carrying the pytz UTC tzinfo.
    return dt.datetime.now(UTC)
def now(time_zone: Optional[dt.tzinfo] = None) -> dt.datetime:
    """Get now in specified time zone."""
    # Falls back to the configurable module default when no zone is given.
    return dt.datetime.now(time_zone or DEFAULT_TIME_ZONE)
def as_utc(dattim: dt.datetime) -> dt.datetime:
    """Return a datetime as UTC time.
    Assumes datetime without tzinfo to be in the DEFAULT_TIME_ZONE.
    """
    if dattim.tzinfo == UTC:
        return dattim
    if dattim.tzinfo is None:
        # Interpret naive datetimes in the configured default time zone.
        dattim = DEFAULT_TIME_ZONE.localize(dattim)  # type: ignore
    return dattim.astimezone(UTC)
def as_timestamp(dt_value: dt.datetime) -> float:
    """Convert a date/time into a unix time (seconds since 1970)."""
    # Anything that already behaves like a datetime is used directly;
    # other values go through the string parser first.
    if hasattr(dt_value, "timestamp"):
        return dt_value.timestamp()
    parsed = parse_datetime(str(dt_value))
    if parsed is None:
        raise ValueError("not a valid date/time.")
    return parsed.timestamp()
def as_local(dattim: dt.datetime) -> dt.datetime:
    """Convert a UTC datetime object to local time zone."""
    if dattim.tzinfo == DEFAULT_TIME_ZONE:
        return dattim
    if dattim.tzinfo is None:
        # Naive datetimes are assumed to be UTC.
        dattim = UTC.localize(dattim)
    return dattim.astimezone(DEFAULT_TIME_ZONE)
def utc_from_timestamp(timestamp: float) -> dt.datetime:
    """Return a UTC time from a timestamp."""
    # utcfromtimestamp() yields a naive datetime, hence the localize().
    return UTC.localize(dt.datetime.utcfromtimestamp(timestamp))
def start_of_local_day(
    dt_or_d: Union[dt.date, dt.datetime, None] = None
) -> dt.datetime:
    """Return local datetime object of start of day from date or datetime.

    Defaults to today in the default time zone when no argument is given.
    """
    if dt_or_d is None:
        date: dt.date = now().date()
    elif isinstance(dt_or_d, dt.datetime):
        date = dt_or_d.date()
    else:
        # Fix: a plain ``datetime.date`` previously fell through both
        # branches and left ``date`` unbound, raising UnboundLocalError.
        date = dt_or_d
    return DEFAULT_TIME_ZONE.localize(  # type: ignore
        dt.datetime.combine(date, dt.time())
    )
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
# https://github.com/django/django/blob/master/LICENSE
def parse_datetime(dt_str: str) -> Optional[dt.datetime]:
    """Parse a string and return a datetime.datetime.
    This function supports time zone offsets. When the input contains one,
    the output uses a timezone with a fixed offset from UTC.
    Raises ValueError if the input is well formatted but not a valid datetime.
    Returns None if the input isn't well formatted.
    """
    match = DATETIME_RE.match(dt_str)
    if not match:
        return None
    kws: Dict[str, Any] = match.groupdict()
    if kws["microsecond"]:
        # Right-pad fractional seconds, e.g. ".5" -> 500000 microseconds.
        kws["microsecond"] = kws["microsecond"].ljust(6, "0")
    tzinfo_str = kws.pop("tzinfo")
    tzinfo: Optional[dt.tzinfo] = None
    if tzinfo_str == "Z":
        tzinfo = UTC
    elif tzinfo_str is not None:
        # Offsets look like +HH, +HHMM or +HH:MM; the minutes are optional.
        offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
        offset_hours = int(tzinfo_str[1:3])
        offset = dt.timedelta(hours=offset_hours, minutes=offset_mins)
        if tzinfo_str[0] == "-":
            offset = -offset
        tzinfo = dt.timezone(offset)
    # Missing optional fields (seconds, microseconds) are dropped so the
    # datetime constructor applies its defaults.
    kws = {k: int(v) for k, v in kws.items() if v is not None}
    kws["tzinfo"] = tzinfo
    return dt.datetime(**kws)
def parse_date(dt_str: str) -> Optional[dt.date]:
    """Convert a date string to a date object."""
    # Accepts the '%Y-%m-%d' form defined by DATE_STR_FORMAT.
    try:
        return dt.datetime.strptime(dt_str, DATE_STR_FORMAT).date()
    except ValueError:  # If dt_str did not match our format
        return None
def parse_time(time_str: str) -> Optional[dt.time]:
    """Parse a time string (00:20:00) into Time object.

    Return None if invalid.
    """
    pieces = str(time_str).split(":")
    if len(pieces) < 2:
        return None
    try:
        # A trailing seconds field is optional and defaults to 0.
        seconds = int(pieces[2]) if len(pieces) > 2 else 0
        return dt.time(int(pieces[0]), int(pieces[1]), seconds)
    except ValueError:
        # Non-numeric fields or out-of-range components are invalid.
        return None
# Found in this gist: https://gist.github.com/zhangsen/1199964
def get_age(date: dt.datetime) -> str:
    """
    Take a datetime and return its "age" as a string.
    The age can be in second, minute, hour, day, month or year. Only the
    biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will
    be returned.
    Make sure date is not in the future, or else it won't work.
    """
    def formatn(number: int, unit: str) -> str:
        """Add "unit" if it's plural."""
        if number == 1:
            return f"1 {unit}"
        return f"{number:d} {unit}s"
    def q_n_r(first: int, second: int) -> Tuple[int, int]:
        """Return quotient and remaining."""
        return first // second, first % second
    delta = now() - date
    day = delta.days
    second = delta.seconds
    # NOTE: years and months are approximated as 365 and 30 days.
    year, day = q_n_r(day, 365)
    if year > 0:
        return formatn(year, "year")
    month, day = q_n_r(day, 30)
    if month > 0:
        return formatn(month, "month")
    if day > 0:
        return formatn(day, "day")
    hour, second = q_n_r(second, 3600)
    if hour > 0:
        return formatn(hour, "hour")
    minute, second = q_n_r(second, 60)
    if minute > 0:
        return formatn(minute, "minute")
    return formatn(second, "second")
def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> List[int]:
    """Parse the time expression part and return a list of times to match."""
    full_range = range(min_value, max_value + 1)
    if parameter is None or parameter == MATCH_ALL:
        res = list(full_range)
    elif isinstance(parameter, str) and parameter.startswith("/"):
        # "/N" means every value divisible by N.
        step = int(parameter[1:])
        res = [value for value in full_range if value % step == 0]
    elif not hasattr(parameter, "__iter__"):
        res = [int(parameter)]
    else:
        res = sorted(int(value) for value in parameter)
    # Explicit values may lie outside the allowed range - reject them.
    for value in res:
        if not min_value <= value <= max_value:
            raise ValueError(
                "Time expression '{}': parameter {} out of range ({} to {})"
                "".format(parameter, value, min_value, max_value)
            )
    return res
# pylint: disable=redefined-outer-name
def find_next_time_expression_time(
    now: dt.datetime, seconds: List[int], minutes: List[int], hours: List[int]
) -> dt.datetime:
    """Find the next datetime from now for which the time expression matches.
    The algorithm looks at each time unit separately and tries to find the
    next one that matches for each. If any of them would roll over, all
    time units below that are reset to the first matching value.
    Timezones are also handled (the tzinfo of the now object is used),
    including daylight saving time.
    """
    if not seconds or not minutes or not hours:
        raise ValueError("Cannot find a next time: Time expression never " "matches!")
    def _lower_bound(arr: List[int], cmp: int) -> Optional[int]:
        """Return the first value in arr greater or equal to cmp.
        Return None if no such value exists.
        """
        # Binary search over the sorted candidate list.
        left = 0
        right = len(arr)
        while left < right:
            mid = (left + right) // 2
            if arr[mid] < cmp:
                left = mid + 1
            else:
                right = mid
        if left == len(arr):
            return None
        return arr[left]
    result = now.replace(microsecond=0)
    # Match next second
    next_second = _lower_bound(seconds, result.second)
    if next_second is None:
        # No second to match in this minute. Roll-over to next minute.
        next_second = seconds[0]
        result += dt.timedelta(minutes=1)
    result = result.replace(second=next_second)
    # Match next minute
    next_minute = _lower_bound(minutes, result.minute)
    if next_minute != result.minute:
        # We're in the next minute. Seconds needs to be reset.
        result = result.replace(second=seconds[0])
    if next_minute is None:
        # No minute to match in this hour. Roll-over to next hour.
        next_minute = minutes[0]
        result += dt.timedelta(hours=1)
    result = result.replace(minute=next_minute)
    # Match next hour
    next_hour = _lower_bound(hours, result.hour)
    if next_hour != result.hour:
        # We're in the next hour. Seconds+minutes needs to be reset.
        result = result.replace(second=seconds[0], minute=minutes[0])
    if next_hour is None:
        # No minute to match in this day. Roll-over to next day.
        next_hour = hours[0]
        result += dt.timedelta(days=1)
    result = result.replace(hour=next_hour)
    if result.tzinfo is None:
        # Naive input: no DST handling is needed.
        return result
    # Now we need to handle timezones. We will make this datetime object
    # "naive" first and then re-convert it to the target timezone.
    # This is so that we can call pytz's localize and handle DST changes.
    tzinfo: pytzinfo.DstTzInfo = result.tzinfo
    result = result.replace(tzinfo=None)
    try:
        result = tzinfo.localize(result, is_dst=None)
    except pytzexceptions.AmbiguousTimeError:
        # This happens when we're leaving daylight saving time and local
        # clocks are rolled back. In this case, we want to trigger
        # on both the DST and non-DST time. So when "now" is in the DST
        # use the DST-on time, and if not, use the DST-off time.
        use_dst = bool(now.dst())
        result = tzinfo.localize(result, is_dst=use_dst)
    except pytzexceptions.NonExistentTimeError:
        # This happens when we're entering daylight saving time and local
        # clocks are rolled forward, thus there are local times that do
        # not exist. In this case, we want to trigger on the next time
        # that *does* exist.
        # In the worst case, this will run through all the seconds in the
        # time shift, but that's max 3600 operations for once per year
        result = result.replace(tzinfo=tzinfo) + dt.timedelta(seconds=1)
        return find_next_time_expression_time(result, seconds, minutes, hours)
    result_dst = cast(dt.timedelta, result.dst())
    now_dst = cast(dt.timedelta, now.dst())
    if result_dst >= now_dst:
        return result
    # Another edge-case when leaving DST:
    # When now is in DST and ambiguous *and* the next trigger time we *should*
    # trigger is ambiguous and outside DST, the excepts above won't catch it.
    # For example: if triggering on 2:30 and now is 28.10.2018 2:30 (in DST)
    # we should trigger next on 28.10.2018 2:30 (out of DST), but our
    # algorithm above would produce 29.10.2018 2:30 (out of DST)
    # Step 1: Check if now is ambiguous
    try:
        tzinfo.localize(now.replace(tzinfo=None), is_dst=None)
        return result
    except pytzexceptions.AmbiguousTimeError:
        pass
    # Step 2: Check if result of (now - DST) is ambiguous.
    check = now - now_dst
    check_result = find_next_time_expression_time(check, seconds, minutes, hours)
    try:
        tzinfo.localize(check_result.replace(tzinfo=None), is_dst=None)
        return result
    except pytzexceptions.AmbiguousTimeError:
        pass
    # OK, edge case does apply. We must override the DST to DST-off
    check_result = tzinfo.localize(check_result.replace(tzinfo=None), is_dst=False)
    return check_result
| Cinntax/home-assistant | homeassistant/util/dt.py | Python | apache-2.0 | 12,610 |
# -*- coding: utf-8 -*-
import sys
import os
import json
from flask import Flask
from capybara import Capybara
# WSGI application object; the routes below are registered against it.
app = Flask(__name__)
@app.route("/ping", methods=['GET'])
def ping():
    # Liveness-probe endpoint.
    return "pong"
@app.route("/info/<service>/", methods=['GET'])
def info(service):
    # Report whether `service` has a wrapper configured and, when it does,
    # a short human-readable summary of its configuration and usage stats.
    available = capy.isAvailable(service)
    if available:
        msg = ""
        msg += "Service %s is available!\n" % service
        msg += "Information:\n"
        msg += "\tconfig=%s, \n" % json.dumps(capy.wrappers[service].config)
        msg += "\ttokens=%s, \n" % len(capy.wrappers[service].tokens)
        msg += "\taccess_count=%s, \n" % capy.wrappers[service].access_count
        return msg
    else:
        return "Service %s is unavailable!" % service
@app.route("/get/<service>/<item>/", methods=['GET'])
def get(service, item):
    # Fetch `item` through the service wrapper and render the raw result.
    result = capy.get(service=service.strip(), item=item.strip())
    if not result:
        return "Unable to get item %s at %s. \nSee the log for detail." % (item, service)
    return str(result)
# @app.route("/get/<service>/<item>/json", methods=['GET'])
# def get_json(service, item):
# res = capy.get(service.strip(), item.strip())
# if res:
# return str(res)
# else:
# return "Unable to get item %s at %s. \nSee the log for detail." % (item, service)
@app.route("/get/<service>/<item>/<attr>/", methods=['GET'])
def get_title(service, item, attr):
    # Fetch `item` and return only the requested attribute of the result.
    result = capy.get(service=service.strip(), item=item.strip())
    if not result:
        return "Unable to get item %s at %s. \nSee the log for detail." % (item, service)
    return result[attr]
if __name__=="__main__":
args = sys.argv
argc = len(args)
if argc == 1:
config = os.path.join(os.getcwd(), "./config")
tokens = os.path.join(os.getcwd(), "./tokens")
elif argc == 3:
config = os.path.join(os.getcwd(), args[1])
tokens = os.path.join(os.getcwd(), args[2])
else:
print "Invalid number of arguments(%s)" % argc-1
exit()
capy = Capybara(config_dir=config, tokens_dir=tokens)
app.run()
| AkihikoITOH/capybara | capybara/api.py | Python | mit | 2,082 |
#!/usr/bin/env python
'''
Loader for all 2009 Dorado missions written for Monique's notice of bad
depths in Dorado389_2009_084_02_084_02_decim.nc.
Mike McCann
MBARI 15 January 2013
@var __date__: Date of last svn commit
@undocumented: __doc__ parser
@status: production
@license: GPL
'''
import os
import sys
import datetime
# Make the parent directory importable before the CANON import below.
parentDir = os.path.join(os.path.dirname(__file__), "../")
sys.path.insert(0, parentDir) # So that CANON is found
from CANON import CANONLoader
import timing
# Loader for all 2009 Dorado missions; x3dTerrains configures the 3D terrain
# view (camera position/orientation) used by the STOQS UI.
cl = CANONLoader('stoqs_dorado2009', 'Dorado - All 2009 missions',
                 description = 'In Monterey Bay and Santa Monica Basin - includes processed Gulper Samples',
                 x3dTerrains = {
                   'https://stoqs.mbari.org/x3d/Monterey25_10x/Monterey25_10x_scene.x3d': {
                     'position': '-2822317.31255 -4438600.53640 3786150.85474',
                     'orientation': '0.89575 -0.31076 -0.31791 1.63772',
                     'centerOfRotation': '-2711557.9403829873 -4331414.329506527 3801353.4691465236',
                     'VerticalExaggeration': '10',
                     'speed': '.1',
                   }
                 },
                 grdTerrain = os.path.join(parentDir, 'Monterey25.grd')
                )
# Dorado surveys in 2009
# OPeNDAP base URL and the decimated survey files to load.
cl.dorado_base = 'http://dods.mbari.org/opendap/data/auvctd/surveys/2009/netcdf/'
cl.dorado_files = [
                   'Dorado389_2009_055_05_055_05_decim.nc',
                   'Dorado389_2009_084_00_084_00_decim.nc',
                   'Dorado389_2009_084_02_084_02_decim.nc',
                   'Dorado389_2009_085_02_085_02_decim.nc',
                   'Dorado389_2009_111_00_111_00_decim.nc',
                   'Dorado389_2009_111_01_111_01_decim.nc',
                   'Dorado389_2009_112_07_112_07_decim.nc',
                   'Dorado389_2009_113_00_113_00_decim.nc',
                   'Dorado389_2009_124_03_124_03_decim.nc',
                   'Dorado389_2009_125_00_125_00_decim.nc',
                   'Dorado389_2009_126_00_126_00_decim.nc',
                   'Dorado389_2009_152_00_152_00_decim.nc',
                   'Dorado389_2009_153_01_153_01_decim.nc',
                   'Dorado389_2009_154_00_154_00_decim.nc',
                   'Dorado389_2009_155_03_155_03_decim.nc',
                   'Dorado389_2009_182_01_182_01_decim.nc',
                   'Dorado389_2009_272_00_272_00_decim.nc',
                   'Dorado389_2009_274_03_274_03_decim.nc',
                   'Dorado389_2009_278_01_278_01_decim.nc',
                   'Dorado389_2009_278_01_278_02_decim.nc',
                   'Dorado389_2009_279_00_279_00_decim.nc',
                   'Dorado389_2009_280_00_280_00_decim.nc',
                   'Dorado389_2009_281_01_281_01_decim.nc',
                   'Dorado389_2009_308_04_308_04_decim.nc',
                   'Dorado389_2009_309_00_309_03_decim.nc',
                   'Dorado389_2009_313_02_313_02_decim.nc',
                   'Dorado389_2009_342_04_342_04_decim.nc',
                   'Dorado389_2009_348_05_348_05_decim.nc',
                  ]
# Parameters (measured variables) to load from each survey.
cl.dorado_parms = [ 'temperature', 'oxygen', 'nitrate', 'bbp420', 'bbp700',
                    'fl700_uncorr', 'salinity', 'biolume',
                    'sepCountList', 'mepCountList',
                    'roll', 'pitch', 'yaw']
# Mooring M1ts
cl.m1ts_base = 'http://elvis.shore.mbari.org/thredds/dodsC/agg/'
cl.m1ts_files = ['OS_MBARI-M1_R_TS']
cl.m1ts_parms = [ 'PSAL', 'TEMP' ]
cl.m1ts_startDatetime = datetime.datetime(2009, 1, 1)
cl.m1ts_endDatetime = datetime.datetime(2009, 12, 31)
# Mooring M1met
cl.m1met_base = 'http://elvis.shore.mbari.org/thredds/dodsC/agg/'
cl.m1met_files = ['OS_MBARI-M1_R_M']
cl.m1met_parms = [ 'WSPD', 'WDIR', 'ATMP', 'SW', 'RELH' ]
cl.m1met_startDatetime = datetime.datetime(2009, 1, 1)
cl.m1met_endDatetime = datetime.datetime(2009, 12, 31)
# Execute the load
# The --test / --optimal_stride command-line flags select a coarser data
# stride for quick runs; otherwise the user-supplied stride is used.
cl.process_command_line()
if cl.args.test:
    cl.loadDorado(stride=20)
    ##cl.loadM1ts(stride=10)
    ##cl.loadM1met(stride=10)
elif cl.args.optimal_stride:
    cl.loadDorado(stride=2)
    cl.loadM1ts(stride=1)
    cl.loadM1met(stride=1)
else:
    cl.loadDorado(stride=cl.args.stride)
    ##cl.loadM1ts(stride=cl.args.stride)
    ##cl.loadM1met(stride=cl.args.stride)
# Add any X3D Terrain information specified in the constructor to the database - must be done after a load is executed
cl.addTerrainResources()
print("All Done.")
| duane-edgington/stoqs | stoqs/loaders/MolecularEcology/load_dorado2009.py | Python | gpl-3.0 | 4,550 |
#
# Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
#
from neutron.api.v2 import attributes
from neutron.common import exceptions as n_exc
from neutron.extensions import loadbalancer
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from vnc_api.vnc_api import IdPermsType, NoIdError
from vnc_api.vnc_api import InstanceIp, VirtualMachineInterface
from vnc_api.vnc_api import SecurityGroup
from vnc_api.vnc_api import VirtualIp, VirtualIpType
from resource_manager import ResourceManager
import utils
# Module-level logger for this driver.
LOG = logging.getLogger(__name__)
class VirtualIpManager(ResourceManager):
    # Mapping of VirtualIpType property names (VNC API) to the corresponding
    # Neutron VIP resource attribute names.
    _virtual_ip_type_mapping = {
        'address': 'address',
        'protocol': 'protocol',
        'protocol_port': 'protocol_port',
        'connection_limit': 'connection_limit',
        'subnet_id': 'subnet_id',
        'admin_state': 'admin_state_up',
    }
    @property
    def property_type_mapping(self):
        """Return the VNC-property to Neutron-attribute name mapping."""
        return self._virtual_ip_type_mapping
    def make_properties(self, vip):
        """Build a VirtualIpType from a Neutron VIP resource dict."""
        props = VirtualIpType()
        for key, mapping in self._virtual_ip_type_mapping.iteritems():
            if mapping in vip:
                setattr(props, key, vip[mapping])
        # Session persistence is a nested structure, so it is mapped by hand.
        sp = vip['session_persistence']
        if sp is not None:
            props.persistence_type = sp['type']
            if 'cookie_name' in sp:
                props.persistence_cookie_name = sp['cookie_name']
        return props
def _get_vip_pool_id(self, vip):
pool_refs = vip.get_loadbalancer_pool_refs()
if pool_refs is None:
return None
return pool_refs[0]['uuid']
def _get_interface_params(self, vip, props):
vmi_list = vip.get_virtual_machine_interface_refs()
if vmi_list is None:
return None
port_id = vmi_list[0]['uuid']
if not props.address or props.address == attributes.ATTR_NOT_SPECIFIED:
try:
vmi = self._api.virtual_machine_interface_read(id=port_id)
except NoIdError as ex:
LOG.error(ex)
return None
ip_refs = vmi.get_instance_ip_back_refs()
if ip_refs:
try:
iip = self._api.instance_ip_read(ip_refs[0]['uuid'])
except NoIdError as ex:
LOG.error(ex)
return None
props.address = iip.get_instance_ip_address()
return port_id
def make_dict(self, vip, fields=None):
props = vip.get_virtual_ip_properties()
port_id = self._get_interface_params(vip, props)
res = {'id': vip.uuid,
'tenant_id': vip.parent_uuid,
'name': vip.display_name,
'description': self._get_object_description(vip),
'subnet_id': props.subnet_id,
'address': props.address,
'port_id': port_id,
'protocol_port': props.protocol_port,
'protocol': props.protocol,
'pool_id': self._get_vip_pool_id(vip),
'session_persistence': None,
'connection_limit': props.connection_limit,
'admin_state_up': props.admin_state,
'status': self._get_object_status(vip)}
if props.persistence_type:
sp = {'type': props.persistence_type}
if props.persistence_type == 'APP_COOKIE':
sp['cookie_name'] = props.persistence_cookie_name
res['session_persistence'] = sp
return self._fields(res, fields)
def resource_read(self, id):
return self._api.virtual_ip_read(id=id)
def resource_list(self, tenant_id=None):
return self._api.virtual_ips_list(parent_id=tenant_id)
def resource_update(self, obj):
return self._api.virtual_ip_update(obj)
def resource_delete(self, id):
return self._api.virtual_ip_delete(id=id)
def get_exception_notfound(self, id=None):
return loadbalancer.VipNotFound(vip_id=id)
@property
def neutron_name(self):
return "vip"
@property
def resource_name_plural(self):
return "virtual-ips"
def _create_virtual_interface(self, project, vip_id, subnet_id,
ip_address):
network_id = utils.get_subnet_network_id(self._api, subnet_id)
try:
vnet = self._api.virtual_network_read(id=network_id)
except NoIdError:
raise n_exc.NetworkNotFound(net_id=network_id)
vmi = VirtualMachineInterface(vip_id, project)
vmi.set_virtual_network(vnet)
sg_obj = SecurityGroup("default", project)
vmi.add_security_group(sg_obj)
self._api.virtual_machine_interface_create(vmi)
fq_name = list(project.get_fq_name())
fq_name.append(vip_id)
iip_obj = InstanceIp(fq_name=fq_name)
iip_obj.set_virtual_network(vnet)
iip_obj.set_virtual_machine_interface(vmi)
if ip_address and ip_address != attributes.ATTR_NOT_SPECIFIED:
iip_obj.set_instance_ip_address(ip_address)
self._api.instance_ip_create(fq_name)
return vmi
def _delete_virtual_interface(self, vmi_list):
if vmi_list is None:
return
for vmi_ref in vmi_list:
interface_id = vmi_ref['uuid']
try:
vmi = self._api.virtual_machine_interface_read(id=interface_id)
except NoIdError as ex:
LOG.error(ex)
continue
ip_refs = vmi.get_instance_ip_back_refs()
if ip_refs:
for ref in ip_refs:
self._api.instance_ip_delete(id=ref['uuid'])
self._api.virtual_machine_interface_delete(id=interface_id)
def create(self, context, vip):
"""
Create a VIP.
"""
v = vip['vip']
tenant_id = self._get_tenant_id_for_create(context, v)
project = self._project_read(project_id=tenant_id)
if v['pool_id']:
try:
pool = self._api.loadbalancer_pool_read(id=v['pool_id'])
except NoIdError:
raise loadbalancer.PoolNotFound(pool_id=v['pool_id'])
project_id = pool.parent_uuid
if tenant_id != project_id:
raise n_exc.NotAuthorized()
# TODO: check that the pool has no vip configured
# if pool.protocol != v['protocol']:
# raise loadbalancer.ProtocolMismatch(
# vip_proto=v['protocol'], pool_proto=pool.protocol)
else:
pool = None
uuid = uuidutils.generate_uuid()
name = self._get_resource_name('virtual-ip', project, v['name'], uuid)
props = self.make_properties(v)
id_perms = IdPermsType(uuid=uuid, enable=True,
description=v['description'])
vip = VirtualIp(name, project, virtual_ip_properties=props,
id_perms=id_perms, display_name=v['name'])
vip.uuid = uuid
if pool:
vip.set_loadbalancer_pool(pool)
vmi = self._create_virtual_interface(project, uuid, v['subnet_id'],
v.get('address'))
vip.set_virtual_machine_interface(vmi)
self._api.virtual_ip_create(vip)
return self.make_dict(vip)
def delete(self, context, id):
try:
vip = self._api.virtual_ip_read(id=id)
except NoIdError:
loadbalancer.VipNotFound(vip_id=id)
self._delete_virtual_interface(
vip.get_virtual_machine_interface_refs())
super(VirtualIpManager, self).delete(context, id)
def _update_virtual_ip_properties(self, props, id, vip):
"""
Update virtual ip properties and return True if the have been
modified
"""
# according to the spec:
# status, subnet_id, address, port and protocol are immutable
immutable = ['address', 'protocol', 'protocol_port', 'subnet_id']
for field in immutable:
if field not in vip:
continue
if getattr(props, field) != vip[field]:
msg = 'Attribute %s in vip %s is immutable' % (field, id)
raise n_exc.BadRequest(resource='vip', msg=msg)
# update
change = self.update_properties_subr(props, vip)
if 'session_persistence' in vip:
sp = vip['session_persistence']
if props.persistence_type != sp['type']:
props.persistence_type = sp['type']
change = True
if 'cookie_name' in sp and \
props.persistence_cookie_name != sp['cookie_name']:
props.persistence_cookie_name != sp['cookie_name']
change = True
return change
def update_properties(self, vip_db, id, v):
props = vip_db.get_virtual_ip_properties()
if self._update_virtual_ip_properties(props, id, v):
vip_db.set_virtual_ip_properties(props)
return True
return False
def update_object(self, vip_db, id, v):
if 'pool_id' in v and self._get_vip_pool_id(vip_db) != v['pool_id']:
try:
pool = self._api.loadbalancer_pool_read(id=v['pool_id'])
except NoIdError:
raise loadbalancer.PoolNotFound(pool_id=v['pool_id'])
if vip_db.parent_uuid != pool.parent_uuid:
raise n_exc.NotAuthorized()
# TODO: check that the pool has no vip configured
# TODO: check that the protocol matches
# TODO: check that the pool is in valid state
# TODO: check that the provider is the same.
vip_db.set_localbalancer_pool(pool)
return True
return False
| cloudwatt/contrail-neutron-plugin | neutron_plugin_contrail/plugins/opencontrail/loadbalancer/virtual_ip.py | Python | apache-2.0 | 9,900 |
# -*- coding: utf-8 -*-
import copy
from functools import wraps
import json
import sys
import django
from django.contrib.admin.helpers import AdminForm
from django.conf import settings
from django.conf.urls import url
from django.contrib import admin, messages
from django.contrib.admin.models import LogEntry, CHANGE
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.util import get_deleted_objects
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site, get_current_site
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist, ValidationError
from django.db import router, transaction
from django.db.models import Q
from django.http import HttpResponseRedirect, HttpResponse, Http404, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import render_to_response, get_object_or_404
from django.template.context import RequestContext
from django.template.defaultfilters import escape
from django.utils.encoding import force_text
from django.utils.six.moves.urllib.parse import unquote
from django.utils.translation import ugettext_lazy as _, get_language
from django.utils.decorators import method_decorator
from django.views.decorators.http import require_POST
from cms.admin.change_list import CMSChangeList
from cms.admin.dialog.views import get_copy_dialog
from cms.admin.forms import (PageForm, AdvancedSettingsForm, PagePermissionForm,
PublicationDatesForm)
from cms.admin.permissionadmin import (PERMISSION_ADMIN_INLINES, PagePermissionInlineAdmin, ViewRestrictionInlineAdmin)
from cms.admin.placeholderadmin import PlaceholderAdminMixin
from cms.admin.views import revert_plugins
from cms.constants import PAGE_TYPES_ID, PUBLISHER_STATE_PENDING
from cms.models import Page, Title, CMSPlugin, PagePermission, GlobalPagePermission, StaticPlaceholder
from cms.models.managers import PagePermissionsPermissionManager
from cms.plugin_pool import plugin_pool
from cms.toolbar_pool import toolbar_pool
from cms.utils import helpers, permissions, get_language_from_request, admin as admin_utils, copy_plugins
from cms.utils.i18n import get_language_list, get_language_tuple, get_language_object, force_language
from cms.utils.admin import jsonify_request
from cms.utils.compat.dj import is_installed
from cms.utils.conf import get_cms_setting
from cms.utils.helpers import find_placeholder_relation, current_site
from cms.utils.permissions import has_global_page_permission, has_generic_permission
from cms.utils.urlutils import add_url_parameters, admin_reverse
# Re-bind require_POST so it can decorate bound methods on the admin class.
require_POST = method_decorator(require_POST)
if is_installed('reversion'):
    from reversion.admin import VersionAdmin as ModelAdmin
    from reversion import create_revision
else: # pragma: no cover
    from django.contrib.admin import ModelAdmin
    # No-op stand-ins so the rest of the module can use reversion's API
    # unconditionally when django-reversion is not installed.
    class ReversionContext(object):
        def __enter__(self):
            # NOTE(review): the bare ``yield`` makes __enter__ a generator
            # function, so ``with ReversionContext():`` binds an unstarted
            # generator instead of running this body. Harmless for a no-op
            # stub, but surprising -- confirm before extending.
            yield
        def __exit__(self, exc_type, exc_val, exc_tb):
            pass
        def __call__(self, func):
            """Allows this revision context to be used as a decorator."""
            @wraps(func)
            def do_revision_context(*args, **kwargs):
                self.__enter__()
                exception = False
                try:
                    try:
                        return func(*args, **kwargs)
                    except:
                        # mirrors reversion's own decorator: let __exit__
                        # decide whether to swallow, then re-raise
                        exception = True
                        if not self.__exit__(*sys.exc_info()):
                            raise
                finally:
                    if not exception:
                        self.__exit__(None, None, None)
            return do_revision_context
    def create_revision():
        # Factory matching reversion.create_revision()'s call signature.
        return ReversionContext()
# Comments recorded on the revisions created for publish / first save.
PUBLISH_COMMENT = "Publish"
INITIAL_COMMENT = "Initial version."
class PageAdmin(PlaceholderAdminMixin, ModelAdmin):
    """
    Admin for CMS pages.

    ModelAdmin resolves to reversion's VersionAdmin when django-reversion
    is installed (see module top), which adds history/recover views.
    """
    form = PageForm
    search_fields = ('=id', 'title_set__slug', 'title_set__title', 'reverse_id')
    # Header templates for reversion's revert / recover forms.
    revision_form_template = "admin/cms/page/history/revision_header.html"
    recover_form_template = "admin/cms/page/history/recover_header.html"
    add_general_fields = ['title', 'slug', 'language', 'template']
    change_list_template = "admin/cms/page/tree/base.html"
    list_filter = ['in_navigation', 'template', 'changed_by', 'soft_root']
    title_frontend_editable_fields = ['title', 'menu_title', 'page_title']
    inlines = PERMISSION_ADMIN_INLINES
    def get_urls(self):
        """Get the admin urls
        """
        info = "%s_%s" % (self.model._meta.app_label, self.model._meta.model_name)
        # helper: wrap a view in admin_site.admin_view and name it after the view
        pat = lambda regex, fn: url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
        url_patterns = [
            pat(r'^([0-9]+)/advanced-settings/$', self.advanced),
            pat(r'^([0-9]+)/dates/$', self.dates),
            pat(r'^([0-9]+)/permission-settings/$', self.permissions),
            pat(r'^([0-9]+)/delete-translation/$', self.delete_translation),
            pat(r'^([0-9]+)/move-page/$', self.move_page),
            pat(r'^([0-9]+)/copy-page/$', self.copy_page),
            pat(r'^([0-9]+)/copy-language/$', self.copy_language),
            pat(r'^([0-9]+)/dialog/copy/$', get_copy_dialog),  # copy dialog
            pat(r'^([0-9]+)/change-navigation/$', self.change_innavigation),
            pat(r'^([0-9]+)/permissions/$', self.get_permissions),
            pat(r'^([0-9]+)/undo/$', self.undo),
            pat(r'^([0-9]+)/redo/$', self.redo),
            pat(r'^([0-9]+)/change_template/$', self.change_template),
            pat(r'^([0-9]+)/([a-z\-]+)/descendants/$', self.descendants),  # menu html for page descendants
            pat(r'^([0-9]+)/([a-z\-]+)/edit-field/$', self.edit_title_fields),
            pat(r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_page),
            pat(r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish),
            pat(r'^([0-9]+)/([a-z\-]+)/revert/$', self.revert_page),
            pat(r'^([0-9]+)/([a-z\-]+)/preview/$', self.preview_page),
            pat(r'^add-page-type/$', self.add_page_type),
            pat(r'^published-pages/$', self.get_published_pagelist),
            url(r'^resolve/$', self.resolve, name="cms_page_resolve"),
        ]
        # plugin-provided urls come before the stock ModelAdmin urls
        if plugin_pool.get_all_plugins():
            url_patterns += plugin_pool.get_patterns()
        url_patterns += super(PageAdmin, self).get_urls()
        return url_patterns
    def get_revision_instances(self, request, object):
        """Returns all the instances to be used in the object's revision."""
        # Normalize to the draft Page: Titles version their page, and public
        # pages version their draft counterpart.
        if isinstance(object, Title):
            object = object.page
        if isinstance(object, Page) and not object.publisher_is_draft:
            object = object.publisher_public
        placeholder_relation = find_placeholder_relation(object)
        data = [object]
        filters = {'placeholder__%s' % placeholder_relation: object}
        # include every plugin on the page plus its concrete instance
        for plugin in CMSPlugin.objects.filter(**filters):
            data.append(plugin)
            plugin_instance, admin = plugin.get_plugin_instance()
            if plugin_instance:
                data.append(plugin_instance)
        if isinstance(object, Page):
            titles = object.title_set.all()
            for title in titles:
                # detach the public link so the revision stores the draft state
                title.publisher_public = None
                data.append(title)
        return data
def save_model(self, request, obj, form, change):
"""
Move the page in the tree if necessary and save every placeholder
Content object.
"""
target = request.GET.get('target', None)
position = request.GET.get('position', None)
if 'recover' in request.path_info:
pk = obj.pk
if obj.parent_id:
try:
parent = Page.objects.get(pk=obj.parent_id)
except Page.DoesNotExist:
parent = None
else:
parent = None
obj.pk = None
obj.path = None
obj.numchild = 0
obj.depth = 0
if parent:
saved_obj = parent.add_child(instance=obj)
else:
saved_obj = obj.add_root(instance=obj)
tmp_pk = saved_obj.pk
saved_obj.pk = pk
Page.objects.get(pk=tmp_pk).delete()
saved_obj.save(no_signals=True)
else:
if 'history' in request.path_info:
old_obj = Page.objects.get(pk=obj.pk)
obj.depth = old_obj.depth
obj.parent_id = old_obj.parent_id
obj.path = old_obj.path
obj.numchild = old_obj.numchild
new = False
if not obj.pk:
new = True
obj.save()
if 'recover' in request.path_info or 'history' in request.path_info:
revert_plugins(request, obj.version.pk, obj)
if target is not None and position is not None:
try:
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
pass
else:
if position == 'last-child' or position == 'first-child':
obj.parent_id = target.pk
else:
obj.parent_id = target.parent_id
obj.save()
obj = obj.move(target, pos=position)
page_type_id = form.cleaned_data.get('page_type')
copy_target_id = request.GET.get('copy_target')
if copy_target_id or page_type_id:
if page_type_id:
copy_target_id = page_type_id
copy_target = Page.objects.get(pk=copy_target_id)
if not copy_target.has_view_permission(request):
raise PermissionDenied()
obj = Page.objects.get(pk=obj.pk) #mptt reload
copy_target._copy_attributes(obj, clean=True)
obj.save()
for lang in copy_target.languages.split(','):
copy_target._copy_contents(obj, lang)
if not 'permission' in request.path_info:
language = form.cleaned_data['language']
Title.objects.set_or_create(
request,
obj,
form,
language,
)
# is it home? publish it right away
if new and Page.objects.filter(site_id=obj.site_id).count() == 1:
obj.publish(language)
def get_fieldsets(self, request, obj=None):
form = self.get_form(request, obj, fields=None)
if getattr(form, 'fieldsets', None) is None:
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [(None, {'fields': fields})]
else:
return form.fieldsets
def get_inline_classes(self, request, obj=None, **kwargs):
if obj and 'permission' in request.path_info:
return PERMISSION_ADMIN_INLINES
return []
def get_form_class(self, request, obj=None, **kwargs):
if 'advanced' in request.path_info:
return AdvancedSettingsForm
elif 'permission' in request.path_info:
return PagePermissionForm
elif 'dates' in request.path_info:
return PublicationDatesForm
return self.form
    def get_form(self, request, obj=None, **kwargs):
        """
        Get PageForm for the Page model and modify its fields depending on
        the request.
        """
        language = get_language_from_request(request, obj)
        form_cls = self.get_form_class(request, obj)
        form = super(PageAdmin, self).get_form(request, obj, form=form_cls, **kwargs)
        # get_form method operates by overriding initial fields value which
        # may persist across invocation. Code below deepcopies fields definition
        # to avoid leaks
        for field in form.base_fields.keys():
            form.base_fields[field] = copy.deepcopy(form.base_fields[field])
        if 'language' in form.base_fields:
            form.base_fields['language'].initial = language
        if 'page_type' in form.base_fields:
            # Hide the page-type selector when copying a page, when defining
            # a page type itself, on existing pages, or when no page types
            # exist yet for this language.
            if 'copy_target' in request.GET or 'add_page_type' in request.GET or obj:
                del form.base_fields['page_type']
            elif not Title.objects.filter(page__parent__reverse_id=PAGE_TYPES_ID, language=language).exists():
                del form.base_fields['page_type']
        if 'add_page_type' in request.GET:
            # Page types only need title/slug; drop the meta fields.
            del form.base_fields['menu_title']
            del form.base_fields['meta_description']
            del form.base_fields['page_title']
        self.inlines = self.get_inline_classes(request, obj, **kwargs)
        if obj:
            # Editing: seed title-related initials from the Title object,
            # possibly at a specific reversion version.
            if 'history' in request.path_info or 'recover' in request.path_info:
                version_id = request.path_info.split('/')[-2]
            else:
                version_id = None
            title_obj = obj.get_title_obj(language=language, fallback=False, version_id=version_id, force_reload=True)
            if 'site' in form.base_fields and form.base_fields['site'].initial is None:
                form.base_fields['site'].initial = obj.site
            for name in ('slug', 'title', 'meta_description', 'menu_title', 'page_title', 'redirect'):
                if name in form.base_fields:
                    form.base_fields[name].initial = getattr(title_obj, name)
            if 'overwrite_url' in form.base_fields:
                if title_obj.has_url_overwrite:
                    form.base_fields['overwrite_url'].initial = title_obj.path
                else:
                    form.base_fields['overwrite_url'].initial = ''
        else:
            # Adding: blank title fields and pre-select parent/site from the
            # tree drop target passed in the query string.
            for name in ('slug', 'title'):
                form.base_fields[name].initial = u''
            if 'target' in request.GET or 'copy_target' in request.GET:
                target = request.GET.get('copy_target') or request.GET.get('target')
                if 'position' in request.GET:
                    position = request.GET['position']
                    if position == 'last-child' or position == 'first-child':
                        form.base_fields['parent'].initial = request.GET.get('target', None)
                    else:
                        # sibling positions: share the target's parent
                        sibling = Page.objects.get(pk=target)
                        form.base_fields['parent'].initial = sibling.parent_id
                else:
                    form.base_fields['parent'].initial = request.GET.get('target', None)
            form.base_fields['site'].initial = request.session.get('cms_admin_site', None)
        return form
    def advanced(self, request, object_id):
        # Advanced-settings subview; permission-gated, then delegates to
        # change_view with a flag the template/form machinery keys off.
        page = get_object_or_404(Page, pk=object_id)
        if not page.has_advanced_settings_permission(request):
            raise PermissionDenied("No permission for editing advanced settings")
        return self.change_view(request, object_id, extra_context={'advanced_settings': True, 'title': _("Advanced Settings")})
    def dates(self, request, object_id):
        # Publishing-dates subview (no extra permission beyond change).
        return self.change_view(request, object_id, extra_context={'publishing_dates': True, 'title': _("Publishing dates")})
    def permissions(self, request, object_id):
        # Page-permission subview; requires change-permissions permission.
        page = get_object_or_404(Page, pk=object_id)
        if not page.has_change_permissions_permission(request):
            raise PermissionDenied("No permission for editing advanced settings")
        return self.change_view(request, object_id, extra_context={'show_permissions': True, 'title': _("Change Permissions")})
    def get_inline_instances(self, request, obj=None):
        # Filter out page-permission inlines the current user may not edit
        # (view-restriction inlines are always kept).
        inlines = super(PageAdmin, self).get_inline_instances(request, obj)
        if get_cms_setting('PERMISSION') and obj:
            filtered_inlines = []
            for inline in inlines:
                if (isinstance(inline, PagePermissionInlineAdmin)
                and not isinstance(inline, ViewRestrictionInlineAdmin)):
                    if "recover" in request.path or "history" in request.path:
                        # do not display permissions in recover mode
                        continue
                    if not obj.has_change_permissions_permission(request):
                        continue
                filtered_inlines.append(inline)
            inlines = filtered_inlines
        return inlines
def get_unihandecode_context(self, language):
if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'):
uhd_lang = language[:2]
else:
uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER')
uhd_host = get_cms_setting('UNIHANDECODE_HOST')
uhd_version = get_cms_setting('UNIHANDECODE_VERSION')
if uhd_lang and uhd_host and uhd_version:
uhd_urls = [
'%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version),
'%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang),
]
else:
uhd_urls = []
return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls}
    def add_view(self, request, form_url='', extra_context=None):
        """The 'add' admin view, specialised for page types and page copies."""
        extra_context = extra_context or {}
        language = get_language_from_request(request)
        extra_context.update({
            'language': language,
        })
        if not request.GET.get('add_page_type') is None:
            extra_context.update({
                'add_page_type': True,
                'title': _("Add Page Type"),
            })
        elif 'copy_target' in request.GET:
            extra_context.update({
                'title': _("Add Page Copy"),
            })
        else:
            # plain add: show the language tab bar
            extra_context = self.update_language_tab_context(request, context=extra_context)
        extra_context.update(self.get_unihandecode_context(language))
        return super(PageAdmin, self).add_view(request, form_url, extra_context=extra_context)
    def change_view(self, request, object_id, form_url='', extra_context=None):
        """
        The 'change' admin view for the Page model.
        """
        if extra_context is None:
            extra_context = {'basic_info': True}
        try:
            obj = self.model.objects.get(pk=object_id)
        except self.model.DoesNotExist:
            # Don't raise Http404 just yet, because we haven't checked
            # permissions yet. We don't want an unauthenticated user to be able
            # to determine whether a given object exists.
            obj = None
        else:
            #activate(user_lang_set)
            context = {
                'page': obj,
                'CMS_PERMISSION': get_cms_setting('PERMISSION'),
                'ADMIN_MEDIA_URL': settings.STATIC_URL,
                'can_change': obj.has_change_permission(request),
                'can_change_permissions': obj.has_change_permissions_permission(request),
                'current_site_id': settings.SITE_ID,
            }
            context.update(extra_context or {})
            extra_context = self.update_language_tab_context(request, obj, context)
        tab_language = get_language_from_request(request)
        extra_context.update(self.get_unihandecode_context(tab_language))
        response = super(PageAdmin, self).change_view(
            request, object_id, form_url=form_url, extra_context=extra_context)
        # Preserve the selected language tab across the post-save redirect.
        # NOTE(review): this pokes HttpResponse._headers, a private Django
        # API -- confirm it still exists on the Django versions supported.
        if tab_language and response.status_code == 302 and response._headers['location'][1] == request.path_info:
            location = response._headers['location']
            response._headers['location'] = (location[0], "%s?language=%s" % (location[1], tab_language))
        return response
    def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
        # add context variables
        filled_languages = []
        if obj:
            # languages that already have a (non-null) title for this page
            filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')]
        allowed_languages = [lang[0] for lang in self._get_site_languages(obj)]
        context.update({
            'filled_languages': [lang for lang in filled_languages if lang in allowed_languages],
        })
        return super(PageAdmin, self).render_change_form(request, context, add, change, form_url, obj)
    def _get_site_languages(self, obj=None):
        # Language choices for the page's site, or for the current site
        # when no page is given.
        site_id = None
        if obj:
            site_id = obj.site_id
        else:
            site_id = Site.objects.get_current().pk
        return get_language_tuple(site_id)
def update_language_tab_context(self, request, obj=None, context=None):
if not context:
context = {}
language = get_language_from_request(request, obj)
languages = self._get_site_languages(obj)
context.update({
'language': language,
'language_tabs': languages,
# Dates are not language dependent, thus we hide the language
# selection bar: the language is forced through the form class
'show_language_tabs': len(list(languages)) > 1 and not context.get('publishing_dates', False),
})
return context
    def response_change(self, request, obj):
        """Called always when page gets changed, call save on page, there may be
        some new stuff, which should be published after all other objects on page
        are collected.
        """
        # save the object again, so all the related changes to page model
        # can be published if required
        obj.save()
        return super(PageAdmin, self).response_change(request, obj)
def has_add_permission(self, request):
"""
Return true if the current user has permission to add a new page.
"""
if get_cms_setting('PERMISSION'):
return permissions.has_page_add_permission(request)
return super(PageAdmin, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
"""
Return true if the current user has permission on the page.
Return the string 'All' if the user has all rights.
"""
if get_cms_setting('PERMISSION'):
if obj:
return obj.has_change_permission(request)
else:
return permissions.has_page_change_permission(request)
return super(PageAdmin, self).has_change_permission(request, obj)
def has_delete_permission(self, request, obj=None):
"""
Returns True if the given request has permission to change the given
Django model instance. If CMS_PERMISSION are in use also takes look to
object permissions.
"""
if get_cms_setting('PERMISSION') and obj is not None:
return obj.has_delete_permission(request)
return super(PageAdmin, self).has_delete_permission(request, obj)
def has_recover_permission(self, request):
"""
Returns True if the use has the right to recover pages
"""
if not is_installed('reversion'):
return False
user = request.user
if user.is_superuser:
return True
try:
if has_global_page_permission(request, can_recover_page=True):
return True
except:
pass
return False
def has_add_plugin_permission(self, request, placeholder, plugin_type):
if not permissions.has_plugin_permission(request.user, plugin_type, "add"):
return False
page = placeholder.page
if page and not page.has_change_permission(request):
return False
if page and not page.publisher_is_draft:
return False
return True
def has_copy_plugin_permission(self, request, source_placeholder, target_placeholder, plugins):
source_page = source_placeholder.page
if source_page and not source_page.has_change_permission(request):
return False
target_page = target_placeholder.page
if target_page and not target_page.has_change_permission(request):
return False
if target_page and not target_page.publisher_is_draft:
return False
for plugin in plugins:
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "add"):
return False
return True
def has_change_plugin_permission(self, request, plugin):
page = plugin.placeholder.page if plugin.placeholder else None
if page and not page.has_change_permission(request):
return False
if page and not page.publisher_is_draft:
return False
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return False
return True
def has_move_plugin_permission(self, request, plugin, target_placeholder):
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "change"):
return False
page = plugin.placeholder.page
if page and not page.has_change_permission(request):
return False
if page and not page.publisher_is_draft:
return False
return True
def has_delete_plugin_permission(self, request, plugin):
if not permissions.has_plugin_permission(request.user, plugin.plugin_type, "delete"):
return False
page = plugin.placeholder.page
if page:
if not page.publisher_is_draft:
return False
if not page.has_change_permission(request):
return False
return True
def has_clear_placeholder_permission(self, request, placeholder):
page = placeholder.page if placeholder else None
if page:
if not page.publisher_is_draft:
return False
if not page.has_change_permission(request):
return False
return True
    def post_add_plugin(self, request, placeholder, plugin):
        # Record a reversion revision after a plugin was added to a page.
        if is_installed('reversion') and placeholder.page:
            plugin_name = force_text(plugin_pool.get_plugin(plugin.plugin_type).name)
            message = _(u"%(plugin_name)s plugin added to %(placeholder)s") % {
                'plugin_name': plugin_name, 'placeholder': placeholder}
            self.cleanup_history(placeholder.page)
            helpers.make_revision_with_plugins(placeholder.page, request.user, message)
    def post_copy_plugins(self, request, source_placeholder, target_placeholder, plugins):
        # Record a revision on the page that received the copied plugins.
        page = target_placeholder.page
        if page and is_installed('reversion'):
            message = _(u"Copied plugins to %(placeholder)s") % {'placeholder': target_placeholder}
            self.cleanup_history(page)
            helpers.make_revision_with_plugins(page, request.user, message)
    def post_edit_plugin(self, request, plugin):
        page = plugin.placeholder.page
        if page:
            # if reversion is installed, save version of the page plugins
            if is_installed('reversion') and page:
                plugin_name = force_text(plugin_pool.get_plugin(plugin.plugin_type).name)
                message = _(
                    u"%(plugin_name)s plugin edited at position %(position)s in %(placeholder)s") % {
                        'plugin_name': plugin_name,
                        'position': plugin.position,
                        'placeholder': plugin.placeholder.slot
                    }
                self.cleanup_history(page)
                helpers.make_revision_with_plugins(page, request.user, message)
    def post_move_plugin(self, request, source_placeholder, target_placeholder, plugin):
        # Record a revision on the page the plugin was moved onto.
        page = target_placeholder.page
        if page and is_installed('reversion'):
            self.cleanup_history(page)
            helpers.make_revision_with_plugins(page, request.user, _(u"Plugins were moved"))
    def post_delete_plugin(self, request, plugin):
        # Re-save the page (updates publisher state) and record a revision.
        plugin_name = force_text(plugin_pool.get_plugin(plugin.plugin_type).name)
        page = plugin.placeholder.page
        if page:
            page.save()
            comment = _("%(plugin_name)s plugin at position %(position)s in %(placeholder)s was deleted.") % {
                'plugin_name': plugin_name,
                'position': plugin.position,
                'placeholder': plugin.placeholder,
            }
            if is_installed('reversion'):
                self.cleanup_history(page)
                helpers.make_revision_with_plugins(page, request.user, comment)
    def post_clear_placeholder(self, request, placeholder):
        # Same as post_delete_plugin, but for a whole-placeholder clear.
        page = placeholder.page
        if page:
            page.save()
            comment = _('All plugins in the placeholder "%(name)s" were deleted.') % {
                'name': force_text(placeholder)
            }
            if is_installed('reversion'):
                self.cleanup_history(page)
                helpers.make_revision_with_plugins(page, request.user, comment)
def get_placeholder_template(self, request, placeholder):
page = placeholder.page
if page:
return page.get_template()
def changelist_view(self, request, extra_context=None):
"The 'change list' admin view for this model."
from django.contrib.admin.views.main import ERROR_FLAG
opts = self.model._meta
app_label = opts.app_label
if not self.has_change_permission(request, None):
return HttpResponseForbidden(force_text(_("You do not have permission to change pages.")))
try:
cl = CMSChangeList(request, self.model, self.list_display, self.list_display_links, self.list_filter,
self.date_hierarchy, self.search_fields, self.list_select_related, self.list_per_page,
self.list_max_show_all, self.list_editable, self)
except IncorrectLookupParameters:
# Wacky lookup parameters were given, so redirect to the main
# changelist page, without parameters, and pass an 'invalid=1'
# parameter via the query string. If wacky parameters were given and
# the 'invalid=1' parameter was already in the query string, something
# is screwed up with the database, so display an error page.
if ERROR_FLAG in request.GET.keys():
return render_to_response('admin/invalid_setup.html', {'title': _('Database error')})
return HttpResponseRedirect(request.path_info + '?' + ERROR_FLAG + '=1')
cl.set_items(request)
site_id = request.GET.get('site__exact', None)
if site_id is None:
site_id = current_site(request).pk
site_id = int(site_id)
# languages
languages = get_language_list(site_id)
# parse the cookie that saves which page trees have
# been opened already and extracts the page ID
djangocms_nodes_open = request.COOKIES.get('djangocms_nodes_open', '')
raw_nodes = unquote(djangocms_nodes_open).split(',')
try:
open_menu_trees = [int(c.split('page_', 1)[1]) for c in raw_nodes]
except IndexError:
open_menu_trees = []
# Language may be present in the GET dictionary but empty
language = request.GET.get('language', get_language())
if not language:
language = get_language()
context = {
'title': cl.title,
'is_popup': cl.is_popup,
'cl': cl,
'opts': opts,
'has_add_permission': self.has_add_permission(request),
'root_path': admin_reverse('index'),
'app_label': app_label,
'preview_language': language,
'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
'CMS_PERMISSION': get_cms_setting('PERMISSION'),
'DEBUG': settings.DEBUG,
'site_languages': languages,
'open_menu_trees': open_menu_trees,
}
if is_installed('reversion'):
context['has_recover_permission'] = self.has_recover_permission(request)
context['has_change_permission'] = self.has_change_permission(request)
context.update(extra_context or {})
return render_to_response(self.change_list_template or [
'admin/%s/%s/change_list.html' % (app_label, opts.object_name.lower()),
'admin/%s/change_list.html' % app_label,
'admin/change_list.html'
], context, context_instance=RequestContext(request))
def recoverlist_view(self, request, extra_context=None):
    """Render django-reversion's recover list, restricted to users with recover permission."""
    if self.has_recover_permission(request):
        return super(PageAdmin, self).recoverlist_view(request, extra_context)
    raise PermissionDenied
def recover_view(self, request, version_id, extra_context=None):
    """Render django-reversion's recover form for a deleted page version."""
    if not self.has_recover_permission(request):
        raise PermissionDenied
    # Inject the language tab data the CMS admin templates expect.
    ctx = self.update_language_tab_context(request, None, extra_context)
    return super(PageAdmin, self).recover_view(request, version_id, ctx)
def revision_view(self, request, object_id, version_id, extra_context=None):
    """Render django-reversion's revision form, guarded by change permission on the page."""
    if not self.has_change_permission(request, Page.objects.get(pk=object_id)):
        raise PermissionDenied
    ctx = self.update_language_tab_context(request, None, extra_context)
    return super(PageAdmin, self).revision_view(
        request, object_id, version_id, ctx)
def history_view(self, request, object_id, extra_context=None):
    """Show the admin history list for a page; requires change permission on that page."""
    page = Page.objects.get(pk=object_id)
    if not self.has_change_permission(request, page):
        raise PermissionDenied
    ctx = self.update_language_tab_context(request, None, extra_context)
    return super(PageAdmin, self).history_view(request, object_id, ctx)
def render_revision_form(self, request, obj, version, context, revert=False, recover=False):
    """Render the reversion revert/recover form, repairing a stale parent FK first.

    If the parent page recorded in the stored version no longer exists,
    fall back to the object's current parent (revert) or detach the page
    entirely (recover) before delegating to the base implementation.
    """
    # reset parent to null if parent is not found
    if version.field_dict['parent']:
        try:
            Page.objects.get(pk=version.field_dict['parent'])
        except:  # noqa -- parent pk missing or malformed; treat either case as "parent gone"
            if revert and obj.parent_id != int(version.field_dict['parent']):
                version.field_dict['parent'] = obj.parent_id
            if recover:
                obj.parent = None
                obj.parent_id = None
                version.field_dict['parent'] = None
    # Expose the version on the object for downstream template/form code.
    obj.version = version
    return super(PageAdmin, self).render_revision_form(request, obj, version, context, revert, recover)
@require_POST
def undo(self, request, object_id):
    """Revert the page draft to its previous revision via django-reversion.

    Returns plain-text "ok" on success, 400 when reversion is missing or
    there is no revision to undo, 403 without change permission.
    """
    if not is_installed('reversion'):
        return HttpResponseBadRequest('django reversion not installed')
    page = get_object_or_404(Page, pk=object_id)
    if not page.publisher_is_draft:
        page = page.publisher_draft
    if not page.has_change_permission(request):
        return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
    try:
        reverted, clean = page.undo()
        if not clean:
            messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
    except IndexError as e:
        # BUG FIX: Exception.message does not exist on Python 3; use the
        # exception's string form (identical text on Python 2).
        return HttpResponseBadRequest(force_text(e))
    return HttpResponse("ok")
@require_POST
def redo(self, request, object_id):
    """Re-apply the next revision of the page draft via django-reversion.

    Mirrors ``undo``: "ok" on success, 400 when reversion is missing or
    there is nothing to redo, 403 without change permission.
    """
    if not is_installed('reversion'):
        return HttpResponseBadRequest('django reversion not installed')
    page = get_object_or_404(Page, pk=object_id)
    if not page.publisher_is_draft:
        page = page.publisher_draft
    if not page.has_change_permission(request):
        return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
    try:
        reverted, clean = page.redo()
        if not clean:
            messages.error(request, _("Page reverted but slug stays the same because of url collisions."))
    except IndexError as e:
        # BUG FIX: Exception.message does not exist on Python 3; use the
        # exception's string form (identical text on Python 2).
        return HttpResponseBadRequest(force_text(e))
    return HttpResponse("ok")
@require_POST
@create_revision()
def change_template(self, request, object_id):
    """POST endpoint that switches a page's template.

    Validates that the submitted template is one of the configured
    CMS_TEMPLATES, then saves and (when django-reversion is installed)
    records a revision describing the change.
    """
    page = get_object_or_404(Page, pk=object_id)
    if not page.has_change_permission(request):
        return HttpResponseForbidden(force_text(_("You do not have permission to change the template")))
    to_template = request.POST.get("template", None)
    if to_template not in dict(get_cms_setting('TEMPLATES')):
        return HttpResponseBadRequest(force_text(_("Template not valid")))
    page.template = to_template
    page.save()
    if is_installed('reversion'):
        message = _("Template changed to %s") % dict(get_cms_setting('TEMPLATES'))[to_template]
        self.cleanup_history(page)
        helpers.make_revision_with_plugins(page, request.user, message)
    return HttpResponse(force_text(_("The template was successfully changed")))
@transaction.atomic
def move_page(self, request, page_id, extra_context=None):
    """
    Move the page to the requested target, at the given position
    """
    target = request.POST.get('target', None)
    position = request.POST.get('position', None)
    if target is None or position is None:
        return HttpResponseRedirect('../../')
    try:
        page = self.model.objects.get(pk=page_id)
        target = self.model.objects.get(pk=target)
    except self.model.DoesNotExist:
        return jsonify_request(HttpResponseBadRequest("error"))
    # does the user have permission to move this page under the target?
    if not page.has_move_page_permission(request) or \
            not target.has_add_permission(request):
        return jsonify_request(
            HttpResponseForbidden(force_text(_("Error! You don't have permissions to move this page. Please reload the page"))))
    # move page
    page.move_page(target, position)
    if is_installed('reversion'):
        # Keep revision history bounded and record the move.
        self.cleanup_history(page)
        helpers.make_revision_with_plugins(page, request.user, _("Page moved"))
    # Return the re-rendered tree item for the moved page (JSON-wrapped for AJAX).
    return jsonify_request(HttpResponse(admin_utils.render_admin_menu_item(request, page).content))
def get_permissions(self, request, page_id):
    """Render the permission overview table for a page.

    Combines global page permissions for the page's site with per-page
    permissions, annotating each row with (is_global, user_may_edit).
    """
    page = get_object_or_404(Page, id=page_id)
    can_change_list = Page.permissions.get_change_id_list(request.user, page.site_id)
    global_page_permissions = GlobalPagePermission.objects.filter(sites__in=[page.site_id])
    page_permissions = PagePermission.objects.for_page(page)
    all_permissions = list(global_page_permissions) + list(page_permissions)
    # may the current user change global permissions?
    has_global = permissions.has_global_change_permissions_permission(request)
    permission_set = []
    for permission in all_permissions:
        if isinstance(permission, GlobalPagePermission):
            if has_global:
                permission_set.append([(True, True), permission])
            else:
                permission_set.append([(True, False), permission])
        else:
            # GRANT_ALL is a sentinel meaning "every page is editable".
            if can_change_list == PagePermissionsPermissionManager.GRANT_ALL:
                can_change = True
            else:
                can_change = permission.page_id in can_change_list
            permission_set.append([(False, can_change), permission])
    context = {
        'page': page,
        'permission_set': permission_set,
    }
    return render_to_response('admin/cms/page/permissions.html', context)
@require_POST
@transaction.atomic
def copy_language(self, request, page_id):
    """Copy all plugins of a page from ``source_language`` to ``target_language``.

    Both languages come from the POST data; the copy is recorded as a
    reversion revision when django-reversion is installed.
    """
    with create_revision():
        source_language = request.POST.get('source_language')
        target_language = request.POST.get('target_language')
        page = Page.objects.get(pk=page_id)
        placeholders = page.get_placeholders()
        if not target_language or not target_language in get_language_list():
            return HttpResponseBadRequest(force_text(_("Language must be set to a supported language!")))
        for placeholder in placeholders:
            plugins = list(
                placeholder.cmsplugin_set.filter(language=source_language).order_by('path'))
            # Source and target placeholder are the same; only the language differs.
            if not self.has_copy_plugin_permission(request, placeholder, placeholder, plugins):
                return HttpResponseForbidden(force_text(_('You do not have permission to copy these plugins.')))
            copy_plugins.copy_plugins_to(plugins, placeholder, target_language)
        if page and is_installed('reversion'):
            message = _(u"Copied plugins from %(source_language)s to %(target_language)s") % {
                'source_language': source_language, 'target_language': target_language}
            self.cleanup_history(page)
            helpers.make_revision_with_plugins(page, request.user, message)
        return HttpResponse("ok")
@transaction.atomic
def copy_page(self, request, page_id, extra_context=None):
    """
    Copy the page and all its plugins and descendants to the requested target, at the given position
    """
    context = {}
    page = Page.objects.get(pk=page_id)
    target = request.POST.get('target', None)
    position = request.POST.get('position', None)
    site = request.POST.get('site', None)
    if target is not None and position is not None and site is not None:
        try:
            target = self.model.objects.get(pk=target)
            # does he have permissions to copy this page under target?
            assert target.has_add_permission(request)
            site = Site.objects.get(pk=site)
        except (ObjectDoesNotExist, AssertionError):
            return HttpResponse("error")
            #context.update({'error': _('Page could not been moved.')})
        else:
            try:
                kwargs = {
                    # NOTE(review): request.REQUEST is deprecated/removed in
                    # newer Django; confirm the supported Django range.
                    'copy_permissions': request.REQUEST.get('copy_permissions', False),
                }
                page.copy_page(target, site, position, **kwargs)
                return jsonify_request(HttpResponse("ok"))
            except ValidationError:
                exc = sys.exc_info()[1]
                return jsonify_request(HttpResponseBadRequest(exc.messages))
    context.update(extra_context or {})
    return HttpResponseRedirect('../../')
@transaction.atomic
@create_revision()
def publish_page(self, request, page_id, language):
    """Publish a page draft and/or the static placeholders listed in GET.

    ``statics`` is a comma-separated list of StaticPlaceholder pks in the
    query string. After publishing, redirects back to the changelist, the
    published page, or the referrer depending on where the request came from.
    """
    try:
        page = Page.objects.get(id=page_id, publisher_is_draft=True)
    except Page.DoesNotExist:
        page = None
    # ensure user has permissions to publish this page
    all_published = True
    if page:
        if not page.has_publish_permission(request):
            return HttpResponseForbidden(force_text(_("You do not have permission to publish this page")))
        published = page.publish(language)
        if not published:
            all_published = False
    statics = request.GET.get('statics', '')
    if not statics and not page:
        # BUG FIX: Http404 is an exception and must be raised; the old code
        # returned it, handing Django a non-HttpResponse object.
        raise Http404("No page or stack found for publishing.")
    if statics:
        static_ids = statics.split(',')
        for pk in static_ids:
            static_placeholder = StaticPlaceholder.objects.get(pk=pk)
            published = static_placeholder.publish(request, language)
            if not published:
                all_published = False
    if page:
        if all_published:
            if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
                messages.warning(request, _("Page not published! A parent page is not published yet."))
            else:
                messages.info(request, _('The content was successfully published.'))
                LogEntry.objects.log_action(
                    user_id=request.user.id,
                    content_type_id=ContentType.objects.get_for_model(Page).pk,
                    object_id=page_id,
                    object_repr=page.get_title(language),
                    action_flag=CHANGE,
                )
        else:
            if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
                messages.warning(request, _("Page not published! A parent page is not published yet."))
            else:
                messages.warning(request, _("There was a problem publishing your content"))
    if is_installed('reversion') and page:
        self.cleanup_history(page, publish=True)
        helpers.make_revision_with_plugins(page, request.user, PUBLISH_COMMENT)
        # create a new publish reversion
    if 'node' in request.REQUEST:
        # if request comes from tree..
        return admin_utils.render_admin_menu_item(request, page)
    if 'redirect' in request.GET:
        return HttpResponseRedirect(request.GET['redirect'])
    referrer = request.META.get('HTTP_REFERER', '')
    path = admin_reverse("cms_page_changelist")
    if request.GET.get('redirect_language'):
        path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'), request.GET.get('redirect_page_id'))
    if admin_reverse('index') not in referrer:
        # Request did not come from the admin: send the user to the site.
        if all_published:
            if page:
                if page.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
                    path = page.get_absolute_url(language, fallback=True)
                else:
                    public_page = Page.objects.get(publisher_public=page.pk)
                    path = '%s?%s' % (public_page.get_absolute_url(language, fallback=True), get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
            else:
                path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
        else:
            path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
    return HttpResponseRedirect(path)
def cleanup_history(self, page, publish=False):
    """Prune old reversion revisions for ``page``.

    Keeps at most MAX_PAGE_HISTORY_REVERSIONS non-publish revisions and,
    when ``publish`` is True, at most MAX_PAGE_PUBLISH_REVERSIONS publish
    revisions. Initial and publish revisions are identified by comment text.
    """
    if is_installed('reversion') and page:
        # delete revisions that are not publish revisions
        from reversion.models import Version
        content_type = ContentType.objects.get_for_model(Page)
        # reversion 1.8+ removes type field, revision filtering must be based on comments
        versions_qs = Version.objects.filter(content_type=content_type, object_id_int=page.pk)
        history_limit = get_cms_setting("MAX_PAGE_HISTORY_REVERSIONS")
        deleted = []
        for version in versions_qs.exclude(revision__comment__in=(INITIAL_COMMENT, PUBLISH_COMMENT)).order_by(
                '-revision__pk')[history_limit - 1:]:
            # A revision can own several versions; delete each revision once.
            if not version.revision_id in deleted:
                revision = version.revision
                revision.delete()
                deleted.append(revision.pk)
        # delete all publish revisions that are more then MAX_PAGE_PUBLISH_REVERSIONS
        publish_limit = get_cms_setting("MAX_PAGE_PUBLISH_REVERSIONS")
        if publish_limit and publish:
            deleted = []
            for version in versions_qs.filter(revision__comment__exact=PUBLISH_COMMENT).order_by(
                    '-revision__pk')[publish_limit - 1:]:
                if not version.revision_id in deleted:
                    revision = version.revision
                    revision.delete()
                    deleted.append(revision.pk)
@transaction.atomic
def unpublish(self, request, page_id, language):
    """
    Unpublish one language of a page, then redirect back to the changelist.

    Requires publish permission and a previously published public version;
    unpublish errors are surfaced as admin messages rather than raised.
    """
    site = Site.objects.get_current()
    page = get_object_or_404(Page, pk=page_id)
    if not page.has_publish_permission(request):
        return HttpResponseForbidden(force_text(_("You do not have permission to unpublish this page")))
    if not page.publisher_public_id:
        return HttpResponseForbidden(force_text(_("This page was never published")))
    try:
        page.unpublish(language)
        message = _('The %(language)s page "%(page)s" was successfully unpublished') % {
            'language': get_language_object(language, site)['name'], 'page': page}
        messages.info(request, message)
        LogEntry.objects.log_action(
            user_id=request.user.id,
            content_type_id=ContentType.objects.get_for_model(Page).pk,
            object_id=page_id,
            object_repr=page.get_title(),
            action_flag=CHANGE,
            change_message=message,
        )
    # BUG FIX: Exception.message does not exist on Python 3. Use the string
    # form for RuntimeError and the messages list for ValidationError.
    except RuntimeError as exc:
        messages.error(request, force_text(exc))
    except ValidationError as exc:
        messages.error(request, '; '.join(exc.messages))
    path = admin_reverse("cms_page_changelist")
    if request.GET.get('redirect_language'):
        path = "%s?language=%s&page_id=%s" % (path, request.GET.get('redirect_language'), request.GET.get('redirect_page_id'))
    return HttpResponseRedirect(path)
@transaction.atomic
def revert_page(self, request, page_id, language):
    """Revert the draft of one language back to its published version."""
    page = get_object_or_404(Page, id=page_id)
    # ensure user has permissions to change this page
    if not page.has_change_permission(request):
        return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
    page.revert(language)
    messages.info(request, _('The page "%s" was successfully reverted.') % page)
    # NOTE(review): request.REQUEST is deprecated/removed in newer Django;
    # confirm the supported Django range.
    if 'node' in request.REQUEST:
        # if request comes from tree..
        return admin_utils.render_admin_menu_item(request, page)
    referer = request.META.get('HTTP_REFERER', '')
    path = '../../'
    if admin_reverse('index') not in referer:
        # Came from the frontend: go back there with edit mode switched off.
        path = '%s?%s' % (referer.split('?')[0], get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
    return HttpResponseRedirect(path)
@create_revision()
def delete_translation(self, request, object_id, extra_context=None):
    """Delete one translation (Title + its plugins) of a page.

    GET shows a confirmation page listing everything that will be removed;
    POST performs the deletion. Refuses to remove the last remaining
    translation of a page.
    """
    if 'language' in request.GET:
        language = request.GET['language']
    else:
        language = get_language_from_request(request)
    opts = Page._meta
    titleopts = Title._meta
    app_label = titleopts.app_label
    pluginopts = CMSPlugin._meta
    try:
        obj = self.get_queryset(request).get(pk=unquote(object_id))
    except self.model.DoesNotExist:
        # Don't raise Http404 just yet, because we haven't checked
        # permissions yet. We don't want an unauthenticated user to be able
        # to determine whether a given object exists.
        obj = None
    if not self.has_delete_permission(request, obj):
        return HttpResponseForbidden(force_text(_("You do not have permission to change this page")))
    if obj is None:
        raise Http404(
            _('%(name)s object with primary key %(key)r does not exist.') % {
                'name': force_text(opts.verbose_name),
                'key': escape(object_id)
            })
    if not len(list(obj.get_languages())) > 1:
        raise Http404(_('There only exists one translation for this page'))
    titleobj = get_object_or_404(Title, page__id=object_id, language=language)
    saved_plugins = CMSPlugin.objects.filter(placeholder__page__id=object_id, language=language)
    using = router.db_for_read(self.model)
    kwargs = {
        'admin_site': self.admin_site,
        'user': request.user,
        'using': using
    }
    # [:2] keeps compatibility across Django versions where
    # get_deleted_objects returns extra trailing values.
    deleted_objects, perms_needed = get_deleted_objects(
        [titleobj],
        titleopts,
        **kwargs
    )[:2]
    to_delete_plugins, perms_needed_plugins = get_deleted_objects(
        saved_plugins,
        pluginopts,
        **kwargs
    )[:2]
    deleted_objects.append(to_delete_plugins)
    perms_needed = set(list(perms_needed) + list(perms_needed_plugins))
    if request.method == 'POST':
        if perms_needed:
            raise PermissionDenied
        message = _('Title and plugins with language %(language)s was deleted') % {
            'language': force_text(get_language_object(language)['name'])
        }
        self.log_change(request, titleobj, message)
        messages.info(request, message)
        titleobj.delete()
        for p in saved_plugins:
            p.delete()
        public = obj.publisher_public
        if public:
            # Re-save the public page so its state reflects the deletion.
            public.save()
        if is_installed('reversion'):
            self.cleanup_history(obj)
            helpers.make_revision_with_plugins(obj, request.user, message)
        if not self.has_change_permission(request, None):
            return HttpResponseRedirect("../../../../")
        return HttpResponseRedirect("../../")
    context = {
        "title": _("Are you sure?"),
        "object_name": force_text(titleopts.verbose_name),
        "object": titleobj,
        "deleted_objects": deleted_objects,
        "perms_lacking": perms_needed,
        "opts": opts,
        "root_path": admin_reverse('index'),
        "app_label": app_label,
    }
    context.update(extra_context or {})
    context_instance = RequestContext(request, current_app=self.admin_site.name)
    return render_to_response(self.delete_confirmation_template or [
        "admin/%s/%s/delete_confirmation.html" % (app_label, titleopts.object_name.lower()),
        "admin/%s/delete_confirmation.html" % app_label,
        "admin/delete_confirmation.html"
    ], context, context_instance=context_instance)
def preview_page(self, request, object_id, language):
    """Redirect to a frontend preview of the page with edit mode enabled."""
    page = get_object_or_404(Page, id=object_id)
    query = "?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
    query += "&language=" + language
    with force_language(language):
        url = page.get_absolute_url(language) + query
        # If the page lives on another site, build an absolute URL for it.
        if get_current_site(request) != page.site:
            scheme = 'https' if request.is_secure() else 'http'
            url = "%s://%s%s" % (scheme, page.site.domain, url)
        return HttpResponseRedirect(url)
def change_innavigation(self, request, page_id):
    """Toggle a page's in_navigation flag and return its refreshed tree item."""
    page = get_object_or_404(Page, pk=page_id)
    if not page.has_change_permission(request):
        return HttpResponseForbidden(force_text(
            _("You do not have permission to change this page's in_navigation status")))
    page.toggle_in_navigation()
    language = request.GET.get('language') or get_language_from_request(request)
    return admin_utils.render_admin_menu_item(request, page, language=language)
def descendants(self, request, page_id, language):
    """
    Get html for descendants of given page
    Used for lazy loading pages in cms.changelist.js
    Permission checks is done in admin_utils.get_admin_menu_item_context
    which is called by admin_utils.render_admin_menu_item.
    """
    page = get_object_or_404(Page, pk=page_id)
    # lazy_menu.html renders only the subtree, not the full changelist row.
    return admin_utils.render_admin_menu_item(request, page,
                                              template="admin/cms/page/tree/lazy_menu.html", language=language)
def add_page_type(self, request):
    """Redirect to the "add page" form pre-configured to create a page type.

    Lazily creates the hidden "Page Types" root page (reverse_id
    PAGE_TYPES_ID) on the current site, then redirects to cms_page_add
    with the new type parented under that root.
    """
    site = Site.objects.get_current()
    language = request.GET.get('language') or get_language()
    # Optional source page whose content the new type should copy.
    target = request.GET.get('copy_target')
    type_root, created = Page.objects.get_or_create(reverse_id=PAGE_TYPES_ID, publisher_is_draft=True, site=site,
                                                    defaults={'in_navigation': False})
    type_title, created = Title.objects.get_or_create(page=type_root, language=language, slug=PAGE_TYPES_ID,
                                                      defaults={'title': _('Page Types')})
    url = add_url_parameters(admin_reverse('cms_page_add'), target=type_root.pk, position='first-child',
                             add_page_type=1, copy_target=target, language=language)
    return HttpResponseRedirect(url)
def resolve(self, request):
    """Resolve the frontend URL of the object the toolbar last edited.

    Looks up the object either from the 'cms_log_latest' session entry or
    from pk/model request parameters, and returns its URL as plain text
    ('' when nothing resolvable, '/' for non-staff users).
    """
    if not request.user.is_staff:
        return HttpResponse('/', content_type='text/plain')
    obj = False
    url = False
    if request.session.get('cms_log_latest', False):
        log = LogEntry.objects.get(pk=request.session['cms_log_latest'])
        try:
            obj = log.get_edited_object()
        except (ObjectDoesNotExist, ValueError):
            obj = None
        del request.session['cms_log_latest']
        if obj and obj.__class__ in toolbar_pool.get_watch_models() and hasattr(obj, 'get_absolute_url'):
            # This is a test if the object url can be retrieved
            # In case it can't, object it's not taken into account
            try:
                force_text(obj.get_absolute_url())
            except Exception:  # BUG FIX: was a bare except; don't swallow system exits
                obj = None
        else:
            obj = None
    if not obj:
        # NOTE(review): request.REQUEST is deprecated/removed in newer
        # Django; confirm the supported Django range.
        pk = request.REQUEST.get('pk')
        full_model = request.REQUEST.get('model')
        if pk and full_model:
            app_label, model = full_model.split('.')
            if pk and app_label:
                ctype = ContentType.objects.get(app_label=app_label, model=model)
                try:
                    obj = ctype.get_object_for_this_type(pk=pk)
                except ctype.model_class().DoesNotExist:
                    obj = None
                try:
                    force_text(obj.get_absolute_url())
                except Exception:  # BUG FIX: was a bare except
                    obj = None
    if obj:
        if not request.toolbar or not request.toolbar.edit_mode:
            if isinstance(obj, Page):
                if obj.get_public_object():
                    url = obj.get_public_object().get_absolute_url()
                else:
                    # Unpublished page: fall back to the draft with edit mode on.
                    url = '%s?%s' % (
                        obj.get_draft_object().get_absolute_url(),
                        get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
                    )
            else:
                url = obj.get_absolute_url()
        else:
            url = obj.get_absolute_url()
    if url:
        return HttpResponse(force_text(url), content_type='text/plain')
    return HttpResponse('', content_type='text/plain')
def lookup_allowed(self, key, *args, **kwargs):
    """Additionally allow filtering the changelist by exact site."""
    return key == 'site__exact' or super(PageAdmin, self).lookup_allowed(key, *args, **kwargs)
def edit_title_fields(self, request, page_id, language):
    """Frontend-editing endpoint for a page Title's editable fields.

    Builds a ModelForm restricted to the fields requested via
    ``edit_fields`` (filtered against title_frontend_editable_fields),
    renders it in a popup, and on successful POST shows a confirmation
    page that closes the popup.
    """
    title = Title.objects.get(page_id=page_id, language=language)
    saved_successfully = False
    raw_fields = request.GET.get("edit_fields", 'title')
    edit_fields = [field for field in raw_fields.split(",") if field in self.title_frontend_editable_fields]
    cancel_clicked = request.POST.get("_cancel", False)
    opts = Title._meta
    if not edit_fields:
        # Defaults to title
        edit_fields = ('title',)
    if not has_generic_permission(title.page.pk, request.user, "change",
                                  title.page.site.pk):
        return HttpResponseForbidden(force_text(_("You do not have permission to edit this page")))

    class PageTitleForm(django.forms.ModelForm):
        """
        Dynamic form showing only the fields to be edited
        """
        class Meta:
            model = Title
            fields = edit_fields

    if not cancel_clicked and request.method == 'POST':
        form = PageTitleForm(instance=title, data=request.POST)
        if form.is_valid():
            form.save()
            saved_successfully = True
    else:
        form = PageTitleForm(instance=title)
    admin_form = AdminForm(form, fieldsets=[(None, {'fields': edit_fields})], prepopulated_fields={},
                           model_admin=self)
    media = self.media + admin_form.media
    context = {
        'CMS_MEDIA_URL': get_cms_setting('MEDIA_URL'),
        'title': 'Title',
        'plugin': title.page,
        'plugin_id': title.page.id,
        'adminform': admin_form,
        'add': False,
        'is_popup': True,
        'media': media,
        'opts': opts,
        'change': True,
        'save_as': False,
        'has_add_permission': False,
        'window_close_timeout': 10,
    }
    if cancel_clicked:
        # cancel button was clicked
        context.update({
            'cancel': True,
        })
        return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
    if not cancel_clicked and request.method == 'POST' and saved_successfully:
        return render_to_response('admin/cms/page/plugin/confirm_form.html', context, RequestContext(request))
    return render_to_response('admin/cms/page/plugin/change_form.html', context, RequestContext(request))
def get_published_pagelist(self, *args, **kwargs):
    """
    This view is used by the PageSmartLinkWidget as the user type to feed the autocomplete drop-down.
    """
    request = args[0]
    if request.is_ajax():
        # Strip slashes so "/about/" matches a stored path of "about".
        query_term = request.GET.get('q', '').strip('/')
        language_code = request.GET.get('language_code', settings.LANGUAGE_CODE)
        # Match the query against title, path, menu title and page title.
        matching_published_pages = Page.objects.published().public().filter(
            Q(title_set__title__icontains=query_term, title_set__language=language_code)
            | Q(title_set__path__icontains=query_term, title_set__language=language_code)
            | Q(title_set__menu_title__icontains=query_term, title_set__language=language_code)
            | Q(title_set__page_title__icontains=query_term, title_set__language=language_code)
        ).distinct()
        results = []
        for page in matching_published_pages:
            results.append(
                {
                    'path': page.get_path(language=language_code),
                    'title': page.get_title(language=language_code),
                    'redirect_url': page.get_absolute_url(language=language_code)
                }
            )
        return HttpResponse(json.dumps(results), content_type='application/json')
    else:
        return HttpResponseForbidden()
def add_plugin(self, *args, **kwargs):
    # Record plugin additions as a reversion revision.
    with create_revision():
        return super(PageAdmin, self).add_plugin(*args, **kwargs)

def copy_plugins(self, *args, **kwargs):
    # Record plugin copies as a reversion revision.
    with create_revision():
        return super(PageAdmin, self).copy_plugins(*args, **kwargs)

def edit_plugin(self, *args, **kwargs):
    # Record plugin edits as a reversion revision.
    with create_revision():
        return super(PageAdmin, self).edit_plugin(*args, **kwargs)

def move_plugin(self, *args, **kwargs):
    # Record plugin moves as a reversion revision.
    with create_revision():
        return super(PageAdmin, self).move_plugin(*args, **kwargs)

def delete_plugin(self, *args, **kwargs):
    # Record plugin deletions as a reversion revision.
    with create_revision():
        return super(PageAdmin, self).delete_plugin(*args, **kwargs)

def clear_placeholder(self, *args, **kwargs):
    # Record placeholder clears as a reversion revision.
    with create_revision():
        return super(PageAdmin, self).clear_placeholder(*args, **kwargs)
admin.site.register(Page, PageAdmin)  # hook PageAdmin into the default admin site
| DylannCordel/django-cms | cms/admin/pageadmin.py | Python | bsd-3-clause | 64,240 |
import sys
import os
def get():
    """Load the './configuration' file by exec'ing it and return its namespace.

    Raises:
        IOError: if the configuration file does not exist.
    """
    config_file = os.path.abspath('./configuration')
    if not os.path.exists(config_file):
        sys.stderr.write('Can not find configuration file\n')
        # BUG FIX: a bare `raise` outside an except block has no active
        # exception to re-raise and itself blows up with a TypeError;
        # raise a concrete, catchable error instead.
        raise IOError('Can not find configuration file: %s' % config_file)
    config = {}
    execfile(config_file, config)
    return config
| markbreedlove/soundlocale | config.py | Python | gpl-3.0 | 269 |
# Author - Hades.y2k
# Date - 04/06/2015
# Version - 2.0
# License - <GPL v2>
import socket
import thread
from sys import platform
# Initialize
# Default TCP ports probed for banners (FTP, SSH, SMTP, HTTP, POP3, HTTPS).
portlist = [21,22,25,80,110,443] # You can add more.
class bcolors:
    # ANSI terminal escape codes used for colored console output.
    RED = '\033[91m'
    BOLD = '\033[1m'
    OKGREEN = '\033[92m'
    ENDC = '\033[0m'      # reset all attributes
    WARNING = '\033[93m'
class BGrab():
print bcolors.RED + "\n #====" + bcolors.ENDC + bcolors.BOLD + " SIMPLE BANNER GRABBING " + bcolors.ENDC + bcolors.RED + "=============#" + bcolors.ENDC
print bcolors.RED + " #==================" + bcolors.ENDC + bcolors.BOLD + " v2 04/06/2015 " + bcolors.ENDC + bcolors.RED + "=========#" + bcolors.ENDC
print bcolors.RED + " #=======================" + bcolors.ENDC + bcolors.BOLD + " Hades.y2k " + bcolors.ENDC + bcolors.RED + "=======#" + bcolors.ENDC
print bcolors.RED + " #==== " + bcolors.ENDC + bcolors.BOLD + "www.github.com/Hadesy2k/banner-grab" + bcolors.ENDC + bcolors.RED + " ====#\n" + bcolors.ENDC
def __init__(self):
self.enumerate()
def first(self,ip):
for x in range(0,129):
for port in portlist:
ip_address = ip + str(x)
banner = self.Banner(ip_address, port)
if banner:
print bcolors.OKGREEN + "[+] " + bcolors.ENDC + bcolors.BOLD + ip_address + bcolors.ENDC + ':' + bcolors.BOLD + banner + bcolors.ENDC
def second(self,ip):
for x in range(129,256):
for port in portlist:
ip_address = ip + str(x)
banner = self.Banner(ip_address, port)
if banner:
print bcolors.OKGREEN + "[+] " + bcolors.ENDC + bcolors.BOLD + ip_address + bcolors.ENDC + ':' + bcolors.BOLD + banner + bcolors.ENDC
def Banner(self, ip, port):
try:
socket.setdefaulttimeout(2)
soc = socket.socket()
soc.connect((ip, port))
banner = soc.recv(1024)
return banner
except:
return
def enumerate(self):
# Initialize
host = raw_input("Enter the host name: ")
print bcolors.WARNING + '[+]' + bcolors.ENDC + bcolors.BOLD + " Engaging the Target" + bcolors.ENDC + "\n"
init_ip = socket.gethostbyname(host)
# This first-3 of ip method comes from pingsweep.py from the-c0d3r pynmap.
octets = init_ip.split('.')
# got 4 octets now
# get first 3 octets, combine them with '.'
# and iterate from 0 to 255 for the last octet
ip = str(octets[0]+"."+octets[1]+"."+octets[2]+".")
thread.start_new_thread(self.first(ip), ())
thread.start_new_thread(self.second(ip), ())
if __name__ == '__main__':
    # The ANSI color codes used throughout only render on Linux terminals.
    if 'linux' in platform:
        BGrab()
    else:
        print '[!] This script works only in Linux.'
| Hadesy2k/banner-grab | banenrgrab.py | Python | gpl-2.0 | 2,843 |
import numpy as np
from functools import reduce
from operator import mul
from .logger import logger
from .compression import CompressedObject
import pickle
class NumpyStorage(object):
    """Pre-allocated memory pool for Revolve checkpoints.

    One row of a C-contiguous 2-D numpy array is reserved per checkpoint.
    Revolve typically uses the slots as a LIFO stack, but any slot can be
    read or written directly (random access). The []-operator returns the
    flat memory slice backing a given checkpoint number.
    """

    def __init__(self, size_ckp, n_ckp, dtype, profiler):
        # One row per checkpoint, size_ckp entries each, allocated up front.
        self.storage = np.zeros((n_ckp, size_ckp), order='C', dtype=dtype)
        self.shapes = {}
        self.profiler = profiler

    def __getitem__(self, key):
        """Return the flat slice of memory reserved for checkpoint ``key``."""
        return self.storage[key, :]

    def save(self, key, data_pointers):
        """Flatten each array in ``data_pointers`` into slot ``key``,
        remembering the original shapes for ``load``."""
        slot = self[key]
        recorded_shapes = []
        offset = 0
        for ptr in data_pointers:
            # ravel() only avoids a copy when the innermost axis is contiguous.
            assert(ptr.strides[-1] == ptr.itemsize)
            with self.profiler.get_timer('storage', 'flatten'):
                flat = ptr.ravel()
            with self.profiler.get_timer('storage', 'copy_save'):
                np.copyto(slot[offset:offset + len(flat)], flat)
            recorded_shapes.append(ptr.shape)
            offset += len(flat)
        self.shapes[key] = recorded_shapes

    def load(self, key, locations):
        """Copy slot ``key`` back into the arrays listed in ``locations``."""
        slot = self[key]
        offset = 0
        for shape, ptr in zip(self.shapes[key], locations):
            count = reduce(mul, ptr.shape)
            with self.profiler.get_timer('storage', 'copy_load'):
                np.copyto(ptr, slot[offset:offset + count].reshape(ptr.shape))
            offset += count
class BytesStorage(object):
    """Holds a chunk of memory large enough to store all checkpoints. The
    []-operator is overloaded to return a pointer to the memory reserved for a
    given checkpoint number. Revolve will typically use this as LIFO, but the
    storage also supports random access."""
    """Allocates memory on initialisation. Requires number of checkpoints and
    size of one checkpoint. Memory is allocated in C-contiguous style."""
    def __init__(self, size_ckp, n_ckp, dtype, compression, auto_pickle=False):
        # Raw byte pool; slots are addressed via get_location().
        size = size_ckp * n_ckp
        self.size_ckp = size_ckp
        self.n_ckp = n_ckp
        self.dtype = dtype
        self.storage = memoryview(bytearray(size))
        self.auto_pickle = auto_pickle
        # compression is a (compress, decompress) callable pair.
        self.compressor, self.decompressor = compression
        self.lengths = {}   # key -> list of compressed sizes, in save order
        self.metadata = {}  # key -> list of compression metadata objects
    """Returns a pointer to the contiguous chunk of memory reserved for the
    checkpoint with number `key`. May be a copy."""
    def __getitem__(self, key):
        ptr, start, end = self.get_location(key)
        return ptr[start:end]
    def get_location(self, key):
        """Return (buffer, start, end) byte offsets of slot ``key``.

        NOTE(review): start is scaled by size_ckp only, while end adds
        size_ckp * itemsize -- for itemsize > 1 slots would overlap, which
        suggests size_ckp is expected to be given in bytes; confirm.
        """
        assert(key < self.n_ckp)
        start = self.size_ckp * key
        end = start + self.size_ckp * np.dtype(self.dtype).itemsize
        return (self.storage, start, end)
    def save(self, key, data):
        """Compress each object in ``data`` and pack them into slot ``key``."""
        logger.debug("ByteStorage: Saving to location %d/%d" % (key,
                                                                self.n_ckp))
        dataset = [self.compressor(x) for x in data]
        logger.debug("ByteStorage: Compression complete")
        offset = 0
        sizes = []
        metadatas = []
        ptr, start, end = self.get_location(key)
        for compressed_object in dataset:
            if not (isinstance(compressed_object, CompressedObject)):
                if not self.auto_pickle:
                    raise TypeError("Expecting data to be bytes/bytearray, " +
                                    "found %s" % type(compressed_object))
                else:
                    # NOTE(review): this branch rebinds `data` (the whole
                    # argument) and never uses the pickled bytes for this
                    # item -- looks broken; confirm intended behaviour.
                    assert(isinstance(data, tuple) and len(data) == 2)
                    data, metadata = data
                    data = pickle.dumps(metadata)
            start += offset
            compressed_data = compressed_object.data
            metadata = compressed_object.metadata
            logger.debug("Start: %d, End: %d" % (start, end))
            allowed_size = end - start
            actual_size = len(compressed_data)
            logger.debug("Actual size: %d" % actual_size)
            # The compressed item must still fit in the remaining slot space.
            assert(actual_size <= allowed_size)
            logger.debug(type(compressed_data))
            self.storage[start:(start+actual_size)] = compressed_data
            sizes.append(actual_size)
            offset += actual_size
            metadatas.append(metadata)
        self.lengths[key] = sizes
        self.metadata[key] = metadatas
    def load(self, key, locations):
        """Decompress slot ``key`` back into the buffers in ``locations``."""
        logger.debug("ByteStorage: Loading from location %d" % key)
        ptr, start, end = self.get_location(key)
        sizes = self.lengths[key]
        metadatas = self.metadata[key]
        assert(len(locations) == len(sizes) == len(metadatas))
        offset = 0
        for actual_size, metadata, location in zip(sizes, metadatas,
                                                   locations):
            logger.debug("Start: %d, End: %d" % (start, end))
            start += offset
            compressed_data = self.storage[start:(start+actual_size)]
            compressed_object = CompressedObject(compressed_data,
                                                 metadata=metadata)
            decompressed = self.decompressor(compressed_object)
            location[:] = decompressed
            offset += actual_size
        logger.debug("ByteStorage: Load complete")
| opesci/pyrevolve | pyrevolve/storage.py | Python | epl-1.0 | 5,677 |
import soldiers;
import towers;
class Ability(object):
    """Base class for combat abilities.

    Subclasses set ``targetType`` (and optionally ``group``) and override
    ``applyTo`` with the actual effect; the base class only validates that
    the target has the right type and belongs to the configured group.
    """

    def __init__(self, owner):
        self.targetType = None
        self.group = "foes"  # friends/foes/traps
        self.frequency = "battle"  # battle/constant/# for uses/0.# for probability
        self.owner = owner

    def applyTo(self, target, friends, foes, traps=[]):
        """Return ``target`` when the ability applies to it, else None."""
        if not self.isApplicable(target, friends, foes, traps):
            return None  # Indicate problem
        return target

    def isApplicable(self, target, friends, foes, traps=[]):
        """Check the target's type and its membership in the configured group."""
        # SECURITY/IDIOM FIX: the group name used to be resolved with
        # eval(self.group); an explicit lookup is equivalent and safe.
        groups = {"friends": friends, "foes": foes, "traps": traps}
        return isinstance(target, self.targetType) and target in groups[self.group]

    def __str__(self):
        # Subclasses set self.msg after applying an effect; fall back to the
        # default representation when no message exists yet.
        try:
            return self.msg
        except AttributeError:  # narrowed from a bare except
            return object.__str__(self)
class Heal(Ability):
    """Restore 10% of the target soldier's maximum hp."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        # NOTE(review): group stays the inherited default "foes"; healing
        # enemies looks unintended -- confirm against the callers.

    def applyTo(self, target, friends, foes, traps=[]):
        # BUG FIX: the base-class call was missing the ``target`` argument
        # (it passed ``friends`` as the target), so this always raised a
        # TypeError for the missing ``foes`` parameter.
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return target
        target.currentHp += 0.10 * target.hp
        return target
class Harm(Ability):
    """Deal 10% of the target soldier's maximum hp as damage."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        # Idiom fix: identity comparison for the None sentinel (was ``== None``).
        if target is None:
            return None
        target.currentHp -= 0.10 * target.hp
        self.msg = "* %s HARMS %s, who now has %d hp remaining." % (str(self.owner), str(target), target.currentHp)
        return target
class Haste(Ability):
    """Double the target soldier's attack speed."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        # NOTE(review): group stays the inherited default "foes"; hasting
        # enemies looks unintended -- confirm against the callers.

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.attackSpeed *= 2.0
        return target
class Slow(Ability):
    """Halve the target soldier's attack speed."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.attackSpeed /= 2.0
        self.msg = "* %s SLOWS %s, who now has %d attacks per minute." % (str(self.owner), str(target), target.attackSpeed)
        return target
class DustToDust(Ability):
    """Reduce a tower's current hp to a tenth of its value."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = towers.TowerClass

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.currentHp /= 10.0
        self.msg = "* %s casts DUST TO DUST on %s, which now has %d hp remaining." % (str(self.owner), str(target), target.currentHp)
        return target
class BowAttack(Ability):
    """Ranged attack for 10% of the owner's attack value (minimum 1)."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "2"  # number of uses per battle

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.currentHp -= max(0.1 * self.owner.attack, 1)
        self.msg = "* %s attacks %s with a BOW, leaving the latter with %d hp." % (str(self.owner), str(target), target.currentHp)
        return target
class DaggerThrownAttack(Ability):
    """Ranged attack for 5% of the owner's attack value (minimum 1)."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "5"  # number of uses per battle

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.currentHp -= max(0.05 * self.owner.attack, 1)
        self.msg = "* %s THROWS A DAGGER to %s, leaving the latter with %d hp." % (str(self.owner), str(target), target.currentHp)
        return target
class CriticalAttack(Ability):
    """With 10% probability, hit for the owner's full attack value (minimum 1)."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "0.1"  # trigger probability

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.currentHp -= max(self.owner.attack, 1)
        self.msg = "* %s causes a CRITICAL HIT to %s, leaving the latter with %d hp." % (str(self.owner), str(target), target.currentHp)
        return target
class CharmFoe(Ability):
    """With 5% probability, convert an enemy soldier to the caster's side."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "0.05"  # trigger probability

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        # Moves the target between the caller's (mutable) side lists in place.
        foes.remove(target)
        friends.append(target)
        self.msg = "* %s CHARMS %s, who now changes sides!" % (str(self.owner), str(target))
        return target
class Rage(Ability):
    """Trade the owner's defence for attack speed as the owner loses hp."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "constant"  # re-applied continuously

    def applyTo(self, target, friends, foes, traps=[]):
        # ``target`` only gates activation; the effect applies to the owner.
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        missing_hp = self.owner.hp - self.owner.currentHp
        self.owner.defence /= 1.0 + missing_hp / (5 * self.owner.hp)
        self.owner.attackSpeed *= 1.0 + missing_hp / (20 * self.owner.hp)
        self.msg = "* %s is enraged! Defense is decreased. Attack speed is increased. Now defence is %4.2f and attack speed is %4.2f." % (str(self.owner), self.owner.defence, self.owner.attackSpeed)
        return target
class Exhaust(Ability):
    """With 10% probability, drain 10 points of the target's fullness."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "0.10"  # trigger probability

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.fullness -= 10
        self.msg = "* %s EXHAUSTS %s. Now fullness of the latter is %4.2f." % (str(self.owner), str(target), target.fullness)
        return target
class Poison(Ability):
    """With 20% probability, reduce the target's attack, speed and defence by 10%."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "0.20"  # trigger probability

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.attack *= 0.9
        target.attackSpeed *= 0.9
        target.defence *= 0.9
        self.msg = "* %s POISONS %s. Now attack, defence and speed of the latter are reduced." % (str(self.owner), str(target))
        return target
class Disease(Ability):
    """With 20% probability, make the target vulnerable to physical attacks."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = soldiers.SoldierClass
        self.frequency = "0.20"  # trigger probability

    def applyTo(self, target, friends, foes, traps=[]):
        target = Ability.applyTo(self, target, friends, foes)
        if target is None:
            return None
        target.vulnerabilities += ['physical']
        self.msg = "* %s causes DISEASE to %s. Now the target is vulnerable to physical attacks." % (str(self.owner), str(target))
        return target
from traps import *;
class MapLabyrinth(Ability):
    """With 90% probability, map a Labyrinth trap, rendering it useless (hp = 0)."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = Labyrinth
        self.group = "traps"
        self.frequency = "0.90"  # trigger probability

    def applyTo(self, target, friends, foes, traps):
        # BUG FIX: ``traps`` was not forwarded to the base class, so the
        # group-membership test ran against the empty default list and this
        # ability could never apply (cf. DisarmTrap / BridgeGap, which
        # forward it correctly).
        target = Ability.applyTo(self, target, friends, foes, traps)
        if target is None:
            return None
        target.hp = 0
        self.msg = "* %s MAPS %s, rendering it useless." % (str(self.owner), str(target))
        return target
class DisarmTrap(Ability):
    """With 90% probability, disable an ArrowSlit trap (hp = 0)."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = ArrowSlit
        self.group = "traps"
        self.frequency = "0.90"  # trigger probability

    def applyTo(self, target, friends, foes, traps):
        target = Ability.applyTo(self, target, friends, foes, traps)
        if target is None:
            return None
        target.hp = 0
        self.msg = "* %s DISABLES %s, rendering it useless." % (str(self.owner), str(target))
        return target
class BridgeGap(Ability):
    """With 90% probability, cover a Pit trap, rendering it useless (hp = 0)."""

    def __init__(self, owner):
        Ability.__init__(self, owner)
        self.targetType = Pit
        self.group = "traps"
        self.frequency = "0.90"  # trigger probability

    def applyTo(self, target, friends, foes, traps):
        target = Ability.applyTo(self, target, friends, foes, traps)
        if target is None:
            return None
        target.hp = 0
        self.msg = "* %s COVERS %s, rendering it useless." % (str(self.owner), str(target))
        return target
| ggianna/Foragers | abilities.py | Python | apache-2.0 | 9,200 |
# pylint: disable=R0201,E0711
"""This is the __init__ module for the plugins. It contains the base class for
all plugings
"""
import logging
import importlib
_LOG = logging.getLogger(__name__)
class SoCoPlugin(object):
    """ The base class for SoCo plugins """

    def __init__(self, soco):
        cls = self.__class__.__name__
        _LOG.info('Initializing SoCo plugin %s', cls)
        self.soco = soco

    @property
    def name(self):
        """ human-readable name of the plugin """
        # BUG FIX: ``NotImplemented`` is a non-callable sentinel constant;
        # calling it raised TypeError instead of the intended exception.
        raise NotImplementedError('Plugins should overwrite the name property')

    @classmethod
    def from_name(cls, fullname, soco, *args, **kwargs):
        """ Instantiate a plugin by its full name.

        ``fullname`` is a dotted path whose last component is the plugin
        class; the rest is the module to import it from.  Extra arguments
        are forwarded to the plugin constructor.
        """
        _LOG.info('Loading plugin %s', fullname)
        parts = fullname.split('.')
        modname = '.'.join(parts[:-1])
        clsname = parts[-1]
        mod = importlib.import_module(modname)
        cls = getattr(mod, clsname)
        _LOG.info('Loaded class %s', cls)
        return cls(soco, *args, **kwargs)
| xxdede/SoCo | soco/plugins/__init__.py | Python | mit | 1,031 |
from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired
class LoginForm(Form):
    """Sign-in form: e-mail and password, with an optional remember-me flag."""
    # Both credential fields are required; failures surface as WTForms
    # validation errors on the field.
    email = StringField('email', validators=[DataRequired()])
    password = PasswordField('password', validators=[DataRequired()])
    remember_me = BooleanField('remember_me', default=False)
| russorat/savage-leads | settings/models/loginForm.py | Python | apache-2.0 | 353 |
import json
from zoom.common.types import UpdateType
class TimeEstimateMessage(object):
    """Accumulates timing-estimate data and serialises it for clients.

    The message is tagged with ``UpdateType.TIMING_UPDATE`` so receivers can
    dispatch on the ``update_type`` key of the JSON payload.
    """

    def __init__(self):
        self._message_type = UpdateType.TIMING_UPDATE
        self._contents = {}

    @property
    def message_type(self):
        """Tag identifying this message as a timing update."""
        return self._message_type

    @property
    def contents(self):
        """The timing data accumulated so far (a dict)."""
        return self._contents

    def update(self, item):
        """Merge ``item`` (a dict) into the accumulated contents.

        :type item: dict
        """
        self._contents.update(item)

    def combine(self, message):
        """Absorb the contents of another message of the same kind.

        :type message: TimeEstimateMessage
        """
        self._contents.update(message.contents)

    def clear(self):
        """Drop all accumulated timing data."""
        self._contents.clear()

    def to_json(self):
        """Serialise the message; the type tag comes first, then the data."""
        payload = {"update_type": self._message_type}
        payload.update(self._contents)
        return json.dumps(payload)
| spottradingllc/zoom | server/zoom/www/messages/timing_estimate.py | Python | gpl-2.0 | 855 |
from os.path import join
from scipy._build_utils import numpy_nodepr_api
def configuration(parent_package='', top_path=None):
    """numpy.distutils configuration for the ``scipy.odr`` subpackage.

    Builds the Fortran ``odrpack`` static library (using the system's
    optimised BLAS when one is found, otherwise the bundled reference BLAS
    routines) and the ``__odrpack`` C extension linked against it.
    """
    import warnings
    from numpy.distutils.misc_util import Configuration
    from scipy._build_utils.system_info import get_info, BlasNotFoundError
    config = Configuration('odr', parent_package, top_path)
    libodr_files = ['d_odr.f',
                    'd_mprec.f',
                    'dlunoc.f']
    blas_info = get_info('blas_opt')
    if blas_info:
        # Optimised BLAS found: compile the thin LAPACK shim only.
        libodr_files.append('d_lpk.f')
    else:
        # No BLAS: warn and fall back to the bundled reference routines.
        warnings.warn(BlasNotFoundError.__doc__)
        libodr_files.append('d_lpkbls.f')
    odrpack_src = [join('odrpack', x) for x in libodr_files]
    config.add_library('odrpack', sources=odrpack_src)
    sources = ['__odrpack.c']
    libraries = ['odrpack'] + blas_info.pop('libraries', [])
    include_dirs = ['.'] + blas_info.pop('include_dirs', [])
    # Propagate the NumPy no-deprecated-API macros into the extension build.
    # NOTE(review): assumes blas_info always carries 'define_macros' -- confirm.
    blas_info['define_macros'].extend(numpy_nodepr_api['define_macros'])
    config.add_extension('__odrpack',
                         sources=sources,
                         libraries=libraries,
                         include_dirs=include_dirs,
                         depends=(['odrpack.h'] + odrpack_src),
                         **blas_info
                         )
    config.add_data_dir('tests')
    return config
if __name__ == '__main__':
    # Allow building this subpackage standalone.
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
| aeklant/scipy | scipy/odr/setup.py | Python | bsd-3-clause | 1,348 |
import pytest
import numpy as np
from nimble.sources import IntegerSource
from nimble.composition import ConcatenateSource, VStackSource
def test_concatenate_sources():
    """Exercise the seek/advance protocol of ConcatenateSource over three
    equal-length integer sources laid end to end."""
    nb_elements = 14
    ints_a = IntegerSource(stop=nb_elements)
    ints_b = IntegerSource(stop=nb_elements)
    ints_c = IntegerSource(stop=nb_elements)
    s = ConcatenateSource([ints_a, ints_b, ints_c])
    # Capabilities of the parts carry over to the concatenation.
    assert s.seekable
    assert s.cached
    assert s.parallel_possible
    assert s.size == 3 * nb_elements
    # No current element before the first advance().
    assert s.get_data() is None
    assert s.advance()
    assert s.get_data() == 0
    # Seeking to the start of the second part restarts the value sequence.
    assert s.seek(nb_elements)
    assert s.get_data() == 0
    assert s.advance()
    assert s.get_data() == 1
    # Seeking past the end fails and leaves the current element untouched.
    assert not s.seek(s.size)
    assert s.get_data() == 1
    # Values repeat with the period of a single part until exhaustion.
    while s.advance():
        assert s.get_data() == s.position % nb_elements
def test_concatenate_same_source():
    """The same source object may appear twice in a ConcatenateSource, and
    random access on the underlying source must not disturb the
    concatenation's cursor."""
    nb_elements = 345
    source = IntegerSource(stop=nb_elements)
    concat = ConcatenateSource([source, source])
    assert concat.size == 2*nb_elements
    assert concat.seekable
    assert concat.cached
    assert concat.parallel_possible
    assert concat.advance()
    assert concat.get_data() == 0
    assert concat.advance()
    assert concat.get_data() == 1
    # Direct random access on the shared source...
    assert source._get_data_at(42) == 42
    # ...does not move the concatenation's current element.
    assert concat.get_data() == 1
def test_vstack_source():
    """VStackSource groups ``dim_size`` consecutive elements into one
    (dim_size, 1) array per step."""
    ints = IntegerSource()
    dim_size = 8
    stacked = VStackSource(ints, dim_size)
    assert stacked.has_fixed_shape()
    assert stacked.shape == (8, 1)
    assert stacked.advance()
    data = stacked.get_data()
    assert data.shape == stacked.shape == (8, 1)
    # First stacked block is simply 0..7 in column form.
    np.testing.assert_equal(stacked.get_data(), np.reshape(np.arange(8), data.shape))
    # A seek that would leave less than a full block must fail.
    assert not stacked.seek(ints.size - (dim_size//2))
if __name__ == '__main__':
    # Allow running this test module directly: python test_composition.py
    pytest.main([__file__])
| risteon/nimble | tests/nimble/test_composition.py | Python | mit | 1,803 |
"""
Stub Controller for testing
"""
def run():
    """Stub controller entry point; intentionally a no-op used for testing."""
| sprockets/sprockets.cli | sstubs/app.py | Python | bsd-3-clause | 59 |
from setuptools import setup
def readme():
    """Return the contents of README.rst for use as the long description."""
    # Read as UTF-8 explicitly so the build does not depend on the locale
    # (the package declares Python 3 support only, so the keyword is safe).
    with open('README.rst', encoding='utf-8') as f:
        return f.read()
# Package metadata and dependency declaration for the oasis sampler.
setup(name='oasis',
      version='0.1.3',
      description='Optimal Asymptotic Sequential Importance Sampling',
      long_description=readme(),
      keywords='F-measure active sampling evaluation classification recall precision',
      url='http://ngmarchant.github.io/oasis',
      author='Neil G. Marchant',
      author_email='ngmarchant@gmail.com',
      license='MIT',
      packages=['oasis'],
      install_requires=[
          'numpy',
          'tables',
          'scipy',
          # BUG FIX: 'sklearn' is a deprecated dummy name on PyPI; the real
          # distribution providing the sklearn package is 'scikit-learn'.
          'scikit-learn'
      ],
      include_package_data=True,
      zip_safe=False,
      classifiers=['Intended Audience :: Science/Research',
                   'License :: OSI Approved :: MIT License',
                   'Programming Language :: Python',
                   'Topic :: Software Development',
                   'Topic :: Scientific/Engineering',
                   'Topic :: Scientific/Engineering :: Artificial Intelligence',
                   'Programming Language :: Python :: 3',]
      )
| ngmarchant/oasis | setup.py | Python | mit | 1,052 |
import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import tmp_dnn
import tta
# (variable-scale, fixed-scale) input patch sizes for the two network branches.
patch_sizes = [(95, 95), (47, 47)]

# Ranges for random train-time augmentation.
augmentation_params = {
    'zoom_range': (1 / 1.6, 1.6),
    'rotation_range': (0, 360),
    'shear_range': (-20, 20),
    'translation_range': (-10, 10),
    'do_flip': True,
    'allow_stretch': 1.3,
}

batch_size = 128 // 4
chunk_size = 32768 // 4
num_chunks_train = 840

momentum = 0.9
# Chunk index -> learning rate (step decay near the end of training).
learning_rate_schedule = {
    0: 0.0015,
    700: 0.00015,
    800: 0.000015,
}

validate_every = 20
save_every = 20
def estimate_scale(img):
    """Scale factor that maps the image's longer side to roughly 85 pixels."""
    longest_side = np.maximum(img.shape[0], img.shape[1])
    return longest_side / 85.0
scale_factors = [estimate_scale, 5.0] # combine size-based rescaling + fixed rescaling

# Earlier, grid-based test-time augmentation kept for reference:
# augmentation_transforms_test = []
# for flip in [True, False]:
#     for zoom in [1/1.3, 1/1.2, 1/1.1, 1.0, 1.1, 1.2, 1.3]:
#         for rot in np.linspace(0.0, 360.0, 5, endpoint=False):
#             tf = data.build_augmentation_transform(zoom=(zoom, zoom), rotation=rot, flip=flip)
#             augmentation_transforms_test.append(tf)

# Quasi-random test-time augmentation: 70 transforms drawn from slightly
# narrower ranges than the train-time augmentation above.
augmentation_transforms_test = tta.build_quasirandom_transforms(70, **{
    'zoom_range': (1 / 1.4, 1.4),
    'rotation_range': (0, 360),
    'shear_range': (-10, 10),
    'translation_range': (-8, 8),
    'do_flip': True,
    'allow_stretch': 1.2,
})

data_loader = load.ZmuvMultiscaleDataLoader(scale_factors=scale_factors, num_chunks_train=num_chunks_train,
    patch_sizes=patch_sizes, chunk_size=chunk_size, augmentation_params=augmentation_params,
    augmentation_transforms_test=augmentation_transforms_test)

# cuda-convnet implementations kept for reference:
# Conv2DLayer = nn.layers.cuda_convnet.Conv2DCCLayer
# MaxPool2DLayer = nn.layers.cuda_convnet.MaxPool2DCCLayer
# Use the cuDNN-backed layer implementations.
Conv2DLayer = tmp_dnn.Conv2DDNNLayer
MaxPool2DLayer = tmp_dnn.MaxPool2DDNNLayer
def build_model():
    """Build the two-branch convnet with cyclic (rotational) weight sharing.

    One branch sees the size-normalised patch, the other a fixed-scale patch;
    their pooled features are concatenated before the softmax classifier.
    Returns ([input_layer_variable, input_layer_fixed], output_layer).
    """
    # --- variable-scale branch ---
    l0_variable = nn.layers.InputLayer((batch_size, 1, patch_sizes[0][0], patch_sizes[0][1]))
    l0c = dihedral.CyclicSliceLayer(l0_variable)
    l1a = Conv2DLayer(l0c, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l1b = Conv2DLayer(l1a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    l1r = dihedral.CyclicConvRollLayer(l1)
    l2a = Conv2DLayer(l1r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l2b = Conv2DLayer(l2a, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral.CyclicConvRollLayer(l2)
    l3a = Conv2DLayer(l2r, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l3b = Conv2DLayer(l3a, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l3c = Conv2DLayer(l3b, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral.CyclicConvRollLayer(l3)
    l4a = Conv2DLayer(l3r, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l4b = Conv2DLayer(l4a, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l4c = Conv2DLayer(l4b, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l4 = MaxPool2DLayer(l4c, ds=(3, 3), strides=(2, 2))
    l4r = dihedral.CyclicConvRollLayer(l4)
    l4f = nn.layers.flatten(l4r)
    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4f, p=0.5), num_units=256, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l5r = dihedral.CyclicRollLayer(l5)
    l6 = nn.layers.DenseLayer(nn.layers.dropout(l5r, p=0.5), num_units=256, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    # RMS-pool over the four cyclic orientations.
    l_variable = dihedral.CyclicPoolLayer(l6, pool_function=nn_plankton.rms)
    # fixed scale part
    l0_fixed = nn.layers.InputLayer((batch_size, 1, patch_sizes[1][0], patch_sizes[1][1]))
    l0c = dihedral.CyclicSliceLayer(l0_fixed)
    l1a = Conv2DLayer(l0c, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l1b = Conv2DLayer(l1a, num_filters=8, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    l1r = dihedral.CyclicConvRollLayer(l1)
    l2a = Conv2DLayer(l1r, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l2b = Conv2DLayer(l2a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral.CyclicConvRollLayer(l2)
    l3a = Conv2DLayer(l2r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l3b = Conv2DLayer(l3a, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l3c = Conv2DLayer(l3b, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1))
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral.CyclicConvRollLayer(l3)
    l3f = nn.layers.flatten(l3r)
    l4 = nn.layers.DenseLayer(nn.layers.dropout(l3f, p=0.5), num_units=128, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1))
    l4r = dihedral.CyclicRollLayer(l4)
    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4r, p=0.5), num_units=128, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1))
    l_fixed = dihedral.CyclicPoolLayer(l5, pool_function=nn_plankton.rms)
    # merge the parts
    l_merged = nn.layers.concat([l_variable, l_fixed])
    l7 = nn.layers.DenseLayer(nn.layers.dropout(l_merged, p=0.5), num_units=data.num_classes, nonlinearity=T.nnet.softmax, W=nn_plankton.Orthogonal(1.0))
    return [l0_variable, l0_fixed], l7
| yejingxin/kaggle-ndsb | configurations/convroll4_doublescale_fs5.py | Python | mit | 6,898 |
import itertools
# Probability that a uniformly random K-combination of the given letters
# contains at least one 'a' (HackerRank "Iterables and Iterators" task).
N = int(input())          # number of letters; read to honour the input format
S = input().split(' ')    # the letters themselves
K = int(input())          # combination size
# Stream the combinations instead of materialising the full list
# (the original built list(itertools.combinations(...)) in memory).
total = 0
favorable = 0
for combo in itertools.combinations(S, K):
    total += 1
    if 'a' in combo:
        favorable += 1
print(favorable / total)
| kakaba2009/MachineLearning | python/src/algorithm/coding/itertools/probability.py | Python | apache-2.0 | 197 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 30, transform = "RelativeDifference", sigma = 0.0, exog_count = 0, ar_order = 0); | antoinecarme/pyaf | tests/artificial/transf_RelativeDifference/trend_Lag1Trend/cycle_30/ar_/test_artificial_1024_RelativeDifference_Lag1Trend_30__0.py | Python | bsd-3-clause | 274 |
# -*- test-case-name: twisted.test.test_newcred-*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
from zope.interface import implements, Interface
import hmac, time, random
from twisted.python.hashlib import md5
from twisted.python.randbytes import secureRandom
from twisted.cred._digest import calcResponse, calcHA1, calcHA2
from twisted.cred import error
class ICredentials(Interface):
    """
    I check credentials.

    This is a marker interface: it declares no methods of its own.

    Implementors _must_ specify which sub-interfaces of ICredentials
    to which it conforms, using zope.interface.implements().
    """
class IUsernameDigestHash(ICredentials):
    """
    This credential is used when a CredentialChecker has access to the hash
    of the username:realm:password as in an Apache .htdigest file.
    """
    def checkHash(digestHash):
        """
        Check the given hash against these credentials.

        @param digestHash: The hashed username:realm:password to check against.

        @return: C{True} if the credentials represented by this object match
            the given hash, C{False} if they do not, or a L{Deferred} which
            will be called back with one of these values.
        """
class IUsernameHashedPassword(ICredentials):
    """
    I encapsulate a username and a hashed password.

    This credential is used when a hashed password is received from the
    party requesting authentication.  CredentialCheckers which check this
    kind of credential must store the passwords in plaintext (or as
    password-equivalent hashes) form so that they can be hashed in a manner
    appropriate for the particular credentials class.

    @type username: C{str}
    @ivar username: The username associated with these credentials.
    """
    def checkPassword(password):
        """
        Validate these credentials against the correct password.

        @param password: The correct, plaintext password against which to
            check.

        @return: a deferred which becomes, or a boolean indicating if the
            password matches.
        """
class IUsernamePassword(ICredentials):
    """
    I encapsulate a username and a plaintext password.

    This encapsulates the case where the password received over the network
    has been hashed with the identity function (That is, not at all).  The
    CredentialsChecker may store the password in whatever format it desires,
    it need only transform the stored password in a similar way before
    performing the comparison.

    @type username: C{str}
    @ivar username: The username associated with these credentials.

    @type password: C{str}
    @ivar password: The password associated with these credentials.
    """
    def checkPassword(password):
        """
        Validate these credentials against the correct password.

        @param password: The correct, plaintext password against which to
            check.

        @return: a deferred which becomes, or a boolean indicating if the
            password matches.
        """
class IAnonymous(ICredentials):
    """
    I am an explicitly anonymous request for access.

    Carries no state; the credential's presence alone conveys the request.
    """
class DigestedCredentials(object):
    """
    Yet Another Simple HTTP Digest authentication scheme.

    Wraps the parsed fields of a Digest response so checkers can verify a
    plaintext password or a precomputed H(A1) against it.
    """
    implements(IUsernameHashedPassword, IUsernameDigestHash)

    def __init__(self, username, method, realm, fields):
        self.username = username
        self.method = method
        self.realm = realm
        self.fields = fields  # raw key/value pairs parsed from the Digest header

    def checkPassword(self, password):
        """
        Verify that the credentials represented by this object agree with the
        given plaintext C{password} by hashing C{password} in the same way the
        response hash represented by this object was generated and comparing
        the results.
        """
        response = self.fields.get('response')
        uri = self.fields.get('uri')
        nonce = self.fields.get('nonce')
        cnonce = self.fields.get('cnonce')
        nc = self.fields.get('nc')
        algo = self.fields.get('algorithm', 'md5').lower()
        qop = self.fields.get('qop', 'auth')

        # Recompute the RFC 2617 response from the plaintext and compare with
        # what the client actually sent.
        expected = calcResponse(
            calcHA1(algo, self.username, self.realm, password, nonce, cnonce),
            calcHA2(algo, self.method, uri, qop, None),
            algo, nonce, nc, cnonce, qop)

        return expected == response

    def checkHash(self, digestHash):
        """
        Verify that the credentials represented by this object agree with the
        credentials represented by the I{H(A1)} given in C{digestHash}.

        @param digestHash: A precomputed H(A1) value based on the username,
            realm, and password associate with this credentials object.
        """
        response = self.fields.get('response')
        uri = self.fields.get('uri')
        nonce = self.fields.get('nonce')
        cnonce = self.fields.get('cnonce')
        nc = self.fields.get('nc')
        algo = self.fields.get('algorithm', 'md5').lower()
        qop = self.fields.get('qop', 'auth')

        # Same as checkPassword, but seed calcHA1 with the precomputed H(A1)
        # instead of deriving it from a plaintext password.
        expected = calcResponse(
            calcHA1(algo, None, None, None, nonce, cnonce, preHA1=digestHash),
            calcHA2(algo, self.method, uri, qop, None),
            algo, nonce, nc, cnonce, qop)

        return expected == response
class DigestCredentialFactory(object):
    """
    Support for RFC2617 HTTP Digest Authentication

    @cvar CHALLENGE_LIFETIME_SECS: The number of seconds for which an
        opaque should be valid.

    @type privateKey: C{str}
    @ivar privateKey: A random string used for generating the secure opaque.

    @type algorithm: C{str}
    @param algorithm: Case insensitive string specifying the hash algorithm to
        use.  Must be either C{'md5'} or C{'sha'}.  C{'md5-sess'} is B{not}
        supported.

    @type authenticationRealm: C{str}
    @param authenticationRealm: case sensitive string that specifies the realm
        portion of the challenge

    NOTE(review): this is Python 2 code -- C{str.encode('hex')} and
    C{str.encode('base64')} below do not exist on Python 3.
    """

    CHALLENGE_LIFETIME_SECS = 15 * 60    # 15 minutes

    scheme = "digest"

    def __init__(self, algorithm, authenticationRealm):
        self.algorithm = algorithm
        self.authenticationRealm = authenticationRealm
        # Secret used to authenticate the opaques we hand out.
        self.privateKey = secureRandom(12)

    def getChallenge(self, address):
        """
        Generate the challenge for use in the WWW-Authenticate header.

        @param address: The client address to which this challenge is being
            sent.

        @return: The C{dict} that can be used to generate a WWW-Authenticate
            header.
        """
        c = self._generateNonce()
        o = self._generateOpaque(c, address)

        return {'nonce': c,
                'opaque': o,
                'qop': 'auth',
                'algorithm': self.algorithm,
                'realm': self.authenticationRealm}

    def _generateNonce(self):
        """
        Create a random value suitable for use as the nonce parameter of a
        WWW-Authenticate challenge.

        @rtype: C{str}
        """
        return secureRandom(12).encode('hex')

    def _getTime(self):
        """
        Parameterize the time based seed used in C{_generateOpaque}
        so we can deterministically unittest it's behavior.
        """
        return time.time()

    def _generateOpaque(self, nonce, clientip):
        """
        Generate an opaque to be returned to the client.  This is a unique
        string that can be returned to us and verified.
        """
        # Now, what we do is encode the nonce, client ip and a timestamp in the
        # opaque value with a suitable digest.
        now = str(int(self._getTime()))
        if clientip is None:
            clientip = ''
        key = "%s,%s,%s" % (nonce, clientip, now)
        # The digest authenticates the key against our private secret.
        digest = md5(key + self.privateKey).hexdigest()
        ekey = key.encode('base64')
        return "%s-%s" % (digest, ekey.strip('\n'))

    def _verifyOpaque(self, opaque, nonce, clientip):
        """
        Given the opaque and nonce from the request, as well as the client IP
        that made the request, verify that the opaque was generated by us.
        And that it's not too old.

        @param opaque: The opaque value from the Digest response
        @param nonce: The nonce value from the Digest response
        @param clientip: The remote IP address of the client making the request
            or C{None} if the request was submitted over a channel where this
            does not make sense.

        @return: C{True} if the opaque was successfully verified.

        @raise error.LoginFailed: if C{opaque} could not be parsed or
            contained the wrong values.
        """
        # First split the digest from the key
        opaqueParts = opaque.split('-')
        if len(opaqueParts) != 2:
            raise error.LoginFailed('Invalid response, invalid opaque value')

        if clientip is None:
            clientip = ''

        # Verify the key: it must decode to the same nonce/client ip that the
        # response claims, and carry a parseable timestamp.
        key = opaqueParts[1].decode('base64')
        keyParts = key.split(',')

        if len(keyParts) != 3:
            raise error.LoginFailed('Invalid response, invalid opaque value')

        if keyParts[0] != nonce:
            raise error.LoginFailed(
                'Invalid response, incompatible opaque/nonce values')

        if keyParts[1] != clientip:
            raise error.LoginFailed(
                'Invalid response, incompatible opaque/client values')

        try:
            when = int(keyParts[2])
        except ValueError:
            raise error.LoginFailed(
                'Invalid response, invalid opaque/time values')

        if (int(self._getTime()) - when >
            DigestCredentialFactory.CHALLENGE_LIFETIME_SECS):
            raise error.LoginFailed(
                'Invalid response, incompatible opaque/nonce too old')

        # Verify the digest
        digest = md5(key + self.privateKey).hexdigest()
        if digest != opaqueParts[0]:
            raise error.LoginFailed('Invalid response, invalid opaque value')

        return True

    def decode(self, response, method, host):
        """
        Decode the given response and attempt to generate a
        L{DigestedCredentials} from it.

        @type response: C{str}
        @param response: A string of comma seperated key=value pairs

        @type method: C{str}
        @param method: The action requested to which this response is addressed
            (GET, POST, INVITE, OPTIONS, etc).

        @type host: C{str}
        @param host: The address the request was sent from.

        @raise error.LoginFailed: If the response does not contain a username,
            a nonce, an opaque, or if the opaque is invalid.

        @return: L{DigestedCredentials}
        """
        def unq(s):
            # Strip one level of surrounding double quotes, if present.
            if s[0] == s[-1] == '"':
                return s[1:-1]
            return s
        response = ' '.join(response.splitlines())
        # NOTE(review): a quoted value containing a comma would be split
        # incorrectly here -- confirm against the header grammar in use.
        parts = response.split(',')

        auth = {}

        for (k, v) in [p.split('=', 1) for p in parts]:
            auth[k.strip()] = unq(v.strip())

        username = auth.get('username')
        if not username:
            raise error.LoginFailed('Invalid response, no username given.')

        if 'opaque' not in auth:
            raise error.LoginFailed('Invalid response, no opaque given.')

        if 'nonce' not in auth:
            raise error.LoginFailed('Invalid response, no nonce given.')

        # Now verify the nonce/opaque values for this client
        if self._verifyOpaque(auth.get('opaque'), auth.get('nonce'), host):
            return DigestedCredentials(username,
                                       method,
                                       self.authenticationRealm,
                                       auth)
class CramMD5Credentials:
    """
    CRAM-MD5 challenge/response credentials (RFC 2195).
    """
    implements(IUsernameHashedPassword)

    challenge = ''
    response = ''

    def __init__(self, host=None):
        self.host = host

    def getChallenge(self):
        # Generate the challenge lazily, and only once per instance.
        if self.challenge:
            return self.challenge
        # The data encoded in the first ready response contains an
        # presumptively arbitrary string of random digits, a timestamp, and
        # the fully-qualified primary host name of the server.  The syntax of
        # the unencoded form must correspond to that of an RFC 822 'msg-id'
        # [RFC822] as described in [POP3].
        #   -- RFC 2195
        r = random.randrange(0x7fffffff)
        t = time.time()
        self.challenge = '<%d.%d@%s>' % (r, t, self.host)
        return self.challenge

    def setResponse(self, response):
        # The client reply has the form "<username> <hex digest>".
        self.username, self.response = response.split(None, 1)

    def moreChallenges(self):
        return False

    def checkPassword(self, password):
        # HMAC-MD5 (the hmac module's default) of the challenge, keyed with
        # the shared secret.
        verify = hmac.HMAC(password, self.challenge).hexdigest()
        return verify == self.response
class UsernameHashedPassword:
    """
    A username plus an already-hashed password.

    NOTE(review): checkPassword compares the stored hash directly against the
    supplied value, so callers are expected to pass the hash, not plaintext.
    """
    implements(IUsernameHashedPassword)

    def __init__(self, username, hashed):
        self.username = username
        self.hashed = hashed

    def checkPassword(self, password):
        # Direct comparison: ``password`` must already be in hashed form.
        return self.hashed == password
class UsernamePassword:
    """A username plus a plaintext password."""
    implements(IUsernamePassword)

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def checkPassword(self, password):
        # Plaintext equality check, per IUsernamePassword.
        return self.password == password
class Anonymous:
    """Empty credentials marker for anonymous (unauthenticated) access."""
    implements(IAnonymous)
class ISSHPrivateKey(ICredentials):
    """
    L{ISSHPrivateKey} credentials encapsulate an SSH public key to be checked
    against a user's private key.

    Implemented by L{SSHPrivateKey}.

    @ivar username: The username associated with these credentials.
    @type username: C{str}
    @ivar algName: The algorithm name for the blob.
    @type algName: C{str}
    @ivar blob: The public key blob as sent by the client.
    @type blob: C{str}
    @ivar sigData: The data the signature was made from.
    @type sigData: C{str}
    @ivar signature: The signed data.  This is checked to verify that the user
        owns the private key.
    @type signature: C{str} or C{NoneType}
    """
class SSHPrivateKey:
    """Concrete holder for an SSH public-key authentication attempt.

    See L{ISSHPrivateKey} for the meaning of each attribute.
    """
    implements(ISSHPrivateKey)

    def __init__(self, username, algName, blob, sigData, signature):
        # Identity being claimed.
        self.username = username
        # Key material: algorithm name and raw public-key blob.
        self.algName = algName
        self.blob = blob
        # Proof of ownership: signed data plus the signature itself.
        self.sigData = sigData
        self.signature = signature
class IPluggableAuthenticationModules(ICredentials):
    """I encapsulate the authentication of a user via PAM (Pluggable
    Authentication Modules).  I use PyPAM (available from
    http://www.tummy.com/Software/PyPam/index.html).

    @ivar username: The username for the user being logged in.

    @ivar pamConversion: A function that is called with a list of tuples
    (message, messageType).  See the PAM documentation
    for the meaning of messageType.  The function
    returns a Deferred which will fire with a list
    of (response, 0), one for each message.  The 0 is
    currently unused, but is required by the PAM library.
    """
class PluggableAuthenticationModules:
    """Concrete PAM credentials; see L{IPluggableAuthenticationModules}."""
    implements(IPluggableAuthenticationModules)

    def __init__(self, username, pamConversion):
        # Callback driving the PAM conversation (returns a Deferred).
        self.pamConversion = pamConversion
        self.username = username
| Donkyhotay/MoonPy | twisted/cred/credentials.py | Python | gpl-3.0 | 15,066 |
from amon.apps.core.views import *
from amon.apps.servers.models import server_model
from amon.apps.alerts.models import alerts_model
from amon.apps.alerts.forms import HealthCheckAlertForm, EditHealthCheckAlertForm
from amon.apps.tags.models import tags_model
from amon.apps.notifications.models import notifications_model
@login_required
def add_alert(request):
    """Create a new health-check alert.

    GET renders an empty form; POST validates it, merges the raw
    command/param/tags/notification fields into the cleaned data and saves
    the alert, then redirects back to the alerts list.  An invalid POST
    re-renders the bound form with its errors.
    """
    all_servers = server_model.get_all()
    tags = tags_model.get_all()
    notifications = notifications_model.get_all_formated()

    if request.method == 'POST':
        form = HealthCheckAlertForm(request.POST, all_servers=all_servers)
        if form.is_valid():
            alert_data = form.cleaned_data
            alert_data.update({
                "command": request.POST.get('command'),
                "param": request.POST.get('param'),
                "tags": request.POST.getlist('tags'),
                "notifications": request.POST.getlist('notifications'),
                "rule_type": "health_check",
            })
            alerts_model.save(alert_data)
            return redirect(reverse('alerts'))
    else:
        form = HealthCheckAlertForm(all_servers=all_servers)

    context = {
        "form": form,
        'tags': tags,
        'notifications': notifications,
        "all_servers": all_servers,
    }
    return render(request, 'alerts/add_healthcheck.html', context)
@login_required
def edit_alert(request, alert_id):
    """Edit an existing health-check alert.

    GET pre-fills the form from the stored alert; POST validates and writes
    the updated fields back via ``alerts_model.update``.  An invalid POST
    re-renders the bound form with its errors.
    """
    all_servers = server_model.get_all(account_id=request.account_id)
    alert = alerts_model.get_by_id(alert_id, recipients_dict=False)
    tags = tags_model.get_all()
    # The alert may target one server or be global (server is None then).
    server = alert.get('server', None)
    selected_command = " ".join([alert.get("command", ""), alert.get('params', "")])
    notifications = notifications_model.get_all_formated()

    if request.method == 'POST':
        form = EditHealthCheckAlertForm(request.POST, all_servers=all_servers)
        if form.is_valid():
            cleaned = form.cleaned_data
            alerts_model.update({
                "tags": request.POST.getlist('tags', None),
                "status": cleaned.get('status'),
                "period": cleaned.get('period'),
                "server": server,
                "notifications": request.POST.getlist('notifications'),
            }, alert_id)
            return redirect(reverse('alerts'))
    else:
        form = EditHealthCheckAlertForm(
            all_servers=all_servers,
            initial={
                'period': alert['period'],
                'server': server,
                "status": alert['status'],
            })

    return render(request, 'alerts/edit_healthcheck.html', {
        "server": server,
        'tags': tags,
        "alert": alert,
        "form": form,
        "selected_command": selected_command,
        "notifications": notifications,
    })
| martinrusev/amonone | amon/apps/alerts/views/healthchecks.py | Python | mit | 2,930 |
# -*- coding: utf-8 -*-
# Copyright: Damien Elmes <anki@ichi2.net>
# License: GNU GPL, version 3 or later; http://www.gnu.org/copyleft/gpl.html
"""\
Hooks - hook management and tools for extending Anki
==============================================================================
To find available hooks, grep for runHook in the source code.
Instrumenting allows you to modify functions that don't have hooks available.
If you call wrap() with pos='around', the original function will not be called
automatically but can be called with _old().
"""
# Hooks
##############################################################################
# Registry mapping hook name -> list of callbacks, managed by addHook() /
# removeHook() and consumed by runHook() / runFilter().
_hooks = {}
def runHook(hook, *args):
    """Invoke every function registered on *hook*, passing *args* to each."""
    for func in _hooks.get(hook, []):
        func(*args)
def runFilter(hook, arg, *args):
    """Thread *arg* through every function on *hook* and return the result.

    Each registered function receives the current value plus *args* and
    must return the (possibly modified) value for the next function.
    """
    for func in _hooks.get(hook, []):
        arg = func(arg, *args)
    return arg
def addHook(hook, func):
    """Add a function to hook.  Ignore if already on hook."""
    callbacks = _hooks.setdefault(hook, [])
    if func not in callbacks:
        callbacks.append(func)
def removeHook(hook, func):
    """Remove a function from hook if it is registered there."""
    try:
        _hooks.get(hook, []).remove(func)
    except ValueError:
        # func was not on this hook; nothing to do.
        pass
# Instrumenting
##############################################################################
def wrap(old, new, pos="after"):
    """Override an existing function.

    Args:
        old: the function being instrumented.
        new: the replacement / augmenting function.
        pos: "after"  -> call old, then new; return new's result.
             "before" -> call new, then old; return old's result.
             anything else ("around") -> call only new, passing old as the
             keyword argument _old so new can invoke it itself.

    Returns:
        The wrapper to install in place of old.  functools.wraps preserves
        old's name/docstring so tracebacks and introspection stay readable.
    """
    import functools

    @functools.wraps(old)
    def repl(*args, **kwargs):
        if pos == "after":
            old(*args, **kwargs)
            return new(*args, **kwargs)
        elif pos == "before":
            new(*args, **kwargs)
            return old(*args, **kwargs)
        else:
            # "around": old is not called automatically; new decides via _old.
            return new(_old=old, *args, **kwargs)
    return repl
| jmazon/libanki | anki/hooks.py | Python | gpl-3.0 | 1,806 |
from django.contrib import admin
from .models import ItemSet, Item
# Expose both models in the Django admin with the default ModelAdmin options.
admin.site.register(Item)
admin.site.register(ItemSet)
| jammons/prioritize | prioritize/admin.py | Python | bsd-3-clause | 123 |
#!/usr/bin/python
"""Launch four pi-temp.py workers concurrently in the background."""
# Imports
import os
# Run pi-temp.py multiple times in the background.  The trailing '&' makes
# each shell return immediately, so all four processes run concurrently.
os.system('python pi-temp.py &')
os.system('python pi-temp.py &')
os.system('python pi-temp.py &')
os.system('python pi-temp.py &')
# write `ps -ef` to see all the scripts running
# write `kill -9 <id>` to stop a background script
| Belax8/my-pi-projects | Tests/background-test.py | Python | mit | 326 |
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mock import patch
from oslo_serialization import jsonutils
from sqlalchemy.sql import not_
from nailgun import objects
from nailgun import consts
from nailgun.db.sqlalchemy.models import Cluster
from nailgun.db.sqlalchemy.models import NetworkGroup
from nailgun.db.sqlalchemy.models import Release
from nailgun.network.nova_network import NovaNetworkManager
from nailgun.test.base import BaseIntegrationTest
from nailgun.utils import reverse
class TestHandlers(BaseIntegrationTest):
    """Integration tests for the cluster collection handlers.

    Covers cluster listing/creation, default network generation for new
    clusters, and triggering of the network verification task.
    """

    def _get_cluster_networks(self, cluster_id):
        """Return the 'networks' section of a cluster's nova-network config."""
        nets = self.app.get(
            reverse('NovaNetworkConfigurationHandler',
                    {"cluster_id": cluster_id}),
            headers=self.default_headers,
        ).json_body["networks"]
        return nets

    def test_cluster_list_empty(self):
        """GET on the collection returns an empty list when no clusters exist."""
        resp = self.app.get(
            reverse('ClusterCollectionHandler'),
            headers=self.default_headers
        )
        self.assertEqual(200, resp.status_code)
        self.assertEqual([], resp.json_body)

    def test_cluster_create(self):
        """POST with a name and a release id creates a cluster (201)."""
        release_id = self.env.create_release(api=False).id
        resp = self.app.post(
            reverse('ClusterCollectionHandler'),
            jsonutils.dumps({
                'name': 'cluster-name',
                'release': release_id,
            }),
            headers=self.default_headers
        )
        self.assertEqual(201, resp.status_code)

    def test_cluster_create_no_ip_addresses(self):
        """Two clusters having same networks updated to use full CIDR is ok"""
        cluster = self.env.create_cluster(
            api=True,
            net_provider=consts.CLUSTER_NET_PROVIDERS.nova_network)
        cluster_db = self.db.query(Cluster).get(cluster["id"])
        cluster2 = self.env.create_cluster(
            api=True,
            net_provider=consts.CLUSTER_NET_PROVIDERS.nova_network,
            release_id=cluster_db.release.id)
        cluster2_db = self.db.query(Cluster).get(cluster2["id"])
        # Stretch the management network of both clusters over the whole /16;
        # this must not cause any IP-allocation conflict between them.
        for clstr in (cluster_db, cluster2_db):
            management_net = self.db.query(NetworkGroup).filter_by(
                name="management",
                group_id=objects.Cluster.get_default_group(clstr).id
            ).first()
            NovaNetworkManager.update(
                clstr,
                {
                    "networks": [
                        {
                            "name": "management",
                            "ip_ranges": [
                                ["192.168.0.2", "192.168.255.254"]
                            ],
                            "id": management_net.id,
                            "cluster_id": clstr.id,
                            "vlan_start": 101,
                            "cidr": "192.168.0.0/16",
                            "gateway": "192.168.0.1"
                        }
                    ]
                }
            )
        cluster1_nets = self._get_cluster_networks(cluster["id"])
        cluster2_nets = self._get_cluster_networks(cluster2["id"])
        # Ids and group ids legitimately differ; compare the rest only.
        for net1, net2 in zip(cluster1_nets, cluster2_nets):
            for f in ('group_id', 'id'):
                del net1[f]
                del net2[f]
        cluster1_nets = sorted(cluster1_nets, key=lambda n: n['name'])
        cluster2_nets = sorted(cluster2_nets, key=lambda n: n['name'])
        # assertEqual for consistency with the rest of this class
        # (assertEquals is a deprecated alias).
        self.assertEqual(cluster1_nets, cluster2_nets)

    def test_cluster_creation_same_networks(self):
        """Two freshly created clusters get identical default networks."""
        cluster1_id = self.env.create_cluster(
            api=True,
            net_provider=consts.CLUSTER_NET_PROVIDERS.nova_network)["id"]
        cluster2_id = self.env.create_cluster(
            api=True,
            net_provider=consts.CLUSTER_NET_PROVIDERS.nova_network)["id"]
        cluster1_nets = self._get_cluster_networks(cluster1_id)
        cluster2_nets = self._get_cluster_networks(cluster2_id)
        # Ids and group ids legitimately differ; compare the rest only.
        for net1, net2 in zip(cluster1_nets, cluster2_nets):
            for f in ('group_id', 'id'):
                del net1[f]
                del net2[f]
        cluster1_nets = sorted(cluster1_nets, key=lambda n: n['name'])
        cluster2_nets = sorted(cluster2_nets, key=lambda n: n['name'])
        self.assertEqual(cluster1_nets, cluster2_nets)

    def test_if_cluster_creates_correct_networks(self):
        """A new cluster gets the expected default network groups."""
        release = Release()
        release.version = "1111-6.0"
        release.name = u"release_name_" + str(release.version)
        release.description = u"release_desc" + str(release.version)
        release.operating_system = consts.RELEASE_OS.ubuntu
        release.state = consts.RELEASE_STATES.available
        release.networks_metadata = self.env.get_default_networks_metadata()
        release.attributes_metadata = {
            "editable": {
                "keystone": {
                    "admin_tenant": "admin"
                }
            },
            "generated": {
                "mysql": {
                    "root_password": ""
                }
            }
        }
        release.vmware_attributes_metadata = {}
        self.db.add(release)
        self.db.commit()
        resp = self.app.post(
            reverse('ClusterCollectionHandler'),
            jsonutils.dumps({
                'name': 'cluster-name',
                'release': release.id,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.nova_network,
            }),
            headers=self.default_headers
        )
        self.assertEqual(201, resp.status_code)
        # The admin network is shared infrastructure, not cluster-specific.
        nets = self.db.query(NetworkGroup).filter(
            not_(NetworkGroup.name == "fuelweb_admin")
        ).all()
        obtained = []
        for net in nets:
            obtained.append({
                'release': net.release,
                'name': net.name,
                'vlan_id': net.vlan_start,
                'cidr': net.cidr,
                'gateway': net.gateway
            })
        expected = [
            {
                'release': release.id,
                'name': u'public',
                'vlan_id': None,
                'cidr': '172.16.0.0/24',
                'gateway': '172.16.0.1'
            },
            {
                'release': release.id,
                'name': u'fixed',
                'vlan_id': None,
                'cidr': None,
                'gateway': None
            },
            {
                'release': release.id,
                'name': u'storage',
                'vlan_id': 102,
                'cidr': '192.168.1.0/24',
                'gateway': None
            },
            {
                'release': release.id,
                'name': u'management',
                'vlan_id': 101,
                'cidr': '192.168.0.0/24',
                'gateway': None
            }
        ]
        self.assertItemsEqual(expected, obtained)

    @patch('nailgun.rpc.cast')
    def test_verify_networks(self, mocked_rpc):
        """PUT of the current network config triggers verification (200)."""
        cluster = self.env.create_cluster(
            api=True,
            net_provider=consts.CLUSTER_NET_PROVIDERS.nova_network)
        nets = self.env.nova_networks_get(cluster['id']).json_body
        resp = self.env.nova_networks_put(cluster['id'], nets)
        self.assertEqual(200, resp.status_code)
| huntxu/fuel-web | nailgun/nailgun/test/integration/test_cluster_collection_handlers.py | Python | apache-2.0 | 7,815 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The MatchMaker classes should accept a Topic or Fanout exchange key and
return keys for direct exchanges, per (approximate) AMQP parlance.
"""
from oslo.config import cfg
from openstack.common import importutils
from openstack.common import log as logging
from openstack.common.rpc import matchmaker as mm_common
# Imported lazily so this module can load even without the redis client;
# MatchMakerRedis.__init__ raises ImportError if it is actually needed.
redis = importutils.try_import('redis')
# Connection options, registered under the [matchmaker_redis] group.
matchmaker_redis_opts = [
    cfg.StrOpt('host',
               default='127.0.0.1',
               help='Host to locate redis'),
    cfg.IntOpt('port',
               default=6379,
               help='Use this port to connect to redis host.'),
    cfg.StrOpt('password',
               default=None,
               help='Password for Redis server. (optional)'),
]
CONF = cfg.CONF
opt_group = cfg.OptGroup(name='matchmaker_redis',
                         title='Options for Redis-based MatchMaker')
CONF.register_group(opt_group)
CONF.register_opts(matchmaker_redis_opts, opt_group)
LOG = logging.getLogger(__name__)
class RedisExchange(mm_common.Exchange):
    """Exchange base that keeps a reference to the matchmaker and its Redis
    connection for use by the concrete run() implementations."""
    def __init__(self, matchmaker):
        self.matchmaker = matchmaker
        # Shortcut to the matchmaker's already-configured Redis client.
        self.redis = matchmaker.redis
        super(RedisExchange, self).__init__()
class RedisTopicExchange(RedisExchange):
    """Exchange where all topic keys are split, sending to second half.

    i.e. "compute.host" sends a message to "compute" running on "host"
    """
    def run(self, topic):
        """Pick one live, randomly chosen member of *topic*.

        Dead members encountered along the way are skipped; an exhausted
        (empty) member set yields an empty result.
        """
        member = self.redis.srandmember(topic)
        while member:
            if self.matchmaker.is_alive(topic, member):
                # Member names look like 'topic.host'; hand back the host part.
                return [(member, member.split('.', 1)[1])]
            member = self.redis.srandmember(topic)
        # No members left in the set.
        return []
class RedisFanoutExchange(RedisExchange):
    """Return a list of all hosts."""
    def run(self, topic):
        """Return (member, host) pairs for every live member of the topic.

        The '~fanout' prefix is stripped from *topic* before the lookup.
        """
        real_topic = topic.split('~', 1)[1]
        members = self.redis.smembers(real_topic)
        return [(member, member.split('.', 1)[1])
                for member in members
                if self.matchmaker.is_alive(real_topic, member)]
class MatchMakerRedis(mm_common.HeartbeatMatchMakerBase):
    """MatchMaker registering and looking-up hosts with a Redis server."""
    def __init__(self):
        super(MatchMakerRedis, self).__init__()
        # redis is importutils.try_import('redis'); None means the client
        # library is not installed.
        if not redis:
            raise ImportError("Failed to import module redis.")
        self.redis = redis.StrictRedis(
            host=CONF.matchmaker_redis.host,
            port=CONF.matchmaker_redis.port,
            password=CONF.matchmaker_redis.password)
        self.add_binding(mm_common.FanoutBinding(), RedisFanoutExchange(self))
        self.add_binding(mm_common.DirectBinding(), mm_common.DirectExchange())
        self.add_binding(mm_common.TopicBinding(), RedisTopicExchange(self))
    def ack_alive(self, key, host):
        """Heartbeat: refresh the TTL of this host's liveness key."""
        topic = "%s.%s" % (key, host)
        if not self.redis.expire(topic, CONF.matchmaker_heartbeat_ttl):
            # If we could not update the expiration, the key
            # might have been pruned. Re-register, creating a new
            # key in Redis.
            # NOTE(review): self.topic_host is presumably maintained by the
            # HeartbeatMatchMakerBase base class -- confirm there.
            self.register(self.topic_host[host], host)
    def is_alive(self, topic, host):
        """Return whether *host* still has a live (expiring) key."""
        # NOTE(review): -1 means "key exists without expiry" on redis >= 2.8,
        # where a *missing* key returns -2; this check assumes the older
        # client/server behavior -- confirm against the targeted versions.
        if self.redis.ttl(host) == -1:
            self.expire(topic, host)
            return False
        return True
    def expire(self, topic, host):
        """Atomically drop a dead host: its key and its set membership."""
        with self.redis.pipeline() as pipe:
            pipe.multi()
            pipe.delete(host)
            pipe.srem(topic, host)
            pipe.execute()
    def backend_register(self, key, key_host):
        """Atomically add key_host to the topic set and create its key."""
        with self.redis.pipeline() as pipe:
            pipe.multi()
            pipe.sadd(key, key_host)
            # No value is needed, we just
            # care if it exists. Sets aren't viable
            # because only keys can expire.
            pipe.set(key_host, '')
            pipe.execute()
    def backend_unregister(self, key, key_host):
        """Atomically remove key_host from the topic set and delete its key."""
        with self.redis.pipeline() as pipe:
            pipe.multi()
            pipe.srem(key, key_host)
            pipe.delete(key_host)
            pipe.execute()
| JioCloud/oslo-incubator | openstack/common/rpc/matchmaker_redis.py | Python | apache-2.0 | 4,835 |
from pySDC import Level as levclass
from pySDC import Stats as statclass
from pySDC import Hooks as hookclass
import copy as cp
import sys
class step():
    """
    Step class, referencing most of the structure needed for the time-stepping

    This class bundles multiple levels and the corresponding transfer operators and is used by the methods
    (e.g. SDC and MLSDC).

    Attributes:
        params: user-supplied parameters, exposed as attributes (with defaults)
        status: bundle of status variables (iteration, stage, time, dt, ...)
        levels: list of levels, levels[0] being the finest
        __transfer_dict: data structure to couple levels and transfer operators
        __prev: reference to the previous step (property prev)
        __slots__: list of attributes to avoid accidental creation of new class attributes
    """

    __slots__ = ('params','levels','__transfer_dict','status','__prev')

    def __init__(self, params):
        """
        Initialization routine

        Args:
            params: parameters given by the user, will be added as attributes
        """

        # short helper class to add params as attributes
        class pars():
            def __init__(self, params):
                # defaults first, then user-provided values override them
                defaults = dict()
                defaults['maxiter'] = 20
                defaults['fine_comm'] = True
                for k, v in defaults.items():
                    setattr(self, k, v)
                for k, v in params.items():
                    setattr(self, k, v)

        # short helper class to bundle all status variables
        class status():
            __slots__ = ('iter','stage','slot','first','last','pred_cnt','done','time','dt','step')
            def __init__(self):
                self.iter = None
                self.stage = None
                self.slot = None
                self.first = None
                self.last = None
                self.pred_cnt = None
                self.done = None
                self.time = None
                self.dt = None
                self.step = None

        # set params and status
        self.params = pars(params)
        self.status = status()

        # empty attributes
        self.__transfer_dict = {}
        self.levels = []
        self.__prev = None

    def generate_hierarchy(self, descr):
        """
        Routine to generate the level hierarchy for a single step

        This makes the explicit generation of levels in the frontend obsolete and hides a few dirty hacks here and
        there.

        Args:
            descr: dictionary containing the description of the levels as list per key
        """
        # assert the existence of all the keys we need to set up at least one level
        assert 'problem_class' in descr
        assert 'problem_params' in descr
        assert 'dtype_u' in descr
        assert 'dtype_f' in descr
        assert 'collocation_class' in descr
        assert 'num_nodes' in descr
        assert 'sweeper_class' in descr
        assert 'level_params' in descr
        # convert problem-dependent parameters consisting of dictionary of lists to a list of dictionaries with only a
        # single entry per key, one dict per level
        pparams_list = self.__dict_to_list(descr['problem_params'])
        # put this newly generated list into the description dictionary (copy to avoid changing the original one)
        descr_new = cp.deepcopy(descr)
        descr_new['problem_params'] = pparams_list
        # generate list of dictionaries out of the description
        descr_list = self.__dict_to_list(descr_new)
        # sanity check: is there a transfer class? is there one even if only a single level is specified?
        if len(descr_list) > 1:
            assert 'transfer_class' in descr_new
            assert 'transfer_params' in descr_new
        elif 'transfer_class' in descr_new:
            print('WARNING: you have specified transfer classes, but only a single level...')
        # generate levels, register and connect if needed
        for l in range(len(descr_list)):
            # check if we have a hook on this level. if not, use default class.
            if 'hook_class' in descr_list[l]:
                hook = descr_list[l]['hook_class']
            else:
                hook = hookclass.hooks

            L = levclass.level(problem_class=descr_list[l]['problem_class'],
                               problem_params=descr_list[l]['problem_params'],
                               dtype_u=descr_list[l]['dtype_u'],
                               dtype_f=descr_list[l]['dtype_f'],
                               collocation_class=descr_list[l]['collocation_class'],
                               num_nodes=descr_list[l]['num_nodes'],
                               sweeper_class=descr_list[l]['sweeper_class'],
                               level_params=descr_list[l]['level_params'],
                               hook_class=hook,
                               id='L' + str(l))

            self.register_level(L)

            if l > 0:
                self.connect_levels(transfer_class=descr_list[l]['transfer_class'],
                                    transfer_params=descr_list[l]['transfer_params'],
                                    fine_level=self.levels[l - 1],
                                    coarse_level=self.levels[l])

    @staticmethod
    def __dict_to_list(dct):
        """
        Straightforward helper function to convert dictionary of list to list of dictionaries

        Args:
            dct: dictionary of lists

        Returns:
            list of dictionaries

        Raises:
            ValueError: if the lists in dct have inconsistent lengths
        """
        max_val = 1
        for k, v in dct.items():
            if type(v) is list:
                # NOTE: use != here, not 'is not' -- 'is' compares object
                # identity and is unreliable for integers outside CPython's
                # small-int cache.
                if len(v) > 1 and (max_val > 1 and len(v) != max_val):
                    raise ValueError('All lists in cparams need to be of length 1 or %i.. key %s has this list: %s'
                                     % (max_val, k, v))
                max_val = max(max_val, len(v))

        ld = [{} for l in range(max_val)]
        for i in range(len(ld)):
            for k, v in dct.items():
                if type(v) is not list:
                    # scalars are shared across all levels
                    ld[i][k] = v
                else:
                    # single-entry lists are broadcast, longer ones indexed
                    if len(v) == 1:
                        ld[i][k] = v[0]
                    else:
                        ld[i][k] = v[i]
        return ld

    def register_level(self, L):
        """
        Routine to register levels

        This routine will append levels to the level list of the step instance and link the step to the newly
        registered level (Level 0 will be considered as the finest level). It will also allocate the tau correction,
        if this level is not the finest one.

        Args:
            L: level to be registered
        """
        assert isinstance(L, levclass.level)
        # add level to level list
        self.levels.append(L)
        # pass this step to the registered level
        self.levels[-1]._level__set_step(self)
        # if this is not the finest level, allocate tau correction
        if len(self.levels) > 1:
            L._level__add_tau()

    def connect_levels(self, transfer_class, transfer_params, fine_level, coarse_level):
        """
        Routine to couple levels with transfer operators

        Args:
            transfer_class: the class which can transfer between the two levels
            transfer_params: parameters for the transfer class
            fine_level: the fine level
            coarse_level: the coarse level
        """
        # create new instance of the specific transfer class
        T = transfer_class(fine_level, coarse_level)
        # use transfer dictionary twice to set restrict and prolong operator
        self.__transfer_dict[tuple([fine_level, coarse_level])] = T.restrict

        assert 'finter' in transfer_params
        if transfer_params['finter']:
            self.__transfer_dict[tuple([coarse_level, fine_level])] = T.prolong_f
        else:
            self.__transfer_dict[tuple([coarse_level, fine_level])] = T.prolong

    def transfer(self, source, target):
        """
        Wrapper routine to ease the call of the transfer functions

        This function can be called in the multilevel stepper (e.g. MLSDC), passing a source and a target level.
        Using the transfer dictionary, the calling stepper does not need to specify whether to use restrict or prolong.

        Args:
            source: source level
            target: target level
        """
        self.__transfer_dict[tuple([source, target])]()

    def reset_step(self):
        """
        Routine to clean up the step structure and the corresp. levels for further uses
        """
        # reset all levels
        for l in self.levels:
            l.reset_level()

    def init_step(self, u0):
        """
        Initialization routine for a new step.

        This routine uses initial values u0 to set up the u[0] values at the finest level

        Args:
            u0: initial values
        """
        assert len(self.levels) >= 1
        assert len(self.levels[0].u) >= 1

        # pass u0 to u[0] on the finest level 0
        P = self.levels[0].prob
        self.levels[0].u[0] = P.dtype_u(u0)

    @property
    def prev(self):
        """
        Getter for previous step

        Returns:
            prev
        """
        return self.__prev

    @prev.setter
    def prev(self, p):
        """
        Setter for previous step

        Args:
            p: new previous step
        """
        assert type(p) is type(self)
        self.__prev = p
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Aneesh Dogra <aneesh@activitycentral.com>
"""Fabric settings file."""
SETTINGS = {}
#
# SSH connection for Fabric
#
#: List of hosts to work on
SETTINGS['hosts'] = ['localhost']
#: Username to log in in the remote machine
SETTINGS['user'] = 'root'
#
# Database
#
# DB Engine
# Replace 'mysql' with 'postgresql_psycopg2', 'sqlite3' or 'oracle'.
SETTINGS['db_engine'] = "django.db.backends.mysql"
# DB Name
# db_name is actually the path of db file when the db_engine is set to sqlite3
SETTINGS['db_name'] = 'pathagar'
# DB user will be used for creating the database
SETTINGS['db_user'] = 'root'
SETTINGS['db_password'] = ''
# Extra flag passed to the db client when a password is in use (e.g. mysql -p)
SETTINGS['db_password_opt'] = '-p'
#
# Project
#
#: A meaningful name for your Pootle installation
SETTINGS['project_name'] = 'pathagar'
#: This URL will be used in the VirtualHost section
SETTINGS['project_url'] = 'localhost'
#: Change the first path part if your Apache websites are stored somewhere else
SETTINGS['project_path'] = '/var/www/sites/%s' % SETTINGS['project_name']
#
# The rest of the settings probably won't need any changes
#
SETTINGS['project_repo_path'] = '%s/src/pathagar' % SETTINGS['project_path']
SETTINGS['project_repo'] = 'https://github.com/PathagarBooks/pathagar.git'
SETTINGS['project_settings_path'] = '%s/settings.py' % \
    SETTINGS['project_repo_path']
#
# Secret key
#
# NOTE: a fresh random secret key is generated every time this module is
# imported; the deploy task is expected to write it into settings.py once.
from base64 import b64encode
from os import urandom
SETTINGS['secret_key'] = b64encode(urandom(50))
#
# Virtualenv
#
#: Python version that will be used in the virtualenv
SETTINGS['python'] = 'python2.7'
SETTINGS['env_path'] = '%s/env' % SETTINGS['project_path']
#
# Apache + VirtualHost + WSGI
#
#: The group your web server is running on
SETTINGS['server_group'] = 'apache'
SETTINGS['vhost_file'] = '/etc/httpd/conf/httpd.conf'
SETTINGS['wsgi_file'] = '%s/wsgi.py' % SETTINGS['project_repo_path']
# Check http://httpd.apache.org/docs/2.4/logs.html
SETTINGS['access_log_format'] = "%h %l %u %t '%r' %>s %b"
| aristippe/pathagar | deploy/fedora/fabric.py | Python | gpl-2.0 | 2,055 |
import commands
import httplib
import json
import logging
import os
import socket
from django.conf import settings
from django.db import models
from cloud.models.virtual_interface import VirtualInterface
from cloud.models.virtual_machine import VirtualMachine
from cloud.models.virtual_router import VirtualRouter
from cloud.models.base_model import BaseModel
# Get an instance of a logger
logger = logging.getLogger(__name__)
# (stored value, human-readable label) choices for VirtualLink.state;
# 'created' is the initial state, 'establish' means the link is up.
LINK_STATES = (
    (u'created', u'Created'),
    (u'waiting', u'Waiting'),
    (u'establish', u'Established'),
    (u'inactive', u'Inactive'),
    (u'failed', u'Failed'),
)
class VirtualLink(BaseModel):
    """A virtual link between two virtual interfaces (VMs and/or routers)."""
    # Slice owning this link; optional, a link may exist outside any slice.
    belongs_to_slice = models.ForeignKey(
        'Slice', # Import as string to avoid circular import problems
        blank=True,
        null=True
    )
    # The two endpoints of the link.
    if_start = models.ForeignKey(
        VirtualInterface,
        verbose_name="Link start",
        related_name='virtuallink_set_start'
    )
    if_end = models.ForeignKey(
        VirtualInterface,
        verbose_name="Link end",
        related_name='virtuallink_set_end'
    )
    # Lifecycle state; one of LINK_STATES, starts as 'created'.
    state = models.CharField(max_length=10, choices=LINK_STATES, default='created', db_index=True)
    # Raw JSON route returned by the controller when the link was established.
    path = models.TextField(blank=True, null=True)
def current_state(self):
# Check first internal state
if self.state == 'created' or self.state == 'waiting':
return self.get_state_display()
# Router to Router link
if hasattr(self.if_start.attached_to, "virtualrouter") and hasattr(self.if_end.attached_to, "virtualrouter"):
# TODO: return current_state_ovspatch()
pass
# VM to VM link
elif hasattr(self.if_start.attached_to, "virtualmachine") and hasattr(self.if_end.attached_to, "virtualmachine"):
return self.current_state_of()
# VM to Router (or vice-versa) link
else:
# TODO: return current_state_ovs()
pass
return self.get_state_display()
    def establish(self):
        """Create the link between its two endpoints.

        Dispatches on the endpoint device types (router/router, VM/VM,
        mixed) and on whether both devices share a host.  On success sets
        state to 'establish' and saves.  Raises VirtualLinkException for
        unattached interfaces or cross-host cases not yet supported.
        """
        result = False
        # Finding attachment devices
        if hasattr(self.if_start.attached_to, "virtualrouter"):
            dev_start = self.if_start.attached_to.virtualrouter
        elif hasattr(self.if_start.attached_to, "virtualmachine"):
            dev_start = self.if_start.attached_to.virtualmachine
        else:
            raise self.VirtualLinkException('Interface not attached to a valid virtual device')
        if hasattr(self.if_end.attached_to, "virtualrouter"):
            dev_end = self.if_end.attached_to.virtualrouter
        elif hasattr(self.if_end.attached_to, "virtualmachine"):
            dev_end = self.if_end.attached_to.virtualmachine
        else:
            raise self.VirtualLinkException('Interface not attached to a valid virtual device')
        # Router to Router link
        if isinstance(dev_start, VirtualRouter) and isinstance(dev_end, VirtualRouter):
            # If both routers are at the same host establish with ovspatch
            if dev_start.host == dev_end.host:
                result = self.establish_ovspatch()
            else:
                # TODO: Implement this, maybe a gre tunnel can make it
                raise self.VirtualLinkException('Cannot connect two routers in different hosts')
        # VM to VM link
        elif isinstance(dev_start, VirtualMachine) and isinstance(dev_end, VirtualMachine):
            result = self.establish_of()
        # VM to Router (or vice-versa) link
        else:
            # If both devices are at the same host establish with ovs
            if dev_start.host == dev_end.host:
                result = self.establish_ovs()
            else:
                # TODO: Implement this, maybe a gre tunnel can make it
                raise self.VirtualLinkException('Cannot connect two virtual devices in different hosts')
        if result:
            self.state = 'establish'
            self.save()
        return result
def unestablish(self):
result = False
# Router to Router link
if hasattr(self.if_start.attached_to, "virtualrouter") and hasattr(self.if_end.attached_to, "virtualrouter"):
# TODO: Old Way with iplink
#return self.unestablish_iplink()
result = self.unestablish_ovspatch()
# VM to VM link
elif hasattr(self.if_start.attached_to, "virtualmachine") and hasattr(self.if_end.attached_to, "virtualmachine"):
result = self.unestablish_of()
# VM to Router (or vice-versa) link
else:
result = self.unestablish_ovs()
if result:
self.state = 'created'
self.save()
return True
    # Self migrate links to follow their endpoints
    def migrate(self):
        """Re-create the link so it follows its (possibly moved) endpoints."""
        # TODO: find way to migrate links more intelligently, for now just unestablish them
        # at one locations and restablish at another
        self.unestablish()
        self.establish()
    # Check status of virtual link on OpenFlow Controller
    def current_state_of(self):
        """Return the state label; no controller query is implemented yet."""
        return self.get_state_display()
    # Create the virtual link on OpenFlow Controller
    def establish_of(self):
        """Establish a VM-to-VM link by pushing static flows to Floodlight.

        Adds both interfaces to their hosts' Open vSwitch bridges, asks the
        controller for a route between the two attachment points, and pushes
        one forward and one reverse flow entry per hop.  Returns True on
        success; raises VirtualLinkException and may set state to 'failed'.

        NOTE(review): controller requests are shelled out through os.popen +
        curl with interpolated values; interface names and the controller IP
        come from the local DB/settings, but this still deserves hardening
        (no HTTP status handling, no shell quoting).
        """
        # Link is already established or is set to be
        current_state = self.current_state()
        if current_state == 'Established' or current_state == 'Waiting':
            return True
        eth1 = self.if_start.target
        eth2 = self.if_end.target
        if eth1 == "" or eth2 == "":
            raise self.VirtualLinkException("Invalid pair of interfaces (" + eth1 + "-" + eth2 + ")")
        mac1 = self.if_start.mac_address
        mac2 = self.if_end.mac_address
        vm1 = self.if_start.attached_to.virtualmachine
        h1 = vm1.host
        vm2 = self.if_end.attached_to.virtualmachine
        h2 = vm2.host
        br1 = h1.get_openvswitch_bridge()
        br2 = h2.get_openvswitch_bridge()
        # NOTE(review): these checks run after get_openvswitch_bridge() was
        # already called on h1/h2 -- a missing host would raise earlier.
        if h1 is None:
            raise self.VirtualLinkException("Target virtual device not deployed (" + str(vm1) + ")")
        if h2 is None:
            raise self.VirtualLinkException("Target virtual device not deployed (" + str(vm2) + ")")
        # Add interfaces to host bridges
        h1.add_openvswitch_port(eth1)
        h2.add_openvswitch_port(eth2)
        # Update local state to waiting
        self.state = 'waiting'
        self.save()
        # TODO: don't use curl, because we need to handle HTTP error codes
        command = 'curl -s http://%s:8080/wm/core/controller/switches/json' % settings.SDN_CONTROLLER['ip']
        result = os.popen(command).read()
        parsed_result = json.loads(result)
        logger.debug(command)
        #logger.debug(result)
        # Search for source and destination switches and ports
        src_switch = src_port = dst_switch = dst_port = None
        for sw in parsed_result:
            for pt in sw['ports']:
                if pt['name'] == eth1:
                    src_port = pt['portNumber']
                if pt['name'] == br1:
                    src_switch = sw['dpid']
                if pt['name'] == eth2:
                    dst_port = pt['portNumber']
                if pt['name'] == br2:
                    dst_switch = sw['dpid']
        if src_port is None or src_switch is None or dst_port is None or dst_switch is None:
            # Mark the link as failed before bailing out.
            self.state = 'failed'
            self.save()
            raise self.VirtualLinkException("Could not establish find switch/port in the network")
        # Everything found, will create circuit
        logger.debug("Creating circuit: from %s port %s -> %s port %s" % (src_switch, src_port, dst_switch, dst_port))
        command = "curl -s http://%s:8080/wm/topology/route/%s/%s/%s/%s/json" % (settings.SDN_CONTROLLER['ip'], src_switch, src_port, dst_switch, dst_port)
        result = os.popen(command).read()
        parsed_result = json.loads(result)
        logger.debug(command)
        #logger.debug(result)
        # Set link path to be recorded
        self.path = result
        # Result is a list of coupled switch in/out ports, every two ports (items on the list) represent a hop
        for i in range(len(parsed_result)):
            if i % 2 == 0:
                # even entry: ingress side of the hop
                dpid = parsed_result[i]['switch']
                port1 = parsed_result[i]['port']
            else:
                # odd entry: egress side; push both directions for this hop
                port2 = parsed_result[i]['port']
                # IMPORTANT NOTE: current Floodlight StaticFlowEntryPusher (0.90)
                # assumes all flow entries to have unique name across all switches
                # Forward flow
                command = 'curl -s -d \'{"switch": "%s", "name":"%slink%d.f", "src-mac":"%s", "cookie":"0", "priority":"32768", "ingress-port":"%d","active":"true", "actions":"output=%d"}\' http://%s:8080/wm/staticflowentrypusher/json' % (dpid, dpid.replace(':', ''), self.id, mac1, port1, port2, settings.SDN_CONTROLLER['ip'])
                result = os.popen(command).read()
                logger.debug(command)
                #logger.debug(result)
                # Backward flow
                command = 'curl -s -d \'{"switch": "%s", "name":"%slink%d.r", "src-mac":"%s", "cookie":"0", "priority":"32768", "ingress-port":"%d","active":"true", "actions":"output=%d"}\' http://%s:8080/wm/staticflowentrypusher/json' % (dpid, dpid.replace(':', ''), self.id, mac2, port2, port1, settings.SDN_CONTROLLER['ip'])
                result = os.popen(command).read()
                logger.debug(command)
                #logger.debug(result)
        logger.info('Link established with length: %d' % (len(parsed_result)/2))
        # Update local state to established
        self.state = 'establish'
        self.save()
        return True
# Deletes a virtual link on OpenFlow Controller
def unestablish_of(self):
# Was not established
current_state = self.current_state()
if current_state == 'Created':
return True
eth1 = self.if_start.target
eth2 = self.if_end.target
if eth1 == "" or eth2 == "":
raise self.VirtualLinkException("Invalid pair of interfaces (" + eth1 + "-" + eth2 + ")")
vm1 = self.if_start.attached_to.virtualmachine
vm2 = self.if_end.attached_to.virtualmachine
h1 = vm1.host
h2 = vm2.host
br1 = h1.get_openvswitch_bridge()
br2 = h2.get_openvswitch_bridge()
if h1 is None:
raise self.VirtualLinkException("Target virtual device not deployed (" + str(vm1) + ")")
if h2 is None:
raise self.VirtualLinkException("Target virtual device not deployed (" + str(vm2) + ")")
# Remove interfaces from host bridges
h1.del_openvswitch_port(eth1)
h2.del_openvswitch_port(eth2)
# No path recorded
if self.path is None:
self.state = 'failed'
self.save()
logger.warning('Path for link %s not recorded' % str(self))
return True
# Recover path originally established to remove every entry
parsed_result = json.loads(self.path)
if type(parsed_result) is not list:
self.state = 'failed'
self.save()
logger.warning('Invalid path for link %s' % str(self))
return True
# Result is a list of coupled switch in/out ports, every two ports (items on the list) represent a hop
for i in range(len(parsed_result)):
if not (type(parsed_result[i]) is dict and parsed_result[i].has_key('switch') and parsed_result[i].has_key('port')):
self.state = 'failed'
self.save()
logger.warning('Invalid path for link %s' % str(self))
return True
if i % 2 == 0:
dpid = parsed_result[i]['switch']
port1 = parsed_result[i]['port']
else:
port2 = parsed_result[i]['port']
# Forward flow
command = 'curl -X DELETE -d \'{"name":"%slink%d.f"}\' http://%s:8080/wm/staticflowentrypusher/json' % ( dpid.replace(':', ''), self.id, settings.SDN_CONTROLLER['ip'] )
result = os.popen(command).read()
logger.debug(command)
#logger.debug(result)
# Backward flow
command = 'curl -X DELETE -d \'{"name":"%slink%d.r"}\' http://%s:8080/wm/staticflowentrypusher/json' % ( dpid.replace(':', ''), self.id, settings.SDN_CONTROLLER['ip'] )
result = os.popen(command).read()
logger.debug(command)
#logger.debug(result)
self.state = 'inactive'
self.save()
return True
def establish_ovspatch(self):
bridge1 = self.if_start.attached_to.virtualrouter.dev_name
bridge2 = self.if_end.attached_to.virtualrouter.dev_name
port1 = bridge1 + '_to_' + bridge2
port2 = bridge2 + '_to_' + bridge1
h1 = self.if_start.attached_to.virtualrouter.host
h2 = self.if_end.attached_to.virtualrouter.host
if h1 is None:
raise self.VirtualLinkException("Target virtual device not deployed (" + str(bridge1) + ")")
if h2 is None:
raise self.VirtualLinkException("Target virtual device not deployed (" + str(bridge2) + ")")
# Add patch port in bridge 1
cmd = 'ovs-vsctl --db=' + h1.ovsdb + ' --timeout=3 -- add-port ' + bridge1 + ' ' + port1 + ' -- set interface ' + port1 + ' type=patch options:peer=' + port2
out = commands.getstatusoutput(cmd)
if out[0] != 0:
logger.warning(cmd)
raise self.VirtualLinkException("Could not add patch (" + port1 + ") to bridges (" + bridge1 + "-" + bridge2 + "): " + out[1])
# Add patch port in bridge 2
cmd = 'ovs-vsctl --db=' + h2.ovsdb + ' --timeout=3 -- add-port ' + bridge2 + ' ' + port2 + ' -- set interface ' + port2 + ' type=patch options:peer=' + port1
out = commands.getstatusoutput(cmd)
if out[0] != 0:
logger.warning(cmd)
raise self.VirtualLinkException("Could not add patch (" + port2 + ") to bridges (" + bridge1 + "-" + bridge2 + "): " + out[1])
return True
def unestablish_ovspatch(self):
bridge1 = self.if_start.attached_to.virtualrouter.dev_name
bridge2 = self.if_end.attached_to.virtualrouter.dev_name
port1 = bridge1 + '_to_' + bridge2
port2 = bridge2 + '_to_' + bridge1
h1 = self.if_start.attached_to.virtualrouter.host
h2 = self.if_end.attached_to.virtualrouter.host
if h1 is None:
raise self.VirtualLinkException("Target virtual device not deployed (" + str(bridge1) + ")")
if h2 is None:
raise self.VirtualLinkException("Target virtual device not deployed (" + str(bridge2) + ")")
# Remove port on source bridge
cmd = 'ovs-vsctl --db=' + h1.ovsdb + ' --timeout=3 -- --if-exists del-port ' + bridge1 + ' ' + port1
out = commands.getstatusoutput(cmd)
if out[0] != 0:
logger.warning(cmd)
raise self.VirtualLinkException("Could not delete patch (" + port1 + ") to bridges (" + bridge1 + "-" + bridge2 + "): " + out[1])
# Remove port on destination bridge
cmd = 'ovs-vsctl --db=' + h2.ovsdb + ' --timeout=3 -- --if-exists del-port ' + bridge2 + ' ' + port2
out = commands.getstatusoutput(cmd)
if out[0] != 0:
logger.warning(cmd)
raise self.VirtualLinkException("Could not delete patch (" + port2 + ") to bridges (" + bridge1 + "-" + bridge2 + "): " + out[1])
return True
    def establish_ovs(self):
        """Attach a VM interface to a virtual router's OVS bridge.

        Exactly one endpoint must resolve to a virtual router (providing the
        bridge) and the other to a virtual machine (providing the interface
        name); any other combination fails the bridge/eth emptiness check
        below before bridge_host is ever read, so bridge_host is always bound
        when used.
        """
        bridge = eth = ""
        if hasattr(self.if_start.attached_to, 'virtualrouter'):
            bridge = self.if_start.attached_to.virtualrouter.dev_name
            bridge_host = self.if_start.attached_to.virtualrouter.host
        elif hasattr(self.if_start.attached_to, 'virtualmachine'):
            eth = self.if_start.target
        if hasattr(self.if_end.attached_to, 'virtualrouter'):
            bridge = self.if_end.attached_to.virtualrouter.dev_name
            bridge_host = self.if_end.attached_to.virtualrouter.host
        elif hasattr(self.if_end.attached_to, 'virtualmachine'):
            eth = self.if_end.target
        if bridge == "" or eth == "":
            raise self.VirtualLinkException("Invalid pair of interfaces (" + eth + "-" + bridge + ")")
        if bridge_host is None:
            raise self.VirtualLinkException("Target virtual router not deployed (" + str(bridge) + ")")
        # First remove interface from previous ovs (if it was somewhere else)
        out = commands.getstatusoutput('ovs-vsctl --db=' + bridge_host.ovsdb + ' --timeout=3 port-to-br ' + eth)
        if out[0] == 0:
            # Interface was found at another switch
            # NOTE(review): this del-port's exit status is deliberately ignored
            # (best-effort cleanup before the add below).
            out = commands.getstatusoutput('ovs-vsctl --db=' + bridge_host.ovsdb + ' --timeout=3 del-port "' + out[1] + '" ' + eth)
        out = commands.getstatusoutput('ovs-vsctl --db=' + bridge_host.ovsdb + ' --timeout=3 add-port "' + bridge + '" ' + eth)
        if out[0] != 0:
            raise self.VirtualLinkException("Could not add port (" + eth + ") to bridge (" + bridge + "): " + out[1])
        return True
def unestablish_ovs(self):
bridge = eth = ""
if hasattr(self.if_start.attached_to, 'virtualrouter'):
bridge = self.if_start.attached_to.virtualrouter.dev_name
bridge_host = self.if_start.attached_to.virtualrouter.host
elif hasattr(self.if_start.attached_to, 'virtualmachine'):
eth = self.if_start.target
if hasattr(self.if_end.attached_to, 'virtualrouter'):
bridge = self.if_end.attached_to.virtualrouter.dev_name
bridge_host = self.if_end.attached_to.virtualrouter.host
elif hasattr(self.if_end.attached_to, 'virtualmachine'):
eth = self.if_end.target
if bridge == "" or eth == "":
raise self.VirtualLinkException("Invalid pair of interfaces (" + eth + "-" + bridge + ")")
if bridge_host is None:
raise self.VirtualLinkException("Target virtual router not deployed (" + str(bridge) + ")")
out = commands.getstatusoutput('ovs-vsctl --db=' + bridge_host.ovsdb + ' --timeout=3 del-port "' + bridge + '" ' + eth)
if out[0] != 0:
logger.warning("Could not add port (" + eth + ") to bridge (" + bridge + "): " + out[1])
raise self.VirtualLinkException("Could not delete port (" + eth + ") to bridge (" + bridge + "): " + out[1])
return True
def __unicode__(self):
return "VirtualLink #%d" % self.id
    class VirtualLinkException(BaseModel.ModelException):
        """Raised when a virtual link cannot be established or torn down."""
        pass
| ComputerNetworks-UFRGS/Aurora | cloud/models/virtual_link.py | Python | gpl-2.0 | 18,789 |
#!/usr/bin/env python3
import json
import requests
from utils import parse_map
def extract_points(path):
    """Decode an SVG-like path string ('M x,y l dx,dy ... Z' per subpath)
    into a list of contours, each a list of absolute (x, y) tuples."""
    contours = []
    for subpath in path.rstrip('Z').split('Z'):
        # First pair after 'M' is absolute; every 'l' step is a relative move.
        steps = [pair.split(',') for pair in subpath.lstrip('M').split('l')]
        x, y = (float(v) for v in steps[0])
        contour = [(x, y)]
        for dx, dy in steps[1:]:
            x += float(dx)
            y += float(dy)
            contour.append((x, y))
        contours.append(contour)
    return contours
def find_box(points):
    """Axis-aligned bounding box [xmin, ymin, xmax, ymax] of (x, y) points.

    `points` must be re-iterable (list/set), not a one-shot iterator.
    """
    xs = [p[0] for p in points]
    ys = [p[1] for p in points]
    return [min(xs), min(ys), max(xs), max(ys)]
def find_centroid(box):
    """Center point [cx, cy] of a bounding box [xmin, ymin, xmax, ymax]."""
    xmin, ymin, xmax, ymax = box
    return [(xmin + xmax) / 2, (ymin + ymax) / 2]
def flatten(contours):
    """Union of all points across all contours, as a set (duplicates merged)."""
    points = set()
    for contour in contours:
        points.update(contour)
    return points
# Slack (in map units) applied when testing bounding-box overlap, so boxes
# that merely touch still count as intersecting.
BOX_EPS = 1
def boxes_intersect(box1, box2):
    """True when two boxes [xmin, ymin, xmax, ymax] overlap on both axes,
    with BOX_EPS of slack."""
    # lo indexes the min coordinate of an axis (0=x, 1=y); lo+2 its max.
    for lo in (0, 1):
        hi = lo + 2
        if not (box1[lo] < box2[hi] + BOX_EPS and box2[lo] < box1[hi] + BOX_EPS):
            return False
    return True
def find_box_neighbors(boxes):
    """Map every region key to the sorted keys of other regions whose
    bounding boxes overlap its own (coarse neighbor pre-filter)."""
    return {
        key: sorted(other for other in boxes
                    if other != key and boxes_intersect(boxes[key], boxes[other]))
        for key in boxes
    }
# Threshold compared against *squared* distances (segment_point_dist and
# friends return d**2, not d) when deciding whether two borders touch.
BORDER_DIST_EPS = 1
def segment_point_dist(x1, y1, x2, y2, x, y):
    """Squared distance from point (x, y) to segment (x1, y1)-(x2, y2)."""
    seg_len_sq = (x2 - x1) ** 2 + (y2 - y1) ** 2
    if seg_len_sq == 0:
        # Degenerate segment: distance to its single point.
        return (x1 - x) ** 2 + (y1 - y) ** 2
    # Parameter of the orthogonal projection onto the segment's line.
    t = ((x - x1) * (x2 - x1) + (y - y1) * (y2 - y1)) / seg_len_sq
    if 0 < t < 1:
        px = x1 + (x2 - x1) * t
        py = y1 + (y2 - y1) * t
        return (px - x) ** 2 + (py - y) ** 2
    # Projection falls outside the segment: nearest endpoint wins.
    return min((x1 - x) ** 2 + (y1 - y) ** 2, (x2 - x) ** 2 + (y2 - y) ** 2)
def segment_dist(x1, y1, x2, y2, x3, y3, x4, y4):
    """Squared distance between two segments, taken as the minimum of the
    four endpoint-to-segment distances (exact for non-crossing segments)."""
    candidates = (
        segment_point_dist(x1, y1, x2, y2, x3, y3),
        segment_point_dist(x1, y1, x2, y2, x4, y4),
        segment_point_dist(x3, y3, x4, y4, x1, y1),
        segment_point_dist(x3, y3, x4, y4, x2, y2),
    )
    return min(candidates)
def path_dist(path1, path2):
    """Smallest squared distance between any edge of closed polygon path1
    and any edge of closed polygon path2."""
    def closed_edges(path):
        # Pair each vertex with the next one, wrapping around to close the loop.
        return zip(path, path[1:] + path[:1])
    return min(segment_dist(*p1, *p2, *q1, *q2)
               for p1, p2 in closed_edges(path1)
               for q1, q2 in closed_edges(path2))
def border_dist(paths1, paths2):
    """Smallest squared distance between any contour of one region's border
    and any contour of another's."""
    return min(path_dist(contour_a, contour_b)
               for contour_a in paths1
               for contour_b in paths2)
def are_neighbors(a, b):
    """True when the borders of regions a and b come closer than
    BORDER_DIST_EPS (squared-distance units)."""
    closest = border_dist(a, b)
    return closest < BORDER_DIST_EPS
def main():
    """Download the world map, compute region adjacency, and write it to
    neighbors.json (keys sorted for reproducible output)."""
    map_data = parse_map(requests.get('https://worldroulette.ru/world_mill_ru.js').text)
    borders = {r: extract_points(v['path']) for r, v in map_data.items()}
    boxes = {r: find_box(flatten(v)) for r, v in borders.items()}
    # NOTE(review): centroids is computed but never used below — candidate
    # for removal (or it was meant to be written out as well; confirm).
    centroids = {r: find_centroid(v) for r, v in boxes.items()}
    print('Centroids generated')
    # Coarse bounding-box pass first; exact border-distance check below only
    # runs on the box-overlap candidates.
    box_neighbors = find_box_neighbors(boxes)
    print('Box neighbors generated')
    neighbors = {}
    for c in sorted(borders):
        print(c, end=' ', flush=True)
        neighbors[c] = sorted(i for i in box_neighbors[c] if are_neighbors(borders[c], borders[i]))
    print()
    print('Real neighbors generated')
    with open('neighbors.json', 'w') as f:
        json.dump(neighbors, f, sort_keys=True)
# Script entry point.
if __name__ == '__main__':
    main()
| kalinochkind/worldroulette-bot | geometry.py | Python | mit | 3,155 |
# -*- coding: utf-8 -*-
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
import unittest
import gdb
import os.path
from kdump.target import Target
class TestTarget(unittest.TestCase):
    """Tests for the kdump gdb target; requires a live gdb Python session.

    Tests that need a real crash dump only run when tests/vmcore exists.
    """
    def setUp(self):
        # Clear any previously loaded executable so each test starts clean.
        gdb.execute("file")
        self.do_real_tests = os.path.exists("tests/vmcore")
    def tearDown(self):
        # Best-effort cleanup: drop the current target if one was registered.
        # The bare except is deliberate — gdb.current_target() may not exist
        # or may fail depending on how far the test got.
        try:
            x = gdb.current_target()
            del x
        except:
            pass
        gdb.execute('target exec')
    def test_bad_file(self):
        """Opening a nonexistent dump file must raise gdb.error."""
        x = Target()
        with self.assertRaises(gdb.error):
            gdb.execute('target kdumpfile /does/not/exist')
        x.unregister()
    def test_real_open_with_no_kernel(self):
        """Opening a real dump with no kernel image loaded must fail."""
        if self.do_real_tests:
            x = Target()
            with self.assertRaises(gdb.error):
                gdb.execute('target kdumpfile tests/vmcore')
            x.unregister()
| jeffmahoney/crash-python | tests/test_target.py | Python | gpl-2.0 | 883 |
#!/usr/bin/env python
"""
PyVTK provides tools for manipulating VTK files in Python.
VtkData - create VTK files from Python / read VTK files to Python
"""
"""
Copyright 2001 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the LGPL. See http://www.fsf.org
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Revision: 1.11 $
$Date: 2003-04-07 14:56:08 $
Pearu Peterson
"""
__author__ = "Pearu Peterson <pearu@cens.ioc.ee>"
__license__ = "LGPL (see http://www.fsf.org)"
from .__version__ import __version__
__all__ = ['StructuredPoints','StructuredGrid','UnstructuredGrid',
'RectilinearGrid','PolyData',
'Scalars','ColorScalars','LookupTable','Vectors','Normals',
'TextureCoordinates','Tensors','Field',
'PointData','CellData',
'VtkData']
import types
import os
from . import common
from .StructuredPoints import StructuredPoints, structured_points_fromfile
from .StructuredGrid import StructuredGrid, structured_grid_fromfile
from .UnstructuredGrid import UnstructuredGrid, unstructured_grid_fromfile
from .RectilinearGrid import RectilinearGrid, rectilinear_grid_fromfile
from .PolyData import PolyData, polydata_fromfile
from .Scalars import Scalars,scalars_fromfile
from .ColorScalars import ColorScalars, color_scalars_fromfile
from .LookupTable import LookupTable, lookup_table_fromfile
from .Vectors import Vectors, vectors_fromfile
from .Normals import Normals, normals_fromfile
from .TextureCoordinates import TextureCoordinates, texture_coordinates_fromfile
from .Tensors import Tensors, tensors_fromfile
from .Field import Field, field_fromfile
from .Data import PointData,CellData
class VtkData(common.Common):
    """
    VtkData
    =======

    Represents a legacy VTK file that has four relevant parts:
      header  - string up to length 255
      format  - string: ascii | binary
      DataSet - StructuredPoints | StructuredGrid | UnstructuredGrid
                | RectilinearGrid | PolyData
      Data    - PointData | CellData

    Usage:
    ------
      v = VtkData(<DataSet instance> [,<header string>,<Data instances>,..])
      v = VtkData(<filename>, only_structure = 0) - read VTK data from file.
      v.tofile(filename, format = 'ascii') - save VTK data to file.

    Attributes:
      header
      structure
      point_data
      cell_data

    Public methods:
      to_string(format = 'ascii')
      tofile(filename, format = 'ascii')

    DataSet
    =======
      StructuredPoints(<3-sequence of dimensions>
                       [,<3-sequence of origin> [, <3-sequence of spacing>]])
      StructuredGrid(<3-sequence of dimensions>,
                     <sequence of 3-sequences of points>)
      UnstructuredGrid(<sequence of 3-sequences of points>
                       [,<cell> = <sequence of (sequences of) integers>])
        cell - vertex | poly_vertex | line | poly_line | triangle
               | triangle_strip | polygon | pixel | quad | tetra
               | voxel | hexahedron | wedge | pyramid
      RectilinearGrid([x = <sequence of x-coordinates>],
                      [y = <sequence of y-coordinates>],
                      [z = <sequence of z-coordinates>])
      PolyData(<sequence of 3-sequences of points>,
               [vertices = <sequence of (sequences of) integers>],
               [lines = <sequence of (sequences of) integers>],
               [polygons = <sequence of (sequences of) integers>],
               [triangle_strips = <sequence of (sequences of) integers>])

    Data
    ====
      PointData | CellData ([<DataSetAttr instances>]) - construct Data instance

    DataSetAttr
    ===========
      DataSetAttr - Scalars | ColorScalars | LookupTable | Vectors
                    | Normals | TextureCoordinates | Tensors | Field
      Scalars(<sequence of scalars> [,name[, lookup_table]])
      ColorScalars(<sequence of scalar sequences> [,name])
      LookupTable(<sequence of 4-sequences> [,name])
      Vectors(<sequence of 3-sequences> [,name])
      Normals(<sequence of 3-sequences> [,name])
      TextureCoordinates(<sequence of (1,2, or 3)-sequences> [,name])
      Tensors(<sequence of (3x3)-sequences> [,name])
      Field([name,] [arrayname_1 = sequence of n_1-sequences, ...
                     arrayname_m = sequence of n_m-sequences,])
        where len(array_1) == .. == len(array_m) must hold.
    """
    # Class-level defaults; instances overwrite these in __init__/fromfile.
    header = None
    point_data = None
    cell_data = None
    def __init__(self,*args,**kws):
        # First positional argument is either a filename (read mode) or a
        # DataSet instance (construction mode).
        assert args,'expected at least one argument'
        # NOTE(review): this looks like a Python 2->3 conversion leftover —
        # filenames are usually str in Python 3, so a str filename falls
        # through to the is_dataset check below and raises TypeError.
        # Presumably the original test was for the string type; confirm.
        if type(args[0]) is bytes:
            if 'only_structure' in kws and kws['only_structure']:
                self.fromfile(args[0],1)
            else:
                self.fromfile(args[0])
            return
        else:
            structure = args[0]
            args = list(args)[1:]
        if not common.is_dataset(structure):
            raise TypeError('argument structure must be StructuredPoints|StructuredGrid|UnstructuredGrid|RectilinearGrid|PolyData but got %s'%(type(structure)))
        self.structure = structure
        # Remaining positional arguments may be a header string and/or
        # PointData/CellData instances, in any order.
        for a in args:
            if common.is_string(a):
                if len(a)>255:
                    self.skipping('striping header string to a length =255')
                self.header = a[:255]
            elif common.is_pointdata(a):
                self.point_data = a
            elif common.is_celldata(a):
                self.cell_data = a
            else:
                self.skipping('unexpexted argument %s'%(type(a)))
        if self.header is None:
            self.header = 'Really cool data'
            self.warning('Using header=%s'%(repr(self.header)))
        if self.point_data is None and self.cell_data is None:
            self.warning('No data defined')
        # Data sizes must match the dataset's point/cell counts; missing
        # sections are replaced by empty containers.
        if self.point_data is not None:
            s = self.structure.get_size()
            s1 = self.point_data.get_size()
            if s1 != s:
                raise ValueError('DataSet (size=%s) and PointData (size=%s) have different sizes'%(s,s1))
        else:
            self.point_data = PointData()
        if self.cell_data is not None:
            s = self.structure.get_cell_size()
            s1 = self.cell_data.get_size()
            if s1 != s:
                raise ValueError('DataSet (cell_size=%s) and CellData (size=%s) have different sizes'%(s,s1))
        else:
            self.cell_data = CellData()
    def to_string(self, format = 'ascii'):
        """Serialize the whole dataset as legacy VTK 2.0 text; empty
        CELL_DATA/POINT_DATA sections are omitted."""
        ret = ['# vtk DataFile Version 2.0',
               self.header,
               format.upper(),
               self.structure.to_string(format)
               ]
        if self.cell_data.data:
            ret.append(self.cell_data.to_string(format))
        if self.point_data.data:
            ret.append(self.point_data.to_string(format))
        #print `ret`
        return '\n'.join(ret)
    def tofile(self, filename, format = 'ascii'):
        """Save VTK data to file.

        A '.vtk' suffix is appended to filename if missing.
        """
        if not common.is_string(filename):
            raise TypeError('argument filename must be string but got %s'%(type(filename)))
        if format not in ['ascii','binary']:
            raise TypeError('argument format must be ascii | binary')
        filename = filename.strip()
        if not filename:
            raise ValueError('filename must be non-empty string')
        if filename[-4:]!='.vtk':
            filename += '.vtk'
        #print 'Creating file',`filename`
        # NOTE(review): file is opened in binary mode but to_string() returns
        # str — under Python 3 this write raises TypeError; confirm which
        # interpreter this module targets.
        f = open(filename,'wb')
        f.write(self.to_string(format))
        f.close()
    def fromfile(self,filename, only_structure = 0):
        """Populate this instance from a legacy VTK 2.0 ascii file.

        When only_structure is true, the POINT_DATA/CELL_DATA sections are
        skipped. Binary-format files are not supported.
        """
        filename = filename.strip()
        if filename[-4:]!='.vtk':
            filename += '.vtk'
        #print 'Reading file',`filename`
        # NOTE(review): the file is opened in binary mode, yet the lines read
        # below are compared against str literals — under Python 3 these
        # comparisons are always False for bytes; confirm target interpreter.
        f = open(filename,'rb')
        l = f.readline()
        if not l.strip().replace(' ','').lower() == '#vtkdatafileversion2.0':
            raise TypeError('File '+repr(filename)+' is not VTK 2.0 format')
        self.header = f.readline().rstrip()
        format = f.readline().strip().lower()
        if format not in ['ascii','binary']:
            raise ValueError('Expected ascii|binary but got %s'%(repr(format)))
        if format == 'binary':
            raise NotImplementedError('reading vtk binary format')
        l = common._getline(f).lower().split(' ')
        if l[0].strip() != 'dataset':
            raise ValueError('expected dataset but got %s'%(l[0]))
        # Dispatch to the matching <dataset>_fromfile reader imported at the
        # top of this module; eval resolves the name dynamically.
        try:
            ff = eval(l[1]+'_fromfile')
        except NameError:
            raise NotImplementedError('%s_fromfile'%(l[1]))
        self.structure,l = ff(f,self)
        # At most two data sections can follow: POINT_DATA and CELL_DATA.
        for i in range(2):
            if only_structure: break
            if not l: break
            l = [s.strip() for s in l.lower().split(' ')]
            assert len(l)==2 and l[0] in ['cell_data','point_data'], l[0]
            data = l[0]
            # Section header's second token is the item count.
            n = eval(l[1])
            lst = []
            # Read attribute blocks until a non-attribute line (which is the
            # next section header) or EOF is hit.
            while 1:
                l = common._getline(f)
                if not l: break
                sl = [s.strip() for s in l.split()]
                k = sl[0].lower()
                if k not in ['scalars','color_scalars','lookup_table','vectors',
                             'normals','texture_coordinates','tensors','field']:
                    break
                try:
                    ff = eval(k+'_fromfile')
                except NameError:
                    raise NotImplementedError('%s_fromfile'%(k))
                lst.append(ff(f,n,sl[1:]))
            if data == 'point_data':
                self.point_data = PointData(*lst)
            if data == 'cell_data':
                self.cell_data = CellData(*lst)
        if self.point_data is None:
            self.point_data = PointData()
        if self.cell_data is None:
            self.cell_data = CellData()
        f.close()
if __name__ == "__main__":
    # Smoke test: build a minimal 3x1x1 dataset with one scalar per point
    # and write it out as test.vtk.
    vtk = VtkData(StructuredPoints((3,1,1)),
                  'This is title',
                  PointData(Scalars([3,4,5]))
                  )
    vtk.tofile('test')
| ddempsey/PyFEHM | pyvtk/__init__.py | Python | lgpl-2.1 | 10,343 |
# This file is part of HamsiManager.
#
# Copyright (c) 2010 - 2015 Murat Demir <mopened@gmail.com>
#
# Hamsi Manager is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Hamsi Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HamsiManager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os
from Core.MyObjects import *
from Core import Universals as uni
from Core import Dialogs
from Core import ReportBug
from Core import Organizer
import Options
class SuggestIdea(MDialog):
    """Dialog that lets the user submit an idea (plus optional contact info
    and settings) to the HamsiManager feedback web service via HTTP POST."""
    def __init__(self):
        """Build the dialog UI and wire up the button/checkbox signals."""
        MDialog.__init__(self, getMainWindow())
        if isActivePyKDE4:
            self.setButtons(MDialog.NoDefault)
        pnlMain = MWidget(self)
        # Network objects are created lazily in sendAndClose().
        self.namMain = None
        self.nrqPost = None
        self.nrpBack = None
        self.vblMain = MVBoxLayout(pnlMain)
        self.pbtnSendAndClose = MPushButton(translate("SuggestIdea", "Send And Close"))
        self.pbtnCancel = MPushButton(translate("SuggestIdea", "Cancel"))
        self.cckbIsSendMySettings = Options.MyCheckBox(self, translate("SuggestIdea",
                                                                       "Send my settings for more better default settings."),
                                                       0, _stateChanged=self.isSendMySettings)
        self.connect(self.pbtnSendAndClose, SIGNAL("clicked()"), self.sendAndClose)
        self.connect(self.pbtnCancel, SIGNAL("clicked()"), self.cancel)
        lblIdea = MLabel(translate("SuggestIdea", "Idea : "))
        lblName = MLabel(translate("SuggestIdea", "Name And Surname : "))
        lblEMailAddress = MLabel(translate("SuggestIdea", "E-mail Address : "))
        lblAlert = MLabel(translate("SuggestIdea",
                                    "Note : Will be kept strictly confidential. It will be used solely to learn information about of your idea."))
        self.teIdea = MTextEdit(self)
        self.leName = MLineEdit(self)
        self.leEMailAddress = MLineEdit(self)
        # Row: idea text area.
        hbox1 = MHBoxLayout()
        hbox1.addWidget(lblIdea, 1)
        hbox1.addWidget(self.teIdea, 20)
        # Rows: name and e-mail fields.
        hbox2 = MHBoxLayout()
        hbox2.addWidget(lblName, 1)
        hbox2.addWidget(self.leName, 20)
        hbox3 = MHBoxLayout()
        hbox3.addWidget(lblEMailAddress, 1)
        hbox3.addWidget(self.leEMailAddress, 20)
        # Bottom row: settings checkbox + action buttons.
        hbox0 = MHBoxLayout()
        hbox0.addWidget(self.cckbIsSendMySettings, 1)
        hbox0.addStretch(2)
        hbox0.addWidget(self.pbtnSendAndClose, 1)
        hbox0.addWidget(self.pbtnCancel, 1)
        VBox1 = MVBoxLayout()
        VBox1.addLayout(hbox2)
        VBox1.addLayout(hbox3)
        VBox1.addWidget(lblAlert)
        gboxContactInformations = MGroupBox(translate("SuggestIdea", "Contact Informations : "))
        gboxContactInformations.setLayout(VBox1)
        self.vblMain.addLayout(hbox1, 20)
        self.vblMain.addWidget(gboxContactInformations, 1)
        self.vblMain.addLayout(hbox0, 1)
        # PyKDE4 dialogs take a main widget; plain Qt takes a layout. The
        # try/except keeps the dialog usable if setMainWidget fails.
        try:
            if isActivePyKDE4:
                self.setMainWidget(pnlMain)
            else:
                self.setLayout(self.vblMain)
        except:
            self.setLayout(self.vblMain)
        self.setWindowTitle(translate("SuggestIdea", "Please Suggest Idea"))
        # Shrink for the initial show, then allow free resizing afterwards.
        self.setMaximumSize(600, 375)
        self.show()
        self.setMaximumSize(10000, 10000)
    def sendAndClose(self):
        """POST the idea (and optional contact info) to the feedback service
        and show a progress dialog while the upload runs."""
        try:
            uni.isCanBeShowOnMainWindow = False
            self.namMain = MNetworkAccessManager(self)
            self.connect(self.namMain, SIGNAL("finished (QNetworkReply *)"), self.sendFinished)
            self.nrqPost = MNetworkRequest(MUrl("http://hamsiapps.com/ForMyProjects/SuggestIdea.php"))
            # NOTE(review): the POST body is assembled by string concatenation;
            # only the free-text fields go through Organizer.quote.
            self.nrpBack = self.namMain.post(self.nrqPost,
                                             "p=HamsiManager&l=" + str(uni.MySettings["language"]) + "&v=" + str(
                                                 uni.intversion) +
                                             "&thankYouMessages=new style" +
                                             "&userNotes=" + Organizer.quote(str(self.teIdea.toHtml())) +
                                             "&nameAndSurname=" + Organizer.quote(str(self.leName.text())) +
                                             "&mail=" + Organizer.quote(str(self.leEMailAddress.text()))
                                             )
            self.connect(self.nrpBack, SIGNAL("downloadProgress (qint64,qint64)"), self.sending)
            Dialogs.showState(translate("SuggestIdea", "Sending Your Idea"), 0, 100, True, self.cancelSending)
        except:
            ReportBug.ReportBug()
    def sending(self, _currentValue, _maxValue):
        """Progress callback: update the state dialog during the upload."""
        Dialogs.showState(translate("SuggestIdea", "Sending Your Idea"), _currentValue, _maxValue, True,
                          self.cancelSending)
    def cancelSending(self):
        """Abort an in-flight upload (hooked to the progress dialog)."""
        if self.nrpBack is not None:
            self.nrpBack.abort()
    def sendFinished(self, _nrpBack):
        """Handle upload completion: report success/cancel/error and reset
        the network objects."""
        try:
            Dialogs.showState(translate("SuggestIdea", "Sending Your Idea"), 100, 100)
            if _nrpBack.error() == MNetworkReply.NoError:
                Dialogs.show(translate("SuggestIdea", "Suggestion Received Successfully"), translate("SuggestIdea",
                                                                                                     "Thank you for sending us your idea. You have contributed a lot to make the next release even better."))
                self.close()
            elif _nrpBack.error() == MNetworkReply.OperationCanceledError:
                Dialogs.show(translate("SuggestIdea", "Suggestion Canceled"),
                             translate("SuggestIdea", "Suggestion canceled successfully."))
            else:
                Dialogs.show(translate("SuggestIdea", "An Error Has Occurred."),
                             translate("SuggestIdea", "An unknown error has occurred. Please try again."))
            uni.isCanBeShowOnMainWindow = True
            self.namMain = None
            self.nrqPost = None
            self.nrpBack = None
        except:
            ReportBug.ReportBug()
    def cancel(self):
        """Abort any pending upload and close the dialog."""
        if self.nrpBack is not None:
            self.nrpBack.abort()
        self.close()
    def isSendMySettings(self):
        """Checkbox handler: append the user's (non-private) settings to the
        idea text, or strip them again when unchecked."""
        try:
            currentText = str(self.teIdea.toHtml())
            if self.cckbIsSendMySettings.checkState() == Mt.Checked:
                settingText = "<br><br>"
                # willNotReportSettings lists keys that must never be sent.
                for keyName in uni.MySettings:
                    if uni.willNotReportSettings.count(keyName) == 0:
                        settingText += "<b>" + str(keyName) + " :</b> " + str(uni.MySettings[keyName]) + "<br>"
                self.teIdea.setHtml(str(
                    currentText + "<br>----------------------////////----------------------<br><br><b>" + str(
                        translate("SuggestIdea",
                                  "Note : You can check and delete your personal informations.")) + "</b>" + settingText))
            else:
                # Everything after the separator is the settings dump; drop it.
                currentText = currentText.split("----------------------////////----------------------")[0]
                self.teIdea.setHtml(str(currentText))
        except:
            ReportBug.ReportBug()
| supermurat/hamsi-manager | Core/SuggestIdea.py | Python | gpl-3.0 | 7,693 |
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
# NOTE(review): Pelican's SITEURL normally includes the scheme
# (e.g. 'https://es3a10.github.io/matematicas') — confirm this is intended.
SITEURL = 'es3a10.github.io/matematicas/'
# Absolute URLs in published output (relative ones are for local previews).
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
# Wipe the output directory on each publish build.
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""
| es3a10/matematicas | fuente_web/publishconf.py | Python | mit | 537 |
from bar import path | siosio/intellij-community | python/testData/quickFixes/PyAddImportQuickFixTest/combinedElementOrdering/first/second/__init__.py | Python | apache-2.0 | 20 |
'''
Reads a fixed 2x2 cell range from an Excel workbook and prints it as a
tab-separated table.

Created on 2016-09-22
@author: huangzhinan
'''
import xlrd
# NOTE(review): urllib is imported but never used here — candidate for removal.
import urllib
wb = xlrd.open_workbook("F://test.xls")
sh=wb.sheet_by_index(0)# first sheet of the workbook
for i in range(0,2):
    for j in range(0,2):
        print(sh.cell(i,j).value, end='\t')
    print()
| egassem/python_study | src/com/xiaobei/base/read_excel.py | Python | apache-2.0 | 265 |
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle
import logging
import collections
import functools
import itertools
logging.basicConfig(format="[%(levelname)s %(asctime)s %(filename)s:%(lineno)s]"
" %(message)s")
class SequenceType(object):
    """Sequence nesting level of an input slot: scalar, sequence, or
    sequence-of-sequences."""
    NO_SEQUENCE = 0
    SEQUENCE = 1
    SUB_SEQUENCE = 2
# TODO(yuyang18): Add string data type here.
class DataType(object):
    """Value type of an input slot: dense vector, sparse (with or without
    values), or integer index/label."""
    Dense = 0
    SparseNonValue = 1
    SparseValue = 2
    Index = 3
class CacheType(object):
    """How the data provider caches samples between passes."""
    NO_CACHE = 0  # No cache at all
    # First pass, read data from python. And store them in memory. Read from
    # memory during rest passes.
    CACHE_PASS_IN_MEM = 1
class InputType(object):
    """Declaration of one input slot: dimension, SequenceType, and DataType."""
    __slots__ = ['dim', 'seq_type', 'type']
    def __init__(self, dim, seq_type, tp):
        self.dim = dim
        self.seq_type = seq_type
        self.type = tp
def dense_slot(dim, seq_type=SequenceType.NO_SEQUENCE):
    """Declare a dense float-vector slot of the given dimension."""
    return InputType(dim, seq_type, DataType.Dense)
def sparse_non_value_slot(dim, seq_type=SequenceType.NO_SEQUENCE):
    """Declare a sparse binary (indices only, no values) slot."""
    return InputType(dim, seq_type, DataType.SparseNonValue)
def sparse_value_slot(dim, seq_type=SequenceType.NO_SEQUENCE):
    """Declare a sparse slot of (index, value) pairs."""
    return InputType(dim, seq_type, DataType.SparseValue)
def index_slot(dim, seq_type=SequenceType.NO_SEQUENCE):
    """Declare an integer index/label slot with values in [0, dim)."""
    return InputType(dim, seq_type, DataType.Index)
# Public, user-facing aliases for the slot constructors above.
dense_vector = dense_slot
sparse_binary_vector = sparse_non_value_slot
sparse_vector = sparse_value_slot
integer_value = index_slot
def dense_vector_sequence(dim):
    """Dense vector slot whose samples are sequences of vectors."""
    return dense_vector(dim, seq_type=SequenceType.SEQUENCE)
def dense_vector_sub_sequence(dim):
    """Dense vector slot whose samples are nested (sub-)sequences."""
    return dense_vector(dim, seq_type=SequenceType.SUB_SEQUENCE)
def sparse_binary_vector_sequence(dim):
    """Sparse binary slot whose samples are sequences."""
    return sparse_binary_vector(dim, seq_type=SequenceType.SEQUENCE)
def sparse_binary_vector_sub_sequence(dim):
    """Sparse binary slot whose samples are nested (sub-)sequences."""
    return sparse_binary_vector(dim, seq_type=SequenceType.SUB_SEQUENCE)
def sparse_vector_sequence(dim):
    """Sparse (index, value) slot whose samples are sequences."""
    return sparse_vector(dim, seq_type=SequenceType.SEQUENCE)
def sparse_vector_sub_sequence(dim):
    """Sparse (index, value) slot whose samples are nested (sub-)sequences."""
    return sparse_vector(dim, seq_type=SequenceType.SUB_SEQUENCE)
def integer_value_sequence(dim):
    """Integer index slot whose samples are sequences."""
    return integer_value(dim, seq_type=SequenceType.SEQUENCE)
def integer_value_sub_sequence(dim):
    """Integer index slot whose samples are nested (sub-)sequences."""
    return integer_value(dim, seq_type=SequenceType.SUB_SEQUENCE)
def integer_sequence(dim):
    """Equivalent to integer_value_sequence(dim); kept as a legacy alias
    (note it calls index_slot directly, unlike its siblings above)."""
    return index_slot(dim, seq_type=SequenceType.SEQUENCE)
class SingleSlotWrapper(object):
    """Adapts a single-slot data generator: each non-dict sample it yields is
    wrapped in a one-element list, while dict samples pass through untouched."""

    def __init__(self, generator):
        self.generator = generator

    def __call__(self, obj, filename):
        for sample in self.generator(obj, filename):
            yield sample if isinstance(sample, dict) else [sample]
class InputOrderWrapper(object):
    """Reorders dict samples from a generator into the fixed input_order
    (missing keys become None); non-dict samples pass through unchanged."""

    def __init__(self, generator, input_order):
        self.generator = generator
        self.input_order = input_order

    def __call__(self, obj, filename):
        for sample in self.generator(obj, filename):
            if not isinstance(sample, dict):
                yield sample
            else:
                yield [sample.get(name, None) for name in self.input_order]
class CheckWrapper(object):
    """Wraps a data generator and validates every yielded sample against the
    declared input_types, logging (and optionally skipping) bad samples.

    Note: keeps this module's Python 2 constructs (itertools.izip, xrange,
    len() of a filter() result) for consistency with the surrounding code.
    """

    def __init__(self, generator, input_types, check_fail_continue, logger):
        self.generator = generator
        self.input_types = input_types
        self.check_fail_continue = check_fail_continue
        self.logger = logger

    def __call__(self, obj, filename):
        """Yield only the samples that pass type/shape validation."""
        for items in self.generator(obj, filename):
            try:
                # A sample must provide exactly one non-None value per slot.
                assert len(items) == len(self.input_types)
                assert len(filter(lambda x: x is None, items)) == 0
                for item, input_type in itertools.izip(items, self.input_types):
                    callback = functools.partial(CheckWrapper.loop_callback,
                                                 input_type)
                    # Wrap the scalar check once per sequence nesting level.
                    for _ in xrange(input_type.seq_type):
                        callback = functools.partial(CheckWrapper.loop_check,
                                                     callback)
                    callback(item)
                yield items
            except AssertionError as e:
                # Report the whole sample: 'item' may be unbound when the
                # sample-level asserts above fail before the loop runs (the
                # old code referenced 'item' here and could raise NameError
                # instead of logging the intended warning).
                self.logger.warning(
                    "Item (%s) is not fit the input type with error %s" %
                    (repr(items), repr(e)))
                if self.check_fail_continue:
                    continue
                else:
                    raise

    @staticmethod
    def loop_callback(input_type, each):
        """Validate one scalar/vector value against its declared input_type."""
        assert isinstance(input_type, InputType)
        if input_type.type == DataType.Dense:
            assert isinstance(each, collections.Sequence)
            for d in each:
                assert isinstance(d, float)
            # Fixed: len() takes one argument; the old code called
            # len(each, input_type.dim), which raised TypeError (not caught
            # by the AssertionError handler above) on every dense sample.
            assert len(each) == input_type.dim
        elif input_type.type == DataType.Index:
            assert isinstance(each, int)
            assert each < input_type.dim
        elif input_type.type == DataType.SparseNonValue \
                or input_type.type == DataType.SparseValue:
            assert isinstance(each, collections.Sequence)
            sparse_id = set()
            for k in each:
                if input_type.type == DataType.SparseValue:
                    k, v = k
                    assert isinstance(v, float)
                assert isinstance(k, int)
                assert k < input_type.dim
                sparse_id.add(k)
            # Duplicate indices are not allowed in a sparse sample.
            assert len(sparse_id) == len(each)
        else:
            raise RuntimeError("Not support input type")

    @staticmethod
    def loop_check(callback, item):
        """Apply callback to every element of a (sub-)sequence."""
        for each in item:
            callback(each)
def provider(input_types=None,
             should_shuffle=None,
             pool_size=-1,
             min_pool_size=-1,
             can_over_batch_size=True,
             calc_batch_size=None,
             cache=CacheType.NO_CACHE,
             check=False,
             check_fail_continue=False,
             init_hook=None,
             **kwargs):
    """
    Provider decorator. Use it to make a function into PyDataProvider2 object.
    In this function, user only need to get each sample for some train/test
    file.

    The basic usage is:

    .. code-block:: python

        @provider(some data provider config here...)
        def process(settings, file_name):
            while not at end of file_name:
                sample = readOneSampleFromFile(file_name)
                yield sample.

    The configuration of data provider should be setup by\:

    :param input_types: Specify the input types, can also be set in init_hook.
                        It could be a list of InputType object. For example,
                        input_types=[dense_vector(9), integer_value(2)]. Or user
                        can set a dict of InputType object, which key is
                        data_layer's name. For example, input_types=\
                        {'img': img_features, 'label': label}. when using dict of
                        InputType, user could yield a dict of feature values, which
                        key is also data_layer's name.
    :type input_types: list|tuple|dict

    :param should_shuffle: True if data should shuffle. Pass None means shuffle
                           when is training and not to shuffle when is testing.
    :type should_shuffle: bool

    :param pool_size: Max number of sample in data pool.
    :type pool_size: int

    :param min_pool_size: Set minimal sample in data pool. The PaddlePaddle will
                          random pick sample in pool. So the min_pool_size
                          effect the randomize of data.
    :type min_pool_size: int

    :param can_over_batch_size: True if paddle can return a mini-batch larger
                                than batch size in settings. It is useful when
                                custom calculate one sample's batch_size.
                                It is very danger to set it to false and use
                                calc_batch_size together. Default is false.
    :type can_over_batch_size: bool

    :param calc_batch_size: a method to calculate each sample's batch size.
                            Default each sample's batch size is 1. But to you
                            can customize each sample's batch size.
    :type calc_batch_size: callable

    :param cache: Cache strategy of Data Provider. Default is CacheType.NO_CACHE
    :type cache: int

    :param init_hook: Initialize hook. Useful when data provider need load some
                      external data like dictionary. The parameter is
                      (settings, file_list, \*\*kwargs).

                      - settings. It is the global settings object. User can set
                        settings.input_types here.
                      - file_list. All file names for passed to data provider.
                      - is_train. Is this data provider used for training or not.
                      - kwargs. Other keyword arguments passed from
                        trainer_config's args parameter.
    :type init_hook: callable

    :param check: Check the yield data format is as same as input_types. Enable
                  this will make data provide process slow but it is very useful
                  for debug. Default is disabled.
    :type check: bool

    :param check_fail_continue: Continue train or not when check failed. Just
                                drop the wrong format data when it is True. Has
                                no effect when check set to False.
    :type check_fail_continue: bool
    """

    def __wrapper__(generator):
        class DataProvider(object):
            def __init__(self, file_list, **kwargs):
                self.logger = logging.getLogger("")
                self.logger.setLevel(logging.INFO)
                self.input_types = None
                # Legacy 'slots' kwarg is accepted (with a warning), but the
                # decorator's input_types argument always wins just below.
                if 'slots' in kwargs:
                    self.logger.warning('setting slots value is deprecated, '
                                        'please use input_types instead.')
                    self.slots = kwargs['slots']
                self.slots = input_types
                self.should_shuffle = should_shuffle
                # should_shuffle may arrive as a bool-ish string or int from
                # the trainer config; normalize it, falling back to None
                # (= shuffle only when training) when unrecognized.
                true_table = [1, 't', 'true', 'on']
                false_table = [0, 'f', 'false', 'off']
                if not isinstance(self.should_shuffle, bool) and \
                        self.should_shuffle is not None:
                    if isinstance(self.should_shuffle, basestring):
                        self.should_shuffle = self.should_shuffle.lower()
                    if self.should_shuffle in true_table:
                        self.should_shuffle = True
                    elif self.should_shuffle in false_table:
                        self.should_shuffle = False
                    else:
                        self.logger.warning(
                            "Could not recognize should_shuffle (%s), "
                            "just use default value of should_shuffle."
                            " Please set should_shuffle to bool value or "
                            "something in %s" %
                            (repr(self.should_shuffle),
                             repr(true_table + false_table)))
                        self.should_shuffle = None
                self.pool_size = pool_size
                self.can_over_batch_size = can_over_batch_size
                self.calc_batch_size = calc_batch_size
                self.file_list = file_list
                self.generator = generator
                self.cache = cache
                self.min_pool_size = min_pool_size
                self.input_order = kwargs['input_order']
                self.check = check
                # The init hook may populate settings.input_types at runtime;
                # if it did, that overrides the decorator argument.
                if init_hook is not None:
                    init_hook(self, file_list=file_list, **kwargs)
                if self.input_types is not None:
                    self.slots = self.input_types
                assert self.slots is not None
                assert self.generator is not None
                use_dynamic_order = False
                if isinstance(self.slots, dict):  # reorder input_types
                    self.slots = [self.slots[ipt] for ipt in self.input_order]
                    use_dynamic_order = True
                # Chain generator adapters: single-slot wrapping, dict
                # reordering, and (optionally) per-sample type checking.
                if len(self.slots) == 1:
                    self.generator = SingleSlotWrapper(self.generator)
                if use_dynamic_order:
                    self.generator = InputOrderWrapper(self.generator,
                                                       self.input_order)
                if self.check:
                    self.generator = CheckWrapper(self.generator, self.slots,
                                                  check_fail_continue,
                                                  self.logger)

        return DataProvider

    return __wrapper__
def deserialize_args(args):
    """
    Internal use only.

    Unpickle the argument blob passed across the trainer boundary.

    :param args: pickled byte string.
    :return: the deserialized Python object.
    """
    return cPickle.loads(args)
| helinwang/Paddle | python/paddle/trainer/PyDataProvider2.py | Python | apache-2.0 | 13,600 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function
from distutils.spawn import find_executable
from distutils.version import LooseVersion
import json
import os
import distro
import shutil
import subprocess
import six
import six.moves.urllib as urllib
from six.moves import input
from subprocess import PIPE
from zipfile import BadZipfile
import servo.packages as packages
from servo.util import extract, download_file, host_triple
def check_gstreamer_lib():
    """Return True when pkg-config reports GStreamer >= 1.16 installed."""
    status = subprocess.call(
        ["pkg-config", "--atleast-version=1.16", "gstreamer-1.0"],
        stdout=PIPE,
        stderr=PIPE,
    )
    return status == 0
def run_as_root(command, force=False):
    """Run *command* (a list of argv tokens), escalating via sudo if needed.

    The list is modified in place: ``sudo`` is prepended when the current
    user is not root, and ``-y`` is appended when *force* is set. Returns
    the command's exit status.
    """
    running_as_root = os.geteuid() == 0
    if not running_as_root:
        command.insert(0, 'sudo')
    if force:
        command.append('-y')
    return subprocess.call(command)
def install_linux_deps(context, pkgs_ubuntu, pkgs_fedora, pkgs_void, force):
    """Install missing system packages for the detected Linux distro.

    Picks the package list and package-manager command for Ubuntu/Debian
    (apt-get), CentOS/Fedora (dnf) or Void (xbps-install), runs the install
    as root when anything is missing, and returns True iff an install was
    attempted.
    """
    install = False
    pkgs = []
    if context.distro in ['Ubuntu', 'Debian GNU/Linux']:
        command = ['apt-get', 'install']
        pkgs = pkgs_ubuntu
        # dpkg -s exits non-zero when any queried package is not installed.
        if subprocess.call(['dpkg', '-s'] + pkgs, stdout=PIPE, stderr=PIPE) != 0:
            install = True
    elif context.distro in ['CentOS', 'CentOS Linux', 'Fedora']:
        installed_pkgs = str(subprocess.check_output(['rpm', '-qa'])).replace('\n', '|')
        pkgs = pkgs_fedora
        for p in pkgs:
            command = ['dnf', 'install']
            if "|{}".format(p) not in installed_pkgs:
                install = True
                break
    elif context.distro == 'void':
        installed_pkgs = str(subprocess.check_output(['xbps-query', '-l']))
        pkgs = pkgs_void
        for p in pkgs:
            command = ['xbps-install', '-A']
            # xbps-query lists installed packages as "ii <name>-<version>".
            if "ii {}-".format(p) not in installed_pkgs:
                install = force = True
                break

    if install:
        print("Installing missing dependencies...")
        run_as_root(command + pkgs, force)

    return install
def install_salt_dependencies(context, force):
    """Ensure the system packages Salt needs to build are present."""
    apt_pkgs = ['build-essential', 'libssl-dev', 'libffi-dev', 'python-dev']
    dnf_pkgs = ['gcc', 'libffi-devel', 'python-devel', 'openssl-devel']
    xbps_pkgs = ['gcc', 'libffi-devel', 'python-devel']
    installed = install_linux_deps(context, apt_pkgs, dnf_pkgs, xbps_pkgs, force)
    if not installed:
        print("Dependencies are already installed")
def gstreamer(context, force=False):
    """Build the bundled GStreamer via support/linux/gstreamer/gstreamer.sh.

    Returns True when a build was performed, False when the checkout is
    already in place (``gst/lib`` exists).
    """
    # NOTE(review): os.curdir is the literal '.', so this resolves relative
    # to the process CWD — presumably the repo root when run via mach;
    # confirm against callers.
    cur = os.curdir
    gstdir = os.path.join(cur, "support", "linux", "gstreamer")
    if not os.path.isdir(os.path.join(gstdir, "gst", "lib")):
        subprocess.check_call(["bash", "gstreamer.sh"], cwd=gstdir)
        return True
    return False
def bootstrap_gstreamer(context, force=False):
    """`mach bootstrap-gstreamer` entry point: ensure GStreamer is built."""
    built = gstreamer(context, force)
    if not built:
        print("gstreamer is already set up")
        return 0
def linux(context, force=False):
    """Install every system package Servo needs to build on Linux.

    Also builds the bundled GStreamer when the system copy is too old.
    Returns 0; prints a notice when nothing had to be installed.
    """
    # Please keep these in sync with the packages in README.md
    # (duplicate 'libunwind-dev' entry removed from pkgs_apt and duplicate
    # 'cmake' entry removed from pkgs_xbps)
    pkgs_apt = ['git', 'curl', 'autoconf', 'libx11-dev', 'libfreetype6-dev',
                'libgl1-mesa-dri', 'libglib2.0-dev', 'xorg-dev', 'gperf', 'g++',
                'build-essential', 'cmake', 'libssl-dev',
                'liblzma-dev', 'libxmu6', 'libxmu-dev',
                "libxcb-render0-dev", "libxcb-shape0-dev", "libxcb-xfixes0-dev",
                'libgles2-mesa-dev', 'libegl1-mesa-dev', 'libdbus-1-dev',
                'libharfbuzz-dev', 'ccache', 'clang', 'libunwind-dev',
                'libgstreamer1.0-dev', 'libgstreamer-plugins-base1.0-dev',
                'libgstreamer-plugins-bad1.0-dev', 'autoconf2.13',
                'llvm-dev']
    pkgs_dnf = ['libtool', 'gcc-c++', 'libXi-devel', 'freetype-devel',
                'libunwind-devel', 'mesa-libGL-devel', 'mesa-libEGL-devel',
                'glib2-devel', 'libX11-devel', 'libXrandr-devel', 'gperf',
                'fontconfig-devel', 'cabextract', 'ttmkfdir', 'expat-devel',
                'rpm-build', 'openssl-devel', 'cmake',
                'libXcursor-devel', 'libXmu-devel',
                'dbus-devel', 'ncurses-devel', 'harfbuzz-devel', 'ccache',
                'clang', 'clang-libs', 'llvm', 'autoconf213', 'python3-devel',
                'gstreamer1-devel', 'gstreamer1-plugins-base-devel',
                'gstreamer1-plugins-bad-free-devel']
    pkgs_xbps = ['libtool', 'gcc', 'libXi-devel', 'freetype-devel',
                 'libunwind-devel', 'MesaLib-devel', 'glib-devel', 'pkg-config',
                 'libX11-devel', 'libXrandr-devel', 'gperf', 'bzip2-devel',
                 'fontconfig-devel', 'cabextract', 'expat-devel', 'cmake',
                 'libXcursor-devel', 'libXmu-devel', 'dbus-devel',
                 'ncurses-devel', 'harfbuzz-devel', 'ccache', 'glu-devel',
                 'clang', 'gstreamer1-devel', 'autoconf213',
                 'gst-plugins-base1-devel', 'gst-plugins-bad1-devel']

    installed_something = install_linux_deps(context, pkgs_apt, pkgs_dnf,
                                             pkgs_xbps, force)

    if not check_gstreamer_lib():
        installed_something |= gstreamer(context, force)

    if not installed_something:
        print("Dependencies were already installed!")

    return 0
def salt(context, force=False):
    """Bootstrap build dependencies through a masterless Salt highstate.

    Installs Salt into the active virtualenv, generates a throwaway Salt
    minion config and pillar tree under ``context.sharedir``, then applies
    the ``servo-build-dependencies`` state (dry-run first unless *force*).
    Returns a shell-style exit code.
    """
    # Ensure Salt dependencies are installed
    install_salt_dependencies(context, force)
    # Ensure Salt is installed in the virtualenv
    # It's not installed globally because it's a large, non-required dependency,
    # and the installation fails on Windows
    print("Checking Salt installation...", end='')
    reqs_path = os.path.join(context.topdir, 'python', 'requirements-salt.txt')
    process = subprocess.Popen(
        ["pip", "install", "-q", "-I", "-r", reqs_path],
        stdout=PIPE,
        stderr=PIPE
    )
    # Bug fix: drain both pipes with communicate() instead of wait()-ing
    # first.  Calling wait() while stdout/stderr are PIPEs can deadlock
    # once a pipe buffer fills, and the output must be collected before
    # the process is reaped in any case.
    out, err = process.communicate()
    if process.returncode:
        print('failed to install Salt via pip:')
        print('Output: {}\nError: {}'.format(out, err))
        return 1
    print("done")

    salt_root = os.path.join(context.sharedir, 'salt')
    config_dir = os.path.join(salt_root, 'etc', 'salt')
    pillar_dir = os.path.join(config_dir, 'pillars')

    # In order to allow `mach bootstrap` to work from any CWD,
    # the `root_dir` must be an absolute path.
    # We place it under `context.sharedir` because
    # Salt caches data (e.g. gitfs files) in its `var` subdirectory.
    # Hence, dynamically generate the config with an appropriate `root_dir`
    # and serialize it as JSON (which is valid YAML).
    config = {
        'hash_type': 'sha384',
        'master': 'localhost',
        'root_dir': salt_root,
        'state_output': 'changes',
        'state_tabular': True,
    }
    if 'SERVO_SALTFS_ROOT' in os.environ:
        config.update({
            'fileserver_backend': ['roots'],
            'file_roots': {
                'base': [os.path.abspath(os.environ['SERVO_SALTFS_ROOT'])],
            },
        })
    else:
        config.update({
            'fileserver_backend': ['git'],
            'gitfs_env_whitelist': 'base',
            'gitfs_provider': 'gitpython',
            'gitfs_remotes': [
                'https://github.com/servo/saltfs.git',
            ],
        })

    if not os.path.exists(config_dir):
        os.makedirs(config_dir, mode=0o700)
    with open(os.path.join(config_dir, 'minion'), 'w') as config_file:
        config_file.write(json.dumps(config) + '\n')

    # Similarly, the pillar data is created dynamically
    # and temporarily serialized to disk.
    # This dynamism is not yet used, but will be in the future
    # to enable Android bootstrapping by using
    # context.sharedir as a location for Android packages.
    pillar = {
        'top.sls': {
            'base': {
                '*': ['bootstrap'],
            },
        },
        'bootstrap.sls': {
            'fully_managed': False,
        },
    }
    if os.path.exists(pillar_dir):
        shutil.rmtree(pillar_dir)
    os.makedirs(pillar_dir, mode=0o700)
    for filename in pillar:
        with open(os.path.join(pillar_dir, filename), 'w') as pillar_file:
            pillar_file.write(json.dumps(pillar[filename]) + '\n')

    cmd = [
        # sudo escapes from the venv, need to use full path
        find_executable('salt-call'),
        '--local',
        '--config-dir={}'.format(config_dir),
        '--pillar-root={}'.format(pillar_dir),
        'state.apply',
        'servo-build-dependencies',
    ]

    if not force:
        print('Running bootstrap in dry-run mode to show changes')
        # Because `test=True` mode runs each state individually without
        # considering how required/previous states affect the system,
        # it will often report states with requisites as failing due
        # to the requisites not actually being run,
        # even though these are spurious and will succeed during
        # the actual highstate.
        # Hence `--retcode-passthrough` is not helpful in dry-run mode,
        # so only detect failures of the actual salt-call binary itself.
        retcode = run_as_root(cmd + ['test=True'])
        if retcode != 0:
            print('Something went wrong while bootstrapping')
            return retcode

        proceed = input(
            'Proposed changes are above, proceed with bootstrap? [y/N]: '
        )
        if proceed.lower() not in ['y', 'yes']:
            return 0

    print('')
    print('Running Salt bootstrap')
    retcode = run_as_root(cmd + ['--retcode-passthrough'])
    if retcode == 0:
        print('Salt bootstrapping complete')
    else:
        print('Salt bootstrapping encountered errors')
    return retcode
def windows_msvc(context, force=False):
    '''Bootstrapper for MSVC building on Windows.

    Downloads and unpacks the pinned MSVC dependency archives from S3 into
    ``<sharedir>/msvc-dependencies/<package>/<version>``. Returns 0.
    '''
    deps_dir = os.path.join(context.sharedir, "msvc-dependencies")
    deps_url = "https://servo-deps-2.s3.amazonaws.com/msvc-deps/"

    def version(package):
        # Pinned version for *package*, from servo.packages.
        return packages.WINDOWS_MSVC[package]

    def package_dir(package):
        # Install prefix: <deps_dir>/<package>/<version>.
        return os.path.join(deps_dir, package, version(package))

    def check_cmake(version):
        # True when a cmake already on PATH is at least *version*.
        cmake_path = find_executable("cmake")
        if cmake_path:
            cmake = subprocess.Popen([cmake_path, "--version"], stdout=PIPE)
            cmake_version_output = six.ensure_str(cmake.stdout.read()).splitlines()[0]
            cmake_version = cmake_version_output.replace("cmake version ", "")
            if LooseVersion(cmake_version) >= LooseVersion(version):
                return True
        return False

    def prepare_file(zip_path, full_spec):
        # Download (if needed) and extract one dependency archive; a corrupt
        # archive is deleted and re-fetched once via the recursive call.
        if not os.path.isfile(zip_path):
            zip_url = "{}{}.zip".format(deps_url, urllib.parse.quote(full_spec))
            download_file(full_spec, zip_url, zip_path)

        print("Extracting {}...".format(full_spec), end='')
        try:
            extract(zip_path, deps_dir)
        except BadZipfile:
            print("\nError: %s.zip is not a valid zip file, redownload..." % full_spec)
            os.remove(zip_path)
            prepare_file(zip_path, full_spec)
        else:
            print("done")

    to_install = {}
    for package in packages.WINDOWS_MSVC:
        # Don't install CMake if it already exists in PATH
        if package == "cmake" and check_cmake(version("cmake")):
            continue

        if not os.path.isdir(package_dir(package)):
            to_install[package] = version(package)

    if not to_install:
        return 0

    print("Installing missing MSVC dependencies...")
    for package in to_install:
        full_spec = '{}-{}'.format(package, version(package))

        parent_dir = os.path.dirname(package_dir(package))
        if not os.path.isdir(parent_dir):
            os.makedirs(parent_dir)

        zip_path = package_dir(package) + ".zip"
        prepare_file(zip_path, full_spec)

        # The archive extracts to <deps_dir>/<package>-<version>; move it
        # to the canonical <deps_dir>/<package>/<version> layout.
        extracted_path = os.path.join(deps_dir, full_spec)
        os.rename(extracted_path, package_dir(package))

    return 0
# Bootstrap sub-commands available only on Linux hosts; any other value of
# `specific` falls back to the full linux() bootstrapper (see bootstrap()).
LINUX_SPECIFIC_BOOTSTRAPPERS = {
    "salt": salt,
    "gstreamer": bootstrap_gstreamer,
}
def get_linux_distribution():
    """Return a normalized ``(distribution, version)`` pair for this host.

    Ubuntu derivatives (Linux Mint, KDE neon, Pop!_OS, elementary) are
    mapped to the Ubuntu release they are based on. Unknown distributions
    or unsupported versions raise ``Exception``.
    """
    distrib, version, _ = distro.linux_distribution()
    distrib = six.ensure_str(distrib)
    version = six.ensure_str(version)

    def unsupported():
        raise Exception('unsupported version of %s: %s' % (distrib, version))

    def ubuntu_base(major_to_base):
        # Map the derivative's major version number to its Ubuntu base
        # release; deduplicates the identical Mint/Pop!_OS branches.
        major = version.split('.', 1)[0]
        if major not in major_to_base:
            unsupported()
        return 'Ubuntu', major_to_base[major]

    if distrib in ['LinuxMint', 'Linux Mint', 'KDE neon']:
        distrib, version = ubuntu_base({'20': '20.04',
                                        '19': '18.04',
                                        '18': '16.04'})
    elif distrib == 'Pop!_OS':
        distrib, version = ubuntu_base({'21': '21.04',
                                        '20': '20.04',
                                        '19': '18.04',
                                        '18': '16.04'})
    elif distrib.lower() == 'elementary':
        if version == '5.0':
            base_version = '18.04'
        elif version[0:3] == '0.4':
            base_version = '16.04'
        else:
            unsupported()
        distrib, version = 'Ubuntu', base_version
    elif distrib.lower() == 'ubuntu':
        if version > '21.10':
            unsupported()
    # Fixme: we should allow checked/supported versions only
    elif distrib.lower() not in [
        'centos',
        'centos linux',
        'debian gnu/linux',
        'fedora',
        'void',
        'nixos',
    ]:
        raise Exception('mach bootstrap does not support %s, please file a bug' % distrib)

    return distrib, version
def bootstrap(context, force=False, specific=None):
    '''Dispatches to the right bootstrapping function for the OS.

    Returns None on success/no-op, 1 when the OS is unsupported, or the
    chosen bootstrapper's exit code.
    '''
    bootstrapper = None

    if "windows-msvc" in host_triple():
        bootstrapper = windows_msvc
    elif "linux-gnu" in host_triple():
        distrib, version = get_linux_distribution()

        if distrib.lower() == 'nixos':
            print('NixOS does not need bootstrap, it will automatically enter a nix-shell')
            print('Just run ./mach build')
            print('')
            print('You will need to run a nix-shell if you are trying to run any of the built binaries')
            print('To enter the nix-shell manually use:')
            print(' $ nix-shell etc/shell.nix')
            return

        context.distro = distrib
        context.distro_version = version
        # `specific` selects a Linux-only sub-bootstrapper (salt/gstreamer);
        # anything else (including None) falls back to the full linux() run.
        bootstrapper = LINUX_SPECIFIC_BOOTSTRAPPERS.get(specific, linux)

    if bootstrapper is None:
        print('Bootstrap support is not yet available for your OS.')
        return 1

    return bootstrapper(context, force=force)
| CYBAI/servo | python/servo/bootstrap.py | Python | mpl-2.0 | 15,264 |
"""
How to create views
"""
import logging
from pkg_resources import resource_string
from jenkinsapi.jenkins import Jenkins
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
jenkins_url = "http://localhost:8080/"
jenkins = Jenkins(jenkins_url, lazy=True)
# Create ListView in main view
logger.info('Attempting to create new view')
test_view_name = 'SimpleListView'
# Views object appears as a dictionary of views
if test_view_name not in jenkins.views:
new_view = jenkins.views.create(test_view_name)
if new_view is None:
logger.error('View %s was not created', test_view_name)
else:
logger.info('View %s has been created: %s',
new_view.name, new_view.baseurl)
else:
logger.info('View %s already exists', test_view_name)
# No error is raised if view already exists
logger.info('Attempting to create view that already exists')
my_view = jenkins.views.create(test_view_name)
logger.info('Create job and assign it to a view')
job_name = 'foo_job2'
xml = resource_string('examples', 'addjob.xml')
my_job = jenkins.create_job(jobname=job_name, xml=xml)
# add_job supports two parameters: job_name and job object
# passing job object will remove verification calls to Jenkins
my_view.add_job(job_name, my_job)
assert len(my_view) == 1
logger.info('Attempting to delete view that already exists')
del jenkins.views[test_view_name]
if test_view_name in jenkins.views:
logger.error('View was not deleted')
else:
logger.info('View has been deleted')
# No error will be raised when attempting to remove non-existing view
logger.info('Attempting to delete view that does not exist')
del jenkins.views[test_view_name]
# Create CategorizedJobsView
config = '''
<org.jenkinsci.plugins.categorizedview.CategorizedJobsView>
<categorizationCriteria>
<org.jenkinsci.plugins.categorizedview.GroupingRule>
<groupRegex>.dev.</groupRegex>
<namingRule>Development</namingRule>
</org.jenkinsci.plugins.categorizedview.GroupingRule>
<org.jenkinsci.plugins.categorizedview.GroupingRule>
<groupRegex>.hml.</groupRegex>
<namingRule>Homologation</namingRule>
</org.jenkinsci.plugins.categorizedview.GroupingRule>
</categorizationCriteria>
</org.jenkinsci.plugins.categorizedview.CategorizedJobsView>
'''
view = jenkins.views.create('My categorized jobs view',
jenkins.views.CATEGORIZED_VIEW, config=config)
| salimfadhley/jenkinsapi | examples/how_to/create_views.py | Python | mit | 2,441 |
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.
import socket
import struct
# CAN frame packing/unpacking (see 'struct can_frame' in <linux/can.h>)
# "=IB3x8s": native byte order — u32 can_id, u8 can_dlc, 3 pad bytes,
# then the fixed 8-byte data field.
can_frame_fmt = "=IB3x8s"
can_frame_size = struct.calcsize(can_frame_fmt)
def build_can_frame(can_id, data):
    """Pack *can_id* and up to 8 *data* bytes into a raw struct can_frame."""
    payload = data.ljust(8, b'\x00')  # data field is a fixed 8 bytes
    return struct.pack(can_frame_fmt, can_id, len(data), payload)
def dissect_can_frame(frame):
    """Unpack a raw CAN frame into ``(can_id, can_dlc, data[:can_dlc])``."""
    can_id, can_dlc, payload = struct.unpack(can_frame_fmt, frame)
    return can_id, can_dlc, payload[:can_dlc]
# create a raw socket and bind it to the 'vcan0' interface
s = socket.socket(socket.AF_CAN, socket.SOCK_RAW, socket.CAN_RAW)
s.bind(('vcan0',))

# Echo loop: print and bounce back every frame received on vcan0.
while True:
    cf, addr = s.recvfrom(can_frame_size)
    print('Received: can_id=%x, can_dlc=%x, data=%s' % dissect_can_frame(cf))

    try:
        s.send(cf)
    except OSError:
        print('Error sending CAN frame')

# NOTE(review): everything below is unreachable — the `while True` loop
# above never terminates. Move this send before the loop if it is meant
# to run.
try:
    s.send(build_can_frame(0x01, b'\x01\x02\x03'))
except OSError:
    print('Error sending CAN frame')
| keeyanajones/Samples | pythonProject/NetworkPythonProject/src/socket.py | Python | mit | 1,161 |
#!/usr/bin/env python3
"""
Given the name of a provider from cfme_data and using credentials from
the credentials stash, call the corresponding action on that provider, along
with any additional action arguments.
See wrapanapi for documentation on the callable methods themselves.
Example usage:
scripts/providers.py providername stop_vm vm-name
Note that attempts to be clever will likely be successful, but fruitless.
For example, this will work but not do anyhting helpful:
scripts/providers.py providername __init__ username password
You can also specify keyword arguments, similarly like the argparse works:
scripts/providers.py somevsphere do_action 1 2 --foo bar
It expects pairs in format ``--key value``. If you fail to provide such formatted arguments, an
error will happen.
"""
import argparse
import os
import sys
from cfme.utils import iterate_pairs
from cfme.utils import process_shell_output
from cfme.utils.providers import get_mgmt
# Make sure the parent dir is on the path before importing get_mgmt
# NOTE(review): the `cfme` imports above already executed before this
# insert; if they depend on this path tweak, this block must be moved
# above them — confirm how the script is normally invoked.
cfme_tests_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, cfme_tests_path)
def main():
    """Parse CLI arguments and dispatch *action* to the named provider.

    Returns a shell-style exit code: the value derived from the action's
    output on success, or 1 when the call raised.
    """
    parser = argparse.ArgumentParser(epilog=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('provider_name',
        help='provider name in cfme_data')
    parser.add_argument('action',
        help='action to take (list_vm, stop_vm, delete_vm, etc.)')
    parser.add_argument('action_args', nargs='*',
        help='foo')

    args, kw_argument_list = parser.parse_known_args()
    kwargs = {}
    # Trailing `--key value` pairs become keyword arguments; values are
    # coerced to int, then float, otherwise left as strings.
    for key, value in iterate_pairs(kw_argument_list):
        if not key.startswith('--'):
            raise Exception('Wrong kwargs specified!')
        key = key[2:]
        try:
            value = int(value)
        except ValueError:
            try:
                value = float(value)
            except ValueError:
                pass
        kwargs[key] = value

    try:
        result = call_provider(args.provider_name, args.action, *args.action_args, **kwargs)
        # Renamed from `exit` to avoid shadowing the builtin exit().
        exit_code, output = process_shell_output(result)
    except Exception as e:
        exit_code = 1
        exc_type = type(e).__name__
        if str(e):
            sys.stderr.write('{}: {}\n'.format(exc_type, str(e)))
        else:
            sys.stderr.write('{}\n'.format(exc_type))
    else:
        if output is not None:
            print(output)

    return exit_code
def call_provider(provider_name, action, *args, **kwargs):
    """Look up *action* on the named provider's mgmt object and invoke it.

    Raises Exception when the provider has no such method. This could
    possibly be generalized for other CLI tools.
    """
    provider = get_mgmt(provider_name)
    try:
        bound_method = getattr(provider, action)
    except AttributeError:
        raise Exception('Action {} not found'.format(repr(action)))
    return bound_method(*args, **kwargs)
if __name__ == '__main__':
sys.exit(main())
| Yadnyawalkya/integration_tests | scripts/providers.py | Python | gpl-2.0 | 2,926 |
"""
Core plugin.
"""
import sys
import time
from itertools import cycle
import asks
import asyncqlio
import contextlib
import curio
import curious
import git
import matplotlib.pyplot as plt
import numpy as np
import pkg_resources
import platform
import psutil
import tabulate
import traceback
from asks.response_objects import Response
from curio import subprocess
from curio.thread import spawn_thread
from curious import Channel, Embed, EventContext, event
from curious.commands import Plugin, command
from curious.commands.context import Context
from curious.commands.decorators import ratelimit
from curious.commands.ratelimit import BucketNamer
from curious.exc import HTTPException, PermissionsError
from io import BytesIO, StringIO
from jokusoramame.bot import Jokusoramame
from jokusoramame.utils import display_time, is_owner, rgbize
class Core(Plugin):
"""
Joku v2 core plugin.
"""
@command()
@is_owner()
async def gay(self, ctx: Context):
await ctx.channel.messages.send('BIG GAY ' * 10)
@event("channel_create")
async def first(self, ctx: EventContext, channel: Channel):
if channel.guild_id is None:
return
try:
await channel.messages.send("first")
except PermissionsError: # clobber
pass
@command()
async def invite(self, ctx: Context):
"""
Gets an invite for this bot.
"""
await ctx.channel.messages.send(f"<{ctx.bot.invite_url}>")
@command()
async def ping(self, ctx: Context):
"""
Ping!
"""
gw_latency = "{:.2f}".format(
ctx.bot.gateways[ctx.guild.shard_id].heartbeat_stats.gw_time * 1000
)
fmt = f":ping_pong: Ping! | Gateway latency: {gw_latency}ms"
before = time.monotonic()
initial = await ctx.channel.messages.send(fmt)
after = time.monotonic()
fmt = fmt + f" | HTTP latency: {(after - before) * 1000:.2f}ms"
await initial.edit(fmt)
@command()
async def pong(self, ctx: Context, *, location: str = "8.8.8.8"):
"""
Pong!
"""
async with ctx.channel.typing:
words = location.split(" ")
word = words[0]
if word.startswith("-"):
await ctx.channel.messages.send("No")
return
command = "ping {} -D -s 16 -i 0.2 -c 4".format(words[0])
try:
proc = await subprocess.run(command.split(),
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except subprocess.CalledProcessError as e:
result = e.stderr.decode()
else:
result = proc.stdout.decode()
result += "\n" + proc.stderr.decode()
await ctx.channel.messages.send(f"```joku@discord $ {command}\n{result}```")
@command()
async def uptime(self, ctx: Context):
"""
Shows the bot's uptime.
"""
seconds_booted = int(time.time() - psutil.Process().create_time())
uptime_str = display_time(seconds_booted)
await ctx.channel.messages.send(f"{uptime_str} (total: {int(seconds_booted)}s)")
@command()
@is_owner()
async def eval(self, ctx: Context, *, code: str):
"""
Evaluates some code.
"""
code = code.lstrip("`").rstrip("`")
lines = code.split("\n")
lines = [" " + i for i in lines]
lines = '\n'.join(lines)
_no_return = object()
f_code = f"async def _():\n{lines}\n return _no_return"
stdout = StringIO()
try:
namespace = {
"ctx": ctx,
"message": ctx.message,
"guild": ctx.message.guild,
"channel": ctx.message.channel,
"author": ctx.message.author,
"bot": ctx.bot,
"_no_return": _no_return,
**sys.modules
}
exec(f_code, namespace, namespace)
func = namespace["_"]
with contextlib.redirect_stdout(stdout):
result = await func()
except Exception as e:
result = ''.join(traceback.format_exception(None, e, e.__traceback__))
finally:
stdout.seek(0)
if result is _no_return:
result = "(Eval returned nothing)"
fmt = f"```py\n{stdout.read()}\n{result}\n```"
await ctx.channel.messages.send(fmt)
@command()
@is_owner()
async def sql(self, ctx: Context, *, sql: str):
"""
Executes some SQL.
"""
before = time.monotonic()
try:
sess = ctx.bot.db.get_session()
async with sess:
cursor = await sess.cursor(sql)
rows = await cursor.flatten()
except Exception as e:
await ctx.channel.messages.send(f"`{str(e)}`")
return
# get timings of the runtime
after = time.monotonic()
taken = after - before
# TODO: Pagination
if not rows:
fmt = "```\nNo rows returned.\n\n"
else:
headers = rows[0].keys()
values = [row.values() for row in rows]
result = tabulate.tabulate(values, headers, tablefmt="orgtbl")
fmt = f"```\n{result}\n\n"
fmt += f"Query returned in {taken:.3f}s```"
await ctx.channel.messages.send(fmt)
@command()
@is_owner()
async def changename(self, ctx: Context, *, name: str):
"""
Changes the name of the bot.
"""
await ctx.bot.user.edit(username=name)
await ctx.channel.messages.send(":heavy_check_mark: Changed name.")
@command()
@is_owner()
async def changeavatar(self, ctx: Context, *, link: str):
"""
Changes the name of the bot.
"""
sess = asks.Session()
resp: Response = await sess.get(link)
if resp.status_code != 200:
await ctx.channel.messages.send(f":x: Failed to download avatar. "
f"(code: {resp.status_code})")
return
data = resp.raw
try:
await ctx.bot.user.edit(avatar=data)
except HTTPException:
await ctx.channel.messages.send(":x: Failed to edit avatar.")
return
await ctx.channel.messages.send(":heavy_check_mark: Changed avatar.")
@command()
async def info(self, ctx: Context):
"""
Shows some quick info about the bot.
"""
repo = git.Repo()
curr_branch = repo.active_branch
commits = list(repo.iter_commits(curr_branch, max_count=3))
memory_usage = psutil.Process().memory_full_info().uss / 1024 ** 2
d = "**Git Log:**\n"
for commit in commits:
d += "[`{}`](https://github.com/SunDwarf/Jokusoramame/commit/{}) {}\n".format(
commit.hexsha[len(commit.hexsha) - 6:len(commit.hexsha)],
commit.hexsha,
commit.message.split("\n")[0]
)
d += "\n[Icon credit](https://www.pixiv.net/member_illust.php?mode=medium&illust_id" \
"=69621921)"
em = Embed()
em.title = "Jokusoramame v2! New! Improved!"
em.description = d
em.author.icon_url = ctx.bot.user.static_avatar_url
em.author.name = ctx.bot.user.username
em.colour = ctx.guild.me.colour if ctx.guild else 0x000000
em.url = "https://www.youtube.com/watch?v=hgcLyZ3QYo8"
em.add_field(name="Python", value=platform.python_version())
em.add_field(name="curious", value=curious.__version__)
em.add_field(name="asyncqlio", value=asyncqlio.__version__)
em.add_field(name="curio", value=curio.__version__)
em.add_field(name="asks", value=pkg_resources.get_distribution("asks").version)
em.add_field(name="asyncpg", value=pkg_resources.get_distribution("asyncpg").version)
em.add_field(name="Memory usage", value=f"{memory_usage:.2f} MiB")
em.add_field(name="Servers", value=str(len(ctx.bot.guilds)))
em.add_field(name="Shards", value=str(ctx.event_context.shard_count))
em.set_footer(text=f"香港快递 | Git branch: {curr_branch.name}")
await ctx.channel.messages.send(embed=em)
@command()
@ratelimit(limit=1, time=60, bucket_namer=BucketNamer.GLOBAL)
async def stats(self, ctx: Context):
"""
Shows some bot stats.
"""
palette = [0xabcdef, 0xbcdefa, 0xcdefab, 0xdefabc, 0xefabcd, 0xfabcde]
palette = cycle(palette)
async with ctx.channel.typing, spawn_thread():
with ctx.bot._plot_lock:
names, values = [], []
for name, value in ctx.bot.events_handled.most_common():
names.append(name)
values.append(value)
colours = rgbize([next(palette) for _ in names])
y_pos = np.arange(len(names))
plt.bar(y_pos, values, align='center', color=colours)
plt.xticks(y_pos, names, rotation=90)
plt.ylabel("Count")
plt.xlabel("Event")
plt.tight_layout()
plt.title("Event stats")
buf = BytesIO()
plt.savefig(buf, format='png')
plt.cla()
plt.clf()
buf.seek(0)
data = buf.read()
await ctx.channel.messages.upload(data, filename="stats.png")
@command()
@is_owner()
async def reload(self, ctx: Context, *, module_name: str):
    """
    Reloads a plugin.

    Unloads every plugin registered from ``module_name``, then loads the
    module again so code changes take effect without restarting the bot.
    """
    bot: Jokusoramame = ctx.bot
    manager = bot.manager
    await manager.unload_plugins_from(module_name)
    await manager.load_plugins_from(module_name)
    await ctx.channel.messages.send(f":heavy_check_mark: Reloaded {module_name}.")
@command(name="load")
@is_owner()
async def _load(self, ctx: Context, *, module_name: str):
    """
    Loads a plugin.

    Registers all plugins defined in ``module_name`` with the bot's
    plugin manager and confirms in the invoking channel.
    """
    bot: Jokusoramame = ctx.bot
    manager = bot.manager
    await manager.load_plugins_from(module_name)
    await ctx.channel.messages.send(f":heavy_check_mark: Loaded {module_name}.")
@command()
@is_owner()
async def update(self, ctx):
    """
    Updates the bot from git.

    Runs ``git pull`` in the working directory and echoes its combined
    output back to the invoking channel inside a code block.
    """
    try:
        # NOTE(review): `subprocess.run` is awaited, so this is presumably
        # the curio subprocess wrapper rather than the stdlib module —
        # confirm against the file's imports.
        proc = await subprocess.run('git pull'.split(),
                                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except subprocess.CalledProcessError as e:
        # git exited non-zero: report what it printed to stderr.
        result = e.stderr.decode()
    else:
        # Success: show stdout plus stderr (git writes progress to stderr).
        result = proc.stdout.decode()
        result += "\n" + proc.stderr.decode()
    await ctx.channel.messages.send(f"```\n{result}```")
| SunDwarf/Jokusoramame | jokusoramame/plugins/core.py | Python | gpl-3.0 | 10,883 |
'''
Created on June 09, 2014
@author: sscepano
'''
# This one serves for the starting point
import logging
import traceback
import multiprocessing
#####################################################
# imports distributor
#####################################################
from distribute import task_manager as D
#####################################################
_log = logging.getLogger(__name__)
def test():
    """Smoke test: print how many CPU cores multiprocessing can see."""
    print 'cpu_count() = %d\n' % multiprocessing.cpu_count()
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO, format='%(name)s: %(levelname)-8s %(message)s')
    test()
    # data1 will be the read in from all 10 parallel processes
    # data2 will be processed & arranged from those
    data1 = None
    data2 = None
    # Interactive driver: each <Enter> re-imports the distributor module
    # (so code edits made between cycles are picked up) and runs one
    # distribution cycle, carrying data1/data2 over as a cache.
    while True:
        raw_input("Press enter to start a process cycle:\n")
        try:
            # Python 2 builtin reload(): re-executes the task_manager module.
            reload(D)
        except NameError:
            _log.error("Could not reload the module.")
        try:
            # THIS THE FUNCTION YOU ARE TESTING
            ####################################################
            # this is for distributing the task
            ####################################################
            print "Distribute task started."
            # homework
            # data1, data2 = D.distribute_task(data1, data2)
            # commuting
            data1, data2 = D.distribute_task_commuting(data1, data2)
            print "Distribute task finished."
            ####################################################
        except Exception as e:
            # Keep the loop alive on failure; log the full traceback.
            _log.error("Caught exception from the process\n%s\n%s" % (e, traceback.format_exc()))
_log.info("Cycle ready.") | sanja7s/CI_urban_rural | CI_urban_rural/__main__.py | Python | mit | 1,723 |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
# Standard library imports
from __future__ import print_function
import os
import os.path as osp
# Third party imports
from qtpy.QtCore import Signal, QEvent, QObject, QRegExp, QSize, Qt
from qtpy.QtGui import (QIcon, QRegExpValidator, QTextCursor)
from qtpy.QtWidgets import (QDialog, QHBoxLayout, QLabel, QLineEdit,
QListWidget, QListWidgetItem, QVBoxLayout)
# Local imports
from spyder.config.base import _
from spyder.py3compat import iteritems, to_text_string
from spyder.utils import icon_manager as ima
from spyder.utils.stringmatching import get_search_scores
from spyder.widgets.helperwidgets import HelperToolButton, HTMLDelegate
# --- Python Outline explorer helpers
def process_python_symbol_data(oedata):
    """Return a sorted list of (line number, definition name, fold level,
    token) tuples for every class/function entry in *oedata*.

    Entries that are falsy, the special 'found_cell_separators' key and
    non-class/function entries are skipped.
    """
    return sorted(
        (key, entry.def_name, entry.fold_level, entry.get_token())
        for key, entry in oedata.items()
        if entry and key != 'found_cell_separators'
        and entry.is_class_or_function()
    )
def get_python_symbol_icons(oedata):
    """Return a list of icons for oedata of a python file.

    Icons are parallel to the sorted symbol list produced by
    process_python_symbol_data: functions, classes, methods and
    private/dunder methods each get their own icon.
    """
    class_icon = ima.icon('class')
    method_icon = ima.icon('method')
    function_icon = ima.icon('function')
    private_icon = ima.icon('private1')
    super_private_icon = ima.icon('private2')

    symbols = process_python_symbol_data(oedata)

    # line - 1, name, fold level
    fold_levels = sorted(list(set([s[2] for s in symbols])))
    parents = [None]*len(symbols)
    icons = [None]*len(symbols)
    indexes = []

    # For each fold level (shallowest first), mark each symbol at that
    # level as a potential parent and assign the last-seen parent to
    # deeper symbols not yet claimed.
    # NOTE(review): this relies on `symbols` being line-ordered so that
    # the "last parent seen" is the enclosing one — confirm.
    parent = None
    for level in fold_levels:
        for index, item in enumerate(symbols):
            line, name, fold_level, token = item
            if index in indexes:
                continue

            if fold_level == level:
                indexes.append(index)
                parent = item
            else:
                parents[index] = parent

    # Second pass: pick an icon from the symbol's own token and, when it
    # is nested directly under a class, refine def -> method/private.
    for index, item in enumerate(symbols):
        parent = parents[index]

        if item[-1] == 'def':
            icons[index] = function_icon
        elif item[-1] == 'class':
            icons[index] = class_icon
        else:
            # Unknown token: fall back to an empty icon.
            icons[index] = QIcon()

        if parent is not None:
            if parent[-1] == 'class':
                # '__name' (dunder-prefixed) outranks the single-underscore
                # check because startswith('__') implies startswith('_').
                if item[-1] == 'def' and item[1].startswith('__'):
                    icons[index] = super_private_icon
                elif item[-1] == 'def' and item[1].startswith('_'):
                    icons[index] = private_icon
                else:
                    icons[index] = method_icon
    return icons
def shorten_paths(path_list, is_unsaved):
    """
    Takes a list of paths and tries to "intelligently" shorten them all. The
    aim is to make it clear to the user where the paths differ, as that is
    likely what they care about. Note that this operates on a list of paths
    not on individual paths.

    If the path ends in an actual file name, it will be trimmed off.

    *is_unsaved* is a parallel list of booleans; unsaved entries are
    replaced by a placeholder label instead of a shortened path.
    """
    # TODO: at the end, if the path is too long, should do a more dumb kind of
    # shortening, but not completely dumb.

    # Convert the path strings to a list of tokens and start building the
    # new_path using the drive
    path_list = path_list[:]  # Make a local copy
    new_path_list = []
    for ii, (path, is_unsav) in enumerate(zip(path_list, is_unsaved)):
        if is_unsav:
            new_path_list.append(_('unsaved file'))
            path_list[ii] = None
        else:
            # Keep only the directory part, split it into components.
            drive, path = osp.splitdrive(osp.dirname(path))
            new_path_list.append(drive + osp.sep)
            path_list[ii] = [part for part in path.split(osp.sep) if part]

    def recurse_level(level_idx):
        # level_idx maps result index -> remaining path tokens at this level.
        sep = os.sep

        # If toks are all empty we need not have recursed here
        if not any(level_idx.values()):
            return

        # Firstly, find the longest common prefix for all in the level
        # s = len of longest common prefix
        sample_toks = list(level_idx.values())[0]
        if not sample_toks:
            s = 0
        else:
            for s, sample_val in enumerate(sample_toks):
                if not all(len(toks) > s and toks[s] == sample_val
                           for toks in level_idx.values()):
                    break

        # Shorten longest common prefix
        if s == 0:
            short_form = ''
        else:
            if s == 1:
                short_form = sample_toks[0]
            elif s == 2:
                short_form = sample_toks[0] + sep + sample_toks[1]
            else:
                # Prefixes longer than two components are elided to
                # "...<sep><last shared component>".
                short_form = "..." + sep + sample_toks[s-1]
            for idx in level_idx:
                new_path_list[idx] += short_form + sep
                level_idx[idx] = level_idx[idx][s:]

        # Group the remaining bit after the common prefix, shorten, and recurse
        while level_idx:
            k, group = 0, level_idx  # k is length of the group's common prefix
            while True:
                # Abort if we've gone beyond end of one or more in the group
                prospective_group = {idx: toks for idx, toks
                                     in group.items() if len(toks) == k}
                if prospective_group:
                    if k == 0:  # we spit out the group with no suffix
                        group = prospective_group
                    break
                # Only keep going if all n still match on the kth token
                # NOTE(review): `_` here shadows the module-level translation
                # function inside this closure; harmless but worth renaming.
                _, sample_toks = next(iteritems(group))
                prospective_group = {idx: toks for idx, toks
                                     in group.items()
                                     if toks[k] == sample_toks[k]}
                if len(prospective_group) == len(group) or k == 0:
                    group = prospective_group
                    k += 1
                else:
                    break
            _, sample_toks = next(iteritems(group))
            if k == 0:
                short_form = ''
            elif k == 1:
                short_form = sample_toks[0]
            elif k == 2:
                short_form = sample_toks[0] + sep + sample_toks[1]
            else:  # k > 2
                short_form = sample_toks[0] + "..." + sep + sample_toks[k-1]
            for idx in group.keys():
                new_path_list[idx] += short_form + (sep if k > 0 else '')
                del level_idx[idx]
            # Recurse on what remains after the group's shared prefix.
            recurse_level({idx: toks[k:] for idx, toks in group.items()})

    # Start the recursion over all saved (tokenised) paths.
    recurse_level({i: pl for i, pl in enumerate(path_list) if pl})

    return [path.rstrip(os.sep) for path in new_path_list]
class KeyPressFilter(QObject):
    """
    Event filter emitting a signal whenever the up or down arrow key is
    pressed on the watched widget; use with `installEventFilter`.
    """
    UP, DOWN = [-1, 1]  # Step constants

    sig_up_key_pressed = Signal()
    sig_down_key_pressed = Signal()

    def eventFilter(self, src, e):
        """Emit the matching signal for Up/Down presses, then defer."""
        if e.type() != QEvent.KeyPress:
            return super(KeyPressFilter, self).eventFilter(src, e)
        pressed = e.key()
        if pressed == Qt.Key_Up:
            self.sig_up_key_pressed.emit()
        elif pressed == Qt.Key_Down:
            self.sig_down_key_pressed.emit()
        return super(KeyPressFilter, self).eventFilter(src, e)
class FileSwitcher(QDialog):
    """A Sublime-like file switcher.

    Shows a filterable list of open files (or, after '@', of symbols in
    the current file) and emits signals for the editor stack to act on.
    """
    # Emitted with the tab index of the file to go to / close.
    sig_goto_file = Signal(int)
    sig_close_file = Signal(int)

    # Constants that define the mode in which the list widget is working
    # FILE_MODE is for a list of files, SYMBOL_MODE if for a list of symbols
    # in a given file when using the '@' symbol.
    FILE_MODE, SYMBOL_MODE = [1, 2]
def __init__(self, parent, tabs, data):
    """
    Build the switcher dialog.

    *parent* is the editor widget the dialog is centred on, *tabs* the
    editor stack's tab widget, and *data* the per-tab editor metadata
    (objects exposing at least a ``filename`` attribute).
    """
    QDialog.__init__(self, parent)

    # Variables
    self.tabs = tabs                      # Editor stack tabs
    self.data = data                      # Editor data
    self.mode = self.FILE_MODE            # By default start in this mode
    self.initial_cursors = None           # {fullpath: QCursor}
    self.initial_path = None              # Fullpath of initial active editor
    self.initial_editor = None            # Initial active editor
    self.line_number = None               # Selected line number in filer
    self.is_visible = False               # Is the switcher visible?

    help_text = _("Press <b>Enter</b> to switch files or <b>Esc</b> to "
                  "cancel.<br><br>Type to filter filenames.<br><br>"
                  "Use <b>:number</b> to go to a line, e.g. "
                  "<b><code>main:42</code></b><br>"
                  "Use <b>@symbol_text</b> to go to a symbol, e.g. "
                  "<b><code>@init</code></b>"
                  "<br><br> Press <b>Ctrl+W</b> to close current tab.<br>")

    # Either allow searching for a line number or a symbol but not both
    regex = QRegExp("([A-Za-z0-9_]{0,100}@[A-Za-z0-9_]{0,100})|" +
                    "([A-Za-z0-9_]{0,100}:{0,1}[0-9]{0,100})")

    # Widgets
    self.edit = QLineEdit(self)
    self.help = HelperToolButton()
    self.list = QListWidget(self)
    self.filter = KeyPressFilter()
    regex_validator = QRegExpValidator(regex, self.edit)

    # Widgets setup
    self.setWindowFlags(Qt.Popup | Qt.FramelessWindowHint)
    self.setWindowOpacity(0.95)
    # Route up/down arrows from the line edit to list navigation.
    self.edit.installEventFilter(self.filter)
    self.edit.setValidator(regex_validator)
    self.help.setToolTip(help_text)
    self.list.setItemDelegate(HTMLDelegate(self))

    # Layout
    edit_layout = QHBoxLayout()
    edit_layout.addWidget(self.edit)
    edit_layout.addWidget(self.help)
    layout = QVBoxLayout()
    layout.addLayout(edit_layout)
    layout.addWidget(self.list)
    self.setLayout(layout)

    # Signals
    self.rejected.connect(self.restore_initial_state)
    self.filter.sig_up_key_pressed.connect(self.previous_row)
    self.filter.sig_down_key_pressed.connect(self.next_row)
    self.edit.returnPressed.connect(self.accept)
    self.edit.textChanged.connect(self.setup)
    self.list.itemSelectionChanged.connect(self.item_selection_changed)
    self.list.clicked.connect(self.edit.setFocus)

    # Setup
    self.save_initial_state()
    self.set_dialog_position()
    self.setup()
# --- Properties
@property
def editors(self):
    # All editor widgets held by the tab widget, in tab order.
    return [self.tabs.widget(index) for index in range(self.tabs.count())]

@property
def line_count(self):
    # Line count of each open editor (parallel to `editors`).
    return [editor.get_line_count() for editor in self.editors]

@property
def save_status(self):
    # True for tabs whose file was newly created (not yet on disk).
    return [getattr(td, 'newly_created', False) for td in self.data]

@property
def paths(self):
    # Full path of each open file (parallel to `data`).
    return [getattr(td, 'filename', None) for td in self.data]

@property
def filenames(self):
    # Base name of each open file.
    return [os.path.basename(getattr(td, 'filename',
                                     None)) for td in self.data]

@property
def current_path(self):
    # Full path of the currently active editor.
    return self.paths_by_editor[self.get_editor()]

@property
def paths_by_editor(self):
    # Mapping: editor widget -> full path.
    return dict(zip(self.editors, self.paths))

@property
def editors_by_path(self):
    # Mapping: full path -> editor widget.
    return dict(zip(self.paths, self.editors))

@property
def filter_text(self):
    """Get the normalized (lowercase) content of the filter text."""
    return to_text_string(self.edit.text()).lower()

def set_search_text(self, _str):
    # Programmatically set the filter text; triggers `setup` via the
    # textChanged signal connected in __init__.
    self.edit.setText(_str)
def save_initial_state(self):
    """Saves initial cursors and initial active editor.

    Taken when the switcher opens, so Esc can restore every editor's
    cursor and re-activate the originally focused file.
    """
    paths = self.paths
    self.initial_editor = self.get_editor()
    self.initial_cursors = {}
    for i, editor in enumerate(self.editors):
        if editor is self.initial_editor:
            # Remember which path belongs to the active editor.
            self.initial_path = paths[i]
        # Snapshot each editor's text cursor, keyed by its path.
        self.initial_cursors[paths[i]] = editor.textCursor()
def accept(self):
    """Confirm the current selection and dismiss the dialog."""
    self.is_visible = False
    QDialog.accept(self)
    # Drop list contents so the next invocation starts from scratch.
    self.list.clear()
def restore_initial_state(self):
    """Restores initial cursors and initial active editor.

    Connected to `rejected`, i.e. runs when the dialog is cancelled.
    """
    self.list.clear()
    self.is_visible = False
    editors = self.editors_by_path
    # Put every still-open editor's cursor back where it was when the
    # switcher opened.
    for path in self.initial_cursors:
        cursor = self.initial_cursors[path]
        if path in editors:
            self.set_editor_cursor(editors[path], cursor)
    # Re-activate the editor that had focus before the switcher opened.
    if self.initial_editor in self.paths_by_editor:
        index = self.paths.index(self.initial_path)
        self.sig_goto_file.emit(index)
def set_dialog_position(self):
    """Positions the file switcher dialog in the center of the editor."""
    parent = self.parent()
    geo = parent.geometry()
    width = self.list.width()  # This has been set in setup

    # Horizontal centre relative to the immediate parent...
    left = parent.geometry().width()/2 - width/2
    top = 0
    # ...then walk up the widget tree accumulating each ancestor's
    # offset to obtain global coordinates.
    while parent:
        geo = parent.geometry()
        top += geo.top()
        left += geo.left()
        parent = parent.parent()

    # Note: the +1 pixel on the top makes it look better
    self.move(left, top + self.tabs.tabBar().geometry().height() + 1)
def fix_size(self, content, extra=50):
    """
    Adjusts the width and height of the file switcher,
    based on its content.

    *content* is the list of (possibly HTML) strings shown in the list;
    *extra* is additional horizontal padding in pixels.
    """
    # Update size of dialog based on longest shortened path
    strings = []
    if content:
        for rich_text in content:
            # Strip HTML markup by round-tripping through a QLabel in
            # plain-text mode; `fm` ends up as that label's font metrics.
            label = QLabel(rich_text)
            label.setTextFormat(Qt.PlainText)
            strings.append(label.text())
            fm = label.fontMetrics()

        # Max width: widest entry, scaled up for breathing room.
        max_width = max([fm.width(s) * 1.3 for s in strings])
        self.list.setMinimumWidth(max_width + extra)

        # Max height: show at most 8 entries before scrolling.
        if len(strings) < 8:
            max_entries = len(strings)
        else:
            max_entries = 8
        max_height = fm.height() * max_entries * 2.5
        self.list.setMinimumHeight(max_height)

        # Set position according to size
        self.set_dialog_position()
# --- Helper methods: List widget
def count(self):
    """Gets the item count in the list widget."""
    return self.list.count()

def current_row(self):
    """Returns the current selected row in the list widget."""
    return self.list.currentRow()

def set_current_row(self, row):
    """Sets the current selected row in the list widget."""
    return self.list.setCurrentRow(row)

def select_row(self, steps):
    """Select row in list widget based on a number of steps with direction.

    Steps can be positive (next rows) or negative (previous rows).
    """
    row = self.current_row() + steps
    # Ignore moves that would fall off either end of the list.
    if 0 <= row < self.count():
        self.set_current_row(row)

def previous_row(self):
    """Select previous row in list widget."""
    self.select_row(-1)

def next_row(self):
    """Select next row in list widget."""
    self.select_row(+1)
# --- Helper methods: Editor
def get_editor(self, index=None, path=None):
    """Get editor by index or path.

    If no path or index is specified the current active editor is
    returned. *index* is a tab index in the editor stack; *path* is the
    full filename of an open file (returns None for an unknown path).
    """
    if index is not None:
        # `is not None` so tab index 0 is honoured; a bare truthiness
        # test would fall through for the first tab.
        return self.tabs.widget(index)
    elif path is not None:
        # Bug fix: the original returned `self.tabs.widget(index)` here,
        # i.e. used the (None) index instead of resolving by path.
        return self.editors_by_path.get(path)
    else:
        return self.parent().get_current_editor()
def set_editor_cursor(self, editor, cursor):
    """Set the cursor of an editor from a previously saved cursor.

    Rebuilds the saved position/anchor (preserving any selection) on a
    cursor bound to *editor*'s own document, then applies it.
    """
    pos = cursor.position()
    anchor = cursor.anchor()
    # Create the replacement cursor on the target document so the saved
    # positions are meaningful there (a default QTextCursor is null).
    new_cursor = QTextCursor(editor.document())
    if pos == anchor:
        # No selection: just place the cursor.
        new_cursor.setPosition(pos)
    else:
        # Recreate the selection: move to the anchor, then extend to the
        # position while keeping the anchor.
        new_cursor.setPosition(anchor)
        new_cursor.setPosition(pos, QTextCursor.KeepAnchor)
    # Bug fix: apply the rebuilt cursor. The original called
    # `movePosition(pos)` (which expects a MoveOperation enum, not an
    # integer position) and then applied the stale `cursor` argument,
    # discarding `new_cursor` entirely.
    editor.setTextCursor(new_cursor)
def goto_line(self, line_number):
    """Jump to *line_number* in the active editor, clamping the target
    to the document's last line. Falsy input is a no-op."""
    if not line_number:
        return
    editor = self.get_editor()
    target = min(int(line_number), editor.get_line_count())
    editor.go_to_line(target)
# --- Helper methods: Outline explorer
def get_symbol_list(self):
    """Return outline-explorer data for the active editor.

    Falls back to an empty dict when no editor is available or the
    editor does not expose outline data.
    """
    try:
        return self.get_editor().get_outlineexplorer_data()
    except AttributeError:
        return {}
# --- Handlers
def item_selection_changed(self):
    """List widget item selection change handler."""
    row = self.current_row()
    if self.count() and row >= 0:
        if self.mode == self.FILE_MODE:
            try:
                # Map the filtered row back to the tab index and switch
                # to that file, honouring any ":line" suffix typed.
                stack_index = self.paths.index(self.filtered_path[row])
                self.sig_goto_file.emit(stack_index)
                self.goto_line(self.line_number)
                self.edit.setFocus()
            except ValueError:
                # Path no longer present in the stack; nothing to do.
                pass
        else:
            # Symbol mode: jump to the selected symbol's line.
            line_number = self.filtered_symbol_lines[row]
            self.goto_line(line_number)
def setup_file_list(self, filter_text, current_path):
    """Setup list widget content for file list display.

    *filter_text* is the (lowercased) text typed by the user, possibly
    with a ":line" suffix; *current_path* is the active file's path.
    """
    short_paths = shorten_paths(self.paths, self.save_status)
    paths = self.paths
    results = []
    trying_for_line_number = ':' in filter_text

    # Get optional line number
    if trying_for_line_number:
        filter_text, line_number = filter_text.split(':')
    else:
        line_number = None

    # Get all available filenames and get the scores for "fuzzy" matching
    scores = get_search_scores(filter_text, self.filenames,
                               template="<b>{0}</b>")

    # Build the text that will appear on the list widget
    for index, score in enumerate(scores):
        text, rich_text, score_value = score
        # A score of -1 means the filename did not match the filter.
        if score_value != -1:
            text_item = '<big>' + rich_text.replace('&', '') + '</big>'
            if trying_for_line_number:
                text_item += " [{0:} {1:}]".format(self.line_count[index],
                                                   _("lines"))
            text_item += u"<br><i>{0:}</i>".format(short_paths[index])
            results.append((score_value, index, text_item))

    # Sort the obtained scores and populate the list widget
    self.filtered_path = []
    for result in sorted(results):
        index = result[1]
        text = result[-1]
        path = paths[index]
        item = QListWidgetItem(ima.icon('FileIcon'), text)
        item.setToolTip(path)
        item.setSizeHint(QSize(0, 25))
        self.list.addItem(item)
        self.filtered_path.append(path)

    # To adjust the delegate layout for KDE themes
    self.list.files_list = True

    # Move selected item in list accordingly and update list size
    if current_path in self.filtered_path:
        self.set_current_row(self.filtered_path.index(current_path))
    elif self.filtered_path:
        self.set_current_row(0)
    self.fix_size(short_paths, extra=200)

    # If a line number is searched look for it
    self.line_number = line_number
    self.goto_line(line_number)
def setup_symbol_list(self, filter_text, current_path):
    """Setup list widget content for symbol list display.

    Triggered when the filter text contains '@'; the part after '@' is
    fuzzy-matched against symbol names of the current file.
    """
    # Get optional symbol name
    filter_text, symbol_text = filter_text.split('@')

    # Fetch the Outline explorer data, get the icons and values
    oedata = self.get_symbol_list()
    icons = get_python_symbol_icons(oedata)
    symbol_list = process_python_symbol_data(oedata)
    line_fold_token = [(item[0], item[2], item[3]) for item in symbol_list]
    choices = [item[1] for item in symbol_list]
    scores = get_search_scores(symbol_text, choices, template="<b>{0}</b>")

    # Build the text that will appear on the list widget
    results = []
    lines = []
    self.filtered_symbol_lines = []
    for index, score in enumerate(scores):
        text, rich_text, score_value = score
        line, fold_level, token = line_fold_token[index]
        lines.append(text)
        # A score of -1 means the symbol did not match the filter.
        if score_value != -1:
            results.append((score_value, line, text, rich_text,
                            fold_level, icons[index], token))

    # Each entry is indented proportionally to its fold level.
    template = '{0}{1}'

    for (score, line, text, rich_text, fold_level, icon,
         token) in sorted(results):
        fold_space = ' '*(fold_level)
        # Editor lines are 1-based; symbol data is 0-based.
        line_number = line + 1
        self.filtered_symbol_lines.append(line_number)
        textline = template.format(fold_space, rich_text)
        item = QListWidgetItem(icon, textline)
        item.setSizeHint(QSize(0, 16))
        self.list.addItem(item)

    # To adjust the delegate layout for KDE themes
    self.list.files_list = False

    # Move selected item in list accordingly
    # NOTE: Doing this is causing two problems:
    # 1. It makes the cursor to auto-jump to the last selected
    # symbol after opening or closing a different file
    # 2. It moves the cursor to the first symbol by default,
    # which is very distracting.
    # That's why this line is commented!
    # self.set_current_row(0)

    # Update list size
    self.fix_size(lines, extra=125)
def setup(self):
    """Setup list widget content.

    Re-run on every keystroke (connected to the line edit's textChanged
    signal); dispatches to file or symbol mode based on the filter text.
    """
    # With no tabs left there is nothing to switch to; dismiss.
    if not self.tabs.count():
        self.close()
        return

    self.list.clear()
    current_path = self.current_path
    filter_text = self.filter_text

    # Get optional line or symbol to define mode and method handler
    trying_for_symbol = ('@' in self.filter_text)

    if trying_for_symbol:
        self.mode = self.SYMBOL_MODE
        self.setup_symbol_list(filter_text, current_path)
    else:
        self.mode = self.FILE_MODE
        self.setup_file_list(filter_text, current_path)
| bgris/ODL_bgris | lib/python3.5/site-packages/spyder/widgets/fileswitcher.py | Python | gpl-3.0 | 22,325 |
from flask_login import UserMixin
from sqlalchemy import DDL, event
from sqlalchemy.ext.hybrid import hybrid_property
from werkzeug.security import check_password_hash, generate_password_hash
from . import BaseModel
from ..core import db
class User(BaseModel, UserMixin):
    """Application user account.

    Mixes in Flask-Login's ``UserMixin`` for session support. The raw
    password is never stored or readable — only a Werkzeug hash.
    """
    id = db.Column(db.Integer, primary_key=True)
    # Login identifiers; each must be unique when present.
    phone = db.Column(db.String(20), unique=True)
    email = db.Column(db.String(191), unique=True)
    # Hashed password, stored in the 'password' column; only accessible
    # from Python through the write-only `password` property below.
    _password = db.Column('password', db.String(128))

    @hybrid_property
    def password(self):
        # return self._password
        # Write-only attribute: reading it is deliberately forbidden.
        raise AttributeError('password is not a readable attribute')

    @password.setter
    def password(self, value):
        # Store only the salted hash of the supplied plaintext.
        self._password = generate_password_hash(value)

    def check_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self._password, password)
# User IDs start at 1001: after the table is created, bump the
# AUTO_INCREMENT counter (MySQL-specific DDL).
event.listen(
    User.__table__,
    "after_create",
    DDL("ALTER TABLE %(table)s AUTO_INCREMENT = 1001;")
)
| codeif/flask-demo | demo/models/user.py | Python | mit | 972 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.