| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
saidsef/cloudflare
|
lib/__init__.py
|
Python
|
mit
| 67
| 0
|
#
|
!/usr/bin/python
# -*- coding: utf-8 -*-
|
__author__ = 'Said Sef'
|
RandallDW/Aruba_plugin
|
plugins/org.python.pydev/pysrc/_pydevd_bundle/pydevd_frame.py
|
Python
|
epl-1.0
| 33,951
| 0.006038
|
import linecache
import os.path
import re
import sys
import traceback # @Reimport
from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_dont_trace
from _pydevd_bundle import pydevd_vars
from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint
from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \
CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, dict_contains, get_thread_id, STATE_RUN, dict_iter_values, IS_PY3K, \
dict_keys, dict_pop, RETURN_VALUES_DICT
from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
try:
from inspect import CO_GENERATOR
except:
CO_GENERATOR = 0
try:
from _pydevd_bundle.pydevd_signature import send_signature_call_trace
except ImportError:
def send_signature_call_trace(*args, **kwargs):
pass
basename = os.path.basename
IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException')
DEBUG_START = ('pydevd.py', 'run')
DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile')
TRACE_PROPERTY = 'pydevd_traceproperty.py'
get_file_type = DONT_TRACE.get
#=======================================================================================================================
# PyDBFrame
#=======================================================================================================================
class PyDBFrame: # No longer cdef because object was dying when only a reference to trace_dispatch was kept (need to check alternatives).
'''Handles the tracing for a given frame: trace_dispatch is used when we
first enter a new context (a 'call' event) and is then reused for the
rest of that context.
'''
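# Editor's illustrative sketch (not part of pydevd): the shape of a CPython
# trace function. The callable registered with sys.settrace() receives the
# 'call' event, and whatever it returns is used as the local trace function
# for every subsequent 'line'/'return'/'exception' event in that frame;
# that is why trace_dispatch below keeps returning itself (or trace_exception)
# to stay attached to the frame.
#
#     import sys
#
#     def global_trace(frame, event, arg):
#         if event == 'call':
#             return local_trace  # reused for the rest of the frame
#         return None
#
#     def local_trace(frame, event, arg):
#         print(event, frame.f_lineno)
#         return local_trace
#
#     sys.settrace(global_trace)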
#Note: class (and not instance) attributes.
#A similar mapping exists in the main debugger, but this one considers only the file contents,
#while the one in the main debugger considers the user input (so, the actual result must be a join of both).
filename_to_lines_where_exceptions_are_ignored = {}
filename_to_stat_info = {}
should_skip = -1
# IFDEF CYTHON
# def __init__(self, args):
# self._args = args # In the cython version we don't need to pass the frame
# ELSE
def __init__(self, args):
#args = main_debugger, filename, base, info, t, frame
#yeap, much faster than putting in self and then getting it from self later on
self._args = args[:-1] # Remove the frame (we don't want to have a reference to it).
# ENDIF
def set_suspend(self, *args, **kwargs):
self._args[0].set_suspend(*args, **kwargs)
def do_wait_suspend(self, *args, **kwargs):
self._args[0].do_wait_suspend(*args, **kwargs)
# IFDEF CYTHON
# def trace_exception(self, frame, str event, arg):
# cdef bint flag;
# ELSE
def trace_exception(self, frame, event, arg):
# ENDIF
if event == 'exception':
flag, frame = self.should_stop_on_exception(frame, event, arg)
if flag:
self.handle_exception(frame, event, arg)
|
return self.trace_dispatch
return self.trace_exception
# IFDEF CYTHON
# def should_stop_on_exception(self, frame, str event, arg):
# cdef PyDBAdditionalThreadInfo info;
# cdef bint flag;
# ELSE
def should_stop_on_exception(self, frame, event, arg):
# ENDIF
# main_debugger, _filename, info
|
, _thread = self._args
main_debugger = self._args[0]
info = self._args[2]
flag = False
if info.pydev_state != STATE_SUSPEND: #and breakpoint is not None:
exception, value, trace = arg
if trace is not None: #on jython trace is None on the first event
exception_breakpoint = get_exception_breakpoint(
exception, main_debugger.break_on_caught_exceptions)
if exception_breakpoint is not None:
if exception_breakpoint.ignore_libraries:
if exception_breakpoint.notify_on_first_raise_only:
if main_debugger.first_appearance_in_scope(trace):
add_exception_to_frame(frame, (exception, value, trace))
try:
info.pydev_message = exception_breakpoint.qname
except:
info.pydev_message = exception_breakpoint.qname.encode('utf-8')
flag = True
else:
pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename))
flag = False
else:
if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace):
add_exception_to_frame(frame, (exception, value, trace))
try:
info.pydev_message = exception_breakpoint.qname
except:
info.pydev_message = exception_breakpoint.qname.encode('utf-8')
flag = True
else:
flag = False
else:
try:
if main_debugger.plugin is not None:
result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg)
if result:
(flag, frame) = result
except:
flag = False
return flag, frame
def handle_exception(self, frame, event, arg):
try:
# print 'handle_exception', frame.f_lineno, frame.f_code.co_name
# We have 3 things in arg: exception type, description, traceback object
trace_obj = arg[2]
main_debugger = self._args[0]
if not hasattr(trace_obj, 'tb_next'):
return #Not always there on Jython...
initial_trace_obj = trace_obj
if trace_obj.tb_next is None and trace_obj.tb_frame is frame:
#I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check).
if main_debugger.break_on_exceptions_thrown_in_same_context:
#Option: Don't break if an exception is caught in the same function from which it is thrown
return
else:
#Get the trace_obj from where the exception was raised...
while trace_obj.tb_next is not None:
trace_obj = trace_obj.tb_next
if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception:
for check_trace_obj in (initial_trace_obj, trace_obj):
filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1]
filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored
lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename)
if lines_ignored is None:
lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {}
try:
curr_stat = os.stat(filename)
curr_stat = (curr_stat.st_size, curr_stat.st_mtime)
except:
curr_stat = None
last_stat = self.filename_to_stat_info.get(filename)
if last_stat != curr_stat:
self.filename_to_stat_info[filename] = curr_stat
lin
|
ekasitk/sahara
|
sahara/plugins/vanilla/v1_2_1/run_scripts.py
|
Python
|
apache-2.0
| 4,367
| 0
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from sahara.utils import files
LOG = logging.getLogger(__name__)
def start_processes(remote, *processes):
for proc in processes:
remote.execute_command('sudo su -c "/usr/sbin/hadoop-daemon.sh '
'start %s" hadoop' % proc)
def refresh_nodes(remote, service):
remote.execute_command("sudo su -c 'hadoop %s -refreshNodes' hadoop"
% service)
def format_namenode(remote):
remote.execute_command("sudo su -c 'hadoop namenode -format' hadoop")
def hive_create_warehouse_dir(remote):
LOG.debug("Creating Hive warehouse dir")
remote.execute_command("sudo su - -c 'hadoop fs -mkdir "
"/user/hive/warehouse' hadoop")
def hive_copy_shared_conf(remote, dest):
LOG.debug("Copying shared Hive conf")
remote.execute_command(
"sudo su - -c 'hadoop fs -put /opt/hive/conf/hive-site.xml "
"%s' hadoop" % dest)
def oozie_share_lib(remote, nn_hostname):
LOG.debug("Sharing Oozie libs to hdfs://{host}:8020".format(
host=nn_hostname))
# remote.execute_command('sudo su - -c "/opt/oozie/bin/oozie-setup.sh '
# 'sharelib create -fs hdfs://%s:8020" hadoop'
# % nn_hostname)
# TODO(alazarev) return 'oozie-setup.sh sharelib create' back
# when #1262023 is resolved
remote.execute_command(
'sudo su - -c "mkdir /tmp/oozielib && '
'tar zxf /opt/oozie/oozie-sharelib-4.0.0.tar.gz -C /tmp/oozielib && '
'hadoop fs -put /tmp/oozielib/share share && '
'rm -rf /tmp/oozielib" hadoop')
LOG.debug("Creating sqlfile for Oozie")
remote.execute_command('sudo su - -c "/opt/oozie/bin/ooziedb.sh '
'create -sqlfile oozie.sql '
'-run Validate DB Connection" hadoop')
def check_datanodes_count(remote, count):
if count < 1:
return True
LOG.debug("Checking datanode count")
exit_code, stdout = remote.execute_command(
'sudo su -c "hadoop dfsadmin -report | '
'grep \'Datanodes available:\' | '
'awk \'{print \\$3}\'" hadoop')
LOG.debug("Datanode count={count}".format(count=stdout.rstrip()))
return exit_c
|
ode == 0 and stdout and int(stdout) == count
def mysql_start(remote):
LOG.debug("Starting MySQL")
remote.execute_command("/opt/start-mysql.sh")
def oozie_create_db(remote):
LOG.debug("Creating Oozie DB Schema")
sql_script = files.get_file_text(
'plugin
|
s/vanilla/v1_2_1/resources/create_oozie_db.sql')
script_location = "create_oozie_db.sql"
remote.write_file_to(script_location, sql_script)
remote.execute_command('mysql -u root < %(script_location)s && '
'rm %(script_location)s' %
{"script_location": script_location})
def start_oozie(remote):
remote.execute_command(
'sudo su - -c "/opt/oozie/bin/oozied.sh start" hadoop')
def hive_create_db(remote, hive_mysql_passwd):
LOG.debug("Creating Hive metastore db")
sql_script = files.get_file_text(
'plugins/vanilla/v1_2_1/resources/create_hive_db.sql')
sql_script = sql_script.replace('pass', hive_mysql_passwd)
script_location = "create_hive_db.sql"
remote.write_file_to(script_location, sql_script)
remote.execute_command('mysql -u root < %(script_location)s && '
'rm %(script_location)s' %
{"script_location": script_location})
def hive_metastore_start(remote):
LOG.debug("Starting Hive Metastore Server")
remote.execute_command("sudo su - -c 'nohup /opt/hive/bin/hive"
" --service metastore > /dev/null &' hadoop")
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/rest_framework/tests/test_routers.py
|
Python
|
agpl-3.0
| 7,115
| 0.000703
|
from __future__ import unicode_literals
from django.db import models
from django.test import TestCase
from django.core.exceptions import ImproperlyConfigured
from rest_framework import serializers, viewsets, permissions
from rest_framework.compat import include, patterns, url
from rest_framework.decorators import link, action
from rest_framework.response import Response
from rest_framework.routers import SimpleRouter, DefaultRouter
from rest_framework.test import APIRequestFactory
factory = APIRequestFactory()
urlpatterns = patterns('',)
class BasicViewSet(viewsets.ViewSet):
def list(self, request, *args, **kwargs):
return Response({'method': 'list'})
@action()
def action1(self, request, *args, **kwargs):
return Response({'method': 'action1'})
@action()
def action2(self, request, *args, **kwargs):
return Response({'method': 'action2'})
@action(methods=['post', 'delete'])
def action3(self, request, *args, **kwargs):
return Response({'method': 'action3'})
@link()
def link1(self, request, *args, **kwargs):
return Response({'method': 'link1'})
@link()
def link2(self, request, *args, **kwargs):
return Response({'method': 'link2'})
class TestSimpleRouter(TestCase):
def setUp(self):
self.router = SimpleRouter()
def test_link_and_action_decorator(self):
routes = self.router.get_routes(BasicViewSet)
decorator_routes = routes[2:]
# Make sure all these endpoints exist and none have been clobbered
for i, endpoint in enumerate(['action1', 'action2', 'action3', 'link1', 'link2']):
route = decorator_routes[i]
# check url listing
self.assertEqual(route.url,
'^{{prefix}}/{{lookup}}/{0}{{trailing_slash}}$'.format(endpoint))
# check method to function mapping
if endpoint == 'action3':
methods_map = ['post', 'delete']
elif endpoint.startswith('action'):
methods_map = ['post']
else:
methods_map = ['get']
for method in methods_map:
self.assertEqual(route.mapping[method], endpoint)
class RouterTestModel(models.Model):
uuid = models.CharField(max_length=20)
text = models.CharField(max_length=200)
class TestCustomLookupFields(TestCase):
"""
Ensure that custom lookup fields are correctly routed.
"""
urls = 'rest_framework.tests.test_routers'
def setUp(self):
class NoteSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = RouterTestModel
lookup_field = 'uuid'
fields = ('url', 'uuid', 'text')
class NoteViewSet(viewsets.ModelViewSet):
queryset = RouterTestModel.objects.all()
serializer_class = NoteSerializer
lookup_field = 'uuid'
RouterTestModel.objects.create(uuid='123', text='foo bar')
self.router = SimpleRouter()
self.router.register(r'notes', NoteViewSet)
from rest_framework.tests import test_routers
urls = getattr(test_routers, 'urlpatterns')
urls += patterns('',
url(r'^', include(self.router.urls)),
)
def test_custom_lookup_field_route(self):
detail_route = self.router.urls[-1]
detail_url_pattern = detail_route.regex.pattern
self.assertIn('<uuid>', detail_url_pattern)
def test_retrieve_lookup_field_list_view(self):
response = self.client.get('/notes/')
self.assertEqual(response.data,
[{
"url": "http://testserver/notes/123/",
"uuid": "123", "text": "foo bar"
}]
)
def test_retrieve_lookup_field_detail_view(self):
response = self.client.get('/notes/123/')
self.assertEqual(response.data,
{
"url": "http://testserver/notes/123/",
"uuid": "123", "text": "foo bar"
}
)
class TestTrailingSlashIncluded(TestCase):
def setUp(self):
class NoteViewSet(viewsets.ModelViewSet):
model = RouterTestModel
self.router = SimpleRouter()
self.router.register(r'notes', NoteVi
|
ewSet)
self.urls = self.router.urls
def test_urls_have_trailing_slash_by_default(self):
expected = ['^notes/$', '^notes/(?P<pk>[^/]+)/$']
for idx in range(len(expected)):
self.assertEqual(expected[idx], self.urls[idx].regex.pattern)
class TestTrailingSlashRemoved(TestCase):
def setUp(self):
class NoteViewSet(viewsets.ModelViewSet):
model = RouterTestModel
self.router =
|
SimpleRouter(trailing_slash=False)
self.router.register(r'notes', NoteViewSet)
self.urls = self.router.urls
def test_urls_can_have_trailing_slash_removed(self):
expected = ['^notes$', '^notes/(?P<pk>[^/.]+)$']
for idx in range(len(expected)):
self.assertEqual(expected[idx], self.urls[idx].regex.pattern)
class TestNameableRoot(TestCase):
def setUp(self):
class NoteViewSet(viewsets.ModelViewSet):
model = RouterTestModel
self.router = DefaultRouter()
self.router.root_view_name = 'nameable-root'
self.router.register(r'notes', NoteViewSet)
self.urls = self.router.urls
def test_router_has_custom_name(self):
expected = 'nameable-root'
self.assertEqual(expected, self.urls[0].name)
class TestActionKeywordArgs(TestCase):
"""
Ensure keyword arguments passed in the `@action` decorator
are properly handled. Refs #940.
"""
def setUp(self):
class TestViewSet(viewsets.ModelViewSet):
permission_classes = []
@action(permission_classes=[permissions.AllowAny])
def custom(self, request, *args, **kwargs):
return Response({
'permission_classes': self.permission_classes
})
self.router = SimpleRouter()
self.router.register(r'test', TestViewSet, base_name='test')
self.view = self.router.urls[-1].callback
def test_action_kwargs(self):
request = factory.post('/test/0/custom/')
response = self.view(request)
self.assertEqual(
response.data,
{'permission_classes': [permissions.AllowAny]}
)
class TestActionAppliedToExistingRoute(TestCase):
"""
Ensure `@action` decorator raises an except when applied
to an existing route
"""
def test_exception_raised_when_action_applied_to_existing_route(self):
class TestViewSet(viewsets.ModelViewSet):
@action()
def retrieve(self, request, *args, **kwargs):
return Response({
'hello': 'world'
})
self.router = SimpleRouter()
self.router.register(r'test', TestViewSet, base_name='test')
with self.assertRaises(ImproperlyConfigured):
self.router.urls
|
MadManRises/Madgine
|
shared/assimp/test/regression/run.py
|
Python
|
mit
| 11,801
| 0.006949
|
#!/usr/bin/env python3
# -*- Coding: UTF-8 -*-
# ---------
|
------------------------------------------------------------------
# Open Asset Import Library (ASSIMP)
# ---------------------------------------------------------------------------
#
# Copyright (c) 2006-2010, ASSIMP Development Team
#
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# * Redistributions of sourc
|
e code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# * Neither the name of the ASSIMP team, nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior
# written permission of the ASSIMP Development Team.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---------------------------------------------------------------------------
"""
Run the regression test suite using settings from settings.py.
The assimp_cmd (or assimp) binary to use is specified by the first
command line argument and defaults to ``assimp``.
To build, set ``ASSIMP_BUILD_ASSIMP_TOOLS=ON`` in CMake. If generating
configs for an IDE, make sure to build the assimp_cmd project.
On Windows, use ``py run.py <path to assimp>`` to make sure the command
line parameter is forwarded to the script.
"""
import sys
import os
import subprocess
import zipfile
import collections
import multiprocessing
import settings
import utils
# -------------------------------------------------------------------------------
EXPECTED_FAILURE_NOT_MET, DATABASE_LENGTH_MISMATCH, \
DATABASE_VALUE_MISMATCH, IMPORT_FAILURE, \
FILE_NOT_READABLE, COMPARE_SUCCESS, EXPECTED_FAILURE = range(7)
messages = collections.defaultdict(lambda: "<unknown>", {
EXPECTED_FAILURE_NOT_MET:
"""Unexpected success during import\n\
\tReturn code was 0""",
DATABASE_LENGTH_MISMATCH:
"""Database mismatch: lengths don't match\n\
\tExpected: {0} Actual: {1}""",
DATABASE_VALUE_MISMATCH:
"""Database mismatch: """,
IMPORT_FAILURE:
"""Unexpected failure during import\n\
\tReturn code was {0}""",
FILE_NOT_READABLE:
"""Unexpected failure reading file""",
COMPARE_SUCCESS:
"""Results match archived reference dump in database\n\
\tNumber of bytes compared: {0}""",
EXPECTED_FAILURE:
"""Expected failure was met.""",
})
outfilename_output = "run_regression_suite_output.txt"
outfilename_failur = "run_regression_suite_failures.csv"
Environment = {}
# -------------------------------------------------------------------------------
class results:
""" Handle formatting of results"""
def __init__(self, zipin):
"""Init, given a ZIPed database """
self.failures = []
self.success = []
self.zipin = zipin
def fail(self, failfile, filename_expect, pp, msg, *args):
"""
Report failure of a sub-test
File failfile failed a test for pp config pp; the failure notice is msg,
and *args are the format() arguments for msg
"""
print("[FAILURE] " + messages[msg].format(*args))
self.failures.append((failfile, filename_expect, pp))
def ok(self, f, pp, msg, *args):
"""
Report success of a sub-test
File f passed the test, msg is a happy success note,
*args is format()ing args for msg.
"""
print("[SUCCESS] " + messages[msg].format(*args))
self.success.append(f)
def report_results(self):
"""Write results to ../results/run_regression_suite_failures.txt"""
count_success = len(self.success)
count_fail = len(self.failures)
percent_good = float(count_success) / (count_success + count_fail)
print("\n" + ('='*60) + "\n" + "SUCCESS: {0}\nFAILURE: {1}\nPercentage good: {2}".format(
count_success, count_fail, percent_good) +
"\n" + ('='*60) + "\n")
with open(os.path.join('..', 'results',outfilename_failur), "wt") as f:
f.write("ORIGINAL FILE;EXPECTED DUMP\n")
f.writelines(map(
lambda x: x[0] + ' ' + x[2] + ";" + x[1] + "\n", self.failures))
if self.failures:
print("\nSee " + settings.results + "\\" + outfilename_failur
+ " for more details\n\n")
def hasFailures( self ):
""" Return True, if any failures there. """
return 0 != len( self.failures )
# -------------------------------------------------------------------------------
def setEnvVar( var, value ):
print ( "set var " + var +" to" + value)
Environment[ var ] = value
# -------------------------------------------------------------------------------
def getEnvVar( var ):
if var in Environment:
return Environment[ var ]
else:
print ( "Error: cannot find " + var )
return ""
# -------------------------------------------------------------------------------
def prepare_output_dir(fullpath, myhash, app):
outfile = os.path.join(settings.results, "tmp", os.path.split(fullpath)[1] + "_" + myhash)
try:
os.mkdir(outfile)
except OSError:
pass
outfile = os.path.join(outfile, app)
return outfile
# -------------------------------------------------------------------------------
def process_dir(d, outfile_results, zipin, result ):
shellparams = {'stdout':outfile_results, 'stderr':outfile_results, 'shell':False}
print("Processing directory " + d)
all = ""
for f in sorted(os.listdir(d)):
fullpath = os.path.join(d, f)
if os.path.isdir(fullpath) and not f[:1] == '.':
process_dir(fullpath, outfile_results, zipin, result)
continue
if f in settings.files_to_ignore or os.path.splitext(f)[1] in settings.exclude_extensions:
print("Ignoring " + f)
return
for pppreset in settings.pp_configs_to_test:
filehash = utils.hashing(fullpath, pppreset)
failure = False
try:
input_expected = zipin.open(filehash, "r").read()
# empty dump files indicate 'expected import failure'
if not len(input_expected):
failure = True
except KeyError:
# TODO(acgessler): Keep track of this and report as error in the end.
print("Didn't find "+fullpath+" (Hash is "+filehash+") in database. Outdated "+\
"regression database? Use gen_db.zip to re-generate.")
continue
print("-"*60 + "\n " + os.path.realpath(fullpath) + " pp: " + pppreset)
outfile_actual = prepare_output_dir(fullpath, filehash, "ACTUAL")
outfile_expect = prepare_output_dir(fullpath, filehash, "EXPECT")
outfile_results.write("assimp dump "+"-"*80+"\n")
outfile_results.flush()
assimp_bin_path = getEnvVar("assimp_path")
command = [assimp_bin_path,
"dump",
fullpath, outfile_actual, "-b", "-s", "-l" ] +\
pppreset.split()
print( "
|
nkoukou/University_Projects_Year_3
|
EDM_Assembly/base_class.py
|
Python
|
mit
| 11,366
| 0.006159
|
'''
Defines the base class of an electric potential grid.
'''
import numpy as np
import matplotlib as mpl
import matplotlib.pylab as plt
from numba import jit
# Global dimensions (used for plots)
sqc_x = (2., 'cm') # unit length for SquareCable
sqc_u = (10., 'V') # unit potential for SquareCable
edm_x = (10., 'mm') # unit length for Edm
edm_u = (2., 'k
|
V') # unit potential for Edm
# Plot parameters
font = {'family
|
' : 'normal',
'weight' : 'normal'}
mpl.rc('font', **font)
mpl.rcParams['lines.linewidth'] = 5.
# Functions compiled just-in-time
@jit
def gen_sc_grid(b, t, u):
'''
Generates SquareCable grid.
'''
grid = np.full((b,b), u)
fix = np.ones((b,b))
grid = np.pad(grid, ((t-b)/2,), 'constant', constant_values=(0,))
fix = np.pad(fix, ((t-b)/2,), 'constant', constant_values=(0,))
grid = np.pad(grid, 1, 'constant', constant_values=(0,))
fix = np.pad(fix, 1, 'constant', constant_values=(1,))
return grid, fix
@jit
def gen_edm_grid(tube_dist, scale=1):
'''
Generates Edm grid.
'''
small_plate = np.full(2,1, dtype='float64')
big_plate = np.full(20,4, dtype='float64')
gap = np.zeros(1, dtype='float64')
row_one = np.concatenate((small_plate, gap, big_plate, gap, small_plate))
row_two = np.zeros(row_one.size)
row_three = -row_one
grid = np.vstack((row_one, row_two, row_three))
grid = np.pad(grid, tube_dist, 'constant', constant_values=(0,))
fix = np.where(grid==0, 0, 1)
if scale != 1:
scale = np.ones((scale, scale))
grid = np.kron(grid, scale)
fix = np.kron(fix, scale)
grid = np.pad(grid, 1, 'constant', constant_values=(0,))
fix = np.pad(fix, 1, 'constant', constant_values=(1,))
return grid, fix
@jit
def update(grid, fix, scale, w=-1):
'''
Updates SquareCable or Edm grid.
Relaxation parameter w (0 < w < 2) affects the speed of convergence.
- w = 'j': solves with Jacobi method
- w = -1: solves with estimated optimal w
'''
if w=='j' or w=='J':
new_grid=np.copy(grid)
for index, fixed in np.ndenumerate(fix):
if fixed: continue
new_grid[index] = 0.25*( grid[index[0]-1, index[1]] +
grid[index[0]+1, index[1]] +
grid[index[0], index[1]-1] +
grid[index[0], index[1]+1] )
return new_grid
if w==-1:
coef = float(grid.shape[1])/grid.shape[0]
const = 2.0 if coef==1. else 5.5
w = 2./(1+const/(coef*scale))
for index, fixed in np.ndenumerate(fix):
if fixed: continue
grid[index] = ((1-w) * grid[index] + 0.25 * w *
( grid[index[0]-1, index[1]] +
grid[index[0]+1, index[1]] +
grid[index[0], index[1]-1] +
grid[index[0], index[1]+1] ))
return grid
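# Usage sketch (editor's addition; the sizes and potential below are made-up
# example values, not taken from the original module):
#
#     grid, fix = gen_sc_grid(4, 10, 1.0)           # 4x4 cable held at 1.0 inside a 10x10 box
#     grid = update(grid, fix, 10)                  # one SOR sweep with the estimated optimal w
#     jacobi = update(grid.copy(), fix, 10, w='j')  # a single Jacobi sweep instead
#
# Repeated sweeps (see PotentialGrid.converge_grid below) relax the interior
# points while the cells flagged in fix keep their boundary values.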
# Base class
class PotentialGrid(object):
def update_grid(self, w=-1):
'''
Updates grid once.
'''
self.grid = update(self.grid, self.fix, self.scale, w)
def converge_grid(self, w=-1, accuracy=0.05):
'''
Updates grid until convergence.
'''
temporal_spread = 1.
spatial_spread = 0.
updates = 0
while temporal_spread > accuracy*spatial_spread:
horizontal_spread = np.absolute(np.diff(self.grid, axis=-1)).max()
vertical_spread = np.absolute(np.diff(self.grid, axis=0)).max()
spatial_spread = max(horizontal_spread, vertical_spread)
old_grid = np.copy(self.grid)
self.update_grid(w)
temporal_spread = np.linalg.norm( (self.grid - old_grid) )
updates += 1
if updates%1000==0:
print '\nspatial spread = ', spatial_spread
print 'temporal spread = ', temporal_spread
print 'updates = ', updates
return temporal_spread, spatial_spread, updates
def plot_grid(self, title=None):
'''
Plots grid's potential field. Parameter title sets the title of the
plot.
'''
if self.grid.shape[0] == self.grid.shape[1]:
colour, shrink, aspect = 'YlOrRd', 1, (1, 10)
else:
colour, shrink, aspect = 'RdYlBu', 0.5, (1.2, 8)
grid = self.dim['u'][0]*self.grid
xedge = (grid.shape[1]-2.)*self.dim['x'][0]/self.scale/2.
yedge = (grid.shape[0]-2.)*self.dim['x'][0]/self.scale/2.
fig = plt.figure()
ax = fig.add_subplot(111)
if title=='intro':
ax.set_title(r'EDM experiment plate assembly', fontsize=45)
elif title=='results':
ax.set_title(r'Electric Potential Field', fontsize=45)
axx = ax.imshow(grid, extent= [-xedge, xedge, -yedge, yedge],
aspect=aspect[0], interpolation='None',
cmap=plt.cm.get_cmap(colour))
ax.set_xlabel(r'$system\ size\ ({0})$'.format(self.dim['x'][1]),
fontsize=45)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.tick_params(axis='both', labelsize=40)
cbar = fig.colorbar(axx, shrink=shrink, aspect=aspect[1])
cbar.ax.tick_params(labelsize=40)
cbar.set_label(r'$Potential\ \phi\ ({0})$'.format(self.dim['u'][1]),
size=50)
def analyse_scale(self, w=-1, datapoints=20, accuracy=0.05, plot=True):
'''
Plots number of updates against scale for given relaxation parameter w,
number of datapoints and accuracy of convergence. If plot=False,
returns the computed scales and updates instead of plotting.
Also plots the maximum spatial spread of potential against scale.
'''
scales = np.linspace(10, 10*datapoints, datapoints)
mesh, updates = [], []
for s in scales:
print s
self.set_scale(s, silent=True)
data = self.converge_grid(w, accuracy)
updates.append(data[2])
mesh.append(data[1]*self.dim['u'][0])
if not plot: return scales, updates
if w=='j':
xaxis = scales*scales
lab= r'$scale^2\ \left(\frac{1}{(%g%s)^2}\right)$'% (
self.dim['x'][0], self.dim['x'][1])
else:
xaxis = scales
lab= r'$scale\ \left(\frac{1}{%g%s}\right)$'% (self.dim['x'][0],
self.dim['x'][1])
slope = updates[-1]/xaxis[-1]
fit = slope*xaxis
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title(r'Number of updates against Scale', fontsize=45)
ax.plot(xaxis, updates, label=r'Numerical data')
ax.plot(xaxis, fit, label=r'Linear fit ($slope=%.2f$)'% (slope))
ax.set_xlabel(lab, fontsize=35)
ax.set_ylabel(r'$temporal\ updates$', fontsize=35)
ax.tick_params(axis='both', labelsize=25)
ax.legend(loc='upper left', prop={'size':40})
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title(r'Spatial spread against Scale', fontsize=45)
ax.plot(scales, mesh)
ax.set_xlabel(r'$scale\ \left(\frac{1}{%g%s}\right)$'%
(self.dim['x'][0], self.dim['x'][1]), fontsize=40)
ax.set_ylabel(r'$spatial\ spread\ (%s)$'% (self.dim['u'][1]),
fontsize=40)
ax.tick_params(axis='both', labelsize=25)
def analyse_spread(self, w=-1, datapoints=10):
'''
Plots spatial spread of potential against accuracy of convergence for
given relaxation parameter w and number of datapoints.
'''
fig = plt.figure()
ax = fig.add_subplot(111)
#ax.set_title(r'Spatial spread against Accuracy of convergence',
# fontsize=75)
ax.set_xlabel(r'$fraction\ of\ spatial\ spread$', fontsize=40)
ax.invert_xaxis()
ax.set_ylabel(r'$spatial\ spread\ (%s)$'% (self.dim['u'][1]),
fontsize=40)
ax.tick_params(axis='both', labelsize=30)
accura
|
Samael500/social-inspector
|
inspector/tests/test.py
|
Python
|
mit
| 105
| 0
|
import uni
|
ttest
class TestC
|
olor(unittest.TestCase):
def test(self):
self.assertTrue(True)
|
sipdbg/sipdbg
|
server/main_server.py
|
Python
|
gpl-2.0
| 6,551
| 0.020608
|
import os
import sys
import random
import logging
import signal
main_path = os.path.abspath (os.path.dirname (__file__))
prev_path = main_path + "/.."
sys.path.append (prev_path)
from utils import _redis_connect, log_init
from command import *
from RedisHelper import RedisServer
from Config import Config
from sipcore import SIPpacket
config_path = main_path + "/../config"
# Main Server
class MainServerCommand (object):
def __init__ (self, _redis, _log
|
ger, _confi
|
g):
self.s_id = 0 # Identifier for RTP/SIP service
self.rr = 1 # round robin (server id)
self.instance_cnt = 1 # server count
self.rtp = {} # rtp instance
self.sip = {} # sip instance
self._redis = _redis
self._logger = _logger
self._config = _config
self.load_config ()
self.cmd_handler = {
"CREATE_RTP" : self.create_rtp, # ckey, [|port, |bind_ip]
"CREATE_SIPTCP" : self.create_siptcp, # ckey, bind_port, |bind_ip
"CREATE_SIPUDP" : self.create_sipudp, # ckey, bind_port, |bind_ip
"REMOVE_RTP" : self.remove_rtp, # ckey, port, |bind_ip
"REMOVE_SIP" : self.remove_sip # ckey, bind_ip, bind_port
}
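# Dispatch sketch (editor's addition; the key, port and address are made-up
# example values): a request such as cmd="CREATE_RTP", ckey="client-1",
# args=[5004, "10.0.0.5"] reaches execute() below, which looks the verb up in
# cmd_handler and calls self.create_rtp("client-1", 5004, "10.0.0.5"),
# i.e. the optional arguments listed above are passed through positionally.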
return
def load_config (self):
self._rtp_port_range_start = self._config.rtp_port_range_start
self._rtp_port_range_end = self._config.rtp_port_range_end
self._local_host = self._config.local_host
return
inc_rr = lambda self: 1 if self.rr == self.instance_cnt else self.rr + 1
def execute (self, cmd, ckey, args):
if not self.cmd_handler.has_key (cmd):
raise ServerUnknownCommand ("Command not found: %s"%cmd)
self._logger.debug ("Executing '%s':%s"%(cmd, args))
return self.cmd_handler [cmd] (ckey, *args)
def create_rtp (self, ckey, port = None, bind_ip = None):
bind_ip = bind_ip or self._local_host
if self.rtp.has_key ((bind_ip, port)):
_id = self.rtp [(bind_ip, port)][0]
return RedisServer.server_rtp_create_reply (self._redis, ckey, _id)
while True if not port else False:
port = random.randrange (self._rtp_port_range_start,
self._rtp_port_range_end)
if not self.rtp.has_key ((bind_ip, port)):
break
RedisServer.server_rtp_create (self._redis, ckey, self.s_id, port,
bind_ip, self.rr)
self.rtp [(bind_ip, port)] = (self.s_id, self.rr)
self.s_id += 1
self.rr = self.inc_rr ()
return
def create_sipudp (self, ckey, bind_port, bind_ip = None):
return self.create_sip (ckey, bind_port, bind_ip, "UDP")
def create_siptcp (self, ckey, bind_port, bind_ip = None):
return self.create_sip (ckey, bind_port, bind_ip, "TCP")
def create_sip (self, ckey, bind_port, bind_ip = None, proto = None):
bind_ip = bind_ip or self._local_host
if self.sip.has_key ((bind_ip, bind_port, proto)):
_id = self.sip [(bind_ip, bind_port, proto)][0]
self._logger.warn (
"CreateSIP: %s:%d: Already exist"%(bind_ip, bind_port))
return RedisServer.server_sip_create_reply (self._redis, ckey, _id,
proto)
RedisServer.server_sip_create (self._redis, ckey, self.s_id,
bind_ip, bind_port, self.rr, proto)
self._logger.debug (
"Contacting transmitter %d"%self.rr)
self.sip [(bind_ip, bind_port, proto)] = (self.s_id, self.rr)
self.s_id += 1
self.rr = self.inc_rr ()
self._logger.info (
"Create SIP executed (%s:%d)"%(bind_ip, bind_port))
return
def remove_rtp (self, ckey, port, bind_ip = None):
bind_ip = bind_ip or self._local_host
if not self.rtp.has_key ((bind_ip, port)):
RedisServer.server_error_reply (ckey, "RTP not existing")
return
RedisServer.server_rtp_remove (self._redis,
ckey,
self.rtp [(bind_ip, port)][1])
self.rtp.pop ((bind_ip, port))
RedisServer.server_success_reply (self._redis, ckey)
return
def remove_sip (self, ckey, bind_ip, bind_port, proto = None):
if not self.sip.has_key ((bind_ip, bind_port, proto)):
RedisServer.server_error_reply (ckey, "SIP not existing")
return
RedisServer.server_sip_remove (self._redis, ckey,
self.sip[(bind_ip, bind_port, proto)][1])
self.sip.pop ((bind_ip, bind_port, proto))
RedisServer.server_success_reply (self._redis, ckey)
return
class Server (CommandHandler):
def __init__ (self, * args, ** kwargs):
super (Server, self).__init__ (** kwargs)
self._command_handler = MainServerCommand (self._redis, self._logger,
self._config)
return
def _validate (self):
if not super (Server, self)._validate ():
return False
return True if 3 is len (self._data) else False
def _parse (self):
r = self._data
self.cmd = r [0]
self.cmd_args = r [1]
self.ckey = r [2]
return True
_locals_enter = _locals_close = lambda self: True
def _process (self):
try:
self._command_handler.execute (self.cmd, self.ckey, self.cmd_args)
except ServerUnknownCommand:
self._logger.error (
"Unknown command : %s ('%s')"%(self.cmd, self._data))
except Exception, e:
self._logger.exception (e)
else:
return True
return False
def _shutdown (signo = None, frame = None):
sys.exit (0)
if '__main__' == __name__:
config_file = "%s/config_server.py"%config_path
config = Config (config_file)
logger = log_init (__file__, config.log_level or logging.DEBUG,
config.log_local or "local3")
redis = _redis_connect (config.redis_host)
s = Server (logger = logger, redis = redis, config = config)
signal.signal (signal.SIGTERM, _shutdown)
logger.info ("Starting server")
try:
s.run ()
except KeyboardInterrupt:
logger.info (
"Hit ^C Exiting")
logger.info ("Stopping server")
del (s)
sys.exit (0)
|
AnthonyDiGirolamo/todotxt-machine
|
todotxt_machine/todo.py
|
Python
|
gpl-3.0
| 38,728
| 0.003823
|
#!/usr/bin/env python
# coding=utf-8
import re
import random
from datetime import date
class Todo:
"""Single Todo item"""
_priority_regex = re.compile(r'\(([A-Z])\) ')
def __init__(self, item, index,
colored="", priority="", contexts=[], projects=[],
creation_date="", due_date="", completed_date=""):
self.raw = item.strip()
self.raw_index = index
self.creation_date = creation_date
self.priority = priority
self.contexts = contexts
self.projects = projects
self.due_date = due_date
self.completed_date = completed_date
self.colored = self.highlight()
# self.colored_length = TerminalOperations.length_ignoring_escapes(self.colored)
def update(self, item):
self.raw = item.strip()
self.priority = Todos.priority(item)
self.contexts = Todos.contexts(item)
self.projects = Todos.projects(item)
self.creation_date = Todos.creation_date(item)
self.due_date = Todos.due_date(item)
self.completed_date = Todos.completed_date(item)
self.colored = self.highlight()
# self.colored_length = TerminalOperations.length_ignoring_escapes(self.colored)
def __repr__(self):
return repr({
"raw": self.raw,
"colored": self.colored,
"raw_index": self.raw_index,
"priority": self.priority,
"contexts": self.contexts,
"projects": self.projects,
"creation_date": self.creation_date,
"due_date": self.due_date,
"completed_date": self.completed_date
})
def highlight(self, line="", show_due_date=True, show_contexts=True, show_projects=True):
colored = self.raw if line == "" else line
color_list = [colored]
if colored[:2] == "x ":
color_list = ('completed', color_list)
else:
words_to_be_highlighted = self.contexts + self.projects
if self.due_date:
words_to_be_highlighted.append("due:" + self.due_date)
if self.creation_date:
words_to_be_highlighted.append(self.creation_date)
if words_to_be_highlighted:
color_list = re.split("(" + "|".join([re.escape(w) for w in words_to_be_highlighted]) + ")", self.raw)
for index, w in enumerate(color_list):
if w in self.contexts:
color_list[index] = ('context', w) if show_contexts else ''
|
elif w in self.projects:
color_list[index] = ('project', w) if show_projects else ''
elif w == "due:" + self.due_date:
color_list[index] = ('due_date', w)
|
if show_due_date else ''
elif w == self.creation_date:
color_list[index] = ('creation_date', w)
if self.priority and self.priority in "ABCDEF":
color_list = ("priority_{0}".format(self.priority.lower()), color_list)
else:
color_list = ("plain", color_list)
return color_list
def highlight_search_matches(self, line=""):
colored = self.raw if line == "" else line
color_list = [colored]
if self.search_matches:
color_list = re.split("(" + "|".join([re.escape(match) for match in self.search_matches]) + ")", self.raw)
for index, w in enumerate(color_list):
if w in self.search_matches:
color_list[index] = ('search_match', w)
return color_list
def change_priority(self, new_priority):
self.priority = new_priority
if new_priority:
new_priority = '({}) '.format(new_priority)
if re.search(self._priority_regex, self.raw):
self.raw = re.sub(self._priority_regex, '{}'.format(new_priority), self.raw)
elif re.search(r'^x \d{4}-\d{2}-\d{2}', self.raw):
self.raw = re.sub(r'^(x \d{4}-\d{2}-\d{2}) ', r'\1 {}'.format(new_priority), self.raw)
else:
self.raw = '{}{}'.format(new_priority, self.raw)
self.update(self.raw)
def is_complete(self):
if self.raw[0:2] == "x ":
return True
elif self.completed_date == "":
return False
else:
return True
def complete(self):
today = date.today()
self.raw = "x {0} ".format(today) + self.raw
self.completed_date = "{0}".format(today)
self.update(self.raw)
def incomplete(self):
self.raw = re.sub(Todos._completed_regex, "", self.raw)
self.completed_date = ""
self.update(self.raw)
def add_creation_date(self):
if self.creation_date == "":
p = "({0}) ".format(self.priority) if self.priority != "" else ""
self.update("{0}{1} {2}".format(p, date.today(), self.raw.replace(p, "")))
class Todos:
"""Todo items"""
_context_regex = re.compile(r'(?:^|\s+)(@\S+)')
_project_regex = re.compile(r'(?:^|\s+)(\+\S+)')
_creation_date_regex = re.compile(r'^'
r'(?:x \d\d\d\d-\d\d-\d\d )?'
r'(?:\(\w\) )?'
r'(\d\d\d\d-\d\d-\d\d)\s*')
_due_date_regex = re.compile(r'\s*due:(\d\d\d\d-\d\d-\d\d)\s*')
_priority_regex = re.compile(r'\(([A-Z])\) ')
_completed_regex = re.compile(r'^x (\d\d\d\d-\d\d-\d\d) ')
def __init__(self, todo_items, file_path, archive_path):
self.file_path = file_path
self.archive_path = archive_path
self.update(todo_items)
def reload_from_file(self):
with open(self.file_path, "r") as todotxt_file:
self.update(todotxt_file.readlines())
def save(self):
with open(self.file_path, "w") as todotxt_file:
for t in self.todo_items:
todotxt_file.write(t.raw + '\n')
def archive_done(self):
if self.archive_path is not None:
with open(self.archive_path, "a") as donetxt_file:
done = self.done_items()
for t in done:
donetxt_file.write(t.raw + '\n')
self.todo_items.remove(t)
self.save()
return True
return False
def update(self, todo_items):
self.parse_raw_entries(todo_items)
def append(self, item, add_creation_date=True):
self.insert(len(self.todo_items), item, add_creation_date)
return len(self.todo_items) - 1
def insert(self, index, item, add_creation_date=True):
self.todo_items.insert(index, self.create_todo(item, index))
self.update_raw_indices()
newtodo = self.todo_items[index]
if add_creation_date and newtodo.creation_date == "":
newtodo.add_creation_date()
return index
def delete(self, index):
del self.todo_items[index]
self.update_raw_indices()
def __iter__(self):
self.index = -1
return self
def __next__(self):
self.index = self.index + 1
if self.index == len(self.todo_items):
raise StopIteration
return self.todo_items[self.index]
def next(self):
self.index = self.index + 1
if self.index == len(self.todo_items):
raise StopIteration
return self.todo_items[self.index]
def __len__(self):
return len(self.todo_items)
def pending_items(self):
return [t for t in self.todo_items if not t.is_complete()]
def done_items(self):
return [t for t in self.todo_items if t.is_complete()]
def pending_items_count(self):
return len(self.pending_items())
def done_items_count(self):
return len(self.done_items())
def __getitem__(self, index):
return self.todo_items[index]
def __repr__(self):
return repr([i for i in self.todo_items])
def create_todo(self, todo, index):
return Todo(todo, index,
contexts=Todos.contexts(todo),
projects=Tod
|
xmnlab/hermes
|
hermes/__init__.py
|
Python
|
mit
| 120
| 0
|
# -*-
|
coding: utf-8 -*-
__author__ = """Ivan Ogasawara"""
__email__ = 'ivan.ogasawara@gmail.com'
__version__ = '0.1
|
.0'
|
coreycb/charms.openstack
|
unit_tests/pci_responses.py
|
Python
|
apache-2.0
| 10,045
| 0.005077
|
import copy
# flake8: noqa
LSPCI = """
0000:00:00.0 "Host bridge" "Intel Corporation" "Haswell-E DMI2" -r02 "Intel Corporation" "Device 0000"
0000:00:03.0 "PCI bridge" "Intel Corporation" "Haswell-E PCI Express Root Port 3" -r02 "" ""
0000:00:03.2 "PCI bridge" "Intel Corporation" "Haswell-E PCI Express Root Port 3" -r02 "" ""
0000:00:05.0 "System peripheral" "Intel Corporation" "Haswell-E Address Map, VTd_Misc, System Management" -r02 "" ""
0000:00:05.1 "System peripheral" "Intel Corporation" "Haswell-E Hot Plug" -r02 "" ""
0000:00:05.2 "System peripheral" "Intel Corporation" "Haswell-E RAS, Control Status and Global Errors" -r02 "" ""
0000:00:05.4 "PIC" "Intel Corporation" "Haswell-E I/O Apic" -r02 -p20 "Intel Corporation" "Device 0000"
0000:00:11.0 "Unassigned class [ff00]" "Intel Corporation" "Wellsburg SPSR" -r05 "Intel Corporation" "Device 7270"
0000:00:11.4 "SATA controller" "Intel Corporation" "Wellsburg sSATA Controller [AHCI mode]" -r05 -p01 "Cisco Systems Inc" "Device 0067"
0000:00:16.0 "Communication controller" "Intel Corporation" "Wellsburg MEI Controller #1" -r05 "Intel Corporation" "Device 7270"
0000:00:16.1 "Communication controller" "Intel Corporation" "Wellsburg MEI Controller #2" -r05 "Intel Corporation" "Device 7270"
0000:00:1a.0 "USB controller" "Intel Corporation" "Wellsburg USB Enhanced Host Controller #2" -r05 -p20 "Intel Corporation" "Device 7270"
0000:00:1c.0 "PCI bridge" "Intel Corporation" "Wellsburg PCI Express Root Port #1" -rd5 "" ""
0000:00:1c.3 "PCI bridge" "Intel Corporation" "Wellsburg PCI Express Root Port #4" -rd5 "" ""
0000:00:1c.4 "PCI bridge" "Intel Corporation" "Wellsburg PCI Express Root Port #5" -rd5 "" ""
0000:00:1d.0 "USB controller" "Intel Corporation" "Wellsburg USB Enhanced Host Controller #1" -r05 -p20 "Intel Corporation" "Device 7270"
0000:00:1f.0 "ISA bridge" "Intel Corporation" "Wellsburg LPC Controller" -r05 "Intel Corporation" "Device 7270"
0000:00:1f.2 "SATA controller" "Intel Corporation" "Wellsburg 6-Port SATA Controller [AHCI mode]" -r05 -p01 "Cisco Systems Inc" "Device 0067"
0000:01:00.0 "PCI bridge" "Cisco Systems Inc" "VIC 82 PCIe Upstream Port" -r01 "" ""
0000:02:00.0 "PCI bridge" "Cisco Systems Inc" "VIC PCIe Downstream Port" -ra2 "" ""
0000:02:01.0 "PCI bridge" "Cisco Systems Inc" "VIC PCIe Downstream Port" -ra2 "" ""
0000:03:00.0 "Unclassified device [00ff]" "Cisco Systems Inc" "VIC Management Controller" -ra2 "Cisco Systems Inc" "Device 012e"
0000:04:00.0 "PCI bridge" "Cisco Systems Inc" "VIC PCIe Upstream Port" -ra2 "" ""
0000:05:00.0 "PCI bridge" "Cisco Systems Inc" "VIC PCIe Downstream Port" -ra2 "" ""
0000:05:01.0 "PCI bridge" "Cisco Systems Inc" "VIC PCIe Downstream Port" -ra2 "" ""
0000:05:02.0 "PCI bridge" "Cisco Systems Inc" "VIC PCIe Downstream Port" -ra2 "" ""
0000:05:03.0 "PCI bridge" "Cisco Systems Inc" "VIC PCIe Downstream Port" -ra2 "" ""
0000:06:00.0 "Ethernet controller" "Cisco Systems Inc" "VIC Ethernet NIC" -ra2 "Cisco Systems Inc" "Device 012e"
0000:07:00.0 "Ethernet controller" "Cisco Systems Inc" "VIC Ethernet NIC" -ra2 "Cisco Systems Inc" "Device 012e"
0000:08:00.0 "Fibre Channel" "Cisco Systems Inc" "VIC FCoE HBA" -ra2 "Cisco Systems Inc" "Device 012e"
0000:09:00.0 "Fibre Channel" "Cisco Systems Inc" "VIC FCoE HBA" -ra2 "Cisco Systems Inc" "Device 012e"
0000:0b:00.0 "RAID bus controller" "LSI Logic / Symbios Logic" "MegaRAID SAS-3 3108 [Invader]" -r02 "Cisco Systems
|
Inc" "Device 00db"
0000:0f:00.0 "VGA compatible controller" "Matrox Electronics Systems Ltd." "MGA G200e [Pilot] ServerEngines (SEP1)" -r02 "Cisco Systems Inc" "Device 0101"
0000:10:00.0 "Ethernet controller" "Intel Corporation" "I350 Gigabit Network Connection" -r01 "Cisco Systems Inc" "Device 00d6"
0000:10:00.1 "Ethernet
|
controller" "Intel Corporation" "I350 Gigabit Network Connection" -r01 "Cisco Systems Inc" "Device 00d6"
0000:7f:08.0 "System peripheral" "Intel Corporation" "Haswell-E QPI Link 0" -r02 "Intel Corporation" "Haswell-E QPI Link 0"
"""
CONFD_CLI = """
NAME PHYS ADDRESS
--------------------------------------------
TenGigabitEthernet6/0/0 84:b8:02:2a:5f:c3
TenGigabitEthernet7/0/0 84:b8:02:2a:5f:c4
local0 -
"""
CONFD_CLI_ONE_MISSING = """
NAME PHYS ADDRESS
--------------------------------------------
TenGigabitEthernet6/0/0 84:b8:02:2a:5f:c3
local0 -
"""
CONFD_CLI_INVMAC = """
NAME PHYS ADDRESS
--------------------------------------------
TenGigabitEthernet6/0/0 no:ta:va:li:dm:ac
TenGigabitEthernet7/0/0 84:b8:02:2a:5f:c4
local0 -
"""
CONFD_CLI_NODEVS = """
NAME PHYS ADDRESS
--------------------------------------------
local0 -
"""
CONFD_CLI_NOLOCAL = """
NAME PHYS ADDRESS
--------------------------------------------
"""
SYS_TREE = {
'/sys/class/net/eth2': '../../devices/pci0000:00/0000:00:1c.4/0000:10:00.0/net/eth2',
'/sys/class/net/eth3': '../../devices/pci0000:00/0000:00:1c.4/0000:10:00.1/net/eth3',
'/sys/class/net/juju-br0': '../../devices/virtual/net/juju-br0',
'/sys/class/net/lo': '../../devices/virtual/net/lo',
'/sys/class/net/lxcbr0': '../../devices/virtual/net/lxcbr0',
'/sys/class/net/veth1GVRCF': '../../devices/virtual/net/veth1GVRCF',
'/sys/class/net/veth7AXEUK': '../../devices/virtual/net/veth7AXEUK',
'/sys/class/net/vethACOIJJ': '../../devices/virtual/net/vethACOIJJ',
'/sys/class/net/vethMQ819H': '../../devices/virtual/net/vethMQ819H',
'/sys/class/net/virbr0': '../../devices/virtual/net/virbr0',
'/sys/class/net/virbr0-nic': '../../devices/virtual/net/virbr0-nic',
'/sys/devices/pci0000:00/0000:00:1c.4/0000:10:00.0/net/eth2/device': '../../../0000:10:00.0',
'/sys/devices/pci0000:00/0000:00:1c.4/0000:10:00.1/net/eth3/device': '../../../0000:10:00.1',
}
LSPCI_KS_IGB_UNBOUND = """
{} Ethernet controller: Intel Corporation I350 Gigabit Network Connection (rev 01)
Subsystem: Cisco Systems Inc Device 00d6
"""
LSPCI_KS_IGB_BOUND = """
{} Ethernet controller: Intel Corporation I350 Gigabit Network Connection (rev 01)
Subsystem: Cisco Systems Inc Device 00d6
Kernel driver in use: igb
"""
LSPCI_KS_IGBUIO_BOUND = """
{} Ethernet controller: Cisco Systems Inc VIC Ethernet NIC (rev a2)
Subsystem: Cisco Systems Inc VIC 1240 MLOM Ethernet NIC
Kernel driver in use: igb_uio
"""
LSPCI_KS = {
'0000:06:00.0': LSPCI_KS_IGBUIO_BOUND.format('06:00.0'),
'0000:10:00.0': LSPCI_KS_IGB_BOUND.format('10:00.0'),
}
MODALIAS = """
alias pci:v00001137d00000071sv*sd*bc*sc*i* enic
alias pci:v00001137d00000044sv*sd*bc*sc*i* enic
alias pci:v00001137d00000043sv*sd*bc*sc*i* enic
alias pci:v00008086d000010D6sv*sd*bc*sc*i* igb
alias pci:v00008086d000010A9sv*sd*bc*sc*i* igb
alias pci:v00008086d00001522sv*sd*bc*sc*i* igb
alias pci:v00008086d00001521sv*sd*bc*sc*i* igb
alias pci:v00008086d0000157Csv*sd*bc*sc*i* igb
"""
LSPCI_NS = {
'0000:06:00.0': "06:00.0 0200: 1137:0043 (rev a2)",
'0000:07:00.0': "07:00.0 0200: 1137:0043 (rev a2)",
'0000:10:00.0': "10:00.0 0200: 8086:1521 (rev 01)",
'0000:10:00.1': "10:00.1 0200: 8086:1521 (rev 01)",
}
FILE_CONTENTS = {
'/sys/class/net/eth2/address': 'a8:9d:21:cf:93:fc',
'/sys/class/net/eth3/address': 'a8:9d:21:cf:93:fd',
'/sys/class/net/eth2/operstate': 'up',
'/sys/class/net/eth3/operstate': 'down',
'/lib/modules/3.13.0-35-generic/modules.alias': MODALIAS,
}
COMMANDS = {
'LSPCI_MD': ['lspci', '-m', '-D'],
'LSPCI_KS': ['lspci', '-ks'],
'LSPCI_NS': ['lspci', '-ns'],
'UNAME_R': ['uname', '-r'],
'CONFD_CLI': ['/opt/cisco/vpe/bin/confd_cli', '-N', '-C', '-u', 'system'],
}
NET_SETUP = {
'LSPCI_MD': LSPCI,
'UNAME_R': '3.13.0-35-generic',
'CONFD_CLI': CONFD_CLI,
'0000:06:00.0': {
'LSPCI_KS': LSPCI_KS_IGBUIO_BOUND.format('06:00.0'),
'LSPCI_NS': "06:00.0 0200: 1137:0043 (rev a2)",
},
'0000:07:00.0': {
'LSPCI_KS': LSPCI_KS_IGBUIO_BOUND.format('07:00.0'),
'LSPCI_NS': "07:00.0 0200: 1137:0043 (rev a2)",
},
'0000:10:00.0': {
'LSPCI_KS': LSPCI_KS_IGB_BOUND.format('10:00.0'),
|
ragupta-git/ImcSdk
|
imcsdk/mometa/equipment/EquipmentLocatorLed.py
|
Python
|
apache-2.0
| 5,417
| 0.011076
|
"""This module contains the general information for EquipmentLocatorLed ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class EquipmentLocatorLedConsts:
ADMIN_STATE_INACTIVE = "inactive"
ADMIN_STATE_OFF = "off"
ADMIN_STATE_ON = "on"
COLOR_AMBER = "amber"
COLOR_BLUE = "blue"
COLOR_GREEN = "green"
COLOR_RED = "red"
COLOR_UNKNOWN = "unknown"
OPER_STATE_BLINKING = "blinking"
OPER_STATE_ETH = "eth"
OPER_STATE_FC = "fc"
OPER_STATE_OFF = "off"
OPER_STATE_ON = "on"
OPER_STATE_UNKNOWN = "unknown"
class EquipmentLocatorLed(ManagedObject):
"""This is EquipmentLocatorLed class."""
consts = EquipmentLocatorLedConsts()
naming_props = set([])
mo_meta = {
"classic": MoMeta("EquipmentLocatorLed", "equipmentLocatorLed", "locator-led", VersionMeta.Version151f, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [], ["Get", "Set"]),
"modular": MoMeta("EquipmentLocatorLed", "equipmentLocatorLed", "locator-led", VersionMeta.Version2013e, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'computeServerNode'], [], ["Get", "Set"])
}
prop_meta = {
"classic": {
"admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["inactive", "off", "on"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x8, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoProper
|
tyMeta.
|
INTERNAL, None, None, None, None, [], []),
"color": MoPropertyMeta("color", "color", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, None, ["amber", "blue", "green", "red", "unknown"], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 512, None, [], []),
"oper_state": MoPropertyMeta("oper_state", "operState", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, None, ["blinking", "eth", "fc", "off", "on", "unknown"], []),
},
"modular": {
"admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["inactive", "off", "on"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"color": MoPropertyMeta("color", "color", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["amber", "blue", "green", "red", "unknown"], []),
"id": MoPropertyMeta("id", "id", "uint", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 512, None, [], []),
"oper_state": MoPropertyMeta("oper_state", "operState", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["blinking", "eth", "fc", "off", "on", "unknown"], []),
},
}
prop_map = {
"classic": {
"adminState": "admin_state",
"dn": "dn",
"rn": "rn",
"status": "status",
"childAction": "child_action",
"color": "color",
"id": "id",
"name": "name",
"operState": "oper_state",
},
"modular": {
"adminState": "admin_state",
"dn": "dn",
"rn": "rn",
"status": "status",
"childAction": "child_action",
"color": "color",
"id": "id",
"name": "name",
"operState": "oper_state",
},
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.admin_state = None
self.status = None
self.child_action = None
self.color = None
self.id = None
self.name = None
self.oper_state = None
ManagedObject.__init__(self, "EquipmentLocatorLed", parent_mo_or_dn, **kwargs)
|
birsoyo/conan
|
conans/test/functional/search_recorder_test.py
|
Python
|
mit
| 10,512
| 0.00333
|
import unittest
from conans.client.recorder.search_recorder import SearchRecorder
class SearchRecorderTest(unittest.TestCase):
def setUp(self):
self.recorder = SearchRecorder()
def empty_test(self):
info = self.recorder.get_info()
expected_result = {'error': False, 'results': []}
self.assertEqual(expected_result, info)
def sequential_test(self):
self.recorder.add_recipe("remote1", "fake/0.1@user/channel")
self.recorder.add_package("remote1", "fake/0.1@user/channel", "fake_package_id",
"fake_options", "fake_settings", "fake_requires", False)
self.recorder.add_recipe("remote2", "fakefake/0.1@user/channel")
self.recorder.add_package("remote2", "fakefake/0.1@user/channel", "fakefake_package_id1",
"fakefake_options1", "fakefake_settings1", "fakefake_requires1",
False)
self.recorder.add_package("remote2", "fakefake/0.1@user/channel", "fakefake_package_id2",
"fakefake_options2", "fakefake_settings2", "fakefake_requires2",
False)
info = self.recorder.get_info()
expected_result = {
"error": False,
"results": [
{
"remote": "remote1",
"items": [
{
"recipe": {
"id": "fake/0.1@user/channel"
},
"packages": [
{
"id": "fake_package_id",
"options": "fake_options",
"settings": "fake_settings",
"requires": "fake_requires",
"outdated": False
}
]
}
]
},
{
"remote": "remote2",
"items": [
{
"recipe": {
"id": "fakefake/0.1@user/channel"
},
"packages": [
{
"id": "fakefake_package_id1",
"options": "fakefake_options1",
"settings": "fakefake_settings1",
"requires": "fakefake_requires1",
"outdated": False
},
{
"id": "fakefake_package_id2",
"options": "fakefake_options2",
"settings": "fakefake_settings2",
"requires": "fakefake_requires2",
"outdated": False
}
]
}
]
}
]
}
self.assertEqual(expected_result, info)
def unordered_test(self):
self.recorder.add_recipe("my_remote1", "fake1/0.1@user/channel")
self.recorder.add_recipe("my_remote2", "fake2/0.1@user/channel")
self.recorder.add_recipe("my_remote3", "fake3/0.1@user/channel")
self.recorder.add_package("my_remote1", "fake1/0.1@user/channel", "fake1_package_id1",
"fake1_options1", "fake1_settings1", "fake1_requires1", False)
self.recorder.add_package("my_remote2", "fake2/0.1@user/channel", "fake2_package_id1",
"fake2_options1", "fake2_settings1", "fake2_requires1", False)
self.recorder.add_package("my_remote2", "fake2/0.1@user/channel", "fake2_package_id2",
"fake2_options2", "fake2_settings2", "fake2_requires2", False)
info = self.recorder.get_info()
expected_result = {
"error": False,
"results": [
{
"remote": "my_remote1",
"items": [
{
"recipe": {
"id": "fake1/0.1@user/channel"
},
"packages": [
{
"id": "fake1_package_id1",
"
|
options": "fake1_options1",
"settings": "fake1_settings1",
"requires": "fake1_requires1",
"outdated": False
}
]
}
]
},
{
"remote": "my_remote2",
"items": [
{
"recipe": {
"id": "fake2/0.1@user/channel"
},
"packages": [
{
"id": "fake2_package_id1",
"options": "fake2_options1",
"settings": "fake2_settings1",
"requires": "fake2_requires1",
"outdated": False
},
{
"id": "fake2_package_id2",
"options": "fake2_options2",
"settings": "fake2_settings2",
"requires": "fake2_requires2",
"outdated": False
}
]
}
]
},
{
"remote": "my_remote3",
"items": [
{
"recipe": {
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/python/lib/test/test_nis.py
|
Python
|
gpl-3.0
| 1,167
| 0.001714
|
from test import support
import unittest
import sys
# Skip test if nis module does not exist.
nis = support.import_module('nis')
class NisTests(unittest.TestCase):
def test_maps(self):
try:
maps = nis.maps()
except nis.error as msg:
# NIS is probably not active, so this test isn't useful
self.skipTest(str(msg))
try:
# On some systems, this map is only accessible to the
# super user
maps.remove("passwd.adjunct.byname")
except ValueError:
pass
done = 0
for nismap in maps:
mapping = nis.cat(nismap)
for k, v in mapping.items():
                if not k:
continue
if nis.match(k, nismap) != v:
                    self.fail("NIS match failed for key `%s' in map `%s'" % (k, nismap))
else:
# just test the one key, otherwise this test could take a
# very long time
done = 1
break
if done:
break
if __name__ == '__main__':
unittest.main()
|
tomreitsma/the-weird-science
|
tws_game/server.py
|
Python
|
mit
| 3,918
| 0.00051
|
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol
from tws_game.lobby import Lobby
from tws_game.tetris import Tetris
from pprint import pprint
import json
class ClientConnection(WebSocketServerProtocol):
def onOpen(self):
self.factory.register(self)
def onMessage(self, payload, is_binary):
if not is_binary:
self.factory.command(self, payload)
def connectionLost(self, reason):
WebSocketServerProtocol.connectionLost(self, reason)
self.factory.unregister(self)
class TwsServerFactory(WebSocketServerFactory):
AVAILABLE_GAMES = {
'TETRIS': Tetris
}
def __init__(self, url):
WebSocketServerFactory.__init__(self, url)
self.clients = []
self.lobbies = {}
self._lobby_id_counter = 0
""" Client registration
"""
def register(self, client):
if client not in self.clients:
print "registered client {}".format(client.peer)
client.lobby = None
self.clients.append(client)
def unregister(self, client):
if client in self.clients:
print "unregistered client {}".format(client.peer)
if client.lobby:
client.lobby.remove_client(client)
self.clients.remove(client)
""" Message encode/decode functionality
"""
def encode_message(self, command, data):
return '|'.join([command, json.dumps(data)])
def decode_message(self, message):
command, data = message.split('|', 1)
        data = json.loads(data)
return command, data
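    # A minimal sketch of the wire format handled by encode_message/decode_message
    # above; the payloads shown are illustrative values, not taken from the project:
    #   encode_message('create_lobby', {'game': 'TETRIS'})  -> 'create_lobby|{"game": "TETRIS"}'
    #   decode_message('join_lobby|{"id": 1}')               -> ('join_lobby', {'id': 1})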
""" Basic game co
|
mmands
"""
def create_lobby(self, client, data):
        if 'game' not in data or data['game'] not in self.AVAILABLE_GAMES:
raise Exception('Game unavailable')
lobby = Lobby(self.AVAILABLE_GAMES[data['game']])
lobby.set_factory(self)
self._lobby_id_counter += 1
lobby.name = 'Lobby %s' % self._lobby_id_counter
self.lobbies[lobby.id] = lobby
self.send_command(client, 'create_lobby', {
'id': lobby.id
})
def join_lobby(self, client, data):
if client.lobby:
client.lobby.remove_client(client)
if data['id'] in self.lobbies and client not in self.lobbies[data['id']].clients:
self.lobbies[data['id']].add_client(client)
self.send_command(client, 'join_lobby', True)
def list_lobbies(self, client, data):
lobbies = []
for id in self.lobbies:
lobby = self.lobbies[id]
lobbies.append({
'id': lobby.id,
'name': lobby.name,
'clients': len(lobby.clients)
})
self.send_command(client, 'list_lobbies', lobbies)
return lobbies
def leave_lobby(self, client, data):
pass
def start_game(self, client, data):
client.lobby.start_game(client)
def set_nickname(self, client, data):
print "Setting nickname"
pprint(data)
""" Communication methods
"""
def send_command(self, client, command, data):
msg = self.encode_message(command, data)
client.sendMessage(
msg
)
def command(self, client, msg):
command, data = self.decode_message(msg)
commands = {
'create_lobby': self.create_lobby,
'join_lobby': self.join_lobby,
'list_lobbies': self.list_lobbies,
'leave_lobby': self.leave_lobby,
'start_game': self.start_game,
'set_nickname': self.set_nickname
}
if command in commands:
print "Executing command %s" % (command,)
commands[command](client, data)
else:
if client.lobby.game:
client.lobby.game.handle_command(client, command, data)
|
RCOS-Grading-Server/HWserver
|
migration/migrator/migrations/course/20200704004101_gradeable_access.py
|
Python
|
bsd-3-clause
| 1,205
| 0.004149
|
"""Migration for a given Submitty course database."""
def up(config, database, semester, course):
"""
Run up migration.
    :param config: Object holding configuration details about Submitty
:type config: migrator.config.Config
:param database: Object for interacting with given database for environment
:type database: migrator.db.Database
:param semester: Semester of the course being migrated
:type semester: str
    :param course: Code of course being migrated
:type course: str
"""
database.execute(
"""
CREATE TABLE IF NOT EXISTS gradeable_access (
id SERIAL NOT NULL PRIMARY KEY,
g_id character varying(255) NOT NULL REFERENCES gradeable (g_id) ON DELETE CASCADE,
user_id character varying(255) REFERENCES users (user_id) ON DELETE CASCADE,
team_id character varying(255) REFERENCES gradeable_teams (team_id),
accessor_id character varying(255) REFERENCES users (user_id) ON DELETE CASCADE,
"timestamp" timestamp with time zone NOT NULL,
CONSTRAINT access_team_id_check CHECK (((user_id IS NOT NULL) OR (team_id IS NOT NULL)))
);
"""
)
|
eicher31/compassion-modules
|
sbc_compassion/controllers/__init__.py
|
Python
|
agpl-3.0
| 426
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
#
#    @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import b2s_image
|
nicholasserra/sentry
|
src/sentry/web/urls.py
|
Python
|
bsd-3-clause
| 18,988
| 0.003318
|
"""
sentry.web.urls
~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
__all__ = ('urlpatterns',)
from django.conf.urls import include, patterns, url
from django.conf import settings
from django.http import HttpResponse
from django.views.generic import RedirectView
from sentry.web import api
from sentry.web.frontend import accounts, admin, generic
from sentry.web.frontend.admin_queue import AdminQueueView
from sentry.web.frontend.accept_organization_invite import AcceptOrganizationInviteView
from sentry.web.frontend.auth_login import AuthLoginView
from sentry.web.frontend.auth_logout import AuthLogoutView
from sentry.web.frontend.auth_organization_login import AuthOrganizationLoginView
from sentry.web.frontend.auth_provider_login import AuthProviderLoginView
from sentry.web.frontend.error_page_embed import ErrorPageEmbedView
from sentry.web.frontend.group_event_json import GroupEventJsonView
from sentry.web.frontend.group_plugin_action import GroupPluginActionView
from sentry.web.frontend.home import HomeView
from sentry.web.frontend.mailgun_inbound_webhook import MailgunInboundWebhookView
from sentry.web.frontend.organization_api_keys import OrganizationApiKeysView
from sentry.web.frontend.organization_api_key_settings import OrganizationApiKeySettingsView
from sentry.web.frontend.organization_audit_log import OrganizationAuditLogView
from sentry.web.frontend.organization_auth_settings import OrganizationAuthSettingsView
from sentry.web.frontend.organization_members import OrganizationMembersView
from sentry.web.frontend.organization_member_settings import OrganizationMemberSettingsView
from sentry.web.frontend.organization_settings import OrganizationSettingsView
from sentry.web.frontend.create_organization import CreateOrganizationView
from sentry.web.frontend.create_organization_member import CreateOrganizationMemberView
from sentry.web.frontend.create_project import CreateProjectView
from sentry.web.frontend.create_project_key import CreateProjectKeyView
from sentry.web.frontend.create_team import CreateTeamView
from sentry.web.frontend.disable_project_key import DisableProjectKeyView
from sentry.web.frontend.edit_project_key import EditProjectKeyView
from sentry.web.frontend.enable_project_key import EnableProjectKeyView
from sentry.web.frontend.remove_project_key import RemoveProjectKeyView
from sentry.web.frontend.project_issue_tracking import ProjectIssueTrackingView
from sentry.web.frontend.project_keys import ProjectKeysView
from sentry.web.frontend.project_plugins import ProjectPluginsView
from sentry.web.frontend.project_plugin_configure import ProjectPluginConfigureView
from sentry.web.frontend.project_plugin_disable import ProjectPluginDisableView
from sentry.web.frontend.project_plugin_enable import ProjectPluginEnableView
from sentry.web.frontend.project_plugin_reset import ProjectPluginResetView
from sentry.web.frontend.project_notifications import ProjectNotificationsView
from sentry.web.frontend.project_quotas import ProjectQuotasView
from sentry.web.frontend.project_release_tracking import ProjectReleaseTrackingView
from sentry.web.frontend.project_rules import ProjectRulesView
from sentry.web.frontend.project_rule_edit import ProjectRuleEditView
from sentry.web.frontend.project_rule_remove import ProjectRuleRemoveView
from sentry.web.frontend.project_settings import ProjectSettingsView
from sentry.web.frontend.project_tags import ProjectTagsView
from sentry.web.frontend.react_page import GenericReactPageView, ReactPageView
from sentry.web.frontend.release_webhook import ReleaseWebhookView
from sentry.web.frontend.remove_account import RemoveAccountView
from sentry.web.frontend.remove_organization import RemoveOrganizationView
from sentry.web.frontend.remove_project import RemoveProjectView
from sentry.web.frontend.remove_team import RemoveTeamView
from sentry.web.frontend.replay_event import ReplayEventView
def init_all_applications():
"""
Forces import of all applications to ensure code is registered.
"""
from django.db.models import get_apps, get_models
for app in get_apps():
try:
get_models(app)
except Exception:
continue
init_all_applications()
# Only create one instance of the ReactPageView since it's duplicated errywhere
react_page_view = ReactPageView.as_view()
urlpatterns = patterns('')
if settings.DEBUG:
import sentry.web.frontend.debug.mail
from sentry.web.frontend.debug.debug_trigger_error import DebugTriggerErrorView
    from sentry.web.frontend.debug.debug_error_embed import DebugErrorPageEmbedView
    from sentry.web.frontend.debug.debug_new_release_email import DebugNewReleaseEmailView
urlpatterns += patterns(
'',
url(r'^debug/mail/new-event/$',
sentry.web.frontend.debug.mail.new_event),
url(r'^debug/mail/new-note/$',
sentry.web.frontend.debug.mail.new_note),
url(r'^debug/mail/new-release/$',
DebugNewReleaseEmailView.as_view()),
url(r'^debug/mail/assigned/$',
sentry.web.frontend.debug.mail.assigned),
url(r'^debug/mail/digest/$',
sentry.web.frontend.debug.mail.digest),
url(r'^debug/mail/request-access/$',
sentry.web.frontend.debug.mail.request_access),
url(r'^debug/mail/access-approved/$',
sentry.web.frontend.debug.mail.access_approved),
url(r'^debug/mail/invitation/$',
sentry.web.frontend.debug.mail.invitation),
url(r'^debug/embed/error-page/$',
DebugErrorPageEmbedView.as_view()),
url(r'^debug/trigger-error/$',
DebugTriggerErrorView.as_view()),
)
urlpatterns += patterns(
'',
# Store endpoints first since they are the most active
url(r'^api/store/$', api.StoreView.as_view(),
name='sentry-api-store'),
url(r'^api/(?P<project_id>[\w_-]+)/store/$', api.StoreView.as_view(),
name='sentry-api-store'),
url(r'^api/(?P<project_id>\d+)/csp-report/$', api.CspReportView.as_view(),
name='sentry-api-csp-report'),
# The static version is either a 10 digit timestamp, a sha1, or md5 hash
url(r'^_static/(?:(?P<version>\d{10}|[a-f0-9]{32,40})/)?(?P<module>[^/]+)/(?P<path>.*)$', generic.static_media,
name='sentry-media'),
# API
url(r'^api/0/', include('sentry.api.urls')),
url(r'^api/hooks/mailgun/inbound/', MailgunInboundWebhookView.as_view(),
name='sentry-mailgun-inbound-hook'),
url(r'^api/hooks/release/(?P<plugin_id>[^/]+)/(?P<project_id>[^/]+)/(?P<signature>[^/]+)/', ReleaseWebhookView.as_view(),
name='sentry-release-hook'),
url(r'^api/embed/error-page/$', ErrorPageEmbedView.as_view(),
name='sentry-error-page-embed'),
# Auth
url(r'^auth/link/(?P<organization_slug>[^/]+)/$', AuthOrganizationLoginView.as_view(),
name='sentry-auth-link-identity'),
url(r'^auth/login/$', AuthLoginView.as_view(),
name='sentry-login'),
url(r'^auth/login/(?P<organization_slug>[^/]+)/$', AuthOrganizationLoginView.as_view(),
name='sentry-auth-organization'),
url(r'^auth/sso/$', AuthProviderLoginView.as_view(),
name='sentry-auth-sso'),
url(r'^auth/logout/$', AuthLogoutView.as_view(),
name='sentry-logout'),
# Account
url(r'^login-redirect/$', accounts.login_redirect,
name='sentry-login-redirect'),
url(r'^register/$', AuthLoginView.as_view(),
name='sentry-register'),
url(r'^account/sudo/$', 'sudo.views.sudo',
{'template_name': 'sentry/account/sudo.html'},
name='sentry-sudo'),
url(r'^account/recover/$', accounts.recover,
name='sentry-account-recover'),
url(r'^account/recover/confirm/(?P<user_id>[\d]+)/(?P<hash>[0-9a-zA-Z]+)/$', accounts.recover_confirm,
name='sentry-account-recover-confirm'),
url(r'^account/settings/$', accounts.settings,
name='sentry-account-settings'),
url(r'^account/settings/appearance/$', accounts.appearance_settings,
name='sentry-ac
|
Aneapiy/graph_visualization_Yelp
|
dataProcScripts/replaceLinesFull.py
|
Python
|
apache-2.0
| 698
| 0.018625
|
############################
## replaceLinesFull.py
############################
"""
Script for replacing special escape characters in the original yelp academic dataset files.
Special characters such as \n cause problems for the JSON to CSV conversions.
If you want to include review text in the edge file, use this script to replace
special characters such as \n in the JSON review file. This script is much faster
than doing the replacement in R.
"""
with open("yelp_academic_dataset_review.json", "rt") as fin:
with open("reviewFullOut.json", "wt") as fout:
for line in fin:
fout.write(line.replace(r'\n', '')) #.replace(r'\\','').replace(r'\u',''))
fout.close()
fin.close()
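# A minimal verification sketch (not part of the original replacement step above):
# it assumes "reviewFullOut.json" was just written and spot-checks that the first
# few cleaned lines still parse as JSON, which is what the CSV conversion needs.
import json
with open("reviewFullOut.json", "rt") as fcheck:
    for i, line in enumerate(fcheck):
        json.loads(line)  # raises ValueError if a cleaned line is not valid JSON
        if i >= 4:
            break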
|
lepture/terminal
|
terminal/prompt.py
|
Python
|
bsd-3-clause
| 3,060
| 0.000327
|
# -*- coding: utf-8 -*-
"""
terminal.prompt
~~~~~~~~~~~~~~~
Prompt support on terminal.
:copyright: (c) 2013 by Hsiaoming Yang.
"""
import getpass
import sys
# Python 3
if sys.version_info[0] == 3:
string_type = str
else:
string_type = (unicode, str)
def prompt(name, default=None):
"""
Grab user input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
    prompt += name.endswith('?') and ' ' or ': '
while True:
try:
rv = raw_input(prompt)
except NameError:
rv = input(prompt)
if rv:
return rv
if default is not None:
return default
def password(name, default=None):
"""
Grabs hidden (password) input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = getpass.getpass(prompt)
if rv:
return rv
if default is not None:
return default
def confirm(name, default=False, yes_choices=None, no_choices=None):
"""
Grabs user input from command line and converts to boolean
value.
:param name: prompt text
:param default: default value if no input provided.
:param yes_choices: default 'y', 'yes', '1', 'on', 'true', 't'
:param no_choices: default 'n', 'no', '0', 'off', 'false', 'f'
"""
yes_choices = yes_choices or ('y', 'yes', '1', 'on', 'true', 't')
no_choices = no_choices or ('n', 'no', '0', 'off', 'false', 'f')
while True:
rv = prompt(name + '?', default and yes_choices[0] or no_choices[0])
if not rv:
return default
if rv.lower() in yes_choices:
return True
elif rv.lower() in no_choices:
return False
def choose(name, choices, default=None, resolve=None, no_choice=('none',)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices. Choices may be
single strings or (key, value) tuples.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
if not resolve:
resolve = lambda o: o.lower()
_choices = []
options = []
for choice in choices:
if isinstance(choice, string_type):
options.append(choice)
else:
options.append("%s [%s]" % (choice[1], choice[0]))
choice = choice[0]
_choices.append(choice)
while True:
rv = prompt(name + '? - (%s)' % ', '.join(options), default)
if not rv:
return default
rv = resolve(rv)
if rv in no_choice:
return None
if rv in _choices:
return rv
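if __name__ == '__main__':
    # Minimal usage sketch of the helpers above; the prompt names and choice values
    # here are illustrative assumptions, not part of the library.
    name = prompt('Project name', default='demo')
    if confirm('Enable verbose output', default=False):
        print('verbose output enabled for %s' % name)
    size = choose('Size', [('s', 'small'), ('l', 'large')], default='s')
    print('chosen size: %r' % size)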
|
webrecorder/webrecorder
|
webrecorder/auto_tests/runauto.py
|
Python
|
apache-2.0
| 4,145
| 0.003619
|
import requests
import pytest
import subprocess
# ============================================================================
class TestAuto(object):
PREFIX = 'http://localhost:8089'
USER = 'testauto'
LIST_ID = ''
AUTO_ID = ''
NUM_BROWSERS = 2
@classmethod
def setup_class(cls):
cls.session = requests.session()
@classmethod
def teardown_class(cls):
pass
def get(self, url, **kwargs):
full_url = self.PREFIX + url
return self.session.get(full_url, **kwargs)
def post(self, url, **kwargs):
full_url = self.PREFIX + url
        return self.session.post(full_url, **kwargs)
@classmethod
def delete(self, url, **kwargs):
full_url = self.PREFIX + url
        return self.session.delete(full_url, **kwargs)
@pytest.mark.always
def test_create_user(self):
res = subprocess.run(['docker', 'exec', 'webrecorder_app_1', "python", "-m", "webrecorder.admin",
"-c", "testauto@example.com", "testauto", "TestTest123", "archivist", "Auto Test"],
stdout=subprocess.PIPE)
assert b'Created user testauto' in res.stdout or b'A user already exists' in res.stdout
assert res.returncode == 0
@pytest.mark.always
def test_login(self):
params = {'username': self.USER,
'password': 'TestTest123',
}
res = self.post('/api/v1/auth/login', json=params)
assert res.json()['user']['username'] == self.USER
def test_create_coll(self):
res = self.post('/api/v1/collections?user=testauto',
json={'title': 'Auto Test'})
assert res.json()['collection']['id'] == 'auto-test'
assert res.json()['collection']['title'] == 'Auto Test'
def test_create_auto(self):
params = {'scope_type': 'single-page',
'num_browsers': self.NUM_BROWSERS,
}
res = self.post('/api/v1/auto?user=testauto&coll=auto-test', json=params)
assert res.json()['auto']
TestAuto.AUTO_ID = res.json()['auto']
def test_add_urls(self):
params = {'urls': [
'https://twitter.com/webrecorder_io',
'https://rhizome.org/'
]}
res = self.post('/api/v1/auto/{0}/queue_urls?user=testauto&coll=auto-test'.format(self.AUTO_ID), json=params)
assert res.json()['success']
def test_start(self):
res = self.post('/api/v1/auto/{0}/start?user=testauto&coll=auto-test'.format(self.AUTO_ID))
print(res.json())
assert res.json()['success']
@pytest.mark.append
def _test_append_only(self, append, auto_id):
params = {'title': 'Add Url'}
res = self.post('/api/v1/lists?user=testauto&coll=auto-test', json=params)
list_id = res.json()['list']['id']
bookmarks = [{'url': append, 'title': append}]
res = self.post('/api/v1/list/%s/bulk_bookmarks?user=testauto&coll=auto-test' % list_id,
json=bookmarks)
assert res.json()['list']
params = {'list': list_id}
res = self.post('/api/v1/auto/{0}/queue_list?user=testauto&coll=auto-test'.format(auto_id), json=params)
assert res.json()['status']
def test_get_auto(self):
res = self.get('/api/v1/auto/{0}?user=testauto&coll=auto-test'.format(self.AUTO_ID))
auto = res.json()['auto']
assert auto['queue'] is not None
assert auto['seen'] is not None
assert auto['pending'] is not None
assert len(auto['browsers']) == self.NUM_BROWSERS
assert auto['scope_type'] == 'single-page'
@pytest.mark.delete
def _test_delete_auto(self):
res = self.delete('/api/v1/auto/{0}?user=testauto&coll=auto-test'.format(self.AUTO_ID))
assert res.json() == {'deleted_id': str(self.AUTO_ID)}
@pytest.mark.delete
def test_delete_coll(self):
res = self.delete('/api/v1/collection/auto-test?user=testauto')
assert res.json() == {'deleted_id': 'auto-test'} or res.json() == {'error': 'no_such_collection'}
|
opcon/plutokore
|
setup.py
|
Python
|
mit
| 588
| 0.003401
|
from setuptools import setup, find_packages
setup(
name='plutokore',
packages=find_packages(),
version='0.10',
description='Python tool for analysing PLUTO simulation data',
author='Patrick Yates',
author_email='patrick.yates@utas.edu.au',
url='https://github.com/pmyates/plutokore',
    keywords=['pluto', 'astrophysics'],
license='GPL-3.0',
    install_requires=['numpy', 'matplotlib', 'tabulate', 'astropy', 'h5py', 'pyyaml', 'scipy', 'contextlib2', 'future'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'pytest-datafiles'],
)
|
xtompok/uvod-do-prg
|
cv10/cv10.py
|
Python
|
mit
| 886
| 0.01812
|
class Zvire(object):
def __init__(self,jmeno = "",druh = "",hmotnost =0):
self._jmeno = jmeno
self._druh = druh
self._hmotnost = hmotnost
@property
def jmeno(self):
return self._jmeno
@jmeno.setter
def jmeno(self,ajmeno):
        self._jmeno = ajmeno
@property
def druh(self):
return self._druh
@druh.setter
def druh(self,adruh):
self._druh = adruh
@property
def hmotnost(self):
return self._hmotnost
@hmotnost.setter
def hmotnost(self,ahm):
self._hmotnost = ahm
def __str__(self):
return "Zvire {} druhu {} a hmotnosti {} kg".format(
self.jmeno,self.druh,self.hmotnost
)
k = Zvire()
k.jmeno = "Mikeš"
k.druh = "kočka"
k.hmotnost = 3
l = Zvire(jmeno = "Alík", hmotnost=5)
print(k)
print(type(k))
print(l)
print(type(l))
|
udaykrishna/unCap
|
checks.py
|
Python
|
mit
| 1,028
| 0.019455
|
import socket as sk
from kivy.logger import Logger
def getWebsite():
return "www.google.com"
def getIpPort():
sock_info=sk.getaddrinfo(getWebsite(),80,proto=sk.IPPROTO_TCP)
return sock_info[0][-1]
def checkInternet():
sock=sk.socket()
sock.settimeout(1)
try:
sock.connect(getIpPort())
        sock.send(b'GET / HTTP/1.0\r\n\r\n')  # request line needs a space between the path and the protocol
resp=sock.recv(8)
sock.shutdown(1)
sock.close()
if(resp==b'HTTP/1.0'):
return True
else:
return False
except Exception as e:
Logger.error(e)
return False
def checkSpeed():
import psutil
import time
init=[psutil.net_io_counters().bytes_sent,psutil.net_io_counters().bytes_recv]
time.sleep(1)
final=[psutil.net_io_counters().bytes_sent,psutil.net_io_counters().bytes_recv]
readings=[(final[0]-init[0]),(final[1]-init[1])]
print(readings)
if readings[0] < 200 or readings[1] < 200:
return False
else:
return True
|
jessekl/flixr
|
fbone/modules/user/commands.py
|
Python
|
mit
| 1,438
| 0.000695
|
# -*- coding: utf-8 -*-
"""
fbone.modules.user
~~~~~~~~~~~~~~~~~~~~~~~~
user management commands
"""
from flask.ext.script import Command, prompt, prompt_pass
from werkzeug.datastructures import MultiDict
from .models import User
class CreateUserCommand(Command):
"""Create a user"""
"""!!!broken!!!"""
def run(self):
email = prompt('Email')
password = prompt_pass('Password')
password_confirm = prompt_pass('Confirm Password')
data = MultiDict(dict(email=email, password=password, password_confirm=password_confirm))
form = RegisterForm(data, csrf_enabled=False)
if form.validate():
user = register_user(email=email, password=password)
print '\nUser created successfully'
print 'User(id=%s email=%s)' % (user.id, user.email)
return
print '\nError creating user:'
for errors in form.errors.values():
print '\n'.join(errors)
class DeleteUserCommand(Command):
"""Delete a user"""
def run(self):
email = prompt('Email')
user = User.first(email=email)
        if not user:
print 'Invalid user'
return
User.delete(user)
print 'User deleted successfully'
class ListUsersCommand(Command):
"""List all users"""
def run(self):
for u in User.all():
print 'User(id=%s email=%s)' % (u.id, u.email)
|
MagicSolutions/django-form-designer
|
form_designer/migrations/0010_auto__chg_field_formdefinitionfield_help_text.py
|
Python
|
bsd-3-clause
| 10,400
| 0.008173
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'FormDefinitionField.help_text'
db.alter_column('form_designer_formdefinitionfield', 'help_text', self.gf('django.db.models.fields.TextField')(null=True))
def backwards(self, orm):
# Changing field 'FormDefinitionField.help_text'
db.alter_column('form_designer_formdefinitionfield', 'help_text', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'form_designer.formdefinition': {
'Meta': {'object_name': 'FormDefinition'},
'action': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'allow_get_initial': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'body': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'display_logged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'error_message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'form_template_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log_data': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'mail_from': ('form_designer.fields.TemplateCharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'mail_subject': ('form_designer.fields.TemplateCharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'mail_to': ('form_designer.fields.TemplateCharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'mail_uploaded_files': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'message_template': ('form_designer.fields.TemplateTextField', [], {'null': 'True', 'blank': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'default': "'POST'", 'max_length': '10'}),
'name': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'private_hash': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'public_hash': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40'}),
'require_hash': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'save_uploaded_files': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'submit_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'success_clear': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'success_message': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'success_redirect': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'form_designer.formdefinitionfield': {
'Meta': {'ordering': "['position']", 'object_name': 'FormDefinitionField'},
'choice_empty_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'choice_labels': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'choice_model': ('form_designer.fields.ModelNameField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'choice_model_empty_label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'choice_values': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'decimal_places': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'field_class': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'form_definition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['form_designer.FormDefinition']"}),
'help_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_result': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'initial': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'max_digits': ('django.db.models.fields.IntegerFi
|
salv-orlando/MyRepo
|
nova/tests/test_vsa.py
|
Python
|
apache-2.0
| 7,062
| 0.000142
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import stubout
from xml.etree import ElementTree
from xml.etree.ElementTree import Element, SubElement
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova import test
from nova import vsa
from nova import volume
from nova.volume import volume_types
from nova.vsa import utils as vsa_utils
import nova.image.fake
FLAGS = flags.FLAGS
LOG = logging.getLogger('nova.tests.vsa')
class VsaTestCase(test.TestCase):
def setUp(self):
super(VsaTestCase, self).setUp()
self.stubs = stubout.StubOutForTesting()
self.vsa_api = vsa.API()
self.volume_api = volume.API()
FLAGS.quota_volumes = 100
FLAGS.quota_gigabytes = 10000
self.context = context.get_admin_context()
volume_types.create(self.context,
'SATA_500_7200',
extra_specs={'type': 'vsa_drive',
'drive_name': 'SATA_500_7200',
'drive_type': 'SATA',
'drive_size': '500',
'drive_rpm': '7200'})
def fake_show_by_name(meh, context, name):
if name == 'wrong_image_name':
LOG.debug(_("Test: Emulate wrong VSA name. Raise"))
raise exception.ImageNotFound
return {'id': 1, 'properties': {'kernel_id': 1, 'ramdisk_id': 1}}
self.stubs.Set(nova.image.fake._FakeImageService,
'show_by_name',
fake_show_by_name)
def tearDown(self):
self.stubs.UnsetAll()
super(VsaTestCase, self).tearDown()
def test_vsa_create_delete_defaults(self):
param = {'display_name': 'VSA name test'}
vsa_ref = self.vsa_api.create(self.context, **param)
self.assertEqual(vsa_ref['display_name'], param['display_name'])
self.vsa_api.delete(self.context, vsa_ref['id'])
def test_vsa_create_delete_check_in_db(self):
vsa_list1 = self.vsa_api.get_all(self.context)
vsa_ref = self.vsa_api.create(self.context)
vsa_list2 = self.vsa_api.get_all(self.context)
self.assertEqual(len(vsa_list2), len(vsa_list1) + 1)
self.vsa_api.delete(self.context, vsa_ref['id'])
vsa_list3 = self.vsa_api.get_all(self.context)
self.assertEqual(len(vsa_list3), len(vsa_list2) - 1)
def test_vsa_create_delete_high_vc_count(self):
param = {'vc_count': FLAGS.max_vcs_in_vsa + 1}
vsa_ref = self.vsa_api.create(self.context, **param)
self.assertEqual(vsa_ref['vc_count'], FLAGS.max_vcs_in_vsa)
self.vsa_api.delete(self.context, vsa_ref['id'])
def test_vsa_create_wrong_image_name(self):
param = {'image_name': 'wrong_image_name'}
self.assertRaises(exception.ApiError,
self.vsa_api.create, self.context, **param)
def test_vsa_create_db_error(self):
def fake_vsa_create(context, options):
LOG.debug(_("Test: Emulate DB error. Raise"))
raise exception.Error
self.stubs.Set(nova.db, 'vsa_create', fake_vsa_create)
self.assertRaises(exception.ApiError,
self.vsa_api.create, self.context)
def test_vsa_create_wrong_storage_params(self):
vsa_list1 = self.vsa_api.get_all(self.context)
param = {'storage': [{'stub': 1}]}
self.assertRaises(exception.ApiError,
self.vsa_api.create, self.context, **param)
vsa_list2 = self.vsa_api.get_all(self.context)
self.assertEqual(len(vsa_list2), len(vsa_list1))
param = {'storage': [{'drive_name': 'wrong name'}]}
self.assertRaises(exception.ApiError,
self.vsa_api.create, self.context, **param)
def test_vsa_create_with_storage(self, multi_vol_creation=True):
"""Test creation of VSA with BE storage"""
FLAGS.vsa_multi_vol_creation = multi_vol_creation
param = {'storage': [{'drive_name': 'SATA_500_7200',
'num_drives': 3}]}
vsa_ref = self.vsa_api.create(self.context, **param)
self.assertEqual(vsa_ref['vol_count'], 3)
self.vsa_api.delete(self.context, vsa_ref['id'])
param = {'storage': [{'drive_name': 'SATA_500_7200',
'num_drives': 3}],
'shared': True}
vsa_ref = self.vsa_api.create(self.context, **param)
self.assertEqual(vsa_ref['vol_count'], 15)
self.vsa_api.delete(self.context, vsa_ref['id'])
def test_vsa_create_with_storage_single_volumes(self):
self.test_vsa_create_with_storage(multi_vol_creation=False)
def test_vsa_update(self):
vsa_ref = self.vsa_api.create(self.context)
param = {'vc_count': FLAGS.max_vcs_in_vsa + 1}
        vsa_ref = self.vsa_api.update(self.context, vsa_ref['id'], **param)
self.assertEqual(vsa_ref['vc_count'], FLAGS.max_vcs_in_vsa)
param = {'vc_count': 2}
vsa_ref = self.vsa_api.update(self.context, vsa_ref['id'], **param)
self.assertEqual(vsa_ref['vc_count'], 2)
self.vsa_api.delete(self.context, vsa_ref['id'])
    def test_vsa_generate_user_data(self):
FLAGS.vsa_multi_vol_creation = False
param = {'display_name': 'VSA name test',
'display_description': 'VSA desc test',
'vc_count': 2,
'storage': [{'drive_name': 'SATA_500_7200',
'num_drives': 3}]}
vsa_ref = self.vsa_api.create(self.context, **param)
volumes = self.vsa_api.get_all_vsa_drives(self.context,
vsa_ref['id'])
user_data = vsa_utils.generate_user_data(vsa_ref, volumes)
user_data = base64.b64decode(user_data)
LOG.debug(_("Test: user_data = %s"), user_data)
elem = ElementTree.fromstring(user_data)
self.assertEqual(elem.findtext('name'),
param['display_name'])
self.assertEqual(elem.findtext('description'),
param['display_description'])
self.assertEqual(elem.findtext('vc_count'),
str(param['vc_count']))
self.vsa_api.delete(self.context, vsa_ref['id'])
|
PawelPamula/who-are-you
|
webfest/extensions/cache/__init__.py
|
Python
|
mit
| 178
| 0
|
"""Init Extension."""
from flask_cache import Cache
cache = Cache()
def setup_app(app):
"""Ini
|
t the extension with app context."""
cache.init_app(app)
return app
|
teonlamont/mne-python
|
mne/gui/_kit2fiff_gui.py
|
Python
|
bsd-3-clause
| 28,774
| 0
|
"""Mayavi/traits GUI for converting data from KIT systems."""
# Authors: Christian Brodbeck <christianbrodbeck@nyu.edu>
#
# License: BSD (3-clause)
from collections import Counter
import os
import sys
from warnings import warn
import numpy as np
from scipy.linalg import inv
from threading import Thread
from ..externals.six.moves import queue
from ..io.meas_info import _read_dig_points, _make_dig_points
from ..utils import get_config, set_config, logger
from mayavi.core.ui.mayavi_scene import MayaviScene
from mayavi.tools.mlab_scene_model import MlabSceneModel
from pyface.api import (confirm, error, FileDialog, OK, YES, information,
ProgressDialog, warning)
from traits.api import (HasTraits, HasPrivateTraits, cached_property, Instance,
Property, Bool, Button, Enum, File, Float, Int, List,
Str, Array, DelegatesTo, on_trait_change)
from traits.trait_base import ETSConfig
from traitsui.api import (View, Item, HGroup, VGroup, spring, TextEditor,
CheckListEditor, EnumEditor, Handler)
from traitsui.menu import NoButtons
from tvtk.pyface.scene_editor import SceneEditor
from ..io.constants import FIFF
from ..io.kit.kit import (RawKIT, KIT, _make_stim_channel, _default_stim_chs,
UnsupportedKITFormat)
from ..transforms import (apply_trans, als_ras_trans,
get_ras_to_neuromag_trans, Transform)
from ..coreg import _decimate_points, fit_matched_points
from ..event import _find_events
from ._marker_gui import CombineMarkersPanel, CombineMarkersModel
from ._help import read_tooltips
from ._viewer import HeadViewController, PointObject
use_editor = CheckListEditor(cols=5, values=[(i, str(i)) for i in range(5)])
backend_is_wx = False # is there a way to determine this?
if backend_is_wx:
# wx backend allows labels for wildcards
hsp_wildcard = ['Head Shape Points (*.hsp;*.txt)|*.hsp;*.txt']
elp_wildcard = ['Head Shape Fiducials (*.elp;*.txt)|*.elp;*.txt']
kit_con_wildcard = ['Continuous KIT Files (*.sqd;*.con)|*.sqd;*.con']
elif sys.platform in ('win32', 'linux2'):
# on Windows and Ubuntu, multiple wildcards does not seem to work
hsp_wildcard = ['*.hsp', '*.txt']
elp_wildcard = ['*.elp', '*.txt']
kit_con_wildcard = ['*.sqd', '*.con']
else:
hsp_wildcard = ['*.hsp;*.txt']
elp_wildcard = ['*.elp;*.txt']
kit_con_wildcard = ['*.sqd;*.con']
tooltips = read_tooltips('kit2fiff')
class Kit2FiffModel(HasPrivateTraits):
"""Data Model for Kit2Fiff conversion.
- Markers are transformed into RAS coordinate system (as are the sensor
coordinates).
- Head shape digitizer data is transformed into neuromag-like space.
"""
# Input Traits
markers = Instance(CombineMarkersModel, ())
sqd_file = File(exists=True, filter=kit_con_wildcard)
allow_unknown_format = Bool(False)
hsp_file = File(exists=True, filter=hsp_wildcard)
fid_file = File(exists=True, filter=elp_wildcard)
stim_coding = Enum(">", "<", "channel")
stim_chs = Str("")
stim_chs_array = Property(depends_on=['raw', 'stim_chs', 'stim_coding'])
stim_chs_ok = Property(depends_on='stim_chs_array')
stim_chs_comment = Property(depends_on='stim_chs_array')
stim_slope = Enum("-", "+")
stim_threshold = Float(1.)
# Marker Points
use_mrk = List(list(range(5)), desc="Which marker points to use for the "
"device head coregistration.")
# Derived Traits
mrk = Property(depends_on='markers.mrk3.points')
# Polhemus Fiducials
elp_raw = Property(depends_on=['fid_file'])
hsp_raw = Property(depends_on=['hsp_file'])
polhemus_neuromag_trans = Property(depends_on=['elp_raw'])
# Polhemus data (in neuromag space)
elp = Property(depends_on=['elp_raw', 'polhemus_neuromag_trans'])
fid = Property(depends_on=['elp_raw', 'polhemus_neuromag_trans'])
hsp = Property(depends_on=['hsp_raw', 'polhemus_neuromag_trans'])
# trans
dev_head_trans = Property(depends_on=['elp', 'mrk', 'use_mrk'])
head_dev_trans = Property(depends_on=['dev_head_trans'])
# event preview
raw = Property(depends_on='sqd_file')
misc_chs = Property(List, depends_on='raw')
misc_chs_desc = Property(Str, depends_on='misc_chs')
misc_data = Property(Array, depends_on='raw')
can_test_stim = Property(Bool, depends_on='raw')
# info
sqd_fname = Property(Str, depends_on='sqd_file')
hsp_fname = Property(Str, depends_on='hsp_file')
fid_fname = Property(Str, depends_on='fid_file')
can_save = Property(Bool, depends_on=['stim_chs_ok', 'fid',
'elp', 'hsp', 'dev_head_trans'])
# Show GUI feedback (like error messages and progress bar)
show_gui = Bool(False)
@cached_property
def _get_can_save(self):
"""Only allow saving when all or no head shape elements are set."""
if not self.stim_chs_ok:
return False
has_all_hsp = (np.any(self.dev_head_trans) and np.any(self.hsp) and
np.any(self.elp) and np.any(self.fid))
if has_all_hsp:
return True
has_any_hsp = self.hsp_file or self.fid_file or np.any(self.mrk)
return not has_any_hsp
@cached_property
def _get_can_test_stim(self):
return self.raw is not None
@cached_property
def _get_dev_head_trans(self):
if (self.mrk is None) or not np.any(self.fid):
return np.eye(4)
src_pts = self.mrk
dst_pts = self.elp
n_use = len(self.use_mrk)
if n_use < 3:
if self.show_gui:
error(None, "Estimating the device head transform requires at "
"least 3 marker points. Please adjust the markers used.",
"Not Enough Marker Points")
return
elif n_use < 5:
src_pts = src_pts[self.use_mrk]
dst_pts = dst_pts[self.use_mrk]
trans = fit_matched_points(src_pts, dst_pts, out='trans')
return trans
@cached_property
def _get_elp(self):
if self.elp_raw is None:
return np.empty((0, 3))
pts = self.elp_raw[3:8]
pts = apply_trans(self.polhemus_neuromag_trans, pts)
return pts
@cached_property
def _get_elp_raw(self):
if not self.fid_file:
return
try:
pts = _read_dig_points(self.fid_file)
if len(pts) < 8:
raise ValueError("File contains %i points, need 8" % len(pts))
except Exception as err:
if self.show_gui:
error(None, str(err), "Error Reading Fiducials")
self.reset_traits(['fid_file'])
raise
else:
return pts
@cached_property
def _get_fid(self):
if self.elp_raw is None:
return np.empty((0, 3))
pts = self.elp_raw[:3]
        pts = apply_trans(self.polhemus_neuromag_trans, pts)
return pts
@cached_property
def _get_fid_fname(self):
if self.fid_file:
return os.path.basename(self.fid_file)
else:
return '-'
@cached_property
def _get_head_dev_trans(self):
return inv(self.dev_head_trans)
@cached_property
def _get_hsp(self):
if (self.hsp_raw is None) or not np.any(self.polhemus_neuromag_trans):
return np.empty((0, 3))
else:
pts = apply_trans(self.polhemus_neuromag_trans, self.hsp_raw)
return pts
@cached_property
def _get_hsp_fname(self):
if self.hsp_file:
return os.path.basename(self.hsp_file)
else:
return '-'
@cached_property
def _get_hsp_raw(self):
fname = self.hsp_file
if not fname:
return
try:
pts = _read_dig_points(fname)
n_pts = len(pts)
if n_pts > KIT.DIG_POINTS:
msg = ("The selected head shape contains {n_in} points, "
"which is more than the recommended maximum ({n_rec}). "
|
mrambausek/PPFem
|
ppfem/__init__.py
|
Python
|
gpl-3.0
| 1,648
| 0.00182
|
# PPFem: An educational finite element code
# Copyright (C) 2015 Matthias Rambausek
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ppfem.user_elements
import ppfem.user_equations
import ppfem.quadrature
from ppfem.user_elements import *
from ppfem.user_equations import *
from ppfem.quadrature import *
from ppfem.mesh.mesh import Mesh
from ppfem.geometry import Point, Vertex, Line, Face, Cell, Mapping
from ppfem.fem.assembler import DefaultSystemAssembler
from ppfem.fem.form import Functional, LinearForm, BilinearForm, FormCollection
from ppfem.fem.function import FEFunction, FunctionEvaluator
from ppfem.fem.function_space import FunctionSpace
from ppfem.fem.partial_differential_equation import PDE
__all__ = ["Mesh", "Point", "Line", "Vertex", "Face", "Cell", "Mapping", "FunctionSpace", "Functional",
"LinearForm", "BilinearForm", "FormCollection", "DefaultSystemAssembler", "FEFunction", "FunctionEvaluator",
"PDE"]
__all__ += ppfem.user_elements.__all__ + ppfem.quadrature.__all__ + ppfem.user_equations.__all__
|
dbtsai/spark
|
dev/create-release/translate-contributors.py
|
Python
|
apache-2.0
| 12,628
| 0.003088
|
#!/usr/bin/env python3
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this
|
file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script translates invalid authors in the contributors list generated
# by generate-contributors.py. When the script encounters an author name that
# is considered invalid, it searches Github and JIRA in an attempt to search
# for replacements. This tool runs in two modes:
#
# (1) Interactive mode: For each invalid author name, this script presents
# all candidate replacements to the user and awaits user response. In this
# mode, the user may also input a custom name. This is the default.
#
# (2) Non-interactive mode: For each invalid author name, this script replaces
# the name with the first valid candidate it can find. If there is none, it
# uses the original name. This can be enabled through the --non-interactive flag.
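#
# Example invocations (illustrative only; the credential values are placeholders,
# the environment variable names and the flag are the ones used in this script):
#   JIRA_USERNAME=user JIRA_PASSWORD=pass GITHUB_API_TOKEN=token ./translate-contributors.py
#   JIRA_USERNAME=user JIRA_PASSWORD=pass GITHUB_API_TOKEN=token ./translate-contributors.py --non-interactive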
import os
import sys
from releaseutils import *
# You must set the following before use!
JIRA_API_BASE = os.environ.get("JIRA_API_BASE", "https://issues.apache.org/jira")
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", None)
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", None)
GITHUB_API_TOKEN = os.environ.get("GITHUB_API_TOKEN", None)
if not JIRA_USERNAME or not JIRA_PASSWORD:
sys.exit("Both JIRA_USERNAME and JIRA_PASSWORD must be set")
if not GITHUB_API_TOKEN:
sys.exit("GITHUB_API_TOKEN must be set")
# Write new contributors list to <old_file_name>.final
if not os.path.isfile(contributors_file_name):
print("Contributors file %s does not exist!" % contributors_file_name)
print("Have you run ./generate-contributors.py yet?")
sys.exit(1)
contributors_file = open(contributors_file_name, "r")
warnings = []
# In non-interactive mode, this script will choose the first replacement that is valid
INTERACTIVE_MODE = True
if len(sys.argv) > 1:
options = set(sys.argv[1:])
if "--non-interactive" in options:
INTERACTIVE_MODE = False
if INTERACTIVE_MODE:
print("Running in interactive mode. To disable this, provide the --non-interactive flag.")
# Setup Github and JIRA clients
jira_options = {"server": JIRA_API_BASE}
jira_client = JIRA(options=jira_options, basic_auth=(JIRA_USERNAME, JIRA_PASSWORD))
github_client = Github(GITHUB_API_TOKEN)
# Load known author translations that are cached locally
known_translations = {}
known_translations_file_name = "known_translations"
known_translations_file = open(known_translations_file_name, "r")
for line in known_translations_file:
if line.startswith("#"):
continue
[old_name, new_name] = line.strip("\n").split(" - ")
known_translations[old_name] = new_name
known_translations_file.close()
# Open again in case the user adds new mappings
known_translations_file = open(known_translations_file_name, "a")
# Generate candidates for the given author. This should only be called if the given author
# name does not represent a full name as this operation is somewhat expensive. Under the
# hood, it makes several calls to the Github and JIRA API servers to find the candidates.
#
# This returns a list of (candidate name, source) 2-tuples. E.g.
# [
# (NOT_FOUND, "No full name found for Github user andrewor14"),
# ("Andrew Or", "Full name of JIRA user andrewor14"),
# ("Andrew Orso", "Full name of SPARK-1444 assignee andrewor14"),
# ("Andrew Ordall", "Full name of SPARK-1663 assignee andrewor14"),
# (NOT_FOUND, "No assignee found for SPARK-1763")
# ]
NOT_FOUND = "Not found"
def generate_candidates(author, issues):
candidates = []
# First check for full name of Github user
github_name = get_github_name(author, github_client)
if github_name:
candidates.append((github_name, "Full name of Github user %s" % author))
else:
candidates.append((NOT_FOUND, "No full name found for Github user %s" % author))
# Then do the same for JIRA user
jira_name = get_jira_name(author, jira_client)
if jira_name:
candidates.append((jira_name, "Full name of JIRA user %s" % author))
else:
candidates.append((NOT_FOUND, "No full name found for JIRA user %s" % author))
# Then do the same for the assignee of each of the associated JIRAs
# Note that a given issue may not have an assignee, or the assignee may not have a full name
for issue in issues:
try:
jira_issue = jira_client.issue(issue)
except JIRAError as e:
# Do not exit just because an issue is not found!
if e.status_code == 404:
warnings.append("Issue %s not found!" % issue)
continue
raise e
jira_assignee = jira_issue.fields.assignee
if jira_assignee:
user_name = jira_assignee.name
display_name = jira_assignee.displayName
if display_name:
candidates.append(
(display_name, "Full name of %s assignee %s" % (issue, user_name)))
else:
candidates.append(
(NOT_FOUND, "No full name found for %s assignee %s" % (issue, user_name)))
else:
candidates.append((NOT_FOUND, "No assignee found for %s" % issue))
# Guard against special characters in candidate names
# Note that the candidate name may already be in unicode (JIRA returns this)
for i, (candidate, source) in enumerate(candidates):
try:
candidate = unicode(candidate, "UTF-8")
        except (NameError, TypeError):
            # already a str; Python 3 has no unicode() built-in
pass
candidate = unidecode.unidecode(candidate).strip()
candidates[i] = (candidate, source)
return candidates
# Translate each invalid author by searching for possible candidates from Github and JIRA
# In interactive mode, this script presents the user with a list of choices and have the user
# select from this list. Additionally, the user may also choose to enter a custom name.
# In non-interactive mode, this script picks the first valid author name from the candidates
# If no such name exists, the original name is used (without the JIRA numbers).
print("\n========================== Translating contributor list ==========================")
lines = contributors_file.readlines()
contributions = []
for i, line in enumerate(lines):
# It is possible that a line in the contributor file only has the github name, e.g. yhuai.
# So, we need a strip() to remove the newline.
temp_author = line.strip(" * ").split(" -- ")[0].strip()
print("Processing author %s (%d/%d)" % (temp_author, i + 1, len(lines)))
if not temp_author:
error_msg = " ERROR: Expected the following format \" * <author> -- <contributions>\"\n"
error_msg += " ERROR: Actual = %s" % line
print(error_msg)
warnings.append(error_msg)
contributions.append(line)
continue
author = temp_author.split("/")[0]
# Use the local copy of known translations where possible
if author in known_translations:
line = line.replace(temp_author, known_translations[author])
elif not is_valid_author(author):
new_author = author
issues = temp_author.split("/")[1:]
candidates = generate_candidates(author, issues)
# Print out potential replacement candidates along with the sources, e.g.
# [X] No full name found for Github user andrewor14
# [X] No assignee found for SPARK-1763
# [0] Andrew Or - Full name of JIRA user andrewor14
# [1] Andrew Orso
|
dudanogueira/microerp
|
microerp/solicitacao/models.py
|
Python
|
lgpl-3.0
| 6,439
| 0.006412
|
# -*- coding: utf-8 -*-
"""This file is part of the microerp project.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
__author__ = 'Duda Nogueira <dudanogueira@gmail.com>'
__copyright__ = 'Copyright (c) 2013 Duda Nogueira'
__version__ = '2.0.0'
import datetime
from django.conf import settings
from django.db import models
from django.core.exceptions import ValidationError
SOLICITACAO_STATUS_CHOICES = (
('aberta', u'Solicitação Aberta'),
('analise', u'Solicitação em Análise'),
('contato', u'Solicitação em Contato'),
('visto', u'Solicitação em Visto'),
    ('resolvida', u'Solicitação Resolvida'),
('naoresolvido', u'Solicitação Não Resolvida'),
)
SOLICITACAO_PRIORIDADE_CHOICES = (
(0, u'Baixa Prioridade'),
(5, u'Média Prioridade'),
(10, u'Alta Prioridade'),
)
class Solicitacao(models.Model):
def __unicode__(self):
return u"Solicitação ID#%d status: %s" % (self.id, self.status)
def reclamante(self):
return self.cliente or self.precliente or self.contato
def contato_principal(self):
if self.cliente:
return self.cliente.telefone_fixo or None, self.cliente.telefone_celular or None, self.cliente.fax or None
elif self.precliente:
return [self.precliente.contato]
else:
return [self.contato]
# cliente / pre cliente / contato
cliente = models.ForeignKey('cadastro.Cliente', blank=True, null=True)
precliente = models.ForeignKey('cadastro.PreCliente', blank=True, null=True)
contato = models.TextField(blank=True, null=True)
# solicitacao
prioridade = models.IntegerField(blank=False, null=False, default=5, choices=SOLICITACAO_PRIORIDADE_CHOICES)
descricao = models.TextField(u"Descrição", blank=False, null=False)
tipo = models.ForeignKey('TipoSolicitacao', verbose_name="Tipo de Solicitação")
canal = models.ForeignKey('CanalSolicitacao', verbose_name="Canal da Solicitação")
status = models.CharField(blank=False, max_length=100, choices=SOLICITACAO_STATUS_CHOICES, default="aberta")
procede = models.BooleanField(default=True)
nao_procede_porque = models.TextField(blank=True)
providencia = models.TextField(blank=True)
# prazo
prazo = models.DateField(default=datetime.datetime.today)
resolucao_final = models.TextField("Resolução Final", blank=True)
resolucao_final_data = models.DateTimeField(blank=True, null=True)
# departamento / abrangencia
departamentos_afetados = models.ManyToManyField('rh.Departamento', related_name="solicitacao_afetada_set", blank=True)
departamento_direto = models.ForeignKey('rh.Departamento', related_name="solicitacao_direta_set", blank=True, null=True)
# responsavel correcao
responsavel_correcao = models.ForeignKey('rh.Funcionario', related_name="solicitacao_correcao_set", blank=True, null=True)
correcao_iniciada = models.DateTimeField(blank=True, null=True)
responsavel_contato = models.ForeignKey('rh.Funcionario', related_name="solicitacao_contato_set", blank=True, null=True)
contato_realizado = models.DateTimeField(blank=True, null=True)
responsavel_visto = models.ForeignKey('rh.Funcionario', related_name="solicitacao_visto_set", blank=True, null=True)
visto_data = models.DateTimeField(blank=True, default=datetime.datetime.now)
# metadata
adicionado_por = models.ForeignKey('rh.Funcionario', related_name="solicitacao_adicionada_set", blank=True, null=True)
despachado_por = models.ForeignKey('rh.Funcionario', related_name="solicitacao_despachado_set", blank=True, null=True)
despachado_data = models.DateTimeField(blank=True, null=True)
criado = models.DateTimeField(blank=True, auto_now_add=True, verbose_name="Criado")
atualizado = models.DateTimeField(blank=True, auto_now=True, verbose_name="Atualizado")
def clean(self):
        # if the request does not proceed, a reason is required
if not self.procede and not self.nao_procede_porque:
raise ValidationError(u"Se a solicitação não procede, deve ser informado um motivo.")
        # there must be at least one contact
if not self.cliente and not self.precliente and not self.contato:
raise ValidationError(u"Se não houver um Cliente ou Pré Cliente Relacionado, deve haver pelo menos um contato")
if self.cliente and self.precliente:
raise ValidationError(u"Deve haver um Cliente ou um Pré Cliente, não os dois.")
if self.status == 'analise' and not self.providencia:
raise ValidationError(u"Para alterar o status para Análise, deve ser preenchido o campo Providência")
if self.status == 'resolvida' and not self.resolucao_final:
raise ValidationError(u"Para alterar o status para Resolvida, deve ser preenchido o campo Resolução Final")
class Meta:
ordering = ['prioridade', 'criado',]
class TipoSolicitacao(models.Model):
def __unicode__(self):
return self.nome
nome = models.CharField(blank=True, max_length=100)
class CanalSolicitacao(models.Model):
def __unicode__(self):
return self.nome
nome = models.CharField(blank=True, max_length=100)
class PerfilAcessoSolicitacao(models.Model):
'''Perfil de Acesso ao módulo Solicitação'''
class Meta:
verbose_name = u"Perfil de Acesso à Solicitação"
verbose_name_plural = u"Perfis de Acesso às Solicitação"
gerente = models.BooleanField(default=False)
analista = models.BooleanField(default=True)
user = models.OneToOneField(settings.AUTH_USER_MODEL)
# metadata
criado = models.DateTimeField(blank=True, auto_now_add=True, verbose_name="Criado")
atualizado = models.DateTimeField(blank=True, auto_now=True, verbose_name="Atualizado")
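# Illustrative sketch (editor's addition, not part of the original module): how the clean()
# rules on Solicitacao above behave. Field values are made up and no database is touched.
#
#   from django.core.exceptions import ValidationError
#   s = Solicitacao(contato="Fulano - (11) 99999-9999",
#                   descricao="Produto chegou com defeito",
#                   status="analise")
#   try:
#       s.clean()            # raises: status 'analise' requires 'providencia'
#   except ValidationError as exc:
#       print(exc)
#   s.providencia = "Encaminhado ao setor responsavel"
#   s.clean()                # passes: a contact is set and providencia is filled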
|
DemocracyClub/UK-Polling-Stations
|
polling_stations/apps/data_importers/tests/stubs/stub_addressimport.py
|
Python
|
bsd-3-clause
| 1,144
| 0.001748
|
import os
from django.contrib.gis.geos import Point
from data_importers.management.commands import BaseCsvStationsCsvAddressesImporter
"""
Define a stub implementation of address importer we can run tests against
"""
class Command(BaseCsvStationsCsvAddressesImporter):
srid = 4326
council_id = "ABC"
addresses_name = "addresses.csv"
stations_name = "stations.csv"
base_folder_path = os.path.join(
os.path.dirname(__file__),
|
"../fixtures/csv_importer"
)
def address_record_to_dict(self, record):
return {
"council": self.council,
"uprn": record.uprn,
"address": record.address,
"postcode": record.postcode,
"polling_station_id": record.polling_station,
}
def station_reco
|
rd_to_dict(self, record):
location = Point(float(record.lng), float(record.lat), srid=self.get_srid())
return {
"council": self.council,
"internal_council_id": record.internal_council_id,
"postcode": record.postcode,
"address": record.address,
"location": location,
}
|
ACS-Community/ACS
|
LGPL/CommonSoftware/nctest/ws/test/pyStructureEventTest.py
|
Python
|
lgpl-2.1
| 1,355
| 0.030996
|
#!/usr/bin/env python
from Acspy.Nc.CommonNC import CommonNC
from Acspy.Nc.Supplier import Supplier
import datacapEx
from datacapEx
|
import ExecBlockProcessedEvent, DataCapturerId, ExecBlockStartedEvent, ScanStartedEvent
import asdmEX
s = Supplier('pyTest-NC')
name = 'DATACAP1'
s.publishEvent(name)
sessionId = asdmEX.IDLEntityRef('SessionId','X1','SID','1.0')
sb = asdmEX.IDLEntityRef('SB1','X1','SB1','1.0')
dcId = DataCapturerId (name, 'a
|
rrayId', sessionId, sb)
execBlockId = asdmEX.IDLEntityRef('ExecBlockId','X1','SB1','1.0')
d = ExecBlockProcessedEvent( dcId, 'statu', execBlockId, 0)
s.publishEvent(d)
execId = asdmEX.IDLEntityRef('4','3','2', '1')
execBlockId = asdmEX.IDLEntityRef('1','2','3','4')
sse = ScanStartedEvent(execId, "something", 4, [datacapEx.LAST, datacapEx.LAST],0)
s.publishEvent(sse)
execId = "23"
execBlockEntityRef = asdmEX.IDLEntityRef(execId,"X00000000","0","0")
sbId = asdmEX.IDLEntityRef(execId,"X00000000","0","0")
arrayId = "1"
time = 100
startExecBlock = datacapEx.ExecBlockStartedEvent(execBlockEntityRef,sbId,sessionId,arrayId,time)
s.publishEvent(startExecBlock)
endExecBlock = datacapEx.ExecBlockEndedEvent(execBlockEntityRef,sbId,sessionId,arrayId,datacapEx.SUCCESS,time+10)
s.publishEvent(endExecBlock)
print "All structures successfully sent!!"
s.destroyNotificationChannel()
|
vgrem/Office365-REST-Python-Client
|
office365/directory/licenses/service_plan_info.py
|
Python
|
mit
| 1,595
| 0.004389
|
from office365.runtime.client_value import ClientValue
class ServicePlanInfo(ClientValue):
"""Contains information about a service plan associated with a subscribed SKU. The servicePlans property of
the subscribedSku entity is a collection of servicePlanInfo."""
def __init__(self, _id=None, name=None, provisioning_status=None, applies_to=None):
"""
:param str applies_to: The object the service plan can be assigned to. Possible values:
"User" - service plan can be assigned to individual users.
"Company" - service plan can be assigned to the entire tenant.
:param str provisioning_status: The provisioning status of the service plan. Possible values:
"Success" - Service is fully provisioned.
"Disabled" - Service has been disabled.
"PendingInput" - Service is not yet provisioned; awaiting service confirmation.
"PendingActivation" - Service is provisioned but requires explicit activation by administrator
|
(for example, Intune_O365 service plan)
"PendingProvisioning" - Microsoft has added a new service to the product SKU and it has not been
activated in the tenant, yet.
:param str name: The name of the service plan.
:param str _id: The unique identifier of the service plan.
"""
super(ServicePlanInfo, self).__init__()
self.servicePlanId = _id
self.servicePlanName = name
self.provisioningSta
|
tus = provisioning_status
self.appliesTo = applies_to
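# Illustrative usage sketch (editor's addition): constructing the value type above. The GUID
# and plan name are made up, not real service plan assignments.
if __name__ == "__main__":
    plan = ServicePlanInfo(
        _id="00000000-0000-0000-0000-000000000000",
        name="EXCHANGE_S_ENTERPRISE",
        provisioning_status="Success",
        applies_to="User",
    )
    print(plan.servicePlanName, plan.provisioningStatus, plan.appliesTo)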
|
vlvkobal/netdata
|
collectors/python.d.plugin/python_modules/urllib3/packages/ssl_match_hostname/__init__.py
|
Python
|
gpl-3.0
| 719
| 0.001391
|
# SPDX-License-Identifier: MIT
import sys
try:
# Our match_hostname function is the same as 3.5's, so we only want to
# import the match_hostname function if it's at least that good.
if sys.version_info < (3, 5):
raise ImportError("Fallback to vendored code")
from ssl import CertificateError, match_hostname
except ImportError:
try:
# Backport of the function from a pypi module
from backports.ssl_match_hostname import CertificateError, match_hostname
except ImportError:
# Our vendored copy
from ._implementation import
|
CertificateError, match_hostname
# Not needed, but documenting what we provide.
__all__ = ('CertificateError', 'match_hostn
|
ame')
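# Illustrative usage sketch (editor's addition) of the names re-exported above. The cert dict
# is a made-up example shaped like the dict returned by SSLSocket.getpeercert().
if __name__ == "__main__":
    cert = {
        "subject": ((("commonName", "example.com"),),),
        "subjectAltName": (("DNS", "example.com"), ("DNS", "www.example.com")),
    }
    match_hostname(cert, "www.example.com")      # matches, returns None
    try:
        match_hostname(cert, "bad.example.org")  # does not match the certificate
    except CertificateError as exc:
        print("hostname mismatch: %s" % exc)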
|
dcherian/pyroms
|
pyroms_toolbox/pyroms_toolbox/BGrid_GFDL/get_nc_BGrid_GFDL.py
|
Python
|
bsd-3-clause
| 1,731
| 0.009821
|
import numpy as np
import pyroms
from pyroms_toolbox.BGrid_GFDL import BGrid_GFDL
def get_nc_BGrid_GFDL(grdfile, name='GFDL_CM2.1_North_Pacific', \
# xrange=(80,189), yrange=(96,198)):
xrange=(60,175), yrange=(120, 190)):
"""
Bgrd = get_nc_BGrid_GFDL(grdfile)
Load B-Grid grid object for GFDL CM2.1 from netCDF grid file
"""
nc = pyroms.io.Dataset(grdfile)
lon_t = nc.variables['geolon_t'][:]
lat_t = nc.variables['geolat_t'][:]
lon_uv = nc.variables['geolon_c'][:]
lat_uv = nc.variables['geolat_c'][:]
h = nc.variables['ht'][:]
f = nc.variables['coriolis_param'][:]
kmt = nc.variables['kmt'][:]
z_t = nc.variables['st_ocean'][:]
z_t_edges = nc.variables['st_edges_ocean'][:]
kmu = nc.variables['kmu'][:]
z_uv = nc.variables['sw_ocean'][:]
z_uv_edges = nc.variables['sw_edges_ocean'][:]
# compute mask at t-point
M_t, L_t = kmt.shape
N_t = z_t.shape[0]
mask_t = np.zeros((N_t, M_t, L_t))
for j in range(M_t):
for i in range(L_t):
try:
mask_t[0:kmt[j,i], j,i] = 1
|
except:
mask_t[:, j,i] = 0
# compute mask at uv-point
M_uv, L_uv = km
|
u.shape
N_uv = z_uv.shape[0]
mask_uv = np.zeros((N_uv, M_uv, L_uv))
for j in range(M_uv):
for i in range(L_uv):
try:
mask_uv[0:kmu[j,i], j,i] = 1
except:
mask_uv[:, j,i] = 0
return BGrid_GFDL(lon_t, lat_t, lon_uv, lat_uv, \
mask_t, mask_uv, h, z_t, z_t_edges, \
z_uv, z_uv_edges, f, \
name, xrange=xrange, yrange=yrange)
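# Illustrative usage sketch (editor's addition), following the docstring above; the grid file
# path is a placeholder.
if __name__ == "__main__":
    Bgrd = get_nc_BGrid_GFDL("ocean_grid_spec.nc", xrange=(60, 175), yrange=(120, 190))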
|
meco-group/omg-tools
|
examples/p2p_3dquadrotor.py
|
Python
|
lgpl-3.0
| 2,057
| 0.000972
|
# This file is part of OMG-tools.
#
# OMG-tools -- Optimal Motion Generation-tools
# Copyright (C) 2016 Ruben Van Parys & Tim Mercy, KU Leuven.
# All rights reserved.
#
# OMG-tools is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from omgtools import *
import numpy as np
# create vehicle
vehicle = Quadrotor3D(0.5)
vehicle.set_initial_conditions([-3, -2, -0.5, 0, 0, 0, 0, 0])
vehicle.set_terminal_conditions([3, 2, 0.5])
vehicle.set_options({'safety_distance': 0.1, 'safety_weight': 10})
# create environment
environment = Environment(room={'shape': Cuboid(8, 6, 8)})
trajectory = {'velocity': {'time': [1.5], 'values': [[0, 0, -0.6]]}}
obst1
|
= Obstacle({'position': [-2, 0, -2]}, shape=Plate(Rectangle(5., 8.), 0.1,
orientation=[0., np.pi/2, 0.]), options={'draw': True})
obst2 = Obstacle({'position': [2, 0, 3.5]}, shape=Plate(Rectangle(5., 8.), 0
|
.1,
orientation=[0., np.pi/2, 0.]),
simulation={'trajectories': trajectory}, options={'draw': True})
environment.add_obstacle([obst1, obst2])
# create a point-to-point problem
problem = Point2point(vehicle, environment, freeT=False, options={'horizon_time': 5.})
problem.init()
vehicle.problem = problem
# create simulator
simulator = Simulator(problem, sample_time=0.01, update_time=0.4)
vehicle.plot('input', knots=True)
problem.plot('scene', view=[20, -80])
# run it!
simulator.run()
|
ChrisHirsch/robotframework
|
atest/testdata/keywords/DupeKeywords.py
|
Python
|
apache-2.0
| 372
| 0.016129
|
from robot.api.deco import keyword
def defined_twice():
1/0
@keyword('Defined twice')
def this_time_using_custom_name():
|
2/0
def defined_thrice():
1/0
def definedThrice():
2/0
def Defined_Thrice():
3/0
@keyword('Embedded ${arguments} twice')
def embedded1(arg):
1/0
@keyword('Embedded ${arguments match} TWICE')
def embedded2(arg):
|
2/0
|
summanlp/gensim
|
gensim/scripts/glove2word2vec.py
|
Python
|
lgpl-2.1
| 2,742
| 0.002918
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016 Radim Rehurek <radimrehurek@seznam.cz>
# Copyright (C) 2016 Manas Ranjan Kar <manasrkar91@gmail.com>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
USAGE:
$ python -m gensim.scripts.glove2word2vec --input <GloVe vector file> --output <Word2vec vector file>
Where:
<GloVe vector file>: Input GloVe .txt file
<Word2vec vector file>: Desired name of output Word2vec .txt file
This script is used to convert GloVe vectors in text format into the word2vec text format.
The only difference between the two formats is an extra header line in word2vec,
which contains the number of vectors and their dimensionality (two integers).
"""
import os
import sys
import logging
import argparse
from smart_open import smart_open
logger = logging.getLogger(__name__)
def get_glove_info(glove_file_name):
"""Return the number of vectors and dimensions in a file in GloVe format."""
with smart_open(glove_file_name) as f:
num_lines = sum(1 for line in f)
with smart_open(glove_file_name) as f:
num_dims = len(f.readline().split()) - 1
return num_lines, num_dims
def glove2word2vec(glove_input_file, word2vec_output_file):
"""Convert `glove_input_file` in GloVe format into `word2vec_output_file` in word2vec format."""
num_lines, num_dims = get_glove_info(glove_input_file)
logger.info("converting %i vectors from %s to %s", num_lines, glove_input_file, word2vec_output_file)
with sm
|
art_open(word2vec_output_file, 'wb') as fout:
fout.write("{0} {1}\n".format
|
(num_lines, num_dims).encode('utf-8'))
with smart_open(glove_input_file, 'rb') as fin:
for line in fin:
fout.write(line)
return num_lines, num_dims
if __name__ == "__main__":
logging.basicConfig(format='%(asctime)s : %(threadName)s : %(levelname)s : %(message)s', level=logging.INFO)
logging.root.setLevel(level=logging.INFO)
logger.info("running %s", ' '.join(sys.argv))
# check and process cmdline input
program = os.path.basename(sys.argv[0])
if len(sys.argv) < 2:
print(globals()['__doc__'] % locals())
sys.exit(1)
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input", required=True,
help="Input file, in gloVe format (read-only).")
parser.add_argument(
"-o", "--output", required=True,
help="Output file, in word2vec text format (will be overwritten).")
args = parser.parse_args()
# do the actual conversion
num_lines, num_dims = glove2word2vec(args.input, args.output)
logger.info('Converted model with %i vectors and %i dimensions', num_lines, num_dims)
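# Programmatic use is also possible (editor's note); the file paths below are placeholders:
#   from gensim.scripts.glove2word2vec import glove2word2vec
#   num_lines, num_dims = glove2word2vec("glove.6B.100d.txt", "glove.6B.100d.word2vec.txt")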
|
keurfonluu/StochOPy
|
stochopy/optimize/pso/_pso.py
|
Python
|
mit
| 5,326
| 0.003192
|
from .. import cpso
from .._helpers import register
__all__ = [
"minimize",
]
def minimize(
fun,
bounds,
x0=None,
args=(),
maxiter=100,
popsize=10,
inertia=0.7298,
cognitivity=1.49618,
sociability=1.49618,
seed=None,
xtol=1.0e-8,
ftol=1.0e-8,
constraints=None,
updating="immediate",
workers=1,
backend=None,
return_all=False,
callback=None,
):
"""
Minimize an objective function using Competitive Particle Swarm Optimization (CPSO).
Parameters
----------
fun : callable
The objective function to be minimized. Must be in the form ``f(x, *args)``, where ``x`` is the argument in the form of a 1-D array and args is a tuple of any additional fixed parameters needed to completely specify the function.
bounds : array_like
Bounds for variables. ``(min, max)`` pairs for each element in ``x``, defining the finite lower and upper bounds for the optimizing argument of ``fun``. It is required to have ``len(bounds) == len(x)``. ``len(bounds)`` is used to determine the number of parameters in ``x``.
x0 : array_like or None, optional, default None
Initial population. Array of real elements with shape (``popsize``, ``ndim``), where ``ndim`` is the number of independent variables. If ``x0`` is not specified, the population is initialized using Latin Hypercube sampling.
args : tuple, optional, default None
Extra arguments passed to the objective function.
maxiter : int, optional, default 100
|
The maximum number of generations over which the entire population is evolved.
popsize : int, optional, default 10
Total population size.
inertia : scalar, optional, default 0.7298
Inertial weight, denoted by w in the literature. It should be in the range [0, 1].
cognitivity : scalar, optional, default 1.49618
Cognition parameter, denoted by c1 in the literature. It should be in the range [0, 4].
sociabilit
|
y: scalar, optional, default 1.49618
Sociability parameter, denoted by c2 in the literature. It should be in the range [0, 4].
seed : int or None, optional, default None
Seed for random number generator.
xtol : scalar, optional, default 1.0e-8
Solution tolerance for termination.
ftol : scalar, optional, default 1.0e-8
Objective function value tolerance for termination.
constraints : str or None, optional, default None
Constraints definition:
- None: no constraint
- 'Shrink': infeasible solutions are repaired by shrinking particles' velocity vector
updating : str {'immediate', 'deferred'}, optional, default 'immediate'
If ``'immediate'``, the best solution vector is continuously updated within a single generation. This can lead to faster convergence as candidate solutions can take advantage of continuous improvements in the best solution. With ``'deferred'``, the best solution vector is updated once per generation. Only ``'deferred'`` is compatible with parallelization, and is overridden when ``workers`` is not ``0`` or ``1`` or ``backend == 'mpi'``.
workers : int, optional, default 1
The population is subdivided into workers sections and evaluated in parallel (uses :class:`joblib.Parallel`). Supply -1 to use all available CPU cores.
backend : str {'loky', 'threading', 'mpi'}, optional, default 'threading'
Parallel backend to use when ``workers`` is not ``0`` or ``1``:
- 'loky': disable threading
- 'threading': enable threading
- 'mpi': use MPI (uses :mod:`mpi4py`)
return_all : bool, optional, default False
Set to True to return an array with shape (``nit``, ``popsize``, ``ndim``) of all the solutions at each iteration.
callback : callable or None, optional, default None
Called after each iteration. It is a callable with the signature ``callback(X, OptimizeResult state)``, where ``X`` is the current population and ``state`` is a partial :class:`stochopy.optimize.OptimizeResult` object with the same fields as the ones from the return (except ``"success"``, ``"status"`` and ``"message"``).
Returns
-------
:class:`stochopy.optimize.OptimizeResult`
The optimization result represented as a :class:`stochopy.optimize.OptimizeResult`. Important attributes are:
- ``x``: the solution array
- ``fun``: the solution function value
- ``success``: a Boolean flag indicating if the optimizer exited successfully
- ``message``: a string which describes the cause of the termination
References
----------
.. [1] J. Kennedy and R. Eberhart, *Particle swarm optimization*, Proceedings of ICNN'95 - International Conference on Neural Networks, 1995, 4: 1942-1948
.. [2] F. Van Den Bergh, *An analysis of particle swarm optimizers*, University of Pretoria, 2001
"""
competitivity = None
return cpso(
fun,
bounds,
x0,
args,
maxiter,
popsize,
inertia,
cognitivity,
sociability,
competitivity,
seed,
xtol,
ftol,
constraints,
updating,
workers,
backend,
return_all,
callback,
)
register("pso", minimize)
|
brosander/kettle-vertx-webapp
|
src/util/copyData.py
|
Python
|
apache-2.0
| 1,824
| 0.014254
|
#!/usr/bin/python
import urllib2, base64
import shutil
import json
import argparse
import os
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='''
This script will copy the step/jobentry images from a running carte (with kthin-server) to the img folder
''', formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-u', '--user', help='The user to connect to carte as', default='cluster')
parser.add_argument('-p', '--password', help='The password to connect to carte with', default='cluster')
parser.add_argument('-q', '--port', help='The port to connect to carte at', default='8001')
parser.add_argument('-d', '--destination', help='The top level destination directory', default='../app')
args = parser.parse_args()
def getData(relPath):
request = urllib2.Request("http://localhost:%s/%s" % (args.port, relPath))
auth = base64.encodestring('%s:%s' %(args.user, args.password)).replace('\n', '')
request.add_header('Authorization', 'Basic %s' %auth)
result = urllib2.urlopen(request)
return result.read()
def getListAndImages(listUrl, listFile, entryName):
categories = json.loads(getData(listUrl))
for category in categories:
for entry in category[entryName]:
print 'Getting image for ' + entry['name']
data = getData(entry['image'])
entryPath = 'img/' + entryName + '/' + entry['name'] + '.png'
entry['image'] = entryPath
with open(
|
args.destination + '/' + entryPath, 'wb
|
') as f:
f.write(data)
with open(args.destination + listFile, 'w') as f:
f.write(json.dumps(categories))
getListAndImages('kettle/kthin/stepList/', '/kettle/kthin/stepList', 'steps')
getListAndImages('kettle/kthin/jobEntryList/', '/kettle/kthin/jobEntryList', 'jobEntries')
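# Example invocation (editor's note), using the argparse defaults defined above:
#   ./copyData.py --user cluster --password cluster --port 8001 --destination ../app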
|
LJMNilsson/memtran
|
src/tokens.py
|
Python
|
gpl-3.0
| 12,189
| 0.008696
|
# Copyright (C) 2017 Martin Nilsson
# This file is part of the Memtran compiler.
#
# The Memtran compiler is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The Memtran compiler is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the Memtran compiler. If not, see http://www.gnu.org/licenses/ .
def enum(*sequential, **named): # for Python 2.7 compatibility, I guess
enums = dict(zip(sequential, range(len(sequential))), **named)
return type('Enum', (), enums)
Tok = enum(
'EOF', #
'LCURLYBRACKET', #
'RCURLYBRACKET', #
'LSQUAREBRACKET', #
'RSQUAREBRACKET', #
'LPAREN', #
'RPAREN', #
'COLON', #
'SEMICOLON', #
'PERIOD', #
'COMMA', #
'STRING', #
'IF', #
'ELSE', #
'FOR', #
'IN', #
'OVER', #
'RETURN', #
'INDEXOFFSET', #
'INDEXFACTOR', #
'LOOP', #
'MUT', #
'REF', #
'INLINE', #
'TO', #
'ASSIGNMENTOPERATOR', #
'CASE', #
'DEFAULT', #
'ANDSYMBOL', #
'ORSYMBOL', #
'TRUE', #
'FALSE', #
'FUN', #
'END', #
'IMPORT', #
'INTERNAL', #
'TYPE', #
'LABEL', #
'IDENTIFIER', #
'INTEGER', #
'FLOAT', #
'NILTYPE', #
'BOOL', #
'I8', #
'I16', #
'I32', #
'I64', #
'ISIZE', #
'U8', #
'U16', #
'U32', #
'U64', #
'USIZE', #
'F32', #
'F64', #
'PERCENT', #
'STAR', #
'PLUS', #
'SLASH', #
'MINUS', #
'LESSTHAN', #
'GREATERTHAN', #
'EXCLAMATION', #
'BACKTICK', #
'PERCENTASSIGNMENTOPERATOR', #
'STARASSIGNMENTOPERATOR', #
'PLUSASSIGNMENTOPERATOR', #
'SLASHASSIGNMENTOPERATOR', #
'MINUSASSIGNMENTOPERATOR', #
'LESSTHANOREQUALS', #
'GREATERTHANOREQUALS', #
'EQUALS', #
'EQUALSNOT', #
'BREAK', #
'CONTINUE', #
'SINGLEQUOTE', #
'TRIPLECOLON', #
'CONTENTTYPE', #
'PREFIXIMPORT', #
'CONSTRUAND',
'DOWNTO',
'REPEAT',
'TRASH',
'UNINITIALIZED',
'IFUPPERCASE',
'ELSEUPPERCASE',
'SWITCHUPPERCASE',
'CONTENTTYPEUPPERCASE',
'CASEUPPERCASE',
'DEFAULTUPPERCASE',
'SWITCH',
'BACKSLASH',
'DOUBLEPERIOD',
'TRIPLEPERIOD',
'DOUBLESINGLEQUOTE',
'ARR',
'TILDE',
'VBOX',
'ERRATIC'
)
class Token:
# public long lineNr;
# public long rowNr;
# public Tok kind;
# public String tokString;
def __init__(self, lineNr, rowNr, kind, tokString):
self.lineNr = lineNr
self.rowNr = rowNr
self.kind = kind
self.tokString = tokString
# def __init__(self, toBeCopied):
# self.lineNr = toBeCopied.lineNr
# self.rowNr = toBeCopied.rowNr
# self.kind = toBeCopied.kind
# self.tokString = toBeCopied.tokString
def print_it(self): # for testing purposes
if self.kind == Tok.EOF:
print("EOF", end='')
elif self.kind == Tok.LCURLYBRACKET:
print("{", end='')
elif self
|
.kind == Tok.RCURLYBRACKET:
print("}", end='')
elif self.kind == Tok.LSQUAREBRACKET:
print("[", end='')
elif self.kind == Tok.RSQUAREBRACKET:
print("]", end='')
elif self.kind == Tok.LPAREN:
print("(", end='')
elif self.kind == Tok.RPAREN:
print(")", end='')
elif self.kind == To
|
k.COLON:
print(":", end='')
elif self.kind == Tok.SEMICOLON:
print(";", end='')
elif self.kind == Tok.PERIOD:
print(".", end='')
elif self.kind == Tok.COMMA:
print(",", end='')
elif self.kind == Tok.STRING:
print("\"" + self.tokString + "\"", end='') # will print newlines and cr:s and escape chars in a funny way though
elif self.kind == Tok.IF:
print("if", end='')
elif self.kind == Tok.ELSE:
print("else", end='')
elif self.kind == Tok.FOR:
print("for", end='')
elif self.kind == Tok.IN:
print("in", end='')
elif self.kind == Tok.OVER:
print("over", end='')
elif self.kind == Tok.RETURN:
print("return", end='')
elif self.kind == Tok.INDEXOFFSET:
print("indexoffset", end='')
elif self.kind == Tok.INDEXFACTOR:
print("indexfactor", end='')
elif self.kind == Tok.LOOP:
print("loop", end='')
elif self.kind == Tok.MUT:
print("mu", end='')
elif self.kind == Tok.REF:
print("ref", end='')
elif self.kind == Tok.INLINE:
print("inline", end='')
elif self.kind == Tok.TO:
print("to", end='')
elif self.kind == Tok.ASSIGNMENTOPERATOR:
print("=", end='')
elif self.kind == Tok.CASE:
print("case", end='')
elif self.kind == Tok.DEFAULT:
print("default", end='')
elif self.kind == Tok.ANDSYMBOL:
print("&&", end='')
elif self.kind == Tok.ORSYMBOL:
print("||", end='')
elif self.kind == Tok.TRUE:
print("true", end='')
elif self.kind == Tok.FALSE:
print("false", end='')
elif self.kind == Tok.FUN:
print("fn", end='')
elif self.kind == Tok.END:
print("end", end='')
elif self.kind == Tok.IMPORT:
print("import", end='')
elif self.kind == Tok.INTERNAL:
print("internal", end='')
elif self.kind == Tok.TYPE:
print("type", end='')
elif self.kind == Tok.LABEL:
print("label", end='')
elif self.kind == Tok.IDENTIFIER:
print("$" + self.tokString, end='')
elif self.kind == Tok.INTEGER:
print("#" + self.tokString, end='')
elif self.kind == Tok.FLOAT:
print("##" + self.tokString, end='')
elif self.kind == Tok.NILTYPE:
print("nil", end='')
elif self.kind == Tok.BOOL:
print("bool", end='')
elif self.kind == Tok.I8:
print("i8", end='')
elif self.kind == Tok.I16:
print("i16", end='')
elif self.kind == Tok.I32:
print("i32", end='')
elif self.kind == Tok.I64:
print("i64", end='')
eli
|
SoftwareEngineeringToolDemos/FSE-2011-EvoSuite
|
release_results/script/experiments_base.py
|
Python
|
lgpl-3.0
| 5,733
| 0.017966
|
#!/usr/bin/python
#
# Copyright (C) 2010-2015 Gordon Fraser, Andrea Arcuri and EvoSuite
# contributors
#
# This file is part of EvoSuite.
#
# EvoSuite is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser Public License as published by the
# Free Software Foundation, either version 3.0 of the License, or (at your
# option) any later version.
#
# EvoSuite is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License along
# with EvoSuite. If not, see <http://www.gnu.org/licenses/>.
#
import math
import sys
import os
import re
import random
import getpass
# Path to this script
SRC=os.path.dirname(sys.argv[0])
USERNAME=getpass.getuser()
if len(sys.argv) != 7:
print "Usage:\n<nameOfScript>.py <dir> <minSeed> <maxSeed> <classFile> <maxJobs> <cores>"
exit(1)
BASEDIR =
|
os.path.abspath(sys.argv[1])
if not os.path.isdir(BASEDIR):
print "creating folder: " + BASEDIR
os.makedirs(BASEDIR)
else:
print "target folder already exists"
exit(1)
# Where to put stuff (default in subdirs of BASEDIR)
REPORTS="%s/reports" % BASEDIR
SCRIPTDIR="%s/scripts" % BASEDIR
LOGDIR="%s/logs" % BASEDIR
TESTDIR="%s/tests" % BASEDIR
os.makedirs(REPORTS)
os.makedirs(SCRIPTDIR)
os.makedirs(LOGDIR)
os.makedirs(TESTDIR)
MINSEED = int(sys.argv[2])
MAXSEE
|
D = int(sys.argv[3])
MAX_JOBS = int(sys.argv[5])
# Initialize DB of target classes
CLASSES_FILE=sys.argv[4]
if not os.path.isfile(CLASSES_FILE):
print 'Could not find class file ' + sys.argv[4]
exit(1)
CLASSES = []
f = open(CLASSES_FILE)
for line in f:
entry = line.rstrip().split()
CLASSES.append(entry)
f.close()
NUM_CLASSES=len(CLASSES)
CORES = int(sys.argv[6])
if CORES <= 0 :
print 'Wrong number of cores'
exit(1)
# Global counter of jobs created
JOB_ID=0
CALL_ID=0
CONFIG_ID=0
SEARCH_BUDGET=0
# Creates a single call to EvoSuite
def getEvoSuiteCall(seed, configId, config, project, clazz, id, strategy, coreIndex):
global SCRIPTDIR
global CASESTUDY_DIR
global JOB_ID
global EVOSUITE
global REPORTS
global SEARCH_BUDGET
global FIXED
global CALL_ID
global CORES
global TIMEOUT
logfile = "%s/%d_%s_%s_%s" % (LOGDIR, JOB_ID, configId, seed, project)
reportfile="%s/%d/c%d" % (REPORTS, id, coreIndex)
project = project.rstrip()
result = "pushd . > /dev/null 2>&1 \n"
result += "cd %s/%s\n" % (CASESTUDY_DIR, project)
if CORES != 1 :
result += "timeout -k " + TIMEOUT + " " + TIMEOUT + " "
result += ""+EVOSUITE+" "+strategy+" -class "+ clazz +" -seed "+str(seed)
result += " -Dconfiguration_id="+configId+ " -Dgroup_id="+project
result += " "+config+" "+FIXED
result += " -Dreport_dir="+reportfile
result += " -Dtest_dir=" +TESTDIR + "/s" + str(seed) + "/" + project +"/evosuite-tests"
result += " 2>&1 | tee -a "+logfile
if CORES != 1 :
result += " & "
result += "\npids+=($!) "
result += "\n"
result += "popd > /dev/null 2>&1 \n\n"
CALL_ID += 1
return result
# Creates the scripts for a given config and seed range
def createJobs(minSeed, maxSeed, configId, config, startNum, strategy="-generateSuite"):
global SCRIPTDIR
global CASESTUDY_DIR
global JOB_ID
global CONFIG_ID
global TIMEOUT
path_1 = "%s/%s_EvoSuite_%d.sh" %(SCRIPTDIR, USERNAME, JOB_ID)
script=open(path_1, "a")
script.write(getScriptHead())
num = startNum
coreIndex = 0
for seed in range(minSeed, maxSeed):
#important if cluster gives issue
random.shuffle(CLASSES)
for entry in CLASSES:
if num >= ENTRIES_PER_JOB:
if(CORES > 1):
script.write("wait \"${pids[@]}\" \n")
coreIndex = 0
script.close()
JOB_ID +=1
num = 1
path_2 = "%s/%s_EvoSuite_%d.sh" %(SCRIPTDIR, USERNAME, JOB_ID)
script=open(path_2, "a")
script.write(getScriptHead())
else:
num += 1
if(CORES>1 and coreIndex==0):
#first call
script.write("pids=() \n\n")
script.write(getEvoSuiteCall(seed, configId, config, entry[0], entry[1], JOB_ID, strategy, coreIndex))
if(CORES > 1):
coreIndex += 1
if(CORES > 1 and coreIndex == CORES):
script.write("\n\n wait \"${pids[@]}\" \n\n")
coreIndex = 0
if(CORES > 1):
script.write("wait \"${pids[@]}\" \n")
script.close()
#JOB_ID += 1
CONFIG_ID += 1
return num
TIMEOUT="10m"
# Fixed set of parameters to use in all jobs
FIXED = " -mem 2500 \
-Dhtml=false \
-Dplot=false \
-Dtest_comments=false \
-Dshow_progress=false \
-Denable_asserts_for_evosuite=true \
-Dsearch_budget=120 \
-Dinitialization_timeout=120 \
-Dglobal_timeout=120 \
-Dminimization_timeout=60 \
-Dassertion_timeout=60 \
-Dextra_timeout=60 \
-Djunit_check_timeout=60 \
-Doutput_variables=\"configuration_id,group_id,TARGET_CLASS,search_budget,Length,Size,LineCoverage,BranchCoverage,OutputCoverage,WeakMutationScore,Implicit_MethodExceptions\" \
"
# How many calls to EvoSuite should go in one script
N_CONF = 1 #(depends on number of configurations)
ENTRIES_PER_JOB= math.ceil( (N_CONF * (NUM_CLASSES * (MAXSEED - MINSEED)) / float(MAX_JOBS) ) )
# Create the actual jobs
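# Note (editor's addition): CONFIG_NAME, EVOSUITE, CASESTUDY_DIR and getScriptHead() are
# referenced but not defined in this base script; they appear to be expected from the script
# that builds on it.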
createJobs(MINSEED, MAXSEED, CONFIG_NAME , " " , 0, "-generateSuite")
print "Seeds: %d, projects: %d, configs: %d" % ((MAXSEED - MINSEED), NUM_CLASSES, CONFIG_ID)
print "Total number of jobs created: %d" % (JOB_ID+1)
print "Total number of calls to EvoSuite: %d" % CALL_ID
print "Calls per job: %d" % ENTRIES_PER_JOB
|
goozbach/ansible
|
lib/ansible/inventory/expand_hosts.py
|
Python
|
gpl-3.0
| 4,357
| 0.00459
|
# (c) 2012, Zettar Inc.
# Written by Chin Fang <fangchin@zettar.com>
#
# This
|
file is part of Ansible
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# ME
|
RCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
'''
This module is for enhancing ansible's inventory parsing capability such
that it can deal with hostnames specified using a simple pattern in the
form of [beg:end], example: [1:5], [a:c], [D:G]. If beg is not specified,
it defaults to 0.
If beg is given and is left-zero-padded, e.g. '001', it is taken as a
formatting hint when the range is expanded. e.g. [001:010] is to be
expanded into 001, 002 ...009, 010.
Note that when beg is specified with left zero padding, then the length of
end must be the same as that of beg, else an exception is raised.
'''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import string
from ansible import errors
def detect_range(line = None):
'''
A helper function that checks a given host line to see if it contains
a range pattern described in the docstring above.
    Returns True if the given line contains a pattern, else False.
'''
if '[' in line:
return True
else:
return False
def expand_hostname_range(line = None):
'''
A helper function that expands a given line that contains a pattern
specified in top docstring, and returns a list that consists of the
expanded version.
The '[' and ']' characters are used to maintain the pseudo-code
appearance. They are replaced in this function with '|' to ease
string splitting.
References: http://ansible.github.com/patterns.html#hosts-and-groups
'''
all_hosts = []
if line:
        # A hostname such as db[1:6]-node is considered to consist of
# three parts:
# head: 'db'
# nrange: [1:6]; range() is a built-in. Can't use the name
# tail: '-node'
# Add support for multiple ranges in a host so:
# db[01:10:3]node-[01:10]
# - to do this we split off at the first [...] set, getting the list
# of hosts and then repeat until none left.
# - also add an optional third parameter which contains the step. (Default: 1)
# so range can be [01:10:2] -> 01 03 05 07 09
(head, nrange, tail) = line.replace('[','|',1).replace(']','|',1).split('|')
bounds = nrange.split(":")
if len(bounds) != 2 and len(bounds) != 3:
raise errors.AnsibleError("host range must be begin:end or begin:end:step")
beg = bounds[0]
end = bounds[1]
if len(bounds) == 2:
step = 1
else:
step = bounds[2]
if not beg:
beg = "0"
if not end:
raise errors.AnsibleError("host range must specify end value")
if beg[0] == '0' and len(beg) > 1:
rlen = len(beg) # range length formatting hint
if rlen != len(end):
raise errors.AnsibleError("host range must specify equal-length begin and end formats")
fill = lambda _: str(_).zfill(rlen) # range sequence
else:
fill = str
try:
i_beg = string.ascii_letters.index(beg)
i_end = string.ascii_letters.index(end)
if i_beg > i_end:
raise errors.AnsibleError("host range must have begin <= end")
seq = list(string.ascii_letters[i_beg:i_end+1:int(step)])
except ValueError: # not an alpha range
seq = range(int(beg), int(end)+1, int(step))
for rseq in seq:
hname = ''.join((head, fill(rseq), tail))
if detect_range(hname):
all_hosts.extend( expand_hostname_range( hname ) )
else:
all_hosts.append(hname)
return all_hosts
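# Illustrative usage sketch (editor's addition) of the helpers above.
if __name__ == "__main__":
    pattern = "db[01:03]-node"
    if detect_range(pattern):
        print(expand_hostname_range(pattern))  # ['db01-node', 'db02-node', 'db03-node']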
|
VitalPet/addons-onestein
|
hr_employee_display_own_info/tests/__init__.py
|
Python
|
agpl-3.0
| 192
| 0
|
# -*- coding:
|
utf-8 -*-
# Copyright 2017 Onestein (<http://www.onestein.eu>)
# Licen
|
se AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import test_employee_display_own_info
|
ooici/marine-integrations
|
mi/dataset/driver/ctdpf_ckl/mmp_cds/driver.py
|
Python
|
bsd-2-clause
| 2,429
| 0.003705
|
"""
@package mi.dataset.driver.ctdpf_ckl.mmp_cds.driver
@file marine-integrations/mi/dataset/driver/ctdpf_ckl/mmp_cds/driver.py
@author Mark Worden
@brief Driver for the CtdpfCklMmpCds
Release notes:
initial release
"""
__author__ = 'Mark Worden'
__license__ = 'Apache 2.0'
from mi.core.log import get_logger
log = get_logger()
from mi.dataset.dataset_driver import SimpleDataSetDriver, DataSetDriverConfigKeys
from mi.dataset.parser.ctdpf_ckl_mmp_cds import CtdpfCklMmpCdsParser, CtdpfCklMmpCdsParserDataParticle
from mi.dataset.harvester import SingleDirectoryHarvester
class CtdpfCklMmpCdsDataSetDriver(SimpleDataSetDriver):
def __init__(self, config, memento, data_callback, state_callback, event_callback, exception_callback):
super(CtdpfCklMmpCdsDataSetDriver, self).__init__(config,
memento,
data_callback,
state_callback,
event_callback,
|
exception_callback)
self._parser = None
@classmethod
def stream_config(cls):
return [CtdpfCklMmpCdsParserDataParticle.type()]
def _build_parser(self, parser_state, infile):
"""
Build and return the parser
"""
config = self._parser_config
config.update({
|
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.ctdpf_ckl_mmp_cds',
DataSetDriverConfigKeys.PARTICLE_CLASS: 'CtdpfCklMmpCdsParserDataParticle'
})
log.debug("My Config: %s", config)
self._parser = CtdpfCklMmpCdsParser(
config,
parser_state,
infile,
self._save_parser_state,
self._data_callback,
self._sample_exception_callback
)
return self._parser
def _build_harvester(self, driver_state):
"""
Build and return the harvester
"""
# *** Replace the following with harvester initialization ***
self._harvester = SingleDirectoryHarvester(
self._harvester_config,
driver_state,
self._new_file_callback,
self._modified_file_callback,
self._exception_callback
)
return self._harvester
|
SUSE/azure-sdk-for-python
|
azure-mgmt/tests/test_graphrbac.py
|
Python
|
mit
| 1,961
| 0.002552
|
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import unittest
import azure.graphrbac
from testutils.common_recordingtestcase import record
from tests.mgmt_testcase import HttpStatusCode, AzureMgmtTestCase
class GraphRbacTest(AzureMgmtTestCase):
def setUp(self):
super(GraphRbacTest, self).setUp()
self.graphrbac_client = self.create_basic_client(
azure.graphrbac.GraphRbacManagementClient,
tenant_id=self.settings.AD_DOMAIN
)
@record
def test_graphrbac_users(self):
user = self.graphrbac_client.users.create(
azure.graphrbac.models.UserCreateParameters(
user_principal_name="testbuddy@{}".format(self.settings.AD_DOMAIN),
account_enabled=False,
display_name='Test Buddy',
mail_nickname='testbuddy',
password_profile=azure.graphrbac.models.PasswordProfile(
password='MyStr0ngP4ssword',
force_change_password_next_login=True
)
)
)
self.assertEqual(user.display_name, 'Test Buddy'
|
)
user = self.graphrbac_client.users.get(user.object_id)
self.assertEqual(user.display_name, 'Test Buddy')
users = self.graphrbac_client.users.list(
filter="displayName eq 'Test Buddy'"
)
users = list(users)
self.assertEqual(len(users), 1)
self.assertE
|
qual(users[0].display_name, 'Test Buddy')
self.graphrbac_client.users.delete(user.object_id)
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
|
sileht/deb-openstack-nova
|
nova/db/sqlalchemy/migrate_repo/versions/026_add_agent_table.py
|
Python
|
apache-2.0
| 3,336
| 0.003897
|
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean, Column, DateTime, Integer
from sqlalchemy import MetaData, String, Table
from nova import log as logging
LOG = logging.getLogger(__name__)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine;
# bind migrate_engine to your metadata
meta = MetaData()
meta.bind = migrate_engine
#
# New Tables
#
builds = Table('agent_builds', meta,
Column('created_at', DateTime(timezone=False)),
Column('updated_at', DateTime(timezone=False)),
Column('deleted_at', DateTime(timezone=False)),
Column('deleted', Boolean(create_constraint=True, name=None)),
Column('id', Integer(), primary_key=True, nullable=False),
Column('hypervisor',
String(length=255, convert_unicode=False,
assert_unicode=None,
unicode_error=None, _warn_on_bytestring=False)),
Column('os',
String(length=255, convert_unicode=False,
assert_unicode=None,
unicode_error=None, _warn_on_bytestring=False)),
Column('architecture',
String(length=255, convert_unicode=False
|
,
assert_unicode=None,
unicode_error=None, _warn_on_bytestring=False)),
Column('version',
String(length=255, convert_unic
|
ode=False,
assert_unicode=None,
unicode_error=None, _warn_on_bytestring=False)),
Column('url',
String(length=255, convert_unicode=False,
assert_unicode=None,
unicode_error=None, _warn_on_bytestring=False)),
Column('md5hash',
String(length=255, convert_unicode=False,
assert_unicode=None,
unicode_error=None, _warn_on_bytestring=False)),
)
for table in (builds, ):
try:
table.create()
except Exception:
LOG.info(repr(table))
instances = Table('instances', meta, autoload=True)
#
# New Columns
#
architecture = Column('architecture', String(length=255))
# Add columns to existing tables
instances.create_column(architecture)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
builds = Table('agent_builds', meta, autoload=True)
for table in (builds, ):
table.drop()
instances = Table('instances', meta, autoload=True)
instances.drop_column('architecture')
|
alaudet/hcsr04sensor
|
hcsr04sensor/sensor.py
|
Python
|
mit
| 8,573
| 0.001166
|
"""Measure the distance or depth with an HCSR04 Ultrasonic sound
sensor and a Raspberry Pi. Imperial and Metric measurements are available"""
# Al Audet
# MIT License
from __future__ import division
import time
import math
import warnings
import RPi.GPIO as GPIO
class Measurement(object):
"""Create a measurement using a HC-SR04 Ultrasonic Sensor connected to
the GPIO pins of a Raspberry Pi.
Metric values are used by default. For imperial values use
unit='imperial'
temperature=<Desired temperature in Fahrenheit>
"""
def __init__(
self, trig_pin, echo_pin, temperature=20, unit="metric", gpio_mode=GPIO.BCM
):
self.trig_pin = trig_pin
self.echo_pin = echo_pin
self.temperature = temperature
self.unit = unit
self.gpio_mode = gpio_mode
self.pi = math.pi
def raw_distance(self, sample_size=11, sample_wait=0.1):
"""Return an error corrected unrounded distance, in cm, of an object
        adjusted for temperature in Celsius. The distance calculated
is the median value of a sample of `sample_size` readings.
Speed of readings is a result of two variables. The sample_size
per reading and the sample_wait (interval between individual samples).
Example: To use a sample size of 5 instead of 11 will increase the
speed of your reading but could increase variance in readings;
value = sensor.Measurement(trig_pin, echo_pin)
r = value.raw_distance(sample_size=5)
Adjusting the interval between individual samples can also
increase the speed of the reading. Increasing the speed will also
increase CPU usage. Setting it too low will cause errors. A default
of sample_wait=0.1 is a good balance between speed and minimizing
CPU usage. It is also a safe setting that should not cause errors.
e.g.
r = value.raw_distance(sample_wait=0.03)
"""
if self.unit == "imperial":
self.temperature = (self.temperature - 32) * 0.5556
elif self.unit == "metric":
pass
else:
raise ValueError("Wrong Unit Type. Unit Must be imperial or metric")
speed_of_sound = 331.3 * math.sqrt(1 + (self.temperature / 273.15))
sample = []
# setup input/output pins
GPIO.setwarnings(False)
GPIO.setmode(self.gpio_mode)
GPIO.setup(self.trig_pin, GPIO.OUT)
GPIO.setup(self.echo_pin, GPIO.IN)
for distance_reading in range(sample_size):
GPIO.output(self.trig_pin, GPIO.LOW)
time.sleep(sample_wait)
GPIO.output(self.trig_pin, True)
time.sleep(0.00001)
GPIO.output(self.trig_pin, False)
echo_status_counter = 1
while GPIO.input(self.echo_pin) == 0:
if echo_status_counter < 1000:
sonar_signal_off = time.time()
echo_status_counter += 1
else:
raise SystemError("Echo pulse was not received")
while GPIO.input(self.echo_pin) == 1:
sonar_signal_on = time.time()
time_passed = sonar_signal_on - sonar_signal_off
distance_cm = time_passed * ((speed_of_sound * 100) / 2)
sample.append(distance_cm)
sorted_sample = sorted(sample)
# Only cleanup the pins used to prevent clobbering
# any others in use by the program
|
GPIO.cleanup((self.trig_pin, self.echo_pin))
return sorted_sample[sample_size // 2]
def depth(self, median_reading, hole_depth):
"""Calculate the depth of a liquid. hole_depth is the
distance from the sensor to the bottom of the hole."""
if self.unit == "metric":
return hole_depth
|
- median_reading
else:
return hole_depth - (median_reading * 0.394)
def distance(self, median_reading):
"""Calculate the distance from the sensor to an object."""
if self.unit == "imperial":
return median_reading * 0.394
else:
# don't need this method if using metric. Use raw_distance
# instead. But it will return median_reading anyway if used.
return median_reading
def cylinder_volume_side(self, depth, length, radius):
"""Calculate the liquid volume of a cylinder on its side"""
if depth > (radius * 2) or depth < 0:
raise ValueError(
"Depth must be less than diameter (radius * 2) and not less than 0"
)
volume = length * (
(radius * radius * math.acos((radius - depth) / radius))
- (radius - depth) * math.sqrt((2 * depth * radius) - (depth * depth))
)
if self.unit == "metric":
return volume / 1000
else:
return volume / 231
def cylinder_volume_standing(self, depth, radius):
"""Calculate the liquid volume of a standing cylinder"""
volume = self.pi * radius * radius * depth
if self.unit == "metric":
return volume / 1000
else:
return volume / 231
def elliptical_cylinder_volume(self, depth, semi_maj_axis, semi_min_axis):
"""Calculate the liquid volume of a standing elliptical cylinder"""
volume = self.pi * semi_maj_axis * semi_min_axis * depth
if self.unit == "metric":
return volume / 1000
else:
return volume / 231
def elliptical_side_cylinder_volume(self, depth, height, width, length):
"""Calculate the liquid volume of an elliptical cylinder on its side"""
s_maj_a = width / 2 # semi major axis
s_min_a = height / 2 # semi minor axis
if depth > height or depth < 0:
raise ValueError("Depth must be less than the height and not less than 0")
volume = (
length
* (s_maj_a / s_min_a)
* (
(self.pi * (s_min_a ** 2)) / 2
+ (depth - s_min_a)
* math.sqrt((s_min_a ** 2) - ((depth - s_min_a) ** 2))
+ (s_min_a ** 2) * math.asin(depth / s_min_a - 1)
)
)
if self.unit == "metric":
return volume / 1000
else:
return volume / 231
def cuboid_volume(self, depth, width, length):
"""Calculate amount of liquid in a cuboid
(square or rectangle shaped container)"""
volume = width * length * depth
if self.unit == "metric":
return volume / 1000
else:
return volume / 231
@staticmethod
def basic_distance(trig_pin, echo_pin, celsius=20):
"""Return an unformatted distance in cm's as read directly from
RPi.GPIO."""
speed_of_sound = 331.3 * math.sqrt(1 + (celsius / 273.15))
GPIO.setup(trig_pin, GPIO.OUT)
GPIO.setup(echo_pin, GPIO.IN)
GPIO.output(trig_pin, GPIO.LOW)
time.sleep(0.1)
GPIO.output(trig_pin, True)
time.sleep(0.00001)
GPIO.output(trig_pin, False)
echo_status_counter = 1
while GPIO.input(echo_pin) == 0:
if echo_status_counter < 1000:
sonar_signal_off = time.time()
echo_status_counter += 1
else:
raise SystemError("Echo pulse was not received")
while GPIO.input(echo_pin) == 1:
sonar_signal_on = time.time()
time_passed = sonar_signal_on - sonar_signal_off
return time_passed * ((speed_of_sound * 100) / 2)
def depth_metric(self, median_reading, hole_depth):
"""This method is deprecated, use depth method instead."""
warnings.warn("use depth method instead", DeprecationWarning)
return hole_depth - median_reading
def depth_imperial(self, median_reading, hole_depth):
"""This method is deprecated, use depth method instead."""
warnings.warn("use depth method instead", DeprecationWarning)
return hole_depth - (median_reading * 0.394)
def distance_metric(self, median_reading):
"""
|
nmatsui/wasted_energy_of_tv_iot
|
power_ir_send_server.py
|
Python
|
mit
| 939
| 0.001065
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
import SocketServer
from lib import const
class PowerIRSendHandler(SocketServer.StreamR
|
equestHandler):
def handle(self):
# print "connected from:", self.client_address
sent_data = ""
while True:
data = self.request.recv(1024)
if len(data) == 0:
break
sent_data += data
if sent_data == const.IR_SEND_MSG:
subprocess.call(const.IR_SEND_CMD, shell=True)
self.request.close()
if __name__ == "__main__":
address = (const.IR_SEND_HOST, const.IR_SEND_PORT)
try:
|
server = SocketServer.ThreadingTCPServer(address, PowerIRSendHandler)
print "power_ir_send_server listening", server.socket.getsockname()
server.serve_forever()
except KeyboardInterrupt as err:
server.socket.close()
print "power_ir_send_server stop"
|
enthought/etsproxy
|
enthought/plugins/refresh_code/refresh_code_plugin.py
|
Python
|
bsd-3-clause
| 118
| 0
|
# pr
|
oxy module
from __future__ import absolute_import
from envisage.plugins.refresh_code.refresh_
|
code_plugin import *
|
tamland/python-actors
|
actors/internal/messages.py
|
Python
|
apache-2.0
| 995
| 0
|
# -*- coding: utf-8 -*-
#
# Copyright 2015 Thomas Amland
#
# Licensed under the Apache Lice
|
nse, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
|
See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
Start = object()
Restart = object()
Resume = object()
Terminate = object()
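# Note: the two assignments below rebind Restart and Terminate, replacing the sentinel
# objects created above with plain strings.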
Restart = "restart"
Terminate = "terminate"
Failure = namedtuple('Failure', ['ref', 'exception', 'traceback'])
Supervise = namedtuple('Supervise', ['ref'])
Terminated = namedtuple('Terminated', ['ref'])
DeadLetter = namedtuple('DeadLetter', ['message', 'sender', 'recipient'])
|
arju88nair/projectCulminate
|
venv/lib/python3.5/site-packages/aiohttp/log.py
|
Python
|
apache-2.0
| 326
| 0
|
import logging
access_logger = logging.getLogger('aiohttp.access')
client_logger = logging.getLog
|
ger('aiohttp.client')
internal_logger = logging.getLogger('aiohttp.internal')
server_logger = logging.getLogger('aiohttp.server')
web_logger = logging.getLogger('aioht
|
tp.web')
ws_logger = logging.getLogger('aiohttp.websocket')
|
hoho/dosido
|
nodejs/tools/gyp/gyptest.py
|
Python
|
mit
| 8,032
| 0.015065
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__doc__ = """
gyptest.py -- test runner for GYP tests.
"""
import os
import optparse
import shlex
import subprocess
import sys
class CommandRunner(object):
"""
Executor class for commands, including "commands" implemented by
Python functions.
"""
verbose = True
active = True
def __init__(self, dictionary={}):
self.subst_dictionary(dictionary)
def subst_dictionary(self, dictionary):
self._subst_dictionary = dictionary
def subst(self, string, dictionary=None):
"""
Substitutes (via the format operator) the values in the specified
dictionary into the specified command.
The command can be an (action, string) tuple. In all cases, we
perform substitution on strings and don't worry if something isn't
a string. (It's probably a Python function to be executed.)
"""
if dictionary is None:
dictionary = self._subst_dictionary
if dictionary:
try:
string = string % dictionary
except TypeError:
pass
return string
def display(self, command, stdout=None, stderr=None):
if not self.verbose:
return
if type(command) == type(()):
func = command[0]
args = command[1:]
s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
if type(command) == type([]):
# TODO: quote arguments containing spaces
# TODO: handle meta characters?
s = ' '.join(command)
else:
s = self.subst(command)
if not s.endswith('\n'):
s += '\n'
sys.stdout.write(s)
sys.stdout.flush()
def execute(self, command, stdout=None, stderr=None):
"""
Executes a single command.
"""
if not self.active:
return 0
if type(command) == type(''):
command = self.subst(command)
cmdargs = shlex.split(command)
if cmdargs[0] == 'cd':
command = (os.chdir,) + tuple(cmdargs[1:])
if type(command) == type(()):
func = command[0]
args = command[1:]
return func(*args)
else:
if stdout is sys.stdout:
# Same as passing sys.stdout, except python2.4 doesn't fail on it.
subout = None
else:
# Open pipe for anything else so Popen works on python2.4.
subout
|
= subprocess.PIPE
if stderr is sys.stderr:
# Same as passing sys.stderr, except python2.4 doesn't fail on it.
suberr = None
elif stderr is None:
# Merge with stdout if stderr isn't specified.
suberr = subprocess.STDOUT
else:
# Open pipe for anything else so Popen works on python2.4.
suberr = subprocess.PIPE
p = subprocess.Popen(command,
shell=(sys.platform ==
|
'win32'),
stdout=subout,
stderr=suberr)
p.wait()
if stdout is None:
self.stdout = p.stdout.read()
elif stdout is not sys.stdout:
stdout.write(p.stdout.read())
if stderr not in (None, sys.stderr):
stderr.write(p.stderr.read())
return p.returncode
def run(self, command, display=None, stdout=None, stderr=None):
"""
Runs a single command, displaying it first.
"""
if display is None:
display = command
self.display(display)
return self.execute(command, stdout, stderr)
class Unbuffered(object):
def __init__(self, fp):
self.fp = fp
def write(self, arg):
self.fp.write(arg)
self.fp.flush()
def __getattr__(self, attr):
return getattr(self.fp, attr)
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
def is_test_name(f):
return f.startswith('gyptest') and f.endswith('.py')
def find_all_gyptest_files(directory):
result = []
for root, dirs, files in os.walk(directory):
if '.svn' in dirs:
dirs.remove('.svn')
result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
result.sort()
return result
def main(argv=None):
if argv is None:
argv = sys.argv
usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
parser = optparse.OptionParser(usage=usage)
parser.add_option("-a", "--all", action="store_true",
help="run all tests")
parser.add_option("-C", "--chdir", action="store", default=None,
help="chdir to the specified directory")
parser.add_option("-f", "--format", action="store", default='',
help="run tests with the specified formats")
parser.add_option("-G", '--gyp_option', action="append", default=[],
help="Add -G options to the gyp command line")
parser.add_option("-l", "--list", action="store_true",
help="list available tests and exit")
parser.add_option("-n", "--no-exec", action="store_true",
help="no execute, just print the command line")
parser.add_option("--passed", action="store_true",
help="report passed tests")
parser.add_option("--path", action="append", default=[],
help="additional $PATH directory")
parser.add_option("-q", "--quiet", action="store_true",
help="quiet, don't print test command lines")
opts, args = parser.parse_args(argv[1:])
if opts.chdir:
os.chdir(opts.chdir)
if opts.path:
extra_path = [os.path.abspath(p) for p in opts.path]
extra_path = os.pathsep.join(extra_path)
os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
if not args:
if not opts.all:
sys.stderr.write('Specify -a to get all tests.\n')
return 1
args = ['test']
tests = []
for arg in args:
if os.path.isdir(arg):
tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
else:
if not is_test_name(os.path.basename(arg)):
print >>sys.stderr, arg, 'is not a valid gyp test name.'
sys.exit(1)
tests.append(arg)
if opts.list:
for test in tests:
print test
sys.exit(0)
CommandRunner.verbose = not opts.quiet
CommandRunner.active = not opts.no_exec
cr = CommandRunner()
os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
if not opts.quiet:
sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
passed = []
failed = []
no_result = []
if opts.format:
format_list = opts.format.split(',')
else:
# TODO: not duplicate this mapping from pylib/gyp/__init__.py
format_list = {
'aix5': ['make'],
'freebsd7': ['make'],
'freebsd8': ['make'],
'openbsd5': ['make'],
'cygwin': ['msvs'],
'win32': ['msvs', 'ninja'],
'linux2': ['make', 'ninja'],
'linux3': ['make', 'ninja'],
'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
}[sys.platform]
for format in format_list:
os.environ['TESTGYP_FORMAT'] = format
if not opts.quiet:
sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
gyp_options = []
for option in opts.gyp_option:
gyp_options += ['-G', option]
if gyp_options and not opts.quiet:
sys.stdout.write('Extra Gyp options: %s\n' % gyp_options)
for test in tests:
status = cr.run([sys.executable, test] + gyp_options,
stdout=sys.stdout,
stderr=sys.stderr)
if status == 2:
no_result.append(test)
elif status:
failed.append(test)
else:
passed.append(test)
if not opts.quiet:
def report(description, tests):
if tests:
if len(tests) == 1:
sys.stdout.write("\n%s the following test:\n" % description)
else:
fmt = "\n%s the following %d tests:\n"
sys.stdout.write(fmt % (description, len(tests)))
sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
if opts.passed:
report("Passed", passed)
report("Failed", failed)
report("No result from", no_result)
if failed:
return 1
else:
return 0
if __name__ == "__main__":
sys.exit(main())
|
chriszs/redash
|
redash/models/types.py
|
Python
|
bsd-2-clause
| 3,048
| 0.000656
|
import pytz
from sqlalchemy.types import TypeDecorator
from sqlalchemy.ext.indexable import index_property
from sqlalchemy.ext.mutable import Mutable
from sqlalchemy_utils import EncryptedType
from redash.utils import json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer
from .base import db
class Configuration(TypeDecorator):
impl = db.Text
def process_bind_param(self, value, dialect):
return value.to_json()
def process_result_value(self, value, dialect):
        return ConfigurationContainer.from_json(value)
class EncryptedConfiguration(EncryptedType):
def process_bind_param(self, value, dialect):
return super(EncryptedConfiguration, self).process_bind_param(value.to_json(), dialect)
def process_result_value(self, value, dialect):
return ConfigurationContainer.from_json(super(EncryptedConfiguration, self).process_result_value(value, dialect))
# XXX replace PseudoJSON and MutableDict with real JSON field
class PseudoJSON(TypeDecorator):
impl = db.Text
def process_bind_param(self, value, dialect):
if value is None:
return value
return json_dumps(value)
def process_result_value(self, value, dialect):
if not value:
return value
return json_loads(value)
class MutableDict(Mutable, dict):
@classmethod
def coerce(cls, key, value):
"Convert plain dictionaries to MutableDict."
if not isinstance(value, MutableDict):
if isinstance(value, dict):
return MutableDict(value)
# this call will raise ValueError
return Mutable.coerce(key, value)
else:
return value
def __setitem__(self, key, value):
"Detect dictionary set events and emit change events."
dict.__setitem__(self, key, value)
self.changed()
def __delitem__(self, key):
"Detect dictionary del events and emit change events."
dict.__delitem__(self, key)
self.changed()
class MutableList(Mutable, list):
def append(self, value):
list.append(self, value)
self.changed()
def remove(self, value):
list.remove(self, value)
self.changed()
@classmethod
def coerce(cls, key, value):
if not isinstance(value, MutableList):
if isinstance(value, list):
return MutableList(value)
return Mutable.coerce(key, value)
else:
return value
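# Illustrative sketch (assumed usage, not part of this module): pairing the
# mutation-tracking dict with the JSON-in-Text type so that in-place edits such
# as ``model.options['key'] = value`` mark the column as dirty.
def _example_mutable_json_column():
    return db.Column(MutableDict.as_mutable(PseudoJSON), nullable=True)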
class json_cast_property(index_property):
"""
A SQLAlchemy index property that is able to cast the
entity attribute as the specified cast type. Useful
for JSON and JSONB colums for easier querying/filtering.
"""
def __init__(self, cast_type, *args, **kwargs):
super(json_cast_property, self).__init__(*args, **kwargs)
self.cast_type = cast_type
def expr(self, model):
expr = super(json_cast_property, self).expr(model)
return expr.astext.cast(self.cast_type)
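# Illustrative usage (hypothetical model, not part of this module):
#
#     class Widget(db.Model):
#         options = db.Column(postgresql.JSONB)
#         height = json_cast_property(db.Integer, 'options', 'height')
#
# Widget.query.filter(Widget.height > 10) then compares options['height'] as an
# integer instead of as raw JSON text.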
|
lsp84ch83/PyText
|
UItestframework/run.py
|
Python
|
gpl-3.0
| 730
| 0.011204
|
#coding=utf-8
import unittest
import HTMLTestRunner
import time
from config import globalparam
from public.common import sendmail
def run():
test_dir = './testcase'
|
suite = unittest.defaultTestLoader.discover(start_dir=test_dir,pattern='test*.py')
now = time.strftime('%Y-%m-%d_%H_%M_%S')
reportname = globalparam.report_path + '\\' + 'TestResult' + now + '.html'
with open(reportname,'wb') as f:
|
runner = HTMLTestRunner.HTMLTestRunner(
stream=f,
title='测试报告',
description='Test the import testcase'
)
runner.run(suite)
time.sleep(3)
    # Send the report by email
mail = sendmail.SendMail()
mail.send()
if __name__=='__main__':
run()
|
dougbenjamin/panda-harvester
|
pandaharvester/harvestermessenger/shared_file_messenger.py
|
Python
|
apache-2.0
| 35,562
| 0.002362
|
import json
import os
import shutil
import datetime
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
import subprocess32 as subprocess
except ImportError:
import subprocess
try:
from os import scandir, walk
except ImportError:
from scandir import scandir, walk
import re
import uuid
import os.path
import fnmatch
import distutils.spawn
import multiprocessing
from future.utils import iteritems
from past.builtins import long
from concurrent.futures import ThreadPoolExecutor as Pool
from pandaharvester.harvestercore import core_utils
from pandaharvester.harvestercore.work_spec import WorkSpec
from .base_messenger import BaseMessenger
from pandaharvester.harvesterconfig import harvester_config
# json for worker attributes
jsonAttrsFileName = harvester_config.payload_interaction.workerAttributesFile
# json for job report
jsonJobReport = harvester_config.payload_interaction.jobReportFile
# json for outputs
jsonOutputsFileName = harvester_config.payload_interaction.eventStatusDumpJsonFile
# xml for outputs
xmlOutputsBaseFileName = harvester_config.payload_interaction.eventStatusDumpXmlFile
# json for job request
jsonJobRequestFileName = harvester_config.payload_interaction.jobRequestFile
# json for job spec
jobSpecFileName = harvester_config.payload_interaction.jobSpecFile
# json for event request
jsonEventsRequestFileName = harvester_config.payload_interaction.eventRequestFile
# json to feed events
jsonEventsFeedFileName = harvester_config.payload_interaction.eventRangesFile
# json to update events
jsonEventsUpdateFileName = harvester_config.payload_interaction.updateEventsFile
# PFC for input files
xmlPoolCatalogFileName = harvester_config.payload_interaction.xmlPoolCatalogFile
# json to get PandaIDs
pandaIDsFile = harvester_config.payload_interaction.pandaIDsFile
# json to kill worker itself
try:
killWorkerFile = harvester_config.payload_interaction.killWorkerFile
except Exception:
killWorkerFile = 'kill_worker.json'
# json for heartbeats from the worker
try:
heartbeatFile = harvester_config.payload_interaction.heartbeatFile
except Exception:
heartbeatFile = 'worker_heartbeat.json'
# suffix to read json
suffixReadJson = '.read'
# logger
_logger = core_utils.setup_logger('shared_file_messenger')
def set_logger(master_logger):
global _logger
_logger = master_logger
# filter for log.tgz
def filter_log_tgz(extra=None):
patt = ['*.log', '*.txt', '*.xml', '*.json', 'log*']
if extra is not None:
patt += extra
return '-o '.join(['-name "{0}" '.format(i) for i in patt])
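# For illustration: with the default patterns the expression above expands to
#   -name "*.log" -o -name "*.txt" -o -name "*.xml" -o -name "*.json" -o -name "log*"
# and is spliced into the find command built by tar_directory() below.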
# tar a single directory
def tar_directory(dir_name, tar_name=None, max_depth=None, extra_files=None):
if tar_name is None:
tarFilePath = os.path.join(os.path.dirname(dir_name), '{0}.subdir.tar.gz'.format(os.path.basename(dir_name)))
else:
tarFilePath = tar_name
com = 'cd {0}; '.format(dir_name)
com += 'find . '
if max_depth is not None:
com += '-maxdepth {0} '.format(max_depth)
com += r'-type f \( ' + filter_log_tgz(extra_files) + r'\) -print0 '
com += '| '
com += 'tar '
if distutils.spawn.find_executable('pigz') is None:
com += '-z '
else:
com += '-I pigz '
com += '-c -f {0} --null -T -'.format(tarFilePath)
p = subprocess.Popen(com,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdOut, stdErr = p.communicate()
retCode = p.returncode
return com, retCode, stdOut, stdErr
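# For illustration (assumed path): tar_directory('/data/worker') builds roughly
#   cd /data/worker; find . -type f \( <log patterns> \) -print0 | \
#     tar -z -c -f /data/worker.subdir.tar.gz --null -T -
# with '-I pigz' replacing '-z' when pigz is found on the PATH.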
# scan files in a directory
def scan_files_in_dir(dir_name, patterns=None):
fileList = []
for root, dirs, filenames in walk(dir_name):
for filename in filenames:
# check filename
if patterns is not None:
matched = False
for pattern in patterns:
if re.search(pattern, filename) is not None:
matched = True
break
if not matched:
continue
# make dict
tmpFileDict = dict()
pfn = os.path.join(root, filename)
lfn = os.path.basename(pfn)
tmpFileDict['path'] = pfn
tmpFileDict['fsize'] = os.stat(pfn).st_size
tmpFileDict['type'] = 'es_output'
tmpFileDict['guid'] = str(uuid.uuid4())
tmpFileDict['chksum'] = core_utils.calc_adler32(pfn)
tmpFileDict['eventRangeID'] = lfn.split('.')[-1]
tmpFileDict['eventStatus'] = "finished"
fileList.append(tmpFileDict)
return fileList
# messenger with shared file system
class SharedFileMessenger(BaseMessenger):
# constructor
def __init__(self, **kwarg):
self.jobSpecFileFormat = 'json'
self.stripJobParams = False
self.scanInPostProcess = False
self.leftOverPatterns = None
BaseMessenger.__init__(self, **kwarg)
# get access point
def get_access_point(self, workspec, panda_id):
if workspec.mapType == WorkSpec.MT_MultiJobs:
accessPoint = os.path.join(workspec.get_access_point(), str(panda_id))
else:
accessPoint = workspec.get_access_point()
return accessPoint
# get attributes of a worker which should be propagated to job(s).
# * the worker needs to put a json under the access point
def get_work_attributes(self, workspec):
# get logger
tmpLog = core_utils.make_logger(_logger, 'workerID={0}'.format(workspec.workerID),
method_name='get_work_attributes')
allRetDict = dict()
numofreads = 0
sw_readreports = core_utils.get_stopwatch()
for pandaID in workspec.pandaid_list:
# look for the json just under the access point
accessPoint = self.get_access_point(workspec, pandaID)
jsonFilePath = os.path.join(accessPoint, jsonAttrsFileName)
tmpLog.debug('looking for attributes file {0}'.format(jsonFilePath))
retDict = dict()
if not os.path.exists(jsonFilePath):
# not found
tmpLog.debug('not found attributes file')
else:
try:
with open(jsonFilePath) as jsonFile:
retDict = json.load(jsonFile)
except Exception:
tmpLog.debug('failed to load {0}'.format(jsonFilePath))
# look for job report
jsonFilePath = os.path.join(accessPoint, jsonJobReport)
tmpLog.debug('looking for job report file {0}'.format(jsonFilePath))
sw_checkjobrep = core_utils.get_stopwatch()
if not os.path.exists(jsonFilePath):
# not found
tmpLog.debug('not found job report file')
else:
try:
sw_readrep = core_utils.get_stopwatch()
with open(jsonFilePath) as jsonFile:
tmpDict = json.load(jsonFile)
retDict['metaData'] = tmpDict
tmpLog.debug('got {0} kB of job report. {1} sec.'.format(os.stat(jsonFilePath).st_size / 1024,
sw_readrep.get_elapsed_time()))
numofreads += 1
except Exception:
tmpLog.debug('failed to load {0}'.format(jsonFilePath))
tmpLog.debug("Check file and read file time: {0} sec.".format(sw_checkjobrep.get_elapsed_time()))
allRetDict[pandaID] = retDict
tmpLog.debug("Reading {0} job report files {1}".format(numofreads, sw_readreports.get_elapsed_time()))
return allRetDict
# get files to stage-out.
# * the worker needs to put a json under the access point
def get_files_to_stage_out(self, workspec):
# get logger
tmpLog = core_utils.make_logger(_logger, 'workerID={0}'.format(workspec.workerID),
method_name='get_files_to_stage_out')
|
pivonroll/Qt_Creator
|
scripts/dependencyinfo.py
|
Python
|
gpl-3.0
| 8,279
| 0.002416
|
#! /usr/bin/env python2
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
import glob
import logging
import os
import re
import subprocess
import sys
import platform
class Library:
def __init__(self, path):
self.path = path
self.name = ''
self.exportedSymbols = {}
self.name = re.sub('^(.*/)?lib', '', path)
self.name = re.sub('\.so.*$', '', self.name)
self._runNM(self.path)
def isLibrary(self):
return True
def isPlugin(self):
return False
def debugDump(self):
log.debug('Library "%s" exports %d symbols.', self.name, len(self.exportedSymbols))
def _runNM(self, path):
try:
output = subprocess.check_output(['/usr/bin/nm', '--demangle', path], stderr=subprocess.STDOUT).splitlines()
except:
output = []
for line in output:
self._parseNMline(line)
def _parseNMline(self, line):
m = re.search('^[0-9a-fA-F]{8,16} [TD] (.*)$', line)
if m:
self.exportedSymbols[m.group(1)] = 1
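    # For example (illustrative line, not from a real build): an nm entry such as
    #   "00000000004a1b2c T Core::ICore::instance()"
    # records "Core::ICore::instance()" as an exported symbol of the library.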
class Plugin(Library):
def __init__(self, spec):
self.pluginSpec = spec
self.specDependencies = {}
self.symbolDependencies = {}
self.name = ''
self.importedSymbols = []
self.path = self._parsePluginSpec(spec)
Library.__init__(self, self.path)
self.importedSymbols.sort()
def isLibrary(self):
return False
def isPlugin(self):
return True
def debugDump(self):
log.debug('Plugin "%s" imports %d symbols and exports %d symbols.', self.name, len(self.importedSymbols),
len(self.exportedSymbols))
for i in self.specDependencies:
log.debug(' Spec declares dependency on "%s"', i)
for i in self.symbolDependencies:
tmp = 'plugin'
if i.isLibrary():
tmp = 'lib'
            log.debug('    Symbol dependency on %s "%s" (%d)', tmp, i.name, self.symbolDependencies[i])
def _parsePluginSpec(self, spec):
dirname = os.path.dirname(spec)
with open(spec) as f:
content = f.readlines()
for line in content:
|
m = re.search('(plugin|dependency)\s+name="([^"]+)"(?:.*\stype="([^"]+)")?', line)
if not(m):
continue
if m.group(1) == 'plugin':
if self.name != '':
log.critical('Plugin name already set to "%s"!', self.name)
else:
self.name = m.group(2)
else:
kind = m.group(3)
if not(kind):
kind = 'strong'
self.specDependencies[m.group(2)] = kind
if self.name == '':
log.critical('Plugin name not set for spec "%s".', spec)
return os.path.join(dirname, "lib%s.so" % self.name)
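    # Illustrative pluginspec lines matched above (hypothetical plugin names):
    #   <plugin name="ExampleEditor" version="4.0.0">
    #   <dependency name="TextEditor" type="optional"/>
    # A 'plugin' match sets self.name; a 'dependency' match records name -> kind,
    # defaulting to 'strong' when no type attribute is present.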
def _parseNMline(self, line):
m = re.search('^\s+ U (.*)$', line)
if m:
self.importedSymbols.append(m.group(1))
else:
Library._parseNMline(self, line)
def addSymbolDependency(self, dep, symbol):
if dep in self.symbolDependencies:
self.symbolDependencies[dep]['total'] += 1
else:
self.symbolDependencies[dep] = {}
self.symbolDependencies[dep]['total'] = 1
self.symbolDependencies[dep][symbol] = 1
class SymbolResolver:
def __init__(self, plugins, libraries):
self.libraries = libraries
self.libraries.extend(plugins)
for i in plugins:
self._resolve(i)
def _resolve(self, plugin):
print 'Resolving symbols for {}...'.format(plugin.name)
for symbol in plugin.importedSymbols:
lib = self._resolveSymbol(symbol)
if lib:
plugin.addSymbolDependency(lib, symbol)
def _resolveSymbol(self, symbol):
for i in self.libraries:
if symbol in i.exportedSymbols:
return i
return None
class Reporter:
def __init__(self, plugins):
for i in plugins:
self._reportPluginSpecIssues(i)
def _reportPluginSpecIssues(self, plugin):
print 'Plugin "{}" imports {} symbols and exports {} symbols.'.format(
plugin.name, len(plugin.importedSymbols), len(plugin.exportedSymbols))
spec = plugin.specDependencies
symb = {}
lib = {}
for p in plugin.symbolDependencies:
if p.isPlugin():
symb[p.name] = plugin.symbolDependencies[p]
else:
lib[p.name] = plugin.symbolDependencies[p]
for i in spec:
if i in symb:
total = symb[i]['total']
print ' {}: OK ({} usages)'.format(i, total)
self._printSome(symb[i])
del symb[i]
else:
if spec[i] == 'optional':
print ' {}: OK (optional)'.format(i)
else:
print ' {}: WARNING: unused'.format(i)
for i in symb:
total = symb[i]['total']
print ' {}: ERROR: undeclared ({} usages)'.format(i, total)
self._printSome(symb[i])
for i in lib:
total = lib[i]['total']
print ' LIBRARY {} used ({} usages)'.format(i, total)
def _printSome(self, data):
keys = data.keys()
if len(keys) <= 11:
for i in keys:
if i != 'total':
print ' {}'.format(i)
class BinaryDirExaminer:
def __init__(self, path):
self.libraries = []
self.plugins = []
self.binaryDir = path
log.debug('Examining directory "%s".', path)
self._findLibraries(path)
self._findPlugins(path)
def _findLibraries(self, path):
libdir = glob.glob(os.path.join(path, "lib", "qtcreator", "lib*"))
for l in libdir:
if os.path.islink(l):
continue
log.debug(' Looking at library "%s".', l)
self.libraries.append(Library(l))
def _findPlugins(self, path):
pluginspecs = glob.glob(os.path.join(path, "lib", "qtcreator", "plugins", "*.pluginspec"))
for spec in pluginspecs:
log.debug(' Looking at plugin "%s".', spec)
self.plugins.append(Plugin(spec))
if __name__ == '__main__':
# Setup logging:
log = logging.getLogger('log')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
# Make sure we are on linux:
if platform.system() != 'Linux':
log.critical("This check can only run on Linux")
sys.exit(1)
# Sanity check:
if not(os.path.exists(os.path.join(os.getcwd(), "bin", "qtcreator"))):
log.critical('Not a top level Qt Creator build directory.')
sys.exit(1)
binExaminer = BinaryDirExaminer(os.path.abspath(os.getcwd()))
# Find symbol dependencies:
resolver = Symbo
|
johnparker007/mame
|
3rdparty/bgfx/3rdparty/spirv-cross/test_shaders.py
|
Python
|
gpl-2.0
| 29,937
| 0.008852
|
#!/usr/bin/env python3
import sys
import os
import os.path
import subprocess
import tempfile
import re
import itertools
import hashlib
import shutil
import argparse
import codecs
import json
import multiprocessing
import errno
from functools import partial
class Paths():
def __init__(self, spirv_cross, glslang, spirv_as, spirv_val, spirv_opt):
self.spirv_cross = spirv_cross
self.glslang = glslang
self.spirv_as = spirv_as
self.spirv_val = spirv_val
self.spirv_opt = spirv_opt
def remove_file(path):
#print('Removing file:', path)
os.remove(path)
def create_temporary(suff = ''):
f, path = tempfile.mkstemp(suffix = suff)
os.close(f)
#print('Creating temporary:', path)
return path
def parse_stats(stats):
m = re.search('([0-9]+) work registers', stats)
registers = int(m.group(1)) if m else 0
m = re.search('([0-9]+) uniform registers', stats)
uniform_regs = int(m.group(1)) if m else 0
m_list = re.findall('(-?[0-9]+)\s+(-?[0-9]+)\s+(-?[0-9]+)', stats)
    alu_short = float(m_list[1][0]) if m_list else 0
ls_short = float(m_list[1][1]) if m_list else 0
tex_short = float(m_list[1][2]) if m_list else 0
alu_long = float(m_list[2][0]) if m_list else 0
ls_long = float(m_list[2][1]) if m_list else 0
tex_long = float(m_list[2][2]) if m_list else 0
return (registers, uniform_regs, alu_short, ls_short, tex_short, alu_long, ls_long, tex_long)
def get_shader_type(shader):
_, ext = os.path.splitext(shader)
if ext == '.vert':
return '--vertex'
elif ext == '.frag':
return '--fragment'
elif ext == '.comp':
return '--compute'
elif ext == '.tesc':
return '--tessellation_control'
elif ext == '.tese':
return '--tessellation_evaluation'
elif ext == '.geom':
return '--geometry'
else:
return ''
def get_shader_stats(shader):
path = create_temporary()
p = subprocess.Popen(['malisc', get_shader_type(shader), '--core', 'Mali-T760', '-V', shader], stdout = subprocess.PIPE, stderr = subprocess.PIPE)
stdout, stderr = p.communicate()
remove_file(path)
if p.returncode != 0:
print(stderr.decode('utf-8'))
raise OSError('malisc failed')
p.wait()
returned = stdout.decode('utf-8')
return parse_stats(returned)
def print_msl_compiler_version():
try:
subprocess.check_call(['xcrun', '--sdk', 'iphoneos', 'metal', '--version'])
print('...are the Metal compiler characteristics.\n') # display after so xcrun FNF is silent
except OSError as e:
if (e.errno != errno.ENOENT): # Ignore xcrun not found error
raise
except subprocess.CalledProcessError:
pass
def msl_compiler_supports_22():
try:
subprocess.check_call(['xcrun', '--sdk', 'macosx', 'metal', '-x', 'metal', '-std=macos-metal2.2', '-'],
stdin = subprocess.DEVNULL, stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL)
print('Current SDK supports MSL 2.2. Enabling validation for MSL 2.2 shaders.')
return True
except OSError as e:
print('Failed to check if MSL 2.2 is not supported. It probably is not.')
return False
except subprocess.CalledProcessError:
print('Current SDK does NOT support MSL 2.2. Disabling validation for MSL 2.2 shaders.')
return False
def path_to_msl_standard(shader):
if '.ios.' in shader:
if '.msl2.' in shader:
return '-std=ios-metal2.0'
elif '.msl21.' in shader:
return '-std=ios-metal2.1'
elif '.msl22.' in shader:
return '-std=ios-metal2.2'
elif '.msl11.' in shader:
return '-std=ios-metal1.1'
elif '.msl10.' in shader:
return '-std=ios-metal1.0'
else:
return '-std=ios-metal1.2'
else:
if '.msl2.' in shader:
return '-std=macos-metal2.0'
elif '.msl21.' in shader:
return '-std=macos-metal2.1'
elif '.msl22.' in shader:
return '-std=macos-metal2.2'
elif '.msl11.' in shader:
return '-std=macos-metal1.1'
else:
return '-std=macos-metal1.2'
def path_to_msl_standard_cli(shader):
if '.msl2.' in shader:
return '20000'
elif '.msl21.' in shader:
return '20100'
elif '.msl22.' in shader:
return '20200'
elif '.msl11.' in shader:
return '10100'
else:
return '10200'
def validate_shader_msl(shader, opt):
msl_path = reference_path(shader[0], shader[1], opt)
try:
if '.ios.' in msl_path:
msl_os = 'iphoneos'
else:
msl_os = 'macosx'
subprocess.check_call(['xcrun', '--sdk', msl_os, 'metal', '-x', 'metal', path_to_msl_standard(msl_path), '-Werror', '-Wno-unused-variable', msl_path])
print('Compiled Metal shader: ' + msl_path) # display after so xcrun FNF is silent
except OSError as oe:
if (oe.errno != errno.ENOENT): # Ignore xcrun not found error
raise
except subprocess.CalledProcessError:
print('Error compiling Metal shader: ' + msl_path)
raise RuntimeError('Failed to compile Metal shader')
def cross_compile_msl(shader, spirv, opt, iterations, paths):
spirv_path = create_temporary()
msl_path = create_temporary(os.path.basename(shader))
spirv_cmd = [paths.spirv_as, '--target-env', 'vulkan1.1', '-o', spirv_path, shader]
if '.preserve.' in shader:
spirv_cmd.append('--preserve-numeric-ids')
if spirv:
subprocess.check_call(spirv_cmd)
else:
subprocess.check_call([paths.glslang, '--amb' ,'--target-env', 'vulkan1.1', '-V', '-o', spirv_path, shader])
if opt:
if '.graphics-robust-access.' in shader:
subprocess.check_call([paths.spirv_opt, '--skip-validation', '-O', '--graphics-robust-access', '-o', spirv_path, spirv_path])
else:
subprocess.check_call([paths.spirv_opt, '--skip-validation', '-O', '-o', spirv_path, spirv_path])
spirv_cross_path = paths.spirv_cross
msl_args = [spirv_cross_path, '--entry', 'main', '--output', msl_path, spirv_path, '--msl', '--iterations', str(iterations)]
msl_args.append('--msl-version')
msl_args.append(path_to_msl_standard_cli(shader))
if '.swizzle.' in shader:
msl_args.append('--msl-swizzle-texture-samples')
if '.ios.' in shader:
msl_args.append('--msl-ios')
if '.pad-fragment.' in shader:
msl_args.append('--msl-pad-fragment-output')
if '.capture.' in shader:
msl_args.append('--msl-capture-output')
if '.domain.' in shader:
msl_args.append('--msl-domain-lower-left')
if '.argument.' in shader:
msl_args.append('--msl-argument-buffers')
if '.texture-buffer-native.' in shader:
msl_args.append('--msl-texture-buffer-native')
if '.framebuffer-fetch.' in shader:
msl_args.append('--msl-framebuffer-fetch')
if '.invariant-float-math.' in shader:
msl_args.append('--msl-invariant-float-math')
if '.emulate-cube-array.' in shader:
msl_args.append('--msl-emulate-cube-array')
if '.discrete.' in shader:
# Arbitrary for testing purposes.
msl_args.append('--msl-discrete-descriptor-set')
msl_args.append('2')
msl_args.append('--msl-discrete-descriptor-set')
msl_args.append('3')
if '.line.' in shader:
msl_args.append('--emit-line-directives')
if '.multiview.' in shader:
msl_args.append('--msl-multiview')
if '.viewfromdev.' in shader:
msl_args.append('--msl-view-index-from-device-index')
if '.dispatchbase.' in shader:
msl_args.append('--msl-dispatch-base')
if '.dynamic-buffer.' in shader:
# Arbitrary for testing purposes.
msl_args.append('--msl-dynamic-buffer')
msl_args.append('0')
msl_args.append('0')
msl_args.append('--msl-dynamic-buffer')
msl_args.append('1')
msl_args.append('2')
if '.device-argument-buffer.' in shader:
msl_args.append('--msl-device-
|
mpetyx/pyrif
|
3rdPartyLibraries/FuXi-master/test/OWLsuite.py
|
Python
|
mit
| 20,462
| 0.002883
|
import unittest
import os
import time
# import itertools
from pprint import (
pprint,
pformat
)
from FuXi.DLP import non_DHL_OWL_Semantics # , MapDLPtoNetwork
from FuXi.DLP.ConditionalAxioms import AdditionalRules
from FuXi.Horn.HornRules import HornFromN3
from FuXi.Horn.PositiveConditions import BuildUnitermFromTuple # , Uniterm
# from FuXi.Rete.AlphaNode import SUBJECT, PREDICATE, OBJECT, VARIABLE
# from FuXi.Rete.BetaNode import LEFT_MEMORY, RIGHT_MEMORY
from FuXi.Rete.RuleStore import SetupRuleStore
from FuXi.Rete import *
from FuXi.Rete.Magic import *
from FuXi.Rete.SidewaysInformationPassing import *
from FuXi.Rete.Util import generateTokenSet # , renderNetwork
from FuXi.SPARQL.BackwardChainingStore import TopDownSPARQLEntailingStore
from FuXi.Syntax.InfixOWL import *
# from FuXi.Rete.TopDown import (
# PrepareSipCollection,
# SipStrategy,
# RDFTuplesToSPARQL
# )
# from FuXi.Rete.Proof import ProofBuilder, PML, GMP_NS
from rdflib import Namespace, RDF, RDFS, URIRef, plugin
from rdflib.graph import Graph # , ReadOnlyGraphAggregate, ConjunctiveGraph
from rdfextras.sparql.parser import parse
# from rdflib.namespace import NamespaceManager
from rdflib.store import Store
from cStringIO import StringIO
from glob import glob
import logging
import warnings
warnings.filterwarnings('ignore', '.*', UserWarning)
warnings.filterwarnings('ignore', '.*', RuntimeWarning)
log = logging.getLogger(__name__)
RDFLIB_CONNECTION = ''
RDFLIB_STORE = 'IOMemory'
CWM_NS = Namespace("http://cwmTest/")
DC_NS = Namespace("http://purl.org/dc/elements/1.1/")
STRING_NS = Namespace("http://www.w3.org/2000/10/swap/string#")
MATH_NS = Namespace("http://www.w3.org/2000/10/swap/math#")
FOAF_NS = Namespace("http://xmlns.com/foaf/0.1/")
OWL_NS = Namespace("http://www.w3.org/2002/07/owl#")
TEST_NS = Namespace("http://metacognition.info/FuXi/DL-SHIOF-test.n3#")
LOG = Namespace("http://www.w3.org/2000/10/swap/log#")
RDF_TEST = Namespace('http://www.w3.org/2000/10/rdf-tests/rdfcore/testSchema#')
OWL_TEST = Namespace('http://www.w3.org/2002/03owlt/testOntology#')
LIST = Namespace('http://www.w3.org/2000/10/swap/list#')
passcount = failcount = 0
queryNsMapping = {
'test': 'http://metacognition.info/FuXi/test#',
'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'foaf': 'http://xmlns.com/foaf/0.1/',
'dc': 'http://purl.org/dc/elements/1.1/',
'rss': 'http://purl.org/rss/1.0/',
'rdfs': 'http://www.w3.org/2000/01/rdf-schema#',
    'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'owl': OWL_NS,
'rdfs': RDFS,
}
nsMap = {
u'rdfs': RDFS,
u'rdf': RDF,
u'rete': RETE_NS,
u'owl': OWL_NS,
u'': TEST_NS,
u'otest': OWL_TEST,
u'rtest': RDF_TEST,
u'foaf': URIRef("http://xmlns.com/foaf/0.1/"),
u'math': URIRef("http://www.w3.org/2000/10/swap/math#"),
}
MANIFEST_QUERY = \
"""
SELECT ?status ?premise ?conclusion ?feature ?descr
WHERE {
[
    a otest:PositiveEntailmentTest;
otest:feature ?feature;
rtest:description ?descr;
rtest:status ?status;
rtest:premiseDocument ?premise;
rtest:conclusionDocument ?conclusion
]
}"""
PARSED_MANIFEST_QUERY = parse(MANIFEST_QUERY)
Features2Skip = [
URIRef('http://www.w3.org/2002/07/owl#sameClassAs'),
]
NonNaiveSkip = [
'OWL/oneOf/Manifest002.rdf', # see Issue 25
'OWL/unionOf/Manifest002.rdf', # support for disjunctive horn logic
]
MagicTest2Skip = [
'OWL/oneOf/Manifest002.rdf', # needs 2nd order predicate derivation
'OWL/oneOf/Manifest003.rdf', # needs 2nd order predicate derivation
'OWL/disjointWith/Manifest001.rdf' # needs 2nd order predicate derivation
]
BFPTests2SKip = [
'OWL/FunctionalProperty/Manifest002.rdf', # Haven't reconciled *all* 2nd order predicate queries
'OWL/InverseFunctionalProperty/Manifest002.rdf', # " " " "
# 'OWL/oneOf/Manifest002.rdf', # " " " "
'OWL/oneOf/Manifest003.rdf', # " " " "
]
TopDownTests2Skip = [
'OWL/FunctionalProperty/Manifest002.rdf', # requires second order predicate derivation
'OWL/FunctionalProperty/Manifest004.rdf',
'OWL/InverseFunctionalProperty/Manifest002.rdf',
'OWL/InverseFunctionalProperty/Manifest004.rdf',
'OWL/oneOf/Manifest003.rdf', # Requires quantification over predicate symbol (2nd order)
# 'OWL/AllDifferent/Manifest001.rdf', # Not sure why
'OWL/distinctMembers/Manifest001.rdf' # Not sure why
]
Tests2Skip = [
'OWL/InverseFunctionalProperty/Manifest001.rdf', # owl:sameIndividualAs deprecated
'OWL/FunctionalProperty/Manifest001.rdf', # owl:sameIndividualAs deprecated
'OWL/Nothing/Manifest002.rdf', # owl:sameClassAs deprecated
]
patterns2Skip = [
'OWL/cardinality',
'OWL/samePropertyAs'
]
def tripleToTriplePattern(graph, triple):
return "%s %s %s" % tuple(
[renderTerm(graph, term)
for term in triple])
def renderTerm(graph, term):
if term == RDF.type:
return ' a '
else:
try:
return isinstance(term, BNode) and term.n3() or graph.qname(term)
except:
return term.n3()
class OwlTestSuite(unittest.TestCase):
def setUp(self):
rule_store, rule_graph, self.network = SetupRuleStore(makeNetwork=True)
self.network.nsMap = nsBinds
def tearDown(self):
pass
def calculateEntailments(self, factGraph):
start = time.time()
self.network.feedFactsToAdd(generateTokenSet(factGraph))
sTime = time.time() - start
if sTime > 1:
sTimeStr = "%s seconds" % sTime
else:
sTime = sTime * 1000
sTimeStr = "%s ms" % sTime
log.debug("Time to calculate closure on working memory: ", sTimeStr)
log.debug(self.network)
tNodeOrder = [tNode
for tNode in self.network.terminalNodes
if self.network.instantiations.get(tNode, 0)]
tNodeOrder.sort(key=lambda x: self.network.instantiations[x], reverse=True)
for termNode in tNodeOrder:
log.debug(termNode)
log.debug("\t", termNode.rules)
log.debug("\t\t%s instantiations" % self.network.instantiations[termNode])
# for c in AllClasses(factGraph):
# print CastClass(c,factGraph)
log.debug("==============")
self.network.inferredFacts.namespace_manager = factGraph.namespace_manager
return sTimeStr
def MagicOWLProof(self, goals, rules, factGraph, conclusionFile):
progLen = len(rules)
magicRuleNo = 0
dPreds = []
for rule in AdditionalRules(factGraph):
rules.append(rule)
if not GROUND_QUERY and REASONING_STRATEGY != 'gms':
goalDict = dict([((Variable('SUBJECT'), goalP, goalO), goalS)
for goalS, goalP, goalO in goals])
goals = goalDict.keys()
assert goals
if REASONING_STRATEGY == 'gms':
for rule in MagicSetTransformation(factGraph,
rules,
goals,
dPreds):
magicRuleNo += 1
self.network.buildNetworkFromClause(rule)
self.network.rules.add(rule)
if DEBUG:
log.debug("\t", rule)
log.debug("rate of reduction in the size of the program: ",
(100 - (float(magicRuleNo) / float(progLen)) * 100))
if REASONING_STRATEGY in ['bfp', 'sld']: # and not GROUND_QUERY:
reasoningAlg = TOP_DOWN_METHOD if REASONING_STRATEGY == 'sld' \
else BFP_METHOD
topDownStore = TopDownSPARQLEntailingStore(
factGraph.store,
factGraph,
idb=rules,
DEBUG=DEBUG,
nsBindings=nsMap,
decisionProcedure=reasoningAlg,
identi
|
xuleiboy1234/autoTitle
|
tensorflow/tensorflow/contrib/boosted_trees/lib/learner/batch/ordinal_split_handler_test.py
|
Python
|
mit
| 44,613
| 0.005828
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for checking stats accumulator related ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.boosted_trees.lib.learner.batch import ordinal_split_handler
from tensorflow.contrib.boosted_trees.proto import learner_pb2
from tensorflow.contrib.boosted_trees.proto import split_info_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resources
from tensorflow.python.platform import googletest
def get_empty_tensors(gradient_shape, hessian_shape):
empty_hess_shape = [1] + hessian_shape.as_list()
empty_grad_shape = [1] + gradient_shape.as_list()
empty_gradients = constant_op.constant(
[], dtype=dtypes.float32, shape=empty_grad_shape)
empty_hessians = constant_op.constant(
[], dtype=dtypes.float32, shape=empty_hess_shape)
return empty_gradients, empty_hessians
class DenseSplitHandlerTest(test_util.TensorFlowTestCase):
def testGenerateFeatureSplitCandidates(self):
with self.test_session() as sess:
# The data looks like the following:
# Example | Gradients | Partition | Dense Quantile |
# i0 | (0.2, 0.12) | 0 | 1 |
# i1 | (-0.5, 0.07) | 0 | 1 |
# i2 | (1.2, 0.2) | 0 | 0 |
# i3 | (4.0, 0.13) | 1 | 1 |
dense_column = array_ops.constant([0.52, 0.52, 0.3, 0.52])
gradients = array_ops.constant([0.2, -0.5, 1.2, 4.0])
hessians = array_ops.constant([0.12, 0.07, 0.2, 0.13])
partition_ids = array_ops.constant([0, 0, 0, 1], dtype=dtypes.int32)
class_id = -1
gradient_shape = tensor_shape.scalar()
hessian_shape = tensor_shape.scalar()
split_handler = ordinal_split_handler.DenseSplitHandler(
l1_regularization=0.1,
l2_regularization=1,
tree_complexity_regularization=0,
min_node_weight=0,
epsilon=0.001,
num_quantiles=10,
feature_column_group_id=0,
dense_float_column=dense_column,
init_stamp_token=0,
gradient_shape=gradient_shape,
hessian_shape=hessian_shape,
multiclass_strategy=learner_pb2.LearnerConfig.TREE_PER_CLASS)
resources.initialize_resources(resources.shared_resources()).run()
empty_gradients, empty_hessians = get_empty_tensors(
gradient_shape, hessian_shape)
example_weights = array_ops.ones([4, 1], dtypes.float32)
update_1 = split_handler.update_stats_sync(
0,
partition_ids,
gradients,
hessians,
empty_gradients,
empty_hessians,
example_weights,
is_active=array_ops.constant([True, True]))
with ops.control_dependencies([update_1]):
are_splits_ready = split_handler.make_splits(0, 1, class_id)[0]
with ops.control_dependencies([are_splits_ready]):
update_2 = split_handler.update_stats_sync(
1,
partition_ids,
gradients,
hessians,
empty_gradients,
empty_hessians,
example_weights,
is_active=array_ops.constant([True, True]))
with ops.control_dependencies([update_2]):
are_splits_ready2, partitions, gains, splits = (
split_handler.make_splits(1, 2, class_id))
are_splits_ready, are_splits_ready2, partitions, gains, splits = (
sess.run([
are_splits_ready, are_splits_ready2, partitions, gains, splits
]))
# During the first iteration, inequality split handlers are not going to
# have any splits. Make sure that we return not_ready in that case.
self.assertFalse(are_splits_ready)
self.assertTrue(are_splits_ready2)
self.assertAllEqual([0, 1], partitions)
# Check the split on partition 0.
# -(1.2 - 0.1) / (0.2 + 1)
expected_left_weight = -0.91666
# expected_left_weight * -(1.2 - 0.1)
expected_left_gain = 1.0083333333333331
# (-0.5 + 0.2 + 0.1) / (0.19 + 1)
expected_right_weight = 0.1680672
# expected_right_weight * -(-0.5 + 0.2 + 0.1))
expected_right_gain = 0.033613445378151252
# (0.2 + -0.5 + 1.2 - 0.1) ** 2 / (0.12 + 0.07 + 0.2 + 1)
expected_bias_gain = 0.46043165467625885
split_info = split_info_pb2.SplitInfo()
split_info.ParseFromString(splits[0])
left_child = split_info.left_child.vector
right_child = split_info.right_child.vector
split_node = split_info.split_node.dense_float_binary_split
self.assertAllClose(
expected_left_gain + expected_right_gain - expected_bias_gain, gains[0],
0.00001)
self.assertAllClose([expected_left_weight], left_child.value, 0.00001)
self.assertAllClose([expected_right_weight], right_child.value, 0.00001)
self.assertEqual(0, split_node.feature_column)
self.assertAllClose(0.3, split_node.threshold, 0.00001)
# Check the split on partition 1.
# (-4 + 0.1) / (0.13 + 1)
expected_left_weight = -3.4513274336283186
# (-4 + 0.1) ** 2 / (0.13 + 1)
expected_left_gain = 13.460176991150442
expected_right_weight = 0
expected_right_gain = 0
# (-4 + 0.1) ** 2 / (0.13 + 1)
expected_bias_gain = 13.460176991150442
# Verify candidate for partition 1, there's only one active bucket here
# so zero gain is expected.
split_info = split_info_pb2.SplitInfo()
split_info.ParseFromString(splits[1])
left_child = split_info.left_child.vector
right_child = split_info.right_child.vector
split_node = split_info.split_node.dense_float_binary_split
self.assertAllClose(0.0, gains[1], 0.00001)
self.assertAllClose([expected_left_weight], left_child.value, 0.00001)
self.assertAllClose([expected_right_weight], right_child.value, 0.00001)
self.assertEqual(0, split_node.feature_column)
self.assertAllClose(0.52, split_node.threshold, 0.00001)
def testGenerateFeatureSplitCandidatesMulticlassFullHessian(self):
with self.test_session() as sess:
dense_column = array_ops.constant([0.52, 0.52, 0.3, 0.52])
# Batch size is 4, 2 gradients per each instance.
gradients = array_ops.constant(
[[0.2, 0.1], [-0.5, 0.2], [1.2, 3.4], [4.0, -3.5]], shape=[4, 2])
# 2x2 matrix for each instance
hessian_0 = [[0.12, 0.02], [0.3, 0.11]]
hessian_1 = [[0.07, -0.2], [-0.5, 0.2]]
hessian_2 = [[0.2, -0.23], [-0.8, 0.9]]
hessian_3 = [[0.13, -0.3], [-1.5, 2.2]]
hessians = array_ops.constant(
[hessian_0, hessian_1, hessian_2, hessian_3])
partition_ids = array_ops.constant([0, 0, 0, 1], dtype=dtypes.int32)
class_id = -1
gradient_shape = tensor_shape.TensorShape([2])
hessian_shape = tensor_shape.TensorShape([2, 2])
split_handler = ordinal_split_handler.DenseSplitHandler(
l1_regularization=0,
l2_regularization=1,
tree_complexity_regularization=0,
min_node_weight=0,
epsilon=0.001,
num_quantiles=3,
feature_column_group_id=0,
dense_float_column=dens
|
molobrakos/home-assistant
|
homeassistant/components/cloud/const.py
|
Python
|
apache-2.0
| 1,006
| 0
|
"""Constants for the cloud component."""
DOMAIN = 'cloud'
REQUEST_TIMEOUT = 10
PREF_ENABLE_ALEXA = 'alexa_enabled'
PREF_ENABLE_GOOGLE = 'google_enabled'
PREF_ENABLE_REMOTE = 'remote_enabled'
PREF_GOOGLE_ALLOW_UNLOCK = 'google_allow_unlock'
PREF_CLOUDHOOKS = 'cloudhooks'
PREF_CLOUD_USER = 'cloud_user'
CONF_ALEXA = 'alexa'
CONF_ALIASES = 'aliases'
CONF_COGNITO_CLIENT_ID = 'cognito_client_id'
CONF_ENTITY_CONFIG = 'entity_config'
CONF_FILTER = 'filter'
CONF_GOOGLE_ACTIONS = 'google_actions'
CONF_RELAYER = 'relayer'
CONF_USER_POOL_ID = 'user_pool_id'
CONF_GOOGLE_ACTIONS_SYNC_URL = 'google_actions_sync_url'
CONF_SUBSCRIPTION_INFO_URL = 'subscription_info_url'
CONF_CLOUDHOOK_CREATE_URL = 'cloudhook_create_url'
CONF_REMOTE_API_URL = 'remote_api_url'
CONF_ACME_DIRECTORY_SERVER = 'acme_directory_server'
MODE_DEV = "development"
MODE_PROD = "production"
DISPATCHER_REMOTE_UPDATE = 'cloud_remote_update'
class InvalidTrustedNetworks(Exception):
"""Raised when invalid trusted networks config."""
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractTheevilduketranslationsBlogspotCom.py
|
Python
|
bsd-3-clause
| 584
| 0.032534
|
def extractTheevilduketranslationsBlogspotCom(item):
'''
Parser for 'theevilduketranslations.blogspot.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
terabyte/Jira-Commit-Acceptance-Plugin
|
src/main/resources/client/python/cvs/jira-client.py
|
Python
|
bsd-3-clause
| 1,566
| 0.01341
|
#!/usr/bin/python
# JIRA commit acceptance python client for CVS
# Author: istvan.vamosi@midori.hu
# $Id$
import sys
import urlparse
import xmlrpclib
# configure JIRA access
# ("projectKey" can contain multiple comma-separated JIRA project keys like "projectKey = 'TST,ARP'".
# If you specify multiple keys, the commit will be accepted if at least one project listed accepts it.
# Or you can specify "projectKey = '*'" to force using the global commit acceptance settings if you don't
# want to specify any exact project key.)
jiraBaseURL = '<JIRA base URL>'
jiraLogin = '<JIRA user name>'
jiraPassword = '<JIRA password>'
projectKey = '<JIRA project key>'
# get committer passed as arg[1]
committer = sys.argv[1]
# slurp log message from log message file passed as arg[2]
try:
f = open(sys.argv[2])
commitMessage = f.read()
f.close()
commitMessage = commitMessage.rstrip('\n\r')
except:
print 'Unable to open ' + sys.argv[2] + '.'
sys.exit(1)
# print arguments
print 'Committer: ' + committer
print 'Commit message: "' + commitMessage + '"'
# invoke JIRA web service
xmlrpcUrl = jiraBaseURL + '/rpc/xmlrpc'
try:
s = xmlrpclib.ServerProxy(xmlrpcUrl)
acceptance, comment = s.commitacc.acceptCommit(jiraLogin, jiraPassword, committer, projectKey, commitMessage).split('|');
except:
acceptance, comment = ['false', 'Unable to connect to the JIRA server at "' + jiraBaseURL + '".']
if acceptance == 'true':
print 'Commit accepted.'
sys.exit(0)
else:
print 'Commit rejected: ' + comment
sys.exit(1)
|
rousseab/pymatgen
|
pymatgen/analysis/molecule_matcher.py
|
Python
|
mit
| 24,855
| 0.000483
|
# coding: utf-8
from __future__ import unicode_literals
"""
This module provides classes to perform fitting of molecule with arbitrary
atom orders.
This module is supposed to perform exact comparisons without the atom order
correspondence prerequisite, while molecule_structure_comparator is supposed
to do rough comparisons with the atom order correspondence prerequisite.
"""
__author__ = "Xiaohui Qu"
__copyright__ = "Copyright 2011, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Xiaohui Qu"
__email__ = "xhqu1981@gmail.com"
__status__ = "Experimental"
__date__ = "Jun 7, 2013"
import re
import math
import abc
import itertools
import copy
from pymatgen.serializers.json_coders import PMGSONable
from monty.dev import requires
from pymatgen.io.babel import BabelMolAdaptor
import six
from six.moves import zip
try:
import openbabel as ob
except ImportError:
ob = None
class AbstractMolAtomMapper(six.with_metaclass(abc.ABCMeta, PMGSONable)):
"""
Abstract molecular atom order mapping class. A mapping will be able to
find the uniform atom order of two molecules that can pair the
geometrically equivalent atoms.
"""
@abc.abstractmethod
def uniform_labels(self, mol1, mol2):
"""
Pair the geometrically equivalent atoms of the molecules.
Args:
mol1: First molecule. OpenBabel OBMol or pymatgen Molecule object.
mol2: Second molecule. OpenBabel OBMol or pymatgen Molecule object.
Returns:
            (list1, list2) if a uniform atom order is found. list1 and list2
            are for mol1 and mol2, respectively. Their lengths equal the
            number of atoms. They represent the uniform atom order of the
            two molecules. The value of each element is the original atom
            index in mol1 or mol2 of the current atom in the uniform atom
            order.
            (None, None) if a uniform atom order is not available.
"""
pass
@abc.abstractmethod
def get_molecule_hash(self, mol):
"""
Defines a hash for molecules. This allows molecules to be grouped
efficiently for comparison.
Args:
mol: The molecule. OpenBabel OBMol or pymatgen Molecule object
Returns:
A hashable object. Examples can be string formulas, etc.
"""
pass
@classmethod
def from_dict(cls, d):
for trans_modules in ['molecule_matcher']:
mod = __import__('pymatgen.analysis.' + trans_modules,
globals(), locals(), [d['@class']], -1)
if hasattr(mod, d['@class']):
class_proxy = getattr(mod, d['@class'])
from_dict_proxy = getattr(class_proxy, "from_dict")
return from_dict_proxy(d)
raise ValueError("Invalid Comparator dict")
class IsomorphismMolAtomMapper(AbstractMolAtomMapper):
"""
Pair atoms by isomorphism permutations in the OpenBabel::OBAlign class
"""
def uniform_labels(self, mol1, mol2):
"""
Pair the geometrically equivalent atoms of the molecules.
Calculate RMSD on all possible isomorphism mappings and return mapping
with the least RMSD
Args:
mol1: First molecule. OpenBabel OBMol or pymatgen Molecule object.
mol2: Second molecule. OpenBabel OBMol or pymatgen Molecule object.
Returns:
            (list1, list2) if a uniform atom order is found. list1 and list2
            are for mol1 and mol2, respectively. Their lengths equal the
            number of atoms. They represent the uniform atom order of the
            two molecules. The value of each element is the original atom
            index in mol1 or mol2 of the current atom in the uniform atom
            order.
            (None, None) if a uniform atom order is not available.
"""
obmol1 = BabelMolAdaptor(mol1).openbabel_mol
obmol2 = BabelMolAdaptor(mol2).openbabel_mol
h1 = self.get_molecule_hash(obmol1)
h2 = self.get_molecule_hash(obmol2)
if h1 != h2:
return None, None
query = ob.CompileMoleculeQuery(obmol1)
isomapper = ob.OBIsomorphismMapper.GetInstance(query)
isomorph = ob.vvpairUIntUInt()
isomapper.MapAll(obmol2, isomorph)
sorted_isomorph = [sorted(x, key=lambda morp: morp[0])
for x in isomorph]
label2_list = tuple([tuple([p[1] + 1 for p in x])
for x in sorted_isomorph])
vmol1 = obmol1
aligner = ob.OBAlign(True, False)
aligner.SetRefMol(vmol1)
least_rmsd = float("Inf")
best_label2 = None
label1 = list(range(1, obmol1.NumAtoms() + 1))
# noinspection PyProtectedMember
elements1 = InchiMolAtomMapper._get_elements(vmol1, label1)
for label2 in label2_list:
# noinspection PyProtectedMember
elements2 = InchiMolAtomMapper._get_elements(obmol2, label2)
if elements1 != elements2:
continue
vmol2 = ob.OBMol()
for i in label2:
vmol2.AddAtom(obmol2.GetAtom(i))
aligner.SetTargetMol(vmol2)
aligner.Align()
rmsd = aligner.GetRMSD()
if rmsd < least_rmsd:
least_rmsd = rmsd
best_label2 = copy.copy(label2)
return label1, best_label2
def get_molecule_hash(self, mol):
"""
Return inchi as molecular hash
"""
obconv = ob.OBConversion()
obconv.SetOutFormat(str("inchi"))
obconv.AddOption(str("X"), ob.OBConversion.OUTOPTIONS, str("DoNotAddH"))
inchi_text = obconv.WriteString(mol)
match = re.search("InChI=(?P<inchi>.+)\n", inchi_text)
return match.group("inchi")
def as_dict(self):
return {"version": __version__, "@module": self.__class__.__module__,
"@class": self.__class__.__name__}
@classmethod
def from_dict(cls, d):
return IsomorphismMolAtomMapper()
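# Illustrative sketch (hypothetical molecules, not part of the original module):
# pairing the atoms of two differently ordered copies of the same molecule.
def _example_uniform_labels(mol1, mol2):
    mapper = IsomorphismMolAtomMapper()
    label1, label2 = mapper.uniform_labels(mol1, mol2)
    # Each list holds original atom indices in the shared uniform atom order;
    # (None, None) means no geometric pairing was found.
    return label1, label2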
class InchiMolAtomMapper(AbstractMolAtomMapper):
"""
Pair atoms by inchi labels.
Args:
angle_tolerance: Angle threshold to assume linear molecule. In degrees.
"""
def __init__(self, angle_tolerance=10.0):
self._angle_tolerance = angle_tolerance
self._assistant_mapper = IsomorphismMolAtomMapper()
def as_dict(self):
return {"version": __version__, "@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"angle_tolerance": self._angle_tolerance}
@classmethod
def from_dict(cls, d):
return InchiMolAtomMapper(angle_tolerance=d["angle_tolerance"])
@staticmethod
def _inchi_labels(mol):
"""
Get the inchi canonical labels of the heavy atoms in the molecule
Args:
mol: The molecule. OpenBabel OBMol object
Returns:
The label mappings. List of tuple of canonical label,
original label
List of equivalent atoms.
"""
        obconv = ob.OBConversion()
obconv.SetOutFormat(str("inchi"))
obconv.AddOption(str("a"), ob.OBConversion.OUTOPTIONS)
obconv.AddOption(str("X"), ob.OBConversion.OUTOPTIONS, str("DoNotAddH"))
inchi_text = obconv.WriteString(mol)
match = re.search("InChI=(?P<inchi>.+)\nAuxInfo=.+"
|
"/N:(?P<labels>[0-9,;]+)/(E:(?P<eq_atoms>[0-9,"
";\(\)]*)/)?", inchi_text)
inchi = match.group("inchi")
label_text = match.group("labels")
eq_atom_text = match.group("eq_atoms")
heavy_atom_labels = tuple([int(i) for i in label_text.replace(
';', ',').split(',')])
eq_atoms = []
if eq_atom_text is not None:
eq_tokens = re.findall('\(((?:[0-9]+,)+[0-9]+)\)', eq_atom_text
.replace(';', ','))
eq_atoms = tuple([tuple([int(i) for i in t.split(',')])
for t in eq_tokens])
return heavy_atom_labels, eq_atoms, inchi
|
pjryan126/solid-start-careers
|
store/api/zillow/venv/lib/python2.7/site-packages/pandas/tests/test_base.py
|
Python
|
gpl-2.0
| 38,533
| 0.000052
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import re
import sys
from datetime import datetime, timedelta
import nose
import numpy as np
import pandas as pd
import pandas.compat as compat
import pandas.core.common as com
import pandas.util.testing as tm
from pandas import (Series, Index, DatetimeIndex, TimedeltaIndex, PeriodIndex,
Timedelta)
from pandas.compat import u, StringIO
from pandas.core.base import (FrozenList, FrozenNDArray, PandasDelegate,
NoNewAttributesMixin)
from pandas.tseries.base import DatetimeIndexOpsMixin
from pandas.util.testing import (assertRaisesRegexp, assertIsInstance)
class CheckStringMixin(object):
def test_string_methods_dont_fail(self):
repr(self.container)
str(self.container)
bytes(self.container)
if not compat.PY3:
unicode(self.container) # noqa
def test_tricky_container(self):
if not hasattr(self, 'unicode_container'):
raise nose.SkipTest('Need unicode_container to test with this')
repr(self.unicode_container)
str(self.unicode_container)
bytes(self.unicode_container)
if not compat.PY3:
unicode(self.unicode_container) # noqa
class CheckImmutable(object):
mutable_regex = re.compile('does not support mutable operations')
def check_mutable_error(self, *args, **kwargs):
# pass whatever functions you normally would to assertRaises (after the
# Exception kind)
assertRaisesRegexp(TypeError, self.mutable_regex, *args, **kwargs)
def test_no_mutable_funcs(self):
def setitem():
self.container[0] = 5
self.check_mutable_error(setitem)
def setslice():
self.container[1:2] = 3
self.check_mutable_error(setslice)
def delitem():
del self.container[0]
self.check_mutable_error(delitem)
def delslice():
del self.container[0:3]
self.check_mutable_error(delslice)
mutable_methods = getattr(self, "mutable_methods", [])
for meth in mutable_methods:
self.check_mutable_error(getattr(self.container, meth))
def test_slicing_maintains_type(self):
result = self.container[1:2]
expected = self.lst[1:2]
self.check_result(result, expected)
def check_result(self, result, expected, klass=None):
klass = klass or self.klass
assertIsInstance(result, klass)
self.assertEqual(result, expected)
class TestFrozenList(CheckImmutable, CheckStringMixin, tm.TestCase):
mutable_methods = ('extend', 'pop', 'remove', 'insert')
unicode_container = FrozenList([u("\u05d0"), u("\u05d1"), "c"])
def setUp(self):
self.lst = [1, 2, 3, 4, 5]
self.container = FrozenList(self.lst)
self.klass = FrozenList
def test_add(self):
result = self.container + (1, 2, 3)
expected = FrozenList(self.lst + [1, 2, 3])
self.check_result(result, expected)
result = (1, 2, 3) + self.container
expected = FrozenList([1, 2, 3] + self.lst)
self.check_result(result, expected)
def test_inplace(self):
q = r = self.container
q += [5]
self.check_result(q, self.lst + [5])
# other shouldn't be mutated
self.check_result(r, self.lst)
class TestFrozenNDArray(CheckImmutable, CheckStringMixin, tm.TestCase):
mutable_methods = ('put', 'itemset', 'fill')
unicode_container = FrozenNDArray([u("\u05d0"), u("\u05d1"), "c"])
def setUp(self):
self.lst = [3, 5, 7, -2]
self.container = FrozenNDArray(self.lst)
self.klass = FrozenNDArray
def test_shallow_copying(self):
original = self.container.copy()
assertIsInstance(self.container.view(), FrozenNDArray)
self.assertFalse(isinstance(
self.container.view(np.ndarray), FrozenNDArray))
self.assertIsNot(self.container.view(), self.container)
self.assert_numpy_array_equal(self.container, original)
# shallow copy should be the same too
assertIsInstance(self.container._shallow_copy(), FrozenNDArray)
# setting should not be allowed
def testit(container):
container[0] = 16
self.check_mutable_error(testit, self.container)
def test_values(self):
original = self.container.view(np.ndarray).copy()
n = original[0] + 15
vals = self.container.values()
self.assert_numpy_array_equal(original, vals)
self.assertIsNot(original, vals)
vals[0] = n
self.assert_numpy_array_equal(self.container, original)
self.assertEqual(vals[0], n)
class TestPandasDelegate(tm.TestCase):
def setUp(self):
pass
def test_invalida_delgation(self):
# these show that in order for the delegation to work
        # the _delegate_* methods need to be overridden to not raise a TypeError
class Delegator(object):
_properties = ['foo']
_methods = ['bar']
def _set_foo(self, value):
self.foo = value
def _get_foo(self):
return self.foo
            foo = property(_get_foo, _set_foo, doc="foo property")
def bar(self, *args, **kwargs):
""" a test bar method """
|
pass
class Delegate(PandasDelegate):
def __init__(self, obj):
self.obj = obj
Delegate._add_delegate_accessors(delegate=Delegator,
accessors=Delegator._properties,
typ='property')
Delegate._add_delegate_accessors(delegate=Delegator,
accessors=Delegator._methods,
typ='method')
delegate = Delegate(Delegator())
def f():
delegate.foo
self.assertRaises(TypeError, f)
def f():
delegate.foo = 5
self.assertRaises(TypeError, f)
def f():
delegate.foo()
self.assertRaises(TypeError, f)
class Ops(tm.TestCase):
def _allow_na_ops(self, obj):
"""Whether to skip test cases including NaN"""
if (isinstance(obj, Index) and
(obj.is_boolean() or not obj._can_hold_na)):
# don't test boolean / int64 index
return False
return True
def setUp(self):
self.bool_index = tm.makeBoolIndex(10, name='a')
self.int_index = tm.makeIntIndex(10, name='a')
self.float_index = tm.makeFloatIndex(10, name='a')
self.dt_index = tm.makeDateIndex(10, name='a')
self.dt_tz_index = tm.makeDateIndex(10, name='a').tz_localize(
tz='US/Eastern')
self.period_index = tm.makePeriodIndex(10, name='a')
self.string_index = tm.makeStringIndex(10, name='a')
self.unicode_index = tm.makeUnicodeIndex(10, name='a')
arr = np.random.randn(10)
self.int_series = Series(arr, index=self.int_index, name='a')
self.float_series = Series(arr, index=self.float_index, name='a')
self.dt_series = Series(arr, index=self.dt_index, name='a')
self.dt_tz_series = self.dt_tz_index.to_series(keep_tz=True)
self.period_series = Series(arr, index=self.period_index, name='a')
self.string_series = Series(arr, index=self.string_index, name='a')
types = ['bool', 'int', 'float', 'dt', 'dt_tz', 'period', 'string',
'unicode']
fmts = ["{0}_{1}".format(t, f)
for t in types for f in ['index', 'series']]
self.objs = [getattr(self, f)
for f in fmts if getattr(self, f, None) is not None]
def check_ops_properties(self, props, filter=None, ignore_failures=False):
for op in props:
for o in self.is_valid_objs:
# if a filter, skip if it doesn't match
if filter is not None:
filt = o.index if isinstance(o, Series) else o
if not filter(filt):
|
acehanks/projects
|
tripadvisor_scrapy/tripad/items.py
|
Python
|
mit
| 285
| 0
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class TripadItem(scrapy.Item):
    # define the fields for your item here like:
# name = scrapy.Field()
pass
|
Cito/sqlalchemy
|
test/orm/test_versioning.py
|
Python
|
mit
| 27,671
| 0.003795
|
import datetime
import sqlalchemy as sa
from sqlalchemy.testing import engines
from sqlalchemy import testing
from sqlalchemy import Integer, String, Date, ForeignKey, literal_column, \
orm, exc, select, TypeDecorator
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, Session, \
create_session, column_property, sessionmaker,\
exc as orm_exc
from sqlalchemy.testing import eq_, ne_, assert_raises, assert_raises_message
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL
_uuids = [
'1fc614acbb904742a2990f86af6ded95',
'23e253786f4d491b9f9d6189dc33de9b',
'fc44910db37e43fd93e9ec8165b885cf',
'0187a1832b4249e6b48911821d86de58',
'778af6ea2fb74a009d8d2f5abe5dc29a',
'51a6ce031aff47e4b5f2895c4161f120',
'7434097cd319401fb9f15fa443ccbbbb',
'9bc548a8128e4a85ac18060bc3f4b7d3',
'59548715e3c440b7bcb96417d06f7930',
'd7647c7004734de196885ca2bd73adf8',
'70cef121d3ff48d39906b6d1ac77f41a',
'ee37a8a6430c466aa322b8a215a0dd70',
'782a5f04b4364a53a6fce762f48921c1',
'bef510f2420f4476a7629013ead237f5',
]
def make_uuid():
"""generate uuids even on Python 2.4 which has no 'uuid'"""
return _uuids.pop(0)
class VersioningTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('version_table', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('version_id', Integer, nullable=False),
Column('value', String(40), nullable=False))
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
def _fixture(self):
Foo, version_table = self.classes.Foo, self.tables.version_table
mapper(Foo, version_table,
version_id_col=version_table.c.version_id)
s1 = Session()
return s1
@engines.close_open_connections
def test_notsane_warning(self):
Foo = self.classes.Foo
save = testing.db.dialect.supports_sane_rowcount
testing.db.dialect.supports_sane_rowcount = False
try:
s1 = self._fixture()
f1 = Foo(value='f1')
f2 = Foo(value='f2')
s1.add_all((f1, f2))
s1.commit()
f1.value='f1rev2'
assert_raises(sa.exc.SAWarning, s1.commit)
finally:
testing.db.dialect.supports_sane_rowcount = save
@testing.emits_warning_on('+zxjdbc', r'.*does not support (update|delete)d rowcount')
def test_basic(self):
Foo = self.classes.Foo
s1 = self._fixture()
f1 = Foo(value='f1')
f2 = Foo(value='f2')
s1.add_all((f1, f2))
s1.commit()
f1.value='f1rev2'
s1.commit()
s2 = create_session(autocommit=False)
f1_s = s2.query(Foo).get(f1.id)
f1_s.value='f1rev3'
s2.commit()
f1.value='f1rev3mine'
# Only dialects with a sane rowcount can detect the
# StaleDataError
if testing.db.dialect.supports_sane_rowcount:
assert_raises_message(sa.orm.exc.StaleDataError,
r"UPDATE statement on table 'version_table' expected "
r"to update 1 row\(s\); 0 were matched.",
s1.commit),
s1.rollback()
else:
s1.commit()
        # new in 0.5! don't need to close the session
f1 = s1.query(Foo).get(f1.id)
f2 = s1.query(Foo).get(f2.id)
f1_s.value='f1rev4'
s2.commit()
s1.delete(f1)
s1.delete(f2)
if testing.db.dialect.supports_sane_rowcount:
assert_raises_message(
sa.orm.exc.StaleDataError,
r"DELETE statement on table 'version_table' expected "
r"to delete 2 row\(s\); 1 were matched.",
s1.commit)
else:
s1.commit()
@testing.emits_warning_on('+zxjdbc', r'.*does not support (update|delete)d rowcount')
def test_bump_version(self):
"""test that version number can be bumped.
Ensures that the UPDATE or DELETE is against the
last committed version of version_id_col, not the modified
state.
"""
Foo = self.classes.Foo
s1 = self._fixture()
f1 = Foo(value='f1')
s1.add(f1)
s1.commit()
eq_(f1.version_id, 1)
f1.version_id = 2
s1.commit()
eq_(f1.version_id, 2)
# skip an id, test that history
# is honored
f1.version_id = 4
f1.value = "something new"
s1.commit()
eq_(f1.version_id, 4)
f1.version_id = 5
s1.delete(f1)
s1.commit()
eq_(s1.query(Foo).count(), 0)
@testing.emits_warning(r'.*does not support updated rowcount')
@engines.close_open_connections
def test_versioncheck(self):
"""query.with_lockmode performs a 'version check' on an already loaded instance"""
Foo = self.classes.Foo
s1 = self._fixture()
f1s1 = Foo(value='f1 value')
s1.add(f1s1)
s1.commit()
s2 = create_session(autocommit=False)
f1s2 = s2.query(Foo).get(f1s1.id)
f1s2.value='f1 new value'
s2.commit()
# load, version is wrong
assert_raises_message(
sa.orm.exc.StaleDataError,
r"Instance .* has version id '\d+' which does not "
r"match database-loaded v
|
ersion id '\d+'",
s1.query(Foo).with_lockmode('read').get, f1s1.id
)
# reload it - this expires the old version first
s1.refresh(f1s1, lockmode='read')
# now assert version OK
        s1.query(Foo).with_lockmode('read').get(f1s1.id)
# assert brand new load is OK too
s1.close()
s1.query(Foo).with_lockmode('read').get(f1s1.id)
@testing.emits_warning(r'.*does not support updated rowcount')
@engines.close_open_connections
@testing.requires.update_nowait
def test_versioncheck_for_update(self):
"""query.with_lockmode performs a 'version check' on an already loaded instance"""
Foo = self.classes.Foo
s1 = self._fixture()
f1s1 = Foo(value='f1 value')
s1.add(f1s1)
s1.commit()
s2 = create_session(autocommit=False)
f1s2 = s2.query(Foo).get(f1s1.id)
s2.refresh(f1s2, lockmode='update')
f1s2.value='f1 new value'
assert_raises(
exc.DBAPIError,
s1.refresh, f1s1, lockmode='update_nowait'
)
s1.rollback()
s2.commit()
s1.refresh(f1s1, lockmode='update_nowait')
assert f1s1.version_id == f1s2.version_id
@testing.emits_warning(r'.*does not support updated rowcount')
@engines.close_open_connections
def test_noversioncheck(self):
"""test query.with_lockmode works when the mapper has no version id col"""
Foo, version_table = self.classes.Foo, self.tables.version_table
s1 = create_session(autocommit=False)
mapper(Foo, version_table)
f1s1 = Foo(value="foo", version_id=0)
s1.add(f1s1)
s1.commit()
s2 = create_session(autocommit=False)
f1s2 = s2.query(Foo).with_lockmode('read').get(f1s1.id)
assert f1s2.id == f1s1.id
assert f1s2.value == f1s1.value
@testing.emits_warning_on('+zxjdbc', r'.*does not support updated rowcount')
def test_merge_no_version(self):
Foo = self.classes.Foo
s1 = self._fixture()
f1 = Foo(value='f1')
s1.add(f1)
s1.commit()
f1.value = 'f2'
s1.commit()
f2 = Foo(id=f1.id, value='f3')
f3 = s1.merge(f2)
assert f3 is f1
s1.commit()
eq_(f3.version_id, 3)
@testing.emits_warning_on('+zxjdbc', r'.*does not support updated rowcount')
def test_merge_correct_version(self):
Foo = self.classes.Foo
s1 = self._fixture()
f1 = Foo(value='f1')
s1.add(f1)
s1.commit()
f1
|
DataViva/dataviva-api
|
app/models/cnes_professional.py
|
Python
|
mit
| 2,123
| 0.000471
|
from sqlalchemy import Column, Integer, String, func, Boolean
from app import db
class CnesProfessional(db.Model):
__tablename__ = 'cnes_professional'
year = Column(Integer, primary_key=True)
region = Column(String(1), primary_key=True)
mesoregion = Column(String(4), primary_key=True)
microregion = Column(String(5), primary_key=True)
state = Column(String(2), primary_key=True)
municipality = Column(String(7), primary_key=True)
establishment = Column(String(7), primary_key=True)
unit_type = Column(String(2), primary_key=True)
occupation_family = Column(String(4), primary_key=True)
occupation_group = Column(String(1), primary_key=True)
cns_number = Column(String(15), primary_key=True)
professional_link = Column(String(8), primary_key=True)
sus_healthcare_professional = Column(String(1), primary_key=True)
other_hours_worked = Column(Integer, primary_key=True)
hospital_hour = Column(Integer, primary_key=True)
ambulatory_hour = Column(Integer, primary_key=True)
health_region = Column(String(5), primary_key=True)
hierarchy_level = Column(String(2), primary_key=True)
hidden = Column(Boolean)
@classmethod
def dimensions(cls):
return [
'year',
'region',
'mesoregion',
'microregion',
'state',
'municipality',
'occupation_family',
'occupation_group',
'establishment',
'unit_type',
'cns_number',
'professional_link',
            'sus_healthcare_professional',
'health_region',
'hierarchy_level',
]
@classmethod
def aggregate(cls, value):
return {
'professionals': func.count(),
'other_hours_worked': func.sum(cls.other_hours_worked),
'hospital_hour': func.sum(cls.hospital_hour),
'ambulatory_hour': func.sum(cls.ambulatory_hour),
}[value]
@classmethod
def values(cls):
        return ['professionals', 'other_hours_worked', 'hospital_hour', 'ambulatory_hour']
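# Illustrative sketch (not part of the original model): how dimensions() and
# aggregate() are typically combined into a grouped query. The function name and
# the chosen dimensions are assumptions made for this example only; it assumes a
# Flask-SQLAlchemy session is available through the `db` object imported above.
def example_professionals_by_state():
    group_columns = [getattr(CnesProfessional, name) for name in ('year', 'state')]
    entities = group_columns + [CnesProfessional.aggregate('professionals')]
    return db.session.query(*entities).group_by(*group_columns)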
|
doctori/PythonTDD
|
functional_tests/base.py
|
Python
|
gpl-2.0
| 1,832
| 0.027293
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
import sys
from .server_tools import reset_database
class FunctionalTest(StaticLiveServerTestCase):
@classmethod
def setUpClass(cls):
for arg in sys.argv:
if 'liveserver' in arg:
cls.server_host = arg.split('=')[1]
cls.server_url = 'http://' + cls.server_host
cls.against_staging = True
return
super().setUpClass()
cls.against_staging = False
cls.server_url = cls.live_server_url
@classmethod
def tearDownClass(cls):
if not cls.against_staging:
super().tearDownClass()
def setUp(self):
if self.against_staging:
reset_database(self.server_host)
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def get_item_input_box(self):
return self.browser.find_element_by_id('id_text')
def check_for_row_in_list_table(self, row_text):
table = self.browser.find_element_by_id('id_list_table')
rows = table.find_elements_by_tag_name('tr')
self.assertIn(row_text, [row.text for row in rows])
def wait_for_element_with_id(self, element_id):
        WebDriverWait(self.browser, timeout=30).until(
            lambda b: b.find_element_by_id(element_id),
'Could not find element with id {}. Page text was:\n{}'.format(
element_id, self.browser.find_element_by_tag_name('body').text
)
)
def wait_to_be_logged_in(self, email):
self.wait_for_element_with_id('id_logout')
navbar = self.browser.find_element_by_css_selector('.navbar')
self.assertIn(email, navbar.text)
def wait_to_be_logged_out(self, email):
self.wait_for_element_with_id('id_login')
navbar = self.browser.find_element_by_css_selector('.navbar')
self.assertNotIn(email, navbar.text)
|
vmahuli/contrail-controller
|
src/vnsw/opencontrail-vrouter-netns/opencontrail_vrouter_netns/tests/test_vrouter_netns.py
|
Python
|
apache-2.0
| 3,842
| 0
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2014 Cloudwatt
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Edouard Thuleau, Cloudwatt.
import mock
import netaddr
import requests
import unittest
import uuid
from opencontrail_vrouter_netns.vrouter_netns import NetnsManager
NIC1_UUID = str(uuid.uuid4())
NIC1 = {'uuid': NIC1_UUID,
'mac': netaddr.EUI('00:11:22:33:44:55'),
'ip': netaddr.IPNetwork('172.16.0.12/24')}
NIC2_UUID = str(uuid.uuid4())
NIC2 = {'uuid': NIC2_UUID,
'mac': netaddr.EUI('66:77:88:99:aa:bb'),
'ip': netaddr.IPNetwork('80.0.0.123/29')}
class NetnsManagerTest(unittest.TestCase):
def setUp(self):
self.ip_cls_p = mock.patch('opencontrail_vrouter_netns.linux.ip_lib.'
'IPWrapper')
self.ip_cls_p.start()
self.post_p = mock.patch('requests.post')
self.mock_post = self.post_p.start()
self.delete_p = mock.patch('requests.delete')
self.mock_delete = self.delete_p.start()
def tearDown(self):
self.ip_cls_p.stop()
self.post_p.stop()
self.delete_p.stop()
def _add_port_to_agent(self, status_code=200):
self.netns_mgr = NetnsManager('fake_vm_uuid', NIC1, NIC2)
self.netns_mgr.vrouter_client = mock.Mock()
self.netns_mgr._get_tap_name = mock.Mock()
self.netns_mgr._get_tap_name.return_value = 'tap1234'
resp = requests.Response()
resp.status_code = status_code
self.mock_post.return_value = resp
self.netns_mgr._add_port_to_agent(NIC1)
def test_add_port_to_agent(self):
self._add_port_to_agent()
self.mock_post.assert_called_with(
'http://localhost:9091/port',
headers={'content-type': 'application/json'},
data=('{"tx-vlan-id": -1, '
'"ip-address": "172.16.0.12", '
'"display-name": null, '
'"id": "%s", '
'"instance-id": "fake_vm_uuid", '
'"ip6-address": "", '
'"rx-vlan-id": -1, '
'"vn-id": "", '
'"vm-project-id": "", '
'"type": 1, '
'"mac-address": "00-11-22-33-44-55", '
'"system-name": "tap1234"}') % NIC1_UUID)
def test_add_port_to_agent_fails(self):
self.assertRaises(ValueError,
self._add_port_to_agent,
500)
def _delete_port_from_agent(self, status_code=200):
self.netns_mgr = NetnsManager('fake_vm_uuid', NIC1, NIC2)
self.netns_mgr.vrouter_client = mock.Mock()
resp = requests.Response()
resp.status_code = status_code
self.mock_delete.return_value = resp
self.netns_mgr._delete_port_to_agent(NIC1)
def test_delete_port_from_agent(self):
self._delete_port_from_agent()
self.mock_delete.assert_called_with(
'http://localhost:9091/port/%s' % NIC1_UUID,
headers={'content-type': 'application/json'},
data=None)
def test_delete_port_to_agent_fails(self):
self.assertRaises(ValueError,
self._delete_port_from_agent,
500)
|
barnabytprowe/great3-public
|
inputs/galdata/make_fits_catalogs.py
|
Python
|
bsd-3-clause
| 15,061
| 0.007503
|
# Copyright (c) 2014, the GREAT3 executive committee (http://www.great3challenge.info/?q=contacts)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted
# provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to
# endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for making catalogs of galaxy fit data corresponding to a real galaxy training set used by
GalSim. It has to collect information from several large files."""
import pyfits
import numpy as np
# Define filenames, etc.
galsim_catfile = 'real_galaxy_catalog_23.5.fits'
fit_catfiles = ['BRIGHTtotalRAW00000.26113.fits',
'totalRAW00000.29949.fits.gz']
n_catfiles = len(fit_catfiles)
cosmos_catfile = 'lensing14.fits.gz'
out_fitfile = 'real_galaxy_catalog_23.5_fits.fits'
out_catfile = 'real_galaxy_catalog_23.5.fits'
# Read in real galaxy catalog.
galsim_cat = pyfits.getdata(galsim_catfile)
n_galsim_cat = len(galsim_cat)
print 'Read in ',n_galsim_cat,' from GalSim catalog ',galsim_catfile
galsim_ident = galsim_cat.field('ident')
# Fields: ('IDENT', 'RA', 'DEC', 'MAG', 'BAND', 'WEIGHT', 'GAL_FILENAME', 'PSF_FILENAME', 'GAL_HDU',
# 'PSF_HDU', 'PIXEL_SCALE', 'NOISE_MEAN', 'NOISE_VARIANCE')
# Read in the full COSMOS catalog.
cosmos_cat = pyfits.getdata(cosmos_catfile)
n_cosmos_cat = len(cosmos_cat)
print 'Read in ',n_cosmos_cat,' from COSMOS catalog ',cosmos_catfile
# Fields: ('IDENT', 'MAG_AUTO', 'FLUX_AUTO', 'MAGERR_AUTO', 'FLUX_RADIUS', 'FLUXERR_AUTO',
# 'KRON_RADIUS', 'MU_MAX', 'MU_CLASS', 'CLEAN', 'GOOD', 'FLAGS', 'SN', 'SN_NON_CORR', 'FWHM_IMAGE',
# 'ALPHA_J2000', 'DELTA_J2000', 'X_IMAGE', 'Y_IMAGE', 'A_IMAGE', 'B_IMAGE', 'THETA_IMAGE',
# 'PETRO_RADIUS', 'RRG_XX', 'RRG_YY', 'XXC', 'YYC', 'XYC', 'D', 'E1_R', 'E2_R', 'E1_RU', 'E2_RU',
# 'GAMMA1', 'GAMMA2', 'FOCUS_MODEL', 'IXX', 'IYY', 'IXY', 'WEIGHT_FUNCT_RADIUS', 'VAR_E1', 'VAR_E2',
# 'BOX', 'SPECZ', 'SPECZ_MARA', 'SPECZ_CLASS', 'SPECZ_ORIGIN', 'GOOD_SPECZ', 'SPECZ_BL_AGN',
# 'SPECZ_SELECTION', 'MIPS_Z', 'MIPS_LOG_L', 'MIPS_MASS', 'ZEST_TYPE', 'ZEST_BULGE',
# 'ZEST_IRREGULARITY', 'ZEST_ELONGATION', 'ZEST_GINI', 'ZEST_M20', 'ZEST_CONCENTRATION',
# 'ZEST_ASYMMETRY', 'BULGE', 'KT', 'OLD_ZPHOT', 'OLD_GOOD_ZPHOT', 'HL_KPC', 'MARA_AGN',
# 'MARA_AGN_ZPHOT', 'MARA_AGN_ZPHOT_LOW68', 'MARA_AGN_ZPHOT_HIGH68', 'KNUD_AGN', 'G1_TS', 'G2_TS',
# 'WEIGHT_TS', 'CHANDRA_GOOD', 'CHANDRA_AGN', 'CHANDRA_LX_HARD', 'CHANDRA_LX_SOFT',
# 'CHANDRA_LX_FULL', 'CHANDRA_ZETA', 'CHANDRA_ZSPEC', 'CHANDRA_CLASSZSPEC', 'CHANDRA_MODEL',
# 'CHANDRA_XMM_ID', 'XMM_GOOD', 'XMM_AGN', 'XMM_LX_HARD', 'XMM_LX_SOFT', 'XMM_LX_FULL', 'XMM_ZETA',
# 'XMM_ZSPEC', 'XMM_CLASSZSPEC', 'XMM_MODEL', 'XMM_CHANDRA_ID', 'EZE_AGN_SPECZ', 'EZE_AGN_PHOTOZ',
# 'EZE_LX', 'EZE_HR', 'EZE_SPECZ', 'EZE_PHOTOZ', 'K_CFHT', 'MATCH_CFHT', 'ERR_K_CFHT',
# 'KEVIN_MSTAR', 'KEVIN_MSTAR2', 'KEVIN_MASSERR', 'OLIV_MSTAR', 'MVIR', 'COLOR', 'TYPE2_ZPHOT_MARA',
# 'PETER_PASSIVE', 'PETER_ANGLE_PA', 'PETER_ELLIP', 'PHOTOZ_ORDER', 'PHOTOZ_NON_COMB',
# 'PHOTOZ_NON_COMB_LOW_68', 'PHOTOZ_NON_COMB_HIGH_68', 'PBZK', 'PBZK_ZPHOT', 'PBZK_MK', 'PBZK_MASS',
# 'SIGNALTONOISERATIO', 'QUASIPETROSIANAREAFRACTION', 'QUASIPETROSIANFRACTION', 'AXISRATIO', 'GINI',
# 'CONCENTRATION', 'BOB_E', 'BOB_GOOD', 'BOB_S0', 'FLUX_GIM2D', 'R_GIM2D', 'ELL_GIM2D', 'PA_GIM2D',
# 'DX_GIM2D', 'DY_GIM2D', 'SERSIC_N_GIM2D', 'R_0P5_GIM2D', 'CHI_GIM2D', 'CECILE_SL_Z',
# 'CECILE_SL_SAT', 'CECILE_SL', 'CECILE_SL_FLAG1', 'CECILE_SL_FLAG2', 'ISOLATED', 'BCG_SCALE',
# 'BCG_R200', 'ALL_P_MEM', 'ALL_GROUP_ID', 'N_GROUP_OVERLAP', 'BEST_P_MEM', 'BEST_GROUP_ID',
# 'ZPHOT', 'TYPE', 'ZPDF', 'PHOTZ_LOW_68', 'PHOTZ_HIGH_68', 'CHI', 'MODD', 'EBV', 'NBFILT',
# 'ZMINCHI2', 'ZL68_MINCHI2', 'ZU68_MINCHI2', 'ZP2', 'CHI2', 'NUV', 'U', 'SUBARU_R', 'SUBARU_I',
# 'J_WFCAM', 'K_WIRCAM', 'M36', 'DNUV', 'DU', 'DJ_WFCAM', 'DK_WIRCAM', 'DM36', 'AUTO_OFFSET',
# 'AUTO_FLAG', 'MNUV', 'MU', 'MB', 'MV', 'MG', 'MR', 'MI', 'MJ', 'MK', 'MNUV_MR', 'SFR_MED',
# 'STR_INF', 'SFR_SUP', 'SSFR_MED', 'SSFR_INF', 'SSFR_SUP', 'MATCH_S', 'MASK_S', 'GOOD_ZPHOT_LENS',
# 'GOOD_ZPHOT_SOURCE')
# That's a lot of info, so let's just pick out the things we care about: galaxy identifier, apparent
# magnitude, size, photo-z.
cos_ident = cosmos_cat.field('ident')
cos_mag_auto = cosmos_cat.field('mag_auto')
cos_flux_rad = cosmos_cat.field('flux_radius')
cos_zphot = cosmos_cat.field('zphot')
# Read in catalogs with fit parameters from Lackner & Gunn.
print "Reading in catalogs of fit parameters"
n_fit_tot = 0
for i_cat in range(n_catfiles):
# Get this catalog
dat = pyfits.getdata(fit_catfiles[i_cat])
n = len(dat)
print "Read in ",n," fit results from file ",fit_catfiles[i_cat]
# Just extract the columns we want, and append to previous if i_cat!=0.
if i_cat == 0:
fit_ident = dat.field('ident')
fit_sersicfit = dat.field('sersicfit')
fit_bulgefit = dat.field('bulgefit')
fit_status = dat.field('mpfit_status')
fit_mag_auto = dat.field('mag_auto')
fit_mad_s = dat.field('mad_sersic_mask')
fit_mad_b = dat.field('mad_dvcb_mask')
fit_dvc_btt = dat.field('dvc_btt')
if i_cat > 0:
fit_ident = np.append(fit_ident, dat.field('galid'))
fit_sersicfit = np.append(fit_sersicfit, dat.field('sersicfit'), axis=0)
fit_bulgefit = np.append(fit_bulgefit, dat.field('bulgefit'), axis=0)
fit_status = np.append(fit_status, dat.field('mpfit_status'), axis=0)
fit_mag_auto = np.append(fit_mag_auto, np.zeros_like(dat.field('galid')), axis=0)
fit_mad_s = np.append(fit_mad_s, dat.field('mad_sersic_mask'), axis=0)
fit_mad_b = np.append(fit_mad_b, dat.field('mad_dvcb_mask'), axis=0)
fit_dvc_btt = np.append(fit_dvc_btt, dat.field('dvc_btt'), axis=0)
# Increment counter.
n_fit_tot += n
# Unfortunately, the files do not have the same column names. Here are their contents -
# Fields in first file: ('IDENT', 'MAG_AUTO', 'FLUX_AUTO', 'MAGERR_AUTO', 'FLUX_RADIUS',
# 'FLUXERR_AUTO', 'KRON_RADIUS', 'MU_MAX', 'MU_CLASS', 'CLEAN', 'GOOD', 'FLAGS', 'SN',
# 'SN_NON_CORR', 'FWHM_IMAGE', 'ALPHA_J2000', 'DELTA_J2000', 'X_IMAGE', 'Y_IMAGE', 'A_IMAGE',
# 'B_IMAGE', 'THETA_IMAGE', 'PETRO_RADIUS', 'D', 'E1_R', 'E2_R', 'E1_RU', 'E2_RU', 'GAMMA1',
# 'GAMMA2', 'FOCUS_MODEL', 'IXX', 'IYY', 'IXY', 'WEIGHT_FUNCT_RADIUS', 'VAR_E1', 'VAR_E2',
# 'BOX', 'SPECZ', 'SPECZ_MARA', 'SPECZ_CLASS', 'SPECZ_ORIGIN', 'GOOD_SPECZ', 'SPECZ_BL_AGN',
# 'FORS2_OBJECT_FLAG', 'MIPS_Z', 'MIPS_LOG_L', 'MIPS_MASS', 'ZEST_TYPE', 'ZEST_BULGE',
# 'ZEST_IRREGULARITY', 'ZEST_ELONGATION', 'ZEST_GINI', 'ZEST_M20', 'ZEST_CONCENTRATION',
# 'ZEST_ASYMMETRY', 'BULGE', 'KT', 'OLD_ZPHOT', 'OLD_GOOD_ZPHOT', 'HL_KPC', 'CHANDRA_GOOD',
# 'CHANDRA_AGN', 'CHANDRA_LX_HARD', 'CHANDRA_LX_SOFT', 'CHANDRA_LX_FULL', 'CHANDRA_ZETA',
# 'CHANDRA_ZSPEC', 'CHANDRA_CLASSZS
|
nicfit/nicfit.py
|
cookiecutter/{{cookiecutter.project_name}}/{{cookiecutter.py_module}}/__main__.py
|
Python
|
mit
| 614
| 0.061889
|
# -*- coding: utf-8 -*-
{%- if cookiecutter.pyapp_type == "asyncio" -%}
{%- set async = "async" -%}
{%- set appmod = "nicfit.aio" -%}
{%- else -%}
{%- set async = "" -%}
{%- set appmod = "nicfit" -%}
{%- endif %}
from {{ appmod }} import Application
from nicfit.console import pout
from . import version
{{ async }} def main(args):
pout("\m/")
app = Application(main, version=version,
{%- if cookiecutter.gettext_domain != "None" %}
gettext_domain="{{ cookiecutter.gettext_domain }}")
{% else %}
gettext_domain=None)
{% endif %}
if __name__ == "__main__":
app.run()
|
ai-se/Tree-Learner
|
CROSSTREES.py
|
Python
|
unlicense
| 7,714
| 0.012315
|
#! /Users/rkrsn/anaconda/bin/python
from __future__ import print_function
from __future__ import division
from os import environ, getcwd
from pdb import set_trace
from random import uniform, randint, shuffle
import sys
# Update PYTHONPATH
HOME = environ['HOME']
axe = HOME + '/git/axe/axe/' # AXE
pystat = HOME + '/git/pystat/' # PySTAT
cwd = getcwd() # Current Directory
sys.path.extend([axe, pystat, cwd])
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.tree import DecisionTreeClassifier
from collections import Counter
from Prediction import *
from _imports import *
from abcd import _Abcd
from cliffsDelta import *
# from contrastset import *
# from dectree import *
from hist import *
from smote import *
import makeAmodel as mam
from methods1 import *
import numpy as np
import pandas as pd
import sk
class deltas():
def __init__(self, row, myTree, majority=True):
self.row = row
self.loc = drop(row, myTree)
self.contrastSet = None
self.newRow = row
self.score = self.scorer(self.loc)
def scorer(self, node):
return mean([r.cells[-2] for r in node.rows])
def createNew(self, stuff, keys, N=1):
newElem = []
tmpRow = self.row
for _ in xrange(N):
for s in stuff:
lo, hi = s[1]
pos = keys[s[0].name]
tmpRow.cells[pos] = float(max(lo, min(hi, lo + rand() * abs(hi - lo))))
newElem.append(tmpRow)
return newElem
def patches(self, keys, N_Patches=10):
# Search for the best possible contrast set and apply it
isles = []
newRow = self.row
for stuff in self.contrastSet:
isles.append(self.createNew(stuff, keys, N=N_Patches))
return isles
class store():
def __init__(self, node, majority=False):
self.node = node
self.dist = 0
self.DoC = 0
self.majority = majority
self.score = self.scorer(node)
def minority(self, node):
unique = list(set([r.cells[-1] for r in node.rows]))
counts = len(unique) * [0]
# set_trace()
for n in xrange(len(unique)):
for d in [r.cells[-1] for r in node.rows]:
if unique[n] == d:
counts[n] += 1
return unique, counts
def scorer(self, node):
if self.majority:
unq, counts = self.minority(node)
id, maxel = 0, 0
for i, el in enumerate(counts):
if el > maxel:
maxel = el
id = i
return mean([r.cells[-2] for r in node.rows if r.cells[-1] == unq[id]])
else:
return mean([r.cells[-2] for r in node.rows])
class xtrees():
"Treatments"
def __init__(self, train=None, test=None, test_DF=None,
verbose=True, smoteit=True, bin=False):
self.train, self.test = train, test
self.train_DF = createTbl(train, _smote=smoteit, isBin=bin)
if not test_DF:
self.test_DF = createTbl(test, isBin=bin)
else:
self.test_DF = test_DF
self.verbose, self.smoteit = verbose, smoteit
self.mod, self.keys = [], self.getKey()
def flatten(self, x):
"""
Takes an N times nested list of list like [[a,b],[c, [d, e]],[f]]
and returns a single list [a,b,c,d,e,f]
"""
result = []
for el in x:
if hasattr(el, "__iter__") and not isinstance(el, basestring):
result.extend(self.flatten(el))
else:
result.append(el)
return result
def leaves(self, node):
"""
Returns all terminal nodes.
"""
L = []
if len(node.kids) > 1:
for l in node.kids:
L.extend(self.leaves(l))
return L
elif len(node.kids) == 1:
return [node.kids]
else:
return [node]
def scorer(self, node):
"""
    Score a leaf node
"""
return mean([r.cells[-2] for r in node.rows])
def isBetter(self, me, others):
"""
Compare [me] with a bunch of [others,...], return the best person
"""
for notme in others:
# if '%.2f' % self.scorer(notme) == 0:
if self.scorer(notme) < self.scorer(me):
return True, notme.branch
else:
return False, []
def attributes(self, nodes):
"""
A method to handle unique branch variables that characterizes
a bunch of nodes.
"""
xx = []
attr = []
def seen(x):
xx.append(x)
for node in nodes:
if not node.node.branch in xx:
attr.append(node.node.branch)
seen(node.node.branch)
return attr
def finder2(self, node, alpha=0.5, pos='far'):
"""
finder2 is a more elegant version of finder that performs a search on
the entire tree to find leaves which are better than a certain 'node'
"""
euclidDist = lambda a, b: ((b[0] - a[0]) ** 2 + (b[1] - a[1]) ** 2) ** 0.5
midDist = lambda a, b: abs(sum(b) - sum(a)) / 2
vals = []
current = store(node) # Store current sample
while node.lvl > -1:
node = node.up # Move to tree root
# Get all the terminal nodes
leaves = self.flatten([self.leaves(_k) for _k in node.kids])
for leaf in leaves:
l = store(leaf)
for b in leaf.branch:
dist = []
if b[0] in [bb[0] for bb in current.node.branch]:
l.DoC += 1
dist.extend([midDist(b[1], bb[1])
                         for bb in current.node.branch if b[0] == bb[0]])
l.dist = np.sqrt(np.sum(dist))
vals.append(l)
vals = sorted(vals, key=lambda F: F.DoC, reverse=False)
best = [v for v in vals if v.score < alpha * current.score]
if not len(best) > 0:
best = vals
# Get a list of DoCs (DoC -> (D)epth (o)f (C)orrespondence, btw..)
# set_trace()
attr = {}
bests = {}
    unq = sorted(list(set([v.DoC for v in best]))) # A list of all DoCs..
for dd in unq:
bests.update(
{dd: sorted([v for v in best if v.DoC == dd], key=lambda F: F.dist)})
attr.update({dd: self.attributes(
sorted([v for v in best if v.DoC == dd], key=lambda F: F.score))})
if pos == 'near':
return attr[unq[-1]][0]
elif pos == 'far':
return attr[unq[0]][-1]
def getKey(self):
keys = {}
for i in xrange(len(self.test_DF.headers)):
keys.update({self.test_DF.headers[i].name[1:]: i})
return keys
def main(self):
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
# Main
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
# Decision Tree
t = discreteNums(self.train_DF, map(lambda x: x.cells, self.train_DF._rows))
myTree = tdiv(t)
# Testing data
testCase = self.test_DF._rows
for tC in testCase:
newRow = tC
node = deltas(newRow, myTree) # A delta instance for the rows
if newRow.cells[-2] == 0:
node.contrastSet = []
self.mod.append(node.newRow)
else:
node.contrastSet = [self.finder2(node.loc, pos='near')]
# Now generate 1 potential patch
patch = node.patches(self.keys, N_Patches=5)
# Shuffle
shuffle(patch)
p = patch.pop()
tmpTbl = clone(self.test_DF,
rows=[k.cells for k in p],
discrete=True)
self.mod.append(choice(tmpTbl._rows))
# <<<<<<<<<<< Debug >>>>>>>>>>>>>>>
# set_trace()
return clone(self.test_DF, rows=[k.cells for k in self.mod], discrete=True)
def _planningTest():
# Test contrast sets
n = 0
Dir = 'Data/'
one, two = explore(Dir)
# Training data
_ = xtrees(train=one[n],
test=two[n],
verbose=True,
smoteit=False).main()
# <<<<<<<<<<< Debug >>>>>>>>>>>>>>>
set_trace()
if __name__ == '__main__':
_planningTest()
|
rglass/buddha-bum
|
listman.py
|
Python
|
gpl-3.0
| 711
| 0.001406
|
# Copyright (c) 2007 Roman Glass
# See LICENSE for details.
"""Play around with different SortingAlgorithms"""
from zope.interface import Interface, implements
from interfaces import IListManager
from baselist import *
from sortingalgorithms import *
from help import *
class ListM:
implements(IListManager)
def __init__(self):
        self.n = NullList()
        # keep the sorting strategy on its own attribute so it does not shadow
        # the sort() method below
        self.sorter = SelectionSortV
    def add(self, o):
        self.n = NonNull(o, self.n)
    def diff(self, o):
        self.n = self.n.accept(DiffV(o))
    def parse(self):
        return self.n.accept(ParseV())
    def sort(self):
        self.n = self.n.accept(self.sorter())
    def changeSort(self, o):
        self.sorter = o
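# Illustrative usage sketch (not part of the original module): build a list,
# sort it with the configured strategy, and read it back. Assumes NullList,
# NonNull and the sorting visitors behave as their names suggest; the helper is
# defined only for illustration and is not executed on import.
def _listman_usage_sketch():
    lm = ListM()
    for value in (3, 1, 2):
        lm.add(value)
    lm.sort()            # applies the current sorting visitor
    return lm.parse()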
|
anokata/pythonPetProjects
|
surgame/src/mapGenerator.py
|
Python
|
mit
| 1,097
| 0.004558
|
import caveGenerate
import yaml
filename = 'data/cavegen.map'
def addObjects():
o = list()
o.append('floor')
o.append('ground')
o.append('apple')
o.append('cherry')
o.append('enemy_poringp')
o.append('enemy_poringb')
return o
def writeMap(fn, m):
data = yaml.dump(m, default_flow_style=False)
with open(fn, 'wt') as fout:
print(data)
fout.write(data)
def genMap():
m = dict()
genmap = caveGenerate.gen(50, 5)
caveGenerate.addObjectsRandom(genmap, 'A', 16)
caveGenerate.addObjectsRandom(genmap, 'C', 16)
caveGenerate.addObjectsRandom(genmap, 'G', 16)
caveGenerate.addObjectsRandom(genmap, 'B', 16)
    floor = caveGenerate.init_matrix(50, caveGenerate.FLOOR)
map_str = caveGenerate.drawString(genmap)
map_floor = caveGenerate.drawString(floor)
m['layers_count'] = 2
m['layers'] = list()
m['layers'].append(map_floor)
m['layers'].append(map_str)
m['objects'] = addObjects()
return m
if __name__ == '__main__':
m = genMap()
print(m)
writeMap('../data/cavegen.yaml', m)
|
huaijiefeng/contact-sync-server
|
contactsync/settings.py
|
Python
|
apache-2.0
| 5,270
| 0.00038
|
# Django settings for ssssss project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
import os
if 'SERVER_SOFTWARE' in os.environ:
from sae.const import (
MYSQL_HOST, MYSQL_PORT, MYSQL_USER, MYSQL_PASS, MYSQL_DB
)
else:
# Make `python manage.py syncdb` works happy!
MYSQL_HOST = 'localhost'
MYSQL_PORT = '3306'
MYSQL_USER = 'root'
MYSQL_PASS = 'root'
MYSQL_DB = 'app_pylabs'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': MYSQL_DB,
'USER': MYSQL_USER,
'PASSWORD': MYSQL_PASS,
'HOST': MYSQL_HOST,
'PORT': MYSQL_PORT,
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.4/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'pg95(hjk#kjll4p%g)5+%4=$ra_%+9kf7@)8co=7=)%7t$$6%f'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'contactsync.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'contactsync.wsgi.application'
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), '..', 'templates').replace('\\', '/'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
|
jonbrohauge/pySudokuSolver
|
board.py
|
Python
|
mit
| 1,589
| 0.001259
|
class Board(object):
"""This class defines the board"""
def __init__(self, board_size):
"""The initializer for the class"""
self.board_size = board_size
self.board = []
for index in range(0, self.board_size):
self.board.append(['0'] * self.board_size)
def is_on_board(self, x_coordinate, y_coordinate):
"""Is the piece on the board"""
return bool((0 <= x_coordinate < self.board_size) and (0 <= y_coordinate < self.board_size))
def place_piece(self, x_coordinate, y_coordinate, value):
"""Place a piece on the board"""
try:
if not self.is_on_board(x_coordinate, y_coordinate):
                raise ValueError('not_on_board')  # ValueError so the handler below catches it
if self.is_piece_set(x_coordinate, y_coordinate):
                raise ValueError('piece_is_set')
self.update_cell(x_coordinate, y_coordinate, value)
except ValueError as err:
print(err.args)
def update_cell(self, x_coordinate, y_coordinate, value):
"""Update the placement of the piece on the board"""
self.board[(y_coordinate-1)].insert((x_coordinate-1), str(value))
self.board[(y_coordinate-1)].pop(x_coordinate)
def print_board(self):
"""Print the board"""
for row in self.board:
print(" ".join(row))
def is_piece_set(self, x_coordinate, y_coordinate):
"""Check to see if a piece is set """
if self.is_on_board(x_coordinate, y_coordinate):
            return bool(str(self.board[(y_coordinate-1)][(x_coordinate-1)]) != '0')
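# Illustrative usage sketch (not part of the original module): build a small
# board, place one value and print the result. Guarded so importing the module
# stays side-effect free.
if __name__ == '__main__':
    demo = Board(4)
    demo.place_piece(2, 3, 7)
    demo.print_board()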
|
oleg-chubin/let_me_play
|
let_me_app/migrations/0004_auto_20150713_2126.py
|
Python
|
apache-2.0
| 848
| 0.001179
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
class Migration(migrations.Migration):
dependencies = [
('let_me_app', '0003_auto_20150713_2051'),
]
operations = [
migrations.RemoveField(
model_name='site',
name='location',
),
        migrations.RemoveField(
model_name='site',
name='location_lat',
),
migrations.RemoveField(
model_name='site',
name='location_lon',
),
migrations.AddField(
model_name='site',
name='geo_point',
field=django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326),
preserve_default=True,
),
]
|
miniconfig/home-assistant
|
homeassistant/components/android_ip_webcam.py
|
Python
|
mit
| 9,809
| 0
|
"""
Support for IP Webcam, an Android app that acts as a full-featured webcam.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/android_ip_webcam/
"""
import asyncio
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.const import (
CONF_NAME, CONF_HOST, CONF_PORT, CONF_USERNAME, CONF_PASSWORD,
CONF_SENSORS, CONF_SWITCHES, CONF_TIMEOUT, CONF_SCAN_INTERVAL,
CONF_PLATFORM)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_send, async_dispatcher_connect)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
from homeassistant.components.camera.mjpeg import (
CONF_MJPEG_URL, CONF_STILL_IMAGE_URL)
DOMAIN = 'android_ip_webcam'
REQUIREMENTS = ["pydroid-ipcam==0.4"]
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=10)
DATA_IP_WEBCAM = 'android_ip_webcam'
ATTR_HOST = 'host'
ATTR_VID_CONNS = 'Video Connections'
ATTR_AUD_CONNS = 'Audio Connections'
KEY_MAP = {
'audio_connections': 'Audio Connections',
'adet_limit': 'Audio Trigger Limit',
'antibanding': 'Anti-banding',
'audio_only': 'Audio Only',
'battery_level': 'Battery Level',
'battery_temp': 'Battery Temperature',
'battery_voltage': 'Battery Voltage',
'coloreffect': 'Color Effect',
'exposure': 'Exposure Level',
'exposure_lock': 'Exposure Lock',
'ffc': 'Front-facing Camera',
'flashmode': 'Flash Mode',
'focus': 'Focus',
'focus_homing': 'Focus Homing',
'focus_region': 'Focus Region',
'focusmode': 'Focus Mode',
'gps_active': 'GPS Active',
'idle': 'Idle',
'ip_address': 'IPv4 Address',
'ipv6_address': 'IPv6 Address',
'ivideon_streaming': 'Ivideon Streaming',
'light': 'Light Level',
'mirror_flip': 'Mirror Flip',
'motion': 'Motion',
'motion_active': 'Motion Active',
'motion_detect': 'Motion Detection',
'motion_event': 'Motion Event',
'motion_limit': 'Motion Limit',
'night_vision': 'Night Vision',
'night_vision_average': 'Night Vision Average',
'night_vision_gain': 'Night Vision Gain',
'orientation': 'Orientation',
'overlay': 'Overlay',
'photo_size': 'Photo Size',
'pressure': 'Pressure',
'proximity': 'Proximity',
'quality': 'Quality',
'scenemode': 'Scene Mode',
'sound': 'Sound',
'sound_event': 'Sound Event',
'sound_timeout': 'Sound Timeout',
'torch': 'Torch',
'video_connections': 'Video Connections',
'video_chunk_len': 'Video Chunk Length',
'video_recording': 'Video Recording',
'video_size': 'Video Size',
'whitebalance': 'White Balance',
'whitebalance_lock': 'White Balance Lock',
'zoom': 'Zoom'
}
ICON_MAP = {
'audio_connections': 'mdi:speaker',
'battery_level': 'mdi:battery',
'battery_temp': 'mdi:thermometer',
'battery_voltage': 'mdi:battery-charging-100',
'exposure_lock': 'mdi:camera',
'ffc': 'mdi:camera-front-variant',
'focus': 'mdi:image-filter-center-focus',
'gps_active': 'mdi:crosshairs-gps',
'light': 'mdi:flashlight',
'motion': 'mdi:run',
'night_vision': 'mdi:weather-night',
'overlay': 'mdi:monitor',
'pressure': 'mdi:gauge',
'proximity': 'mdi:map-marker-radius',
'quality': 'mdi:quality-high',
'sound': 'mdi:speaker',
'sound_event': 'mdi:speaker',
'sound_timeout': 'mdi:speaker',
'torch': 'mdi:white-balance-sunny',
'video_chunk_len': 'mdi:video',
'video_connections': 'mdi:eye',
'video_recording': 'mdi:record-rec',
'whitebalance_lock': 'mdi:white-balance-auto'
}
SWITCHES = ['exposure_lock', 'ffc', 'focus', 'gps_active', 'night_vision',
'overlay', 'torch', 'whitebalance_lock', 'video_recording']
SENSORS = ['audio_connections', 'battery_level', 'battery_temp',
'battery_voltage', 'light', 'motion', 'pressure', 'proximity',
'sound', 'video_connections']
SIGNAL_UPDATE_DATA = 'android_ip_webcam_update'
CONF_MOTION_SENSOR = 'motion_sensor'
DEFAULT_NAME = 'IP Webcam'
DEFAULT_PORT = 8080
DEFAULT_TIMEOUT = 10
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.All(cv.ensure_list, [vol.Schema({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL):
cv.time_period,
vol.Inclusive(CONF_USERNAME, 'authentication'): cv.string,
        vol.Inclusive(CONF_PASSWORD, 'authentication'): cv.string,
vol.Optional(CONF_SWITCHES, default=None):
vol.All(cv.ensure_list, [vol.In(SWITCHES)]),
vol.Optional(CONF_SENSORS, default=None):
vol.All(cv.ensure_list, [vol.In(SENSORS)]),
vol.Optional(CONF_MOTION_SENSOR, default=None): cv.boolean,
})])
}, extra=vol.ALLOW_EXTRA)
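# Illustrative configuration sketch (not part of the original component): a
# minimal configuration.yaml entry that satisfies CONFIG_SCHEMA above. The host
# and entity choices are placeholders, not recommendations.
#
# android_ip_webcam:
#   - host: 192.168.1.30
#     name: Hallway webcam
#     sensors:
#       - battery_level
#       - light
#     switches:
#       - torch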
@asyncio.coroutine
def async_setup(hass, config):
"""Setup the IP Webcam component."""
from pydroid_ipcam import PyDroidIPCam
webcams = hass.data[DATA_IP_WEBCAM] = {}
websession = async_get_clientsession(hass)
@asyncio.coroutine
def async_setup_ipcamera(cam_config):
"""Setup a ip camera."""
host = cam_config[CONF_HOST]
username = cam_config.get(CONF_USERNAME)
password = cam_config.get(CONF_PASSWORD)
name = cam_config[CONF_NAME]
interval = cam_config[CONF_SCAN_INTERVAL]
switches = cam_config[CONF_SWITCHES]
sensors = cam_config[CONF_SENSORS]
motion = cam_config[CONF_MOTION_SENSOR]
# init ip webcam
cam = PyDroidIPCam(
hass.loop, websession, host, cam_config[CONF_PORT],
username=username, password=password,
timeout=cam_config[CONF_TIMEOUT]
)
if switches is None:
switches = [setting for setting in cam.enabled_settings
if setting in SWITCHES]
if sensors is None:
sensors = [sensor for sensor in cam.enabled_sensors
if sensor in SENSORS]
sensors.extend(['audio_connections', 'video_connections'])
if motion is None:
motion = 'motion_active' in cam.enabled_sensors
@asyncio.coroutine
def async_update_data(now):
"""Update data from ipcam in SCAN_INTERVAL."""
yield from cam.update()
async_dispatcher_send(hass, SIGNAL_UPDATE_DATA, host)
async_track_point_in_utc_time(
hass, async_update_data, utcnow() + interval)
yield from async_update_data(None)
# load platforms
webcams[host] = cam
mjpeg_camera = {
CONF_PLATFORM: 'mjpeg',
CONF_MJPEG_URL: cam.mjpeg_url,
CONF_STILL_IMAGE_URL: cam.image_url,
CONF_NAME: name,
}
if username and password:
mjpeg_camera.update({
CONF_USERNAME: username,
CONF_PASSWORD: password
})
hass.async_add_job(discovery.async_load_platform(
hass, 'camera', 'mjpeg', mjpeg_camera, config))
if sensors:
hass.async_add_job(discovery.async_load_platform(
hass, 'sensor', DOMAIN, {
CONF_NAME: name,
CONF_HOST: host,
CONF_SENSORS: sensors,
}, config))
if switches:
hass.async_add_job(discovery.async_load_platform(
hass, 'switch', DOMAIN, {
CONF_NAME: name,
CONF_HOST: host,
CONF_SWITCHES: switches,
}, config))
if motion:
hass.async_add_job(discovery.async_load_platform(
|
pcejrowski/iwi
|
notebooks/calcs/rating.py
|
Python
|
mit
| 2,328
| 0.005155
|
import metrics
def rate(artToCatSim, label, membershipData, categoryTree, artNamesDict, catNamesDict):
# countBefore1 = artToCatSim.count_nonzero()
# for article in membershipData:
    # id_and_categories = article.split('\t')
# articleId = int(id_and_categories[0])
# del id_and_categories[0]
# cats = [int(x) for x in id_and_categories]
# for cat in cats:
# artToCatSim[articleId, cat] = 0
#
# artToCatSim.eliminate_zeros()
# countAfter1 = artToCatSim.count_nonzero()
# print('removed=' + str(countAfter1 - countBefore1) + ' and left=' + str(countAfter1))
    # raw_art_labels = map(lambda x: x.split(), data['po_slowach-articles_dict-simple-20120104'])
# art_labels = dict(map(lambda (x, y): [y, x], raw_art_labels))
#
# raw_cat_labels = map(lambda x: x.split(), data['po_slowach-cats_dict-simple-20120104'])
# cat_labels = dict(map(lambda (x, y): [y, x], raw_cat_labels))
# from sets import Set
# x = Set()
# y = Set()
#
#
# for t in topConnections:
# x.add(str(t[1]))
# y.add(str(t[0]))
#
# for t in topConnections:
# if t[1] != 18941 and t[1] < 13983:
# try:
# print(art_labels[str(t[0])] + ' - ' + cat_labels[str(t[1])])
# except:
# sys.stdout.write('.')
topConnections = getTopConnections(artToCatSim)
metrics.generateSampleForManualRating(topConnections, artNamesDict, catNamesDict, label)
manualMetric = metrics.fromManualRating(label)
print("Manual: {}".format(manualMetric))
childParentMetric = metrics.numberOfChildParentConnections(topConnections, categoryTree)
print("ChildParent: {}".format(childParentMetric))
exisitingConnsMetric = metrics.numberOfExistingConnections(artToCatSim, membershipData)
print("ExisitngConns: {}".format(exisitingConnsMetric))
variance = metrics.variance(artToCatSim)
print("Variance: {}".format(variance))
def getTopConnections(artToCatSim, number = None):
sorted = sort_coo(artToCatSim.tocoo())
if not number:
number = len(sorted)/10
top = sorted[-number:]
return top
def sort_coo(m):
from itertools import izip
tuples = izip(m.row, m.col, m.data)
return sorted(tuples, key=lambda x: (x[2]))
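# Illustrative sketch (not part of the original module): sort_coo orders the
# non-zero entries of a sparse matrix by value (ascending), so getTopConnections
# keeps the strongest article-to-category links. Assumes scipy is installed; the
# helper is defined only for illustration and is not called by the module.
def _sort_coo_example():
    from scipy.sparse import coo_matrix
    m = coo_matrix([[0.0, 0.9], [0.3, 0.0]])
    return sort_coo(m)  # roughly [(1, 0, 0.3), (0, 1, 0.9)]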
|
HybridF5/tempest_debug
|
tempest/tests/test_list_tests.py
|
Python
|
apache-2.0
| 1,828
| 0.001641
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import six
import subprocess
from tempest.tests.lib import base
class TestTestList(base.TestCase):
def test_testr_list_tests_no_errors(self):
# Remove unit test discover path from env to test tempest tests
test_env = os.environ.copy()
test_env.pop('OS_TEST_PATH')
import_failures = []
p = subprocess.Popen(['testr', 'list-tests'], stdout=subprocess.PIPE,
env=test_env)
ids, err = p.communicate()
self.assertEqual(0, p.returncode,
"test discovery failed, one or more files cause an "
"error on import %s" % ids)
ids = six.text_type(ids).split('\n')
for test_id in ids:
if re.match('(\w+\.){3}\w+', test_id):
if not test_id.startswith('tempest.'):
parts = test_id.partition('tempest')
fail_id = parts[1] + parts[2]
                    import_failures.append(fail_id)
error_message = ("The follow
|
ing tests have import failures and aren't"
" being run with test filters %s" % import_failures)
self.assertFalse(import_failures, error_message)
|
fedspendingtransparency/data-act-broker-backend
|
tests/unit/dataactvalidator/test_fabs28_detached_award_financial_assistance_1.py
|
Python
|
cc0-1.0
| 2,049
| 0.007321
|
from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory
from tests.unit.dataactvalidator.utils import number_of_errors, query_columns
_FILE = 'fabs28_detached_award_financial_assistance_1'
def test_column_headers(database):
expected_subset = {'row_number', 'assistance_type', 'face_value_loan_guarantee',
'uniqueid_AssistanceTransactionUniqueKey'}
actual = set(query_columns(_FILE, database))
assert expected_subset == actual
def test_success(database):
""" FaceValueOfDirectLoanOrLoanGuarantee is required for loans (i.e., when AssistanceType = 07
|
or 08). """
det_award = DetachedAwardFinancialAssistanceFactory(assistance_type='07', face_value_loan_guarantee=0,
correction_delete_indicatr='')
det_award_2 = DetachedAwardFinancialAssistanceFactory(assistance_type='08', face_value_loan_guarantee=20,
correction_delete_indicatr='c')
# Ignore correction delete indicator of D
    det_award_3 = DetachedAwardFinancialAssistanceFactory(assistance_type='07', face_value_loan_guarantee=None,
correction_delete_indicatr='d')
errors = number_of_errors(_FILE, database, models=[det_award, det_award_2, det_award_3])
assert errors == 0
def test_failure(database):
""" FaceValueOfDirectLoanOrLoanGuarantee is required for loans (i.e., when AssistanceType = 07 or 08). """
det_award = DetachedAwardFinancialAssistanceFactory(assistance_type='07', face_value_loan_guarantee=None,
correction_delete_indicatr=None)
det_award_2 = DetachedAwardFinancialAssistanceFactory(assistance_type='08', face_value_loan_guarantee=None,
correction_delete_indicatr='C')
errors = number_of_errors(_FILE, database, models=[det_award, det_award_2])
assert errors == 2
|
twm/yarrharr
|
yarrharr/scripts/yarrharr.py
|
Python
|
gpl-3.0
| 1,676
| 0.000597
|
# -*- coding: utf-8 -*-
# Copyright © 2013, 2014, 2017, 2020 Tom Most <twm@freecog.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Additional permission under GNU GPL version 3 section 7
#
# If you modify this Program, or any covered work, by linking or
# combining it with OpenSSL (or a modified version of that library),
# containing parts covered by the terms of the OpenSSL License, the
# licensors of this Program grant you additional permission to convey
# the resulting work. Corresponding Source for a non-source form of
# such a combination shall include the source code for the parts of
# OpenSSL used as well as that of the covered work.
from __future__ import absolute_import
import argparse
import os
import sys
import yarrharr
def main(argv=sys.argv[1:]):
parser = argparse.ArgumentParser(description="Yarrharr feed reader")
parser.add_argument("--version", action="version", version=yarrharr.__version__)
parser.parse_args(argv)
os.environ["DJANGO_SETTINGS_MODULE"] = "yarrharr.settings"
from yarrharr.application import run
run()
|
systemovich/scrapy-myuniversity
|
myuniversityscraper/pipelines.py
|
Python
|
gpl-3.0
| 273
| 0
|
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class MyuniversityscraperPipeline(object):
def process_item(self, item, spider):
return item
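# Illustrative sketch (not part of the original file): a pipeline typically
# inspects or transforms each item, and raising DropItem is the standard way to
# discard one. The class name and the emptiness check are assumptions made for
# the example only.
from scrapy.exceptions import DropItem
class ExampleFilterPipeline(object):
    def process_item(self, item, spider):
        if not item:
            raise DropItem("empty item")
        return item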
|
edx/course-discovery
|
course_discovery/apps/api/v1/tests/test_views/test_level_types.py
|
Python
|
agpl-3.0
| 1,985
| 0.002519
|
from django.urls import reverse
from course_discovery.apps.api.v1.tests.test_views.mixins import APITestCase, SerializationMixin
from course_discovery.apps.core.tests.factories import USER_PASSWORD, UserFactory
from course_discovery.apps.course_metadata.models import LevelType
from course_discovery.apps.course_metadata.tests.factories import LevelTypeFactory
class LevelTypeViewSetTests(SerializationMixin, APITestCase):
list_path = reverse('api:v1:level_type-list')
def setUp(self):
super().setUp()
self.user = UserFactory(is_staff=True, is_superuser=True)
self.client.login(username=self.user.username, password=USER_PASSWORD)
def test_authentication(self):
""" Verify the endpoint requires the user to be authenticated. """
response = self.client.get(self.list_path)
assert response.status_code == 200
self.client.logout()
response = self.client.get(self.list_path)
assert response.status_code == 401
def test_list(self):
""" Verify the endpoint returns a list of all program types. """
LevelTypeFactory.create_batch(4)
expected = LevelType.objects.all()
with self.assertNumQueries(6):
            response = self.client.get(self.list_path)
assert response.status_code == 200
assert response.data['results'] == self.serialize_level_type(expected, many=True)
    def test_retrieve(self):
""" The request should return details for a single level type. """
level_type = LevelTypeFactory()
level_type.set_current_language('en')
level_type.name_t = level_type.name
level_type.save()
url = reverse('api:v1:level_type-detail', kwargs={'name': level_type.name})
print(level_type.__dict__)
with self.assertNumQueries(5):
response = self.client.get(url)
assert response.status_code == 200
assert response.data == self.serialize_level_type(level_type)
|
Surufel/Personal
|
2.pysifer/Older/hello_world.py
|
Python
|
agpl-3.0
| 189
| 0.010582
|
#!/usr/bin/env python3
# Sifer Aseph
"""Prints a "hello world" statement."""
def main():
"""Utterly standard.""
|
"
print("He
|
llo cruel world.")
if __name__ == "__main__":
main()
|
s1na/darkoob
|
darkoob/book/urls.py
|
Python
|
mit
| 374
| 0.005348
|
from django.conf.urls import patterns, include, url
from darkoob.book import views as book_views
urlpatterns = patterns('',
    url(r'^(?P<book_id>\d+)/(?P<book_title>[a-zA-Z0-9\-_]+)/$', book_views.page, name='book_page'),
url(r'^look/$', book_views.book_lookup),
url(r'^author/$', book_views.author_lookup),
# url(r'^rate/$', book_views.rate, name='rate'),
)
| |
collective/ECReviewBox
|
Extensions/Install.py
|
Python
|
gpl-2.0
| 2,470
| 0.00486
|
# -*- coding: utf-8 -*-
# $Id$
#
# Copyright (c) 2007 Otto-von-Guericke-Universität Magdeburg
#
# This file is part of ECReviewBox.
from StringIO import StringIO
from Products.Archetypes.Extensions.utils import installTypes, install_subskin
from Products.Archetypes.public import listTypes
from Products.CMFCore.utils import getToolByName
from Products.ECReviewBox.config import *
def installDependencies(self, out):
"""
    Tests whether or not depending products are available and installed. If
not, we will try to install them.
"""
qi = getToolByName(self, 'portal_quickinstaller')
for product in DEPENDENCIES:
if qi.isProductInstallable(product):
if not qi.isProductInstalled(product):
qi.installProduct(product)
else:
out.write("Warnig: Depending product '%s' ist not installable." %
product)
def setupWorkflow(self, out):
"""
Assign ECAutoAssignement objects to ec_assignment_workflow.
"""
wf_tool = getToolByName(self, 'portal_workflow')
if 'ec_assignment_workflow' in wf_tool.objectIds():
wf_tool.setChainForPortalTypes((ECR_NAME,), ECA_WF_NAME)
# in case the workflows have changed, update all workflow-aware objects
wf_tool.updateRoleMappings()
out.write("Assigned '%s' to %s.\n" % (ECR_TITLE, ECA_WF_NAME))
else:
out.write("Failed to assign
|
'%s' to %s.\n" % (ECR_TITLE, ECA_WF_NAME))
def install(self):
"""
Installs the product.
"""
out = StringIO()
# install depending products
installDependencies(self, out)
# install types
installTypes(self, out,
|
listTypes(PRODUCT_NAME), PRODUCT_NAME)
# install subskins
install_subskin(self, out, GLOBALS)
# install workflows
setupWorkflow(self, out)
# install tools
# register tool to Plone's preferences panel
# enable portal_factory for given types
factory_tool = getToolByName(self, 'portal_factory')
factory_types=[
ECRB_NAME,
] + factory_tool.getFactoryTypes().keys()
factory_tool.manage_setPortalFactoryTypes(listOfTypeIds=factory_types)
print >> out, "Successfully installed %s." % PRODUCT_NAME
return out.getvalue()
def uninstall(self, reinstall):
"""
Uninstalls the product.
"""
out = StringIO()
print >> out, "Successfully uninstalled %s." % PRODUCT_NAME
return out.getvalue()
|
rbramwell/pulp
|
nodes/extensions/admin/pulp_node/extensions/admin/sync_schedules.py
|
Python
|
gpl-2.0
| 4,455
| 0.001571
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from gettext import gettext as _
from pulp.client.commands.schedule import (
DeleteScheduleCommand, ListScheduleCommand, CreateScheduleCommand,
UpdateScheduleCommand, NextRunCommand, ScheduleStrategy)
from pulp_node import constants
from pulp_node.extensions.admin.options import NODE_ID_OPTION, MAX_BANDWIDTH_OPTION, MAX_CONCURRENCY_OPTION
# -- constants ----------------------------------------------------------------
DESC_LIST = _('list scheduled sync operations')
DESC_CREATE = _('adds a new scheduled sync operation')
DESC_DELETE = _('delete a sync schedule')
DESC_UPDATE = _('updates an existing schedule')
DESC_NEXT_RUN = _('displays the next scheduled sync run for a child node')
# A node sync is considered an update operation on the REST API
SYNC_OPERATION = 'update'
# -- commands -----------------------------------------------------------------
class NodeListScheduleCommand(ListScheduleCommand):
def __init__(self, context):
strategy = NodeSyncScheduleStrategy(context)
super(self.__class__, self).__init__(context, strategy, description=DESC_LIST)
self.add_option(NODE_ID_OPTION)
class NodeCreateScheduleCommand(CreateScheduleCommand):
def __init__(self, context):
strategy = NodeSyncScheduleStrategy(context)
super(self.__class__, self).__init__(context, strategy, description=DESC_CREATE)
self.add_option(NODE_ID_OPTION)
self.add_option(MAX_BANDWIDTH_OPTION)
self.add_option(MAX_CONCURRENCY_OPTION)
class NodeDeleteScheduleCommand(DeleteScheduleCommand):
def __init__(self, context):
strategy = NodeSyncScheduleStrategy(context)
super(self.__class__, self).__init__(context, strategy, description=DESC_DELETE)
self.add_option(NODE_ID_OPTION)
class NodeUpdateScheduleCommand(UpdateScheduleCommand):
def __init__(self, context):
strategy = NodeSyncScheduleStrategy(context)
super(self.__class__, self).__init__(context, strategy, description=DESC_UPDATE)
self.add_option(NODE_ID_OPTION)
class NodeNextRunCommand(NextRunCommand):
def __init__(self, context):
strategy = NodeSyncScheduleStrategy(context)
super(self.__class__, self).__init__(context, strategy, description=DESC_NEXT_RUN)
self.add_option(NODE_ID_OPTION)
# -- framework classes --------------------------------------------------------
class NodeSyncScheduleStrategy(ScheduleStrategy):
# See super class for method documentation
def __init__(self, context):
super(self.__class__, self).__init__()
self.context = context
self.api = context.server.consu
|
mer_content_schedules
def create_schedule(self, schedule, failure_threshold, enabled, kwargs):
node_id = kwargs[NODE_ID_OPTION.keyword]
max_bandwidth = kwargs[MAX_BANDWIDTH_OPTION.keyword]
max_concurrency = kwargs[MAX_CONCURRENCY_OPTION.keyword]
|
units = [dict(type_id='node', unit_key=None)]
options = {
constants.MAX_DOWNLOAD_BANDWIDTH_KEYWORD: max_bandwidth,
constants.MAX_DOWNLOAD_CONCURRENCY_KEYWORD: max_concurrency,
}
return self.api.add_schedule(
SYNC_OPERATION,
node_id,
schedule,
units,
failure_threshold,
enabled,
options)
def delete_schedule(self, schedule_id, kwargs):
node_id = kwargs[NODE_ID_OPTION.keyword]
return self.api.delete_schedule(SYNC_OPERATION, node_id, schedule_id)
def retrieve_schedules(self, kwargs):
node_id = kwargs[NODE_ID_OPTION.keyword]
return self.api.list_schedules(SYNC_OPERATION, node_id)
def update_schedule(self, schedule_id, **kwargs):
node_id = kwargs.pop(NODE_ID_OPTION.keyword)
return self.api.update_schedule(SYNC_OPERATION, node_id, schedule_id, **kwargs)
|
kikimaroca/beamtools
|
build/lib/beamtools/pulse.py
|
Python
|
mit
| 9,048
| 0.021994
|
'''
Pulse characterization
Created Fri May 12 2017
@author: cpkmanchee
'''
import numpy as np
import os.path
import inspect
from beamtools.constants import h,c,pi
from beamtools.common import normalize, gaussian, sech2, alias_dict
from beamtools.import_data_file import import_data_file as _import
from beamtools.import_data_file import objdict
from scipy.optimize import curve_fit
__all__ = ['spectrumFT', 'fit_ac', 'ac_x2t', 'sigma_fwhm']
class FitResult():
def __init__(self, ffunc, ftype, popt, pcov=0, indep_var='time'):
self.ffunc = ffunc
self.ftype = ftype
self.popt = popt
self.pcov = pcov
self.iv=indep_var
def subs(self,x):
return self.ffunc(x,*self.popt)
def get_args(self):
return inspect.getargspec(self.ffunc)
def spectrumFT(data,from_file = False, file_type='oo_spec', units_wl='nm', n_interp=0):
'''Compute transform limited pulse from spectrum.
data = wavelength vs. PSD (intensity) if from_file=False
= filename of spectrum file to be impor
|
ted if from_file=True
Units assumed to be nm for wavelength.
    If from_file is set True, data should be a filename.
    Optional file_type, default is 'oo_spec' (Ocean Optics spectrometer); other
    file types are not yet handled (filetype handling for x/y).
n_interp = bit depth of frequency inter
|
polation, n = 2**n_interp. 0 = auto
'''
if from_file:
if type(data) is str:
if not os.path.exists(data):
print('File does not exist')
return -1
imported_data = _import(data,file_type)
#insert testing for wavelength/intensity location in dataobject
wavelength = imported_data.wavelength
intensity = imported_data.intensity
#get units from dataobject
else:
print('invalid filetype')
return -1
else:
wavelength = data[0]
intensity = data[1]
imported_data = data
if n_interp == 0:
#insert here later - round up to nearest power of two.
n = 2**12
    else:
        n = 2**n_interp
#use units to convert wavelength to SI
wl = wavelength*1E-9
psd = normalize(intensity)
nu = c/wl #nu is SI
#interpolate psd, linear freq spacing
nui = np.linspace(min(nu),max(nu),n)
df = (max(nu)-min(nu))/(n-1)
psdi = normalize(np.interp(nui,np.flipud(nu),np.flipud(psd)))
#i = (np.abs(nui-nu0)).argmin() #centre freq index
#perform FT-1, remove centre spike
t = np.fft.ifftshift(np.fft.fftfreq(n,df)[1:-1])
ac =np.fft.ifftshift((np.fft.ifft(np.fft.ifftshift(psdi)))[1:-1])
output_dict = {'time': t, 'ac': ac, 'nu': nui, 'psd': psdi}
output = objdict(output_dict)
return output, imported_data
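# Editor's note: hedged usage sketch, not part of the original module. The
# file name and file_type below are assumptions for illustration only.
#   tl, raw = spectrumFT('spectrum.txt', from_file=True, file_type='oo_spec')
#   # tl.time / tl.ac : delay axis and transform-limited autocorrelation
#   # tl.nu / tl.psd  : interpolated spectrum on a linear frequency grid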
def ac_x2t(position,aoi=15,config='sym'):
'''Convert autocorrelation position to time
Symmetric - stage moves perp to normal.
Asymmetric - stage moves along incoming optical axis
'''
if type(config) is not str:
print('Unrecognized configuration. Must be symmetric or asymmetric.')
return position
if config.lower() in alias_dict['symmetric']:
time = (1/c)*position*2*np.cos(aoi*pi/180)
elif config.lower() in alias_dict['asymmetric']:
time = (1/c)*position*(1+np.cos(2*aoi*pi/180))
else:
print('Unrecognized configuration. Must be symmetric or asymmetric.')
return position
return time
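# Editor's note: hedged illustration, not part of the original module. It
# restates the two conversions above (t = 2*cos(aoi)*x/c for the symmetric
# layout, t = (1+cos(2*aoi))*x/c for the asymmetric one) using the
# module-level c and pi; the 1 um stage travel is an assumed value.
def _ac_x2t_example(position=1e-6, aoi=15):
    sym = (1/c)*position*2*np.cos(aoi*pi/180)         # ~6.4e-15 s of delay
    asym = (1/c)*position*(1 + np.cos(2*aoi*pi/180))  # ~6.2e-15 s of delay
    return sym, asym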
def fit_ac(data, from_file = False, file_type='bt_ac', form='all', bgform = 'constant'):
'''Fit autocorrelation peak.
data must be either:
1. 2 x n array - data[0] = time(delay), data[1] = intensity
2. datafile name --> from_file must be True
If there is no 'delay' parameter in data file (only position), the position is
auto converted to time delay.
'''
if from_file:
if type(data) is str:
if not os.path.exists(data):
print('File does not exist')
return -1
imported_data = _import(data,file_type)
#insert testing for power location in dataobject
position = imported_data.position
intensity = imported_data.power
if 'delay' in imported_data.__dict__:
delay = imported_data.delay
else:
delay = ac_x2t(position,aoi=15,config='sym')
#get units from dataobject
else:
print('invalid filetype')
return -1
else:
imported_data = data
delay = data[0]
intensity = data[1]
x = delay
y = intensity
bgpar, bgform = _background(x,y,form = bgform)
mean = np.average(x,weights = y)
stdv = np.sqrt(np.average((x-mean)**2 ,weights = y))
#set fitting function (including background)
if bgform is None:
def fitfuncGaus(x,sigma,a,x0):
return gaussian(x,sigma,a,x0)
def fitfuncSech2(x,sigma,a,x0):
return sech2(x,sigma,a,x0)
    elif bgform.lower() in alias_dict['constant']:
def fitfuncGaus(x,sigma,a,x0,p0):
return gaussian(x,sigma,a,x0) + p0
def fitfuncSech2(x,sigma,a,x0,p0):
return sech2(x,sigma,a,x0) + p0
elif bgform.lower() in alias_dict['linear']:
def fitfuncGaus(x,sigma,a,x0,p0,p1):
return gaussian(x,sigma,a,x0) + p1*x + p0
def fitfuncSech2(x,sigma,a,x0,p0,p1):
return sech2(x,sigma,a,x0) + p1*x + p0
elif bgform.lower() in alias_dict['quadratic']:
def fitfuncGaus(x,sigma,a,x0,p0,p1,p2):
return gaussian(x,sigma,a,x0) + p2*x**2 + p1*x + p0
def fitfuncSech2(x,sigma,a,x0,p0,p1,p2):
return sech2(x,sigma,a,x0) + p2*x**2 + p1*x + p0
else:
def fitfuncGaus(x,sigma,a,x0):
return gaussian(x,sigma,a,x0)
def fitfuncSech2(x,sigma,a,x0):
return sech2(x,sigma,a,x0)
nFitArgs = len(inspect.getargspec(fitfuncGaus).args) - 1
#sets which functions are to be fit... this can be streamlined i think
if form.lower() in ['both', 'all']:
fitGaus = True
fitSech2 = True
elif form.lower() in alias_dict['gaus']:
fitGaus = True
fitSech2 = False
elif form.lower() in alias_dict['sech2']:
fitGaus = False
fitSech2 = True
else:
        print('Unknown fit form: ' + form)
fitGaus = False
fitSech2 = False
#start fitting
popt=[]
pcov=[]
fit_results=[]
if type(bgpar) is np.float64:
p0=[stdv,max(y)-min(y),mean,bgpar]
elif type(bgpar) is np.ndarray:
p0=[stdv,max(y)-min(y),mean]+bgpar.tolist()
else:
p0=None
if fitGaus:
try:
poptGaus,pcovGaus = curve_fit(fitfuncGaus,x,y,p0)
except RuntimeError:
poptGaus = np.zeros(nFitArgs)
pcovGaus = np.zeros((nFitArgs,nFitArgs))
popt.append(poptGaus)
pcov.append(pcovGaus)
fit_results.append(FitResult(ffunc=fitfuncGaus, ftype='gaussian',
popt=poptGaus, pcov=pcovGaus))
if fitSech2:
try:
poptSech2,pcovSech2 = curve_fit(fitfuncSech2,x,y,p0)
except RuntimeError:
poptSech2 = np.zeros(nFitArgs)
pcovSech2 = np.zeros((nFitArgs,nFitArgs))
popt.append(poptSech2)
pcov.append(pcovSech2)
fit_results.append(FitResult(ffunc=fitfuncSech2, ftype='sech2',
popt=poptSech2, pcov=pcovSech2))
return fit_results, imported_data
def sigma_fwhm(sigma, shape='gaus'):
'''Convert sigma to full-width half-max
'''
if shape.lower() in alias_dict['gaus']:
A = 2*np.sqrt(2*np.log(2))
elif shape.lower() in alias_dict['sech2']:
A = 2*np.arccosh(np.sqrt(2))
else:
A = 1
return A*sigma
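# Editor's note: hedged illustration, not part of the original module. It
# assumes the strings 'gaus' and 'sech2' resolve through alias_dict as the
# default arguments above suggest; the 100 fs sigma is an assumed value.
def _sigma_fwhm_example(sigma=100e-15):
    # 2*sqrt(2*ln2) ~ 2.355 -> ~235 fs; 2*arccosh(sqrt(2)) ~ 1.763 -> ~176 fs
    return sigma_fwhm(sigma, shape='gaus'), sigma_fwhm(sigma, shape='sech2')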
def _background(x,y,form = 'constant'):
'''Provides starting values for background parameters.
    Takes x,y data and the desired background form (defaults to constant)
returns p, the polynomial coefficients. p is variable
|
stewartsmith/bzr
|
bzrlib/plugins/weave_fmt/test_bzrdir.py
|
Python
|
gpl-2.0
| 26,897
| 0.005391
|
# Copyright (C) 2006-2011 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for the weave-era BzrDir formats.
For interface contract tests, see tests/per_bzr_dir.
"""
from __future__ import absolute_import
import os
import sys
from bzrlib import (
branch,
bzrdir,
controldir,
errors,
repository,
upgrade,
urlutils,
workingtree,
)
from bzrlib.osutils import (
getcwd,
)
from bzrlib.tests.test_bundle import V4BundleTester
from bzrlib.tests.test_sftp_transport import TestCaseWithSFTPServer
from bzrlib.tests import (
TestCaseWithTransport,
)
from bzrlib.plugins.weave_fmt.branch import (
BzrBranchFormat4,
)
from bzrlib.plugins.weave_fmt.bzrdir import (
BzrDirFormat5,
BzrDirFormat6,
)
class TestFormat5(TestCaseWithTransport):
"""Tests specific to the version 5 bzrdir format."""
def test_same_lockfiles_between_tree_repo_branch(self):
# this checks that only a single lockfiles instance is created
# for format 5 objects
dir = BzrDirFormat5().initialize(self.get_url())
def check_dir_components_use_same_lock(dir):
ctrl_1 = dir.open_repository().control_files
ctrl_2 = dir.open_branch().control_files
ctrl_3 = dir.open_workingtree()._control_files
self.assertTrue(ctrl_1 is ctrl_2)
self.assertTrue(ctrl_2 is ctrl_3)
check_dir_components_use_same_lock(dir)
# and if we open it normally.
dir = controldir.ControlDir.open(self.get_url())
check_dir_components_use_same_lock(dir)
def test_can_convert(self):
        # format 5 dirs are convertible
dir = BzrDirFormat5().initialize(self.get_url())
self.assertTrue(dir.can_convert_format())
def test_needs_conversion(self):
# format 5 dirs need a conversion if they are not the default,
# and they aren't
dir = BzrDirFormat5().initialize(self.get_url())
# don't need to convert it to itself
self.assertFalse(dir.needs_format_conversion(BzrDirFormat5()))
# do need to convert it to the current default
self.assertTrue(dir.needs_format_conversion(
bzrdir.BzrDirFormat.get_default_format()))
class TestFormat6(TestCaseWithTransport):
"""Tests specific to the version 6 bzrdir format."""
def test_same_lockfiles_between_tree_repo_branch(self):
# this checks that only a single lockfiles instance is created
# for format 6 objects
dir = BzrDirFormat6().initialize(self.get_url())
def check_dir_components_use_same_lock(dir):
ctrl_1 = dir.open_repository().control_files
ctrl_2 = dir.open_branch().control_files
ctrl_3 = dir.open_workingtree()._control_files
self.assertTrue(ctrl_1 is ctrl_2)
self.assertTrue(ctrl_2 is ctrl_3)
check_dir_components_use_same_lock(dir)
# and if we open it normally.
dir = controldir.ControlDir.open(self.get_url())
check_dir_components_use_same_lock(dir)
def test_can_convert(self):
        # format 6 dirs are convertible
dir = BzrDirFormat6().initialize(self.get_url())
self.assertTrue(dir.can_convert_format())
def test_needs_conversion(self):
        # format 6 dirs need a conversion if they are not the default.
dir = BzrDirFormat6().initialize(self.get_url())
self.assertTrue(dir.needs_format_conversion(
bzrdir.BzrDirFormat.get_default_format()))
class TestBreakLockOldBranch(TestCaseWithTransport):
def test_break_lock_format_5_bzrdir(self):
# break lock on a format 5 bzrdir should just return
self.make_branch_and_tree('foo', format=BzrDirFormat5())
out, err = self.run_bzr('break-lock foo')
self.assertEqual('', out)
self.assertEqual('', err)
_upgrade1_template = \
[
('foo', 'new contents\n'),
('.bzr/',),
('.bzr/README',
'This is a Bazaar control directory.\n'
'Do not change any files in this directory.\n'
'See http://bazaar.canonical.com/ for more information about Bazaar.\n'),
('.bzr/branch-format', 'Bazaar-NG branch, format 0.0.4\n'),
('.bzr/revision-history',
'mbp@sourcefrog.net-20051004035611-176b16534b086b3c\n'
'mbp@sourcefrog.net-20051004035756-235f2b7dcdddd8dd\n'),
('.bzr/merged-patches', ''),
('.bzr/pending-merged-patches', ''),
('.bzr/branch-name', ''),
('.bzr/branch-lock', ''),
('.bzr/pending-merges', ''),
('.bzr/inventory',
'<inventory>\n'
'<entry file_id="foo-20051004035605-91e788d1875603ae" kind="file" name="foo" />\n'
'</inventory>\n'),
('.bzr/stat-cache',
'### bzr hashcache v5\n'
'foo// be9f309239729f69a6309e970ef24941d31e042c 13 1128398176 1128398176 303464 770\n'),
('.bzr/text-store/',),
('.bzr/text-store/foo-20051004035611-1591048e9dc7c2d4.gz',
'\x1f\x8b\x08\x00[\xfdAC\x02\xff\xcb\xcc\xcb,\xc9L\xccQH\xce\xcf+I\xcd+)\xe6\x02\x00\xdd\xcc\xf90\x11\x00\x00\x00'),
('.bzr/text-store/foo-20051004035756-4081373d897c3453.gz',
'\x1f\x8b\x08\x00\xc4\xfdAC\x02\xff\xcbK-WH\xce\xcf+I\xcd+)\xe6\x02\x00g\xc3\xdf\xc9\r\x00\x00\x00'),
('.bzr/inventory-store/',),
('.bzr/inventory-store/mbp@sourcefrog.net-20051004035611-176b16534b086b3c.gz',
'\x1f\x8b\x08\x00[\xfdAC\x02\xffm\x8f\xcd\n\xc20\x10\x84\xef>E\xc8\xbdt7?M\x02\xad\xaf"\xa1\x99`P[\xa8E\xacOo\x14\x05\x0f\xdef\xe1\xfbv\x98\xbeL7L\xeb\xbcl\xfb]_\xc3\xb2\x89\\\xce8\x944\xc8<\xcf\x8d"\xb2LdH\xdb\x8el\x13\x18\xce\xfb\xc4\xde\xd5SGHq*\xd3\x0b\xad\x8e\x14S\xbc\xe0\xadI\xb1\xe2\xbe\xfe}\xc2\xdc\xb0\rL\xc6#\xa4\xd1\x8d*\x99\x0f}=F\x1e$8G\x9d\xa0\x02\xa1rP9\x01c`FV\xda1qg\x98"\x02}\xa5\xf2\xa8\x95\xec\xa4h\xeb\x80\xf6g\xcd\x13\xb3\x01\xcc\x98\xda\x00\x00\x00'),
('.bzr/inventory-store/mbp@sourcefrog.net-20051004035756-235f2b7dcdddd8dd.gz',
'\x1f\x8b\x08\x00\xc4\xfdAC\x02\xffm\x8f\xc1\n\xc20\x10D\xef~E\xc8\xbd\xb8\x9bM\x9a,\xb4\xfe\x8a\xc4f\x83Am\xa1\x16\xb1~\xbdQ\x14<x\x9b\x81y3LW\xc6\x9b\x8c\xcb4\xaf\xbbMW\xc5\xbc\xaa\\\xce\xb2/\xa9\xd7y\x9a\x1a\x03\xe0\x10\xc0\x02\xb9\x16\\\xc3(>\x84\x84\xc1WKQ\xb4:\x95\xf1\x15\xad\x8cVc\xbc\xc8\x1b\xd3j\x91\xfb\xf2\xaf\xa4r\x8d\x85\x80\xe4)\x05\xf6\x03YG\x9f\xf4\xf5\x18\xb1\xd7\x07\xe1L\xc0\x86\xd8\x1b\xce-\xc7\xb6:a\x0f\x92\x8de\x8b\x89P\xc0\x9a\xe1\x0b\x95G\x9d\xc4\xda\xb1\xad\x07\xb6?o\x9e\xb5\xff\xf0\xf9\xda\x00\x00\x00'),
('.bzr/revision-store/',),
('.bzr/revision-store/mbp@sourcefrog.net-20051004035611-176b16534b086b3c.gz',
'\x1f\x8b\x08\x00[\xfdAC\x02\xff\x9d\x8eKj\xc30\x14E\xe7^\x85\xd0 \xb3$\xefI\xd1\x8f\xd8\xa6\x1b(t\x07E?\xbb\x82H\n\xb2\x1ahW\xdfB1\x14:\xeb\xf4r\xee\xbdgl\xf1\x91\xb6T\x0b\xf15\xe7\xd4{l\x13}\xb6\xad\xa7B^j\xbd\x91
|
\xc3\xad_\xb3\xbb?m\xf5\xbd\xf9\xb8\xb4\xba\x9eJ\xec\x87\xb5_)I\xe5\x11K\xaf\xed\xe35\x85\x89\xfe\xa5\x8e\x0c@ \xc0\x05\xb8\x90\x88GT\xd2\xa1\x14\xfc\xe2@K\xc7\xfd\xef\x85\xed\xcd\xe2D\x95\x8d\x1a\xa47<\x02c2\xb0 \xbc\xd0\x8ay\xa3\xbcp\x8a\x83\x1
|
2A3\xb7XJv\xef\x7f_\xf7\x94\xe3\xd6m\xbeO\x14\x91in4*<\x812\x88\xc60\xfc\x01>k\x89\x13\xe5\x12\x00\xe8<\x8c\xdf\x8d\xcd\xaeq\xb6!\x90\xa5\xd6\xf1\xbc\x07\xc3x\xde\x85\xe6\xe1\x0b\xc8\x8a\x98\x03T\x01\x00\x00'),
('.bzr/revision-store/mbp@sourcefrog.net-20051004035756-235f2b7dcdddd8dd.gz',
'\x1f\x8b\x08\x00\xc4\xfdAC\x02\xff\x9d\x90Kj\x031\x0c\x86\xf79\xc5\xe0Ev\xe9\xc8o\x9b\xcc\x84^\xa0\xd0\x1b\x14\xbf&5d\xec`\xbb\x81\xf6\xf45\x84\xa4\x81\xaeZ\
|
thinkopensolutions/server-tools
|
sentry/__init__.py
|
Python
|
agpl-3.0
| 2,417
| 0
|
# -*- coding: utf-8 -*-
# Copyright 2016-2017 Versada <https://versada.eu/>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
from odoo.service import wsgi_server
from odoo.tools import config as odoo_config
from . import const
from .logutils import LoggerNameFilter, OdooSentryHandler
_logger = logging.getLogger(__name__)
HAS_RAVEN = True
try:
import raven
from raven.middleware import Sentry
except ImportError:
HAS_RAVEN = False
_logger.debug('Cannot import "raven". Please make sure it is installed.')
def get_odoo_commit(odoo_dir):
'''Attempts to get Odoo git commit from :param:`odoo_dir`.'''
if not odoo_dir:
return
try:
return raven.fetch_git_sha(odoo_dir)
except raven.exceptions.InvalidGitRepository:
_logger.debug(
u'Odoo directory: "%s" not a valid git repository', odoo_dir)
def initialize_raven(config, client_cls=None):
'''
Setup an instance of :class:`raven.Client`.
:param config: Sentry configuration
    :param client_cls: class used to instantiate the raven client.
'''
enabled = config.get('sentry_enabled', False)
if not (HAS_RAVEN and enabled):
return
options = {
'release': get_odoo_commit(config.get('sentry_odoo_dir')),
}
for option in const.get_sentry_options():
value = config.get('sentry_%s' % option.key, option.default)
if callable(option.converter):
valu
|
e = option.converter(value)
options[option.key] = value
level = config.get('sentry_logging_level', const.DEFAULT_LOG_LEVEL)
exclude_loggers = const.split_multiple(
config.get('sentry_exclude_loggers', const.DEFAULT_EXCLUDE_LOGGERS)
)
if level not in const.LOG_LEVEL_MAP:
|
level = const.DEFAULT_LOG_LEVEL
client_cls = client_cls or raven.Client
client = client_cls(**options)
handler = OdooSentryHandler(
config.get('sentry_include_context', True),
client=client,
level=const.LOG_LEVEL_MAP[level],
)
if exclude_loggers:
handler.addFilter(LoggerNameFilter(
exclude_loggers, name='sentry.logger.filter'))
raven.conf.setup_logging(handler)
wsgi_server.application = Sentry(
wsgi_server.application, client=client)
client.captureMessage('Starting Odoo Server')
return client
sentry_client = initialize_raven(odoo_config)
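# Editor's note: hedged illustration, not part of the original module. A
# minimal odoo.conf fragment enabling the handler might look like the lines
# below; any option name not read in the code above is an assumption, and the
# logging level must be a key of const.LOG_LEVEL_MAP.
#   [options]
#   sentry_enabled = True
#   sentry_logging_level = warn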
|
kaizentech/skeleton
|
config/settings/configurations/ENV.py
|
Python
|
apache-2.0
| 103
| 0
|
""" environ se
|
ttings """
import environ
BASE_DIR = environ.Path(__file__) - 4
ENV_VAR = environ.Env()
| |
oxc/Flexget
|
flexget/plugins/modify/list_clear.py
|
Python
|
mit
| 2,149
| 0.003723
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import logging
from flexget import plugin
from flexget.event import event
from flexget.plugin import PluginError
log = logging.getLogger('list_clear')
class ListClear(object):
schema = {
'type': 'object',
'properties': {
'what': {'type': 'array', 'items':
{'allOf': [
{'$ref': '/schema/plugins?group=list'},
{'maxProperties': 1,
'error_maxProperties': 'Plugin options within list_clear plugin must be indented '
'2 more spaces than the first letter of the plugin name.',
'minProperties': 1}]}},
'phase': {'type': 'string', 'enum': plugin.task_phases, 'default': 'start'}
},
'required': ['what']
}
def __getattr__(self, phase):
# enable plugin in regular task phases
if phase.replace('on_task_', '') in plugin.task_phases:
return self.clear
@plugin.priority(255)
def clear(self, task, config):
for item in config['what']:
for plugin_name, plugin_config in item.items():
try:
thelist = plugin.get_plugin_by_name(plugin_name).instance.get_list(plugin_config)
except AttributeError:
raise PluginError('Plugin %s does not support list interface' % plugin_name)
if thelist.immutable:
raise plugin.PluginError(thelist.immutable)
if
|
config['phase'] == task.current_phase:
if task.manager.options.test and thelist.online:
log.info('would have cleared all items from %s - %s', plu
|
gin_name, plugin_config)
continue
log.verbose('clearing all items from %s - %s', plugin_name, plugin_config)
thelist.clear()
@event('plugin.register')
def register_plugin():
plugin.register(ListClear, 'list_clear', api_ver=2)
|
kaushik94/sympy
|
sympy/ntheory/tests/test_generate.py
|
Python
|
bsd-3-clause
| 6,809
| 0.001028
|
from sympy import Sieve, sieve, Symbol, S, limit, I, zoo, nan, Rational
from sympy.core.compatibility import range
from sympy.ntheory import isprime, totient, mobius, randprime, nextprime, prevprime, \
primerange, primepi, prime, primorial, composite, compositepi, reduced_totient
from sympy.ntheory.generate import cycle_length
from sympy.ntheory.primetest import mr
from sympy.utilities.pytest import raises
def test_prime():
assert prime(1) == 2
assert prime(2) == 3
assert prime(5) == 11
assert prime(11) == 31
assert prime(57) == 269
assert prime(296) == 1949
assert prime(559) == 4051
assert prime(3000) == 27449
assert prime(4096) == 38873
assert prime(9096) == 94321
assert prime(25023) == 287341
raises(ValueError, lambda: prime(0))
sieve.extend(3000)
assert prime(401) == 2749
def test_primepi():
assert primepi(-1) == 0
assert primepi(1) == 0
assert primepi(2) == 1
assert primepi(Rational(7, 2)) == 2
assert primepi(3.5) == 2
assert primepi(5) == 3
assert primepi(11) == 5
assert primepi(57) == 16
assert primepi(296) == 62
assert primepi(559) == 102
assert primepi(3000) == 430
assert primepi(4096) == 564
assert primepi(9096) == 1128
assert primepi(25023) == 2763
assert primepi(10**8) == 5761455
assert primepi(253425253) == 13856396
assert primepi(8769575643) == 401464322
sieve.extend(3000)
assert primepi(2000) == 303
n = Symbol('n')
assert primepi(n).subs(n, 2) == 1
r = Symbol('r', real=True)
assert primepi(r).subs(r, 2) == 1
assert primepi(S.Infinity) is S.Infinity
assert primepi(S.NegativeInfinity) == 0
assert limit(primepi(n), n, 100) == 25
raises(ValueError, lambda: primepi(I))
raises(ValueError, lambda: primepi(1 + I))
raises(ValueError, lambda: primepi(zoo))
raises(ValueError, lambda: primepi(nan))
def test_composite():
from sympy.ntheory.generate import sieve
sieve._reset()
assert composite(1) == 4
assert composite(2) == 6
assert composite(5) == 10
assert composite(11) == 20
assert composite(41) == 58
assert composite(57) == 80
assert composite(296) == 370
assert composite(559) == 684
assert composite(3000) == 3488
assert composite(4096) == 4736
assert composite(9096) == 10368
assert composite(25023) == 28088
sieve.extend(3000)
assert composite(1957) == 2300
assert composite(2568) == 2998
raises(ValueError, lambda: composite(0))
def test_compositepi():
assert compositepi(1) == 0
assert compositepi(2) == 0
assert compositepi(5) == 1
assert compositepi(11) == 5
assert compositepi(57) == 40
assert compositepi(296) == 233
assert compositepi(559) == 456
assert compositepi(3000) == 2569
assert compositepi(4096) == 3531
assert compositepi(9096) == 7967
assert compositepi(25023) == 22259
assert compositepi(10**8) == 94238544
assert compositepi(253425253) == 239568856
assert compositepi(8769575643) == 8368111320
sieve.extend(3000)
assert compositepi(2321) == 1976
def test_generate():
from sympy.ntheory.generate import sieve
sieve._reset()
assert nextprime(-4) == 2
assert nextprime(2) == 3
assert nextprime(5) == 7
assert nextprime(12) == 13
assert prevprime(3) == 2
assert prevprime(7) == 5
assert prevprime(13) == 11
assert prevprime(19) == 17
assert prevprime(20) == 19
sieve.extend_to_no(9)
assert sieve._list[-1] == 23
assert sieve._list[-1] < 31
assert 31 in sieve
assert nextprime(90) == 97
assert nextprime(10**40) == (10**40 + 121)
assert prevprime(97) == 89
assert prevprime(10**40) == (10**40 - 17)
assert list(sieve.primerange(10, 1)) == []
assert list(sieve.primerange(5, 9)) == [5, 7]
sieve._reset(prime=True)
assert list(sieve.primerange(2, 12)) == [2, 3, 5, 7, 11]
assert list(sieve.totientrange(5, 15)) == [4, 2, 6, 4, 6, 4, 10, 4, 12, 6]
sieve._reset(totient=True)
assert list(sieve.totientrange(3, 13)) == [2, 2, 4, 2, 6, 4, 6, 4, 10, 4]
assert list(sieve.totientrange(900, 1000)) == [totient(x) for x in range(900, 1000)
|
]
assert list(sieve.totientrange(0, 1)) == []
assert list(sieve.totientrange(1, 2)) == [1]
assert list(sieve.mobiusrange(5, 15)) == [-1, 1, -1, 0, 0, 1, -1, 0, -1, 1]
|
sieve._reset(mobius=True)
assert list(sieve.mobiusrange(3, 13)) == [-1, 0, -1, 1, -1, 0, 0, 1, -1, 0]
assert list(sieve.mobiusrange(1050, 1100)) == [mobius(x) for x in range(1050, 1100)]
assert list(sieve.mobiusrange(0, 1)) == []
assert list(sieve.mobiusrange(1, 2)) == [1]
assert list(primerange(10, 1)) == []
assert list(primerange(2, 7)) == [2, 3, 5]
assert list(primerange(2, 10)) == [2, 3, 5, 7]
assert list(primerange(1050, 1100)) == [1051, 1061,
1063, 1069, 1087, 1091, 1093, 1097]
s = Sieve()
for i in range(30, 2350, 376):
for j in range(2, 5096, 1139):
A = list(s.primerange(i, i + j))
B = list(primerange(i, i + j))
assert A == B
s = Sieve()
assert s[10] == 29
assert nextprime(2, 2) == 5
raises(ValueError, lambda: totient(0))
raises(ValueError, lambda: reduced_totient(0))
raises(ValueError, lambda: primorial(0))
assert mr(1, [2]) is False
func = lambda i: (i**2 + 1) % 51
assert next(cycle_length(func, 4)) == (6, 2)
assert list(cycle_length(func, 4, values=True)) == \
[17, 35, 2, 5, 26, 14, 44, 50, 2, 5, 26, 14]
assert next(cycle_length(func, 4, nmax=5)) == (5, None)
assert list(cycle_length(func, 4, nmax=5, values=True)) == \
[17, 35, 2, 5, 26]
sieve.extend(3000)
assert nextprime(2968) == 2969
assert prevprime(2930) == 2927
raises(ValueError, lambda: prevprime(1))
def test_randprime():
assert randprime(10, 1) is None
assert randprime(2, 3) == 2
assert randprime(1, 3) == 2
assert randprime(3, 5) == 3
raises(ValueError, lambda: randprime(20, 22))
for a in [100, 300, 500, 250000]:
for b in [100, 300, 500, 250000]:
p = randprime(a, a + b)
assert a <= p < (a + b) and isprime(p)
def test_primorial():
assert primorial(1) == 2
assert primorial(1, nth=0) == 1
assert primorial(2) == 6
assert primorial(2, nth=0) == 2
assert primorial(4, nth=0) == 6
def test_search():
assert 2 in sieve
assert 2.1 not in sieve
assert 1 not in sieve
assert 2**1000 not in sieve
raises(ValueError, lambda: sieve.search(1))
def test_sieve_slice():
assert sieve[5] == 11
assert list(sieve[5:10]) == [sieve[x] for x in range(5, 10)]
assert list(sieve[5:10:2]) == [sieve[x] for x in range(5, 10, 2)]
|
UmSenhorQualquer/pyStateMachine
|
pyStateMachine/States/__init__.py
|
Python
|
mit
| 273
| 0.025641
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__
|
= "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
__version__ = "1.0"
__maintainer__ = "Ricardo Ribeiro"
__email__ = "ri
|
cardojvr@gmail.com"
__status__ = "Production"
|
OpenAgInitiative/gro-api
|
gro_api/sensors/urls.py
|
Python
|
gpl-2.0
| 344
| 0.002907
|
from .views import (
SensorTypeVie
|
wSet, SensorViewSet, SensingPointViewSet, DataPointViewSet
)
def contribute_to_router(router):
router.register(r'sensorType', SensorTypeViewSet)
router.register(r'sensor', SensorViewSet)
router.register(r'sensingPoint', SensingPointViewSet)
|
router.register(r'dataPoint', DataPointViewSet)
|
michaelsevilla/teuthology
|
teuthology/packaging.py
|
Python
|
mit
| 22,261
| 0.000225
|
import logging
import ast
import re
import requests
from cStringIO import StringIO
from .config import config
log = logging.getLogger(__name__)
'''
Map 'generic' package name to 'flavor-specific' package name.
If entry is None, either the package isn't known here, or
it's known but should not be installed on remotes of this flavor
'''
_PACKAGE_MAP = {
'sqlite': {'deb': 'sqlite3', 'rpm': None}
}
'''
Map 'generic' service name to 'flavor-specific' service name.
'''
_SERVICE_MAP = {
'httpd': {'deb': 'apache2', 'rpm': 'httpd'}
}
DISTRO_CODENAME_MAP = {
"ubuntu": {
"14.04": "trusty",
"12.04": "precise",
"15.04": "vivid",
},
"debian": {
"7": "wheezy",
},
}
DEFAULT_OS_VERSION = dict(
ubuntu="14.04",
fedora="20",
centos="7.0",
opensuse="12.2",
sles="11-sp2",
rhel="7.0",
debian='7.0'
)
def get_package_name(pkg, rem):
"""
Find the remote-specific name of the generic 'pkg'
"""
flavor = rem.os.package_type
try:
return _PACKAGE_MAP[pkg][flavor]
except KeyError:
return None
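# Editor's note (hedged illustration, not part of the original module): with
# the _PACKAGE_MAP above, get_package_name('sqlite', remote) resolves to
# 'sqlite3' on a deb-flavored remote and to None on an rpm-flavored one,
# where None means "known, but not to be installed on that flavor".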
def get_service_name(service, rem):
"""
Find the remote-specific name of the generic 'service'
"""
flavor = rem.os.package_type
try:
return _SERVICE_MAP[service][flavor]
except KeyError:
return None
def install_package(package, remote):
"""
Install 'package' on 'remote'
Assumes repo has already been set up (perhaps with install_repo)
"""
log.info('Installing package %s on %s', package, remote)
flavor = remote.os.package_type
if flavor == 'deb':
pkgcmd = ['DEBIAN_FRONTEND=noninteractive',
'sudo',
'-E',
'apt-get',
'-y',
'install',
'{package}'.format(package=package)]
elif flavor == 'rpm':
pkgcmd = ['sudo',
'yum',
'-y',
'install',
'{package}'.format(package=package)]
else:
log.error('install_package: bad flavor ' + flavor + '\n')
return False
return remote.run(args=pkgcmd)
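# Editor's note (hedged illustration, not part of the original module): on a
# deb remote, install_package('sqlite3', remote) runs roughly
#   DEBIAN_FRONTEND=noninteractive sudo -E apt-get -y install sqlite3
# and 'sudo yum -y install <package>' on an rpm remote; the generic->flavor
# mapping from get_package_name() is not applied automatically here.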
def remove_package(package, remote):
"""
Remove package from remote
"""
flavor = remote.os.package_type
if flavor == 'deb':
pkgcmd = ['DEBIAN_FRONTEND=noninteractive',
'sudo',
'-E',
'apt-get',
'-y',
|
'purge',
'{package}'.format(package=package)]
elif flavor == 'rpm':
pkgcmd = ['sudo',
'yum',
|
'-y',
'erase',
'{package}'.format(package=package)]
else:
log.error('remove_package: bad flavor ' + flavor + '\n')
return False
return remote.run(args=pkgcmd)
def get_koji_task_result(task_id, remote, ctx):
"""
Queries kojihub and retrieves information about
the given task_id. The package, koji, must be installed
on the remote for this command to work.
We need a remote here because koji can only be installed
on rpm based machines and teuthology runs on Ubuntu.
The results of the given task are returned. For example:
{
'brootid': 3303567,
'srpms': [],
'rpms': [
'tasks/6745/9666745/kernel-4.1.0-0.rc2.git2.1.fc23.x86_64.rpm',
'tasks/6745/9666745/kernel-modules-4.1.0-0.rc2.git2.1.fc23.x86_64.rpm',
],
'logs': []
}
:param task_id: The koji task_id we want to retrieve results for.
:param remote: The remote to run the koji command on.
:param ctx: The ctx from the current run, used to provide a
failure_reason and status if the koji command fails.
:returns: A python dict containing info about the task results.
"""
py_cmd = ('import koji; '
'hub = koji.ClientSession("{kojihub_url}"); '
'print hub.getTaskResult({task_id})')
py_cmd = py_cmd.format(
task_id=task_id,
kojihub_url=config.kojihub_url
)
log.info("Querying kojihub for the result of task {0}".format(task_id))
task_result = _run_python_command(py_cmd, remote, ctx)
return task_result
def get_koji_task_rpm_info(package, task_rpms):
"""
Extracts information about a given package from the provided
rpm results of a koji task.
For example, if trying to retrieve the package 'kernel' from
the results of a task, the output would look like this:
{
'base_url': 'https://kojipkgs.fedoraproject.org/work/tasks/6745/9666745/',
'rpm_name': 'kernel-4.1.0-0.rc2.git2.1.fc23.x86_64.rpm',
'package_name': 'kernel',
'version': '4.1.0-0.rc2.git2.1.fc23.x86_64',
}
    :param task_rpms: A list of rpms from a task's results.
:param package: The name of the package to retrieve.
:returns: A python dict containing info about the package.
"""
result = dict()
result['package_name'] = package
found_pkg = _find_koji_task_result(package, task_rpms)
if not found_pkg:
raise RuntimeError("The package {pkg} was not found in: {rpms}".format(
pkg=package,
rpms=task_rpms,
))
path, rpm_name = found_pkg.rsplit("/", 1)
result['rpm_name'] = rpm_name
result['base_url'] = "{koji_task_url}/{path}/".format(
koji_task_url=config.koji_task_url,
path=path,
)
# removes the package name from the beginning of rpm_name
version = rpm_name.split("{0}-".format(package), 1)[1]
# removes .rpm from the rpm_name
version = version.split(".rpm")[0]
result['version'] = version
return result
def _find_koji_task_result(package, rpm_list):
"""
Looks in the list of rpms from koji task results to see if
the package we are looking for is present.
Returns the full list item, including the path, if found.
If not found, returns None.
"""
for rpm in rpm_list:
if package == _get_koji_task_result_package_name(rpm):
return rpm
return None
def _get_koji_task_result_package_name(path):
"""
Strips the package name from a koji rpm result.
This makes the assumption that rpm names are in the following
format: <package_name>-<version>.<release>.<arch>.rpm
    For example, a koji rpm result might look like:
tasks/6745/9666745/kernel-4.1.0-0.rc2.git2.1.fc23.x86_64.rpm
This method would return "kernel".
"""
filename = path.split('/')[-1]
trimmed = []
for part in filename.split('-'):
        # assumes the first dash-separated part that starts with a digit
        # marks the version, so everything before it is the package name
if part[0].isdigit():
return '-'.join(trimmed)
trimmed.append(part)
return '-'.join(trimmed)
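# Editor's note (hedged illustration, not part of the original module): the
# helper above stops at the first dash-separated part that starts with a
# digit, so 'kernel-4.1.0-0.rc2.git2.1.fc23.x86_64.rpm' yields 'kernel' and a
# hypothetical 'ceph-common-0.80.5-4.el7ost.x86_64.rpm' yields 'ceph-common'.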
def get_koji_build_info(build_id, remote, ctx):
"""
Queries kojihub and retrieves information about
the given build_id. The package, koji, must be installed
on the remote for this command to work.
We need a remote here because koji can only be installed
on rpm based machines and teuthology runs on Ubuntu.
Here is an example of the build info returned:
{'owner_name': 'kdreyer', 'package_name': 'ceph',
'task_id': 8534149, 'completion_ts': 1421278726.1171,
'creation_event_id': 10486804, 'creation_time': '2015-01-14 18:15:17.003134',
'epoch': None, 'nvr': 'ceph-0.80.5-4.el7ost', 'name': 'ceph',
'completion_time': '2015-01-14 18:38:46.1171', 'state': 1, 'version': '0.80.5',
'volume_name': 'DEFAULT', 'release': '4.el7ost', 'creation_ts': 1421277317.00313,
'package_id': 34590, 'id': 412677, 'volume_id': 0, 'owner_id': 2826
}
:param build_id: The koji build_id we want to retrieve info on.
:param remote: The remote to run the koji command on.
:param ctx: The ctx from the current run, used to provide a
failure_reason and status if the koji command fails.
:returns: A python dict containing info about the build.
"""
py_cmd = ('import koji; '
|