text stringlengths 4 1.02M | meta dict |
|---|---|
import os
from mock import MagicMock, patch
from .. import cfy
from .mocks import execution_mock
from .constants import BLUEPRINTS_DIR, DEFAULT_BLUEPRINT_FILE_NAME
from .test_base import CliCommandTest
from ...commands import executions
from cloudify_rest_client.exceptions import \
DeploymentEnvironmentCreationPendingError, \
DeploymentEnvironmentCreationInProgressError
class ExecutionsTest(CliCommandTest):
    """Tests for the ``cfy executions`` command group.

    Covers both manager-side commands (get/list/cancel/start against a
    mocked REST client) and local-mode execution with parameter handling.
    """

    def setUp(self):
        super(ExecutionsTest, self).setUp()
        # Most tests here run against a (mocked) manager profile.
        self.use_manager()

    def test_executions_get(self):
        execution = execution_mock('terminated')
        self.client.executions.get = MagicMock(return_value=execution)
        self.invoke('cfy executions get execution-id')

    def test_executions_list(self):
        self.client.executions.list = MagicMock(return_value=[])
        self.invoke('cfy executions list -d deployment-id')
        self.invoke('cfy executions list -t dummy_tenant')

    def test_executions_cancel(self):
        self.client.executions.cancel = MagicMock()
        self.invoke('cfy executions cancel e_id')

    @patch('cloudify_cli.commands.executions.get_events_logger')
    def test_executions_start_json(self, get_events_logger_mock):
        """--json-output must request the JSON events logger."""
        execution = execution_mock('started')
        original_client_execution_start = self.client.executions.start
        original_wait_for_executions = executions.wait_for_execution
        try:
            self.client.executions.start = MagicMock(return_value=execution)
            executions.wait_for_execution = MagicMock(return_value=execution)
            self.invoke('cfy executions start mock_wf -d dep --json-output')
            get_events_logger_mock.assert_called_with(True)
        finally:
            # Restore patched module attributes so other tests see the
            # originals.
            self.client.executions.start = original_client_execution_start
            executions.wait_for_execution = original_wait_for_executions

    def test_executions_start_dep_env_pending(self):
        self._test_executions_start_dep_env(
            ex=DeploymentEnvironmentCreationPendingError('m'))

    def test_executions_start_dep_env_in_progress(self):
        self._test_executions_start_dep_env(
            ex=DeploymentEnvironmentCreationInProgressError('m'))

    def test_executions_start_dep_other_ex_sanity(self):
        # Any other exception type must NOT trigger the dep-env retry path.
        try:
            self._test_executions_start_dep_env(ex=RuntimeError)
        except cfy.ClickInvocationException as e:
            self.assertEqual(str(RuntimeError), e.exception)

    def _test_executions_start_dep_env(self, ex):
        """Simulate a dep-env creation error on the first start() call.

        The CLI should wait for the 'create_deployment_environment'
        workflow first, then retry and wait for the requested workflow.
        """
        start_mock = MagicMock(side_effect=[ex, execution_mock('started')])
        self.client.executions.start = start_mock
        list_mock = MagicMock(return_value=[
            execution_mock('terminated', 'create_deployment_environment')])
        self.client.executions.list = list_mock
        wait_for_mock = MagicMock(return_value=execution_mock('terminated'))
        original_wait_for = executions.wait_for_execution
        try:
            executions.wait_for_execution = wait_for_mock
            self.invoke('cfy executions start mock_wf -d dep')
            # First wait is for the dep-env workflow, second for ours.
            self.assertEqual(wait_for_mock.mock_calls[0][1][1].workflow_id,
                             'create_deployment_environment')
            self.assertEqual(wait_for_mock.mock_calls[1][1][1].workflow_id,
                             'mock_wf')
        finally:
            executions.wait_for_execution = original_wait_for

    def test_local_execution_default_param(self):
        self._init_local_env()
        # Before any execution the output should be the JSON null.
        self._assert_outputs({'param': 'null'})
        self.invoke('cfy executions start {0}'.format('run_test_op_on_nodes'))
        self._assert_outputs({'param': 'default_param'})

    def test_local_execution_custom_param_value(self):
        self._init_local_env()
        self.invoke('cfy executions start {0} -p param=custom_value'.format(
            'run_test_op_on_nodes')
        )
        self._assert_outputs({'param': 'custom_value'})

    def test_local_execution_allow_custom_params(self):
        self._init_local_env()
        self.invoke('cfy executions start {0} '
                    '-p custom_param=custom_value --allow-custom-parameters'
                    ''.format('run_test_op_on_nodes')
        )
        self._assert_outputs(
            {'param': 'default_param', 'custom_param': 'custom_value'}
        )

    def test_local_execution_dont_allow_custom_params(self):
        self._init_local_env()
        self.invoke(
            'cfy executions start {0} -p custom_param=custom_value'.format(
                'run_test_op_on_nodes'
            ),
            err_str_segment='Workflow "run_test_op_on_nodes" does not '
                            'have the following parameters declared: '
                            'custom_param',
            exception=ValueError
        )

    def _assert_outputs(self, expected_outputs):
        """Assert each key/value pair appears in `cfy deployments outputs`."""
        output = self.invoke('cfy deployments outputs').logs.split('\n')
        # items() works on both Python 2 and 3 (iteritems() is Py2-only).
        for key, value in expected_outputs.items():
            if value == 'null':
                key_val_string = ' "{0}": {1}, '.format(key, value)
            else:
                key_val_string = ' "{0}": "{1}", '.format(key, value)
            self.assertIn(key_val_string, output)

    def _init_local_env(self):
        """Initialize a local CLI environment from the test blueprint."""
        blueprint_path = os.path.join(
            BLUEPRINTS_DIR,
            'local',
            DEFAULT_BLUEPRINT_FILE_NAME
        )
        self.invoke('cfy init {0}'.format(blueprint_path))
cfy.register_commands()
| {
"content_hash": "25682063047d12a4b73459f7197b695e",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 78,
"avg_line_length": 40.681481481481484,
"alnum_prop": 0.6260014566642389,
"repo_name": "isaac-s/cloudify-cli",
"id": "aea409123a2bfa5af1224c531b1372b970ef301e",
"size": "5492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudify_cli/tests/commands/test_executions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6346"
},
{
"name": "Inno Setup",
"bytes": "4977"
},
{
"name": "Makefile",
"bytes": "4178"
},
{
"name": "PowerShell",
"bytes": "8677"
},
{
"name": "Python",
"bytes": "1991534"
},
{
"name": "Ruby",
"bytes": "29400"
},
{
"name": "Shell",
"bytes": "11075"
}
],
"symlink_target": ""
} |
from .prepare import make_train_test
import os
import tempfile
import scipy.io as sio
from hashlib import sha256
try:
import urllib.request as urllib_request # for Python 3
except ImportError:
import urllib2 as urllib_request # for Python 2
# Download URLs and expected SHA-256 checksums for the ChEMBL sample
# files, keyed by the local filename used by load_one().
urls = {
    "chembl-IC50-346targets.mm" :
    (
        "http://homes.esat.kuleuven.be/~jsimm/chembl-IC50-346targets.mm",
        "10c3e1f989a7a415a585a175ed59eeaa33eff66272d47580374f26342cddaa88",
    ),
    "chembl-IC50-compound-feat.mm" :
    (
        "http://homes.esat.kuleuven.be/~jsimm/chembl-IC50-compound-feat.mm",
        "f9fe0d296272ef26872409be6991200dbf4884b0cf6c96af8892abfd2b55e3bc",
    ),
}
def load_one(filename):
    """Download *filename*, verify its SHA-256 checksum and parse it.

    :param filename: key into the module-level ``urls`` table
    :returns: the matrix parsed by ``scipy.io.mmread``
    :raises AssertionError: if the downloaded file's checksum does not
        match the expected value
    """
    (url, expected_sha) = urls[filename]
    with tempfile.TemporaryDirectory() as tmpdirname:
        output = os.path.join(tmpdirname, filename)
        urllib_request.urlretrieve(url, output)
        # Use a context manager so the file handle is closed promptly
        # instead of being leaked until garbage collection.
        with open(output, "rb") as downloaded:
            actual_sha = sha256(downloaded.read()).hexdigest()
        assert actual_sha == expected_sha
        matrix = sio.mmread(output)
    return matrix
def load_chembl():
    """Downloads a small subset of the ChEMBL dataset.

    Returns
    -------
    ic50_train: sparse matrix
        sparse train matrix
    ic50_test: sparse matrix
        sparse test matrix
    feat: sparse matrix
        sparse row features
    """
    # Fetch the bioactivity matrix and the compound features.
    bioactivity = load_one("chembl-IC50-346targets.mm")
    features = load_one("chembl-IC50-compound-feat.mm")
    # Hold out 20% of the bioactivity data as the test split.
    train, test = make_train_test(bioactivity, 0.2)
    return (train, test, features)
| {
"content_hash": "16f744dfdbc4b00c8dad51d9d5712f2a",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 80,
"avg_line_length": 26.822580645161292,
"alnum_prop": 0.6458208057727,
"repo_name": "ExaScience/smurff",
"id": "75df3bdbb199a7bcc281a52dd191464560de1d6f",
"size": "1663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/smurff/datasets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1229"
},
{
"name": "C",
"bytes": "65340"
},
{
"name": "C++",
"bytes": "1032297"
},
{
"name": "CMake",
"bytes": "42024"
},
{
"name": "Dockerfile",
"bytes": "3753"
},
{
"name": "Jupyter Notebook",
"bytes": "266445"
},
{
"name": "Makefile",
"bytes": "369"
},
{
"name": "Objective-C++",
"bytes": "214673"
},
{
"name": "Perl",
"bytes": "1745"
},
{
"name": "PowerShell",
"bytes": "2577"
},
{
"name": "Python",
"bytes": "149232"
},
{
"name": "Ruby",
"bytes": "675"
},
{
"name": "Shell",
"bytes": "5918"
}
],
"symlink_target": ""
} |
from setuptools import setup
# Delegate all packaging metadata to pbr, which reads setup.cfg and the
# git history instead of hard-coding it here.
setup(
    setup_requires=['pbr>=1.9', 'setuptools>=17.1'],
    pbr=True)
| {
"content_hash": "f06958ec28336512dbd24075c680b804",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 52,
"avg_line_length": 17.5,
"alnum_prop": 0.6476190476190476,
"repo_name": "stand1921/python-huaweivbsclient",
"id": "64cb1dae715bab99c354b93008b4acc794d36f3a",
"size": "105",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12133"
}
],
"symlink_target": ""
} |
'''
cgat_check_deps.py - check whether the software dependencies are on your PATH
=============================================================================
Purpose
-------
.. The goal of this script is to provide a list of third-party command-line
programs used in a Python script given as input, and check whether
they are on your PATH. This is useful to identify dependencies across all
CGAT pipelines and module files.
This script takes the path to a Python script, which is expected to call
command-line programs like we do in CGAT pipelines, i.e.:
statement = """cmd-1 | cmd-2 | cmd-3"""
P.run()
Programs called in other ways (e.g. using subprocess) will not be picked up
by this script.
Usage
-----
.. python cgat_check_deps --pipeline </path/to/pipeline_name.py> [--print-summary]
Example::
python cgat_check_deps --pipeline CGATPipelines/pipeline_annotations.py
Type::
cgat cgat_check_deps --help
for command line help.
Command line options
--------------------
'''
import os
import sys
import re
import ast
import CGAT.Experiment as E
import CGAT.IOTools as IOTools
def checkDepedencies(pipeline):
    """Scan *pipeline* for command-line programs and check the PATH.

    Parses the script's AST looking for assignments to a variable named
    ``statement`` (the CGAT pipeline convention), extracts the program
    name of every piped command, and checks each one with IOTools.which.

    :param pipeline: path to a Python pipeline script
    :returns: tuple (deps, check_path_failures) where deps maps program
        name -> call count and check_path_failures lists programs
        missing from the PATH
    :raises IOError: if the path is unreadable or is a directory
    """
    # check existence of pipeline script
    if not os.access(pipeline, os.R_OK):
        raise IOError("Pipeline %s was not found\n" % pipeline)

    if os.path.isdir(pipeline):
        raise IOError("The given input is a folder, and must be a script\n")

    # parse pipeline script
    with open(pipeline) as f:
        tree = ast.parse(f.read())

    # list to store all statements = ''' <commands> '''
    statements = []
    # inspired by
    # https://docs.python.org/3/library/ast.html#module-ast
    # http://bit.ly/2rDf5xu
    # http://bit.ly/2r0Uv9t
    for node in ast.walk(tree):
        # isinstance is the idiomatic type test; the original used
        # `type(node) is ast.Assign`, which is equivalent here.
        if isinstance(node, ast.Assign) and \
                hasattr(node, 'targets') and \
                hasattr(node.targets[0], 'id') and \
                node.targets[0].id == "statement" and \
                hasattr(node.value, 's'):
            statement = node.value.s
            # clean up statement, code copied from Execution module of
            # Pipeline.py (raw strings avoid Py3 invalid-escape warnings)
            statement = " ".join(re.sub(r"\t+", " ", statement).split("\n")).strip()
            if statement.endswith(";"):
                statement = statement[:-1]
            statements.append(statement)

    # dictionary where:
    # key = program name
    # value = number of times it has been called
    deps = {}

    # set of names that are not proper deps
    exceptions = ['create',
                  'drop',
                  'select',
                  'attach',
                  'insert',
                  'module',
                  'checkpoint',
                  'for']

    for statement in statements:
        for command in statement.split("|"):
            # take the first token as the program name; note the original
            # character class contained a stray literal '|' which can
            # never appear here because we already split on '|'
            groups = re.match(r"^\s*([\w\-.]+)", command)
            if groups is not None:
                # program name is first match
                prog_name = groups.group(0)
                # clean up duplicated white spaces
                prog_name = ' '.join(prog_name.split())
                # filter exceptions
                if prog_name.lower() not in exceptions:
                    if prog_name not in deps:
                        deps[prog_name] = 1
                    else:
                        deps[prog_name] += 1

    # list of unmet dependencies
    check_path_failures = []

    # collect programs missing from the PATH, most-used first
    for k in sorted(deps, key=deps.get, reverse=True):
        if IOTools.which(k) is None:
            check_path_failures.append(k)

    return deps, check_path_failures
def main(argv=None):
    """script main.

    parses command line options in sys.argv, unless *argv* is given.
    """

    if argv is None:
        argv = sys.argv

    # setup command line parser
    parser = E.OptionParser(version="%prog version: $Id$",
                            usage=globals()["__doc__"])

    parser.add_option("-p", "--pipeline", dest="pipeline", type="string",
                      help="Path to pipeline script")

    parser.add_option("-s", "--print-summary", dest="summary", action="store_true", default=False,
                      help="Print how many times a program is used [default=%default]")

    # add common options (-h/--help, ...) and parse command line
    (options, args) = E.Start(parser, argv=argv)

    # get dependencies of the pipeline script
    deps, check_path_failures = checkDepedencies(options.pipeline)

    # print info about dependencies
    if len(deps) == 0:
        print('\nNo dependencies found.\n')
    else:
        # print dictionary ordered by value
        if options.summary:
            for k in sorted(deps, key=deps.get, reverse=True):
                print('\nProgram: {0!s} used {1} time(s)'.format(k, deps[k]))
        n_failures = len(check_path_failures)
        if n_failures == 0:
            print('\nCongratulations! All required programs are available on your PATH\n')
        else:
            print('\nThe following programs are not on your PATH')
            for p in check_path_failures:
                print('\n{0!s}'.format(p))
            # blank line; the original bare Py2 `print` statement is a
            # no-op expression under Python 3, so use the call form
            print('')

    # write footer and output benchmark information.
    E.Stop()
# Allow the script to be executed directly from the command line.
if __name__ == "__main__":
    sys.exit(main(sys.argv))
| {
"content_hash": "e59eab79becbc818c279eab367237a2f",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 98,
"avg_line_length": 30.651162790697676,
"alnum_prop": 0.575113808801214,
"repo_name": "AntonioJBT/CGATPipeline_core",
"id": "d578da0f96c9f5aa4b0438e559714cbe0a4c2e6f",
"size": "5272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/cgat_check_deps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7463"
},
{
"name": "JavaScript",
"bytes": "285056"
},
{
"name": "Python",
"bytes": "394936"
},
{
"name": "Shell",
"bytes": "37635"
}
],
"symlink_target": ""
} |
import errno
import os
from lxml import etree
def _indent(element, indent_char='\t', level=0):
    """Recursively rewrite text/tail whitespace so *element* serializes
    with one child per line, indented by *level* repetitions of
    *indent_char*.

    Existing non-whitespace text and tail content is left untouched.
    Mutates the tree in place.
    """
    indent_text = '\n' + level * indent_char
    if len(element):
        if not element.text or not element.text.strip():
            element.text = indent_text + indent_char
        if not element.tail or not element.tail.strip():
            element.tail = indent_text
        # The original reused the name `element` for the loop variable,
        # shadowing the parameter; `child` keeps the two roles distinct.
        for child in element:
            _indent(child, indent_char, level + 1)
        # After the loop, the LAST child's tail must step back out to
        # this element's indentation level (not the children's level).
        if not child.tail or not child.tail.strip():
            child.tail = indent_text
    elif level and (not element.tail or not element.tail.strip()):
        element.tail = indent_text
def builds_strings_tree(resources):
    """Build an lxml ElementTree in Android strings.xml layout.

    Emits <string>, <string-array> and <plurals> elements for every
    entry of *resources* that has at least one non-empty translation;
    entries with no text at all are skipped entirely.

    :param resources: object exposing sorted_strings, sorted_arrays and
        sorted_plurals
    :returns: an indented ``etree.ElementTree`` rooted at <resources>
    """
    root = etree.Element('resources')

    for string in resources.sorted_strings:
        if not string.text:
            continue
        xml_string = etree.SubElement(root, 'string', name=string.name)
        xml_string.text = string.text

    for array in resources.sorted_arrays:
        # any() replaces the original manual has_text flag loop
        if not any(item.text for item in array):
            continue
        string_array = etree.SubElement(root, 'string-array', name=array.name)
        for item in array:
            etree.SubElement(string_array, 'item').text = item.text

    for plural in resources.sorted_plurals:
        if not any(item.text for item in plural.sorted_items):
            continue
        plurals = etree.SubElement(root, 'plurals', name=plural.name)
        for item in plural.sorted_items:
            xml_item = etree.SubElement(plurals, 'item', quantity=item.quantity)
            xml_item.text = item.text

    _indent(root)
    return etree.ElementTree(root)
def get_strings_text(resources):
    """Serialize *resources* to strings.xml content as UTF-8 bytes."""
    return etree.tostring(builds_strings_tree(resources),
                          pretty_print=True,
                          xml_declaration=True,
                          encoding='utf-8',
                          with_tail=False)
def write_strings_file(directory, resources):
    """Write *resources* to a strings.xml file inside *directory*."""
    target = os.path.join(directory, 'strings.xml')
    builds_strings_tree(resources).write(target,
                                         pretty_print=True,
                                         xml_declaration=True,
                                         encoding='utf-8',
                                         with_tail=False)
def _make_dir(path):
    """Create *path* (and parents), tolerating an already-existing dir."""
    try:
        os.makedirs(path)
    except OSError as error:
        # A pre-existing directory is fine; anything else propagates.
        if error.errno != errno.EEXIST:
            raise
def write_strings_to_directory(strings_by_language, target_dir):
    """Write one values-<lang>/strings.xml per language under *target_dir*."""
    _make_dir(target_dir)
    for language in strings_by_language.languages():
        language_dir = os.path.join(target_dir, 'values-' + language)
        _make_dir(language_dir)
        write_strings_file(language_dir, strings_by_language[language])
| {
"content_hash": "29c5452e92b05b2d67f1ccd223075367",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 80,
"avg_line_length": 29.612244897959183,
"alnum_prop": 0.5868366643694004,
"repo_name": "Tunous/StringSheet",
"id": "43b87666efb027245d7cc3a0d2e1fa619d7d84d2",
"size": "2902",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stringsheet/writer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "66510"
}
],
"symlink_target": ""
} |
"""
AngularJS, Django, and jQuery file-upload app.
Sandeep Jadoonanan
"""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
# URL routes: the Django admin plus the multiupload app mounted at the
# site root under the "multi" namespace.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'portal.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r"", include("multiupload.urls", namespace = "multi"))
)
# Serve uploaded media through Django itself, but only in development
# (in production the web server should serve MEDIA_ROOT directly).
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
    )
| {
"content_hash": "e96e423c2822a00f7e2a028047435758",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 123,
"avg_line_length": 28.818181818181817,
"alnum_prop": 0.6451104100946372,
"repo_name": "TunedMystic/file-upload-django-angular",
"id": "8bf3829e1b779fbaf4569640748149fc32472d05",
"size": "634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "portal/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5525"
},
{
"name": "JavaScript",
"bytes": "166788"
},
{
"name": "Python",
"bytes": "10643"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from errno import EEXIST
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
__all__ = ['unfrackpath', 'makedirs_safe']
def unfrackpath(path, follow=True):
    '''
    Returns a path that is free of symlinks (if follow=True), environment variables, relative path traversals and symbols (~)

    :arg path: A byte or text string representing a path to be canonicalized
    :arg follow: Whether to resolve symlinks via os.path.realpath
    :raises UnicodeDecodeError: If the canonicalized version of the path
        contains non-utf8 byte sequences.
    :rtype: A text string (unicode on python2, str on python3).
    :returns: An absolute path with symlinks, environment variables, and tilde
        expanded.  Note that this does not check whether a path exists.

    example::
        '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''
    b_path = to_bytes(path, errors='surrogate_or_strict')
    # expand '~' and environment variables before normalizing; this common
    # step was previously duplicated in both branches
    expanded = os.path.expanduser(os.path.expandvars(b_path))
    if follow:
        # realpath resolves symlinks in addition to making the path absolute
        final_path = os.path.normpath(os.path.realpath(expanded))
    else:
        final_path = os.path.normpath(os.path.abspath(expanded))
    return to_text(final_path, errors='surrogate_or_strict')
def makedirs_safe(path, mode=None):
    '''Safe way to create dirs in multiprocess/thread environments.

    :arg path: A byte or text string representing a directory to be created
    :kwarg mode: If given, the mode to set the directory to
    :raises AnsibleError: If the directory cannot be created and does not already exist.
    :raises UnicodeDecodeError: if the path is not decodable in the utf-8 encoding.
    '''

    rpath = unfrackpath(path)
    b_rpath = to_bytes(rpath)
    if not os.path.exists(b_rpath):
        try:
            # test against None (not truthiness) so an explicit mode=0 is
            # honoured instead of being silently ignored
            if mode is not None:
                os.makedirs(b_rpath, mode)
            else:
                os.makedirs(b_rpath)
        except OSError as e:
            # EEXIST means another process/thread created it first: fine
            if e.errno != EEXIST:
                raise AnsibleError("Unable to create local directories(%s): %s" % (to_native(rpath), to_native(e)))
| {
"content_hash": "f6e4e660c84031f501d3bf8f12b6e0c2",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 141,
"avg_line_length": 39.611111111111114,
"alnum_prop": 0.6774193548387096,
"repo_name": "abtreece/ansible",
"id": "cd74877124fb51cb3548a030c14c9595be616fcb",
"size": "2852",
"binary": false,
"copies": "2",
"ref": "refs/heads/stable-2.2",
"path": "lib/ansible/utils/path.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import os
import unittest
from PwdsTestLib import PwdsBaseTest
class PwdsBasicTest( PwdsBaseTest ):
    """Basic end-to-end tests for the pwds command-line tool."""

    def setUp( self ):
        super( PwdsBasicTest, self ).setUp()
        # create a fresh password safe for each test
        self.initSafe()

    def tearDown( self ):
        # remove the safe file so tests stay isolated from each other
        os.remove( self.safeFile )

    def test_second_run( self ):
        out = self.runPwds( 'show --raw' )
        self.assertLines( out, [ r'\[\]' ] )

    def test_no_args( self ):
        out = self.runPwds( '', passwordPrompts=[] )
        self.assertLines( out, [ 'usage' ] )

    def test_add( self ):
        # print() call form works on both Python 2 and 3; the original
        # `print 'thing'` statement is a SyntaxError under Python 3
        print( 'thing' )
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "9d13840bbec19c8b9dc75932602ff2fa",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 51,
"avg_line_length": 23.153846153846153,
"alnum_prop": 0.5730897009966778,
"repo_name": "tsiemens/pwds",
"id": "f0f77e5b7d498e5567d9cbde6ca8387a92811216",
"size": "625",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/PwdsTests.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17612"
}
],
"symlink_target": ""
} |
import os
from config import Config
class LogConfig(Config):
    """Singleton Config subclass supplying the logging settings."""

    def __new__(cls, *args, **kwargs):
        # Cache a single shared instance on the class and hand it back
        # for every subsequent construction (classic singleton).
        if not hasattr(cls, "__instance__"):
            instance = super(LogConfig, cls).__new__(cls, *args, **kwargs)
            cls.__instance__ = instance
        return cls.__instance__

    def __init__(self):
        super(LogConfig, self).__init__(self.__defaults())

    def __defaults(self):
        # Built-in settings; "level" controls the log verbosity.
        return {"level": "DEBUG"}
if __name__ == '__main__':
    # Smoke test: build the singleton and show the default level.
    # print() call form works on both Python 2 and 3; the original
    # Py2 print statement is a SyntaxError under Python 3.
    conf = LogConfig()
    print(conf.get("level"))
| {
"content_hash": "35188cbb9d5bc07eb129922db3dea6ba",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 82,
"avg_line_length": 25.6,
"alnum_prop": 0.513671875,
"repo_name": "dev1x-org/python-example",
"id": "1c36b6aee039d1b54c3fdb9734ea58bac7f4db12",
"size": "526",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/config/log.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2501"
},
{
"name": "Python",
"bytes": "46927"
},
{
"name": "Shell",
"bytes": "91"
}
],
"symlink_target": ""
} |
"""
:Author: Adam Ginsburg <adam.g.ginsburg@gmail.com> and Jordan Mirocha <mirochaj@gmail.com>
.. moduleauthor:: Adam Ginsburg <adam.g.ginsburg@gmail.com>
"""
# Public API of the pyspeckit package.
__all__ = ['spectrum','cubes','wrappers']

from __version__ import __version__

import spectrum
import specwarnings
# cubes depends on pywcs; degrade gracefully with a warning when it is
# not installed.
try:
    import cubes
except ImportError:
    specwarnings.warn( "pyspeckit.cubes module not imported - cubes requires pywcs" )
import wrappers
from wrappers import *
# NOTE(review): if the `import cubes` above failed, this wildcard import
# will still raise ImportError -- confirm whether that is intended.
from cubes import *
from spectrum import *
| {
"content_hash": "88a946c9f3f116a5d6ecaa2da6799d21",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 90,
"avg_line_length": 30.1875,
"alnum_prop": 0.7391304347826086,
"repo_name": "keflavich/pyspeckit-obsolete",
"id": "a74e4f7cefde9f67b26c8dd3053b66abbd9ccbbe",
"size": "483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyspeckit/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3626"
},
{
"name": "Python",
"bytes": "1133508"
},
{
"name": "TeX",
"bytes": "1170"
}
],
"symlink_target": ""
} |
from concurrence import TIMEOUT_CURRENT
from _io import Buffer, BufferOverflowError, BufferUnderflowError, BufferInvalidArgumentError, get_errno
class IOStream(object):
    """abstract class to indicate that something is a stream and capable
    of the basic read and write ops on a buffer"""
    # NOTE: both methods are documentation-only stubs (their bodies are
    # just docstrings); concrete streams override them.
    def write(self, buffer, timeout = TIMEOUT_CURRENT, assume_writable = True):
        """should write available bytes from the buffer into stream and return
        the number of bytes written (could be less than available), or 0 on EOF
        or raise error, or timeout"""

    def read(self, buffer, timeout = TIMEOUT_CURRENT, assume_readable = True):
        """should read from the stream into buffer and return number of bytes read, or 0 on EOF
        or raise error, or timeout"""
        pass
from concurrence.io.socket import Socket, SocketServer
from concurrence.io.buffered import BufferedReader, BufferedWriter, BufferedStream
#TODO what if more arguments are needed for connect?, eg. passwords etc?
class Connector(object):
    """connector class for connection oriented IO (TCP), prevents the need for client protocol libraries to hardcode a
    particular way to achieve a connection (e.g. no need to explicitly reference sockets"""

    @classmethod
    def connect(cls, endpoint):
        """Connect to *endpoint* and return the resulting stream.

        :param endpoint: either another Connector (not yet supported) or
            a socket address understood by Socket.connect
        """
        if isinstance(endpoint, Connector):
            assert False, "TODO"
        else:
            # default is to connect to Socket and endpoint is address
            # (the unused `Timeout` import from concurrence.timer was
            # removed; TIMEOUT_CURRENT is all that is needed here)
            from concurrence.io.socket import Socket
            return Socket.connect(endpoint, TIMEOUT_CURRENT)
class Server(object):
    """server class for connection oriented IO (TCP); keeps server protocol
    libraries from hardcoding one particular way to serve a connection
    (no need to explicitly reference server sockets)"""

    @classmethod
    def serve(cls, endpoint, handler):
        """Start serving *handler* on *endpoint*; returns the server object."""
        if isinstance(endpoint, Server):
            assert False, "TODO"
        else:
            # default: serve through a SocketServer bound to the address
            from concurrence.io.socket import SocketServer
            server = SocketServer(endpoint, handler)
            server.serve()
            return server
| {
"content_hash": "9e3d82cb2b22003de5a5abd72a92fe73",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 119,
"avg_line_length": 48.148936170212764,
"alnum_prop": 0.7034909412284578,
"repo_name": "toymachine/concurrence",
"id": "687f2f3ce5ecffd760c88c3fca71d8d38daefe98",
"size": "2460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/concurrence/io/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "42241"
},
{
"name": "JavaScript",
"bytes": "19227"
},
{
"name": "Python",
"bytes": "408521"
},
{
"name": "Shell",
"bytes": "45"
}
],
"symlink_target": ""
} |
import mock
import unittest
from uuid import uuid4
from django.core.urlresolvers import reverse
from django.core import mail
from django.db import IntegrityError
from django.http import HttpResponse
from django.test import TestCase, Client
from django.test.client import RequestFactory
from django.contrib.auth.models import User, AnonymousUser
from django.conf import settings
from importlib import import_module
from social_core.exceptions import (AuthAlreadyAssociated,
AuthException,
InvalidEmail, AuthMissingParameter)
from accounts.models import Instructor
from psa.custom_django_storage import CustomCode
from psa.views import (context,
validation_sent,
custom_login,
done)
from psa.pipeline import (social_user,
not_allowed_to_merge,
associate_user,
associate_by_email,
social_merge,
union_merge,
validated_user_details,
custom_mail_validation)
from psa.mail import send_validation
from psa.custom_backends import EmailAuth
class ViewsUnitTest(TestCase):
    """
    Functional tests for the psa views (login, validation, done, etc.).
    """
    def setUp(self):
        # Build a real session store so the request/client share a valid
        # session cookie.
        engine = import_module(settings.SESSION_ENGINE)
        store = engine.SessionStore()
        store.save()  # we need to make load() work, or the cookie is worthless
        self.factory = RequestFactory()
        self.request = self.factory.get('/login/')
        self.client = Client()
        self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key
        self.client.get('/login/')
        # Attach the client's session to the factory-built request.
        self.request.session = self.client.session
        self.request.session['email_validation_address'] = 'test@test.com'

    def test_context(self):
        """
        Needs to return the available_backends variable.
        """
        result = context()
        self.assertTrue('available_backends' in result)

    def test_validation_sent(self):
        # The page should echo the address stored in the session.
        anonymous = AnonymousUser()
        self.request.user = anonymous
        self.request.current_page = None
        response = validation_sent(self.request)
        self.assertIsInstance(response, HttpResponse)
        self.assertTrue('test@test.com' in response.content)

    def test_custom_login_get(self):
        self.request.current_page = None
        anonymous = AnonymousUser()
        self.request.user = anonymous
        response = custom_login(self.request)
        self.assertIsInstance(response, HttpResponse)
        self.assertTrue('LoginForm' in response.content)

    def test_custom_login_post_negative(self):
        # Unknown credentials must fall back to the login template.
        self.client = Client()
        credentials = {'username': 'test',
                       'password': 'test'}
        response = self.client.post('/login/', data=credentials)
        self.assertTemplateUsed(
            response,
            template_name='psa/custom_login.html'
        )

    def test_custom_login_post_positive(self):
        user = User(username='test')
        user.set_password('test')
        user.save()
        self.client = Client()
        credentials = {'username': 'test',
                       'password': 'test'}
        response = self.client.post('/login/', data=credentials, follow=True)
        self.assertRedirects(response, expected_url=reverse('ct:home'))
        self.assertTemplateUsed(response, template_name='ct/index.html')

    def test_login_by_email(self):
        # Email-only login sends a validation mail and redirects.
        user = User(username='test', email='test@test.cc')
        user.set_password('test')
        user.save()
        self.client = Client()
        credentials = {'email': 'test@test.cc'}
        response = self.client.post(reverse('complete') + '?next=/ct/', data=credentials, follow=True)
        self.assertRedirects(response, reverse('ctms:email_sent'))
        self.assertTemplateUsed(response, 'ctms/email_sent.html')
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Confirm your email on Courselets')

    @unittest.skip("skip unless fixed")
    def test_new_login_with_u_hash_in_session(self):
        """
        If user send request with u_hash in post equals to u_hash in session
        user will be redirected to ctms:shared_courses page
        """
        user = User(username='test', email='test@aa.cc')
        user.set_password('test')
        user.save()
        Instructor.objects.create(user=user, institution='sdfsdf')
        u_hash = uuid4().hex
        self.client.session['u_hash'] = u_hash
        self.client.session.save()
        credentials = {
            'email': 'test@aa.cc',
            'password': 'test',
            'u_hash': u_hash
        }
        response = self.client.post(reverse('new_login'), data=credentials, follow=True)
        self.assertRedirects(response, reverse('ctms:shared_courses'))

    @unittest.skip("skip unless fixed")
    def test_new_login_without_u_hash_in_session(self):
        """
        If the user sends a request without a u_hash, the user is
        redirected to the regular /ct/ landing page instead.
        """
        user = User(username='test', email='test@aa.cc')
        user.set_password('test')
        user.save()
        Instructor.objects.create(user=user, institution='sdfsdf')
        self.client = Client()
        credentials = {
            'email': 'test@aa.cc',
            'password': 'test',
        }
        response = self.client.post(reverse('new_login'), data=credentials, follow=True)
        self.assertRedirects(response, '/ct/')

    def test_done(self):
        user = User(username='test_user')
        user.set_password('test')
        user.save()
        self.request.user = user
        self.request.current_page = None
        response = done(self.request)
        self.assertIsInstance(response, HttpResponse)
        self.assertTrue('test_user' in response.content)

    def test_ask_stranger(self):
        user = User(username='test')
        user.set_password('test')
        user.save()
        self.client = Client()
        credentials = {'username': 'test',
                       'password': 'test'}
        self.client.post('/login/', data=credentials, follow=True)
        response = self.client.get('/tmp-email-ask/')
        self.assertIsInstance(response, HttpResponse)
        self.assertTrue('email-required-modal' in response.content)

    def test_set_pass_false(self):
        # GET on /set-pass/ (no form data) should report an error.
        user = User(username='test')
        user.set_password('test')
        user.save()
        self.client = Client()
        credentials = {'username': 'test',
                       'password': 'test'}
        self.client.post('/login/', data=credentials, follow=True)
        response = self.client.get('/set-pass/')
        self.assertTrue('Something goes wrong' in response.content)
        self.assertTemplateUsed(response, template_name='ct/person.html')

    def test_set_pass_true(self):
        # A matching pass/confirm pair must actually change the password.
        user = User(username='test')
        user.set_password('test')
        user.save()
        self.client = Client()
        credentials_for_login = {'username': 'test',
                                 'password': 'test'}
        credentials_for_set_pass = {'pass': 'test2',
                                    'confirm': 'test2'}
        self.client.post('/login/',
                         data=credentials_for_login,
                         follow=True)
        response = self.client.post(
            '/set-pass/',
            data=credentials_for_set_pass,
            follow=True
        )
        self.assertTemplateUsed(response, template_name='ct/person.html')
        self.assertTrue('Your password was changed' in response.content)
        user = User.objects.get(username='test')
        self.assertTrue(user.check_password('test2'))
class TestSocialUser(TestCase):
    """
    Test for social_user pipeline.
    """
    def setUp(self):
        # Mocked queryset whose .exists() is False -> regular users.
        self.exists = mock.Mock()
        self.exists.exists.return_value = False
        self.user = mock.Mock()
        self.user.groups.filter.return_value = self.exists
        self.main_user = mock.Mock()
        self.main_user.groups.filter.return_value = self.exists
        # Mocked social-auth record owned by self.user.
        self.social = mock.Mock()
        self.social.user = self.user
        # Queryset whose .exists() is True marks an anonymous-group user.
        self.exists_anonym = mock.Mock()
        self.exists_anonym.exists.return_value = True
        self.anonymous = mock.Mock()
        self.anonymous.groups.filter.return_value = self.exists_anonym
        # Backend attrs so strategy lookup returns the mocked social record.
        self.attrs = {'strategy.storage.user.get_social_auth.return_value': self.social}

    def test_no_social(self):
        # No stored social auth -> treated as a brand new user.
        self.attrs = {'strategy.storage.user.get_social_auth.return_value': []}
        backend = mock.Mock(**self.attrs)
        backend.name = 'google-oauth2'
        uid = 'test@test.com'
        res = social_user(backend, uid)
        self.assertEqual(res['user'], None)
        self.assertEqual(res['social'], [])
        self.assertEqual(res['is_new'], True)
        self.assertEqual(res['new_association'], False)

    def test_social_stranger(self):
        # Existing social record, no logged-in user -> its owner is used.
        backend = mock.Mock(**self.attrs)
        backend.name = 'google-oauth2'
        uid = 'test@test.com'
        res = social_user(backend, uid)
        self.assertEqual(res['user'], self.user)
        self.assertEqual(res['social'], self.social)
        self.assertEqual(res['is_new'], False)
        self.assertEqual(res['new_association'], False)

    def test_social_validated(self):
        # Logged-in user allowed to merge keeps their own identity.
        backend = mock.Mock(**self.attrs)
        backend.name = 'google-oauth2'
        uid = 'test@test.com'
        with mock.patch('psa.pipeline.not_allowed_to_merge') as mocked:
            mocked.return_value = False
            res = social_user(backend, uid, user=self.main_user)
        self.assertEqual(res['user'], self.main_user)
        self.assertEqual(res['social'], self.social)
        self.assertEqual(res['is_new'], False)
        self.assertEqual(res['new_association'], False)

    def test_social_validated_not_allowed(self):
        # Merge forbidden -> the pipeline must raise.
        backend = mock.Mock(**self.attrs)
        backend.name = 'google-oauth2'
        uid = 'test@test.com'
        with mock.patch('psa.pipeline.not_allowed_to_merge') as mocked:
            mocked.return_value = True
            with self.assertRaises(AuthAlreadyAssociated):
                social_user(backend, uid, user=self.main_user)

    def test_social_anonymous(self):
        # An anonymous-group user is replaced by the social record's owner.
        backend = mock.Mock(**self.attrs)
        backend.name = 'google-oauth2'
        uid = 'test@test.com'
        res = social_user(backend, uid, user=self.anonymous)
        self.assertEqual(res['user'], self.user)
        self.assertEqual(res['social'], self.social)
        self.assertEqual(res['is_new'], False)
        self.assertEqual(res['new_association'], False)
class NotAllowedToMergeTest(TestCase):
    """
    Test not_allowed_to_merge function.
    """
    def setUp(self):
        """Build two mock users and three mock social-auth providers."""
        self.user1 = mock.Mock()
        self.user2 = mock.Mock()
        built = []
        for provider_name in ('facebook', 'google', 'twitter'):
            entry = mock.Mock()
            entry.provider = provider_name
            built.append(entry)
        self.provider1, self.provider2, self.provider3 = built
        self.user1.social_auth = mock.Mock()
        self.user2.social_auth = mock.Mock()

    def test_not_allowed_to_merge_false(self):
        """Disjoint provider sets: merging is permitted (falsy result)."""
        self.user1.social_auth.all = mock.Mock(
            return_value=(self.provider1, self.provider2)
        )
        self.user2.social_auth.all = mock.Mock(
            return_value=(self.provider3,)
        )
        self.assertFalse(bool(not_allowed_to_merge(self.user1, self.user2)))

    def test_not_allowed_to_merge_true(self):
        """Overlapping provider ('google'): merging is refused (truthy result)."""
        self.user1.social_auth.all = mock.Mock(
            return_value=(self.provider1, self.provider2)
        )
        self.user2.social_auth.all = mock.Mock(
            return_value=(self.provider2, self.provider3)
        )
        self.assertTrue(bool(not_allowed_to_merge(self.user1, self.user2)))
class AssociateUserTest(TestCase):
    """
    Test for associate_user pipeline.
    """
    def setUp(self):
        self.details = {'email': 'test@test.com'}
        self.user = mock.Mock()
        self.social = mock.Mock()
        self.social.user = mock.Mock()
        self.backend = mock.Mock()

    def test_associate_user_create_secondary(self):
        # Successful creation: the new social record and its user are returned
        # and the association is flagged as new.
        self.backend.strategy.storage.user.create_social_auth = mock.Mock(return_value=self.social)
        with mock.patch('psa.pipeline.SecondaryEmail') as mocked:
            save = mock.Mock()
            save.return_value = None
            mocked.return_value = save
            res = associate_user(self.backend, self.details,
                                 'test@test.com', user=self.user)
            self.assertEqual(res['social'], self.social)
            self.assertEqual(res['user'], self.social.user)
            self.assertEqual(res['new_association'], True)

    def test_associate_user_raise_exception(self):
        # Errors that storage does NOT classify as integrity errors propagate.
        create_sa = mock.Mock(side_effect=Exception())
        self.backend.strategy.storage.user.create_social_auth = create_sa
        self.backend.strategy.storage.is_integrity_error = mock.Mock(return_value=False)
        with self.assertRaises(Exception):
            associate_user(
                self.backend,
                self.details,
                'test@test.com',
                user=self.user
            )

    def test_associate_user_handle_exception(self):
        # Integrity errors are handled by falling back to social_user().
        create_sa = mock.Mock(side_effect=Exception())
        self.backend.strategy.storage.user.create_social_auth = create_sa
        self.backend.strategy.storage.is_integrity_error = mock.Mock(return_value=True)
        with mock.patch('psa.pipeline.social_user') as mocked:
            mocked.return_value = mock.Mock()
            res = associate_user(
                self.backend,
                self.details,
                'test@test.com',
                user=self.user
            )
            self.assertEqual(res, mocked.return_value)
class AssociateByEmailTest(TestCase):
    """
    Test associate_by_email pipeline.
    """
    def setUp(self):
        self.user = mock.Mock()
        self.backend = mock.Mock()
        self.details = mock.Mock()

    def test_return_none(self):
        # An already-authenticated user short-circuits this pipeline step.
        self.assertEqual(associate_by_email(self.backend, self.details, user=self.user), None)

    def test_without_email(self):
        # No email in the details -> nothing to associate.
        self.details.get = mock.Mock(return_value=None)
        self.assertEqual(associate_by_email(self.backend, self.details, user=None), None)

    def test_no_users_founded_by_email_and_no_social(self):
        # Neither a user nor a social record matches the email -> None.
        self.backend.strategy.storage.user.get_users_by_email = mock.Mock(return_value=[])
        self.details.get = mock.Mock(return_value='test@test.com')
        self.assertEqual(associate_by_email(self.backend, self.details, user=None), None)

    def test_no_users_founded_by_email_and_social(self):
        # No direct user match, but a UserSocialAuth row exists -> its user wins.
        self.backend.strategy.storage.user.get_users_by_email = mock.Mock(return_value=[])
        self.details.get = mock.Mock(return_value='test@test.com')
        with mock.patch('psa.pipeline.UserSocialAuth.objects.filter') as mocked:
            social = mock.Mock()
            social.user = mock.Mock()
            social_qs = mock.Mock()
            social_qs.first = mock.Mock(return_value=social)
            mocked.return_value = social_qs
            res = associate_by_email(self.backend, self.details, user=None)
            self.assertEqual(res['user'], social.user)

    def test_no_users_founded_by_email_and_users_gt_one(self):
        # Multiple accounts share the email -> ambiguous, raise AuthException.
        self.backend.strategy.storage.user.get_users_by_email = mock.Mock(return_value=[1, 2])
        self.details.get = mock.Mock(return_value='test@test.com')
        with self.assertRaises(AuthException):
            associate_by_email(self.backend, self.details, user=None)

    def test_no_users_founded_by_email_and_users_eq_one(self):
        # Exactly one account matches -> it is associated.
        self.backend.strategy.storage.user.get_users_by_email = mock.Mock(return_value=[self.user])
        self.details.get = mock.Mock(return_value='test@test.com')
        res = associate_by_email(self.backend, self.details, user=None)
        self.assertEqual(res['user'], self.user)
class SocialMergeTest(TestCase):
    """
    Test social_merge function.
    """
    def test_social_merge(self):
        """social_merge repoints both social_auth and lti_auth rows to the target user."""
        queryset = mock.Mock()
        queryset.update = mock.Mock()
        tmp_user = mock.Mock()
        user = mock.Mock()
        tmp_user.social_auth.all = mock.Mock(return_value=queryset)
        tmp_user.lti_auth.all = mock.Mock(return_value=queryset)
        social_merge(tmp_user, user)
        expected_calls = [mock.call(user=user), mock.call(django_user=user)]
        queryset.update.assert_has_calls(expected_calls, any_order=True)
class UnionMergeTest(TestCase):
    """
    Test union_merge function.
    """
    def test_union_merge(self):
        # One shared save() mock so saves can be counted across roles and
        # unit statuses.
        save = mock.Mock()
        role1, role2, role3 = (mock.Mock(), mock.Mock(), mock.Mock())
        for role in (role1, role2, role3):
            role.role = mock.Mock()
            role.course = mock.Mock()
            role.save = save
        tmp_user, user = (mock.Mock(), mock.Mock())
        tmp_user.role_set.all = mock.Mock(return_value=(role1, role2, role3,))
        # filter() returning None: the target user has no matching role yet.
        user.role_set.filter = mock.Mock(return_value=None)
        unitstatus1, unitstatus2 = (mock.Mock(), mock.Mock())
        for unitstatus in (unitstatus1, unitstatus2):
            unitstatus.save = save
        tmp_user.unitstatus_set.all = mock.Mock(return_value=(unitstatus1,
                                                              unitstatus2))
        # A single queryset mock shared by the three bulk-updated relations.
        update = mock.Mock()
        update.update = mock.Mock()
        tmp_user.fsmstate_set.all = mock.Mock(return_value=update)
        tmp_user.response_set.all = mock.Mock(return_value=update)
        tmp_user.studenterror_set.all = mock.Mock(return_value=update)
        union_merge(tmp_user, user)
        self.assertEqual(tmp_user.role_set.all.call_count, 1)
        # Each of the 3 roles is checked against the target user's roles.
        self.assertEqual(user.role_set.filter.call_count, 3)
        # 3 role saves + 2 unit-status saves.
        self.assertEqual(save.call_count, 5)
        # fsmstate/response/studenterror are each bulk-updated exactly once.
        self.assertEqual(update.update.call_count, 3)
        calls = [mock.call(user=user),
                 mock.call(author=user),
                 mock.call(author=user)]
        update.update.assert_has_calls(calls, any_order=True)
class MailTest(TestCase):
    """
    Testing send_validation function.
    """
    def test_send_validation(self):
        """send_validation returns None when the underlying send_mail is stubbed."""
        with mock.patch('psa.mail.send_mail') as send_mail_mock:
            send_mail_mock.return_value = None
            strategy = mock.Mock()
            backend = mock.Mock()
            code = mock.Mock()
            backend.name = 'google-oauth2'
            code.code = 'test_code'
            code.email = 'test@test.com'
            self.assertIsNone(send_validation(strategy, backend, code))
class ValidatedUserDetailTest(TestCase):
    """
    Tests for ValidatedUserDetailTest.
    """
    def setUp(self):
        self.strategy = mock.Mock()
        self.backend = mock.Mock()
        self.details = {'email': 'test@test.com', 'username': 'new_username'}
        # Default user is in the temporary/anonymous group:
        # groups.filter(...).exists() is True.
        self.user = mock.Mock()
        self.user.username = 'anonymous'
        self.exists = mock.Mock()
        self.exists.exists.return_value = True
        self.user.groups.filter.return_value = self.exists
        self.social = mock.Mock()

    def test_temporary_user_with_social(self):
        # Temporary user + existing social record: merged into the social owner.
        self.social.user = mock.Mock()
        with mock.patch('psa.pipeline.logout') as mocked_logout:
            with mock.patch('psa.pipeline.login') as mocked_login:
                with mock.patch('psa.pipeline.union_merge') as mocked_merge:
                    mocked_logout.return_value = None
                    mocked_login.return_value = None
                    mocked_merge.return_value = None
                    res = validated_user_details(
                        strategy=self.strategy,
                        pipeline_index=6,
                        backend=self.backend,
                        details=self.details,
                        user=self.user,
                        social=self.social
                    )
                    self.assertEqual(res['user'], self.social.user)

    def test_temporary_user_without_social_user_by_email(self):
        # No social record, but exactly one account matches the email ->
        # merged into that account.
        user_by_email = mock.Mock()
        self.backend.strategy.storage.user.get_users_by_email.return_value = [user_by_email]
        self.details = {'email': 'test@test.com'}
        with mock.patch('psa.pipeline.logout') as mocked_logout:
            with mock.patch('psa.pipeline.login') as mocked_login:
                with mock.patch('psa.pipeline.union_merge') as mocked_merge:
                    mocked_logout.return_value = None
                    mocked_login.return_value = None
                    mocked_merge.return_value = None
                    res = validated_user_details(
                        strategy=self.strategy,
                        pipeline_index=6,
                        backend=self.backend,
                        details=self.details,
                        user=self.user
                    )
                    self.assertEqual(res['user'], user_by_email)

    def test_temporary_user_without_social_no_user_by_email(self):
        # Nobody matches the email: the temporary user is promoted in place
        # (username taken from details, first name cleared), empty result dict.
        self.backend.strategy.storage.user.get_users_by_email.return_value = []
        with mock.patch('psa.pipeline.logout') as mocked_logout:
            with mock.patch('psa.pipeline.login') as mocked_login:
                with mock.patch('psa.pipeline.union_merge') as mocked_merge:
                    mocked_logout.return_value = None
                    mocked_login.return_value = None
                    mocked_merge.return_value = None
                    res = validated_user_details(
                        strategy=self.strategy,
                        pipeline_index=6,
                        backend=self.backend,
                        details=self.details,
                        user=self.user
                    )
                    self.assertEqual(res, {})
                    self.assertEqual(self.user.username, self.details.get('username'))
                    self.assertEqual(self.user.first_name, '')

    def test_temporary_user_without_social_two_user_by_email(self):
        # Two accounts share the email -> ambiguous, AuthException expected.
        self.backend.strategy.storage.user.get_users_by_email.return_value = [mock.Mock(),
                                                                              mock.Mock()]
        with mock.patch('psa.pipeline.logout') as mocked_logout:
            with mock.patch('psa.pipeline.login') as mocked_login:
                with mock.patch('psa.pipeline.union_merge') as mocked_merge:
                    mocked_logout.return_value = None
                    mocked_login.return_value = None
                    mocked_merge.return_value = None
                    with self.assertRaises(AuthException):
                        validated_user_details(
                            strategy=self.strategy,
                            pipeline_index=6,
                            backend=self.backend,
                            details=self.details,
                            user=self.user
                        )

    def test_integrity_error(self):
        self.backend.strategy.storage.user.get_users_by_email.return_value = []
        save = mock.Mock()
        save.side_effect = IntegrityError()
        self.user.save = save
        # pipeline do not raise IntegrityError but we mock user.save()
        # so in our test second call to save will raise IntegrityError
        # exception
        with self.assertRaises(IntegrityError):
            validated_user_details(
                strategy=self.strategy,
                pipeline_index=6,
                backend=self.backend,
                details=self.details,
                user=self.user
            )
        # The retry appended a suffix to the requested username, so the
        # stored name contains (and is longer than) the requested one.
        self.assertIn(self.details.get('username'), self.user.username)
        self.assertLess(len(self.details.get('username')),
                        len(self.user.username))

    def test_valid_user_with_social_confirm_no(self):
        # Non-temporary user with a conflicting social owner who answers
        # 'no' to the merge prompt -> AuthException.
        self.social.user = mock.Mock()
        self.user.username = 'test_username'
        self.exists.exists.return_value = False
        self.strategy.request.POST = {'confirm': 'no'}
        with self.assertRaises(AuthException):
            validated_user_details(
                strategy=self.strategy,
                pipeline_index=6,
                backend=self.backend,
                details=self.details,
                user=self.user,
                social=self.social
            )

    def test_valid_user_with_social_confirm_yes(self):
        # 'yes' to the merge prompt -> accounts merged, current user kept.
        self.exists.exists.return_value = False
        self.social.user = mock.Mock()
        self.social.user.groups.filter.return_value = self.exists
        self.social.user.email = 'test@test.com'
        self.user.username = 'test_username'
        self.user.email = 'test@test.com'
        self.user.groups.filter.return_value = self.exists
        self.user.get_full_name.return_value = 'test_username1'
        self.social.user.get_full_name.return_value = 'test_username2'
        self.strategy.request.POST = {'confirm': 'yes'}
        with mock.patch('psa.pipeline.social_merge') as mocked_social:
            with mock.patch('psa.pipeline.union_merge') as mocked_union:
                mocked_social.return_value = None
                mocked_union.return_value = None
                res = validated_user_details(
                    strategy=self.strategy,
                    pipeline_index=6,
                    backend=self.backend,
                    details=self.details,
                    user=self.user,
                    social=self.social
                )
                self.assertEqual(res['user'], self.user)
                self.assertEqual(res['social'], self.social)

    def test_valid_user_with_social_without_confirm(self):
        # No confirm answer yet -> the merge-confirmation page is rendered.
        self.exists.exists.return_value = False
        self.social.user = mock.Mock()
        self.social.user.groups.filter.return_value = self.exists
        self.social.user.email = 'test@test.com'
        self.user.groups.filter.return_value = self.exists
        self.user.username = 'test_username'
        self.user.email = 'test@test.com'
        self.user.get_full_name.return_value = 'test_username1'
        self.social.user.get_full_name.return_value = 'test_username2'
        self.strategy.request.POST = {}
        with mock.patch('psa.pipeline.social_merge') as mocked_social:
            with mock.patch('psa.pipeline.union_merge') as mocked_union:
                mocked_social.return_value = None
                mocked_union.return_value = None
                validated_user_details(
                    strategy=self.strategy,
                    pipeline_index=6,
                    backend=self.backend,
                    details=self.details,
                    user=self.user,
                    social=self.social
                )
                self.assertTemplateUsed('ct/person.html')
class CustomMailValidation(TestCase):
    """
    Testing psa.pipeline.custom_mail_validation.
    """
    def setUp(self):
        self.strategy = mock.Mock()
        self.backend = mock.Mock()
        self.backend.name = 'email'
        # Email-validation setting is enabled.
        self.backend.setting.return_value = True
        self.backend.strategy.session_set = mock.Mock()
        self.backend.strategy.session_pop = mock.Mock()
        # By default the incoming request carries a verification code.
        self.backend.strategy.request_data.return_value = {'verification_code': 'test_code'}
        self.details = {'email': 'test@test.com', 'username': 'new_username'}
        self.user = mock.Mock()
        self.social = mock.Mock()

    def test_custom_mail_validation_backend_not_email(self):
        # Non-email backends skip this pipeline step entirely.
        self.backend.name = 'facebook'
        res = custom_mail_validation(
            strategy=self.strategy,
            pipeline_index=5,
            backend=self.backend,
            details=self.details,
            user=self.user,
            social=self.social
        )
        self.assertEqual(res, {})

    def test_custom_mail_validation_backend_email_verify_code(self):
        # Valid verification code: the code's user is looked up and logged in
        # with the default ModelBackend.
        self.backend.strategy.validate_email.return_value = True
        code = mock.Mock()
        code.user_id = 1
        self.backend.strategy.storage.code.get_code.return_value = code
        with mock.patch('psa.pipeline.User') as mocked_user:
            with mock.patch('psa.pipeline.logout') as mocked_logout:
                with mock.patch('psa.pipeline.login') as mocked_login:
                    mocked_logout.return_value = None
                    mocked_login.return_value = None
                    queryset = mock.Mock()
                    queryset.first.return_value = self.user
                    mocked_user.objects.filter.return_value = queryset
                    res = custom_mail_validation(
                        strategy=self.strategy,
                        pipeline_index=5,
                        backend=self.backend,
                        details=self.details,
                        user=self.user
                    )
                    self.assertEqual(res['user'], self.user)
                    self.assertEqual(
                        self.user.backend,
                        'django.contrib.auth.backends.ModelBackend'
                    )

    def test_custom_mail_validation_raise(self):
        # Invalid verification code -> InvalidEmail.
        self.backend.strategy.validate_email.return_value = False
        with self.assertRaises(InvalidEmail):
            custom_mail_validation(
                strategy=self.strategy,
                pipeline_index=5,
                backend=self.backend,
                details=self.details,
                user=self.user
            )

    def test_custom_mail_validation_backend_email_send_email(self):
        # No code in the request: the pipeline sends a validation email and
        # returns a redirect instead of continuing.
        self.backend.strategy.request_data.return_value = {}
        self.backend.strategy.send_email_validation = mock.Mock()
        self.user.username = 'test_user'
        exists = mock.Mock()
        exists.exists.return_value = False
        self.user.groups.filter.return_value = exists
        res = custom_mail_validation(
            strategy=self.strategy,
            pipeline_index=5,
            backend=self.backend,
            details=self.details,
            user=self.user
        )
        self.assertEqual(res, self.backend.strategy.redirect())

    def test_custom_mail_validation_backend_email_send_email_anonym(self):
        # Same flow for an anonymous-group user; additionally the address is
        # recorded via AnonymEmail.objects.get_or_create.
        self.backend.strategy.request_data.return_value = {}
        send_email_validation = mock.Mock()
        self.backend.strategy.send_email_validation = send_email_validation
        self.user.username = 'anonymous_username'
        exists = mock.Mock()
        exists.exists.return_value = True
        self.user.groups.filter.return_value = exists
        with mock.patch('psa.pipeline.AnonymEmail') as mocked_anonym:
            get_or_create = mock.Mock()
            mocked_anonym.objects.get_or_create = get_or_create
            res = custom_mail_validation(
                strategy=self.strategy,
                pipeline_index=5,
                backend=self.backend,
                details=self.details,
                user=self.user
            )
            self.assertEqual(res, self.backend.strategy.redirect())
            self.assertEqual(get_or_create.call_count, 1)
            self.assertEqual(send_email_validation.call_count, 1)
@mock.patch('psa.custom_backends.CustomCode')
class EmailAuthTest(TestCase):
    """
    Testing EmailAuth.auth_complete method.
    """
    # The class-level mock.patch hands the mocked CustomCode model to every
    # test method as the extra ``code`` argument (setUp is not patched).
    def setUp(self):
        self.test_email = 'test@test.com'
        self.email_auth = EmailAuth()
        self.email_auth.strategy = mock.Mock()
        self.email_auth.strategy.request.GET.get.return_value = True
        # Queryset-like mock whose .first() yields a code row carrying the email.
        code_object = mock.Mock()
        code_object.email = self.test_email
        self.first = mock.Mock()
        self.first.first.return_value = code_object

    def test_update_email_from_code_custom_code_found(self, code):
        # A matching code row supplies the email for the auth data.
        code.objects.filter.return_value = self.first
        self.email_auth.auth_complete()
        self.assertEqual(self.email_auth.data.get('email'), self.test_email)

    def test_update_email_from_code_no_custom_code_found(self, code):
        # No matching code row -> AuthMissingParameter is raised.
        self.first = mock.Mock()
        self.first.first.return_value = None
        code.objects.filter.return_value = self.first
        with self.assertRaises(AuthMissingParameter):
            self.email_auth.auth_complete()
class SignupTest(TestCase):
    """
    Tests that psa.views.signup function works correctly
    """
    def setUp(self):
        self.url = reverse('signup')
        # Create a real backend session so the test client's cookie loads.
        engine = import_module(settings.SESSION_ENGINE)
        store = engine.SessionStore()
        store.create()  # we need to make load() work, or the cookie is worthless
        self.client.cookies[settings.SESSION_COOKIE_NAME] = store.session_key

    def test_signup_logout(self):
        # Visiting the signup page logs out an authenticated user.
        User.objects.create_user('test_user', 'test@aa.cc', '123')
        self.client.login(email='test@aa.cc', password='123')
        self.client.get(self.url)
        self.assertNotIn('_auth_user_id', self.client.session)

    def test_signup_with_u_hash_in_session(self):
        # With a matching u_hash in the session the account is created
        # immediately (no CustomCode), the user is logged in and redirected
        # to the profile-update page.
        u_hash = uuid4().hex
        self.client.logout()
        session = self.client.session
        session.update({'u_hash': u_hash})
        session.save()
        self.assertEqual(CustomCode.objects.count(), 0)
        self.assertEqual(User.objects.count(), 0)
        self.assertEqual(Instructor.objects.count(), 0)
        response = self.client.post(
            self.url,
            {
                'u_hash': u_hash,
                'email': 'test_email@aa.cc',
                'email_confirmation': 'test_email@aa.cc',
                'last_name': 'Bo',
                'first_name': 'Alex',
                'password': '123123123'
            },
            follow=True
        )
        self.assertRedirects(response, '{}?next={}'.format(reverse('accounts:profile_update'), reverse('ctms:onboarding')))
        self.assertIn('_auth_user_id', self.client.session)
        new_user = User.objects.get(email='test_email@aa.cc')
        self.assertEqual(int(self.client.session['_auth_user_id']), new_user.pk)
        self.assertEqual(CustomCode.objects.count(), 0)
        self.assertEqual(User.objects.count(), 1)
        self.assertEqual(Instructor.objects.count(), 0)

    def test_post_signup_form_invalid(self):
        # Mismatched confirmation email re-renders the form with an error.
        response = self.client.post(
            self.url,
            {
                'email': 'test_email@aa.cc',
                'email_confirmation': 'test_email2222@aa.cc',
                'last_name': 'Bo',
                'first_name': 'Alex',
                'password': '123123123'
            },
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn('form', response.context)
        self.assertIn(u'Confirmation e-mail should be the same as e-mail.',
                      response.context['form']['email_confirmation'].errors)

    def test_signup_without_u_hash(self):
        # Without a u_hash the flow creates a CustomCode (email confirmation
        # pending) and no User yet; the email is kept for the resend page.
        self.assertEqual(CustomCode.objects.count(), 0)
        self.assertEqual(User.objects.count(), 0)
        self.assertEqual(Instructor.objects.count(), 0)
        self.client.post(
            self.url,
            {
                'email': 'test_email@aa.cc',
                'email_confirmation': 'test_email@aa.cc',
                'last_name': 'Bo',
                'first_name': 'Alex',
                'institution': 'testInstitute',
                'password': '123123123'
            },
            follow=True
        )
        self.assertEqual(CustomCode.objects.count(), 1)
        self.assertEqual(User.objects.count(), 0)
        self.assertIn('resend_user_email', self.client.session)
        self.assertEqual(self.client.session['resend_user_email'], 'test_email@aa.cc')

    def test_twitter_signup_anonymous_user_cancel(self):
        """Test when anonymous user cancel auth thought twitter should not see 500 error."""
        url = "/complete/twitter/?redirect_state=T6zv6SbT2HnhWxLExv1bSrTdcvFMcOMi&denied=06OETwAAAAAAexJsAAABY7v2Lig"
        response = self.client.get(url, follow=True)
        self.assertRedirects(response, reverse('new_login') + "?next=/ctms/")
        self.assertTemplateUsed(response, 'psa/new_custom_login.html')

    def test_twitter_signup_loggedin_user_cancel(self):
        """Test when logged in user cancel auth thought twitter should not see 500 error."""
        User.objects.create_user(username='test', email='test@aa.cc', password='123')
        self.client.login(username='test', password='123')
        url = "/complete/twitter/?redirect_state=T6zv6SbT2HnhWxLExv1bSrTdcvFMcOMi&denied=06OETwAAAAAAexJsAAABY7v2Lig"
        response = self.client.get(url, follow=True)
        self.assertRedirects(response, reverse('accounts:settings'))
        self.assertTemplateUsed(response, 'accounts/settings.html')
class LogoutTest(TestCase):
    def setUp(self):
        """Attach a real backend session to the test client via its cookie."""
        self.url = reverse('signup')
        session_engine = import_module(settings.SESSION_ENGINE)
        session_store = session_engine.SessionStore()
        session_store.save()  # we need to make load() work, or the cookie is worthless
        self.client.cookies[settings.SESSION_COOKIE_NAME] = session_store.session_key

    def test_logout(self):
        """Test that old logout page redirect to ct page after logout."""
        response = self.client.get(reverse('logout'), follow=True)
        self.assertRedirects(response, reverse('new_login'))
        self.assertEqual(self.client.cookies.get('sessionid').value, '')

    def test_new_logout(self):
        """The new logout endpoint also redirects to login and clears the session."""
        response = self.client.get(reverse('new_logout'), follow=True)
        self.assertRedirects(response, reverse('new_login'))
        self.assertEqual(self.client.cookies.get('sessionid').value, '')
| {
"content_hash": "4376f9c0f8ae55bbc32eda74154576be",
"timestamp": "",
"source": "github",
"line_count": 946,
"max_line_length": 123,
"avg_line_length": 39.99577167019027,
"alnum_prop": 0.5893857701659795,
"repo_name": "raccoongang/socraticqs2",
"id": "91eed0580dbdc280dcdb0fbf730d54468c9de113",
"size": "37836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysite/psa/tests/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "189600"
},
{
"name": "Dockerfile",
"bytes": "580"
},
{
"name": "Gherkin",
"bytes": "289"
},
{
"name": "HTML",
"bytes": "342788"
},
{
"name": "JavaScript",
"bytes": "133425"
},
{
"name": "Makefile",
"bytes": "2991"
},
{
"name": "Python",
"bytes": "1504025"
},
{
"name": "Shell",
"bytes": "1521"
}
],
"symlink_target": ""
} |
"""A small demo showcasing a saxpy computation."""
from __future__ import print_function
import sys
import numpy as np
import pycuda.autoinit
import pycuda.driver as driver
import pycuda.gpuarray as gpuarray
# CUDA C source for an explicit saxpy kernel (result = a*x + y, guarded by size).
# NOTE(review): this source is never compiled or launched below -- the demo
# computes saxpy via pycuda.gpuarray operator overloading instead.
saxpy_kernel = """__global__ void saxpy_kernel(const float* const x,
const float* const y,
float a,
float* const result,
size_t size) {
unsigned int tid = threadIdx.x;
unsigned int gid = tid + blockIdx.x * blockDim.x;
if (gid < size) {
result[gid] = a * x[gid] + y[gid];
}
}"""
if __name__ == '__main__':
    # Element count must be an int: np.random.uniform(size=...) rejects the
    # float 10e5 that was here before.
    array_size = 10 ** 6
    start, end = driver.Event(), driver.Event()
    # Same host-side allocation as last time
    h_x = np.random.uniform(1., 101., size=array_size).astype(np.float32)
    h_y = np.random.uniform(1., 101., size=array_size).astype(np.float32)
    h_result = np.empty_like(h_y)
    h_a = np.float32(0.234)
    # Transfer host arrays to device memory; the result buffer only needs an
    # allocation, not a copy (the old to_gpu(d_y) copied device data that was
    # immediately discarded by the rebinding below).
    d_x = gpuarray.to_gpu(h_x)
    d_y = gpuarray.to_gpu(h_y)
    d_result = gpuarray.empty_like(d_y)
    # Launch implicit kernel and retrieve result in one line, timed with
    # CUDA events (milliseconds).
    start.record()
    d_result = h_a * d_x + d_y
    end.record()
    end.synchronize()
    print("Took {}ms".format(start.time_till(end)))
    d_result.get(h_result)
    # Verify against the host computation. Use allclose rather than exact
    # equality: the device may fuse multiply-add with different rounding.
    assert np.allclose(h_a * h_x + h_y, h_result)
| {
"content_hash": "785104784ccf8c7f978753b28735176a",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 73,
"avg_line_length": 30.04,
"alnum_prop": 0.5552596537949401,
"repo_name": "MisanthropicBit/pycuda_examples",
"id": "f85d9e44a1bfa806f4e247f76c4eb7d27513b699",
"size": "1549",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/pycuda_saxpy.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
"""
# UI/UX Authoring Tool
# @license http://www.apache.org/licenses/LICENSE-2.0
# Author @ Jamil Hussain
"""
from django.conf.urls import url, include
from django.contrib import admin
from . import views
# URL routes for the analytics app.
# NOTE(review): all patterns except r'^$' are unanchored at the end (no '$'),
# so e.g. r'^overview' also matches 'overviewfoo' -- confirm this is intended.
urlpatterns = [
    url(r'^$',views.homeView, name='home'),
    url(r'^overview',views.overviewView, name='overview'),
    url(r'^createapp',views.createApp, name='createapp'),
    url(r'^locations',views.locationsView, name='location'),
    url(r'^screens',views.screensView, name='screens'),
    url(r'^events',views.eventsView, name='event'),
    url(r'^exceptions',views.exceptionsView, name='exceptions'),
]
| {
"content_hash": "211c75f29d485d4ee13ed28337ce7075",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 68,
"avg_line_length": 32.6,
"alnum_prop": 0.661042944785276,
"repo_name": "ubiquitous-computing-lab/Mining-Minds",
"id": "79e535113fbfde1ea1c5b96b2348ae363aa166ef",
"size": "652",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "supporting-layer/uiux-authoring-tool/analytics/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2388167"
},
{
"name": "CoffeeScript",
"bytes": "87725"
},
{
"name": "HTML",
"bytes": "6002417"
},
{
"name": "Java",
"bytes": "2523276"
},
{
"name": "JavaScript",
"bytes": "35544943"
},
{
"name": "Makefile",
"bytes": "1558"
},
{
"name": "PHP",
"bytes": "874945"
},
{
"name": "PowerShell",
"bytes": "468"
},
{
"name": "Python",
"bytes": "63930"
},
{
"name": "Shell",
"bytes": "3879"
}
],
"symlink_target": ""
} |
"""
WSGI config for algotrade project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment already set it.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "algotrade.settings")

# Module-level WSGI callable used by application servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
| {
"content_hash": "8f10d5a0f6e6d44381f2e638fb531bae",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.6875,
"alnum_prop": 0.7721518987341772,
"repo_name": "softage0/algorithm-trading-webapp",
"id": "41e5cc0e6401f089f677c69b4d0e39ce8051911e",
"size": "395",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "algotrade/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2752"
},
{
"name": "HTML",
"bytes": "11896"
},
{
"name": "Python",
"bytes": "36585"
}
],
"symlink_target": ""
} |
from fabric.api import *
from fabric.contrib.files import exists, append, comment
from fabric.colors import red
import os
# Deployment defaults shared by all targets; the site/app name is taken from
# the basename of the directory the fabfile is run from.
env.sitename = os.path.basename(os.getcwd())
env.mongo_host = 'fire.rccc.ou.edu'
env.psql_host = 'fire.rccc.ou.edu'
env.apache_config = '/etc/httpd/conf.d/%(sitename)s.conf' % env
env.python = '/usr/bin/python2.6'
def testing():
    """
    Work on staging environment
    """
    env.settings = 'testing'
    env.path = '/var/www/apps/' + env.sitename
    env.virtpy = env.path + '/virtpy'
    env.log_path = env.path + '/log'
    env.hosts = ['test.cybercommons.org']
def fire():
    """
    Setup on fire.rccc.ou.edu
    """
    env.settings = 'production'
    env.path = '/scratch/www/wsgi_sites/' + env.sitename
    env.virtpy = env.path + '/virtpy'
    env.log_path = env.path + '/log'
    env.hosts = ['fire.rccc.ou.edu']
def production():
    """
    Work on production environment
    """
    env.settings = 'production'
    env.path = '/var/www/apps/' + env.sitename
    env.virtpy = env.path + '/virtpy'
    env.log_path = env.path + '/log'
    env.hosts = ['production.cybercommons.org']
def setup():
    """
    Setup directories and copy everything but virtual environment to server,
    then install virtual environment based on requirements.txt
    """
    setup_directories()      # create app/log/virtpy directories
    copy_working_dir()       # upload the project tree
    setup_virtualenv()       # create the virtualenv
    install_requirements()   # pip install requirements.txt into it
    apache_config()          # register the WSGI alias with Apache
    bounce_apache()          # restart httpd to pick everything up
def deploy():
    """
    Deploy changes which don't impact virtual environment
    """
    # Only re-upload code and restart; no dependency/virtualenv changes.
    copy_working_dir()
    bounce_apache()
def setup_directories():
    """
    Setup directories on the remote system.

    Runs ``mkdir -p`` unconditionally for all three directories: it is
    idempotent, and the previous ``if not exists(path)`` guard skipped the
    log/ and virtpy/ subdirectories whenever the app root already existed,
    leaving a partially-created tree.
    """
    run('mkdir -p %(path)s' % env)
    run('mkdir -p %(log_path)s' % env)
    run('mkdir -p %(virtpy)s' % env)
def virtualenv(command):
    """
    Wrapper to activate and run virtual environment

    :param command: remote shell command, run after sourcing the
        virtualenv's activate script (joined with '&&', no spaces --
        still valid shell syntax).
    """
    with cd(env.virtpy):
        run('source %(virtpy)s/bin/activate' % env + '&&' + command)
def setup_virtualenv():
    """
    Install the virtual environment
    """
    # --no-site-packages isolates the env from the system Python; the
    # interpreter used is the one configured in env.python.
    run('virtualenv -p %(python)s --no-site-packages %(virtpy)s' % env)
def bounce_apache():
    """ Restart the apache web server """
    # SysV init script path (RHEL/CentOS-era layout); requires sudo rights.
    sudo('/etc/init.d/httpd restart')
def apache_config(secure=False):
    """
    Set the apache config file to point to wsgi. Assumes app will be accessible at /sitename/ and
    .wsgi named sitename.wsgi

    :param secure: when True, additionally wrap the app in a <Location>
        block protected by mod_auth_tkt login tickets.
    """
    # check if apache config lines exist in old wsgi_sites.conf and comment if found
    comment('/etc/httpd/conf.d/wsgi_sites.conf', r'^WSGIScriptAlias /%(sitename)s .*$' % env, use_sudo=True)
    confline = 'WSGIScriptAlias /%(sitename)s %(path)s/%(sitename)s.wsgi' %env
    append('%(apache_config)s' % env, confline, use_sudo=True)
    if secure:
        # mod_auth_tkt configuration appended verbatim to the vhost config.
        secure_app = """
<Location /%(sitename)s>
AuthType Basic
require valid-user
TKTAuthLoginURL http://test.cybercommons.org/accounts/login/
TKTAuthTimeoutURL http://test.cybercommons.org/accounts/login/?timeout=1
TKTAuthPostTimeoutURL http://test.cybercommons.org/accounts/login/?posttimeout=1
TKTAuthUnauthURL http://test.cybercommons.org/accounts/login/?unauth=1
TKTAuthIgnoreIP on
TKTAuthBackArgName next
</Location>
""" % (env)
        append('%(apache_config)s' % env, secure_app, use_sudo=True)
def copy_working_dir():
    """
    Shuttle application code from local to remote
    """
    # Tar the working dir (minus .git and the local virtualenv), upload it,
    # unpack it remotely, then remove both the remote and local archives.
    local('tar --exclude .git --exclude virtpy -czf /tmp/deploy_%(sitename)s.tgz .' % env)
    put('/tmp/deploy_%(sitename)s.tgz' % env, '%(path)s/deploy_%(sitename)s.tgz' % env)
    run('cd %(path)s; tar -xf deploy_%(sitename)s.tgz; rm deploy_%(sitename)s.tgz' % env)
    local('rm /tmp/deploy_%(sitename)s.tgz' % env)
def install_requirements():
    """
    Install the contents of requirements.txt to the virtual environment
    """
    check = exists('%(path)s/requirements.txt' % env)
    if check:
        # NOTE(review): ``pip install -E`` was removed in pip >= 1.1; this
        # presumably targets the old pip bundled with the deployed virtualenv.
        virtualenv('pip install -E %(virtpy)s -r %(path)s/requirements.txt' % env)
    else:
        print red("Can't find requirements.txt!")
def upgrade_requirements():
    """
    Upload the local requirements.txt and upgrade the virtual environment
    to its contents (``pip install --upgrade``).
    """
    put('requirements.txt', '%(path)s/requirements.txt' % env)
    check = exists('%(path)s/requirements.txt' % env)
    if check:
        virtualenv('pip install --upgrade -E %(virtpy)s -r %(path)s/requirements.txt' % env)
    else:
        print red("Can't find requirements.txt!")
| {
"content_hash": "cba23f758b593b0d2adeeb63e552d037",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 108,
"avg_line_length": 31.690140845070424,
"alnum_prop": 0.6371111111111111,
"repo_name": "ouinformatics/queue",
"id": "ce94b2eb8a449a8d93fc905fa09cae34b9944f13",
"size": "4500",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17998"
}
],
"symlink_target": ""
} |
__revision__ = "test/AS/nasm.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"

"""
Verify correct use of the live 'nasm' assembler.
"""

import os
import sys

import TestSCons

_python_ = TestSCons._python_
_exe = TestSCons._exe

test = TestSCons.TestSCons()

# Skip unless a real nasm binary is on PATH.
nasm = test.where_is('nasm')
if not nasm:
    test.skip_test('nasm not found; skipping test\n')

if sys.platform.find('linux') == -1:
    test.skip_test("skipping test on non-Linux platform '%s'\n" % sys.platform)

try:
    # NOTE(review): popen2 is Python-2-only, and a failed import raises
    # ImportError, which the OSError clause below would NOT catch.
    import popen2
    stdout = popen2.popen2('nasm -v')[0]
except OSError:
    test.skip_test('could not determine nasm version; skipping test\n')
else:
    # Only nasm 0.98 on 32-bit x86 hardware is exercised by this test.
    version = stdout.read().split()[2]
    if version[:4] != '0.98':
        test.skip_test("skipping test of nasm version %s\n" % version)
    machine = os.uname()[4]
    if not machine in ('i386', 'i486', 'i586', 'i686'):
        fmt = "skipping test of nasm %s on non-x86 machine '%s'\n"
        test.skip_test(fmt % (version, machine))

# Allow flexibility about the type of object/executable format
# needed on different systems. Format_map is a dict that maps
# sys.platform substrings to the correct argument for the nasm -f
# option. The default is "elf," which seems to be a reasonable
# lowest common denominator (works on both Linux and FreeBSD,
# anyway...).
nasm_format = 'elf'
format_map = {}
for k, v in format_map.items():
    if sys.platform.find(k) != -1:
        nasm_format = v
        break

# wrapper.py records that it ran, then forwards to the real assembler.
test.write("wrapper.py",
"""import os
import sys
open('%s', 'wb').write("wrapper.py\\n")
os.system(" ".join(sys.argv[1:]))
""" % test.workpath('wrapper.out').replace('\\', '\\\\'))

# Two build environments: 'eee' uses nasm directly, 'fff' goes through
# the wrapper so we can verify $AS substitution is honored.
test.write('SConstruct', """
eee = Environment(tools = ['gcc', 'gnulink', 'nasm'],
ASFLAGS = '-f %(nasm_format)s')
fff = eee.Clone(AS = r'%(_python_)s wrapper.py ' + WhereIs('nasm'))
eee.Program(target = 'eee', source = ['eee.asm', 'eee_main.c'])
fff.Program(target = 'fff', source = ['fff.asm', 'fff_main.c'])
""" % locals())

# Each .asm file exports a 'name' string the C main() prints.
test.write('eee.asm',
"""
global name
name:
db 'eee.asm',0
""")

test.write('fff.asm',
"""
global name
name:
db 'fff.asm',0
""")

test.write('eee_main.c', r"""
extern char name[];
int
main(int argc, char *argv[])
{
argv[argc++] = "--";
printf("eee_main.c %s\n", name);
exit (0);
}
""")

test.write('fff_main.c', r"""
#include <stdio.h>
#include <stdlib.h>
extern char name[];
int
main(int argc, char *argv[])
{
argv[argc++] = "--";
printf("fff_main.c %s\n", name);
exit (0);
}
""")

# Direct nasm build: wrapper must NOT have been invoked.
test.run(arguments = 'eee' + _exe, stderr = None)
test.run(program = test.workpath('eee'), stdout = "eee_main.c eee.asm\n")
test.must_not_exist('wrapper.out')

# Wrapped build: wrapper must have left its marker file behind.
test.run(arguments = 'fff' + _exe)
test.run(program = test.workpath('fff'), stdout = "fff_main.c fff.asm\n")
test.must_match('wrapper.out', "wrapper.py\n")

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| {
"content_hash": "d4e3233280000bf95e2106b53ed8995a",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 89,
"avg_line_length": 23.27906976744186,
"alnum_prop": 0.6163836163836164,
"repo_name": "EmanueleCannizzaro/scons",
"id": "8656f096383d7d654e5b11b28ffe70c77d15c5f8",
"size": "4138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/AS/nasm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2491"
},
{
"name": "C",
"bytes": "659"
},
{
"name": "C++",
"bytes": "598"
},
{
"name": "CSS",
"bytes": "18502"
},
{
"name": "D",
"bytes": "1997"
},
{
"name": "HTML",
"bytes": "817651"
},
{
"name": "Java",
"bytes": "6860"
},
{
"name": "JavaScript",
"bytes": "215495"
},
{
"name": "Makefile",
"bytes": "3795"
},
{
"name": "Perl",
"bytes": "29978"
},
{
"name": "Python",
"bytes": "7510453"
},
{
"name": "Roff",
"bytes": "556545"
},
{
"name": "Ruby",
"bytes": "11074"
},
{
"name": "Shell",
"bytes": "52682"
},
{
"name": "XSLT",
"bytes": "7567242"
}
],
"symlink_target": ""
} |
import os
import platform
import re
import subprocess
import tempfile
import lit.formats
import lit.util
from lit.llvm import llvm_config
from lit.llvm.subst import ToolSubst
from lit.llvm.subst import FindTool
# Configuration file for the 'lit' test runner.
# 'config' and 'llvm_config' are injected into this script's globals by lit.

# name: The name of this test suite.
config.name = 'STANDALONE_OPT'

config.test_format = lit.formats.ShTest(not llvm_config.use_lit_shell)

# suffixes: A list of file extensions to treat as test files.
config.suffixes = ['.mlir']

# test_source_root: The root path where tests are located.
config.test_source_root = os.path.dirname(__file__)

# test_exec_root: The root path where tests should be run.
config.test_exec_root = os.path.join(config.standalone_obj_root, 'test')

config.substitutions.append(('%PATH%', config.environment['PATH']))
config.substitutions.append(('%shlibext', config.llvm_shlib_ext))

llvm_config.with_system_environment(
    ['HOME', 'INCLUDE', 'LIB', 'TMP', 'TEMP'])

llvm_config.use_default_substitutions()

# excludes: A list of directories to exclude from the testsuite. The 'Inputs'
# subdirectories contain auxiliary inputs for various tests in their parent
# directories.
config.excludes = ['Inputs', 'Examples', 'CMakeLists.txt', 'README.txt', 'LICENSE.txt']

# NOTE(review): the original file assigned test_source_root and
# test_exec_root a second time here with identical values; the duplicate
# assignments were removed.

config.standalone_tools_dir = os.path.join(config.standalone_obj_root, 'bin')

# Tweak the PATH to include the tools dir.
llvm_config.with_environment('PATH', config.llvm_tools_dir, append_path=True)

tool_dirs = [config.standalone_tools_dir, config.llvm_tools_dir]
tools = [
    'standalone-opt'
]

llvm_config.add_tool_substitutions(tools, tool_dirs)
| {
"content_hash": "5259d6043c426fa4df2416eb6a45a621",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 87,
"avg_line_length": 32,
"alnum_prop": 0.7489224137931034,
"repo_name": "endlessm/chromium-browser",
"id": "049fbc73cd81629711ecca1b48591f57d7addf06",
"size": "1874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/llvm/mlir/examples/standalone/test/lit.cfg.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from msrest.pipeline import ClientRawResponse
from .. import models
class ListManagementTermOperations(object):
    """ListManagementTermOperations operations.

    Wrapper around the Content Moderator term-list REST endpoints.  Every
    operation follows the same shape: format the URL, serialize query and
    header parameters, send the request, check the expected status code,
    then deserialize the response body.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config

    def add_term(
            self, list_id, term, language, custom_headers=None, raw=False, **operation_config):
        """Add a term to the term list with list Id equal to list Id passed.

        :param list_id: List Id of the image list.
        :type list_id: str
        :param term: Term to be added
        :type term: str
        :param language: Language of the terms.
        :type language: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: object or ClientRawResponse if raw=true
        :rtype: object or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/termlists/{listId}/terms/{term}'
        path_format_arguments = {
            # skip_quote: the base URL is substituted verbatim so a full
            # host/path prefix survives URL formatting.
            'baseUrl': self._serialize.url("self.config.base_url_parameter", self.config.base_url_parameter, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str'),
            'term': self._serialize.url("term", term, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['language'] = self._serialize.query("language", language, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request; the service replies 201 on success.
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        if response.status_code not in [201]:
            raise models.APIErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 201:
            deserialized = self._deserialize('object', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def delete_term(
            self, list_id, term, language, custom_headers=None, raw=False, **operation_config):
        """Deletes a term from the list with list Id equal to the list Id passed.

        :param list_id: List Id of the image list.
        :type list_id: str
        :param term: Term to be deleted
        :type term: str
        :param language: Language of the terms.
        :type language: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: str or ClientRawResponse if raw=true
        :rtype: str or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/termlists/{listId}/terms/{term}'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url_parameter", self.config.base_url_parameter, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str'),
            'term': self._serialize.url("term", term, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['language'] = self._serialize.query("language", language, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request; the service replies 204 on success.
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        if response.status_code not in [204]:
            raise models.APIErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 204:
            deserialized = self._deserialize('str', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def get_all_terms(
            self, list_id, language, offset=None, limit=None, custom_headers=None, raw=False, **operation_config):
        """Gets all terms from the list with list Id equal to the list Id passed.

        :param list_id: List Id of the image list.
        :type list_id: str
        :param language: Language of the terms.
        :type language: str
        :param offset: The pagination start index.
        :type offset: int
        :param limit: The max limit.
        :type limit: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: Terms or ClientRawResponse if raw=true
        :rtype: ~azure.cognitiveservices.vision.contentmoderator.models.Terms
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/termlists/{listId}/terms'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url_parameter", self.config.base_url_parameter, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters; offset/limit are optional paging controls.
        query_parameters = {}
        query_parameters['language'] = self._serialize.query("language", language, 'str')
        if offset is not None:
            query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
        if limit is not None:
            query_parameters['limit'] = self._serialize.query("limit", limit, 'int')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request; the service replies 200 on success.
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.APIErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Terms', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized

    def delete_all_terms(
            self, list_id, language, custom_headers=None, raw=False, **operation_config):
        """Deletes all terms from the list with list Id equal to the list Id
        passed.

        :param list_id: List Id of the image list.
        :type list_id: str
        :param language: Language of the terms.
        :type language: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: str or ClientRawResponse if raw=true
        :rtype: str or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/termlists/{listId}/terms'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url_parameter", self.config.base_url_parameter, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['language'] = self._serialize.query("language", language, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request; the service replies 204 on success.
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        if response.status_code not in [204]:
            raise models.APIErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 204:
            deserialized = self._deserialize('str', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
| {
"content_hash": "7d480d8aa2d21ba10f55af094583a5fb",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 133,
"avg_line_length": 41.090196078431376,
"alnum_prop": 0.6417255201374308,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "bae3190cb9ad3f42e39a14d342625766ce8040d2",
"size": "10952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-cognitiveservices-vision-contentmoderator/azure/cognitiveservices/vision/contentmoderator/operations/list_management_term_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the 'nodes' app: Node (hostname/domain/fqdn) and
    # NodeIP, plus a nullable FK from Node to NodeIP.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Node',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('hostname', models.CharField(max_length=100)),
                ('domain', models.CharField(max_length=100)),
                ('fqdn', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='NodeIP',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ipaddr', models.GenericIPAddressField()),
            ],
        ),
        migrations.AddField(
            model_name='node',
            name='ipaddrs',
            # NOTE(review): a ForeignKey named 'ipaddrs' (plural) relates each
            # Node to at most one NodeIP — presumably a ManyToManyField was
            # intended; confirm against the models.  Do not edit an applied
            # migration either way; add a follow-up migration instead.
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='nodes.NodeIP'),
        ),
    ]
| {
"content_hash": "04b8dd4d8b22f6520f6be0c3608ec215",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 123,
"avg_line_length": 31.63888888888889,
"alnum_prop": 0.5610184372256365,
"repo_name": "kmccormick/rmm",
"id": "c66e4fbf05b9e78599464ff34a4758795bcbe3af",
"size": "1212",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/apps/nodes/migrations/0001_initial.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "25055"
},
{
"name": "Shell",
"bytes": "346"
}
],
"symlink_target": ""
} |
import io
import json
import os
import unittest
from . import adverseevent
from .fhirdate import FHIRDate
class AdverseEventTests(unittest.TestCase):
    """Round-trip tests for the AdverseEvent FHIR resource model."""

    def instantiate_from(self, filename):
        # Fixture files are resolved against FHIR_UNITTEST_DATADIR when set,
        # otherwise the current working directory.
        datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
        with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
            js = json.load(handle)
            self.assertEqual("AdverseEvent", js["resourceType"])
        return adverseevent.AdverseEvent(js)

    def testAdverseEvent1(self):
        inst = self.instantiate_from("adverseevent-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a AdverseEvent instance")
        self.implAdverseEvent1(inst)

        # Serialize back to JSON and re-parse to check a lossless round trip.
        js = inst.as_json()
        self.assertEqual("AdverseEvent", js["resourceType"])
        inst2 = adverseevent.AdverseEvent(js)
        self.implAdverseEvent1(inst2)

    def implAdverseEvent1(self, inst):
        # Field-by-field expectations for adverseevent-example.json.
        self.assertEqual(inst.category, "AE")
        self.assertEqual(inst.date.date, FHIRDate("2017-01-29T12:34:56+00:00").date)
        self.assertEqual(inst.date.as_json(), "2017-01-29T12:34:56+00:00")
        self.assertEqual(inst.description, "This was a mild rash on the left forearm")
        self.assertEqual(inst.id, "example")
        self.assertEqual(inst.identifier.system, "http://acme.com/ids/patients/risks")
        self.assertEqual(inst.identifier.value, "49476534")
        self.assertEqual(inst.seriousness.coding[0].code, "Mild")
        self.assertEqual(inst.seriousness.coding[0].display, "Mild")
        self.assertEqual(inst.seriousness.coding[0].system, "http://hl7.org/fhir/adverse-event-seriousness")
        self.assertEqual(inst.text.status, "generated")
        self.assertEqual(inst.type.coding[0].code, "304386008")
        self.assertEqual(inst.type.coding[0].display, "O/E - itchy rash")
        self.assertEqual(inst.type.coding[0].system, "http://snomed.info/sct")
| {
"content_hash": "19181fde07d7231fd074c76a5c8de685",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 108,
"avg_line_length": 45.27906976744186,
"alnum_prop": 0.6789933230611197,
"repo_name": "all-of-us/raw-data-repository",
"id": "c6a62dd4f7ce2050e401fc22e3db01f41052f0af",
"size": "2073",
"binary": false,
"copies": "1",
"ref": "refs/heads/devel",
"path": "rdr_service/lib_fhir/fhirclient_3_0_0/models/adverseevent_tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1866"
},
{
"name": "Mako",
"bytes": "1715"
},
{
"name": "Python",
"bytes": "17040924"
},
{
"name": "R",
"bytes": "2212"
},
{
"name": "Shell",
"bytes": "92213"
}
],
"symlink_target": ""
} |
"""
В данный пакет включаются кнопочные компоненты
"""
from __future__ import absolute_import
from .buttons import ExtButton
| {
"content_hash": "46863a25b70b3debbe8f48e60a0a6240",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 46,
"avg_line_length": 21,
"alnum_prop": 0.7698412698412699,
"repo_name": "barsgroup/m3-ext",
"id": "7daa0c8b19306b9d25813740cef423a0c0e2f2f7",
"size": "183",
"binary": false,
"copies": "1",
"ref": "refs/heads/deprecated/2.0.7.x",
"path": "src/m3_ext/ui/controls/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "132105"
},
{
"name": "HTML",
"bytes": "18203"
},
{
"name": "JavaScript",
"bytes": "1532142"
},
{
"name": "Python",
"bytes": "468313"
}
],
"symlink_target": ""
} |
""" Post-process Oprofile logs for x86-64 nexes running under sel_ldr.
Maps event counts in the "anon" region, to the appropriate addresses
in the nexe assembly. "Anon" represents the untrusted sandbox.
This will become unnecessary once we get immutable files for our .nexe
so that sel_ldr can use mmap the .nexe instead of copying it in
(Oprofile should understand mmap).
Remember to look at the oprofile log for the time spent in the
trusted code / OS (this only lists time spent in the untrusted code).
"""
# TODO(jvoung): consider using addr2line to look up functions with
# the linenum / file info instead of the using the rangemap.
# Pro: less custom code and possibility of understanding Dwarf info.
# Con: lots of exec()s to cover all the samples...
import commands
import getopt
import math
import re
import sys
def Debug(mesg):
    """Print a '# '-prefixed diagnostic line to stderr."""
    sys.stdout.flush()  # Make stdout/stderr come out in order.
    print >>sys.stderr, "# ", mesg
    return
def DemangleFunc(fun_name):
    """Return the demangled form of a C++ symbol name.

    Used when the disassembly was produced without objdump's "-C" flag.
    Names that already look demangled are returned unchanged, because
    c++filt mangles output when fed an already-demangled name.
    """
    if any(marker in fun_name for marker in ('(', '*', ':', '&')):
        return fun_name
    return commands.getoutput("c++filt " + fun_name)
# Assume addresses in inputs (logs and assembly files) are all this base.
ADDRESS_BASE = 16
ADDRESS_DIGIT = '[a-f0-9]'
def GetUntrustedBase(sel_ldr_log_fd):
    """Scan a sel_ldr debug log for the untrusted-region base address.

    The first line containing 'mem start addr' is parsed; its last
    whitespace-separated field is taken as the hex base address.
    Asserts when no such line is found.
    """
    base = None
    for log_line in sel_ldr_log_fd:
        # sel_ldr prints the base as the last field of this line.
        if 'mem start addr' in log_line:
            base = int(log_line.split()[-1], ADDRESS_BASE)
            break
    assert base is not None, "Couldn't parse untrusted base"
    Debug("untrusted_base = %s" % hex(base))
    return base
#--------------- Parse Oprofile Log ---------------
def CheckIfInSelLdrRegion(line, cur_range_base):
    """ Checks if we are reading the part of the oprofile --details log
    pertaining to the untrusted sandbox in sel_ldr's address space.
    Returns the base of that memory region or None. """
    fields = line.split()
    # cur_range_base should be set if we are already parsing the
    # untrusted sandbox section of the log.
    if cur_range_base:
        # Check if we are exiting the untrusted sandbox section of the log.
        # The header of a new non-untrusted-sandbox section should look like:
        # 00000000 samples pct foo.so foo.so /path-to/foo.so
        if len(fields) >= 6:
            Debug('Likely exiting sel_ldr section to a new section: %s' % fields[3])
            # Check if the next section is also a sel_ldr region.
            # (Recursion depth is at most 1: the recursive call passes None.)
            return CheckIfInSelLdrRegion(line, None)
        else:
            return cur_range_base
    else:
        # Check if we are entering the untrusted-sandbox section of the log.
        # The header of such a section should look like:
        #
        # 00000000 samples pct anon (tgid:22067 range:0xBASE-0xEND)
        # (sel_ldr or chrome) anon (tgid:22067 range:...)
        #
        # I.e., 10 fields...
        if (len(fields) == 10
            and (fields[6] == 'sel_ldr'
                 or fields[6] == 'chrome'
                 or fields[6] == 'nacl_helper_bootstrap')
            and ('anon' == fields[3])):
            Debug('Likely starting sel_ldr section: %s %s' % (fields[3], fields[6]))
            range_token = fields[9]
            # Pull the region's start address out of 'range:0xSTART-0xEND'.
            range_re = re.compile('range:0x(' + ADDRESS_DIGIT + '+)-0x')
            match = range_re.search(range_token)
            if match:
                range_str = match.group(1)
                range_base = int(range_str, ADDRESS_BASE)
                Debug('Likely range base is %s' % hex(range_base))
                return range_base
            else:
                Debug("Couldn't parse range base for: " + str(fields))
                return None
        else:
            return None
def UpdateAddrEventMap(line, sel_ldr_range_base, untrusted_base, addr_to_event):
    """Accumulate one oprofile --details sample line into addr_to_event.

    Sample lines have exactly three fields, e.g.::

        vma      samples  %
        0000028a 1        1.8e-04

    Anything else is ignored.  The vma is rebased from the sampled
    sel_ldr region into nexe address space before being used as a key.
    """
    fields = line.split()
    if len(fields) != 3:
        return
    # Parse the vma; values >= 2**63 are really negative 64-bit numbers
    # (e.g. fffffff484494ca5), so wrap them back to a signed value.
    address = int(fields[0], ADDRESS_BASE)
    if address > 0x8000000000000000:
        address = -((0xffffffffffffffff - address) + 1)
    # Rebase from the sampled sel_ldr region into nexe addresses.
    address += sel_ldr_range_base - untrusted_base
    addr_to_event[address] = addr_to_event.get(address, 0) + int(fields[1])
def CheckTrustedRecord(line, trusted_events, filter_events):
    """Record an oprofile sample count for a trusted (non-sandbox) symbol.

    Function records look like:

        address sample_count percent image_name app_name symbol_name

    (symbol names may contain spaces, so only 6 fields are split out).
    Returns True when the line was consumed as a trusted record.  With
    filter_events set, samples from images other than sel_ldr / llc /
    shared objects / the kernel placeholder / chrome are folded into the
    'FILTERED' bucket and False is returned.
    """
    fields = line.split(None, 5)
    if len(fields) < 6:
        return False
    image_name = fields[3]
    symbol_name = fields[5].rstrip()
    # The untrusted sandbox region is handled elsewhere — skip it here.
    if image_name == "anon" and symbol_name.find('sel_ldr') != -1:
        return False
    try:
        sample_count = int(fields[1])
    except ValueError:
        # Not a record line (e.g. the CPU-type header).
        return False
    relevant = (image_name.endswith('sel_ldr')
                or image_name.startswith('llc')
                or image_name.endswith('.so')
                or image_name == 'no-vmlinux'
                or image_name == 'chrome'
                or image_name == 'nacl_helper_bootstrap')
    if filter_events and not relevant:
        trusted_events['FILTERED'] = trusted_events.get('FILTERED', 0) + sample_count
        return False
    # Keep only the first occurrence of each image:symbol pair — opreport
    # output is sorted, so the first is most likely our process of interest.
    key = image_name + ':' + symbol_name
    trusted_events[key] = trusted_events.get(key, sample_count)
    return True
def GetAddressToEventSelLdr(fd, filter_events, untrusted_base):
    """Split an oprofile --details log into untrusted and trusted samples.

    Returns two maps: addr_to_event (nexe address (int) -> event count)
    and trusted_events (function key (str) -> event count).
    """
    addr_to_event = {}
    trusted_events = {}
    region_base = None
    for log_line in fd:
        region_base = CheckIfInSelLdrRegion(log_line, region_base)
        if not region_base:
            # Outside the sandbox region: aggregate trusted-code samples.
            CheckTrustedRecord(log_line, trusted_events, filter_events)
        else:
            # Inside the sandbox region: rebase and accumulate per-address.
            UpdateAddrEventMap(log_line,
                               region_base,
                               untrusted_base,
                               addr_to_event)
    fd.seek(0)  # Reset for future use...
    return addr_to_event, trusted_events
#--------------- Parse Assembly File ---------------
def CompareBounds(bounds_a, bounds_b):
    """Three-way compare of two non-overlapping (lower, upper) ranges.

    Returns -1 when bounds_a lies entirely below bounds_b, 1 when it
    lies entirely above, and 0 when the ranges touch or overlap
    anywhere in between (not necessarily equal).
    """
    lb1, ub1 = bounds_a
    lb2, ub2 = bounds_b
    if lb1 < lb2 and ub1 < lb2:
        return -1
    if lb1 > ub2 and ub1 > ub2:
        return 1
    return 0
class RangeMapSorted(object):
    """Map from non-overlapping integer ranges [lb, ub] to data.

    Ranges are kept as a sorted list of ((lb, ub), data) pairs and
    located by binary search.

    Fixes over the previous version:
      * ``ranges`` was a *class* attribute, so all instances shared one
        list; it is now created per instance in ``__init__``.
      * the recursive binary search did not terminate for keys falling
        in a gap *between* two stored ranges (the guess oscillated
        between the neighbours forever); the iterative search below
        always terminates and such lookups return None.
    """

    # Sentinel indexes (< 0) returned when the probe lies outside all ranges.
    kGREATER = -2
    kLESS = -1

    def __init__(self):
        # Per-instance sorted list of ((lb, ub), data) pairs.
        self.ranges = []

    @staticmethod
    def _CompareBounds(bounds_a, bounds_b):
        """Three-way compare of two non-overlapping ranges (0 = overlap)."""
        (lb1, ub1) = bounds_a
        (lb2, ub2) = bounds_b
        if lb1 < lb2 and ub1 < lb2:
            return -1
        elif lb1 > ub2 and ub1 > ub2:
            return 1
        else:
            return 0

    def FindIndex(self, lb, ub):
        """Binary-search for [lb, ub].

        Returns the index of the overlapping range when one exists,
        kLESS / kGREATER when [lb, ub] lies below / above all stored
        ranges, or the in-between insertion point otherwise.
        """
        lo = 0
        hi = len(self.ranges)
        while lo < hi:
            mid = (lo + hi) // 2
            comp = self._CompareBounds((lb, ub), self.ranges[mid][0])
            if comp == 0:
                return mid
            elif comp < 0:
                hi = mid
            else:
                lo = mid + 1
        if lo == 0:
            return self.kLESS
        if lo >= len(self.ranges):
            return self.kGREATER
        return lo

    def FindIndexFrom(self, lb, ub, CurGuess, CurL, CurH):
        """Kept for backward compatibility; delegates to FindIndex
        (the guess/bound arguments are no longer needed)."""
        return self.FindIndex(lb, ub)

    def Add(self, lb, ub, data):
        """ Add a mapping from [lb, ub] --> data """
        index = self.FindIndex(lb, ub)
        range_data = ((lb, ub), data)
        if index == self.kLESS:
            self.ranges.insert(0, range_data)
        elif index == self.kGREATER:
            self.ranges.append(range_data)
        else:
            # In-between insertion point: keeps the list sorted.
            self.ranges.insert(index, range_data)

    def Lookup(self, key):
        """ Get the data that falls within the range, or None. """
        index = self.FindIndex(key, key)
        # Check if it is out of range.
        if index < 0:
            return None
        ((lb, ub), d) = self.ranges[index]
        # Double check that the key actually falls in range (a non-negative
        # index may be a mere insertion point between ranges).
        if lb <= key and key <= ub:
            return d
        else:
            return None

    def GetRangeFromKey(self, key):
        """ Get the (lb, ub) range containing key, or None. """
        index = self.FindIndex(key, key)
        # Check if it is out of range.
        if index < 0:
            return None
        ((lb, ub), _) = self.ranges[index]
        # Double check that the key actually falls in range.
        if lb <= key and key <= ub:
            return (lb, ub)
        else:
            return None
ADDRESS_RE = re.compile('(' + ADDRESS_DIGIT + '+):')
FUNC_RE = re.compile('(' + ADDRESS_DIGIT + '+) <(.*)>:')
def GetAssemblyAddress(line):
    """Return the address of a disassembly instruction line, or None.

    Instruction lines look like:
        address: [byte] [byte]... [instruction in text]
    """
    fields = line.split()
    if len(fields) <= 1:
        return None
    match = ADDRESS_RE.search(fields[0])
    if not match:
        return None
    return int(match.group(1), ADDRESS_BASE)
def GetAssemblyRanges(fd):
    """ Return a RangeMap that tracks the boundaries of each function.
    E.g., [0x20000, 0x2003f] --> "foo"
          [0x20040, 0x20060] --> "bar"
    """
    rmap = RangeMapSorted()
    cur_start = None
    cur_func = None
    cur_end = None
    for line in fd:
        # If we are within a function body...
        if cur_func:
            # Check if it has ended (with a newline)
            if line.strip() == '':
                assert (cur_start and cur_end)
                rmap.Add(cur_start, cur_end, cur_func)
                cur_start = None
                cur_end = None
                cur_func = None
            else:
                # Track the last instruction address seen so far; it becomes
                # the function's upper bound when the body ends.
                maybe_addr = GetAssemblyAddress(line)
                if maybe_addr:
                    cur_end = maybe_addr
        else:
            # Not yet within a function body. Check if we are entering.
            # The header should look like:
            # 0000000000020040 <foo>:
            match = FUNC_RE.search(line)
            if match:
                cur_start = int(match.group(1), ADDRESS_BASE)
                cur_func = match.group(2)
    # NOTE(review): a function still open at EOF (no trailing blank line)
    # is never added to the map — confirm the disassembly always ends with
    # a blank line.
    fd.seek(0) # reset for future use.
    return rmap
#--------------- Summarize Data ---------------
def PrintTopFunctions(assembly_ranges, address_to_events, trusted_events):
    """ Prints the N functions with the top event counts.

    Untrusted samples (address_to_events) are attributed to functions via
    assembly_ranges; trusted samples arrive pre-aggregated per function.
    NOTE(review): raises ZeroDivisionError when there are no samples at
    all — acceptable for a diagnostic script, but worth knowing.
    """
    func_events = {}
    some_addrs_not_found = False
    for (addr, count) in address_to_events.iteritems():
        func = assembly_ranges.Lookup(addr)
        if (func):
            # Function labels are mostly unique, except when we have ASM labels
            # that we mistake for functions. E.g., "loop:" is a common ASM label.
            # Thus, to get a unique value, we must append the unique key range
            # to the function label.
            (lb, ub) = assembly_ranges.GetRangeFromKey(addr)
            key = (func, lb, ub)
            cur_count = func_events.get(key, 0)
            func_events[key] = cur_count + count
        else:
            Debug('No matching function for addr/count: %s %d'
                  % (hex(addr), count))
            some_addrs_not_found = True
    if some_addrs_not_found:
        # Addresses < 0x20000 are likely trampoline addresses.
        Debug('NOTE: sample addrs < 0x20000 are likely trampolines')
    filtered_events = trusted_events.pop('FILTERED', 0)
    # convert trusted functions (which are just functions and not ranges) into
    # the same format and mix them with untrusted. Just use 0s for the ranges
    for (func, count) in trusted_events.iteritems():
        key = (func, 0, 0)
        func_events[key] = count
    flattened = func_events.items()
    # Sort by sample count, descending (Python 2 cmp-style comparator).
    def CompareCounts ((k1, c1), (k2, c2)):
        if c1 < c2:
            return -1
        elif c1 == c2:
            return 0
        else:
            return 1
    flattened.sort(cmp=CompareCounts, reverse=True)
    top_30 = flattened[:30]
    total_samples = (sum(address_to_events.itervalues())
                     + sum(trusted_events.itervalues()))
    print "============= Top 30 Functions ==============="
    print "EVENTS\t\tPCT\tCUM\tFUNC [LOW_VMA, UPPER_VMA]"
    cum_pct = 0.0
    for ((func, lb, ub), count) in top_30:
        pct = 100.0 * count / total_samples
        cum_pct += pct
        print "%d\t\t%.2f\t%.2f\t%s [%s, %s]" % (count, pct, cum_pct,
                                                 DemangleFunc(func), hex(lb), hex(ub))
    print "%d samples filtered (%.2f%% of all samples)" % (filtered_events,
        100.0 * filtered_events / (filtered_events + total_samples))
#--------------- Annotate Assembly ---------------
def PrintAnnotatedAssembly(fd_in, address_to_events, fd_out):
  """ Writes to fd_out a version of the assembly listing in fd_in with
      event counts appended in the form #; EVENTS: N.
      This lets us know which instructions took the most time, etc.
  """
  for raw_line in fd_in:
    stripped = raw_line.strip()
    addr = GetAssemblyAddress(stripped)
    if addr in address_to_events:
      print >>fd_out, "%s #; EVENTS: %d" % (stripped, address_to_events[addr])
    else:
      print >>fd_out, stripped
  fd_in.seek(0)  # reset for future use.
#--------------- Main ---------------
def main(argv):
try:
opts, args = getopt.getopt(argv[1:],
'l:s:o:m:f',
['oprofilelog=',
'assembly=',
'output=',
'memmap=',
'untrusted_base=',
])
assembly_file = None
assembly_fd = None
oprof_log = None
oprof_fd = None
output = sys.stdout
out_name = None
filter_events = False
# Get the untrusted base address from either a sel_ldr log
# which prints out the mapping, or from the command line directly.
mapfile_name = None
mapfile_fd = None
untrusted_base = None
for o, a in opts:
if o in ('-l', '--oprofilelog'):
oprof_log = a
oprof_fd = open(oprof_log, 'r')
elif o in ('-s', '--assembly'):
assembly_file = a
assembly_fd = open(assembly_file, 'r')
elif o in ('-o', '--output'):
out_name = a
output = open(out_name, 'w')
elif o in ('-m', '--memmap'):
mapfile_name = a
try:
mapfile_fd = open(mapfile_name, 'r')
except IOError:
pass
elif o in ('-b', '--untrusted_base'):
untrusted_base = a
elif o == '-f':
filter_events = True
else:
assert False, 'unhandled option'
if untrusted_base:
if mapfile_fd:
print 'Error: Specified both untrusted_base directly and w/ memmap file'
sys.exit(1)
untrusted_base = int(untrusted_base, 16)
else:
if mapfile_fd:
Debug('Parsing sel_ldr output for untrusted memory base: %s' %
mapfile_name)
untrusted_base = GetUntrustedBase(mapfile_fd)
else:
print 'Error: Need sel_ldr log --memmap or --untrusted_base.'
sys.exit(1)
if assembly_file and oprof_log:
Debug('Parsing assembly file of nexe: %s' % assembly_file)
assembly_ranges = GetAssemblyRanges(assembly_fd)
Debug('Parsing oprofile log: %s' % oprof_log)
untrusted_events, trusted_events = \
GetAddressToEventSelLdr(oprof_fd, filter_events, untrusted_base)
Debug('Printing the top functions (most events)')
PrintTopFunctions(assembly_ranges, untrusted_events, trusted_events)
Debug('Printing annotated assembly to %s (or stdout)' % out_name)
PrintAnnotatedAssembly(assembly_fd, untrusted_events, output)
else:
print 'Need assembly file(%s) and oprofile log(%s)!' \
% (assembly_file, oprof_log)
sys.exit(1)
except getopt.GetoptError, err:
print str(err)
sys.exit(1)
if __name__ == '__main__':
main(sys.argv)
| {
"content_hash": "3ae742701579fbc399ce2d530ecb1984",
"timestamp": "",
"source": "github",
"line_count": 481,
"max_line_length": 80,
"avg_line_length": 35.14968814968815,
"alnum_prop": 0.6145383568935944,
"repo_name": "Lind-Project/native_client",
"id": "ad4d5ce1be2e88450f20642b45d35594df32c0c1",
"size": "17098",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tools/process_oprofile_x86_64.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "149910"
},
{
"name": "Batchfile",
"bytes": "10418"
},
{
"name": "C",
"bytes": "10425715"
},
{
"name": "C++",
"bytes": "7409986"
},
{
"name": "HTML",
"bytes": "183711"
},
{
"name": "JavaScript",
"bytes": "5925"
},
{
"name": "Logos",
"bytes": "647"
},
{
"name": "Makefile",
"bytes": "65439"
},
{
"name": "Objective-C++",
"bytes": "2658"
},
{
"name": "Python",
"bytes": "2127774"
},
{
"name": "Ragel",
"bytes": "104506"
},
{
"name": "Shell",
"bytes": "454354"
}
],
"symlink_target": ""
} |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
# NOTE: machine-generated protocol buffer module (protoc output) for
# pogoprotos/data/battle/battle_type.proto -- do not edit by hand.

# Default symbol database used to register the generated descriptors below.
_sym_db = _symbol_database.Default()

# File descriptor carrying the serialized .proto definition.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='pogoprotos/data/battle/battle_type.proto',
  package='pogoprotos.data.battle',
  syntax='proto3',
  serialized_pb=_b('\n(pogoprotos/data/battle/battle_type.proto\x12\x16pogoprotos.data.battle*k\n\nBattleType\x12\x15\n\x11\x42\x41TTLE_TYPE_UNSET\x10\x00\x12\x16\n\x12\x42\x41TTLE_TYPE_NORMAL\x10\x01\x12\x18\n\x14\x42\x41TTLE_TYPE_TRAINING\x10\x02\x12\x14\n\x10\x42\x41TTLE_TYPE_RAID\x10\x03\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

# Enum descriptor for pogoprotos.data.battle.BattleType.
_BATTLETYPE = _descriptor.EnumDescriptor(
  name='BattleType',
  full_name='pogoprotos.data.battle.BattleType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='BATTLE_TYPE_UNSET', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BATTLE_TYPE_NORMAL', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BATTLE_TYPE_TRAINING', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BATTLE_TYPE_RAID', index=3, number=3,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=68,
  serialized_end=175,
)
_sym_db.RegisterEnumDescriptor(_BATTLETYPE)

# Public enum wrapper plus module-level aliases for each enum value.
BattleType = enum_type_wrapper.EnumTypeWrapper(_BATTLETYPE)
BATTLE_TYPE_UNSET = 0
BATTLE_TYPE_NORMAL = 1
BATTLE_TYPE_TRAINING = 2
BATTLE_TYPE_RAID = 3
DESCRIPTOR.enum_types_by_name['BattleType'] = _BATTLETYPE
# @@protoc_insertion_point(module_scope)
| {
"content_hash": "eb99f15c7dd75071ad9e26b152a51a1a",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 308,
"avg_line_length": 31.890625,
"alnum_prop": 0.7383635472807447,
"repo_name": "bellowsj/aiopogo",
"id": "4536c3a3c3c490cda5896a8f8a2b0cfa3f77b0e5",
"size": "2152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aiopogo/pogoprotos/data/battle/battle_type_pb2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "62068"
}
],
"symlink_target": ""
} |
class ForeignKeyError(Exception):
    """Error involving a foreign key."""
    pass
| {
"content_hash": "c40ed598d00f71fc06431a877725f140",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 33,
"avg_line_length": 21.5,
"alnum_prop": 0.7674418604651163,
"repo_name": "fcurella/django-fakery",
"id": "e4cdbadb5cbcb6df277b6a08cc63a9f098803c7a",
"size": "43",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_fakery/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "248"
},
{
"name": "Python",
"bytes": "75766"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class HttpRequestInfo(Model):
    """The Http request info.

    :param client_request_id: the client request id.
    :type client_request_id: str
    :param client_ip_address: the client Ip Address
    :type client_ip_address: str
    :param method: the Http request method.
    :type method: str
    :param uri: the Uri.
    :type uri: str
    """

    # msrest serialization map: python attribute -> wire key and type.
    _attribute_map = {
        'client_request_id': {'key': 'clientRequestId', 'type': 'str'},
        'client_ip_address': {'key': 'clientIpAddress', 'type': 'str'},
        'method': {'key': 'method', 'type': 'str'},
        'uri': {'key': 'uri', 'type': 'str'},
    }

    def __init__(self, *, client_request_id: str=None, client_ip_address: str=None, method: str=None, uri: str=None, **kwargs) -> None:
        super(HttpRequestInfo, self).__init__(**kwargs)
        # Plain value-object storage; no validation is performed here.
        self.uri = uri
        self.method = method
        self.client_ip_address = client_ip_address
        self.client_request_id = client_request_id
| {
"content_hash": "ed60c63e1b6265608cd5d0edfe8c408c",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 135,
"avg_line_length": 34.827586206896555,
"alnum_prop": 0.6049504950495049,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "5e0e15d35a832cbbd6eeeb5afab4dbf0968a1333",
"size": "1484",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-monitor/azure/mgmt/monitor/models/http_request_info_py3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
import re
from collections import OrderedDict
from datetime import datetime
from misaka import BaseRenderer
class ChangelogParserMixin(object):
    """Markdown-renderer mixin that records changelog structure.

    h2 headers are treated as versions, h3 headers as sections within the
    current version, and list items as entries of the current section.
    """

    versions = None
    _current_version = None
    _current_section = None

    def reset(self):
        """Discard any previously parsed state."""
        self.versions = {}
        self._current_version = None
        self._current_section = None

    def header(self, content, level):
        """Record an h2 header as a version or an h3 header as a section."""
        if level == 2:
            parsed = parse_version(content)
            version = parsed["version"]
            self.versions[version] = {
                "release_date": parsed["release_date"],
                "sections": OrderedDict(),
            }
            self._current_version = version
            self._current_section = None
        elif level == 3:
            self.versions[self._current_version]["sections"][content] = []
            self._current_section = content

    def listitem(self, content, is_ordered, is_block):
        """Append one stripped list item to the section being parsed."""
        sections = self.versions[self._current_version]["sections"]
        sections[self._current_section].append(content.strip())

    def changelog_for(self, version):
        """Render the markdown body for *version*; raise ValueError if unknown."""
        if not self.versions or version not in self.versions:
            raise ValueError("Unknown version {}".format(version))
        pieces = []
        for section, items in self.versions[version]["sections"].items():
            pieces.append("## {section}\n".format(section=section))
            for item in items:
                pieces.append("- {item}\n".format(item=item.replace("\n", " ")))
            pieces.append("\n")
        return "".join(pieces).strip()
class Renderer(ChangelogParserMixin, BaseRenderer):
    """Misaka renderer that collects changelog data via ChangelogParserMixin."""
    pass
def parse_version(title):
    """Extract version and release date from a changelog h2 title.

    Returns a dict with "version" and "release_date" (a ``datetime.date``,
    or ``None`` for the Unreleased section); returns ``None`` when the
    title matches neither recognized form.
    """
    released = re.match(
        r"\[?(?P<version>[^(\]|\s)]+)\]? - (?P<release_date>\d{4}-\d{2}-\d{2})$",
        title)
    if released:
        release_date = datetime.strptime(released.group("release_date"),
                                         "%Y-%m-%d").date()
        return {
            "version": released.group("version"),
            "release_date": release_date,
        }
    unreleased = re.match(r"\[(?P<version>Unreleased)\]$", title)
    if unreleased:
        return {
            "version": unreleased.group("version"),
            "release_date": None,
        }
    return None
| {
"content_hash": "cbddf328a0bfa2c5572d6c4f7b2a687a",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 103,
"avg_line_length": 31.492753623188406,
"alnum_prop": 0.5462494247583985,
"repo_name": "rcmachado/pykeepachangelog",
"id": "52117cc3242bea6d130c179dc39ada98294de2c4",
"size": "2173",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keepachangelog/renderer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12602"
}
],
"symlink_target": ""
} |
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from modular_build import read_file, write_file
import os
import os.path as path
import generate_protocol_externs
import modular_build
import re
import shutil
import subprocess
import sys
import tempfile
try:
import simplejson as json
except ImportError:
import json
# Print usage and exit early when invoked with '--help'.
if len(sys.argv) == 2 and sys.argv[1] == '--help':
    print("Usage: %s [module_names]" % path.basename(sys.argv[0]))
    print(" module_names list of modules for which the Closure compilation should run.")
    print(" If absent, the entire frontend will be compiled.")
    sys.exit(0)

# True when running under Cygwin; the path-conversion helpers depend on it.
is_cygwin = sys.platform == 'cygwin'
def popen(arguments):
    """Spawn *arguments* with stdout piped and stderr merged into stdout."""
    capture = dict(stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    return subprocess.Popen(arguments, **capture)
def to_platform_path(filepath):
    """Convert a Cygwin /cygdrive path to a Windows drive path; no-op elsewhere."""
    if is_cygwin:
        return re.sub(r'^/cygdrive/(\w)', '\\1:', filepath)
    return filepath
def to_platform_path_exact(filepath):
    """Return an exact (escaped) Windows path via cygpath on Cygwin; no-op elsewhere."""
    if not is_cygwin:
        return filepath
    windows_path, _ = popen(['cygpath', '-w', filepath]).communicate()
    # pylint: disable=E1103
    return windows_path.strip().replace('\\', '\\\\')
# Key locations in the checkout, all derived from this script's own path.
scripts_path = path.dirname(path.abspath(__file__))
devtools_path = path.dirname(scripts_path)
inspector_path = path.join(path.dirname(devtools_path), 'core', 'inspector')
v8_inspector_path = path.join(path.dirname(devtools_path), 'platform', 'v8_inspector')
devtools_frontend_path = path.join(devtools_path, 'front_end')
global_externs_file = to_platform_path(path.join(devtools_frontend_path, 'externs.js'))
protocol_externs_file = path.join(devtools_frontend_path, 'protocol_externs.js')
injected_script_source_name = path.join(v8_inspector_path, 'InjectedScriptSource.js')
injected_script_externs_file = path.join(v8_inspector_path, 'injected_script_externs.js')
debugger_script_source_name = path.join(v8_inspector_path, 'DebuggerScript.js')
debugger_script_externs_file = path.join(v8_inspector_path, 'debugger_script_externs.js')
# Prefix/name used when emitting closure '--module' arguments.
jsmodule_name_prefix = 'jsmodule_'
runtime_module_name = '_runtime'
# JsDoc tags whose type annotations are validated by the regexes below.
type_checked_jsdoc_tags_list = ['param', 'return', 'type', 'enum']
type_checked_jsdoc_tags_or = '|'.join(type_checked_jsdoc_tags_list)
# Basic regex for invalid JsDoc types: an object type name ([A-Z][_A-Za-z0-9.]+[A-Za-z0-9]) not preceded by '!', '?', ':' (this, new), or '.' (object property).
invalid_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*\{.*(?<![!?:._A-Za-z0-9])([A-Z][_A-Za-z0-9.]+[A-Za-z0-9])[^/]*\}')
invalid_type_designator_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*.*(?<![{: ])([?!])=?\}')
invalid_non_object_type_regex = re.compile(r'@(?:' + type_checked_jsdoc_tags_or + r')\s*\{.*(![a-z]+)[^/]*\}')
error_warning_regex = re.compile(r'WARNING|ERROR')
loaded_css_regex = re.compile(r'(?:registerRequiredCSS|WebInspector\.View\.createStyleElement)\s*\(\s*"(.+)"\s*\)')
java_build_regex = re.compile(r'^\w+ version "(\d+)\.(\d+)')
# Global error flag; flipped by the verification passes further below.
errors_found = False
# Regenerate protocol externs from the browser/js protocol JSON definitions.
generate_protocol_externs.generate_protocol_externs(protocol_externs_file, path.join(inspector_path, 'browser_protocol.json'), path.join(v8_inspector_path, 'js_protocol.json'))
# Based on http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python.
def which(program):
    """Return the full path of *program*, or None when it cannot be found.

    Mimics the shell 'which': a program containing a directory component is
    checked directly, otherwise each entry of $PATH is searched in order.
    """
    def _is_runnable(candidate):
        return path.isfile(candidate) and os.access(candidate, os.X_OK)

    directory, _ = path.split(program)
    if directory:
        if _is_runnable(program):
            return program
    else:
        for entry in os.environ["PATH"].split(os.pathsep):
            entry = entry.strip('"')
            candidate = path.join(entry, program)
            if _is_runnable(candidate):
                return candidate
    return None
def log_error(message):
    """Print *message* to stdout prefixed with 'ERROR: '."""
    print 'ERROR: ' + message
def error_excepthook(exctype, value, traceback):
    """Excepthook wrapper that prefixes uncaught tracebacks with 'ERROR:'."""
    print 'ERROR:'
    sys.__excepthook__(exctype, value, traceback)
# Install the wrapper so any uncaught exception is clearly flagged.
sys.excepthook = error_excepthook
# DevTools application descriptors to load and compile.
application_descriptors = ['inspector.json', 'toolbox.json', 'formatter_worker.json', 'heap_snapshot_worker.json', 'temp_storage_shared_worker.json']
loader = modular_build.DescriptorLoader(devtools_frontend_path)
descriptors = loader.load_applications(application_descriptors)
modules_by_name = descriptors.modules
def hasErrors(output):
    """Return True if *output* contains a WARNING or ERROR marker.

    Uses 'is not None' rather than the previous '!= None' comparison
    (identity test is the correct idiom for None checks).
    """
    return re.search(error_warning_regex, output) is not None
def verify_jsdoc_extra(additional_files):
    """Launch the external jsdoc-validator jar over all compiled files.

    Returns a (process, file_list) pair; the caller must communicate() with
    the process and remove file_list.name afterwards.
    """
    files = [to_platform_path(file) for file in descriptors.all_compiled_files() + additional_files]
    file_list = tempfile.NamedTemporaryFile(mode='wt', delete=False)
    try:
        file_list.write('\n'.join(files))
    finally:
        file_list.close()
    return popen(java_exec + ['-jar', jsdoc_validator_jar, '--files-list-name', to_platform_path_exact(file_list.name)]), file_list
def verify_jsdoc(additional_files):
    """Run the in-process, regex-based JsDoc checks over every compiled file.

    Returns True if any error was found (each is printed as encountered).
    """
    def file_list():
        return descriptors.all_compiled_files() + additional_files

    errors_found = False
    for full_file_name in file_list():
        lineIndex = 0
        with open(full_file_name, 'r') as sourceFile:
            for line in sourceFile:
                line = line.rstrip()
                lineIndex += 1
                if not line:
                    continue
                if verify_jsdoc_line(full_file_name, lineIndex, line):
                    errors_found = True
    return errors_found
def verify_jsdoc_line(fileName, lineIndex, line):
def print_error(message, errorPosition):
print '%s:%s: ERROR - %s%s%s%s%s%s' % (fileName, lineIndex, message, os.linesep, line, os.linesep, ' ' * errorPosition + '^', os.linesep)
known_css = {}
errors_found = False
match = re.search(invalid_type_regex, line)
if match:
print_error('Type "%s" nullability not marked explicitly with "?" (nullable) or "!" (non-nullable)' % match.group(1), match.start(1))
errors_found = True
match = re.search(invalid_non_object_type_regex, line)
if match:
print_error('Non-object type explicitly marked with "!" (non-nullable), which is the default and should be omitted', match.start(1))
errors_found = True
match = re.search(invalid_type_designator_regex, line)
if match:
print_error('Type nullability indicator misplaced, should precede type', match.start(1))
errors_found = True
match = re.search(loaded_css_regex, line)
if match:
file = path.join(devtools_frontend_path, match.group(1))
exists = known_css.get(file)
if exists is None:
exists = path.isfile(file)
known_css[file] = exists
if not exists:
print_error('Dynamically loaded CSS stylesheet is missing in the source tree', match.start(1))
errors_found = True
return errors_found
def find_java():
required_major = 1
required_minor = 7
exec_command = None
has_server_jvm = True
java_path = which('java')
if not java_path:
java_path = which('java.exe')
if not java_path:
print 'NOTE: No Java executable found in $PATH.'
sys.exit(1)
is_ok = False
java_version_out, _ = popen([java_path, '-version']).communicate()
# pylint: disable=E1103
match = re.search(java_build_regex, java_version_out)
if match:
major = int(match.group(1))
minor = int(match.group(2))
is_ok = major >= required_major and minor >= required_minor
if is_ok:
exec_command = [java_path, '-Xms1024m', '-server', '-XX:+TieredCompilation']
check_server_proc = popen(exec_command + ['-version'])
check_server_proc.communicate()
if check_server_proc.returncode != 0:
# Not all Java installs have server JVMs.
exec_command = exec_command.remove('-server')
has_server_jvm = False
if not is_ok:
print 'NOTE: Java executable version %d.%d or above not found in $PATH.' % (required_major, required_minor)
sys.exit(1)
print 'Java executable: %s%s' % (java_path, '' if has_server_jvm else ' (no server JVM)')
return exec_command
java_exec = find_java()
# Jars driving compilation and validation, all relative to this script.
closure_compiler_jar = to_platform_path(path.join(scripts_path, 'closure', 'compiler.jar'))
closure_runner_jar = to_platform_path(path.join(scripts_path, 'compiler-runner', 'closure-runner.jar'))
jsdoc_validator_jar = to_platform_path(path.join(scripts_path, 'jsdoc-validator', 'jsdoc-validator.jar'))
# Scratch directory for per-module compiler output; removed at the end.
modules_dir = tempfile.mkdtemp()
# Flags shared by every Closure compiler invocation below.
common_closure_args = [
    '--summary_detail_level', '3',
    '--jscomp_error', 'visibility',
    '--compilation_level', 'SIMPLE_OPTIMIZATIONS',
    '--warning_level', 'VERBOSE',
    '--language_in=ES6_STRICT',
    '--language_out=ES5_STRICT',
    '--extra_annotation_name', 'suppressReceiverCheck',
    '--extra_annotation_name', 'suppressGlobalPropertiesCheck',
    '--module_output_path_prefix', to_platform_path_exact(modules_dir + path.sep)
]
# Partition application modules: 'worker'-typed modules compile standalone,
# and dependents_by_module_name lets us skip dependents of failed modules.
worker_modules_by_name = {}
dependents_by_module_name = {}
for module_name in descriptors.application:
    module = descriptors.modules[module_name]
    if descriptors.application[module_name].get('type', None) == 'worker':
        worker_modules_by_name[module_name] = module
    for dep in module.get('dependencies', []):
        # (NB: 'list' shadows the builtin here; kept as-is.)
        list = dependents_by_module_name.get(dep)
        if not list:
            list = []
            dependents_by_module_name[dep] = list
        list.append(module_name)
def check_conditional_dependencies():
    """Ensure no module depends on an experiment/condition-gated module."""
    # BUG FIX: without the global declaration, the assignment below bound a
    # throwaway function-local, so detected violations never flipped the
    # module-level errors_found flag.
    global errors_found
    for name in modules_by_name:
        for dep_name in modules_by_name[name].get('dependencies', []):
            dependency = modules_by_name[dep_name]
            if dependency.get('experiment') or dependency.get('condition'):
                log_error('Module "%s" may not depend on the conditional module "%s"' % (name, dep_name))
                errors_found = True

check_conditional_dependencies()
def verify_worker_modules():
    """Ensure no module depends on a worker module (workers are standalone)."""
    # BUG FIX: without the global declaration, the assignment below bound a
    # throwaway function-local instead of the module-level flag.
    global errors_found
    for name in modules_by_name:
        for dependency in modules_by_name[name].get('dependencies', []):
            if dependency in worker_modules_by_name:
                log_error('Module "%s" may not depend on the worker module "%s"' % (name, dependency))
                errors_found = True

verify_worker_modules()
def check_duplicate_files():
    """Verify that no script is pulled in by more than one module per worker app."""
    def check_module(module, seen_files, seen_modules):
        # Depth-first over dependencies; seen_files maps a script path to
        # the module that first claimed it.
        name = module['name']
        seen_modules[name] = True
        for dep_name in module.get('dependencies', []):
            if not dep_name in seen_modules:
                check_module(modules_by_name[dep_name], seen_files, seen_modules)
        for source in module.get('scripts', []):
            referencing_module = seen_files.get(source)
            if referencing_module:
                log_error('Duplicate use of %s in "%s" (previously seen in "%s")' % (source, name, referencing_module))
            seen_files[source] = name

    for module_name in worker_modules_by_name:
        check_module(worker_modules_by_name[module_name], {}, {})

print 'Checking duplicate files across modules...'
check_duplicate_files()
def module_arg(module_name):
    """Return the closure-runner ' --module <prefix><name>' argument."""
    return ' --module %s%s' % (jsmodule_name_prefix, module_name)
def modules_to_check():
    """Return modules to compile: all of them, or only those named on argv."""
    if len(sys.argv) == 1:
        return descriptors.sorted_modules()
    print 'Compiling only these modules: %s' % sys.argv[1:]
    return [module for module in descriptors.sorted_modules() if module in set(sys.argv[1:])]
def dump_module(name, recursively, processed_modules):
    """Build the closure-runner '--module ...:N:deps --js ...' argument string.

    When *recursively* is set, dependencies are emitted first;
    *processed_modules* guards against emitting a module twice.
    """
    if name in processed_modules:
        return ''
    processed_modules[name] = True
    module = modules_by_name[name]
    # (currently unused)
    skipped_scripts = set(module.get('skip_compilation', []))
    command = ''
    dependencies = module.get('dependencies', [])
    if recursively:
        for dependency in dependencies:
            command += dump_module(dependency, recursively, processed_modules)
    command += module_arg(name) + ':'
    filtered_scripts = descriptors.module_compiled_files(name)
    command += str(len(filtered_scripts))
    firstDependency = True
    # Every module implicitly depends on the runtime module.
    for dependency in dependencies + [runtime_module_name]:
        if firstDependency:
            command += ':'
        else:
            command += ','
        firstDependency = False
        command += jsmodule_name_prefix + dependency
    for script in filtered_scripts:
        command += ' --js ' + to_platform_path(path.join(devtools_frontend_path, name, script))
    return command
print 'Compiling frontend...'
# Write one closure-runner argument line per module into a temp file,
# then kick off the modular compiler over that file.
compiler_args_file = tempfile.NamedTemporaryFile(mode='wt', delete=False)
try:
    platform_protocol_externs_file = to_platform_path(protocol_externs_file)
    runtime_js_path = to_platform_path(path.join(devtools_frontend_path, 'Runtime.js'))
    checked_modules = modules_to_check()
    for name in checked_modules:
        closure_args = ' '.join(common_closure_args)
        closure_args += ' --externs ' + to_platform_path(global_externs_file)
        closure_args += ' --externs ' + platform_protocol_externs_file
        runtime_module = module_arg(runtime_module_name) + ':1 --js ' + runtime_js_path
        closure_args += runtime_module + dump_module(name, True, {})
        compiler_args_file.write('%s %s%s' % (name, closure_args, os.linesep))
finally:
    compiler_args_file.close()
modular_compiler_proc = popen(java_exec + ['-jar', closure_runner_jar, '--compiler-args-file', to_platform_path_exact(compiler_args_file.name)])
def unclosure_injected_script(sourceFileName, outFileName):
    """Strip the closure wrapper from InjectedScriptSource so it can compile."""
    text = read_file(sourceFileName)

    def comment_out(matchobj):
        return re.sub(r'@param', 'param', matchobj.group(1) or '') + '\n//' + matchobj.group(2)

    # Comment out the closure function and its jsdocs
    text = re.sub(r'(/\*\*(?:[\s\n]*\*\s*@param[^\n]+\n)+\s*\*/\s*)?\n(\(function)', comment_out, text, count=1)
    # Comment out its return statement
    text = re.sub(r'\n(\s*return\s+[^;]+;\s*\n\}\)\s*)$', '\n/*\\1*/', text)
    # Replace the "var Object" override with a "self.Object" one
    text = re.sub(r'\nvar Object =', '\nself.Object =', text, count=1)
    write_file(outFileName, text)
# Prepare an unclosured copy of InjectedScriptSource.js for compilation.
injectedScriptSourceTmpFile = to_platform_path(path.join(inspector_path, 'InjectedScriptSourceTmp.js'))
unclosure_injected_script(injected_script_source_name, injectedScriptSourceTmpFile)
print 'Compiling InjectedScriptSource.js...'
# Base command shared by the standalone (non-modular) compilations below.
spawned_compiler_command = java_exec + [
    '-jar',
    closure_compiler_jar
] + common_closure_args
command = spawned_compiler_command + [
    '--externs', to_platform_path_exact(injected_script_externs_file),
    '--externs', to_platform_path_exact(protocol_externs_file),
    '--module', jsmodule_name_prefix + 'injected_script' + ':1',
    '--js', to_platform_path(injectedScriptSourceTmpFile)
]
injectedScriptCompileProc = popen(command)
print 'Compiling DebuggerScript.js...'
command = spawned_compiler_command + [
    '--externs', to_platform_path_exact(debugger_script_externs_file),
    '--module', jsmodule_name_prefix + 'debugger_script' + ':1',
    '--js', to_platform_path(debugger_script_source_name)
]
debuggerScriptCompileProc = popen(command)
print 'Compiling devtools.js...'
command = spawned_compiler_command + [
    '--externs', to_platform_path(global_externs_file),
    '--externs', to_platform_path(path.join(devtools_frontend_path, 'host', 'InspectorFrontendHostAPI.js')),
    '--module', jsmodule_name_prefix + 'devtools_js' + ':1',
    '--js', to_platform_path(path.join(devtools_frontend_path, 'devtools.js'))
]
devtoolsJSCompileProc = popen(command)
print 'Verifying JSDoc comments...'
additional_jsdoc_check_files = [injectedScriptSourceTmpFile]
errors_found |= verify_jsdoc(additional_jsdoc_check_files)
jsdocValidatorProc, jsdocValidatorFileList = verify_jsdoc_extra(additional_jsdoc_check_files)
print 'Validating InjectedScriptSource.js...'
injectedscript_check_script_path = path.join(scripts_path, "check_injected_script_source.py")
validateInjectedScriptProc = popen([sys.executable, injectedscript_check_script_path, injected_script_source_name])
print
# Any output at all from the jsdoc validator indicates errors.
(jsdocValidatorOut, _) = jsdocValidatorProc.communicate()
if jsdocValidatorOut:
    print ('JSDoc validator output:%s%s' % (os.linesep, jsdocValidatorOut))
    errors_found = True
os.remove(jsdocValidatorFileList.name)
(moduleCompileOut, _) = modular_compiler_proc.communicate()
print 'Modular compilation output:'
# The closure-runner brackets each module's output with these markers.
start_module_regex = re.compile(r'^@@ START_MODULE:(.+) @@$')
end_module_regex = re.compile(r'^@@ END_MODULE @@$')
in_module = False
skipped_modules = {}
error_count = 0
def skip_dependents(module_name):
    """Mark every module that depends on *module_name* to be skipped."""
    for skipped_module in dependents_by_module_name.get(module_name, []):
        skipped_modules[skipped_module] = True
has_module_output = False
# Parse the modular compiler's output module-by-module, counting errors and
# skipping modules whose dependencies already failed.
# pylint: disable=E1103
for line in moduleCompileOut.splitlines():
    if not in_module:
        match = re.search(start_module_regex, line)
        if not match:
            continue
        in_module = True
        has_module_output = True
        module_error_count = 0
        module_output = []
        module_name = match.group(1)
        skip_module = skipped_modules.get(module_name)
        if skip_module:
            skip_dependents(module_name)
    else:
        match = re.search(end_module_regex, line)
        if not match:
            if not skip_module:
                module_output.append(line)
                if hasErrors(line):
                    error_count += 1
                    module_error_count += 1
                    skip_dependents(module_name)
            continue
        in_module = False
        if skip_module:
            print 'Skipping module %s...' % module_name
        elif not module_error_count:
            print 'Module %s compiled successfully: %s' % (module_name, module_output[0])
        else:
            print 'Module %s compile failed: %s errors%s' % (module_name, module_error_count, os.linesep)
            print os.linesep.join(module_output)
if not has_module_output:
    print moduleCompileOut
if error_count:
    print 'Total Closure errors: %d%s' % (error_count, os.linesep)
    errors_found = True
# Collect and report each standalone compilation's output.
(injectedScriptCompileOut, _) = injectedScriptCompileProc.communicate()
print 'InjectedScriptSource.js compilation output:%s' % os.linesep, injectedScriptCompileOut
errors_found |= hasErrors(injectedScriptCompileOut)
(debuggerScriptCompilerOut, _) = debuggerScriptCompileProc.communicate()
print 'DebuggerScript.js compilation output:%s' % os.linesep, debuggerScriptCompilerOut
errors_found |= hasErrors(debuggerScriptCompilerOut)
(devtoolsJSCompileOut, _) = devtoolsJSCompileProc.communicate()
print 'devtools.js compilation output:%s' % os.linesep, devtoolsJSCompileOut
errors_found |= hasErrors(devtoolsJSCompileOut)
(validateInjectedScriptOut, _) = validateInjectedScriptProc.communicate()
print 'Validate InjectedScriptSource.js output:%s' % os.linesep, (validateInjectedScriptOut if validateInjectedScriptOut else '<empty>')
errors_found |= hasErrors(validateInjectedScriptOut)
if errors_found:
    print 'ERRORS DETECTED'
# Clean up temporary artifacts.
os.remove(injectedScriptSourceTmpFile)
os.remove(compiler_args_file.name)
os.remove(protocol_externs_file)
shutil.rmtree(modules_dir, True)
| {
"content_hash": "3ad6a387b3f628916af3f4ad65064f8e",
"timestamp": "",
"source": "github",
"line_count": 524,
"max_line_length": 176,
"avg_line_length": 39.204198473282446,
"alnum_prop": 0.6745850167940418,
"repo_name": "danakj/chromium",
"id": "aed1a8feebfec114af24d565728f46914750e592",
"size": "20543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/WebKit/Source/devtools/scripts/compile_frontend.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""This example gets all text ads for a given ad group. To add an ad, run
add_text_ads.py.
Tags: AdGroupAdService.get
Api: AdWordsOnly
"""
__author__ = 'api.kwinter@gmail.com (Kevin Winter)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import AdWordsClient
# Number of ad-group-ad results to request per page.
PAGE_SIZE = 500
# Replace with the id of the ad group whose text ads should be listed.
ad_group_id = 'INSERT_AD_GROUP_ID_HERE'
def main(client, ad_group_id):
  """List every TEXT_AD in *ad_group_id*, paging through all results."""
  # Initialize appropriate service.
  ad_group_ad_service = client.GetAdGroupAdService(
      'https://adwords-sandbox.google.com', 'v201109')
  # Construct selector and get all ads for a given ad group.
  offset = 0
  selector = {
      'fields': ['Id', 'AdGroupId', 'Status'],
      'predicates': [
          {
              'field': 'AdGroupId',
              'operator': 'EQUALS',
              'values': [ad_group_id]
          },
          {
              'field': 'AdType',
              'operator': 'EQUALS',
              'values': ['TEXT_AD']
          }
      ],
      'paging': {
          'startIndex': str(offset),
          'numberResults': str(PAGE_SIZE)
      }
  }
  more_pages = True
  while more_pages:
    page = ad_group_ad_service.Get(selector)[0]
    # Display results.
    if 'entries' in page:
      for ad in page['entries']:
        print ('Ad with id \'%s\', status \'%s\', and of type \'%s\' was found.'
               % (ad['ad']['id'], ad['status'], ad['ad']['Ad_Type']))
    else:
      print 'No ads were found.'
    # Advance the paging window and check whether more results remain.
    offset += PAGE_SIZE
    selector['paging']['startIndex'] = str(offset)
    more_pages = offset < int(page['totalNumEntries'])
  print
  print ('Usage: %s units, %s operations' % (client.GetUnits(),
                                             client.GetOperations()))
if __name__ == '__main__':
  # Initialize client object.
  client = AdWordsClient(path=os.path.join('..', '..', '..', '..', '..'))
  main(client, ad_group_id)
| {
"content_hash": "c4e23c834e5759b3d8aadd104400234e",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 80,
"avg_line_length": 27.02777777777778,
"alnum_prop": 0.5488180883864338,
"repo_name": "nearlyfreeapps/python-googleadwords",
"id": "8ae7806409adf8c7304444de90b08564a8954dc7",
"size": "2564",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/adspygoogle/adwords/v201109/basic_operations/get_text_ads.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "10581"
},
{
"name": "Python",
"bytes": "1394721"
}
],
"symlink_target": ""
} |
class Solution(object):
    """LeetCode #2: add two non-negative numbers stored as reversed-digit
    linked lists, returning the sum as a reversed-digit linked list."""

    def addTwoNumbers(self, l1, l2):
        """
        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        return self.getll(self.getnum(l1) + self.getnum(l2))

    def getnum(self, l):
        """Convert a reversed-digit linked list into the integer it encodes.

        Single pass; the original walked the list twice (getlength first)
        and recomputed 10 ** i for every digit.
        """
        num = 0
        place = 1
        while l is not None:
            num += l.val * place
            place *= 10
            l = l.next
        return num

    def getll(self, num):
        """Convert a non-negative integer into a reversed-digit linked list."""
        nodelist = []
        # BUG FIX: the original used int(num / 10), which goes through float
        # division in Python 3 and silently loses precision for values above
        # 2**53 (long input lists). Use exact integer arithmetic instead.
        while num // 10:
            num, digit = divmod(num, 10)
            nodelist.append(ListNode(digit))
        nodelist.append(ListNode(num))  # add the last (most significant) digit
        for i in range(len(nodelist) - 1):
            nodelist[i].next = nodelist[i + 1]
        return nodelist[0]

    def getlength(self, l):
        """Return the number of nodes in the (non-empty) linked list."""
        length = 1  # when linked list has just one node, length = 1, no loop
        while l.next is not None:
            length += 1
            l = l.next
        return length


if __name__ == '__main__':
    pass
"content_hash": "ebe6f954546ed55febd603450c507477",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 91,
"avg_line_length": 25.365853658536587,
"alnum_prop": 0.4913461538461538,
"repo_name": "mistwave/leetcode",
"id": "379138f9d58efa0c6816b87ba2ad8f34a2e196cb",
"size": "1183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Python3/no2_Add_Two_Numbers.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "89065"
}
],
"symlink_target": ""
} |
import sys
import os, os.path
import shutil
import ConfigParser
import subprocess
import re
from contextlib import contextmanager
def _check_ndk_root_env():
    ''' Checking the environment NDK_ROOT, which will be used for building.

    Returns the value of the NDK_ROOT environment variable, or exits
    with status 1 (after printing a hint) when it is not set.
    '''
    try:
        NDK_ROOT = os.environ['NDK_ROOT']
    except KeyError:
        # Fix: only a *missing* variable should trigger the friendly
        # message; the original 'except Exception' swallowed unrelated
        # errors too.
        print("NDK_ROOT not defined. Please define NDK_ROOT in your environment.")
        sys.exit(1)
    return NDK_ROOT
def _check_python_bin_env():
    ''' Checking the environment PYTHON_BIN, which will be used for building.

    Returns the value of PYTHON_BIN, falling back to the currently
    running interpreter (sys.executable) when it is not set.
    '''
    try:
        PYTHON_BIN = os.environ['PYTHON_BIN']
    except KeyError:
        # Fix: narrow the original broad 'except Exception' to the one
        # error os.environ[...] actually raises for a missing key.
        print("PYTHON_BIN not defined, use current python.")
        PYTHON_BIN = sys.executable
    return PYTHON_BIN
class CmdError(Exception):
    """Raised by _run_cmd when an external command exits non-zero."""
    pass
@contextmanager
def _pushd(newDir):
    """Context manager: chdir into *newDir*, restoring the previous
    working directory on exit.

    Fix: the restore is now inside try/finally, so it also runs when
    the with-body raises (the original left the process in *newDir*).
    """
    previousDir = os.getcwd()
    os.chdir(newDir)
    try:
        yield
    finally:
        os.chdir(previousDir)
def _run_cmd(command):
    """Run *command* through the shell; raise CmdError on non-zero exit."""
    if subprocess.call(command, shell=True) != 0:
        raise CmdError("Error running command")
def main():
    """Generate userconf.ini and run the bindings generator to produce
    the JavaScript bindings for each configured .ini target.

    Exits with status 1 on an unsupported platform, a missing LLVM
    toolchain, or a generator failure.
    """
    cur_platform= '??'
    llvm_path = '??'
    ndk_root = _check_ndk_root_env()
    # del the " in the path
    ndk_root = re.sub(r"\"", "", ndk_root)
    python_bin = _check_python_bin_env()
    platform = sys.platform
    # Map sys.platform onto the NDK's prebuilt-toolchain directory names.
    if platform == 'win32':
        cur_platform = 'windows'
    elif platform == 'darwin':
        cur_platform = platform
    elif 'linux' in platform:
        cur_platform = 'linux'
    else:
        print 'Your platform is not supported!'
        sys.exit(1)
    # Locate the clang/LLVM prebuilt toolchain inside the NDK: try
    # llvm-3.3 first, fall back to llvm-3.4; prefer x86 over x86_64.
    if platform == 'win32':
        x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s' % cur_platform))
        if not os.path.exists(x86_llvm_path):
            x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.4/prebuilt', '%s' % cur_platform))
    else:
        x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s-%s' % (cur_platform, 'x86')))
        if not os.path.exists(x86_llvm_path):
            x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.4/prebuilt', '%s-%s' % (cur_platform, 'x86')))
    x64_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s-%s' % (cur_platform, 'x86_64')))
    if not os.path.exists(x64_llvm_path):
        x64_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.4/prebuilt', '%s-%s' % (cur_platform, 'x86_64')))
    if os.path.isdir(x86_llvm_path):
        llvm_path = x86_llvm_path
    elif os.path.isdir(x64_llvm_path):
        llvm_path = x64_llvm_path
    else:
        print 'llvm toolchain not found!'
        print 'path: %s or path: %s are not valid! ' % (x86_llvm_path, x64_llvm_path)
        sys.exit(1)
    project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
    cocos_root = os.path.abspath(project_root)
    jsb_root = os.path.abspath(os.path.join(project_root, 'cocos/scripting/js-bindings'))
    cxx_generator_root = os.path.abspath(os.path.join(project_root, 'tools/bindings-generator'))
    # save config to file
    config = ConfigParser.ConfigParser()
    config.set('DEFAULT', 'androidndkdir', ndk_root)
    config.set('DEFAULT', 'clangllvmdir', llvm_path)
    config.set('DEFAULT', 'cocosdir', cocos_root)
    config.set('DEFAULT', 'jsbdir', jsb_root)
    config.set('DEFAULT', 'cxxgeneratordir', cxx_generator_root)
    config.set('DEFAULT', 'extra_flags', '')
    # To fix parse error on windows, we must define __WCHAR_MAX__ and undefine __MINGW32__ .
    if platform == 'win32':
        config.set('DEFAULT', 'extra_flags', '-D__WCHAR_MAX__=0x7fffffff -U__MINGW32__')
    conf_ini_file = os.path.abspath(os.path.join(os.path.dirname(__file__), 'userconf.ini'))
    print 'generating userconf.ini...'
    with open(conf_ini_file, 'w') as configfile:
        config.write(configfile)
    # set proper environment variables
    if 'linux' in platform or platform == 'darwin':
        os.putenv('LD_LIBRARY_PATH', '%s/libclang' % cxx_generator_root)
    if platform == 'win32':
        path_env = os.environ['PATH']
        os.putenv('PATH', r'%s;%s\libclang;%s\tools\win32;' % (path_env, cxx_generator_root, cxx_generator_root))
    try:
        tojs_root = '%s/tools/tojs' % project_root
        output_dir = '%s/../runtime-src/Classes/extensions/bindings/auto' % project_root
        # Map each generator .ini file to (section name, output basename).
        cmd_args = {'game_ext.ini' : ('game_ext', 'js_game_ext_auto')
                    }
        target = 'spidermonkey'
        generator_py = '%s/generator.py' % cxx_generator_root
        for key in cmd_args.keys():
            args = cmd_args[key]
            cfg = '%s/%s' % (tojs_root, key)
            # key[:-4] strips the '.ini' suffix for display.
            print 'Generating bindings for %s...' % (key[:-4])
            command = '%s %s %s -s %s -t %s -o %s -n %s' % (python_bin, generator_py, cfg, args[0], target, output_dir, args[1])
            _run_cmd(command)
            # if platform == 'win32':
            #     with _pushd(output_dir):
            #         _run_cmd('dos2unix *')
        # Optional project-specific bindings; empty by default.
        custom_cmd_args = {}
        if len(custom_cmd_args) > 0:
            output_dir = '%s/frameworks/custom/auto' % project_root
            for key in custom_cmd_args.keys():
                args = custom_cmd_args[key]
                cfg = '%s/%s' % (tojs_root, key)
                print 'Generating bindings for %s...' % (key[:-4])
                command = '%s %s %s -s %s -t %s -o %s -n %s' % (python_bin, generator_py, cfg, args[0], target, output_dir, args[1])
                _run_cmd(command)
                # if platform == 'win32':
                #     with _pushd(output_dir):
                #         _run_cmd('dos2unix *')
        print '----------------------------------------'
        print 'Generating javascript bindings succeeds.'
        print '----------------------------------------'
    except Exception as e:
        # Compare by class name rather than identity; behaves like
        # 'except CmdError' for errors raised by _run_cmd.
        if e.__class__.__name__ == 'CmdError':
            print '-------------------------------------'
            print 'Generating javascript bindings fails.'
            print '-------------------------------------'
            sys.exit(1)
        else:
            raise
# -------------- main --------------
if __name__ == '__main__':
main()
| {
"content_hash": "0f8a5bcf9b30c213ddb2732426eb6e7c",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 132,
"avg_line_length": 35.59659090909091,
"alnum_prop": 0.569193934557063,
"repo_name": "wanmaple/PokemonRPG",
"id": "46a02d6d1fbe2c0fb73f0fd9dc9f4d913b2f8ec9",
"size": "6382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/frameworks/cocos2d-x/tools/tojs/genbindings_ext.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2595"
},
{
"name": "C",
"bytes": "8380610"
},
{
"name": "C++",
"bytes": "20283487"
},
{
"name": "CMake",
"bytes": "232936"
},
{
"name": "GLSL",
"bytes": "56787"
},
{
"name": "HTML",
"bytes": "4386"
},
{
"name": "Java",
"bytes": "643771"
},
{
"name": "JavaScript",
"bytes": "5895543"
},
{
"name": "Lua",
"bytes": "16064"
},
{
"name": "Makefile",
"bytes": "48929"
},
{
"name": "Objective-C",
"bytes": "2936879"
},
{
"name": "Objective-C++",
"bytes": "446393"
},
{
"name": "Python",
"bytes": "360880"
},
{
"name": "Shell",
"bytes": "26867"
}
],
"symlink_target": ""
} |
from fonts import *
| {
"content_hash": "aa5b3da00c28fc988ea9ee8b88db69ec",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 19,
"avg_line_length": 20,
"alnum_prop": 0.75,
"repo_name": "MikhailMS/Final_Project",
"id": "bb8bc1cba3db05479eb10825cb9deb3f40a256b8",
"size": "58",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/__init__.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "127754"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class TicklabeloverflowValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Enumerated validator for ``layout.yaxis.ticklabeloverflow``."""

    def __init__(
        self, plotly_name="ticklabeloverflow", parent_name="layout.yaxis", **kwargs
    ):
        # Pop overridable options out of kwargs before delegating so
        # callers may customize them.
        edit_type = kwargs.pop("edit_type", "calc")
        values = kwargs.pop("values", ["allow", "hide past div", "hide past domain"])
        super(TicklabeloverflowValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            values=values,
            **kwargs,
        )
| {
"content_hash": "982e73588923eec047232da698707430",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 88,
"avg_line_length": 38.5,
"alnum_prop": 0.6233766233766234,
"repo_name": "plotly/plotly.py",
"id": "ff2bf9766c226c2d2b8c7303ebbe88abf74823fe",
"size": "539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/layout/yaxis/_ticklabeloverflow.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
import os
from urllib.parse import urljoin, urlparse
from xml.sax.saxutils import escape
import docutils.nodes
from reportlab.platypus.paragraph import Paragraph
from .basenodehandler import NodeHandler
from .image import MyImage, missing
class FontHandler(NodeHandler):
    """Base handler that wraps node text in a <font> tag derived from the
    subclass's ``fontstyle`` attribute."""

    def get_pre_post(self, client, node, replaceEnt):
        prefix = self.get_font_prefix(client, node, replaceEnt)
        return prefix, '</font>'

    def get_font_prefix(self, client, node, replaceEnt):
        return client.styleToFont(self.fontstyle)
class HandleText(NodeHandler, docutils.nodes.Text):
    """Handler for plain docutils text nodes."""

    def gather_elements(self, client, node, style):
        return [Paragraph(client.gather_pdftext(node), style)]

    def get_text(self, client, node, replaceEnt):
        raw = node.astext()
        return escape(raw) if replaceEnt else raw
class HandleStrong(NodeHandler, docutils.nodes.strong):
    # Bold inline markup.
    pre = "<b>"
    post = "</b>"
class HandleEmphasis(NodeHandler, docutils.nodes.emphasis):
    # Italic inline markup.
    pre = "<i>"
    post = "</i>"
class HandleLiteral(NodeHandler, docutils.nodes.literal):
    """Handler for inline literals: monospaced font, optional <nobr>."""

    def get_pre_post(self, client, node, replaceEnt):
        # Use the node's first class as the font style when present,
        # otherwise fall back to the 'literal' style.
        if node['classes']:
            pre = client.styleToFont(node['classes'][0])
        else:
            pre = client.styleToFont('literal')
        post = "</font>"
        if not client.styles['literal'].hyphenation:
            pre = '<nobr>' + pre
            post += '</nobr>'
        return pre, post

    def get_text(self, client, node, replaceEnt):
        # Literal text is always entity-escaped here regardless of
        # replaceEnt, and ordinary spaces are swapped for non-breaking
        # spaces so runs of whitespace survive paragraph layout.
        # NOTE(review): the replacement character is assumed to be
        # U+00A0 (it renders ambiguously in the original source) —
        # verify against upstream.
        # Fix: the original called node.astext() twice, discarding the
        # first (unescaped) result.
        text = escape(node.astext())
        return text.replace(' ', '\xa0')
class HandleSuper(NodeHandler, docutils.nodes.superscript):
    # Superscript inline markup.
    pre = '<super>'
    post = "</super>"
class HandleSub(NodeHandler, docutils.nodes.subscript):
    # Subscript inline markup.
    pre = '<sub>'
    post = "</sub>"
class HandleTitleReference(FontHandler, docutils.nodes.title_reference):
    # Rendered via FontHandler with the 'title_reference' font style.
    fontstyle = 'title_reference'
class HandleReference(NodeHandler, docutils.nodes.reference):
    """Handler for hyperlink references, both external URIs and
    internal (refid) targets."""

    def get_pre_post(self, client, node, replaceEnt):
        pre, post = '', ''
        uri = node.get('refuri')
        if uri:
            # Issue 366: links to "#" make no sense in a PDF
            if uri == "#":
                return "", ""
            if uri.startswith('#'):
                # In-document fragment: keep it as-is.
                pass
            elif client.baseurl: # Need to join the uri with the base url
                uri = urljoin(client.baseurl, uri)
            # An absolute URI (has a scheme) with inlinelinks enabled is
            # rendered as plain text followed by the URI in parentheses.
            if urlparse(uri)[0] and client.inlinelinks:
                # external inline reference
                if uri in [node.astext(), "mailto:" + node.astext()]:
                    # No point on repeating it
                    post = u''
                elif uri.startswith('mailto:'):
                    # No point on showing "mailto:"
                    post = u' (%s)' % uri[7:]
                else:
                    post = u' (%s)' % uri
            else:
                # A plain old link
                pre += u'<a href="%s" color="%s">' % (
                    uri,
                    client.styles.linkColor,
                )
                post = '</a>' + post
        else:
            # No refuri: internal reference by id, if any.
            uri = node.get('refid')
            if uri:
                pre += u'<a href="#%s" color="%s">' % (
                    uri,
                    client.styles.linkColor,
                )
                post = '</a>' + post
        return pre, post
class HandleOptions(
    HandleText, docutils.nodes.option_string, docutils.nodes.option_argument
):
    # Option strings/arguments render exactly like plain text.
    pass
class HandleSysMessage(
    HandleText, docutils.nodes.system_message, docutils.nodes.problematic
):
    # Inline system messages/problems are rendered in red; as block
    # elements they are dropped entirely (see gather_elements).
    pre = '<font color="red">'
    post = "</font>"

    def gather_elements(self, client, node, style):
        # FIXME show the error in the document, red, whatever
        # log.warning("Problematic node %s", node.astext())
        return []
class HandleGenerated(HandleText, docutils.nodes.generated):
    # Generated text (e.g. section numbers) is treated as plain text.
    pass

    # def get_text(self, client, node, replaceEnt):
    #     if 'sectnum' in node['classes']:
    #         # This is the child of a title with a section number
    #         # Send the section number up to the title node
    #         node.parent['_sectnum'] = node.astext()
    #     return node.astext()
class HandleImage(NodeHandler, docutils.nodes.image):
    """Handler for image nodes, both as block flowables and as inline
    <img> markup."""

    def gather_elements(self, client, node, style):
        # FIXME: handle alt
        # If the image is wrapped in a reference, make it a link.
        target = None
        if isinstance(node.parent, docutils.nodes.reference):
            target = node.parent.get('refuri', None)
        # Style comes from the node's first class, defaulting to 'image'.
        st_name = 'image'
        if node.get('classes'):
            st_name = node.get('classes')[0]
        style = client.styles[st_name]
        uri = str(node.get("uri"))
        # Local files are resolved relative to the document base dir.
        if uri.split("://")[0].lower() not in ('http', 'ftp', 'https'):
            imgname = os.path.join(client.basedir, uri)
        else:
            imgname = uri
        try:
            w, h, kind = MyImage.size_for_node(node, client=client)
        except ValueError:
            # Broken image: substitute the placeholder with fixed size.
            imgname = missing
            w, h, kind = 100, 100, 'direct'
        node.elements = [
            MyImage(
                filename=imgname,
                height=h,
                width=w,
                kind=kind,
                client=client,
                target=target,
            )
        ]
        alignment = node.get('align', '').upper()
        if not alignment:
            # There is no JUSTIFY for flowables, of course, so 4:LEFT
            alignment = {0: 'LEFT', 1: 'CENTER', 2: 'RIGHT', 4: 'LEFT'}[style.alignment]
        if not alignment:
            alignment = 'CENTER'
        node.elements[0].image.hAlign = alignment
        node.elements[0].spaceBefore = style.spaceBefore
        node.elements[0].spaceAfter = style.spaceAfter
        # Image flowables don't support valign (makes no sense for them?)
        # elif alignment in ('TOP','MIDDLE','BOTTOM'):
        #     i.vAlign = alignment
        return node.elements

    def get_text(self, client, node, replaceEnt):
        # First see if the image file exists, or else,
        # use image-missing.png
        imgname = os.path.join(client.basedir, str(node.get("uri")))
        try:
            w, h, kind = MyImage.size_for_node(node, client=client)
        except ValueError:
            # Broken image, return arbitrary stuff
            imgname = missing
            w, h, kind = 100, 100, 'direct'
        alignment = node.get('align', 'CENTER').lower()
        if alignment in ('top', 'middle', 'bottom'):
            align = 'valign="%s"' % alignment
        else:
            align = ''
        # TODO: inline images don't support SVG, vectors and PDF,
        # which may be surprising. So, work on converting them
        # previous to passing to reportlab.
        # Fix: the original called MyImage.size_for_node() a second time
        # here *without* the ValueError guard, so a broken image raised
        # again and the fallback dimensions above were never used.
        uri = MyImage.raster(imgname, client)
        return '<img src="%s" width="%f" height="%f" %s/>' % (uri, w, h, align)
class HandleFootRef(
    NodeHandler,
    docutils.nodes.footnote_reference,
    docutils.nodes.citation_reference,
):
    """Handler for footnote and citation references."""

    def get_text(self, client, node, replaceEnt):
        # TODO: when used in Sphinx, all footnotes are autonumbered
        # Emit an anchor for each of this node's ids the first time it
        # is seen, so the footnote body can link back here.
        anchors = ''
        for i in node.get('ids'):
            if i not in client.targets:
                anchors += '<a name="%s"/>' % i
                client.targets.append(i)
        # Superscripted link to the footnote body; falls back to the
        # node text when there is no refid.
        return u'%s<super><a href="%s" color="%s">%s</a></super>' % (
            anchors,
            '#' + node.get('refid', node.astext()),
            client.styles.linkColor,
            node.astext(),
        )
class HandleTarget(NodeHandler, docutils.nodes.target):
    """Handler for hyperlink targets (anchors)."""

    def gather_elements(self, client, node, style):
        if 'refid' in node:
            client.pending_targets.append(node['refid'])
        return client.gather_elements(node, style)

    def get_text(self, client, node, replaceEnt):
        text = client.gather_pdftext(node)
        if replaceEnt:
            text = escape(text)
        return text

    def get_pre_post(self, client, node, replaceEnt):
        pre = ''
        if node['ids']:
            # Emit each anchor only once per document.
            if node['ids'][0] not in client.targets:
                pre = u'<a name="%s"/>' % node['ids'][0]
                client.targets.append(node['ids'][0])
        else:
            # NOTE(review): assumes node['names'] is non-empty when the
            # node has no ids — an empty list would raise IndexError
            # here; confirm against docutils target semantics.
            name = node['names'][0]
            if name:
                pre = '<a name="%s"/>' % name
                client.targets.append(name)
        return pre, ''
class HandleInline(NodeHandler, docutils.nodes.inline):
    """Handler for generic inline nodes: map the first class to style tags."""

    def get_pre_post(self, client, node, replaceEnt):
        classes = node['classes']
        if classes and classes[0]:
            tags = client.styleToTags(classes[0])
            if tags:
                return tags
        return '', ''
| {
"content_hash": "727b2a28bb23567532ce96356f11e373",
"timestamp": "",
"source": "github",
"line_count": 270,
"max_line_length": 88,
"avg_line_length": 32.662962962962965,
"alnum_prop": 0.5505159315115092,
"repo_name": "rst2pdf/rst2pdf",
"id": "b22ff7569362ee9d9fbfacdbaff0a7a503bbe780",
"size": "8882",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "rst2pdf/genpdftext.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "485883"
},
{
"name": "Shell",
"bytes": "2089"
}
],
"symlink_target": ""
} |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from distutils.extension import Extension
from distutils.command.build_ext import build_ext
import sys, os, shutil, fnmatch
platform = sys.platform
if platform[:6] == 'darwin':
platform = 'darwin'
elif platform[:5] == 'linux':
platform = 'linux'
elif platform[:7] == 'freebsd':
platform = 'freebsd'
use_ice = False
if "--with-installed-ice" in sys.argv:
use_ice = True
sys.argv.remove("--with-installed-ice")
#
# Files from cpp/src/Slice that are required by IcePy
#
sliceSrcs = ["Checksum.cpp", "FileTracker.cpp", "Grammar.cpp", "MD5.cpp",
"MD5I.cpp", "Parser.cpp", "Preprocessor.cpp", "Python.cpp",
"PythonUtil.cpp", "Scanner.cpp", "SliceUtil.cpp", "StringLiteralUtil.cpp"]
#
# Sort out packages, package_dir and package_data from the lib dir. We include 'slice' in the
# package list and use package_data to ensure the Slice files are included. We also need to
# set include_package_data=True.
#
packages=['Glacier2', 'Ice', 'IceBox', 'IceGrid', 'IceMX', 'IcePatch2', 'IceStorm', 'slice']
if sys.version_info[:2] >= (3, 5):
packages += ['Ice.Py3']
package_dir={'' : 'lib'}
package_data={'' : ['*.ice']}
extra_compile_args=[]
define_macros=[('ICE_PYPI', None)]
if use_ice:
include_dirs=['src', 'src/ice/cpp/src']
else:
include_dirs=['src', 'src/ice/cpp/include', 'src/ice/cpp/include/generated', 'src/ice/cpp/src']
#
# Define ICE_STATIC_LIBS to disable _API macros
#
define_macros+=[('ICE_STATIC_LIBS', None)]
if platform == 'darwin':
if not 'ARCHFLAGS' in os.environ:
os.environ['ARCHFLAGS'] = '-arch x86_64'
# Make sure to use the SDK from Xcode (required for Sierra where old system headers can be used otherwise)
os.environ['CC'] = 'xcrun -sdk macosx clang'
os.environ['CXX'] = 'xcrun -sdk macosx clang++'
extra_compile_args.append('-w')
if use_ice:
libraries = ["IceSSL", "IceLocatorDiscovery", "IceDiscovery", "Ice"]
extra_link_args = []
else:
libraries=['iconv']
extra_link_args = ['-framework','Security', '-framework','CoreFoundation']
elif platform == 'win32':
extra_link_args = []
libraries=[]
define_macros.append(('WIN32_LEAN_AND_MEAN', None))
define_macros.append(('ICE_BUILDING_SRC', None))
define_macros.append(('ICE_BUILDING_ICE', None))
define_macros.append(('ICE_BUILDING_ICESSL', None))
define_macros.append(('ICE_BUILDING_ICE_LOCATOR_DISCOVERY', None))
define_macros.append(('_WIN32_WINNT', '0x601'))
include_dirs.append('src/ice/bzip2')
extra_compile_args.append('/EHsc')
extra_compile_args.append('/wd4018')
extra_compile_args.append('/wd4146')
extra_compile_args.append('/wd4244')
extra_compile_args.append('/wd4250')
extra_compile_args.append('/wd4251')
extra_compile_args.append('/wd4267')
extra_compile_args.append('/wd4275')
extra_compile_args.append('/wd4996')
extra_compile_args.append('/Zi')
if sys.version_info[:2] >= (3, 6):
extra_link_args.append('/DEBUG:FULL')
else:
extra_link_args.append('/DEBUG')
libraries=['dbghelp', 'Shlwapi', 'rpcrt4','advapi32','Iphlpapi','secur32','crypt32','ws2_32']
else:
#
# TODO: Get rid of this hack to remove -Wstrict-prototypes from the compiler options
# when http://bugs.python.org/issue1222585 is fixed. Note that this hack doesn't work
# with recent distutils versions which no longer allow overriding OPT in the env.
#
from distutils.sysconfig import get_config_vars
(opt,) = get_config_vars('OPT')
os.environ['OPT'] = " ".join(flag for flag in opt.split() if flag != '-Wstrict-prototypes')
extra_compile_args.append('-w')
extra_link_args = []
if use_ice:
libraries = ["IceSSL", "IceLocatorDiscovery", "IceDiscovery", "Ice"]
else:
libraries=['ssl', 'crypto', 'bz2', 'rt']
if platform != 'freebsd':
libraries.append('dl')
def filterName(path):
    """Decide whether the source file at *path* should be compiled into
    the IcePy extension on the current platform."""
    directory = os.path.dirname(path)
    base = os.path.basename(path)

    # mcpp sources are always built.
    if "mcpp" in directory:
        return True
    # Slice sources: build only the subset IcePy requires.
    if os.path.normpath("ice/cpp/src/Slice") in directory:
        return base in sliceSrcs

    if platform == "win32":
        # SysLoggerI.cpp shouldn't be built under Windows.
        if base == 'SysLoggerI.cpp':
            return False
        # Don't build OpenSSL, SecureTransport and UWP sources on Windows.
        if base.startswith(("UWP", "OpenSSL", "SecureTransport")):
            return False
    else:
        # Filter IceSSL sources that don't match this OS's default
        # implementation.
        wrong_ssl_backend = (
            base.startswith("SChannel")
            or base.startswith("UWP")
            or (base.startswith("OpenSSL") and platform == "darwin")
            or (base.startswith("SecureTransport") and platform != "darwin")
        )
        if wrong_ssl_backend:
            return False
        # Don't build bundled Ice C++ sources when using a system Ice
        # install (--with-installed-ice).
        if use_ice and os.path.normpath("src/ice/") in directory:
            return False
        # Always use the system bzip2 under Unix platforms.
        elif 'bzip2' in directory:
            return False
    return True
# Gather the list of sources to compile.
sources = []
for root, dirnames, filenames in os.walk('src'):
for filename in fnmatch.filter(filenames, '*.cpp'):
n = os.path.normpath(os.path.join(root, filename))
if filterName(n):
sources.append(n)
for filename in fnmatch.filter(filenames, '*.c'):
n = os.path.normpath(os.path.join(root, filename))
if filterName(n):
sources.append(n)
with open('README.rst') as file:
long_description = file.read()
class BuildExt(build_ext):
    """build_ext subclass that, on Windows, directs the IcePy PDB debug
    file into the build temp directory before building."""

    def build_extensions(self):
        if platform == "win32":
            for e in self.extensions:
                e.extra_link_args.append('/PDB:{0}'.format(os.path.join(self.build_temp, "IcePy.pdb")))
        build_ext.build_extensions(self)
setup(
name='zeroc-ice',
version='3.7.8',
description="Ice is a comprehensive RPC framework with support for Python, C++, .NET, Java, JavaScript and more.",
long_description=long_description,
# The project's main homepage.
url='https://zeroc.com',
# Author details
author='ZeroC, Inc.',
author_email='info@zeroc.com',
# Choose your license
license='GPL v2 with exceptions',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
# What does your project relate to?
keywords='RPC distributed systems development',
packages = packages,
package_dir = package_dir,
package_data = package_data,
include_package_data = True,
exclude_package_data={'slice': ['IceDiscovery/*.ice', 'IceLocatorDiscovery/*.ice']},
py_modules = ["slice2py"],
cmdclass = {'build_ext': BuildExt },
entry_points = {
'console_scripts': ['slice2py=slice2py:main'],
},
ext_modules=[
Extension('IcePy', sources,
extra_link_args=extra_link_args,
define_macros=define_macros,
include_dirs=include_dirs,
extra_compile_args=extra_compile_args,
libraries=libraries)
]
)
| {
"content_hash": "a89017963ca8a17e674fb6529b25a466",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 118,
"avg_line_length": 34.708,
"alnum_prop": 0.6203757058891322,
"repo_name": "zeroc-ice/ice-packaging",
"id": "16f5e24127d5f6a98a621cebd99b5b3b7384b7f4",
"size": "8773",
"binary": false,
"copies": "1",
"ref": "refs/heads/3.7",
"path": "ice/pypi/setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10245"
},
{
"name": "Ruby",
"bytes": "4153"
},
{
"name": "Shell",
"bytes": "137"
}
],
"symlink_target": ""
} |
"""
RoBERTa: A Robustly Optimized BERT Pretraining Approach.
"""
import logging
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import utils
from fairseq.model_parallel.models.transformer import ModelParallelTransformerEncoder
from fairseq.models import register_model, register_model_architecture
from fairseq.models.roberta import (
roberta_base_architecture,
roberta_prenorm_architecture,
RobertaEncoder,
RobertaModel,
)
from fairseq.modules import LayerNorm
try:
from fairseq.model_parallel.megatron.mpu import (
copy_to_model_parallel_region,
gather_from_model_parallel_region,
ColumnParallelLinear,
VocabParallelEmbedding,
)
has_megatron_submodule = True
except (ImportError, ModuleNotFoundError):
has_megatron_submodule = False
logger = logging.getLogger(__name__)
@register_model("model_parallel_roberta")
class ModelParallelRobertaModel(RobertaModel):
    """RoBERTa with Megatron-style (intra-layer) model parallelism."""

    def __init__(self, args, encoder):
        super().__init__(args, encoder)
        # Classification heads are registered lazily per task.
        self.classification_heads = nn.ModuleDict()

    @staticmethod
    def add_args(parser):
        """Add model-parallel-specific CLI args on top of RobertaModel's."""
        RobertaModel.add_args(parser)
        parser.add_argument(
            "--no-final-layer-norm",
            action="store_true",
            help=(
                "don't add final layernorm (only applicable when "
                "--encoder-normalize-before=True"
            ),
        )

    @classmethod
    def build_model(cls, args, task):
        """Build a new model instance."""
        # make sure all arguments are present
        base_architecture(args)
        # Pad vocab sizes so they divide evenly across model-parallel ranks.
        task.source_dictionary.pad_to_multiple_(args.model_parallel_size * 8)
        task.target_dictionary.pad_to_multiple_(args.model_parallel_size * 8)
        if not hasattr(args, "max_positions"):
            args.max_positions = args.tokens_per_sample
        if getattr(args, "untie_weights_roberta", False):
            raise NotImplementedError(
                "--untie-weights-roberta is not supported in model parallel mode"
            )
        encoder = ModelParallelRobertaEncoder(args, task.source_dictionary)
        return cls(args, encoder)

    def forward(
        self,
        src_tokens,
        features_only=False,
        return_all_hiddens=False,
        classification_head_name=None,
        **kwargs
    ):
        # A named classification head implies features-only extraction:
        # the LM head is skipped and the head runs on the features.
        if classification_head_name is not None:
            features_only = True
        x, extra = self.encoder(src_tokens, features_only, return_all_hiddens, **kwargs)
        if classification_head_name is not None:
            x = self.classification_heads[classification_head_name](x)
        return x, extra

    def register_classification_head(
        self, name, num_classes=None, inner_dim=None, **kwargs
    ):
        """Register a classification head."""
        if name in self.classification_heads:
            # Warn (but proceed) when re-registering with different shapes.
            prev_num_classes = self.classification_heads[name].out_proj.out_features
            prev_inner_dim = self.classification_heads[name].dense.out_features
            if num_classes != prev_num_classes or inner_dim != prev_inner_dim:
                logger.warning(
                    're-registering head "{}" with num_classes {} (prev: {}) '
                    "and inner_dim {} (prev: {})".format(
                        name, num_classes, prev_num_classes, inner_dim, prev_inner_dim
                    )
                )
        self.classification_heads[name] = ModelParallelRobertaClassificationHead(
            self.args.encoder_embed_dim,
            inner_dim or self.args.encoder_embed_dim,
            num_classes,
            self.args.pooler_activation_fn,
            self.args.pooler_dropout,
        )
class ModelParallelRobertaLMHead(nn.Module):
    """Head for masked language modeling."""

    def __init__(self, embed_dim, output_dim, activation_fn, weight=None):
        super().__init__()
        # Column-parallel projection; gather_output=True yields the full
        # (un-sharded) activation.
        self.dense = ColumnParallelLinear(embed_dim, embed_dim, gather_output=True)
        self.activation_fn = utils.get_activation_fn(activation_fn)
        self.layer_norm = LayerNorm(embed_dim)
        # Tie to the provided embedding weight, or create a fresh one.
        if weight is None:
            weight = nn.Linear(embed_dim, output_dim, bias=False).weight
        self.weight = weight
        self.bias = nn.Parameter(torch.zeros(output_dim))

    def forward(self, features, masked_tokens=None, **kwargs):
        # Only project the unmasked tokens while training,
        # saves both memory and computation
        if masked_tokens is not None:
            features = features[masked_tokens, :]
        x = self.dense(features)
        x = self.activation_fn(x)
        x = self.layer_norm(x)
        # Broadcast to all model-parallel ranks, project against the
        # (vocab-sharded) weight, then gather the vocab dimension back.
        x = copy_to_model_parallel_region(x)
        # project back to size of vocabulary with bias
        x = F.linear(x, self.weight)
        x = gather_from_model_parallel_region(x).contiguous()
        x = x + self.bias
        return x
class ModelParallelRobertaClassificationHead(nn.Module):
    """Head for sentence-level classification tasks."""

    def __init__(
        self, input_dim, inner_dim, num_classes, activation_fn, pooler_dropout
    ):
        super().__init__()
        self.dense = ColumnParallelLinear(input_dim, inner_dim, gather_output=True)
        self.activation_fn = utils.get_activation_fn(activation_fn)
        self.dropout = nn.Dropout(p=pooler_dropout)
        self.out_proj = nn.Linear(inner_dim, num_classes)

    def forward(self, features, **kwargs):
        # Pool by taking the first (<s>, equiv. to [CLS]) token state.
        cls_state = features[:, 0, :]
        hidden = self.dense(self.dropout(cls_state))
        hidden = self.dropout(self.activation_fn(hidden))
        return self.out_proj(hidden)
class ModelParallelRobertaEncoder(RobertaEncoder):
    """RoBERTa encoder."""

    def __init__(self, args, dictionary):
        super().__init__(args, dictionary)
        # Weight tying is mandatory in model-parallel mode (also checked
        # in build_model).
        assert not self.args.untie_weights_roberta

    def build_embedding(self, vocab_size, embedding_dim, padding_idx):
        # Shard the embedding table over the vocabulary dimension.
        return VocabParallelEmbedding(vocab_size, embedding_dim, padding_idx)

    def build_encoder(self, args, dictionary, embed_tokens):
        return ModelParallelTransformerEncoder(args, dictionary, embed_tokens)

    def build_lm_head(self, embed_dim, output_dim, activation_fn, weight):
        return ModelParallelRobertaLMHead(embed_dim, output_dim, activation_fn, weight)
@register_model_architecture("model_parallel_roberta", "model_parallel_roberta")
def base_architecture(args):
    """Default model-parallel RoBERTa architecture (Pre-LN)."""
    args.no_final_layer_norm = getattr(args, "no_final_layer_norm", False)
    # model parallel RoBERTa defaults to "Pre-LN" formulation
    roberta_prenorm_architecture(args)
# earlier versions of model parallel RoBERTa removed the final layer norm
@register_model_architecture("model_parallel_roberta", "model_parallel_roberta_v1")
def model_parallel_roberta_v1_architecture(args):
    """Legacy v1 architecture: final layer norm disabled by default."""
    args.no_final_layer_norm = getattr(args, "no_final_layer_norm", True)
    base_architecture(args)
@register_model_architecture(
    "model_parallel_roberta", "model_parallel_roberta_postnorm"
)
def model_parallel_roberta_postnorm_architecture(args):
    """Post-LN variant matching the original BERT/RoBERTa layout."""
    # the original BERT/RoBERTa uses the "Post-LN" formulation
    roberta_base_architecture(args)
@register_model_architecture("model_parallel_roberta", "model_parallel_roberta_base")
def model_parallel_roberta_base_architecture(args):
    """Base-sized variant; identical to the defaults."""
    base_architecture(args)
@register_model_architecture("model_parallel_roberta", "model_parallel_roberta_large")
def model_parallel_roberta_large_architecture(args):
    """Large variant: 24 layers, 1024 hidden, 4096 FFN, 16 heads."""
    args.encoder_layers = getattr(args, "encoder_layers", 24)
    args.encoder_embed_dim = getattr(args, "encoder_embed_dim", 1024)
    args.encoder_ffn_embed_dim = getattr(args, "encoder_ffn_embed_dim", 4096)
    args.encoder_attention_heads = getattr(args, "encoder_attention_heads", 16)
    base_architecture(args)
| {
"content_hash": "db788378564bccc6ff56c481f8baf1d0",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 88,
"avg_line_length": 35.411764705882355,
"alnum_prop": 0.6579350881676463,
"repo_name": "pytorch/fairseq",
"id": "77a80ef72057219110b34678a38705549910edd3",
"size": "8003",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "fairseq/model_parallel/models/roberta/model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "21106"
},
{
"name": "Cuda",
"bytes": "38166"
},
{
"name": "Cython",
"bytes": "13294"
},
{
"name": "Lua",
"bytes": "4210"
},
{
"name": "Python",
"bytes": "3699357"
},
{
"name": "Shell",
"bytes": "2182"
}
],
"symlink_target": ""
} |
"""Django page CMS unit test suite module."""
from pages.models import Page, Content
from pages.placeholders import PlaceholderNode
from pages.tests.testcase import TestCase, MockRequest
from pages import urlconf_registry as reg
from pages.http import get_language_from_request, get_slug
from pages.http import get_request_mock, remove_slug
from pages.utils import export_po_files, import_po_files
from pages.views import details
import django
from django.http import Http404
from django.contrib.auth.models import User
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template import Template, RequestContext, Context
from django.template.loader import get_template_from_string
from django.template import Template, TemplateSyntaxError
import datetime
class UnitTestCase(TestCase):
    """Django page CMS unit test suite class."""
    def test_date_ordering(self):
        """Test page date ordering feature."""
        self.set_setting("PAGE_USE_SITE_ID", False)
        author = User.objects.all()[0]
        yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
        now = datetime.datetime.now()
        p1 = Page(author=author, status=Page.PUBLISHED, publication_date=now)
        p1.save()
        p2 = Page(
            author=author,
            publication_date=now,
            status=Page.PUBLISHED
        )
        p2.save()
        p3 = Page(
            author=author,
            publication_date=yesterday,
            status=Page.PUBLISHED
        )
        p3.save()
        p2.move_to(p1, position='first-child')
        p3.move_to(p1, position='first-child')
        # Reload from the DB so tree fields are fresh after move_to().
        p1 = Page.objects.get(pk=p1.id)
        p2 = Page.objects.get(pk=p2.id)
        p3 = Page.objects.get(pk=p3.id)
        # Tree order: p3 was moved in last, so it is the first child...
        self.assertEqual(
            [p.id for p in p1.get_children_for_frontend()],
            [p3.id, p2.id]
        )
        # ...while date ordering puts the newest publication_date first.
        self.assertEqual(
            [p.id for p in p1.get_date_ordered_children_for_frontend()],
            [p2.id, p3.id]
        )
    def test_widgets_registry(self):
        """Test the widget registry module."""
        from pages import widgets_registry as wreg
        # Every registered widget must be instantiable and renderable.
        for widget in wreg.registry:
            w = widget()
            w.render('name', 'value')
        # Re-registering an existing widget must raise.
        try:
            wreg.register_widget(wreg.registry[0])
            raise AssertionError("Error not raised properly.")
        except wreg.WidgetAlreadyRegistered:
            pass
        # Looking up an unknown widget must raise.
        try:
            wreg.get_widget('wrong')
            raise AssertionError("Error not raised properly.")
        except wreg.WidgetNotFound:
            pass
    def test_page_caculated_status(self):
        """Test calculated status property."""
        self.set_setting("PAGE_SHOW_START_DATE", True)
        yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
        tomorrow = datetime.datetime.now() + datetime.timedelta(days=1)
        page = self.new_page()
        self.assertEqual(page.calculated_status, Page.PUBLISHED)
        # A future publication date makes the page a draft.
        page.publication_date = tomorrow
        self.assertEqual(page.calculated_status, Page.DRAFT)
        page.publication_date = yesterday
        self.assertEqual(page.calculated_status, Page.PUBLISHED)
        # A past end date expires the page.
        self.set_setting("PAGE_SHOW_END_DATE", True)
        page.publication_end_date = yesterday
        self.assertEqual(page.calculated_status, Page.EXPIRED)
    def test_placeholder_inherit_content(self):
        """Test placeholder content inheritance between pages."""
        self.set_setting("PAGE_USE_SITE_ID", False)
        author = User.objects.all()[0]
        p1 = self.new_page(content={'inher':'parent-content'})
        p2 = self.new_page()
        template = django.template.loader.get_template('pages/tests/test7.html')
        context = Context({'current_page': p2, 'lang':'en-us'})
        # p2 has no parent yet, so nothing is inherited.
        self.assertEqual(template.render(context), '')
        p2.move_to(p1, position='first-child')
        self.assertEqual(template.render(context), 'parent-content')
    def test_get_page_template_tag(self):
        """Test get_page template tag."""
        context = Context({})
        pl1 = """{% load pages_tags %}{% get_page get-page-slug as toto %}{{ toto }}"""
        template = get_template_from_string(pl1)
        # No page with that slug exists yet.
        self.assertEqual(template.render(context), u'None')
        page = self.new_page({'slug':'get-page-slug'})
        self.assertEqual(template.render(context), u'get-page-slug')
    def test_placeholder_all_syntaxes(self):
        """Test placeholder syntaxes."""
        page = self.new_page()
        context = Context({'current_page': page, 'lang':'en-us'})
        # "as var" form renders nothing by itself.
        pl1 = """{% load pages_tags %}{% placeholder title as hello %}"""
        template = get_template_from_string(pl1)
        self.assertEqual(template.render(context), '')
        pl1 = """{% load pages_tags %}{% placeholder title as hello %}{{ hello }}"""
        template = get_template_from_string(pl1)
        self.assertEqual(template.render(context), page.title())
        # to be sure to raise an errors in parse template content
        setattr(settings, "DEBUG", True)
        page = self.new_page({'wrong': '{% wrong %}'})
        context = Context({'current_page': page, 'lang':'en-us'})
        pl2 = """{% load pages_tags %}{% placeholder wrong parsed %}"""
        template = get_template_from_string(pl2)
        from pages.placeholders import PLACEHOLDER_ERROR
        error = PLACEHOLDER_ERROR % {
            'name': 'wrong',
            'error': "Invalid block tag: 'wrong'",
        }
        self.assertEqual(template.render(context), error)
        # generate errors
        pl3 = """{% load pages_tags %}{% placeholder %}"""
        try:
            template = get_template_from_string(pl3)
        except TemplateSyntaxError:
            pass
        pl4 = """{% load pages_tags %}{% placeholder wrong wrong %}"""
        try:
            template = get_template_from_string(pl4)
        except TemplateSyntaxError:
            pass
        pl5 = """{% load pages_tags %}{% placeholder wrong as %}"""
        try:
            template = get_template_from_string(pl5)
        except TemplateSyntaxError:
            pass
    def test_parsed_template(self):
        """Test the parsed template syntax."""
        setattr(settings, "DEBUG", True)
        page = self.new_page({'title':'<b>{{ "hello"|capfirst }}</b>'})
        page.save()
        context = Context({'current_page': page, 'lang':'en-us'})
        pl_parsed = """{% load pages_tags %}{% placeholder title parsed %}"""
        template = get_template_from_string(pl_parsed)
        self.assertEqual(template.render(context), '<b>Hello</b>')
        # With DEBUG off a bad filter renders empty instead of raising.
        setattr(settings, "DEBUG", False)
        page = self.new_page({'title':'<b>{{ "hello"|wrong_filter }}</b>'})
        context = Context({'current_page': page, 'lang':'en-us'})
        self.assertEqual(template.render(context), u'')
    def test_video(self):
        """Test video placeholder."""
        page = self.new_page(content={
            'title':'video-page',
            'video':'http://www.youtube.com/watch?v=oHg5SJYRHA0\\\\'
        })
        context = Context({'current_page': page, 'lang':'en-us'})
        pl1 = """{% load pages_tags %}{% videoplaceholder video %}"""
        template = get_template_from_string(pl1)
        # Only checks that some embed markup is produced, not its exact form.
        self.assertNotEqual(template.render(context), '')
        self.assertTrue(len(template.render(context)) > 10)
    def test_placeholder_untranslated_content(self):
        """Test placeholder untranslated content."""
        self.set_setting("PAGE_USE_SITE_ID", False)
        page = self.new_page(content={})
        placeholder = PlaceholderNode('untrans', page='p', untranslated=True)
        placeholder.save(page, 'fr-ch', 'test-content', True)
        placeholder.save(page, 'en-us', 'test-content', True)
        # Untranslated placeholders store a single Content row.
        self.assertEqual(len(Content.objects.all()), 1)
        self.assertEqual(Content.objects.all()[0].language, 'en-us')
        placeholder = PlaceholderNode('untrans', page='p', untranslated=False)
        placeholder.save(page, 'fr-ch', 'test-content', True)
        self.assertEqual(len(Content.objects.all()), 2)
        # test the syntax
        page = self.new_page()
        template = django.template.loader.get_template(
            'pages/tests/untranslated.html')
        context = Context({'current_page': page, 'lang':'en-us'})
        self.assertEqual(template.render(context), '')
    def test_urlconf_registry(self):
        """Test urlconf_registry basic functions."""
        reg.register_urlconf('Documents', 'example.documents.urls',
            label='Display documents')
        reg.get_urlconf('Documents')
        # Duplicate registration must raise.
        try:
            reg.register_urlconf('Documents', 'example.documents.urls',
                label='Display documents')
        except reg.UrlconfAlreadyRegistered:
            pass
        # Reset the registry; lookups must now fail.
        reg.registry = []
        try:
            reg.get_urlconf('Documents')
        except reg.UrlconfNotFound:
            pass
        reg.register_urlconf('Documents', 'example.documents.urls',
            label='Display documents')
        self.assertEqual(reg.get_choices(),
            [('', 'No delegation'), ('Documents', 'Display documents')])
    def test_permissions(self):
        """Test the permissions lightly."""
        from pages.permissions import PagePermission
        admin = User.objects.get(username='admin')
        page = self.new_page()
        # The page author can both view and change their own page.
        pp = PagePermission(user=page.author)
        self.assertTrue(pp.check('change', page=page, method='GET'))
        self.assertTrue(pp.check('change', page=page, method='POST'))
        staff = User.objects.get(username='staff')
        pp = PagePermission(user=staff)
        # weird because nonstaff?
        self.assertTrue(pp.check('change', page=page, method='GET',
            lang='en-us'))
        self.assertFalse(pp.check('change', page=page, method='POST',
            lang='en-us'))
        self.assertFalse(pp.check('delete', page=page, method='POST',
            lang='en-us'))
        self.assertFalse(pp.check('add', page=page, method='POST',
            lang='en-us'))
        self.assertFalse(pp.check('freeze', page=page, method='POST',
            lang='en-us'))
        self.assertFalse(pp.check('doesnotexist', page=page, method='POST',
            lang='en-us'))
        self.assertFalse(pp.check('publish', page=page, method='POST',
            lang='en-us'))
    def test_managers(self):
        """Test the Page manager helpers (populate/published/drafts/expired)."""
        # TODO: this test seems dependant from other tests
        self.set_setting("PAGE_USE_SITE_ID", False)
        Page.objects.populate_pages(child=2, depth=2)
        for p in Page.objects.all():
            p.invalidate()
        self.assertEqual(Page.objects.count(), 3)
        self.assertEqual(Page.objects.published().count(), 3)
        self.assertEqual(Page.objects.drafts().count(), 0)
        self.assertEqual(Page.objects.expired().count(), 0)
    def test_get_content_tag(self):
        """
        Test the {% get_content %} template tag
        """
        page_data = {'title':'test', 'slug':'test'}
        page = self.new_page(page_data)
        context = RequestContext(MockRequest, {'page': page})
        template = Template('{% load pages_tags %}'
                            '{% get_content page "title" "en-us" as content %}'
                            '{{ content }}')
        self.assertEqual(template.render(context), page_data['title'])
        # The language argument is optional.
        template = Template('{% load pages_tags %}'
                            '{% get_content page "title" as content %}'
                            '{{ content }}')
        self.assertEqual(template.render(context), page_data['title'])
    def test_show_content_tag(self):
        """
        Test the {% show_content %} template tag.
        """
        page_data = {'title':'test', 'slug':'test'}
        page = self.new_page(page_data)
        # cleanup the cache from previous tests
        page.invalidate()
        context = RequestContext(MockRequest, {'page': page, 'lang':'en-us',
            'path':'/page-1/'})
        template = Template('{% load pages_tags %}'
                            '{% show_content page "title" "en-us" %}')
        self.assertEqual(template.render(context), page_data['title'])
        template = Template('{% load pages_tags %}'
                            '{% show_content page "title" %}')
        self.assertEqual(template.render(context), page_data['title'])
    def test_pages_siblings_menu_tag(self):
        """
        Test the {% pages_siblings_menu %} template tag.
        """
        page_data = {'title':'test', 'slug':'test'}
        page = self.new_page(page_data)
        # cleanup the cache from previous tests
        page.invalidate()
        context = RequestContext(MockRequest, {'page': page, 'lang':'en-us',
            'path':'/page-1/'})
        template = Template('{% load pages_tags %}'
                            '{% pages_siblings_menu page %}')
        # Smoke test only: rendering must not raise; output is not asserted.
        renderer = template.render(context)
    def test_show_absolute_url_with_language(self):
        """
        Test a {% show_absolute_url %} template tag bug.
        """
        page_data = {'title':'english', 'slug':'english'}
        page = self.new_page(page_data)
        Content(page=page, language='fr-ch', type='title', body='french').save()
        Content(page=page, language='fr-ch', type='slug', body='french').save()
        self.assertEqual(page.get_url_path(language='fr-ch'),
            self.get_page_url(u'french'))
        self.assertEqual(page.get_url_path(language='en-us'),
            self.get_page_url(u'english'))
        context = RequestContext(MockRequest, {'page': page})
        template = Template('{% load pages_tags %}'
                            '{% show_absolute_url page "en-us" %}')
        self.assertEqual(template.render(context),
            self.get_page_url(u'english'))
        template = Template('{% load pages_tags %}'
                            '{% show_absolute_url page "fr-ch" %}')
        self.assertEqual(template.render(context),
            self.get_page_url('french'))
    def test_get_page_ids_by_slug(self):
        """
        Test that get_page_ids_by_slug work as intented.
        """
        page_data = {'title':'test1', 'slug':'test1'}
        page1 = self.new_page(page_data)
        self.assertEqual(
            Content.objects.get_page_ids_by_slug('test1'),
            [page1.id]
        )
        page_data = {'title':'test1', 'slug':'test1'}
        page2 = self.new_page(page_data)
        self.assertEqual(
            Content.objects.get_page_ids_by_slug('test1'),
            [page1.id, page2.id]
        )
        # Changing page1's slug away and back must not duplicate its id.
        Content(page=page1, language='en-us', type='slug', body='test2').save()
        self.assertEqual(
            Content.objects.get_page_ids_by_slug('test1'),
            [page1.id, page2.id]
        )
        Content(page=page1, language='en-us', type='slug', body='test1').save()
        self.assertEqual(
            Content.objects.get_page_ids_by_slug('test1'),
            [page1.id, page2.id]
        )
    def test_get_language_from_request(self):
        """
        Test that get_language_from_request return the default language even if a
        unaccepted language is used.
        """
        # Minimal request stub: only the attributes the helper reads.
        class Req():
            LANGUAGE_CODE = 'en-us'
            GET = {}
        request = Req()
        self.assertEqual(
            get_language_from_request(request), 'en-us')
        # Unknown language codes fall back to the default.
        request.LANGUAGE_CODE = 'dont'
        self.assertEqual(
            get_language_from_request(request), 'en-us')
        request.LANGUAGE_CODE = 'fr-ch'
        self.assertEqual(
            get_language_from_request(request), 'fr-ch')
    def test_default_view_with_language_prefix(self):
        """
        Test that everything is working with the language prefix option
        activated.
        """
        self.set_setting("PAGE_USE_LANGUAGE_PREFIX", True)
        from pages.views import details
        req = get_request_mock()
        self.assertRaises(Http404, details, req, '/pages/')
        page1 = self.new_page(content={'slug': 'page1'})
        page2 = self.new_page(content={'slug': 'page2'})
        self.assertEqual(page1.get_url_path(),
            reverse('pages-details-by-path', args=[],
                kwargs={'lang': 'en-us', 'path': 'page1'})
        )
        self.assertEqual(details(req, page1.get_url_path(),
            only_context=True)['current_page'],
            page1)
        self.assertEqual(details(req, path=page2.get_complete_slug(),
            only_context=True)['current_page'], page2)
        self.assertEqual(details(req, page2.get_url_path(),
            only_context=True)['current_page'],
            page2)
        # Old prefixed URLs must keep resolving after the option is disabled.
        self.set_setting("PAGE_USE_LANGUAGE_PREFIX", False)
        self.assertEqual(details(req, page2.get_url_path(),
            only_context=True)['current_page'],
            page2)
    def test_root_page_hidden_slug(self):
        """
        Check that the root works properly in every case.
        """
        page1 = self.new_page(content={'slug': 'page1'})
        self.set_setting("PAGE_USE_LANGUAGE_PREFIX", False)
        self.set_setting("PAGE_HIDE_ROOT_SLUG", True)
        self.assertEqual(page1.is_first_root(), True)
        # Hidden root slug: the first root page lives at the site root.
        self.assertEqual(page1.get_url_path(),
            reverse('pages-details-by-path', args=[], kwargs={'path': ''})
        )
        self.set_setting("PAGE_USE_LANGUAGE_PREFIX", True)
        self.assertEqual(page1.get_url_path(),
            reverse('pages-details-by-path', args=[],
                kwargs={'lang': 'en-us', 'path': ''})
        )
        self.set_setting("PAGE_HIDE_ROOT_SLUG", False)
        # Invalidate cached URLs before re-checking with the slug visible.
        page1.invalidate()
        self.assertEqual(page1.get_url_path(),
            reverse('pages-details-by-path', args=[],
                kwargs={'lang': 'en-us', 'path': 'page1'})
        )
        self.set_setting("PAGE_USE_LANGUAGE_PREFIX", False)
        self.assertEqual(page1.get_url_path(),
            reverse('pages-details-by-path', args=[],
                kwargs={'path': 'page1'})
        )
    def test_revision_depth(self):
        """
        Check that PAGE_CONTENT_REVISION_DEPTH works.
        """
        page1 = self.new_page(content={'slug':'page1'})
        self.set_setting("PAGE_CONTENT_REVISION_DEPTH", 3)
        Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev1')
        Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev2')
        Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev3')
        Content.objects.create_content_if_changed(page1, 'en-us', 'rev-test', 'rev4')
        # Only the 3 newest revisions survive; the latest body is kept.
        self.assertEqual(Content.objects.filter(type='rev-test').count(), 3)
        self.assertEqual(
            Content.objects.filter(type='rev-test').latest('creation_date').body,
            'rev4')
    def test_content_dict(self):
        """
        Check that content_dict method works.
        """
        page1 = self.new_page(content={'slug':'page1'})
        page1.save()
        c = Content.objects.create_content_if_changed(page1, 'en-us', 'body', 'test')
        self.assertEqual(
            page1.content_by_language(language='en-us'),
            [c]
        )
    def test_strict_urls(self):
        """
        Check that the strict handling of URLs work as
        intended.
        """
        page1 = self.new_page(content={'slug':'page1'})
        page2 = self.new_page(content={'slug':'page2'})
        page1.save()
        page2.save()
        page2.parent = page1
        page2.save()
        page1 = Page.objects.get(id=page1.id)
        # NOTE(review): assertTrue here takes [page2] as the *message* arg —
        # assertEqual was probably intended; kept as-is.
        self.assertTrue(page1.get_children(), [page2])
        # Non-strict: a wrong prefix still resolves by trailing slug.
        self.assertEqual(
            Page.objects.from_path('wrong/path/page2', 'en-us'),
            page2
        )
        self.set_setting("PAGE_USE_STRICT_URL", True)
        # Strict: the full path must match exactly.
        self.assertEqual(
            Page.objects.from_path('wrong/path/page2', 'en-us'),
            None
        )
        self.assertEqual(
            Page.objects.from_path('page1/page2', 'en-us'),
            page2
        )
    def test_remove_slug(self):
        """Test the remove slug function."""
        self.assertEqual(remove_slug('hello/world/toto'), 'hello/world')
        self.assertEqual(remove_slug('hello/world'), 'hello')
        self.assertEqual(remove_slug('/hello/world/'), 'hello')
        # A bare slug has no parent path.
        self.assertEqual(remove_slug('hello'), None)
    def test_path_too_long(self):
        """Test that the CMS try to resolve the whole page path to find
        a suitable sub path with delegation."""
        page1 = self.new_page(content={'slug':'page1'})
        page2 = self.new_page(content={'slug':'page2'})
        from pages import urlconf_registry as reg
        reg.register_urlconf('test', 'pages.testproj.documents.urls',
            label='test')
        page2.delegate_to = 'test'
        page1.delegate_to = 'test'
        page1.save()
        page2.save()
        page2.parent = page1
        page2.save()
        from pages.testproj.documents.models import Document
        doc = Document(title='doc title 1', text='text', page=page1)
        doc.save()
        req = get_request_mock()
        self.set_setting("PAGE_HIDE_ROOT_SLUG", True)
        def _get_context_page(path):
            return details(req, path, 'en-us')
        self.assertEqual(_get_context_page('/').status_code, 200)
        self.assertEqual(_get_context_page('/page1').status_code, 200)
        self.assertEqual(_get_context_page('/page1/').status_code, 200)
        self.assertEqual(_get_context_page('/page1/page2').status_code, 200)
        self.assertEqual(_get_context_page('/page1/page2/').status_code, 200)
        self.assertEqual(_get_context_page('/page1/page2/doc-%d' % doc.id
            ).status_code, 200)
        self.assertRaises(Http404, _get_context_page,
            '/page1/page-wrong/doc-%d' % doc.id)
        # Reset the registry so later tests start clean.
        reg.registry = []
    def test_po_file_imoprt_export(self):
        """Test the po files export and import."""
        page1 = self.new_page(content={'slug':'page1', 'title':'english title'})
        page1.save()
        #Content(page=page1, language='en-us', type='title', body='toto').save()
        Content(page=page1, language='fr-ch', type='title', body='french title').save()
        page1.invalidate()
        import StringIO
        stdout = StringIO.StringIO()
        # TODO: might be nice to use a temp dir for this test
        export_po_files(path='potests', stdout=stdout)
        self.assertTrue("Export language fr-ch" in stdout.getvalue())
        # Edit the exported .po in place, then re-import it.
        f = open("potests/fr-ch.po", "r+")
        old = f.read().replace('french title', 'translated')
        f.seek(0)
        f.write(old)
        f.close()
        stdout = StringIO.StringIO()
        import_po_files(path='potests', stdout=stdout)
        self.assertTrue("Update language fr-ch" in stdout.getvalue())
        self.assertTrue(("Update page %d" % page1.id) in stdout.getvalue())
        # NOTE(review): assertTrue takes 'translated' as the *message* arg —
        # assertEqual(page1.title(language='fr-ch'), 'translated') was likely
        # intended; kept as-is.
        self.assertTrue(page1.title(language='fr-ch'), 'translated')
    def test_page_methods(self):
        """Test that some methods run properly."""
        page1 = self.new_page(content={'slug':'page1', 'title':'hello'})
        page2 = self.new_page(content={'slug':'page2'})
        page1.save()
        page2.save()
        page2.parent = page1
        page2.save()
        self.assertEqual(
            page1.expose_content(),
            u"hello"
        )
        # Depth-1 child: slug is indented by three spaces per level.
        self.assertEqual(
            page2.slug_with_level(),
            u"&nbsp;&nbsp;&nbsp;page2"
        )
        p = Page(author=page1.author)
        self.assertEqual(unicode(p), u"Page without id")
        p.save()
        self.assertEqual(unicode(p), u"page-%d" % p.id)
    def test_context_processor(self):
        """Test that the page's context processor is properly activated."""
        from pages.views import details
        req = get_request_mock()
        page1 = self.new_page(content={'slug':'page1', 'title':'hello'})
        page1.save()
        # The media URL should leak into the rendered response if the
        # context processor ran.
        self.set_setting("PAGES_MEDIA_URL", "test_request_context")
        self.assertContains(details(req, path='/'), "test_request_context")
| {
"content_hash": "97bd24302cb0742b337d62a2efd52c91",
"timestamp": "",
"source": "github",
"line_count": 634,
"max_line_length": 87,
"avg_line_length": 38.01577287066246,
"alnum_prop": 0.582067878184383,
"repo_name": "oliciv/django-page-cms",
"id": "624a175f1eadbf9a8a9af7fc95368d1fcce33b44",
"size": "24126",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pages/tests/test_unit.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "672312"
},
{
"name": "PHP",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "256392"
}
],
"symlink_target": ""
} |
"""
Python package to interact with UniFi Controller
"""
import shutil
import time
import warnings
import json
import logging
import requests
from urllib3.exceptions import InsecureRequestWarning
"""For testing purposes:
logging.basicConfig(filename='pyunifi.log', level=logging.WARN,
format='%(asctime)s %(message)s')
""" # pylint: disable=W0105
# Module-level logger used by retry_login for connection/retry diagnostics.
CONS_LOG = logging.getLogger(__name__)
class APIError(Exception):
    """Raised for controller API errors (meta rc != "ok") and for
    transport failures wrapped by the retry_login decorator."""
def retry_login(func, *args, **kwargs):  # pylint: disable=w0613
    """Decorator: re-login once and retry the call on request/API errors.

    The decorated callable is expected to be a Controller method (its first
    positional argument must expose ``_login()``). Any exception that still
    escapes is re-raised as :class:`APIError`, chained to the original cause
    so the traceback is preserved. ``functools.wraps`` keeps the wrapped
    function's name/docstring for logging and introspection.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            try:
                return func(*args, **kwargs)
            except (requests.exceptions.RequestException, APIError) as err:
                CONS_LOG.warning("Failed to perform %s due to %s", func, err)
                controller = args[0]
                controller._login()  # pylint: disable=w0212
                return func(*args, **kwargs)
        except Exception as err:
            # Chain the cause so debugging keeps the original traceback.
            raise APIError(err) from err
    return wrapper
class Controller: # pylint: disable=R0902,R0904
    """Interact with a UniFi controller.

    Uses the JSON interface on port 8443 (HTTPS) to communicate with a UniFi
    controller. Operations will raise unifi.controller.APIError on obvious
    problems (such as login failure), but many errors (such as disconnecting a
    nonexistant client) will go unreported.

    >>> from unifi.controller import Controller
    >>> c = Controller('192.168.1.99', 'admin', 'p4ssw0rd')
    >>> for ap in c.get_aps():
    ...     print('AP named %s with MAC %s' % (ap.get('name'), ap['mac']))
    ...
    AP named Study with MAC dc:9f:db:1a:59:07
    AP named Living Room with MAC dc:9f:db:1a:59:08
    AP named Garage with MAC dc:9f:db:1a:59:0b
    """
    def __init__( # pylint: disable=r0913
        self,
        host,
        username,
        password,
        port=8443,
        version="v5",
        site_id="default",
        ssl_verify=True,
    ):
        """
        :param host: the address of the controller host; IP or name
        :param username: the username to log in with
        :param password: the password to log in with
        :param port: the port of the controller host
        :param version: the base version of the controller API [v4|v5]
        :param site_id: the site ID to connect to
        :param ssl_verify: Verify the controllers SSL certificate,
            can also be "path/to/custom_cert.pem"
        """
        self.log = logging.getLogger(__name__ + ".Controller")
        self.host = host
        # headers holds the rotating X-CSRF-Token once the controller sends one.
        self.headers = None
        self.version = version
        self.port = port
        self.username = username
        self.password = password
        self.site_id = site_id
        self.ssl_verify = ssl_verify
        # URL layout depends on the controller flavour.
        if version == "unifiOS":
            # unifiOS serves the network app behind a /proxy/network/ prefix.
            self.url = "https://" + host + "/proxy/network/"
            self.auth_url = self.url + "api/login"
        elif version == "UDMP-unifiOS":
            # UDM-Pro authenticates on a separate endpoint outside the proxy.
            self.auth_url = "https://" + host + "/api/auth/login"
            self.url = "https://" + host + "/proxy/network/"
        elif version[:1] == "v":
            # Classic software controller ("v4"/"v5"...), on its own port.
            if float(version[1:]) < 4:
                raise APIError("%s controllers no longer supported" % version)
            self.url = "https://" + host + ":" + str(port) + "/"
            self.auth_url = self.url + "api/login"
        else:
            raise APIError("%s controllers no longer supported" % version)
        if ssl_verify is False:
            # Show InsecureRequestWarning with the default once-per-location
            # filter rather than urllib3's behaviour.
            warnings.simplefilter("default", category=InsecureRequestWarning)
        self.log.debug("Controller for %s", self.url)
        # Side effect: performs a network login immediately.
        self._login()
@staticmethod
def _jsondec(data):
obj = json.loads(data)
if "meta" in obj:
if obj["meta"]["rc"] != "ok":
raise APIError(obj["meta"]["msg"])
if "data" in obj:
result = obj["data"]
else:
result = obj
return result
def _api_url(self):
return self.url + "api/s/" + self.site_id + "/"
@retry_login
def _read(self, url, params=None):
# Try block to handle the unifi server being offline.
response = self.session.get(url, params=params, headers=self.headers)
if response.headers.get("X-CSRF-Token"):
self.headers = {"X-CSRF-Token": response.headers["X-CSRF-Token"]}
return self._jsondec(response.text)
    def _api_read(self, url, params=None):
        # GET a path relative to the site API base (api/s/<site>/).
        return self._read(self._api_url() + url, params)
@retry_login
def _write(self, url, params=None):
response = self.session.post(url, json=params, headers=self.headers)
if response.headers.get("X-CSRF-Token"):
self.headers = {"X-CSRF-Token": response.headers["X-CSRF-Token"]}
return self._jsondec(response.text)
    def _api_write(self, url, params=None):
        # POST to a path relative to the site API base (api/s/<site>/).
        return self._write(self._api_url() + url, params)
@retry_login
def _update(self, url, params=None):
response = self.session.put(url, json=params, headers=self.headers)
if response.headers.get("X-CSRF-Token"):
self.headers = {"X-CSRF-Token": response.headers["X-CSRF-Token"]}
return self._jsondec(response.text)
    def _api_update(self, url, params=None):
        # PUT to a path relative to the site API base (api/s/<site>/).
        return self._update(self._api_url() + url, params)
@retry_login
def _delete(self, url, params=None):
response = self.session.delete(url, json=params, headers=self.headers)
if response.headers.get("X-CSRF-Token"):
self.headers = {"X-CSRF-Token": response.headers["X-CSRF-Token"]}
return self._jsondec(response.text)
    def _api_delete(self, url, params=None):
        # DELETE a path relative to the site API base (api/s/<site>/).
        return self._delete(self._api_url() + url, params)
def _login(self):
self.log.debug("login() as %s", self.username)
self.session = requests.Session()
self.session.verify = self.ssl_verify
response = self.session.post(
self.auth_url,
json={"username": self.username, "password": self.password},
headers=self.headers,
)
if response.headers.get("X-CSRF-Token"):
self.headers = {"X-CSRF-Token": response.headers["X-CSRF-Token"]}
if response.status_code != 200:
raise APIError(
"Login failed - status code: %i" % response.status_code
)
    def _logout(self):
        """Log out of the controller and close the underlying session."""
        self.log.debug("logout()")
        self._api_write("logout")
        self.session.close()
def switch_site(self, name):
"""
Switch to another site
:param name: Site Name
:return: True or APIError
"""
# TODO: Not currently supported on UDMP as site support doesn't exist.
if self.version == "UDMP-unifiOS":
raise APIError(
"Controller version not supported: %s" % self.version
)
for site in self.get_sites():
if site["desc"] == name:
self.site_id = site["name"]
return True
raise APIError("No site %s found" % name)
    def get_alerts(self):
        """Return a list of all Alerts."""
        # Uses a POST (_api_write): the alarm endpoint takes body parameters.
        return self._api_write("stat/alarm")
    def get_alerts_unarchived(self):
        """Return a list of Alerts unarchived."""
        # Filter out alarms already archived by the operator.
        params = {"archived": False}
        return self._api_write("stat/alarm", params=params)
    def get_statistics_last_24h(self):
        """Returns statistical data of the last 24h"""
        # Convenience wrapper anchored at the current time.
        return self.get_statistics_24h(time.time())
    def get_statistics_24h(self, endtime):
        """Return statistical data last 24h from time"""
        # Timestamps are sent in milliseconds.
        params = {
            "attrs": ["bytes", "num_sta", "time"],
            "start": int(endtime - 86400) * 1000,
            # NOTE(review): the window ends one hour *before* endtime —
            # presumably to match the hourly report granularity; confirm.
            "end": int(endtime - 3600) * 1000,
        }
        return self._api_write("stat/report/hourly.site", params)
    def get_events(self):
        """Return a list of all Events."""
        return self._api_read("stat/event")
    def get_aps(self):
        """Return a list of all APs,
        with significant information about each.
        """
        # Set test to 0 instead of NULL
        params = {"_depth": 2, "test": 0}
        return self._api_read("stat/device", params)
    def get_client(self, mac):
        """Get details about a specific client"""
        # stat/user/<mac> works better than stat/sta/<mac>
        # stat/sta seems to be only active clients
        # stat/user includes known but offline clients
        return self._api_read("stat/user/" + mac)[0]
    def get_clients(self):
        """Return a list of all active clients,
        with significant information about each.
        """
        return self._api_read("stat/sta")
    def get_users(self):
        """Return a list of all known clients (including offline ones),
        with significant information about each.
        """
        return self._api_read("list/user")
    def get_user_groups(self):
        """Return a list of user groups with its rate limiting settings."""
        return self._api_read("list/usergroup")
    def get_sysinfo(self):
        """Return basic system information."""
        return self._api_read("stat/sysinfo")
    def get_healthinfo(self):
        """Return health information."""
        return self._api_read("stat/health")
    def get_sites(self):
        """Return a list of all sites,
        with their UID and description"""
        # Site listing lives outside the per-site api/s/<site>/ namespace.
        return self._read(self.url + "api/self/sites")
    def get_wlan_conf(self):
        """Return a list of configured WLANs
        with their configuration parameters.
        """
        return self._api_read("list/wlanconf")
def _run_command(self, command, params=None, mgr="stamgr"):
if params is None:
params = {}
self.log.debug("_run_command(%s)", command)
params.update({"cmd": command})
return self._api_write("cmd/" + mgr, params=params)
def _mac_cmd(self, target_mac, command, mgr="stamgr", params=None):
if params is None:
params = {}
self.log.debug("_mac_cmd(%s, %s)", target_mac, command)
params["mac"] = target_mac
return self._run_command(command, params, mgr)
    def get_device_stat(self, target_mac):
        """Gets the current state & configuration of
        the given device based on its MAC Address.

        :param target_mac: MAC address of the device.
        :type target_mac: str
        :returns: Dictionary containing metadata, state,
            capabilities and configuration of the device
        :rtype: dict()
        """
        self.log.debug("get_device_stat(%s)", target_mac)
        # "macs" narrows the reply to this device; [0] unwraps the list.
        params = {"macs": [target_mac]}
        return self._api_read("stat/device/" + target_mac, params)[0]
    def get_radius_users(self):
        """Return a list of all RADIUS users, with their
        name, password, 24 digit user id, and 24 digit site id
        """
        return self._api_read('rest/account')
    def add_radius_user(self, name, password):
        """Add a new user with this username and password

        :param name: new user's username
        :param password: new user's password
        :returns: user's name, password, 24 digit user id, and 24 digit site id
        """
        # "x_password" is the controller's field name for the secret.
        params = {'name': name, 'x_password': password}
        return self._api_write('rest/account/', params)
    def update_radius_user(self, name, password, user_id):
        """Update a user to this new username and password

        :param name: user's new username
        :param password: user's new password
        :param user_id: the user's 24 digit user id, from get_radius_users()
            or add_radius_user()
        :returns: user's name, password, 24 digit user id, and 24 digit site id
        :returns: [] if no change was made
        """
        params = {'name': name, '_id': user_id, 'x_password': password}
        return self._api_update('rest/account/' + user_id, params)
    def delete_radius_user(self, user_id):
        """Delete user

        :param user_id: the user's 24 digit user id, from get_radius_users()
            or add_radius_user()
        :returns: [] if successful
        """
        return self._api_delete('rest/account/' + user_id)
    def get_switch_port_overrides(self, target_mac):
        """Gets a list of port overrides, in dictionary
        format, for the given target MAC address. The
        dictionary contains the port_idx, portconf_id,
        poe_mode, & name.

        NOTE(review): entries created by this library use the key
        'portconf_id' (see _switch_port_power), not 'portconf' as the
        example below suggests — confirm against a live controller.

        :param target_mac: MAC address of the device.
        :type target_mac: str
        :returns: [ { 'port_idx': int(), 'portconf': str,
            'poe_mode': str, 'name': str } ]
        :rtype: list( dict() )
        """
        self.log.debug("get_switch_port_overrides(%s)", target_mac)
        return self.get_device_stat(target_mac)["port_overrides"]
def _switch_port_power(self, target_mac, port_idx, mode):
"""Helper method to set the given PoE mode the port/switch.
:param target_mac: MAC address of the Switch.
:type target_mac: str
:param port_idx: Port ID to target
:type port_idx: int
:param mode: PoE mode to set. ie. auto, on, off.
:type mode: str
:returns: { 'port_overrides': [ { 'port_idx': int(),
'portconf': str, 'poe_mode': str, 'name': str } ] }
:rtype: dict( list( dict() ) )
"""
# TODO: Switch operations should most likely happen in a
# different Class, Switch.
self.log.debug(
"_switch_port_power(%s, %s, %s)", target_mac, port_idx, mode
)
device_stat = self.get_device_stat(target_mac)
device_id = device_stat.get("_id")
overrides = device_stat.get("port_overrides")
found = False
if overrides:
for i in overrides:
if overrides[i]["port_idx"] == port_idx:
# Override already exists, update..
overrides[i]["poe_mode"] = mode
found = True
break
if not found:
# Retrieve portconf
portconf_id = None
for port in device_stat["port_table"]:
if port["port_idx"] == port_idx:
portconf_id = port["portconf_id"]
break
if portconf_id is None:
raise APIError(
"Port ID %s not found in port_table" % str(port_idx)
)
overrides.append(
{
"port_idx": port_idx,
"portconf_id": portconf_id,
"poe_mode": mode
}
)
# We return the device_id as it's needed by the parent method
return {"port_overrides": overrides, "device_id": device_id}
def switch_port_power_off(self, target_mac, port_idx):
    """Power off a single port on the switch with the given MAC address.

    :param target_mac: MAC address of the Switch.
    :type target_mac: str
    :param port_idx: Port ID to power off
    :type port_idx: int
    :returns: API Response which is the resulting complete port overrides
    :rtype: list( dict() )
    """
    self.log.debug("switch_port_power_off(%s, %s)", target_mac, port_idx)
    payload = self._switch_port_power(target_mac, port_idx, "off")
    # The helper smuggles the device id alongside the overrides payload.
    device_id = payload.pop("device_id")
    return self._api_update("rest/device/" + device_id, payload)
def switch_port_power_on(self, target_mac, port_idx):
    """Power on a single port on the switch with the given MAC address.

    The port is set back to "auto" PoE mode rather than forced on.

    :param target_mac: MAC address of the Switch.
    :type target_mac: str
    :param port_idx: Port ID to power on
    :type port_idx: int
    :returns: API Response which is the resulting complete port overrides
    :rtype: list( dict() )
    """
    self.log.debug("switch_port_power_on(%s, %s)", target_mac, port_idx)
    payload = self._switch_port_power(target_mac, port_idx, "auto")
    device_id = payload.pop("device_id")
    return self._api_update("rest/device/" + device_id, payload)
def create_site(self, desc="desc"):
    """Create a new site on the controller.

    :param desc: Name of the site to be created.
    """
    # TODO: Not currently supported on UDMP as site support doesn't exist.
    if self.version == "UDMP-unifiOS":
        raise APIError(
            "Controller version not supported: %s" % self.version
        )
    params = {"desc": desc}
    return self._run_command("add-site", params=params, mgr="sitemgr")
def block_client(self, mac):
    """Add a client to the block list.

    :param mac: the MAC address of the client to block.
    """
    command = "block-sta"
    return self._mac_cmd(mac, command)
def unblock_client(self, mac):
    """Remove a client from the block list.

    :param mac: the MAC address of the client to unblock.
    """
    command = "unblock-sta"
    return self._mac_cmd(mac, command)
def disconnect_client(self, mac):
    """Disconnect a client.

    Disconnects a client, forcing them to reassociate. Useful when the
    connection is of bad quality to force a rescan.

    :param mac: the MAC address of the client to disconnect.
    """
    command = "kick-sta"
    return self._mac_cmd(mac, command)
def restart_ap(self, mac):
    """Restart an access point (by MAC).

    :param mac: the MAC address of the AP to restart.
    """
    command = "restart"
    return self._mac_cmd(mac, command, "devmgr")
def restart_ap_name(self, name):
    """Restart an access point (by name).

    :param name: the name of the AP to restart.
    """
    if not name:
        raise APIError("%s is not a valid name" % str(name))
    for access_point in self.get_aps():
        # Only restart an AP that is connected (state == 1) and whose
        # name matches exactly.
        is_connected = access_point.get("state", 0) == 1
        if is_connected and access_point.get("name", None) == name:
            return self.restart_ap(access_point["mac"])
    # Implicitly returns None when no connected AP matches the name.
def archive_all_alerts(self):
    """Archive every alert currently on the controller."""
    command = "archive-all-alarms"
    return self._run_command(command, mgr="evtmgr")
# TODO: Not currently supported on UDMP as it now utilizes async-backups.
def create_backup(self, days="0"):
    """Ask controller to create a backup archive file

    ..warning:
        This process puts significant load on the controller
        and may render it partially unresponsive for other requests.

    :param days: metrics of the last x days will be added to the backup.
        '-1' backup all metrics. '0' backup only the configuration.
    :return: URL path to backup file
    """
    if self.version == "UDMP-unifiOS":
        raise APIError(
            "Controller version not supported: %s" % self.version
        )
    response = self._run_command(
        "backup", mgr="system", params={"days": days}
    )
    return response[0]["url"]
# TODO: Not currently supported on UDMP as it now utilizes async-backups.
def get_backup(self, download_path=None, target_file="unifi-backup.unf"):
    """Download a backup archive from the controller to a local file.

    :param download_path: path to backup; if None is given
        one will be created
    :param target_file: Filename or full path to download the
        backup archive to, should have .unf extension for restore.
    """
    if self.version == "UDMP-unifiOS":
        raise APIError(
            "Controller version not supported: %s" % self.version
        )
    if not download_path:
        download_path = self.create_backup()
    response = self.session.get(self.url + download_path, stream=True)
    # BUG FIX: the original compared the Response object itself to 200
    # ("response != 200"), which is always true, so every download --
    # including a successful one -- raised APIError. Compare the HTTP
    # status code instead.
    if response.status_code != 200:
        raise APIError("API backup failed: %i" % response.status_code)
    with open(target_file, "wb") as _backfh:
        return shutil.copyfileobj(response.raw, _backfh)
def authorize_guest( # pylint: disable=R0913
    self,
    guest_mac,
    minutes,
    up_bandwidth=None,
    down_bandwidth=None,
    byte_quota=None,
    ap_mac=None,
):
    """
    Authorize a guest based on his MAC address.

    :param guest_mac: the guest MAC address: 'aa:bb:cc:dd:ee:ff'
    :param minutes: duration of the authorization in minutes
    :param up_bandwidth: up speed allowed in kbps
    :param down_bandwidth: down speed allowed in kbps
    :param byte_quota: quantity of bytes allowed in MB
    :param ap_mac: access point MAC address
    """
    params = {"mac": guest_mac, "minutes": minutes}
    # Only include the optional limits that were actually supplied.
    optional = (
        ("up", up_bandwidth),
        ("down", down_bandwidth),
        ("bytes", byte_quota),
        ("ap_mac", ap_mac),
    )
    for key, value in optional:
        if value:
            params[key] = value
    return self._run_command("authorize-guest", params=params)
def unauthorize_guest(self, guest_mac):
    """
    Unauthorize a guest based on his MAC address.

    :param guest_mac: the guest MAC address: 'aa:bb:cc:dd:ee:ff'
    """
    return self._run_command(
        "unauthorize-guest", params={"mac": guest_mac}
    )
def get_firmware(
    self,
    cached=True,
    available=True,
    known=False,
    site=False
):
    """
    Return a list of available/cached firmware versions

    :param cached: Return cached firmwares
    :param available: Return available (and not cached) firmwares
    :param known: Return only firmwares for known devices
    :param site: Return only firmwares for on-site devices
    :return: List of firmware dicts
    """
    firmwares = []
    if cached:
        firmwares += self._run_command("list-cached", mgr="firmware")
    if available:
        firmwares += self._run_command("list-available", mgr="firmware")
    # The optional flags narrow the combined result.
    if known:
        firmwares = [fw for fw in firmwares if fw["knownDevice"]]
    if site:
        firmwares = [fw for fw in firmwares if fw["siteDevice"]]
    return firmwares
def cache_firmware(self, version, device):
    """
    Cache the firmware on the UniFi Controller

    .. warning:: Caching one device might very well cache others,
        as they're on shared platforms

    :param version: version to cache
    :param device: device model to cache (e.g. BZ2)
    :return: True/False
    """
    params = {"device": device, "version": version}
    response = self._run_command("download", mgr="firmware", params=params)
    return response[0]["result"]
def remove_firmware(self, version, device):
    """
    Remove cached firmware from the UniFi Controller

    .. warning:: Removing one device's firmware might very well remove
        others, as they're on shared platforms

    :param version: version to remove
    :param device: device model to remove (e.g. BZ2)
    :return: True/false
    """
    params = {"device": device, "version": version}
    response = self._run_command("remove", mgr="firmware", params=params)
    return response[0]["result"]
def get_tag(self):
    """Get all tags and their member MACs"""
    endpoint = "rest/tag"
    return self._api_read(endpoint)
def upgrade_device(self, mac, version):
    """
    Upgrade a device's firmware to the given version.

    :param mac: MAC of dev
    :param version: version to upgrade to
    :return: the command's API response
    """
    # Return the command result for consistency with the other device
    # commands in this class (the original discarded it and returned
    # None); backward compatible for callers that ignored the result.
    return self._mac_cmd(
        mac,
        "upgrade",
        mgr="devmgr",
        params={
            "upgrade_to_firmware": version
        }
    )
def provision(self, mac):
    """
    Force provisioning of a device

    :param mac: MAC of device
    """
    command = "force-provision"
    return self._mac_cmd(mac, command, mgr="devmgr")
def get_setting(self, section=None, cs_settings=False):
    """
    Return settings for this site or controller

    :param cs_settings: Return only controller-wide settings
    :param section: Only return this/these section(s)
    :return: {section:settings}
    """
    result = {}
    all_settings = self._api_read("get/setting")
    # Normalise a single section name into a list for uniform filtering.
    if section and not isinstance(section, (list, tuple)):
        section = [section]
    for setting in all_settings:
        key = setting["key"]
        # Site-scoped settings carry a site_id; controller-wide ones
        # do not -- pick whichever kind the caller asked for.
        if cs_settings:
            wanted = "site_id" not in setting
        else:
            wanted = "site_id" in setting
        if not wanted or (section and key not in section):
            continue
        # Strip bookkeeping fields before exposing the setting.
        for meta in ("_id", "site_id", "key"):
            setting.pop(meta, None)
        result[key] = setting
    return result
def update_setting(self, settings):
    """
    Update settings

    :param settings: {section:{settings}}
    :return: resulting settings
    """
    results = []
    for section_name, payload in settings.items():
        results.extend(
            self._api_write("set/setting/" + section_name, payload)
        )
    return results
def update_user_group(self, group_id, down_kbps=-1, up_kbps=-1):
    """
    Update user group bandwidth settings

    :param group_id: Group ID to modify
    :param down_kbps: New bandwidth in KBPS for download
    :param up_kbps: New bandwidth in KBPS for upload
    :raises ValueError: if no group with *group_id* exists
    """
    for group in self.get_user_groups():
        if group["_id"] != group_id:
            continue
        # Apply setting change
        return self._api_update(
            "rest/usergroup/{0}".format(group_id),
            {
                "qos_rate_max_down": down_kbps,
                "qos_rate_max_up": up_kbps,
                "name": group["name"],
                "_id": group_id,
                "site_id": self.site_id,
            },
        )
    raise ValueError("Group ID {0} is not valid.".format(group_id))
def set_client_alias(self, mac, alias):
    """
    Set the client alias. Set to "" to reset to default

    :param mac: The MAC of the client to rename
    :param alias: The alias to set
    """
    client_id = self.get_client(mac)["_id"]
    return self._api_update("rest/user/" + client_id, {"name": alias})
def create_voucher( # pylint: disable=R0913
    self,
    number,
    quota,
    expire,
    up_bandwidth=None,
    down_bandwidth=None,
    byte_quota=None,
    note=None,
):
    """
    Create voucher for guests.

    :param number: number of vouchers
    :param quota: number of using; 0 = unlimited
    :param expire: expiration of voucher in minutes
    :param up_bandwidth: up speed allowed in kbps
    :param down_bandwidth: down speed allowed in kbps
    :param byte_quota: quantity of bytes allowed in MB
    :param note: description
    """
    params = {
        "n": number,
        "quota": quota,
        "expire": "custom",
        "expire_number": expire,
        "expire_unit": 1,
    }
    # Optional attributes are only sent when supplied.
    for key, value in (
        ("up", up_bandwidth),
        ("down", down_bandwidth),
        ("bytes", byte_quota),
        ("note", note),
    ):
        if value:
            params[key] = value
    result = self._run_command("create-voucher", mgr="hotspot", params=params)
    # The create command only returns a timestamp; look the newly
    # created voucher(s) up by that creation time.
    return self.list_vouchers(create_time=result[0]["create_time"])
def list_vouchers(self, **filter_voucher):
    """
    Get list of vouchers

    :param filter_voucher: Filter vouchers by create_time, code, quota,
        used, note, status_expires, status, ...
    """
    # Voucher codes are stored without dashes.
    if "code" in filter_voucher:
        filter_voucher["code"] = filter_voucher["code"].replace("-", "")
    return [
        voucher
        for voucher in self._api_read("stat/voucher")
        if all(voucher.get(key) == val
               for key, val in filter_voucher.items())
    ]
def delete_voucher(self, voucher_id):
    """
    Delete / revoke voucher

    :param voucher_id: id of voucher
    :return: the command's API response
    """
    # Return the API response for consistency with the other hotspot
    # commands in this class (the original discarded it); backward
    # compatible for callers that ignored the result. Also fixed the
    # docstring, which documented a non-existent "id" parameter.
    return self._run_command(
        "delete-voucher", mgr="hotspot", params={"_id": voucher_id}
    )
| {
"content_hash": "1f596cbf5858b5645d9d99a27dd1a433",
"timestamp": "",
"source": "github",
"line_count": 863,
"max_line_length": 79,
"avg_line_length": 33.82039397450753,
"alnum_prop": 0.5552129372665913,
"repo_name": "finish06/pyunifi",
"id": "aafc9a20694a5976802fea5d954de39bfc48b38a",
"size": "29187",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyunifi/controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35750"
}
],
"symlink_target": ""
} |
import sublime
import sublime_plugin
class UnderlinerCommand(sublime_plugin.TextCommand):
    """Toggle between spaces and underscores in the selected text.

    Whichever of the two characters occurs more often across all
    selections is replaced by the other one.
    """

    # Maps the prevailing character to its replacement.
    replacements = {
        ' ': '_',
        '_': ' ',
    }
    # Human-readable names used in the status-bar message.
    names = {
        ' ': 'spaces',
        '_': 'underlines',
    }

    def run(self, edit):
        selections = self.view.sel()
        source_char = self.get_prevailing_char(selections)
        target_char = self.replacements[source_char]
        for region in selections:
            original = self.view.substr(region)
            rewritten = original.replace(source_char, target_char)
            self.view.replace(edit, region, rewritten)
        message = 'It was found more %s than %s, so it was replaced'
        sublime.status_message(
            message % (self.names[source_char], self.names[target_char]))

    def get_prevailing_char(self, sels):
        '''decide which char will be replaced'''
        combined = ''.join(self.view.substr(region) for region in sels)
        # Ties fall through to '_' being replaced, as before.
        return ' ' if combined.count(' ') > combined.count('_') else '_'
| {
"content_hash": "7abb4333eb2898758939683ebde4b5d9",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 75,
"avg_line_length": 28.928571428571427,
"alnum_prop": 0.5489711934156378,
"repo_name": "dfleury/underliner",
"id": "3bab1fb6c37c2aa9c876e40ac293e3dca3275896",
"size": "1215",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Underliner.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1215"
}
],
"symlink_target": ""
} |
import mock
import hashlib
from django.core.cache import cache
from siglock.decorators import single_task
@mock.patch('django.core.cache.cache.add')
def test_single_task_no_arguments(add_mock):
    """ Tests cache key without any args or kwargs """
    def fn():
        pass

    # decorate & call
    single_task(60)(fn)()

    # BUG FIX: this test accepted `add_mock` but was missing the
    # @mock.patch decorator that every sibling test has, so pytest
    # treated the parameter as an unknown fixture and errored out.
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == ('lock_fn', 'true', 60)
@mock.patch('django.core.cache.cache.add')
def test_single_task_with_args(add_mock):
    """ Tests cache key with args """
    def fn(*args):
        pass

    # Decorate first, then invoke with positional arguments.
    decorated = single_task(60)(fn)
    decorated('1', 2, None, False)

    expected = ('lock_fn_1_2_None_False', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_with_kwargs(add_mock):
    """ Tests cache key with kwargs """
    def fn(**kwargs):
        pass

    # Decorate first, then invoke with keyword arguments only.
    decorated = single_task(60)(fn)
    decorated(a='1', b=2, c=None, d=False)

    expected = ('lock_fn_a_1_b_2_c_None_d_False', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_with_args_and_kwargs(add_mock):
    """ Tests cache key with both args and kwargs """
    def fn(*args, **kwargs):
        pass

    # Decorate first, then invoke with a mix of args and kwargs.
    decorated = single_task(60)(fn)
    decorated(1, 2, a='b', b='c')

    expected = ('lock_fn_1_2_a_b_b_c', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_single_argument(add_mock):
    """ Tests cache key with single arg """
    def fn(arg):
        pass

    # Decorate first, then invoke with a single positional argument.
    decorated = single_task(60)(fn)
    decorated(1)

    expected = ('lock_fn_arg_1', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_with_list_as_argument(add_mock):
    """ Tests cache key with list as argument """
    def fn(lst):
        pass

    # Decorate first, then invoke with a list argument.
    decorated = single_task(60)(fn)
    decorated([1, 2])

    expected = ('lock_fn_lst_[1,2]', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_with_float_as_argument(add_mock):
    """ Tests cache key with float as argument """
    def fn(f):
        pass

    # Decorate first, then invoke with a float argument.
    decorated = single_task(60)(fn)
    decorated(1.2345)

    expected = ('lock_fn_f_1.2345', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_default_arguments(add_mock):
    """ Tests cache key with default arguments """
    def fn(a=False, b=None, *args):
        pass

    # Decorate first, then invoke so the defaults stay untouched and
    # the extra positionals land in *args.
    decorated = single_task(60)(fn)
    decorated(1, 2, 3, 4)

    expected = ('lock_fn_a_False_b_None_1_2_3_4', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_override_default_arguments(add_mock):
    """ Tests cache key with defaults which are overriden """
    def fn(a=False, b=None, **kwargs):
        pass

    # Decorate first, then override both defaults and add a kwarg.
    decorated = single_task(60)(fn)
    decorated(a=True, b=1, c=2)

    expected = ('lock_fn_a_True_b_1_c_2', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.add')
def test_single_task_ignore_arguments(add_mock):
    """ Tests task arguments are ignored and cache key generated from function name """
    def fn(lst):
        pass

    # With ignore_args the key must not include the list argument.
    decorated = single_task(60, ignore_args=True)(fn)
    decorated([1, 2, 3])

    expected = ('lock_fn', 'true', 60)
    assert add_mock.call_count == 1
    assert add_mock.call_args[0] == expected
@mock.patch('django.core.cache.cache.delete')
def test_run_while_locked(cache_delete_mock):
    """ Test existing lock cache key set that prevents from running task again """
    def fn():
        pass

    # Pre-set the lock so the decorated task refuses to run.
    cache.set('lock_fn', 'true', 60)
    single_task(60)(fn)()

    # The lock was never released because the task never ran.
    assert cache_delete_mock.called is False
@mock.patch('django.core.cache.cache.add')
def test_single_task_digest_key_short(add_mock):
    """ Tests generated short cache key is md5 hashed """
    def fn(arg):
        pass

    # decorate & call
    single_task(60, digest=True)(fn)(1)

    # One-shot md5 of the undigested key.
    expected_hash = hashlib.md5(b'lock_fn_arg_1').hexdigest()
    assert add_mock.call_args[0] == (expected_hash, 'true', 60)
@mock.patch('django.core.cache.cache.add')
def test_single_task_digest_key_long_arguments(add_mock):
    """ Tests generated long cache key is md5 hashed """
    def fn(lst):
        pass

    # list(range(...)) instead of the redundant identity comprehension.
    lst = list(range(0, 300))

    # decorate & call
    single_task(60, digest=True)(fn)(lst)

    expected_key = 'lock_fn_lst_{}'.format(str(lst).replace(' ', ''))
    expected_hash = hashlib.md5(expected_key.encode()).hexdigest()
    assert add_mock.call_args[0] == (expected_hash, 'true', 60)
| {
"content_hash": "70a1e4113c992e00069c7ba39a9ec34c",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 87,
"avg_line_length": 25.057291666666668,
"alnum_prop": 0.618166701309499,
"repo_name": "lockwooddev/djcelery-siglock",
"id": "7f659b630e68880cc65600795e6a992f6fab7802",
"size": "4811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "siglock/tests/test_decorators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "247"
},
{
"name": "Python",
"bytes": "9882"
}
],
"symlink_target": ""
} |
"""
Comments in PyCharm style.
References
- Tag sorter by Zack
/common-samples/blob/master/tools/net/tag_sorter/tag_sorter.py
- README standard format
/common-samples/wiki/Standard-sample-documentation-template-%28README.md%29
"""
import os
import re
import typing
import argparse
# region Global sets
# A set of words that get omitted during letter-case checks.
# Extend this set when a sample title legitimately uses a proper noun
# that is neither lowercase, UPPERCASE nor Capitalized.
exception_proper_nouns = {
    'WmsLayer',
    'ArcGIS Online',
    'OAuth',
    'Web Mercator',
    'ArcGIS Pro',
    'GeoPackage',
    'loadStatus',
    'Integrated Windows Authentication',
    'GeoElement',
    'Network Link',
    'Network Link Control',
    'Open Street Map',
    'OpenStreetMap',
    'Play a KML Tour',
    'SwiftUI',
    'Arcade'
}
# A set of category folder names in current sample viewer.
# Folders whose parent is not one of these are skipped by all_designs().
categories = {
    'Maps',
    'Layers',
    'Features',
    'Display information',
    'Search',
    'Edit data',
    'Geometry',
    'Route and directions',
    'Analysis',
    'Cloud and portal',
    'Scenes',
    'Utility network',
    'Augmented reality'
}
# endregion
# region Static functions
def get_folder_name_from_path(path: str, index: int = -1) -> str:
    """
    Get the folder name from a full path.

    :param path: A string of a full/absolute path to a folder.
    :param index: The index of path parts. Default to -1 to get the most
    trailing folder in the path; set to certain index to get other parts.
    :return: The folder name.
    """
    components = os.path.normpath(path).split(os.path.sep)
    return components[index]
def parse_head(head_string: str) -> (str, str):
    """
    Parse the head of README and get title and description.

    :param head_string: A string containing title, description and images.
    :return: Stripped title and description strings.
    """
    # Split the head into lines, dropping the empty ones.
    non_empty = [line for line in head_string.splitlines() if line]
    if len(non_empty) < 3:
        raise Exception('README should contain title, description and image.')
    title = non_empty[0].lstrip('# ').rstrip()
    description = non_empty[1].strip()
    return title, description
def check_apis(apis_string: str) -> typing.Set[str]:
    """
    Check the format for `Relevant API` section.

    :param apis_string: A multiline string containing all APIs.
    :return: A set of APIs. Throws if format is wrong.
    """
    lines = apis_string.strip().splitlines()
    if not lines:
        raise Exception('Empty Relevant APIs.')
    cleaned = []
    for line in lines:
        # Bullet is checked by the linter, no need to check here.
        entry = line.lstrip('*- ').rstrip()
        if '`' in entry:
            raise Exception('API should not include backticks.')
        cleaned.append(entry)
    unique = set(cleaned)
    if '' in unique:
        raise Exception('Empty line in APIs.')
    if len(cleaned) > len(unique):
        raise Exception('Duplicate APIs.')
    if cleaned != sorted(cleaned, key=str.casefold):
        raise Exception('APIs are not sorted.')
    return unique
def check_tags(tags_string: str) -> typing.Set[str]:
    """
    Check the format for `Tags` section.

    :param tags_string: A string containing all tags, with comma as delimiter.
    :return: A set of tags. Throws if format is wrong.
    """
    tags = tags_string.split(',')
    if not tags:
        raise Exception('Empty tags.')
    cleaned = []
    for raw in tags:
        tag = raw.strip()
        # A tag must be lowercase, UPPERCASE, Capitalized, or a known
        # proper noun.
        acceptable = (tag.lower() == tag or tag.upper() == tag
                      or tag.capitalize() == tag
                      or tag in exception_proper_nouns)
        if not acceptable:
            raise Exception(f'Wrong letter case for tag: "{tag}".')
        cleaned.append(tag)
    unique = set(cleaned)
    if '' in unique:
        raise Exception('Empty char in tags.')
    if ', '.join(cleaned) != tags_string.strip():
        raise Exception('Extra whitespaces in tags.')
    if len(tags) > len(unique):
        raise Exception('Duplicate tags.')
    if cleaned != sorted(cleaned, key=str.casefold):
        raise Exception('Tags are not sorted.')
    return unique
def check_sentence_case(string: str) -> None:
    """
    Check if a sentence follows 'sentence case'. A few examples below.

    Hello world! -> YES
    I'm a good guy. -> YES
    a man and a gun. -> NO
    A WMS layer -> YES, as it's allowed to include proper nouns

    :param string: Input sentence, typically the title string.
    :return: None. Throws if is not sentence case.
    """
    # Check empty string.
    if not string:
        raise Exception('Empty title string.')
    # The whole sentence may be an excepted proper noun.
    if string in exception_proper_nouns:
        return
    words = string.split()
    first = words[0]
    # First word should either be Title-cased or a proper noun (UPPERCASE).
    if (first[0].upper() != first[0] and first.upper() != first
            and first not in exception_proper_nouns):
        raise Exception('Wrong letter case for the first word in title.')
    # Remaining words must be lowercase, UPPERCASE, or excepted.
    for word in words[1:]:
        word = word.strip('()')
        if (word.lower() != word and word.upper() != word
                and word not in exception_proper_nouns):
            raise Exception(f'Wrong letter case for word: "{word}" in title.')
def check_is_subsequence(list_a: typing.List[str],
                         list_b: typing.List[str]) -> int:
    """
    Check if list A is a subsequence of list B.

    E.g.
    list_a = ['a', 'b', 'c']
    list_b = ['a', 'h', 'b', 'g', 'd', 'c']
    -> returns 0, which means all elements in list_a is also in list_b

    :param list_a: A list of strings, presumably the section titles of a README.
    :param list_b: A list of strings, presumably all valid titles in order.
    :return: 0 if list_a is subsequence of list_b, otherwise the count of
        unmatched leading elements (list_a[pa-1] is the first offender).
    """
    # BUG FIX: the original returned True (== 1, truthy) for an empty
    # list_a, which its caller treated as a "wrong order" failure; and
    # it kept matching list_a[-1] after pa reached 0, so pa could go
    # negative (also truthy) when list_b repeated the first element.
    pa = len(list_a)
    # Empty list is always a subsequence of other lists.
    if pa == 0:
        return 0
    # Greedily match list_a against list_b from the back.
    for item in reversed(list_b):
        if item == list_a[pa - 1]:
            pa -= 1
            if pa == 0:
                break
    return pa
# endregion
class ReadmeStyleChecker:
    """Runs the style checks against a single sample's README.md."""

    # Section headers every README must contain.
    essential_headers = {
        'Use case',
        'How to use the sample',
        'How it works',
        'Relevant API',
        'Tags'
    }
    # All valid section headers, in the order they must appear.
    possible_headers = [
        'Use case',
        'How to use the sample',
        'How it works',
        'Relevant API',
        'Offline data',
        'About the data',
        'Additional information',
        'Tags'
    ]

    def __init__(self, folder_path: str):
        self.folder_path = folder_path
        self.folder_name = get_folder_name_from_path(folder_path)
        self.readme_path = os.path.join(folder_path, 'README.md')
        self.readme_contents = None  # full README text
        self.readme_parts = None     # text split around level-2 headers
        self.readme_headers = None   # the level-2 header strings, in order

    def populate_from_readme(self) -> None:
        """
        Read and parse the sections from README.

        :return: None. Throws if exception occurs.
        """
        try:
            # BUG FIX: use a context manager so the file handle is closed
            # on the error path as well (the original only closed it in
            # the `else` clause, leaking it on failure).
            with open(self.readme_path, 'r') as readme_file:
                # read the readme content into a string
                contents = readme_file.read()
            # A regular expression that matches exactly 2 pound marks, and
            # capture the trailing string.
            pattern = re.compile(r'^#{2}(?!#)\s(.*)', re.MULTILINE)
            self.readme_contents = contents
            # Use regex to split the README by section headers, so that they
            # are separated into paragraphs.
            self.readme_parts = re.split(pattern, contents)
            # Capture the section headers.
            self.readme_headers = re.findall(pattern, contents)
        except Exception as err:
            raise Exception(f'Error loading file - {self.readme_path} - {err}.')

    def check_format_heading(self) -> None:
        """
        Check if
        1. essential section headers present.
        2. all sections are valid.
        3. section headers are in correct order.

        :return: None. Throws if exception occurs.
        """
        header_set = set(self.readme_headers)
        possible_header_set = set(self.possible_headers)
        # Check if all sections are valid.
        sets_diff = header_set - possible_header_set
        if sets_diff:
            raise Exception(
                f'Error header - Unexpected header or extra whitespace'
                f' - "{sets_diff}".')
        # Check if all essential section headers present.
        sets_diff = self.essential_headers - header_set
        if sets_diff:
            raise Exception(
                f'Error header - Missing essential header(s) - "{sets_diff}".')
        # Check if all sections are in correct order.
        index = check_is_subsequence(self.readme_headers, self.possible_headers)
        if index:
            raise Exception(
                f'Error header - Wrong order at - '
                f'"{self.readme_headers[index-1]}".')

    def check_format_title_section(self) -> None:
        """
        Check if
        1. the head has at least 3 parts (title, description and image URLs).
        2. the title string uses sentence case.

        :return: None. Throws if exception occurs.
        """
        try:
            title, _ = parse_head(self.readme_parts[0])
            check_sentence_case(title)
        except Exception as err:
            raise Exception(f'Error title - {err}')

    def check_format_apis(self) -> None:
        """
        Check if APIs
        1. do not have backticks.
        2. are sorted.
        3. do not have duplicate entries.

        :return: None. Throws if exception occurs.
        """
        try:
            api_section_index = self.readme_parts.index('Relevant API') + 1
            check_apis(self.readme_parts[api_section_index])
        except Exception as err:
            raise Exception(f'Error APIs - {err}')

    def check_format_tags(self) -> None:
        """
        Check if tags
        1. are in correct case.
        2. are sorted.
        3. do not have duplicate entries.

        :return: None. Throws if exception occurs.
        """
        try:
            tags_section_index = self.readme_parts.index('Tags') + 1
            check_tags(self.readme_parts[tags_section_index])
        except Exception as err:
            raise Exception(f'Error tags - {err}')

    def check_redundant_apis_in_tags(self) -> None:
        """
        Check if APIs and tags intersect.

        :return: None. Throws if exception occurs.
        """
        try:
            tags_section_index = self.readme_parts.index('Tags') + 1
            api_section_index = self.readme_parts.index('Relevant API') + 1
            api_set = check_apis(self.readme_parts[api_section_index])
            tag_set = check_tags(self.readme_parts[tags_section_index])
        except Exception:
            # The sections themselves are malformed; the dedicated checks
            # already reported the details.
            raise Exception('Error checking extra tags due to previous error')
        # BUG FIX: this raise previously lived inside the `try`, so the
        # except clause swallowed it and replaced it with the misleading
        # "previous error" message above.
        if not api_set.isdisjoint(tag_set):
            raise Exception('Error tags - API should not be in tags')
# region Main wrapper functions
def run_check(path: str, count: int) -> int:
    """Run every README check for one sample folder.

    :param path: path to the sample folder.
    :param count: running number of violations found so far.
    :return: the updated violation count.
    """
    checker = ReadmeStyleChecker(path)
    # The checks are independent; run them all so every violation of a
    # single README is reported in one pass.
    checks = (
        checker.populate_from_readme,         # 1. Parse README sections.
        checker.check_format_heading,         # 2. Headers present/valid/ordered.
        checker.check_format_title_section,   # 3. Title, description, images.
        checker.check_format_apis,            # 4. Relevant API formatting.
        checker.check_format_tags,            # 5. Tags formatting.
        checker.check_redundant_apis_in_tags  # 6. APIs duplicated as tags.
    )
    for check in checks:
        try:
            check()
        except Exception as err:
            count += 1
            print(f'{count}. {checker.folder_path} - {err}')
    return count
def single(path: str):
    """Check one sample folder; raise if any violations were found."""
    # Throw once if there are exceptions.
    if run_check(path, 0) > 0:
        raise Exception('Error(s) occurred during checking a single design.')
def all_designs(path: str):
    """Walk the repo and check every sample under a known category folder."""
    exception_count = 0
    for root, dirs, files in os.walk(path):
        # Only look at samples whose parent folder is a valid category.
        if get_folder_name_from_path(root) not in categories:
            continue
        for dir_name in dirs:
            sample_path = os.path.join(root, dir_name)
            # Omit empty folders - they are omitted by Git.
            entries = [f for f in os.listdir(sample_path)
                       if not f.startswith('.DS_Store')]
            if not entries:
                continue
            exception_count = run_check(sample_path, exception_count)
    # Throw once if there are exceptions.
    if exception_count > 0:
        raise Exception('Error(s) occurred during checking all samples.')
def main():
    """Parse CLI arguments and dispatch to the requested check mode."""
    msg = 'README checker script. Run it against the /arcgis-ios-sdk-samples ' \
          'folder or a single sample folder. ' \
          'On success: Script will exit with zero. ' \
          'On failure: Style violations will print to console and the script ' \
          'will exit with non-zero code.'
    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-a', '--all', help='path to project root folder')
    parser.add_argument('-s', '--single', help='path to a sample folder')
    args = parser.parse_args()
    # The original wrapped each call in `try: ... except Exception as err:
    # raise err`, a no-op that only obscured the traceback; let the
    # exceptions propagate to the __main__ handler directly.
    if args.all:
        all_designs(args.all)
    elif args.single:
        single(args.single)
    else:
        raise Exception('Invalid arguments, abort.')
# endregion
if __name__ == '__main__':
    try:
        main()
    except Exception as error:
        # Print the violation summary so CI logs show what failed.
        print(f'{error}')
        # Abort with failure if any exception occurs.
        exit(1)
| {
"content_hash": "c80306421cbc8329bfca0f865feca0bc",
"timestamp": "",
"source": "github",
"line_count": 445,
"max_line_length": 80,
"avg_line_length": 32.65842696629213,
"alnum_prop": 0.5941650037844904,
"repo_name": "Esri/arcgis-runtime-samples-ios",
"id": "93279aca23d77929211035c3b055bcb28bc92042",
"size": "14557",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "Scripts/CI/README_Metadata_StyleCheck/README_style_checker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7813"
},
{
"name": "Dockerfile",
"bytes": "692"
},
{
"name": "Python",
"bytes": "50850"
},
{
"name": "Ruby",
"bytes": "598"
},
{
"name": "Shell",
"bytes": "4618"
},
{
"name": "Swift",
"bytes": "1837836"
}
],
"symlink_target": ""
} |
from uspeda import app, db, bcrypt, mail
from uspeda.models import Crime, Residence, User, Review
import uspeda.validate as validate
from flask import render_template, jsonify, redirect, request, \
url_for, abort, flash, session
from flask_mail import Message
from itsdangerous import URLSafeTimedSerializer
from datetime import date
import json
import re
@app.route('/')
@app.route('/index')
def index():
    """Render the landing page, including the user's reviews when logged in."""
    authenticated = 'email' in session
    email = session['email'] if authenticated else ''
    reviews = []
    if authenticated:
        # user reviews
        # NOTE(review): assumes the session email always has a User row;
        # a stale session would raise AttributeError here -- confirm.
        user_q = User.query.filter_by(email=email).first()
        reviews = user_q.reviews
    return render_template('index.html', authenticated=authenticated,
                           title='USPeda ' + email, email=email, reviews=reviews)
@app.route('/update')
def update_map():
    """Return all crime points and (optionally filtered) residences as JSON."""
    # cache for crime?
    crime = [{'lat': c.lat, 'lng': c.lng, 'weight': c.weight}
             for c in Crime.query.all()]
    avg_score = request.args.get('avg_score', type=int)
    if avg_score:
        residence_q = Residence.query.filter(
            Residence.avg_score == avg_score).all()
    else:
        residence_q = Residence.query.all()
    residence = [{'lat': r.lat, 'lng': r.lng,
                  'name': r.name, 'owner': r.owner,
                  'avg_score': r.avg_score}
                 for r in residence_q]
    return jsonify(residence=residence, crime=crime)
@app.route('/review', methods=['GET'])
def get_review():
    """Render a single review by id; requires a logged-in session."""
    # Idiomatic membership test (was `if not 'email' in session`).
    if 'email' not in session:
        abort(401)
    email = session['email']
    review_id = request.args.get('rev_id', '', type=int)
    review_q = Review.query.filter_by(id=review_id).first()
    if review_q:
        return render_template('review.html', review=review_q, email=email)
    # Unknown review id: render the template with an empty review.
    return render_template('review.html', review=[])
@app.route('/residence', methods=['GET'])
def get_residence():
    """Render the reviews of the residence at the given coordinates."""
    if 'email' not in session:
        # Unauthenticated visitors get a static notice instead of data.
        return '''<h4>Não autorizado!</h4>
<h5>Entre com o seu email @usp.br para ter acesso.</h5>'''
    lat = request.args.get('lat', '', type=float)
    lng = request.args.get('lng', '', type=float)
    residence_q = Residence.query.filter_by(lat=lat, lng=lng).first()
    reviews = residence_q.reviews if residence_q else None
    if reviews:
        return render_template('residence.html', reviews=reviews)
    return render_template('residence.html', reviews=[])
@app.route('/options', methods=['GET'])
def get_options():
    """Render the right-click context menu for the map; 401 when not logged in."""
    if 'email' not in session:
        return abort(401)
    return render_template('/options.html')
@app.route('/register', methods=['POST'])
def register():
    '''
    Register a new user

    Validates the posted form, emails a confirmation link and persists the
    account; flashes either a success notice or the validation error, then
    redirects back to the index page.
    '''
    # The previous version extracted email/password/password2 into locals
    # that were never used; validation works off the raw form dict.
    data = request.form
    error = validate.user_register(data)
    if not error:
        # send confirmation email before persisting the new account
        mail_confirmation(data['email'])
        user = User(data['email'], data['password'])
        db.session.add(user)
        db.session.commit()
        flash('E-mail de confirmação enviado, verifique!')
    else:
        flash(error)
    return redirect(url_for('index'))
def mail_confirmation(email):
    """Email a signed confirmation link to *email* (consumed by confirm_reg)."""
    serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
    code = serializer.dumps(email, salt=app.config['SECURITY_PASSWORD_SALT'])
    msg = Message("USPeda.me - Confirmação de registro",
                  sender="do-not-reply@uspeda.me",
                  recipients=[email])
    # The link text and the href are the same signed URL.
    link = "http://177.81.72.220:5000/confirmar?cod={0}".format(code)
    msg.html = ("Clique no link abaixo para confirmar o seu registro:"
                "<p><a href={0}>{0}</a></p>".format(link))
    mail.send(msg)
@app.route('/confirmar', methods=['GET'])
def confirm_reg():
    '''
    Confirm registration using user's received code

    Reads the signed token from the ``cod`` query arg, validates it with a
    1 hour max age, marks the user confirmed and starts a session.
    '''
    code = request.args.get('cod', '').strip()
    if code:
        serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
        try:
            email = serializer.loads(code,
                                     salt=app.config['SECURITY_PASSWORD_SALT'],
                                     max_age=3600)
        except Exception:
            # was a bare `except:`; itsdangerous raises BadSignature /
            # SignatureExpired here - never swallow SystemExit et al.
            flash('ERRO: Código expirado!')
            return redirect(url_for('index'))
        user_q = User.query.filter_by(email=email).first()
        if user_q is None:
            # Valid token but no matching account: avoid AttributeError.
            flash('ERRO: Código expirado!')
            return redirect(url_for('index'))
        user_q.confirmed = True
        # Persist the confirmed flag (register() commits the same way).
        db.session.commit()
        session['email'] = email
        session['user_id'] = user_q.id
        user_q.update_last_seen()
    return redirect(url_for('index'))
@app.route('/login', methods=['POST'])
def login():
    '''
    Log user in and start session

    Validates the posted credentials; on success stores email and user id in
    the session, otherwise flashes the reason. Always redirects to index.
    '''
    data = request.form.to_dict()
    error = validate.user_login(data)
    if not error:
        # Debug print() calls removed - they leaked user emails to stdout.
        user_q = User.query.filter_by(email=data['email']).first()
        if user_q is None:
            # NOTE(review): validate.user_login presumably checks existence;
            # guard anyway so a missing user can't raise AttributeError.
            flash('Usuário e/ou senha incorretos.')
        elif not user_q.is_confirmed:
            flash('Verifique e-mail de confirmação.')
        elif user_q.check_password(data['password']):
            # set session
            session['email'] = data['email']
            session['user_id'] = user_q.id
            user_q.update_last_seen()
        else:
            flash('Usuário e/ou senha incorretos.')
    else:
        flash(error)
    return redirect(url_for('index'))
@app.route('/logout')
def logout():
    '''
    Log user out
    '''
    session.pop('email', None)
    # Also drop user_id, which login/confirm_reg set alongside email,
    # so no stale identifier lingers in the session.
    session.pop('user_id', None)
    return redirect(url_for('index'))
@app.route('/update_review', methods=['POST'])
def update_review():
    '''
    Update a review with a new text

    Expects JSON with ``review_id``, ``review_text`` and ``score``; only the
    review's author may edit it. Returns the review id as JSON on success.
    '''
    if 'email' not in session:
        abort(401)
    # silent=True returns None on malformed JSON; the previous try/except
    # referenced werkzeug's BadRequest without importing it (NameError).
    data = request.get_json(silent=True)
    if data is None:
        return abort(400)
    error = validate.review(data)
    if not error:
        review_id = data['review_id']
        review_text = data['review_text']
        score = data['score']
        review_q = Review.query.filter_by(id=review_id).first()
        if review_q:
            if review_q.user.email != session['email']:
                error = 'User do not have permission to edit review with ID: {0}'.format(review_id)
                flash(error)
                abort(401)
            review_q.review_text = review_text
            review_q.score = score
            db.session.commit()
            return jsonify({'review_id': review_q.id})
        # was `.format(res_id)` - a NameError; the looked-up id is review_id
        error = 'Incorrect review ID: {0}'.format(review_id)
        flash(error)
        return abort(401)
    # Mirror add_review: report validation errors instead of falling off the
    # end of the view (which returned None and produced a 500).
    flash(error)
    return jsonify(error=error)
@app.route('/add_review', methods=['POST'])
def add_review():
    '''
    Add a new review text and score for a residence
    If the residence is new create a new one, otherwise
    just add to existing one
    '''
    if 'email' not in session:
        return abort(401)
    user = session['email'].split('@')[0]
    # silent=True returns None on malformed JSON; the previous try/except
    # referenced werkzeug's BadRequest without importing it (NameError).
    data = request.get_json(silent=True)
    if data is None:
        return abort(400)
    error = validate.review(data)
    if not error:
        # NOTE(review): assumes res_id arrives as a string in the JSON body
        # (it is .strip()ed) - confirm against the client code.
        res_id = data.get('res_id', '').strip()
        if res_id:
            # adding review to existing residence?
            residence = Residence.query.filter_by(id=res_id).first()
            if residence:
                new_res = False
            else:
                error = 'Incorrect residence ID: {0}'.format(res_id)
                flash(error)
                abort(400)
        else:
            new_res = True
            residence = Residence(data['lat'], data['lng'], data['res_name'],
                                  data['owner'], data['address'], data['zipcode'])
            db.session.add(residence)
            db.session.commit()
        review = Review(data['review_text'], data['score'])
        review.residence_id = residence.id
        review.user_id = session['user_id']
        db.session.add(review)
        db.session.commit()
        # keep the residence's cached average score in sync
        sum_scores = sum(r.score for r in residence.reviews)
        residence.update_avg(sum_scores)
        review = {'new_res': new_res, 'res_name': residence.name,
                  'owner': residence.owner, 'author': user,
                  'rev_id': review.id, 'date': str(review.date_added),
                  'score': review.score, 'lat': residence.lat,
                  'lng': residence.lng}
        return jsonify(review=review)
    flash(error)
    return jsonify(error=error)
@app.route('/add_crime', methods=['POST'])
def add_crime():
    '''
    Add a crime occurence to the map

    Expects JSON with ``lat``, ``lng`` and ``weight``; echoes the stored
    point back as JSON on success.
    '''
    if 'email' not in session:
        return abort(401)
    # silent=True returns None on malformed JSON; the previous try/except
    # referenced werkzeug's BadRequest without importing it (NameError).
    data = request.get_json(silent=True)
    if data is None:
        return abort(400)
    error = validate.crime(data)
    if not error:
        point = Crime(data['lat'], data['lng'], data['weight'])
        db.session.add(point)
        db.session.commit()
        crime = {'lat': data['lat'], 'lng': data['lng'], 'weight': data['weight']}
        return jsonify(crime=crime)
    flash(error)
    return jsonify(error=error)
| {
"content_hash": "4c168bb13fa5b548debe320d909c5e90",
"timestamp": "",
"source": "github",
"line_count": 296,
"max_line_length": 99,
"avg_line_length": 31.27027027027027,
"alnum_prop": 0.5707649092480553,
"repo_name": "stutumi/USPeda",
"id": "b04c6bca30702792221b06e619d2cc22b4fc8558",
"size": "9265",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uspeda/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "507"
},
{
"name": "HTML",
"bytes": "20370"
},
{
"name": "JavaScript",
"bytes": "17768"
},
{
"name": "Python",
"bytes": "17059"
}
],
"symlink_target": ""
} |
from beritest_tools import BaseBERITestCase
class test_raw_arithmetic_combo(BaseBERITestCase):
    """Check the saved-register results of the raw arithmetic combo test.

    Each assertion compares one MIPS saved register (s0..s5) against the
    value the corresponding stage of the assembly test is expected to leave.
    """
    def test_a0(self):
        '''Test that stages of the arithmetic combo test produced the correct result.'''
        self.assertRegisterEqual(self.MIPS.s0, 0x0000002800000014, "Stage 1 Incorrect")
        self.assertRegisterEqual(self.MIPS.s1, 0x0000000000000001, "Stage 2 Incorrect")
        self.assertRegisterEqual(self.MIPS.s2, 0xffffffff9c320384, "Stage 3 Incorrect")
        self.assertRegisterEqual(self.MIPS.s3, 0x00FFFFFFFFFF1E6F, "Stage 4 Incorrect")
        self.assertRegisterEqual(self.MIPS.s4, 0xFFFFFFFFFFC3BE75, "Stage 5 Incorrect")
        self.assertRegisterEqual(self.MIPS.s5, 0x0, "Stage 6 Incorrect")
| {
"content_hash": "1f5dc5467ae98d7d9e8e3ad926af3c12",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 88,
"avg_line_length": 60.333333333333336,
"alnum_prop": 0.7430939226519337,
"repo_name": "8l/beri",
"id": "7117d221c785109110bf4228b9ffdfa2615b5951",
"size": "1862",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cheritest/trunk/tests/alu/test_raw_arithmetic_combo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1629022"
},
{
"name": "Bluespec",
"bytes": "2336405"
},
{
"name": "C",
"bytes": "1058899"
},
{
"name": "C++",
"bytes": "1864"
},
{
"name": "Groff",
"bytes": "14381"
},
{
"name": "Haskell",
"bytes": "11711"
},
{
"name": "Lex",
"bytes": "2894"
},
{
"name": "Makefile",
"bytes": "242450"
},
{
"name": "Mathematica",
"bytes": "291"
},
{
"name": "Objective-C",
"bytes": "2387"
},
{
"name": "OpenEdge ABL",
"bytes": "568"
},
{
"name": "Perl",
"bytes": "19159"
},
{
"name": "Python",
"bytes": "1491002"
},
{
"name": "Shell",
"bytes": "91130"
},
{
"name": "SystemVerilog",
"bytes": "12058"
},
{
"name": "Tcl",
"bytes": "132818"
},
{
"name": "TeX",
"bytes": "4996"
},
{
"name": "Verilog",
"bytes": "125674"
},
{
"name": "Yacc",
"bytes": "5871"
}
],
"symlink_target": ""
} |
"""
A base class for creating CEA plugins. Subclass this class in your own namespace to become a CEA plugin.
"""
import importlib
import os
import configparser
from typing import Generator, Sequence
import yaml
import inspect
import cea.schemas
import cea.config
import cea.plots.categories
import cea.inputlocator
import warnings
from cea.utilities import identifier
# Standard CEA module metadata (authorship / licensing for this module).
__author__ = "Daren Thomas"
__copyright__ = "Copyright 2020, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Daren Thomas"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
class CeaPlugin(object):
    """
    A CEA Plugin defines a list of scripts and a list of plots - the CEA uses this to populate the GUI
    and other interfaces. In addition, any input- and output files need to be defined.

    Plugin resources (``scripts.yml``, ``plots.yml``, ``schemas.yml`` and
    ``plugin.config``) are looked up next to the subclass's module file.
    """
    @property
    def scripts(self):
        """Return the scripts.yml dictionary ({} when the file is absent)."""
        scripts_yml = os.path.join(os.path.dirname(inspect.getmodule(self).__file__), "scripts.yml")
        if not os.path.exists(scripts_yml):
            return {}
        with open(scripts_yml, "r") as scripts_yml_fp:
            scripts = yaml.safe_load(scripts_yml_fp)
        return scripts
    @property
    def plot_categories(self):
        """
        Return a list of :py:class`cea.plots.PlotCategory` instances to add to the GUI. The default implementation
        uses the ``plots.yml`` file to create PluginPlotCategory instances that use PluginPlotBase to provide a
        simplified plot mechanism using cufflinks_
        .. _cufflinks: https://plotly.com/python/cufflinks/
        """
        plots_yml = os.path.join(os.path.dirname(inspect.getmodule(self).__file__), "plots.yml")
        if not os.path.exists(plots_yml):
            # BUG FIX: the normal path returns a list of categories; the old
            # code returned {} here - return an empty list for consistency.
            return []
        with open(plots_yml, "r") as plots_yml_fp:
            # Fall back to the pure-python loader when PyYAML was built
            # without libyaml (yaml.CLoader is absent in that case).
            categories = yaml.load(plots_yml_fp, Loader=getattr(yaml, "CLoader", yaml.Loader))
        return [PluginPlotCategory(category_label, categories[category_label], self)
                for category_label in categories.keys()]
    @property
    def schemas(self):
        """Return the schemas dict for this plugin - it should be in the same format as ``cea/schemas.yml``
        (You don't actually have to implement this for your own plugins - having a ``schemas.yml`` file in the same
        folder as the plugin class will trigger the default behavior)
        """
        schemas_yml = os.path.join(os.path.dirname(inspect.getmodule(self).__file__), "schemas.yml")
        if not os.path.exists(schemas_yml):
            return {}
        with open(schemas_yml, "r") as schemas_yml_fp:
            # Same libyaml fallback as plot_categories.
            schemas = yaml.load(schemas_yml_fp, Loader=getattr(yaml, "CLoader", yaml.Loader))
        return schemas
    @property
    def config(self):
        """
        Return the configuration for this plugin - the `cea.config.Configuration` object will include these.
        The format is expected to be the same format as `default.config` in the CEA.
        :rtype: configparser.ConfigParser
        """
        plugin_config = os.path.join(os.path.dirname(inspect.getmodule(self).__file__), "plugin.config")
        parser = configparser.ConfigParser()
        if not os.path.exists(plugin_config):
            return parser
        parser.read(plugin_config)
        return parser
    def __str__(self):
        """To enable encoding in cea.config.PluginListParameter, return the fqname of the class"""
        return "{module}.{name}".format(module=self.__class__.__module__, name=self.__class__.__name__)
class PluginPlotCategory(cea.plots.categories.PlotCategory):
    """
    Normally, a PlotCategory reads it's plot classes by traversing a folder structure and importing all modules found
    there. The PluginPlotCategory works just like a PlotCategory (i.e. compatible with the CEA GUI / Dashboard) but
    the category information and plots are loaded from a ``plots.yml`` file. Plugin Plots are a bit restricted (so
    you might want to implement your plots directly the way they are implemented in CEA) but instead they are much
    easier to understand as they use the cufflinks library.
    """
    def __init__(self, category_label, plots, plugin):
        """Ignore calling super class' constructor as we use a totally different mechanism for building plots here
        :param str category_label: The category label shown in the interface
        :param Sequence[dict] plots: A dictionary mapping plot labels to plot definitions
        """
        # NOTE: super().__init__ is deliberately NOT called (see docstring).
        self.label = category_label
        self.name = identifier(category_label)
        self.plot_configs = plots
        self.plugin = plugin
    @property
    def plots(category):
        """
        Return a list of Plot classes to be used in the Dashboard.
        :rtype: Generator[PluginPlotBase]

        Note: the receiver is named ``category`` (not ``self``) so the class
        body and closure below can reference it unambiguously.
        """
        for plot_config in category.plot_configs:
            plot_label = plot_config["label"]
            plugin = category.plugin
            # A fresh Plot class is generated per plot_config entry; the class
            # attributes are evaluated now (at class-creation time), so each
            # yielded class is bound to its own plot_config.
            class Plot(PluginPlotBase):
                name = plot_label
                category_name = category.name
                category_path = category.name
                expected_parameters = plot_config.get("expected-parameters", {})
                # every plot needs a scenario to locate its data
                if not "scenario-name" in expected_parameters:
                    expected_parameters["scenario-name"] = "general:scenario-name"
                def __init__(self, project, parameters, cache):
                    super(Plot, self).__init__(project, parameters, cache, plugin, plot_config)
                    # for some reason these are being over-written in the call to super
                    self.category_name = category.name
                    self.category_path = category.name
            # Plot.__name__ = identifier(plot_label, sep="_")
            yield Plot
class PluginPlotBase(cea.plots.PlotBase):
    """
    A simplified version of cea.plots.PlotBase that is configured with the ``plots.yml`` entries.
    """
    def __init__(self, project, parameters, cache, plugin, plot_config):
        """
        :param project: path to the CEA project folder
        :param dict parameters: plot parameters (must include 'scenario-name')
        :param cache: plot cache passed through to PlotBase
        :param plugin: the CeaPlugin instance that defines this plot
        :param dict plot_config: one entry from the plugin's plots.yml
        """
        super(PluginPlotBase, self).__init__(project, parameters, cache)
        self.plugin = plugin
        self.plot_config = plot_config
        # Resolve the locator method named in plots.yml ("data.location").
        self.locator_method = getattr(self.locator, self.plot_config["data"]["location"])  # type: cea.schemas.SchemaIo
        # kwargs for the locator method, pulled from the plot parameters.
        self.locator_kwargs = {arg: self.parameters[arg] for arg in self.plot_config["data"].get("args", [])}
        self.input_files = [(self.locator_method, self.locator_kwargs)]
    def missing_input_files(self):
        """
        Return the list of missing input files for this plot - overriding cea.plots.PlotBase.missing_input_files
        because we're now moving to kwargs for locator methods.
        Also, PluginPlotBase only uses one input file.
        """
        result = []
        if not os.path.exists(self.locator_method(**self.locator_kwargs)):
            result.append((self.locator_method, self.locator_kwargs.values()))
        return result
    @property
    def title(self):
        """The plot title shown in the GUI (the plots.yml label)."""
        return self.plot_config["label"]
    @property
    def locator(self):
        """
        Make sure the plot's input-locator is aware of the plugin that defines it.
        NOTE: We don't currently support depending on other plugins.
        :rtype: cea.inputlocator.InputLocator
        """
        try:
            scenario = os.path.join(self.project, self.parameters['scenario-name'])
            return cea.inputlocator.InputLocator(scenario=scenario, plugins=[self.plugin])
        except KeyError as error:
            raise KeyError("{key} not found in {parameters}".format(key=str(error), parameters=self.parameters))
    @property
    def layout(self):
        """The layout for plugin plots needs to conform to the input parameters to iplot (see cufflinks docs)"""
        return self.plot_config.get("layout", {})
    def _plot_div_producer(self):
        """Use the plot_config to create a plot with cufflinks"""
        import cufflinks
        import plotly.offline
        cufflinks.go_offline()
        # load the data
        df = self.locator_method.read(**self.locator_kwargs)
        if "index" in self.plot_config["data"]:
            df = df.set_index(self.plot_config["data"]["index"])
        if "fields" in self.plot_config["data"]:
            df = df[self.plot_config["data"]["fields"]]
        # rename the columns (for the legend)
        schema = self.locator_method.schema["schema"]["columns"]
        columns_mapping = {c: schema[c]["description"] for c in schema.keys()}
        df = df.rename(columns=columns_mapping)
        # colors need to be re-mapped because we renamed the columns
        colors = {columns_mapping[k]: v for k, v in self.locator_method.colors().items()}
        fig = df.iplot(asFigure=True, colors=colors, theme="white", **self.layout)
        div = plotly.offline.plot(fig, output_type='div', include_plotlyjs=False, show_link=False)
        return div
    def table_div(self):
        # Plugin plots render no data table.
        pass
    def calc_graph(self):
        raise AssertionError("cea.plots.PlotBase.calc_graph should not be part of the abstract interface")
    def calc_table(self):
        raise DeprecationWarning("cea.plots.PlotBase.calc_table is not used anymore and will be removed in future")
    @property
    def output_path(self):
        """override the cea.plots.PlotBase.output_path"""
        file_name = self.id()
        return self.locator.get_timeseries_plots_file(file_name, self.category_path)
# NOTE(review): this is a developer smoke-test, explicitly flagged for
# removal by the FIXME below; it exercises the first configured plugin's
# first plot and has no effect when the module is imported.
if __name__ == "__main__":
    # try to plot the first plugin found
    # FIXME: remove this before commit
    import cea.config
    import cea.plots.cache
    config = cea.config.Configuration()
    cache = cea.plots.cache.NullPlotCache()
    plugin = config.plugins[0]
    category = list(plugin.plot_categories)[0]
    plot_class = list(category.plots)[0]
    print(plot_class.expected_parameters)
    print(plot_class.name)
    print(plot_class.category_path)
    plot = plot_class(config.project, {"scenario-name": config.scenario_name}, cache)
    print(plot.category_path)
    print(plot.plot(auto_open=True))
def instantiate_plugin(plugin_fqname):
    """Return a new CeaPlugin based on it's fully qualified name - this is how the config object creates plugins

    :param str plugin_fqname: fully qualified class name, e.g. ``"pkg.module.MyPlugin"``
    :return: a new instance of the named class, or None (with a warning) when
        the module or class could not be loaded/instantiated.
    """
    try:
        plugin_path = plugin_fqname.split(".")
        plugin_module = ".".join(plugin_path[:-1])
        plugin_class = plugin_path[-1]
        module = importlib.import_module(plugin_module)
        instance = getattr(module, plugin_class)()
        return instance
    except Exception as ex:
        # BUG FIX: was `except BaseException`, which also swallowed
        # KeyboardInterrupt and SystemExit; catch only regular errors.
        warnings.warn(f"Could not instantiate plugin {plugin_fqname} ({ex})")
        return None
def add_plugins(default_config, user_config):
    """
    Patch in the plugin configurations during __init__ and __setstate__
    :param configparser.ConfigParser default_config:
    :param configparser.ConfigParser user_config:
    :return: (modifies default_config and user_config in-place)
    :rtype: None
    """
    # The list of plugin fqnames lives in the user config's [general] section.
    plugin_fqnames = cea.config.parse_string_to_list(user_config.get("general", "plugins"))
    for plugin in [instantiate_plugin(plugin_fqname) for plugin_fqname in plugin_fqnames]:
        if plugin is None:
            # plugin could not be instantiated
            continue
        for section_name in plugin.config.sections():
            # Plugins may only ADD sections - clashing with a built-in (or
            # another plugin's) section is an error.
            if section_name in default_config.sections():
                raise ValueError("Plugin tried to redefine config section {section_name}".format(
                    section_name=section_name))
            default_config.add_section(section_name)
            if not user_config.has_section(section_name):
                user_config.add_section(section_name)
            for option_name in plugin.config.options(section_name):
                # Same rule per-option: no redefining existing parameters.
                if option_name in default_config.options(section_name):
                    raise ValueError("Plugin tried to redefine parameter {section_name}:{option_name}".format(
                        section_name=section_name, option_name=option_name))
                default_config.set(section_name, option_name, plugin.config.get(section_name, option_name))
                # Dotted options (e.g. "x.type") are metadata - only copy
                # plain options into the user config, and never overwrite.
                if "." not in option_name and not user_config.has_option(section_name, option_name):
                    user_config.set(section_name, option_name, default_config.get(section_name, option_name))
"content_hash": "cee1cac3903ca3881a94689d66e68c67",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 125,
"avg_line_length": 41.1438127090301,
"alnum_prop": 0.647943423833523,
"repo_name": "architecture-building-systems/CityEnergyAnalyst",
"id": "69398e200731e9bf6d5644cc4cc46067c015bc7c",
"size": "12302",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cea/plugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5622"
},
{
"name": "Dockerfile",
"bytes": "2277"
},
{
"name": "HTML",
"bytes": "47667"
},
{
"name": "Jupyter Notebook",
"bytes": "409952"
},
{
"name": "NSIS",
"bytes": "9782"
},
{
"name": "Python",
"bytes": "2681047"
},
{
"name": "Shell",
"bytes": "8768"
}
],
"symlink_target": ""
} |
import hashlib
import uuid
from oslo_config import cfg
import webob
from keystone.common import authorization
from keystone.common import tokenless_auth
from keystone.contrib.federation import constants as federation_constants
from keystone import exception
from keystone import middleware
from keystone.tests import unit as tests
from keystone.tests.unit import mapping_fixtures
from keystone.tests.unit import test_backend_sql
CONF = cfg.CONF
def make_request(**kwargs):
    """Build a blank webob request at '/', honoring a few shortcut kwargs.

    ``method`` (default GET), ``body`` and ``accept`` are applied to the
    request; any remaining kwargs go to webob.Request.blank.
    """
    accept_header = kwargs.pop('accept', None)
    http_method = kwargs.pop('method', 'GET')
    payload = kwargs.pop('body', None)
    request = webob.Request.blank('/', **kwargs)
    request.method = http_method
    if payload is not None:
        request.body = payload
    if accept_header is not None:
        request.accept = accept_header
    return request
def make_response(**kwargs):
    """Build a webob.Response; only the ``body`` kwarg is honored."""
    return webob.Response(kwargs.pop('body', None))
class TokenAuthMiddlewareTest(tests.TestCase):
    """TokenAuthMiddleware copies the auth-token header into the context."""
    def test_request(self):
        """The X-Auth-Token header value lands in context['token_id']."""
        req = make_request()
        req.headers[middleware.AUTH_TOKEN_HEADER] = 'MAGIC'
        middleware.TokenAuthMiddleware(None).process_request(req)
        context = req.environ[middleware.CONTEXT_ENV]
        self.assertEqual('MAGIC', context['token_id'])
class AdminTokenAuthMiddlewareTest(tests.TestCase):
    """AdminTokenAuthMiddleware flags requests carrying the admin token."""
    def test_request_admin(self):
        """The configured admin token marks the context as admin."""
        req = make_request()
        req.headers[middleware.AUTH_TOKEN_HEADER] = CONF.admin_token
        middleware.AdminTokenAuthMiddleware(None).process_request(req)
        context = req.environ[middleware.CONTEXT_ENV]
        self.assertTrue(context['is_admin'])
    def test_request_non_admin(self):
        """Any other token leaves the context non-admin."""
        req = make_request()
        req.headers[middleware.AUTH_TOKEN_HEADER] = 'NOT-ADMIN'
        middleware.AdminTokenAuthMiddleware(None).process_request(req)
        context = req.environ[middleware.CONTEXT_ENV]
        self.assertFalse(context['is_admin'])
class PostParamsMiddlewareTest(tests.TestCase):
    """PostParamsMiddleware parses form-encoded POST bodies into PARAMS_ENV."""
    def test_request_with_params(self):
        """A urlencoded body becomes a dict in the params environment."""
        req = make_request(body="arg1=one", method='POST')
        middleware.PostParamsMiddleware(None).process_request(req)
        params = req.environ[middleware.PARAMS_ENV]
        self.assertEqual({"arg1": "one"}, params)
class JsonBodyMiddlewareTest(tests.TestCase):
    """JsonBodyMiddleware parses JSON request bodies into PARAMS_ENV."""
    def test_request_with_params(self):
        """A JSON object body is parsed into the params environment."""
        req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
                           content_type='application/json',
                           method='POST')
        middleware.JsonBodyMiddleware(None).process_request(req)
        params = req.environ[middleware.PARAMS_ENV]
        self.assertEqual({"arg1": "one", "arg2": ["a"]}, params)
    def test_malformed_json(self):
        """Malformed JSON yields a 400 response."""
        req = make_request(body='{"arg1": "on',
                           content_type='application/json',
                           method='POST')
        resp = middleware.JsonBodyMiddleware(None).process_request(req)
        self.assertEqual(400, resp.status_int)
    def test_not_dict_body(self):
        """A JSON body that is not an object yields 400 with an explanation."""
        req = make_request(body='42',
                           content_type='application/json',
                           method='POST')
        resp = middleware.JsonBodyMiddleware(None).process_request(req)
        self.assertEqual(400, resp.status_int)
        # assertIn gives a clearer failure message than assertTrue(x in y).
        self.assertIn('valid JSON object', resp.json['error']['message'])
    def test_no_content_type(self):
        """A JSON body is parsed even without a content type."""
        req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
                           method='POST')
        middleware.JsonBodyMiddleware(None).process_request(req)
        params = req.environ[middleware.PARAMS_ENV]
        self.assertEqual({"arg1": "one", "arg2": ["a"]}, params)
    def test_unrecognized_content_type(self):
        """A non-JSON content type with a body yields a 400 response."""
        req = make_request(body='{"arg1": "one", "arg2": ["a"]}',
                           content_type='text/plain',
                           method='POST')
        resp = middleware.JsonBodyMiddleware(None).process_request(req)
        self.assertEqual(400, resp.status_int)
    def test_unrecognized_content_type_without_body(self):
        """A non-JSON content type without a body leaves params empty."""
        req = make_request(content_type='text/plain',
                           method='GET')
        middleware.JsonBodyMiddleware(None).process_request(req)
        params = req.environ.get(middleware.PARAMS_ENV, {})
        self.assertEqual({}, params)
class AuthContextMiddlewareTest(test_backend_sql.SqlTests):
def setUp(self):
super(AuthContextMiddlewareTest, self).setUp()
self.client_issuer = uuid.uuid4().hex
self.untrusted_client_issuer = uuid.uuid4().hex
self.trusted_issuer = self.client_issuer
self.config_fixture.config(group='tokenless_auth',
trusted_issuer=[self.trusted_issuer])
# This idp_id is calculated based on
# sha256(self.client_issuer)
hashed_idp = hashlib.sha256(self.client_issuer)
self.idp_id = hashed_idp.hexdigest()
self._load_sample_data()
    def _load_sample_data(self):
        """Create the domain/project/user/IdP/role/group fixtures and grants
        that the tokenless-auth tests below rely on."""
        self.domain_id = uuid.uuid4().hex
        self.domain_name = uuid.uuid4().hex
        self.project_id = uuid.uuid4().hex
        self.project_name = uuid.uuid4().hex
        self.user_name = uuid.uuid4().hex
        self.user_password = uuid.uuid4().hex
        self.user_email = uuid.uuid4().hex
        # 'x509' matches the protocol used by the tokenless (certificate) flow
        self.protocol_id = 'x509'
        self.role_id = uuid.uuid4().hex
        self.role_name = uuid.uuid4().hex
        # for ephemeral user
        self.group_name = uuid.uuid4().hex
        # 1) Create a domain for the user.
        self.domain = {
            'description': uuid.uuid4().hex,
            'enabled': True,
            'id': self.domain_id,
            'name': self.domain_name,
        }
        self.resource_api.create_domain(self.domain_id, self.domain)
        # 2) Create a project for the user.
        self.project = {
            'description': uuid.uuid4().hex,
            'domain_id': self.domain_id,
            'enabled': True,
            'id': self.project_id,
            'name': self.project_name,
        }
        self.resource_api.create_project(self.project_id, self.project)
        # 3) Create a user in new domain.
        self.user = {
            'name': self.user_name,
            'domain_id': self.domain_id,
            'project_id': self.project_id,
            'password': self.user_password,
            'email': self.user_email,
        }
        self.user = self.identity_api.create_user(self.user)
        # Add IDP (its id is the sha256 of the trusted issuer, see setUp)
        self.idp = self._idp_ref(id=self.idp_id)
        self.federation_api.create_idp(self.idp['id'],
                                       self.idp)
        # Add a role
        self.role = {
            'id': self.role_id,
            'name': self.role_name,
        }
        self.role_api.create_role(self.role_id, self.role)
        # Add a group
        self.group = {
            'name': self.group_name,
            'domain_id': self.domain_id,
        }
        self.group = self.identity_api.create_group(self.group)
        # Assign a role to the user on a project
        self.assignment_api.add_role_to_user_and_project(
            user_id=self.user['id'],
            tenant_id=self.project_id,
            role_id=self.role_id)
        # Assign a role to the group on a project
        self.assignment_api.create_grant(
            role_id=self.role_id,
            group_id=self.group['id'],
            project_id=self.project_id)
    def _load_mapping_rules(self, rules):
        """Install a federation mapping with *rules* and bind it to the IdP
        via an x509 protocol entry.

        :param dict rules: mapping rules (as in keystone mapping fixtures)
        """
        # Add a mapping
        self.mapping = self._mapping_ref(rules=rules)
        self.federation_api.create_mapping(self.mapping['id'],
                                           self.mapping)
        # Add protocols (the protocol id must match setUp's 'x509')
        self.proto_x509 = self._proto_ref(mapping_id=self.mapping['id'])
        self.proto_x509['id'] = self.protocol_id
        self.federation_api.create_protocol(self.idp['id'],
                                            self.proto_x509['id'],
                                            self.proto_x509)
def _idp_ref(self, id=None):
idp = {
'id': id or uuid.uuid4().hex,
'enabled': True,
'description': uuid.uuid4().hex
}
return idp
def _proto_ref(self, mapping_id=None):
proto = {
'id': uuid.uuid4().hex,
'mapping_id': mapping_id or uuid.uuid4().hex
}
return proto
def _mapping_ref(self, rules=None):
if rules is None:
mapped_rules = {}
else:
mapped_rules = rules.get('rules', {})
return {
'id': uuid.uuid4().hex,
'rules': mapped_rules
}
    def _assert_tokenless_auth_context(self, context, ephemeral_user=False):
        """Assert the auth context produced by tokenless auth is well-formed.

        :param context: the auth context dict built by the middleware
        :param bool ephemeral_user: when True, expect group/protocol/IdP
            attributes of an ephemeral (mapped) user instead of a user id
        """
        self.assertIsNotNone(context)
        self.assertEqual(self.project_id, context['project_id'])
        self.assertIn(self.role_name, context['roles'])
        if ephemeral_user:
            # ephemeral users carry group membership and federation info
            self.assertEqual(self.group['id'], context['group_ids'][0])
            self.assertEqual('ephemeral',
                             context[federation_constants.PROTOCOL])
            self.assertEqual(self.idp_id,
                             context[federation_constants.IDENTITY_PROVIDER])
        else:
            self.assertEqual(self.user['id'], context['user_id'])
    def _create_context(self, request, mapping_ref=None,
                        exception_expected=False):
        """Builds the auth context from the given arguments.
        auth context will be returned from the AuthContextMiddleware based on
        what is being passed in the given request and what mapping is being
        setup in the backend DB.
        :param request: HTTP request
        :param mapping_ref: A mapping in JSON structure will be setup in the
            backend DB for mapping an user or a group.
        :param exception_expected: Sets to True when an exception is expected
            to raised based on the given arguments.
        :returns: context an auth context contains user and role information
        :rtype: dict
        """
        if mapping_ref:
            self._load_mapping_rules(mapping_ref)
        if not exception_expected:
            (middleware.AuthContextMiddleware('Tokenless_auth_test').
                process_request(request))
            context = request.environ.get(authorization.AUTH_CONTEXT_ENV)
        else:
            # Return the middleware itself (not a context) so the caller can
            # assertRaises around its process_request().
            context = middleware.AuthContextMiddleware('Tokenless_auth_test')
        return context
    def test_context_already_exists(self):
        """An auth context already in the environ is left untouched."""
        req = make_request()
        token_id = uuid.uuid4().hex
        req.environ[authorization.AUTH_CONTEXT_ENV] = {'token_id': token_id}
        context = self._create_context(request=req)
        self.assertEqual(token_id, context['token_id'])
    def test_not_applicable_to_token_request(self):
        """Tokenless auth is skipped for POST /auth/tokens requests."""
        env = {}
        env['PATH_INFO'] = '/auth/tokens'
        env['REQUEST_METHOD'] = 'POST'
        req = make_request(environ=env)
        context = self._create_context(request=req)
        self.assertIsNone(context)
    def test_no_tokenless_attributes_request(self):
        """No certificate/scope attributes at all: no context is built."""
        req = make_request()
        context = self._create_context(request=req)
        self.assertIsNone(context)
    def test_no_issuer_attribute_request(self):
        """A scope without an issuer attribute yields no context."""
        env = {}
        env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
        req = make_request(environ=env)
        context = self._create_context(request=req)
        self.assertIsNone(context)
    def test_has_only_issuer_and_project_name_request(self):
        """Issuer plus only a project name (no domain) is a ValidationError."""
        env = {}
        # SSL_CLIENT_I_DN is the attribute name that wsgi env
        # references to issuer of the client certificate.
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex
        req = make_request(environ=env)
        context = self._create_context(request=req,
                                       exception_expected=True)
        self.assertRaises(exception.ValidationError,
                          context.process_request,
                          req)
    def test_has_only_issuer_and_project_domain_name_request(self):
        """Issuer plus only a project domain name is a ValidationError."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex
        req = make_request(environ=env)
        context = self._create_context(request=req,
                                       exception_expected=True)
        self.assertRaises(exception.ValidationError,
                          context.process_request,
                          req)
    def test_has_only_issuer_and_project_domain_id_request(self):
        """Issuer plus only a project domain id is a ValidationError."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_DOMAIN_ID'] = uuid.uuid4().hex
        req = make_request(environ=env)
        context = self._create_context(request=req,
                                       exception_expected=True)
        self.assertRaises(exception.ValidationError,
                          context.process_request,
                          req)
    def test_missing_both_domain_and_project_request(self):
        """Issuer with neither domain nor project scope is a ValidationError."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        req = make_request(environ=env)
        context = self._create_context(request=req,
                                       exception_expected=True)
        self.assertRaises(exception.ValidationError,
                          context.process_request,
                          req)
    def test_empty_trusted_issuer_list(self):
        """With no trusted issuers configured, no context is built."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
        req = make_request(environ=env)
        self.config_fixture.config(group='tokenless_auth',
                                   trusted_issuer=[])
        context = self._create_context(request=req)
        self.assertIsNone(context)
    def test_client_issuer_not_trusted(self):
        """A certificate from an untrusted issuer yields no context."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.untrusted_client_issuer
        env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex
        req = make_request(environ=env)
        context = self._create_context(request=req)
        self.assertIsNone(context)
    def test_proj_scope_with_proj_id_and_proj_dom_id_success(self):
        """Project id + project domain id scope builds a valid context."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_ID'] = self.project_id
        env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
        # SSL_CLIENT_USER_NAME and SSL_CLIENT_DOMAIN_NAME are the types
        # defined in the mapping that will map to the user name and
        # domain name
        env['SSL_CLIENT_USER_NAME'] = self.user_name
        env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
        req = make_request(environ=env)
        context = self._create_context(
            request=req,
            mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
        self._assert_tokenless_auth_context(context)
    def test_proj_scope_with_proj_id_only_success(self):
        """Project id alone is a sufficient scope (domain is implied)."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_ID'] = self.project_id
        env['SSL_CLIENT_USER_NAME'] = self.user_name
        env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
        req = make_request(environ=env)
        context = self._create_context(
            request=req,
            mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
        self._assert_tokenless_auth_context(context)
    def test_proj_scope_with_proj_name_and_proj_dom_id_success(self):
        """Project name + project domain id scope builds a valid context."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_NAME'] = self.project_name
        env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
        env['SSL_CLIENT_USER_NAME'] = self.user_name
        env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
        req = make_request(environ=env)
        context = self._create_context(
            request=req,
            mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
        self._assert_tokenless_auth_context(context)
    def test_proj_scope_with_proj_name_and_proj_dom_name_success(self):
        """Project name + project domain name scope builds a valid context."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        env['HTTP_X_PROJECT_NAME'] = self.project_name
        env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
        env['SSL_CLIENT_USER_NAME'] = self.user_name
        env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
        req = make_request(environ=env)
        context = self._create_context(
            request=req,
            mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME)
        self._assert_tokenless_auth_context(context)
    def test_proj_scope_with_proj_name_only_fail(self):
        """Project name without any domain cannot be resolved: error."""
        env = {}
        env['SSL_CLIENT_I_DN'] = self.client_issuer
        # NOTE(review): the project *name* is set to project_id here; scoping
        # by name without a domain should fail regardless - confirm intent.
        env['HTTP_X_PROJECT_NAME'] = self.project_id
        env['SSL_CLIENT_USER_NAME'] = self.user_name
        env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
        req = make_request(environ=env)
        context = self._create_context(
            request=req,
            mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME,
            exception_expected=True)
        self.assertRaises(exception.ValidationError,
                          context.process_request,
                          req)
def test_mapping_with_userid_and_domainid_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_ID'] = self.user['id']
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINID)
self._assert_tokenless_auth_context(context)
def test_mapping_with_userid_and_domainname_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_ID'] = self.user['id']
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINNAME)
self._assert_tokenless_auth_context(context)
def test_mapping_with_username_and_domainid_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = self.user_name
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID)
self._assert_tokenless_auth_context(context)
def test_only_domain_name_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_DOMAINNAME_ONLY,
exception_expected=True)
self.assertRaises(exception.ValidationError,
context.process_request,
req)
def test_only_domain_id_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_DOMAINID_ONLY,
exception_expected=True)
self.assertRaises(exception.ValidationError,
context.process_request,
req)
def test_missing_domain_data_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_USER_NAME'] = self.user_name
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_ONLY,
exception_expected=True)
self.assertRaises(exception.ValidationError,
context.process_request,
req)
def test_userid_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_USER_ID'] = self.user['id']
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERID_ONLY)
self._assert_tokenless_auth_context(context)
def test_domain_disable_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = self.user_name
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
req = make_request(environ=env)
self.domain['enabled'] = False
self.domain = self.resource_api.update_domain(
self.domain['id'], self.domain)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID,
exception_expected=True)
self.assertRaises(exception.Unauthorized,
context.process_request,
req)
def test_user_disable_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = self.user_name
env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id
req = make_request(environ=env)
self.user['enabled'] = False
self.user = self.identity_api.update_user(self.user['id'], self.user)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID,
exception_expected=True)
self.assertRaises(AssertionError,
context.process_request,
req)
def test_invalid_user_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_ID'] = self.project_id
env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id
env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex
env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name
req = make_request(environ=env)
context = self._create_context(
request=req,
mapping_ref=mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME,
exception_expected=True)
self.assertRaises(exception.UserNotFound,
context.process_request,
req)
def test_ephemeral_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = self.user_name
req = make_request(environ=env)
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
context = self._create_context(
request=req,
mapping_ref=mapping)
self._assert_tokenless_auth_context(context, ephemeral_user=True)
def test_ephemeral_with_default_user_type_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = self.user_name
req = make_request(environ=env)
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
# this mapping does not have the user type defined
# and it should defaults to 'ephemeral' which is
# the expected type for the test case.
mapping = mapping_fixtures.MAPPING_FOR_DEFAULT_EPHEMERAL_USER.copy()
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
context = self._create_context(
request=req,
mapping_ref=mapping)
self._assert_tokenless_auth_context(context, ephemeral_user=True)
def test_ephemeral_any_user_success(self):
"""Ephemeral user does not need a specified user
Keystone is not looking to match the user, but a corresponding group.
"""
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex
req = make_request(environ=env)
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
context = self._create_context(
request=req,
mapping_ref=mapping)
self._assert_tokenless_auth_context(context, ephemeral_user=True)
def test_ephemeral_invalid_scope_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex
env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex
env['SSL_CLIENT_USER_NAME'] = self.user_name
req = make_request(environ=env)
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
mapping['rules'][0]['local'][0]['group']['id'] = self.group['id']
context = self._create_context(
request=req,
mapping_ref=mapping,
exception_expected=True)
self.assertRaises(exception.Unauthorized,
context.process_request,
req)
def test_ephemeral_no_group_found_fail(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = self.user_name
req = make_request(environ=env)
self.config_fixture.config(group='tokenless_auth',
protocol='ephemeral')
self.protocol_id = 'ephemeral'
mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex
context = self._create_context(
request=req,
mapping_ref=mapping,
exception_expected=True)
self.assertRaises(exception.MappedGroupNotFound,
context.process_request,
req)
def test_ephemeral_incorrect_mapping_fail(self):
"""Ephemeral user picks up the non-ephemeral user mapping.
Looking up the mapping with protocol Id 'x509' will load up
the non-ephemeral user mapping, results unauthenticated.
"""
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
env['HTTP_X_PROJECT_NAME'] = self.project_name
env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name
env['SSL_CLIENT_USER_NAME'] = self.user_name
req = make_request(environ=env)
# This will pick up the incorrect mapping
self.config_fixture.config(group='tokenless_auth',
protocol='x509')
self.protocol_id = 'x509'
mapping = mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER.copy()
mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex
context = self._create_context(
request=req,
mapping_ref=mapping,
exception_expected=True)
self.assertRaises(exception.MappedGroupNotFound,
context.process_request,
req)
def test_create_idp_id_success(self):
env = {}
env['SSL_CLIENT_I_DN'] = self.client_issuer
auth = tokenless_auth.TokenlessAuthHelper(env)
idp_id = auth._build_idp_id()
self.assertEqual(self.idp_id, idp_id)
def test_create_idp_id_attri_not_found_fail(self):
env = {}
env[uuid.uuid4().hex] = self.client_issuer
auth = tokenless_auth.TokenlessAuthHelper(env)
expected_msg = ('Could not determine Identity Provider ID. The '
'configuration option %s was not found in the '
'request environment.' %
CONF.tokenless_auth.issuer_attribute)
# Check the content of the exception message as well
self.assertRaisesRegexp(exception.TokenlessAuthConfigError,
expected_msg,
auth._build_idp_id)
| {
"content_hash": "8ea144c803cf08f66743911bacaad9ab",
"timestamp": "",
"source": "github",
"line_count": 735,
"max_line_length": 78,
"avg_line_length": 41.1578231292517,
"alnum_prop": 0.587616938283032,
"repo_name": "jonnary/keystone",
"id": "d420a568a0ad867abbe11c624294fcbdd260dcda",
"size": "30837",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keystone/tests/unit/test_middleware.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "665"
},
{
"name": "Python",
"bytes": "3992867"
}
],
"symlink_target": ""
} |
import os, re, fnmatch, sys
from collections import namedtuple
from itertools import islice
from .Source import Source
from quickfind.Searcher import Ranker, CString
from .Util import truncate_middle, rec_dir_up, highlight, StringRanker, simpleFormatter
# Prefer fsnix's directory walker when it is installed; fall back to the
# stdlib implementation otherwise.
try:
    import fsnix.util as util
    walk = util.walk
except ImportError:
    walk = os.walk
# Python 3 removed xrange; alias it so the py2-style loops below still run.
if sys.version_info.major >= 3:
    xrange = range
# One filesystem entry: containing directory, entry name, and the
# lowercased name (used for case-insensitive matching/ranking).
File = namedtuple("File", "dir,name,sname")
class DirectorySource(Source):
    """Source that yields File entries by walking one or more directory
    trees, optionally filtering entries through .gitignore files."""

    def __init__(self, dirs=".", ignore_directories=True, ignore_files=True,
                 git_ignore=True):
        """
        dirs -- iterable of start directories.  (Passing a single string
            such as "." only works by accident — len(".") == 1 and
            "."[0] == "." — so prefer a list.)
        ignore_directories -- when True, directory entries are not yielded.
        ignore_files -- when True, file entries are not yielded.
        git_ignore -- when True, honour .gitignore files during the walk.
        """
        self.ignore_directories = ignore_directories
        self.ignore_files = ignore_files
        self.git_ignore = git_ignore
        self.startDirs = dirs

    def find_parent_gis(self, dir):
        """Collect (dirname, GitIgnoreFilter) pairs for .gitignore files in
        every ancestor of *dir*, ordered root-first."""
        dirs = rec_dir_up(os.path.abspath(dir))
        # Skip the first yielded directory (presumably *dir* itself; its
        # own .gitignore is picked up during the walk) — TODO confirm.
        next(dirs)
        filters = []
        for dirname in dirs:
            pgi = os.path.join(dirname, '.gitignore')
            if os.path.isfile(pgi):
                filters.append((dirname, GitIgnoreFilter(dirname, '.gitignore')))
        return list(reversed(filters))

    def fetch(self):
        """Return all File entries from every start directory, skipping
        directories already visited (guards against overlapping roots)."""
        sd = set()
        # optimize for the common single-directory case
        if len(self.startDirs) == 1:
            return self.fetchDir(self.startDirs[0], sd)
        lst = []
        for d in self.startDirs:
            lst.extend(self.fetchDir(d, sd))
        return lst

    # The os.walk interface requires pruning subdirectories by deleting
    # them from the dirs list in place.
    def delDirs(self, dirs, f):
        """Delete entries of *dirs* in place where f(entry) is falsy."""
        for i in xrange(len(dirs) - 1, -1, -1):
            if not f(dirs[i]):
                del dirs[i]

    def fetchDir(self, d, seenDirs):
        """Walk *d* and return its File entries, applying the innermost
        applicable .gitignore filter to each directory level."""
        lst = []
        ap = os.path.abspath
        filters = self.find_parent_gis(d) if self.git_ignore else []
        for dirname, dirs, filenames in walk(d):
            abspath = ap(dirname)
            if abspath in seenDirs:
                # Already covered by a previous start directory: prune the
                # whole subtree and move on.
                self.delDirs(dirs, lambda x: False)
                continue
            seenDirs.add(abspath)
            names = []
            if not self.ignore_files:
                # Copy so extending below never mutates walk's own list.
                names = list(filenames)
            if not self.ignore_directories:
                names.extend(dirs)
            if self.git_ignore and '.gitignore' in filenames:
                gif = GitIgnoreFilter(abspath, '.gitignore')
                filters.append((abspath, gif))
            # Drop filters that do not govern this directory.  Compare on a
            # path-component boundary: a bare prefix test would let a filter
            # for /a/foo wrongly apply inside /a/foobar.
            while filters:
                path, _ = filters[-1]
                if abspath == path or abspath.startswith(path + os.sep):
                    break
                filters.pop()
            fltr = filters[-1][1] if filters else None
            if fltr is None:
                files = (File(dirname, name, name.lower()) for name in names)
            else:
                files = (File(dirname, name, name.lower())
                         for name in names if fltr(name, abspath))
            lst.extend(files)
            # Prune ignored subdirectories so walk never descends into them.
            if fltr is not None:
                self.delDirs(dirs, lambda name: fltr(name, abspath))
        return lst
class GitIgnoreFilter(object):
    """Callable filter implementing a subset of .gitignore semantics.

    Patterns are split into three kinds: exact names, glob patterns
    (containing *, ?, or [..]), and leading-slash patterns anchored to
    the .gitignore's own directory.  Calling the filter with
    (name, dirname) returns True when the entry should be KEPT.
    """
    # Optimization placeholders — currently unused by any code in view.
    lastdir = None
    last_path_filter = None
    # Characters that mark a pattern as a glob rather than an exact name.
    globchars = re.compile(r'[*\[\]?]')

    def __init__(self, dirname, filename):
        """Parse *filename* (a .gitignore) located in *dirname*."""
        self.dirname = dirname
        self.fn = os.path.join(dirname, filename)
        path_filters = []
        glob_filters = []
        exact_filters = set(['.git'])
        with open(self.fn) as f:
            gc = self.globchars
            for line in f:
                pattern = line.strip()
                # Skip blank lines and comments.  (Stripping first also
                # catches indented '#' comments, which the previous code
                # let leak into the exact-match set.)
                if not pattern or pattern.startswith('#'):
                    continue
                if pattern.startswith('/'):
                    # Anchored to this .gitignore's directory.
                    path_filters.append(pattern)
                elif gc.search(pattern) is not None:
                    glob_filters.append(fnmatch.translate(pattern))
                else:
                    exact_filters.add(pattern)
        # Collapse all globs into a single alternation regex for speed.
        if glob_filters:
            self.glob_filters = [re.compile('|'.join(glob_filters))]
        else:
            self.glob_filters = []
        self.exact_filters = exact_filters
        self.path_filters = self.setup_path_filters(path_filters)

    def setup_path_filters(self, path_filters):
        """Group anchored ('/...') patterns by the absolute directory they
        apply to.  Globbing is only supported in the final component."""
        dirmaps = {}
        for pf in path_filters:
            pf = pf.rstrip("/")
            dirname, basename = os.path.split(pf)
            dm = os.path.join(self.dirname, dirname.lstrip('/')).rstrip('/')
            glob = fnmatch.translate(basename.strip())
            if dm in dirmaps:
                dirmaps[dm].append(glob)
            else:
                dirmaps[dm] = [glob]
        # Collapse each directory's globs into one alternation regex.
        for k in dirmaps:
            dirmaps[k] = re.compile('|'.join(dirmaps[k]))
        return dirmaps

    def __call__(self, fn, dirname):
        """Return True when *fn* (an entry inside *dirname*) is kept."""
        # Exact-name matches (includes the implicit '.git').
        if fn in self.exact_filters:
            return False
        # Directory-independent globs.
        for f in self.glob_filters:
            if f.match(fn) is not None:
                return False
        # Globs anchored to this specific directory.
        lpf = self.path_filters.get(dirname)
        if lpf is not None and lpf.match(fn) is not None:
            return False
        return True
def ranker(inc_path):
    """Build a StringRanker for File entries.

    inc_path -- when True, the (lowercased) directory is included in the
        string that gets ranked, not just the file name.
    """
    def rank(self, item):
        # String the ranker scores: full lowered path, or just the name.
        if self.inc_path:
            return os.path.join(item.dir.lower(), item.sname)
        return item.sname

    # PEP 8 (E731): use a def rather than assigning a lambda to a name.
    def weight(_, s):
        # Deeper entries get a larger weight (square root of depth).
        return s.dir.count(os.sep) ** 0.5

    return StringRanker.new(weight, inc_path=inc_path, get_part=rank)
def dirFormatter(f, query, dims):
    """Render a File entry as its full path via the simple formatter."""
    full_path = os.path.join(f.dir, f.name)
    return simpleFormatter(full_path, query, dims)
| {
"content_hash": "7e4c7e2e827ab24aa26924e11faccde4",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 94,
"avg_line_length": 30.18918918918919,
"alnum_prop": 0.5459265890778872,
"repo_name": "Refefer/quickfind",
"id": "a5b4a069553c2995c1e6bd5c1b7402c7ced34df3",
"size": "5585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quickfind/source/DirectorySource.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "32549"
},
{
"name": "Shell",
"bytes": "502"
},
{
"name": "VimL",
"bytes": "733"
}
],
"symlink_target": ""
} |
'''
Expected result of ordered probability attempts
Status: Accepted
'''
###############################################################################
def main():
    """Read input and print the expected number of attempts.

    Reads a word count, then one probability per line (second field),
    and prints the expected attempt count when guesses are made in
    descending probability order.
    """
    count = int(input())
    chances = [float(input().split()[1]) for _ in range(count)]
    # Seed the sum with 0.0 so the printed result is always a float,
    # matching the original accumulator.
    expected = sum((attempt * chance
                    for attempt, chance in enumerate(sorted(chances,
                                                            reverse=True),
                                                    start=1)), 0.0)
    print(expected)
###############################################################################
# Script entry point.
if __name__ == '__main__':
    main()
| {
"content_hash": "6768a0610a7983a62e0379886d7ac8a8",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 79,
"avg_line_length": 24.208333333333332,
"alnum_prop": 0.43373493975903615,
"repo_name": "ivanlyon/exercises",
"id": "669ce5ecc215810f08e5fb42a337ceb332135ee6",
"size": "581",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kattis/k_password.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1283"
},
{
"name": "HTML",
"bytes": "9068"
},
{
"name": "Python",
"bytes": "96419"
}
],
"symlink_target": ""
} |
import re
import os
import errno
import argparse
import asyncore
import logging
import logging.config
# Bulk Imports
from bulk.proxy import BulkProxy
from bulk.helpers import *
class CreateProcessor(argparse.Action):
    """
    A custom argparse action.

    Instantiates a processing engine to be used in Bulk
    and appends it to the list of actively used processing
    engines.
    """
    def __call__(self, parser, namespace, values, option_string=None):
        """
        Instantiate a processing engine and append it to the active set.

        values[0] is the engine's import string; the remaining values are
        paths to rules files, each of which must exist and be readable.
        """
        module_name = values[0]
        rule_files = values[1:]
        # Validate every rules file up front, before building the engine.
        for path in rule_files:
            if not os.path.isfile(path):
                raise IOError((errno.ENOENT, 'Cannot find rules file.', path))
            try:
                with open(path):
                    pass
            except IOError:
                raise IOError((errno.EACCES,
                               'Cannot open and read rules file.', path))
        engines = getattr(namespace, self.dest)
        engines.append(build_processor(module_name,
                                       convert_rules(rule_files)))
        setattr(namespace, self.dest, engines)
def setup_logging(config):
    """
    Configure logging for Bulk.

    Keyword arguments:
    config -- path to logging config file

    Retries configuration once after creating a missing log directory;
    any other failure is re-raised.  Returns the 'bulk' logger.
    """
    done = False
    # Loop so that a recoverable failure (missing log directory) can be
    # fixed and fileConfig retried; every other failure path raises.
    while not done:
        try:
            logging.config.fileConfig(config)
        except IOError as e:
            if e.args[0] == errno.ENOENT and e.filename:
                # The handler's log file path points into a directory that
                # does not exist yet — create it and retry.
                print "The full path to the log file (%s) does not exist!" \
                    " Trying to recover." % e.filename
                fp = os.path.dirname(e.filename)
                if not os.path.exists(fp):
                    os.makedirs(fp)
                else:
                    # Directory exists but fileConfig still failed: the
                    # missing-file diagnosis was wrong, so give up.
                    print "Failed to setup logging, exiting."
                    raise
            else:
                # Any other IOError (e.g. EACCES) is not recoverable here.
                print "Failed to setup logging," \
                    " check permissions on log file."
                raise
        except Exception as e:
            print "Something went wrong with the logging setup!"
            raise
        else:
            done = True
    logger = logging.getLogger('bulk')
    return logger
def validate_arguments(args):
    """
    Validate command line arguments.

    Keyword arguments:
    args -- a populated argument namespace from argparse

    Returns an error message string, or None upon success.
    """
    # Octet-by-octet IPv4 matcher (0-255 per octet, fully anchored).
    ipv4_pattern = (
        "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}"
        "([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$")
    if re.match(ipv4_pattern, args.remote_address) is None:
        return ('remote_address parameter must be in IPv4 format'
                ' (Ex. 127.0.0.1)')
    if re.match(ipv4_pattern, args.bind_address) is None:
        return 'bind_address parameter must be in IPv4 format (Ex. 127.0.0.1)'
    # The logging config file must exist and be readable.
    if not os.path.isfile(args.log_config):
        return ('Cannot find the logging config file "%s",'
                ' exiting' % args.log_config)
    try:
        with open(args.log_config):
            pass
    except IOError:
        return ('Cannot open and read logging config file "%s",'
                ' exiting' % args.log_config)
    # Each configured directory must be writable; filter(None, ...) simply
    # drops unset entries.
    for directory in filter(None, [args.base_log_directory]):
        if not os.path.isdir(directory):
            create_sub_directories(directory)
            continue
        try:
            with open(directory + 'test.txt', 'wb') as probe:
                probe.write('Testing write access to "%s"' % directory)
        except IOError:
            return 'Cannot write to directory "%s", exiting' % directory
        else:
            # The probe wrote successfully, so clean it up.
            os.remove(directory + 'test.txt')
    # Everything checked out.
    return None
def run():
    """
    Start Bulk.

    Handles all command line arguments, logging setup,
    and kicking off the network listener.  Blocks inside asyncore.loop()
    until the process is interrupted.
    """
    parser = argparse.ArgumentParser(description='A content inspecting \
    mail relay built on smtpd')
    # Network endpoints: where to listen and where to relay.
    parser.add_argument(
        '--bind_address',
        default='127.0.0.1',
        help='Address to bind to and listen on for incoming mail. \
        Default is 127.0.0.1'
    )
    parser.add_argument(
        '--bind_port',
        default=1025,
        type=int,
        help='Port to bind to and to listen on for incoming mail. \
        Default is 1025'
    )
    parser.add_argument(
        '--remote_address',
        default='127.0.0.1',
        help='Remote address to forward outbound mail. \
        Default is 127.0.0.1'
    )
    parser.add_argument(
        '--remote_port',
        default=25,
        type=int,
        help='Remote port to forward outbound mail. Default is 25'
    )
    # Note that type can be a function
    parser.add_argument(
        '--base_log_directory',
        default='/tmp/bulk/',
        type=directory_name,
        help='Directory to write log files, messages, and attachments. \
        Default is /tmp/bulk/'
    )
    # Behavioural switches.
    parser.add_argument(
        '--log_all_messages',
        action='store_true',
        help='Log all messages to /base_log_directory/messages/'
    )
    parser.add_argument(
        '--block',
        action='store_true',
        help='Block mail with quarantined attachments. Default is False'
    )
    parser.add_argument(
        '--always_block',
        action='store_true',
        help='Turn the proxy into a server (block all). Default is false'
    )
    parser.add_argument(
        '--save_attachments',
        action='store_true',
        help='Experimental: Save all attachments as seperate files. \
        Default is false.'
    )
    parser.add_argument(
        '--log_config',
        default='/etc/bulk/logging.conf',
        help='Logging config file. Default is /etc/bulk/logging.conf'
    )
    # add a group to mark certain arguments as required
    req = parser.add_argument_group('required')
    # the processor arg is the only required argument
    req.add_argument(
        '--processor',
        default=[],
        required=True,
        nargs='+',
        action=CreateProcessor,
        dest='processors',
        help='Choose a processing engine by supplying an import string as the \
        first positional argument and multiple rules files as optional \
        following arguments. For example: \
        --processor bulk.processors.basic /etc/bulk/rules/simple'
    )
    args = parser.parse_args()
    # Fail fast on inconsistent arguments before touching the filesystem.
    err = validate_arguments(args)
    if err:
        raise Exception(err)
    create_sub_directories(args.base_log_directory)
    # Setup logging
    logger = setup_logging(args.log_config)
    # Announce the effective configuration so operators can audit it.
    logger.info('Starting Bulk Proxy')
    logger.info('Listening on %s:%s' %
                (args.bind_address, args.bind_port))
    if not args.always_block:
        logger.info('Forwarding to %s:%s' %
                    (args.remote_address, args.remote_port))
    logger.info('Bulk matches will be logged to %squarantine/'
                % args.base_log_directory)
    if args.block:
        logger.info('Emails that match a processor rule will be BLOCKED')
    if args.always_block:
        logger.info('Bulk set to BLOCK ALL mail')
    if args.log_all_messages:
        logger.info('Logging ALL messages to %smessages/'
                    % args.base_log_directory)
    if args.save_attachments:
        logger.info('Saving attachments to %sattachments/'
                    % args.base_log_directory)
    if args.processors:
        for p in args.processors:
            logger.info('Bulk using %s' % p)
    # The dispatcher registers itself with asyncore's global socket map on
    # construction, so the local name is not read again afterwards.
    server = BulkProxy((args.bind_address, args.bind_port),
                       (args.remote_address, args.remote_port),
                       args.processors,
                       base_directory=args.base_log_directory,
                       block=args.block,
                       always_block=args.always_block,
                       log=args.log_all_messages,
                       save_attachments=args.save_attachments)
    # Kick off the main process
    asyncore.loop()
def stop():
    """
    Responsible for safely stopping Bulk.

    Only logs the shutdown; asyncore unwinds when the interrupt propagates.
    """
    logging.getLogger('bulk').info(
        'Received a keyboard interrupt, stopping Bulk')
# Run the proxy until interrupted; Ctrl-C logs a clean shutdown notice.
if __name__ == '__main__':
    try:
        run()
    except KeyboardInterrupt:
        stop()
| {
"content_hash": "ed7dcb5666dd6074658bcea448f0a296",
"timestamp": "",
"source": "github",
"line_count": 321,
"max_line_length": 79,
"avg_line_length": 27.996884735202492,
"alnum_prop": 0.5649271169467008,
"repo_name": "MITRECND/bulk",
"id": "0b7074b75f8b15b2239722e5c1e3665698e72d69",
"size": "10361",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/bulk_proxy.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "47878"
}
],
"symlink_target": ""
} |
from self_organising_systems.shared.config import cfg
import ml_collections
# Configuration for the texture cellular-automaton experiments.
cfg.texture_ca = ml_collections.ConfigDict()
# Model size.
cfg.texture_ca.channel_n = 12
cfg.texture_ca.hidden_n = 96
cfg.texture_ca.fire_rate = 0.5
# Training hyperparameters.
cfg.texture_ca.batch_size = 4
# NOTE(review): the original file assigned lr = 2e-3 twice (identical
# values); the redundant duplicate has been removed.
cfg.texture_ca.lr = 2e-3
cfg.texture_ca.pool_size = 1024
cfg.texture_ca.fixed_seed = 123  # 0 to disable
cfg.texture_ca.lr_decay = 2000
cfg.texture_ca.rollout_len_min = 32
cfg.texture_ca.rollout_len_max = 64
cfg.texture_ca.train_steps = 2000
cfg.texture_ca.gradnorm = True
cfg.texture_ca.q = 2.0
cfg.texture_ca.bias = True
# Perception filters.
cfg.texture_ca.learned_filters = 0
cfg.texture_ca.laplacian = True
cfg.texture_ca.gradient = True
cfg.texture_ca.identity = True
# texture synth / style transfer
cfg.texture_ca.ancestor_npy = ''
cfg.texture_ca.img_size = 128
cfg.texture_ca.vgg_input_img_size = 128
cfg.texture_ca.texture_dir = 'textures'
cfg.texture_ca.ancestor_dir = 'models'
cfg.texture_ca.objective = "style:mondrian.jpg" #{style:mondrian.jpg, inception:mixed4b_pool_reduce_pre_relu:30}
cfg.texture_ca.inception_pb = 'gs://modelzoo/vision/other_models/InceptionV1.pb'
cfg.texture_ca.hidden_viz_group = False # Group the hidden states into RGB when vizualizing
cfg.texture_ca.viz_rollout_len = 1000
cfg.texture_ca.overflow_loss_coef = 1e4 # auxiliary loss to keep generated values in [0,1]
| {
"content_hash": "d83e02cc83091d50e0e374423a94a0de",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 112,
"avg_line_length": 38.542857142857144,
"alnum_prop": 0.7575982209043736,
"repo_name": "google-research/self-organising-systems",
"id": "648fb0a48d2f7b905799a9a2bb6fbe7d810c3e6e",
"size": "1349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "self_organising_systems/texture_ca/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "5591"
},
{
"name": "JavaScript",
"bytes": "22998"
},
{
"name": "Jupyter Notebook",
"bytes": "27881557"
},
{
"name": "Python",
"bytes": "89033"
}
],
"symlink_target": ""
} |
from spotdl.metadata.exceptions import MetadataNotFoundError
from spotdl.metadata.exceptions import SpotifyMetadataNotFoundError
from spotdl.metadata.exceptions import YouTubeMetadataNotFoundError
class TestMetadataNotFoundSubclass:
    """Verify the metadata exception hierarchy."""

    def test_metadata_not_found_subclass(self):
        # The package's base error must itself be a standard Exception.
        base_error = MetadataNotFoundError
        assert issubclass(base_error, Exception)

    def test_spotify_metadata_not_found(self):
        # Spotify-specific failures are catchable via the base class.
        spotify_error = SpotifyMetadataNotFoundError
        assert issubclass(spotify_error, MetadataNotFoundError)

    def test_youtube_metadata_not_found(self):
        # YouTube-specific failures are catchable via the base class.
        youtube_error = YouTubeMetadataNotFoundError
        assert issubclass(youtube_error, MetadataNotFoundError)
| {
"content_hash": "ca1e42a5fd948be76fdcaef0cc298cb1",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 78,
"avg_line_length": 39.86666666666667,
"alnum_prop": 0.8210702341137124,
"repo_name": "Ritiek/Spotify-Downloader",
"id": "ec5c32cb3e3cc525831cc7963be9e94c7f626667",
"size": "598",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spotdl/metadata/tests/test_metadata_exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15734"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a "show_toc" flag to HomePage, mirroring the field added to
    # DashboardPage in migration 0019.
    dependencies = [
        ('pages', '0019_dashboardpage_show_toc'),
    ]
    operations = [
        migrations.AddField(
            model_name='homepage',
            name='show_toc',
            field=models.BooleanField(default=False, help_text='Show Table of Contents'),
        ),
    ]
| {
"content_hash": "d73e005bcaddfa9bc75ad760d390103b",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 89,
"avg_line_length": 23.38888888888889,
"alnum_prop": 0.6104513064133017,
"repo_name": "bruecksen/isimip",
"id": "5fa5673de515a517efab40292321d6e56dd1711a",
"size": "494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "isi_mip/pages/migrations/0020_homepage_show_toc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "36731"
},
{
"name": "HTML",
"bytes": "106877"
},
{
"name": "JavaScript",
"bytes": "30564"
},
{
"name": "Python",
"bytes": "4244200"
},
{
"name": "Shell",
"bytes": "789"
}
],
"symlink_target": ""
} |
from __future__ import print_function, unicode_literals
from collections import namedtuple
from itertools import cycle
import mock
from zope.interface import alsoProvides
from twisted.trial import unittest
from twisted.internet.task import Clock, Cooperator
from twisted.internet.interfaces import IPullProducer
from ...eventual import EventualQueue
from ..._interfaces import IDilationManager
from ..._dilation.connection import KCM, Open, Data, Close, Ack
from ..._dilation.outbound import Outbound, PullToPush
from .common import clear_mock_calls
# Lightweight record stand-ins used by the mock connection's send_record
# side effect: a Pauser pauses the producer, a Stopper unregisters the
# producer for its subchannel, and a NonPauser does neither.
Pauser = namedtuple("Pauser", ["seqnum"])
NonPauser = namedtuple("NonPauser", ["seqnum"])
Stopper = namedtuple("Stopper", ["sc"])
def make_outbound():
    """Build an Outbound wired to mocks.

    Returns (outbound, manager_mock, connection_mock).  The connection's
    send_record pauses the producer when given a Pauser and unregisters
    the subchannel producer when given a Stopper.
    """
    manager = mock.Mock()
    alsoProvides(manager, IDilationManager)
    clock = Clock()
    eq = EventualQueue(clock)
    term = mock.Mock(side_effect=lambda: True)  # one write per Eventual tick

    def term_factory():
        return term

    coop = Cooperator(terminationPredicateFactory=term_factory,
                      scheduler=eq.eventually)
    outbound = Outbound(manager, coop)
    conn = mock.Mock()  # Connection

    def maybe_pause(record):
        if isinstance(record, Pauser):
            outbound.pauseProducing()
        elif isinstance(record, Stopper):
            outbound.subchannel_unregisterProducer(record.sc)

    conn.send_record = mock.Mock(side_effect=maybe_pause)
    outbound._test_eq = eq
    outbound._test_term = term
    return outbound, manager, conn
class OutboundTest(unittest.TestCase):
    """Unit tests for the Outbound record manager.

    Each test uses make_outbound() to get an Outbound wired to a mock
    Manager and a mock Connection, then checks seqnum allocation, the
    queues of unacked (_outbound_queue) and not-yet-sent (_queued_unsent)
    records, and the round-robin pause/resume handling of subchannel
    producers.  Pauser/NonPauser/Stopper records drive the mock
    Connection's reaction to each send_record() call.
    """
    def test_build_record(self):
        # seqnums are allocated sequentially, shared across record types
        o, m, c = make_outbound()
        scid1 = b"scid"
        self.assertEqual(o.build_record(Open, scid1),
                         Open(seqnum=0, scid=b"scid"))
        self.assertEqual(o.build_record(Data, scid1, b"dataaa"),
                         Data(seqnum=1, scid=b"scid", data=b"dataaa"))
        self.assertEqual(o.build_record(Close, scid1),
                         Close(seqnum=2, scid=b"scid"))
        self.assertEqual(o.build_record(Close, scid1),
                         Close(seqnum=3, scid=b"scid"))
    def test_outbound_queue(self):
        # an ACK for seqnum N retires N and every earlier queued record
        o, m, c = make_outbound()
        scid1 = b"scid"
        r1 = o.build_record(Open, scid1)
        r2 = o.build_record(Data, scid1, b"data1")
        r3 = o.build_record(Data, scid1, b"data2")
        o.queue_and_send_record(r1)
        o.queue_and_send_record(r2)
        o.queue_and_send_record(r3)
        self.assertEqual(list(o._outbound_queue), [r1, r2, r3])
        # we would never normally receive an ACK without first getting a
        # connection
        o.handle_ack(r2.seqnum)
        self.assertEqual(list(o._outbound_queue), [r3])
        o.handle_ack(r3.seqnum)
        self.assertEqual(list(o._outbound_queue), [])
        o.handle_ack(r3.seqnum)  # ignored
        self.assertEqual(list(o._outbound_queue), [])
        o.handle_ack(r1.seqnum)  # ignored
        self.assertEqual(list(o._outbound_queue), [])
    def test_duplicate_registerProducer(self):
        # registering a second producer on the same subchannel must raise
        o, m, c = make_outbound()
        sc1 = object()
        p1 = mock.Mock()
        o.subchannel_registerProducer(sc1, p1, True)
        with self.assertRaises(ValueError) as ar:
            o.subchannel_registerProducer(sc1, p1, True)
        s = str(ar.exception)
        self.assertIn("registering producer", s)
        self.assertIn("before previous one", s)
        self.assertIn("was unregistered", s)
    def test_connection_send_queued_unpaused(self):
        # with no pauses, records queued before the connection exists are
        # flushed as soon as use_connection() is called
        o, m, c = make_outbound()
        scid1 = b"scid"
        r1 = o.build_record(Open, scid1)
        r2 = o.build_record(Data, scid1, b"data1")
        r3 = o.build_record(Data, scid1, b"data2")
        o.queue_and_send_record(r1)
        o.queue_and_send_record(r2)
        self.assertEqual(list(o._outbound_queue), [r1, r2])
        self.assertEqual(list(o._queued_unsent), [])
        # as soon as the connection is established, everything is sent
        o.use_connection(c)
        self.assertEqual(c.mock_calls, [mock.call.transport.registerProducer(o, True),
                                        mock.call.send_record(r1),
                                        mock.call.send_record(r2)])
        self.assertEqual(list(o._outbound_queue), [r1, r2])
        self.assertEqual(list(o._queued_unsent), [])
        clear_mock_calls(c)
        o.queue_and_send_record(r3)
        self.assertEqual(list(o._outbound_queue), [r1, r2, r3])
        self.assertEqual(list(o._queued_unsent), [])
        self.assertEqual(c.mock_calls, [mock.call.send_record(r3)])
    def test_connection_send_queued_paused(self):
        # when the connection pauses us mid-drain, unsent records wait in
        # _queued_unsent (and still in _outbound_queue until ACKed)
        o, m, c = make_outbound()
        r1 = Pauser(seqnum=1)
        r2 = Pauser(seqnum=2)
        r3 = Pauser(seqnum=3)
        o.queue_and_send_record(r1)
        o.queue_and_send_record(r2)
        self.assertEqual(list(o._outbound_queue), [r1, r2])
        self.assertEqual(list(o._queued_unsent), [])
        # pausing=True, so our mock Manager will pause the Outbound producer
        # after each write. So only r1 should have been sent before getting
        # paused
        o.use_connection(c)
        self.assertEqual(c.mock_calls, [mock.call.transport.registerProducer(o, True),
                                        mock.call.send_record(r1)])
        self.assertEqual(list(o._outbound_queue), [r1, r2])
        self.assertEqual(list(o._queued_unsent), [r2])
        clear_mock_calls(c)
        # Outbound is responsible for sending all records, so when Manager
        # wants to send a new one, and Outbound is still in the middle of
        # draining the beginning-of-connection queue, the new message gets
        # queued behind the rest (in addition to being queued in
        # _outbound_queue until an ACK retires it).
        o.queue_and_send_record(r3)
        self.assertEqual(list(o._outbound_queue), [r1, r2, r3])
        self.assertEqual(list(o._queued_unsent), [r2, r3])
        self.assertEqual(c.mock_calls, [])
        o.handle_ack(r1.seqnum)
        self.assertEqual(list(o._outbound_queue), [r2, r3])
        self.assertEqual(list(o._queued_unsent), [r2, r3])
        self.assertEqual(c.mock_calls, [])
    def test_premptive_ack(self):
        # one mode I have in mind is for each side to send an immediate ACK,
        # with everything they've ever seen, as the very first message on each
        # new connection. The idea is that you might preempt sending stuff from
        # the _queued_unsent list if it arrives fast enough (in practice this
        # is more likely to be delivered via the DILATE mailbox message, but
        # the effects might be vaguely similar, so it seems worth testing
        # here). A similar situation would be if each side sends ACKs with the
        # highest seqnum they've ever seen, instead of merely ACKing the
        # message which was just received.
        o, m, c = make_outbound()
        r1 = Pauser(seqnum=1)
        r2 = Pauser(seqnum=2)
        r3 = Pauser(seqnum=3)
        o.queue_and_send_record(r1)
        o.queue_and_send_record(r2)
        self.assertEqual(list(o._outbound_queue), [r1, r2])
        self.assertEqual(list(o._queued_unsent), [])
        o.use_connection(c)
        self.assertEqual(c.mock_calls, [mock.call.transport.registerProducer(o, True),
                                        mock.call.send_record(r1)])
        self.assertEqual(list(o._outbound_queue), [r1, r2])
        self.assertEqual(list(o._queued_unsent), [r2])
        clear_mock_calls(c)
        o.queue_and_send_record(r3)
        self.assertEqual(list(o._outbound_queue), [r1, r2, r3])
        self.assertEqual(list(o._queued_unsent), [r2, r3])
        self.assertEqual(c.mock_calls, [])
        o.handle_ack(r2.seqnum)
        self.assertEqual(list(o._outbound_queue), [r3])
        self.assertEqual(list(o._queued_unsent), [r3])
        self.assertEqual(c.mock_calls, [])
    def test_pause(self):
        # exercises the round-robin scheduling of multiple registered
        # IPushProducers across pause/resume cycles
        o, m, c = make_outbound()
        o.use_connection(c)
        self.assertEqual(c.mock_calls, [mock.call.transport.registerProducer(o, True)])
        self.assertEqual(list(o._outbound_queue), [])
        self.assertEqual(list(o._queued_unsent), [])
        clear_mock_calls(c)
        sc1, sc2, sc3 = object(), object(), object()
        p1, p2, p3 = mock.Mock(name="p1"), mock.Mock(
            name="p2"), mock.Mock(name="p3")
        # we aren't paused yet, since we haven't sent any data
        o.subchannel_registerProducer(sc1, p1, True)
        self.assertEqual(p1.mock_calls, [])
        r1 = Pauser(seqnum=1)
        o.queue_and_send_record(r1)
        # now we should be paused
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls, [mock.call.send_record(r1)])
        self.assertEqual(p1.mock_calls, [mock.call.pauseProducing()])
        clear_mock_calls(p1, c)
        # so an IPushProducer will be paused right away
        o.subchannel_registerProducer(sc2, p2, True)
        self.assertEqual(p2.mock_calls, [mock.call.pauseProducing()])
        clear_mock_calls(p2)
        o.subchannel_registerProducer(sc3, p3, True)
        self.assertEqual(p3.mock_calls, [mock.call.pauseProducing()])
        self.assertEqual(o._paused_producers, set([p1, p2, p3]))
        self.assertEqual(list(o._all_producers), [p1, p2, p3])
        clear_mock_calls(p3)
        # one resumeProducing should cause p1 to get a turn, since p2 was added
        # after we were paused and p1 was at the "end" of a one-element list.
        # If it writes anything, it will get paused again immediately.
        r2 = Pauser(seqnum=2)
        p1.resumeProducing.side_effect = lambda: c.send_record(r2)
        o.resumeProducing()
        self.assertEqual(p1.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(p2.mock_calls, [])
        self.assertEqual(p3.mock_calls, [])
        self.assertEqual(c.mock_calls, [mock.call.send_record(r2)])
        clear_mock_calls(p1, p2, p3, c)
        # p2 should now be at the head of the queue
        self.assertEqual(list(o._all_producers), [p2, p3, p1])
        # next turn: p2 has nothing to send, but p3 does. we should see p3
        # called but not p1. The actual sequence of expected calls is:
        # p2.resume, p3.resume, pauseProducing, set(p2.pause, p3.pause)
        r3 = Pauser(seqnum=3)
        p2.resumeProducing.side_effect = lambda: None
        p3.resumeProducing.side_effect = lambda: c.send_record(r3)
        o.resumeProducing()
        self.assertEqual(p1.mock_calls, [])
        self.assertEqual(p2.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(p3.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(c.mock_calls, [mock.call.send_record(r3)])
        clear_mock_calls(p1, p2, p3, c)
        # p1 should now be at the head of the queue
        self.assertEqual(list(o._all_producers), [p1, p2, p3])
        # next turn: p1 has data to send, but not enough to cause a pause. same
        # for p2. p3 causes a pause
        r4 = NonPauser(seqnum=4)
        r5 = NonPauser(seqnum=5)
        r6 = Pauser(seqnum=6)
        p1.resumeProducing.side_effect = lambda: c.send_record(r4)
        p2.resumeProducing.side_effect = lambda: c.send_record(r5)
        p3.resumeProducing.side_effect = lambda: c.send_record(r6)
        o.resumeProducing()
        self.assertEqual(p1.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(p2.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(p3.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(c.mock_calls, [mock.call.send_record(r4),
                                        mock.call.send_record(r5),
                                        mock.call.send_record(r6),
                                        ])
        clear_mock_calls(p1, p2, p3, c)
        # p1 should now be at the head of the queue again
        self.assertEqual(list(o._all_producers), [p1, p2, p3])
        # now we let it catch up. p1 and p2 send non-pausing data, p3 sends
        # nothing.
        r7 = NonPauser(seqnum=4)
        r8 = NonPauser(seqnum=5)
        p1.resumeProducing.side_effect = lambda: c.send_record(r7)
        p2.resumeProducing.side_effect = lambda: c.send_record(r8)
        p3.resumeProducing.side_effect = lambda: None
        o.resumeProducing()
        self.assertEqual(p1.mock_calls, [mock.call.resumeProducing(),
                                         ])
        self.assertEqual(p2.mock_calls, [mock.call.resumeProducing(),
                                         ])
        self.assertEqual(p3.mock_calls, [mock.call.resumeProducing(),
                                         ])
        self.assertEqual(c.mock_calls, [mock.call.send_record(r7),
                                        mock.call.send_record(r8),
                                        ])
        clear_mock_calls(p1, p2, p3, c)
        # p1 should now be at the head of the queue again
        self.assertEqual(list(o._all_producers), [p1, p2, p3])
        self.assertFalse(o._paused)
        # now a producer disconnects itself (spontaneously, not from inside a
        # resumeProducing)
        o.subchannel_unregisterProducer(sc1)
        self.assertEqual(list(o._all_producers), [p2, p3])
        self.assertEqual(p1.mock_calls, [])
        self.assertFalse(o._paused)
        # and another disconnects itself when called
        p2.resumeProducing.side_effect = lambda: None
        p3.resumeProducing.side_effect = lambda: o.subchannel_unregisterProducer(
            sc3)
        o.pauseProducing()
        o.resumeProducing()
        self.assertEqual(p2.mock_calls, [mock.call.pauseProducing(),
                                         mock.call.resumeProducing()])
        self.assertEqual(p3.mock_calls, [mock.call.pauseProducing(),
                                         mock.call.resumeProducing()])
        clear_mock_calls(p2, p3)
        self.assertEqual(list(o._all_producers), [p2])
        self.assertFalse(o._paused)
    def test_subchannel_closed(self):
        # closing a subchannel silently drops its registered producer;
        # closing one with no producer is tolerated
        o, m, c = make_outbound()
        sc1 = mock.Mock()
        p1 = mock.Mock(name="p1")
        o.subchannel_registerProducer(sc1, p1, True)
        self.assertEqual(p1.mock_calls, [mock.call.pauseProducing()])
        clear_mock_calls(p1)
        o.subchannel_closed(1, sc1)
        self.assertEqual(p1.mock_calls, [])
        self.assertEqual(list(o._all_producers), [])
        sc2 = mock.Mock()
        o.subchannel_closed(2, sc2)
    def test_disconnect(self):
        # losing the connection pauses all registered producers
        o, m, c = make_outbound()
        o.use_connection(c)
        sc1 = mock.Mock()
        p1 = mock.Mock(name="p1")
        o.subchannel_registerProducer(sc1, p1, True)
        self.assertEqual(p1.mock_calls, [])
        o.stop_using_connection()
        self.assertEqual(p1.mock_calls, [mock.call.pauseProducing()])
    def OFF_test_push_pull(self):
        # use one IPushProducer and one IPullProducer. They should take turns
        o, m, c = make_outbound()
        o.use_connection(c)
        clear_mock_calls(c)
        sc1, sc2 = object(), object()
        p1, p2 = mock.Mock(name="p1"), mock.Mock(name="p2")
        r1 = Pauser(seqnum=1)
        r2 = NonPauser(seqnum=2)
        # we aren't paused yet, since we haven't sent any data
        o.subchannel_registerProducer(sc1, p1, True)  # push
        o.queue_and_send_record(r1)
        # now we're paused
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls, [mock.call.send_record(r1)])
        self.assertEqual(p1.mock_calls, [mock.call.pauseProducing()])
        self.assertEqual(p2.mock_calls, [])
        clear_mock_calls(p1, p2, c)
        p1.resumeProducing.side_effect = lambda: c.send_record(r1)
        p2.resumeProducing.side_effect = lambda: c.send_record(r2)
        o.subchannel_registerProducer(sc2, p2, False)  # pull: always ready
        # p1 is still first, since p2 was just added (at the end)
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls, [])
        self.assertEqual(p1.mock_calls, [])
        self.assertEqual(p2.mock_calls, [])
        self.assertEqual(list(o._all_producers), [p1, p2])
        clear_mock_calls(p1, p2, c)
        # resume should send r1, which should pause everything
        o.resumeProducing()
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls, [mock.call.send_record(r1),
                                        ])
        self.assertEqual(p1.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(p2.mock_calls, [])
        self.assertEqual(list(o._all_producers), [p2, p1])  # now p2 is next
        clear_mock_calls(p1, p2, c)
        # next should fire p2, then p1
        o.resumeProducing()
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls, [mock.call.send_record(r2),
                                        mock.call.send_record(r1),
                                        ])
        self.assertEqual(p1.mock_calls, [mock.call.resumeProducing(),
                                         mock.call.pauseProducing(),
                                         ])
        self.assertEqual(p2.mock_calls, [mock.call.resumeProducing(),
                                         ])
        self.assertEqual(list(o._all_producers), [p2, p1])  # p2 still at bat
        clear_mock_calls(p1, p2, c)
    def test_pull_producer(self):
        # a single pull producer should write until it is paused, rate-limited
        # by the cooperator (so we'll see back-to-back resumeProducing calls
        # until the Connection is paused, or 10ms have passed, whichever comes
        # first, and if it's stopped by the timer, then the next EventualQueue
        # turn will start it off again)
        o, m, c = make_outbound()
        eq = o._test_eq
        o.use_connection(c)
        clear_mock_calls(c)
        self.assertFalse(o._paused)
        sc1 = mock.Mock()
        p1 = mock.Mock(name="p1")
        alsoProvides(p1, IPullProducer)
        records = [NonPauser(seqnum=1)] * 10
        records.append(Pauser(seqnum=2))
        records.append(Stopper(sc1))
        it = iter(records)
        p1.resumeProducing.side_effect = lambda: c.send_record(next(it))
        o.subchannel_registerProducer(sc1, p1, False)
        eq.flush_sync()  # fast forward into the glorious (paused) future
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls,
                         [mock.call.send_record(r) for r in records[:-1]])
        self.assertEqual(p1.mock_calls,
                         [mock.call.resumeProducing()] * (len(records) - 1))
        clear_mock_calls(c, p1)
        # next resumeProducing should cause it to disconnect
        o.resumeProducing()
        eq.flush_sync()
        self.assertEqual(c.mock_calls, [mock.call.send_record(records[-1])])
        self.assertEqual(p1.mock_calls, [mock.call.resumeProducing()])
        self.assertEqual(len(o._all_producers), 0)
        self.assertFalse(o._paused)
    def test_two_pull_producers(self):
        # we should alternate between them until paused
        p1_records = ([NonPauser(seqnum=i) for i in range(5)] +
                      [Pauser(seqnum=5)] +
                      [NonPauser(seqnum=i) for i in range(6, 10)])
        p2_records = ([NonPauser(seqnum=i) for i in range(10, 19)] +
                      [Pauser(seqnum=19)])
        expected1 = [NonPauser(0), NonPauser(10),
                     NonPauser(1), NonPauser(11),
                     NonPauser(2), NonPauser(12),
                     NonPauser(3), NonPauser(13),
                     NonPauser(4), NonPauser(14),
                     Pauser(5)]
        expected2 = [NonPauser(15),
                     NonPauser(6), NonPauser(16),
                     NonPauser(7), NonPauser(17),
                     NonPauser(8), NonPauser(18),
                     NonPauser(9), Pauser(19),
                     ]
        o, m, c = make_outbound()
        eq = o._test_eq
        o.use_connection(c)
        clear_mock_calls(c)
        self.assertFalse(o._paused)
        sc1 = mock.Mock()
        p1 = mock.Mock(name="p1")
        alsoProvides(p1, IPullProducer)
        it1 = iter(p1_records)
        p1.resumeProducing.side_effect = lambda: c.send_record(next(it1))
        o.subchannel_registerProducer(sc1, p1, False)
        sc2 = mock.Mock()
        p2 = mock.Mock(name="p2")
        alsoProvides(p2, IPullProducer)
        it2 = iter(p2_records)
        p2.resumeProducing.side_effect = lambda: c.send_record(next(it2))
        o.subchannel_registerProducer(sc2, p2, False)
        eq.flush_sync()  # fast forward into the glorious (paused) future
        sends = [mock.call.resumeProducing()]
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls,
                         [mock.call.send_record(r) for r in expected1])
        self.assertEqual(p1.mock_calls, 6 * sends)
        self.assertEqual(p2.mock_calls, 5 * sends)
        clear_mock_calls(c, p1, p2)
        o.resumeProducing()
        eq.flush_sync()
        self.assertTrue(o._paused)
        self.assertEqual(c.mock_calls,
                         [mock.call.send_record(r) for r in expected2])
        self.assertEqual(p1.mock_calls, 4 * sends)
        self.assertEqual(p2.mock_calls, 5 * sends)
        clear_mock_calls(c, p1, p2)
    def test_send_if_connected(self):
        # send_if_connected() silently drops records while disconnected
        o, m, c = make_outbound()
        o.send_if_connected(Ack(1))  # not connected yet
        o.use_connection(c)
        o.send_if_connected(KCM())
        self.assertEqual(c.mock_calls, [mock.call.transport.registerProducer(o, True),
                                        mock.call.send_record(KCM())])
    def test_tolerate_duplicate_pause_resume(self):
        # repeated pauseProducing/resumeProducing calls must be idempotent
        o, m, c = make_outbound()
        self.assertTrue(o._paused)  # no connection
        o.use_connection(c)
        self.assertFalse(o._paused)
        o.pauseProducing()
        self.assertTrue(o._paused)
        o.pauseProducing()
        self.assertTrue(o._paused)
        o.resumeProducing()
        self.assertFalse(o._paused)
        o.resumeProducing()
        self.assertFalse(o._paused)
    def test_stopProducing(self):
        # stopProducing pauses us, and the pause survives the connection loss
        o, m, c = make_outbound()
        o.use_connection(c)
        self.assertFalse(o._paused)
        o.stopProducing()  # connection does this before loss
        self.assertTrue(o._paused)
        o.stop_using_connection()
        self.assertTrue(o._paused)
    def test_resume_error(self):
        # a producer whose resumeProducing raises is unregistered, and the
        # error is logged
        o, m, c = make_outbound()
        o.use_connection(c)
        sc1 = mock.Mock()
        p1 = mock.Mock(name="p1")
        alsoProvides(p1, IPullProducer)
        p1.resumeProducing.side_effect = PretendResumptionError
        o.subchannel_registerProducer(sc1, p1, False)
        o._test_eq.flush_sync()
        # the error is supposed to automatically unregister the producer
        self.assertEqual(list(o._all_producers), [])
        self.flushLoggedErrors(PretendResumptionError)
def make_pushpull(pauses):
    """Build a PullToPush wrapper around a mock IPullProducer.

    *pauses* is cycled over; each resumeProducing() on the mock producer
    consumes one element: a truthy value pauses the wrapper, an Exception
    instance is raised. Returns (producer, unregister, wrapper, eq).
    """
    producer = mock.Mock()
    alsoProvides(producer, IPullProducer)
    unregister = mock.Mock()
    eq = EventualQueue(Clock())
    # one write per Eventual tick: every cooperator slice ends immediately
    one_write = mock.Mock(side_effect=lambda: True)
    coop = Cooperator(terminationPredicateFactory=lambda: one_write,
                      scheduler=eq.eventually)
    pp = PullToPush(producer, unregister, coop)
    pauses_iter = cycle(pauses)
    def deliver(item):
        if isinstance(item, Exception):
            raise item
        if item:
            pp.pauseProducing()
    producer.resumeProducing.side_effect = lambda: deliver(next(pauses_iter))
    return producer, unregister, pp, eq
class PretendResumptionError(Exception):
    """Raised by test producers to simulate a resumeProducing() failure."""
class PretendUnregisterError(Exception):
    """Raised by test unregister callbacks to simulate a failure there."""
class PushPull(unittest.TestCase):
    """Tests for the PullToPush adapter built by make_pushpull()."""
    # test our wrapper utility, which I copied from
    # twisted.internet._producer_helpers since it isn't publicly exposed
    def test_start_unpaused(self):
        p, unr, pp, eq = make_pushpull([True])  # pause on each resumeProducing
        # if it starts unpaused, it gets one write before being halted
        pp.startStreaming(False)
        eq.flush_sync()
        self.assertEqual(p.mock_calls, [mock.call.resumeProducing()] * 1)
        clear_mock_calls(p)
        # now each time we call resumeProducing, we should see one delivered to
        # the underlying IPullProducer
        pp.resumeProducing()
        eq.flush_sync()
        self.assertEqual(p.mock_calls, [mock.call.resumeProducing()] * 1)
        pp.stopStreaming()
        pp.stopStreaming()  # should tolerate this
    def test_start_unpaused_two_writes(self):
        p, unr, pp, eq = make_pushpull([False, True])  # pause every other time
        # it should get two writes, since the first didn't pause
        pp.startStreaming(False)
        eq.flush_sync()
        self.assertEqual(p.mock_calls, [mock.call.resumeProducing()] * 2)
    def test_start_paused(self):
        # starting paused must not touch the wrapped producer at all
        p, unr, pp, eq = make_pushpull([True])  # pause on each resumeProducing
        pp.startStreaming(True)
        eq.flush_sync()
        self.assertEqual(p.mock_calls, [])
        pp.stopStreaming()
    def test_stop(self):
        # stopProducing is forwarded to the wrapped producer
        p, unr, pp, eq = make_pushpull([True])
        pp.startStreaming(True)
        pp.stopProducing()
        eq.flush_sync()
        self.assertEqual(p.mock_calls, [mock.call.stopProducing()])
    def test_error(self):
        # an error inside resumeProducing triggers the unregister callback
        p, unr, pp, eq = make_pushpull([PretendResumptionError()])
        unr.side_effect = lambda: pp.stopStreaming()
        pp.startStreaming(False)
        eq.flush_sync()
        self.assertEqual(unr.mock_calls, [mock.call()])
        self.flushLoggedErrors(PretendResumptionError)
    def test_error_during_unregister(self):
        # a second error raised by the unregister callback is also logged
        p, unr, pp, eq = make_pushpull([PretendResumptionError()])
        unr.side_effect = PretendUnregisterError()
        pp.startStreaming(False)
        eq.flush_sync()
        self.assertEqual(unr.mock_calls, [mock.call()])
        self.flushLoggedErrors(PretendResumptionError, PretendUnregisterError)
# TODO: consider making p1/p2/p3 all elements of a shared Mock, maybe I
# could capture the inter-call ordering that way
| {
"content_hash": "e9b3f3f97689ccf6156fc9cfb56236ae",
"timestamp": "",
"source": "github",
"line_count": 655,
"max_line_length": 87,
"avg_line_length": 40.919083969465646,
"alnum_prop": 0.5864114618312066,
"repo_name": "warner/magic-wormhole",
"id": "296d7851ab972c5c1d1d98cb736e37cac41cf9cb",
"size": "26802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/wormhole/test/dilate/test_outbound.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "838"
},
{
"name": "CSS",
"bytes": "1229"
},
{
"name": "HTML",
"bytes": "478"
},
{
"name": "JavaScript",
"bytes": "38865"
},
{
"name": "Python",
"bytes": "867520"
},
{
"name": "Shell",
"bytes": "713"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function
import six
from django.conf import settings
from django.db import IntegrityError, transaction
from django.utils import timezone
from django.utils.safestring import mark_safe
from django.views.decorators.cache import never_cache
from six.moves.urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
from sentry.models import (
ApiApplication, ApiApplicationStatus, ApiAuthorization, ApiGrant, ApiToken
)
from sentry.web.frontend.base import BaseView
class OAuthAuthorizeView(BaseView):
    """OAuth2 authorization endpoint.

    GET validates the client/redirect/scope parameters and either renders
    the authorization prompt or (when the user already granted every
    requested scope) short-circuits to an approval.  POST consumes the
    pending request stashed in the session and issues an ApiGrant
    (``code`` flow) or an ApiToken (``token``/implicit flow).
    """

    @never_cache
    def dispatch(self, request, *args, **kwargs):
        # run the whole request inside one transaction so authorization
        # rows, grants and tokens are created atomically
        with transaction.atomic():
            return super(OAuthAuthorizeView, self).dispatch(request, *args, **kwargs)

    def redirect_response(self, response_type, redirect_uri, params):
        """Redirect back to the client with *params* encoded in the URL.

        The implicit (``token``) flow puts params in the URL fragment; any
        other flow merges them into the query string. ``None`` values are
        omitted in both cases.
        """
        if response_type == 'token':
            return self.redirect(
                '{}#{}'.format(
                    redirect_uri,
                    urlencode([(k, v) for k, v in six.iteritems(params) if v is not None])
                )
            )

        parts = list(urlparse(redirect_uri))
        query = parse_qsl(parts[4])
        for key, value in six.iteritems(params):
            if value is not None:
                query.append((key, value))
        parts[4] = urlencode(query)
        return self.redirect(urlunparse(parts))

    def error(self, response_type, redirect_uri, name, state=None):
        """Redirect back to the client with an OAuth2 ``error`` code."""
        return self.redirect_response(
            response_type, redirect_uri, {
                'error': name,
                'state': state,
            }
        )

    def get(self, request):
        """Validate the authorization request and render the prompt.

        Auto-approves (skipping the prompt) when the user has previously
        authorized this application with every requested scope, unless
        ``force_prompt`` is passed.
        """
        response_type = request.GET.get('response_type')
        client_id = request.GET.get('client_id')
        redirect_uri = request.GET.get('redirect_uri')
        scopes = request.GET.get('scope')
        state = request.GET.get('state')
        force_prompt = request.GET.get('force_prompt')

        if not client_id:
            return self.respond(
                'sentry/oauth-error.html', {
                    'error': mark_safe('Missing or invalid <em>client_id</em> parameter.'),
                }
            )

        try:
            application = ApiApplication.objects.get(
                client_id=client_id,
                status=ApiApplicationStatus.active,
            )
        except ApiApplication.DoesNotExist:
            return self.respond(
                'sentry/oauth-error.html', {
                    'error': mark_safe('Missing or invalid <em>client_id</em> parameter.'),
                }
            )

        if not redirect_uri:
            redirect_uri = application.get_default_redirect_uri()
        elif not application.is_valid_redirect_uri(redirect_uri):
            return self.respond(
                'sentry/oauth-error.html', {
                    'error': mark_safe('Missing or invalid <em>redirect_uri</em> parameter.'),
                }
            )

        if not application.is_allowed_response_type(response_type):
            return self.error(
                response_type=response_type,
                redirect_uri=redirect_uri,
                name='unsupported_response_type',
                state=state,
            )

        if scopes:
            scopes = scopes.split(' ')
            for scope in scopes:
                if scope not in settings.SENTRY_SCOPES:
                    return self.error(
                        response_type=response_type,
                        redirect_uri=redirect_uri,
                        name='invalid_scope',
                        state=state,
                    )
        else:
            scopes = []

        if not force_prompt:
            try:
                existing_auth = ApiAuthorization.objects.get(
                    user=request.user,
                    application=application,
                )
            except ApiAuthorization.DoesNotExist:
                pass
            else:
                # if we've already approved all of the required scopes
                # we can skip prompting the user
                if all(existing_auth.has_scope(s) for s in scopes):
                    return self.approve(
                        request=request,
                        application=application,
                        scopes=scopes,
                        response_type=response_type,
                        redirect_uri=redirect_uri,
                        state=state,
                    )

        # stash the validated request in the session for the POST handler
        payload = {
            'rt': response_type,
            'cid': client_id,
            'ru': redirect_uri,
            'sc': scopes,
            'st': state,
            'uid': request.user.id,
        }
        request.session['oa2'] = payload

        # map requested scopes to human-readable permission descriptions,
        # collapsing scopes that belong to an already-described scope set
        permissions = []
        if scopes:
            pending_scopes = set(scopes)
            matched_sets = set()
            for scope_set in settings.SENTRY_SCOPE_SETS:
                for scope, description in scope_set:
                    if scope_set in matched_sets and scope in pending_scopes:
                        pending_scopes.remove(scope)
                    elif scope in pending_scopes:
                        permissions.append(description)
                        matched_sets.add(scope_set)
                        pending_scopes.remove(scope)

            if pending_scopes:
                raise NotImplementedError(
                    '{} scopes did not have descriptions'.format(pending_scopes)
                )

        context = {
            'user': request.user,
            'application': application,
            'scopes': scopes,
            'permissions': permissions,
        }
        return self.respond('sentry/oauth-authorize.html', context)

    def post(self, request):
        """Approve or deny the authorization request stored by get()."""
        try:
            payload = request.session['oa2']
        except KeyError:
            return self.respond(
                'sentry/oauth-error.html', {
                    'error':
                    'We were unable to complete your request. Please re-initiate the authorization flow.',
                }
            )

        # the stashed payload is bound to the user who initiated the flow
        if payload['uid'] != request.user.id:
            return self.respond(
                'sentry/oauth-error.html', {
                    'error':
                    'We were unable to complete your request. Please re-initiate the authorization flow.',
                }
            )

        try:
            application = ApiApplication.objects.get(
                client_id=payload['cid'],
                status=ApiApplicationStatus.active,
            )
        except ApiApplication.DoesNotExist:
            return self.respond(
                'sentry/oauth-error.html', {
                    'error': mark_safe('Missing or invalid <em>client_id</em> parameter.'),
                }
            )

        response_type = payload['rt']
        redirect_uri = payload['ru']
        scopes = payload['sc']

        op = request.POST.get('op')
        if op == 'approve':
            return self.approve(
                request=request,
                application=application,
                scopes=scopes,
                response_type=response_type,
                redirect_uri=redirect_uri,
                state=payload['st'],
            )
        elif op == 'deny':
            return self.error(
                response_type=response_type,
                redirect_uri=redirect_uri,
                name='access_denied',
                state=payload['st'],
            )
        else:
            raise NotImplementedError

    def approve(self, request, application, **params):
        """Record the authorization and redirect back to the client.

        Creates (or widens) the user's ApiAuthorization, then issues an
        ApiGrant for the ``code`` flow or an ApiToken for the ``token``
        flow. Expects ``scopes``, ``response_type``, ``redirect_uri`` and
        ``state`` in *params*.
        """
        try:
            with transaction.atomic():
                ApiAuthorization.objects.create(
                    application=application,
                    user=request.user,
                    scope_list=params['scopes'],
                )
        except IntegrityError:
            # an authorization row already exists; merge any newly
            # requested scopes into it
            if params['scopes']:
                auth = ApiAuthorization.objects.get(
                    application=application,
                    user=request.user,
                )
                for scope in params['scopes']:
                    if scope not in auth.scope_list:
                        auth.scope_list.append(scope)
                auth.save()

        if params['response_type'] == 'code':
            grant = ApiGrant.objects.create(
                user=request.user,
                application=application,
                redirect_uri=params['redirect_uri'],
                scope_list=params['scopes'],
            )
            return self.redirect_response(
                params['response_type'],
                params['redirect_uri'],
                {
                    'code': grant.code,
                    'state': params['state'],
                },
            )
        elif params['response_type'] == 'token':
            token = ApiToken.objects.create(
                application=application,
                user=request.user,
                refresh_token=None,
                scope_list=params['scopes'],
            )
            return self.redirect_response(
                params['response_type'],
                params['redirect_uri'],
                {
                    'access_token': token.token,
                    # BUG FIX: remaining lifetime in seconds. The previous
                    # ``timezone.now() - token.expires_at`` was inverted and
                    # always yielded a negative value; RFC 6749 defines
                    # expires_in as the (positive) token lifetime in seconds.
                    'expires_in': int((token.expires_at - timezone.now()).total_seconds()),
                    'expires_at': token.expires_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
                    'token_type': 'bearer',
                    'scope': ' '.join(token.get_scopes()),  # NOQA
                    'state': params['state'],
                },
            )
| {
"content_hash": "d8cc580e2af65ac4a16ad44911f3047a",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 106,
"avg_line_length": 35.31617647058823,
"alnum_prop": 0.4952113262544243,
"repo_name": "jean/sentry",
"id": "c3e5c7c109457323f072365b439edbd759ca2740",
"size": "9606",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/web/frontend/oauth_authorize.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "296112"
},
{
"name": "HTML",
"bytes": "314273"
},
{
"name": "JavaScript",
"bytes": "1293918"
},
{
"name": "Lua",
"bytes": "57158"
},
{
"name": "Makefile",
"bytes": "6632"
},
{
"name": "Python",
"bytes": "24515298"
},
{
"name": "Ruby",
"bytes": "4410"
},
{
"name": "Shell",
"bytes": "2942"
}
],
"symlink_target": ""
} |
from collections import Counter
import os
import sys
import json
import time
import shlex
import random
import hashlib
import argparse
import threading
import paramiko
#
# util
#
def rebuild_uri(uri):
    """Normalize *uri* to the canonical 'user@host:port' form."""
    user, host, port = parse_uri(uri)
    return '%s@%s:%s' % (user, host, port)
def parse_uri(uri):
    """Parse ``[user@]host[:port]`` into a ``(user, host, port)`` tuple.

    Defaults: user ``'root'``, port ``22``.  Uses rpartition/partition so a
    stray extra ``@`` no longer raises ValueError (the last ``@`` wins);
    previously working inputs parse identically.
    """
    if '@' in uri:
        user, _, host = uri.rpartition('@')
    else:
        user = 'root'
        host = uri
    if ':' in host:
        host, _, port_str = host.partition(':')
        port = int(port_str)
    else:
        port = 22
    return user, host, port
def parse_ports(ports_str):
    """Parse a comma-separated port spec into (src, dest) tuples.

    Each item is either ``src:dest`` or a bare ``dest`` (src becomes
    None): ``'8080:80,443'`` -> ``[(8080, 80), (None, 443)]``.
    """
    pairs = []
    for item in ports_str.split(','):
        src, sep, dest = item.partition(':')
        if sep:
            pairs.append((int(src), int(dest)))
        else:
            pairs.append((None, int(item)))
    return pairs
#
# local
#
def load_local_config():
    """Read nspawn.local.conf (JSON) from the cwd; {} when absent."""
    filename = 'nspawn.local.conf'
    if not os.path.exists(filename):
        return {}
    with open(filename, 'r') as handle:
        return json.load(handle)
def save_local_config(config):
    """Write *config* as indented JSON to nspawn.local.conf in the cwd."""
    path = 'nspawn.local.conf'
    with open(path, 'w') as handle:
        json.dump(config, handle, indent=True)
#
# remote
#
def ssh_client(uri):
    """Return a connected paramiko SSHClient for ``[user@]host[:port]``.

    Unknown host keys are auto-accepted and ~/.ssh/known_hosts is loaded.
    BUG FIX: the port parsed from *uri* was previously ignored (the old
    ``# FIXME: unused port``), so ``host:2222`` URIs silently connected to
    the default port; it is now passed to connect(). Since parse_uri()
    defaults to 22, behavior for port-less URIs is unchanged.
    """
    user, host, port = parse_uri(uri)
    client = paramiko.client.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    known_hosts_path = os.path.expanduser('~/.ssh/known_hosts')
    client.load_host_keys(known_hosts_path)
    client.connect(host, port=port, username=user)
    return client
def _ssh_run(client, command, verbose=False, check=False):
    """Run *command* on the remote host, returning (stdout, stderr) bytes.

    When *check* is true, any stderr output raises IOError — this mirrors
    the original behavior of the initial mkdir step; all other steps
    deliberately ignore stderr, as before.
    """
    if verbose:
        print('{!r}'.format(command))
    stdin, stdout, stderr = client.exec_command(command)
    out = stdout.read()
    err = stderr.read()
    stdin.close()
    if check and err:
        raise IOError(err)
    return out, err


def create_container_arch_install(uri, container, start=False, verbose=False):
    """Bootstrap an Arch Linux systemd-nspawn container on the host at *uri*.

    *container* must provide ``id`` (the machine name) and ``ports`` (a
    mapping of host port -> container port for ``--port`` forwards).  When
    *start* is true, the generated ``systemd-nspawn@<id>.service`` unit is
    started and enabled.  All work happens over a single SSH connection;
    only the initial mkdir treats stderr as fatal (matching the previous
    behavior).
    """
    client = ssh_client(uri)
    machine_dir = '/var/lib/machines/{id}'.format(**container)

    # create the machine directory (the only step whose stderr is fatal)
    _ssh_run(client, 'mkdir -p "{}"'.format(machine_dir), verbose, check=True)

    # wait until other pacman instances finish
    while True:
        out, _ = _ssh_run(client, 'ls /var/lib/pacman/db.lck', verbose)
        # BUG FIX: exec_command() yields bytes with a trailing newline, so
        # the old ``out != '/var/lib/pacman/db.lck'`` equality was always
        # true and the wait loop exited immediately even while the lock
        # file existed.
        if b'db.lck' not in out:
            break
        print('Machine already using pacman, waiting 5 seconds...')
        time.sleep(5.0)

    # bootstrap the container with pacstrap
    command = 'pacstrap -c -d "{}" base --ignore linux vim openssh'.format(machine_dir)
    if verbose:
        print('{!r}'.format(command))
    stdin, stdout, stderr = client.exec_command(command)
    if verbose:
        # BUG FIX: stream pacstrap output as it arrives. The old code
        # consumed stdout with read() before the readline() loop, so the
        # verbose streaming printed nothing.
        while True:
            line = stdout.readline(2048)
            if not line:
                break
            print(line, end="")
    else:
        stdout.read()
    stderr.read()
    stdin.close()

    # point the container at a public DNS resolver
    _ssh_run(client,
             'echo "nameserver 8.8.8.8" > {}/etc/resolv.conf'.format(machine_dir),
             verbose)

    # enable networking, name resolution and sshd inside the container by
    # creating the symlinks that ``systemctl enable`` would create
    unit_links = [
        ('/usr/lib/systemd/system/systemd-networkd.service',
         '/etc/systemd/system/multi-user.target.wants/systemd-networkd.service'),
        ('/usr/lib/systemd/system/systemd-networkd.socket',
         '/etc/systemd/system/sockets.target.wants/systemd-networkd.socket'),
        ('/usr/lib/systemd/system/systemd-resolved.service',
         '/etc/systemd/system/sockets.target.wants/systemd-resolved.service'),
        ('/usr/lib/systemd/system/sshd.service',
         '/etc/systemd/system/multi-user.target.wants/sshd.service'),
    ]
    for src, dst in unit_links:
        _ssh_run(client,
                 'ln -s "{}{}" "{}{}"'.format(machine_dir, src, machine_dir, dst),
                 verbose)

    # uncomment the en_US locales in the container's locale.gen
    locale_gen = '{}/etc/locale.gen'.format(machine_dir)
    for old, new in [
        ('#en_US.UTF-8 UTF-8', 'en_US.UTF-8 UTF-8'),
        ('#en_US ISO-8859-1', 'en_US ISO-8859-1'),
    ]:
        _ssh_run(client, 'sed -i \'s/{}/{}/g\' "{}"'.format(old, new, locale_gen),
                 verbose)
    # NOTE(review): localectl/locale-gen run on the *host*, not inside the
    # container rootfs — preserved from the original; confirm intent.
    _ssh_run(client, 'localectl set-locale LANG=en_US.UTF-8', verbose)
    _ssh_run(client, 'locale-gen', verbose)

    # relax sshd so root can log in (with an empty password) over ssh
    sshd_config = '{}/etc/ssh/sshd_config'.format(machine_dir)
    for old, new in [
        ('#PermitRootLogin prohibit-password', 'PermitRootLogin yes'),
        ('#PermitEmptyPasswords no', 'PermitEmptyPasswords yes'),
    ]:
        _ssh_run(client, 'sed -i \'s/{}/{}/g\' "{}"'.format(old, new, sshd_config),
                 verbose)

    # move /etc/securetty aside to allow 'machinectl login ....'
    _ssh_run(client,
             'mv "{0}/etc/securetty" "{0}/etc/securetty.0"'.format(machine_dir),
             verbose)

    # drop a systemd override that boots the container with port forwards
    _ssh_run(client,
             'mkdir -p "/etc/systemd/system/systemd-nspawn@{}.service.d"'.format(
                 container['id']),
             verbose)
    exec_start = '/usr/bin/systemd-nspawn --quiet --keep-unit --boot --network-veth {} --machine={}'.format(
        ' '.join('--port={}:{}'.format(k, v) for k, v in container['ports'].items()),
        container['id'],
    )
    # the '\\@' keeps a literal backslash in the shell command (the shell
    # collapses it to '@'), identical to the original unescaped '\@'
    _ssh_run(client,
             'printf "[Service]\\nExecStart=\\nExecStart={}" >{}'.format(
                 exec_start,
                 '/etc/systemd/system/systemd-nspawn\\@{}.service.d/override.conf'.format(
                     container['id'])),
             verbose)
    _ssh_run(client, 'systemctl daemon-reload', verbose)

    # possibly run container
    if start:
        _ssh_run(client,
                 'systemctl start systemd-nspawn@{}.service'.format(container['id']),
                 verbose)
        _ssh_run(client,
                 'systemctl enable systemd-nspawn@{}.service'.format(container['id']),
                 verbose)

    _ssh_run(client, 'sync', verbose)
    client.close()
def destroy_container_arch(uri, container, verbose=False):
    """Tear down the container's systemd-nspawn service and files on the
    machine at *uri*: stop + disable the unit, remove its drop-in override
    directory and its /var/lib/machines tree, then sync.
    """
    client = ssh_client(uri)

    def _run(cmd):
        # Execute one remote command, draining stdout/stderr before moving on.
        if verbose:
            print('{!r}'.format(cmd))
        stdin, stdout, stderr = client.exec_command(cmd)
        stdout.read()
        stderr.read()
        stdin.close()

    cid = container['id']
    _run('systemctl stop systemd-nspawn@{}.service'.format(cid))
    _run('systemctl disable systemd-nspawn@{}.service'.format(cid))
    _run('rm -r /etc/systemd/system/systemd-nspawn@{}.service.d'.format(cid))
    _run('rm -r /var/lib/machines/{}'.format(cid))
    _run('sync')
    client.close()
def start_container_arch(uri, container, verbose=False):
    """Install a systemd unit override for *container* on the machine at
    *uri*, then start and enable its systemd-nspawn service.

    Fix: the override.conf path previously contained '\\@' -- an invalid
    Python escape sequence (SyntaxWarning on modern Pythons) that only
    worked because the remote shell stripped the backslash; it is now a
    plain '@', producing the identical remote path.
    """
    if verbose:
        print('start_container_arch: {}'.format(uri))
    client = ssh_client(uri)

    def _run(cmd):
        # Execute one remote command, draining stdout/stderr.
        if verbose:
            print('{!r}'.format(cmd))
        stdin, stdout, stderr = client.exec_command(cmd)
        stdout.read()
        stderr.read()
        stdin.close()

    cid = container['id']
    # drop-in directory for the per-container service override
    _run('mkdir -p "/etc/systemd/system/systemd-nspawn@{}.service.d"'.format(cid))
    # override ExecStart with the container's port forwards and auto-restart
    exec_start = (
        '/usr/bin/systemd-nspawn --quiet --keep-unit --boot --network-veth '
        '{} --machine={}'.format(
            ' '.join('--port={}:{}'.format(k, v)
                     for k, v in container['ports'].items()),
            cid,
        )
    )
    _run('printf "[Service]\\nExecStart=\\nExecStart={}\\nRestart=on-failure" >{}'.format(
        exec_start,
        '/etc/systemd/system/systemd-nspawn@{}.service.d/override.conf'.format(cid),
    ))
    # pick up the new override
    _run('systemctl daemon-reload')
    _run('systemctl start systemd-nspawn@{}.service'.format(cid))
    _run('systemctl enable systemd-nspawn@{}.service'.format(cid))
    _run('sync')
    client.close()
def stop_container_arch(uri, container, verbose=False):
    """Stop and disable the container's systemd-nspawn service on the
    machine at *uri*, then sync."""
    if verbose:
        print('stop_container_arch: {}'.format(uri))
    client = ssh_client(uri)

    def _run(cmd):
        # Execute one remote command, draining stdout/stderr.
        if verbose:
            print('{!r}'.format(cmd))
        stdin, stdout, stderr = client.exec_command(cmd)
        stdout.read()
        stderr.read()
        stdin.close()

    service = 'systemd-nspawn@{}.service'.format(container['id'])
    _run('systemctl daemon-reload')
    _run('systemctl stop {}'.format(service))
    _run('systemctl disable {}'.format(service))
    _run('sync')
    client.close()
def restart_container_arch(uri, container, verbose=False):
    """Restart and (re-)enable the container's systemd-nspawn service on
    the machine at *uri*, then sync."""
    if verbose:
        print('restart_container_arch: {}'.format(uri))
    client = ssh_client(uri)

    def _run(cmd):
        # Execute one remote command, draining stdout/stderr.
        if verbose:
            print('{!r}'.format(cmd))
        stdin, stdout, stderr = client.exec_command(cmd)
        stdout.read()
        stderr.read()
        stdin.close()

    service = 'systemd-nspawn@{}.service'.format(container['id'])
    _run('systemctl daemon-reload')
    _run('systemctl restart {}'.format(service))
    _run('systemctl enable {}'.format(service))
    _run('sync')
    client.close()
def load_remote_config(uri, filename='nspawn.remote.conf', verbose=False):
    """Read and JSON-decode *filename* from the remote host at *uri*.

    Raises IOError with the remote stderr text if `cat` reported anything.

    Fix: the SSH client is now closed on every path (previously the
    IOError raise leaked the open connection).
    """
    uri = rebuild_uri(uri)
    if verbose:
        print('load_remote_config: {}'.format(uri))
    client = ssh_client(uri)
    try:
        command = 'cat "{}"'.format(filename)
        stdin, stdout, stderr = client.exec_command(command)
        out = stdout.read().decode()
        err = stderr.read().decode()
        stdin.close()
    finally:
        # always release the SSH connection, even when cat failed
        client.close()
    if err:
        raise IOError(err)
    return json.loads(out)
def save_remote_config(uri, config, filename='nspawn.remote.conf', verbose=False):
    """JSON-encode *config* and write it to *filename* on the remote host.

    Raises IOError with the remote stderr text on failure.

    Fixes: the SSH client is now closed on every path (previously the
    IOError raise leaked it), and stderr is decoded so IOError carries
    str, not bytes.
    """
    uri = rebuild_uri(uri)
    if verbose:
        print('save_remote_config: {}'.format(uri))
    client = ssh_client(uri)
    try:
        # shell-quote the JSON payload so it survives `echo ... > file`
        _config = shlex.quote(json.dumps(config, indent=True))
        command = 'echo {} > "{}"'.format(_config, filename)
        stdin, stdout, stderr = client.exec_command(command)
        err = stderr.read().decode()
        stdin.close()
    finally:
        client.close()
    if err:
        raise IOError(err)
def merge_remote_configs(configs):
    """Merge the machines/projects/containers maps of several remote
    configs into one; entries from later configs override earlier ones."""
    merged = {'machines': {}, 'projects': {}, 'containers': {}}
    for cfg in configs:
        for section in ('machines', 'projects', 'containers'):
            merged[section].update(cfg.get(section, {}))
    return merged
def _load_consensus_config_thread(lock, configs, machine_uri, verbose=False):
    # Worker run by load_consensus_config in its own thread: fetch one
    # machine's remote config and append it to the shared `configs` list.
    # `lock` serializes both the interactive prompt and the list append.
    try:
        config = load_remote_config(machine_uri, verbose=verbose)
    except IOError as e:
        err = 'ERROR: Could not load remote config from {}'.format(
            machine_uri,
        )
        if verbose:
            print('ERROR: {!r}'.format(e), file=sys.stderr)
        print(err, file=sys.stderr)
        with lock:
            # NOTE(review): sys.exit() raised inside a worker thread only
            # terminates *this* thread (SystemExit is caught by the thread
            # machinery); answering 'n' does not abort the whole program
            # as the prompt suggests -- confirm intended behavior.
            answer = input('Skip? [y/n]: ')
            if answer == 'n':
                sys.exit(-1)
        return
    with lock:
        configs.append(config)
def load_consensus_config(uri, filename='nspawn.remote.conf', verbose=False):
    """Load the bootstrap node's config from *uri*, then concurrently
    fetch every known machine's remote config and merge them all into a
    single consensus config (one thread per machine).

    `filename` is currently unused here; load_remote_config applies its
    own default remote filename.

    Fixes: removed a large block of dead commented-out code (the old
    sequential implementation kept as a string literal); the fatal error
    message now goes to stderr; unused exception binding dropped.
    """
    # load remote config of bootstrap/main node
    try:
        config = load_remote_config(uri, verbose=verbose)
    except IOError:
        print('ERROR: Could not load remote config.', file=sys.stderr)
        sys.exit(-1)
    # fetch all machines' configs concurrently
    machines = config.get('machines', {})
    configs = []
    threads = []
    lock = threading.Lock()
    for machine_id, machine in machines.items():
        machine_uri = '{user}@{host}:{port}'.format(**machine)
        t = threading.Thread(
            target=_load_consensus_config_thread,
            args=(lock, configs, machine_uri),
            kwargs={'verbose': verbose},
        )
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
    # later configs win on key collisions
    return merge_remote_configs(configs)
def _save_consensus_config_thread(lock, config, machine_uri, verbose=False):
    # Worker run by save_consensus_config in its own thread: push the
    # merged config to one machine. `lock` serializes the interactive
    # error prompt across threads.
    try:
        save_remote_config(machine_uri, config, verbose=verbose)
    except IOError as e:
        err = 'ERROR: Could not save remote config on {}'.format(
            machine_uri,
        )
        if verbose:
            print('ERROR: {!r}'.format(e), file=sys.stderr)
        print(err, file=sys.stderr)
        with lock:
            # NOTE(review): sys.exit() inside a worker thread only ends
            # this thread, not the whole program -- the 'n' answer does
            # not abort the save on other machines; confirm.
            answer = input('Skip? [y/n]: ')
            if answer == 'n':
                sys.exit(-1)
def save_consensus_config(config, filename='nspawn.remote.conf', verbose=False):
    """Push *config* to every machine it lists, one thread per machine.

    `filename` is currently unused here; save_remote_config applies its
    own default remote filename.

    Fix: removed the dead commented-out sequential implementation that
    was kept as a string literal.
    """
    machines = config.get('machines', {})
    threads = []
    lock = threading.Lock()
    for machine_id, machine in machines.items():
        machine_uri = '{user}@{host}:{port}'.format(**machine)
        t = threading.Thread(
            target=_save_consensus_config_thread,
            args=(lock, config, machine_uri),
            kwargs={'verbose': verbose},
        )
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
def find_available_machine(config, container):
    """Pick the machine that hosts the fewest containers.

    With no containers yet, returns the machine with the alphabetically
    first host. *container* is currently unused (placement does not
    depend on the container's own properties).

    Fix: container records whose `machine_id` no longer exists in
    `machines` are ignored; previously a stale id could become the
    least-common key and crash the `machines[machine_id]` lookup.
    """
    machines = config['machines']
    containers = config['containers']
    if containers:
        # every machine starts at count 1; each hosted container adds 1,
        # then the least-common entry is the least-occupied machine
        counter = Counter()
        counter.update(machines.keys())
        counter.update(
            c['machine_id']
            for c in containers.values()
            if c['machine_id'] in machines  # skip stale references
        )
        machine_id = counter.most_common()[-1][0]
        machine = machines[machine_id]
    else:
        # from sorted list of machines by host pick first
        machine = sorted(machines.values(), key=lambda m: m['host'])[0]
    return machine
def find_available_machine_port(config, machine, dest_port):
    """Choose a free host-side port on *machine* for forwarding to
    *dest_port*: start at dest_port (shifted into the >=10000 range) and
    walk upward past ports already claimed by that machine's containers."""
    # host-side ports already taken by containers on this machine
    used = {
        int(src_port)
        for cont in config['containers'].values()
        if cont['machine_id'] == machine['id']
        for src_port in cont['ports']
    }
    candidate = dest_port if dest_port >= 10000 else dest_port + 10000
    while candidate in used:
        candidate += 1
    return candidate
def find_available_machine_ports(config, machine, requested_ports):
    """Map each requested (src, dest) pair to {int(src): dest}, picking a
    free host-side port via find_available_machine_port when src is falsy."""
    ports_map = {}
    for src, dest in requested_ports:
        chosen = src if src else find_available_machine_port(config, machine, dest)
        ports_map[int(chosen)] = dest
    return ports_map
#
# config
#
def config_config(section, property_, value=None):
    """Get or set one property in the local config.

    With *value* given, store it under [section][property_] and save;
    otherwise print the current value (KeyError if it was never set).

    Fix: the write/read decision now uses `value is not None` so falsy
    strings such as '0' or '' can be stored; previously `if value:` made
    them silently fall through to the read branch.
    """
    config = load_local_config()
    if section not in config:
        config[section] = {}
    if value is not None:
        config[section][property_] = value
        save_local_config(config)
    else:
        print(config[section][property_])
#
# machine
#
def machine_list(remote_uri, verbose=False):
    """Print the id and user@host:port address of every known machine,
    sorted by (host, port). Falls back to the local config's remote
    address when *remote_uri* is falsy."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    remote_config = load_consensus_config(remote_uri, verbose=verbose)
    machines = sorted(
        remote_config.get('machines', {}).values(),
        key=lambda m: (m['host'], m['port']),
    )
    print('{a: <12} {b: <67}'.format(a='MACHINE_ID', b='ADDRESS'))
    for machine in machines:
        address = '{}@{}:{}'.format(
            machine['user'],
            machine['host'],
            machine['port'],
        )
        print('{a: <12} {b: <67}'.format(a=machine['id'], b=address))
def machine_add(remote_uri, uri, verbose=False):
    """Register a new machine (host) in the consensus config and print
    its generated id and address. Exits with status 1 if the host is
    already registered."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    parse_uri(remote_uri)  # kept for its validation of the remote address
    user, host, port = parse_uri(uri)
    config = load_consensus_config(remote_uri, verbose=verbose)
    machines = config['machines']
    # refuse duplicate hosts
    if any(m['host'] == host for m in machines.values()):
        msg = 'Machine with host {} already exists'.format(host)
        print(msg, file=sys.stderr)
        sys.exit(1)
    # random 12-hex-char machine id
    digest = hashlib.sha1()
    digest.update('{}'.format(random.randint(0, 2 ** 128)).encode())
    machine_id = digest.hexdigest()[-12:]
    machines[machine_id] = {
        'id': machine_id,
        'user': user,
        'host': host,
        'port': port,
    }
    save_consensus_config(config, verbose=verbose)
    print('{} {}@{}:{}'.format(machine_id, user, host, port))
def machine_remove(remote_uri, machine_id, verbose=False):
    """Remove a machine from the consensus config after interactive
    confirmation; prints the removed id on success."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    parse_uri(remote_uri)  # kept for its validation of the remote address
    config = load_consensus_config(remote_uri, verbose=verbose)
    machines = config['machines']
    # interactive confirmation before any change
    answer = input('Are you sure you want to remove machine? [y/n]: ')
    if answer != 'y':
        sys.exit(-1)
    if machine_id not in machines:
        msg = 'Machine with id {} does not exists'.format(machine_id)
        print(msg, file=sys.stderr)
        sys.exit(1)
    del machines[machine_id]
    save_consensus_config(config, verbose=verbose)
    print('{}'.format(machine_id))
#
# project
#
def project_list(remote_uri, verbose=False):
    """Print the id and name of every project, sorted by name."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    remote_config = load_consensus_config(remote_uri, verbose=verbose)
    projects = sorted(
        remote_config.get('projects', {}).items(),
        key=lambda item: item[1]['name'],
    )
    print('{a: <12} {b: <67}'.format(a='PROJECT_ID', b='NAME'))
    for project_id, project in projects:
        print('{a: <12} {b: <67}'.format(
            a=project['id'],
            b='{}'.format(project['name']),
        ))
def project_add(remote_uri, project_name, verbose=False):
    """Create a new project with a random 12-hex-char id and print
    'id name'. Exits with status 1 if the name is already taken."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    parse_uri(remote_uri)  # kept for its validation of the remote address
    config = load_consensus_config(remote_uri, verbose=verbose)
    projects = config['projects']
    # refuse duplicate names
    if any(p['name'] == project_name for p in projects.values()):
        msg = 'Project with name {} already exists'.format(project_name)
        print(msg, file=sys.stderr)
        sys.exit(1)
    # random 12-hex-char project id
    digest = hashlib.sha1()
    digest.update('{}'.format(random.randint(0, 2 ** 128)).encode())
    project_id = digest.hexdigest()[-12:]
    projects[project_id] = {
        'id': project_id,
        'name': project_name,
    }
    save_consensus_config(config, verbose=verbose)
    print('{} {}'.format(project_id, project_name))
def project_remove(remote_uri, project_id, verbose=False):
    """Remove a project from the consensus config after interactive
    confirmation; prints the removed id on success."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    parse_uri(remote_uri)  # kept for its validation of the remote address
    config = load_consensus_config(remote_uri, verbose=verbose)
    projects = config['projects']
    # interactive confirmation before any change
    answer = input('Are you sure you want to remove project? [y/n]: ')
    if answer != 'y':
        sys.exit(-1)
    if project_id not in projects:
        msg = 'Project with id {} does not exists'.format(project_id)
        print(msg, file=sys.stderr)
        sys.exit(1)
    del projects[project_id]
    save_consensus_config(config, verbose=verbose)
    print('{}'.format(project_id))
#
# container
#
def container_list(remote_uri, project_id, verbose=False):
    """Print a table of containers whose project id ends with
    *project_id*, sorted by (name, host). STATUS is a placeholder 'x'
    (no live status probing)."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    if not project_id:
        project_id = load_local_config()['main']['project_id']
    remote_config = load_consensus_config(remote_uri, verbose=verbose)
    rows = sorted(
        (
            (c_id, c)
            for c_id, c in remote_config.get('containers', {}).items()
            if c['project_id'].endswith(project_id)
        ),
        key=lambda row: (row[1]['name'], row[1]['host']),
    )
    fmt = '{a: <12} {b: <10} {c: <15} {d: <33} {e: <6}'
    print(fmt.format(
        a='CONTAINER_ID',
        b='NAME',
        c='ADDRESS',
        d='PORTS',
        e='STATUS',
    ))
    for container_id, container in rows:
        # src:dest pairs ordered by destination port
        pairs = sorted(container['ports'].items(), key=lambda p: p[1])
        ports_str = ','.join('{}:{}'.format(k, v) for k, v in pairs)
        print(fmt.format(
            a=container_id,
            b=container['name'][:10],
            c=container['host'],
            d=ports_str[:33],
            e='x',
        ))
def container_add(remote_uri, project_id, name, ports_str, distro, image_id, image, machine_id=None, start=False, verbose=False):
    """Create a container record in the consensus config and bootstrap it.

    remote_uri and project_id fall back to the local config when falsy.
    ports_str is parsed by parse_ports into (src, dest) pairs. Only the
    'arch' distro without image/image_id is implemented; other
    combinations raise NotImplementedError. Prints
    'container_id host src:dest,...' on success.
    """
    if not remote_uri:
        local_config = load_local_config()
        remote_uri = local_config['main']['remote_address']
    if not project_id:
        local_config = load_local_config()
        project_id = local_config['main']['project_id']
    # NOTE(review): the parsed remote_* values are unused; the call is
    # presumably kept for its validation side effect -- confirm.
    remote_user, remote_host, remote_port = parse_uri(remote_uri)
    config = load_consensus_config(remote_uri, verbose=verbose)
    containers = config['containers']
    machines = config['machines']
    # parse ports
    requested_ports = parse_ports(ports_str)
    # check if project id exists
    projects = config['projects']
    if project_id not in projects:
        msg = 'Project with id {} does not exists'.format(project_id)
        print(msg, file=sys.stderr)
        sys.exit(1)
    # generate random 12-hex-char container ID
    m = hashlib.sha1()
    m.update('{}'.format(random.randint(0, 2 ** 128)).encode())
    container_id = m.hexdigest()[-12:]
    # init container record
    container = {
        'id': container_id,
        'project_id': project_id,
        'name': name,
        'distro': distro,
        'image_id': image_id,
        'image': image,
    }
    # find suitable machine where to host container (explicit machine_id
    # overrides least-occupied placement)
    if machine_id:
        machine = machines[machine_id]
    else:
        machine = find_available_machine(config, container)
    container['machine_id'] = machine['id']
    container['host'] = machine['host']
    # find available host-side ports on that machine
    ports = find_available_machine_ports(config, machine, requested_ports)
    container['ports'] = ports
    # create systemd-nspawn container on machine
    machine_uri = '{user}@{host}:{port}'.format(**machine)
    containers[container_id] = container
    # save the not-yet-bootstrapped container first, so its machine and
    # ports are reserved before the slow bootstrap runs
    save_consensus_config(config, verbose=verbose)
    # bootstrap distro
    if container['distro'] == 'arch':
        if container['image_id']:
            raise NotImplementedError
        elif container['image']:
            raise NotImplementedError
        else:
            create_container_arch_install(machine_uri, container, start, verbose)
    else:
        raise NotImplementedError
    # output on success
    print('{} {} {}'.format(
        container_id,
        machine['host'],
        ','.join('{}:{}'.format(k, v) for k, v in ports.items())
    ))
def container_remove(remote_uri, project_id, container_id, force=False, verbose=False):
    """Destroy a container on its machine and drop it from the config.

    With force=True the container is destroyed on *every* machine (useful
    when the config no longer knows which machine hosts it). Asks for
    interactive confirmation before doing anything.
    """
    if not remote_uri:
        local_config = load_local_config()
        remote_uri = local_config['main']['remote_address']
    if not project_id:
        local_config = load_local_config()
        project_id = local_config['main']['project_id']
    # NOTE(review): the parsed remote_* values are unused; the call is
    # presumably kept for its validation side effect -- confirm.
    remote_user, remote_host, remote_port = parse_uri(remote_uri)
    config = load_consensus_config(remote_uri, verbose=verbose)
    containers = config['containers']
    machines = config['machines']
    # make sure user wants to delete container
    answer = input('Are you sure you want to remove container? [y/n]: ')
    if answer != 'y':
        sys.exit(-1)
    if force:
        # try to remove container on each machine; a minimal stub record
        # is enough because destroy_container_arch only reads the ids
        for machine_id, machine in machines.items():
            container = {
                'id': container_id,
                'project_id': project_id,
                'machine_id': machine_id,
            }
            uri = '{user}@{host}:{port}'.format(**machine)
            destroy_container_arch(uri, container, verbose)
        # NOTE(review): raises KeyError when container_id is not in the
        # config -- confirm whether force should tolerate that case.
        del containers[container_id]
        save_consensus_config(config, verbose=verbose)
        print('{}'.format(container_id))
        return
    # check if project id exists
    projects = config['projects']
    if project_id not in projects:
        msg = 'Project with id {} does not exists'.format(project_id)
        print(msg, file=sys.stderr)
        sys.exit(-1)
    project = projects[project_id]
    if container_id not in containers:
        msg = 'Container with id {} does not exists'.format(container_id)
        print(msg, file=sys.stderr)
        sys.exit(-1)
    container = containers[container_id]
    # machine hosting the container
    machine_id = container['machine_id']
    machine = machines[machine_id]
    # destroy the systemd-nspawn container on that machine
    uri = '{user}@{host}:{port}'.format(**machine)
    if container['distro'] == 'arch':
        if container['image_id']:
            raise NotImplementedError
        elif container['image']:
            raise NotImplementedError
        else:
            try:
                destroy_container_arch(uri, container, verbose)
            except Exception as e:
                msg = e
                print(msg, file=sys.stderr)
                # remote cleanup failed: let the user decide whether to
                # still drop the record from the config
                answer = input('There was an error, are you sure you want to remove container? [y/n]: ')
                if answer != 'y':
                    sys.exit(-1)
    else:
        raise NotImplementedError
    del containers[container_id]
    save_consensus_config(config, verbose=verbose)
    print('{}'.format(container_id))
def container_start(remote_uri, project_id, container_id, verbose=False):
    """Start an existing container's systemd-nspawn service on the
    machine that hosts it (arch distro only)."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    if not project_id:
        project_id = load_local_config()['main']['project_id']
    config = load_consensus_config(remote_uri, verbose=verbose)
    container = config['containers'][container_id]
    machine = config['machines'][container['machine_id']]
    machine_uri = '{user}@{host}:{port}'.format(**machine)
    if container['distro'] != 'arch':
        raise NotImplementedError
    start_container_arch(machine_uri, container, verbose=verbose)
def container_stop(remote_uri, project_id, container_id, verbose=False):
    """Stop an existing container's systemd-nspawn service on the machine
    that hosts it (arch distro only)."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    if not project_id:
        project_id = load_local_config()['main']['project_id']
    config = load_consensus_config(remote_uri, verbose=verbose)
    container = config['containers'][container_id]
    machine = config['machines'][container['machine_id']]
    machine_uri = '{user}@{host}:{port}'.format(**machine)
    if container['distro'] != 'arch':
        raise NotImplementedError
    stop_container_arch(machine_uri, container, verbose=verbose)
def container_restart(remote_uri, project_id, container_id, verbose=False):
    """Restart an existing container's systemd-nspawn service on the
    machine that hosts it (arch distro only)."""
    if not remote_uri:
        remote_uri = load_local_config()['main']['remote_address']
    if not project_id:
        project_id = load_local_config()['main']['project_id']
    config = load_consensus_config(remote_uri, verbose=verbose)
    container = config['containers'][container_id]
    machine = config['machines'][container['machine_id']]
    machine_uri = '{user}@{host}:{port}'.format(**machine)
    if container['distro'] != 'arch':
        raise NotImplementedError
    restart_container_arch(machine_uri, container, verbose=verbose)
def container_migrate(remote_uri, project_id, container_id, verbose=False):
    """Migrate a container to another machine.

    Not implemented yet. FIXME: implement.

    Fix: raise immediately instead of first performing remote config
    round-trips whose results were discarded (all the loaded values were
    dead code before the unconditional raise).
    """
    raise NotImplementedError
# CLI entry point: config / machine / project / container subcommands,
# each dispatching to the functions above.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='systemd-nspawn deployment')
    parser_subparsers = parser.add_subparsers(dest='subparser', metavar='main')
    parser.add_argument('--remote-address', '-r', help='Remote address')
    # config
    config_parser = parser_subparsers.add_parser('config')
    config_parser.add_argument('--section', '-s', default='main', help='Section')
    config_parser.add_argument('--property', '-p', help='Propery')
    config_parser.add_argument('--value', '-v', help='Value')
    # machine
    machine_parser = parser_subparsers.add_parser('machine')
    machine_subparsers = machine_parser.add_subparsers(dest='machine_subparser', metavar='machine')
    # machine list
    machine_list_parser = machine_subparsers.add_parser('list', help='List machines')
    # machine add
    machine_add_parser = machine_subparsers.add_parser('add', help='Add machine')
    machine_add_parser.add_argument('--address', '-a', help='[USER="root"@]HOST[:PORT=22]')
    # machine remove
    machine_remove_parser = machine_subparsers.add_parser('remove', help='Remove machine')
    machine_remove_parser.add_argument('--id', '-I', help='Machine ID')
    # project
    project_parser = parser_subparsers.add_parser('project')
    project_subparsers = project_parser.add_subparsers(dest='project_subparser', metavar='project')
    # project list
    project_list_parser = project_subparsers.add_parser('list', help='List projects')
    # project add
    # NOTE(review): --id is accepted here but never passed to project_add,
    # which always generates a random id -- confirm intent.
    project_add_parser = project_subparsers.add_parser('add', help='Add project')
    project_add_parser.add_argument('--id', '-I', default=None, help='Project ID')
    project_add_parser.add_argument('--name', '-n', help='Name')
    # project remove
    project_remove_parser = project_subparsers.add_parser('remove', help='Remove project')
    project_remove_parser.add_argument('--id', '-I', help='Project ID')
    # container
    container_parser = parser_subparsers.add_parser('container')
    container_subparsers = container_parser.add_subparsers(dest='container_subparser', metavar='container')
    container_parser.add_argument('--project-id', '-P', help='Project ID')
    # container list
    container_list_parser = container_subparsers.add_parser('list', help='List of containers at remote host')
    # container add
    container_add_parser = container_subparsers.add_parser('add', help='Add container')
    container_add_parser.add_argument('--name', '-n', help='Human readable name of container')
    container_add_parser.add_argument('--ports', '-p', default='22', help='MACHINE_PORT:CONTAINER_PORT[,M_PORT:C_PORT,...]')
    container_add_parser.add_argument('--distro', '-d', default='arch', help='Linux distribution: arch (UNSUPPORTED but planned: debian, fedora)')
    container_add_parser.add_argument('--image-id', '-I', help='[UNSUPPORTED] Image ID')
    container_add_parser.add_argument('--image', '-i', help='[UNSUPPORTED] Image name')
    container_add_parser.add_argument('--machine-id', '-M', help='Machine ID where to create container')
    container_add_parser.add_argument('--start', '-s', action='store_true', help='Start container')
    container_add_parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
    # container remove
    container_remove_parser = container_subparsers.add_parser('remove', help='Remove container')
    container_remove_parser.add_argument('--id', '-I', help='Container ID')
    container_remove_parser.add_argument('--force', '-f', action='store_true', help='Force')
    container_remove_parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
    # container start
    container_start_parser = container_subparsers.add_parser('start', help='Start container')
    container_start_parser.add_argument('--id', '-I', help='Container ID')
    container_start_parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
    # container stop
    container_stop_parser = container_subparsers.add_parser('stop', help='Stop container')
    container_stop_parser.add_argument('--id', '-I', help='Container ID')
    container_stop_parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
    # container restart
    container_restart_parser = container_subparsers.add_parser('restart', help='Restart container')
    container_restart_parser.add_argument('--id', '-I', help='Container ID')
    container_restart_parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
    # parse args
    args = parser.parse_args()
    # print(args)
    # dispatch to the matching command function
    if args.subparser == 'config':
        config_config(args.section, args.property, args.value)
    elif args.subparser == 'machine':
        if args.machine_subparser == 'list':
            machine_list(args.remote_address)
        elif args.machine_subparser == 'add':
            machine_add(args.remote_address, args.address)
        elif args.machine_subparser == 'remove':
            machine_remove(args.remote_address, args.id)
    elif args.subparser == 'project':
        if args.project_subparser == 'list':
            project_list(args.remote_address)
        elif args.project_subparser == 'add':
            project_add(args.remote_address, args.name)
        elif args.project_subparser == 'remove':
            project_remove(args.remote_address, args.id)
    elif args.subparser == 'container':
        if args.container_subparser == 'list':
            container_list(args.remote_address, args.project_id)
        elif args.container_subparser == 'add':
            container_add(
                args.remote_address,
                args.project_id,
                args.name,
                args.ports,
                args.distro,
                args.image_id,
                args.image,
                args.machine_id,
                args.start,
                args.verbose,
            )
        elif args.container_subparser == 'remove':
            container_remove(
                args.remote_address,
                args.project_id,
                args.id,
                args.force,
                args.verbose,
            )
        elif args.container_subparser == 'start':
            container_start(args.remote_address, args.project_id, args.id, args.verbose)
        elif args.container_subparser == 'stop':
            container_stop(args.remote_address, args.project_id, args.id, args.verbose)
        elif args.container_subparser == 'restart':
            container_restart(args.remote_address, args.project_id, args.id, args.verbose)
        # NOTE(review): no 'migrate' subparser is registered above, so
        # this branch is currently unreachable from the CLI.
        elif args.container_subparser == 'migrate':
            container_migrate(args.remote_address, args.project_id, args.id, args.verbose)
| {
"content_hash": "733c9396055907935b8def6949904dc2",
"timestamp": "",
"source": "github",
"line_count": 1418,
"max_line_length": 146,
"avg_line_length": 31.172073342736248,
"alnum_prop": 0.6061716664404325,
"repo_name": "mtasic85/nspawn",
"id": "c6629e17ec50e377dfd930f673c1a23360812df7",
"size": "44224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nspawn.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "44224"
}
],
"symlink_target": ""
} |
"""This module contains the classes that represent Telegram InlineQueryResultArticle."""
from telegram import InlineQueryResult
class InlineQueryResultArticle(InlineQueryResult):
    """Telegram inline query result of type 'article'.

    Attributes:
        type (:obj:`str`): Always 'article'.
        id (:obj:`str`): Unique identifier for this result, 1-64 Bytes.
        title (:obj:`str`): Title of the result.
        input_message_content (:class:`telegram.InputMessageContent`): Content of the message
            to be sent.
        reply_markup (:class:`telegram.ReplyMarkup`): Optional. Inline keyboard attached to
            the message.
        url (:obj:`str`): Optional. URL of the result.
        hide_url (:obj:`bool`): Optional. Pass True, if you don't want the URL to be shown
            in the message.
        description (:obj:`str`): Optional. Short description of the result.
        thumb_url (:obj:`str`): Optional. Url of the thumbnail for the result.
        thumb_width (:obj:`int`): Optional. Thumbnail width.
        thumb_height (:obj:`int`): Optional. Thumbnail height.

    Args:
        id (:obj:`str`): Unique identifier for this result, 1-64 Bytes.
        title (:obj:`str`): Title of the result.
        input_message_content (:class:`telegram.InputMessageContent`): Content of the message
            to be sent.
        reply_markup (:class:`telegram.ReplyMarkup`, optional): Inline keyboard attached to
            the message.
        url (:obj:`str`, optional): URL of the result.
        hide_url (:obj:`bool`, optional): Pass True, if you don't want the URL to be shown
            in the message.
        description (:obj:`str`, optional): Short description of the result.
        thumb_url (:obj:`str`, optional): Url of the thumbnail for the result.
        thumb_width (:obj:`int`, optional): Thumbnail width.
        thumb_height (:obj:`int`, optional): Thumbnail height.
        **kwargs (:obj:`dict`): Arbitrary keyword arguments.
    """

    def __init__(self,
                 id,
                 title,
                 input_message_content,
                 reply_markup=None,
                 url=None,
                 hide_url=None,
                 description=None,
                 thumb_url=None,
                 thumb_width=None,
                 thumb_height=None,
                 **kwargs):
        # Required fields
        super(InlineQueryResultArticle, self).__init__('article', id)
        self.title = title
        self.input_message_content = input_message_content
        # Optional fields: only set as attributes when truthy, matching
        # the original conditional-assignment behavior.
        optionals = (
            ('reply_markup', reply_markup),
            ('url', url),
            ('hide_url', hide_url),
            ('description', description),
            ('thumb_url', thumb_url),
            ('thumb_width', thumb_width),
            ('thumb_height', thumb_height),
        )
        for attr_name, attr_value in optionals:
            if attr_value:
                setattr(self, attr_name, attr_value)
| {
"content_hash": "a948f195ae22168594a58b5a401e6cc9",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 97,
"avg_line_length": 41.093333333333334,
"alnum_prop": 0.5739779364049319,
"repo_name": "rogerscristo/BotFWD",
"id": "7957bd8c0ba70e8ced584386676b935d3d3c7186",
"size": "3909",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "env/lib/python3.6/site-packages/telegram/inline/inlinequeryresultarticle.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13999"
}
],
"symlink_target": ""
} |
from pprint import pprint
from django.conf import settings
from django.contrib.auth.mixins import PermissionRequiredMixin
from django.shortcuts import render
from django.urls import reverse
from django.views import View
from fo2.connections import db_cursor_so
import lotes.models
import lotes.queries
from lotes.views.lote import dict_conserto_lote
import cd.forms
import cd.views.gerais
from cd.queries.endereco import (
local_de_lote,
lotes_em_local,
)
class LocalizaLote(View):
    """View that locates a production lot ("lote") in the warehouse (CD).

    GET shows the search form (or delegates to POST when the lot comes in
    the URL); POST resolves the lot's storage location.
    """

    def __init__(self):
        self.Form_class = cd.forms.LocalizaLoteForm
        self.template_name = 'cd/localiza_lote.html'
        self.title_name = 'Localiza lote'

    def mount_context(self, request, form):
        """Build the template context for a validated form.

        Returns a dict containing one of: an error message (lot has no
        address), a table of multiple locations, or the single location
        plus every other lot stored on the same pallet.
        """
        cursor = db_cursor_so(request)
        lote = form.cleaned_data['lote']
        context = {'lote': lote}

        # NOTE(review): local_de_lote appears to return a list of dicts with
        # 'endereco' and 'palete' keys -- confirm against cd.queries.endereco.
        local = local_de_lote(cursor, lote)
        if not local:
            # Lot exists but has not been assigned a storage address.
            context.update({
                'erro': f"Lote {lote} não endereçado."})
            return context
        if len(local) > 1:
            # Ambiguous: the lot is registered in more than one place.
            context.update({
                'varios_locais': True,
                'headers': ['Endereço', 'Palete'],
                'fields': ['endereco', 'palete'],
                'data': local,
            })
            return context
        context.update({
            'endereco': local[0]['endereco'],
            'palete': local[0]['palete'],
        })
        # List all lots on the same pallet, each linked back to this view
        # for quick navigation.
        lotes_end = lotes_em_local(cursor, local[0]['palete'])
        for row in lotes_end:
            row['lote|LINK'] = reverse(
                'cd:localiza_lote',
                args=[row['lote']]
            )
        context.update({
            'headers': ['Lote', 'OP'],
            'fields': ['lote', 'op'],
            'data': lotes_end,
        })
        return context

    def get(self, request, *args, **kwargs):
        # A lot passed in the URL behaves like a submitted search.
        if 'lote' in kwargs and kwargs['lote']:
            return self.post(request, *args, **kwargs)
        context = {'titulo': self.title_name}
        form = self.Form_class()
        context['form'] = form
        return render(request, self.template_name, context)

    def post(self, request, *args, **kwargs):
        context = {'titulo': self.title_name}
        # Accept the lot either from the URL kwargs or from the POST body.
        if 'lote' in kwargs and kwargs['lote']:
            form = self.Form_class(kwargs)
        else:
            form = self.Form_class(request.POST)
        if form.is_valid():
            data = self.mount_context(request, form)
            context.update(data)
        context['form'] = form
        return render(request, self.template_name, context)
| {
"content_hash": "2413b4a89334498a26b6195b4aae023f",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 62,
"avg_line_length": 28.164835164835164,
"alnum_prop": 0.5622317596566524,
"repo_name": "anselmobd/fo2",
"id": "97cd14882ec17fe6aa3486e40f8b25d9c2138991",
"size": "2566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/cd/views/localiza_lote.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "160899"
},
{
"name": "HTML",
"bytes": "855985"
},
{
"name": "JavaScript",
"bytes": "203109"
},
{
"name": "PLSQL",
"bytes": "2762"
},
{
"name": "Python",
"bytes": "3228268"
},
{
"name": "Shell",
"bytes": "2161"
}
],
"symlink_target": ""
} |
from util.tipo import tipo
class S_LOGIN_ACCOUNT_INFO(object):
    """Stub handler for the S_LOGIN_ACCOUNT_INFO packet (in the 'unused' set).

    Does no parsing; only dumps the packet name, payload length and a hex
    view of the payload to stdout.
    """

    def __init__(self, tracker, time, direction, opcode, data):
        # split('.')[3] recovers the class name from the dotted module path
        # (game.message.unused.S_LOGIN_ACCOUNT_INFO) -- brittle if the file moves.
        print(str(type(self)).split('.')[3]+'('+str(len(data))+'): '+ str(data.get_array_hex(1))[1:-1])
| {
"content_hash": "28ec88687b4f44ab8ea88bccc4d0119e",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 103,
"avg_line_length": 46.4,
"alnum_prop": 0.6206896551724138,
"repo_name": "jeff-alves/Tera",
"id": "26078eea4242e1e4d177090a17fa0dcc4d568097",
"size": "232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game/message/unused/S_LOGIN_ACCOUNT_INFO.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "113659"
}
],
"symlink_target": ""
} |
#################################################################################
#
# The MIT License (MIT)
#
# Copyright (c) 2015 Dmitry Sovetov
#
# https://github.com/dmsovetov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#################################################################################
from ...Template import Template
# class ObjectList
class ObjectList:
# ctor
def __init__( self, isa = None ):
self.isa = isa
self.items = []
# add
def add( self, item ):
self.items.append( item )
# compile
def compile( self ):
result = ''
for i in self.items:
result += i.compile()
return result
# compileList
def compileList( self ):
result = ''
for i in self.items:
result += Template( ObjectList.Item ).compile( { 'id': i.id, 'name': i.name } )
return result
Item = "\t\t\t\t{id} /* {name} */,\n" | {
"content_hash": "53fa903a70fe7e8b6bbf1f939bdf08a3",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 82,
"avg_line_length": 32.12068965517241,
"alnum_prop": 0.654320987654321,
"repo_name": "dmsovetov/pygling",
"id": "635a41da0dfb38dbd1cae69498c4222c6d969b29",
"size": "1863",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Pygling/Generator/Xcode/PBX/ObjectList.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5335"
},
{
"name": "Python",
"bytes": "231527"
},
{
"name": "Shell",
"bytes": "2859"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from orderable.admin import OrderableAdmin, OrderableStackedInline, \
OrderableTabularInline
from orderable.tests.models import Book, Chapter, Review
class ChapterAdmin(OrderableTabularInline):
model = Chapter
class ReviewAdmin(OrderableStackedInline):
model = Review
class BookAdmin(OrderableAdmin):
list_display = ('title', 'author', 'pages')
model = Book
inlines = [ChapterAdmin, ReviewAdmin]
admin.site.register(Book, BookAdmin) | {
"content_hash": "0973fc578e53f186288446b24e5e01a6",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 69,
"avg_line_length": 29,
"alnum_prop": 0.7728194726166329,
"repo_name": "tkaemming/django-orderable",
"id": "93ca1c773a53619ec155021629d6393caa7a1bb9",
"size": "493",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/orderable/tests/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "4179"
},
{
"name": "Python",
"bytes": "17676"
},
{
"name": "Shell",
"bytes": "130"
}
],
"symlink_target": ""
} |
from component.ha.MiVacuum import MiVacuum
import time
import difflib
import datetime
import logging
_LOGGER = logging.getLogger(__name__)
class HAConfig(object):
    """HAConfig

    Registry pairing home-automation objects with their schedule config,
    tracking when each one should next fire (epoch seconds).
    """

    def __init__(self):
        # All three dicts are keyed by the same registration key.
        self._nextTime = {}  # key -> next fire time (0 = not yet scheduled)
        self._configs = {}   # key -> schedule config dict
        self._objects = {}   # key -> the controlled object (e.g. a MiVacuum)

    def addConfig(self, key, obj, config):
        """Register *obj* under *key* with schedule *config*."""
        self._configs[key] = config
        self._objects[key] = obj
        self._nextTime[key] = 0

    def updateNextTime(self, key, nextTime):
        """Explicitly set the next fire time (epoch seconds) for *key*."""
        self._nextTime[key] = nextTime

    def checkTime(self, key, t):
        """Return True when *key* is due to run at epoch time *t*.

        Side effect: advances the stored next fire time for *key*.
        """
        # Expected config shape (values shown as they appear in config):
        # 'weekday':'1,3,5'
        # 'time': '15:00'
        # 'interval': '60'
        config = self._configs[key]
        now = datetime.datetime.now()
        _LOGGER.debug('{}'.format(config))
        if 'weekday' in config:
            # Only fire on the listed ISO weekdays (1=Monday .. 7=Sunday).
            weeks = config['weekday'].encode('utf-8').split(',')
            week = str(now.isoweekday())
            _LOGGER.debug("week = {}, weeks = {}".format(week, weeks))
            if week not in weeks:
                return False
        _LOGGER.debug("t = {}, self._nextTime = {}".format(t, self._nextTime[key]))
        if t > self._nextTime[key] and self._nextTime[key] != 0:
            # Due: schedule the next run one interval from now.
            # NOTE(review): if 'interval' is stored as a string (as the example
            # above suggests) this addition raises TypeError -- confirm the
            # config loader converts it to a number.
            self._nextTime[key] = config.get('interval', 30*60) + time.time()
            return True
        if 'time' in config:
            # Not yet scheduled (or not due): anchor the next run to the
            # configured wall-clock time, today if still ahead, else tomorrow.
            year_str = now.strftime("%Y-%m-%d")
            time_str = year_str + ' ' + config['time']
            timeStamp = time.mktime(time.strptime(time_str, "%Y-%m-%d %H:%M"))
            _LOGGER.debug("abs time({}) stamp: {}".format(time_str, timeStamp))
            if t < timeStamp:
                self._nextTime[key] = timeStamp
            else:
                self._nextTime[key] = timeStamp + 24*60*60
        return False

    def getObject(self, key):
        """Return the object registered under *key*."""
        return self._objects[key]

    @property
    def keys(self):
        """All registered keys."""
        return self._objects.keys()
from slackbot.bot import tick_task
from slackbot.bot import plugin_init
from slackbot.bot import respond_to
haconfig = HAConfig()
@plugin_init
def init_HA(config):
    """Plugin entry point: configure logging and register the MiVacuum device.

    Args:
        config: plugin configuration mapping; keys read here are 'debug'
            (bool) and 'MiVacuum' (device config passed through verbatim).
    """
    # Bug fix: the global declaration was misspelled 'haconifg', declaring an
    # unused name.  It was harmless only because haconfig is mutated in place,
    # never rebound; corrected so the declaration matches the real module global.
    global haconfig
    debug = config.get('debug', False)
    if debug:
        _LOGGER.setLevel(logging.DEBUG)
        logging.getLogger('MiVacuum').setLevel(logging.DEBUG)
    miVacuum = MiVacuum()
    miConfig = config.get('MiVacuum', '')
    miVacuum.update_config(miConfig)
    haconfig.addConfig('mivacuum', miVacuum, miConfig)
def guess(key, key_list):
    """Return the closest match to *key* from *key_list*, or *key* itself if none."""
    matches = difflib.get_close_matches(key, key_list)
    return matches[0] if matches else key
@tick_task
def HA_worker(message):
    """Periodic tick: run the automation hook of every registered object that is due."""
    global haconfig
    current = time.time()
    for name in haconfig.keys:
        if not haconfig.checkTime(name, current):
            continue
        target = haconfig.getObject(name)
        # Objects without an 'automation' attribute are silently skipped.
        if hasattr(target, 'automation'):
            target.automation()
@respond_to(r'ha (.*)')
def ha_command(message, rest):
    """Handle ``ha <domain> <command>`` chat messages.

    Both the domain and the command are fuzzy-matched via guess(), so small
    typos still resolve to the intended action.
    """
    global haconfig
    argv = message.body.get('text', "").split()
    domain_list = ['mivacuum']
    domain = guess(argv[1], domain_list)
    if domain == 'help':
        message.reply('ha mivacuum <start|stop|locate|status|suspend|resume>')
    if domain in haconfig.keys:
        obj = haconfig.getObject(domain)
        command_list = ['start', 'stop', 'locate', 'status', 'suspend', 'resume']
        command = guess(argv[2], command_list)
        # At most one branch can match after guess(), so elif is equivalent
        # to the original chain of independent ifs.
        if command == 'start':
            obj.start()
        elif command == 'stop':
            obj.stop()
        elif command == 'locate':
            obj.locate()
        elif command == 'status':
            stat = obj.state()
            message.reply("The status of MiVacuum is {}".format(stat))
        elif command == 'suspend':
            obj.suspend(True)
        elif command == 'resume':
            obj.suspend(False)
| {
"content_hash": "090c24348655272a4a092d8d31d04a5b",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 83,
"avg_line_length": 29.761904761904763,
"alnum_prop": 0.5658666666666666,
"repo_name": "pengzhangdev/slackbot",
"id": "4fea9ad40586ae64c77ba6eb593724c5efcb6716",
"size": "3926",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "slackbot/plugins/HA.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "288383"
},
{
"name": "Shell",
"bytes": "3443"
}
],
"symlink_target": ""
} |
from .trie import Trie
from .dictionary import DictInterface, TrieDict
| {
"content_hash": "b7b28bdb8140e9928d287c3db8c8d88e",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 47,
"avg_line_length": 35.5,
"alnum_prop": 0.8309859154929577,
"repo_name": "hankcs/HanLP",
"id": "2725b5824ce86dd6a28c94ec9a14b978a20758a0",
"size": "136",
"binary": false,
"copies": "1",
"ref": "refs/heads/doc-zh",
"path": "plugins/hanlp_trie/hanlp_trie/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "40933"
},
{
"name": "Jupyter Notebook",
"bytes": "566269"
},
{
"name": "Python",
"bytes": "2196905"
}
],
"symlink_target": ""
} |
import numpy as np
from numpy import linalg
import matplotlib.pyplot as plt
from skimage.io import imread
from skimage.exposure import rescale_intensity
from skimage.color import rgb2grey, separate_stains
#Color deconvolution
#Normalized optical density matrix
#see Ruifrok AC, Johnston DA. Quantification of histological staining by color deconvolution.
#        R       G      B
#     X      X      X   Hematoxylin(0)
#     X      X      X   Red(1)
#     X      X      X   DAB(2)
#Hematoxylin(0), Red(1), DAB(2)
# Each row is the measured RGB optical density for one stain.
rgb_from_hrd = np.array([[0.644, 0.710, 0.285],
                         [0.0326, 0.873, 0.487],
                         [0.270, 0.562, 0.781]])

#conv_matrix: inverse maps RGB back into (Hematoxylin, Red, DAB) stain space.
hrd_from_rgb = linalg.inv(rgb_from_hrd)

ihc_rgb = imread(r'TestImage.jpg')

# Rescale signals so that intensity ranges from 0 to 1
# ihc_hrd[:, :, (0,1, or 2 -- is the color channel)]
def stainspace_to_2d_array(ihc_xyz, channel):
    """Extract one stain channel as a 2-D greyscale array rescaled to [0, 1].

    NOTE(review): assumes ihc_xyz is an (H, W, 3) stain-separated image --
    confirm against separate_stains output.
    """
    rescale = rescale_intensity(ihc_xyz[:, :, channel], out_range=(0,1))
    # Stack into a 3-channel array (zeros in the first channel) before
    # converting to grey.
    stain_array = np.dstack((np.zeros_like(rescale), rescale, rescale))
    grey_array = rgb2grey(stain_array)
    return grey_array

#Stain space conversion
ihc_hrd = separate_stains(ihc_rgb, hrd_from_rgb)

Hema_Gray_Array = stainspace_to_2d_array(ihc_hrd, 0)
permred_Gray_Array = stainspace_to_2d_array(ihc_hrd, 1)
DAB_Grey_Array = stainspace_to_2d_array(ihc_hrd, 2)

#Plot images: original plus one greyscale panel per separated stain.
fig, axes = plt.subplots(2, 2, figsize=(12, 11))
ax0, ax1, ax2, ax3 = axes.ravel()

ax0.imshow(ihc_rgb, interpolation='nearest')
ax0.set_title("Original")

ax1.imshow(DAB_Grey_Array, cmap=plt.cm.gray, interpolation='nearest')
ax1.set_title("DAB")

ax2.imshow(permred_Gray_Array, cmap=plt.cm.gray)
ax2.set_title("Permanant Red")

ax3.imshow(Hema_Gray_Array, cmap=plt.cm.gray)
ax3.set_title("Hematoxylin")

for ax in axes.ravel():
    ax.axis('on')

fig.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=None, hspace=None)
plt.show()
| {
"content_hash": "52fae6734d60840b5ff6a2a9ae9a90f0",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 93,
"avg_line_length": 27.970588235294116,
"alnum_prop": 0.6792849631966351,
"repo_name": "griffincalme/MicroDeconvolution",
"id": "89d6d88dc95bc30228010084a3c647d43eb5a730",
"size": "2076",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ScriptsUsedInPaper/ColorDeconvolutionPaper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7937"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "122273"
}
],
"symlink_target": ""
} |
import os
import json
def zenoss_pullLargeDevicesList():
os.system("./zenoss_getDevices.sh")
f = open("tmpfile.txt")
data = f.read()
dct_lst = json.loads(data)
print "Device list lenght:", len(dct_lst)
zenoss_pullLargeDevicesList()
| {
"content_hash": "72acbc1d90f38d1f3205874b2fdef2e4",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 45,
"avg_line_length": 18.357142857142858,
"alnum_prop": 0.669260700389105,
"repo_name": "thomasvincent/utilities",
"id": "a7c4749279566d3812109ca7da0e3a03ff4478e5",
"size": "280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Standalone_Scripts/softlayer_iphostloc_puller/unused_source/zenoss_pagination_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9845"
},
{
"name": "CSS",
"bytes": "19867"
},
{
"name": "Java",
"bytes": "7272"
},
{
"name": "JavaScript",
"bytes": "10581"
},
{
"name": "Python",
"bytes": "2411624"
},
{
"name": "Shell",
"bytes": "7495"
}
],
"symlink_target": ""
} |
import logging
logging.basicConfig(level=logging.DEBUG)
import nengo
import nengo_spinnaker
import numpy as np
def test_constant_node_ensemble_and_value_probe():
    """A constant 0.5 node through a [[-1], [1]] transform should decode
    to roughly (-0.5, +0.5) on the probed ensemble."""
    with nengo.Network("Test Network") as network:
        source = nengo.Node(0.50)
        ens = nengo.Ensemble(100, 2)
        nengo.Connection(source, ens, transform=[[-1.0], [1.0]])
        probe = nengo.Probe(ens, synapse=0.05)

    # Create the simulator and simulate.
    sim = nengo_spinnaker.Simulator(network)

    # Run the simulation for long enough to ensure that the decoded value is
    # within +/-20% of the input value.
    with sim:
        sim.run(2.0)

    # Discard the initial transient (2.5 synapse time constants), then check
    # the decoded value stayed inside the tolerance band.
    start = int(probe.synapse.tau * 2.5 / sim.dt)
    decoded = sim.data[probe]
    assert(np.all(decoded[start:, 1] >= +0.40) and
           np.all(decoded[start:, 1] <= +0.60) and
           np.all(decoded[start:, 0] >= -0.60) and
           np.all(decoded[start:, 0] <= -0.40))


if __name__ == "__main__":
    test_constant_node_ensemble_and_value_probe()
| {
"content_hash": "1b266f339c741f8577be42c8da870f6c",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 76,
"avg_line_length": 29.529411764705884,
"alnum_prop": 0.6145418326693227,
"repo_name": "project-rig/nengo_spinnaker",
"id": "561a41b63372135b89d6f233aec381e10fa2847e",
"size": "1004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "regression-tests/test_constant_node_ensemble_probe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "156127"
},
{
"name": "C++",
"bytes": "4428"
},
{
"name": "Makefile",
"bytes": "3057"
},
{
"name": "Python",
"bytes": "609080"
}
],
"symlink_target": ""
} |
from tornado import stack_context
from tornado.tcpserver import TCPServer
class SOCKS5Handler(object):
    """Per-connection state for one accepted SOCKS5 client."""

    def __init__(self, stream, address, server):
        # Keep references to the connection stream, the peer address and
        # the server that accepted the connection.
        self.stream, self.address, self.server = stream, address, server
class SOCKS5Server(TCPServer):
    """Tornado TCP server that creates one SOCKS5Handler per connection."""

    def __init__(self):
        TCPServer.__init__(self)

    def handle_stream(self, stream, address):
        # The handler stores the stream/address/server itself; no reference
        # is kept here.
        SOCKS5Handler(stream, address, self)
| {
"content_hash": "16805fee326972460fceac2aa8da3e03",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 48,
"avg_line_length": 25,
"alnum_prop": 0.668235294117647,
"repo_name": "decimalbell/socksproxy",
"id": "10f80559dc30e1643b8b745a74a52b3211fa16e3",
"size": "425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "socksproxy/socks5.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "121"
},
{
"name": "Python",
"bytes": "1933"
}
],
"symlink_target": ""
} |
import networkzero as nw0
# Advertise this process on the network under the service name "myservice5".
nw0.advertise("myservice5")
| {
"content_hash": "96072836d4ee4db003adc59a1e4c77fe",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 27,
"avg_line_length": 18.333333333333332,
"alnum_prop": 0.8,
"repo_name": "tjguk/networkzero",
"id": "4df5e2db9a1475722632166253e3b58f3a1395e7",
"size": "55",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/cookbook/simple-discovery/discover_a_service_a.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "81"
},
{
"name": "C",
"bytes": "2621"
},
{
"name": "Python",
"bytes": "101990"
},
{
"name": "Shell",
"bytes": "2626"
}
],
"symlink_target": ""
} |
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
Base = declarative_base()
class Translation(Base):
    """A verb translation row, uniquely keyed by (lang, verb, english)."""

    __tablename__ = 'translations'

    lang = Column(String(10), primary_key=True)      # target language code
    verb = Column(String(25), primary_key=True)      # verb in the target language
    english = Column(String(25), primary_key=True)   # English form of the verb
    description = Column(String)

    def __repr__(self):
        # Bug fix: the original format string never closed the parenthesis,
        # producing malformed reprs like "<Translation(...'>".
        return "<Translation(lang='%s', verb='%s', english='%s')>" % (
            self.lang, self.verb, self.english)
"content_hash": "1a77e8d7da047c967719e4c4bc6316d8",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 104,
"avg_line_length": 33.4,
"alnum_prop": 0.6846307385229541,
"repo_name": "saltares/conjugate",
"id": "7c35a1f73c1055a37291b6d7f45b05e7967323ab",
"size": "501",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "site/mappings/translation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "189083"
},
{
"name": "HTML",
"bytes": "3456"
},
{
"name": "JavaScript",
"bytes": "265220"
},
{
"name": "Python",
"bytes": "24207"
}
],
"symlink_target": ""
} |
import os
from PIL import Image
class ImageResize(object):
    """Resize an image."""

    def __init__(self, path):
        """Initialize attributes.

        Args:
            path (str): Absolute path to the image, including the file name.

        Attributes:
            path (str): Absolute path to the image, including the file name.
            scale (bool): Keep the aspect ratio, default True.
            quality (int): Output image quality, default 70.
            optimize (bool): Optimization flag, default True.
            png2jpeg (bool): Convert png to jpeg, default False.

        Raises:
            FileNotFoundError: if *path* does not exist.
        """
        self.path = path
        self.scale = True
        self.quality = 70
        self.optimize = True
        self.png2jpeg = False

        if not os.path.exists(self.path):
            dirname = os.path.dirname(self.path)
            image_name = os.path.basename(self.path)
            # User-facing Spanish error message; left unchanged on purpose.
            raise FileNotFoundError(
                'La imagen {} no se encuentra en {}'.format(image_name, dirname)
            )

    def resize(self, save_path, width, height, prefix=''):
        """Resize the image and save it to *save_path*.

        With scale=True the aspect ratio is preserved (one dimension is
        derived from the other).  To force the exact width/height, set
        scale = False.  Output quality is controlled by the quality and
        optimize attributes.

        Args:
            save_path (str): Absolute path to save the image, with name and extension.
            width (float|int): Width.
            height (float|int): Height.
            prefix (str): Prefix prepended to the thumbnail file name.
        """
        img = Image.open(self.path)
        if self.scale:
            img_width, img_height = float(img.size[0]), float(img.size[1])
            # Landscape or square: fix the width and derive the height;
            # portrait: fix the height and derive the width.
            if img_width > img_height or img_width == img_height:
                width = width
                height = int(img_height * (width / img_width))
            else:
                height = height
                width = int(img_width * (height / img_height))
        img = img.resize((width, height), Image.ANTIALIAS)
        if self.png2jpeg:
            # Flatten the alpha channel onto a white background so the
            # image can be saved as JPEG.
            img = img.convert('RGBA')
            background = Image.new("RGB", img.size, (255, 255, 255))
            background.paste(img, mask=img.split()[3])
            img = background
        if prefix:
            basename = '{}{}'.format(prefix, os.path.basename(save_path))
            dirname = os.path.dirname(save_path)
            save_path = os.path.join(dirname, basename)
        img.save(save_path, optimize=self.optimize, quality=self.quality)
        img.close()
| {
"content_hash": "68718ecc014ea2a8fefb0cc36613a51b",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 90,
"avg_line_length": 36.214285714285715,
"alnum_prop": 0.5609467455621302,
"repo_name": "snicoper/snicoper.com",
"id": "3ca4b9ee61c375d5fae7999555fb3a63a8f40d64",
"size": "2540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/apps/utils/images.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import string
import random
from serial.tools.list_ports import comports
try:
from glob import glob
except Exception as error:
print 'Install glob'
raise error
ROOT_MESSAGE = """
.-------------------------------.
( You need Super Cow Powers here. )
'-------------------------------'
\ ^__^
\ (oo)\_______
(__)\ )\/\\
||----w |
|| ||
"""
def randomstring(size=20):
    """Return a random string of *size* uppercase letters and digits."""
    alphabet = string.ascii_uppercase + string.digits
    picks = [random.choice(alphabet) for _ in range(size)]
    return ''.join(picks)
def available_ports():
    """Return device paths of serial ports that look available.

    Combines glob matches for common USB/ACM (and mock) devices with the
    ports reported by pyserial's comports(), avoiding duplicates.
    """
    # looking for available ports
    ports_available = glob('/dev/ttyUSB*') + glob('/dev/ttyACM*') + glob('/dev/ttyMOCK*')
    try:
        for port, desc, hwid in sorted(comports()):
            # Bug fix: the original tested `port not in port`, which is always
            # false (a string contains itself), so comports() results were
            # never appended.
            if port not in ports_available:
                ports_available.append(port)
    except Exception:
        # Re-raise with the original traceback (the old `raise error` lost it
        # on Python 2-style re-raise by value).
        raise
    return ports_available
| {
"content_hash": "9ee2583401282cbe5738ddba51ca1c52",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 89,
"avg_line_length": 31.425,
"alnum_prop": 0.4009546539379475,
"repo_name": "start-x/startx-src",
"id": "813e4afae56dd6cedc3fa452ab3dc5952babd567",
"size": "1274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "device/Comunication/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "949192"
},
{
"name": "Arduino",
"bytes": "2548"
},
{
"name": "C",
"bytes": "13681"
},
{
"name": "C++",
"bytes": "26994"
},
{
"name": "JavaScript",
"bytes": "3352"
},
{
"name": "Makefile",
"bytes": "3533"
},
{
"name": "Pascal",
"bytes": "519"
},
{
"name": "Puppet",
"bytes": "1213"
},
{
"name": "Python",
"bytes": "14749"
},
{
"name": "Ruby",
"bytes": "1167"
},
{
"name": "Shell",
"bytes": "187"
}
],
"symlink_target": ""
} |
"""
Form generation utilities for App Engine's ``db.Model`` class.
The goal of ``model_form()`` is to provide a clean, explicit and predictable
way to create forms based on ``db.Model`` classes. No malabarism or black
magic should be necessary to generate a form for models, and to add custom
non-model related fields: ``model_form()`` simply generates a form class
that can be used as it is, or that can be extended directly or even be used
to create other forms using ``model_form()``.
Example usage:
.. code-block:: python
from google.appengine.ext import db
from tipfy.ext.model.form import model_form
# Define an example model and add a record.
class Contact(db.Model):
name = db.StringProperty(required=True)
city = db.StringProperty()
age = db.IntegerProperty(required=True)
is_admin = db.BooleanProperty(default=False)
new_entity = Contact(key_name='test', name='Test Name', age=17)
new_entity.put()
# Generate a form based on the model.
ContactForm = model_form(Contact)
# Get a form populated with entity data.
entity = Contact.get_by_key_name('test')
form = ContactForm(obj=entity)
Properties from the model can be excluded from the generated form, or it can
include just a set of properties. For example:
.. code-block:: python
# Generate a form based on the model, excluding 'city' and 'is_admin'.
ContactForm = model_form(Contact, exclude=('city', 'is_admin'))
# or...
# Generate a form based on the model, only including 'name' and 'age'.
ContactForm = model_form(Contact, only=('name', 'age'))
The form can be generated setting field arguments:
.. code-block:: python
ContactForm = model_form(Contact, only=('name', 'age'), field_args={
'name': {
'label': 'Full name',
'description': 'Your name',
},
'age': {
'label': 'Age',
'validators': [validators.NumberRange(min=14, max=99)],
}
})
The class returned by ``model_form()`` can be used as a base class for forms
mixing non-model fields and/or other model forms. For example:
.. code-block:: python
# Generate a form based on the model.
BaseContactForm = model_form(Contact)
# Generate a form based on other model.
ExtraContactForm = model_form(MyOtherModel)
class ContactForm(BaseContactForm):
# Add an extra, non-model related field.
subscribe_to_news = f.BooleanField()
# Add the other model form as a subform.
extra = f.FormField(ExtraContactForm)
The class returned by ``model_form()`` can also extend an existing form
class:
.. code-block:: python
class BaseContactForm(Form):
# Add an extra, non-model related field.
subscribe_to_news = f.BooleanField()
# Generate a form based on the model.
ContactForm = model_form(Contact, base_class=BaseContactForm)
"""
from wtforms import Form, validators, widgets, fields as f
from wtforms.ext.appengine.fields import GeoPtPropertyField, ReferencePropertyField, StringListPropertyField
def get_TextField(kwargs):
    """
    Returns a ``TextField``, applying the ``db.StringProperty`` length limit
    of 500 bytes.
    """
    length_check = validators.length(max=500)
    kwargs['validators'].append(length_check)
    return f.TextField(**kwargs)
def get_IntegerField(kwargs):
    """
    Returns an ``IntegerField``, applying the ``db.IntegerProperty`` range
    limits (signed 64-bit).
    """
    range_check = validators.NumberRange(min=-0x8000000000000000,
                                         max=0x7fffffffffffffff)
    kwargs['validators'].append(range_check)
    return f.IntegerField(**kwargs)
def convert_StringProperty(model, prop, kwargs):
    """Returns a form field for a ``db.StringProperty``."""
    if not prop.multiline:
        return get_TextField(kwargs)
    # Multiline strings render as a textarea but keep the 500-byte cap.
    kwargs['validators'].append(validators.length(max=500))
    return f.TextAreaField(**kwargs)
def convert_ByteStringProperty(model, prop, kwargs):
    """Returns a form field for a ``db.ByteStringProperty``."""
    # Shares the 500-byte length cap applied by get_TextField.
    return get_TextField(kwargs)
def convert_BooleanProperty(model, prop, kwargs):
    """Returns a form field for a ``db.BooleanProperty``."""
    # Plain boolean field; no extra validators are attached.
    return f.BooleanField(**kwargs)
def convert_IntegerProperty(model, prop, kwargs):
    """Returns a form field for a ``db.IntegerProperty``."""
    # Delegates so the signed 64-bit range validator is applied.
    return get_IntegerField(kwargs)
def convert_FloatProperty(model, prop, kwargs):
    """Returns a form field for a ``db.FloatProperty``."""
    # No extra validators are attached.
    return f.FloatField(**kwargs)
def convert_DateTimeProperty(model, prop, kwargs):
    """Returns a form field for a ``db.DateTimeProperty``."""
    if prop.auto_now or prop.auto_now_add:
        # Auto-populated timestamps are not user-editable; skip the field.
        return None
    return f.DateTimeField(format='%Y-%m-%d %H:%M:%S', **kwargs)
def convert_DateProperty(model, prop, kwargs):
    """Returns a form field for a ``db.DateProperty``."""
    if prop.auto_now or prop.auto_now_add:
        # Auto-populated dates are not user-editable; skip the field.
        return None
    return f.DateField(format='%Y-%m-%d', **kwargs)
def convert_TimeProperty(model, prop, kwargs):
    """Returns a form field for a ``db.TimeProperty``."""
    if prop.auto_now or prop.auto_now_add:
        # Auto-populated times are not user-editable; skip the field.
        return None
    return f.DateTimeField(format='%H:%M:%S', **kwargs)
def convert_ListProperty(model, prop, kwargs):
    """Returns a form field for a ``db.ListProperty``."""
    # Generic lists have no form representation (see the conversion table in
    # the module docstring): always skipped.
    return None
def convert_StringListProperty(model, prop, kwargs):
    """Returns a form field for a ``db.StringListProperty``."""
    # Uses the dedicated wtforms-appengine field for lists of strings.
    return StringListPropertyField(**kwargs)
def convert_ReferenceProperty(model, prop, kwargs):
    """Returns a form field for a ``db.ReferenceProperty``."""
    # Pass the referenced model class through to the field.
    kwargs['reference_class'] = prop.reference_class
    return ReferencePropertyField(**kwargs)
def convert_SelfReferenceProperty(model, prop, kwargs):
    """Returns a form field for a ``db.SelfReferenceProperty``."""
    # Not supported here: no field is generated.
    return None
def convert_UserProperty(model, prop, kwargs):
    """Returns a form field for a ``db.UserProperty``."""
    # Always skipped (see the conversion table in the module docstring).
    return None
def convert_BlobProperty(model, prop, kwargs):
    """Returns a form field for a ``db.BlobProperty``."""
    # Blobs are collected through a file-upload field.
    return f.FileField(**kwargs)
def convert_TextProperty(model, prop, kwargs):
    """Returns a form field for a ``db.TextProperty``."""
    # Unlike StringProperty, no length validator is applied here.
    return f.TextAreaField(**kwargs)
def convert_CategoryProperty(model, prop, kwargs):
    """Returns a form field for a ``db.CategoryProperty``."""
    # Treated as a plain length-capped text field.
    return get_TextField(kwargs)
def convert_LinkProperty(model, prop, kwargs):
    """Returns a form field for a ``db.LinkProperty``."""
    # Adds URL-format validation on top of the length-capped text field.
    kwargs['validators'].append(validators.url())
    return get_TextField(kwargs)
def convert_EmailProperty(model, prop, kwargs):
    """Returns a form field for a ``db.EmailProperty``."""
    # Adds email-format validation on top of the length-capped text field.
    kwargs['validators'].append(validators.email())
    return get_TextField(kwargs)
def convert_GeoPtProperty(model, prop, kwargs):
    """Returns a form field for a ``db.GeoPtProperty``."""
    # Uses the dedicated wtforms-appengine geo point field.
    return GeoPtPropertyField(**kwargs)
def convert_IMProperty(model, prop, kwargs):
    """Returns a form field for a ``db.IMProperty``."""
    # Not supported here: no field is generated.
    return None
def convert_PhoneNumberProperty(model, prop, kwargs):
    """Returns a form field for a ``db.PhoneNumberProperty``."""
    # Treated as a plain length-capped text field; no format validation.
    return get_TextField(kwargs)
def convert_PostalAddressProperty(model, prop, kwargs):
    """Returns a form field for a ``db.PostalAddressProperty``."""
    # Treated as a plain length-capped text field; no format validation.
    return get_TextField(kwargs)
def convert_RatingProperty(model, prop, kwargs):
    """Returns a form field for a ``db.RatingProperty``."""
    # Ratings are constrained to the 0..100 range.
    kwargs['validators'].append(validators.NumberRange(min=0, max=100))
    return f.IntegerField(**kwargs)
class ModelConverter(object):
    """
    Translates the properties of a ``db.Model`` class into WTForms fields.

    The table below lists the default property-to-field conversions:

    +====================+===================+==============+==================+
    | Property subclass  | Field subclass    | datatype     | notes            |
    +====================+===================+==============+==================+
    | StringProperty     | TextField         | unicode      | TextArea         |
    |                    |                   |              | if multiline     |
    +--------------------+-------------------+--------------+------------------+
    | ByteStringProperty | TextField         | str          |                  |
    +--------------------+-------------------+--------------+------------------+
    | BooleanProperty    | BooleanField      | bool         |                  |
    +--------------------+-------------------+--------------+------------------+
    | IntegerProperty    | IntegerField      | int or long  |                  |
    +--------------------+-------------------+--------------+------------------+
    | FloatProperty      | TextField         | float        |                  |
    +--------------------+-------------------+--------------+------------------+
    | DateTimeProperty   | DateTimeField     | datetime     | skipped if       |
    |                    |                   |              | auto_now[_add]   |
    +--------------------+-------------------+--------------+------------------+
    | DateProperty       | DateField         | date         | skipped if       |
    |                    |                   |              | auto_now[_add]   |
    +--------------------+-------------------+--------------+------------------+
    | TimeProperty       | DateTimeField     | time         | skipped if       |
    |                    |                   |              | auto_now[_add]   |
    +--------------------+-------------------+--------------+------------------+
    | ListProperty       | None              | list         | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | StringListProperty | TextAreaField     | list of str  |                  |
    +--------------------+-------------------+--------------+------------------+
    | ReferenceProperty  | ReferencePropertyF| db.Model     |                  |
    +--------------------+-------------------+--------------+------------------+
    | SelfReferenceP.    | ReferencePropertyF| db.Model     |                  |
    +--------------------+-------------------+--------------+------------------+
    | UserProperty       | None              | users.User   | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | BlobProperty       | FileField         | str          |                  |
    +--------------------+-------------------+--------------+------------------+
    | TextProperty       | TextAreaField     | unicode      |                  |
    +--------------------+-------------------+--------------+------------------+
    | CategoryProperty   | TextField         | unicode      |                  |
    +--------------------+-------------------+--------------+------------------+
    | LinkProperty       | TextField         | unicode      |                  |
    +--------------------+-------------------+--------------+------------------+
    | EmailProperty      | TextField         | unicode      |                  |
    +--------------------+-------------------+--------------+------------------+
    | GeoPtProperty      | TextField         | db.GeoPt     |                  |
    +--------------------+-------------------+--------------+------------------+
    | IMProperty         | None              | db.IM        | always skipped   |
    +--------------------+-------------------+--------------+------------------+
    | PhoneNumberProperty| TextField         | unicode      |                  |
    +--------------------+-------------------+--------------+------------------+
    | PostalAddressP.    | TextField         | unicode      |                  |
    +--------------------+-------------------+--------------+------------------+
    | RatingProperty     | IntegerField      | int or long  |                  |
    +--------------------+-------------------+--------------+------------------+
    | _ReverseReferenceP.| None              | <iterable>   | always skipped   |
    +====================+===================+==============+==================+
    """
    # Registry of converter callables keyed by the property class name.
    default_converters = {
        'StringProperty':        convert_StringProperty,
        'ByteStringProperty':    convert_ByteStringProperty,
        'BooleanProperty':       convert_BooleanProperty,
        'IntegerProperty':       convert_IntegerProperty,
        'FloatProperty':         convert_FloatProperty,
        'DateTimeProperty':      convert_DateTimeProperty,
        'DateProperty':          convert_DateProperty,
        'TimeProperty':          convert_TimeProperty,
        'ListProperty':          convert_ListProperty,
        'StringListProperty':    convert_StringListProperty,
        'ReferenceProperty':     convert_ReferenceProperty,
        'SelfReferenceProperty': convert_SelfReferenceProperty,
        'UserProperty':          convert_UserProperty,
        'BlobProperty':          convert_BlobProperty,
        'TextProperty':          convert_TextProperty,
        'CategoryProperty':      convert_CategoryProperty,
        'LinkProperty':          convert_LinkProperty,
        'EmailProperty':         convert_EmailProperty,
        'GeoPtProperty':         convert_GeoPtProperty,
        'IMProperty':            convert_IMProperty,
        'PhoneNumberProperty':   convert_PhoneNumberProperty,
        'PostalAddressProperty': convert_PostalAddressProperty,
        'RatingProperty':        convert_RatingProperty,
    }

    def __init__(self, converters=None):
        """
        Constructs the converter, setting the converter callables.

        :param converters:
            A dictionary of converter callables for each property type. The
            callable must accept the arguments (model, prop, kwargs).
        """
        self.converters = converters if converters else self.default_converters

    def convert(self, model, prop, field_args):
        """
        Returns a form field for a single model property.

        :param model:
            The ``db.Model`` class that contains the property.
        :param prop:
            The model property: a ``db.Property`` instance.
        :param field_args:
            Optional keyword arguments to construct the field.
        """
        kwargs = {
            'label': prop.name.replace('_', ' ').title(),
            'default': prop.default_value(),
            'validators': [],
        }
        kwargs.update(field_args or {})
        if prop.required:
            kwargs['validators'].append(validators.required())
        if prop.choices:
            # A fixed choice set always becomes a select field.
            kwargs['choices'] = [(choice, choice) for choice in prop.choices]
            return f.SelectField(**kwargs)
        converter = self.converters.get(type(prop).__name__)
        if converter is None:
            return None
        return converter(model, prop, kwargs)
def model_fields(model, only=None, exclude=None, field_args=None,
                 converter=None):
    """
    Extracts and returns a dictionary of form fields for a given
    ``db.Model`` class.

    :param model:
        The ``db.Model`` class to extract fields from.
    :param only:
        An optional iterable with the property names that should be included in
        the form. Only these properties will have fields.
    :param exclude:
        An optional iterable with the property names that should be excluded
        from the form. All other properties will have fields.
    :param field_args:
        An optional dictionary of field names mapping to a keyword arguments
        used to construct each field object.
    :param converter:
        A converter to generate the fields based on the model properties. If
        not set, ``ModelConverter`` is used.
    """
    converter = converter or ModelConverter()
    field_args = field_args or {}
    props = model.properties()
    # Restrict the property names per the only/exclude filters.
    names = props.keys()
    if only:
        names = [name for name in only if name in names]
    elif exclude:
        names = [name for name in names if name not in exclude]
    # Build the field for each surviving property; converters may return
    # None for unsupported property types, which are silently dropped.
    fields = {}
    for name in names:
        field = converter.convert(model, props[name], field_args.get(name))
        if field is not None:
            fields[name] = field
    return fields
def model_form(model, base_class=Form, only=None, exclude=None, field_args=None,
               converter=None):
    """
    Creates and returns a dynamic ``wtforms.Form`` class for a given
    ``db.Model`` class. The form class can be used as it is or serve as a base
    for extended form classes, which can then mix non-model related fields,
    subforms with other model forms, among other possibilities.

    :param model:
        The ``db.Model`` class to generate a form for.
    :param base_class:
        Base form class to extend from. Must be a ``wtforms.Form`` subclass.
    :param only:
        An optional iterable with the property names that should be included in
        the form. Only these properties will have fields.
    :param exclude:
        An optional iterable with the property names that should be excluded
        from the form. All other properties will have fields.
    :param field_args:
        An optional dictionary of field names mapping to keyword arguments
        used to construct each field object.
    :param converter:
        A converter to generate the fields based on the model properties. If
        not set, ``ModelConverter`` is used.
    """
    fields = model_fields(model, only, exclude, field_args, converter)
    # Build the form class dynamically; the fields become class attributes.
    form_name = model.kind() + 'Form'
    return type(form_name, (base_class,), fields)
| {
"content_hash": "2929676a597113ddf202fa70fd34c3b6",
"timestamp": "",
"source": "github",
"line_count": 452,
"max_line_length": 108,
"avg_line_length": 39.77654867256637,
"alnum_prop": 0.5481951165248345,
"repo_name": "webitup/python3-wforms",
"id": "8ef690cfa2f7b249b29dc8669ac40e2851f1c961",
"size": "17979",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "wtforms/ext/appengine/db.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "187546"
},
{
"name": "Shell",
"bytes": "2985"
}
],
"symlink_target": ""
} |
import json
from sovrin_client.agent.walleted_agent import WalletedAgent
from sovrin_client.test.agent.mock_backend_system import MockBackendSystem
import anoncreds.protocol.types
from sovrin_client.client.wallet.wallet import Wallet
from sovrin_client.test.constants import primes
from sovrin_common.identity import Identity
from sovrin_common.constants import TRUST_ANCHOR
from sovrin_node.pool.local_pool import create_local_pool
# noinspection PyUnresolvedReferences
from sovrin_node.test.conftest import tdir, conf, nodeSet, tconf, \
updatedPoolTxnData, updatedDomainTxnFile, txnPoolNodeSet, poolTxnData, \
dirName, tdirWithDomainTxns, tdirWithPoolTxns, \
domainTxnOrderedFields, genesisTxns, stewardWallet, poolTxnStewardData, \
poolTxnStewardNames, trusteeWallet, trusteeData, poolTxnTrusteeNames, \
patchPluginManager, txnPoolNodesLooper, tdirWithPoolTxns, \
poolTxnNodeNames, allPluginsPath, tdirWithNodeKeepInited, testNodeClass, \
genesisTxns
BANK_SEED = b'BANK0000000000000000000000000000'
class RefAgent(WalletedAgent):
    def create_connection_request(self, internal_id, name):
        """Build, record, sign and JSON-encode a connection request for *name*."""
        nonce = str(self.verifier.generateNonce())
        # endpoint = self.endpoint.host_address()
        # TODO: this should be done by endpoint
        endpoint = "127.0.0.1" + ":" + str(self.endpoint.ha[1])
        request = {
            'connection-request': {
                'name': self.name,
                'identifier': self._wallet.defaultId,
                'nonce': nonce,
                'endpoint': endpoint,
                'verkey': self._wallet.getVerkey(self.wallet.defaultId)
            },
            'sig': None
        }
        # Remember which internal record this nonce belongs to.
        self._invites[nonce] = (internal_id, name)
        request['sig'] = self.wallet.signMsg(request, self.wallet.defaultId)
        return json.dumps(request)
def test_end_to_end(tconf):
    """End-to-end agent scenario on a local pool.

    Boots a local ledger pool, registers a bank agent as trust anchor,
    publishes a schema plus issuer keys, then walks an Alice agent through
    connecting to the bank and receiving a claim.
    """
    base_dir = tconf.baseDir
    print('*' * 20)
    print(base_dir)
    print('*' * 20)
    # Spin up a throwaway local pool for the duration of the test.
    with create_local_pool(base_dir) as network:
        print(network.genesis_transactions)
        network.runFor(5)
        client = network.create_client(5555)
        # The bank acts as the claim issuer; it listens on a fixed endpoint.
        bank_wallet = Wallet()
        bank_agent = RefAgent(name="bank",
                              basedirpath=base_dir,
                              client=client,
                              wallet=bank_wallet,
                              port=8787,
                              endpointArgs={'seed': BANK_SEED,
                                            'onlyListener': True})
        network.add(bank_agent)
        bank_id, bank_verkey = bank_agent.new_identifier(seed=BANK_SEED)
        print(bank_id)
        print(bank_verkey)
        # A steward promotes the bank to TRUST_ANCHOR so it may issue claims.
        s1_agent = network.steward_agent()
        s1_agent.publish_trust_anchor(Identity(identifier=bank_id,
                                               verkey=bank_verkey,
                                               role=TRUST_ANCHOR))
        network.runFor(5)
        # this allows calling asynchronous functions from a synchronous context
        run_async = network.run
        # Attribute definition describing the claim payload the bank issues.
        bank_attribute_definition = anoncreds.protocol.types.AttribDef(
            'basic', [
                anoncreds.protocol.types.AttribType(
                    'title', encode=True), anoncreds.protocol.types.AttribType(
                'first_name', encode=True), anoncreds.protocol.types.AttribType(
                'last_name', encode=True), anoncreds.protocol.types.AttribType(
                'address_1', encode=True), anoncreds.protocol.types.AttribType(
                'address_2', encode=True), anoncreds.protocol.types.AttribType(
                'address_3', encode=True), anoncreds.protocol.types.AttribType(
                'postcode_zip', encode=True), anoncreds.protocol.types.AttribType(
                'date_of_birth', encode=True), anoncreds.protocol.types.AttribType(
                'account_type', encode=True), anoncreds.protocol.types.AttribType(
                'year_opened', encode=True), anoncreds.protocol.types.AttribType(
                'account_status', encode=True)])
        bank_agent.add_attribute_definition(bank_attribute_definition)
        # The mock backend plays the bank's customer database.
        backend = MockBackendSystem(bank_attribute_definition)
        alices_id_in_banks_system = 1999891343
        bobs_id_in_banks_system = 2911891343
        backend.add_record(alices_id_in_banks_system,
                           title='Mrs.',
                           first_name='Alicia',
                           last_name='Garcia',
                           address_1='H-301',
                           address_2='Street 1',
                           address_3='UK',
                           postcode_zip='G61 3NR',
                           date_of_birth='December 28, 1990',
                           account_type='savings',
                           year_opened='2000',
                           account_status='active')
        backend.add_record(bobs_id_in_banks_system,
                           title='Mrs.',
                           first_name='Jay',
                           last_name='Raj',
                           address_1='222',
                           address_2='Baker Street',
                           address_3='UK',
                           postcode_zip='G61 3NR',
                           date_of_birth='January 15, 1980',
                           account_type='savings',
                           year_opened='1999',
                           account_status='active')
        bank_agent.set_issuer_backend(backend)
        # Publish the schema and issuer keys to the ledger.
        schema_id = run_async(
            bank_agent.publish_schema('basic',
                                      schema_name='Bank Membership',
                                      schema_version='1.0'))
        # NOTE: do NOT use known primes in a non-test environment
        issuer_pub_key, revocation_pub_key = run_async(
            bank_agent.publish_issuer_keys(schema_id,
                                           p_prime=primes["prime1"][0],
                                           q_prime=primes["prime1"][1]))
        print(issuer_pub_key)
        print(revocation_pub_key)
        # TODO: Not implemented yet
        # accPK = run_async(bank_agent.publish_revocation_registry(
        #     schema_id=schema_id))
        # print(accPK)
        # Make the claim available to both customers.
        run_async(bank_agent._set_available_claim_by_internal_id(
            alices_id_in_banks_system, schema_id))
        run_async(bank_agent._set_available_claim_by_internal_id(
            bobs_id_in_banks_system, schema_id))
        # Alice is the claim holder; she connects to the bank and asks for it.
        alice_wallet = Wallet()
        alice_agent = RefAgent(name="Alice",
                               basedirpath=base_dir,
                               client=client,
                               wallet=alice_wallet,
                               port=8786)
        network.add(alice_agent)
        network.runFor(1)
        request = bank_agent.create_connection_request(
            alices_id_in_banks_system, "Alice")
        # Transfer of this request happens out-of-band (website, QR code, etc)
        alices_link_to_bank = alice_agent.load_request_str(request)
        # notice the link is not accepted
        print(alices_link_to_bank)
        alice_agent.accept_request(alices_link_to_bank)
        network.runFor(10)
        # notice that the link is accepted
        print(alices_link_to_bank)
        banks_link_to_alice = bank_agent.get_link_by_name(
            alices_id_in_banks_system)
        # note the available claims are now there
        print(banks_link_to_alice)
        claim_to_request = alices_link_to_bank.find_available_claim(
            name='Bank Membership')
        print(claim_to_request)
        # Request the claim over the established link and let it propagate.
        run_async(alice_agent.send_claim(alices_link_to_bank,
                                         claim_to_request))
        network.runFor(5)
        claim = run_async(alice_agent.get_claim(schema_id))
        print(claim)
        # ########
        # # PROOF
        # ########
        # bank_agent._proofRequestsSchema['Address'] = {
        #     "name": "Address",
        #     "version": "0.2",
        #     "attributes": {
        #         "address_1": "string",
        #         "address_2": "string",
        #         "address_3": "string",
        #         "state": "string",
        #         "postcode_zip": "string",
        #     },
        #     "verifiableAttributes": ["postcode_zip"]
        # }
        #
        # bank_agent.sendProofReq(banks_link_to_alice, 'Address')
        #
        # network.runFor(3)
        # print()
| {
"content_hash": "005b88840d2d09ca4322531e7da8b122",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 113,
"avg_line_length": 37.18376068376068,
"alnum_prop": 0.5326973911044708,
"repo_name": "keenondrums/sovrin-node",
"id": "9bb5c2357f0adad05b5d5037f676eb6b80ee9c6b",
"size": "8701",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sovrin_client/test/agent/test_general_use_case.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3195"
},
{
"name": "Python",
"bytes": "1088655"
},
{
"name": "Rust",
"bytes": "25532"
},
{
"name": "Shell",
"bytes": "15720"
}
],
"symlink_target": ""
} |
"""Connect to a MySensors gateway via pymysensors API."""
import asyncio
import logging
from typing import Callable, Dict, List, Optional, Tuple, Type, Union
from mysensors import BaseAsyncGateway
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.mqtt import valid_publish_topic, valid_subscribe_topic
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_OPTIMISTIC
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from .const import (
ATTR_DEVICES,
CONF_BAUD_RATE,
CONF_DEVICE,
CONF_GATEWAYS,
CONF_NODES,
CONF_PERSISTENCE,
CONF_PERSISTENCE_FILE,
CONF_RETAIN,
CONF_TCP_PORT,
CONF_TOPIC_IN_PREFIX,
CONF_TOPIC_OUT_PREFIX,
CONF_VERSION,
DOMAIN,
MYSENSORS_GATEWAYS,
MYSENSORS_ON_UNLOAD,
SUPPORTED_PLATFORMS_WITH_ENTRY_SUPPORT,
DevId,
GatewayId,
SensorType,
)
from .device import MySensorsDevice, MySensorsEntity, get_mysensors_devices
from .gateway import finish_setup, get_mysensors_gateway, gw_stop, setup_gateway
_LOGGER = logging.getLogger(__name__)
CONF_DEBUG = "debug"
CONF_NODE_NAME = "name"
DEFAULT_BAUD_RATE = 115200
DEFAULT_TCP_PORT = 5003
DEFAULT_VERSION = "1.4"
def has_all_unique_files(value):
    """Validate that all persistence files are unique and set if any is set."""
    names = [gateway.get(CONF_PERSISTENCE_FILE) for gateway in value]
    any_set = any(name is not None for name in names)
    # Either every gateway names a file, or none of them do.
    if None in names and any_set:
        raise vol.Invalid(
            "persistence file name of all devices must be set if any is set"
        )
    if any_set:
        # All names present: enforce uniqueness across gateways.
        vol.Schema(vol.Unique())(names)
    return value
def is_persistence_file(value):
    """Validate that persistence file path ends in either .pickle or .json."""
    if not value.endswith((".json", ".pickle")):
        raise vol.Invalid(f"{value} does not end in either `.json` or `.pickle`")
    return value
def deprecated(key):
    """Mark key as deprecated in configuration."""

    def validator(config):
        """Warn about and strip *key* from config when present."""
        if key in config:
            _LOGGER.warning(
                "%s option for %s is deprecated. Please remove %s from your "
                "configuration file",
                key,
                DOMAIN,
                key,
            )
            config.pop(key)
        return config

    return validator
# Maps a numeric node id to its friendly name.
NODE_SCHEMA = vol.Schema({cv.positive_int: {vol.Required(CONF_NODE_NAME): cv.string}})
# Schema for a single gateway entry in YAML; CONF_NODES is accepted but
# deprecated (node renaming is handled elsewhere).
GATEWAY_SCHEMA = vol.Schema(
    vol.All(
        deprecated(CONF_NODES),
        {
            vol.Required(CONF_DEVICE): cv.string,
            # Persistence file must end in .json or .pickle when given.
            vol.Optional(CONF_PERSISTENCE_FILE): vol.All(
                cv.string, is_persistence_file
            ),
            vol.Optional(CONF_BAUD_RATE, default=DEFAULT_BAUD_RATE): cv.positive_int,
            vol.Optional(CONF_TCP_PORT, default=DEFAULT_TCP_PORT): cv.port,
            vol.Optional(CONF_TOPIC_IN_PREFIX): valid_subscribe_topic,
            vol.Optional(CONF_TOPIC_OUT_PREFIX): valid_publish_topic,
            vol.Optional(CONF_NODES, default={}): NODE_SCHEMA,
        },
    )
)
# Top-level YAML schema: a list of gateway entries plus several options that
# are stripped with a deprecation warning (debug/optimistic/persistence).
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            vol.All(
                deprecated(CONF_DEBUG),
                deprecated(CONF_OPTIMISTIC),
                deprecated(CONF_PERSISTENCE),
                {
                    vol.Required(CONF_GATEWAYS): vol.All(
                        cv.ensure_list, has_all_unique_files, [GATEWAY_SCHEMA]
                    ),
                    vol.Optional(CONF_RETAIN, default=True): cv.boolean,
                    vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): cv.string,
                    vol.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
                    vol.Optional(CONF_PERSISTENCE, default=True): cv.boolean,
                },
            )
        )
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Set up the MySensors component.

    Imports any YAML-defined gateways into config entries; a no-op when the
    YAML section is absent or entries already exist.
    """
    if DOMAIN not in config or bool(hass.config_entries.async_entries(DOMAIN)):
        return True
    config = config[DOMAIN]
    user_inputs = []
    for gw in config[CONF_GATEWAYS]:
        user_input = {
            CONF_DEVICE: gw[CONF_DEVICE],
            CONF_BAUD_RATE: gw[CONF_BAUD_RATE],
            CONF_TCP_PORT: gw[CONF_TCP_PORT],
            CONF_TOPIC_OUT_PREFIX: gw.get(CONF_TOPIC_OUT_PREFIX, ""),
            CONF_TOPIC_IN_PREFIX: gw.get(CONF_TOPIC_IN_PREFIX, ""),
            CONF_RETAIN: config[CONF_RETAIN],
            CONF_VERSION: config[CONF_VERSION],
            CONF_PERSISTENCE_FILE: gw.get(CONF_PERSISTENCE_FILE)
            # nodes config ignored at this time. renaming nodes can now be done from the frontend.
        }
        # Drop unset values so the import flow only sees provided options.
        user_inputs.append({k: v for k, v in user_input.items() if v is not None})
    # there is an actual configuration in configuration.yaml, so we have to process it
    for user_input in user_inputs:
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={"source": config_entries.SOURCE_IMPORT},
                data=user_input,
            )
        )
    return True
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
    """Set up an instance of the MySensors integration.

    Every instance has a connection to exactly one Gateway.
    """
    gateway = await setup_gateway(hass, entry)
    if not gateway:
        _LOGGER.error("Gateway setup failed for %s", entry.data)
        return False
    domain_data = hass.data.setdefault(DOMAIN, {})
    domain_data.setdefault(MYSENSORS_GATEWAYS, {})[entry.entry_id] = gateway

    async def finish():
        # Forward the entry to every supported platform before finalizing.
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_setup(entry, platform)
                for platform in SUPPORTED_PLATFORMS_WITH_ENTRY_SUPPORT
            ]
        )
        await finish_setup(hass, entry, gateway)

    hass.async_create_task(finish())
    return True
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
    """Remove an instance of the MySensors integration."""
    gateway = get_mysensors_gateway(hass, entry.entry_id)
    results = await asyncio.gather(
        *[
            hass.config_entries.async_forward_entry_unload(entry, platform)
            for platform in SUPPORTED_PLATFORMS_WITH_ENTRY_SUPPORT
        ]
    )
    if not all(results):
        return False
    # Run any unload callbacks registered via on_unload().
    unload_key = MYSENSORS_ON_UNLOAD.format(entry.entry_id)
    for fnct in hass.data[DOMAIN].get(unload_key, []):
        fnct()
    del hass.data[DOMAIN][MYSENSORS_GATEWAYS][entry.entry_id]
    await gw_stop(hass, entry, gateway)
    return True
async def on_unload(
    hass: HomeAssistantType, entry: Union[ConfigEntry, GatewayId], fnct: Callable
) -> None:
    """Register a callback to be called when entry is unloaded.

    This function is used by platforms to cleanup after themselves
    """
    uniqueid = entry if isinstance(entry, GatewayId) else entry.entry_id
    key = MYSENSORS_ON_UNLOAD.format(uniqueid)
    hass.data[DOMAIN].setdefault(key, []).append(fnct)
@callback
def setup_mysensors_platform(
    hass,
    domain: str,  # hass platform name
    discovery_info: Optional[Dict[str, List[DevId]]],
    device_class: Union[Type[MySensorsDevice], Dict[SensorType, Type[MySensorsEntity]]],
    device_args: Optional[
        Tuple
    ] = None,  # extra arguments that will be given to the entity constructor
    async_add_entities: Callable = None,
) -> Optional[List[MySensorsDevice]]:
    """Set up a MySensors platform.

    Sets up a bunch of instances of a single platform that is supported by this integration.
    The function is given a list of device ids, each one describing an instance to set up.
    The function is also given a class.
    A new instance of the class is created for every device id, and the device id is given to the constructor of the class

    Returns the list of newly created devices, or None when there was no
    discovery info to act on.
    """
    # Only act if called via MySensors by discovery event.
    # Otherwise gateway is not set up.
    if not discovery_info:
        _LOGGER.debug("Skipping setup due to no discovery info")
        return None
    if device_args is None:
        device_args = ()
    new_devices: List[MySensorsDevice] = []
    new_dev_ids: List[DevId] = discovery_info[ATTR_DEVICES]
    for dev_id in new_dev_ids:
        # Registry of already-created devices for this platform.
        devices: Dict[DevId, MySensorsDevice] = get_mysensors_devices(hass, domain)
        if dev_id in devices:
            _LOGGER.debug(
                "Skipping setup of %s for platform %s as it already exists",
                dev_id,
                domain,
            )
            continue
        # DevId unpacks into the coordinates of one child value.
        gateway_id, node_id, child_id, value_type = dev_id
        gateway: Optional[BaseAsyncGateway] = get_mysensors_gateway(hass, gateway_id)
        if not gateway:
            _LOGGER.warning("Skipping setup of %s, no gateway found", dev_id)
            continue
        device_class_copy = device_class
        if isinstance(device_class, dict):
            # A dict maps the child's presentation type to its entity class.
            child = gateway.sensors[node_id].children[child_id]
            s_type = gateway.const.Presentation(child.type).name
            device_class_copy = device_class[s_type]
        args_copy = (*device_args, gateway_id, gateway, node_id, child_id, value_type)
        devices[dev_id] = device_class_copy(*args_copy)
        new_devices.append(devices[dev_id])
    if new_devices:
        _LOGGER.info("Adding new devices: %s", new_devices)
        if async_add_entities is not None:
            async_add_entities(new_devices, True)
    return new_devices
| {
"content_hash": "fdd4e06b76a58636fc04519676a97bf4",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 122,
"avg_line_length": 33.71287128712871,
"alnum_prop": 0.6309348996573666,
"repo_name": "turbokongen/home-assistant",
"id": "25b4d3106da5c91031d8e3e6d1db63277d2f8d4e",
"size": "10215",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/mysensors/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "30405146"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
from django.conf.urls import patterns, url
# Legacy blog URL routes, kept so old article links keep resolving.
# NOTE(review): `patterns()` with string view names was removed in Django 1.10;
# presumably this project pins an older Django — confirm before upgrading.
urlpatterns = patterns(
    "legacy.views",
    # Single article by primary key, e.g. /article/42
    url(
        r"^article/(?P<article_pk>\d+)",
        "legacy_blog_article",
        name="legacy_blog_article",
    ),
    # Full article listing.
    url(
        r"^article/list",
        "legacy_blog_articles",
        name="legacy_blog_articles",
    ),
)
| {
"content_hash": "6935cf5e509c40f61623f23f90efd382",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 42,
"avg_line_length": 20.375,
"alnum_prop": 0.5521472392638037,
"repo_name": "daGrevis/daGrevis.lv",
"id": "4efb8466f7cb7f458e3bfdf997c5040299f75297",
"size": "326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dagrevis_lv/legacy/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48553"
},
{
"name": "HTML",
"bytes": "19688"
},
{
"name": "JavaScript",
"bytes": "796"
},
{
"name": "Python",
"bytes": "80449"
}
],
"symlink_target": ""
} |
def transform(dataset, sigma=2.0):
    """Apply a Gaussian filter to each image of a tilt series.

    The filter blurs the tilt images, reducing noise and fine detail.
    Smoothing happens only within each image plane; the last axis
    (presumably indexing tilt angles — confirm against the data layout)
    is never blurred across.

    :param dataset: dataset whose ``active_scalars`` hold the tilt series.
    :param sigma: standard deviation of the Gaussian kernel, in pixels.
    """
    import numpy as np
    import scipy.ndimage

    tilt_series = dataset.active_scalars
    result = np.empty_like(tilt_series)
    # Sigma of 0 on the last axis: no smearing between tilt images.
    # Note: scipy.ndimage.filters is deprecated; call gaussian_filter directly.
    scipy.ndimage.gaussian_filter(
        tilt_series, [sigma, sigma, 0], output=result)
    # Set the result as the new scalars.
    dataset.active_scalars = result
| {
"content_hash": "ad2052bbf1c3fbbd3f97502622524ec3",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 76,
"avg_line_length": 31,
"alnum_prop": 0.6875,
"repo_name": "OpenChemistry/tomviz",
"id": "01548a757e351b391c936833650164e467c3b53d",
"size": "496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tomviz/python/GaussianFilterTiltSeries.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "107"
},
{
"name": "C",
"bytes": "251"
},
{
"name": "C++",
"bytes": "2173416"
},
{
"name": "CMake",
"bytes": "47116"
},
{
"name": "Dockerfile",
"bytes": "7467"
},
{
"name": "Python",
"bytes": "552224"
},
{
"name": "Shell",
"bytes": "10796"
}
],
"symlink_target": ""
} |
import sys
sys.path.append('./lib')
import networking as networkingLib
import lib.common as comLib
import re
import time
def getInterfaceStats():
    """Return per-interface RX/TX counters parsed from /proc/net/dev.

    Loopback ("lo") and VLAN ("vl") interfaces are skipped.  Each entry
    also carries the interface's current IP address.
    """
    ret = []
    # Fix: close the file deterministically (was left open).
    # First two lines of /proc/net/dev are column headers.
    with open("/proc/net/dev", "r") as f:
        data = f.readlines()[2:]
    ips = getInterfaceIps()
    for d in data:
        cols = re.split('[\t\s]+', d)
        # The interface name field ends with ':'; strip it.
        cols[1] = (cols[1])[:-1]
        if re.match(r'.*(?:vl|lo).*', cols[1]):
            continue
        info = {
            "name":         cols[1],
            "ip":           ips[cols[1]],
            "rx_bytes":     cols[2],
            "rx_packets":   cols[3],
            "rx_errors":    cols[4],
            "rx_dropped":   cols[5],
            "rx_fifo":      cols[6],
            "rx_frame":     cols[7],
            "rx_compressed":cols[8],
            "rx_multicast": cols[9],
            "tx_bytes":     cols[10],
            "tx_packets":   cols[11],
            "tx_errors":    cols[12],
            "tx_dropped":   cols[13],
            "tx_fifo":      cols[14],
            "tx_frame":     cols[15],
            "tx_compressed":cols[16],
            "tx_multicast": cols[17],
        }
        ret.append(info)
    return ret
def getPrimaryIp():
    """Return the IP of the first 'eth*' interface with a usable address.

    Falls back to 127.0.0.1 when no suitable interface is found.
    """
    # Fix: close the file deterministically (was left open); the unused
    # `ret` accumulator was also removed.
    with open("/proc/net/dev", "r") as f:
        data = f.readlines()[2:]
    ips = getInterfaceIps()
    nameMatch = re.compile(".*eth.*")
    for d in data:
        cols = re.split('[\t\s]+', d)
        # The interface name field ends with ':'; strip it.
        name = (cols[1])[:-1]
        if name not in ips:
            continue
        ip = ips[name]
        if nameMatch.match(name) and ip and ip != '127.0.0.1':
            return ip
    return '127.0.0.1'
def getConnections():
    """Return inet connections plus the distinct types/families/states seen."""
    ret = []
    families = {}
    types = {}
    states = {}
    for conn in networkingLib.psutil.net_connections(kind="inet"):
        info = {}
        info['pid'] = conn.pid
        info['fd'] = conn.fd
        info['family'] = conn.family
        info['type'] = conn.type
        if info['family'] == networkingLib.socket.AF_UNIX:
            # UNIX sockets carry path-like addresses, no ports.
            info['local_addr_host'] = conn.laddr
            info['remote_addr_host'] = conn.raddr
        else:
            info['local_addr_host'] = conn.laddr[0]
            info['local_addr_port'] = conn.laddr[1]
            if conn.raddr:
                info['remote_addr_host'] = conn.raddr[0]
                info['remote_addr_port'] = conn.raddr[1]
            else:
                info['remote_addr_host'] = ''
                info['remote_addr_port'] = ''
        info['state'] = conn.status
        ret.append(info)
        # Dicts double as ordered sets of the distinct values seen.
        types[info['type']] = 1
        families[info['family']] = 1
        states[info['state']] = 1
    return ret, types.keys(), families.keys(), states.keys()
def getInterfaceIps():
    """Map each interface name to its current IP address."""
    return {
        iface: networkingLib.getIpAddrFromInt(iface)
        for iface in networkingLib.getInterfaces()
    }
def clearIpsOfInterface(interface):
    """Remove every IP address assigned to *interface*."""
    comLib.cmd("sudo ip addr flush dev {0}".format(interface))
def bringInterfaceDown(interface):
    """Take *interface* down and flush its addresses."""
    comLib.cmd("sudo ip link set {0} down".format(interface))
    clearIpsOfInterface(interface)
def bringInterfaceUp(interface):
    """Bring *interface* back up."""
    comLib.cmd("sudo ip link set {0} up".format(interface))
def setInterfaceIp(interface, ip):
    """Assign *ip* to *interface*."""
    comLib.cmd("sudo ip addr add {0} dev {1}".format(ip, interface))
def setIps(ipAddrs):
    """Reconfigure interfaces: take all down, assign addresses, bring back up."""
    for iface in ipAddrs:
        bringInterfaceDown(iface)
        time.sleep(1)
    for iface, addr in ipAddrs.items():
        setInterfaceIp(iface, addr)
        time.sleep(1)
    for iface in ipAddrs:
        bringInterfaceUp(iface)
        time.sleep(1)
    comLib.cmd("sudo dhclient")
| {
"content_hash": "0bf11beaef8a1626c087d664233339af",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 67,
"avg_line_length": 28.0546875,
"alnum_prop": 0.5324422166527429,
"repo_name": "GridProtectionAlliance/ARMORE",
"id": "4e35d4387834c8cfea7a0684fb926639b493f012",
"size": "5358",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/webServer/static/python/network.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "918532"
},
{
"name": "C++",
"bytes": "199619"
},
{
"name": "CSS",
"bytes": "14998"
},
{
"name": "Elixir",
"bytes": "19273"
},
{
"name": "HTML",
"bytes": "122769"
},
{
"name": "JavaScript",
"bytes": "961544"
},
{
"name": "Makefile",
"bytes": "29364"
},
{
"name": "Python",
"bytes": "267525"
},
{
"name": "Roff",
"bytes": "6670"
},
{
"name": "Shell",
"bytes": "106609"
},
{
"name": "Zeek",
"bytes": "514415"
}
],
"symlink_target": ""
} |
import unittest
from threading import Event
from unittest.mock import MagicMock, call, patch
from streamlink.stream.hls import HLSStream, HLSStreamReader
from tests.mixins.stream_hls import EventedHLSStreamWriter, Playlist, Segment, TestMixinStreamHLS
FILTERED = "filtered"
class SegmentFiltered(Segment):
    """Segment whose title marks it for filtering by the stream writer."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The writer's filter predicate matches on this title value.
        self.title = FILTERED
class _TestSubjectHLSReader(HLSStreamReader):
    """HLS reader whose writer emits events the test can await."""
    __writer__ = EventedHLSStreamWriter
class _TestSubjectHLSStream(HLSStream):
    """HLS stream wired to the evented test reader."""
    __reader__ = _TestSubjectHLSReader
@patch("streamlink.stream.hls.HLSStreamWorker.wait", MagicMock(return_value=True))
class TestFilteredHLSStream(TestMixinStreamHLS, unittest.TestCase):
__stream__ = _TestSubjectHLSStream
@classmethod
def filter_sequence(cls, sequence):
return sequence.segment.title == FILTERED
def get_session(self, options=None, *args, **kwargs):
session = super().get_session(options)
session.set_option("hls-live-edge", 2)
session.set_option("hls-timeout", 0)
session.set_option("stream-timeout", 0)
return session
def subject(self, *args, **kwargs):
thread, segments = super().subject(*args, **kwargs)
return thread, thread.reader, thread.reader.writer, segments
# don't patch should_filter_sequence here (it always returns False)
def test_not_filtered(self):
thread, reader, writer, segments = self.subject([
Playlist(0, [SegmentFiltered(0), SegmentFiltered(1)], end=True)
])
self.await_write(2)
data = self.await_read()
self.assertEqual(data, self.content(segments), "Does not filter by default")
    @patch("streamlink.stream.hls.HLSStreamWriter.should_filter_sequence", new=filter_sequence)
    @patch("streamlink.stream.hls.log")
    def test_filtered_logging(self, mock_log):
        """Alternating filtered/unfiltered playlists log pause/resume and
        gate the reader's filter_event accordingly."""
        thread, reader, writer, segments = self.subject([
            Playlist(0, [SegmentFiltered(0), SegmentFiltered(1)]),
            Playlist(2, [Segment(2), Segment(3)]),
            Playlist(4, [SegmentFiltered(4), SegmentFiltered(5)]),
            Playlist(6, [Segment(6), Segment(7)], end=True)
        ])
        data = b""

        self.assertTrue(reader.filter_event.is_set(), "Doesn't let the reader wait if not filtering")

        # Two filtered->unfiltered cycles: each adds one pause and one resume log.
        for i in range(2):
            self.await_write(2)
            self.assertEqual(len(mock_log.info.mock_calls), i * 2 + 1)
            self.assertEqual(mock_log.info.mock_calls[i * 2 + 0], call("Filtering out segments and pausing stream output"))
            self.assertFalse(reader.filter_event.is_set(), "Lets the reader wait if filtering")

            self.await_write(2)
            self.assertEqual(len(mock_log.info.mock_calls), i * 2 + 2)
            self.assertEqual(mock_log.info.mock_calls[i * 2 + 1], call("Resuming stream output"))
            self.assertTrue(reader.filter_event.is_set(), "Doesn't let the reader wait if not filtering")

            data += self.await_read()

        self.assertEqual(
            data,
            self.content(segments, cond=lambda s: s.num % 4 > 1),
            "Correctly filters out segments"
        )
        self.assertTrue(all(self.called(s) for s in segments.values()), "Downloads all segments")
@patch("streamlink.stream.hls.HLSStreamWriter.should_filter_sequence", new=filter_sequence)
def test_filtered_timeout(self):
thread, reader, writer, segments = self.subject([
Playlist(0, [Segment(0), Segment(1)], end=True)
])
self.await_write()
data = self.await_read()
self.assertEqual(data, segments[0].content, "Has read the first segment")
# simulate a timeout by having an empty buffer
# timeout value is set to 0
with self.assertRaises(IOError) as cm:
self.await_read()
self.assertEqual(str(cm.exception), "Read timeout", "Raises a timeout error when no data is available to read")
@patch("streamlink.stream.hls.HLSStreamWriter.should_filter_sequence", new=filter_sequence)
def test_filtered_no_timeout(self):
thread, reader, writer, segments = self.subject([
Playlist(0, [SegmentFiltered(0), SegmentFiltered(1)]),
Playlist(2, [Segment(2), Segment(3)], end=True)
])
self.assertTrue(reader.filter_event.is_set(), "Doesn't let the reader wait if not filtering")
self.await_write(2)
self.assertFalse(reader.filter_event.is_set(), "Lets the reader wait if filtering")
# make reader read (no data available yet)
thread.read_wait.set()
# once data becomes available, the reader continues reading
self.await_write()
self.assertTrue(reader.filter_event.is_set(), "Reader is not waiting anymore")
thread.read_done.wait()
thread.read_done.clear()
self.assertFalse(thread.error, "Doesn't time out when filtering")
self.assertEqual(b"".join(thread.data), segments[2].content, "Reads next available buffer data")
self.await_write()
data = self.await_read()
self.assertEqual(data, self.content(segments, cond=lambda s: s.num >= 2))
@patch("streamlink.stream.hls.HLSStreamWriter.should_filter_sequence", new=filter_sequence)
def test_filtered_closed(self):
thread, reader, writer, segments = self.subject(start=False, playlists=[
Playlist(0, [SegmentFiltered(0), SegmentFiltered(1)], end=True)
])
# mock the reader thread's filter_event.wait method, so that the main thread can wait on its call
filter_event_wait_called = Event()
orig_wait = reader.filter_event.wait
def mocked_wait(*args, **kwargs):
filter_event_wait_called.set()
return orig_wait(*args, **kwargs)
with patch.object(reader.filter_event, "wait", side_effect=mocked_wait):
self.start()
# write first filtered segment and trigger the filter_event's lock
self.assertTrue(reader.filter_event.is_set(), "Doesn't let the reader wait if not filtering")
self.await_write()
self.assertFalse(reader.filter_event.is_set(), "Lets the reader wait if filtering")
# make reader read (no data available yet)
thread.read_wait.set()
# before calling reader.close(), wait until reader thread's filter_event.wait was called
if not filter_event_wait_called.wait(timeout=5): # pragma: no cover
raise RuntimeError("Missing filter_event.wait() call")
# close stream while reader is waiting for filtering to end
thread.reader.close()
thread.read_done.wait()
thread.read_done.clear()
self.assertEqual(thread.data, [b""], "Stops reading on stream close")
self.assertFalse(thread.error, "Is not a read timeout on stream close")
def test_hls_segment_ignore_names(self):
thread, reader, writer, segments = self.subject([
Playlist(0, [Segment(0), Segment(1), Segment(2), Segment(3)], end=True)
], {"hls-segment-ignore-names": [
".*",
"segment0",
"segment2",
]})
self.await_write(4)
self.assertEqual(self.await_read(), self.content(segments, cond=lambda s: s.num % 2 > 0))
| {
"content_hash": "93ef3d1302e5bdf39c6525178b84c103",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 123,
"avg_line_length": 41.52808988764045,
"alnum_prop": 0.6392045454545454,
"repo_name": "amurzeau/streamlink-debian",
"id": "10c52bc4ae2541e7469ad1c31c01464ad76347d1",
"size": "7392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/stream/test_hls_filtered.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1478809"
}
],
"symlink_target": ""
} |
import os
import re
import ctypes
import struct
from os import O_RDWR, O_CLOEXEC, O_NONBLOCK
from stat import S_ISCHR
from mmap import mmap
from pathlib import Path
from ctypes import c_uint8 as ubyte
# MMU page geometry: uio memory maps are made in whole pages.
# NOTE(review): assumes 4 KiB pages — confirm on the target platform.
PAGE_SHIFT = 12
PAGE_SIZE = 1 << PAGE_SHIFT  # 4096
PAGE_MASK = -PAGE_SIZE  # ANDing with this clears the in-page offset bits
# a physical memory region associated with an uio device
class MemRegion:
    """A physical memory region, optionally memory-mapped via a uio device.

    A region is either backed directly by a uio map (``uio``/``index``) or is
    a sub-range of a ``parent`` region, in which case it reuses (a slice of)
    the parent's mapping.  Only a page-aligned region can be mapped, and only
    up to a whole number of pages; ``mappable`` holds how many bytes of the
    region actually are mapped (0 if none), and ``_mmap`` is a memoryview of
    those bytes (or None).
    """

    def __init__( rgn, parent, address, size, name=None, uio=None, index=None ):
        # identity comparison with None is the correct idiom
        # (was `parent == None and uio == None`)
        if parent is None and uio is None:
            raise ValueError( "parent region or uio device required" )
        if size < 0:
            raise ValueError( "invalid size" )
        # parent memory region (if any)
        rgn.parent = parent
        # physical address range
        rgn.address = address
        rgn.size = size
        rgn.end = address + size
        # identification
        rgn.name = name
        rgn.uio = uio
        rgn.index = index
        # memory mapping
        rgn.mappable = 0
        rgn._mmap = None
        # nothing to map
        if size == 0:
            return
        if parent:
            # need to use parent's mapping
            if rgn not in parent:
                raise ValueError( "memory region not inside parent" )
            offset = rgn.address - parent.address
            if offset >= parent.mappable:
                return  # entirely beyond the parent's mapped prefix
            rgn.mappable = min( parent.mappable - offset, size )
            rgn._mmap = parent._mmap[ offset : offset + rgn.mappable ]
        elif rgn.address & ~PAGE_MASK:
            return  # not page-aligned, can't be mapped
        else:
            # round down to integral number of pages
            rgn.mappable = size & PAGE_MASK
            # UIO uses a disgusting hack where the memory map index is
            # passed via the offset argument.  In the actual kernel call
            # the offset (and length) are in pages rather than bytes, hence
            # we actually need to pass index * PAGE_SIZE as offset.
            rgn._mmap = memoryview( mmap( rgn.uio._fd, rgn.mappable,
                    offset = rgn.index * PAGE_SIZE ) )

    @classmethod
    def from_sysfs( cls, uio, info, parent=None ):
        """Build a region from a /sys/.../maps/mapN directory entry."""
        # BUGFIX: was r'map([0-9])' which only matched single-digit indices,
        # so devices with ten or more maps crashed on 'map10' and up.
        index = int( re.fullmatch( r'map([0-9]+)', info.name ).group(1) )
        def getinfo( attr ):
            # read the single-line sysfs attribute file
            with (info/attr).open() as f:
                return f.readline().rstrip()
        # If no name has been supplied, the region gets an auto-generated name
        # containing the full DT path.  These are useless, ignore them.
        name = getinfo( 'name' )
        if name == '' or name[0] == '/':
            name = None
        # get memory region bounds (sysfs values are hex, base=0 handles '0x…')
        address = int( getinfo('addr'), 0 )
        size = int( getinfo('size'), 0 )
        return MemRegion( parent, address, size, name, uio, index )

    def subregion( rgn, offset, size, name=None ):
        """Return a sub-range of this region (shares this region's mapping)."""
        return MemRegion( rgn, rgn.address + offset, size, name )

    def __contains__( rgn, child ):
        # a child is contained iff its physical range lies within ours
        return child.address >= rgn.address and child.end <= rgn.end

    # fill bytes in region at given offset
    def fill( rgn, length=None, offset=0, value=0 ):
        """Set `length` bytes at `offset` to `value` (defaults: whole mapping, 0)."""
        if value not in range(256):
            raise ValueError( "invalid fill value" )
        if length is None:
            length = rgn.mappable
        # map ctypes instance (does all necessary error checking)
        mem = (ubyte * length).from_buffer( rgn._mmap, offset )
        ctypes.memset( mem, value, length )

    # write data into region at given offset
    def write( rgn, data, offset=0 ):
        """Copy `data` (any bytes-like) into the mapped region at `offset`."""
        data = bytes(data)
        if offset < 0 or offset > rgn.mappable:
            raise ValueError( "invalid offset" )
        end = offset + len( data )
        if offset == end:
            return  # zero-length write is a no-op
        if end > rgn.mappable:
            raise ValueError( "write extends beyond mappable area" )
        rgn._mmap[ offset : end ] = data

    # read data from region at given offset
    def read( rgn, length=None, offset=0 ):
        """Read bytes — or, if `length` is a ctypes type, one such value — at `offset`."""
        # read ctypes instance (does all necessary error checking)
        if isinstance( length, type ):
            return (length * 1).from_buffer_copy( rgn._mmap, offset )[0]
        # read bytes
        return bytes( rgn.map( length, offset ) )

    # map data from region at given offset
    def map( rgn, length=None, offset=0 ):
        """Return a zero-copy view: a ctypes instance if `length` is a type,
        otherwise a memoryview of `length` bytes (default: to end of mapping)."""
        if rgn._mmap is None:
            raise RuntimeError( "memory region cannot be mapped" )
        if isinstance( length, type ):
            # map ctypes instance (does all necessary error checking)
            return length.from_buffer( rgn._mmap, offset )
        if isinstance( length, int ) or length is None:
            # map byte-range
            if offset < 0:
                raise ValueError( "offset cannot be negative" )
            end = rgn.mappable
            if length is not None:
                end = min( offset + length, end )
            if offset == 0 and end == rgn.mappable:
                return rgn._mmap
            return rgn._mmap[ offset : end ]
        raise TypeError( "first argument should be length or ctypes type" )
# uio device object
class Uio:
    """Wrapper for a /dev/uio device: opens the fd, enumerates memory
    regions from sysfs, and provides irq send/receive helpers."""

    def fileno( self ):
        # expose the raw fd so a Uio can be passed to select()/poll()
        return self._fd

    def __init__( self, path, blocking=True, parent=None ):
        path = Path( '/dev/uio', path )
        self.path = path
        if path.is_dir():
            # directory layout: the actual device node is the 'module' entry
            path = path / 'module'
        flags = O_RDWR | O_CLOEXEC
        if not blocking:
            flags |= O_NONBLOCK  # for irq_recv
        self._fd = os.open( str(path), flags )
        # check parent memory region (if any)
        if parent is not None:
            if isinstance( parent, Uio ):
                # accept another Uio: use its default region as our parent
                parent = parent.region()
            elif not isinstance( parent, MemRegion ):
                raise TypeError
        # build path to sysfs dir for obtaining metadata
        dev = os.stat( self._fd ).st_rdev
        dev = '{0}:{1}'.format( os.major(dev), os.minor(dev) )
        self.syspath = Path('/sys/dev/char', dev).resolve()
        # enumerate memory regions
        # beware that if there are none, the dir is absent rather than empty
        self._regions = {}
        rgninfo = self.syspath/'maps'
        if rgninfo.is_dir():
            for info in rgninfo.iterdir():
                rgn = MemRegion.from_sysfs( self, info, parent )
                # allow lookup by index or (if available) by name
                self._regions[ rgn.index ] = rgn
                if rgn.name:
                    self._regions[ rgn.name ] = rgn

    def region( self, rgn=0 ):
        # look up a memory region by index or by name
        return self._regions[ rgn ]

    # shortcut to make subregion of default region (index 0)
    def subregion( self, offset, size, name=None ):
        return self.region().subregion( offset, size, name )

    # shortcut to map default region (index 0)
    def map( self, length_or_struct=None, offset=0 ):
        return self.region().map( length_or_struct, offset )

    # shortcut to read from default region (index 0)
    def read( self, length_or_struct=None, offset=0 ):
        return self.region().read( length_or_struct, offset )

    # shortcut to write to default region (index 0)
    def write( self, data=None, offset=0 ):
        return self.region().write( data, offset )

    # shortcut to fill bytes in default region (index 0)
    def fill( self, length=None, offset=0, value=0 ):
        return self.region().fill( length, offset, value )

    # TODO determine if the device has any irq

    def irq_recv( self ):
        # Blocks until an interrupt arrives and returns the interrupt
        # counter; returns None instead if opened non-blocking and no irq
        # is pending (read raises BlockingIOError in that case).
        try:
            (counter,) = struct.unpack( "I", os.read( self._fd, 4 ) )
            return counter
        except BlockingIOError:
            return None

    def irq_control( self, value ):
        # writing a 32-bit value to the uio fd controls the irq
        os.write( self._fd, struct.pack( "I", value ) )

    # irq control functions for uio_pdrv_genirq:
    def irq_disable( self ):
        self.irq_control( 0 )

    def irq_enable( self ):
        self.irq_control( 1 )

    # note: irq is disabled once received.  you need to reenable it
    #   - before handling it, if edge-triggered
    #   - after handling it, if level-triggered
| {
"content_hash": "0554b8868df105aa0aa6c77cc2bc65b5",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 80,
"avg_line_length": 33.63598326359833,
"alnum_prop": 0.5725836546834183,
"repo_name": "mvduin/py-uio",
"id": "1a93835a13ae34ce60525e0cde87f4ee89ee2071",
"size": "8039",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/uio/device.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "6189"
},
{
"name": "Batchfile",
"bytes": "1437"
},
{
"name": "C",
"bytes": "8536"
},
{
"name": "C++",
"bytes": "7047"
},
{
"name": "Makefile",
"bytes": "2302"
},
{
"name": "Parrot",
"bytes": "730"
},
{
"name": "Perl",
"bytes": "2939"
},
{
"name": "Python",
"bytes": "130668"
},
{
"name": "Shell",
"bytes": "451"
}
],
"symlink_target": ""
} |
from __future__ import division
from PyQt4 import QtCore, QtGui
from vistrails.core.utils import any, expression
from vistrails.core import system
from constant_configuration import StandardConstantWidget, ColorWidget
############################################################################
class QueryWidgetMixin(object):
    """Mixin that tracks a query widget's contents and query operator and
    notifies the parent widget when either of them changes."""

    def __init__(self, contents=None, query_method=None):
        # last values seen; used to suppress redundant parent updates
        self._last_contents = contents
        self._last_query_method = query_method

    # updateMethod intercepts calls from a child widget like the
    # contents_widget
    def updateMethod(self):
        self.update_parent()

    def update_parent(self):
        # contents() and query_method() are supplied by the concrete widget
        # class this mixin is combined with (e.g. BaseQueryWidget)
        new_contents = self.contents()
        new_query_method = self.query_method()
        if (new_contents != self._last_contents or
                new_query_method != self._last_query_method):
            if self.parent() and hasattr(self.parent(), 'updateMethod'):
                self.parent().updateMethod()
            self._last_contents = new_contents
            self._last_query_method = new_query_method
            # Qt signal notifying listeners of the new contents
            self.emit(QtCore.SIGNAL('contentsChanged'), (self,new_contents))
class BaseQueryWidget(QtGui.QWidget, QueryWidgetMixin):
    """Widget pairing an operator drop-down button with a contents editor.

    contents_klass: widget class used to edit the query value
    query_methods: list of operator strings offered in the drop-down
    param: provides the initial strValue and queryMethod
    """

    def __init__(self, contents_klass, query_methods, param, parent=None):
        QtGui.QWidget.__init__(self, parent)
        QueryWidgetMixin.__init__(self, param.strValue, param.queryMethod)

        contents = param.strValue
        queryMethod = param.queryMethod
        layout = QtGui.QHBoxLayout()
        self.op_button = QtGui.QToolButton()
        self.op_button.setPopupMode(QtGui.QToolButton.InstantPopup)
        self.op_button.setArrowType(QtCore.Qt.NoArrow)
        # one checkable action per operator, grouped for exclusivity
        action_group = QtGui.QActionGroup(self.op_button)
        actions = []
        checked_exists = False
        for method in query_methods:
            action = QtGui.QAction(method, self)
            action.setCheckable(True)
            action_group.addAction(action)
            if method == queryMethod:
                action.setChecked(True)
                checked_exists = True
            actions.append(action)
        # fall back to the first operator if the param's method is unknown
        if not checked_exists:
            actions[0].setChecked(True)
            self._last_query_method = str(actions[0].text())

        menu = QtGui.QMenu(self.op_button)
        menu.addActions(actions)
        self.op_button.setMenu(menu)
        self.op_button.setText(action_group.checkedAction().text())

        self.contents_widget = contents_klass(param)
        self.contents_widget.setContents(contents)

        layout.setMargin(0)
        layout.setSpacing(0)
        layout.addWidget(self.op_button)
        layout.addWidget(self.contents_widget)
        self.setLayout(layout)

        self.connect(self.op_button, QtCore.SIGNAL('triggered(QAction*)'),
                     self.update_action)

    def contents(self):
        # current query value, as held by the contents editor
        return self.contents_widget.contents()

    def setContents(self, strValue, silent=True):
        # when silent is False, also propagate the change to the parent
        self.contents_widget.setContents(strValue)
        if not silent:
            self.update_parent()

    def update_action(self, action):
        # user picked an operator from the menu: reflect it on the button
        # and notify the parent
        self.op_button.setText(action.text())
        self.update_parent()

    def query_method(self):
        # text of the currently checked operator action
        # (implicitly returns None if nothing is checked)
        for action in self.op_button.menu().actions():
            if action.isChecked():
                return str(action.text())
class StandardQueryWidget(BaseQueryWidget):
    # equality / inequality queries on a standard constant value
    def __init__(self, param, parent=None):
        BaseQueryWidget.__init__(self, StandardConstantWidget, ["==", "!="],
                                 param, parent)
class StringQueryWidget(StandardQueryWidget):
    # String queries; "*[]*" presumably denotes containment and "=~" a
    # regexp match — confirm against the query engine.
    # NOTE(review): deliberately calls BaseQueryWidget.__init__ directly,
    # bypassing StandardQueryWidget.__init__, to supply its own operators.
    def __init__(self, param, parent=None):
        BaseQueryWidget.__init__(self, StandardConstantWidget,
                                 ["*[]*", "==", "=~"],
                                 param, parent)
class NumericQueryWidget(StandardQueryWidget):
    # Numeric comparison queries.
    # NOTE(review): like StringQueryWidget, bypasses
    # StandardQueryWidget.__init__ on purpose to supply its own operators.
    def __init__(self, param, parent=None):
        BaseQueryWidget.__init__(self, StandardConstantWidget,
                                 ["==", "<", ">", "<=", ">="],
                                 param, parent)
class ColorQueryWidget(StandardQueryWidget):
    # Color queries with a ColorWidget editor; the operator strings are
    # numeric thresholds — presumably color-distance tolerances, confirm
    # against the query engine.
    def __init__(self, param, parent=None):
        BaseQueryWidget.__init__(self, ColorWidget, ["2.3", "5", "10", "50"],
                                 param, parent)
| {
"content_hash": "0d5c4f0f1b704b9ea65a062d8804ef1b",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 77,
"avg_line_length": 37.69298245614035,
"alnum_prop": 0.5941354433325576,
"repo_name": "minesense/VisTrails",
"id": "e28d6985ea1f0d289f1560c77bc5e345dc03742b",
"size": "6211",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vistrails/gui/modules/query_configuration.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1129"
},
{
"name": "Makefile",
"bytes": "768"
},
{
"name": "Mako",
"bytes": "66613"
},
{
"name": "PHP",
"bytes": "49302"
},
{
"name": "Python",
"bytes": "21260847"
},
{
"name": "R",
"bytes": "782836"
},
{
"name": "Ruby",
"bytes": "875"
},
{
"name": "SQLPL",
"bytes": "2323"
},
{
"name": "Shell",
"bytes": "26542"
},
{
"name": "TeX",
"bytes": "147247"
},
{
"name": "XSLT",
"bytes": "1090"
}
],
"symlink_target": ""
} |
from __future__ import division, absolute_import, unicode_literals
from django.contrib import messages
from django.utils.translation import ugettext_lazy, pgettext_lazy
from djadmin2 import permissions
from djadmin2.actions import BaseListAction
class CustomPublishAction(BaseListAction):
    """List action that marks the selected items as published."""

    # requires model change permission on top of the base action's checks
    permission_classes = BaseListAction.permission_classes + (
        permissions.ModelChangePermission,
    )

    description = ugettext_lazy('Publish selected items')

    success_message = pgettext_lazy('singular form',
        'Successfully published %(count)s %(items)s')
    success_message_plural = pgettext_lazy('plural form',
        'Successfully published %(count)s %(items)s')

    default_template_name = "actions/publish_selected_items.html"

    def process_queryset(self):
        # flip the published flag on every item in the (selected) queryset
        self.get_queryset().update(published=True)
class PublishAllItemsAction(BaseListAction):
    """List action that publishes every item, not just the selection."""

    # requires model change permission on top of the base action's checks
    permission_classes = BaseListAction.permission_classes + (
        permissions.ModelChangePermission,
    )

    description = ugettext_lazy('Publish all items')

    success_message = pgettext_lazy(
        'singular form',
        'Successfully published %(count)s %(items)s',
    )
    success_message_plural = pgettext_lazy(
        'plural form',
        'Successfully published %(count)s %(items)s',
    )

    default_template_name = "model_list.html"

    # act on the full queryset instead of only the selected rows
    only_selected = False

    def process_queryset(self):
        self.get_queryset().update(published=True)
def unpublish_items(request, queryset):
    """Function-style list action: unpublish the selected items."""
    queryset.update(published=False)
    messages.add_message(request, messages.INFO, ugettext_lazy(u'Items unpublished'))
# Translators : action description
unpublish_items.description = ugettext_lazy('Unpublish selected items')
def unpublish_all_items(request, queryset):
    """Function-style list action: unpublish every item in the queryset."""
    queryset.update(published=False)
    messages.add_message(
        request,
        messages.INFO,
        ugettext_lazy('Items unpublished'),
    )
unpublish_all_items.description = ugettext_lazy('Unpublish all items')
# only_selected=False makes the action operate on the full queryset,
# mirroring PublishAllItemsAction above
unpublish_all_items.only_selected = False
| {
"content_hash": "15d5cfa1739eb16fa3f9477235017c09",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 85,
"avg_line_length": 29.71014492753623,
"alnum_prop": 0.713170731707317,
"repo_name": "andrewsmedina/django-admin2",
"id": "fcb6e5da906895129423e7a3c4df6a037270a04b",
"size": "2074",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "example/blog/actions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "19219"
},
{
"name": "HTML",
"bytes": "42539"
},
{
"name": "JavaScript",
"bytes": "2540"
},
{
"name": "Python",
"bytes": "229693"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import platform
if platform.python_version() < '2.7':
import unittest2 as unittest
else:
import unittest
import requests
import responses
from magma.session import Session
class TestSession(unittest.TestCase):
    """Unit tests for magma.session.Session: constructor headers, URL
    building (get_url) and the HTTP verb wrappers (get/post)."""

    # NOTE: all `assertEquals` calls were switched to `assertEqual` — the
    # plural form is a deprecated alias (removed in Python 3.12) and the
    # modern name works on every supported interpreter, incl. unittest2.

    def test_ua_set_on_init(self):
        # a User-Agent passed to the constructor must end up in the headers
        ua = 'th+is/an user 4gent'
        s = Session(**{"User-Agent": ua})
        self.assertEqual(ua, s.headers["User-Agent"])

    # .get_url

    def test_get_absolute_url(self):
        # absolute URLs pass through unchanged
        url = 'http://example.com'
        s = Session()
        self.assertEqual(url, s.get_url(url))

    def test_get_relative_url_root_with_base_url(self):
        # '/' is resolved against the configured base_url
        url = '/'
        s = Session(base_url="foobar")
        self.assertEqual("http://foobar/", s.get_url(url))

    def test_get_relative_url_with_base_url(self):
        s = Session(base_url="abc")
        self.assertEqual('http://abc/foo/bar/qux', s.get_url("/foo/bar/qux"))

    # .get

    @responses.activate
    def test_get_requests_object(self):
        # .get performs exactly one HTTP call and returns a requests.Response
        url = 'http://www.example.com/foo'
        body = "okx&Asq'"
        responses.add(responses.GET, url, body=body, status=200)
        s = Session()
        resp = s.get(url)
        self.assertEqual(1, len(responses.calls))
        self.assertIsInstance(resp, requests.Response)

    @responses.activate
    def test_get_set_default_ua(self):
        # even without an explicit UA, requests must carry a User-Agent header
        url = 'http://www.example.com/foo'
        responses.add(responses.GET, url, body='ok', status=200)
        Session().get(url)
        self.assertEqual(1, len(responses.calls))
        self.assertIn('User-Agent', responses.calls[0].request.headers)

    @responses.activate
    def test_post_requests_object(self):
        # .post mirrors .get: one call, requests.Response returned
        url = 'http://www.example.com/foo'
        body = "okx&Asq'"
        responses.add(responses.POST, url, body=body, status=200)
        s = Session()
        resp = s.post(url)
        self.assertEqual(1, len(responses.calls))
        self.assertIsInstance(resp, requests.Response)
| {
"content_hash": "0d828b9bedc05c5b010565c811da1f8f",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 78,
"avg_line_length": 29.44776119402985,
"alnum_prop": 0.6203750633552965,
"repo_name": "bfontaine/p7magma",
"id": "ea35f7e204a4e7ddd202cc5f82d7cd237601d8ca",
"size": "2121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_session.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4267"
},
{
"name": "Makefile",
"bytes": "1151"
},
{
"name": "Python",
"bytes": "19981"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function
import io
import re
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
    """Read a text file located relative to this setup.py and return it.

    Positional arguments are path components joined onto this file's
    directory; ``encoding`` may be passed as a keyword (default: utf8).
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original relied on garbage collection to close it.
    with io.open(
        join(dirname(__file__), *names),
        encoding=kwargs.get('encoding', 'utf8')
    ) as f:
        return f.read()
# Package metadata for python-jcsclient (src/ layout).
setup(
    name='python-jcsclient',
    version='1.0',
    license='BSD',
    description='Client library for JCS',
    # README with the badge block stripped, concatenated with the changelog
    # whose Sphinx roles (e.g. :func:`x`) are downgraded to ``x`` literals
    long_description='%s\n%s' % (
        re.compile('^.. start-badges.*^.. end-badges', re.M | re.S).sub('', read('README.rst')),
        re.sub(':[a-z]+:`~?(.*?)`', r'``\1``', read('CHANGELOG.rst'))
    ),
    author='Reliance Jio Cloud Services',
    author_email='RDS.Team@ril.com',
    url='https://github.com/jiocloudservices/jcsclient',
    packages=find_packages('src'),
    package_dir={'': 'src'},
    # expose every top-level module under src/ as a py_module
    py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        # uncomment if you test on these interpreters:
        # 'Programming Language :: Python :: Implementation :: IronPython',
        # 'Programming Language :: Python :: Implementation :: Jython',
        # 'Programming Language :: Python :: Implementation :: Stackless',
        'Topic :: Utilities',
    ],
    keywords=[
        # eg: 'keyword1', 'keyword2', 'keyword3',
    ],
    install_requires=[
        # eg: 'aspectlib==1.1.1', 'six>=1.7',
        'requests', 'xmltodict', 'six', 'pyyaml', 'pycrypto'
    ],
    extras_require={
        # eg:
        #   'rst': ['docutils>=0.11'],
        #   ':python_version=="2.6"': ['argparse'],
    },
    entry_points={
        'console_scripts': [
            'jcs = jcsclient.clidriver:main',
        ]
    },
    test_suite='client'
)
| {
"content_hash": "687e4800017b74719720e38f21c9f3df",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 96,
"avg_line_length": 33.924050632911396,
"alnum_prop": 0.591044776119403,
"repo_name": "jiocloudservices/jcsclient",
"id": "685c5978d7b49840ede7540dd9b5637bc7215a97",
"size": "2728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "181299"
}
],
"symlink_target": ""
} |
import phase_disassembler_native
import phase_disassembler_jvm
def init_argparser(argparser):
    """Register the 'native' and 'jvm' input-format subcommands, letting
    each backend module populate its own subparser."""
    fmt_subparsers = argparser.add_subparsers(help='Input format')
    for fmt_name, fmt_module in (('native', phase_disassembler_native),
                                 ('jvm', phase_disassembler_jvm)):
        fmt_module.init_argparser(fmt_subparsers.add_parser(fmt_name))
def init():
    """No-op: this phase needs no one-time initialisation."""
    pass
def apply_options(options):
    """No-op: this phase consumes no global options of its own."""
    pass
def get_name():
    """Return the identifier of this pipeline phase."""
    return 'phase_disassembler'
def do_phase(context):
    """Run the disassembler phase, dispatching on the configured input type.

    Raises Exception for any input type other than 'native' or 'jvm'.
    """
    input_type = context.options.input_type
    if input_type == 'native':
        return phase_disassembler_native.do_phase(context)
    if input_type == 'jvm':
        return phase_disassembler_jvm.do_phase(context)
    raise Exception("Unknown input type")
| {
"content_hash": "2e3694cc9f0f115bfb07c4db50f65288",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 62,
"avg_line_length": 30.12,
"alnum_prop": 0.7250996015936255,
"repo_name": "RainM/perf-toolkit",
"id": "1577ce2d5fd6f64f8ac2885c0612f3c1d6883199",
"size": "1353",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phase_disassembler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "24095"
},
{
"name": "Shell",
"bytes": "97"
}
],
"symlink_target": ""
} |
import base64
from typing import Optional, TypedDict
from backend.common.sitevars.sitevar import Sitevar
class ContentType(TypedDict):
    """Shape of the fmsapi.secrets sitevar payload."""

    username: str  # FMS API username; "" when not configured
    authkey: str  # FMS API auth key; "" when not configured
class FMSApiSecrets(Sitevar[ContentType]):
    """Sitevar holding the FMS API credentials and the derived auth token."""

    @staticmethod
    def key() -> str:
        return "fmsapi.secrets"

    @staticmethod
    def default_value() -> ContentType:
        return ContentType(username="", authkey="")

    @staticmethod
    def description() -> str:
        return "For accessing the FMS API"

    @classmethod
    def username(cls) -> Optional[str]:
        # empty strings count as "not configured" and become None
        return cls.get().get("username") or None

    @classmethod
    def authkey(cls) -> Optional[str]:
        # empty strings count as "not configured" and become None
        return cls.get().get("authkey") or None

    @classmethod
    def auth_token(cls) -> Optional[str]:
        """The base64 encoded username + auth key - used to authenticate with the FMS API"""
        username = cls.username()
        authkey = cls.authkey()
        if username and authkey:
            return FMSApiSecrets.generate_auth_token(username, authkey)
        return None

    @staticmethod
    def generate_auth_token(username: str, authkey: str) -> str:
        # py3 needs byte-strings for b64 - will convert back/forth from ascii for strings
        raw = "{}:{}".format(username, authkey).encode("ascii")
        return base64.b64encode(raw).decode("ascii")
| {
"content_hash": "a4a20f2055bd2faa7e2ad256420440ba",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 92,
"avg_line_length": 29.54,
"alnum_prop": 0.6472579553148273,
"repo_name": "the-blue-alliance/the-blue-alliance",
"id": "48ef4556a9cd78aeb6223ea37988e1616aa2a818",
"size": "1477",
"binary": false,
"copies": "1",
"ref": "refs/heads/py3",
"path": "src/backend/common/sitevars/fms_api_secrets.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "359032"
},
{
"name": "Dockerfile",
"bytes": "2503"
},
{
"name": "HTML",
"bytes": "5877313"
},
{
"name": "JavaScript",
"bytes": "755910"
},
{
"name": "Less",
"bytes": "244218"
},
{
"name": "PHP",
"bytes": "10727"
},
{
"name": "Pug",
"bytes": "1857"
},
{
"name": "Python",
"bytes": "4321885"
},
{
"name": "Ruby",
"bytes": "4677"
},
{
"name": "Shell",
"bytes": "27698"
}
],
"symlink_target": ""
} |
import os
import subprocess

# Build helper for xv6: presence of the '_ls' build artifact marks an
# already-completed build, in which case we clean; otherwise build and
# boot under qemu.
# Improvements over the original: list-form subprocess.call avoids an
# unnecessary shell (os.system), and the stray trailing semicolon is gone.
if os.path.isfile('_ls'):
    subprocess.call(['make', 'clean'])
else:
    subprocess.call(['make', 'qemu'])
| {
"content_hash": "2e7832ffdd7ffec66852b09f0c2bc2d1",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 25,
"avg_line_length": 15.5,
"alnum_prop": 0.6559139784946236,
"repo_name": "belfazt/xv6",
"id": "a07732cf5c6d0f4dfee830601370b0db6c62c1b1",
"size": "93",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "compile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "8690"
},
{
"name": "C",
"bytes": "194911"
},
{
"name": "C++",
"bytes": "23161"
},
{
"name": "Emacs Lisp",
"bytes": "86"
},
{
"name": "Makefile",
"bytes": "8117"
},
{
"name": "Objective-C",
"bytes": "317"
},
{
"name": "OpenEdge ABL",
"bytes": "1990"
},
{
"name": "Perl",
"bytes": "1933"
},
{
"name": "Python",
"bytes": "93"
},
{
"name": "Shell",
"bytes": "32"
}
],
"symlink_target": ""
} |
import numpy as np
from multiprocessing import Process, Pipe
from baselines.common.vec_env import VecEnv
def rollout(env, ob, new, ac, pi, horizon, stochastic):
    """Roll policy `pi` out in `env` for `horizon` steps.

    ob/new/ac carry the episode state from a previous rollout (observation,
    episode-boundary flag, and an action sample used only for dtype/shape).
    Returns history arrays (obs, rews, vpreds, news, acs), each of length
    `horizon`; the env is reset whenever an episode ends.
    """
    # Initialize history arrays
    obs = np.array([ob for _ in range(horizon)])
    rews = np.zeros(horizon, 'float32')
    vpreds = np.zeros(horizon, 'float32')
    news = np.zeros(horizon, 'int32')
    acs = np.array([ac for _ in range(horizon)])

    for i in range(horizon):
        ac, vpred = pi.act(stochastic, ob)
        # record state BEFORE stepping, so row i pairs ob with the action
        # taken from it
        obs[i] = ob
        vpreds[i] = vpred
        news[i] = new
        acs[i] = ac
        # BUGFIX: the original called env.step(ac, env_index) and
        # env.reset(env_index), but `env_index` was never defined anywhere
        # in this function (NameError on first iteration).  Each worker
        # process owns exactly one env (see worker()), so no index is
        # needed.  Debug prints and the unused `prevac` were also removed.
        ob, rew, new, _ = env.step(ac)
        rews[i] = rew
        if new:
            ob = env.reset()
    return obs, rews, vpreds, news, acs
def worker(remote, env_fn_wrapper, pi, horizon, stochastic):
    """Subprocess entry point: owns one environment and serves commands
    received over the `remote` pipe ('step', 'reset', 'rollout',
    'get_spaces', 'close'); loops until 'close' is received."""
    env = env_fn_wrapper.x()
    # NOTE(review): connect_vrep is specific to the V-REP-backed env used
    # by this project — confirm against the env implementation
    env.connect_vrep()
    ac_sample = env.action_space.sample()  # not used, just so we have the datatype
    new_pre = True  # marks if we're on first timestep of an episode
    ob_pre = env.reset()
    while True:
        cmd, data = remote.recv()
        if cmd == 'step':
            ob, reward, done, info = env.step(data)
            if done:
                # auto-reset so the caller always gets a valid next ob
                ob = env.reset()
            remote.send((ob, reward, done, info))
        elif cmd == 'reset':
            ob = env.reset()
            remote.send(ob)
        elif cmd == 'close':
            remote.close()
            break
        elif cmd == 'get_spaces':
            remote.send((env.action_space, env.observation_space))
        elif cmd == 'rollout':
            print('rollout')
            # carry episode state (last ob / done flag) across rollouts
            obs, rews, vpreds, news, acs = rollout(env, ob_pre, new_pre, ac_sample, pi, horizon, stochastic)
            new_pre = news[-1]
            ob_pre = obs[-1]
            remote.send((obs, rews, vpreds, news, acs))
        else:
            raise NotImplementedError
class CloudpickleWrapper(object):
    """Wraps an arbitrary object so it survives multiprocessing transport:
    serialized with cloudpickle (which handles closures and lambdas that
    plain pickle rejects) and deserialized with the stdlib pickle module,
    since cloudpickle's output is an ordinary pickle stream."""

    def __init__(self, x):
        self.x = x

    def __getstate__(self):
        # imported lazily so merely constructing a wrapper needs no
        # third-party dependency
        import cloudpickle
        return cloudpickle.dumps(self.x)

    def __setstate__(self, ob):
        import pickle
        self.x = pickle.loads(ob)
class SubprocVecEnv(VecEnv):
    """Vectorized env running each sub-environment in its own process,
    communicating over pipes with the worker() function above."""

    def __init__(self, env_fns):
        """
        envs: list of gym environments to run in subprocesses
        """
        nenvs = len(env_fns)
        self.env_fns = env_fns
        self.remotes, self.work_remotes = zip(*[Pipe() for _ in range(nenvs)])
        # instantiate one env locally just to discover the action and
        # observation spaces; it is not used afterwards
        env0 = CloudpickleWrapper(env_fns[0]).x()
        # self.remotes[0].send(('get_spaces', None))
        self.action_space, self.observation_space = env0.action_space, env0.observation_space

    def start_subproc(self, pi, horizon, stochastic):
        # worker processes are started lazily, once the policy is available
        self.ps = [Process(target=worker, args=(work_remote, CloudpickleWrapper(env_fn), pi, horizon, stochastic))
            for (work_remote, env_fn) in zip(self.work_remotes, self.env_fns)]
        for p in self.ps:
            p.start()
            print('start p')

    # NOTE(review): this broadcast `step` is shadowed by the per-index
    # `step` defined further down (same name, later definition wins), so
    # it is dead code as written.
    def step(self, actions):
        for remote, action in zip(self.remotes, actions):
            remote.send(('step', action))
        results = [remote.recv() for remote in self.remotes]
        obs, rews, dones, infos = zip(*results)
        return np.stack(obs), np.stack(rews), np.stack(dones), infos

    # NOTE(review): likewise shadowed by the per-index `reset` below.
    def reset(self):
        for remote in self.remotes:
            remote.send(('reset', None))
        return np.stack([remote.recv() for remote in self.remotes])

    def rollout(self):
        # ask every worker to perform a full policy rollout
        for remote in self.remotes:
            remote.send(('rollout', None))
        return np.stack([remote.recv() for remote in self.remotes])

    # redefinition: this per-index version is the `step` actually in effect
    def step(self, action, index):
        remote = self.remotes[index]
        remote.send(('step', action))
        result = remote.recv()
        obs, rews, dones, infos = result
        return obs, rews, dones, infos

    # redefinition: this per-index version is the `reset` actually in effect
    def reset(self, index):
        remote = self.remotes[index]
        remote.send(('reset', None))
        return remote.recv()

    def close(self):
        # ask every worker to shut down, then reap the processes
        for remote in self.remotes:
            remote.send(('close', None))
        for p in self.ps:
            p.join()

    @property
    def num_envs(self):
        return len(self.remotes)
| {
"content_hash": "b5ad8f65a494c2301d0977a45c61c7b2",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 115,
"avg_line_length": 32.661764705882355,
"alnum_prop": 0.5634849167041873,
"repo_name": "pcchenxi/baseline",
"id": "705d65ac4bdb21b501ebfc3f83539e9159179568",
"size": "4442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "baselines/common/vec_env/subproc_vec_env.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2171"
},
{
"name": "C++",
"bytes": "242720"
},
{
"name": "Lua",
"bytes": "43847"
},
{
"name": "Python",
"bytes": "544643"
},
{
"name": "Shell",
"bytes": "2395"
}
],
"symlink_target": ""
} |
""" Adds a "category" capability to Traits-based classes, similar to that
provided by the Cocoa (Objective-C) environment for the Macintosh.
You can use categories to extend an existing HasTraits class, as an
alternative to subclassing. An advantage of categories over subclassing is
that you can access the added members on instances of the original class,
without having to change them to instances of a subclass. Unlike
subclassing, categories do not allow overriding trait attributes.
"""
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
from __future__ import absolute_import
from .has_traits import MetaHasTraits, MetaHasTraitsObject
#-------------------------------------------------------------------------------
# 'MetaCategory' class:
#-------------------------------------------------------------------------------
class MetaCategory ( MetaHasTraits ):
    """ Metaclass for Category subclasses (Python 2).

    Validates the ``class FooCategory(Category, Foo)`` usage pattern, then
    transplants the category's members onto the target base class (``Foo``)
    instead of leaving them on the new type.
    """

    def __new__ ( cls, class_name, bases, class_dict ):
        # Make sure the correct usage is being applied:
        if len( bases ) > 2:
            raise TypeError, \
                  "Correct usage is: class FooCategory(Category,Foo):"

        # Process any traits-related information in the class dictionary:
        MetaCategoryObject( cls, class_name, bases, class_dict, True )

        # Move all remaining items in our class dictionary to the base class's
        # dictionary:
        if len( bases ) == 2:
            category_class = bases[1]
            # NOTE: deleting from class_dict while iterating items() is safe
            # on Python 2 because items() returns a list snapshot.
            for name, value in class_dict.items():
                # Existing members of the target class are never overridden.
                if not hasattr( category_class, name ):
                    setattr( category_class, name, value )
                del class_dict[ name ]

        # Finish building the class using the updated class dictionary:
        return type.__new__( cls, class_name, bases, class_dict )
#-------------------------------------------------------------------------------
# 'MetaCategoryObject' class:
#-------------------------------------------------------------------------------
class MetaCategoryObject ( MetaHasTraitsObject ):
    """ Trait-metadata processor used by MetaCategory. """

    #---------------------------------------------------------------------------
    #  Adds the traits meta-data to the class:
    #---------------------------------------------------------------------------

    def add_traits_meta_data ( self, bases, class_dict, base_traits,
                               class_traits, instance_traits, prefix_traits,
                               listeners, view_elements ):
        """ Adds the traits meta-data to the class.

        With two bases (the Category usage pattern) the metadata is routed
        to the target class; otherwise falls back to the default behavior.
        """
        if len( bases ) == 2:
            # Update the class and each of the existing subclasses:
            bases[1]._add_trait_category( base_traits, class_traits,
                                          instance_traits, prefix_traits,
                                          listeners, view_elements )
        else:
            MetaHasTraitsObject.add_traits_meta_data( self, bases,
                class_dict, base_traits, class_traits, instance_traits,
                prefix_traits, listeners, view_elements )
#-------------------------------------------------------------------------------
# 'Category' class:
#-------------------------------------------------------------------------------
class Category ( object ):
    """ Used for defining "category" extensions to existing classes.

    To define a class as a category, specify "Category," followed by the name
    of the base class name in the base class list.

    The following example demonstrates defining a category::

        from traits.api import HasTraits, Str, Category

        class Base(HasTraits):
            x = Str("Base x")
            y = Str("Base y")

        class BaseExtra(Category, Base):
            z = Str("BaseExtra z")
    """
    # Python 2 metaclass hook: MetaCategory moves the members of a Category
    # subclass onto the target base class at class-creation time.
    __metaclass__ = MetaCategory
| {
"content_hash": "e40934e557791ccf55bfe0f92aa0dce8",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 80,
"avg_line_length": 41.064516129032256,
"alnum_prop": 0.49515579994763026,
"repo_name": "burnpanck/traits",
"id": "fdbc52332810658ccb282cdec8003ad502160a09",
"size": "4445",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "traits/category.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "660"
},
{
"name": "C",
"bytes": "186780"
},
{
"name": "Python",
"bytes": "1085281"
}
],
"symlink_target": ""
} |
import urllib
import json
import os
import constants
import accounts
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)

# Identity used for every balance lookup in this webhook.
# NOTE(review): constants.TEST_1 appears to be a fixed test account -- confirm.
PERSON = constants.TEST_1
@app.route('/webhook', methods=['POST'])
def webhook():
    """Webhook endpoint: parse the incoming JSON request, delegate to
    makeWebhookResult(), and return the result as a JSON response."""
    req = request.get_json(silent=True, force=True)
    print("Request:")
    print(json.dumps(req, indent=4))

    payload = json.dumps(makeWebhookResult(req), indent=4)
    print(payload)

    response = make_response(payload)
    response.headers['Content-Type'] = 'application/json'
    return response
def makeWebhookResult(req):
    """Build the webhook response dict for an 'account-balance' request.

    req: parsed webhook JSON; only req["result"] is inspected.
    Returns a dict with "speech", "displayText" and "source" keys, or
    constants.ERR_DICT(...) for any other action.
    """
    if req.get("result").get("action") != "account-balance":
        return constants.ERR_DICT(req.get("result").get("action"))

    result = req.get("result")
    parameters = result.get("parameters")

    # Normalise the account type.  Guarding against a missing
    # "account-type" parameter (the original crashed on .strip() of None).
    acct = (parameters.get("account-type") or "").strip()
    if acct == '401k':
        # "401k" is represented internally as "WI".
        acct = 'WI'

    if acct:
        if acct in constants.ACCT_TYPES:
            speech = "The value of your {ACCT_TYPE} accounts is {VALU} dollars.".format(VALU=accounts.get_balance(PERSON, acct), ACCT_TYPE=acct)
        else:
            speech = "You don't have any accounts of that type. The total value of your other accounts is {VALU} dollars.".format(
                VALU=accounts.get_balance(PERSON))
    else:
        # The original had identical branches for a present/absent
        # "qualifier" parameter plus a dead initial assignment; collapsed
        # into one default reply (behavior unchanged).
        speech = "The total value of your accounts is {VALU} dollars.".format(VALU=accounts.get_balance(PERSON))

    print("Response:")
    print(speech)
    speech += "\nAnything else I can help you with today?"
    return {
        "speech": speech,
        "displayText": speech,
        #"data": {},
        # "contextOut": [],
        "source": "home"
    }
# Script entry point: run the Flask development server on $PORT (default
# 5000), bound to all interfaces.  (Python 2 print statement.)
if __name__ == '__main__':
    port = int(os.getenv('PORT', 5000))
    print "Starting app on port %d" % port
    app.run(debug=True, port=port, host='0.0.0.0')
| {
"content_hash": "e473e5ee6daeb59b1df9c5a521303f28",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 144,
"avg_line_length": 27.49367088607595,
"alnum_prop": 0.6252302025782689,
"repo_name": "10239847509238470925387z/tmp123",
"id": "a477cbe56b0f143b53d9193de114c7aa2f5db5ad",
"size": "2195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3333"
}
],
"symlink_target": ""
} |
__author__ = 'OSAMA'
# This file tests serial communication with Arduino UNO
# when sending a message of data very quickly; Also,
# examine the initialization flag.
# Imports
import serial
import time
import threading
import socket
def main():
    # All the main code resides here.

    def read():
        # Background reader: echo every complete line arriving from the
        # serial port; closes the port on Ctrl-C.
        try:
            while True:
                if s.inWaiting() > 0:
                    print s.readline().strip('\n\r')
        except KeyboardInterrupt:
            s.close()

    comport = "COM5"
    baudrate = 115200
    # timeout = 0.1
    # writetimeout = 0.1
    s = serial.Serial(comport,baudrate)
    readthread = threading.Thread(target=read)
    # UDP socket used as a pacing trigger: each datagram received causes one
    # control message to be written to the Arduino.
    sock = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM)
    sock.bind(("127.0.0.2",5000))
    while not s.isOpen():
        s.open()
    else:
        print "Serial Port is open"
    # Give the Arduino time to finish its reset after the port opens.
    time.sleep(2)
    # NOTE(review): message layout "*<v1>,<v2>,<v3>,<v4>,<flag>$" is assumed
    # from the Arduino-side parser -- confirm against the sketch.
    message = "*255,0,-255,0,1$"
    readthread.start()
    try:
        while True:
            r = sock.recvfrom(1024)
            s.write(message)
            # print "sent:", message
    except KeyboardInterrupt:
        s.close()

if __name__ == "__main__":
    main()
"content_hash": "a8648e232dfece0f8228cba24a2d7e83",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 71,
"avg_line_length": 23.352941176470587,
"alnum_prop": 0.5575146935348446,
"repo_name": "osamadel/ROV-Hydra",
"id": "51f99c17999f78d2996b5f9e62fcda23a37efd82",
"size": "1191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "RPi/serialcommunication.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "39663"
},
{
"name": "Python",
"bytes": "23810"
}
],
"symlink_target": ""
} |
from os import path
from urllib import urlencode
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import current, URL
from gluon.html import *
#from gluon.storage import Storage
from s3.s3filter import S3FilterForm, S3FilterString, S3OptionsFilter
from s3.s3resource import S3FieldSelector, S3URLQuery
from s3.s3summary import S3Summary
from s3.s3utils import s3_auth_user_represent_name, S3CustomController
THEME = "CRMT"
# =============================================================================
class index(S3CustomController):
    """ Custom Home Page: map of coalitions, CMS blurbs per module, and a
    filterable site-activity log. """

    def __call__(self):
        T = current.T
        db = current.db
        s3db = current.s3db
        request = current.request
        response = current.response
        s3 = response.s3

        output = {}
        output["title"] = response.title = current.deployment_settings.get_system_name()

        # Map
        auth = current.auth
        is_logged_in = auth.is_logged_in()
        callback = None
        if is_logged_in:
            # Show the User's Coalition's Polygon
            org_group_id = auth.user.org_group_id
            if org_group_id:
                # Lookup Coalition Name
                table = s3db.org_group
                row = db(table.id == org_group_id).select(table.name,
                                                          limitby=(0, 1)
                                                          ).first()
                if row:
                    # JS run after map load: switch on only this
                    # coalition's layer.
                    callback = '''S3.gis.show_map();
var layer,layers=S3.gis.maps.default_map.layers;
for(var i=0,len=layers.length;i<len;i++){
layer=layers[i];
if(layer.name=='%s'){layer.setVisibility(true)}}''' % row.name
        if not callback:
            # Show all Coalition Polygons
            callback = '''S3.gis.show_map();
var layer,layers=S3.gis.maps.default_map.layers;
for(var i=0,len=layers.length;i<len;i++){
layer=layers[i];
if(layer.name=='All Coalitions'){layer.setVisibility(true)}}
'''
        gis = current.gis
        config = gis.get_config()
        config.zoom = 8
        # NOTE: "map" shadows the builtin; kept for compatibility.
        map = gis.show_map(width=770,
                           height=295,
                           callback=callback,
                           catalogue_layers=True,
                           collapsed=True,
                           save=False,
                           )
        output["map"] = map

        # Description of available data: one CMS blurb per menu entry.
        from s3db.cms import S3CMS
        for item in response.menu:
            item["cms"] = S3CMS.resource_content(module = item["c"],
                                                 resource = item["f"])

        # Site Activity Log (audit trail minus deletions)
        resource = s3db.resource("s3_audit")
        resource.add_filter(S3FieldSelector("~.method") != "delete")
        orderby = "s3_audit.timestmp desc"
        list_fields = ["id",
                       "method",
                       "user_id",
                       "tablename",
                       "record_id",
                       ]
        #current.deployment_settings.ui.customise_s3_audit()
        db.s3_audit.user_id.represent = s3_auth_user_represent_name
        list_id = "log"
        datalist, numrows, ids = resource.datalist(fields=list_fields,
                                                   start=None,
                                                   limit=4,
                                                   list_id=list_id,
                                                   orderby=orderby,
                                                   layout=s3.render_log)
        # Placeholder, replaced below when the user has a coalition filter.
        filter_form = DIV(_class="filter_form")
        if numrows == 0:
            # Empty table or just no match?
            from s3.s3crud import S3CRUD
            table = resource.table
            if "deleted" in table:
                available_records = db(table.deleted != True)
            else:
                available_records = db(table._id > 0)
            if available_records.select(table._id,
                                        limitby=(0, 1)).first():
                msg = DIV(S3CRUD.crud_string(resource.tablename,
                                             "msg_no_match"),
                          _class="empty")
            else:
                msg = DIV(S3CRUD.crud_string(resource.tablename,
                                             "msg_list_empty"),
                          _class="empty")
            data = msg
        else:
            # Render the list
            ajaxurl = URL(c="default", f="audit", args="datalist_f.dl")
            popup_url = URL(c="default", f="audit", args="datalist.popup")
            dl = datalist.html(ajaxurl=ajaxurl,
                               pagesize=4,
                               popup_url=popup_url,
                               popup_title=T("Updates"),
                               )
            data = dl
        # org_group_id is only bound when is_logged_in, so the short-circuit
        # below is safe.
        if is_logged_in and org_group_id:
            # Add a Filter
            filter_widgets = [S3OptionsFilter("user_id$org_group_id",
                                              label = "",
                                              # Can't just use "" as this is then omitted from rendering
                                              options = {"*": T("All"),
                                                         org_group_id: T("My Community"),
                                                         },
                                              cols = 2,
                                              multiple = False,
                                              ),
                              ]
            filter_submit_url = URL(c="default", f="index")
            filter_ajax_url = URL(c="default", f="audit", args=["filter.options"])
            filter_form = S3FilterForm(filter_widgets,
                                       filter_manager = False,
                                       formstyle = filter_formstyle,
                                       clear = False,
                                       submit = True,
                                       ajax = True,
                                       url = filter_submit_url,
                                       ajaxurl = filter_ajax_url,
                                       _class = "filter-form",
                                       _id = "%s-filter-form" % list_id
                                       )
            filter_form = filter_form.html(resource,
                                           request.get_vars,
                                           target=list_id,
                                           )
        output["updates"] = data
        output["filter_form"] = filter_form

        # Add JavaScript
        appname = request.application
        debug = s3.debug
        scripts_append = s3.scripts.append
        if debug:
            # Infinite Scroll doesn't make sense here, but currently required by dataLists.js
            scripts_append("/%s/static/scripts/jquery.infinitescroll.js" % appname)
            scripts_append("/%s/static/scripts/jquery.viewport.js" % appname)
            scripts_append("/%s/static/scripts/S3/s3.dataLists.js" % appname)
        else:
            scripts_append("/%s/static/scripts/S3/s3.dataLists.min.js" % appname)

        self._view(THEME, "index.html")
        return output
# =============================================================================
def filter_formstyle(row_id, label, widget, comment, hidden=False):
    """
    Custom Formstyle for FilterForm

    @param row_id: HTML id for the row
    @param label: the label
    @param widget: the form widget
    @param comment: the comment (unused by this formstyle)
    @param hidden: whether the row should initially be hidden or not
    """
    css_class = "advanced hide" if hidden else ""
    if label:
        return DIV(label, widget, _id=row_id, _class=css_class)
    return DIV(widget, _id=row_id, _class=css_class)
# =============================================================================
class filters(S3CustomController):
    """ Custom controller to manage saved filters """

    def __call__(self):
        """ Main entry point: lists the logged-in user's saved filters;
        POST requests (except data-list refreshes) update a filter title. """

        # Authorization (user must be logged in)
        auth = current.auth
        permissions = auth.permission
        if not auth.user:
            permissions.fail()
        fmt = permissions.format

        # Inline title edits arrive as plain POSTs.
        if current.request.env.request_method == "POST" and fmt != "dl":
            return self.update()

        pe_id = auth.user.pe_id
        s3 = current.response.s3

        # Filter: only this user's saved filters.
        f = S3FieldSelector("pe_id") == pe_id
        s3.filter = f

        # List Fields
        current.s3db.configure("pr_filter",
                               list_fields = ["title",
                                              "resource",
                                              "url",
                                              "query"],
                               list_layout = self.render_filter,
                               orderby = "resource")

        # Page length
        s3.dl_pagelength = 10

        # Data list
        current.request.args = ["datalist.%s" % fmt]
        output = current.rest_controller("pr", "filter",
                                         list_ajaxurl = URL(f="index",
                                                            args="filters.dl"))

        # Title and view (skipped for pure data-list refreshes)
        T = current.T
        if fmt != "dl":
            output["title"] = T("Saved Filters")
            self._view(THEME, "filters.html")

            # Script for inline-editing of filter title
            options = {"cssclass": "jeditable-input",
                       "tooltip": str(T("Click to edit"))}
            script = '''$('.jeditable').editable('%s',%s)''' % \
                     (URL(args="filters"), json.dumps(options))
            s3.jquery_ready.append(script)

        return output

    # -------------------------------------------------------------------------
    @classmethod
    def render_filter(cls, list_id, item_id, resource, rfields, record):
        """
        Custom dataList item renderer for 'Saved Filters'

        @param list_id: the HTML ID of the list
        @param item_id: the HTML ID of the item
        @param resource: the S3Resource to render
        @param rfields: the S3ResourceFields to render
        @param record: the record as dict
        """
        record_id = record["pr_filter.id"]
        item_class = "thumbnail"

        raw = record._row
        resource_name = raw["pr_filter.resource"]
        # Rebind "resource" to the resource the saved filter targets.
        resource = current.s3db.resource(resource_name)

        T = current.T

        # Resource title
        crud_strings = current.response.s3.crud_strings.get(resource.tablename)
        if crud_strings:
            resource_name = crud_strings.title_list
        else:
            # NOTE(review): the "string" module is not in the visible imports
            # of this file -- potential NameError on this path; confirm.
            resource_name = string.capwords(resource.name, "_")

        # Filter title
        title = record["pr_filter.title"]

        # Filter Query and Summary URLs
        fstring = S3FilterString(resource, raw["pr_filter.query"])
        query = fstring.represent()
        links = cls.summary_urls(resource,
                                 raw["pr_filter.url"],
                                 fstring.get_vars)

        # One action icon per available summary tab.
        actions = []
        if links:
            if "map" in links:
                actions.append(A(I(" ", _class="icon icon-globe"),
                                 _title=T("Open Map"),
                                 _href=links["map"]))
            if "table" in links:
                actions.append(A(I(" ", _class="icon icon-list"),
                                 _title=T("Open Table"),
                                 _href=links["table"]))
            if "chart" in links:
                actions.append(A(I(" ", _class="icon icon-list"),
                                 _title=T("Open Chart"),
                                 _href=links["chart"]))

        # Render the item
        item = DIV(DIV(DIV(actions,
                           _class="action-bar fleft"),
                       SPAN(T("%(resource)s Filter") % \
                            dict(resource=resource_name),
                            _class="card-title"),
                       DIV(A(I(" ", _class="icon icon-remove-sign"),
                             _title=T("Delete this Filter"),
                             _class="dl-item-delete"),
                           _class="edit-bar fright"),
                       _class="card-header"),
                   DIV(DIV(H5(title,
                              _id="filter-title-%s" % record_id,
                              _class="media-heading jeditable"),
                           DIV(query),
                           _class="media-body"),
                       _class="media"),
                   _class=item_class,
                   _id=item_id)

        return item

    # -------------------------------------------------------------------------
    def update(self):
        """ Simple ajax method to update a saved filter title """

        post_vars = current.request.post_vars
        # jeditable posts ids like "filter-title-<record_id>".
        record_id = post_vars["id"].rsplit("-", 1)[-1]
        new_title = post_vars["value"]
        if new_title:
            ftable = current.s3db.pr_filter
            success = current.db(ftable.id==record_id) \
                                .update(title=new_title)
        else:
            success = False
        if success:
            return new_title
        else:
            # NOTE(review): HTTP is not in the visible imports -- presumably
            # provided by the web2py/gluon execution environment; confirm.
            raise HTTP(400)

    # -------------------------------------------------------------------------
    @staticmethod
    def summary_urls(resource, url, filters):
        """
        Construct the summary tabs URLs for a saved filter.

        @param resource: the S3Resource
        @param url: the filter page URL
        @param filters: the filter GET vars
        """
        links = {}
        if not url:
            return links

        get_vars = S3URLQuery.parse_url(url)
        # Drop existing tab/widget selectors before merging in the filter.
        get_vars.pop("t", None)
        get_vars.pop("w", None)
        get_vars.update(filters)

        list_vars = []
        for (k, v) in get_vars.items():
            if v is None:
                continue
            values = v if type(v) is list else [v]
            for value in values:
                if value is not None:
                    list_vars.append((k, value))
        base_url = url.split("?", 1)[0]

        summary_config = S3Summary._get_config(resource)
        tab_idx = 0
        for section in summary_config:
            if section.get("common"):
                continue
            # section_id is kept for parity with upstream but unused here.
            section_id = section["name"]
            tab_vars = list_vars + [("t", str(tab_idx))]
            links[section["name"]] = "%s?%s" % (base_url, urlencode(tab_vars))
            tab_idx += 1

        return links
# END =========================================================================
| {
"content_hash": "cda9a2188f386718a3dd0d12629733ad",
"timestamp": "",
"source": "github",
"line_count": 405,
"max_line_length": 108,
"avg_line_length": 37.65432098765432,
"alnum_prop": 0.43875409836065576,
"repo_name": "code-for-india/sahana_shelter_worldbank",
"id": "46c9b8ce80591c7aa72524fba1735e0061f65815",
"size": "15275",
"binary": false,
"copies": "1",
"ref": "refs/heads/hackathon",
"path": "private/templates/CRMT/controllers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1214342"
},
{
"name": "JavaScript",
"bytes": "16755282"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Perl",
"bytes": "500"
},
{
"name": "Python",
"bytes": "27298931"
},
{
"name": "Shell",
"bytes": "893"
},
{
"name": "XSLT",
"bytes": "2245739"
}
],
"symlink_target": ""
} |
from fabric.api import local
import re
import sys
import os
import shutil
# Version files rewritten by the release tasks, and the temporary files
# used while rewriting them.
scala_version_path = 'project/Version.scala'
gradle_version_path = 'aeromock-dsl/gradle.properties'
gradle_tmp_path = 'tmp_gradle_version'
scala_tmp_path = 'tmp_scala_version'
versions_path = 'versions.txt'
versions_tmp_path = 'tmp_versions.txt'
def snapshot_release():
    """Publish the current -SNAPSHOT version of both artifacts."""
    # build aeromock-dsl
    local('./aeromock-dsl/gradlew -p aeromock-dsl install uploadArchives')
    # build aeromock
    local('./sbt publishSigned')
def release(next_version):
    """Full release: strip -SNAPSHOT, publish, tag, bump to the next
    -SNAPSHOT development version and push to origin.

    next_version: version string for the next development cycle
    (without the -SNAPSHOT suffix).
    """
    # remove SNAPSHOT scala
    to_release_version_scala()
    # remove SNAPSHOT Gradle
    to_release_version_gradle()
    release_version = get_release_version()

    # build aeromock-dsl
    local('./aeromock-dsl/gradlew -p aeromock-dsl install uploadArchives')
    # build aeromock
    local('./sbt publishSigned')

    # update versions.txt
    update_versions_file(release_version)

    # commit & tag release version
    commit('pre tag commit \'%s\'.' % release_version)
    tag_name = 'v%s' % release_version
    local('git tag %s' % tag_name)

    # to snapshot version
    to_snapshot_version_scala(next_version)
    to_snapshot_version_gradle(next_version)

    # commit snapshot version
    commit('- new version commit: \'%s-SNAPSHOT\'.' % next_version)

    # push
    local('git push origin master')
    local('git push origin %s' % tag_name)
def to_release_version_scala(version_path=None, tmp_path=None):
    """Rewrite the Scala version file, dropping the -SNAPSHOT suffix.

    version_path / tmp_path default to the module-level scala_version_path /
    scala_tmp_path; they are parameters mainly to allow testing.
    """
    if version_path is None:
        version_path = scala_version_path
    if tmp_path is None:
        tmp_path = scala_tmp_path
    p = re.compile(r'"\s*([0-9A-Z\-\.]*)-SNAPSHOT\s*"')
    # "with" guarantees both handles are closed on any error; the original
    # try/finally raised NameError when the first open() itself failed.
    with open(version_path, 'r') as version_file:
        with open(tmp_path, 'w') as write_file:
            for line in version_file:
                result = p.search(line)
                if result:
                    write_file.write(' val aeromock = "%s"\n' % result.group(1))
                else:
                    write_file.write(line)
    os.remove(version_path)
    shutil.move(tmp_path, version_path)
def to_release_version_gradle(version_path=None, tmp_path=None):
    """Rewrite gradle.properties, dropping the -SNAPSHOT suffix.

    version_path / tmp_path default to the module-level gradle_version_path /
    gradle_tmp_path; they are parameters mainly to allow testing.
    """
    if version_path is None:
        version_path = gradle_version_path
    if tmp_path is None:
        tmp_path = gradle_tmp_path
    p = re.compile(r'\s*version\s*=\s*([0-9A-Z\-\.]*)-SNAPSHOT\s*')
    # Context managers replace the fragile try/finally of the original
    # (which could NameError if the first open() failed).
    with open(version_path, 'r') as version_file:
        with open(tmp_path, 'w') as write_file:
            for line in version_file:
                result = p.search(line)
                if result:
                    write_file.write('version=%s\n' % result.group(1))
                else:
                    write_file.write(line)
    os.remove(version_path)
    shutil.move(tmp_path, version_path)
def to_snapshot_version_scala(next_version, version_path=None, tmp_path=None):
    """Rewrite the Scala version file to <next_version>-SNAPSHOT.

    version_path / tmp_path default to the module-level scala_version_path /
    scala_tmp_path; they are parameters mainly to allow testing.
    """
    if version_path is None:
        version_path = scala_version_path
    if tmp_path is None:
        tmp_path = scala_tmp_path
    p = re.compile(r'"\s*[0-9A-Z\-\.]*\s*"')
    # "with" guarantees the handles are closed even on errors (the original
    # try/finally could NameError if the first open() failed).
    with open(version_path, 'r') as version_file:
        with open(tmp_path, 'w') as write_file:
            for line in version_file:
                result = p.search(line)
                if result:
                    write_file.write(' val aeromock = "%s-SNAPSHOT"\n' % next_version)
                else:
                    write_file.write(line)
    os.remove(version_path)
    shutil.move(tmp_path, version_path)
def to_snapshot_version_gradle(next_version, version_path=None, tmp_path=None):
    """Rewrite gradle.properties to version=<next_version>-SNAPSHOT.

    version_path / tmp_path default to the module-level gradle_version_path /
    gradle_tmp_path; they are parameters mainly to allow testing.
    """
    if version_path is None:
        version_path = gradle_version_path
    if tmp_path is None:
        tmp_path = gradle_tmp_path
    p = re.compile(r'\s*version\s*=\s*[0-9A-Z\-\.]+\s*')
    # Context managers replace the fragile try/finally of the original.
    with open(version_path, 'r') as version_file:
        with open(tmp_path, 'w') as write_file:
            for line in version_file:
                result = p.search(line)
                if result:
                    write_file.write('version=%s-SNAPSHOT\n' % next_version)
                else:
                    write_file.write(line)
    os.remove(version_path)
    shutil.move(tmp_path, version_path)
def get_release_version(version_path=None):
    """Return the release version parsed from the Gradle properties file.

    version_path defaults to the module-level gradle_version_path.
    Raises SystemError when no version property is found.
    """
    if version_path is None:
        version_path = gradle_version_path
    p = re.compile(r'\s*version\s*=\s*([0-9A-Z\-\.]*)\s*')
    # "with" closes the handle deterministically (original used try/finally).
    with open(version_path, 'r') as version_file:
        version = version_file.read()
    result = p.search(version)
    if result:
        return result.group(1)
    raise SystemError('cannot get release version!')
def update_versions_file(release_version, path=None, tmp_path=None):
    """Prepend release_version as the newest entry of versions.txt.

    path / tmp_path default to the module-level versions_path /
    versions_tmp_path; they are parameters mainly to allow testing.
    """
    if path is None:
        path = versions_path
    if tmp_path is None:
        tmp_path = versions_tmp_path
    # Context managers replace the fragile try/finally of the original
    # (which could NameError if the first open() failed).
    with open(path, 'r') as versions_file:
        with open(tmp_path, 'w') as write_file:
            write_file.write(release_version + '\n')
            for line in versions_file:
                write_file.write(line)
    os.remove(path)
    shutil.move(tmp_path, path)
def finish_release(release_version, next_version):
    """Commit and tag an already-built release.

    NOTE(review): next_version is unused in this function (the snapshot
    bump is performed by release() instead) -- confirm before removing.
    """
    # add files
    local('git add -A')
    # commit release version
    local('git commit -m "[Aeromock Release Task][ci skip] pre tag commit \'%s\'."' % release_version)
    # create tag
    local('git tag v%s' % release_version)
def commit(message):
    """git-add everything and commit with the release-task prefix; the
    "[ci skip]" marker keeps CI from rebuilding these commits."""
    # add files
    local('git add -A')
    # commit
    local('git commit -m "[Aeromock Release Task][ci skip] %s"' % message)
| {
"content_hash": "f35f5811c941613cfc150931130835cf",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 102,
"avg_line_length": 28.426136363636363,
"alnum_prop": 0.6012392564461323,
"repo_name": "CyberAgent/aeromock",
"id": "2b0f76a358dc00c23f62f4abe0543159ad71a2c8",
"size": "5003",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "192"
},
{
"name": "CoffeeScript",
"bytes": "1636"
},
{
"name": "FreeMarker",
"bytes": "29542"
},
{
"name": "Groovy",
"bytes": "28723"
},
{
"name": "HTML",
"bytes": "35129"
},
{
"name": "Java",
"bytes": "3656"
},
{
"name": "JavaScript",
"bytes": "21641"
},
{
"name": "Protocol Buffer",
"bytes": "5617"
},
{
"name": "Scala",
"bytes": "331330"
},
{
"name": "Shell",
"bytes": "18746"
}
],
"symlink_target": ""
} |
"""
BlueButtonFHIR_API
FILE: apps.v1api.contract
Created: 1/4/16 1:22 AM
Write consent by beneficiary to allow use of their data by third party application
Status: Experimental
Get FHIR Contract format from http://hl7.org/fhir/contract.html
{
"resourceType" : "Contract",
// from Resource: id, meta, implicitRules, and language
// from DomainResource: text, contained, extension, and modifierExtension
"identifier" : { Identifier }, // Contract identifier
"issued" : "<dateTime>", // When this Contract was issued
"applies" : { Period }, // Effective time
"subject" : [{ Reference(Any) }], // Subject of this Contract
"authority" : [{ Reference(Organization) }], // Authority under which this Contract has standing
"domain" : [{ Reference(Location) }], // Domain in which this Contract applies
"type" : { CodeableConcept }, // Contract Tyoe
"subType" : [{ CodeableConcept }], // Contract Subtype
"action" : [{ CodeableConcept }], // Contract Action
"actionReason" : [{ CodeableConcept }], // Contract Action Reason
"actor" : [{ // Contract Actor
"entity" : { Reference(Contract|Device|Group|Location|Organization|Patient|
Practitioner|RelatedPerson|Substance) }, // R! Contract Actor Type
"role" : [{ CodeableConcept }] // Contract Actor Role
}],
"valuedItem" : [{ // Contract Valued Item
// entity[x]: Contract Valued Item Type. One of these 2:
"entityCodeableConcept" : { CodeableConcept },
"entityReference" : { Reference(Any) },
"identifier" : { Identifier }, // Contract Valued Item Identifier
"effectiveTime" : "<dateTime>", // Contract Valued Item Effective Tiem
"quantity" : { Quantity(SimpleQuantity) }, // Count of Contract Valued Items
"unitPrice" : { Quantity(Money) }, // Contract Valued Item fee, charge, or cost
"factor" : <decimal>, // Contract Valued Item Price Scaling Factor
"points" : <decimal>, // Contract Valued Item Difficulty Scaling Factor
"net" : { Quantity(Money) } // Total Contract Valued Item Value
}],
"signer" : [{ // Contract Signer
"type" : { Coding }, // R! Contract Signer Type
"party" : { Reference(Organization|Patient|Practitioner|RelatedPerson) }, // R! Contract Signatory Party
"signature" : "<string>" // R! Contract Documentation Signature
}],
"term" : [{ // Contract Term List
"identifier" : { Identifier }, // Contract Term identifier
"issued" : "<dateTime>", // Contract Term Issue Date Time
"applies" : { Period }, // Contract Term Effective Time
"type" : { CodeableConcept }, // Contract Term Type
"subType" : { CodeableConcept }, // Contract Term Subtype
"subject" : { Reference(Any) }, // Subject of this Contract Term
"action" : [{ CodeableConcept }], // Contract Term Action
"actionReason" : [{ CodeableConcept }], // Contract Term Action Reason
"actor" : [{ // Contract Term Actor List
"entity" : { Reference(Contract|Device|Group|Location|Organization|
Patient|Practitioner|RelatedPerson|Substance) }, // R! Contract Term Actor
"role" : [{ CodeableConcept }] // Contract Term Actor Role
}],
"text" : "<string>", // Human readable Contract term text
"valuedItem" : [{ // Contract Term Valued Item
// entity[x]: Contract Term Valued Item Type. One of these 2:
"entityCodeableConcept" : { CodeableConcept },
"entityReference" : { Reference(Any) },
"identifier" : { Identifier }, // Contract Term Valued Item Identifier
"effectiveTime" : "<dateTime>", // Contract Term Valued Item Effective Tiem
"quantity" : { Quantity(SimpleQuantity) }, // Contract Term Valued Item Count
"unitPrice" : { Quantity(Money) }, // Contract Term Valued Item fee, charge, or cost
"factor" : <decimal>, // Contract Term Valued Item Price Scaling Factor
"points" : <decimal>, // Contract Term Valued Item Difficulty Scaling Factor
"net" : { Quantity(Money) } // Total Contract Term Valued Item Value
}],
"group" : [{ Content as for Contract.term }] // Nested Contract Term Group
}],
// binding[x]: Binding Contract. One of these 2:
"bindingAttachment" : { Attachment },
"bindingReference" : { Reference(Composition|DocumentReference|
QuestionnaireResponse) },
"friendly" : [{ // Contract Friendly Language
// content[x]: Easily comprehended representation of this Contract. One of these 2:
"contentAttachment" : { Attachment }
"contentReference" : { Reference(Composition|DocumentReference|
QuestionnaireResponse) }
}],
"legal" : [{ // Contract Legal Language
// content[x]: Contract Legal Text. One of these 2:
"contentAttachment" : { Attachment }
"contentReference" : { Reference(Composition|DocumentReference|
QuestionnaireResponse) }
}],
"rule" : [{ // Computable Contract Language
// content[x]: Computable Contract Rules. One of these 2:
"contentAttachment" : { Attachment }
"contentReference" : { Reference(DocumentReference) }
}]
}
"""
__author__ = 'Mark Scrimshire:@ekivemark'
from django.conf import settings
def contract_create(contract_info=None):
    """
    Create a new consent Contract (currently a stub that only logs).

    contract_info: optional dict of contract attributes.
    Returns True.
    """
    # BUG FIX: the original used a mutable default argument ({}), which is
    # shared across calls; use None and build a fresh dict per call instead.
    if contract_info is None:
        contract_info = {}
    if settings.DEBUG:
        print("In contract_create with contract_info:", contract_info)
    return True
| {
"content_hash": "ff0b57619ae6a944fa30f538b4d55e7a",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 109,
"avg_line_length": 46.892857142857146,
"alnum_prop": 0.6713632901751714,
"repo_name": "ekivemark/BlueButtonFHIR_API",
"id": "25cc917b26808f7902aee415f3383c9b04b96aba",
"size": "5328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/v1api/views/contract.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "251473"
},
{
"name": "Dockerfile",
"bytes": "326"
},
{
"name": "HTML",
"bytes": "474740"
},
{
"name": "JavaScript",
"bytes": "3335"
},
{
"name": "Python",
"bytes": "419723"
},
{
"name": "Shell",
"bytes": "940"
}
],
"symlink_target": ""
} |
"""
The MatchMaker classes should except a Topic or Fanout exchange key and
return keys for direct exchanges, per (approximate) AMQP parlance.
"""
import contextlib
import itertools
import json
import logging
from cinder.openstack.common import cfg
from cinder.openstack.common.gettextutils import _
# Configuration options for the MatchMaker subsystem.
matchmaker_opts = [
    # Matchmaker ring file
    cfg.StrOpt('matchmaker_ringfile',
               default='/etc/nova/matchmaker_ring.json',
               help='Matchmaker ring file (JSON)'),
]

CONF = cfg.CONF
CONF.register_opts(matchmaker_opts)
LOG = logging.getLogger(__name__)
# Convenience alias for contextlib.contextmanager.
contextmanager = contextlib.contextmanager
class MatchMakerException(Exception):
    """Signified a match could not be found."""
    # Default translated message used by callers when raising/formatting.
    message = _("Match not found by MatchMaker.")
class Exchange(object):
    """
    Abstract lookup: maps a routing key to a list of worker queues.

    Subclass this to support hashtables, dns, etc.
    """
    def __init__(self):
        pass

    def run(self, key):
        # Concrete exchanges must implement the actual lookup.
        raise NotImplementedError()
class Binding(object):
    """
    Abstract predicate deciding whether a routing key matches.
    """
    def __init__(self):
        pass

    def test(self, key):
        # Concrete bindings must implement the match test.
        raise NotImplementedError()
class MatchMakerBase(object):
    """Match Maker Base Class: routes a key through registered bindings."""
    def __init__(self):
        # Each entry is (binding, exchange, negate, last_if_true); the
        # negate flag (index 2) is reserved for a possible
        # add_negate_binding and is always False today.
        self.bindings = []

    def add_binding(self, binding, rule, last=True):
        """Register a binding/exchange pair; ``last`` stops further rule
        processing once this binding matches."""
        self.bindings.append((binding, rule, False, last))

    #NOTE(ewindisch): kept the following method in case we implement the
    # underlying support.
    #def add_negate_binding(self, binding, rule, last=True):
    #    self.bindings.append((binding, rule, True, last))

    def queues(self, key):
        """Return the accumulated worker queues matching ``key``."""
        matched = []
        for binding, exchange, _negate, stop_after in self.bindings:
            if not binding.test(key):
                continue
            matched.extend(exchange.run(key))
            # Honor last-if-true: stop at the first terminal match.
            if stop_after:
                break
        return matched
class DirectBinding(Binding):
    """
    Specifies a host in the key via a '.' character

    Although dots are used in the key, the behavior here is
    that it maps directly to a host, thus direct.
    """
    def test(self, key):
        # Idiomatic boolean return (was an if/return True/return False chain).
        return '.' in key
class TopicBinding(Binding):
    """Matches 'bare' keys that contain no dots.

    AMQP generally considers topic exchanges to be those *with* dots,
    but we deviate here in terminology: behavior for dotless keys
    matches that of a topic exchange (whereas keys with dots behave
    like a direct exchange).
    """

    def test(self, key):
        return '.' not in key
class FanoutBinding(Binding):
    """Matches fanout keys, i.e. keys prefixed with the 'fanout~' string."""

    def test(self, key):
        return key.startswith('fanout~')
class PublisherBinding(Binding):
    """Matches publisher keys, i.e. keys prefixed with 'publishers~'."""

    def test(self, key):
        return key.startswith('publishers~')
class StubExchange(Exchange):
    """Exchange that does nothing."""
    def run(self, key):
        # Pass the key through with no host resolution; per MatchMakerStub's
        # docstring this suits AMQP/brokered queues where the broker routes.
        return [(key, None)]
class RingExchange(Exchange):
    """
    Match Maker where hosts are loaded from a static file containing
    a hashmap (JSON formatted).

    __init__ takes optional ring dictionary argument, otherwise
    loads the ringfile from CONF.matchmaker_ringfile.
    """
    def __init__(self, ring=None):
        super(RingExchange, self).__init__()
        if ring:
            self.ring = ring
        else:
            # Use a context manager so the ringfile is closed even when
            # json.load() raises (the original leaked the handle on error).
            with open(CONF.matchmaker_ringfile, 'r') as fh:
                self.ring = json.load(fh)
        # Per-key round-robin iterators over each key's host list.
        self.ring0 = {}
        for k in self.ring.keys():
            self.ring0[k] = itertools.cycle(self.ring[k])

    def _ring_has(self, key):
        """Return True when the ring defines hosts for *key*."""
        return key in self.ring0
class RoundRobinRingExchange(RingExchange):
    """A Topic Exchange based on a hashmap."""
    def __init__(self, ring=None):
        super(RoundRobinRingExchange, self).__init__(ring)

    def run(self, key):
        """Return one (queue, host) pair, rotating through the key's hosts."""
        if not self._ring_has(key):
            # warning() replaces the deprecated warn() alias
            # (warn() was removed from the logging module in Python 3.13).
            LOG.warning(
                _("No key defining hosts for topic '%s', "
                  "see ringfile") % (key, )
            )
            return []
        host = next(self.ring0[key])
        return [(key + '.' + host, host)]
class PublisherRingExchange(RingExchange):
    """Publisher Exchange based on a hashmap."""
    def __init__(self, ring=None):
        super(PublisherRingExchange, self).__init__(ring)

    def run(self, key):
        # Assume starts with "publishers~", strip it for lookup.
        nkey = key.split('publishers~')[1:][0]
        if not self._ring_has(nkey):
            # warning() replaces the deprecated warn() alias.
            LOG.warning(
                _("No key defining hosts for topic '%s', "
                  "see ringfile") % (nkey, )
            )
            return []
        # List comprehension keeps the Python 2 list return type under
        # Python 3, where map() would return a lazy iterator instead.
        return [(key + '.' + host, host) for host in self.ring[nkey]]
class FanoutRingExchange(RingExchange):
    """Fanout Exchange based on a hashmap."""
    def __init__(self, ring=None):
        super(FanoutRingExchange, self).__init__(ring)

    def run(self, key):
        # Assume starts with "fanout~", strip it for lookup.
        nkey = key.split('fanout~')[1:][0]
        if not self._ring_has(nkey):
            # warning() replaces the deprecated warn() alias.
            LOG.warning(
                _("No key defining hosts for topic '%s', "
                  "see ringfile") % (nkey, )
            )
            return []
        # Fanout always adds localhost to the ring hosts.  The list
        # comprehension keeps the Python 2 list return type under
        # Python 3, where map() would return a lazy iterator instead.
        return [(key + '.' + host, host)
                for host in self.ring[nkey] + ['localhost']]
class LocalhostExchange(Exchange):
    """Exchange where all direct topics are local."""
    def __init__(self):
        # Bug fix: the original called super(Exchange, self), which
        # resolves *past* Exchange in the MRO and skips its __init__;
        # super() must be given the defining class itself.
        super(LocalhostExchange, self).__init__()

    def run(self, key):
        # Route the bare topic (text before the first '.') to localhost.
        return [(key.split('.')[0] + '.localhost', 'localhost')]
class DirectExchange(Exchange):
    """
    Exchange where all topic keys are split, sending to second half.
    i.e. "compute.host" sends a message to "compute" running on "host"
    """
    def __init__(self):
        # Bug fix: the original called super(Exchange, self), which
        # resolves *past* Exchange in the MRO and skips its __init__;
        # super() must be given the defining class itself.
        super(DirectExchange, self).__init__()

    def run(self, key):
        b, e = key.split('.', 1)
        return [(b, e)]
class MatchMakerRing(MatchMakerBase):
    """
    Match Maker where hosts are loaded from a static hashmap.
    """
    def __init__(self, ring=None):
        super(MatchMakerRing, self).__init__()
        # Order matters: prefix rules (publishers~/fanout~) are tested
        # before the generic direct/topic rules.
        rules = (
            (PublisherBinding(), PublisherRingExchange(ring)),
            (FanoutBinding(), FanoutRingExchange(ring)),
            (DirectBinding(), DirectExchange()),
            (TopicBinding(), RoundRobinRingExchange(ring)),
        )
        for binding, exchange in rules:
            self.add_binding(binding, exchange)
class MatchMakerLocalhost(MatchMakerBase):
    """
    Match Maker where all bare topics resolve to localhost.
    Useful for testing.
    """
    def __init__(self):
        super(MatchMakerLocalhost, self).__init__()
        # Everything except direct keys resolves to localhost; direct
        # keys already carry their host inside the key itself.
        rules = (
            (PublisherBinding(), LocalhostExchange()),
            (FanoutBinding(), LocalhostExchange()),
            (DirectBinding(), DirectExchange()),
            (TopicBinding(), LocalhostExchange()),
        )
        for binding, exchange in rules:
            self.add_binding(binding, exchange)
class MatchMakerStub(MatchMakerBase):
    """
    Match Maker where topics are untouched.
    Useful for testing, or for AMQP/brokered queues.
    Will not work where knowledge of hosts is known (i.e. zeromq)
    """
    def __init__(self):
        # Bug fix: the original called super(MatchMakerLocalhost, self),
        # which raises TypeError at runtime because self is not a
        # MatchMakerLocalhost instance; super() must name this class.
        super(MatchMakerStub, self).__init__()

        self.add_binding(PublisherBinding(), StubExchange())
        self.add_binding(FanoutBinding(), StubExchange())
        self.add_binding(DirectBinding(), StubExchange())
        self.add_binding(TopicBinding(), StubExchange())
| {
"content_hash": "8e0e3b0d6847b33e953fb5c8c1cea75c",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 79,
"avg_line_length": 29.125,
"alnum_prop": 0.6032567533451149,
"repo_name": "rnirmal/cinder",
"id": "ffe4870aa4a0c12df6e7336fe076d2d3af53e1e6",
"size": "8585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/openstack/common/rpc/matchmaker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "1888971"
},
{
"name": "Shell",
"bytes": "7441"
}
],
"symlink_target": ""
} |
# Example script for the Turbo_I2C BMP280 driver (Python 2: print statements).
import Turbo_I2C.BMP280 as BMP280
# Alternate construction with explicit oversampling mode (left disabled):
#sensor = BMP280(mode=BMP280_OSAMPLE_16)
sensor = BMP280.BMP280()
# Read temperature (deg C) and raw pressure (Pa) from the sensor.
degrees = sensor.read_temperature()
pascals = sensor.read_pressure()
# Convert to hectopascals (1 hPa = 100 Pa).
hectopascals = pascals / 100
print 'ChipID = 0x{0:2x} '.format(sensor.ChipID )
print 'Version = 0x{0:2x}'.format(sensor.Version )
# NOTE(review): t_fine looks like the chip's internal fine-temperature
# word rather than a timestamp, despite the label -- confirm vs datasheet.
print 'Timestamp = {0:0.3f}'.format(sensor.t_fine)
print 'Temp = {0:0.3f} deg C'.format(degrees)
print 'Pressure = {0:0.2f} hPa'.format(hectopascals)
# Humidity is only available on some sensor variants; guard on the flag.
if sensor.HasHumidity :
    humidity = sensor.read_humidity()
    print 'Humidity = {0:0.2f} %'.format(humidity)
else:
    print 'Sensor does not have Humidity'
| {
"content_hash": "74fff6bf498433d53fa7fe895d7d1026",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 53,
"avg_line_length": 31.3,
"alnum_prop": 0.694888178913738,
"repo_name": "emwtur/Turbo_Python_I2C",
"id": "a7a02d0b82eee8f18398721aaa587441eae4b723",
"size": "704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/exampleBMP280.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "48906"
}
],
"symlink_target": ""
} |
from django.test.utils import override_settings
from hc.api.models import Channel
from hc.test import BaseTestCase
class AddSlackTestCase(BaseTestCase):
    """Tests for the "add Slack integration" view."""

    def setUp(self):
        super().setUp()
        self.url = "/projects/%s/add_slack/" % self.project.code

    def test_instructions_work(self):
        self.client.login(username="alice@example.org", password="password")
        response = self.client.get(self.url)
        self.assertContains(response, "Integration Settings", status_code=200)

    def test_it_works(self):
        payload = {"value": "http://example.org"}
        self.client.login(username="alice@example.org", password="password")
        response = self.client.post(self.url, payload)
        self.assertRedirects(response, self.channels_url)

        channel = Channel.objects.get()
        self.assertEqual(channel.kind, "slack")
        self.assertEqual(channel.value, "http://example.org")
        self.assertEqual(channel.project, self.project)

    def test_it_rejects_bad_url(self):
        payload = {"value": "not an URL"}
        self.client.login(username="alice@example.org", password="password")
        response = self.client.post(self.url, payload)
        self.assertContains(response, "Enter a valid URL")

    def test_it_requires_rw_access(self):
        # Demote Bob to read-only before trying to access the page.
        self.bobs_membership.role = "r"
        self.bobs_membership.save()

        self.client.login(username="bob@example.org", password="password")
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 403)

    @override_settings(SLACK_ENABLED=False)
    def test_it_handles_disabled_integration(self):
        self.client.login(username="alice@example.org", password="password")
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 404)
| {
"content_hash": "f821d03364313475294c8875d7d1fb8f",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 76,
"avg_line_length": 36.46808510638298,
"alnum_prop": 0.6534422403733956,
"repo_name": "iphoting/healthchecks",
"id": "73fa1269dfeb422765b999d5d3da79dbcb149bec",
"size": "1714",
"binary": false,
"copies": "1",
"ref": "refs/heads/heroku",
"path": "hc/front/tests/test_add_slack.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "64145"
},
{
"name": "Dockerfile",
"bytes": "939"
},
{
"name": "HTML",
"bytes": "595497"
},
{
"name": "JavaScript",
"bytes": "55883"
},
{
"name": "Less",
"bytes": "14135"
},
{
"name": "Python",
"bytes": "894208"
},
{
"name": "Shell",
"bytes": "4382"
}
],
"symlink_target": ""
} |
"""
Fantastic Add-on
Copyright (C) 2016 Fantastic
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os.path

# Export every sibling .py module (except dunder files such as
# __init__.py) so that ``from package import *`` pulls them all in.
files = os.listdir(os.path.dirname(__file__))
__all__ = [
    filename[:-3]
    for filename in files
    if filename.endswith('.py') and not filename.startswith('__')
]
| {
"content_hash": "198a56075eb5303a08a0a94a9bfef528",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 109,
"avg_line_length": 35.64,
"alnum_prop": 0.7182940516273849,
"repo_name": "TheWardoctor/Wardoctors-repo",
"id": "9a8a264a48e3d3f5c25a17483284b0d8113523f0",
"size": "916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "script.module.fantastic/lib/resources/lib/sources/gr/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "3208"
},
{
"name": "JavaScript",
"bytes": "115722"
},
{
"name": "Python",
"bytes": "34405207"
},
{
"name": "Shell",
"bytes": "914"
}
],
"symlink_target": ""
} |
import socket
from pysnmp import debug
# Socket option constants that some platforms' socket modules lack
# (presumably the Linux numeric values -- confirm before reuse elsewhere).
SYMBOLS = {
    'IP_PKTINFO': 8,
    'IP_TRANSPARENT': 19,
    'SOL_IPV6': 41,
    'IPV6_RECVPKTINFO': 49,
    'IPV6_PKTINFO': 50,
    'IPV6_TRANSPARENT': 75
}
# Monkey-patch any missing constant onto the socket module so the rest
# of the carrier code can reference it unconditionally; log a warning
# (guarded by the I/O debug flag) whenever a fallback value is assumed.
for symbol, value in SYMBOLS.items():
    if not hasattr(socket, symbol):
        setattr(socket, symbol, value)
        debug.logger & debug.FLAG_IO and debug.logger(
            'WARNING: the socket module on this platform misses option %s. '
            'Assuming its value is %d.' % (symbol, value)
        )
| {
"content_hash": "66377b60c51713bbf0c8893c994e8e82",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 76,
"avg_line_length": 24.571428571428573,
"alnum_prop": 0.6046511627906976,
"repo_name": "etingof/pysnmp",
"id": "bba068faed7c653d8ad6208a083380b38be3035e",
"size": "673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pysnmp/carrier/sockfix.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "1453555"
},
{
"name": "Shell",
"bytes": "1312"
}
],
"symlink_target": ""
} |
"""
Created on Sat Mar 25 17:21:17 2017
@author: kissf
"""
from .dist import *
from .ioRule import *
from .queue_m import *
| {
"content_hash": "b4e5c7304e9055809567c0fc01daffee",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 35,
"avg_line_length": 11.636363636363637,
"alnum_prop": 0.65625,
"repo_name": "kissf-lu/jupyter_app",
"id": "2c485dfe84c41b3bcd4818e93bb2cc0586974e8b",
"size": "152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ipython/py36_erzhou_input/dist_time/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1175274"
},
{
"name": "Python",
"bytes": "2436676"
}
],
"symlink_target": ""
} |
import logging
from common_fixtures import * # NOQA
DEFAULT_TIMEOUT = 900
subscription_id = os.environ.get('AZURE_SUBSCRIPTION_ID')
subscription_cert = os.environ.get('AZURE_SUBSCRIPTION_CERT')
# Use azure settings from environment variables , if set
i = 'b39f27a8b8c64d52b05eac6a62ebad85__'
i = i + 'Ubuntu-14_04_1-LTS-amd64-server-20140927-en-us-30GB'
image = os.environ.get('AZURE_IMAGE', i)
location = os.environ.get('AZURE_LOCATION', "West US")
username = os.environ.get('AZURE_USERNAME', "")
password = os.environ.get('AZURE_PASSWORD', "")
size = os.environ.get('AZURE_SIZE', "Small")
if_machine_azure = pytest.mark.skipif(
not os.environ.get('AZURE_SUBSCRIPTION_ID') or
not os.environ.get('AZURE_SUBSCRIPTION_CERT'),
reason='Azure SubscriptionId/SubscriptionCert/AuthToken is not set')
# Get logger
logger = logging.getLogger(__name__)
@pytest.fixture(scope='session', autouse=True)
def register_host(admin_client):
    """Session-wide fixture: store the API host setting from cattle_url()."""
    url = cattle_url()
    # Strip the scheme ("http://", "https://") to keep host[:port]/path.
    api_host = url[url.index("//") + 2:]
    admin_client.create_setting(name="api.host", value=api_host)
@if_machine_azure
def test_azure_machine_all_params(client):
    """Create an Azure machine with every config field and run its lifecycle."""
    # Single source of truth for the Azure settings; the same values are
    # sent in the create request and passed along as the expected config.
    azure_config = {"subscriptionId": subscription_id,
                    "subscriptionCert": subscription_cert,
                    "image": image,
                    "location": location,
                    "username": username,
                    "password": password,
                    "size": size}
    create_args = {"name": random_str(),
                   "azureConfig": azure_config}
    azure_machine_life_cycle(client, create_args, dict(azure_config))
def azure_machine_life_cycle(client, configs, expected_values):
    """Create a machine, wait for its host to appear, then remove both."""
    machine = client.wait_success(client.create_machine(**configs),
                                  timeout=DEFAULT_TIMEOUT)
    assert machine.state == 'active'

    # Wait until a host shows up for the machine and goes active.
    machine = wait_for_host(client, machine)
    host = machine.hosts()[0]
    assert host.state == 'active'
    assert machine.accountId == host.accountId

    # Removing the machine must also remove its associated host.
    machine = client.wait_success(machine.remove())
    assert machine.state == 'removed'
    assert client.reload(machine.hosts()[0]).state == 'removed'
def wait_for_host(client, machine):
    """Block until *machine* has exactly one active host; return the machine."""
    def _has_single_host(m):
        return len(m.hosts()) == 1

    wait_for_condition(client,
                       machine,
                       _has_single_host,
                       lambda m: 'Number of hosts associated with machine ' +
                                 str(len(m.hosts())),
                       DEFAULT_TIMEOUT)
    host = machine.hosts()[0]
    wait_for_condition(client,
                       host,
                       lambda h: h.state == 'active',
                       lambda h: 'Host state is ' + h.state
                       )
    return machine
| {
"content_hash": "46470ecfdb2dee39f7e3d76d07e504fd",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 77,
"avg_line_length": 35.946236559139784,
"alnum_prop": 0.575231827699671,
"repo_name": "sonchang/validation-tests",
"id": "2b1533df519c305680a98bb1ad4d95a2027dead3",
"size": "3343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/validation/cattlevalidationtest/core/test_machine_azure.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "221466"
},
{
"name": "Shell",
"bytes": "3026"
}
],
"symlink_target": ""
} |
import common as c
from config import bitness, msvc_version, build_dir, dependencies_dir, build_type
import os
import platform
c.print('>> Installing leptonica')
# Install location and the pinned upstream release to build.
install_dir = dependencies_dir
url = 'https://github.com/DanBloomberg/leptonica/releases/download/1.82.0/leptonica-1.82.0.tar.gz'
required_version = '1.82.0'
build_type_flag = 'Debug' if build_type == 'debug' else 'Release'
# Cache marker: when its contents match version + build type, the
# existing install is reused instead of rebuilding.
cache_file = install_dir + '/leptonica.cache'
cache_file_data = required_version + build_type_flag
def check_existing():
    """Return True when a previously-built leptonica matching the required
    version and build type is already installed; also (re)creates the
    platform-specific unversioned library symlinks as a side effect."""
    # No cache marker -> never built here before.
    if not os.path.exists(cache_file):
        return False
    with open(cache_file, 'r') as f:
        cached = f.read()
    # Marker exists but for a different version/build type.
    if cached != cache_file_data:
        return False
    # Verify the actual library artifacts and refresh unversioned links.
    if platform.system() == "Windows":
        dll = install_dir + '/bin/leptonica-1.82.0.dll'
        lib = install_dir + '/lib/leptonica-1.82.0.lib'
        if not os.path.exists(dll) or not os.path.exists(lib):
            return False
        c.symlink(dll, install_dir + '/bin/leptonica.dll')
        c.symlink(lib, install_dir + '/lib/leptonica.lib')
    elif platform.system() == "Darwin":
        lib = install_dir + '/lib/libleptonica.1.82.0.dylib'
        if not os.path.exists(lib):
            return False
        c.symlink(lib, install_dir + '/lib/libleptonica.dylib')
    else:
        if not os.path.exists(install_dir + '/lib/libleptonica.so'):
            return False
    # Headers must have been installed too.
    includes_path = install_dir + '/include/leptonica'
    if len(c.get_folder_files(includes_path)) == 0:
        return False
    # Cross-check the CMake-recorded version against the required one.
    version_file = install_dir + '/lib/cmake/leptonica/LeptonicaConfig-version.cmake'
    if not os.path.exists(version_file):
        return False
    with open(version_file, 'rt') as f:
        existing_version = f.readline()[22:28]  # set(Leptonica_VERSION 1.82.0)
        if existing_version != required_version:
            return False
    return True
# Fast path: reuse a cached build when version and build type match.
if check_existing():
    c.print('>> Using cached')
    exit(0)

# Download and unpack the source tarball next to a stable symlink name.
archive = os.path.basename(url)
c.download(url, archive)

src_dir = os.path.abspath('leptonica_src')
c.extract(archive, '.')
c.symlink(c.get_archive_top_dir(archive), src_dir)

# Patch the upstream CMakeLists to disable the WebP dependency probe.
with open('{}/CMakeLists.txt'.format(src_dir), 'r+') as f:
    data = f.read()
    data = data.replace('pkg_check_modules(WEBP', '#pkg_check_modules(WEBP')
    data = data.replace('if(NOT WEBP', 'if(FALSE')
    f.seek(0, os.SEEK_SET)
    f.write(data)

c.ensure_got_path(install_dir)

# Configure out-of-source in a clean build directory.
c.recreate_dir(build_dir)
os.chdir(build_dir)

cmake_args = '"{}" -DCMAKE_INSTALL_PREFIX="{}" -DBUILD_SHARED_LIBS=ON \
-DSW_BUILD=OFF'.format(src_dir, install_dir,)

# On Windows, load the MSVC environment and pass the target architecture.
if platform.system() == "Windows":
    env_cmd = c.get_msvc_env_cmd(bitness=bitness, msvc_version=msvc_version)
    c.apply_cmd_env(env_cmd)
    cmake_args += ' ' + c.get_cmake_arch_args(bitness=bitness)

c.set_make_threaded()
c.run('cmake {}'.format(cmake_args))
build_type_flag = 'Debug' if build_type == 'debug' else 'Release'
c.run('cmake --build . --config {}'.format(build_type_flag))
c.run('cmake --build . --target install --config {}'.format(build_type_flag))

# Record the cache marker, then re-run check_existing() to create the
# unversioned symlinks and confirm the install is actually usable.
with open(cache_file, 'w') as f:
    f.write(cache_file_data)

if not check_existing():  # create links
    c.print('>> Build failed')
    exit(1)
| {
"content_hash": "a5f2de52671e3f3cdd5513b29fd8480e",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 98,
"avg_line_length": 31.89,
"alnum_prop": 0.6462841015992474,
"repo_name": "OneMoreGres/ScreenTranslator",
"id": "6361abfed475cd9b801391d1f10fba8176d815a3",
"size": "3189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "share/ci/get_leptonica.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "228309"
},
{
"name": "Dockerfile",
"bytes": "267"
},
{
"name": "JavaScript",
"bytes": "10715"
},
{
"name": "Python",
"bytes": "38259"
},
{
"name": "QMake",
"bytes": "3871"
},
{
"name": "Shell",
"bytes": "1136"
}
],
"symlink_target": ""
} |
from binding import *
from .namespace import llvm
from .Type import Type
# Forward-declare GenericValue inside the llvm namespace (llvmpy binding DSL).
GenericValue = llvm.Class()

@GenericValue
class GenericValue:
    # Expose the underlying C++ destructor on wrapped instances.
    delete = Destructor()

    # Helper: declare a static factory that returns ptr(GenericValue).
    def _factory(name, *argtys):
        return CustomStaticMethod('GenericValue_' + name,
                                  ptr(GenericValue), *argtys)

    # Static constructors, one per payload kind.
    CreateFloat = _factory('CreateFloat', cast(float, Float))
    CreateDouble = _factory('CreateDouble', cast(float, Float))
    CreateInt = _factory('CreateInt', ptr(Type),
                         cast(int, UnsignedLongLong), cast(bool, Bool))
    CreatePointer = _factory('CreatePointer', cast(int, VoidPtr))

    # Helper: declare an instance-level accessor method.
    def _accessor(name, *argtys):
        return CustomMethod('GenericValue_' + name, *argtys)

    # Accessors for reading the stored value back out.
    valueIntWidth = _accessor('ValueIntWidth', cast(Unsigned, int))

    toSignedInt = _accessor('ToSignedInt', cast(LongLong, int))
    toUnsignedInt = _accessor('ToUnsignedInt', cast(UnsignedLongLong, int))
    toFloat = _accessor('ToFloat', cast(Double, float), ptr(Type))
    toPointer = _accessor('ToPointer', cast(VoidPtr, int))
| {
"content_hash": "d1901acb35b68d40808b053f32fdf719",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 75,
"avg_line_length": 31.470588235294116,
"alnum_prop": 0.6570093457943925,
"repo_name": "llvmpy/llvmpy",
"id": "4dc50d840f5321b5ce4905af4c7108cbf55ea737",
"size": "1070",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "llvmpy/src/GenericValue.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "10456"
},
{
"name": "C++",
"bytes": "58044"
},
{
"name": "CSS",
"bytes": "12590"
},
{
"name": "HTML",
"bytes": "851926"
},
{
"name": "JavaScript",
"bytes": "4102"
},
{
"name": "LLVM",
"bytes": "35445"
},
{
"name": "Makefile",
"bytes": "1862"
},
{
"name": "Python",
"bytes": "720443"
},
{
"name": "Shell",
"bytes": "335"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.