text stringlengths 4 1.02M | meta dict |
|---|---|
from __future__ import annotations
import uuid
import typing
import itertools
import collections
from datetime import datetime
import naming
try:
    from pxr import Sdf
    # Every file-format extension Sdf knows about whose name starts with
    # "usd" (e.g. usd, usda, usdc, usdz).
    _USD_SUFFIXES = tuple(ext for ext in Sdf.FileFormat.FindAllFileFormatExtensions() if ext.startswith('usd'))
except ImportError:  # Don't fail if Sdf is not importable to facilitate portability
    _USD_SUFFIXES = ("usd", "usda", "usdc", "usdz", "usdt")
from grill.tokens import ids
def _table_from_id(token_ids):
headers = [
'Token',
'Pattern',
'Default',
'Description',
]
table_sep = tuple([''] * len(headers))
sorter = lambda value: (
# cleanup backslashes formatting
value.pattern.replace('\\', '\\\\'),
value.default,
# replace new lines with empty strings to avoid malformed tables.
value.description.replace('\n', ' '),
)
rows = [table_sep, headers, table_sep]
rows.extend([token.name, *sorter(token.value)] for token in token_ids)
rows.append(table_sep)
max_sizes = [(max(len(i) for i in r)) for r in zip(*rows)]
format_rows = []
for r in rows:
filler = '=<' if r == table_sep else ''
format_rows.append(' '.join(
f"{{:{f'{filler}'}{f'{size}'}}}".format(i)
for size, i in zip(max_sizes, r))
)
return '\n'.join(format_rows)
class DefaultName(naming.Name):
    """ Inherited by: :class:`grill.names.CGAsset`

    Base class for any Name object that wishes to provide `default` functionality via
    the `get_default` method.

    Subclass implementations can override the `_defaults` member to return a mapping
    appropriate to that class.
    """
    _defaults = {}

    @classmethod
    def get_default(cls, **kwargs) -> DefaultName:
        """Get a new Name object with default values and overrides from **kwargs."""
        instance = cls()
        # kwargs win over the class defaults; merge into a fresh mapping.
        merged = {**instance._defaults, **kwargs}
        instance.name = instance.get(**merged)
        return instance
class DefaultFile(DefaultName, naming.File):
    """ Inherited by: :class:`grill.names.DateTimeFile`

    Similar to :class:`grill.names.DefaultName`, provides File Name objects default
    creation via the `get_default` method.

    Adds an extra `DEFAULT_SUFFIX='ext'` member that will be used when creating objects.
    """
    # Suffix used for new default names; subclasses may override.
    DEFAULT_SUFFIX = 'ext'

    @property
    def _defaults(self):
        # Copy before mutating: super()._defaults may be a mapping owned by a
        # parent class (e.g. the class-level dict on DefaultName). Writing the
        # suffix into it in place would leak state into every other subclass.
        result = dict(super()._defaults)
        result['suffix'] = type(self).DEFAULT_SUFFIX
        return result
class DateTimeFile(DefaultFile):
    """Time based file names respecting iso standard.

    ============= ================
    **Config:**
    ------------------------------
    *year*          Between :py:data:`datetime.MINYEAR` and :py:data:`datetime.MAXYEAR` inclusive.
    *month*         Between 1 and 12 inclusive.
    *day*           Between 1 and the number of days in the given month of the given year.
    *hour*          In ``range(24)``.
    *minute*        In ``range(60)``.
    *second*        In ``range(60)``.
    *microsecond*   In ``range(1000000)``.
    ============= ================

    ====== ============
    **Composed Fields:**
    --------------------
    *date*   `year` `month` `day`
    *time*   `hour` `minute` `second` `microsecond`
    ====== ============

    .. note::
        When getting a new default name, current ISO time at the moment of execution is used.

    Example:
        >>> tf = DateTimeFile.get_default(suffix='txt')
        >>> tf.day
        '28'
        >>> tf.date
        '2019-10-28'
        >>> tf.year = 1999
        >>> tf
        DateTimeFile("1999-10-28 22-29-31-926548.txt")
        >>> tf.month = 14  # ISO format validation
        Traceback (most recent call last):
        ...
        ValueError: month must be in 1..12
        >>> tf.datetime
        datetime.datetime(1999, 10, 28, 22, 29, 31, 926548)
    """
    # Loose per-field patterns; strict validation happens via the datetime
    # conversion in the name setter below.
    config = dict.fromkeys(
        ('month', 'day', 'hour', 'minute', 'second'), r'\d{1,2}'
    )
    config.update(year=r'\d{1,4}', microsecond=r'\d{1,6}')
    # Composed fields, e.g. date == "<year>-<month>-<day>".
    join = dict(
        date=('year', 'month', 'day'),
        time=('hour', 'minute', 'second', 'microsecond'),
    )
    join_sep = '-'

    @property
    def _defaults(self):
        # Copy: super()._defaults may hand back a mapping owned by a parent
        # class; updating it in place would leak time fields across classes.
        result = dict(super()._defaults)
        time_field = {'year', 'month', 'day', 'hour', 'minute', 'second',
                      'microsecond'}
        now = datetime.now()
        result.update({f: getattr(now, f) for f in time_field})
        return result

    def get_pattern_list(self) -> typing.List[str]:
        """Fields / properties names (sorted) to be used when building names.

        Defaults to [`date`, `time`] + keys of this name's config.
        """
        return ["date", "time"] + super().get_pattern_list()

    @property
    def name(self) -> str:
        return super().name

    @name.setter
    def name(self, name: str):
        prev_name = self._name
        # Call the parent property's setter explicitly; `super().name = ...`
        # cannot be used to delegate to an overridden property setter.
        super(DateTimeFile, self.__class__).name.fset(self, name)
        if name:
            try:  # validate via datetime conversion
                self.datetime
            except ValueError:
                if prev_name:  # if we had a previous valid name, revert to it
                    self.name = prev_name
                raise

    @property
    def datetime(self) -> datetime:
        """ Return a :py:class:`datetime.datetime` object using this name values.

        >>> tf = DateTimeFile("1999-10-28 22-29-31-926548.txt")
        >>> tf.datetime
        datetime.datetime(1999, 10, 28, 22, 29, 31, 926548)
        """
        if not self.name:
            raise AttributeError("Can not retrieve datetime from an empty name")
        date = f"{int(self.year):04d}-{int(self.month):02d}-{int(self.day):02d}"
        time = (f"{int(self.hour):02d}:{int(self.minute):02d}:{int(self.second):02d}."
                f"{int(self.microsecond):06d}")
        return datetime.fromisoformat(f'{date}T{time}')
class CGAsset(DefaultName):
    """Inherited by: :class:`grill.names.CGAssetFile`

    Elemental resources that, when composed, generate the entities that bring an idea to a tangible product
    through their life cycles (e.g. a character, a film, a videogame).
    """
    config = {token.name: token.value.pattern for token in ids.CGAsset}
    __doc__ += '\n' + _table_from_id(ids.CGAsset) + '\n'

    def __init__(self, *args, sep='-', **kwargs):
        # CGAsset fields are dash separated by default.
        super().__init__(*args, sep=sep, **kwargs)

    @property
    def _defaults(self):
        # Copy so the mapping owned by a parent class is never mutated here.
        result = dict(super()._defaults)
        result.update({token.name: token.value.default for token in ids.CGAsset})
        return result
class CGAssetFile(CGAsset, DefaultFile, naming.PipeFile):
    """Inherited by: :class:`grill.names.UsdAsset`

    Versioned files in the pipeline for a CGAsset.

    Example:
        >>> name = CGAssetFile.get_default(version=7)
        >>> name.suffix
        'ext'
        >>> name.suffix = 'abc'
        >>> name.path
        WindowsPath('demo/3d/abc/entity/rnd/lead/atom/main/all/whole/7/demo-3d-abc-entity-rnd-lead-atom-main-all-whole.7.abc')
    """
    @property
    def _defaults(self):
        # Copy before updating so the parent class's mapping is not mutated.
        result = dict(super()._defaults)
        result.update(version=1)
        return result

    def get_path_pattern_list(self) -> typing.List[str]:
        """Directory pattern: all name fields plus a trailing `version` level."""
        # NOTE(review): this starts from get_pattern_list (the name fields),
        # not super().get_path_pattern_list() -- looks intentional so the path
        # mirrors the name fields; confirm against the naming base classes.
        pattern = super().get_pattern_list()
        pattern.append('version')
        return pattern
class UsdAsset(CGAssetFile):
    """Specialized :class:`grill.names.CGAssetFile` name object for USD asset resources.

    .. admonition:: Inheritance Diagram for UsdAsset
        :class: dropdown, note

        .. inheritance-diagram:: grill.names.UsdAsset

    This is the currency for USD asset identifiers in the pipeline.

    Examples:
        >>> asset_id = UsdAsset.get_default()
        >>> asset_id
        UsdAsset("demo-3d-abc-entity-rnd-main-atom-lead-base-whole.1.usda")
        >>> asset_id.suffix = 'usdc'
        >>> asset_id.version = 42
        >>> asset_id
        UsdAsset("demo-3d-abc-entity-rnd-main-atom-lead-base-whole.42.usdc")
        >>> asset_id.suffix = 'abc'
        Traceback (most recent call last):
        ...
        ValueError: Can't set invalid name 'demo-3d-abc-entity-rnd-main-atom-lead-base-whole.42.abc' on UsdAsset("demo-3d-abc-entity-rnd-main-atom-lead-base-whole.42.usdc"). Valid convention is: '{code}-{media}-{kingdom}-{cluster}-{area}-{stream}-{item}-{step}-{variant}-{part}.{pipe}.{suffix}' with pattern: '^(?P<code>\\w+)\\-(?P<media>\\w+)\\-(?P<kingdom>\\w+)\\-(?P<cluster>\\w+)\\-(?P<area>\\w+)\\-(?P<stream>\\w+)\\-(?P<item>\\w+)\\-(?P<step>\\w+)\\-(?P<variant>\\w+)\\-(?P<part>\\w+)(?P<pipe>(\\.(?P<output>\\w+))?\\.(?P<version>\\d+)(\\.(?P<index>\\d+))?)(\\.(?P<suffix>sdf|usd|usda|usdc|usdz))$'

    .. seealso::
        :class:`grill.names.CGAsset` for a description of available fields, :class:`naming.Name` for an overview of the core API.
    """
    DEFAULT_SUFFIX = 'usd'
    file_config = naming.NameConfig(
        # NOTE: limit to only extensions starting with USD (some environments register other extensions untested by the grill)
        {'suffix': "|".join(_USD_SUFFIXES)}
    )

    @classmethod
    def get_anonymous(cls, **values) -> UsdAsset:
        """Get an anonymous :class:`UsdAsset` name with optional field overrides.

        Useful for situations where a temporary but valid identifier is needed.

        :param values: Variable keyword arguments with the keys referring to the name's
            fields which will use the given values.

        Example:
            >>> UsdAsset.get_anonymous(stream='test')
            UsdAsset("4209091047-34604-19646-169-123-test-4209091047-34604-19646-169.1.usda")
        """
        field_names = cls.get_default().get_pattern_list()
        # Cycle the uuid fields so there are enough values for every field.
        random_values = itertools.cycle(uuid.uuid4().fields)
        fields = dict(zip(field_names, random_values))
        fields.update(values)  # explicit overrides win over random values
        return cls.get_default(**fields)
class LifeTR(naming.Name):
    """Taxonomic Rank used for biological classification.
    """
    config = {member.name: member.value.pattern for member in ids.LifeTR}
    __doc__ += '\n' + _table_from_id(ids.LifeTR) + '\n'

    def __init__(self, *args, sep=':', **kwargs):
        # Rank fields are colon separated by default.
        super().__init__(*args, sep=sep, **kwargs)
| {
"content_hash": "4d0617ba3827a96f205a022364146653",
"timestamp": "",
"source": "github",
"line_count": 292,
"max_line_length": 578,
"avg_line_length": 34.6986301369863,
"alnum_prop": 0.5906040268456376,
"repo_name": "thegrill/grill-names",
"id": "2bf18c9b816228740181e410c4d155c08ca0d085",
"size": "10132",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "grill/names/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "15046"
}
],
"symlink_target": ""
} |
import argparse
import compileall
import distutils.version
import json
import os
import pip
import plistlib
import subprocess
import sys
import tempfile
import urllib
import virtualenv
import webbrowser
def export_bundle(bundle_path):
    """Export a pristine copy of *bundle_path* from the git index.

    Checks out the committed files (ignoring uncommitted changes) into a
    fresh temporary directory and returns the absolute path of the copy.
    """
    toplevel = subprocess.check_output(
        ['git', 'rev-parse', '--show-toplevel']).rstrip()
    git_cmd = ['git', '-C', toplevel]
    export_root = tempfile.mkdtemp()
    # Pipe the NUL-separated file list straight into checkout-index.
    lister = subprocess.Popen(
        git_cmd + ['ls-files', '-cz', bundle_path], stdout=subprocess.PIPE)
    exporter = subprocess.Popen(
        git_cmd + ['checkout-index', '--prefix=%s/' % export_root,
                   '--stdin', '-z'],
        stdin=lister.stdout)
    lister.stdout.close()  # let checkout-index see EOF when ls-files exits
    exporter.communicate()
    return os.path.abspath(os.path.join(export_root, bundle_path))
def expand_url_template(url_template, *args, **query):
    """Fill a %s-style *url_template* and append a query string.

    Positional *args* are URL-quoted and substituted into the template;
    keyword *query* arguments are urlencoded and appended after '?'.
    """
    # Local import keeps this working on both Python 2 (urllib.quote) and
    # Python 3 (urllib.parse.quote) without touching callers.
    try:
        from urllib.parse import quote, urlencode
    except ImportError:  # Python 2
        from urllib import quote, urlencode
    url = url_template
    if args:
        url = url % tuple(quote(arg) for arg in args)
    if query:
        url += '?' + urlencode(query)
    return url
def archive_dir_name(bundle_path, version):
    """Return ``(directory, versioned filename)`` for the bundle archive."""
    dir_path, filename = os.path.split(bundle_path)
    stem, ext = os.path.splitext(filename)
    # GitHub will replace spaces with periods; dashes look better
    stem = stem.replace(' ', '-')
    return dir_path, '%s-%s%s' % (stem, version, ext)
def tag_for_version(version):
    """Return the git tag name for *version*, e.g. '1.0' -> 'v1.0'."""
    return ''.join(['v', version])
def create_virtualenv(bundle_path, requirements_path):
    """Build a relocatable virtualenv inside the bundle's Scripts folder.

    NOTE(review): relies on the legacy ``virtualenv`` / ``pip.main`` Python
    APIs -- confirm against the pinned tool versions this project uses.
    """
    env_path = os.path.join(bundle_path, 'Contents', 'Scripts')
    virtualenv.create_environment(env_path, site_packages=True)
    virtualenv.make_environment_relocatable(env_path)
    pip.main(['install', '--prefix', env_path, '-r', requirements_path])
    # Byte-compile only the top level of the environment.
    compileall.compile_dir(env_path, maxlevels=0)
def update_bundle_info(bundle_path, version, repo):
    """Stamp *version* and its download URL into the bundle's Info.plist."""
    plist_path = os.path.join(bundle_path, 'Contents', 'Info.plist')
    info = plistlib.readPlist(plist_path)
    info['CFBundleVersion'] = version
    download_url = expand_url_template(
        'https://github.com/%s/releases/download/%s/%s', repo,
        tag_for_version(version), archive_dir_name(bundle_path, version)[1])
    info['LBDescription']['LBDownloadURL'] = download_url
    plistlib.writePlist(info, plist_path)
def sign_bundle(bundle_path):
    """Code-sign the bundle in place with the Developer ID identity."""
    identity = 'Developer ID Application: Nicholas Riley'
    subprocess.check_call(['/usr/bin/codesign', '-fs', identity, bundle_path])
def archive_bundle(bundle_path, version):
    """Zip the bundle (keeping its top-level folder); return the archive path."""
    archive_path = os.path.join(*archive_dir_name(bundle_path, version))
    subprocess.check_call(
        ['/usr/bin/ditto', '--keepParent', '-ck', bundle_path, archive_path])
    return archive_path
def upload_release(repo, version, archive_path, github_access_token):
    """Create a draft GitHub release for *version* and attach the archive.

    Returns the HTML URL of the newly created release page.

    NOTE(review): shells out to curl instead of using an HTTP library, and
    passes the access token as a query parameter -- presumably fine for this
    personal tooling, but confirm before wider use.
    """
    strict_version = distutils.version.StrictVersion(version)
    releases_url = expand_url_template(
        'https://api.github.com/repos/%s/releases', repo,
        access_token=github_access_token)
    release_name = tag_for_version(version)
    # Draft release; prerelease when the version carries a prerelease tag.
    release_json = dict(tag_name=release_name, target_commitish='master',
                        name=release_name, body='', draft=True,
                        prerelease=bool(strict_version.prerelease))
    # POST the release JSON on curl's stdin ('--data @-').
    releases_api = subprocess.Popen(
        ['/usr/bin/curl', '--data', '@-', releases_url],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    release_json_data, _ = releases_api.communicate(json.dumps(release_json))
    release_json = json.loads(release_json_data)
    html_url = release_json['html_url']
    # upload_url is an RFC 6570 template like ".../assets{?name,label}";
    # strip the templated part before appending our own query string.
    upload_url = release_json['upload_url'].split('{', 1)[0]
    upload_url = expand_url_template(upload_url,
        name=os.path.basename(archive_path), access_token=github_access_token)
    subprocess.check_call(
        ['/usr/bin/curl', '-H', 'Content-Type: application/zip',
         '--data-binary', '@' + archive_path, upload_url])
    return html_url
def release(version, github_access_token):
    """Build, sign, archive and upload a release of the Hue action."""
    repo = 'nriley/LBHue'
    project_path = os.path.join(os.path.dirname(__file__), '..')
    action_path = os.path.join(project_path, 'Hue.lbaction')
    # Export the committed state of the bundle...
    exported_path = export_bundle(action_path)
    # ...then stamp the new version/download URL into the exported copy
    # and into the working copy (so the change can be committed).
    update_bundle_info(exported_path, version, repo)
    update_bundle_info(action_path, version, repo)
    requirements_path = os.path.join(project_path, 'requirements.txt')
    create_virtualenv(exported_path, requirements_path)
    sign_bundle(exported_path)
    archive_path = archive_bundle(exported_path, version)
    release_page = upload_release(repo, version, archive_path,
                                  github_access_token)
    webbrowser.open(release_page)
if __name__ == '__main__':
    # CLI: release.py <version> <github_access_token>
    parser = argparse.ArgumentParser(description='Release to GitHub.')
    parser.add_argument('version')
    parser.add_argument('github_access_token')
    args = parser.parse_args()
    release(args.version, args.github_access_token)
| {
"content_hash": "f3b703d220e15200514666e352c628b8",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 79,
"avg_line_length": 37.776119402985074,
"alnum_prop": 0.6712761754247333,
"repo_name": "nriley/LBHue",
"id": "c36f96e28f4dac269dbb1dacb4b6e555281ab9d2",
"size": "5081",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Scripts/upload.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "21552"
}
],
"symlink_target": ""
} |
import time
from oslo.config import cfg
from nova import exception
from nova.i18n import _
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
CONF = cfg.CONF

# Lazily-imported ironicclient module; populated on first
# IronicClientWrapper instantiation so ironicclient stays optional.
ironic = None
class IronicClientWrapper(object):
    """Ironic client wrapper class that encapsulates retry logic."""

    def __init__(self):
        """Initialise the IronicClientWrapper for use.

        Initialise IronicClientWrapper by loading ironicclient
        dynamically so that ironicclient is not a dependency for
        Nova.
        """
        global ironic
        if ironic is None:
            ironic = importutils.import_module('ironicclient')
            # NOTE(deva): work around a lack of symbols in the current version.
            if not hasattr(ironic, 'exc'):
                ironic.exc = importutils.import_module('ironicclient.exc')
            if not hasattr(ironic, 'client'):
                ironic.client = importutils.import_module(
                    'ironicclient.client')

    def _get_client(self):
        """Build an ironic client from nova's [ironic] config options.

        Uses the admin auth token directly when configured; otherwise falls
        back to full keystone credentials.

        :raises: NovaException if the client cannot authenticate.
        """
        # TODO(deva): save and reuse existing client & auth token
        # until it expires or is no longer valid
        auth_token = CONF.ironic.admin_auth_token
        if auth_token is None:
            kwargs = {'os_username': CONF.ironic.admin_username,
                      'os_password': CONF.ironic.admin_password,
                      'os_auth_url': CONF.ironic.admin_url,
                      'os_tenant_name': CONF.ironic.admin_tenant_name,
                      'os_service_type': 'baremetal',
                      'os_endpoint_type': 'public',
                      'ironic_url': CONF.ironic.api_endpoint}
        else:
            kwargs = {'os_auth_token': auth_token,
                      'ironic_url': CONF.ironic.api_endpoint}
        try:
            cli = ironic.client.get_client(CONF.ironic.api_version, **kwargs)
        except ironic.exc.Unauthorized:
            msg = _("Unable to authenticate Ironic client.")
            LOG.error(msg)
            raise exception.NovaException(msg)
        return cli

    def _multi_getattr(self, obj, attr):
        """Support nested attribute path for getattr().

        :param obj: Root object.
        :param attr: Path of final attribute to get. E.g., "a.b.c.d"
        :returns: The value of the final named attribute.
        :raises: AttributeError will be raised if the path is invalid.
        """
        for attribute in attr.split("."):
            obj = getattr(obj, attribute)
        return obj

    def call(self, method, *args, **kwargs):
        """Call an Ironic client method and retry on errors.

        :param method: Name of the client method to call as a string.
        :param args: Client method arguments.
        :param kwargs: Client method keyword arguments.
        :raises: NovaException if all retries failed.
        """
        # Transient errors worth retrying; anything else propagates.
        retry_excs = (ironic.exc.ServiceUnavailable,
                      ironic.exc.ConnectionRefused,
                      ironic.exc.Conflict)
        num_attempts = CONF.ironic.api_max_retries
        for attempt in range(1, num_attempts + 1):
            # Fresh client per attempt, in case the failure was auth-related.
            client = self._get_client()
            try:
                return self._multi_getattr(client, method)(*args, **kwargs)
            except retry_excs:
                msg = (_("Error contacting Ironic server for '%(method)s'. "
                         "Attempt %(attempt)d of %(total)d")
                       % {'method': method,
                          'attempt': attempt,
                          'total': num_attempts})
                if attempt == num_attempts:
                    LOG.error(msg)
                    raise exception.NovaException(msg)
                LOG.warning(msg)
                time.sleep(CONF.ironic.api_retry_interval)
| {
"content_hash": "66bab84751fcedb3336c15d46b085784",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 79,
"avg_line_length": 37.640776699029125,
"alnum_prop": 0.5640959504771731,
"repo_name": "berrange/nova",
"id": "51dd8723c63466083bd5df13f1c105bcd4868a06",
"size": "4551",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/virt/ironic/client_wrapper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15269775"
},
{
"name": "Shell",
"bytes": "18352"
}
],
"symlink_target": ""
} |
import sys
# Use a context manager so the input file is closed even if a line fails
# to parse (the original opened and closed the handle manually).
with open(sys.argv[1], 'r') as lines:
    for line in lines:
        line = line.replace('\n', '').replace('\r', '')
        if line:
            numbers, times = line.split(';')
            times = int(times)
            numbers = numbers.split(',')
            # Reverse each complete group of `times` items; a trailing
            # partial group keeps its original order.
            parts = [numbers[i:(i + times)][::-1]
                     if len(numbers) - i >= times
                     else numbers[i:(i + times)]
                     for i in range(0, len(numbers), times)]
            print(','.join([','.join(x) for x in parts]))
| {
"content_hash": "5ef14c6a2809f742f37979e14beb435b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 142,
"avg_line_length": 35.84615384615385,
"alnum_prop": 0.5278969957081545,
"repo_name": "AstrorEnales/CodeEval",
"id": "0ba1ee583116f4179cd0ed2fb3006b2f45652c9f",
"size": "466",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Moderate/Reverse Groups/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "413"
},
{
"name": "Python",
"bytes": "63229"
}
],
"symlink_target": ""
} |
"""Gast compatibility library. Supports 0.2.2 and 0.3.2."""
# TODO(mdan): Remove this file once it's safe to break compatibility.
import functools
import gast
# gast 0.2 has per-type literal nodes such as Str; gast 0.3 unified them
# into Constant, so the presence of Str distinguishes the two versions.
GAST2 = hasattr(gast, 'Str')
GAST3 = not GAST2
def _is_constant_gast_2(node):
    """Return True if *node* is one of gast 0.2's constant-literal nodes."""
    literal_nodes = (gast.Num, gast.Str, gast.Bytes, gast.Ellipsis,
                     gast.NameConstant)
    return isinstance(node, literal_nodes)
def _is_constant_gast_3(node):
    """Return True if *node* is a gast 0.3 Constant node."""
    return isinstance(node, gast.Constant)
def is_literal(node):
    """Tests whether node represents a Python literal."""
    # Normal literals; True/False/None are constants in Python 3...
    if is_constant(node):
        return True
    # ...but plain Name nodes in Python 2.
    return isinstance(node, gast.Name) and node.id in ('True', 'False', 'None')
def _is_ellipsis_gast_2(node):
    """Return True if *node* is gast 0.2's dedicated Ellipsis node."""
    return isinstance(node, gast.Ellipsis)
def _is_ellipsis_gast_3(node):
    """Return True if *node* is a gast 0.3 Constant holding Ellipsis."""
    return isinstance(node, gast.Constant) and node.value == Ellipsis
# Bind version-appropriate helpers and node constructors once at import
# time so callers never branch on the installed gast version themselves.
if GAST2:
    is_constant = _is_constant_gast_2
    is_ellipsis = _is_ellipsis_gast_2
    Module = gast.Module
    Name = gast.Name
    Str = gast.Str
elif GAST3:
    is_constant = _is_constant_gast_3
    is_ellipsis = _is_ellipsis_gast_3
    # gast 0.3 added required constructor fields; partials supply defaults
    # so the 0.2 call signatures keep working.
    Module = functools.partial(gast.Module, type_ignores=None)  # pylint:disable=invalid-name
    Name = functools.partial(gast.Name, type_comment=None)  # pylint:disable=invalid-name
    Str = functools.partial(gast.Constant, kind=None)  # pylint:disable=invalid-name
else:
    # Unreachable: GAST3 is defined as `not GAST2`.
    assert False
| {
"content_hash": "207c5bd37c28025dd94195dc37b9ae4d",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 91,
"avg_line_length": 24.083333333333332,
"alnum_prop": 0.6996539792387543,
"repo_name": "tensorflow/probability",
"id": "27078b0a4afa04a3a589f934725a651f775a25f5",
"size": "2125",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_probability/python/experimental/auto_batching/gast_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "55552121"
},
{
"name": "Python",
"bytes": "17339674"
},
{
"name": "Shell",
"bytes": "24852"
},
{
"name": "Starlark",
"bytes": "663851"
}
],
"symlink_target": ""
} |
from unittest import TestCase
from validator import parsers
from tests.utils import read
class TestPaths(TestCase):
    """Parser regression tests."""

    def test_xml_parsing(self):
        # Should parse the regression fixture without raising.
        fixture = read('tests/fixtures/bugs/parser_bug.xml')
        xml_parser = parsers.XmlParser()
        xml_parser.parse(fixture)
| {
"content_hash": "76218a31cb3d261ddaeede2140b9250e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 60,
"avg_line_length": 23.333333333333332,
"alnum_prop": 0.7107142857142857,
"repo_name": "KeepSafe/content-validator",
"id": "0e2104dc938f8de0d777f2f8e337db89ff5a9281",
"size": "280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_parser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1333"
},
{
"name": "Python",
"bytes": "51389"
}
],
"symlink_target": ""
} |
from django.forms import ModelForm
from django.core.exceptions import ValidationError
from django import forms
from apps.validez_nacional.models import ValidezNacional
from apps.titulos.models import Cohorte
# Year choices with a blank placeholder first.
# NOTE(review): range() excludes Cohorte.ULTIMO_ANIO -- confirm whether the
# last year should be selectable (i.e. range(..., Cohorte.ULTIMO_ANIO + 1)).
ANIOS_COHORTE_CHOICES = [('', '-------')] + [(i, i) for i in range(Cohorte.PRIMER_ANIO, Cohorte.ULTIMO_ANIO)]
class ValidezNacionalForm(forms.ModelForm):
    """Form for creating/editing a ``ValidezNacional`` record.

    Field labels are user-facing (Spanish); field names mirror the model.
    """
    cue = forms.CharField(max_length=9, label='CUE', required=True)
    carrera = forms.CharField(max_length=255, label='Carrera', required=True)
    titulo_nacional = forms.CharField(max_length=255, label='Título', required=True)
    # First/last authorized cohorts are limited to the configured year range.
    primera_cohorte = forms.ChoiceField(label='Primera Cohorte Autorizada', choices=ANIOS_COHORTE_CHOICES, required=True)
    ultima_cohorte = forms.ChoiceField(label='Última Cohorte Autorizada', choices=ANIOS_COHORTE_CHOICES, required=True)
    normativa_jurisdiccional = forms.CharField(max_length=255, label='Normativa Jurisdiccional', required=True)
    dictamen_cofev = forms.CharField(max_length=255, label='Dictamen CoFEv', required=False)
    normativas_nacionales = forms.CharField(max_length=255, label='Normativa Nacional', required=True)
    nro_infd = forms.CharField(max_length=255, label='Nro. INFD.', required=True)

    class Meta:
        model = ValidezNacional
        fields = ('cue', 'carrera', 'titulo_nacional', 'primera_cohorte', 'ultima_cohorte', 'normativa_jurisdiccional', 'dictamen_cofev', 'normativas_nacionales', 'nro_infd')

    def __init__(self, *args, **kwargs):
        super(ValidezNacionalForm, self).__init__(*args, **kwargs)
| {
"content_hash": "656f4d61075b5a3eb3f77458d469821a",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 173,
"avg_line_length": 60.11538461538461,
"alnum_prop": 0.7364043506078055,
"repo_name": "MERegistro/meregistro",
"id": "12d16f4d457e83603b769f7b40b18327bb324159",
"size": "1589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meregistro/apps/validez_nacional/forms/ValidezNacionalForm.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "79500"
},
{
"name": "HTML",
"bytes": "782188"
},
{
"name": "JavaScript",
"bytes": "106755"
},
{
"name": "PLpgSQL",
"bytes": "515442"
},
{
"name": "Python",
"bytes": "7190737"
},
{
"name": "Shell",
"bytes": "804"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Default to this project's settings unless the environment overrides it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "estudiala.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| {
"content_hash": "6dfb9a7d086595e37daeac98a26eadb1",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 73,
"avg_line_length": 25.555555555555557,
"alnum_prop": 0.7130434782608696,
"repo_name": "diego-d5000/Estudiala",
"id": "84e23b170285e721f668e8a636a3a6c2da4025d9",
"size": "252",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "estudiala/estudiala/manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "490801"
},
{
"name": "Groff",
"bytes": "2189"
},
{
"name": "HTML",
"bytes": "2444503"
},
{
"name": "JavaScript",
"bytes": "1941799"
},
{
"name": "Makefile",
"bytes": "302"
},
{
"name": "Python",
"bytes": "34083"
},
{
"name": "Shell",
"bytes": "10111"
}
],
"symlink_target": ""
} |
import murano.openstack.common.exception as e
class PackageClassLoadError(e.Error):
    """Raised when a class cannot be loaded from a package."""

    def __init__(self, class_name, message=None):
        error = 'Unable to load class "{0}" from package'.format(class_name)
        if message:
            error += ": " + message
        super(PackageClassLoadError, self).__init__(error)
class PackageUILoadError(e.Error):
    """Raised when the UI definition cannot be loaded from a package."""

    def __init__(self, message=None):
        error = 'Unable to load ui definition from package'
        if message:
            error += ": " + message
        super(PackageUILoadError, self).__init__(error)
class PackageLoadError(e.Error):
    """Base error for failures while loading a package."""
    pass
class PackageFormatError(PackageLoadError):
    """Raised when a package does not match the expected format."""

    def __init__(self, message=None):
        error = 'Incorrect package format'
        if message:
            error += ': ' + message
        super(PackageFormatError, self).__init__(error)
| {
"content_hash": "c2280096f4fd9a374e578968ad1795c8",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 74,
"avg_line_length": 28.96551724137931,
"alnum_prop": 0.6154761904761905,
"repo_name": "ativelkov/murano-api",
"id": "2ac5020962014172c3f5cb054fd5a8e4fefaa73f",
"size": "1423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "murano/packages/exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "8634"
},
{
"name": "Python",
"bytes": "935905"
},
{
"name": "Shell",
"bytes": "21119"
}
],
"symlink_target": ""
} |
import os
import ConfigParser
import inspect
import re
class ConfigUtils(object):
    """Small helper around the ``config`` file living next to this module."""

    def __init__(self):
        # Resolve the directory containing this source file so the config is
        # found regardless of the current working directory.
        path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
        # read config
        config = ConfigParser.SafeConfigParser()
        config.read(os.path.join(path, "config"))
        self.config = config

    def get(self, group, key):
        """Return the value for *key* in section *group*, stripped."""
        return self.config.get(group, key).strip()

    def read_list(self, group, key):
        """Return the comma-separated value for *key* as a list of items."""
        config_value = self.config.get(group, key).strip()
        # Raw string: '\s' in a non-raw literal is an invalid escape sequence
        # (DeprecationWarning, and a SyntaxWarning on modern Python).
        return re.split(r'\s*,\s*', config_value)
"content_hash": "e603d97ba4b7b8d72008903b13c4a8c2",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 88,
"avg_line_length": 28.75,
"alnum_prop": 0.6417391304347826,
"repo_name": "mjasion/retweeter",
"id": "a76a800db7d0f9ff9d58c58a01fb37bf0bdb5e94",
"size": "575",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ConfigUtils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1841"
}
],
"symlink_target": ""
} |
"""
This module implements a class which returns the appropriate Response class
based on different criteria.
"""
from mimetypes import MimeTypes
from pkgutil import get_data
from cStringIO import StringIO
from ants.http import Response
from ants.utils.misc import load_object
from ants.utils.python import isbinarytext
class ResponseTypes(object):
    """Pick the most appropriate Response class for a response.

    Selection can be driven by mimetype, Content-Type/Content-Disposition
    headers, filename/url, or (as a last resort) the body content itself.
    """
    # Mimetype -> dotted path of the Response class; 'text/*' acts as the
    # wildcard fallback for any other text type.
    CLASSES = {
        'text/html': 'ants.http.HtmlResponse',
        'application/atom+xml': 'ants.http.XmlResponse',
        'application/rdf+xml': 'ants.http.XmlResponse',
        'application/rss+xml': 'ants.http.XmlResponse',
        'application/xhtml+xml': 'ants.http.HtmlResponse',
        'application/vnd.wap.xhtml+xml': 'ants.http.HtmlResponse',
        'application/xml': 'ants.http.XmlResponse',
        'application/json': 'ants.http.TextResponse',
        'application/javascript': 'ants.http.TextResponse',
        'application/x-javascript': 'ants.http.TextResponse',
        'text/xml': 'ants.http.XmlResponse',
        'text/*': 'ants.http.TextResponse',
    }

    def __init__(self):
        self.classes = {}
        self.mimetypes = MimeTypes()
        # Load the mime.types table bundled inside the ants package.
        mimedata = get_data('ants', 'mime.types')
        self.mimetypes.readfp(StringIO(mimedata))
        # Resolve the dotted class paths once, up front.
        for mimetype, cls in self.CLASSES.iteritems():
            self.classes[mimetype] = load_object(cls)

    def from_mimetype(self, mimetype):
        """Return the most appropriate Response class for the given mimetype"""
        if mimetype is None:
            return Response
        elif mimetype in self.classes:
            return self.classes[mimetype]
        else:
            # Fall back to the wildcard entry for this major type, if any.
            basetype = "%s/*" % mimetype.split('/')[0]
            return self.classes.get(basetype, Response)

    def from_content_type(self, content_type, content_encoding=None):
        """Return the most appropriate Response class from an HTTP Content-Type
        header """
        # An encoded (e.g. gzipped) body can't be inspected as its mimetype.
        if content_encoding:
            return Response
        # Strip parameters such as "; charset=utf-8".
        mimetype = content_type.split(';')[0].strip().lower()
        return self.from_mimetype(mimetype)

    def from_content_disposition(self, content_disposition):
        # Extract the filename from 'attachment; filename="..."'; malformed
        # headers fall through to the generic Response.
        try:
            filename = content_disposition.split(';')[1].split('=')[1]
            filename = filename.strip('"\'')
            return self.from_filename(filename)
        except IndexError:
            return Response

    def from_headers(self, headers):
        """Return the most appropriate Response class by looking at the HTTP
        headers"""
        cls = Response
        # NOTE(review): looked up as 'Content-type' after testing
        # 'Content-Type' -- presumably `headers` is case-insensitive; confirm.
        if 'Content-Type' in headers:
            cls = self.from_content_type(headers['Content-type'], \
                headers.get('Content-Encoding'))
        if cls is Response and 'Content-Disposition' in headers:
            cls = self.from_content_disposition(headers['Content-Disposition'])
        return cls

    def from_filename(self, filename):
        """Return the most appropriate Response class from a file name"""
        mimetype, encoding = self.mimetypes.guess_type(filename)
        if mimetype and not encoding:
            return self.from_mimetype(mimetype)
        else:
            return Response

    def from_body(self, body):
        """Try to guess the appropriate response based on the body content.
        This method is a bit magic and could be improved in the future, but
        it's not meant to be used except for special cases where response types
        cannot be guess using more straightforward methods."""
        # Only sniff a prefix of the body.
        chunk = body[:5000]
        if isbinarytext(chunk):
            return self.from_mimetype('application/octet-stream')
        elif "<html>" in chunk.lower():
            return self.from_mimetype('text/html')
        elif "<?xml" in chunk.lower():
            return self.from_mimetype('text/xml')
        else:
            return self.from_mimetype('text')

    def from_args(self, headers=None, url=None, filename=None, body=None):
        """Guess the most appropriate Response class based on the given arguments"""
        # Try each hint in order of reliability, stopping at the first match.
        cls = Response
        if headers is not None:
            cls = self.from_headers(headers)
        if cls is Response and url is not None:
            cls = self.from_filename(url)
        if cls is Response and filename is not None:
            cls = self.from_filename(filename)
        if cls is Response and body is not None:
            cls = self.from_body(body)
        return cls
# Module-level singleton shared by all callers.
responsetypes = ResponseTypes()
| {
"content_hash": "eb3403deeb7bd3ed58bc2e869bfb2290",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 84,
"avg_line_length": 39.008849557522126,
"alnum_prop": 0.631578947368421,
"repo_name": "wcong/ants",
"id": "e0609daf5981d2282763b61879844fb0f2f43765",
"size": "4408",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ants/utils/responsetypes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "721345"
},
{
"name": "Shell",
"bytes": "1644"
}
],
"symlink_target": ""
} |
"""
libcloud driver for the Host Virtual Inc. (VR) API
Home page http://www.vr.org/
"""
import time
try:
import simplejson as json
except ImportError:
import json
from libcloud.common.hostvirtual import HostVirtualResponse
from libcloud.common.hostvirtual import HostVirtualConnection
from libcloud.common.hostvirtual import HostVirtualException
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver
from libcloud.compute.base import NodeImage, NodeSize, NodeLocation
from libcloud.compute.base import NodeAuthSSHKey, NodeAuthPassword
# Base path prefix for every Host Virtual API endpoint.
API_ROOT = '/vapi'
# Maps API-reported server status strings onto libcloud NodeState values.
NODE_STATE_MAP = {
    'BUILDING': NodeState.PENDING,
    'PENDING': NodeState.PENDING,
    'RUNNING': NodeState.RUNNING,  # server is powered up
    'STOPPING': NodeState.REBOOTING,
    'REBOOTING': NodeState.REBOOTING,
    'STARTING': NodeState.REBOOTING,
    'TERMINATED': NodeState.TERMINATED  # server is powered down
}
# Fallback datacenter id used by create_node when the caller gives no location.
DEFAULT_NODE_LOCATION_ID = 4
class HostVirtualComputeResponse(HostVirtualResponse):
    """Response class for the Host Virtual compute API (no extra behaviour)."""
    pass
class HostVirtualComputeConnection(HostVirtualConnection):
    """Connection class wiring the compute response type into the base connection."""
    responseCls = HostVirtualComputeResponse
class HostVirtualNodeDriver(NodeDriver):
    """libcloud compute driver for the Host Virtual Inc. (vr.org) API."""
    type = Provider.HOSTVIRTUAL
    name = 'HostVirtual'
    website = 'http://www.vr.org'
    connectionCls = HostVirtualComputeConnection
    def __init__(self, key):
        # No cached location; endpoints take the location per request.
        self.location = None
        NodeDriver.__init__(self, key)
    def _to_node(self, data):
        """
        Build a L{Node} from an API server record.
        @param data: server record; must contain 'status', 'mbpkgid', 'fqdn'.
        @type data: C{dict}
        @rtype: L{Node}
        """
        state = NODE_STATE_MAP[data['status']]
        public_ips = []
        private_ips = []
        extra = {}
        # Optional fields: present on full records, may be absent on stubs.
        if 'plan_id' in data:
            extra['size'] = data['plan_id']
        if 'os_id' in data:
            extra['image'] = data['os_id']
        if 'location_id' in data:
            extra['location'] = data['location_id']
        if 'ip' in data:
            public_ips.append(data['ip'])
        node = Node(id=data['mbpkgid'], name=data['fqdn'], state=state,
                    public_ips=public_ips, private_ips=private_ips,
                    driver=self.connection.driver, extra=extra)
        return node
    def list_locations(self):
        """
        List available datacenters.
        @rtype: C{list} of L{NodeLocation}
        """
        result = self.connection.request(API_ROOT + '/cloud/locations/').object
        locations = []
        for dc in result:
            locations.append(NodeLocation(
                dc["id"],
                dc["name"],
                # assumes names look like 'City, CC' -- TODO confirm format
                dc["name"].split(',')[1].replace(" ", ""),  # country
                self))
        return locations
    def list_sizes(self, location=None):
        """
        List available plans, optionally filtered by datacenter.
        @keyword location: restrict results to this datacenter (optional)
        @type location: L{NodeLocation}
        @rtype: C{list} of L{NodeSize}
        """
        params = {}
        if location:
            params = {'location': location.id}
        result = self.connection.request(
            API_ROOT + '/cloud/sizes/',
            data=json.dumps(params)).object
        sizes = []
        for size in result:
            n = NodeSize(id=size['plan_id'],
                         name=size['plan'],
                         ram=size['ram'],
                         disk=size['disk'],
                         bandwidth=size['transfer'],
                         price=size['price'],
                         driver=self.connection.driver)
            sizes.append(n)
        return sizes
    def list_images(self):
        """
        List available OS images.
        @rtype: C{list} of L{NodeImage}
        """
        result = self.connection.request(API_ROOT + '/cloud/images/').object
        images = []
        for image in result:
            i = NodeImage(id=image["id"],
                          name=image["os"],
                          driver=self.connection.driver,
                          extra=image)
            # 'id' and 'os' are surfaced as first-class attributes above,
            # so drop them from extra to avoid duplication.
            del i.extra['id']
            del i.extra['os']
            images.append(i)
        return images
    def list_nodes(self):
        """
        List all servers on the account.
        @rtype: C{list} of L{Node}
        """
        result = self.connection.request(API_ROOT + '/cloud/servers/').object
        nodes = []
        for value in result:
            node = self._to_node(value)
            nodes.append(node)
        return nodes
    def _wait_for_node(self, node_id, timeout=30, interval=5.0):
        """
        Poll ex_get_node until the server record becomes available.
        @param node_id: ID of the node to wait for.
        @type node_id: C{int}
        @param timeout: Timeout (in seconds).
        @type timeout: C{int}
        @param interval: How long to wait (in seconds) between each attempt.
        @type interval: C{float}
        """
        # poll until we get a node
        for i in range(0, timeout, int(interval)):
            try:
                node = self.ex_get_node(node_id)
                return node
            except HostVirtualException:
                # Not provisioned yet; back off and retry.
                time.sleep(interval)
        raise HostVirtualException(412, 'Timedout on getting node details')
    def create_node(self, **kwargs):
        """
        Order a package, provision it, and wait for the node to appear.
        @keyword name: FQDN for the new server (mandatory)
        @keyword size: plan to order (mandatory)
        @type size: L{NodeSize}
        @keyword image: OS image to install (mandatory)
        @type image: L{NodeImage}
        @keyword auth: an SSH key or root password (mandatory)
        @keyword location: datacenter to deploy into (optional)
        @rtype: L{Node}
        """
        dc = None
        size = kwargs['size']
        image = kwargs['image']
        params = {'plan': size.name}
        dc = DEFAULT_NODE_LOCATION_ID
        if 'location' in kwargs:
            dc = kwargs['location'].id
        # simply order a package first
        result = self.connection.request(API_ROOT + '/cloud/buy/',
                                         data=json.dumps(params),
                                         method='POST').object
        # create a stub node
        stub_node = self._to_node({
            'mbpkgid': result['id'],
            'status': 'PENDING',
            'fqdn': kwargs['name'],
            'plan_id': size.id,
            'os_id': image.id,
            'location_id': dc
        })
        # provisioning a server using the stub node
        self.ex_provision_node(node=stub_node, auth=kwargs['auth'])
        node = self._wait_for_node(stub_node.id)
        return node
    def reboot_node(self, node):
        """
        Reboot a node (non-forced).
        @type node: L{Node}
        @rtype: C{bool}
        """
        params = {'force': 0, 'mbpkgid': node.id}
        result = self.connection.request(
            API_ROOT + '/cloud/server/reboot',
            data=json.dumps(params),
            method='POST').object
        return bool(result)
    def destroy_node(self, node):
        """
        Cancel the package backing a node.
        @type node: L{Node}
        @rtype: C{bool}
        """
        params = {
            'mbpkgid': node.id,
            #'reason': 'Submitted through Libcloud API'
        }
        result = self.connection.request(
            API_ROOT + '/cloud/cancel', data=json.dumps(params),
            method='POST').object
        return bool(result)
    def ex_get_node(self, node_id):
        """
        Get a single node.
        @param node_id: id of the node that we need the node object for
        @type node_id: C{str}
        @rtype: L{Node}
        """
        params = {'mbpkgid': node_id}
        result = self.connection.request(
            API_ROOT + '/cloud/server', params=params).object
        node = self._to_node(result)
        return node
    def ex_stop_node(self, node):
        """
        Stop a node.
        @param node: Node which should be used
        @type node: L{Node}
        @rtype: C{bool}
        """
        params = {'force': 0, 'mbpkgid': node.id}
        result = self.connection.request(
            API_ROOT + '/cloud/server/shutdown',
            data=json.dumps(params),
            method='POST').object
        return bool(result)
    def ex_start_node(self, node):
        """
        Start a node.
        @param node: Node which should be used
        @type node: L{Node}
        @rtype: C{bool}
        """
        params = {'mbpkgid': node.id}
        result = self.connection.request(
            API_ROOT + '/cloud/server/start',
            data=json.dumps(params),
            method='POST').object
        return bool(result)
    def ex_provision_node(self, **kwargs):
        """
        Provision a server on a VR package and get it booted
        @keyword node: node which should be used
        @type node: L{Node}
        @keyword image: The distribution to deploy on your server (mandatory)
        @type image: L{NodeImage}
        @keyword auth: an SSH key or root password (mandatory)
        @type auth: L{NodeAuthSSHKey} or L{NodeAuthPassword}
        @keyword location: which datacenter to create the server in
        @type location: L{NodeLocation}
        @return: Node representing the newly built server
        @rtype: L{Node}
        """
        node = kwargs['node']
        # fall back to the image recorded on the stub node (see create_node)
        if 'image' in kwargs:
            image = kwargs['image']
        else:
            image = node.extra['image']
        params = {
            'mbpkgid': node.id,
            'image': image,
            'fqdn': node.name,
            'location': node.extra['location'],
        }
        auth = kwargs['auth']
        ssh_key = None
        password = None
        if isinstance(auth, NodeAuthSSHKey):
            ssh_key = auth.pubkey
            params['ssh_key'] = ssh_key
        elif isinstance(auth, NodeAuthPassword):
            password = auth.password
            params['password'] = password
        if not ssh_key and not password:
            raise HostVirtualException(500, "Need SSH key or Root password")
        result = self.connection.request(API_ROOT + '/cloud/server/build',
                                         data=json.dumps(params),
                                         method='POST').object
        return bool(result)
    def ex_delete_node(self, node):
        """
        Delete a node.
        @param node: Node which should be used
        @type node: L{Node}
        @rtype: C{bool}
        """
        params = {'mbpkgid': node.id}
        result = self.connection.request(
            API_ROOT + '/cloud/server/delete', data=json.dumps(params),
            method='POST').object
        return bool(result)
| {
"content_hash": "c80b43d79f59360c94f5e3e355631aff",
"timestamp": "",
"source": "github",
"line_count": 319,
"max_line_length": 79,
"avg_line_length": 29.73667711598746,
"alnum_prop": 0.5442757748260595,
"repo_name": "IsCoolEntertainment/debpkg_libcloud",
"id": "e311224d52a5d7571819e43c8c425a1260ff856f",
"size": "10262",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libcloud/compute/drivers/hostvirtual.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2037599"
}
],
"symlink_target": ""
} |
from utils.decorators import as_view
from gap.template import get_template
from StringIO import StringIO
@as_view
def welcome_screen(request, response):
    """Render the project homepage template."""
    context = {
        'project_name': 'Example project',
    }
    template = get_template("homepage.html")
    return template.render(context)
@as_view
def not_found(request, response, *args, **kwargs):
    """Return a 404 page; in DEBUG mode append a dump of the known routes."""
    response.set_status(404)
    text = 'The page you are requesting was not found on this server.'
    from gap.conf import settings
    if settings['DEBUG']:
        buffer = StringIO()
        dump_routes(buffer)
        buffer.seek(0)
        # Fixed typo in the user-facing message: 'rotes' -> 'routes'.
        return text + '<br>The known routes are...<br><pre>' + buffer.read() + '</pre>'
    else:
        return text
def dump_routes(response, routes=None, indent=''):
    """Write a dump of *routes* (default: the global route table) to *response*."""
    if routes is None:
        from routes import routes
    for entry in routes:
        dump_route(response, entry, indent)
def dump_route(response, route, indent=''):
    """Write a human-readable description of a single route to *response*.

    Tuples/lists are pattern -> handler pairs; objects with ``routes`` are
    nested groups dumped recursively; objects with ``regex`` are compiled
    handler routes.
    """
    if isinstance(route, (tuple, list)):
        # NOTE(review): this branch ignores *indent* -- confirm intended.
        response.write(route[0] + ' -> ' + route[1] + '\n')
        return
    if hasattr(route, 'routes'):
        response.write(indent + route.prefix + ':' + '\n')
        dump_routes(response, route.routes, indent + '  ')
        return
    if hasattr(route, 'regex'):
        response.write(indent + route.regex.pattern + ' -> ' + repr(route.handler) + '\n')
        return
    raise TypeError('Unknown route class %s' % route.__class__.__name__)
| {
"content_hash": "66288260543b64fb609d2be45b08fe65",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 90,
"avg_line_length": 32.404761904761905,
"alnum_prop": 0.6252755326965467,
"repo_name": "czervenka/gap",
"id": "be0e3d76ee38b73ad4d3c6d60e5c8af9d6e04501",
"size": "1361",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gap/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "57935"
},
{
"name": "Shell",
"bytes": "2483"
}
],
"symlink_target": ""
} |
from string import Template
from datetime import date
# Build the Qt bundle's Info.plist from the template shipped in share/qt,
# substituting the version (parsed from bitcoin-qt.pro) and the current year.
bitcoinDir = "./"

inFile = bitcoinDir + "/share/qt/Info.plist"
outFile = "Preiscoin-Qt.app/Contents/Info.plist"

version = "unknown"
fileForGrabbingVersion = bitcoinDir + "bitcoin-qt.pro"
# 'with' blocks close the files deterministically (the originals leaked
# three open file handles).
with open(fileForGrabbingVersion) as verFile:
    for line in verFile:
        lineArr = line.replace(" ", "").split("=")
        if lineArr[0].startswith("VERSION"):
            version = lineArr[1].replace("\n", "")

with open(inFile, "r") as fIn:
    fileContent = fIn.read()

s = Template(fileContent)
newFileContent = s.substitute(VERSION=version, YEAR=date.today().year)

with open(outFile, "w") as fOut:
    fOut.write(newFileContent)

# Parenthesized form works under both Python 2 and 3.
print("Info.plist fresh created")
| {
"content_hash": "72ff87667f84c41fc49fa99b737e8792",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 69,
"avg_line_length": 27.291666666666668,
"alnum_prop": 0.7099236641221374,
"repo_name": "Preiscoin/1",
"id": "7eac68d303ed281bc295a6df3895f54462c982b3",
"size": "899",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "share/qt/clean_mac_info_plist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "98809"
},
{
"name": "C++",
"bytes": "15122611"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "IDL",
"bytes": "15019"
},
{
"name": "Nu",
"bytes": "264"
},
{
"name": "Objective-C++",
"bytes": "5864"
},
{
"name": "Perl",
"bytes": "10948"
},
{
"name": "Python",
"bytes": "37268"
},
{
"name": "Shell",
"bytes": "9702"
},
{
"name": "TypeScript",
"bytes": "5246561"
}
],
"symlink_target": ""
} |
def is_user_game_master(user):
    """Return True when *user* belongs to the 'game-master' group."""
    gm_groups = user.groups.filter(name='game-master')
    return gm_groups.exists()
| {
"content_hash": "29923ba55d3f3d0299a2b5a29821b7c0",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 58,
"avg_line_length": 45,
"alnum_prop": 0.7222222222222222,
"repo_name": "BackwardSpy/leaderboard",
"id": "39523aface7c7329127ad2ba1be6374603313027",
"size": "90",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3035"
},
{
"name": "Python",
"bytes": "16246"
},
{
"name": "Shell",
"bytes": "75"
}
],
"symlink_target": ""
} |
import serial
from serial.tools import list_ports
from ..GenericInstrument import GenericInstrument
from ..IEEE488 import IEEE488
from ..SCPI import SCPI
class IFR_CPM():
    """Serial-port driver for an IFR CPM frequency counter.

    Talks a line-based prompt protocol over a 9600 8N1 serial link.
    """
    def __init__(self, inst):
        # *inst* is the serial port name (e.g. 'COM5' or '/dev/ttyUSB0').
        self.inst = serial.Serial(inst, 9600, timeout=1, bytesize=8, parity='N', stopbits=1)
    def query(self, command):
        """Write raw *command* bytes and return the next line read from the port."""
        self.inst.write(command)
        # ser.write(command.encode('ascii') + b'\r')
        return self.inst.readline()  # .strip()
    def counter(self):
        """Generator yielding integer frequency readings in Hz while the port is open."""
        # Unit suffix reported by the instrument -> multiplier to Hz.
        multipliers = {' Hz': 1, 'kHz': 1e3, 'MHz': 1e6, 'GHz': 1e9 }
        while self.inst.isOpen():
            self.inst.write(b'\r')
            if self.inst.read(7).strip() == b'>>':  # wait for prompt characters
                if self.query(b'DISPLAY FREQ\r') == b'DISPLAY FREQ\r\n':  # request frequency data
                    # The next readline should be frequency
                    ret = self.inst.readline().decode()  # 50.015 623 MHz    0 0 0 0 0
                    # Strip the trailing status flags, keep '<digits> <unit>'.
                    ret = ret[:-12].strip()  # 50.015 623 MHz
                    magnitude, multiplier = float(ret[:-3].replace(' ', '')), ret[-3:]  # 50.015623, 'MHz'
                    # print(f'magnitude {magnitude}, multiplier {multiplier}')
                    value = int(magnitude * multipliers[multiplier])
                    # print(f'{value} Hz')
                    yield value
'''
# import time
from pprint import pprint
import serial
from serial.tools import list_ports
def query(ser, command):
ser.write(command)
# ser.write(command.encode('ascii') + b'\r')
return ser.readline() # .strip()
for each in list_ports.comports():
print('{}\t\t{}'.format(each.device, each.description))
port = input('Serial Port? : ')
# port = 'COM5'
multipliers = {' Hz': 1, 'kHz': 1e3, 'MHz': 1e6, 'GHz': 1e9}
with serial.Serial(port, 9600, timeout=1, bytesize=8, parity='N', stopbits=1) as ser:
while ser.isOpen():
ser.write(b'\r')
if ser.read(7).strip() == b'>>': # wait for prompt characters
if query(ser, b'DISPLAY FREQ\r') == b'DISPLAY FREQ\r\n': # request frequency data
# The next readline should be frequency
ret = ser.readline().decode() # 50.015 623 MHz 0 0 0 0 0
ret = ret[:-12].strip() # 50.015 623 MHz
magnitude, multiplier = float(ret[:-3].replace(' ', '')), ret[-3:] # 50.015623, 'MHz'
# print(f'magnitude {magnitude}, multiplier {multiplier}')
value = int(magnitude * multipliers[multiplier])
print(f'{value} Hz')
'''
| {
"content_hash": "02a793bbdb9ac23fcca9539d884f5d1b",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 106,
"avg_line_length": 34.223684210526315,
"alnum_prop": 0.5544021530180699,
"repo_name": "DavidLutton/EngineeringProject",
"id": "21a51ebc109685bd3125d37ddc7eca03cc5ad756",
"size": "2601",
"binary": false,
"copies": "1",
"ref": "refs/heads/trypip",
"path": "labtoolkit/FrequencyCounter/IFRCPM47.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2548"
},
{
"name": "Python",
"bytes": "220531"
}
],
"symlink_target": ""
} |
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._encryption_scopes_operations import (
build_get_request,
build_list_request,
build_patch_request,
build_put_request,
)
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class EncryptionScopesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.storage.v2021_01_01.aio.StorageManagementClient`'s
:attr:`encryption_scopes` attribute.
"""
models = _models
    def __init__(self, *args, **kwargs) -> None:
        # Accept client/config/serializer/deserializer either positionally
        # (in that order) or as keyword arguments.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    # Typing overload: body supplied as a typed EncryptionScope model
    # (serialized to JSON by the implementation below).
    @overload
    async def put(
        self,
        resource_group_name: str,
        account_name: str,
        encryption_scope_name: str,
        encryption_scope: _models.EncryptionScope,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.EncryptionScope:
        """Synchronously creates or updates an encryption scope under the specified storage account. If an
        encryption scope is already created and a subsequent request is issued with different
        properties, the encryption scope properties will be updated per the specified request.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param encryption_scope_name: The name of the encryption scope within the specified storage
         account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type encryption_scope_name: str
        :param encryption_scope: Encryption scope properties to be used for the create or update.
         Required.
        :type encryption_scope: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EncryptionScope or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    # Typing overload: body supplied as a raw IO stream (sent as-is).
    @overload
    async def put(
        self,
        resource_group_name: str,
        account_name: str,
        encryption_scope_name: str,
        encryption_scope: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.EncryptionScope:
        """Synchronously creates or updates an encryption scope under the specified storage account. If an
        encryption scope is already created and a subsequent request is issued with different
        properties, the encryption scope properties will be updated per the specified request.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param encryption_scope_name: The name of the encryption scope within the specified storage
         account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type encryption_scope_name: str
        :param encryption_scope: Encryption scope properties to be used for the create or update.
         Required.
        :type encryption_scope: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EncryptionScope or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def put(
        self,
        resource_group_name: str,
        account_name: str,
        encryption_scope_name: str,
        encryption_scope: Union[_models.EncryptionScope, IO],
        **kwargs: Any
    ) -> _models.EncryptionScope:
        """Synchronously creates or updates an encryption scope under the specified storage account. If an
        encryption scope is already created and a subsequent request is issued with different
        properties, the encryption scope properties will be updated per the specified request.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param encryption_scope_name: The name of the encryption scope within the specified storage
         account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type encryption_scope_name: str
        :param encryption_scope: Encryption scope properties to be used for the create or update. Is
         either a model type or a IO type. Required.
        :type encryption_scope: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EncryptionScope or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # HTTP statuses that map to typed azure-core exceptions; callers may
        # extend or override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-01-01"))  # type: Literal["2021-01-01"]
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.EncryptionScope]
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw streams/bytes are sent as-is; models are serialized to JSON.
        if isinstance(encryption_scope, (IO, bytes)):
            _content = encryption_scope
        else:
            _json = self._serialize.body(encryption_scope, "EncryptionScope")
        request = build_put_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            encryption_scope_name=encryption_scope_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.put.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        # Resolve the relative template URL against the client's base URL.
        request.url = self._client.format_url(request.url)  # type: ignore
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        # 200 = updated an existing scope, 201 = created a new one; both
        # carry an EncryptionScope payload.
        if response.status_code == 200:
            deserialized = self._deserialize("EncryptionScope", pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize("EncryptionScope", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}"}  # type: ignore
    # Typing overload: body supplied as a typed EncryptionScope model
    # (serialized to JSON by the implementation below).
    @overload
    async def patch(
        self,
        resource_group_name: str,
        account_name: str,
        encryption_scope_name: str,
        encryption_scope: _models.EncryptionScope,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.EncryptionScope:
        """Update encryption scope properties as specified in the request body. Update fails if the
        specified encryption scope does not already exist.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param encryption_scope_name: The name of the encryption scope within the specified storage
         account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type encryption_scope_name: str
        :param encryption_scope: Encryption scope properties to be used for the update. Required.
        :type encryption_scope: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EncryptionScope or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    # Typing overload: body supplied as a raw IO stream (sent as-is).
    @overload
    async def patch(
        self,
        resource_group_name: str,
        account_name: str,
        encryption_scope_name: str,
        encryption_scope: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.EncryptionScope:
        """Update encryption scope properties as specified in the request body. Update fails if the
        specified encryption scope does not already exist.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param encryption_scope_name: The name of the encryption scope within the specified storage
         account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type encryption_scope_name: str
        :param encryption_scope: Encryption scope properties to be used for the update. Required.
        :type encryption_scope: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EncryptionScope or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def patch(
        self,
        resource_group_name: str,
        account_name: str,
        encryption_scope_name: str,
        encryption_scope: Union[_models.EncryptionScope, IO],
        **kwargs: Any
    ) -> _models.EncryptionScope:
        """Update encryption scope properties as specified in the request body. Update fails if the
        specified encryption scope does not already exist.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param encryption_scope_name: The name of the encryption scope within the specified storage
         account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type encryption_scope_name: str
        :param encryption_scope: Encryption scope properties to be used for the update. Is either a
         model type or a IO type. Required.
        :type encryption_scope: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EncryptionScope or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # HTTP statuses that map to typed azure-core exceptions; callers may
        # extend or override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-01-01"))  # type: Literal["2021-01-01"]
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.EncryptionScope]
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw streams/bytes are sent as-is; models are serialized to JSON.
        if isinstance(encryption_scope, (IO, bytes)):
            _content = encryption_scope
        else:
            _json = self._serialize.body(encryption_scope, "EncryptionScope")
        request = build_patch_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            encryption_scope_name=encryption_scope_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.patch.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        # Resolve the relative template URL against the client's base URL.
        request.url = self._client.format_url(request.url)  # type: ignore
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("EncryptionScope", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    patch.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}"}  # type: ignore
    @distributed_trace_async
    async def get(
        self, resource_group_name: str, account_name: str, encryption_scope_name: str, **kwargs: Any
    ) -> _models.EncryptionScope:
        """Returns the properties for the specified encryption scope.
        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only. Required.
        :type account_name: str
        :param encryption_scope_name: The name of the encryption scope within the specified storage
         account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
         lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
         followed by a letter or number. Required.
        :type encryption_scope_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: EncryptionScope or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_01_01.models.EncryptionScope
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # HTTP statuses that map to typed azure-core exceptions; callers may
        # extend or override via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-01-01"))  # type: Literal["2021-01-01"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.EncryptionScope]
        request = build_get_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            encryption_scope_name=encryption_scope_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        # Resolve the relative template URL against the client's base URL.
        request.url = self._client.format_url(request.url)  # type: ignore
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("EncryptionScope", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}"}  # type: ignore
@distributed_trace
def list(
    self, resource_group_name: str, account_name: str, **kwargs: Any
) -> AsyncIterable["_models.EncryptionScope"]:
    """Lists all the encryption scopes available under the specified storage account.

    :param resource_group_name: The name of the resource group within the user's subscription. The
     name is case insensitive. Required.
    :type resource_group_name: str
    :param account_name: The name of the storage account within the specified resource group.
     Storage account names must be between 3 and 24 characters in length and use numbers and
     lower-case letters only. Required.
    :type account_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either EncryptionScope or the result of cls(response)
    :rtype:
     ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2021_01_01.models.EncryptionScope]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = kwargs.pop("headers", {}) or {}
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-01-01"))  # type: Literal["2021-01-01"]
    cls = kwargs.pop("cls", None)  # type: ClsType[_models.EncryptionScopeListResult]

    # Map HTTP status codes to azure-core exception types; callers may
    # extend or override the mapping via the "error_map" kwarg.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    def prepare_request(next_link=None):
        # First page: build the templated list request.  Later pages:
        # follow the service-supplied continuation URL, re-applying this
        # client's api-version query parameter.
        if not next_link:

            request = build_list_request(
                resource_group_name=resource_group_name,
                account_name=account_name,
                subscription_id=self._config.subscription_id,
                api_version=api_version,
                template_url=self.list.metadata["url"],
                headers=_headers,
                params=_params,
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore

        else:
            # make call to next link with the client's api-version
            _parsed_next_link = urllib.parse.urlparse(next_link)
            _next_request_params = case_insensitive_dict(
                {
                    key: [urllib.parse.quote(v) for v in value]
                    for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                }
            )
            _next_request_params["api-version"] = self._config.api_version
            request = HttpRequest(
                "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
            )
            request = _convert_request(request)
            request.url = self._client.format_url(request.url)  # type: ignore
            request.method = "GET"
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page; return (continuation token, page items).
        deserialized = self._deserialize("EncryptionScopeListResult", pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch one page, surfacing ARM errors as HttpResponseError.
        request = prepare_request(next_link)

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)

list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes"}  # type: ignore
| {
"content_hash": "ce313f17a797793882238d8b33c8b5f8",
"timestamp": "",
"source": "github",
"line_count": 562,
"max_line_length": 211,
"avg_line_length": 49.15658362989324,
"alnum_prop": 0.6618764931586187,
"repo_name": "Azure/azure-sdk-for-python",
"id": "1f0beb61d06f76f9c5c1551d2c8ffecde6582566",
"size": "28126",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/aio/operations/_encryption_scopes_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
"""Base implementation classes.
The public-facing ``Events`` serves as the base class for an event interface;
its public attributes represent different kinds of events. These attributes
are mirrored onto a ``_Dispatch`` class, which serves as a container for
collections of listener functions. These collections are represented both
at the class level of a particular ``_Dispatch`` class as well as within
instances of ``_Dispatch``.
"""
from __future__ import absolute_import
import weakref
from .attr import _ClsLevelDispatch
from .attr import _EmptyListener
from .attr import _JoinedListener
from .. import util
_registrars = util.defaultdict(list)
def _is_event_name(name):
    """Return True if *name* names an event on an Events class.

    Plain, non-underscored method names qualify (except ``dispatch``
    itself), as does anything carrying the internal-only ``_sa_event``
    prefix.
    """
    if name.startswith("_sa_event"):
        return True
    return not (name.startswith("_") or name == "dispatch")
class _UnpickleDispatch(object):
    """Serializable callable that re-generates an instance of
    :class:`_Dispatch` given a particular :class:`.Events` subclass.
    """

    def __call__(self, _instance_cls):
        # Walk the MRO to find the class that actually owns the
        # "dispatch" descriptor, then rebuild a per-class dispatch.
        for cls in _instance_cls.__mro__:
            if "dispatch" not in cls.__dict__:
                continue
            return cls.__dict__["dispatch"].dispatch._for_class(
                _instance_cls
            )
        raise AttributeError("No class with a 'dispatch' member present.")
class _Dispatch(object):
    """Mirror the event listening definitions of an Events class with
    listener collections.

    Classes which define a "dispatch" member will return a
    non-instantiated :class:`._Dispatch` subclass when the member
    is accessed at the class level. When the "dispatch" member is
    accessed at the instance level of its owner, an instance
    of the :class:`._Dispatch` class is returned.

    A :class:`._Dispatch` class is generated for each :class:`.Events`
    class defined, by the :func:`._create_dispatcher_class` function.
    The original :class:`.Events` classes remain untouched.
    This decouples the construction of :class:`.Events` subclasses from
    the implementation used by the event internals, and allows
    inspecting tools like Sphinx to work in an unsurprising
    way against the public API.
    """

    # In one ORM edge case, an attribute is added to _Dispatch,
    # so __dict__ is used in just that case and potentially others.
    __slots__ = "_parent", "_instance_cls", "__dict__", "_empty_listeners"

    # Shared cache of {instance_cls: {event name: _EmptyListener}};
    # weak keys so target classes can be garbage collected.
    _empty_listener_reg = weakref.WeakKeyDictionary()

    def __init__(self, parent, instance_cls=None):
        # parent is the class-level _Dispatch this one derives from
        # (None for the class-level dispatch itself).
        self._parent = parent
        self._instance_cls = instance_cls

        if instance_cls:
            try:
                self._empty_listeners = self._empty_listener_reg[instance_cls]
            except KeyError:
                # First dispatch for this class: build and cache the
                # per-event empty listener collections.
                self._empty_listeners = self._empty_listener_reg[
                    instance_cls
                ] = {
                    ls.name: _EmptyListener(ls, instance_cls)
                    for ls in parent._event_descriptors
                }
        else:
            self._empty_listeners = {}

    def __getattr__(self, name):
        # Assign EmptyListeners as attributes on demand
        # to reduce startup time for new dispatch objects.
        try:
            ls = self._empty_listeners[name]
        except KeyError:
            raise AttributeError(name)
        else:
            # Cache on the instance so __getattr__ is not hit again
            # for this name.
            setattr(self, ls.name, ls)
            return ls

    @property
    def _event_descriptors(self):
        for k in self._event_names:
            # Yield _ClsLevelDispatch related
            # to relevant event name.
            yield getattr(self, k)

    @property
    def _listen(self):
        # Delegate registration to the owning Events class.
        return self._events._listen

    def _for_class(self, instance_cls):
        return self.__class__(self, instance_cls)

    def _for_instance(self, instance):
        instance_cls = instance.__class__
        return self._for_class(instance_cls)

    def _join(self, other):
        """Create a 'join' of this :class:`._Dispatch` and another.

        This new dispatcher will dispatch events to both
        :class:`._Dispatch` objects.
        """
        if "_joined_dispatch_cls" not in self.__class__.__dict__:
            # Lazily create (once per dispatch class) the joined
            # dispatcher subclass with matching event slots.
            cls = type(
                "Joined%s" % self.__class__.__name__,
                (_JoinedDispatcher,),
                {"__slots__": self._event_names},
            )

            self.__class__._joined_dispatch_cls = cls
        return self._joined_dispatch_cls(self, other)

    def __reduce__(self):
        # Pickle support: rebuild via _UnpickleDispatch from just the
        # target class.
        return _UnpickleDispatch(), (self._instance_cls,)

    def _update(self, other, only_propagate=True):
        """Populate from the listeners in another :class:`_Dispatch`
        object."""
        for ls in other._event_descriptors:
            if isinstance(ls, _EmptyListener):
                # nothing to copy from an empty collection
                continue
            getattr(self, ls.name).for_modify(self)._update(
                ls, only_propagate=only_propagate
            )

    def _clear(self):
        for ls in self._event_descriptors:
            ls.for_modify(self).clear()
class _EventMeta(type):
    """Intercept new Event subclasses and create
    associated _Dispatch classes."""

    def __init__(cls, classname, bases, dict_):
        # Build the mirrored _Dispatch class first, then finish normal
        # type initialization.
        _create_dispatcher_class(cls, classname, bases, dict_)
        type.__init__(cls, classname, bases, dict_)
def _create_dispatcher_class(cls, classname, bases, dict_):
    """Create a :class:`._Dispatch` class corresponding to an
    :class:`.Events` class."""

    # there's all kinds of ways to do this,
    # i.e. make a Dispatch class that shares the '_listen' method
    # of the Event class, this is the straight monkeypatch.
    if hasattr(cls, "dispatch"):
        # subclassing an existing Events class: extend its dispatch class
        dispatch_base = cls.dispatch.__class__
    else:
        dispatch_base = _Dispatch

    event_names = [k for k in dict_ if _is_event_name(k)]
    dispatch_cls = type(
        "%sDispatch" % classname, (dispatch_base,), {"__slots__": event_names}
    )

    dispatch_cls._event_names = event_names

    dispatch_inst = cls._set_dispatch(cls, dispatch_cls)
    for k in dispatch_cls._event_names:
        # one class-level listener collection per declared event, plus a
        # global registrar entry so listeners can find this Events class
        setattr(dispatch_inst, k, _ClsLevelDispatch(cls, dict_[k]))
        _registrars[k].append(cls)

    for super_ in dispatch_cls.__bases__:
        if issubclass(super_, _Dispatch) and super_ is not _Dispatch:
            # inherit the parent Events class' event descriptors as well
            for ls in super_._events.dispatch._event_descriptors:
                setattr(dispatch_inst, ls.name, ls)
                dispatch_cls._event_names.append(ls.name)

    if getattr(cls, "_dispatch_target", None):
        the_cls = cls._dispatch_target
        if (
            hasattr(the_cls, "__slots__")
            and "_slots_dispatch" in the_cls.__slots__
        ):
            # slotted targets keep their per-instance dispatch on a
            # dedicated "_slots_dispatch" slot instead of __dict__
            cls._dispatch_target.dispatch = slots_dispatcher(cls)
        else:
            cls._dispatch_target.dispatch = dispatcher(cls)
def _remove_dispatcher(cls):
    """Detach *cls* from the global registrar table, dropping any
    event-name entries that are left empty."""
    for event_name in cls.dispatch._event_names:
        listeners = _registrars[event_name]
        listeners.remove(cls)
        if not listeners:
            del _registrars[event_name]
class Events(util.with_metaclass(_EventMeta, object)):
    """Define event listening functions for a particular target type."""

    @staticmethod
    def _set_dispatch(cls, dispatch_cls):
        # This allows an Events subclass to define additional utility
        # methods made available to the target via
        # "self.dispatch._events.<utilitymethod>"
        # @staticmethod to allow easy "super" calls while in a metaclass
        # constructor.
        cls.dispatch = dispatch_cls(None)
        dispatch_cls._events = cls
        return cls.dispatch

    @classmethod
    def _accept_with(cls, target):
        # Return *target* when this Events class applies to it (based on
        # the type of its "dispatch" member); otherwise fall through and
        # return None implicitly.
        def dispatch_is(*types):
            return all(isinstance(target.dispatch, t) for t in types)

        def dispatch_parent_is(t):
            return isinstance(target.dispatch.parent, t)

        # Mapper, ClassManager, Session override this to
        # also accept classes, scoped_sessions, sessionmakers, etc.
        if hasattr(target, "dispatch"):
            if (
                dispatch_is(cls.dispatch.__class__)
                or dispatch_is(type, cls.dispatch.__class__)
                or (
                    dispatch_is(_JoinedDispatcher)
                    and dispatch_parent_is(cls.dispatch.__class__)
                )
            ):
                return target

    @classmethod
    def _listen(
        cls,
        event_key,
        propagate=False,
        insert=False,
        named=False,
        asyncio=False,
    ):
        # Forward to the event key's registration machinery.
        event_key.base_listen(
            propagate=propagate, insert=insert, named=named, asyncio=asyncio
        )

    @classmethod
    def _remove(cls, event_key):
        event_key.remove()

    @classmethod
    def _clear(cls):
        cls.dispatch._clear()
class _JoinedDispatcher(object):
    """Represent a connection between two _Dispatch objects."""

    __slots__ = "local", "parent", "_instance_cls"

    def __init__(self, local, parent):
        self.local = local
        self.parent = parent
        # mirror the local dispatcher's target class
        self._instance_cls = local._instance_cls

    def __getattr__(self, name):
        # Build and cache a _JoinedListener the first time an event
        # collection is requested, so subsequent access is plain
        # attribute lookup.
        local_listener = getattr(self.local, name)
        joined = _JoinedListener(self.parent, local_listener.name, local_listener)
        setattr(self, local_listener.name, joined)
        return joined

    @property
    def _listen(self):
        return self.parent._listen

    @property
    def _events(self):
        return self.parent._events
class dispatcher(object):
    """Descriptor that serves the ``_Dispatch`` class when accessed on
    the owner class, and lazily creates and caches a per-instance
    ``_Dispatch`` when accessed on an instance.
    """

    def __init__(self, events):
        self.dispatch = events.dispatch
        self.events = events

    def __get__(self, target, owner):
        # Class-level access returns the dispatch class itself.
        if target is None:
            return self.dispatch

        per_instance = self.dispatch._for_instance(target)
        try:
            # Cache on the instance so this descriptor is bypassed on
            # subsequent access.
            target.__dict__["dispatch"] = per_instance
        except AttributeError as ae:
            util.raise_(
                TypeError(
                    "target %r doesn't have __dict__, should it be "
                    "defining _slots_dispatch?" % (target,)
                ),
                replace_context=ae,
            )
        return per_instance
class slots_dispatcher(dispatcher):
    """Variant of :class:`.dispatcher` for targets that store their
    per-instance dispatch on a ``_slots_dispatch`` slot rather than in
    ``__dict__``."""

    def __get__(self, target, owner):
        if target is None:
            return self.dispatch

        if hasattr(target, "_slots_dispatch"):
            # already built and cached on the slot
            return target._slots_dispatch

        per_instance = self.dispatch._for_instance(target)
        target._slots_dispatch = per_instance
        return per_instance
| {
"content_hash": "cb936c2ba8da00ea5913c235a3b0d8d1",
"timestamp": "",
"source": "github",
"line_count": 338,
"max_line_length": 78,
"avg_line_length": 31.659763313609467,
"alnum_prop": 0.5988225399495374,
"repo_name": "j5int/sqlalchemy",
"id": "510e16bddfed51d3c0af5c2edb1222d277d0e14f",
"size": "10936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/sqlalchemy/event/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "63151"
},
{
"name": "Python",
"bytes": "15339979"
}
],
"symlink_target": ""
} |
"""
TileMap loader for python for Tiled, a generic tile map editor
from http://mapeditor.org/ .
It loads the \*.tmx files produced by Tiled.
"""
# Versioning scheme based on: http://en.wikipedia.org/wiki/Versioning#Designating_development_stage
#
# +-- api change, probably incompatible with older versions
# | +-- enhancements but no api change
# | |
# major.minor[.build[.revision]]
# |
# +-|* 0 for alpha (status)
# |* 1 for beta (status)
# |* 2 for release candidate
# |* 3 for (public) release
#
# For instance:
# * 1.2.0.1 instead of 1.2-a
# * 1.2.1.2 instead of 1.2-b2 (beta with some bug fixes)
# * 1.2.2.3 instead of 1.2-rc (release candidate)
# * 1.2.3.0 instead of 1.2-r (commercial distribution)
# * 1.2.3.5 instead of 1.2-r5 (commercial distribution with many bug fixes)
# SVN keyword string; the slice below extracts the bare revision number
# ("$Rev: 115 $" -> "115").
__revision__ = "$Rev: 115 $"
__version__ = "3.1.0." + __revision__[6:-2]
__author__ = 'DR0ID @ 2009-2011'
# import logging
# #the following few lines are needed to use logging if this module used without
# # a previous call to logging.basicConfig()
# if 0 == len(logging.root.handlers):
# logging.basicConfig(level=logging.DEBUG)
# _LOGGER = logging.getLogger('tiledtmxloader')
# if __debug__:
# _LOGGER.debug('%s loading ...' % (__name__))
# -----------------------------------------------------------------------------
import sys
from xml.dom import minidom, Node
# Historical Python 2 fallback kept for compatibility, but narrowed to
# ImportError: a bare "except:" would silently swallow unrelated errors
# (KeyboardInterrupt, SystemExit, typos) during import.
try:
    import io
    from io import StringIO
except ImportError:  # pragma: no cover - unreachable on modern Python
    from io import StringIO
import os.path
import struct
import array
# -----------------------------------------------------------------------------
class TileMap(object):
    """
    The TileMap holds all the map data.

    :Ivariables:
        orientation : string
            orthogonal or isometric or hexagonal or shifted
        tilewidth : int
            width of the tiles (for all layers)
        tileheight : int
            height of the tiles (for all layers)
        width : int
            width of the map (number of tiles)
        height : int
            height of the map (number of tiles)
        version : string
            version of the map format
        tile_sets : list
            list of TileSet
        properties : dict
            the properties set in the editor, name-value pairs, strings
        pixel_width : int
            width of the map in pixels
        pixel_height : int
            height of the map in pixels
        layers : list
            list of TileLayer
        map_file_name : dict
            file name of the map
        named_layers : dict of string:TileLayer
            dict containing {name : TileLayer}
        named_tile_sets : dict
            dict containing {name : TileSet}
    """

    def __init__(self):
        # This is the top container for all data. The gid is the global id
        # (for a image).
        # Before calling convert most of the values are strings. Some additional
        # values are also calculated, see convert() for details. After calling
        # convert, most values are integers or floats where appropriate.
        """
        The TileMap holds all the map data.
        """
        # set through parser
        self.orientation = None
        self.tileheight = 0
        self.tilewidth = 0
        self.width = 0
        self.height = 0
        self.version = 0
        self.tile_sets = []  # TileSet
        # ISSUE 9: object groups should be in the same order as layers
        self.layers = []  # WorldTileLayer <- what order? back to front (guessed)
        # self.object_groups = []
        self.properties = {}  # {name: value}
        # additional info
        self.pixel_width = 0
        self.pixel_height = 0
        self.named_layers = {}  # {name: layer}
        self.named_tile_sets = {}  # {name: tile_set}
        self.map_file_name = ""

    def convert(self):
        """
        Converts numerical values from strings to numerical values.
        It also calculates or sets additional data:
        pixel_width
        pixel_height
        named_layers
        named_tile_sets
        """
        self.tilewidth = int(self.tilewidth)
        self.tileheight = int(self.tileheight)
        self.width = int(self.width)
        self.height = int(self.height)
        self.pixel_width = self.width * self.tilewidth
        self.pixel_height = self.height * self.tileheight

        for layer in self.layers:
            # ISSUE 9: object group layers convert themselves; tile
            # layers additionally inherit the map's global tile size
            if not layer.is_object_group:
                layer.tilewidth = self.tilewidth
                layer.tileheight = self.tileheight
                self.named_layers[layer.name] = layer
            layer.convert()

        for tile_set in self.tile_sets:
            self.named_tile_sets[tile_set.name] = tile_set
            tile_set.spacing = int(tile_set.spacing)
            tile_set.margin = int(tile_set.margin)
            for img in tile_set.images:
                if img.trans:
                    # "RRGGBB" hex color key string -> (r, g, b) tuple
                    img.trans = (int(img.trans[:2], 16), \
                                 int(img.trans[2:4], 16), \
                                 int(img.trans[4:], 16))

    def decode(self):
        """
        Decodes the TileLayer encoded_content and saves it in decoded_content.
        """
        for layer in self.layers:
            # object group layers carry no tile data to decode
            if not layer.is_object_group:
                layer.decode()
# -----------------------------------------------------------------------------
class TileSet(object):
    """
    A tileset holds the tiles and their images.

    :Ivariables:
        firstgid : int
            the first gid of this tileset
        name : string
            the name of this TileSet
        images : list
            list of TileImages
        tiles : list
            list of Tiles
        indexed_images : dict
            after calling load() it is a dict containing id: image
        spacing : int
            the spacing between tiles
        margin : int
            the margin around the tiles
        properties : dict
            the properties set in the editor, name-value pairs
        tilewidth : int
            the actual width of the tile, can be different from the
            tilewidth of the map
        tileheight : int
            the actual height of the tile, can be different from the
            tileheight of the map
    """

    def __init__(self):
        self.name = None
        self.firstgid = 0
        self.tilewidth = 0
        self.tileheight = 0
        self.spacing = 0
        self.margin = 0
        self.images = []          # list of TileImage
        self.tiles = []           # list of Tile
        self.indexed_images = {}  # {id: image}, filled by load()
        self.properties = {}      # {name: value}
# -----------------------------------------------------------------------------
class TileImage(object):
    """
    An image of a tile or just an image.

    :Ivariables:
        id : int
            id of this image (has nothing to do with gid)
        format : string
            the format as string, only 'png' at the moment
        source : string
            filename of the image. either this is set or the content
        encoding : string
            encoding of the content
        trans : tuple of (r,g,b)
            the colorkey color, raw as hex, after calling convert just a
            (r,g,b) tuple
        properties : dict
            the properties set in the editor, name-value pairs
        image : TileImage
            after calling load the pygame surface
    """

    def __init__(self):
        self.id = 0
        self.format = None
        self.source = None
        # encoding/content both come from a <data>...</data> element
        self.encoding = None
        self.content = None
        self.image = None
        self.trans = None
        self.properties = {}  # {name: value}
# -----------------------------------------------------------------------------
class Tile(object):
    """
    A single tile.

    :Ivariables:
        id : int
            id of the tile; gid = TileSet.firstgid + Tile.id
        images : list of :class:TileImage
            list of TileImage, either its 'id' or 'image data' will be set
        properties : dict of name:value
            the properties set in the editor, name-value pairs
    """

    # [20:22] DR0ID_: to sum up: there are two use cases,
    # if the tile element has a child element 'image' then tile is
    # standalone with its own id and
    # the other case where a tileset is present then it
    # refers to the image with that id in the tileset
    def __init__(self):
        self.id = 0
        self.images = []      # TileImage entries: id-only or embedded data
        self.properties = {}  # {name: value}
# -----------------------------------------------------------------------------
class TileLayer(object):
    """
    A layer of the world.

    :Ivariables:
        x : int
            position of layer in the world in number of tiles (not pixels)
        y : int
            position of layer in the world in number of tiles (not pixels)
        width : int
            number of tiles in x direction
        height : int
            number of tiles in y direction
        pixel_width : int
            width of layer in pixels
        pixel_height : int
            height of layer in pixels
        name : string
            name of this layer
        opacity : float
            float from 0 (full transparent) to 1.0 (opaque)
        decoded_content : list
            list of graphics id going through the map::

                e.g [1, 1, 1, ]
                where decoded_content[0] is (0,0)
                      decoded_content[1] is (1,0)
                      ...
                      decoded_content[w] is (width,0)
                      decoded_content[w+1] is (0,1)
                      ...
                      decoded_content[w * h] is (width,height)

                usage: graphics id = decoded_content[tile_x + tile_y * width]
        content2D : list
            list of list, usage: graphics id = content2D[x][y]
    """

    def __init__(self):
        self.width = 0
        self.height = 0
        self.x = 0
        self.y = 0
        self.pixel_width = 0
        self.pixel_height = 0
        self.name = None
        self.opacity = -1
        self.encoding = None     # 'base64', 'csv' or None (plain xml tiles)
        self.compression = None  # 'gzip', 'zlib' or None
        self.encoded_content = None
        self.decoded_content = []
        self.visible = True
        self.properties = {}  # {name: value}
        self.content2D = None
        self.is_object_group = False  # ISSUE 9

    def decode(self):
        """
        Converts the contents in a list of integers which are the gid of the
        used tiles. If necessary it decodes and uncompresses the contents.
        """
        self.decoded_content = []
        if self.encoded_content:
            content = self.encoded_content
            if self.encoding:
                if self.encoding.lower() == 'base64':
                    content = decode_base64(content)
                elif self.encoding.lower() == 'csv':
                    # csv: whitespace-separated lines of comma-separated gids
                    list_of_lines = content.split()
                    for line in list_of_lines:
                        self.decoded_content.extend(line.split(','))
                    self.decoded_content = list(map(int, \
                        [val for val in self.decoded_content if val]))
                    # nothing left for the struct unpack below
                    content = ""
                else:
                    raise Exception('unknown data encoding %s' % \
                        (self.encoding))
            else:
                # in the case of xml the encoded_content already contains a
                # list of integers
                self.decoded_content = list(map(int, self.encoded_content))
                content = ""
            if self.compression:
                # NOTE(review): compression only makes sense for the
                # base64 path; for csv/xml, content is already "" here.
                if self.compression == 'gzip':
                    content = decompress_gzip(content)
                elif self.compression == 'zlib':
                    content = decompress_zlib(content)
                else:
                    raise Exception('unknown data compression %s' % \
                        (self.compression))
        else:
            raise Exception('no encoded content to decode')

        # Unpack one map row at a time of little-endian unsigned 32-bit
        # gids (4 bytes per tile); a no-op when content is "".
        struc = struct.Struct("<" + "I" * self.width)
        struc_unpack_from = struc.unpack_from
        self_decoded_content_extend = self.decoded_content.extend
        for idx in range(0, len(content), 4 * self.width):
            val = struc_unpack_from(content, idx)
            self_decoded_content_extend(val)

        # store compactly as an array of unsigned ints
        arr = array.array('I')
        arr.fromlist(self.decoded_content)
        self.decoded_content = arr

        # TODO: generate property grid here??

        self._gen_2D()

    def _gen_2D(self):
        # Build content2D[x][y] column arrays from the flat row-major
        # decoded_content list.
        self.content2D = []

        # generate the needed lists and fill them
        for xpos in range(self.width):
            self.content2D.append(array.array('I'))
            for ypos in range(self.height):
                self.content2D[xpos].append( \
                    self.decoded_content[xpos + ypos * self.width])

    def pretty_print(self):
        # Debug helper: print one unseparated line of gids per map row.
        num = 0
        for y in range(int(self.height)):
            output = ""
            for x in range(int(self.width)):
                output += str(self.decoded_content[num])
                num += 1
            print(output)

    def convert(self):
        """Normalize parsed string attributes to numeric types and
        compute pixel dimensions (tilewidth/tileheight are assigned by
        TileMap.convert() before this runs)."""
        self.opacity = float(self.opacity)
        self.x = int(self.x)
        self.y = int(self.y)
        self.width = int(self.width)
        self.height = int(self.height)
        self.pixel_width = self.width * self.tilewidth
        self.pixel_height = self.height * self.tileheight
        self.visible = bool(int(self.visible))

    # def get_visible_tile_range(self, xmin, ymin, xmax, ymax):
    #     tile_w = self.pixel_width / self.width
    #     tile_h = self.pixel_height / self.height
    #     left = int(round(float(xmin) / tile_w)) - 1
    #     right = int(round(float(xmax) / tile_w)) + 2
    #     top = int(round(float(ymin) / tile_h)) - 1
    #     bottom = int(round(float(ymax) / tile_h)) + 2
    #     return (left, top, left - right, top - bottom)

    # def get_tiles(self, xmin, ymin, xmax, ymax):
    #     tiles = []
    #     if self.visible:
    #         for ypos in range(ymin, ymax):
    #             for xpos in range(xmin, xmax):
    #                 try:
    #                     img_idx = self.content2D[xpos][ypos]
    #                     if img_idx:
    #                         tiles.append((xpos, ypos, img_idx))
    #                 except IndexError:
    #                     pass
    #     return tiles
# -----------------------------------------------------------------------------
class MapObjectGroupLayer(object):
    """
    Group of objects on the map.

    :Ivariables:
        x : int
            the x position
        y : int
            the y position
        width : int
            width of the bounding box (usually 0, so no use)
        height : int
            height of the bounding box (usually 0, so no use)
        name : string
            name of the group
        objects : list
            list of the map objects
    """

    def __init__(self):
        self.name = None
        self.x = 0
        self.y = 0
        self.width = 0
        self.height = 0
        self.objects = []
        self.visible = True
        self.properties = {}  # {name: value}
        self.is_object_group = True  # ISSUE 9

    def convert(self):
        """Normalize the parsed string coordinates to integers, for the
        group itself and for every contained map object."""
        self.x = int(self.x)
        self.y = int(self.y)
        self.width = int(self.width)
        self.height = int(self.height)
        for obj in self.objects:
            obj.x = int(obj.x)
            obj.y = int(obj.y)
            obj.width = int(obj.width)
            obj.height = int(obj.height)
class ImageLayer(object):
    """A single-image layer ("imagelayer" element) of the map.

    Attributes are filled in from XML attributes by the parser; ``width``
    and ``height`` arrive as strings and are normalized by :meth:`convert`.
    """

    def __init__(self):
        self.source = ""
        # Initialize width/height so convert() cannot hit an
        # AttributeError when the XML omits them (the original left
        # them unset until the parser assigned them).
        self.width = 0
        self.height = 0
        self.is_object_group = False  # ISSUE 9
        self.properties = {}

    def convert(self):
        """Normalize parsed string dimensions to integers."""
        self.width = int(self.width)
        self.height = int(self.height)

    def decode(self):
        """Nothing to decode for an image layer; present so all layer
        types share the same interface."""
        pass
# -----------------------------------------------------------------------------
class MapObject(object):
    """
    A single object on the map.

    :Ivariables:
        x : int
            x position relative to group x position
        y : int
            y position relative to group y position
        width : int
            width of this object
        height : int
            height of this object
        type : string
            the type of this object
        image_source : string
            source path of the image for this object
        image : :class:TileImage
            after loading, the pygame surface containing the image
    """

    def __init__(self):
        self.name = None
        self.type = None
        self.x = 0
        self.y = 0
        self.width = 0
        self.height = 0
        self.image_source = None
        self.image = None
        self.properties = {}  # {name: value}
# -----------------------------------------------------------------------------
def decode_base64(in_str):
    """
    Decodes a base64 string and returns the raw bytes.

    :Parameters:
        in_str : string
            base64 encoded string

    :returns: decoded byte string
    """
    import base64
    # base64.decodestring() was deprecated in 3.1 and removed in Python
    # 3.9; decodebytes() is the drop-in replacement with identical
    # semantics.
    return base64.decodebytes(in_str.encode('latin-1'))
# -----------------------------------------------------------------------------
def decompress_gzip(in_str):
    """
    Uncompresses a gzip byte string and returns it.

    :Parameters:
        in_str : bytes
            gzip compressed data

    :returns: uncompressed bytes
    """
    import gzip
    # gzip.decompress (Python 3.2+) replaces the old BytesIO/StringIO
    # file-object dance.  The previous Python 2 fallback branch was dead
    # anyway: "sys.version_info > (2,)" is also true on Python 2.
    return gzip.decompress(in_str)
# -----------------------------------------------------------------------------
def decompress_zlib(in_str):
    """
    Uncompresses a zlib-compressed byte string and returns it.

    :Parameters:
        in_str : bytes
            zlib compressed data

    :returns: uncompressed bytes
    """
    import zlib
    return zlib.decompress(in_str)
# -----------------------------------------------------------------------------
def printer(obj, ident=''):
    """
    Helper function: recursively prints a hierarchy of objects.
    """
    import inspect
    print((ident + obj.__class__.__name__.upper()))
    ident += '    '
    lists = []
    for name in dir(obj):
        elem = getattr(obj, name)
        if isinstance(elem, list) and name != 'decoded_content':
            # recurse into list attributes last (but never into the
            # potentially huge decoded_content array)
            lists.append(elem)
        elif not inspect.ismethod(elem):
            if not name.startswith('__'):
                if name == 'data' and elem:
                    # nested data payload: print recursively
                    print((ident + 'data = '))
                    printer(elem, ident + '    ')
                else:
                    print((ident + '%s\t= %s' % (name, getattr(obj, name))))
    for objt_list in lists:
        for _obj in objt_list:
            printer(_obj, ident + '    ')
# -----------------------------------------------------------------------------
class VersionError(Exception):
    """Raised when a map file declares an unsupported format version."""
# -----------------------------------------------------------------------------
class TileMapParser(object):
"""
Allows to parse and decode map files for 'Tiled', a open source map editor
written in java. It can be found here: http://mapeditor.org/
"""
def _build_tile_set(self, tile_set_node, world_map):
    """Parse a <tileset> node and append the result to *world_map*."""
    tile_set = TileSet()
    self._set_attributes(tile_set_node, tile_set)
    if hasattr(tile_set, "source"):
        # external tileset reference: defer to the *.tsx parser
        tile_set = self._parse_tsx(tile_set.source, tile_set, world_map)
    else:
        tile_set = self._get_tile_set(
            tile_set_node, tile_set, self.map_file_name)
    world_map.tile_sets.append(tile_set)
def _parse_tsx(self, file_name, tile_set, world_map):
    """Load an external tileset (*.tsx) file and merge it into *tile_set*.

    ISSUE 5: the *.tsx path may be relative to the *.tmx map file, so a
    relative path is resolved against ``self.map_file_name`` first.
    """
    if not os.path.isabs(file_name):
        file_name = self._get_abs_path(self.map_file_name, file_name)
    # Use a context manager instead of the manual try/finally close
    # (the original predates "with" and also shadowed the historical
    # "file" builtin).
    with open(file_name, "rb") as tsx_file:
        dom = minidom.parseString(tsx_file.read())
    # only the first <tileset> node is used
    for node in self._get_nodes(dom.childNodes, 'tileset'):
        tile_set = self._get_tile_set(node, tile_set, file_name)
        break
    return tile_set
def _get_tile_set(self, tile_set_node, tile_set, base_path):
    """Fill *tile_set* from a <tileset> node and return it.

    Child <image> and <tile> nodes are parsed first; the node's own
    attributes are applied afterwards.
    """
    for image_node in self._get_nodes(tile_set_node.childNodes, 'image'):
        self._build_tile_set_image(image_node, tile_set, base_path)
    for tile_node in self._get_nodes(tile_set_node.childNodes, 'tile'):
        self._build_tile_set_tile(tile_node, tile_set)
    self._set_attributes(tile_set_node, tile_set)
    return tile_set
def _build_tile_set_image(self, image_node, tile_set, base_path):
    """Parse an <image> node belonging to a tileset."""
    image = TileImage()
    self._set_attributes(image_node, image)
    # id of TileImage has to be set! -> Tile.TileImage will only have id set
    for data_node in self._get_nodes(image_node.childNodes, 'data'):
        self._set_attributes(data_node, image)
        image.content = data_node.childNodes[0].nodeValue
    # ISSUE 5: image paths may be relative to the file that defines them
    image.source = self._get_abs_path(base_path, image.source)
    tile_set.images.append(image)
def _get_abs_path(self, base, relative):
if os.path.isabs(relative):
return relative
if os.path.isfile(base):
base = os.path.dirname(base)
return os.path.abspath(os.path.join(base, relative))
def _build_tile_set_tile(self, tile_set_node, tile_set):
    """Parse a <tile> node inside a tileset."""
    tile = Tile()
    self._set_attributes(tile_set_node, tile)
    for image_node in self._get_nodes(tile_set_node.childNodes, 'image'):
        self._build_tile_set_tile_image(image_node, tile)
    tile_set.tiles.append(tile)
def _build_tile_set_tile_image(self, tile_node, tile):
tile_image = TileImage()
self._set_attributes(tile_node, tile_image)
for node in self._get_nodes(tile_node.childNodes, 'data'):
self._set_attributes(node, tile_image)
tile_image.content = node.childNodes[0].nodeValue
tile.images.append(tile_image)
    def _build_layer(self, layer_node, world_map):
        """Build a TileLayer from a <layer> node and append it to *world_map*.

        A layer's <data> is either an encoded text payload (kept verbatim in
        ``encoded_content`` for later decoding) or plain <tile gid="..."/>
        child elements, whose gid strings are collected into a list.
        """
        layer = TileLayer()
        self._set_attributes(layer_node, layer)
        for node in self._get_nodes(layer_node.childNodes, 'data'):
            self._set_attributes(node, layer)
            if layer.encoding:
                # keep the encoded payload untouched; decoding happens later
                layer.encoded_content = node.lastChild.nodeValue
            else:
                #print 'has childnodes', node.hasChildNodes()
                # unencoded layer: one <tile> element per cell
                layer.encoded_content = []
                for child in node.childNodes:
                    if child.nodeType == Node.ELEMENT_NODE and \
                        child.nodeName == "tile":
                        val = child.attributes["gid"].nodeValue
                        #print child, val
                        layer.encoded_content.append(val)
        world_map.layers.append(layer)
def _build_image_layer(self, image_layer_node, world_map):
imagesNodes = self._get_nodes(image_layer_node.childNodes, "image")
image_layer = ImageLayer()
self._set_attributes(image_layer_node, image_layer)
for imageNode in imagesNodes:
self._set_attributes(imageNode, image_layer)
world_map.layers.append(image_layer)
def _build_world_map(self, world_node):
world_map = TileMap()
self._set_attributes(world_node, world_map)
if world_map.version != "1.0":
raise VersionError('this parser was made for maps of version 1.0, found version %s' % world_map.version)
for node in self._get_nodes(world_node.childNodes, 'tileset'):
self._build_tile_set(node, world_map)
for node in self._get_nodes(world_node.childNodes, 'layer'):
self._build_layer(node, world_map)
for node in self._get_nodes(world_node.childNodes, 'objectgroup'):
self._build_object_groups(node, world_map)
for node in self._get_nodes(world_node.childNodes, 'imagelayer'):
self._build_image_layer(node, world_map)
return world_map
def _build_object_groups(self, object_group_node, world_map):
object_group = MapObjectGroupLayer()
self._set_attributes(object_group_node, object_group)
for node in self._get_nodes(object_group_node.childNodes, 'object'):
tiled_object = MapObject()
self._set_attributes(node, tiled_object)
for img_node in self._get_nodes(node.childNodes, 'image'):
tiled_object.image_source = \
img_node.attributes['source'].nodeValue
object_group.objects.append(tiled_object)
# ISSUE 9
world_map.layers.append(object_group)
# -- helpers -- #
def _get_nodes(self, nodes, name):
for node in nodes:
if node.nodeType == Node.ELEMENT_NODE and node.nodeName == name:
yield node
def _set_attributes(self, node, obj):
attrs = node.attributes
for attr_name in list(attrs.keys()):
setattr(obj, attr_name, attrs.get(attr_name).nodeValue)
self._get_properties(node, obj)
def _get_properties(self, node, obj):
props = {}
for properties_node in self._get_nodes(node.childNodes, 'properties'):
for property_node in self._get_nodes(properties_node.childNodes, 'property'):
try:
props[property_node.attributes['name'].nodeValue] = \
property_node.attributes['value'].nodeValue
except KeyError:
props[property_node.attributes['name'].nodeValue] = \
property_node.lastChild.nodeValue
obj.properties.update(props)
# -- parsers -- #
    def parse(self, file_name):
        """
        Parses the given map. Does no decoding nor loading of the data.
        :param file_name: path to the .tmx file (made absolute and stored
            on ``self.map_file_name`` as a side effect).
        :return: instance of TileMap
        """
        # would be more elegant to use
        # "with open(file_name, "rb") as tmx_file:" but that is python 2.6
        self.map_file_name = os.path.abspath(file_name)
        tmx_file = None
        try:
            tmx_file = open(self.map_file_name, "rb")
            dom = minidom.parseString(tmx_file.read())
        finally:
            # tmx_file is still None if open() raised; only close on success
            if tmx_file:
                tmx_file.close()
        # NOTE(review): if the document contains no <map> element, world_map
        # is never bound and the line after the loop raises NameError —
        # confirm inputs are always well-formed TMX documents.
        for node in self._get_nodes(dom.childNodes, 'map'):
            world_map = self._build_world_map(node)
            break
        world_map.map_file_name = self.map_file_name
        world_map.convert()
        return world_map
def parse_decode(self, file_name):
"""
Parses the map but additionally decodes the data.
:return: instance of TileMap
"""
world_map = self.parse(file_name)
world_map.decode()
return world_map
# -----------------------------------------------------------------------------
class AbstractResourceLoader(object):
    """
    Abstract base class for the resource loader.

    Subclasses implement the ``_load_image*`` hooks for a concrete
    framework (e.g. pygame); :meth:`load` then fills ``self.indexed_tiles``
    with a ``{gid: (offsetx, offsety, image)}`` mapping for a parsed map.
    """

    # gid bit flags set by the Tiled editor for flipped tiles
    FLIP_X = 1 << 31
    FLIP_Y = 1 << 30

    def __init__(self):
        self.indexed_tiles = {}  # {gid: (offsetx, offsety, image)}
        self.world_map = None
        self._img_cache = {}

    def _load_image(self, filename, colorkey=None):  # -> image
        """
        Load a single image.
        :Parameters:
            filename : string
                Path to the file to be loaded.
            colorkey : tuple
                The (r, g, b) color that should be used as colorkey
                (or magic color).
                Default: None
        :rtype: image
        """
        raise NotImplementedError('This should be implemented in a inherited class')

    def _load_image_file_like(self, file_like_obj, colorkey=None):  # -> image
        """
        Load a image from a file like object.
        :Parameters:
            file_like_obj : file
                This is the file like object to load the image from.
            colorkey : tuple
                The (r, g, b) color that should be used as colorkey
                (or magic color).
                Default: None
        :rtype: image
        """
        raise NotImplementedError('This should be implemented in a inherited class')

    def _load_image_parts(self, filename, margin, spacing, tilewidth, tileheight, colorkey=None):  # -> [images]
        """
        Load different tile images from one source image.
        :Parameters:
            filename : string
                Path to image to be loaded.
            margin : int
                The margin around the image.
            spacing : int
                The space between the tile images.
            tilewidth : int
                The width of a single tile.
            tileheight : int
                The height of a single tile.
            colorkey : tuple
                The (r, g, b) color that should be used as colorkey
                (or magic color).
                Default: None
        Luckily that iteration is so easy in python::
            ...
            w, h = image_size
            for y in xrange(margin, h, tileheight + spacing):
                for x in xrange(margin, w, tilewidth + spacing):
                    ...
        :rtype: a list of images
        """
        raise NotImplementedError('This should be implemented in a inherited class')

    def load(self, tile_map):
        """Index every tile of *tile_map* into ``self.indexed_tiles``."""
        self.world_map = tile_map
        for tile_set in tile_map.tile_sets:
            # do images first, because tiles could reference it
            for img in tile_set.images:
                if img.source:
                    self._load_image_from_source(tile_map, tile_set, img)
                else:
                    tile_set.indexed_images[img.id] = self._load_tile_image(img)
            # tiles
            for tile in tile_set.tiles:
                for img in tile.images:
                    if not img.content and not img.source:
                        # only image id set: reuse the tileset-level image
                        indexed_img = tile_set.indexed_images[img.id]
                        self.indexed_tiles[int(tile_set.firstgid) + int(tile.id)] = (0, 0, indexed_img)
                    else:
                        if img.source:
                            self._load_image_from_source(tile_map, tile_set, img)
                        else:
                            indexed_img = self._load_tile_image(img)
                            self.indexed_tiles[int(tile_set.firstgid) + int(tile.id)] = (0, 0, indexed_img)

    def _load_image_from_source(self, tile_map, tile_set, a_tile_image):
        """Slice a tileset source image into tiles and index them by gid."""
        # image path is relative to the map file
        img_path = os.path.join(os.path.dirname(tile_map.map_file_name),
                                a_tile_image.source)
        tile_width = int(tile_map.tilewidth)
        tile_height = int(tile_map.tileheight)
        # BUGFIX: the guards previously tested the *other* dimension
        # (checking tileheight before assigning tilewidth and vice versa),
        # so a tileset defining only one of the two either crashed on
        # int(None) or silently ignored its override.
        if tile_set.tilewidth:
            tile_width = int(tile_set.tilewidth)
        if tile_set.tileheight:
            tile_height = int(tile_set.tileheight)
        offsetx = 0
        offsety = 0
        # the offset is used for pygame because the origin is topleft in pygame
        # (assumes tile_map.tileheight is numeric at this point — presumably
        # TileMap.convert() has run; confirm before relying on it)
        if tile_height > tile_map.tileheight:
            offsety = tile_height - tile_map.tileheight
        for idx, image in enumerate(self._load_image_parts(
                img_path, tile_set.margin, tile_set.spacing,
                tile_width, tile_height, a_tile_image.trans)):
            self.indexed_tiles[int(tile_set.firstgid) + idx] = \
                                                (offsetx, -offsety, image)

    def _load_tile_image(self, a_tile_image):
        """Load a single embedded tile image, decoding base64 content if needed."""
        img_str = a_tile_image.content
        if a_tile_image.encoding:
            if a_tile_image.encoding == 'base64':
                img_str = decode_base64(a_tile_image.content)
            else:
                raise Exception('unknown image encoding %s' % a_tile_image.encoding)
        sio = StringIO(img_str)
        new_image = self._load_image_file_like(sio, a_tile_image.trans)
        return new_image
# -----------------------------------------------------------------------------
| {
"content_hash": "fd95ba9b1079df3a2ae97166710aa682",
"timestamp": "",
"source": "github",
"line_count": 932,
"max_line_length": 116,
"avg_line_length": 35.39914163090129,
"alnum_prop": 0.5240664403491756,
"repo_name": "Rastagong/narro",
"id": "bcf5144ae47484a3e86003722cdcc32cff85a523",
"size": "33017",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tmxreader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "584066"
}
],
"symlink_target": ""
} |
"""
Django settings for september project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '6-#)p1!8(bm5sfbprm6wa9h$%2)7+n7g!^0!fbr60&_&m!75z8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'contacts.apps.ContactsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_static_jquery',
'bootstrap3',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'september.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'september/templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'september.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'september/static')
]
| {
"content_hash": "5dabd21bbc69146c50985c5560e9e32a",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 91,
"avg_line_length": 26.206349206349206,
"alnum_prop": 0.6868564506359782,
"repo_name": "kikeh/contacts",
"id": "cce1fd66bf08ad42e3d9fa679bcefec4e7b0fdde",
"size": "3302",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "september/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "88353"
},
{
"name": "HTML",
"bytes": "13367"
},
{
"name": "Python",
"bytes": "11976"
}
],
"symlink_target": ""
} |
import inspect
class Calculator(object):
    """
    Responsible for calculating statistics given the rudimentary stats
    that were queried from the DB.

    ``rudimentary_stats`` is a mapping keyed by SQL aggregate expressions
    such as ``'sum(fg)'`` and ``'avg(fga)'``.  Every public ``calc_*``
    method returns a ``(label, value)`` tuple where ``value`` is a
    display-ready string.
    """

    def __init__(self, rudimentary_stats):
        self.rudimentary_stats = rudimentary_stats
        self.members = inspect.getmembers(self)
        self.member_names = list(map(lambda mem: mem[0], self.members))

    def calculate(self, stat_string):
        """Dispatch to ``calc_<stat_string>``.

        :raises AttributeError: when no such calculation method exists.
        """
        method_name = 'calc_' + stat_string
        if method_name not in self.member_names:
            raise AttributeError
        calculation = getattr(self, method_name)
        return calculation()

    # -- internal helpers -- #

    def _sum(self, col):
        """Raw ``sum(col)`` aggregate from the queried stats."""
        return self.rudimentary_stats['sum(%s)' % col]

    def _avg(self, col):
        """Raw ``avg(col)`` aggregate from the queried stats."""
        return self.rudimentary_stats['avg(%s)' % col]

    def _sum_str(self, col):
        """``sum(col)`` as a display string."""
        return str(self._sum(col))

    def _avg_str(self, col):
        """``avg(col)`` rounded to 2 decimal places, as a display string."""
        return str(round(self._avg(col), 2))

    def _pct_str(self, made, attempted):
        """Percentage string such as ``'45.67%'``.

        NOTE: raises ZeroDivisionError when ``attempted`` is 0, matching
        the behavior of the original inline calculations.
        """
        return str(round((made / attempted) * 100, 2)) + '%'

    # -- Time Played Calculations -- #

    def calc_tot_time(self):
        tot_secs_played = int(self._sum('seconds'))
        minutes = str(tot_secs_played // 60).zfill(2)
        seconds = str(tot_secs_played % 60).zfill(2)
        return "Total time played", minutes + ' minutes, ' + seconds + ' seconds'

    def calc_avg_time(self):
        avg_secs_played = int(self._avg('seconds'))
        minutes = str(avg_secs_played // 60).zfill(2)
        seconds = str(avg_secs_played % 60).zfill(2)
        return "Minutes per game", minutes + ':' + seconds

    # -- Game Count Calculations -- #

    def calc_game_count(self):
        return "Games played", str(self.rudimentary_stats['count(*)'])

    # -- Field Goal Calculations -- #

    def calc_tot_fg(self):
        return "Total FG", self._sum_str('fg')

    def calc_avg_fg(self):
        return "FG per game", self._avg_str('fg')

    def calc_tot_fga(self):
        return "FG attempts", self._sum_str('fga')

    def calc_avg_fga(self):
        return "FG attempts per game", self._avg_str('fga')

    def calc_fg_pct(self):
        return "FG%", self._pct_str(self._sum('fg'), self._sum('fga'))

    # -- 2 Point Field Goal Calculations -- #
    # 2-pt numbers are derived: total FG minus 3-pt FG.

    def calc_tot_fg2(self):
        return "Total 2-pt FG", str(self._sum('fg') - self._sum('fg3'))

    def calc_avg_fg2(self):
        return "2-pt FG per game", str(round(self._avg('fg') - self._avg('fg3'), 2))

    def calc_tot_fg2a(self):
        return "2-pt FG attempts", str(self._sum('fga') - self._sum('fg3a'))

    def calc_avg_fg2a(self):
        return "2-pt FG attempts per game", str(round(self._avg('fga') - self._avg('fg3a'), 2))

    def calc_fg2_pct(self):
        made = self._sum('fg') - self._sum('fg3')
        attempted = self._sum('fga') - self._sum('fg3a')
        return "2-pt FG%", self._pct_str(made, attempted)

    # -- 3 Point Field Goal Calculations -- #

    def calc_tot_fg3(self):
        return "Total 3-pt FG", self._sum_str('fg3')

    def calc_avg_fg3(self):
        # BUGFIX: previously returned float('%.2g' % avg) — a float with 2
        # *significant figures* — while every sibling returns a string
        # rounded to 2 decimal places.
        return "3-pt FG per game", self._avg_str('fg3')

    def calc_tot_fg3a(self):
        return "3-pt FG attempts", self._sum_str('fg3a')

    def calc_avg_fg3a(self):
        return "3-pt FG attempts per game", self._avg_str('fg3a')

    def calc_fg3_pct(self):
        return "3-pt FG%", self._pct_str(self._sum('fg3'), self._sum('fg3a'))

    # -- Free Throw Calculations -- #

    def calc_tot_ft(self):
        return "Total FT", self._sum_str('ft')

    def calc_avg_ft(self):
        return "FT per game", self._avg_str('ft')

    def calc_tot_fta(self):
        return "Total FT attempts", self._sum_str('fta')

    def calc_avg_fta(self):
        return "FT attempts per game", self._avg_str('fta')

    def calc_ft_pct(self):
        return "FT %", self._pct_str(self._sum('ft'), self._sum('fta'))

    # -- Points Calculations -- #

    def calc_tot_pts(self):
        tot_fg3 = self._sum('fg3')
        tot_fg2 = self._sum('fg') - tot_fg3
        tot_pts = 2 * tot_fg2 + 3 * tot_fg3 + self._sum('ft')
        return "Total points", str(tot_pts)

    def calc_avg_pts(self):
        avg_fg3 = self._avg('fg3')
        avg_fg2 = self._avg('fg') - avg_fg3
        ppg = str(round(2 * avg_fg2 + 3 * avg_fg3 + self._avg('ft'), 2))
        return "PPG", ppg

    # -- Rebound Calculations -- #

    def calc_tot_orb(self):
        return "Total offensive rebounds", self._sum_str('orb')

    def calc_avg_orb(self):
        return "Offensive rebounds per game", self._avg_str('orb')

    def calc_tot_drb(self):
        # BUGFIX: previously returned the raw (non-string) aggregate,
        # inconsistent with every other calc_tot_* method.
        return "Total defensive rebounds", self._sum_str('drb')

    def calc_avg_drb(self):
        return "Defensive rebounds per game", self._avg_str('drb')

    def calc_tot_reb(self):
        return "Total rebounds", str(self._sum('orb') + self._sum('drb'))

    def calc_avg_reb(self):
        return "Rebounds per game", str(round(self._avg('orb') + self._avg('drb'), 2))

    # -- Assist Calculations -- #

    def calc_tot_ast(self):
        return "Total assists", self._sum_str('ast')

    def calc_avg_ast(self):
        return "Assists per game", self._avg_str('ast')

    # -- Block Calculations -- #

    def calc_tot_blk(self):
        return "Total blocks", self._sum_str('blk')

    def calc_avg_blk(self):
        return "Blocks per game", self._avg_str('blk')

    # -- Steal Calculations -- #

    def calc_tot_stl(self):
        return "Total steals", self._sum_str('stl')

    def calc_avg_stl(self):
        return "Steals per game", self._avg_str('stl')

    # -- Miscellaneous Calculations -- #

    def calc_tot_tov(self):
        return "Total turnovers", self._sum_str('tov')

    def calc_avg_tov(self):
        return "Turnovers per game", self._avg_str('tov')

    def calc_tot_pf(self):
        return "Total fouls", self._sum_str('pf')

    def calc_avg_pf(self):
        return "Fouls per game", self._avg_str('pf')

    # -- Advanced Calculations -- #

    def calc_plus_min(self):
        return "+/-", self._sum_str('plus_minus')

    def calc_usg(self):
        return "Usage rating", self._avg_str('usg')

    def calc_ortg(self):
        return "Offensive rating", self._avg_str('ortg')

    def calc_drtg(self):
        return "Defensive rating", self._avg_str('drtg')

    def calc_ts(self):
        # true shooting: points per weighted shot attempt (FTA weighted 0.44)
        tot_fg3 = self._sum('fg3')
        tot_pts = (self._sum('fg') - tot_fg3) * 2 + tot_fg3 * 3 + self._sum('ft')
        weighted_attempts = 2 * (self._sum('fga') + 0.44 * self._sum('fta'))
        return "True Shooting", self._pct_str(tot_pts, weighted_attempts)
| {
"content_hash": "fd26ba86c49d5cd0e5119e8673d2545b",
"timestamp": "",
"source": "github",
"line_count": 321,
"max_line_length": 78,
"avg_line_length": 29.12772585669782,
"alnum_prop": 0.5854545454545454,
"repo_name": "furioustiles/HooperHub",
"id": "3f485dc962cc29fe45b96050a2619af71690a3fa",
"size": "9350",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hooperhub/util/calculator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "51791"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import mock
import pytest
from opbeat.contrib.celery import CeleryClient
from tests.utils.compat import TestCase
try:
from celery.tests.utils import with_eager_tasks
has_with_eager_tasks = True
except ImportError:
from opbeat.utils.compat import noop_decorator as with_eager_tasks
has_with_eager_tasks = False
class ClientTest(TestCase):
    """Tests for CeleryClient's task-based transport."""

    def setUp(self):
        self.client = CeleryClient(
            organization_id='organization_id',
            app_id='app_id',
            secret_token='secret'
        )

    @mock.patch('opbeat.contrib.celery.CeleryClient.send_raw')
    def test_send_encoded(self, send_raw):
        """send_encoded must defer to the send_raw celery task."""
        self.client.send_encoded('foo')
        send_raw.delay.assert_called_once_with('foo')

    @mock.patch('opbeat.contrib.celery.CeleryClient.send_raw')
    def test_without_eager(self, send_raw):
        """
        Integration test to ensure it propagates all the way down
        and calls delay on the task.
        """
        self.client.capture('Message', message='test')
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(send_raw.delay.call_count, 1)

    @pytest.mark.skipif(not has_with_eager_tasks,
                        reason='with_eager_tasks is not available')
    @with_eager_tasks
    @mock.patch('opbeat.base.Client.send_encoded')
    def test_with_eager(self, send_encoded):
        """
        Integration test to ensure it propagates all the way down
        and calls the parent client's send_encoded method.
        """
        self.client.capture('Message', message='test')
        self.assertEqual(send_encoded.call_count, 1)
| {
"content_hash": "fa908a2bd5f3ef7cbf18995be9ab58bb",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 70,
"avg_line_length": 31.153846153846153,
"alnum_prop": 0.6574074074074074,
"repo_name": "patrys/opbeat_python",
"id": "853969185fef74d551e57c5fec7c76311e6f8a1b",
"size": "1644",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/contrib/celery/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "81877"
},
{
"name": "HTML",
"bytes": "377"
},
{
"name": "Makefile",
"bytes": "287"
},
{
"name": "Python",
"bytes": "482619"
},
{
"name": "Shell",
"bytes": "1983"
}
],
"symlink_target": ""
} |
"""
WSGI config for busquecursos project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "busquecursos.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| {
"content_hash": "c948ed4ecb0d1344f4388ec9da861c72",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 28.5,
"alnum_prop": 0.7794486215538847,
"repo_name": "ProfessionalIT/products",
"id": "c8c0415ad148219b414c5b934fa45797e852baf7",
"size": "399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "busquecursos/busquecursos/busquecursos/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3254"
}
],
"symlink_target": ""
} |
'''Parser for the plain text version of congressional record documents
outputs the text marked up with xml
'''
import datetime
import os
import argparse
from .fdsys.cr_parser import parse_directory, parse_single
from .fdsys.simple_scrape import find_fdsys
def daterange(start, end, date_format=None):
    """Yield every date from *start* to *end* inclusive.

    Values are datetime objects, or strings when *date_format* (a strftime
    pattern) is supplied.  The span length uses abs(), but steps always go
    forward from *start*, matching the original behavior.
    """
    total_days = abs((end - start).days) + 1
    for offset in range(total_days):
        current = start + datetime.timedelta(days=offset)
        if date_format:
            current = datetime.datetime.strftime(current, date_format)
        yield current
def parsedate(s):
    """Convert a ``YYYY-MM-DD`` string (surrounding whitespace tolerated)
    into a datetime object."""
    cleaned = s.strip()
    return datetime.datetime.strptime(cleaned, "%Y-%m-%d")
def main():
    """Command line entry point for the congressional record parser.

    Depending on the arguments, records are downloaded from FDsys and parsed
    for one or more days, parsed from a whole directory, or parsed from a
    single file.
    """
    default_outdir = os.path.join(os.getcwd(), 'output')
    parser = argparse.ArgumentParser(
        prog="parsecr",
        description='Parse arguments for the Congressional Record Parser \n\
        University of Tennessee (UTK) Parser Updates by: \n\
        Jace Prince\n\
        Dr. Nathan Kelly\n')
    parser.add_argument('days', type=str, nargs='*',
                        help='A positional argument for dates. This can be a single date, a list \
        of dates or a range of dates. Records will be Make sure dates are in \
        YYYY-MM-DD format. Date ranges should be given as start date then end \
        date YYYY-MM-DD:YYYY-MM-DD. For several specific days, write out the \
        dates in the correct format with a space between each date.\n\
        The parser will look for a previous file to see if it has been downloaded, \
        if not, it will download the file from fdsys.')
    parser.add_argument('-f', '--infile', dest='infile', action='store',
                        help='Parse a single txt or htm file.')
    parser.add_argument('-id', '--indir', dest='indir', action='store',
                        help='An entire directory to traverse and parse, can replace infile')
    parser.add_argument('-od', '--outdir', dest='outdir', action='store',
                        help='An output directory for the parsed content')
    parser.add_argument('-l', '--logdir', dest='logdir', action='store',
                        help='An output directory for logs')
    parser.add_argument('--interactive', dest='interactive', action='store_true',
                        help='Step through files and decide whether or not to parse each one')
    parser.add_argument('--force', dest='force', action='store_true',
                        help='Force documents to be downloaded id the txt files already exist.')
    parser.add_argument('--ntf', '-no_text_files', dest='notext', action='store_true',
                        help='Remove the text version of the documents.(The .htm version is automatically removed)\
                        EVERYING in the indir folder will be removed.')
    args = parser.parse_args()
    # Scrapes files and creates a directory from FDsys if no file exists in source folder
    if args.days:
        if not args.outdir:
            args.outdir = default_outdir
        no_record = []
        dates = []
        # expand start:end ranges; collect plain dates as-is
        for date_arg in args.days:
            if ':' in date_arg:
                start_end = date_arg.split(':')
                if len(start_end) == 1:
                    # Defensive: split(':') on a string containing ':' always
                    # yields >= 2 parts, so this cannot normally trigger.
                    # BUGFIX: previously appended the undefined name
                    # `date_range`, which raised NameError.
                    dates.append(date_arg)
                else:
                    begin = parsedate(start_end[0])
                    end = parsedate(start_end[1])
                    dates.extend(daterange(begin, end, "%Y-%m-%d"))
            else:
                dates.append(date_arg)
        for day in dates:
            doc_path = find_fdsys(day, force=args.force, outdir=args.outdir)
            # did not return records
            if doc_path is None:
                no_record.append(day)
            else:
                file_path = os.path.dirname(doc_path)
                if not args.logdir:
                    args.logdir = os.path.realpath(os.path.join(file_path, '__log'))
                parsed_path = os.path.realpath(os.path.join(file_path, '__parsed'))
                parse_directory(doc_path, interactive=args.interactive,
                                logdir=args.logdir, outdir=parsed_path)
                if args.notext:
                    # drop the intermediate text/markup files after parsing
                    for filename in os.listdir(doc_path):
                        if filename.endswith('.txt') or filename.endswith('.xml') or filename.endswith('.htm'):
                            file_path = os.path.join(doc_path, filename)
                            os.remove(file_path)
                    os.rmdir(doc_path)
        if len(no_record) > 0:
            # BUGFIX: was a Python 2 print statement; this form works on 2 and 3.
            print("No results were found for the following day/s: %s " % (no_record))
    # Deal with directory case:
    elif args.indir:
        if not args.logdir:
            args.logdir = os.path.realpath(os.path.join(args.indir, '__log'))
        if not args.outdir:
            args.outdir = os.path.realpath(os.path.join(args.indir, '__parsed'))
        parse_directory(args.indir, interactive=args.interactive,
                        logdir=args.logdir, outdir=args.outdir)
        if args.notext:
            # BUGFIX: this branch referenced the undefined name `doc_path`
            # (NameError at runtime); the directory being cleaned is args.indir.
            for filename in os.listdir(args.indir):
                if filename.endswith('.txt') or filename.endswith('.xml') or filename.endswith('.htm'):
                    file_path = os.path.join(args.indir, filename)
                    os.remove(file_path)
            os.rmdir(args.indir)
    # Deal with single file case:
    elif args.infile:
        if not args.logdir:
            args.logdir = os.path.realpath(os.path.join(os.path.dirname(args.infile), '__log'))
        if not args.outdir:
            args.outdir = os.path.realpath(os.path.join(os.path.dirname(args.infile), '__parsed'))
        parse_single(args.infile, logdir=args.logdir, outdir=args.outdir)
        if args.notext:
            os.remove(args.infile)
    else:
        msg = 'Either a date (YYYY-MM-DD), --infile argument or the --indir flag is required!'
        parser.error(msg)


if __name__ == '__main__':
    main()
| {
"content_hash": "a3aa3d3067719232e40849f5f658cdfd",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 115,
"avg_line_length": 42.864285714285714,
"alnum_prop": 0.5674054324279286,
"repo_name": "jprinc16/congressional-record",
"id": "8e574efb59e91a8c6c824a4a8d26f5826fc23d79",
"size": "6024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "congressionalrecord/cli.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "946"
},
{
"name": "Python",
"bytes": "70594"
}
],
"symlink_target": ""
} |
class Solution(object):
    def kthSmallest(self, root, k):
        """
        Return the k-th smallest value (1-indexed) in a binary search tree.

        Improvement: the original materialized the full in-order traversal
        into a list and indexed it; this version walks in-order iteratively
        and stops as soon as the k-th node is visited, doing O(h + k) work
        instead of O(n).  For a valid 1 <= k <= size the result is identical.

        :type root: TreeNode
        :type k: int
        :rtype: int
        """
        stack = []
        node = root
        while stack or node:
            # descend to the leftmost unvisited node
            while node:
                stack.append(node)
                node = node.left
            node = stack.pop()
            k -= 1
            if k == 0:
                return node.val
            # continue the in-order walk in the right subtree
            node = node.right
| {
"content_hash": "9fda43fca453ac9ce2d4f958f004fd1c",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 36,
"avg_line_length": 26.1875,
"alnum_prop": 0.4677804295942721,
"repo_name": "young-geng/leet_code",
"id": "e0a206fe7e8cde3a5889bc3ebb994b7a1ce146d1",
"size": "482",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "problems/206_reverse-linked-list/230_kth-smallest-element-in-a-bst/main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "599"
},
{
"name": "Python",
"bytes": "111519"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
    # Select the project's settings module unless one is already configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mServer.settings")
    from django.core.management import execute_from_command_line
    # Delegate to Django's management CLI (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
| {
"content_hash": "6991094920775211993975946cdb12ab",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 71,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.7105263157894737,
"repo_name": "Jameeeees/Mag1C_baNd",
"id": "3112f90ffed587cb60e1b0b26be8a1f0e75313e0",
"size": "250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mServer/manage.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "960"
},
{
"name": "Java",
"bytes": "522199"
},
{
"name": "Python",
"bytes": "70708"
}
],
"symlink_target": ""
} |
import json
from django.utils.translation import gettext as _
from jsonobject.exceptions import BadValueError
from corehq.apps.reports_core.filters import (
Choice,
ChoiceListFilter,
DatespanFilter,
DynamicChoiceListFilter,
LocationDrilldownFilter,
MultiFieldDynamicChoiceListFilter,
NumericFilter,
PreFilter,
QuarterFilter,
)
from corehq.apps.userreports.exceptions import BadSpecError
from corehq.apps.userreports.reports.filters.choice_providers import (
DATA_SOURCE_COLUMN,
LOCATION,
OWNER,
USER,
DataSourceColumnChoiceProvider,
LocationChoiceProvider,
MultiFieldDataSourceColumnChoiceProvider,
OwnerChoiceProvider,
UserChoiceProvider, DomainChoiceProvider, COMMCARE_PROJECT,
)
from corehq.apps.userreports.reports.filters.specs import (
ChoiceListFilterSpec,
DateFilterSpec,
DynamicChoiceListFilterSpec,
LocationDrilldownFilterSpec,
MultiFieldDynamicChoiceFilterSpec,
NumericFilterSpec,
PreFilterSpec,
QuarterFilterSpec,
)
from corehq.apps.userreports.reports.filters.values import (
NONE_CHOICE,
SHOW_ALL_CHOICE,
dynamic_choice_list_url,
)
def _build_date_filter(spec, report):
    """Construct a DatespanFilter from a raw filter spec dict."""
    date_spec = DateFilterSpec.wrap(spec)
    return DatespanFilter(
        name=date_spec.slug,
        label=date_spec.get_display(),
        compare_as_string=date_spec.compare_as_string,
    )
def _build_quarter_filter(spec, report):
    """Construct a QuarterFilter from a raw filter spec dict."""
    quarter_spec = QuarterFilterSpec.wrap(spec)
    return QuarterFilter(
        name=quarter_spec.slug,
        label=quarter_spec.get_display(),
        show_all=quarter_spec.show_all,
    )
def _build_numeric_filter(spec, report):
    """Construct a NumericFilter from a raw filter spec dict."""
    numeric_spec = NumericFilterSpec.wrap(spec)
    return NumericFilter(
        name=numeric_spec.slug,
        label=numeric_spec.get_display(),
    )
def _build_pre_filter(spec, report):
    """Construct a PreFilter (a fixed, non-interactive filter) from a spec."""
    pre_spec = PreFilterSpec.wrap(spec)
    return PreFilter(
        name=pre_spec.slug,
        datatype=pre_spec.datatype,
        pre_value=pre_spec.pre_value,
        pre_operator=pre_spec.pre_operator,
    )
def _build_choice_list_filter(spec, report):
    """Build a ChoiceListFilter with a static list of choices from the spec.

    A ``None`` choice value is replaced with the NONE_CHOICE sentinel, and an
    optional "Show all" choice is placed first when ``show_all`` is set.
    """
    wrapped = ChoiceListFilterSpec.wrap(spec)
    choices = []
    if wrapped.show_all:
        choices.append(Choice(SHOW_ALL_CHOICE, _('Show all')))
    for fixed_choice in wrapped.choices:
        value = fixed_choice.value if fixed_choice.value is not None else NONE_CHOICE
        choices.append(Choice(value, fixed_choice.get_display()))
    return ChoiceListFilter(
        name=wrapped.slug,
        field=wrapped.field,
        datatype=wrapped.datatype,
        label=wrapped.display,
        choices=choices,
    )
def _build_dynamic_choice_list_filter(spec, report):
    """Build a DynamicChoiceListFilter whose choices come from a choice provider.

    Raises BadSpecError when ``ancestor_expression`` is malformed or used with
    a non-location choice provider.
    """
    wrapped = DynamicChoiceListFilterSpec.wrap(spec)
    choice_provider_spec = wrapped.get_choice_provider_spec()
    choice_provider = FilterChoiceProviderFactory.from_spec(choice_provider_spec)(report, wrapped.slug)
    choice_provider.configure(choice_provider_spec)
    if wrapped.ancestor_expression:
        # Same validation as _build_location_drilldown_filter: when present,
        # ancestor_expression must have exactly these two keys.
        if set(wrapped.ancestor_expression.keys()) != {'field', 'location_type'}:
            raise BadSpecError(_(
                "'ancestor_expression' must be empty dictionary or have 'field', 'location_type' keys"))
        if not isinstance(choice_provider, LocationChoiceProvider):
            raise BadSpecError(_(
                "'ancestor_expression' is applicable only for location choices"
            ))
    return DynamicChoiceListFilter(
        name=wrapped.slug,
        datatype=wrapped.datatype,
        field=wrapped.field,
        label=wrapped.display,
        show_all=wrapped.show_all,
        url_generator=dynamic_choice_list_url,
        choice_provider=choice_provider,
        ancestor_expression=wrapped.ancestor_expression,
    )
def _build_multi_field_dynamic_choice_list_filter(spec, report):
    """Build a MultiFieldDynamicChoiceListFilter backed by a choice provider."""
    wrapped = MultiFieldDynamicChoiceFilterSpec.wrap(spec)
    provider_spec = wrapped.get_choice_provider_spec()
    provider_class = MultiFieldChoiceProviderFactory.from_spec(provider_spec)
    choice_provider = provider_class(report, wrapped.slug)
    choice_provider.configure(provider_spec)
    return MultiFieldDynamicChoiceListFilter(
        name=wrapped.slug,
        datatype=wrapped.datatype,
        fields=wrapped.fields,
        label=wrapped.display,
        show_all=wrapped.show_all,
        url_generator=dynamic_choice_list_url,
        choice_provider=choice_provider,
    )
def _build_location_drilldown_filter(spec, report):
    """Build a LocationDrilldownFilter scoped to the report's domain."""
    wrapped = LocationDrilldownFilterSpec.wrap(spec)
    # ancestor_expression, when provided, must have exactly these two keys.
    ancestor = wrapped.ancestor_expression
    if ancestor and set(ancestor.keys()) != {'field', 'location_type'}:
        raise BadSpecError(_(
            "'ancestor_expression' must be empty dictionary or have 'field', 'location_type' keys"))
    return LocationDrilldownFilter(
        name=wrapped.slug,
        datatype=wrapped.datatype,
        field=wrapped.field,
        label=wrapped.display,
        domain=report.domain,
        include_descendants=wrapped.include_descendants,
        max_drilldown_levels=wrapped.max_drilldown_levels,
        ancestor_expression=wrapped.ancestor_expression,
    )
class ReportFilterFactory(object):
    """Builds report-filter objects from their JSON specs.

    ``constructor_map`` maps a spec's ``type`` value to its builder function.
    """
    constructor_map = {
        'date': _build_date_filter,
        'quarter': _build_quarter_filter,
        'pre': _build_pre_filter,
        'choice_list': _build_choice_list_filter,
        'dynamic_choice_list': _build_dynamic_choice_list_filter,
        'multi_field_dynamic_choice_list': _build_multi_field_dynamic_choice_list_filter,
        'numeric': _build_numeric_filter,
        'location_drilldown': _build_location_drilldown_filter,
    }

    @classmethod
    def from_spec(cls, spec, report=None):
        """Validate ``spec`` and delegate to the matching builder."""
        cls.validate_spec(spec)
        build = cls.constructor_map[spec['type']]
        try:
            return build(spec, report)
        except (AssertionError, BadValueError) as err:
            # Builder failures are surfaced as spec errors with the full spec
            # embedded for easier debugging.
            raise BadSpecError(_('Problem creating report filter from spec: {}, message is: {}').format(
                json.dumps(spec, indent=2),
                str(err),
            ))

    @classmethod
    def validate_spec(cls, spec):
        """Raise BadSpecError unless the spec's type has a registered builder."""
        if spec.get('type') in cls.constructor_map:
            return
        raise BadSpecError(
            _('Illegal report filter type: {0}, must be one of the following choice: ({1})').format(
                spec.get('type', _('(missing from spec)')),
                ', '.join(cls.constructor_map)
            )
        )
class FilterChoiceProviderFactory(object):
    """Resolves a choice-provider spec ``type`` to a ChoiceProvider class."""
    constructor_map = {
        DATA_SOURCE_COLUMN: DataSourceColumnChoiceProvider,
        LOCATION: LocationChoiceProvider,
        USER: UserChoiceProvider,
        OWNER: OwnerChoiceProvider,
        COMMCARE_PROJECT: DomainChoiceProvider
    }

    @classmethod
    def from_spec(cls, choice_provider_spec):
        """Return the provider class for the spec, defaulting to data-source columns."""
        provider_type = choice_provider_spec['type']
        return cls.constructor_map.get(provider_type, DataSourceColumnChoiceProvider)
class MultiFieldChoiceProviderFactory(FilterChoiceProviderFactory):
    # Same lookup behavior as the parent (from_spec is inherited), but
    # data-source-column choices use the multi-field provider. Unknown types
    # still fall back to the single-field DataSourceColumnChoiceProvider.
    constructor_map = {
        DATA_SOURCE_COLUMN: MultiFieldDataSourceColumnChoiceProvider,
        LOCATION: LocationChoiceProvider,
        USER: UserChoiceProvider,
        OWNER: OwnerChoiceProvider
    }
| {
"content_hash": "38aecef6311ae41b1aa3fc873714dcac",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 107,
"avg_line_length": 32.16517857142857,
"alnum_prop": 0.6773074253990284,
"repo_name": "dimagi/commcare-hq",
"id": "a01ec7d9dd330afc7fc05106ca1e6384489ede69",
"size": "7205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/userreports/reports/filters/factory.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
} |
from django.conf import settings
from django.db import transaction
from django.db.models import Q
from dimagi.utils.chunked import chunked
from dimagi.utils.couch import CriticalSection
from corehq.apps.data_interfaces.models import AutomaticUpdateRule
from corehq.apps.es import CaseES
from corehq.apps.sms import tasks as sms_tasks
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.models import CommCareCase
from corehq.messaging.scheduling.tasks import (
delete_schedule_instances_for_cases,
)
from corehq.messaging.scheduling.util import utcnow
from corehq.messaging.util import MessagingRuleProgressHelper
from corehq.sql_db.util import (
get_db_aliases_for_partitioned_query,
paginate_query,
paginate_query_across_partitioned_databases,
)
from corehq.util.celery_utils import no_result_task
from corehq.util.metrics.load_counters import case_load_counter
def get_sync_key(case_id):
    """Name of the lock that serializes messaging syncs for a single case."""
    return 'sync-case-for-messaging-%s' % (case_id,)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_QUEUE, acks_late=True,
                default_retry_delay=5 * 60, max_retries=12, bind=True)
def sync_case_for_messaging_task(self, domain, case_id):
    """Celery task: sync messaging state for one case.

    The per-case CriticalSection prevents concurrent syncs of the same case;
    any failure is re-queued via Celery retry (5-minute delay, 12 attempts).
    """
    try:
        with CriticalSection([get_sync_key(case_id)], timeout=5 * 60):
            sync_case_for_messaging(domain, case_id)
    except Exception as e:
        self.retry(exc=e)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True,
                default_retry_delay=5 * 60, max_retries=12, bind=True)
def sync_case_for_messaging_rule(self, domain, case_id, rule_id):
    """Celery task: run one messaging rule against one case, under the
    per-case lock, retrying on any error (5-minute delay, 12 attempts)."""
    try:
        with CriticalSection([get_sync_key(case_id)], timeout=5 * 60):
            _sync_case_for_messaging_rule(domain, case_id, rule_id)
    except Exception as e:
        self.retry(exc=e)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True)
def sync_case_chunk_for_messaging_rule(domain, case_id_chunk, rule_id):
    """Celery task: run one messaging rule over a chunk of case ids.

    A case that fails is handed off to the single-case task (which retries),
    so the rest of the chunk is still processed here.
    """
    for case_id in case_id_chunk:
        try:
            with CriticalSection([get_sync_key(case_id)], timeout=5 * 60):
                _sync_case_for_messaging_rule(domain, case_id, rule_id)
        except Exception:
            sync_case_for_messaging_rule.delay(domain, case_id, rule_id)
def sync_case_for_messaging(domain, case_id, get_rules=None):
    """Refresh messaging state (phone entries, schedules, rules) for one case.

    ``get_rules``, when given, is a callable ``(domain, case_type) -> rules``
    that overrides the default cached-rule lookup.
    """
    try:
        case = CommCareCase.objects.get_case(case_id, domain)
        sms_tasks.clear_case_caches(case)
    except CaseNotFound:
        # A missing case still gets its messaging artifacts cleaned up below.
        case = None
    case_load_counter("messaging_sync", domain)()
    update_messaging_for_case(domain, case_id, case)
    if case is not None:
        run_auto_update_rules_for_case(case, get_rules)
def update_messaging_for_case(domain, case_id, case):
    """Clear messaging state for missing/deleted cases; otherwise sync the
    case's phone entries (when phone entries are enabled in settings)."""
    if case is None or case.is_deleted:
        clear_messaging_for_case(domain, case_id)
        return
    if settings.USE_PHONE_ENTRIES:
        sms_tasks.sync_case_phone_number(case)
def clear_messaging_for_case(domain, case_id):
    """Remove the case's phone entries and any scheduled message instances."""
    sms_tasks.delete_phone_numbers_for_owners([case_id])
    delete_schedule_instances_for_cases(domain, [case_id])
def run_auto_update_rules_for_case(case, get_rules=None):
    """Run every scheduling auto-update rule that applies to the case's type.

    ``get_rules`` optionally overrides the default cached lookup.
    """
    if get_rules is None:
        all_rules = AutomaticUpdateRule.by_domain_cached(
            case.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
        by_case_type = AutomaticUpdateRule.organize_rules_by_case_type(all_rules)
        rules = by_case_type.get(case.type, [])
    else:
        rules = get_rules(case.domain, case.type)
    for rule in rules:
        # utcnow() is called per rule, matching each rule's run time.
        rule.run_rule(case, utcnow())
def _get_cached_rule(domain, rule_id):
    """Return the cached scheduling rule with pk == rule_id, or None when
    there is not exactly one match."""
    matches = [
        rule for rule in AutomaticUpdateRule.by_domain_cached(
            domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
        if rule.pk == rule_id
    ]
    return matches[0] if len(matches) == 1 else None
def _sync_case_for_messaging_rule(domain, case_id, rule_id):
    """Run one cached scheduling rule against one case and track progress."""
    case_load_counter("messaging_rule_sync", domain)()
    try:
        case = CommCareCase.objects.get_case(case_id, domain)
    except CaseNotFound:
        # Case is gone: clean up its messaging state instead of running rules.
        clear_messaging_for_case(domain, case_id)
        return
    rule = _get_cached_rule(domain, rule_id)
    if rule:
        rule.run_rule(case, utcnow())
    # The case counts as processed even when the rule lookup came up empty.
    MessagingRuleProgressHelper(rule_id).increment_current_case_count()
def initiate_messaging_rule_run(rule):
    """Lock an active rule for editing and queue its run after DB commit."""
    if not rule.active:
        return
    AutomaticUpdateRule.objects.filter(pk=rule.pk).update(locked_for_editing=True)
    # Queueing is deferred until the surrounding transaction commits.
    transaction.on_commit(lambda: run_messaging_rule.delay(rule.domain, rule.pk))
def paginated_case_ids(domain, case_type, db_alias=None):
    """Yield ids of non-deleted cases of ``case_type`` in ``domain``.

    With ``db_alias`` the query is limited to that shard; otherwise it runs
    across all partitioned databases.
    """
    query = Q(domain=domain, type=case_type, deleted=False)
    if db_alias:
        rows = paginate_query(db_alias, CommCareCase, query,
                              values=['case_id'], load_source='run_messaging_rule')
    else:
        rows = paginate_query_across_partitioned_databases(
            CommCareCase, query, values=['case_id'],
            load_source='run_messaging_rule')
    for row in rows:
        yield row[0]
def get_case_ids_for_messaging_rule(domain, case_type):
    # Generator of matching case ids across all partitions (no shard filter).
    return paginated_case_ids(domain, case_type)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE)
def set_rule_complete(rule_id):
    """Celery task: unlock the rule for editing and mark its progress complete."""
    AutomaticUpdateRule.objects.filter(pk=rule_id).update(locked_for_editing=False)
    MessagingRuleProgressHelper(rule_id).set_rule_complete()
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True,
                soft_time_limit=15 * settings.CELERY_TASK_SOFT_TIME_LIMIT)
def run_messaging_rule(domain, rule_id):
    """Celery task: fan a messaging-rule run out to one task per DB shard."""
    rule = _get_cached_rule(domain, rule_id)
    if not rule:
        return
    progress_helper = MessagingRuleProgressHelper(rule_id)
    # Expected case count comes from Elasticsearch, used to seed progress.
    total_cases_count = CaseES().domain(domain).case_type(rule.case_type).count()
    progress_helper.set_total_cases_to_be_processed(total_cases_count)
    db_aliases = get_db_aliases_for_partitioned_query()
    progress_helper.set_initial_progress(shard_count=len(db_aliases))
    for db_alias in db_aliases:
        run_messaging_rule_for_shard.delay(domain, rule_id, db_alias)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True,
                soft_time_limit=15 * settings.CELERY_TASK_SOFT_TIME_LIMIT)
def run_messaging_rule_for_shard(domain, rule_id, db_alias):
    """Celery task: queue chunked rule-sync tasks for all matching cases on
    one shard, honoring cancellation between chunks."""
    rule = _get_cached_rule(domain, rule_id)
    if not rule:
        return
    chunk_size = getattr(settings, 'MESSAGING_RULE_CASE_CHUNK_SIZE', 100)
    progress_helper = MessagingRuleProgressHelper(rule_id)
    if not progress_helper.is_canceled():
        for case_id_chunk in chunked(paginated_case_ids(domain, rule.case_type, db_alias), chunk_size):
            sync_case_chunk_for_messaging_rule.delay(domain, case_id_chunk, rule_id)
            # NOTE(review): presumably refreshes the progress key's TTL while
            # chunks are still being queued -- confirm against the helper.
            progress_helper.update_total_key_expiry()
            if progress_helper.is_canceled():
                break
    all_shards_complete = progress_helper.mark_shard_complete(db_alias)
    if all_shards_complete:
        # this should get triggered for the last shard
        set_rule_complete.delay(rule_id)
| {
"content_hash": "8032a5c0a60d582950e8ee61376d3010",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 110,
"avg_line_length": 38.362637362637365,
"alnum_prop": 0.6996562589515898,
"repo_name": "dimagi/commcare-hq",
"id": "13e96588964c8b6413412ee8ddc5367de7fd2bb5",
"size": "6982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/messaging/tasks.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
} |
import pygame
import sys
import math
#import .base
from .base.pygamewrapper import PyGameWrapper
from .utils.vec2d import vec2d
from .utils import percent_round_int
from pygame.constants import K_w, K_a, K_s, K_d
from .primitives import Player, Creep
class WaterWorld(PyGameWrapper):
    """
    Based on Karpathy's WaterWorld in `REINFORCEjs`_.

    The agent moves in four directions and absorbs creeps on contact:
    "GOOD" creeps yield the positive reward, "BAD" creeps the negative one.
    The game is over once no "GOOD" creeps remain.

    .. _REINFORCEjs: https://github.com/karpathy/reinforcejs

    Parameters
    ----------
    width : int
        Screen width.

    height : int
        Screen height, recommended to be same dimension as width.

    num_creeps : int (default: 3)
        The number of creeps on the screen at once.
    """

    def __init__(self,
                 width=48,
                 height=48,
                 num_creeps=3):
        actions = {
            "up": K_w,
            "left": K_a,
            "right": K_d,
            "down": K_s
        }
        PyGameWrapper.__init__(self, width, height, actions=actions)
        self.BG_COLOR = (255, 255, 255)
        self.N_CREEPS = num_creeps
        self.CREEP_TYPES = ["GOOD", "BAD"]
        self.CREEP_COLORS = [(40, 140, 40), (150, 95, 95)]
        # Sprite radius scales with the screen (~4.7% of the width).
        radius = percent_round_int(width, 0.047)
        self.CREEP_RADII = [radius, radius]
        # Per-type rewards; self.rewards is provided by PyGameWrapper.
        self.CREEP_REWARD = [
            self.rewards["positive"],
            self.rewards["negative"]]
        self.CREEP_SPEED = 0.25 * width
        self.AGENT_COLOR = (60, 60, 140)
        self.AGENT_SPEED = 0.25 * width
        self.AGENT_RADIUS = radius
        self.AGENT_INIT_POS = (self.width / 2, self.height / 2)
        self.creep_counts = {
            "GOOD": 0,
            "BAD": 0
        }
        # Movement deltas for the current frame, set by _handle_player_events.
        self.dx = 0
        self.dy = 0
        # Created lazily in init() and reused across resets.
        self.player = None
        self.creeps = None

    def _handle_player_events(self):
        # Reset the frame's deltas, then accumulate velocity per key press.
        self.dx = 0
        self.dy = 0
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                key = event.key
                if key == self.actions["left"]:
                    self.dx -= self.AGENT_SPEED
                if key == self.actions["right"]:
                    self.dx += self.AGENT_SPEED
                if key == self.actions["up"]:
                    self.dy -= self.AGENT_SPEED
                if key == self.actions["down"]:
                    self.dy += self.AGENT_SPEED

    def _add_creep(self):
        """Spawn one creep of a random type away from the player."""
        creep_type = self.rng.choice([0, 1])
        creep = None
        pos = (0, 0)
        dist = 0.0
        # Rejection-sample a spawn point until it is at least 1.5px from the
        # player. NOTE(review): both coordinates are drawn from
        # [radius, height - radius]; presumably fine because width == height
        # is recommended -- confirm behavior for non-square screens.
        while dist < 1.5:
            radius = self.CREEP_RADII[creep_type] * 1.5
            pos = self.rng.uniform(radius, self.height - radius, size=2)
            dist = math.sqrt(
                (self.player.pos.x - pos[0])**2 + (self.player.pos.y - pos[1])**2)
        creep = Creep(
            self.CREEP_COLORS[creep_type],
            self.CREEP_RADII[creep_type],
            pos,
            self.rng.choice([-1, 1], 2),
            self.rng.rand() * self.CREEP_SPEED,
            self.CREEP_REWARD[creep_type],
            self.CREEP_TYPES[creep_type],
            self.width,
            self.height,
            self.rng.rand()
        )
        self.creeps.add(creep)
        self.creep_counts[self.CREEP_TYPES[creep_type]] += 1

    def getGameState(self):
        """
        Returns
        -------
        dict
            * player x position.
            * player y position.
            * player x velocity.
            * player y velocity.
            * player distance to each creep, grouped by creep type.
            * each creep's [x, y] position, grouped by creep type.
        """
        state = {
            "player_x": self.player.pos.x,
            "player_y": self.player.pos.y,
            "player_velocity_x": self.player.vel.x,
            "player_velocity_y": self.player.vel.y,
            "creep_dist": {
                "GOOD": [],
                "BAD": []
            },
            "creep_pos": {
                "GOOD": [],
                "BAD": []
            }
        }
        # Euclidean distance from the player to every creep.
        for c in self.creeps:
            dist = math.sqrt((self.player.pos.x - c.pos.x) **
                             2 + (self.player.pos.y - c.pos.y)**2)
            state["creep_dist"][c.TYPE].append(dist)
            state["creep_pos"][c.TYPE].append([c.pos.x, c.pos.y])
        return state

    def getScore(self):
        # Accumulated reward for the current episode.
        return self.score

    def game_over(self):
        """
        Return bool if the game has 'finished'
        """
        return (self.creep_counts['GOOD'] == 0)

    def init(self):
        """
        Starts/Resets the game to its initial state
        """
        self.creep_counts = {"GOOD": 0, "BAD": 0}
        # Reuse the player and creep sprite objects between resets.
        if self.player is None:
            self.player = Player(
                self.AGENT_RADIUS, self.AGENT_COLOR,
                self.AGENT_SPEED, self.AGENT_INIT_POS,
                self.width, self.height
            )
        else:
            self.player.pos = vec2d(self.AGENT_INIT_POS)
            self.player.vel = vec2d((0.0, 0.0))
        if self.creeps is None:
            self.creeps = pygame.sprite.Group()
        else:
            self.creeps.empty()
        for i in range(self.N_CREEPS):
            self._add_creep()
        self.score = 0
        self.ticks = 0
        self.lives = -1

    def step(self, dt):
        """
        Perform one step of game emulation.
        """
        dt /= 1000.0  # pygame reports dt in milliseconds; work in seconds.
        self.screen.fill(self.BG_COLOR)
        self.score += self.rewards["tick"]
        self._handle_player_events()
        self.player.update(self.dx, self.dy, dt)
        # dokill=True removes absorbed creeps from the sprite group.
        hits = pygame.sprite.spritecollide(self.player, self.creeps, True)
        for creep in hits:
            self.creep_counts[creep.TYPE] -= 1
            self.score += creep.reward
            self._add_creep()  # keep the creep population constant
        if self.creep_counts["GOOD"] == 0:
            self.score += self.rewards["win"]
        self.creeps.update(dt)
        self.player.draw(self.screen)
        self.creeps.draw(self.screen)
if __name__ == "__main__":
    # Manual smoke test: play the game with the keyboard at 30 FPS.
    import numpy as np
    pygame.init()
    game = WaterWorld(width=256, height=256, num_creeps=10)
    game.screen = pygame.display.set_mode(game.getScreenDims(), 0, 32)
    game.clock = pygame.time.Clock()
    # Fixed seed so creep spawns are reproducible between runs.
    game.rng = np.random.RandomState(24)
    game.init()
    while True:
        dt = game.clock.tick_busy_loop(30)
        game.step(dt)
        pygame.display.update()
| {
"content_hash": "2244fad77c1f791e30f0a75ab68dacbd",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 82,
"avg_line_length": 26.702928870292887,
"alnum_prop": 0.4968661861485428,
"repo_name": "ntasfi/PyGame-Learning-Environment",
"id": "43965799633ca511f670966421f2def529dfc571",
"size": "6382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ple/games/waterworld.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1588"
},
{
"name": "Python",
"bytes": "167222"
}
],
"symlink_target": ""
} |
from functools import wraps
from collections import defaultdict
def multimethod(store):
    """Decorator factory dispatching on the classes of all args after the first.

    ``store`` maps a function name to a ``{tuple_of_classes: handler}`` table.
    The decorated function is the fallback when no handler matches.  Handlers
    are registered via the returned function's ``dispatch``/``d`` attribute.
    """
    def generic_decorator(f):
        handlers = store[f.__name__]

        @wraps(f)
        def generic(*args):
            # Dispatch key skips args[0] (conventionally ``self``).
            key = tuple(a.__class__ for a in args[1:])
            function = handlers.get(key, generic.default)
            return function(*args)

        def dispatch(*clses):
            def dispatch_decorator(handler):
                # Give each handler a derived, debuggable name.
                handler.__name__ = '_'.join([f.__name__] + [c.__name__ for c in clses])
                store[f.__name__][clses] = handler
                return handler
            return dispatch_decorator

        generic.d = generic.dispatch = dispatch
        generic.default = f
        return generic
    return generic_decorator
def method_store():
    """Create a fresh dispatch store mapping function name -> handler table.

    Uses ``defaultdict(dict)`` instead of ``defaultdict(lambda: {})`` —
    identical behavior, without the needless lambda wrapper.
    """
    return defaultdict(dict)
| {
"content_hash": "c85d4ca1bd8abd157ebf989ef76ac615",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 56,
"avg_line_length": 22.906976744186046,
"alnum_prop": 0.4984771573604061,
"repo_name": "dacjames/mara-lang",
"id": "70563b52e680176513a3713f24c6dc22e37bf777",
"size": "985",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "bootstrap/mara/util/dispatch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9315"
},
{
"name": "Python",
"bytes": "164554"
},
{
"name": "Ragel in Ruby Host",
"bytes": "8387"
}
],
"symlink_target": ""
} |
import requests
import pytest
from kiefer.client import KieferClient, KieferClientError
@pytest.fixture
def setup(mocker):
    # Shared fixture: patch requests.get/post/delete to return a canned
    # Response-like object, and expose a ready client plus the headers every
    # request is expected to carry.
    class Setup:
        resp = mocker.patch.object(requests.Response, '__init__')
        resp.status_code = 200
        # json() mimics Jawbone's error envelope, consumed on non-200 statuses.
        resp.json = lambda: {'meta': {'error_type': 'CustomError',
                                      'error_detail': 'custom error detail'}}
        req_get = mocker.patch('requests.get')
        req_get.return_value = resp
        req_post = mocker.patch('requests.post')
        req_post.return_value = resp
        req_delete = mocker.patch('requests.delete')
        req_delete.return_value = resp
        client = KieferClient('access_token')
        headers = {'Authorization': 'Bearer access_token'}
    return Setup
def test_client_client_init():
    # A fresh client stores the token and derives the auth header from it.
    kiefer_client = KieferClient('access_token')
    assert kiefer_client.access_token == 'access_token'
    assert kiefer_client._headers['Authorization'] == 'Bearer access_token'
def test_client_get_helper(setup):
    # _get hits the versioned API URL with the auth headers...
    expected_url = 'https://jawbone.com/nudge/api/v.1.1/myurl'
    setup.client._get('myurl')
    setup.req_get.assert_called_once_with(expected_url, params=None,
                                          headers=setup.headers)
    # ...and maps a non-OK status to KieferClientError.
    setup.resp.status_code = 404
    with pytest.raises(KieferClientError):
        setup.client._get('myurl')
def test_client_post_helper(setup):
    # _post sends the payload as form data to the versioned endpoint...
    expected_url = 'https://jawbone.com/nudge/api/v.1.1/myurl'
    setup.client._post('myurl', payload={})
    setup.req_post.assert_called_once_with(expected_url, data={},
                                           headers=setup.headers)
    # ...and raises KieferClientError on a non-OK status.
    setup.resp.status_code = 404
    with pytest.raises(KieferClientError):
        setup.client._post('myurl', payload={'foo': 'bar'})
def test_client_delete_helper(setup):
    # _delete issues a DELETE with auth headers and maps errors to exceptions.
    expected_url = 'https://jawbone.com/nudge/api/v.1.1/myurl'
    setup.client._delete('myurl')
    setup.req_delete.assert_called_once_with(expected_url, headers=setup.headers)
    setup.resp.status_code = 404
    with pytest.raises(KieferClientError):
        setup.client._delete('myurl')
def test_client_get_band_events(setup):
    # Band events are fetched from the user-scoped endpoint with no params.
    expected_url = 'https://jawbone.com/nudge/api/v.1.1/users/@me/bandevents'
    setup.client.get_band_events()
    setup.req_get.assert_called_once_with(expected_url, params=None,
                                          headers=setup.headers)
def test_client_get_body_events(setup):
    # Body events are fetched with an (empty) params dict by default.
    expected_url = 'https://jawbone.com/nudge/api/v.1.1/users/@me/body_events'
    setup.client.get_body_events()
    setup.req_get.assert_called_once_with(expected_url, params={},
                                          headers=setup.headers)
| {
"content_hash": "ec3c7408eb35f75db29058574d67dff4",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 77,
"avg_line_length": 34.19736842105263,
"alnum_prop": 0.6240861869949981,
"repo_name": "andygoldschmidt/kiefer",
"id": "a371b63d23080734b4dac1125c257479e7966fd5",
"size": "2599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20210"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import argparse
import copy
import os
import sys
import time
from cinderclient import exceptions
from cinderclient.openstack.common import strutils
from cinderclient import utils
from cinderclient.v1 import availability_zones
def _poll_for_status(poll_fn, obj_id, action, final_ok_states,
poll_period=5, show_progress=True):
"""Blocks while an action occurs. Periodically shows progress."""
def print_progress(progress):
if show_progress:
msg = ('\rInstance %(action)s... %(progress)s%% complete'
% dict(action=action, progress=progress))
else:
msg = '\rInstance %(action)s...' % dict(action=action)
sys.stdout.write(msg)
sys.stdout.flush()
print()
while True:
obj = poll_fn(obj_id)
status = obj.status.lower()
progress = getattr(obj, 'progress', None) or 0
if status in final_ok_states:
print_progress(100)
print("\nFinished")
break
elif status == "error":
print("\nError %(action)s instance" % {'action': action})
break
else:
print_progress(progress)
time.sleep(poll_period)
def _find_volume_snapshot(cs, snapshot):
    """Gets a volume snapshot by name or ID."""
    # Name/ID resolution is delegated to utils.find_resource.
    return utils.find_resource(cs.volume_snapshots, snapshot)
def _find_backup(cs, backup):
    """Gets a backup by name or ID."""
    # Name/ID resolution is delegated to utils.find_resource.
    return utils.find_resource(cs.backups, backup)
def _find_transfer(cs, transfer):
    """Gets a transfer by name or ID."""
    # Name/ID resolution is delegated to utils.find_resource.
    return utils.find_resource(cs.transfers, transfer)
def _find_qos_specs(cs, qos_specs):
    """Gets a qos specs by ID."""
    # Resolution is delegated to utils.find_resource.
    return utils.find_resource(cs.qos_specs, qos_specs)
def _print_volume(volume):
    """Pretty-print a volume's raw info dict."""
    utils.print_dict(volume._info)
def _print_volume_snapshot(snapshot):
    """Pretty-print a snapshot's raw info dict."""
    utils.print_dict(snapshot._info)
def _print_volume_image(image):
    """Pretty-print an upload-to-image response.

    ``image[1]`` is expected to hold the 'os-volume_upload_image' payload.
    """
    utils.print_dict(image[1]['os-volume_upload_image'])
def _translate_keys(collection, convert):
for item in collection:
keys = item.__dict__
for from_key, to_key in convert:
if from_key in keys and to_key not in keys:
setattr(item, to_key, item._info[from_key])
def _translate_volume_keys(collection):
    # Map server-side camelCase / extension keys onto snake_case attributes.
    convert = [('displayName', 'display_name'), ('volumeType', 'volume_type'),
               ('os-vol-tenant-attr:tenant_id', 'tenant_id')]
    _translate_keys(collection, convert)
def _translate_volume_snapshot_keys(collection):
    # Map server-side camelCase keys onto snake_case attributes.
    convert = [('displayName', 'display_name'), ('volumeId', 'volume_id')]
    _translate_keys(collection, convert)
def _translate_availability_zone_keys(collection):
    # Map zone response keys onto the generic name/status attributes.
    convert = [('zoneName', 'name'), ('zoneState', 'status')]
    _translate_keys(collection, convert)
def _extract_metadata(args):
metadata = {}
for metadatum in args.metadata:
# unset doesn't require a val, so we have the if/else
if '=' in metadatum:
(key, value) = metadatum.split('=', 1)
else:
key = metadatum
value = None
metadata[key] = value
return metadata
@utils.arg(
    '--all-tenants',
    dest='all_tenants',
    metavar='<0|1>',
    nargs='?',
    type=int,
    const=1,
    default=0,
    help='Shows details for all tenants. Admin only.')
@utils.arg(
    '--all_tenants',
    nargs='?',
    type=int,
    const=1,
    help=argparse.SUPPRESS)
@utils.arg(
    '--display-name',
    metavar='<display-name>',
    default=None,
    help='Filters list by a volume display name. Default=None.')
@utils.arg(
    '--status',
    metavar='<status>',
    default=None,
    help='Filters list by a status. Default=None.')
@utils.arg(
    '--metadata',
    type=str,
    nargs='*',
    metavar='<key=value>',
    default=None,
    help='Filters list by metadata key and value pair. '
         'Default=None.')
@utils.arg(
    '--tenant',
    type=str,
    dest='tenant',
    nargs='?',
    metavar='<tenant>',
    help='Display information from single tenant (Admin only).')
@utils.arg(
    '--limit',
    metavar='<limit>',
    default=None,
    help='Maximum number of volumes to return. OPTIONAL: Default=None.')
@utils.service_type('volume')
def do_list(cs, args):
    """Lists all volumes."""
    # --tenant forces an all-tenants listing; otherwise the ALL_TENANTS
    # environment variable can override the --all-tenants flag.
    all_tenants = 1 if args.tenant else \
        int(os.environ.get("ALL_TENANTS", args.all_tenants))
    search_opts = {
        'all_tenants': all_tenants,
        'project_id': args.tenant,
        'display_name': args.display_name,
        'status': args.status,
        'metadata': _extract_metadata(args) if args.metadata else None,
    }
    volumes = cs.volumes.list(search_opts=search_opts, limit=args.limit)
    _translate_volume_keys(volumes)
    # Create a list of servers to which the volume is attached
    for vol in volumes:
        servers = [s.get('server_id') for s in vol.attachments]
        setattr(vol, 'attached_to', ','.join(map(str, servers)))
    # Cross-tenant listings also show the owning tenant's ID column.
    if all_tenants:
        key_list = ['ID', 'Tenant ID', 'Status', 'Display Name',
                    'Size', 'Volume Type', 'Bootable', 'Attached to']
    else:
        key_list = ['ID', 'Status', 'Display Name',
                    'Size', 'Volume Type', 'Bootable', 'Attached to']
    utils.print_list(volumes, key_list)
@utils.arg('volume', metavar='<volume>', help='Volume name or ID.')
@utils.service_type('volume')
def do_show(cs, args):
    """Shows volume details."""
    # Resolve the name/ID, then dump the volume's raw info dict.
    _print_volume(utils.find_volume(cs, args.volume))
@utils.arg('size',
           metavar='<size>',
           type=int,
           help='Volume size, in GBs.')
@utils.arg(
    '--snapshot-id',
    metavar='<snapshot-id>',
    default=None,
    help='Creates volume from snapshot ID. '
         'Default=None.')
@utils.arg(
    '--snapshot_id',
    help=argparse.SUPPRESS)
@utils.arg(
    '--source-volid',
    metavar='<source-volid>',
    default=None,
    help='Creates volume from volume ID. '
         'Default=None.')
@utils.arg(
    '--source_volid',
    help=argparse.SUPPRESS)
@utils.arg(
    '--image-id',
    metavar='<image-id>',
    default=None,
    help='Creates volume from image ID. '
         'Default=None.')
@utils.arg(
    '--image_id',
    help=argparse.SUPPRESS)
@utils.arg(
    '--display-name',
    metavar='<display-name>',
    default=None,
    help='Volume name. '
         'Default=None.')
@utils.arg(
    '--display_name',
    help=argparse.SUPPRESS)
@utils.arg(
    '--display-description',
    metavar='<display-description>',
    default=None,
    help='Volume description. '
         'Default=None.')
@utils.arg(
    '--display_description',
    help=argparse.SUPPRESS)
@utils.arg(
    '--volume-type',
    metavar='<volume-type>',
    default=None,
    help='Volume type. '
         'Default=None.')
@utils.arg(
    '--volume_type',
    help=argparse.SUPPRESS)
@utils.arg(
    '--availability-zone',
    metavar='<availability-zone>',
    default=None,
    help='Availability zone for volume. '
         'Default=None.')
@utils.arg(
    '--availability_zone',
    help=argparse.SUPPRESS)
@utils.arg('--metadata',
           type=str,
           nargs='*',
           metavar='<key=value>',
           default=None,
           help='Metadata key and value pairs. '
                'Default=None.')
@utils.service_type('volume')
def do_create(cs, args):
    """Creates a volume."""
    # Only parse metadata when supplied; None means "no metadata".
    volume_metadata = None
    if args.metadata is not None:
        volume_metadata = _extract_metadata(args)
    volume = cs.volumes.create(args.size,
                               args.snapshot_id,
                               args.source_volid,
                               args.display_name,
                               args.display_description,
                               args.volume_type,
                               availability_zone=args.availability_zone,
                               imageRef=args.image_id,
                               metadata=volume_metadata)
    _print_volume(volume)
@utils.arg('volume', metavar='<volume>', nargs='+',
           help='Name or ID of volume to delete. '
                'Separate multiple volumes with a space.')
@utils.service_type('volume')
def do_delete(cs, args):
    """Removes one or more volumes."""
    # Best-effort: keep going after individual failures, and only raise a
    # CommandError when every requested deletion failed.
    failures = 0
    for volume in args.volume:
        try:
            utils.find_volume(cs, volume).delete()
        except Exception as e:
            print("Delete for volume %s failed: %s" % (volume, e))
            failures += 1
    if failures == len(args.volume):
        raise exceptions.CommandError("Unable to delete any of the specified "
                                      "volumes.")
@utils.arg('volume', metavar='<volume>', nargs='+',
           help='Name or ID of volume to delete. '
                'Separate multiple volumes with a space.')
@utils.service_type('volume')
def do_force_delete(cs, args):
    """Attempts force-delete of volume, regardless of state."""
    # Best-effort over all requested volumes; error only if all failed.
    failures = 0
    for volume in args.volume:
        try:
            utils.find_volume(cs, volume).force_delete()
        except Exception as e:
            print("Delete for volume %s failed: %s" % (volume, e))
            failures += 1
    if failures == len(args.volume):
        raise exceptions.CommandError("Unable to force delete any of the "
                                      "specified volumes.")
@utils.arg('volume', metavar='<volume>', nargs='+',
           help='Name or ID of volume to modify. '
                'Separate multiple volumes with a space.')
@utils.arg('--state', metavar='<state>', default='available',
           help=('The state to assign to the volume. Valid values are '
                 '"available," "error," "creating," "deleting," "in-use," '
                 '"attaching," "detaching" and "error_deleting." '
                 'NOTE: This command simply changes the state of the '
                 'Volume in the DataBase with no regard to actual status, '
                 'exercise caution when using. Default=available.'))
@utils.service_type('volume')
def do_reset_state(cs, args):
    """Explicitly updates the volume state."""
    # Unlike delete, a single failure here is enough to report an error,
    # but all volumes are still attempted first.
    any_failed = False
    for volume in args.volume:
        try:
            utils.find_volume(cs, volume).reset_state(args.state)
        except Exception as e:
            any_failed = True
            print("Reset state for volume %s failed: %s" % (volume, e))
    if any_failed:
        raise exceptions.CommandError(
            "Unable to reset the state for the specified volume(s).")
@utils.arg('volume', metavar='<volume>',
           help='Name or ID of volume to rename.')
@utils.arg('display_name', nargs='?', metavar='<display-name>',
           help='New display name for volume.')
@utils.arg('--display-description', metavar='<display-description>',
           default=None, help='Volume description. Default=None.')
@utils.service_type('volume')
def do_rename(cs, args):
    """Renames a volume."""
    kwargs = {}
    if args.display_name is not None:
        kwargs['display_name'] = args.display_name
    if args.display_description is not None:
        kwargs['display_description'] = args.display_description
    # Fix: test the dict directly for emptiness. The old `not any(kwargs)`
    # iterated the keys and only worked because they are truthy strings.
    if not kwargs:
        msg = 'Must supply either display-name or display-description.'
        raise exceptions.ClientException(code=1, message=msg)
    utils.find_volume(cs, args.volume).update(**kwargs)
@utils.arg('volume',
           metavar='<volume>',
           help='Name or ID of volume for which to update metadata.')
@utils.arg('action',
           metavar='<action>',
           choices=['set', 'unset'],
           help='The action. Valid values are "set" or "unset."')
@utils.arg('metadata',
           metavar='<key=value>',
           nargs='+',
           default=[],
           help='The metadata key and pair to set or unset. '
                'For unset, specify only the key. '
                'Default=[].')
@utils.service_type('volume')
def do_metadata(cs, args):
    """Sets or deletes volume metadata."""
    volume = utils.find_volume(cs, args.volume)
    metadata = _extract_metadata(args)
    if args.action == 'set':
        cs.volumes.set_metadata(volume, metadata)
    elif args.action == 'unset':
        # NOTE(zul): Make sure py2/py3 sorting is the same
        keys_to_remove = sorted(metadata.keys(), reverse=True)
        cs.volumes.delete_metadata(volume, keys_to_remove)
@utils.arg(
    '--all-tenants',
    dest='all_tenants',
    metavar='<0|1>',
    nargs='?',
    type=int,
    const=1,
    default=0,
    help='Shows details for all tenants. Admin only.')
@utils.arg(
    '--all_tenants',
    nargs='?',
    type=int,
    const=1,
    help=argparse.SUPPRESS)
@utils.arg(
    '--display-name',
    metavar='<display-name>',
    default=None,
    help='Filters list by a display name. Default=None.')
@utils.arg(
    '--status',
    metavar='<status>',
    default=None,
    help='Filters list by a status. Default=None.')
@utils.arg(
    '--volume-id',
    metavar='<volume-id>',
    default=None,
    help='Filters list by a volume ID. Default=None.')
@utils.service_type('volume')
def do_snapshot_list(cs, args):
    """Lists all snapshots."""
    # The ALL_TENANTS environment variable can override the CLI flag.
    all_tenants = int(os.environ.get("ALL_TENANTS", args.all_tenants))
    search_opts = {
        'all_tenants': all_tenants,
        'display_name': args.display_name,
        'status': args.status,
        'volume_id': args.volume_id,
    }
    snapshots = cs.volume_snapshots.list(search_opts=search_opts)
    _translate_volume_snapshot_keys(snapshots)
    utils.print_list(snapshots,
                     ['ID', 'Volume ID', 'Status', 'Display Name', 'Size'])
@utils.arg('snapshot', metavar='<snapshot>',
           help='Name or ID of snapshot.')
@utils.service_type('volume')
def do_snapshot_show(cs, args):
    """Shows snapshot details."""
    _print_volume_snapshot(_find_volume_snapshot(cs, args.snapshot))
@utils.arg('volume',
           metavar='<volume>',
           help='Name or ID of volume to snapshot.')
@utils.arg('--force',
           metavar='<True|False>',
           default=False,
           help='Allows or disallows snapshot of '
           'a volume when the volume is attached to an instance. '
           'If set to True, ignores the current status of the '
           'volume when attempting to snapshot it rather '
           'than forcing it to be available. '
           'Default=False.')
@utils.arg(
    '--display-name',
    metavar='<display-name>',
    default=None,
    help='The snapshot name. Default=None.')
@utils.arg(
    '--display_name',
    help=argparse.SUPPRESS)
@utils.arg(
    '--display-description',
    metavar='<display-description>',
    default=None,
    help='The snapshot description. Default=None.')
@utils.arg(
    '--display_description',
    help=argparse.SUPPRESS)
@utils.service_type('volume')
def do_snapshot_create(cs, args):
    """Creates a snapshot."""
    source = utils.find_volume(cs, args.volume)
    created = cs.volume_snapshots.create(source.id,
                                         args.force,
                                         args.display_name,
                                         args.display_description)
    _print_volume_snapshot(created)
@utils.arg('snapshot',
           metavar='<snapshot>', nargs='+',
           help='Name or ID of the snapshot(s) to delete.')
@utils.service_type('volume')
def do_snapshot_delete(cs, args):
    """Remove one or more snapshots."""
    failures = 0
    for name in args.snapshot:
        try:
            _find_volume_snapshot(cs, name).delete()
        except Exception as exc:
            failures += 1
            print("Delete for snapshot %s failed: %s" % (name, exc))
    # Only error out when nothing could be deleted at all.
    if failures == len(args.snapshot):
        raise exceptions.CommandError("Unable to delete any of the specified "
                                      "snapshots.")
@utils.arg('snapshot', metavar='<snapshot>',
           help='Name or ID of snapshot.')
@utils.arg('display_name', nargs='?', metavar='<display-name>',
           help='New display name for snapshot.')
@utils.arg('--display-description', metavar='<display-description>',
           default=None, help='Snapshot description. Default=None.')
@utils.service_type('volume')
def do_snapshot_rename(cs, args):
    """Renames a snapshot.

    Mirrors do_rename: collects the optional display name/description
    into an update payload and rejects an empty payload.
    """
    kwargs = {}
    if args.display_name is not None:
        kwargs['display_name'] = args.display_name
    if args.display_description is not None:
        kwargs['display_description'] = args.display_description
    # Direct truthiness check; the previous any(kwargs) iterated the
    # dict's keys, which only coincidentally gave the same result.
    if not kwargs:
        msg = 'Must supply either display-name or display-description.'
        raise exceptions.ClientException(code=1, message=msg)
    _find_volume_snapshot(cs, args.snapshot).update(**kwargs)
@utils.arg('snapshot', metavar='<snapshot>', nargs='+',
           help='Name or ID of snapshot to modify.')
@utils.arg('--state', metavar='<state>', default='available',
           help=('The state to assign to the snapshot. Valid values are '
                 '"available," "error," "creating," "deleting," and '
                 '"error_deleting." NOTE: This command simply changes '
                 'the state of the Snapshot in the DataBase with no regard '
                 'to actual status, exercise caution when using. '
                 'Default=available.'))
@utils.service_type('volume')
def do_snapshot_reset_state(cs, args):
    """Explicitly updates the snapshot state.

    Error reporting depends on how many snapshots were given: for a
    single snapshot the per-snapshot failure message is raised directly;
    for several, each failure is printed as it happens and a generic
    summary is raised only if every snapshot failed.
    """
    failure_count = 0
    single = (len(args.snapshot) == 1)
    for snapshot in args.snapshot:
        try:
            _find_volume_snapshot(cs, snapshot).reset_state(args.state)
        except Exception as e:
            failure_count += 1
            # msg intentionally survives the loop: in the single-snapshot
            # case it becomes the raised error below.
            msg = "Reset state for snapshot %s failed: %s" % (snapshot, e)
            if not single:
                print(msg)
    if failure_count == len(args.snapshot):
        if not single:
            # Replace the last per-snapshot message with a summary.
            msg = ("Unable to reset the state for any of the specified "
                   "snapshots.")
        raise exceptions.CommandError(msg)
def _print_volume_type_list(vtypes):
    """Print volume types as a two-column (ID, Name) table."""
    utils.print_list(vtypes, ['ID', 'Name'])
@utils.service_type('volume')
def do_type_list(cs, args):
    """Lists available 'volume types'."""
    _print_volume_type_list(cs.volume_types.list())
@utils.service_type('volume')
def do_extra_specs_list(cs, args):
    """Lists current volume types and extra specs."""
    utils.print_list(cs.volume_types.list(), ['ID', 'Name', 'extra_specs'])
@utils.arg('name',
           metavar='<name>',
           help='Name for the volume type.')
@utils.service_type('volume')
def do_type_create(cs, args):
    """Creates a volume type."""
    _print_volume_type_list([cs.volume_types.create(args.name)])
@utils.arg('id',
           metavar='<id>',
           help='ID of volume type to delete.')
@utils.service_type('volume')
def do_type_delete(cs, args):
    """Deletes a specified volume type."""
    cs.volume_types.delete(_find_volume_type(cs, args.id))
@utils.arg('vtype',
           metavar='<vtype>',
           help='Name or ID of volume type.')
@utils.arg('action',
           metavar='<action>',
           choices=['set', 'unset'],
           help='The action. Valid values are "set" or "unset."')
@utils.arg('metadata',
           metavar='<key=value>',
           nargs='*',
           default=None,
           help='The extra specs key and value pair to set or unset. '
                'For unset, specify only the key. Default=None.')
@utils.service_type('volume')
def do_type_key(cs, args):
    """Sets or unsets extra_spec for a volume type."""
    vtype = _find_volume_type(cs, args.vtype)
    # No key/value pairs given: nothing to do (nargs='*' may yield None).
    if args.metadata is None:
        return
    specs = _extract_metadata(args)
    if args.action == 'set':
        vtype.set_keys(specs)
    elif args.action == 'unset':
        vtype.unset_keys(list(specs))
def do_endpoints(cs, args):
    """Discovers endpoints registered by authentication service."""
    for service in cs.client.service_catalog.catalog['serviceCatalog']:
        # Show only the first endpoint of each catalog entry.
        utils.print_dict(service['endpoints'][0], service['name'])
def do_credentials(cs, args):
    """Shows user credentials returned from auth."""
    auth_catalog = cs.client.service_catalog.catalog
    utils.print_dict(auth_catalog['user'], "User Credentials")
    utils.print_dict(auth_catalog['token'], "Token")
# Quota resource name prefixes recognized by the quota show/update commands.
_quota_resources = ['volumes', 'snapshots', 'gigabytes',
                    'backups', 'backup_gigabytes']
# Column headers used when printing per-resource quota usage rows.
_quota_infos = ['Type', 'In_use', 'Reserved', 'Limit']
def _quota_show(quotas):
    """Print a quota set as a resource -> value table.

    Only resources whose name starts with one of the known prefixes in
    ``_quota_resources`` are shown; prefix matching keeps per-volume-type
    entries such as ``gigabytes_<type>`` visible.
    """
    # str.startswith accepts a tuple of prefixes, replacing the manual
    # inner flag-setting loop of the original.
    known = tuple(_quota_resources)
    quota_dict = {}
    for resource in quotas._info:
        if not resource.startswith(known):
            continue
        quota_dict[resource] = getattr(quotas, resource, None)
    utils.print_dict(quota_dict)
def _quota_usage_show(quotas):
    """Print quota usage rows (Type/In_use/Reserved/Limit) for a tenant.

    Filters resources by the known prefixes in ``_quota_resources``,
    then capitalizes each usage dict's keys to match ``_quota_infos``.
    """
    # str.startswith accepts a tuple of prefixes, replacing the manual
    # inner flag-setting loop of the original.
    known = tuple(_quota_resources)
    quota_list = []
    for resource in quotas._info.keys():
        if not resource.startswith(known):
            continue
        quota_info = getattr(quotas, resource, None)
        quota_info['Type'] = resource
        quota_info = dict((k.capitalize(), v) for k, v in quota_info.items())
        quota_list.append(quota_info)
    utils.print_list(quota_list, _quota_infos)
def _quota_update(manager, identifier, args):
    """Apply quota overrides taken from CLI args, then show the result."""
    updates = {}
    for resource in _quota_resources:
        value = getattr(args, resource, None)
        if value is None:
            continue
        key = resource
        if args.volume_type:
            # Per-volume-type quotas use a "<resource>_<type>" key.
            key = '%s_%s' % (resource, args.volume_type)
        updates[key] = value
    if updates:
        _quota_show(manager.update(identifier, **updates))
@utils.arg('tenant', metavar='<tenant_id>',
           help='ID of the tenant for which to list quotas.')
@utils.service_type('volume')
def do_quota_show(cs, args):
    """Lists quotas for a tenant."""
    tenant_quotas = cs.quotas.get(args.tenant)
    _quota_show(tenant_quotas)
@utils.arg('tenant', metavar='<tenant_id>',
           help='ID of the tenant for which to list quota usage.')
@utils.service_type('volume')
def do_quota_usage(cs, args):
    """Lists quota usage for a tenant."""
    usage = cs.quotas.get(args.tenant, usage=True)
    _quota_usage_show(usage)
@utils.arg('tenant', metavar='<tenant_id>',
           help='ID of the tenant for which to list default quotas.')
@utils.service_type('volume')
def do_quota_defaults(cs, args):
    """Lists default quotas for a tenant."""
    defaults = cs.quotas.defaults(args.tenant)
    _quota_show(defaults)
@utils.arg('tenant', metavar='<tenant_id>',
           help='ID of the tenant for which to set quotas.')
@utils.arg('--volumes',
           metavar='<volumes>',
           type=int, default=None,
           help='The new "volumes" quota value. Default=None.')
@utils.arg('--snapshots',
           metavar='<snapshots>',
           type=int, default=None,
           help='The new "snapshots" quota value. Default=None.')
@utils.arg('--gigabytes',
           metavar='<gigabytes>',
           type=int, default=None,
           help='The new "gigabytes" quota value. Default=None.')
@utils.arg('--backups',
           metavar='<backups>',
           type=int, default=None,
           help='The new "backups" quota value. Default=None.')
@utils.arg('--backup-gigabytes',
           metavar='<backup_gigabytes>',
           type=int, default=None,
           help='The new "backup_gigabytes" quota value. Default=None.')
@utils.arg('--volume-type',
           metavar='<volume_type_name>',
           default=None,
           help='Volume type. Default=None.')
@utils.service_type('volume')
def do_quota_update(cs, args):
    """Updates quotas for a tenant."""
    # Shared helper handles the per-resource extraction and display.
    _quota_update(cs.quotas, args.tenant, args)
@utils.arg('tenant', metavar='<tenant_id>',
           help='UUID of tenant to delete the quotas for.')
@utils.service_type('volume')
def do_quota_delete(cs, args):
    """Delete the quotas for a tenant."""
    # Resets the tenant back to default quotas server-side.
    cs.quotas.delete(args.tenant)
@utils.arg('class_name', metavar='<class>',
           help='Name of quota class for which to list quotas.')
@utils.service_type('volume')
def do_quota_class_show(cs, args):
    """Lists quotas for a quota class."""
    class_quotas = cs.quota_classes.get(args.class_name)
    _quota_show(class_quotas)
@utils.arg('class_name', metavar='<class>',
           help='Name of quota class for which to set quotas.')
@utils.arg('--volumes',
           metavar='<volumes>',
           type=int, default=None,
           help='The new "volumes" quota value. Default=None.')
@utils.arg('--snapshots',
           metavar='<snapshots>',
           type=int, default=None,
           help='The new "snapshots" quota value. Default=None.')
@utils.arg('--gigabytes',
           metavar='<gigabytes>',
           type=int, default=None,
           help='The new "gigabytes" quota value. Default=None.')
@utils.arg('--volume-type',
           metavar='<volume_type_name>',
           default=None,
           help='Volume type. Default=None.')
@utils.service_type('volume')
def do_quota_class_update(cs, args):
    """Updates quotas for a quota class."""
    # Shared helper handles the per-resource extraction and display.
    _quota_update(cs.quota_classes, args.class_name, args)
@utils.service_type('volume')
def do_absolute_limits(cs, args):
    """Lists absolute limits for a user."""
    utils.print_list(cs.limits.get().absolute, ['Name', 'Value'])
@utils.service_type('volume')
def do_rate_limits(cs, args):
    """Lists rate limits for a user."""
    utils.print_list(cs.limits.get().rate,
                     ['Verb', 'URI', 'Value', 'Remain', 'Unit',
                      'Next_Available'])
def _find_volume_type(cs, vtype):
    """Resolve *vtype* (a name or an ID) to a volume type object."""
    return utils.find_resource(cs.volume_types, vtype)
@utils.arg('volume',
           metavar='<volume>',
           help='Name or ID of volume to upload to an image.')
@utils.arg('--force',
           metavar='<True|False>',
           default=False,
           help='Enables or disables upload of '
           'a volume that is attached to an instance. '
           'Default=False.')
@utils.arg('--container-format',
           metavar='<container-format>',
           default='bare',
           help='Container format type. '
           'Default is bare.')
@utils.arg('--disk-format',
           metavar='<disk-format>',
           default='raw',
           help='Disk format type. '
           'Default is raw.')
@utils.arg('image_name',
           metavar='<image-name>',
           help='The new image name.')
@utils.service_type('volume')
def do_upload_to_image(cs, args):
    """Uploads volume to Image Service as an image."""
    source = utils.find_volume(cs, args.volume)
    result = source.upload_to_image(args.force,
                                    args.image_name,
                                    args.container_format,
                                    args.disk_format)
    _print_volume_image(result)
@utils.arg('volume', metavar='<volume>',
           help='Name or ID of volume to back up.')
@utils.arg('--container', metavar='<container>',
           default=None,
           help='Backup container name. Default=None.')
@utils.arg('--display-name', metavar='<display-name>',
           default=None,
           help='Backup name. Default=None.')
@utils.arg('--display-description', metavar='<display-description>',
           default=None,
           help='Backup description. Default=None.')
@utils.service_type('volume')
def do_backup_create(cs, args):
    """Creates a volume backup."""
    volume = utils.find_volume(cs, args.volume)
    backup = cs.backups.create(volume.id,
                               args.container,
                               args.display_name,
                               args.display_description)
    # Prepend the source volume id and strip API hyperlinks before display.
    info = {"volume_id": volume.id}
    info.update(backup._info)
    info.pop('links', None)
    utils.print_dict(info)
@utils.arg('backup', metavar='<backup>', help='Name or ID of backup.')
@utils.service_type('volume')
def do_backup_show(cs, args):
    """Show backup details."""
    # Copy the info dict and strip API hyperlinks before display.
    info = dict(_find_backup(cs, args.backup)._info)
    info.pop('links', None)
    utils.print_dict(info)
@utils.service_type('volume')
def do_backup_list(cs, args):
    """Lists all backups."""
    utils.print_list(cs.backups.list(),
                     ['ID', 'Volume ID', 'Status', 'Name', 'Size',
                      'Object Count', 'Container'])
@utils.arg('backup', metavar='<backup>',
           help='Name or ID of backup to delete.')
@utils.service_type('volume')
def do_backup_delete(cs, args):
    """Removes a backup."""
    _find_backup(cs, args.backup).delete()
@utils.arg('backup', metavar='<backup>',
           help='ID of backup to restore.')
@utils.arg('--volume-id', metavar='<volume>',
           default=None,
           help='ID or name of backup volume to '
                'which to restore. Default=None.')
@utils.service_type('volume')
def do_backup_restore(cs, args):
    """Restores a backup."""
    # Without an explicit target, the service picks/creates the volume.
    volume_id = (utils.find_volume(cs, args.volume_id).id
                 if args.volume_id else None)
    cs.restores.restore(args.backup, volume_id)
@utils.arg('volume', metavar='<volume>',
           help='Name or ID of volume to transfer.')
@utils.arg('--display-name', metavar='<display-name>',
           default=None,
           help='Transfer name. Default=None.')
@utils.service_type('volume')
def do_transfer_create(cs, args):
    """Creates a volume transfer."""
    volume = utils.find_volume(cs, args.volume)
    transfer = cs.transfers.create(volume.id, args.display_name)
    # Copy the info dict and strip API hyperlinks before display.
    info = dict(transfer._info)
    info.pop('links', None)
    utils.print_dict(info)
@utils.arg('transfer', metavar='<transfer>',
           help='Name or ID of transfer to delete.')
@utils.service_type('volume')
def do_transfer_delete(cs, args):
    """Undoes a transfer."""
    _find_transfer(cs, args.transfer).delete()
@utils.arg('transfer', metavar='<transfer>',
           help='ID of transfer to accept.')
@utils.arg('auth_key', metavar='<auth_key>',
           help='Authentication key of transfer to accept.')
@utils.service_type('volume')
def do_transfer_accept(cs, args):
    """Accepts a volume transfer."""
    transfer = cs.transfers.accept(args.transfer, args.auth_key)
    # Copy the info dict and strip API hyperlinks before display.
    info = dict(transfer._info)
    info.pop('links', None)
    utils.print_dict(info)
@utils.arg(
    '--all-tenants',
    dest='all_tenants',
    metavar='<0|1>',
    nargs='?',
    type=int,
    const=1,
    default=0,
    help='Shows details for all tenants. Admin only.')
@utils.arg(
    '--all_tenants',
    nargs='?',
    type=int,
    const=1,
    help=argparse.SUPPRESS)
@utils.service_type('volume')
def do_transfer_list(cs, args):
    """Lists all transfers."""
    # The ALL_TENANTS environment variable can override the CLI flag.
    search_opts = {
        'all_tenants': int(os.environ.get("ALL_TENANTS", args.all_tenants)),
    }
    transfers = cs.transfers.list(search_opts=search_opts)
    utils.print_list(transfers, ['ID', 'Volume ID', 'Name'])
# Fix: the help text previously said "transfer to accept", copied from
# do_transfer_accept; this command only displays the transfer.
@utils.arg('transfer', metavar='<transfer>',
           help='Name or ID of transfer to show.')
@utils.service_type('volume')
def do_transfer_show(cs, args):
    """Show transfer details."""
    transfer = _find_transfer(cs, args.transfer)
    # Copy the info dict and strip API hyperlinks before display.
    info = dict()
    info.update(transfer._info)
    if 'links' in info:
        info.pop('links')
    utils.print_dict(info)
@utils.arg('volume', metavar='<volume>',
           help='Name or ID of volume to extend.')
@utils.arg('new_size',
           metavar='<new-size>',
           type=int,
           help='Size of volume, in GBs.')
@utils.service_type('volume')
def do_extend(cs, args):
    """Attempts to extend size of an existing volume."""
    cs.volumes.extend(utils.find_volume(cs, args.volume), args.new_size)
@utils.arg('--host', metavar='<hostname>', default=None,
           help='Host name. Default=None.')
@utils.arg('--binary', metavar='<binary>', default=None,
           help='Service binary. Default=None.')
@utils.service_type('volume')
def do_service_list(cs, args):
    """Lists all services. Filter by host and service binary."""
    services = cs.services.list(host=args.host, binary=args.binary)
    columns = ["Binary", "Host", "Zone", "Status", "State", "Updated_at"]
    # NOTE(jay-lau-513): we check if the response has disabled_reason
    # so as not to add the column when the extended ext is not enabled.
    if services and hasattr(services[0], 'disabled_reason'):
        columns.append("Disabled Reason")
    utils.print_list(services, columns)
@utils.arg('host', metavar='<hostname>', help='Host name.')
@utils.arg('binary', metavar='<binary>', help='Service binary.')
@utils.service_type('volume')
def do_service_enable(cs, args):
    """Enables the service."""
    enabled = cs.services.enable(args.host, args.binary)
    utils.print_list([enabled], ["Host", "Binary", "Status"])
@utils.arg('host', metavar='<hostname>', help='Host name.')
@utils.arg('binary', metavar='<binary>', help='Service binary.')
@utils.arg('--reason', metavar='<reason>',
           help='Reason for disabling service.')
@utils.service_type('volume')
def do_service_disable(cs, args):
    """Disables the service."""
    if args.reason:
        # Reason-aware variant also reports the reason column.
        result = cs.services.disable_log_reason(args.host, args.binary,
                                                args.reason)
        columns = ["Host", "Binary", "Status", 'Disabled Reason']
    else:
        result = cs.services.disable(args.host, args.binary)
        columns = ["Host", "Binary", "Status"]
    utils.print_list([result], columns)
def _treeizeAvailabilityZone(zone):
    """Builds a tree view for availability zones.

    Expands one zone into a flat list of AvailabilityZone rows: the zone
    itself, one row per host, and one row per service on each host,
    using '|-' prefixes to draw the tree in the printed table.
    """
    AvailabilityZone = availability_zones.AvailabilityZone

    def _make_row(name, state):
        # Each display row is a fresh AvailabilityZone clone of the zone.
        row = AvailabilityZone(zone.manager,
                               copy.deepcopy(zone._info), zone._loaded)
        row.zoneName = name
        row.zoneState = state
        row._info['zoneName'] = name
        row._info['zoneState'] = state
        return row

    result = [_make_row(zone.zoneName,
                        'available' if zone.zoneState['available']
                        else 'not available')]
    # Fix: the original tested "getattr(zone, 'hosts', None) and
    # zone.hosts is not None" -- the second clause was redundant.
    for host, services in (getattr(zone, "hosts", None) or {}).items():
        # Host tree view item
        result.append(_make_row('|- %s' % host, ''))
        for svc, state in services.items():
            # Service tree view item
            state_str = '%s %s %s' % (
                'enabled' if state['active'] else 'disabled',
                ':-)' if state['available'] else 'XXX',
                state['updated_at'])
            result.append(_make_row('| |- %s' % svc, state_str))
    return result
@utils.service_type('volume')
def do_availability_zone_list(cs, _args):
    """Lists all availability zones."""
    try:
        zones = cs.availability_zones.list()
    except exceptions.Forbidden as forbidden:  # policy doesn't allow probably
        try:
            # Fall back to the non-detailed listing before giving up.
            zones = cs.availability_zones.list(detailed=False)
        except Exception:
            raise forbidden
    rows = []
    for zone in zones:
        rows.extend(_treeizeAvailabilityZone(zone))
    _translate_availability_zone_keys(rows)
    utils.print_list(rows, ['Name', 'Status'])
def _print_volume_encryption_type_list(encryption_types):
    """Print volume encryption types as a table.

    :param encryption_types: a list of :class: VolumeEncryptionType instances
    """
    utils.print_list(encryption_types,
                     ['Volume Type ID', 'Provider', 'Cipher', 'Key Size',
                      'Control Location'])
@utils.service_type('volume')
def do_encryption_type_list(cs, args):
    """Shows encryption type details for volume types. Admin only."""
    # Consistency fix: reuse the shared printer instead of duplicating
    # its column list inline (columns are identical).
    _print_volume_encryption_type_list(cs.volume_encryption_types.list())
@utils.arg('volume_type',
           metavar='<volume_type>',
           type=str,
           help='Name or ID of volume type.')
@utils.service_type('volume')
def do_encryption_type_show(cs, args):
    """Shows encryption type details for volume type. Admin only."""
    volume_type = _find_volume_type(cs, args.volume_type)
    result = cs.volume_encryption_types.get(volume_type)
    # Show an empty table when the type carries no encryption settings.
    rows = [result] if hasattr(result, 'volume_type_id') else []
    _print_volume_encryption_type_list(rows)
@utils.arg('volume_type',
           metavar='<volume_type>',
           type=str,
           help='Name or ID of volume type.')
@utils.arg('provider',
           metavar='<provider>',
           type=str,
           help='The class that provides encryption support. '
                'For example, a volume driver class path.')
@utils.arg('--cipher',
           metavar='<cipher>',
           type=str,
           required=False,
           default=None,
           help='The encryption algorithm and mode. '
                'For example, aes-xts-plain64. Default=None.')
@utils.arg('--key_size',
           metavar='<key_size>',
           type=int,
           required=False,
           default=None,
           help='Size of encryption key, in bits. '
                'For example, 128 or 256. Default=None.')
@utils.arg('--control_location',
           metavar='<control_location>',
           choices=['front-end', 'back-end'],
           type=str,
           required=False,
           default='front-end',
           help='Notional service where encryption is performed. '
                'Valid values are "front-end" or "back-end." '
                'For example, front-end=Nova. '
                'Default is "front-end."')
@utils.service_type('volume')
def do_encryption_type_create(cs, args):
    """Creates encryption type for a volume type. Admin only."""
    volume_type = _find_volume_type(cs, args.volume_type)
    # Build the request body as one literal instead of key-by-key.
    body = {
        'provider': args.provider,
        'cipher': args.cipher,
        'key_size': args.key_size,
        'control_location': args.control_location,
    }
    result = cs.volume_encryption_types.create(volume_type, body)
    _print_volume_encryption_type_list([result])
@utils.arg('volume_type',
           metavar='<volume_type>',
           type=str,
           help='Name or ID of volume type.')
@utils.service_type('volume')
def do_encryption_type_delete(cs, args):
    """Deletes encryption type for a volume type. Admin only."""
    cs.volume_encryption_types.delete(_find_volume_type(cs, args.volume_type))
@utils.arg('volume', metavar='<volume>', help='ID of volume to migrate.')
@utils.arg('host', metavar='<host>', help='Destination host.')
@utils.arg('--force-host-copy', metavar='<True|False>',
           choices=['True', 'False'], required=False,
           default=False,
           help='Enables or disables generic host-based '
           'force-migration, which bypasses driver '
           'optimizations. Default=False.')
@utils.service_type('volume')
def do_migrate(cs, args):
    """Migrates volume to a new host."""
    # NOTE(review): force_host_copy is forwarded as the raw CLI value
    # ('True'/'False' string, or the bool False default); presumably the
    # API coerces it -- confirm before changing.
    target = utils.find_volume(cs, args.volume)
    target.migrate_volume(args.host, args.force_host_copy)
def _print_qos_specs(qos_specs):
    """Print a single qos specs object as a key/value table."""
    utils.print_dict(qos_specs._info)
def _print_qos_specs_list(q_specs):
    """Print qos specs objects as an ID/Name/Consumer/specs table."""
    utils.print_list(q_specs, ['ID', 'Name', 'Consumer', 'specs'])
def _print_qos_specs_and_associations_list(q_specs):
    """Print qos specs with associations (same columns as the plain list)."""
    utils.print_list(q_specs, ['ID', 'Name', 'Consumer', 'specs'])
def _print_associations_list(associations):
    """Print qos spec associations as a table."""
    utils.print_list(associations, ['Association_Type', 'Name', 'ID'])
@utils.arg('name',
           metavar='<name>',
           help='Name of new QoS specifications.')
@utils.arg('metadata',
           metavar='<key=value>',
           nargs='+',
           default=[],
           help='Specifications for QoS.')
@utils.service_type('volume')
def do_qos_create(cs, args):
    """Creates a qos specs."""
    # 'metadata' is a required positional with nargs='+', so argparse
    # guarantees a non-None list here; the old "keypair = None" seed and
    # "if args.metadata is not None" guard were dead code.
    _print_qos_specs(cs.qos_specs.create(args.name,
                                         _extract_metadata(args)))
@utils.service_type('volume')
def do_qos_list(cs, args):
    """Lists qos specs."""
    _print_qos_specs_list(cs.qos_specs.list())
@utils.arg('qos_specs', metavar='<qos_specs>',
           help='ID of QoS specifications.')
@utils.service_type('volume')
def do_qos_show(cs, args):
    """Shows a specified qos specs."""
    _print_qos_specs(_find_qos_specs(cs, args.qos_specs))
@utils.arg('qos_specs', metavar='<qos_specs>',
           help='ID of QoS specifications.')
@utils.arg('--force',
           metavar='<True|False>',
           default=False,
           help='Enables or disables deletion of in-use '
                'QoS specifications. Default=False.')
@utils.service_type('volume')
def do_qos_delete(cs, args):
    """Deletes a specified qos specs."""
    # --force arrives as a string; convert before the API call.
    cs.qos_specs.delete(_find_qos_specs(cs, args.qos_specs),
                        strutils.bool_from_string(args.force))
@utils.arg('qos_specs', metavar='<qos_specs>',
           help='ID of QoS specifications.')
@utils.arg('vol_type_id', metavar='<volume_type_id>',
           help='ID of volume type.')
@utils.service_type('volume')
def do_qos_associate(cs, args):
    """Associates qos specs with specified volume type."""
    # Association is by id; no lookup needed on the client side.
    cs.qos_specs.associate(args.qos_specs, args.vol_type_id)
@utils.arg('qos_specs', metavar='<qos_specs>',
           help='ID of QoS specifications.')
@utils.arg('vol_type_id', metavar='<volume_type_id>',
           help='ID of volume type.')
@utils.service_type('volume')
def do_qos_disassociate(cs, args):
    """Disassociates qos specs from specified volume type."""
    # Disassociation is by id; no lookup needed on the client side.
    cs.qos_specs.disassociate(args.qos_specs, args.vol_type_id)
@utils.arg('qos_specs', metavar='<qos_specs>',
           help='ID of QoS specifications.')
@utils.service_type('volume')
def do_qos_disassociate_all(cs, args):
    """Disassociates qos specs from all associations."""
    cs.qos_specs.disassociate_all(args.qos_specs)
@utils.arg('qos_specs', metavar='<qos_specs>',
           help='ID of QoS specifications.')
@utils.arg('action',
           metavar='<action>',
           choices=['set', 'unset'],
           help='The action. Valid values are "set" or "unset."')
@utils.arg('metadata', metavar='key=value',
           nargs='+',
           default=[],
           help='Metadata key and value pair to set or unset. '
                'For unset, specify only the key.')
def do_qos_key(cs, args):
    """Sets or unsets specifications for a qos spec."""
    # NOTE(review): unlike the sibling qos commands, this one carries no
    # @utils.service_type('volume') decorator -- confirm intentional.
    pairs = _extract_metadata(args)
    if args.action == 'set':
        cs.qos_specs.set_keys(args.qos_specs, pairs)
    elif args.action == 'unset':
        cs.qos_specs.unset_keys(args.qos_specs, list(pairs))
@utils.arg('qos_specs', metavar='<qos_specs>',
           help='ID of QoS specifications.')
@utils.service_type('volume')
def do_qos_get_association(cs, args):
    """Gets all associations for specified qos specs."""
    _print_associations_list(cs.qos_specs.get_associations(args.qos_specs))
@utils.arg('snapshot',
           metavar='<snapshot>',
           help='ID of snapshot for which to update metadata.')
@utils.arg('action',
           metavar='<action>',
           choices=['set', 'unset'],
           help='The action. Valid values are "set" or "unset."')
@utils.arg('metadata',
           metavar='<key=value>',
           nargs='+',
           default=[],
           help='The metadata key and value pair to set or unset. '
                'For unset, specify only the key.')
@utils.service_type('volume')
def do_snapshot_metadata(cs, args):
    """Sets or deletes snapshot metadata."""
    snapshot = _find_volume_snapshot(cs, args.snapshot)
    pairs = _extract_metadata(args)
    if args.action == 'set':
        # Display the metadata returned by the service after the set.
        utils.print_dict(snapshot.set_metadata(pairs)._info)
    elif args.action == 'unset':
        snapshot.delete_metadata(list(pairs.keys()))
@utils.arg('snapshot', metavar='<snapshot>',
           help='ID of snapshot.')
@utils.service_type('volume')
def do_snapshot_metadata_show(cs, args):
    """Shows snapshot metadata."""
    snapshot = _find_volume_snapshot(cs, args.snapshot)
    utils.print_dict(snapshot._info['metadata'], 'Metadata-property')
@utils.arg('volume', metavar='<volume>',
           help='ID of volume.')
@utils.service_type('volume')
def do_metadata_show(cs, args):
    """Shows volume metadata."""
    target = utils.find_volume(cs, args.volume)
    utils.print_dict(target._info['metadata'], 'Metadata-property')
@utils.arg('volume',
           metavar='<volume>',
           help='ID of volume for which to update metadata.')
@utils.arg('metadata',
           metavar='<key=value>',
           nargs='+',
           default=[],
           help='Metadata key and value pair or pairs to update. '
                'Default=[].')
@utils.service_type('volume')
def do_metadata_update_all(cs, args):
    """Updates volume metadata."""
    target = utils.find_volume(cs, args.volume)
    updated = target.update_all_metadata(_extract_metadata(args))
    utils.print_dict(updated['metadata'], 'Metadata-property')
@utils.arg('snapshot',
           metavar='<snapshot>',
           help='ID of snapshot for which to update metadata.')
@utils.arg('metadata',
           metavar='<key=value>',
           nargs='+',
           default=[],
           help='Metadata key and value pair or pairs to update. '
                'Default=[].')
@utils.service_type('volume')
def do_snapshot_metadata_update_all(cs, args):
    """Updates snapshot metadata."""
    snapshot = _find_volume_snapshot(cs, args.snapshot)
    updated = snapshot.update_all_metadata(_extract_metadata(args))
    utils.print_dict(updated)
@utils.arg('volume', metavar='<volume>', help='ID of volume to update.')
@utils.arg('read_only',
           metavar='<True|true|False|false>',
           choices=['True', 'true', 'False', 'false'],
           help='Enables or disables update of volume to '
                'read-only access mode.')
@utils.service_type('volume')
def do_readonly_mode_update(cs, args):
    """Updates volume read-only access-mode flag."""
    # The positional arrives as a string; convert before the API call.
    flag = strutils.bool_from_string(args.read_only)
    cs.volumes.update_readonly_flag(utils.find_volume(cs, args.volume), flag)
@utils.arg('volume', metavar='<volume>', help='ID of the volume to update.')
@utils.arg('bootable',
           metavar='<True|true|False|false>',
           choices=['True', 'true', 'False', 'false'],
           help='Flag to indicate whether volume is bootable.')
@utils.service_type('volume')
def do_set_bootable(cs, args):
    """Update bootable status of a volume."""
    # The positional arrives as a string; convert before the API call.
    flag = strutils.bool_from_string(args.bootable)
    cs.volumes.set_bootable(utils.find_volume(cs, args.volume), flag)
| {
"content_hash": "c10fcde648783722d3c00b60242a68ea",
"timestamp": "",
"source": "github",
"line_count": 1473,
"max_line_length": 78,
"avg_line_length": 32.84317718940937,
"alnum_prop": 0.5966968456736533,
"repo_name": "eayunstack/python-cinderclient",
"id": "63aa19eb2c885042272079e162543b7f0e72150a",
"size": "49033",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinderclient/v1/shell.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "776641"
},
{
"name": "Shell",
"bytes": "6954"
}
],
"symlink_target": ""
} |
import builtins
import logging
import os
from unittest import TestCase
from flexiconf import Configuration, ArgsLoader
from app.database.connection import DatabaseConnection
from app.database.scoped_session import ScopedSession
from app.models.all import Group, Request, Response, User
class MatcherAny:
    """Wildcard matcher: compares equal to any value (for call asserts)."""

    def __eq__(self, _other):
        return True
class BaseTestCase(TestCase):
    """Common test base that silences log output during tests."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # logging.disable(level) suppresses every message at or below
        # that level, so the original three calls (INFO, WARNING, ERROR)
        # were redundant -- only the final ERROR call had any effect.
        logging.disable(logging.ERROR)
class InBotTestCase(BaseTestCase):
    """Test base with a throwaway database wiped before each test."""

    def __init__(self, *args, **kwargs):
        super(InBotTestCase, self).__init__(*args, **kwargs)
        self.configuration = Configuration([ArgsLoader()])
        self.connection = DatabaseConnection(None, for_tests=True)

    def setUp(self):
        super(InBotTestCase, self).setUp()
        with ScopedSession(self.connection) as session:
            # Clear every table; presumably this order respects foreign
            # keys -- confirm before reordering.
            for model in (User, Response, Request, Group):
                session.query(model).delete()
        # Mock translation function, so that localized strings will be
        # returned as their identifiers. This will be overriden in real
        # translation setup, if needed.
        if not getattr(builtins, '_', None):
            builtins._ = lambda identifier: identifier
| {
"content_hash": "dd8885e82515cd8d43f20a1d623e122c",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 101,
"avg_line_length": 31.386363636363637,
"alnum_prop": 0.6784938450398262,
"repo_name": "KrusnikViers/Zordon",
"id": "f4d4ae8007582f51809ca54738bbc4fd53551497",
"size": "1381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "230"
},
{
"name": "Mako",
"bytes": "329"
},
{
"name": "Python",
"bytes": "76713"
},
{
"name": "Shell",
"bytes": "110"
}
],
"symlink_target": ""
} |
from unittest.mock import MagicMock, patch
from zerver.lib.test_classes import WebhookTestCase
# Expected Zulip topics: plain repository events vs. branch-scoped events.
TOPIC = "Repository name"
TOPIC_BRANCH_EVENTS = "Repository name / master"
class BitbucketHookTests(WebhookTestCase):
    """Tests for the legacy Bitbucket webhook integration.

    Each test loads a JSON fixture from ``FIXTURE_DIR_NAME``, posts it to the
    webhook URL and checks the message delivered to ``STREAM_NAME``.
    """

    STREAM_NAME = 'bitbucket'
    URL_TEMPLATE = "/api/v1/external/bitbucket?stream={stream}"
    FIXTURE_DIR_NAME = 'bitbucket'

    def test_bitbucket_on_push_event(self) -> None:
        fixture_name = 'push'
        self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
        commit_info = '* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))'
        expected_message = f"kolaszek pushed 1 commit to branch master.\n\n{commit_info}"
        self.api_stream_message(self.test_user, fixture_name, TOPIC_BRANCH_EVENTS,
                                expected_message)

    def test_bitbucket_on_push_event_without_user_info(self) -> None:
        fixture_name = 'push_without_user_info'
        self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
        commit_info = '* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))'
        expected_message = f"Someone pushed 1 commit to branch master. Commits by eeshangarg (1).\n\n{commit_info}"
        self.api_stream_message(self.test_user, fixture_name, TOPIC_BRANCH_EVENTS,
                                expected_message)

    def test_bitbucket_on_push_event_filtered_by_branches(self) -> None:
        # 'master' is in the branch filter, so the message goes through.
        fixture_name = 'push'
        self.url = self.build_webhook_url(payload=self.get_body(fixture_name),
                                          branches='master,development')
        commit_info = '* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))'
        expected_message = f"kolaszek pushed 1 commit to branch master.\n\n{commit_info}"
        self.api_stream_message(self.test_user, fixture_name, TOPIC_BRANCH_EVENTS,
                                expected_message)

    def test_bitbucket_on_push_commits_above_limit_event(self) -> None:
        # Only 20 commits are listed; the remainder is summarized.
        fixture_name = 'push_commits_above_limit'
        self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
        commit_info = '* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))\n'
        expected_message = f"kolaszek pushed 50 commits to branch master.\n\n{commit_info * 20}[and 30 more commit(s)]"
        self.api_stream_message(self.test_user, fixture_name, TOPIC_BRANCH_EVENTS,
                                expected_message)

    def test_bitbucket_on_push_commits_above_limit_event_filtered_by_branches(self) -> None:
        fixture_name = 'push_commits_above_limit'
        self.url = self.build_webhook_url(payload=self.get_body(fixture_name),
                                          branches='master,development')
        commit_info = '* c ([25f93d2](https://bitbucket.org/kolaszek/repository-name/commits/25f93d22b719e2d678a7ad5ee0ef0d1fcdf39c12))\n'
        expected_message = f"kolaszek pushed 50 commits to branch master.\n\n{commit_info * 20}[and 30 more commit(s)]"
        self.api_stream_message(self.test_user, fixture_name, TOPIC_BRANCH_EVENTS,
                                expected_message)

    def test_bitbucket_on_force_push_event(self) -> None:
        fixture_name = 'force_push'
        self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
        expected_message = "kolaszek [force pushed](https://bitbucket.org/kolaszek/repository-name)."
        self.api_stream_message(self.test_user, fixture_name, TOPIC,
                                expected_message)

    def test_bitbucket_on_force_push_event_without_user_info(self) -> None:
        fixture_name = 'force_push_without_user_info'
        self.url = self.build_webhook_url(payload=self.get_body(fixture_name))
        expected_message = "Someone [force pushed](https://bitbucket.org/kolaszek/repository-name/)."
        self.api_stream_message(self.test_user, fixture_name, TOPIC,
                                expected_message)

    @patch('zerver.webhooks.bitbucket.view.check_send_webhook_message')
    def test_bitbucket_on_push_event_filtered_by_branches_ignore(self, check_send_webhook_message_mock: MagicMock) -> None:
        # Branch filter excludes 'master': the request succeeds but no
        # message is sent.
        fixture_name = 'push'
        payload = self.get_body(fixture_name)
        self.url = self.build_webhook_url(payload=payload,
                                          branches='changes,development')
        # NOTE(review): content_type ends with a stray comma
        # ("application/json,") — looks like a typo; confirm intended.
        result = self.api_post(self.test_user, self.url, payload, content_type="application/json,")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)

    @patch('zerver.webhooks.bitbucket.view.check_send_webhook_message')
    def test_bitbucket_push_commits_above_limit_filtered_by_branches_ignore(
            self, check_send_webhook_message_mock: MagicMock) -> None:
        fixture_name = 'push_commits_above_limit'
        payload = self.get_body(fixture_name)
        self.url = self.build_webhook_url(payload=payload,
                                          branches='changes,development')
        # NOTE(review): same trailing-comma content_type as above — confirm.
        result = self.api_post(self.test_user, self.url, payload, content_type="application/json,")
        self.assertFalse(check_send_webhook_message_mock.called)
        self.assert_json_success(result)
| {
"content_hash": "1a46e1141d85b8083f6ea44c574ab7fe",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 138,
"avg_line_length": 61.85227272727273,
"alnum_prop": 0.662869740951681,
"repo_name": "showell/zulip",
"id": "5a9dfe198d19ea7622b2d070bdd113f42b953eca",
"size": "5443",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/webhooks/bitbucket/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "433235"
},
{
"name": "Dockerfile",
"bytes": "2941"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "634357"
},
{
"name": "Handlebars",
"bytes": "235334"
},
{
"name": "JavaScript",
"bytes": "3341135"
},
{
"name": "Perl",
"bytes": "8594"
},
{
"name": "Puppet",
"bytes": "79720"
},
{
"name": "Python",
"bytes": "8120030"
},
{
"name": "Ruby",
"bytes": "8480"
},
{
"name": "Shell",
"bytes": "133132"
},
{
"name": "TypeScript",
"bytes": "20603"
}
],
"symlink_target": ""
} |
import os
from datetime import datetime, timedelta
from django.utils import timezone
from django.core.files import File
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from substitutes.models import Session, SessionEvent, Sub, Division
from accounts.models import CustomUser, UserProfile
from teams.models import Team
import random
TZINFO = timezone.get_current_timezone()
DATA_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
class Command(BaseCommand):
    """Management command that populates the database with test data.

    Creates test users (plus an admin), user profiles with thumbnails,
    two divisions, sessions with weekly events, random substitute signups
    and two teams split between the divisions.
    """

    help = 'Populates database with default (test) data'

    def handle(self, *args, **options):
        """Entry point: build all fixture objects in dependency order."""
        # Plain-text credentials for the test accounts (test data only).
        users = [
            {
                "username": 'evan',
                "password": 'evanpassword',
                "first_name": 'evan',
                "last_name": 'smelser',
                "email": 'e@sme.com',
            },
            {
                "username": 'darrin',
                "password": 'darrinpassword',
                "first_name": 'darrin',
                "last_name": 'howard',
                "email": 'darrin@test.com',
            },
            {
                "username": 'jason',
                "password": 'jasonpassword',
                "first_name": 'jason',
                "last_name": 'bennett',
                "email": 'jason@test.com',
            },
            {
                "username": 'andy',
                "password": 'andypassword',
                "first_name": 'andy',
                "last_name": 'dalbey',
                "email": 'andy@test.com',
            },
            {
                "username": 'isaac',
                "password": 'isaacpassword',
                "first_name": 'isaac',
                "last_name": 'norman',
                "email": 'isaac@test.com',
            },
            {
                "username": 'chris',
                "password": 'chrispassword',
                "first_name": 'chris',
                "last_name": 'nieland',
                "email": 'chris@test.com',
            },
            {
                "username": 'sarah',
                "password": 'sarahpassword',
                "first_name": 'sarah',
                "last_name": 'nieland',
                "email": 'sarah@test.com',
            },
            {
                "username": 'nick',
                "password": 'nickpassword',
                "first_name": 'nick',
                "last_name": 'jordan',
                "email": 'nick@test.com',
            },
            {
                "username": 'pete',
                "password": 'petepassword',
                "first_name": 'pete',
                "last_name": 'gates',
                "email": 'pete@test.com',
            },
        ]
        self.stdout.write('Creating users...')
        users = [CustomUser.objects.create_user(**user) for user in users]
        evan = users[0]
        # NOTE(review): `content_type` is never used below — the Permission
        # lookup was probably meant to filter on it; confirm.
        content_type = ContentType.objects.get_for_model(Team)
        create_team_permission = Permission.objects.get(
            codename='add_team',
        )
        evan.user_permissions.add(create_team_permission)
        # Create admin
        admin = CustomUser.objects.create_user(username='admin', password='adminpassword')
        admin.is_superuser = True
        admin.is_staff = True
        admin.save()
        self.stdout.write('Creating profiles...')
        # Each profile gets a thumbnail image loaded from DATA_DIR.
        profiles = []
        for i, user in enumerate(users):
            profile = UserProfile.objects.create(user=user, phone_number='1231231234')
            name = 'profile_{}.jpeg'.format(i)
            with open(os.path.join(DATA_DIR, name), 'rb') as f:
                file = File(f)
                profile.thumbnail.save(name, file, save=True)
            profiles.append(profile)
        self.stdout.write('Creating divisions...')
        darrin = users[1]
        divisions = [
            Division.objects.create(name='Division A', division_rep=evan),
            Division.objects.create(name='Division B', division_rep=darrin)
        ]
        self.stdout.write('Creating sessions...')
        sessions = self.create_sessions(divisions)
        self.stdout.write('Creating session events...')
        # Randomly sign up ~10% of users as subs for each session event.
        for session in sessions:
            session_events = SessionEvent.objects.filter(session=session)
            for session_event in session_events:
                for user in users:
                    if random.random() < 0.1:
                        Sub.objects.create(user=user, date=timezone.now(), session_event=session_event)
        # Split the remaining users between two teams, one captain each.
        temp_users = users.copy()
        captain1 = temp_users.pop(0)
        captain2 = temp_users.pop(0)
        halfway = len(temp_users) // 2
        # NOTE(review): both teams are named 'team 1' — the second was
        # probably meant to be 'team 2'; confirm.
        teams = [
            {
                'name': 'team 1',
                'team_captain': captain1,
                'players': temp_users[halfway:]
            },
            {
                'name': 'team 1',
                'team_captain': captain2,
                'players': temp_users[:halfway]
            },
        ]
        self.stdout.write('Creating teams...')
        temp = []
        for team in teams:
            players = team.pop('players')
            team = Team.objects.create(**team)
            team.players.add(*players)
            temp.append(team)
        teams = temp
        divisions[0].teams.add(teams[0])
        divisions[1].teams.add(teams[1])

    def create_sessions(self, divisions):
        """Create one session per name, each with 8 weekly events.

        The first half of the sessions go to ``divisions[0]``, the rest to
        ``divisions[1]``. Returns the list of created Session objects.
        """
        session_names = ['wichita', '501 Bar', 'Location B', 'Rialto', 'SomeOtherSession', 'TheWhiteHouse', 'PSU',
                         'My House', 'Location Z']
        # Start on the 1st of the current month; each session starts a day
        # later than the previous one.
        day = datetime(datetime.now().year, datetime.now().month, 1, tzinfo=TZINFO)
        session_divisions = []
        start_dates = []
        for i, _ in enumerate(session_names):
            day = day + timedelta(days=1)
            start_dates.append(day)
            if i < len(session_names) // 2:
                session_divisions.append(divisions[0])
            else:
                session_divisions.append(divisions[1])
        end_dates = [date + timedelta(hours=4) for date in start_dates]
        sessions = []
        for start, end, name, division in zip(start_dates, end_dates, session_names, session_divisions):
            session = Session.objects.create(name=name, game='8ball', division=division, start_date=start, end_date=end)
            sessions.append(session)
        for session in sessions:
            start = session.start_date
            start_hour = random.choice([16, 17, 18])  # Starts at 4, 5, or 6
            # NOTE(review): this datetime is naive while the session dates
            # are timezone-aware — confirm that's intended.
            start_time = datetime(start.year, start.month, start.day, start_hour)
            # Make 8 weeks worth of events per session
            date = start.date()
            for _ in range(8):
                SessionEvent.objects.create(session=session, start_time=start_time, date=date)
                date += timedelta(days=7)
return sessions | {
"content_hash": "818b02ae95a5949846b5147ceec8a61f",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 120,
"avg_line_length": 35.213197969543145,
"alnum_prop": 0.5124693671615972,
"repo_name": "eSmelser/SnookR",
"id": "4d271365927c495e5e759a96cf190b9f324210b0",
"size": "7065",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "SnookR/substitutes/management/commands/db_populate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "42731"
},
{
"name": "HTML",
"bytes": "83044"
},
{
"name": "JavaScript",
"bytes": "7810344"
},
{
"name": "PHP",
"bytes": "6093"
},
{
"name": "Python",
"bytes": "115524"
}
],
"symlink_target": ""
} |
import os
import shutil
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
    """Base configuration shared by all environments.

    Note: the class body has an import-time side effect — it copies the
    template ``settings`` file to SETTINGS if none exists yet.
    """

    BASEDIR = os.path.abspath(os.path.dirname(__file__))
    # Settings file path; falls back to settings.cfg next to this module.
    SETTINGS = os.environ.get('SWARMS_SURVEY_SETTINGS') or \
        os.path.join(basedir, 'settings.cfg')
    # Bootstrap: copy the template 'settings' file on first run.
    if not os.path.isfile(SETTINGS):
        shutil.copy(os.path.join(basedir, 'settings'),SETTINGS)
    JMETER = False
    # NOTE(review): hard-coded fallback SECRET_KEY is unsafe if it ever
    # reaches production — confirm an env value is always set there.
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
    WTF_CSRF_ENABLED = True
    SSL_DISABLE = False
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    SQLALCHEMY_RECORD_QUERIES = True
    LANGUAGES = {
        'en': 'English',
        'es': 'Español'
    }
    LOCALES = ['en', 'es']
    OPENID_PROVIDERS = [
        { 'name': 'Google', 'url': 'https://www.google.com/accounts/o8/id' },
        { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
        { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },
        { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },
        { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
    # Mail settings default to None; concrete deployments override them.
    MAIL_SERVER = None
    MAIL_PORT = None
    MAIL_USE_TLS = None
    MAIL_USERNAME = None
    MAIL_PASSWORD = None
    SWARMS_MAIL_SUBJECT_PREFIX = None
    SWARMS_MAIL_SENDER = None
    MODE_GAMES = False

    @staticmethod
    def init_app(app):
        # Hook for subclasses to attach handlers etc.; base does nothing.
        pass
class DevelopmentConfig(Config):
    """Local development: debug mode on, local SQLite database by default."""

    DEBUG = True
    SQLALCHEMY_DATABASE_URI = (
        os.environ.get('DEV_DATABASE_URL')
        or 'sqlite:///' + os.path.join(basedir, 'data-dev.sqlite')
    )
class TestingConfig(Config):
    """Test runs: testing flag on, CSRF off, separate SQLite database."""

    TESTING = True
    WTF_CSRF_ENABLED = False
    SQLALCHEMY_DATABASE_URI = (
        os.environ.get('TEST_DATABASE_URL')
        or 'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
    )
class Jmeter(Config):
    """Load-testing (JMeter) profile: fixed question sequence, CSRF off."""

    JMETER = True
    # Pre-defined question/page order used by the JMeter scripts.
    SEQUENCE = [5, 6, 7, 8, 9, 10, 17, 16, 12, 23, 22, 32, 33, 40, 41, 42, 49, 50, 51, 46, 47, 48, 14, 15]
    WTF_CSRF_ENABLED = False
    SQLALCHEMY_DATABASE_URI = (
        os.environ.get('JMETER_DATABASE_URL')
        or 'sqlite:///' + os.path.join(basedir, 'data-jmeter.sqlite')
    )
class ProductionConfig(Config):
    """Production profile: real database URL and error reporting via email."""

    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'data.sqlite')

    @classmethod
    def init_app(cls, app):
        """Attach an SMTP handler that emails ERROR-level log records.

        Credentials and TLS are taken from the app config (MAIL_* keys);
        recipients come from SWARMS_ADMIN.
        """
        Config.init_app(app)
        # BUGFIX: removed leftover `import pdb; pdb.set_trace()` debugger
        # breakpoint that froze every production startup.

        # email errors to the administrators
        import logging
        from logging.handlers import SMTPHandler
        credentials = None
        secure = None
        if app.config.get('MAIL_USERNAME') is not None:
            credentials = (app.config.get('MAIL_USERNAME'), app.config.get('MAIL_PASSWORD'))
            if app.config.get('MAIL_USE_TLS', False):
                secure = ()
        mail_handler = SMTPHandler(
            mailhost=(app.config.get('MAIL_SERVER'), app.config.get('MAIL_PORT')),
            # BUGFIX: was '.SWARMS_MAIL_SENDER' — the leading dot meant the
            # config lookup always returned None.
            fromaddr=app.config.get('SWARMS_MAIL_SENDER'),
            toaddrs=[app.config.get('SWARMS_ADMIN')],
            subject=app.config.get('SWARMS_MAIL_SUBJECT_PREFIX') + ' Application Error',
            credentials=credentials,
            secure=secure)
        mail_handler.setLevel(logging.ERROR)
        app.logger.addHandler(mail_handler)
class JmeterProduction(ProductionConfig):
    """JMeter load-testing on top of the production profile."""

    JMETER = True
    # Pre-defined question/page order used by the JMeter scripts.
    SEQUENCE = [5, 6, 7, 8, 9, 10, 17, 16, 12, 23, 22, 32, 33, 40, 41, 42, 49, 50, 51, 46, 47, 48, 14, 15]
    WTF_CSRF_ENABLED = False
    SQLALCHEMY_DATABASE_URI = (
        os.environ.get('JMETER_DATABASE_URL')
        or 'sqlite:///' + os.path.join(basedir, 'data-jmeter.sqlite')
    )
class HerokuConfig(ProductionConfig):
    """Heroku deployment: proxy-aware WSGI and stderr logging."""

    SSL_DISABLE = bool(os.environ.get('SSL_DISABLE'))

    @classmethod
    def init_app(cls, app):
        ProductionConfig.init_app(app)
        # handle proxy server headers
        # NOTE(review): werkzeug.contrib.fixers was removed in Werkzeug 1.0
        # (moved to werkzeug.middleware.proxy_fix) — verify pinned version.
        from werkzeug.contrib.fixers import ProxyFix
        app.wsgi_app = ProxyFix(app.wsgi_app)
        # log to stderr (Heroku captures process output)
        import logging
        from logging import StreamHandler
        file_handler = StreamHandler()
        file_handler.setLevel(logging.WARNING)
        app.logger.addHandler(file_handler)
class UnixConfig(ProductionConfig):
    """Unix-host deployment: rotating file log (syslog variant kept as reference)."""

    @classmethod
    def init_app(cls, app):
        ProductionConfig.init_app(app)
        # log to syslog unix machine
        # import logging
        # from logging.handlers import SysLogHandler
        # syslog_handler = SysLogHandler()
        # syslog_handler.setLevel(logging.WARNING)
        # app.logger.addHandler(syslog_handler)
        # file log
        import logging
        from logging.handlers import RotatingFileHandler
        # NOTE(review): relative path — 'temp/' must exist in the working
        # directory or this raises at startup; confirm deployment layout.
        file_handler = RotatingFileHandler('temp/swarms.log', 'a', 1 * 1024 * 1024, 10)
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
        app.logger.addHandler(file_handler)
        app.logger.setLevel(logging.INFO)
        app.logger.info('swarms startup')
# Named configuration registry: maps an environment name to its class.
config = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
    'production': ProductionConfig,
    'heroku': HerokuConfig,
    'unix': UnixConfig,
    'jmeter': Jmeter,
    'jmeterProduction' : JmeterProduction,
    'default': DevelopmentConfig
}
| {
"content_hash": "7687afe55cd1f7b740f103b586c913e0",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 123,
"avg_line_length": 34.56666666666667,
"alnum_prop": 0.6237222757955642,
"repo_name": "nukru/Swarm-Surveys",
"id": "a0302a9c27b4930330c48a6f162714aa49b9cb53",
"size": "5239",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4675"
},
{
"name": "JavaScript",
"bytes": "91058"
},
{
"name": "Python",
"bytes": "366597"
},
{
"name": "Shell",
"bytes": "46"
},
{
"name": "TeX",
"bytes": "234707"
}
],
"symlink_target": ""
} |
import logging

# Backwards-compatibility shim: warn, then re-export the grid model from
# its new wx-specific location. `logging.warning` replaces the deprecated
# `logging.warn` alias.
logging.warning('DEPRECATED: enthought.pyface.grid, '
                'use enthought.pyface.ui.wx.grid instead.')

from enthought.pyface.ui.wx.grid.grid_model import *
| {
"content_hash": "97081720fa908a51756c026d0c74a721",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 56,
"avg_line_length": 35.4,
"alnum_prop": 0.7231638418079096,
"repo_name": "enthought/traitsgui",
"id": "4884cd621eed9ca1838677903aea85f435bc41ff",
"size": "177",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enthought/pyface/grid/grid_model.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "1196658"
}
],
"symlink_target": ""
} |
from rest_framework import serializers
from ..fields import MoneyField
class ProductSerializer(serializers.Serializer):
    """Read/write representation of a product.

    ``id``, ``category`` and ``qty`` are read-only; ``category`` is
    flattened to the related category's UUID.
    """

    id = serializers.UUIDField(read_only=True)
    name = serializers.CharField()
    code = serializers.CharField()
    description = serializers.CharField(allow_null=True)
    image = serializers.ImageField(allow_null=True)
    # Custom money field; prices may not be negative.
    price = MoneyField(non_negative=True)
    category = serializers.UUIDField(read_only=True, source='category.id')
    active = serializers.BooleanField(default=False)
    qty = serializers.IntegerField(read_only=True)
class ProductParamSerializer(serializers.Serializer):
    """Optional query parameters for product listing (pagination + code filter)."""

    start = serializers.IntegerField(min_value=0, required=False)
    limit = serializers.IntegerField(min_value=0, required=False)
    code = serializers.CharField(required=False)
class ProductCategorySerializer(serializers.Serializer):
    """Representation of a product category; ``id`` is read-only."""

    id = serializers.UUIDField(read_only=True)
    name = serializers.CharField()
    image = serializers.ImageField(allow_null=True)
| {
"content_hash": "006f0fc99e0cf46da3bcca9f8320c689",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 74,
"avg_line_length": 38.42307692307692,
"alnum_prop": 0.7667667667667668,
"repo_name": "uppsaladatavetare/foobar-api",
"id": "609dc165b9d30d96ce8a408360cdc4258b2f4485",
"size": "999",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/foobar/rest/serializers/product.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3317"
},
{
"name": "HTML",
"bytes": "10880"
},
{
"name": "JavaScript",
"bytes": "10604"
},
{
"name": "Makefile",
"bytes": "796"
},
{
"name": "Python",
"bytes": "318730"
}
],
"symlink_target": ""
} |
from django.conf.urls import *
from django.utils.translation import ugettext_lazy as _
"""
Also used in cms.tests.ApphooksTestCase
"""
# URL routes for the sample app. Old-style Django routing: `patterns()`
# with dotted-string view names (both removed in Django 1.10) resolves
# each view relative to the prefix module given as the first argument.
urlpatterns = patterns('cms.test_utils.project.sampleapp.views',
    url(r'^$', 'sample_view', {'message': 'sample root page',}, name='sample-root'),
    url(r'^settings/$', 'sample_view', kwargs={'message': 'sample settings page'}, name='sample-settings'),
    url(r'^myparams/(?P<my_params>[\w_-]+)/$', 'sample_view', name='sample-params'),
    # Translatable pattern: the 'account/' prefix may vary per language.
    url(_(r'^account/$'), 'sample_view', {'message': 'sample account page'}, name='sample-account'),
    url(r'^account/my_profile/$', 'sample_view', {'message': 'sample my profile page'}, name='sample-profile'),
    url(r'^category/(?P<id>[0-9]+)/$', 'category_view', name='category_view'),
    url(r'^notfound/$', 'notfound', name='notfound'),
    url(r'^extra_1/$', 'extra_view', {'message': 'test urlconf'}, name='extra_first'),
    url(r'^', include('cms.test_utils.project.sampleapp.urls_extra')),
)
| {
"content_hash": "7c0b9ea1bfda51d8ee3cc58270b3e226",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 111,
"avg_line_length": 54.77777777777778,
"alnum_prop": 0.6440162271805274,
"repo_name": "foobacca/django-cms",
"id": "681dbc18a4edb3d1d8cd388071b5f418bde95dcc",
"size": "986",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "cms/test_utils/project/sampleapp/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "125849"
},
{
"name": "JavaScript",
"bytes": "653131"
},
{
"name": "PHP",
"bytes": "2156"
},
{
"name": "Python",
"bytes": "2337854"
},
{
"name": "Ruby",
"bytes": "990"
},
{
"name": "XSLT",
"bytes": "5122"
}
],
"symlink_target": ""
} |
import base64
import os
import shutil
import string
import sys
import tempfile
import unittest
from datetime import timedelta
from http import cookies
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import \
SessionStore as CacheDBSession
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import \
SessionStore as CookieSession
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import (
JSONSerializer, PickleSerializer,
)
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.http import HttpResponse
from django.test import (
RequestFactory, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import patch_logger
from django.utils import timezone
from .models import SessionStore as CustomDatabaseSession
class SessionTestsMixin:
# This does not inherit from TestCase to avoid any tests being run with this
# class, which wouldn't work, and to allow different TestCase subclasses to
# be used.
backend = None # subclasses must specify
def setUp(self):
self.session = self.backend()
def tearDown(self):
# NB: be careful to delete any sessions created; stale sessions fill up
# the /tmp (with some backends) and eventually overwhelm it after lots
# of runs (think buildbots)
self.session.delete()
def test_new_session(self):
self.assertFalse(self.session.modified)
self.assertFalse(self.session.accessed)
def test_get_empty(self):
self.assertIsNone(self.session.get('cat'))
def test_store(self):
self.session['cat'] = "dog"
self.assertTrue(self.session.modified)
self.assertEqual(self.session.pop('cat'), 'dog')
def test_pop(self):
self.session['some key'] = 'exists'
# Need to reset these to pretend we haven't accessed it:
self.accessed = False
self.modified = False
self.assertEqual(self.session.pop('some key'), 'exists')
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
self.assertIsNone(self.session.get('some key'))
def test_pop_default(self):
self.assertEqual(self.session.pop('some key', 'does not exist'),
'does not exist')
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_pop_default_named_argument(self):
self.assertEqual(self.session.pop('some key', default='does not exist'), 'does not exist')
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_pop_no_default_keyerror_raised(self):
with self.assertRaises(KeyError):
self.session.pop('some key')
def test_setdefault(self):
self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
def test_update(self):
self.session.update({'update key': 1})
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
self.assertEqual(self.session.get('update key', None), 1)
def test_has_key(self):
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertIn('some key', self.session)
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_values(self):
self.assertEqual(list(self.session.values()), [])
self.assertTrue(self.session.accessed)
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.values()), [1])
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_keys(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.keys()), ['x'])
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_items(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_clear(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.session.clear()
self.assertEqual(list(self.session.items()), [])
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
def test_save(self):
self.session.save()
self.assertTrue(self.session.exists(self.session.session_key))
def test_delete(self):
self.session.save()
self.session.delete(self.session.session_key)
self.assertFalse(self.session.exists(self.session.session_key))
def test_flush(self):
self.session['foo'] = 'bar'
self.session.save()
prev_key = self.session.session_key
self.session.flush()
self.assertFalse(self.session.exists(prev_key))
self.assertNotEqual(self.session.session_key, prev_key)
self.assertIsNone(self.session.session_key)
self.assertTrue(self.session.modified)
self.assertTrue(self.session.accessed)
def test_cycle(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_key = self.session.session_key
prev_data = list(self.session.items())
self.session.cycle_key()
self.assertFalse(self.session.exists(prev_key))
self.assertNotEqual(self.session.session_key, prev_key)
self.assertEqual(list(self.session.items()), prev_data)
def test_cycle_with_no_session_cache(self):
self.assertFalse(hasattr(self.session, '_session_cache'))
self.session.cycle_key()
def test_save_doesnt_clear_data(self):
self.session['a'] = 'b'
self.session.save()
self.assertEqual(self.session['a'], 'b')
def test_invalid_key(self):
# Submitting an invalid session key (either by guessing, or if the db has
# removed the key) results in a new key being generated.
try:
session = self.backend('1')
session.save()
self.assertNotEqual(session.session_key, '1')
self.assertIsNone(session.get('cat'))
session.delete()
finally:
# Some backends leave a stale cache entry for the invalid
# session key; make sure that entry is manually deleted
session.delete('1')
def test_session_key_empty_string_invalid(self):
"""Falsey values (Such as an empty string) are rejected."""
self.session._session_key = ''
self.assertIsNone(self.session.session_key)
def test_session_key_too_short_invalid(self):
"""Strings shorter than 8 characters are rejected."""
self.session._session_key = '1234567'
self.assertIsNone(self.session.session_key)
def test_session_key_valid_string_saved(self):
"""Strings of length 8 and up are accepted and stored."""
self.session._session_key = '12345678'
self.assertEqual(self.session.session_key, '12345678')
def test_session_key_is_read_only(self):
def set_session_key(session):
session.session_key = session._get_new_session_key()
with self.assertRaises(AttributeError):
set_session_key(self.session)
# Custom session expiry
def test_default_expiry(self):
# A normal session has a max age equal to settings
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
# So does a custom session with an idle expiration time of 0 (but it'll
# expire at browser close)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_custom_expiry_seconds(self):
modification = timezone.now()
self.session.set_expiry(10)
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_timedelta(self):
modification = timezone.now()
# Mock timezone.now, because set_expiry calls it on this code path.
original_now = timezone.now
try:
timezone.now = lambda: modification
self.session.set_expiry(timedelta(seconds=10))
finally:
timezone.now = original_now
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_datetime(self):
modification = timezone.now()
self.session.set_expiry(modification + timedelta(seconds=10))
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_get_expire_at_browser_close(self):
# Tests get_expire_at_browser_close with different settings and different
# set_expiry calls
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
self.session.set_expiry(10)
self.assertFalse(self.session.get_expire_at_browser_close())
self.session.set_expiry(0)
self.assertTrue(self.session.get_expire_at_browser_close())
self.session.set_expiry(None)
self.assertFalse(self.session.get_expire_at_browser_close())
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
self.session.set_expiry(10)
self.assertFalse(self.session.get_expire_at_browser_close())
self.session.set_expiry(0)
self.assertTrue(self.session.get_expire_at_browser_close())
self.session.set_expiry(None)
self.assertTrue(self.session.get_expire_at_browser_close())
def test_decode(self):
# Ensure we can decode what we encode
data = {'a test key': 'a test value'}
encoded = self.session.encode(data)
self.assertEqual(self.session.decode(encoded), data)
def test_decode_failure_logged_to_security(self):
bad_encode = base64.b64encode(b'flaskdj:alkdjf')
with patch_logger('django.security.SuspiciousSession', 'warning') as calls:
self.assertEqual({}, self.session.decode(bad_encode))
# check that the failed decode is logged
self.assertEqual(len(calls), 1)
self.assertIn('corrupted', calls[0])
def test_actual_expiry(self):
# this doesn't work with JSONSerializer (serializing timedelta)
with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'):
self.session = self.backend() # reinitialize after overriding settings
# Regression test for #19200
old_session_key = None
new_session_key = None
try:
self.session['foo'] = 'bar'
self.session.set_expiry(-timedelta(seconds=10))
self.session.save()
old_session_key = self.session.session_key
# With an expiry date in the past, the session expires instantly.
new_session = self.backend(self.session.session_key)
new_session_key = new_session.session_key
self.assertNotIn('foo', new_session)
finally:
self.session.delete(old_session_key)
self.session.delete(new_session_key)
def test_session_load_does_not_create_record(self):
"""
Loading an unknown session key does not create a session record.
Creating session records on load is a DOS vulnerability.
"""
session = self.backend('someunknownkey')
session.load()
self.assertFalse(session.exists(session.session_key))
# provided unknown key was cycled, not reused
self.assertNotEqual(session.session_key, 'someunknownkey')
    def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
        """
        Sessions shouldn't be resurrected by a concurrent request.
        """
        # Create new session.
        s1 = self.backend()
        s1['test_data'] = 'value1'
        s1.save(must_create=True)
        # Logout in another context.
        s2 = self.backend(s1.session_key)
        s2.delete()
        # Modify session in first context.
        s1['test_data'] = 'value2'
        with self.assertRaises(UpdateError):
            # This should throw an exception as the session is deleted, not
            # resurrect the session.
            s1.save()
        # After the failed save, reloading yields an empty session.
        self.assertEqual(s1.load(), {})
class DatabaseSessionTests(SessionTestsMixin, TestCase):
    """Run the common session tests against the database backend."""

    backend = DatabaseSession
    session_engine = 'django.contrib.sessions.backends.db'

    @property
    def model(self):
        # The concrete session model used by the backend under test.
        return self.backend.get_model_class()

    def test_session_str(self):
        "str() of a session record is the session key."
        self.session['x'] = 1
        self.session.save()
        session_key = self.session.session_key
        s = self.model.objects.get(session_key=session_key)
        self.assertEqual(str(s), session_key)

    def test_session_get_decoded(self):
        """
        Test we can use Session.get_decoded to retrieve data stored
        in normal way
        """
        self.session['x'] = 1
        self.session.save()
        s = self.model.objects.get(session_key=self.session.session_key)
        self.assertEqual(s.get_decoded(), {'x': 1})

    def test_sessionmanager_save(self):
        """
        Test SessionManager.save method
        """
        # Create a session
        self.session['y'] = 1
        self.session.save()
        s = self.model.objects.get(session_key=self.session.session_key)
        # Change it directly through the manager, bypassing self.session.
        self.model.objects.save(s.session_key, {'y': 2}, s.expire_date)
        # Clear cache, so that it will be retrieved from DB
        del self.session._session_cache
        self.assertEqual(self.session['y'], 2)

    def test_clearsessions_command(self):
        """
        Test clearsessions command for clearing expired sessions.
        """
        self.assertEqual(0, self.model.objects.count())
        # One object in the future
        self.session['foo'] = 'bar'
        self.session.set_expiry(3600)
        self.session.save()
        # One object in the past
        other_session = self.backend()
        other_session['foo'] = 'bar'
        other_session.set_expiry(-3600)
        other_session.save()
        # Two sessions are in the database before clearsessions...
        self.assertEqual(2, self.model.objects.count())
        # clearsessions uses SESSION_ENGINE to find the store to purge.
        with override_settings(SESSION_ENGINE=self.session_engine):
            management.call_command('clearsessions')
        # ... and one is deleted.
        self.assertEqual(1, self.model.objects.count())
@override_settings(USE_TZ=True)
class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests):
    """Repeat the database-session tests with USE_TZ enabled."""
class CustomDatabaseSessionTests(DatabaseSessionTests):
    """Exercise a project-defined session model with an extra column."""

    backend = CustomDatabaseSession
    session_engine = 'sessions_tests.models'

    def test_extra_session_field(self):
        """The custom backend mirrors '_auth_user_id' into a model column."""
        # Set the account ID to be picked up by a custom session storage
        # and saved to a custom session model database column.
        self.session['_auth_user_id'] = 42
        self.session.save()
        # Make sure that the customized create_model_instance() was called.
        s = self.model.objects.get(session_key=self.session.session_key)
        self.assertEqual(s.account_id, 42)
        # Make the session "anonymous".
        self.session.pop('_auth_user_id')
        self.session.save()
        # Make sure that save() on an existing session did the right job.
        s = self.model.objects.get(session_key=self.session.session_key)
        self.assertIsNone(s.account_id)
class CacheDBSessionTests(SessionTestsMixin, TestCase):
    """Run the common session tests against the cached-db backend."""

    backend = CacheDBSession

    def test_exists_searches_cache_first(self):
        # After a save, exists() must be answerable from the cache alone,
        # i.e. with zero database queries.
        self.session.save()
        with self.assertNumQueries(0):
            self.assertTrue(self.session.exists(self.session.session_key))

    # Some backends might issue a warning
    @ignore_warnings(module="django.core.cache.backends.base")
    def test_load_overlong_key(self):
        # An over-long key must load as an empty session, not crash.
        self.session._session_key = (string.ascii_letters + string.digits) * 20
        self.assertEqual(self.session.load(), {})

    @override_settings(SESSION_CACHE_ALIAS='sessions')
    def test_non_default_cache(self):
        # 21000 - CacheDB backend should respect SESSION_CACHE_ALIAS.
        # The 'sessions' alias is not configured here, so instantiation fails.
        with self.assertRaises(InvalidCacheBackendError):
            self.backend()
@override_settings(USE_TZ=True)
class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests):
    """Repeat the cached-db session tests with USE_TZ enabled."""
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class FileSessionTests(SessionTestsMixin, unittest.TestCase):
    """Run the common session tests against the file backend."""

    backend = FileSession

    def setUp(self):
        # Do file session tests in an isolated directory, and kill it after we're done.
        self.original_session_file_path = settings.SESSION_FILE_PATH
        self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp()
        # Reset the file session backend's internal caches
        if hasattr(self.backend, '_storage_path'):
            del self.backend._storage_path
        super(FileSessionTests, self).setUp()

    def tearDown(self):
        super(FileSessionTests, self).tearDown()
        # Restore the original setting, then remove the temporary store.
        settings.SESSION_FILE_PATH = self.original_session_file_path
        shutil.rmtree(self.temp_session_store)

    @override_settings(
        SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer")
    def test_configuration_check(self):
        # Drop the cached storage path so the override is actually re-read.
        del self.backend._storage_path
        # Make sure the file backend checks for a good storage dir
        with self.assertRaises(ImproperlyConfigured):
            self.backend()

    def test_invalid_key_backslash(self):
        # Ensure we don't allow directory-traversal.
        # This is tested directly on _key_to_file, as load() will swallow
        # a SuspiciousOperation in the same way as an IOError - by creating
        # a new session, making it unclear whether the slashes were detected.
        with self.assertRaises(InvalidSessionKey):
            self.backend()._key_to_file("a\\b\\c")

    def test_invalid_key_forwardslash(self):
        # Ensure we don't allow directory-traversal
        with self.assertRaises(InvalidSessionKey):
            self.backend()._key_to_file("a/b/c")

    @override_settings(
        SESSION_ENGINE="django.contrib.sessions.backends.file",
        SESSION_COOKIE_AGE=0,
    )
    def test_clearsessions_command(self):
        """
        Test clearsessions command for clearing expired sessions.
        """
        storage_path = self.backend._get_storage_path()
        file_prefix = settings.SESSION_COOKIE_NAME

        def count_sessions():
            # Count session files currently present in the storage dir.
            return len([
                session_file for session_file in os.listdir(storage_path)
                if session_file.startswith(file_prefix)
            ])

        self.assertEqual(0, count_sessions())
        # One object in the future
        self.session['foo'] = 'bar'
        self.session.set_expiry(3600)
        self.session.save()
        # One object in the past
        other_session = self.backend()
        other_session['foo'] = 'bar'
        other_session.set_expiry(-3600)
        other_session.save()
        # One object in the present without an expiry (should be deleted since
        # its modification time + SESSION_COOKIE_AGE will be in the past when
        # clearsessions runs).
        other_session2 = self.backend()
        other_session2['foo'] = 'bar'
        other_session2.save()
        # Three sessions are in the filesystem before clearsessions...
        self.assertEqual(3, count_sessions())
        management.call_command('clearsessions')
        # ... and two are deleted.
        self.assertEqual(1, count_sessions())
class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
    """Run the common session tests against the pure-cache backend."""

    backend = CacheSession

    # Some backends might issue a warning
    @ignore_warnings(module="django.core.cache.backends.base")
    def test_load_overlong_key(self):
        # An over-long key must load as an empty session, not crash.
        self.session._session_key = (string.ascii_letters + string.digits) * 20
        self.assertEqual(self.session.load(), {})

    def test_default_cache(self):
        # Without SESSION_CACHE_ALIAS the data lands in the 'default' cache.
        self.session.save()
        self.assertIsNotNone(caches['default'].get(self.session.cache_key))

    @override_settings(CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
        },
        'sessions': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'session',
        },
    }, SESSION_CACHE_ALIAS='sessions')
    def test_non_default_cache(self):
        # Re-initialize the session backend to make use of overridden settings.
        self.session = self.backend()
        self.session.save()
        # The dummy 'default' cache stores nothing, so the data must have
        # gone to the aliased 'sessions' cache.
        self.assertIsNone(caches['default'].get(self.session.cache_key))
        self.assertIsNotNone(caches['sessions'].get(self.session.cache_key))

    def test_create_and_save(self):
        # create() followed by save() must persist the session to the cache.
        self.session = self.backend()
        self.session.create()
        self.session.save()
        self.assertIsNotNone(caches['default'].get(self.session.cache_key))
class SessionMiddlewareTests(TestCase):
    """Tests for SessionMiddleware's cookie handling and save behavior."""

    @override_settings(SESSION_COOKIE_SECURE=True)
    def test_secure_session_cookie(self):
        """SESSION_COOKIE_SECURE=True sets the cookie's 'secure' flag."""
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()
        # Simulate a request that modifies the session
        middleware.process_request(request)
        request.session['hello'] = 'world'
        # Handle the response through the middleware
        response = middleware.process_response(request, response)
        self.assertTrue(
            response.cookies[settings.SESSION_COOKIE_NAME]['secure'])

    @override_settings(SESSION_COOKIE_HTTPONLY=True)
    def test_httponly_session_cookie(self):
        """SESSION_COOKIE_HTTPONLY=True sets the cookie's 'httponly' flag."""
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()
        # Simulate a request that modifies the session
        middleware.process_request(request)
        request.session['hello'] = 'world'
        # Handle the response through the middleware
        response = middleware.process_response(request, response)
        self.assertTrue(
            response.cookies[settings.SESSION_COOKIE_NAME]['httponly'])
        # The flag must also appear in the serialized Set-Cookie header.
        self.assertIn(
            cookies.Morsel._reserved['httponly'],
            str(response.cookies[settings.SESSION_COOKIE_NAME])
        )

    @override_settings(SESSION_COOKIE_HTTPONLY=False)
    def test_no_httponly_session_cookie(self):
        """SESSION_COOKIE_HTTPONLY=False leaves the 'httponly' flag off."""
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()
        # Simulate a request that modifies the session
        middleware.process_request(request)
        request.session['hello'] = 'world'
        # Handle the response through the middleware
        response = middleware.process_response(request, response)
        self.assertFalse(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'])
        self.assertNotIn(cookies.Morsel._reserved['httponly'],
                         str(response.cookies[settings.SESSION_COOKIE_NAME]))

    def test_session_save_on_500(self):
        """Session data isn't saved when the response is a server error."""
        request = RequestFactory().get('/')
        response = HttpResponse('Horrible error')
        response.status_code = 500
        middleware = SessionMiddleware()
        # Simulate a request that modifies the session
        middleware.process_request(request)
        request.session['hello'] = 'world'
        # Handle the response through the middleware
        response = middleware.process_response(request, response)
        # The value wasn't saved above.
        self.assertNotIn('hello', request.session.load())

    def test_session_update_error_redirect(self):
        """Saving a concurrently-deleted session raises SuspiciousOperation."""
        path = '/foo/'
        request = RequestFactory().get(path)
        response = HttpResponse()
        middleware = SessionMiddleware()
        request.session = DatabaseSession()
        request.session.save(must_create=True)
        request.session.delete()
        msg = (
            "The request's session was deleted before the request completed. "
            "The user may have logged out in a concurrent request, for example."
        )
        with self.assertRaisesMessage(SuspiciousOperation, msg):
            # Handle the response through the middleware. It will try to save
            # the deleted session which will cause an UpdateError that's caught
            # and raised as a SuspiciousOperation.
            middleware.process_response(request, response)

    def test_session_delete_on_end(self):
        """Flushing the session deletes the session cookie in the response."""
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()
        # Before deleting, there has to be an existing cookie
        request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
        # Simulate a request that ends the session
        middleware.process_request(request)
        request.session.flush()
        # Handle the response through the middleware
        response = middleware.process_response(request, response)
        # The cookie was deleted, not recreated.
        # A deleted cookie header looks like:
        #  Set-Cookie: sessionid=; expires=Thu, 01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/
        self.assertEqual(
            'Set-Cookie: {}={}; expires=Thu, 01-Jan-1970 00:00:00 GMT; '
            'Max-Age=0; Path=/'.format(
                settings.SESSION_COOKIE_NAME,
                # Python 3.5+ serializes an empty cookie value as "".
                '""' if sys.version_info >= (3, 5) else '',
            ),
            str(response.cookies[settings.SESSION_COOKIE_NAME])
        )

    @override_settings(SESSION_COOKIE_DOMAIN='.example.local', SESSION_COOKIE_PATH='/example/')
    def test_session_delete_on_end_with_custom_domain_and_path(self):
        """The deletion cookie carries the configured domain and path."""
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()
        # Before deleting, there has to be an existing cookie
        request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
        # Simulate a request that ends the session
        middleware.process_request(request)
        request.session.flush()
        # Handle the response through the middleware
        response = middleware.process_response(request, response)
        # The cookie was deleted, not recreated.
        # A deleted cookie header with a custom domain and path looks like:
        #  Set-Cookie: sessionid=; Domain=.example.local;
        #              expires=Thu, 01-Jan-1970 00:00:00 GMT; Max-Age=0;
        #              Path=/example/
        self.assertEqual(
            'Set-Cookie: {}={}; Domain=.example.local; expires=Thu, '
            '01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/example/'.format(
                settings.SESSION_COOKIE_NAME,
                '""' if sys.version_info >= (3, 5) else '',
            ),
            str(response.cookies[settings.SESSION_COOKIE_NAME])
        )

    def test_flush_empty_without_session_cookie_doesnt_set_cookie(self):
        """Flushing a session that never had a cookie sets no cookie at all."""
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()
        # Simulate a request that ends the session
        middleware.process_request(request)
        request.session.flush()
        # Handle the response through the middleware
        response = middleware.process_response(request, response)
        # A cookie should not be set.
        self.assertEqual(response.cookies, {})
        # The session is accessed so "Vary: Cookie" should be set.
        self.assertEqual(response['Vary'], 'Cookie')

    def test_empty_session_saved(self):
        """
        If a session is emptied of data but still has a key, it should still
        be updated.
        """
        request = RequestFactory().get('/')
        response = HttpResponse('Session test')
        middleware = SessionMiddleware()
        # Set a session key and some data.
        middleware.process_request(request)
        request.session['foo'] = 'bar'
        # Handle the response through the middleware.
        response = middleware.process_response(request, response)
        self.assertEqual(tuple(request.session.items()), (('foo', 'bar'),))
        # A cookie should be set, along with Vary: Cookie.
        self.assertIn(
            'Set-Cookie: sessionid=%s' % request.session.session_key,
            str(response.cookies)
        )
        self.assertEqual(response['Vary'], 'Cookie')
        # Empty the session data.
        del request.session['foo']
        # Handle the response through the middleware.
        response = HttpResponse('Session test')
        response = middleware.process_response(request, response)
        self.assertEqual(dict(request.session.values()), {})
        session = Session.objects.get(session_key=request.session.session_key)
        self.assertEqual(session.get_decoded(), {})
        # While the session is empty, it hasn't been flushed so a cookie should
        # still be set, along with Vary: Cookie.
        self.assertGreater(len(request.session.session_key), 8)
        self.assertIn(
            'Set-Cookie: sessionid=%s' % request.session.session_key,
            str(response.cookies)
        )
        self.assertEqual(response['Vary'], 'Cookie')
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class CookieSessionTests(SessionTestsMixin, unittest.TestCase):
    """Run the common session tests against the signed-cookie backend."""

    backend = CookieSession

    def test_save(self):
        """
        This test tested exists() in the other session backends, but that
        doesn't make sense for us.
        """
        pass

    def test_cycle(self):
        """
        This test tested cycle_key() which would create a new session
        key for the same session data. But we can't invalidate previously
        signed cookies (other than letting them expire naturally) so
        testing for this behavior is meaningless.
        """
        pass

    @unittest.expectedFailure
    def test_actual_expiry(self):
        # The cookie backend doesn't handle non-default expiry dates, see #19201
        super(CookieSessionTests, self).test_actual_expiry()

    def test_unpickling_exception(self):
        # signed_cookies backend should handle unpickle exceptions gracefully
        # by creating a new session
        self.assertEqual(self.session.serializer, JSONSerializer)
        self.session.save()
        # Switching serializers makes the saved payload undecodable; load()
        # must recover instead of raising.
        self.session.serializer = PickleSerializer
        self.session.load()

    @unittest.skip("Cookie backend doesn't have an external store to create records in.")
    def test_session_load_does_not_create_record(self):
        pass

    @unittest.skip("CookieSession is stored in the client and there is no way to query it.")
    def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
        pass
| {
"content_hash": "2c68624551d7a8dcac293b327d333a62",
"timestamp": "",
"source": "github",
"line_count": 865,
"max_line_length": 106,
"avg_line_length": 38.019653179190755,
"alnum_prop": 0.6498008331559583,
"repo_name": "mattseymour/django",
"id": "54b8ac53a579b075edd7bea350799977ae9ebf2e",
"size": "32887",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/sessions_tests/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "55935"
},
{
"name": "HTML",
"bytes": "182963"
},
{
"name": "JavaScript",
"bytes": "252645"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11845544"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
import json
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render
from mobility.decorators import mobile_template
from kitsune.products.models import Product, Topic
from kitsune.wiki.facets import topics_for, documents_for
@mobile_template('products/{mobile/}products.html')
def product_list(request, template):
    """The product picker page."""
    visible_products = Product.objects.filter(visible=True)
    context = {'products': visible_products}
    return render(request, template, context)
@mobile_template('products/{mobile/}product.html')
def product_landing(request, template, slug):
    """The product landing page.

    For AJAX requests, returns a JSON payload listing the product's
    visible topics instead of rendering the template.
    """
    product = get_object_or_404(Product, slug=slug)
    if request.is_ajax():
        # Return a list of topics/subtopics for the product.
        topic_list = [
            {'id': t.id, 'title': t.title}
            for t in Topic.objects.filter(product=product, visible=True)
        ]
        return HttpResponse(json.dumps({'topics': topic_list}),
                            content_type='application/json')
    versions = product.versions.filter(default=True)
    # Fall back to 0 when the product has no default version.
    latest_version = versions[0].min_version if versions else 0
    return render(request, template, {
        'product': product,
        'products': Product.objects.filter(visible=True),
        'topics': topics_for(product=product, parent=None),
        'search_params': {'product': slug},
        'latest_version': latest_version
    })
@mobile_template('products/{mobile/}documents.html')
def document_listing(request, template, product_slug, topic_slug,
                     subtopic_slug=None):
    """The document listing page for a product + topic.

    When ``subtopic_slug`` is given, documents are filtered by the
    subtopic rather than the top-level topic.
    """
    product = get_object_or_404(Product, slug=product_slug)
    # Only top-level topics are valid here (parent__isnull=True).
    topic = get_object_or_404(Topic, slug=topic_slug, product=product,
                              parent__isnull=True)
    doc_kw = {'locale': request.LANGUAGE_CODE, 'products': [product]}
    if subtopic_slug is not None:
        subtopic = get_object_or_404(Topic, slug=subtopic_slug,
                                     product=product, parent=topic)
        doc_kw['topics'] = [subtopic]
    else:
        subtopic = None
        doc_kw['topics'] = [topic]
    documents, fallback_documents = documents_for(**doc_kw)
    return render(request, template, {
        'product': product,
        'topic': topic,
        'subtopic': subtopic,
        'topics': topics_for(product=product, parent=None),
        'subtopics': topics_for(product=product, parent=topic),
        'documents': documents,
        'fallback_documents': fallback_documents,
        'search_params': {'product': product_slug}})
| {
"content_hash": "0da02fb092f100a72ddd442f2d7ed9a8",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 70,
"avg_line_length": 35.48684210526316,
"alnum_prop": 0.6403411197626993,
"repo_name": "feer56/Kitsune1",
"id": "f1a90fb1659c623d89a4ba0d4854adbc72696a70",
"size": "2697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kitsune/products/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "284142"
},
{
"name": "JavaScript",
"bytes": "1572004"
},
{
"name": "Makefile",
"bytes": "4594"
},
{
"name": "Python",
"bytes": "3061131"
},
{
"name": "Shell",
"bytes": "5453"
}
],
"symlink_target": ""
} |
"""
'test' action for setup.py
"""
# XXX - use setuptools test suite support
import sys, os, string, glob
from os.path import basename, dirname, splitext, join, expanduser, walk
from fnmatch import fnmatch
import unittest
import dejagnu
deja_suite = dejagnu.testSuiteForDirectory('tests/testsuite/libffi.call')
suite = unittest.TestSuite((deja_suite, ))
runner = unittest.TextTestRunner(verbosity=1)
runner.run(suite)
| {
"content_hash": "35f8fa2cbaa09599fdf18e7254bf1393",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 85,
"avg_line_length": 30.785714285714285,
"alnum_prop": 0.7540603248259861,
"repo_name": "unofficial-opensource-apple/libffi",
"id": "ed86919bbe1bf8208da16c7675fcc192045d59d5",
"size": "431",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/run-tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "47301"
},
{
"name": "C",
"bytes": "333039"
},
{
"name": "C++",
"bytes": "7911"
},
{
"name": "Python",
"bytes": "5874"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter the ``approved`` flag on posts and topics (indexed, default True)."""

    dependencies = [
        ("forum_conversation", "0004_auto_20160427_0502"),
    ]

    # Both models receive the identical field alteration, so build the
    # operations with a comprehension instead of repeating the literal.
    operations = [
        migrations.AlterField(
            model_name=altered_model,
            name="approved",
            field=models.BooleanField(
                db_index=True, default=True, verbose_name="Approved"
            ),
        )
        for altered_model in ("post", "topic")
    ]
| {
"content_hash": "e984b274857112f8a373a8e3dd0dbfb6",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 68,
"avg_line_length": 25.84,
"alnum_prop": 0.5340557275541795,
"repo_name": "comic/comic-django",
"id": "bd17ece9f4dd7ad04fa1cd061e83383bc79c26bb",
"size": "694",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/grandchallenge/forum_conversation/migrations/0005_auto_20160607_0455.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "94300"
},
{
"name": "HTML",
"bytes": "101108"
},
{
"name": "JavaScript",
"bytes": "122734"
},
{
"name": "PHP",
"bytes": "99155"
},
{
"name": "Python",
"bytes": "486219"
},
{
"name": "Shell",
"bytes": "793"
}
],
"symlink_target": ""
} |
from setuptools import setup
import sys
import gunstar
# Optional import; presumably needed only for its side effects when the
# test suite runs -- NOTE(review): confirm before removing.
try:
    import multiprocessing
except ImportError:
    pass

# Single source of truth for the version: the package itself.
version = gunstar.__version__

# Extras installed via `pip install gunstar[testing]`.
testing_extras = [
    'nose',
    'coverage',
    'coveralls'
]

# Runtime dependencies.
requires = [
    'WebOb>=1.4',
    'blinker>=1.3',
    'Jinja2>=2.7.1',
    'six>=1.3.0',
    'itsdangerous>=0.23',
]

setup(
    name='gunstar',
    version=version,
    author='Allisson Azevedo',
    author_email='allisson@gmail.com',
    packages=['gunstar'],
    license='MIT',
    description='Another python web framework.',
    # The long description is read from the docs index at build time.
    long_description=open('docs/index.rst').read(),
    url='http://github.com/allisson/gunstar',
    include_package_data=True,
    zip_safe=False,
    install_requires=requires,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    test_suite='nose.collector',
    tests_require=['nose'],
    extras_require={
        'testing': testing_extras,
    },
)
| {
"content_hash": "57845ccc8791ef3f21e49e4a8ab6f211",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 70,
"avg_line_length": 25.775862068965516,
"alnum_prop": 0.6053511705685619,
"repo_name": "allisson/gunstar",
"id": "3122f3dfef6693ad3087b9131bf42c9b8784dcbf",
"size": "1519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "63482"
},
{
"name": "Shell",
"bytes": "6703"
}
],
"symlink_target": ""
} |
'''
----------------------------
PEP-386 compliant versioning
----------------------------
:pep:`386` defines a standard format for version strings. This module
contains a function for creating strings in that format.
'''
__version_info__ = ((1, 0, 0),)
import itertools
# NOTE(review): this module is Python 2 code (`unicode`, `itertools.imap`);
# it will not run unmodified on Python 3.
def version_tuple_to_string(version_info):
    '''Return a :pep:`386` version string from a :pep:`386` style version tuple
    :arg version_info: Nested set of tuples that describes the version.  See
        below for an example.
    :returns: a version string
    This function implements just enough of :pep:`386` to satisfy our needs.
    :pep:`386` defines a standard format for version strings and refers to
    a function that will be merged into the |stdlib|_ that transforms a tuple
    of version information into a standard version string.  This function is
    an implementation of that function.  Once that function becomes available
    in the |stdlib|_ we will start using it and deprecate this function.
    :attr:`version_info` takes the form that :pep:`386`'s
    :func:`NormalizedVersion.from_parts` uses::
        ((Major, Minor, [Micros]), [(Alpha/Beta/rc marker, version)],
            [(post/dev marker, version)])
        Ex: ((1, 0, 0), ('a', 2), ('dev', 3456))
    It generates a :pep:`386` compliant version string::
        N.N[.N]+[{a|b|c|rc}N[.N]+][.postN][.devN]
        Ex: 1.0.0a2.dev3456
    .. warning:: This function does next to no error checking.  It's up to the
        person defining the version tuple to make sure that the values make
        sense.  If the :pep:`386` compliant version parser doesn't get
        released soon we'll look at making this function check that the
        version tuple makes sense before transforming it into a string.
    It's recommended that you use this function to keep
    a :data:`__version_info__` tuple and :data:`__version__` string in your
    modules.  Why do we need both a tuple and a string?  The string is often
    useful for putting into human readable locations like release
    announcements, version strings in tarballs, etc.  Meanwhile the tuple is
    very easy for a computer to compare. For example, kitchen sets up its
    version information like this::
        from kitchen.versioning import version_tuple_to_string
        __version_info__ = ((0, 2, 1),)
        __version__ = version_tuple_to_string(__version_info__)
    Other programs that depend on a kitchen version between 0.2.1 and 0.3.0
    can find whether the present version is okay with code like this::
        from kitchen import __version_info__, __version__
        if __version_info__ < ((0, 2, 1),) or __version_info__ >= ((0, 3, 0),):
            print 'kitchen is present but not at the right version.'
            print 'We need at least version 0.2.1 and less than 0.3.0'
            print 'Currently found: kitchen-%s' % __version__
    '''
    ver_components = []
    for values in version_info:
        if isinstance(values[0], int):
            # Release segment, e.g. (1, 0, 0) -> '1.0.0'.
            ver_components.append('.'.join(itertools.imap(str, values)))
        else:
            # Marker segment; normalize unicode markers to byte strings
            # (Python 2 semantics).
            if isinstance(values[0], unicode):
                modifier = values[0].encode('ascii')
            else:
                modifier = values[0]
            if modifier in ('a', 'b', 'c', 'rc'):
                # Pre-release marker attaches directly; default to '0' when
                # no number follows the marker.
                ver_components.append('%s%s' % (modifier,
                        '.'.join(itertools.imap(str, values[1:])) or '0'))
            else:
                # post/dev markers are joined with a leading dot.
                ver_components.append('.%s%s' % (modifier,
                        str(values[1])))
    # No sanity checking of the tuple is performed (see warning above).
    return unicode(''.join(ver_components), 'ascii')

__version__ = version_tuple_to_string(__version_info__)

# Public API of this module.
__all__ = ('version_tuple_to_string',)
| {
"content_hash": "344c511596993959ba208fd6a054fd21",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 79,
"avg_line_length": 41.875,
"alnum_prop": 0.6211668928086839,
"repo_name": "tomasreimers/python-wiki-uturn",
"id": "69bc21ca04bb80149763f9e6a7da628fcf4f5d79",
"size": "4447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kitchen/versioning/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "249869"
}
],
"symlink_target": ""
} |
import re, hashlib, pprint, socket, urllib, time
import urllib.request
from queue import Queue
from collections import Counter, deque, defaultdict
from html.parser import HTMLParser
from classes.fingerprints import Fingerprints
#from classes.requester2 import Requester
from classes.matcher import Match
from classes.request2 import Response, Requester
from classes.printer import Printer
class DiscoverAllCMS(object):
    # match all fingerprints against all responses
    # this might generate false positives
    def __init__(self, data):
        self.cache = data['cache']
        self.results = data['results']
        self.matcher = data['matcher']
        self.fps = data['fingerprints']
        self.printer = data['printer']
        # only used for pretty printing of debugging info
        self.tmp_set = set()

    def run(self):
        """Re-match every cms/platform fingerprint against every cached response."""
        self.printer.print('Checking for more matches in cache (option -a) ...', 1)
        # find matches for all the responses in the cache
        for fp_category in ['cms', 'platform']:
            for fp_type in self.fps.data[fp_category]:
                fps = self.fps.data[fp_category][fp_type]['fps']
                for response in self.cache.get_responses():
                    matches = self.matcher.get_result(fps, response)
                    for fp in matches:
                        self.results.add( fp_category, fp['name'], fp['output'], fp )
                        # Report each (name, output) pair only once.
                        if (fp['name'], fp['output']) not in self.tmp_set:
                            self.printer.print('- Found match: %s %s' % (fp['name'], fp['output']) , 2)
                            self.tmp_set.add((fp['name'], fp['output']))
class DiscoverCMS(object):
    """Detect which CMS (and version) a site runs, fetching fingerprints in batches."""

    def __init__(self, options, data):
        self.printer = data['printer']
        self.matcher = data['matcher']
        self.requester = data['requester']
        self.result = data['results']
        # NOTE(review): duplicate assignment of self.printer below; harmless.
        self.printer = data['printer']
        self.batch_size = options['batch_size']
        self.num_cms_to_find = options['stop_after']
        self.find_all_cms = options['run_all']
        # only used for pretty printing of debugging info
        self.tmp_set = set()
        # Fingerprints grouped by URL so each URL is requested only once.
        self.queue = defaultdict(list)
        for fp_type in data['fingerprints'].data['cms']:
            for fp in data['fingerprints'].data['cms'][fp_type]['fps']:
                self.queue[fp['url']].append(fp)

    def get_queue(self, cms=None):
        """Pop work from the fingerprint queue.

        With ``cms=None``: return up to ``batch_size`` per-URL fingerprint
        lists. With a CMS name: return *all* queued fingerprints for that
        CMS and leave the rest queued.
        """
        queue = []
        if cms is None:
            for i in range(self.batch_size):
                try:
                    url, fp_list = self.queue.popitem()
                    queue.append(fp_list)
                except KeyError:
                    # Queue ran dry before the batch was filled.
                    break
        else:
            # the following procedure is *not* optimal
            # the self.queue dict is completely destroyed and
            # and rebuilt each time this procedure is called :(
            # create a temp queue dict
            tmp_queue = defaultdict(list)
            # remove elements from the dict until it is empty
            while len(self.queue) > 0:
                url, fp_list = self.queue.popitem()
                # remove all the elements of a queue entry's list
                # one-by-one and check if the fingerprints are
                # belong to the specified 'cms'
                tmp_list = []
                out_list = []
                while len(fp_list) > 0:
                    # remove the fingerprint
                    fp = fp_list.pop()
                    # if the fingerprint matches the cms, add it to the
                    # out_list for the current url
                    # otherwise add it to the tmp_list
                    if fp['name'] == cms:
                        out_list.append(fp)
                    else:
                        tmp_list.append(fp)
                # if there are elements in tmp_list (the new list of fps that
                # that do *not* match the 'cms'), add it to the tmp_queue's entry
                # for the current url
                if len(tmp_list) > 0:
                    tmp_queue[url].extend(tmp_list)
                # if matches for the specified cms have been found, add the list
                # to the fingerprintQueue for the requester
                if len(out_list) > 0:
                    queue.append(out_list)
            # replace the queue with the tmp queue
            self.queue = tmp_queue
        return queue

    def run(self):
        """Request fingerprint batches until enough CMSes are identified.

        Stops once ``num_cms_to_find`` CMSes are detected (unless
        ``find_all_cms`` is set) or the queue is exhausted.
        """
        batch_no = 0
        self.printer.print('Determining CMS type ...', 1)
        detected_cms = []
        stop_searching = len(detected_cms) >= self.num_cms_to_find
        while (not stop_searching or self.find_all_cms) and (not len(self.queue) == 0):
            self.printer.print('Checking fingerprint group no. %s ...' % (batch_no, ) , 3)
            # set the requester queue
            results = self.requester.run('CMS', self.get_queue())
            # search for CMS matches
            cms_matches = []
            while not results.empty():
                fingerprints, response = results.get()
                for fp in self.matcher.get_result(fingerprints, response):
                    self.result.add( 'cms', fp['name'], fp['output'], fp)
                    cms_matches.append(fp['name'])
            # search for the found CMS versions
            for cms in cms_matches:
                # skip checking the cms, if it has already been detected
                if cms in detected_cms: continue
                if cms not in self.tmp_set:
                    self.tmp_set.add(cms)
                    self.printer.print('- Found CMS match: %s' % (cms, ) , 2)
                # set the requester queue with only fingerprints for the cms
                results = self.requester.run('CMS_version', self.get_queue(cms))
                # find the results
                self.printer.print('Determining CMS version ...', 1)
                while results.qsize() > 0:
                    res_fps,response = results.get()
                    for fp in self.matcher.get_result(res_fps, response):
                        self.result.add( 'cms', fp['name'], fp['output'], fp)
                        if (fp['name'], fp['output']) not in self.tmp_set:
                            self.tmp_set.add((fp['name'], fp['output']))
                            self.printer.print('- Found version: %s %s' % (fp['name'], fp['output']) , 2)
                # update the stop criteria
                detected_cms.append(cms)
                stop_searching = (len(detected_cms) >= self.num_cms_to_find) or len(self.queue) == 0
            batch_no += 1
class DiscoverCookies(object):
	"""Collect the names of all cookies set by the scanned site.

	Every cached response is scanned for a 'set-cookie' header; the cookie
	name (the part before the first '=') is extracted and the unique names
	are stored on the shared results object as site_info['cookies'].
	"""

	def __init__(self, data):
		# 'data' is the shared tool state; this class uses 'cache',
		# 'results' and 'printer'
		self.data = data
		self.printer = data['printer']

	def run(self):
		"""Extract unique cookie names from all cached responses."""
		self.printer.print('Checking for cookies ...', 1)
		cookies = set()
		for r in self.data['cache'].get_responses():
			try:
				# cookie name is everything before the first '='
				c = r.headers['set-cookie'].strip().split('=')[0]
				if c not in cookies:
					self.printer.print('- Found cookie: %s' % (c,), 2)
					cookies.add(c)
			except KeyError:
				# the response did not set a cookie; nothing to record
				pass
		self.data['results'].site_info['cookies'] = cookies
class DiscoverErrorPage:
	"""Learn the checksums of the site's 'not found' pages.

	The requester fetches URLs that should not exist; the requester strips
	volatile items/patterns before checksumming, and the resulting md5
	values identify the site's error pages so later matches against them
	can be discarded.
	"""

	def __init__(self, options, data):
		self.host = options['url']
		self.fps = data['fingerprints'].data['error_pages']['fps']
		self.requester = data['requester']
		self.printer = data['printer']

	def run(self):
		"""Return the set of checksums identifying the error pages."""
		# switch the requester into 404-detection mode for this pass
		self.requester.find_404s = True
		self.printer.print('Error page detection ...', 1)
		fetched = self.requester.run('ErrorPages', [[fingerprint] for fingerprint in self.fps])
		checksums = set()
		while fetched.qsize() > 0:
			fingerprint_batch, response = fetched.get()
			if response is None:
				continue
			checksums.add(response.md5_404)
			checksums.add(response.md5_404_text)
			self.printer.print('- Error page fingerprint: %s, %s - %s' % (response.md5_404, response.md5_404_text, fingerprint_batch[0]['url']), 2)
		self.requester.find_404s = False
		return checksums
class DiscoverInteresting(object):
	"""Check the site for 'interesting' files (backups, configs, etc.)."""

	def __init__(self, options, data):
		self.printer = data['printer']
		self.requester = data['requester']
		self.matcher = data['matcher']
		self.result = data['results']
		self.threads = options['threads']
		self.batch_size = options['batch_size']
		self.category = "interesting"
		# group the fingerprints by URL so every URL is only fetched once:
		# queue maps url -> list of fingerprints for that url
		self.queue = defaultdict(list)
		for fp in data['fingerprints'].data['interesting']['fps']:
			self.queue[fp['url']].append(fp)

	def run(self):
		"""Fetch all candidate URLs and record every fingerprint match."""
		self.printer.print('Detecting interesting files ...', 1)
		results = self.requester.run('Interesting', list(self.queue.values()))
		# process the responses as they become available
		while results.qsize() > 0:
			fps, response = results.get()
			for fp in self.matcher.get_result(fps, response):
				self.result.add(self.category, None, None, fp, weight=1)
				try:
					self.printer.print('- Found file: %s (%s)' % (fp['url'], fp['note']), 2)
				except KeyError:
					# fingerprint carries no 'note'; skip only the pretty-print
					pass
class DiscoverIP(object):
	"""Resolve the IP address of the scanned host.

	The host name is extracted from the target URL; if the URL cannot be
	parsed or the name does not resolve, 'Unknown' is returned instead.
	"""

	def __init__(self, path):
		# full target URL, e.g. 'http://example.com/some/path'
		self.path = path

	def run(self):
		"""Return the host's IPv4 address as a string, or 'Unknown'."""
		try:
			# strip the scheme ('http:') and any trailing path component
			host = self.path.split('//')[1].split('/')[0]
			return socket.gethostbyname(host)
		except Exception:
			# unparsable URL or failed DNS lookup
			return 'Unknown'
class DiscoverJavaScript(object):
	"""Match known JavaScript library fingerprints against cached responses."""

	def __init__(self, options, data):
		self.printer = data['printer']
		self.cache = data['cache']
		self.matcher = data['matcher']
		self.result = data['results']
		# flatten all javascript fingerprint groups into a single list
		self.fingerprints = []
		js_data = data['fingerprints'].data['js']
		for fp_type in js_data:
			self.fingerprints.extend(js_data[fp_type]['fps'])

	def run(self):
		"""Scan every cached response that looks like JavaScript."""
		self.printer.print('Detecting Javascript ...', 1)
		for response in self.cache.get_responses():
			headers = response.headers
			content_type = headers['content-type'] if 'content-type' in headers else ''
			# A response counts as JavaScript by content type or extension.
			# Bug fix: the old check was `'.js' in url.split('.')[-1]`, which
			# can never be true (the last dot-segment contains no dot), so
			# the extension branch was dead code.
			is_js = 'javascript' in content_type or response.url.split('?')[0].endswith('.js')
			if not is_js:
				continue
			for fp in self.matcher.get_result(self.fingerprints, response):
				self.result.add('js', fp['name'], fp['output'], fingerprint=fp, weight=1)
				self.printer.print('- Found JavaScript: %s %s' % (fp['name'], fp['output']), 2)
# Used by the DiscoverMore crawler.
# Extracts 'src' and 'href' resource references from script, img and link tags.
class LinkExtractor(HTMLParser):
	"""HTML parser that collects resource references from a page.

	Gathers the 'src' attribute of <script> and <img> tags and the 'href'
	attribute of <link> tags. Results accumulate across multiple feed()
	calls, so a single instance can be reused for several pages.
	"""

	def __init__(self, strict):
		# 'strict' is kept for backwards compatibility with callers;
		# HTMLParser dropped the keyword in Python 3.5, so forwarding it
		# raised TypeError on any modern interpreter. It is now ignored.
		super().__init__()
		self.results = set()

	def get_results(self):
		"""Return the set of URLs collected so far."""
		return self.results

	def handle_starttag(self, tag, attrs):
		# pick the attribute that carries the resource reference
		if tag in ('script', 'img'):
			wanted = 'src'
		elif tag == 'link':
			wanted = 'href'
		else:
			return
		for name, value in attrs:
			if name == wanted:
				self.results.add(value)
class DiscoverMore(object):
	"""Crawl referenced resources (scripts, images, css) one level deep.

	Every cached HTML page is scraped for resource references, which are
	then fetched so their responses land in the shared cache for the
	other detection passes. Only resources are followed, never <a> links,
	to avoid crawling the whole application.
	"""

	def __init__(self, options, data):
		self.host = options['url']
		self.threads = options['threads']
		self.printer = data['printer']
		self.cache = data['cache']
		self.result = data['results']
		self.matcher = data['matcher']
		self.requester = data['requester']
		self.fingerprints = data['fingerprints']

	def _get_urls(self, response):
		"""Return the set of 'src' attribute values found in the body.

		Only elements using 'src' are considered; fetching <a>-tag targets
		could lead to a crawl of the whole application.
		"""
		found = set()
		for pattern in ('src="(.+?)"', "src='(.+?)'"):
			found.update(re.findall(pattern, response.body))
		return found

	def run(self):
		"""Collect resource URLs from cached HTML pages and fetch them."""
		self.printer.print('Detecting links ...', 1)
		resources = set()
		parser = LinkExtractor(strict=False)
		for cached in self.cache.get_responses():
			# responses without a content type might be binaries - skip them
			if 'content-type' not in cached.headers:
				continue
			# only pages that can contain links/references are scraped
			if 'text/html' not in cached.headers['content-type']:
				continue
			candidates = self._get_urls(cached)
			parser.feed(cached.body)
			candidates |= parser.get_results()
			for candidate in candidates:
				parts = urllib.request.urlparse(candidate)
				# 'data:' urls embed their content - nothing to fetch
				if parts.path.startswith('data:'):
					continue
				resources.add(candidate)
		self.printer.print('- Discovered %s new resources' % (len(resources), ), 2)
		# mimic a fingerprint list: each entry is a dict with an 'url' key
		url_queue = defaultdict(list)
		for resource in resources:
			url_queue[resource].append({'url': resource})
		# fetch them - the responses end up in the shared cache
		self.requester.run('DiscoverMore', list(url_queue.values()))
class DiscoverOS:
	"""Guess the host operating system from package name/version hints.

	Package names and versions seen in response headers (e.g.
	'Server: Apache/2.4.10 (Debian)') and in earlier platform results are
	matched against an OS fingerprint database. Each match casts a
	weighted vote; votes for an OS family already seen directly in the
	headers are boosted so e.g. a 'Debian' banner prioritizes Debian
	version fingerprints.
	"""

	def __init__(self, options, data):
		self.printer = data['printer']
		self.cache = data['cache']
		self.results = data['results']
		self.fingerprints = data['fingerprints'].data['os']['fps']
		# (os_name, os_version) -> accumulated vote weight
		self.os = Counter()
		# lower-cased OS family names seen directly in the headers
		self.os_family_list = Counter()
		self.matched_packages = set()

	def search_and_prioritize_os(self, pkg_name, pkg_version):
		"""Vote for every OS fingerprint matching the given package/version."""
		for fp in self.fingerprints:
			if fp['pkg_name'] != pkg_name or fp['pkg_version'] != pkg_version:
				continue
			weight = fp.get('weight', 1)
			# normalize 'os_version' to a list so single values and lists
			# are handled uniformly below
			if not isinstance(fp['os_version'], list):
				fp['os_version'] = [fp['os_version']]
			for os_version in fp['os_version']:
				if fp['os_name'].lower() in self.os_family_list:
					# the OS family was seen in the headers - boost the vote
					self.printer.print('- Prioritizing fingerprints for OS: %s' % (fp['os_name'], ), 7)
					self.os[(fp['os_name'], os_version)] += weight * 100
				else:
					self.os[(fp['os_name'], os_version)] += weight

	def find_match_in_headers(self, response):
		"""Extract OS and package hints from a response's 'server' header.

		A header such as 'Apache/2.4.10 (Debian) PHP/5.6' yields the OS
		family 'debian' plus a (pkg, version) vote for every
		'name/version' token outside the parentheses.
		"""
		headers = response.headers
		if 'server' not in headers:
			return
		line = headers['server']
		if "(" in line:
			# the parenthesized part names the OS family directly
			os = line[line.find('(') + 1:line.find(')')]
			line = line[:line.find('(') - 1] + line[line.find(')') + 1:]
		else:
			os = None
		if os is not None:
			self.os_family_list[os.lower()] += 1
		for part in line.split(" "):
			try:
				pkg, version = list(map(str.lower, part.split('/')))
			except ValueError:
				# token is not a simple 'name/version' pair - ignore it
				continue
			self.search_and_prioritize_os(pkg, version)

	def find_match_in_results(self):
		"""Vote based on platform packages detected earlier in the scan."""
		platforms = self.results.scores['platform']
		for pkg in platforms:
			for version in platforms[pkg]:
				# ASP.NET reports long build numbers; the fingerprints only
				# carry the significant prefix ('4.5.x' vs 'x.y')
				if pkg == 'ASP.NET':
					version = version[:3] if not version.startswith("4.5") else version[:5]
				self.search_and_prioritize_os(pkg, version)

	def finalize(self):
		"""Add the highest-voted OS guesses to the results."""
		candidates = [{'os': os_name, 'version': os_version, 'count': count}
		              for (os_name, os_version), count in self.os.items()]
		if not candidates:
			return
		candidates.sort(key=lambda c: c['count'], reverse=True)
		max_count = candidates[0]['count']
		# report every candidate tied for the top vote count
		for candidate in candidates:
			if candidate['count'] != max_count:
				break
			self.results.add('os', candidate['os'], candidate['version'], weight=candidate['count'])
			self.printer.print('- Found OS: %s %s' % (candidate['os'], candidate['version']), 2)

	def run(self):
		self.printer.print('Detecting OS ...', 1)
		# vote based on response headers ...
		for response in self.cache.get_responses():
			self.find_match_in_headers(response)
		# ... and on packages detected earlier in the scan
		self.find_match_in_results()
		self.finalize()
class DiscoverPlatform:
	"""Detect the underlying platform (language / server packages).

	Platform fingerprints are grouped by URL and fetched in batches of
	'batch_size' URLs; every response is matched against the fingerprints
	that were queued for its URL.
	"""

	def __init__(self, options, data):
		self.printer = data['printer']
		self.requester = data['requester']
		self.matcher = data['matcher']
		self.result = data['results']
		self.threads = options['threads']
		self.batch_size = options['batch_size']
		# group the fingerprints by URL so every URL is only fetched once
		self.queue = defaultdict(list)
		for fp_type in data['fingerprints'].data['platform']:
			for fp in data['fingerprints'].data['platform'][fp_type]['fps']:
				self.queue[fp['url']].append(fp)
		# only used for pretty printing of debugging info
		self.tmp_set = set()

	def run(self):
		self.printer.print('Detecting platform ...', 1)
		while len(self.queue) > 0:
			# take up to batch_size URLs out of the queue
			batch = []
			for _ in range(self.batch_size):
				try:
					url, fp_list = self.queue.popitem()
					batch.append(fp_list)
				except KeyError:
					# queue exhausted mid-batch
					break
			# label fixed: was misspelled 'Plaform'
			results = self.requester.run('Platform', batch)
			# record every fingerprint match
			while not results.empty():
				fingerprints, response = results.get()
				for fp in self.matcher.get_result(fingerprints, response):
					self.result.add('platform', fp['name'], fp['output'], fp)
					if (fp['name'], fp['output']) not in self.tmp_set:
						self.printer.print('- Found platform %s %s' % (fp['name'], fp['output']), 2)
						self.tmp_set.add((fp['name'], fp['output']))
class DiscoverTitle:
	"""Fetch the front page and extract its <title>."""

	def __init__(self, options, data):
		self.data = data
		self.url = options['url']
		self.printer = data['printer']

	def run(self):
		"""Return the stripped title of the site's front page, or ''."""
		self.printer.print('Getting title ...', 1)
		# make sure the front page has been fetched into the cache
		self.data['requester'].run('Title', [[{'url': '/'}]])
		front_page = self.data['cache'][self.url]
		try:
			# raw string: '\s' in a plain literal is an invalid escape
			# sequence on modern Python
			title = re.findall(r'<title>\s*(.*)\s*</title>', front_page.body)[0].strip()
		except IndexError:
			# no <title> tag on the front page
			title = ''
		try:
			self.printer.print('- Found title: %s' % (title, ), 2)
		except UnicodeEncodeError:
			# title not representable in the console encoding - skip printing
			pass
		return title
class DiscoverTools:
	"""List helper tools (scanners etc.) relevant to the detected CMSes."""

	def __init__(self, data):
		self.fps = data['fingerprints']
		self.results = data['results']
		self.printer = data['printer']

	def run(self):
		"""Look up tools for every detected CMS and add them to the results."""
		self.printer.print('Searching for tools ...', 1)
		self.results.update()
		# each translator entry maps a fingerprint file name to a CMS
		# description that may carry a list of associated tools
		translator = self.fps.translator
		for cms, _ in self.results.get_versions():
			for entry_name in translator:
				entry = translator[entry_name]
				if entry['name'] != cms or 'tool' not in entry:
					continue
				for tool in entry['tool']:
					self.results.add_tool(cms, tool['name'], tool['link'])
					self.printer.print('- Found tool: %s (%s)' % (tool['name'], tool['link']), 2)
class DiscoverUrlLess(object):
	"""Match fingerprints that are not tied to a specific URL.

	CMS and platform fingerprints with an empty 'url' field are matched
	against every response already in the cache; on a match the
	fingerprint's 'url' is filled in with the path of the response that
	triggered it.
	"""
	def __init__(self, options, data):
		self.printer = data['printer']
		self.cache = data['cache']
		self.results = data['results']
		self.matcher = data['matcher']
		self.fingerprints = data['fingerprints']
	def run(self):
		self.printer.print('Matching urlless fingerprints...', 1)
		# only used for pretty printing of debugging info
		tmp_set = set()
		for fp_category in ['cms', 'platform']:
			for fp_type in self.fingerprints.data[fp_category]:
				fps = self.fingerprints.data[fp_category][fp_type]['fps']
				# keep only the fingerprints without a fixed URL
				fps = [fp for fp in fps if fp['url'] == '']
				# find matches for all the responses in the cache
				for response in self.cache.get_responses():
					matches = self.matcher.get_result(fps, response)
					for fp in matches:
						# record which URL produced the match
						# NOTE(review): this mutates the shared fingerprint
						# dict in place, so later responses see a non-empty
						# 'url' - presumably intentional; confirm.
						url_data = urllib.request.urlparse(response.get_url())
						fp['url'] = url_data.path
						# fingerprints may opt out of being reported on
						# every detection after the first one
						show_all_detections = True
						if 'show_all_detections' in fp:
							show_all_detections = fp['show_all_detections']
						if (fp['name'], fp['output']) in tmp_set:
							# repeat detection: only record when requested
							if show_all_detections:
								self.results.add(fp_category, fp['name'], fp['output'], fingerprint=fp, weight=1)
						else:
							# first detection: always print and record
							self.printer.print('- Found fingerprint: %s %s' % (fp['name'], fp['output']), 2)
							self.results.add(fp_category, fp['name'], fp['output'], fingerprint=fp, weight=1)
						tmp_set.add((fp['name'], fp['output']))
class DiscoverVulnerabilities:
	"""Report vulnerability counts for the detected CMS versions."""

	def __init__(self, data):
		self.printer = data['printer']
		self.results = data['results']
		# flatten all vulnerability sources (e.g. CVE feeds) into one list
		self.fps = []
		vuln_sources = data['fingerprints'].data['vulnerabilities']
		for source in vuln_sources:
			self.fps.extend(vuln_sources[source]['fps'])

	def run(self):
		self.printer.print('Searching for vulnerabilities ...', 1)
		self.results.update()
		cms_results = self.results.get_versions()
		# count detected versions per CMS; with more than 5 the version
		# detection is unreliable, so the vulnerability count is skipped
		vendors = Counter()
		for cms, _ in cms_results:
			vendors[cms] += 1
		for cms, version in cms_results:
			if vendors[cms] > 5:
				continue
			try:
				for fp in self.fps:
					if fp['name'] == cms and fp['version'] == version:
						self.results.add_vulnerabilities(cms, version, fp['num_vulns'], fp['link'])
						self.printer.print('- Found vulnerability: %s %s: %s' % (cms, version, fp['num_vulns']), 2)
			except KeyError as e:
				# malformed fingerprint entry - report it and carry on
				print(e)
| {
"content_hash": "2f5ba13897f5f8aafe27940c996855c5",
"timestamp": "",
"source": "github",
"line_count": 694,
"max_line_length": 124,
"avg_line_length": 27.756484149855908,
"alnum_prop": 0.6427866895083839,
"repo_name": "akamajoris/wig",
"id": "29ac6281dd0ea0ca775d986f02fa875c0cf6838f",
"size": "19263",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "classes/discovery.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "62799"
}
],
"symlink_target": ""
} |
"""
Display video on a set of Holidays
"""
import numpy as np
import cv2
import math
import optparse
import time
from api.udpholiday import UDPHoliday
from holiscreen import render_to_hols
NUM_GLOBES = UDPHoliday.NUM_GLOBES
class HolividOptions(optparse.OptionParser):
    """
    Command-line options parser for holivid.
    """
    def __init__(self, *args, **kwargs):
        # Bug fix: positional arguments were previously accepted but
        # silently dropped; forward them to the base class as well.
        optparse.OptionParser.__init__(self, *args, **kwargs)
        self.addOptions()

    def addOptions(self):
        """Register all holivid command-line options."""
        self.add_option('-n', '--numstrings', dest='numstrings',
                        help="Number of Holiday strings to simulate [%default]",
                        type="int", default=25)
        self.add_option('-f', '--file', dest='filename',
                        help="Video file to display.",
                        type="string")
        # Listen on multiple TCP/UDP ports, one for each Holiday we simulate
        self.add_option('-p', '--portstart', dest='portstart',
                        help="Port number to start at for UDP listeners [%default]",
                        type="int", default=9988)
        self.add_option('-o', '--orientation', dest='orientation',
                        help="Orientation of the strings [%default]",
                        type="choice", choices=['vertical', 'horizontal'],
                        default='vertical')
        self.add_option('', '--switchback', dest='switchback',
                        help="'Switchback' strings, make a single string display like its "
                             "more than one every m globes",
                        type="int")
        self.add_option('', '--fps', dest='fps',
                        help="Set video playback frames-per-second. [%default]",
                        type="int", default=25)

    def parseOptions(self):
        """
        Emulate twistedmatrix options parser API: parse, stash the
        results on the instance, then run post-parse validation.
        """
        options, args = self.parse_args()
        self.options = options
        self.args = args
        self.postOptions()
        return self.options, self.args

    def postOptions(self):
        """Validate parsed arguments; error() exits with a message."""
        if len(self.args) < 1:
            self.error("Specify address and port of remote Holiday(s)")
        if not self.options.filename:
            self.error("Video filename not given.")
if __name__ == '__main__':
    usage = "Usage: %prog [options] <hol_addr:hol_port> [<hol_addr:hol_port> ... ]"
    # don't shadow the 'optparse' module with the parser instance
    parser = HolividOptions(usage=usage)
    options, args = parser.parseOptions()

    # Build the list of Holidays to drive. With several addresses each one
    # becomes its own Holiday; with a single address, 'numstrings'
    # consecutive ports starting at the given port are used.
    hols = []
    if len(args) > 1:
        for arg in args:
            hol_addr, hol_port = arg.split(':')
            hols.append(UDPHoliday(ipaddr=hol_addr, port=int(hol_port)))
    else:
        hol_addr, hol_port = args[0].split(':')
        for i in range(options.numstrings):
            hols.append(UDPHoliday(ipaddr=hol_addr, port=int(hol_port) + i))

    # Work out the pixel dimensions of the Holiday 'screen'. A switchback
    # folds each physical string into several shorter virtual strings.
    if options.switchback:
        if options.orientation == 'vertical':
            height = options.switchback
            pieces = int(math.floor(float(NUM_GLOBES) / height))
            width = options.numstrings * pieces
        else:
            width = options.switchback
            pieces = int(math.floor(float(NUM_GLOBES) / width))
            height = options.numstrings * pieces
    else:
        if options.orientation == 'vertical':
            height = NUM_GLOBES
            pieces = options.numstrings
            width = options.numstrings
        else:
            width = NUM_GLOBES
            pieces = options.numstrings
            height = options.numstrings

    cap = cv2.VideoCapture(options.filename)
    newsize = (width, height)

    while True:
        loopstart = time.time()
        ret, frame = cap.read()
        if not ret:
            # end of stream or decode failure
            # (print as a function call: works on Python 2 and 3; the old
            # print statement was a SyntaxError under Python 3)
            print("No valid frame.")
            break
        # Resize the frame into the resolution of our Holiday array
        holframe = cv2.resize(frame, newsize, interpolation=cv2.INTER_CUBIC)
        # The colours are in the wrong format, so convert them
        holframe = cv2.cvtColor(holframe, cv2.COLOR_BGR2RGB)
        # Display the original frame, for the demo
        cv2.imshow('holivid monitor display', frame)
        # A frame is just a Numpy array of pixel values, i.e. globelists.
        # Map these values onto our holiday array.
        render_to_hols(holframe, hols, width, height,
                       options.orientation, options.switchback)
        # Wait period between keycapture (in milliseconds); this gives us
        # approximately the right number of frames per second, adjusted
        # by how long this frame took to process.
        wait_time = 1000 / options.fps
        process_time = (time.time() - loopstart) * 1000
        # NOTE(review): wait_time can become <= 0 under load; cv2.waitKey(0)
        # blocks indefinitely - confirm intended behaviour.
        wait_time = int(wait_time - process_time)
        if cv2.waitKey(wait_time) & 0xFF == ord('q'):
            break

    cap.release()
    cv2.destroyAllWindows()
| {
"content_hash": "7ab81a46d485b2040d72932f279cf393",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 94,
"avg_line_length": 32.320754716981135,
"alnum_prop": 0.5658688460790037,
"repo_name": "jpwarren/holideck",
"id": "d16953328a8ff58f8676dc416a3f448645b20b8c",
"size": "5157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/holivid.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11660"
},
{
"name": "HTML",
"bytes": "49684"
},
{
"name": "JavaScript",
"bytes": "116607"
},
{
"name": "Python",
"bytes": "2681455"
},
{
"name": "Shell",
"bytes": "5503"
}
],
"symlink_target": ""
} |
from django.apps import AppConfig
class MenuAppConfig(AppConfig):
    """Django application configuration for the menuware app."""
    # dotted-path style app name used by Django's app registry
    name = 'menuware'
    # short label; must be unique across INSTALLED_APPS
    label = 'menuware'
    # human readable name (shown e.g. in the admin)
    verbose_name = 'Menu Application'
    def ready(self):
        # no start-up initialization (signal registration etc.) is needed
        pass
| {
"content_hash": "e67c6731b95fcc93586f247d077ce07e",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 37,
"avg_line_length": 18.6,
"alnum_prop": 0.6612903225806451,
"repo_name": "un33k/django-menuware",
"id": "00d33492fede51b57288a86468ce1aef88657e9b",
"size": "186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "menuware/apps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18416"
},
{
"name": "Shell",
"bytes": "338"
}
],
"symlink_target": ""
} |
"""
A simple implementation of bencoding in python.
Bencoding is a way to serialize and organize data. The format supports four data
types: Strings, Integers, Lists and Dictionaries.
Strings: <string length encoded in base ten ASCII>:<string data>
-------
Bencoded strings are encoded in the format:
"<string length encoded in base ten ASCII>:<string data>" there is no constant
beginning delimiter and no ending delimiter.
Example of bencoded string:
"5:Hello" encodes the string "Hello"
Integers: i<integer encoded in base ten ASCII>e
--------
Integers are encoded as "i<integer encoded in base ten ASCII>e"; the initial i
and trailing e are the beginning and ending delimiters. A negative integer can be
represented as i-42e while positive are represented as i42e. Padding the integer
with zeros are not allowed, as such i042e is invalid. However the value i0e is
allowed.
Example:
"i42e" encodes the integer 42
Lists: l<bencoded elements>e
-----
The initial l and trailing e are the starting and ending delimiters. A bencoded
list can contain any bencoded type, even lists containing lists.
Example:
"li1ei2ei3ee" encodes the list [1, 2, 3]
Dictionaries: d<bencoded string><bencoded element>e
------------
The initial d and trailing e are the start and ending delimiters. A dictionary
is one or more key value pairs. Note that all key's must be bencoded strings.
The keys must appear in sorted order using a binary comparison.
Example:
'd4:listli1ei2ei3ee3:str5:Helloe' encodes dict(str="Hello", list=[1, 2, 3])
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals
)
import sys
from pprint import pprint
if sys.version_info.major == 2:
    # On Python 2, alias to the 2.x names so the rest of the module can
    # use chr()/string_type uniformly; basestring covers str and unicode.
    chr = unichr
    string_type = basestring
elif sys.version_info.major == 3:
    # chr should assume Unicode
    string_type = str
def decode(b_data):
    """
    Decodes a bencoded byte array into the relevant python data type.

    Bencoded strings are returned as byte strings (they may be arbitrary
    binary data, e.g. torrent piece hashes).

    :param b_data: A byte string of bencoded data
    :rtype : Either a {Dict|List|Integer|String}
    :raise Exception: If the input is not valid bencoded data
    """
    def _bdecode(data):
        """
        Does the actual work of decoding bencoded data.
        :param data: A reversed list of single-character byte strings
        :rtype : {Dict|List|Integer|String}
        :raise Exception: If input is not valid bencoded data
        """
        while len(data) != 0:
            char = data.pop()
            # bencoded dictionary: d<key><value>...e with string keys
            if char == b'd':
                char = data.pop()
                b_dict = {}
                while char != b'e':
                    data.append(char)
                    key = _bdecode(data)
                    b_dict[key] = _bdecode(data)
                    char = data.pop()
                return b_dict
            # bencoded list: l<element>...e
            elif char == b'l':
                char = data.pop()
                b_list = []
                while char != b'e':
                    data.append(char)
                    b_list.append(_bdecode(data))
                    char = data.pop()
                return b_list
            # bencoded integer: i<digits>e
            elif char == b'i':
                char = data.pop()
                b_int = b''
                while char != b'e':
                    b_int += char
                    char = data.pop()
                return int(b_int)
            # bencoded string: <length>:<data> (the ':' is consumed as
            # the digit-loop's exit character)
            elif char.isdigit():
                line_len = b''
                while char.isdigit():
                    line_len += char
                    char = data.pop()
                b_string = b''
                for _ in range(int(line_len)):
                    b_string += data.pop()
                return b_string
            else:
                raise Exception("Invalid bencoded input")
    # Split into single-character byte strings. On Python 3, iterating a
    # bytes object yields ints (which broke the b'd'/b'l'/b'i' comparisons
    # entirely), so slicing is used instead; on Python 2 this is
    # equivalent to list(b_data).
    data_list = [b_data[i:i + 1] for i in range(len(b_data))]
    data_list.reverse()  # so we can pop() from the logical start
    return _bdecode(data_list)
def encode(data):
    """
    Takes either a Dict, List, Integer or String and encodes it to a
    bencoded byte string.

    The previous implementation relied on bytes.format(), which only
    exists on Python 2; byte concatenation works on both versions.

    :param data: {Dict|List|Integer|String}
    :return: a bencoded byte string
    :raise Exception: an exception is raised if the data could not be bencoded
    """
    # byte string: <length>:<bytes> (on Python 2, str takes this branch)
    if isinstance(data, bytes):
        return str(len(data)).encode('ascii') + b':' + data
    # text string: encoded as UTF-8 before measuring its length
    elif isinstance(data, str):
        payload = data.encode('utf-8')
        return str(len(payload)).encode('ascii') + b':' + payload
    # integer: i<digits>e (bool is an int subclass and encodes as 0/1)
    elif isinstance(data, int):
        return b'i' + str(data).encode('ascii') + b'e'
    # list: l<encoded elements>e
    elif isinstance(data, list):
        return b'l' + b''.join([encode(element) for element in data]) + b'e'
    # dictionary: d<key><value>...e with keys in sorted order
    elif isinstance(data, dict):
        b_dict = b'd'
        for key in sorted(data.keys()):
            b_dict += encode(key) + encode(data[key])
        return b_dict + b'e'
    else:
        raise Exception("Input data could not be bencoded")
def main(argv):
    """Self-test encode/decode; optionally round-trip a .torrent file.

    NOTE(review): this harness assumes Python 2 string semantics -
    bytes("...") of a str literal and iterating 'pieces' with ord() both
    behave differently on Python 3. Verify before running under Python 3.
    """
    if len(argv) > 1:
        path = argv[1]
        with open(path, "rb") as f:
            content = bytes(f.read())
        decoded_content = decode(content)
        re_encoded_content = bytes(encode(decoded_content))
        # the decode/encode round trip must be lossless
        assert content == re_encoded_content
        # The pieces is in binary so lets convert it to hex before printing
        decoded_content['info']['pieces'] = \
            ''.join(["{0:02x}".format(ord(x)) for
                     x in decoded_content['info']['pieces']])
        pprint(decoded_content)
    # encoding self-tests
    assert encode("Hello") == b"5:Hello"
    assert encode(23) == b"i23e"
    assert encode([1, 2, 3]) == b"li1ei2ei3ee"
    assert encode(dict(str="Hello", list=[1, 2, 3])) == \
        b'd4:listli1ei2ei3ee3:str5:Helloe'
    # decoding self-tests
    assert decode(bytes("5:Hello")) == "Hello"
    assert decode(bytes("i23e")) == 23
    assert decode(bytes("li1ei2ei3ee")) == [1, 2, 3]
    assert decode(bytes("d4:listli1ei2ei3ee3:str5:Helloe")) == \
        dict(str="Hello", list=[1, 2, 3])
    # same as above but using binary string literals
    assert decode(b"5:Hello") == "Hello"
    assert decode(b"i23e") == 23
    assert decode(b"li1ei2ei3ee") == [1, 2, 3]
    assert decode(b"d4:listli1ei2ei3ee3:str5:Helloe") == \
        dict(str="Hello", list=[1, 2, 3])
    # nested structure round trip
    test_data = {
        "string": "String of data",
        "list": [1, 2, 3],
        "int": 42,
        "nested_dict": {
            "string": "another string of data",
            "list": [2, 4, 6],
            "int": -42
        }
    }
    decoded_content = encode(test_data)
    data = decode(bytes(decoded_content))
    assert data == test_data
if __name__ == "__main__":
    main(sys.argv)
| {
"content_hash": "2e6292ef84ab7febd6bc3e865082d9ea",
"timestamp": "",
"source": "github",
"line_count": 220,
"max_line_length": 80,
"avg_line_length": 30.581818181818182,
"alnum_prop": 0.5737217598097503,
"repo_name": "FnuGk/pybencode",
"id": "1c8cdf477e3c03f130b39cc54e62d42b14046595",
"size": "6728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bencode/bencode.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7167"
}
],
"symlink_target": ""
} |
"""
sentry.web.frontend.admin
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import functools
import logging
import sys
import uuid
from collections import defaultdict
import pkg_resources
import six
from django.conf import settings
from django.core.context_processors import csrf
from django.db import transaction
from django.http import HttpResponse, HttpResponseRedirect
from django.views.decorators.csrf import csrf_protect
from sentry import options
from sentry.app import env
from sentry.models import Project, User
from sentry.plugins import plugins
from sentry.utils.email import send_mail
from sentry.utils.http import absolute_uri
from sentry.utils.warnings import DeprecatedSettingWarning, UnsupportedBackend, seen_warnings
from sentry.web.decorators import requires_admin
from sentry.web.forms import (ChangeUserForm, NewUserForm, RemoveUserForm, TestEmailForm)
from sentry.utils import auth
from sentry.web.helpers import render_to_response, render_to_string
def configure_plugin(request, slug):
    """Render (or let the plugin handle) a plugin's site-wide config page."""
    plugin = plugins.get(slug)
    # plugins without site-level configuration have nothing to show here
    if not plugin.has_site_conf():
        return HttpResponseRedirect(auth.get_login_url())

    view = plugin.configure(request=request)
    if isinstance(view, HttpResponse):
        # the plugin produced a full response itself (e.g. handled a POST)
        return view

    context = {
        'plugin': plugin,
        'title': plugin.get_conf_title(),
        'slug': plugin.slug,
        'view': view,
    }
    return render_to_response('sentry/admin/plugins/configure.html', context, request)
@requires_admin
@transaction.atomic
@csrf_protect
def create_new_user(request):
    """Superuser-only form for creating a user with a random password.

    On success the user is saved and, if requested, a welcome mail with
    the generated password is sent; the whole view runs in a transaction.
    """
    if not request.is_superuser():
        return HttpResponseRedirect(auth.get_login_url())
    form = NewUserForm(
        request.POST or None, initial={
            'send_welcome_mail': True,
            'create_project': True,
        }
    )
    if form.is_valid():
        user = form.save(commit=False)
        # create a random password
        password = uuid.uuid4().hex
        user.set_password(password)
        user.save()
        if form.cleaned_data['send_welcome_mail']:
            # mail the credentials and the login URL to the new user
            context = {
                'username': user.username,
                'password': password,
                'url': absolute_uri(auth.get_login_url()),
            }
            body = render_to_string('sentry/emails/welcome_mail.txt', context, request)
            try:
                send_mail(
                    '%s Welcome to Sentry' % (options.get('mail.subject-prefix'), ),
                    body,
                    options.get('mail.from'), [user.email],
                    fail_silently=False
                )
            except Exception as e:
                # a mail failure must not roll back the user creation
                logger = logging.getLogger('sentry.mail.errors')
                logger.exception(e)
        return HttpResponseRedirect(absolute_uri('/manage/users/'))
    context = {
        'form': form,
    }
    context.update(csrf(request))
    return render_to_response('sentry/admin/users/new.html', context, request)
@requires_admin
@csrf_protect
def edit_user(request, user_id):
    """Superuser-only form for editing an existing user."""
    if not request.is_superuser():
        return HttpResponseRedirect(auth.get_login_url())

    try:
        target = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        # unknown user id - back to the listing
        return HttpResponseRedirect(absolute_uri('/manage/users/'))

    form = ChangeUserForm(request.POST or None, instance=target)
    if form.is_valid():
        form.save()
        return HttpResponseRedirect(absolute_uri('/manage/users/'))

    # active projects the user belongs to, newest first
    project_list = Project.objects.filter(
        status=0,
        organization__member_set__user=target,
    ).order_by('-date_added')

    context = dict(
        csrf(request),
        form=form,
        the_user=target,
        project_list=project_list,
    )
    return render_to_response('sentry/admin/users/edit.html', context, request)
@requires_admin
@csrf_protect
def remove_user(request, user_id):
    """Deactivate or permanently delete a user (never one's own account)."""
    # refuse to let an admin remove their own account
    if six.text_type(user_id) == six.text_type(request.user.id):
        return HttpResponseRedirect(absolute_uri('/manage/users/'))

    try:
        target = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        return HttpResponseRedirect(absolute_uri('/manage/users/'))

    form = RemoveUserForm(request.POST or None)
    if form.is_valid():
        if form.cleaned_data['removal_type'] == '2':
            # hard removal: delete the row entirely
            target.delete()
        else:
            # soft removal: just deactivate the account
            User.objects.filter(pk=target.pk).update(is_active=False)
        return HttpResponseRedirect(absolute_uri('/manage/users/'))

    context = dict(csrf(request), form=form, the_user=target)
    return render_to_response('sentry/admin/users/remove.html', context, request)
@requires_admin
def status_env(request):
    """Show the (sanitized) Django settings and runtime environment."""
    reserved = ('PASSWORD', 'SECRET', 'KEY')
    masked = '*' * 16

    config = []
    for key in sorted(dir(settings)):
        # only report public, upper-case module-level constants
        if key.startswith('_') or key.upper() != key:
            continue
        value_repr = repr(getattr(settings, key))
        # mask entries whose name or content looks sensitive
        if any(word in key for word in reserved):
            value_repr = masked
        elif any(word.lower() in value_repr.lower() for word in reserved):
            value_repr = masked
        config.append((key, value_repr))

    context = {
        'python_version': sys.version,
        'config': config,
        'environment': env.data,
    }
    return render_to_response('sentry/admin/status/env.html', context, request)
@requires_admin
def status_packages(request):
    """List the installed python packages and the loaded sentry plugins.

    Note: a leftover loop that collected settings into an unused 'config'
    list was removed; the template only receives 'modules' and
    'extensions'.
    """
    modules = sorted((p.project_name, p.version) for p in pkg_resources.working_set)
    extensions = [
        (p.get_title(), '%s.%s' % (p.__module__, p.__class__.__name__))
        for p in plugins.all(version=None)
    ]
    return render_to_response(
        'sentry/admin/status/packages.html', {
            'modules': modules,
            'extensions': extensions,
        },
        request
    )
@requires_admin
def status_warnings(request):
    """Show collected runtime warnings, grouped by known categories."""
    groupings = {
        DeprecatedSettingWarning: 'Deprecated Settings',
        UnsupportedBackend: 'Unsupported Backends',
    }

    groups = defaultdict(list)
    warnings = []
    for warning in seen_warnings:
        cls = type(warning)
        if cls in groupings:
            groups[cls].append(warning)
        else:
            warnings.append(warning)

    # Sort warnings by their message. six.text_type replaces the previous
    # six.binary_type: bytes() cannot convert a Warning instance on
    # Python 3 and raised TypeError whenever warnings were present.
    sort_by_message = functools.partial(sorted, key=six.text_type)

    return render_to_response(
        'sentry/admin/status/warnings.html',
        {
            'groups':
            sorted([(groupings[key], sort_by_message(values)) for key, values in groups.items()]),
            'warnings':
            sort_by_message(warnings),
        },
        request,
    )
@requires_admin
@csrf_protect
def status_mail(request):
    """Show the outbound mail settings and send a test email on demand."""
    form = TestEmailForm(request.POST or None)

    if form.is_valid():
        body = """This email was sent as a request to test the Sentry outbound email configuration."""
        try:
            send_mail(
                '%s Test Email' % (options.get('mail.subject-prefix'), ),
                body,
                options.get('mail.from'), [request.user.email],
                fail_silently=False
            )
        except Exception as e:
            # surface the failure on the form instead of a 500 page
            form.errors['__all__'] = [six.text_type(e)]

    # expose the current mail configuration (the password only as a flag)
    mail_settings = {
        'mail_host': options.get('mail.host'),
        'mail_password': bool(options.get('mail.password')),
        'mail_username': options.get('mail.username'),
        'mail_port': options.get('mail.port'),
        'mail_use_tls': options.get('mail.use-tls'),
        'mail_from': options.get('mail.from'),
        'mail_list_namespace': options.get('mail.list-namespace'),
    }
    context = dict(mail_settings, form=form)
    return render_to_response('sentry/admin/status/mail.html', context, request)
| {
"content_hash": "896e4c518b15f59fee32f32e1e136972",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 102,
"avg_line_length": 29.396363636363635,
"alnum_prop": 0.5968579910935181,
"repo_name": "jean/sentry",
"id": "e38e71fc6633a8a6b5389cca1ee03c596fa6d71d",
"size": "8084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sentry/web/frontend/admin.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "296112"
},
{
"name": "HTML",
"bytes": "314273"
},
{
"name": "JavaScript",
"bytes": "1293918"
},
{
"name": "Lua",
"bytes": "57158"
},
{
"name": "Makefile",
"bytes": "6632"
},
{
"name": "Python",
"bytes": "24515298"
},
{
"name": "Ruby",
"bytes": "4410"
},
{
"name": "Shell",
"bytes": "2942"
}
],
"symlink_target": ""
} |
import random
from spacerogue.thing import Ship, Space, Debris, Nebula
from spacerogue.items import Weapon, Item
class Player(Ship):
    """The player's ship.

    Tracks position inside the universe, cargo (resources and items),
    weapons, money and health, and implements every player action:
    trading, mining, combat, sector movement and system warping.

    ``pos`` is a pair of (x, y) tuples: (system coords, sector coords).
    """
    def __init__(self):
        # NOTE(review): Ship.__init__ is never called — confirm Ship has no
        # required initialization of its own.
        self.description = 'Your spaceship'
        self.pos = None  # ((sys_x, sys_y), (sec_x, sec_y)) once placed
        self.map_scope = 'system'  # which map is drawn: 'system' or 'universe'
        self.universe = None  # set by place()
        self.resources = {'rock': 0, 'metal': 0, 'dirt':
                          0, 'gas': 0, 'gold': 0, 'hydrogen': 0, 'ammo':200}
        self.inv_capacity = 300  # max tons of resources the hold carries
        self.inv_holding = 0  # cached tonnage; refreshed by check_inv_stat()
        self.inv_full = False
        self.weapons = [Weapon('railgun'), Weapon('laser'), Weapon('missile')]
        self.warp_mode = False
        self.destroyed = False
        self.current_weapon = 0  # index into self.weapons
        self.health = 100
        self.inventory = [Item('artifact')]
        self.money = 0
    def show_money(self):
        '''Shows number of space dollar(s) player has'''
        return 'You have %s space dollar(s)' % self.money
    def near_shop(self):
        '''Tells if player is near trading station'''
        current_system = self.universe.get_system(self.pos[0])
        current_sector = current_system.get_sector(self.pos[1])
        thing = current_sector.pick_thing()
        # short-circuit: is_shop is only read when the thing is a station
        return (thing.name.lower() == 'station' and thing.is_shop)
    def sell(self,res,amt):
        '''Sells amount of resource; returns None when not enough is held.

        NOTE(review): pays a flat 10 dollars per transaction regardless of
        amt — confirm this pricing is intended.
        '''
        if (self.resources[res] - amt) < 0:
            return None
        else:
            self.resources[res] -= amt
            self.money += 10
    def buy(self,res,amt):
        '''Buys amount of resource.

        NOTE(review): charges a flat 10 dollars and never checks the player
        can afford it, so money may go negative — confirm intended.
        '''
        if not self.inv_full:
            self.resources[res] += amt
            self.money -= 10
    def show_res(self):
        '''Shows resources, almost same as show_inv'''
        output = 'Your cargo hold contains:\n'
        for resource, amount in self.resources.items():
            output += '%s tons of %s\n' % (amount, resource)
        return output
    def add_item(self, name):
        '''Adds item with the given name to the inventory'''
        self.inventory.append(Item(name))
    def pick_up(self):
        '''Picks up items on thing in sector'''
        current_system = self.universe.get_system(self.pos[0])
        current_sector = current_system.get_sector(self.pos[1])
        thing = current_sector.pick_thing()
        to_pickup = thing.pick_item()
        thing.del_item(to_pickup.name)
        self.add_item(to_pickup.name)
        return 'Picked up %s' % to_pickup.name.lower()
    def reduce_ammo(self,amount):
        '''Uses amount ammo from ammo store; returns True on success.

        NOTE(review): the <= comparison refuses a shot that would land
        exactly on zero ammo, so the final round can never be spent —
        confirm whether < was intended.
        '''
        a = self.resources['ammo']
        if (a-amount) <= 0:
            return False
        else:
            self.resources['ammo'] = (a-amount)
            return True
    def weapon_stats(self):
        '''Shows stats for each weapon'''
        output = ''
        for weapon in self.weapons:
            output += '%s\n' % weapon.show_stats()
        return output
    def warp_mode_toggle(self):
        '''Toggles warp drive'''
        self.warp_mode = not self.warp_mode
        if self.warp_mode:
            return 'Warp drive has been activated'
        else:
            return 'Warp drive has been deactivated'
    def weapon_cycle(self):
        '''Cycles current weapon'''
        num_wep = len(self.weapons)
        new_wep = self.current_weapon+1
        if new_wep >= num_wep:
            new_wep = 0  # wrap around to the first weapon
        self.current_weapon = new_wep
        return 'Switched to %s' % self.weapons[new_wep].name
    def attack(self):
        '''Attacks thing in sector, incurring damage.

        NOTE(review): damage is applied before the ammo check, and the
        destroyed branch skips reduce_ammo, so a killing blow consumes no
        ammo — confirm intended.
        '''
        current_system = self.universe.get_system(self.pos[0])
        current_sector = current_system.get_sector(self.pos[1])
        to_attack = current_sector.pick_thing()
        if to_attack.__class__ == Space:
            return 'You cannot attack empty space'
        elif to_attack.__class__ == Nebula:
            return 'You cannot attack a diffuse cloud of gas'
        cur_weapon = self.weapons[self.current_weapon]
        damage = cur_weapon.properties['damage']
        to_attack.incur_damage(damage)
        if not to_attack.destroyed:
            n = to_attack.name
            h = to_attack.health
            a = cur_weapon.properties['ammo usage']
            if self.reduce_ammo(a):
                output = 'Fired %s, using %s kg ammo\n' % (cur_weapon.name,a)
                output += 'Hit %s for %s hp damage\n' % (n.lower(), damage)
                output += '%s has %s hp remaining' % (n, h)
            else:
                return 'Not enough ammo!'
            return output
        elif to_attack.destroyed:
            return 'Destroyed %s' % to_attack.name.lower()
    def check_inv_stat(self):
        '''Returns inventory full/capacity string.

        Side effect: refreshes inv_holding and inv_full from resources.
        '''
        self.inv_holding = sum(self.resources.values())
        self.inv_full = (self.inv_holding >= self.inv_capacity)
        return 'Cargo hold: %s/%s tons\n' % (self.inv_holding, self.inv_capacity)
    def show_status(self):
        '''Returns status of ship'''
        output = self.check_inv_stat()
        if self.inv_full:
            output += 'CARGO HOLD IS FULL\n\n'
        output += 'You are in sector %s\n' % str(self.pos[1])
        output += ' in system %s\n\n' % str(self.pos[0])
        output += 'Hull is at %s hp\n\n' % self.health
        output += 'Ammo: %s kg\n\n' % self.resources['ammo']
        output += '%1.1f days since departure\n\n' % self.universe.time
        output += '%s space dollar(s)' % self.money
        return output
    def show_inv(self):
        '''Returns formatted string list of inventory stuff'''
        output = 'Your cargo hold contains:\n'
        for resource, amount in self.resources.items():
            output += '%s tons of %s\n' % (amount, resource)
        output += '\nItems:\n'
        for item in self.inventory:
            output += '%s\n' % item.name.title()
        return output
    def mine(self):
        '''Mines the objects in current sector.

        NOTE(review): the capacity check uses self.inv_holding, which is
        only refreshed by check_inv_stat() and is not updated while mining,
        so it can act on stale data — confirm a caller always refreshes it
        first.
        '''
        if self.inv_full:
            return 'Cargo hold full, cannot mine'
        output = ''
        current_system = self.universe.get_system(self.pos[0])
        current_sector = current_system.get_sector(self.pos[1])
        to_mine = [
            t for t in current_sector.things if not isinstance(t, Player)]
        for thing in to_mine:
            for resource, amount in thing.resources.items():
                if amount != 0:
                    if self.inv_holding+amount > self.inv_capacity:
                        return 'Cannot mine, not enough cargo hold space'
                    else:
                        self.resources[resource] += amount
                        thing.resources[resource] = 0
                        output += 'Mined %s tons of %s from %s\n' % (amount,
                                                                     resource, thing.name.lower())
        return output
    def warp(self, x, y):
        '''Warps based on string input'''
        current_system = self.universe.get_system(self.pos[0])
        uni_x = None
        uni_y = None
        try:
            uni_x = int(x)
            uni_y = int(y)
        except ValueError:
            return 'Invalid arguments'
        if 0 <= uni_x < self.universe.size and 0 <= uni_y < self.universe.size:
            new_system = self.universe.get_system((uni_x, uni_y))
            self.warp_to(current_system, new_system)
            return 'Warped to system %s' % str((uni_x, uni_y))
        else:
            return 'Invalid warp coordinates'
    def move(self, x, y):
        '''Moves to sector based on string input'''
        current_system = self.universe.get_system(self.pos[0])
        current_sector = current_system.get_sector(self.pos[1])
        sys_x = None
        sys_y = None
        try:
            sys_x = int(x)
            sys_y = int(y)
        except ValueError:
            return 'Invalid movement coordinates'
        if 0 <= sys_x < current_system.size and 0 <= sys_y < current_system.size:
            new_sector = current_system.get_sector((sys_x, sys_y))
            self.move_to(current_sector, new_sector)
            return 'Moved to sector %s' % str((sys_x, sys_y))
        else:
            return 'Coordinates are outside the system, use warp drive instead'
    def warp_dir(self, direction, str_amount):
        '''Moves player amount in dir, returns whether success.

        NOTE(review): returns booleans except for an unknown direction,
        where it returns a (truthy) message string — a caller testing
        truthiness would read that as success. Confirm callers handle it.
        '''
        amount = None
        try:
            amount = int(str_amount)
        except ValueError:
            return False
        current_system = self.universe.get_system(self.pos[0])
        new_system_pos = (None, None)
        if direction == 'up':
            new_system_pos = (self.pos[0][0], self.pos[0][1] - amount)
        elif direction == 'down':
            new_system_pos = (self.pos[0][0], self.pos[0][1] + amount)
        elif direction == 'left':
            new_system_pos = (self.pos[0][0] - amount, self.pos[0][1])
        elif direction == 'right':
            new_system_pos = (self.pos[0][0] + amount, self.pos[0][1])
        else:
            return 'Invalid direction, please try again'
        if self.universe.valid_pos(new_system_pos):
            new_system = self.universe.get_system(new_system_pos)
            self.warp_to(current_system, new_system)
        else:
            return False
        return True
    def move_dir(self, direction, str_amount):
        '''Moves player amount in dir, returns whether success.

        NOTE(review): same mixed bool/str return convention as warp_dir.
        '''
        amount = None
        try:
            amount = int(str_amount)
        except ValueError:
            return False
        current_system = self.universe.get_system(self.pos[0])
        current_sector = current_system.get_sector(self.pos[1])
        new_sector_pos = (None, None)
        if direction == 'up':
            new_sector_pos = (self.pos[1][0], self.pos[1][1] - amount)
        elif direction == 'down':
            new_sector_pos = (self.pos[1][0], self.pos[1][1] + amount)
        elif direction == 'left':
            new_sector_pos = (self.pos[1][0] - amount, self.pos[1][1])
        elif direction == 'right':
            new_sector_pos = (self.pos[1][0] + amount, self.pos[1][1])
        else:
            return 'Invalid direction, please try again'
        if current_system.valid_pos(new_sector_pos):
            new_sector = current_system.get_sector(new_sector_pos)
            self.move_to(current_sector, new_sector)
        else:
            return False
        return True
    def move_to(self, old_sector, new_sector):
        '''Moves player to new sector'''
        old_sector.del_thing(Player)  # removal is by class, not by instance
        new_sector.add_thing(self)
        self.pos = new_sector.pos
    def warp_to(self, old_system, new_system):
        '''Warps player to random sector in new system'''
        old_sector = old_system.get_sector(self.pos[1])
        old_sector.del_thing(Player)
        size = new_system.size
        get_rand = lambda x: random.randint(0, x - 1)
        universe_pos = new_system.pos
        system_pos = (get_rand(size), get_rand(size))
        new_pos = (universe_pos, system_pos)
        new_sector = new_system.get_sector(new_pos[1])
        new_sector.add_thing(self)
        self.pos = new_pos
    def place(self, universe):
        '''Picks random sector in random system and places player there'''
        self.universe = universe
        get_rand = lambda x: random.randint(0, x - 1)
        universe_x = get_rand(universe.size)
        universe_y = get_rand(universe.size)
        # assumes every system is the same size as system (0, 0)
        system_size = universe.get_system((0, 0)).size
        system_x = get_rand(system_size)
        system_y = get_rand(system_size)
        sector = universe.get_system(
            (universe_x, universe_y)).get_sector((system_x, system_y))
        self.pos = ((universe_x, universe_y), (system_x, system_y))
        sector.add_thing(self)
    def toggle_map(self):
        '''Toggles map scope between system and universe view'''
        if self.map_scope == 'system':
            self.map_scope = 'universe'
        else:
            self.map_scope = 'system'
| {
"content_hash": "438f72f6c97de233b515106110e9305a",
"timestamp": "",
"source": "github",
"line_count": 306,
"max_line_length": 75,
"avg_line_length": 32.72875816993464,
"alnum_prop": 0.6622066899650524,
"repo_name": "kaashif/spacerogue",
"id": "da0435abd0d182ca7a42ccfd371794efbd3fb479",
"size": "10015",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spacerogue/player.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "34730"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, division, print_function, \
with_statement
import time
import socket
import errno
import struct
import logging
import binascii
import traceback
import random
import platform
import threading
from collections import deque
from shadowsocks import encrypt, obfs, eventloop, shell, common, lru_cache
from shadowsocks.common import pre_parse_header, parse_header
# we clear at most TIMEOUTS_CLEAN_SIZE timeouts each time
TIMEOUTS_CLEAN_SIZE = 512
# sendto() flag for TCP Fast Open on Linux; hard-coded because the socket
# module does not expose it on every platform
MSG_FASTOPEN = 0x20000000
# SOCKS command definition
CMD_CONNECT = 1
CMD_BIND = 2
CMD_UDP_ASSOCIATE = 3
# for each opening port, we have a TCP Relay
# for each connection, we have a TCP Relay Handler to handle the connection
# for each handler, we have 2 sockets:
#    local:   connected to the client
#    remote:  connected to remote server
# for each handler, it could be at one of several stages:
# as sslocal:
# stage 0 SOCKS hello received from local, send hello to local
# stage 1 addr received from local, query DNS for remote
# stage 2 UDP assoc
# stage 3 DNS resolved, connect to remote
# stage 4 still connecting, more data from local received
# stage 5 remote connected, piping local and remote
# as ssserver:
# stage 0 just jump to stage 1
# stage 1 addr received from local, query DNS for remote
# stage 3 DNS resolved, connect to remote
# stage 4 still connecting, more data from local received
# stage 5 remote connected, piping local and remote
STAGE_INIT = 0
STAGE_ADDR = 1
STAGE_UDP_ASSOC = 2
STAGE_DNS = 3
STAGE_CONNECTING = 4
STAGE_STREAM = 5
STAGE_DESTROYED = -1
# for each handler, we have 2 stream directions:
#    upstream:    from client to server direction
#                 read local and write to remote
#    downstream:  from server to client direction
#                 read remote and write to local
STREAM_UP = 0
STREAM_DOWN = 1
# for each stream, it's waiting for reading, or writing, or both
WAIT_STATUS_INIT = 0
WAIT_STATUS_READING = 1
WAIT_STATUS_WRITING = 2
WAIT_STATUS_READWRITING = WAIT_STATUS_READING | WAIT_STATUS_WRITING
# typical PPPoE Ethernet MTU; MSS leaves 40 bytes for IP + TCP headers
NETWORK_MTU = 1492
TCP_MSS = NETWORK_MTU - 40
BUF_SIZE = 32 * 1024  # per-recv buffer size
UDP_MAX_BUF_SIZE = 65536
class SpeedTester(object):
    """Sliding-window byte-rate tracker used to enforce per-connection
    (or per-user) speed limits.

    The limit is supplied in KB/s and stored internally as bytes/s;
    a limit of 0 disables tracking entirely.
    """

    def __init__(self, max_speed=0):
        self.max_speed = max_speed * 1024  # bytes per second; 0 = unlimited
        self.timeout = 1  # seconds a traffic sample stays in the window
        self._cache = deque()  # (timestamp, byte_count) samples
        self.sum_len = 0  # total bytes currently in the window

    def update_limit(self, max_speed):
        """Replace the speed limit (given in KB/s)."""
        self.max_speed = max_speed * 1024

    def add(self, data_len):
        """Record data_len bytes of traffic (no-op when limiting is off)."""
        if self.max_speed <= 0:
            return
        self._cache.append((time.time(), data_len))
        self.sum_len += data_len

    def isExceed(self):
        """Return True when the observed rate is at or above the limit."""
        if self.max_speed <= 0 or not self.sum_len:
            return False
        now = time.time()
        oldest_ts, oldest_len = self._cache[0]
        elapsed = max(now - oldest_ts, 0.01)  # avoid divide-by-zero
        rate = self.sum_len / elapsed
        # age out at most one expired sample per call
        if oldest_ts + self.timeout < now:
            self.sum_len -= oldest_len
            self._cache.popleft()
        return rate >= self.max_speed
class TCPRelayHandler(object):
    def __init__(self, server, fd_to_handlers, loop, local_sock, config,
                 dns_resolver, is_local):
        """Handle one accepted connection belonging to `server` (a TCPRelay).

        Builds the encryptor plus the obfs and protocol plugins for this
        connection, registers the local socket on the event loop, and
        initializes per-connection bookkeeping and speed limiting. If the
        encryptor cannot be created the handler is immediately marked
        destroyed and setup stops.
        """
        self._server = server
        self._fd_to_handlers = fd_to_handlers
        self._loop = loop
        self._local_sock = local_sock
        self._remote_sock = None
        self._remote_sock_v6 = None
        self._remote_udp = False
        self._config = config
        self._dns_resolver = dns_resolver
        self._client_address = local_sock.getpeername()[:2]
        self._accept_address = local_sock.getsockname()[:2]
        self._user = None
        # until authentication, the listen port doubles as the user id
        self._user_id = server._listen_port
        self._tcp_mss = TCP_MSS
        # TCP Relay works as either sslocal or ssserver
        # if is_local, this is sslocal
        self._is_local = is_local
        self._stage = STAGE_INIT
        try:
            self._encryptor = encrypt.Encryptor(config['password'],
                                                config['method'])
        except Exception:
            self._stage = STAGE_DESTROYED
            logging.error('create encryptor fail at port %d', server._listen_port)
            return
        self._encrypt_correct = True
        self._obfs = obfs.obfs(config['obfs'])
        self._protocol = obfs.obfs(config['protocol'])
        # combined per-packet overhead of both plugins; shrink the recv
        # buffer so plaintext + overhead still fits in BUF_SIZE
        self._overhead = self._obfs.get_overhead(self._is_local) + self._protocol.get_overhead(self._is_local)
        self._recv_buffer_size = BUF_SIZE - self._overhead
        try:
            self._tcp_mss = local_sock.getsockopt(socket.SOL_TCP, socket.TCP_MAXSEG)
            logging.debug("TCP MSS = %d" % (self._tcp_mss,))
        except:
            # NOTE(review): bare except — TCP_MAXSEG may be unsupported on
            # this platform; the default TCP_MSS is kept in that case
            pass
        # configure the obfs plugin with connection/server parameters
        server_info = obfs.server_info(server.obfs_data)
        server_info.host = config['server']
        server_info.port = server._listen_port
        #server_info.users = server.server_users
        #server_info.update_user_func = self._update_user
        server_info.client = self._client_address[0]
        server_info.client_port = self._client_address[1]
        server_info.protocol_param = ''
        server_info.obfs_param = config['obfs_param']
        server_info.iv = self._encryptor.cipher_iv
        server_info.recv_iv = b''
        server_info.key_str = common.to_bytes(config['password'])
        server_info.key = self._encryptor.cipher_key
        server_info.head_len = 30
        server_info.tcp_mss = self._tcp_mss
        server_info.buffer_size = self._recv_buffer_size
        self._obfs.set_server_info(server_info)
        # configure the protocol plugin; unlike obfs it carries the user
        # table and protocol_param instead of obfs_param
        server_info = obfs.server_info(server.protocol_data)
        server_info.host = config['server']
        server_info.port = server._listen_port
        server_info.users = server.server_users
        server_info.update_user_func = self._update_user
        server_info.client = self._client_address[0]
        server_info.client_port = self._client_address[1]
        server_info.protocol_param = config['protocol_param']
        server_info.obfs_param = ''
        server_info.iv = self._encryptor.cipher_iv
        server_info.recv_iv = b''
        server_info.key_str = common.to_bytes(config['password'])
        server_info.key = self._encryptor.cipher_key
        server_info.head_len = 30
        server_info.tcp_mss = self._tcp_mss
        server_info.buffer_size = self._recv_buffer_size
        self._protocol.set_server_info(server_info)
        # redirect targets used when the stream fails protocol checks
        self._redir_list = config.get('redirect', ["*#0.0.0.0:0"])
        self._is_redirect = False
        self._bind = config.get('out_bind', '')
        self._bindv6 = config.get('out_bindv6', '')
        self._ignore_bind_list = config.get('ignore_bind', [])
        self._fastopen_connected = False
        self._data_to_write_to_local = []
        self._data_to_write_to_remote = []
        self._udp_data_send_buffer = b''
        self._upstream_status = WAIT_STATUS_READING
        self._downstream_status = WAIT_STATUS_INIT
        self._remote_address = None
        if 'forbidden_ip' in config:
            self._forbidden_iplist = config['forbidden_ip']
        else:
            self._forbidden_iplist = None
        if 'forbidden_port' in config:
            self._forbidden_portset = config['forbidden_port']
        else:
            self._forbidden_portset = None
        if is_local:
            self._chosen_server = self._get_a_server()
        # register ourselves for the local socket's events
        fd_to_handlers[local_sock.fileno()] = self
        local_sock.setblocking(False)
        local_sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
        loop.add(local_sock, eventloop.POLL_IN | eventloop.POLL_ERR,
                 self._server)
        self.last_activity = 0
        self._update_activity()
        self._server.add_connection(1)
        self._server.stat_add(self._client_address[0], 1)
        self.speed_tester_u = SpeedTester(config.get("speed_limit_per_con", 0))
        self.speed_tester_d = SpeedTester(config.get("speed_limit_per_con", 0))
    def __hash__(self):
        # default __hash__ is id / 16
        # we want to eliminate collisions
        # hashing by the full id() keeps each handler unique in dicts/sets
        return id(self)
    @property
    def remote_address(self):
        # (host, port) the client asked to reach; None until the addr
        # stage has parsed the request header
        return self._remote_address
def _get_a_server(self):
server = self._config['server']
server_port = self._config['server_port']
if type(server_port) == list:
server_port = random.choice(server_port)
if type(server) == list:
server = random.choice(server)
logging.debug('chosen server: %s:%d', server, server_port)
return server, server_port
def _update_user(self, user):
self._user = user
self._user_id = struct.unpack('<I', user)[0]
    def _update_activity(self, data_len=0):
        # tell the TCP Relay we have activities recently
        # else it will think we are inactive and timed out
        # data_len additionally feeds the relay's traffic accounting
        self._server.update_activity(self, data_len)
def _update_stream(self, stream, status):
# update a stream to a new waiting status
# check if status is changed
# only update if dirty
dirty = False
if stream == STREAM_DOWN:
if self._downstream_status != status:
self._downstream_status = status
dirty = True
elif stream == STREAM_UP:
if self._upstream_status != status:
self._upstream_status = status
dirty = True
if dirty:
if self._local_sock:
event = eventloop.POLL_ERR
if self._downstream_status & WAIT_STATUS_WRITING:
event |= eventloop.POLL_OUT
if self._upstream_status & WAIT_STATUS_READING:
event |= eventloop.POLL_IN
self._loop.modify(self._local_sock, event)
if self._remote_sock:
event = eventloop.POLL_ERR
if self._downstream_status & WAIT_STATUS_READING:
event |= eventloop.POLL_IN
if self._upstream_status & WAIT_STATUS_WRITING:
event |= eventloop.POLL_OUT
self._loop.modify(self._remote_sock, event)
if self._remote_sock_v6:
self._loop.modify(self._remote_sock_v6, event)
    def _write_to_sock(self, data, sock):
        # write data to sock
        # if only some of the data are written, put remaining in the buffer
        # and update the stream to wait for writing
        #
        # Returns True on success/partial success, False when the handler
        # was destroyed (or sock is falsy). NOTE(review): the TCP branch
        # returns None when data is empty — confirm callers treat that the
        # same as True.
        if not sock:
            return False
        #logging.debug("_write_to_sock %s %s %s" % (self._remote_sock, sock, self._remote_udp))
        uncomplete = False
        if self._remote_udp and sock == self._remote_sock:
            # UDP-over-TCP: accumulate the stream, then peel off complete
            # length-prefixed datagrams and forward each one over UDP
            try:
                self._udp_data_send_buffer += data
                #logging.info('UDP over TCP sendto %d %s' % (len(data), binascii.hexlify(data)))
                while len(self._udp_data_send_buffer) > 6:
                    # frame length is a big-endian u16; values >= 0xff00 use
                    # an extended encoding with the real length one byte in
                    length = struct.unpack('>H', self._udp_data_send_buffer[:2])[0]
                    if length >= 0xff00:
                        length = struct.unpack('>H', self._udp_data_send_buffer[1:3])[0] + 0xff00
                    if length > len(self._udp_data_send_buffer):
                        break  # incomplete frame; wait for more data
                    data = self._udp_data_send_buffer[:length]
                    if length >= 0xff00:
                        data = data[1:]
                    self._udp_data_send_buffer = self._udp_data_send_buffer[length:]
                    frag = common.ord(data[2])
                    if frag != 0:
                        # fragmented UDP requests are not supported
                        logging.warn('drop a message since frag is %d' % (frag,))
                        continue
                    else:
                        data = data[3:]  # strip RSV/RSV/FRAG prefix
                    header_result = parse_header(data)
                    if header_result is None:
                        continue
                    connecttype, dest_addr, dest_port, header_length = header_result
                    addrs = socket.getaddrinfo(dest_addr, dest_port, 0,
                                               socket.SOCK_DGRAM, socket.SOL_UDP)
                    #logging.info('UDP over TCP sendto %s:%d %d bytes from %s:%d' % (dest_addr, dest_port, len(data), self._client_address[0], self._client_address[1]))
                    if addrs:
                        af, socktype, proto, canonname, server_addr = addrs[0]
                        data = data[header_length:]
                        # pick the v6 socket when the target resolved to v6
                        if af == socket.AF_INET6:
                            self._remote_sock_v6.sendto(data, (server_addr[0], dest_port))
                        else:
                            sock.sendto(data, (server_addr[0], dest_port))
            except Exception as e:
                #trace = traceback.format_exc()
                #logging.error(trace)
                error_no = eventloop.errno_from_exception(e)
                if error_no in (errno.EAGAIN, errno.EINPROGRESS,
                                errno.EWOULDBLOCK):
                    uncomplete = True
                else:
                    shell.print_exception(e)
                    logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
                    self.destroy()
                    return False
            return True
        else:
            # plain TCP path: try a single non-blocking send; buffer the
            # remainder when the kernel accepts only part of it
            try:
                if self._encrypt_correct:
                    if sock == self._remote_sock:
                        # upstream traffic accounting per user
                        self._server.add_transfer_u(self._user, len(data))
                    self._update_activity(len(data))
                if data:
                    l = len(data)
                    s = sock.send(data)
                    if s < l:
                        data = data[s:]
                        uncomplete = True
                else:
                    return
            except (OSError, IOError) as e:
                error_no = eventloop.errno_from_exception(e)
                if error_no in (errno.EAGAIN, errno.EINPROGRESS,
                                errno.EWOULDBLOCK):
                    uncomplete = True
                else:
                    #traceback.print_exc()
                    shell.print_exception(e)
                    logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
                    self.destroy()
                    return False
            except Exception as e:
                shell.print_exception(e)
                logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
                self.destroy()
                return False
        if uncomplete:
            # queue the unwritten tail and poll for writability
            if sock == self._local_sock:
                self._data_to_write_to_local.append(data)
                self._update_stream(STREAM_DOWN, WAIT_STATUS_WRITING)
            elif sock == self._remote_sock:
                self._data_to_write_to_remote.append(data)
                self._update_stream(STREAM_UP, WAIT_STATUS_WRITING)
            else:
                logging.error('write_all_to_sock:unknown socket from %s:%d' % (self._client_address[0], self._client_address[1]))
        else:
            # everything written; go back to waiting for reads
            if sock == self._local_sock:
                self._update_stream(STREAM_DOWN, WAIT_STATUS_READING)
            elif sock == self._remote_sock:
                self._update_stream(STREAM_UP, WAIT_STATUS_READING)
            else:
                logging.error('write_all_to_sock:unknown socket from %s:%d' % (self._client_address[0], self._client_address[1]))
        return True
    def _get_redirect_host(self, client_address, ogn_data):
        # Choose a (host, port) redirect target for an unparseable stream.
        #
        # Two config formats are supported (detected from the first entry):
        #  * plain "host:port" entries: pick one deterministically from a
        #    hash of the payload plus the client address, preferring entries
        #    whose port matches our listen port;
        #  * "match#host:port" entries: return the first whose match part
        #    (listen-port filter and/or payload regex) accepts this stream.
        # ("0.0.0.0", 0) means "no redirect" (caller disconnects).
        host_list = self._redir_list or ["*#0.0.0.0:0"]
        if type(host_list) != list:
            host_list = [host_list]
        items_sum = common.to_str(host_list[0]).rsplit('#', 1)
        if len(items_sum) < 2:
            # hash-based selection: mix the payload CRC with the numeric
            # form of the client address for a stable per-client choice
            hash_code = binascii.crc32(ogn_data)
            addrs = socket.getaddrinfo(client_address[0], client_address[1], 0, socket.SOCK_STREAM, socket.SOL_TCP)
            af, socktype, proto, canonname, sa = addrs[0]
            address_bytes = common.inet_pton(af, sa[0])
            if af == socket.AF_INET6:
                # low 64 bits of the v6 address
                addr = struct.unpack('>Q', address_bytes[8:])[0]
            elif af == socket.AF_INET:
                addr = struct.unpack('>I', address_bytes)[0]
            else:
                addr = 0
            host_port = []
            match_port = False
            for host in host_list:
                items = common.to_str(host).rsplit(':', 1)
                if len(items) > 1:
                    try:
                        port = int(items[1])
                        if port == self._server._listen_port:
                            match_port = True
                        host_port.append((items[0], port))
                    except:
                        # NOTE(review): bare except skips malformed entries
                        pass
                else:
                    host_port.append((host, 80))  # default port 80
            if match_port:
                # prefer only the entries matching our own listen port
                last_host_port = host_port
                host_port = []
                for host in last_host_port:
                    if host[1] == self._server._listen_port:
                        host_port.append(host)
            return host_port[((hash_code & 0xffffffff) + addr) % len(host_port)]
        else:
            # rule-based selection: first matching "match#host:port" wins
            host_port = []
            for host in host_list:
                items_sum = common.to_str(host).rsplit('#', 1)
                items_match = common.to_str(items_sum[0]).rsplit(':', 1)
                items = common.to_str(items_sum[1]).rsplit(':', 1)
                if len(items_match) > 1:
                    if items_match[1] != "*":
                        try:
                            # a port filter of 0 matches any listen port
                            if self._server._listen_port != int(items_match[1]) and int(items_match[1]) != 0:
                                continue
                        except:
                            pass
                if items_match[0] != "*" and common.match_regex(
                        items_match[0], ogn_data) == False:
                    continue
                if len(items) > 1:
                    try:
                        port = int(items[1])
                        return (items[0], port)
                    except:
                        pass
                else:
                    return (items[0], 80)
        return ("0.0.0.0", 0)
def _handel_protocol_error(self, client_address, ogn_data):
logging.warn("Protocol ERROR, TCP ogn data %s from %s:%d via port %d by UID %d" % (binascii.hexlify(ogn_data), client_address[0], client_address[1], self._server._listen_port, self._user_id))
self._encrypt_correct = False
#create redirect or disconnect by hash code
host, port = self._get_redirect_host(client_address, ogn_data)
if port == 0:
raise Exception('can not parse header')
data = b"\x03" + common.to_bytes(common.chr(len(host))) + common.to_bytes(host) + struct.pack('>H', port)
self._is_redirect = True
logging.warn("TCP data redir %s:%d %s" % (host, port, binascii.hexlify(data)))
return data + ogn_data
    def _handle_stage_connecting(self, data):
        # Buffer data that arrives while the remote connection is still in
        # progress; on sslocal with fast_open, the first buffered payload is
        # used to perform the TFO connect itself.
        if self._is_local:
            if self._encryptor is not None:
                # client side: protocol-wrap, encrypt and obfuscate upstream
                data = self._protocol.client_pre_encrypt(data)
                data = self._encryptor.encrypt(data)
                data = self._obfs.client_encode(data)
            if data:
                self._data_to_write_to_remote.append(data)
        if self._is_local and not self._fastopen_connected and \
                self._config['fast_open']:
            # for sslocal and fastopen, we basically wait for data and use
            # sendto to connect
            try:
                # only connect once
                self._fastopen_connected = True
                remote_sock = \
                    self._create_remote_socket(self._chosen_server[0],
                                               self._chosen_server[1])
                self._loop.add(remote_sock, eventloop.POLL_ERR, self._server)
                data = b''.join(self._data_to_write_to_remote)
                l = len(data)
                # MSG_FASTOPEN: the connect happens as part of this sendto
                s = remote_sock.sendto(data, MSG_FASTOPEN, self._chosen_server)
                if s < l:
                    # keep whatever the kernel did not accept
                    data = data[s:]
                    self._data_to_write_to_remote = [data]
                else:
                    self._data_to_write_to_remote = []
                self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING)
            except (OSError, IOError) as e:
                if eventloop.errno_from_exception(e) == errno.EINPROGRESS:
                    # in this case data is not sent at all
                    self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING)
                elif eventloop.errno_from_exception(e) == errno.ENOTCONN:
                    logging.error('fast open not supported on this OS')
                    self._config['fast_open'] = False
                    self.destroy()
                else:
                    shell.print_exception(e)
                    if self._config['verbose']:
                        traceback.print_exc()
                    logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
                    self.destroy()
def _get_head_size(self, buf, def_value):
if len(buf) < 2:
return def_value
head_type = common.ord(buf[0]) & 0xF
if head_type == 1:
return 7
if head_type == 4:
return 19
if head_type == 3:
return 4 + common.ord(buf[1])
return def_value
    def _handle_stage_addr(self, ogn_data, data):
        # Parse the SOCKS-style address header from the first payload,
        # record the destination, and kick off DNS resolution. On parse
        # failure (server side) the stream is rewritten for redirect via
        # _handel_protocol_error. Any exception destroys the handler.
        try:
            if self._is_local:
                # client side: this is a raw SOCKS5 request from the app
                cmd = common.ord(data[1])
                if cmd == CMD_UDP_ASSOCIATE:
                    logging.debug('UDP associate')
                    if self._local_sock.family == socket.AF_INET6:
                        header = b'\x05\x00\x00\x04'
                    else:
                        header = b'\x05\x00\x00\x01'
                    addr, port = self._local_sock.getsockname()[:2]
                    addr_to_send = socket.inet_pton(self._local_sock.family,
                                                    addr)
                    port_to_send = struct.pack('>H', port)
                    self._write_to_sock(header + addr_to_send + port_to_send,
                                        self._local_sock)
                    self._stage = STAGE_UDP_ASSOC
                    # just wait for the client to disconnect
                    return
                elif cmd == CMD_CONNECT:
                    # just trim VER CMD RSV
                    data = data[3:]
                else:
                    logging.error('unknown command %d', cmd)
                    self.destroy()
                    return
            before_parse_data = data
            if self._is_local:
                header_result = parse_header(data)
            else:
                # server side: strip any protocol preamble, then parse;
                # fall back to the redirect path when parsing fails
                data = pre_parse_header(data)
                if data is None:
                    data = self._handel_protocol_error(self._client_address, ogn_data)
                header_result = parse_header(data)
                if header_result is not None:
                    try:
                        common.to_str(header_result[1])
                    except Exception as e:
                        # destination address is not decodable text
                        header_result = None
                if header_result is None:
                    data = self._handel_protocol_error(self._client_address, ogn_data)
                    header_result = parse_header(data)
                # overhead may have changed once plugins saw real traffic
                self._overhead = self._obfs.get_overhead(self._is_local) + self._protocol.get_overhead(self._is_local)
                self._recv_buffer_size = BUF_SIZE - self._overhead
                server_info = self._obfs.get_server_info()
                server_info.buffer_size = self._recv_buffer_size
                server_info = self._protocol.get_server_info()
                server_info.buffer_size = self._recv_buffer_size
            connecttype, remote_addr, remote_port, header_length = header_result
            common.connect_log('connecting %s:%d from %s:%d' %
                               (common.to_str(remote_addr), remote_port,
                                self._client_address[0], self._client_address[1]))
            self._remote_address = (common.to_str(remote_addr), remote_port)
            self._remote_udp = (connecttype != 0)
            # pause reading
            self._update_stream(STREAM_UP, WAIT_STATUS_WRITING)
            self._stage = STAGE_DNS
            if self._is_local:
                # forward address to remote
                # fake SOCKS success reply to the app; real connect follows
                self._write_to_sock((b'\x05\x00\x00\x01'
                                     b'\x00\x00\x00\x00\x10\x10'),
                                    self._local_sock)
                head_len = self._get_head_size(data, 30)
                self._obfs.obfs.server_info.head_len = head_len
                self._protocol.obfs.server_info.head_len = head_len
                if self._encryptor is not None:
                    data = self._protocol.client_pre_encrypt(data)
                    data_to_send = self._encryptor.encrypt(data)
                    data_to_send = self._obfs.client_encode(data_to_send)
                if data_to_send:
                    self._data_to_write_to_remote.append(data_to_send)
                # notice here may go into _handle_dns_resolved directly
                self._dns_resolver.resolve(self._chosen_server[0],
                                           self._handle_dns_resolved)
            else:
                # queue any payload that followed the address header
                if len(data) > header_length:
                    self._data_to_write_to_remote.append(data[header_length:])
                # notice here may go into _handle_dns_resolved directly
                self._dns_resolver.resolve(remote_addr,
                                           self._handle_dns_resolved)
        except Exception as e:
            self._log_error(e)
            if self._config['verbose']:
                traceback.print_exc()
            self.destroy()
def _socket_bind_addr(self, sock, af):
bind_addr = ''
if self._bind and af == socket.AF_INET:
bind_addr = self._bind
elif self._bindv6 and af == socket.AF_INET6:
bind_addr = self._bindv6
else:
bind_addr = self._accept_address[0]
bind_addr = bind_addr.replace("::ffff:", "")
if bind_addr in self._ignore_bind_list:
bind_addr = None
if bind_addr:
local_addrs = socket.getaddrinfo(bind_addr, 0, 0, socket.SOCK_STREAM, socket.SOL_TCP)
if local_addrs[0][0] == af:
logging.debug("bind %s" % (bind_addr,))
try:
sock.bind((bind_addr, 0))
except Exception as e:
logging.warn("bind %s fail" % (bind_addr,))
    def _create_remote_socket(self, ip, port):
        # Create (and register in fd_to_handlers) the outgoing socket(s).
        # For UDP relays, ip/port are ignored and a wildcard-bound v4
        # socket plus a companion v6 socket are created instead.
        # Raises on resolution failure or forbidden destinations.
        if self._remote_udp:
            addrs_v6 = socket.getaddrinfo("::", 0, 0, socket.SOCK_DGRAM, socket.SOL_UDP)
            addrs = socket.getaddrinfo("0.0.0.0", 0, 0, socket.SOCK_DGRAM, socket.SOL_UDP)
        else:
            addrs = socket.getaddrinfo(ip, port, 0, socket.SOCK_STREAM, socket.SOL_TCP)
        if len(addrs) == 0:
            raise Exception("getaddrinfo failed for %s:%d" % (ip, port))
        af, socktype, proto, canonname, sa = addrs[0]
        # forbidden ip/port checks apply only to direct TCP connections
        if not self._remote_udp and not self._is_redirect:
            if self._forbidden_iplist:
                if common.to_str(sa[0]) in self._forbidden_iplist:
                    if self._remote_address:
                        raise Exception('IP %s is in forbidden list, when connect to %s:%d via port %d by UID %d' %
                                        (common.to_str(sa[0]), self._remote_address[0], self._remote_address[1], self._server._listen_port, self._user_id))
                    raise Exception('IP %s is in forbidden list, reject' %
                                    common.to_str(sa[0]))
            if self._forbidden_portset:
                if sa[1] in self._forbidden_portset:
                    if self._remote_address:
                        raise Exception('Port %d is in forbidden list, when connect to %s:%d via port %d by UID %d' %
                                        (sa[1], self._remote_address[0], self._remote_address[1], self._server._listen_port, self._user_id))
                    raise Exception('Port %d is in forbidden list, reject' % sa[1])
        remote_sock = socket.socket(af, socktype, proto)
        self._remote_sock = remote_sock
        self._fd_to_handlers[remote_sock.fileno()] = self
        if self._remote_udp:
            # companion IPv6 socket so v6 destinations can be reached too
            af, socktype, proto, canonname, sa = addrs_v6[0]
            remote_sock_v6 = socket.socket(af, socktype, proto)
            self._remote_sock_v6 = remote_sock_v6
            self._fd_to_handlers[remote_sock_v6.fileno()] = self
        remote_sock.setblocking(False)
        if self._remote_udp:
            remote_sock_v6.setblocking(False)
            if not self._is_local:
                self._socket_bind_addr(remote_sock, af)
                self._socket_bind_addr(remote_sock_v6, af)
        else:
            remote_sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
            if not self._is_local:
                self._socket_bind_addr(remote_sock, af)
        return remote_sock
    def _handle_dns_resolved(self, result, error):
        # DNS callback: connect to the resolved address (or arm fastopen)
        # and move to STAGE_CONNECTING. Any failure path — resolver error,
        # empty result, or exception while connecting — destroys the
        # handler; the early `return` inside the try is the success exit.
        if error:
            self._log_error(error)
            self.destroy()
            return
        if result:
            ip = result[1]
            if ip:
                try:
                    self._stage = STAGE_CONNECTING
                    remote_addr = ip
                    if self._is_local:
                        remote_port = self._chosen_server[1]
                    else:
                        remote_port = self._remote_address[1]
                    if self._is_local and self._config['fast_open']:
                        # for fastopen:
                        # wait for more data to arrive and send them in one SYN
                        self._stage = STAGE_CONNECTING
                        # we don't have to wait for remote since it's not
                        # created
                        self._update_stream(STREAM_UP, WAIT_STATUS_READING)
                        # TODO when there is already data in this packet
                    else:
                        # else do connect
                        remote_sock = self._create_remote_socket(remote_addr,
                                                                 remote_port)
                        if self._remote_udp:
                            # UDP relay sockets only need read events
                            self._loop.add(remote_sock,
                                           eventloop.POLL_IN,
                                           self._server)
                            if self._remote_sock_v6:
                                self._loop.add(self._remote_sock_v6,
                                               eventloop.POLL_IN,
                                               self._server)
                        else:
                            try:
                                remote_sock.connect((remote_addr, remote_port))
                            except (OSError, IOError) as e:
                                if eventloop.errno_from_exception(e) in (errno.EINPROGRESS,
                                                                         errno.EWOULDBLOCK):
                                    pass # always goto here
                                else:
                                    raise e
                            self._loop.add(remote_sock,
                                           eventloop.POLL_ERR | eventloop.POLL_OUT,
                                           self._server)
                        self._stage = STAGE_CONNECTING
                        self._update_stream(STREAM_UP, WAIT_STATUS_READWRITING)
                        self._update_stream(STREAM_DOWN, WAIT_STATUS_READING)
                        if self._remote_udp:
                            # flush any datagrams queued while resolving
                            while self._data_to_write_to_remote:
                                data = self._data_to_write_to_remote[0]
                                del self._data_to_write_to_remote[0]
                                self._write_to_sock(data, self._remote_sock)
                    return
                except Exception as e:
                    shell.print_exception(e)
                    if self._config['verbose']:
                        traceback.print_exc()
                    logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
        self.destroy()
def _get_read_size(self, sock, recv_buffer_size):
if self._overhead == 0:
return recv_buffer_size
buffer_size = len(sock.recv(recv_buffer_size, socket.MSG_PEEK))
if buffer_size == recv_buffer_size:
return buffer_size
s = buffer_size % self._tcp_mss + self._overhead
if s > self._tcp_mss:
return buffer_size + s - self._tcp_mss
return buffer_size
def _on_local_read(self):
# handle all local read events and dispatch them to methods for
# each stage
if not self._local_sock:
return
is_local = self._is_local
if is_local:
recv_buffer_size = self._get_read_size(self._local_sock, self._recv_buffer_size)
else:
recv_buffer_size = BUF_SIZE
data = None
try:
data = self._local_sock.recv(recv_buffer_size)
except (OSError, IOError) as e:
if eventloop.errno_from_exception(e) in \
(errno.ETIMEDOUT, errno.EAGAIN, errno.EWOULDBLOCK):
return
if not data:
self.destroy()
return
self.speed_tester_u.add(len(data))
self._server.speed_tester_u(self._user_id).add(len(data))
ogn_data = data
if not is_local:
if self._encryptor is not None:
if self._encrypt_correct:
try:
obfs_decode = self._obfs.server_decode(data)
except Exception as e:
shell.print_exception(e)
logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
self.destroy()
return
if obfs_decode[2]:
data = self._obfs.server_encode(b'')
self._write_to_sock(data, self._local_sock)
if obfs_decode[1]:
if not self._protocol.obfs.server_info.recv_iv:
iv_len = len(self._protocol.obfs.server_info.iv)
self._protocol.obfs.server_info.recv_iv = obfs_decode[0][:iv_len]
data = self._encryptor.decrypt(obfs_decode[0])
else:
data = obfs_decode[0]
try:
data, sendback = self._protocol.server_post_decrypt(data)
if sendback:
backdata = self._protocol.server_pre_encrypt(b'')
backdata = self._encryptor.encrypt(backdata)
backdata = self._obfs.server_encode(backdata)
try:
self._write_to_sock(backdata, self._local_sock)
except Exception as e:
shell.print_exception(e)
if self._config['verbose']:
traceback.print_exc()
logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
self.destroy()
return
except Exception as e:
shell.print_exception(e)
logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
self.destroy()
else:
return
if not data:
return
if self._stage == STAGE_STREAM:
if self._is_local:
if self._encryptor is not None:
data = self._protocol.client_pre_encrypt(data)
data = self._encryptor.encrypt(data)
data = self._obfs.client_encode(data)
self._write_to_sock(data, self._remote_sock)
return
elif is_local and self._stage == STAGE_INIT:
# TODO check auth method
self._write_to_sock(b'\x05\00', self._local_sock)
self._stage = STAGE_ADDR
return
elif self._stage == STAGE_CONNECTING:
self._handle_stage_connecting(data)
elif (is_local and self._stage == STAGE_ADDR) or \
(not is_local and self._stage == STAGE_INIT):
self._handle_stage_addr(ogn_data, data)
    def _on_remote_read(self, is_remote_sock):
        """Handle a readable remote socket (v4 when *is_remote_sock*, else v6).

        For UDP-over-TCP relays, each received datagram is wrapped in a
        SOCKS5-style address header plus a 2-byte length prefix before being
        forwarded. For plain TCP, the stream is encoded (server side) or
        decoded (local side) and written to the local socket. Destroys the
        handler on EOF or decode failure.
        """
        # handle all remote read events
        data = None
        try:
            if self._remote_udp:
                if is_remote_sock:
                    data, addr = self._remote_sock.recvfrom(UDP_MAX_BUF_SIZE)
                else:
                    data, addr = self._remote_sock_v6.recvfrom(UDP_MAX_BUF_SIZE)
                port = struct.pack('>H', addr[1])
                try:
                    # Try an IPv4 header first; fall back to IPv6 on failure.
                    ip = socket.inet_aton(addr[0])
                    data = b'\x00\x01' + ip + port + data
                except Exception as e:
                    ip = socket.inet_pton(socket.AF_INET6, addr[0])
                    data = b'\x00\x04' + ip + port + data
                size = len(data) + 2
                # Frames >= 0xff00 bytes are escaped with a 0xff marker byte.
                if size >= 0xff00:
                    data = common.chr(0xff) + struct.pack('>H', size - 0xff00 + 1) + data
                else:
                    data = struct.pack('>H', size) + data
                #logging.info('UDP over TCP recvfrom %s:%d %d bytes to %s:%d' % (addr[0], addr[1], len(data), self._client_address[0], self._client_address[1]))
            else:
                if self._is_local:
                    recv_buffer_size = BUF_SIZE
                else:
                    # Size the read so encoded output fits whole TCP segments.
                    recv_buffer_size = self._get_read_size(self._remote_sock, self._recv_buffer_size)
                data = self._remote_sock.recv(recv_buffer_size)
        except (OSError, IOError) as e:
            if eventloop.errno_from_exception(e) in \
                    (errno.ETIMEDOUT, errno.EAGAIN, errno.EWOULDBLOCK, 10035): #errno.WSAEWOULDBLOCK
                return
        if not data:
            # EOF from the remote peer.
            self.destroy()
            return
        self.speed_tester_d.add(len(data))
        self._server.speed_tester_d(self._user_id).add(len(data))
        if self._encryptor is not None:
            if self._is_local:
                try:
                    obfs_decode = self._obfs.client_decode(data)
                except Exception as e:
                    shell.print_exception(e)
                    logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
                    self.destroy()
                    return
                if obfs_decode[1]:
                    # Obfuscator asks us to send a (possibly empty) reply.
                    send_back = self._obfs.client_encode(b'')
                    self._write_to_sock(send_back, self._remote_sock)
                if not self._protocol.obfs.server_info.recv_iv:
                    iv_len = len(self._protocol.obfs.server_info.iv)
                    self._protocol.obfs.server_info.recv_iv = obfs_decode[0][:iv_len]
                data = self._encryptor.decrypt(obfs_decode[0])
                try:
                    data = self._protocol.client_post_decrypt(data)
                except Exception as e:
                    shell.print_exception(e)
                    logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
                    self.destroy()
                    return
            else:
                if self._encrypt_correct:
                    data = self._protocol.server_pre_encrypt(data)
                    data = self._encryptor.encrypt(data)
                    data = self._obfs.server_encode(data)
                self._server.add_transfer_d(self._user, len(data))
            # Refresh the idle-timeout for this connection.
            self._update_activity(len(data))
        else:
            return
        try:
            self._write_to_sock(data, self._local_sock)
        except Exception as e:
            shell.print_exception(e)
            if self._config['verbose']:
                traceback.print_exc()
            logging.error("exception from %s:%d" % (self._client_address[0], self._client_address[1]))
            self.destroy()
def _on_local_write(self):
# handle local writable event
if self._data_to_write_to_local:
data = b''.join(self._data_to_write_to_local)
self._data_to_write_to_local = []
self._write_to_sock(data, self._local_sock)
else:
self._update_stream(STREAM_DOWN, WAIT_STATUS_READING)
def _on_remote_write(self):
# handle remote writable event
self._stage = STAGE_STREAM
if self._data_to_write_to_remote:
data = b''.join(self._data_to_write_to_remote)
self._data_to_write_to_remote = []
self._write_to_sock(data, self._remote_sock)
else:
self._update_stream(STREAM_UP, WAIT_STATUS_READING)
def _on_local_error(self):
if self._local_sock:
err = eventloop.get_sock_error(self._local_sock)
if err.errno not in [errno.ECONNRESET, errno.EPIPE]:
logging.error(err)
logging.error("local error, exception from %s:%d" % (self._client_address[0], self._client_address[1]))
self.destroy()
def _on_remote_error(self):
if self._remote_sock:
err = eventloop.get_sock_error(self._remote_sock)
if err.errno not in [errno.ECONNRESET]:
logging.error(err)
if self._remote_address:
logging.error("remote error, when connect to %s:%d" % (self._remote_address[0], self._remote_address[1]))
else:
logging.error("remote error, exception from %s:%d" % (self._client_address[0], self._client_address[1]))
self.destroy()
    def handle_event(self, sock, event):
        """Dispatch one poll *event* on *sock* to the matching handler method.

        Returns True when the event was consumed (including the destroyed /
        deleted-user fast paths), False when *sock* is not one of this
        handler's sockets. Read events are skipped while a per-connection or
        per-user speed limit is exceeded; the socket stays readable and will
        be retried on a later poll tick.
        """
        # handle all events in this handler and dispatch them to methods
        handle = False
        if self._stage == STAGE_DESTROYED:
            logging.debug('ignore handle_event: destroyed')
            return True
        if self._user is not None and self._user not in self._server.server_users:
            # The account was removed while this connection was open.
            self.destroy()
            return True
        # order is important
        if sock == self._remote_sock or sock == self._remote_sock_v6:
            if event & eventloop.POLL_ERR:
                handle = True
                self._on_remote_error()
                if self._stage == STAGE_DESTROYED:
                    return True
            if event & (eventloop.POLL_IN | eventloop.POLL_HUP):
                # Download throttling: skip the read while over the limit.
                if not self.speed_tester_d.isExceed():
                    if not self._server.speed_tester_d(self._user_id).isExceed():
                        handle = True
                        self._on_remote_read(sock == self._remote_sock)
                        if self._stage == STAGE_DESTROYED:
                            return True
            if event & eventloop.POLL_OUT:
                handle = True
                self._on_remote_write()
        elif sock == self._local_sock:
            if event & eventloop.POLL_ERR:
                handle = True
                self._on_local_error()
                if self._stage == STAGE_DESTROYED:
                    return True
            if event & (eventloop.POLL_IN | eventloop.POLL_HUP):
                # Upload throttling: skip the read while over the limit.
                if not self.speed_tester_u.isExceed():
                    if not self._server.speed_tester_u(self._user_id).isExceed():
                        handle = True
                        self._on_local_read()
                        if self._stage == STAGE_DESTROYED:
                            return True
            if event & eventloop.POLL_OUT:
                handle = True
                self._on_local_write()
        else:
            logging.warn('unknown socket from %s:%d' % (self._client_address[0], self._client_address[1]))
        return handle
def _log_error(self, e):
logging.error('%s when handling connection from %s:%d' %
(e, self._client_address[0], self._client_address[1]))
    def stage(self):
        """Return the handler's current STAGE_* state constant."""
        return self._stage
    def destroy(self):
        """Tear down the handler: unregister and close both sockets, dispose
        the obfs/protocol plugins and deregister from the server. Idempotent."""
        # destroy the handler and release any resources
        # promises:
        # 1. destroy won't make another destroy() call inside
        # 2. destroy releases resources so it prevents future call to destroy
        # 3. destroy won't raise any exceptions
        # if any of the promises are broken, it indicates a bug has been
        # introduced! mostly likely memory leaks, etc
        if self._stage == STAGE_DESTROYED:
            # this couldn't happen
            logging.debug('already destroyed')
            return
        self._stage = STAGE_DESTROYED
        if self._remote_address:
            logging.debug('destroy: %s:%d' %
                          self._remote_address)
        else:
            logging.debug('destroy')
        if self._remote_sock:
            logging.debug('destroying remote')
            # Best effort: the socket may already be gone from the loop.
            try:
                self._loop.remove(self._remote_sock)
            except Exception as e:
                pass
            del self._fd_to_handlers[self._remote_sock.fileno()]
            self._remote_sock.close()
            self._remote_sock = None
        if self._remote_sock_v6:
            logging.debug('destroying remote')
            try:
                self._loop.remove(self._remote_sock_v6)
            except Exception as e:
                pass
            del self._fd_to_handlers[self._remote_sock_v6.fileno()]
            self._remote_sock_v6.close()
            self._remote_sock_v6 = None
        if self._local_sock:
            logging.debug('destroying local')
            self._loop.remove(self._local_sock)
            del self._fd_to_handlers[self._local_sock.fileno()]
            self._local_sock.close()
            self._local_sock = None
        if self._obfs:
            self._obfs.dispose()
            self._obfs = None
        if self._protocol:
            self._protocol.dispose()
            self._protocol = None
        self._encryptor = None
        self._dns_resolver.remove_callback(self._handle_dns_resolved)
        self._server.remove_handler(self)
        # Update the server's connection and per-address statistics.
        self._server.add_connection(-1)
        self._server.stat_add(self._client_address[0], -1)
class TCPRelay(object):
    """Owns one listening TCP socket and all handlers spawned from it.

    Keeps the per-port bookkeeping: registered users (multi-user protocols),
    upload/download transfer counters, per-user speed testers, connection
    statistics, and an LRU timeout cache that destroys idle handlers.
    """

    def __init__(self, config, dns_resolver, is_local, stat_callback=None, stat_counter=None):
        """Create and bind the (non-blocking) listening socket.

        Raises if no address info can be resolved for the configured
        listen address/port.
        """
        self._config = config
        self._is_local = is_local
        self._dns_resolver = dns_resolver
        self._closed = False
        self._eventloop = None
        self._fd_to_handlers = {}
        self.server_transfer_ul = 0
        self.server_transfer_dl = 0
        self.server_users = {}
        self.server_user_transfer_ul = {}
        self.server_user_transfer_dl = {}
        # mu: multi-user mode, set when protocol_param carries a user list.
        self.mu = False
        self._speed_tester_u = {}
        self._speed_tester_d = {}
        self.server_connections = 0
        # Shared state for all obfs/protocol plugin instances on this port.
        self.protocol_data = obfs.obfs(config['protocol']).init_data()
        self.obfs_data = obfs.obfs(config['obfs']).init_data()
        if config.get('connect_verbose_info', 0) > 0:
            common.connect_log = logging.info
        self._timeout = config['timeout']
        self._timeout_cache = lru_cache.LRUCache(timeout=self._timeout,
                                                 close_callback=self._close_tcp_client)
        if is_local:
            listen_addr = config['local_address']
            listen_port = config['local_port']
        else:
            listen_addr = config['server']
            listen_port = config['server_port']
        self._listen_port = listen_port
        # These protocols embed per-user credentials; preload the user table.
        if common.to_bytes(config['protocol']) in [b"auth_aes128_md5", b"auth_aes128_sha1"]:
            self._update_users(None, None)
        addrs = socket.getaddrinfo(listen_addr, listen_port, 0,
                                   socket.SOCK_STREAM, socket.SOL_TCP)
        if len(addrs) == 0:
            raise Exception("can't get addrinfo for %s:%d" %
                            (listen_addr, listen_port))
        af, socktype, proto, canonname, sa = addrs[0]
        server_socket = socket.socket(af, socktype, proto)
        server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server_socket.bind(sa)
        server_socket.setblocking(False)
        if config['fast_open']:
            try:
                # 23 == TCP_FASTOPEN on Linux; 5 is the fast-open queue length.
                server_socket.setsockopt(socket.SOL_TCP, 23, 5)
            except socket.error:
                logging.error('warning: fast open is not available')
                self._config['fast_open'] = False
        server_socket.listen(config.get('max_connect', 1024))
        self._server_socket = server_socket
        self._stat_counter = stat_counter
        self._stat_callback = stat_callback

    def add_to_loop(self, loop):
        """Register the listening socket and the periodic sweep with *loop*."""
        if self._eventloop:
            raise Exception('already add to loop')
        if self._closed:
            raise Exception('already closed')
        self._eventloop = loop
        self._eventloop.add(self._server_socket,
                            eventloop.POLL_IN | eventloop.POLL_ERR, self)
        self._eventloop.add_periodic(self.handle_periodic)

    def remove_handler(self, client):
        """Forget *client* in the timeout cache (called from its destroy())."""
        if hash(client) in self._timeout_cache:
            del self._timeout_cache[hash(client)]

    def add_connection(self, val):
        """Adjust the live-connection counter by *val* (+1 open, -1 close)."""
        self.server_connections += val
        logging.debug('server port %5d connections = %d' % (self._listen_port, self.server_connections,))

    def get_ud(self):
        """Return (upload, download) byte totals not attributed to any user."""
        return (self.server_transfer_ul, self.server_transfer_dl)

    def get_users_ud(self):
        """Return copies of the per-user (upload, download) byte totals."""
        return (self.server_user_transfer_ul.copy(), self.server_user_transfer_dl.copy())

    def _update_users(self, protocol_param, acl):
        """(Re)build the user table from a 'param#id1:pw1,id2:pw2' string.

        Users whose numeric id is not in *acl* (when given) are removed.
        """
        if protocol_param is None:
            protocol_param = self._config['protocol_param']
        param = common.to_bytes(protocol_param).split(b'#')
        if len(param) == 2:
            self.mu = True
            user_list = param[1].split(b',')
            if user_list:
                for user in user_list:
                    items = user.split(b':')
                    if len(items) == 2:
                        user_int_id = int(items[0])
                        # uid is the 4-byte little-endian packed user id.
                        uid = struct.pack('<I', user_int_id)
                        if acl is not None and user_int_id not in acl:
                            self.del_user(uid)
                        else:
                            passwd = items[1]
                            self.add_user(uid, passwd)

    def update_user(self, id, passwd):
        """Add or replace a single user given its integer id."""
        uid = struct.pack('<I', id)
        self.add_user(uid, passwd)

    def update_users(self, users):
        """Replace the user table with *users* (dict: int id -> password)."""
        for uid in list(self.server_users.keys()):
            id = struct.unpack('<I', uid)[0]
            if id not in users:
                self.del_user(uid)
        for id in users:
            uid = struct.pack('<I', id)
            self.add_user(uid, users[id])

    def add_user(self, user, passwd): # user: binstr[4], passwd: str
        self.server_users[user] = common.to_bytes(passwd)

    def del_user(self, user):
        """Remove *user* (packed 4-byte id) if present."""
        if user in self.server_users:
            del self.server_users[user]

    def add_transfer_u(self, user, transfer):
        """Account *transfer* uploaded bytes to *user* (or the port total).

        Once a user is known, bytes accumulated in the anonymous port total
        are folded into that user's counter and the total is reset.
        """
        if user is None:
            self.server_transfer_ul += transfer
        else:
            if user not in self.server_user_transfer_ul:
                self.server_user_transfer_ul[user] = 0
            self.server_user_transfer_ul[user] += transfer + self.server_transfer_ul
            self.server_transfer_ul = 0

    def add_transfer_d(self, user, transfer):
        """Downstream counterpart of add_transfer_u()."""
        if user is None:
            self.server_transfer_dl += transfer
        else:
            if user not in self.server_user_transfer_dl:
                self.server_user_transfer_dl[user] = 0
            self.server_user_transfer_dl[user] += transfer + self.server_transfer_dl
            self.server_transfer_dl = 0

    def speed_tester_u(self, uid):
        """Return (creating on demand) the upload speed tester for *uid*."""
        if uid not in self._speed_tester_u:
            # NOTE(review): both branches are identical; presumably the mu
            # branch was meant to use a per-user limit -- see the TODO.
            if self.mu: #TODO
                self._speed_tester_u[uid] = SpeedTester(self._config.get("speed_limit_per_user", 0))
            else:
                self._speed_tester_u[uid] = SpeedTester(self._config.get("speed_limit_per_user", 0))
        return self._speed_tester_u[uid]

    def speed_tester_d(self, uid):
        """Return (creating on demand) the download speed tester for *uid*."""
        if uid not in self._speed_tester_d:
            # NOTE(review): both branches are identical (see speed_tester_u).
            if self.mu: #TODO
                self._speed_tester_d[uid] = SpeedTester(self._config.get("speed_limit_per_user", 0))
            else:
                self._speed_tester_d[uid] = SpeedTester(self._config.get("speed_limit_per_user", 0))
        return self._speed_tester_d[uid]

    def update_limit(self, uid, max_speed):
        """Change the speed limit on an existing user's testers."""
        if uid in self._speed_tester_u:
            self._speed_tester_u[uid].update_limit(max_speed)
        if uid in self._speed_tester_d:
            self._speed_tester_d[uid].update_limit(max_speed)

    def update_stat(self, port, stat_dict, val):
        """Bump *stat_dict*'s counter and log every 25-connection step.

        Key 0 holds the live count; key -1 holds the last logged threshold.
        """
        newval = stat_dict.get(0, 0) + val
        stat_dict[0] = newval
        logging.debug('port %d connections %d' % (port, newval))
        connections_step = 25
        if newval >= stat_dict.get(-1, 0) + connections_step:
            logging.info('port %d connections up to %d' % (port, newval))
            stat_dict[-1] = stat_dict.get(-1, 0) + connections_step
        elif newval <= stat_dict.get(-1, 0) - connections_step:
            logging.info('port %d connections down to %d' % (port, newval))
            stat_dict[-1] = stat_dict.get(-1, 0) - connections_step

    def stat_add(self, local_addr, val):
        """Track per-client-address and global connection counts.

        The shared *stat_counter* maps port -> {addr: count}; key 0 holds
        the global count and key -1 the last logged 50-connection threshold.
        """
        if self._stat_counter is not None:
            if self._listen_port not in self._stat_counter:
                self._stat_counter[self._listen_port] = {}
            newval = self._stat_counter[self._listen_port].get(local_addr, 0) + val
            logging.debug('port %d addr %s connections %d' % (self._listen_port, local_addr, newval))
            self._stat_counter[self._listen_port][local_addr] = newval
            self.update_stat(self._listen_port, self._stat_counter[self._listen_port], val)
            if newval <= 0:
                if local_addr in self._stat_counter[self._listen_port]:
                    del self._stat_counter[self._listen_port][local_addr]
            newval = self._stat_counter.get(0, 0) + val
            self._stat_counter[0] = newval
            logging.debug('Total connections %d' % newval)
            connections_step = 50
            if newval >= self._stat_counter.get(-1, 0) + connections_step:
                logging.info('Total connections up to %d' % newval)
                self._stat_counter[-1] = self._stat_counter.get(-1, 0) + connections_step
            elif newval <= self._stat_counter.get(-1, 0) - connections_step:
                logging.info('Total connections down to %d' % newval)
                self._stat_counter[-1] = self._stat_counter.get(-1, 0) - connections_step

    def update_activity(self, client, data_len):
        """Report traffic to the stat callback and refresh *client*'s timeout."""
        if data_len and self._stat_callback:
            self._stat_callback(self._listen_port, data_len)
        # Re-inserting the key resets the LRU timeout for this handler.
        self._timeout_cache[hash(client)] = client

    def _sweep_timeout(self):
        """Destroy handlers idle longer than the configured timeout."""
        self._timeout_cache.sweep()

    def _close_tcp_client(self, client):
        """LRU-cache close callback: destroy a timed-out handler."""
        if client.remote_address:
            logging.debug('timed out: %s:%d' %
                          client.remote_address)
        else:
            logging.debug('timed out')
        client.destroy()

    def handle_event(self, sock, fd, event):
        """Event-loop entry point: accept new connections on the listening
        socket, or forward *event* to the handler registered for *fd*."""
        # handle events and dispatch to handlers
        if sock:
            logging.log(shell.VERBOSE_LEVEL, 'fd %d %s', fd,
                        eventloop.EVENT_NAMES.get(event, event))
        if sock == self._server_socket:
            if event & eventloop.POLL_ERR:
                # TODO
                raise Exception('server_socket error')
            try:
                logging.debug('accept')
                conn = self._server_socket.accept()
                handler = TCPRelayHandler(self, self._fd_to_handlers,
                            self._eventloop, conn[0], self._config,
                            self._dns_resolver, self._is_local)
                if handler.stage() == STAGE_DESTROYED:
                    conn[0].close()
            except (OSError, IOError) as e:
                error_no = eventloop.errno_from_exception(e)
                if error_no in (errno.EAGAIN, errno.EINPROGRESS,
                                errno.EWOULDBLOCK):
                    return
                else:
                    shell.print_exception(e)
                    if self._config['verbose']:
                        traceback.print_exc()
        else:
            if sock:
                handler = self._fd_to_handlers.get(fd, None)
                if handler:
                    handler.handle_event(sock, event)
            else:
                logging.warn('poll removed fd')

    def handle_periodic(self):
        """Periodic tick: finish a pending close() and sweep idle handlers."""
        if self._closed:
            if self._server_socket:
                self._eventloop.remove(self._server_socket)
                self._server_socket.close()
                self._server_socket = None
                logging.info('closed TCP port %d', self._listen_port)
            for handler in list(self._fd_to_handlers.values()):
                handler.destroy()
        self._sweep_timeout()

    def close(self, next_tick=False):
        """Stop the relay; with *next_tick*, defer teardown to handle_periodic()."""
        logging.debug('TCP close')
        self._closed = True
        if not next_tick:
            if self._eventloop:
                self._eventloop.remove_periodic(self.handle_periodic)
                self._eventloop.remove(self._server_socket)
            self._server_socket.close()
            for handler in list(self._fd_to_handlers.values()):
                handler.destroy()
| {
"content_hash": "9468325b5eb20b9181a8c4987e88f9a0",
"timestamp": "",
"source": "github",
"line_count": 1336,
"max_line_length": 199,
"avg_line_length": 43.678892215568865,
"alnum_prop": 0.5182760688886985,
"repo_name": "Cherrysaber/shadowsocksr",
"id": "c4c4fcc59294a2ed579335b1779ec28a520bfb0f",
"size": "58973",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shadowsocks/tcprelay.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "365984"
},
{
"name": "Shell",
"bytes": "459"
}
],
"symlink_target": ""
} |
import os
import sys
import re
import json
import urllib.request, urllib.parse, urllib.error
# Valid device/sensor names: a letter or underscore followed by at least one
# more letter, digit, '-', '_' or '.' (single-character names do not match).
NAME_RE = re.compile(r'[a-zA-Z_][-a-zA-Z0-9_.]+')
# Debug switch; not referenced within this module's visible code -- TODO confirm usage.
DEBUG=False
class DevicePlugin:
    """Igor plugin implementing device and sensor management.

    Provides web-callable endpoints to add, delete and list devices and
    sensors, including creation of the shared secret keys and capability
    tokens used when the Igor installation has capability support enabled.
    """

    def __init__(self, igor):
        self.igor = igor
        # True when this installation enforces capability-based access control.
        self.hasCapabilities = self.igor.internal.accessControl('hasCapabilitySupport')

    def index(self, token=None, callerToken=None):
        """This plugin has no index page."""
        raise self.igor.app.raiseNotfound()

    def add(self, token=None, callerToken=None, name=None, description=None, returnTo=None, secured=False, **kwargs):
        """Register a new device/sensor.

        Returns the result as JSON, or redirects to *returnTo* with the
        result encoded in the query string. See _add() for the fields
        accepted in *description*.
        """
        # Bugfix: forward `secured` explicitly -- it is bound to this
        # signature's own parameter, so it never reached _add() via **kwargs.
        rv = self._add(callerToken, name, description, secured=secured, **kwargs)
        if returnTo:
            queryString = urllib.parse.urlencode(rv)
            if '?' in returnTo:
                returnTo = returnTo + '&' + queryString
            else:
                returnTo = returnTo + '?' + queryString
            return self.igor.app.raiseSeeother(returnTo)
        return json.dumps(rv)

    def _add(self, callerToken, name=None, description=None, exportTokens=None, secured=False, **kwargs):
        """Create the database entries, plugin data and (optionally) shared
        keys and capability tokens for a new device or sensor.

        Returns a dict describing everything that was created; raises HTTP
        errors for invalid input or name collisions.
        """
        # Guard against a missing name too (NAME_RE.match(None) would raise).
        if not name or not NAME_RE.match(name):
            self.igor.app.raiseHTTPError('400 Illegal name for device')
        if not description:
            description = kwargs
        elif type(description) != type({}):
            description = json.loads(description)
        if type(description) != type({}):
            self.igor.app.raiseHTTPError('400 description must be dictionary or json object')
        if secured:
            description['secured'] = True
        deviceType = description.get('deviceType', None)
        if not deviceType:
            self.igor.app.raiseHTTPError('400 deviceType missing')
        isDevice = deviceType in {'activeDevice', 'activeSensorDevice'}
        isSensor = deviceType in {'activeSensor', 'polledSensor', 'passiveSensor'}
        if not isDevice and not isSensor:
            self.igor.app.raiseHTTPError('400 unknown deviceType %s' % deviceType)
        isActive = deviceType in {'activeSensor', 'activeSensorDevice'}
        isPassive = deviceType == 'passiveSensor'
        # Passive sensors are the only kind without a plugin of their own.
        hasPlugin = not isPassive
        hostname = description.get('hostname', None)
        if not hostname and (isDevice or isActive):
            hostname = name + '.local'
        if isDevice:
            databaseEntry = 'devices/%s' % name
        elif isSensor:
            databaseEntry = 'sensors/%s' % name
        else:
            assert 0
        if self.igor.databaseAccessor.get_key(databaseEntry, 'application/x-python-object', 'multi', callerToken):
            self.igor.app.raiseHTTPError('400 %s already exists' % name)
        rv = dict(name=name, deviceType=deviceType, isDevice=isDevice, isSensor=isSensor)
        if hostname:
            rv['hostname'] = hostname
        _owner = self.igor.app.getSessionItem('user', 'admin')
        tokenOwner = f'identities/{_owner}'
        # Pre-initialize so the secured-device branch below can test it safely.
        tokenWantedOwner = None
        if hasPlugin:
            pluginName = description.get('plugin', '')
            if not pluginName:
                self.igor.app.raiseHTTPError('400 deviceType %s requires plugin' % deviceType)
            msg = self.igor.plugins.installstd(pluginName=name, stdName=pluginName, token=callerToken)
            if msg:
                rv['message'] = msg
            tokenWantedOwner = f'plugindata/{pluginName}'
            #
            # Copy relevant items from description into plugindata
            #
            # hostname has been handled already (xxxjack double-check...)
            # xxxjack pushMethod might be useful too...
            if 'protocol' in description:
                self.igor.databaseAccessor.put_key(f'plugindata/{name}/protocol', 'text/plain', 'ref', description['protocol'], 'text/plain', callerToken, replace=True)
            if 'secured' in description:
                self.igor.databaseAccessor.put_key(f'plugindata/{name}/secured', 'text/plain', 'ref', '1', 'text/plain', callerToken, replace=True)
            if 'credentials' in description:
                self.igor.databaseAccessor.put_key(f'plugindata/{name}/credentials', 'text/plain', 'ref', description['credentials'], 'text/plain', callerToken, replace=True)
            if 'obj' in description:
                self.igor.databaseAccessor.put_key(f'plugindata/{name}/endpoint', 'text/plain', 'ref', description['obj'], 'text/plain', callerToken, replace=True)
        else:
            # Create item
            entryValues = {}
            self.igor.databaseAccessor.put_key(databaseEntry, 'text/plain', 'ref', entryValues, 'application/x-python-object', callerToken, replace=True)
        # Create status item
        self.igor.databaseAccessor.put_key('status/' + databaseEntry, 'text/plain', 'ref', '', 'text/plain', callerToken, replace=True)
        if secured and isDevice and self.hasCapabilities:
            # The device will verify requests addressed to its hostname.
            deviceKey = self._genSecretKey(callerToken, aud=hostname)
            rv['audSharedKeyId'] = deviceKey
            deviceTokenId = self.igor.internal.accessControl('newToken',
                token=callerToken,
                tokenId='external',
                newOwner=tokenOwner,
                newPath=description.get('obj', '/'),
                get='descendant-or-self',
                put='descendant-or-self',
                post='descendant',
                delete='descendant',
                delegate=True,
                aud=hostname
                )
            rv['deviceTokenId'] = deviceTokenId
            rv['tokenOwner'] = tokenOwner
            if tokenWantedOwner:
                rv['deviceTokenWantedOwner'] = tokenWantedOwner
        if secured and isActive and self.hasCapabilities:
            # The sensor will sign its own requests towards Igor.
            deviceKey = self._genSecretKey(callerToken, sub=hostname)
            rv['subSharedKeyId'] = deviceKey
        actions = description.get('actions', {})
        if actions:
            actionResults = {}
            for actionName in list(actions.keys()):
                actionData = self._addActionCap(callerToken, subject=hostname, tokenOwner=tokenOwner, exportTokens=exportTokens, **actions[actionName])
                actionResults[actionName] = actionData
            rv['actions'] = actionResults
        return rv

    def _genSecretKey(self, callerToken, aud=None, sub=None):
        """Create a shared symmetric key for audience *aud* or subject *sub*."""
        return self.igor.internal.accessControl('createSharedKey', token=callerToken, aud=aud, sub=sub)

    def addActionCap(self, callerToken, subject=None, verb='get', obj=None, returnTo=None, tokenOwner=None, exportTokens=False):
        """Web entry point: create a capability token for one action endpoint."""
        if tokenOwner is None:
            # Bugfix: previously referenced the bare name `igor`, which is not
            # defined in this scope (NameError at runtime).
            _owner = self.igor.app.getSessionItem('user', 'admin')
            tokenOwner = f'identities/{_owner}'
        rv = self._addActionCap(callerToken, subject, verb, obj, tokenOwner, exportTokens)
        rv['tokenOwner'] = tokenOwner
        if returnTo:
            queryString = urllib.parse.urlencode(rv)
            if '?' in returnTo:
                returnTo = returnTo + '&' + queryString
            else:
                returnTo = returnTo + '?' + queryString
            return self.igor.app.raiseSeeother(returnTo)
        return json.dumps(rv)

    def _addActionCap(self, callerToken, subject=None, verb='get', obj=None, tokenOwner='identities/admin', exportTokens=False):
        """Create (and optionally export) a capability token allowing *verb*
        on action *obj*. No-op when capability support is disabled."""
        if not self.hasCapabilities:
            return {}
        if not obj:
            self.igor.app.raiseHTTPError('400 missing obj for action')
        if obj.startswith('/action/'):
            parentTokenId = 'admin-action'
        else:
            self.igor.app.raiseHTTPError('400 bad action %s' % obj)
        newTokenId = self.igor.internal.accessControl('newToken',
            token=callerToken,
            tokenId=parentTokenId,
            newOwner=tokenOwner,
            newPath=obj,
            delegate=True,
            **{verb : 'self'}
            )
        rv = dict(verb=verb, obj=obj, actionTokenId=newTokenId)
        if exportTokens:
            newTokenRepresentation = self.igor.internal.accessControl('exportToken',
                token=callerToken,
                tokenId=newTokenId,
                subject=subject
                )
            rv['actionTokenRepresentation'] = newTokenRepresentation
        return rv

    def delete(self, name, hostname=None, token=None, callerToken=None, returnTo=None):
        """Remove a device/sensor: its keys, database entries and plugin."""
        if not name or not NAME_RE.match(name):
            self.igor.app.raiseHTTPError('400 Illegal name for device')
        if not hostname:
            hostname = name + '.local'
        if self.hasCapabilities:
            self._delSecretKey(callerToken, aud=hostname)
            self._delSecretKey(callerToken, sub=hostname)
        # NOTE(review): these lookups appear unused; kept because get_key may
        # have access-control side effects -- confirm before removing.
        isDevice = not not self.igor.databaseAccessor.get_key('devices/%s' % name, 'application/x-python-object', 'multi', callerToken)
        isSensor = not not self.igor.databaseAccessor.get_key('sensors/%s' % name, 'application/x-python-object', 'multi', callerToken)
        self.igor.databaseAccessor.delete_key('devices/%s' % name, callerToken)
        self.igor.databaseAccessor.delete_key('sensors/%s' % name, callerToken)
        self.igor.databaseAccessor.delete_key('status/devices/%s' % name, callerToken)
        self.igor.databaseAccessor.delete_key('status/sensors/%s' % name, callerToken)
        if self.igor.plugins.exists(name):
            self.igor.plugins.uninstall(name, callerToken)
        self.igor.internal.save(callerToken)
        if returnTo:
            return self.igor.app.raiseSeeother(returnTo)
        return ''

    def _delSecretKey(self, callerToken, aud=None, sub=None):
        """Delete the shared key for audience *aud* / subject *sub*."""
        self.igor.internal.accessControl('deleteSharedKey', token=callerToken, aud=aud, sub=sub)

    def list(self, token=None, callerToken=None):
        """Web entry point: JSON list of all devices/sensors (see _list)."""
        rv = self._list(callerToken)
        return json.dumps(rv)

    def _list(self, callerToken):
        """Return list of dictionaries describing all devices"""
        def _getNames(path):
            """Helper to get all non-namespaced children tag names"""
            allElements = self.igor.database.getElements(path, 'get', callerToken)
            rv = []
            for e in allElements:
                name = e.tagName
                if ':' in name: continue
                rv.append(name)
            return rv
        #
        # Collect all names of devices and sensors that occur anywhere (sorted)
        #
        allNames = _getNames('devices/*') + _getNames('sensors/*') + _getNames('status/sensors/*') + _getNames('status/devices/*')
        allNames = list(set(allNames))
        allNames.sort()
        #
        # For each of these collect the relevant information
        #
        rv = []
        for name in allNames:
            descr = dict(name=name)
            hostname = None
            representing = None
            entries = []
            statusEntries = []
            if self.igor.database.getElements('devices/' + name, 'get', callerToken):
                descr['isDevice'] = True
                entries.append('devices/' + name)
                representing = 'devices/' + name
            if self.igor.database.getElements('sensors/' + name, 'get', callerToken):
                descr['isSensor'] = True
                entries.append('sensors/' + name)
                representing = 'sensors/' + name
            if self.igor.database.getElements('plugindata/' + name, 'get', callerToken):
                descr['isPlugin'] = True
                entries.append('plugindata/' + name)
                hostname = self.igor.database.getValue('plugindata/%s/host' % name, callerToken)
                if hostname:
                    descr['hostname'] = hostname
            if self.igor.database.getElements('status/devices/' + name, 'get', callerToken):
                statusEntries.append('status/devices/' + name)
            if self.igor.database.getElements('status/sensors/' + name, 'get', callerToken):
                statusEntries.append('status/sensors/' + name)
            descr['entry'] = entries
            descr['status'] = statusEntries
            if representing:
                actionElements = self.igor.database.getElements("actions/action[representing='%s']" % representing, 'get', callerToken)
                actionPaths = []
                for e in actionElements:
                    actionPaths.append(self.igor.database.getXPathForElement(e))
                if actionPaths:
                    descr['actions'] = actionPaths
                descr['representing'] = representing
            # See what the type is
            if descr.get('isDevice'):
                if not descr.get('isPlugin'):
                    descr['deviceType'] = 'badDevice (no plugin)'
                else:
                    # We cannot tell difference between activeDevice and activeDeviceSensor.
                    # Could examine actions, but...
                    descr['deviceType'] = 'activeDevice'
            elif descr.get('isSensor'):
                if descr.get('isPlugin'):
                    descr['deviceType'] = 'polledSensor'
                elif descr.get('actions'):
                    # Bugfix: this checked descr.get('actionPaths'), a key that
                    # is never set (the paths are stored under 'actions'), so
                    # activeSensor was never detected.
                    descr['deviceType'] = 'activeSensor'
                else:
                    descr['deviceType'] = 'passiveSensor'
            else:
                descr['deviceType'] = 'bad (not Device, not Sensor)'
            rv.append(descr)
        return rv

    def _keyList(self, callerToken):
        """Helper for devices.html"""
        allKeys = self.igor.internal.accessControl(subcommand='getKeyList', token=callerToken)
        allKeysAsTuples = [(k.get('iss', ''), k.get('aud', ''), k.get('sub', '')) for k in allKeys]
        allKeysAsTuples.sort()
        allKeyAudiences = set([k.get('aud') for k in allKeys])
        allKeySubjects = set([k.get('sub') for k in allKeys])
        return dict(allKeysAsTuples=allKeysAsTuples, allKeyAudiences=allKeyAudiences, allKeySubjects=allKeySubjects)
def igorPlugin(igor, pluginName, pluginData):
    """Factory called by the Igor plugin loader; returns the plugin object."""
    return DevicePlugin(igor)
| {
"content_hash": "ef8ee48e0d9b07425832ed805cbfd8af",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 174,
"avg_line_length": 47.16442953020134,
"alnum_prop": 0.5870508715759516,
"repo_name": "cwi-dis/igor",
"id": "df262e46b162923c1a8d257886eeca1acd034de0",
"size": "14055",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "igor/std-plugins/device/igorplugin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "142939"
},
{
"name": "Python",
"bytes": "580532"
},
{
"name": "Shell",
"bytes": "7868"
}
],
"symlink_target": ""
} |
import unittest
class InitializationTests(unittest.TestCase):
def test_initialization(self):
"""
Check the test suite runs by affirming 2+2=4
"""
self.assertEqual(2 + 2, 4)
def test_import(self):
"""
Ensure the test suite can import our module
"""
try:
import imgur_cli
except ImportError:
self.fail("Was not able to import imgur_cli")
| {
"content_hash": "ee2af0b9be81546065f19a89e81e4bac",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 57,
"avg_line_length": 23.36842105263158,
"alnum_prop": 0.5698198198198198,
"repo_name": "ueg1990/imgur-cli",
"id": "27610258399f8e0b65c3a8238a238b609b2d75b0",
"size": "444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "77521"
}
],
"symlink_target": ""
} |
import httplib
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA512
import base64
import json
from base64 import b64encode
from Crypto.Signature import PKCS1_v1_5
import subprocess
import signal
import time
def b64url_encode(str):
    """Return the URL-safe base64 encoding of *str* with '=' padding removed."""
    # NOTE(review): the parameter name shadows the builtin ``str``; harmless
    # locally but worth renaming.  This is Python 2 code (bytes and str are
    # interchangeable here); under Python 3 this would need bytes arguments.
    return base64.urlsafe_b64encode(str).replace("=","")
# Python 2 integration test: sign a directory-server registration payload,
# launch the server under valgrind, POST the payload over HTTPS, and check
# for a 200 response and a clean server shutdown.
port = "8088"
# Load the RSA keypair and export the public half in DER form; the DER key
# (base64url-encoded) doubles as the server's identifier in the URL below.
serverkey = RSA.importKey(open("serverkey.pem").read())
serverder = serverkey.publickey().exportKey("DER")
address = "127.0.0.1:8080"
time_added = str(int(time.time()))
# Signature covers key || address || timestamp.
# NOTE(review): ``all`` and ``hash`` shadow builtins.
all = b64url_encode(serverder) + address + time_added
hash = SHA512.new(all)
signature = PKCS1_v1_5.new(serverkey).sign(hash)
contents = json.dumps({"address":address,
                       "time":int(time_added),
                       "signature":b64url_encode(signature)},
                      indent = 4)
# Run the server under valgrind so memory errors fail the test (exit code 1).
serverp = subprocess.Popen(["valgrind",
                            "--leak-check=full",
                            "--error-exitcode=1",
                            "--gen-suppressions=all",
                            "--suppressions=server_valgrind_suppressions",
                            "./server", port])
time.sleep(5)  # crude wait for the server to start listening
headers = {"Content-Type":"application/json", "Accept": "application/json"}
conn = httplib.HTTPSConnection("127.0.0.1:" + port)
conn.request("POST", "/spiz/v0/dir/server/" + b64url_encode(serverder), \
        contents, headers)
response = conn.getresponse()
data = response.read()
print str(response.status) + " " + response.reason
print data
rc = 0
if response.status != 200:
    print "Server did not reply 200 OK"
    rc = 1
conn.close()
time.sleep(5)  # let the server settle before asking it to shut down
# SIGINT triggers graceful shutdown; valgrind reports errors via exit code.
serverp.send_signal(signal.SIGINT)
serverp.wait()
if serverp.returncode == 1:
    print "Server exited with return code: " + str(serverp.returncode)
    rc = 1
exit(rc)
| {
"content_hash": "d1b6cda8b5a25ea1643f8f3a85c0de00",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 75,
"avg_line_length": 25.87878787878788,
"alnum_prop": 0.6498829039812647,
"repo_name": "pombredanne/spiz",
"id": "0d4e43c9965914fd200f29881408434e00295938",
"size": "1727",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/server/test/dir_server_test.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
Usage:
create_room <type_room> <name>...
add_person <firstname> <surname> <person_type> [<wants_accomodation>]
print_room <name>
print_allocations [<filename>]
print_unallocations [<filename>]
dojo (-i | --interactive)
dojo (-h | --help | --version)
Options:
-i, --interactive Interactive Mode
-h, --help Show this screen and exit.
"""
from docopt import docopt, DocoptExit
import cmd
import os
import sys
from models.dojo import Dojo
from termcolor import colored
from insta import instance
from modules.ui import error,success
def docopt_cmd(func):
    """
    Decorator for cmd.Cmd actions: parse the raw argument string against the
    wrapped method's docstring (a docopt usage pattern) and hand the parsed
    options dict to the action instead of the raw string.
    """
    def fn(self, arg):
        try:
            opt = docopt(fn.__doc__, arg)
        except DocoptExit as usage_error:
            # Raised when the arguments do not match the usage pattern:
            # tell the user and show the expected usage.
            print('Invalid Command!')
            print(usage_error)
            return
        except SystemExit:
            # docopt exits for --help; the usage has already been printed.
            return
        return func(self, opt)

    # Preserve the wrapped function's metadata (docopt reads fn.__doc__).
    for attribute in ('__name__', '__doc__'):
        setattr(fn, attribute, getattr(func, attribute))
    fn.__dict__.update(func.__dict__)
    return fn
def intro():
    """Clear the terminal and print the module usage banner."""
    clear_command = 'cls' if os.name == 'nt' else 'clear'
    os.system(clear_command)
    print(__doc__)
class DOJO(cmd.Cmd):
    # Interactive shell for the Dojo room-allocation app.  Each do_* method's
    # docstring is a docopt usage pattern parsed at call time by the
    # @docopt_cmd decorator, so those docstrings are program behavior —
    # they must describe the command they belong to.
    prompt = colored('DOJO$$$', 'magenta', attrs=['blink','bold'])

    @docopt_cmd
    def do_create_room(self, arg):
        """Usage: create_room <type_room> <name>..."""
        # One room is created per supplied name, all of the same type.
        for room in arg['<name>']:
            print(instance.create_room(room,arg['<type_room>']))

    @docopt_cmd
    def do_add_person(self, arg):
        """Usage: add_person <firstname> <surname> <person_type> [<wants_accomodation>]"""
        print(instance.add_person(arg['<firstname>'],arg['<surname>'],arg['<person_type>'],arg['<wants_accomodation>']))

    @docopt_cmd
    def do_print_room(self, arg):
        """Usage: print_room <name>"""
        room = instance.print_room(arg['<name>'])
        # instance.print_room returns the error() string when the room is
        # unknown; otherwise a mapping of occupants.
        if room == error('Room %s does not exist!'%(arg['<name>'])):
            print(room)
        else:
            print(room)
            print('``````````````````````````````````````````')
            for person in room:
                print(room[person])

    @docopt_cmd
    def do_print_allocations(self, arg):
        """Usage: print_allocations [<filename>]"""
        instance.print_allocations(arg['<filename>'])

    @docopt_cmd
    def do_print_unallocations(self, arg):
        # BUGFIX: the usage pattern previously read "print_allocations",
        # which showed the wrong command name in help output.
        """Usage: print_unallocations [<filename>]"""
        # Result printing is handled by the model; the return value is unused.
        instance.print_unallocations(arg['<filename>'])

    @docopt_cmd
    def do_quit(self, arg):
        """Usage: quit"""
        os.system('cls' if os.name == 'nt' else 'clear')
        print ('Dojo Exiting')
        exit()
if __name__ == "__main__":
    # Show the usage banner, then enter the interactive command loop.
    try:
        intro()
        DOJO().cmdloop()
    except KeyboardInterrupt:
        # Ctrl-C: clear the screen and exit with a colored message
        # instead of a traceback.
        os.system('cls' if os.name == 'nt' else 'clear')
        print(error('DOJO EXITING'))
| {
"content_hash": "3bc817a264d005b6344ee98193c89c09",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 120,
"avg_line_length": 28.5,
"alnum_prop": 0.5592105263157895,
"repo_name": "JoshuaOndieki/dojo",
"id": "9d1e7a4df3b689ea995627d8b0d5286eeab85593",
"size": "3192",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19699"
}
],
"symlink_target": ""
} |
from flask import (Flask, request, g, render_template, session, redirect,
                   url_for, send_from_directory)
from github import Github
import github

import test
app = Flask(__name__)  # WSGI application object for all routes below
repo_name = ""  # NOTE(review): module-level default; appears unused in this file
@app.route('/repo')
def repo():
    """Debug endpoint: fetch one fixed repository and dump its raw dict."""
    payload = github.get('repos/cenkalti/github-flask')
    return str(payload)
# Module-level mutable state shared between request handlers.
# NOTE(review): per-login state in globals is unsafe with concurrent users,
# and rebinding ``g`` here shadows the ``flask.g`` imported above.
g = None          # Github client created at login
user_inst = None  # authenticated Github user object
results = None    # NOTE(review): later shadowed by the results() view function
coll_arr = None   # unused in this file as far as visible
@app.route('/', methods=['GET', 'POST'])
def index():
    """Login page: on POST, authenticate against GitHub and go to /repos."""
    global g, user_inst
    if request.method != 'POST':
        # Plain GET: render the login form.
        return render_template('index.html')
    # POST: build an authenticated client and cache the user module-wide.
    g = Github(request.form['username'], request.form['password'])
    user_inst = g.get_user()
    return redirect("/repos")
@app.route('/repos', methods=['GET', 'POST'])
def select():
    """Repo picker: GET lists repos, POST scores collaborators for the pick."""
    repos = (test.retrieve_repos(user_inst))[0]
    if request.method == 'POST':
        repo_index = int(request.form['repo'])
        colls = test.retrieve_collaborators(repos[repo_index])
        for user in colls:
            # Derive each collaborator's keywords from their committed files.
            comm_files = test.retrieve_commit_files(repos[repo_index], user)
            for name, content in comm_files.items():
                keywords = test.get_library(user, name, content)
                user.hypo_keywords = keywords
            # NOTE(review): ``keywords`` is unbound below if comm_files is empty.
            comm_mess = test.retrieve_commit_messages(repos[repo_index], user)
            for t, b in test.retrieve_issues(repos[repo_index]).items():
                # NOTE(review): ``k in t or b`` parses as ``(k in t) or b``;
                # ``k in t or k in b`` was probably intended.
                task = test.task([k for k in keywords if k in t or b])
                test.scoreUsers(colls, task)
        global results
        # results = task.score
        # print("printing score: ", task.score)
        # NOTE(review): the second '?' should be '&' — as written, colls ends
        # up inside the repo_name query value rather than as its own parameter.
        url_to_redirct = "/showresults?repo_name=" + repos[repo_index].full_name + '?colls=' + str(colls)
        return redirect(url_to_redirct)
    return render_template('repos.html', repos=repos)
@app.route('/staticfile/<path:path>')
def staticfile(path):
    """Serve *path* from the local ``static`` directory.

    Fix: ``send_from_directory`` was referenced here but never imported, so
    every request to this route raised NameError; it is now imported with
    the other flask names at the top of the file.
    """
    return send_from_directory('static', path)
class Task(object):
    """Value object pairing a task name with its keywords and per-user scores."""

    def __init__(self, name, keywords, score):
        # Attributes mirror the constructor arguments one-to-one.
        self.name = name
        self.keywords = keywords
        self.score = score
@app.route('/showresults', methods=['GET', 'POST'])
def results():
    """Render the scoring results page (currently returns demo data).

    NOTE(review): this ``def`` rebinds the module-level name ``results`` (the
    variable defined above) to the view function itself, so ``results.keys()``
    below would raise AttributeError if the loop ever executed.  Also, the
    first unconditional ``return`` makes everything after it unreachable.
    """
    global results
    tasks = []
    colls = request.args.getlist("colls")
    for user in colls:
        for u in results.keys():
            if user.name == u:
                tasks.append(Task('Using Keras built-in model', user.hypo_keywords, results))
    # Hard-coded demo task; the function always returns here.
    tasks.append(Task('Broken download links of the Windows GPU', ['.net', 'networking', 'nvidia-CUDA'], {'tkato0909': 0.974, 'aryavohra04': 0.823, 'superspy.827': 0.214}))
    return render_template('results.html', repo_name='aryavohra04/questo-backend', tasks=tasks, selected_task=tasks[0])
    # --- unreachable from here on (dead demo code kept for reference) ---
    if request.args.get('index'):
        users = []
        tasks.append(Task('Using Keras built-in model', ['Python3', 'Keras', 'machine-learning'], {'karinawalker': 0.744, 'aryavohra04': 0.323, 'superspy.827': 0.214}))
        tasks.append(Task('Redesign the iOS page for question answering.', ['Swift', 'Object-C', 'iOS'], {'tkato0909': 0.974, 'superspy.827': 0.823, 'aryavohra04': 0.741}))
        userkeywords=[[['Python', 'PHP', 'machine-learning', 'tensorflow'],
        ['C++', 'Java', 'Keras', 'iOS'],
        ['Python', 'javascript', 'node.js', 'JSON']
        ], [['Swift', 'iOS', 'Keras', 'CUDA'],
        ['Python', 'Swift', 'node.js', 'JSON'],
        ['C++', 'Java', 'Keras', 'iOS'],
        ]]
        prof=[['https://avatars.githubusercontent.com/u/23270560?size=120','https://avatars.githubusercontent.com/u/8716483?size=100','https://avatars1.githubusercontent.com/u/6974757?v=4&s=400'], ['https://avatars2.githubusercontent.com/u/8716434?v=4&s=460', 'https://avatars1.githubusercontent.com/u/6974757?v=4&s=400', 'https://avatars.githubusercontent.com/u/8716483?size=100']]
        return render_template('results.html', repo_name='aryavohra04/questo-backend', tasks=tasks, selected_task=tasks[int(request.args.get('index'))], userkeywords=userkeywords[int(request.args.get('index'))], prof=prof[int(request.args.get('index'))])
        # return render_template('results.html', repo_name=request.args.get("repo_name"), tasks=tasks, selected_task=tasks[int(request.args.get('index'))])
    tasks.append(Task('Using Keras built-in model', ['Python3', 'Keras', 'machine-learning', 'tensorflow'], {'karinawalker': 0.744, 'aryavohra04': 0.323, 'superspy.827': 0.214}))
    tasks.append(Task('Redesign the iOS page for question answering.', ['Swift', 'Object-C', 'iOS'], {'tkato0909': 0.974, 'superspy.827': 0.823, 'aryavohra04': 0.741}))
    prof=[['https://avatars.githubusercontent.com/u/23270560?size=120','https://avatars.githubusercontent.com/u/8716483?size=100','https://avatars1.githubusercontent.com/u/6974757?v=4&s=400'], ['https://avatars2.githubusercontent.com/u/8716434?v=4&s=460', 'https://avatars1.githubusercontent.com/u/6974757?v=4&s=400', 'https://avatars.githubusercontent.com/u/8716483?size=100']]
    return render_template('results.html', repo_name='aryavohra04/questo-backend', tasks=tasks, selected_task=tasks[0], userkeywords=[['Python', 'PHP', 'machine-learning', 'tensorflow'],
    ['C++', 'Java', 'Keras', 'iOS'],
    ['Python', 'javascript', 'node.js', 'JSON']
    ], prof=prof[0])
app.run(debug=True, host="0.0.0.0", threaded=True) | {
"content_hash": "e373c098222dcf61d7a1b3586a3e9962",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 382,
"avg_line_length": 57.07920792079208,
"alnum_prop": 0.5856027753686036,
"repo_name": "tkato0909/hequals",
"id": "5ae839c90bf14c4ce342152f114625e3689bdf48",
"size": "5765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/flask/app.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2530"
},
{
"name": "HTML",
"bytes": "9150"
},
{
"name": "Python",
"bytes": "20586"
},
{
"name": "Shell",
"bytes": "3214"
}
],
"symlink_target": ""
} |
import sys
import thread
import time
import argparse
from twisted.python import log
from twisted.internet import reactor
from autobahn.twisted.websocket import WebSocketClientFactory, \
WebSocketClientProtocol, \
connectWS
from client_config import Config
class DataStreamProtocol(WebSocketClientProtocol):
    """
    WebSocket client protocol that, once connected, streams the contents of
    a data file to the server forever, one line every 0.1 s, reopening the
    file each time the end is reached.
    """
    def onOpen(self):
        """
        Connection established: start a background thread that loops over
        the data file indefinitely.
        """
        log.msg("Connection established")
        def run():
            log.msg("Sending data...")
            while True:
                self._send_data()
                log.msg("End of file. Reopening...")
            # NOTE(review): unreachable — the while True above never exits.
            log.msg("Thread terminating...")
        # Python 2 ``thread`` module; keeps the reactor thread unblocked.
        thread.start_new_thread(run, ())
    def onClose(self, wasClean, code, reason):
        # Log why the connection closed; no reconnect logic here.
        log.msg("Websocket connection closed: {0}".format(reason))
    def _send_data(self):
        # Send each line of the configured file as a one-element JSON array,
        # throttled to ~10 messages per second.  ``config`` is the module
        # global built in the __main__ block below.
        with open(config.file_path, "r") as data:
            for line in data:
                log.msg(line)
                self.sendMessage("[" + line + "]")
                time.sleep(0.1)
def parse_arguments():
    """Parse command-line options for the Space Shuttle client.

    Returns an argparse.Namespace with ``gateway_url`` (str or None)
    and ``https`` (bool, set by --use-https).
    """
    gateway_help = ('gateway api url, '
                    'e.g. gateway-479613d7.demotrustedanalytics.com')
    https_help = ('set of flag cause use of `https_proxy` env '
                  'instead of default `http_proxy`')
    parser = argparse.ArgumentParser(
        description='Deployment script for Space Shuttle client')
    parser.add_argument('--gateway-url', type=str, help=gateway_help)
    parser.add_argument('--use-https', dest='https', action='store_true',
                        help=https_help)
    return parser.parse_args()
if __name__ == '__main__':
    # Wire up twisted logging, build the client config from CLI flags,
    # and run the WebSocket client until the reactor is stopped.
    log.startLogging(sys.stdout)
    args = parse_arguments()
    config = Config(external_gateway=args.gateway_url,
                    use_https=args.https)
    uri = config.uri
    proxy_config = config.http_proxy_config
    factory = WebSocketClientFactory(uri, proxy=proxy_config)
    factory.protocol = DataStreamProtocol
    connectWS(factory)
    reactor.run()  # blocks until reactor.stop()
| {
"content_hash": "03529c43b8a61b30f88f24de553c8f31",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 74,
"avg_line_length": 30.442857142857143,
"alnum_prop": 0.5818864382918817,
"repo_name": "trustedanalytics/space-shuttle-demo",
"id": "77dd077d17bf9799b4c8733e4d13faf0d0a0825d",
"size": "2720",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/space_shuttle_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4568"
},
{
"name": "HTML",
"bytes": "5903"
},
{
"name": "Java",
"bytes": "71487"
},
{
"name": "JavaScript",
"bytes": "14761"
},
{
"name": "Jupyter Notebook",
"bytes": "9045"
},
{
"name": "Python",
"bytes": "12630"
},
{
"name": "Shell",
"bytes": "4658"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
# PyPI long description (runtime string shown on the package page).
long_desc = '''
This package contains the osaka Sphinx extension.
This module converts standard Japanese sentence into Osaka dialect. Osaka is one of the most powerful area in Japan. So your formal Japanese sentence will become lively one.
'''

# Runtime dependencies.
requires = ['Sphinx>=0.6']

# Package metadata; running this module executes the requested
# setuptools command (build, install, sdist, ...).
setup(
    name='sphinxcontrib-osaka',
    version='0.1',
    url='http://bitbucket.org/birkenfeld/sphinx-contrib',
    download_url='http://pypi.python.org/pypi/sphinxcontrib-osaka',
    license='BSD',
    author='SHIBUKAWA Yoshiki',
    author_email='yoshiki at shibu.jp',
    description='Sphinx extension osaka',
    long_description=long_desc,
    zip_safe=False,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: Japanese',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Documentation',
        'Topic :: Utilities',
        'Topic :: Text Processing :: Filters',
        'Topic :: Text Processing :: Linguistic',
    ],
    platforms='any',
    packages=find_packages(),
    include_package_data=True,
    install_requires=requires,
    # Share the ``sphinxcontrib`` namespace with sibling extensions.
    namespace_packages=['sphinxcontrib'],
)
| {
"content_hash": "36efe2ab59536741eede74268e2bbafd",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 173,
"avg_line_length": 33.41463414634146,
"alnum_prop": 0.6562043795620438,
"repo_name": "Lemma1/MAC-POSTS",
"id": "a08bee5dea000290ddb24276205b47feea383026",
"size": "1395",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "doc_builder/sphinx-contrib/osaka/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "3394"
},
{
"name": "Batchfile",
"bytes": "103388"
},
{
"name": "C",
"bytes": "5399"
},
{
"name": "C++",
"bytes": "3595985"
},
{
"name": "CMake",
"bytes": "53433"
},
{
"name": "CSS",
"bytes": "3618"
},
{
"name": "HTML",
"bytes": "18640"
},
{
"name": "JavaScript",
"bytes": "44610"
},
{
"name": "Jupyter Notebook",
"bytes": "7469541"
},
{
"name": "MATLAB",
"bytes": "5439"
},
{
"name": "Makefile",
"bytes": "148059"
},
{
"name": "Python",
"bytes": "1950140"
},
{
"name": "Shell",
"bytes": "2554"
}
],
"symlink_target": ""
} |
from keras.models import Model
from keras.layers import Input, Activation, Dropout, Merge, TimeDistributed, Masking, Dense
from keras.layers.recurrent import LSTM, GRU
from keras.layers.embeddings import Embedding
from keras.regularizers import l2
from keras.optimizers import Adam
from keras import backend as K
import h5py
import shutil
import logging
import sys
# Set up a module-level logger emitting INFO and above to stdout.
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
logger = logging.getLogger(__name__)
class NIC:
    """Neural image-captioning model (Keras 1.x functional API).

    Merges VGG image features and/or source-language feature vectors with
    word embeddings and decodes with a recurrent (LSTM or GRU) layer.

    Fix applied: in both build methods, ``model`` was only constructed inside
    the ``'adam'`` optimiser branch, so any other optimiser hit
    ``model.compile`` with ``model`` unbound (UnboundLocalError).  The Model
    is now constructed before the optimiser branch.
    """

    def __init__(self, embed_size, hidden_size, vocab_size, dropin, optimiser,
                 l2reg, hsn_size=512, weights=None, gru=False,
                 clipnorm=-1, batch_size=None, t=None, lr=0.001):
        self.max_t = t  # Expected timesteps. Needed to build the Theano graph

        # Model hyperparameters
        self.vocab_size = vocab_size  # size of word vocabulary
        self.embed_size = embed_size  # number of units in a word embedding
        self.hsn_size = hsn_size  # size of the source hidden vector
        self.hidden_size = hidden_size  # number of units in first LSTM
        self.gru = gru  # gru recurrent layer? (false = lstm)
        self.dropin = dropin  # prob. of dropping input units
        self.l2reg = l2reg  # weight regularisation penalty

        # Optimiser hyperparameters (beta/epsilon only used for ADAM)
        self.optimiser = optimiser  # optimisation method
        self.lr = lr
        self.beta1 = 0.9
        self.beta2 = 0.999
        self.epsilon = 1e-8
        self.clipnorm = clipnorm

        self.weights = weights  # initialise with checkpointed weights?

    def buildKerasModel(self, use_sourcelang=False, use_image=True):
        '''
        Define the exact structure of your model here. We create an image
        description generation model by merging the VGG image features with
        a word embedding model, with an LSTM over the sequences.

        At least one of use_sourcelang / use_image must be True, otherwise
        no recurrent inputs (and no ``inputs`` list) are defined.
        '''
        logger.info('Building Keras model...')

        text_input = Input(shape=(self.max_t, self.vocab_size), name='text')
        text_mask = Masking(mask_value=0., name='text_mask')(text_input)

        # Word embeddings
        wemb = TimeDistributed(Dense(output_dim=self.embed_size,
                                     input_dim=self.vocab_size,
                                     W_regularizer=l2(self.l2reg)),
                               name="w_embed")(text_mask)
        drop_wemb = Dropout(self.dropin, name="wemb_drop")(wemb)

        # Embed -> Hidden
        # NOTE(review): input_dim here is vocab_size although the incoming
        # tensor has embed_size units; Keras infers the real input shape, so
        # this argument appears to be ignored — confirm before relying on it.
        emb_to_hidden = TimeDistributed(Dense(output_dim=self.hidden_size,
                                              input_dim=self.vocab_size,
                                              W_regularizer=l2(self.l2reg)),
                                        name='wemb_to_hidden')(drop_wemb)

        if use_image:
            # Image 'embedding': project 4096-dim VGG features to hidden_size.
            logger.info('Using image features: %s', use_image)
            img_input = Input(shape=(self.max_t, 4096), name='img')
            img_emb = TimeDistributed(Dense(output_dim=self.hidden_size,
                                            input_dim=4096,
                                            W_regularizer=l2(self.l2reg)),
                                      name='img_emb')(img_input)
            img_drop = Dropout(self.dropin, name='img_embed_drop')(img_emb)

        if use_sourcelang:
            # Source-language feature 'embedding'.
            logger.info('Using source features: %s', use_sourcelang)
            logger.info('Size of source feature vectors: %d', self.hsn_size)
            src_input = Input(shape=(self.max_t, self.hsn_size), name='src')
            src_relu = Activation('relu', name='src_relu')(src_input)
            src_embed = TimeDistributed(Dense(output_dim=self.hidden_size,
                                              input_dim=self.hsn_size,
                                              W_regularizer=l2(self.l2reg)),
                                        name="src_embed")(src_relu)
            src_drop = Dropout(self.dropin, name="src_drop")(src_embed)

        # Input nodes for the recurrent layer (summed below).
        rnn_input_dim = self.hidden_size
        if use_image and use_sourcelang:
            recurrent_inputs = [emb_to_hidden, img_drop, src_drop]
            recurrent_inputs_names = ['emb_to_hidden', 'img_drop', 'src_drop']
            inputs = [text_input, img_input, src_input]
        elif use_image:
            recurrent_inputs = [emb_to_hidden, img_drop]
            recurrent_inputs_names = ['emb_to_hidden', 'img_drop']
            inputs = [text_input, img_input]
        elif use_sourcelang:
            recurrent_inputs = [emb_to_hidden, src_drop]
            recurrent_inputs_names = ['emb_to_hidden', 'src_drop']
            inputs = [text_input, src_input]
        merged_input = Merge(mode='sum')(recurrent_inputs)

        # Recurrent layer
        if self.gru:
            logger.info("Building a GRU with recurrent inputs %s", recurrent_inputs_names)
            rnn = GRU(output_dim=self.hidden_size,
                      input_dim=rnn_input_dim,
                      return_sequences=True,
                      W_regularizer=l2(self.l2reg),
                      U_regularizer=l2(self.l2reg),
                      name='rnn')(merged_input)
        else:
            logger.info("Building an LSTM with recurrent inputs %s", recurrent_inputs_names)
            rnn = LSTM(output_dim=self.hidden_size,
                       input_dim=rnn_input_dim,
                       return_sequences=True,
                       W_regularizer=l2(self.l2reg),
                       U_regularizer=l2(self.l2reg),
                       name='rnn')(merged_input)

        # Per-timestep softmax over the vocabulary.
        output = TimeDistributed(Dense(output_dim=self.vocab_size,
                                       input_dim=self.hidden_size,
                                       W_regularizer=l2(self.l2reg),
                                       activation='softmax'),
                                 name='output')(rnn)

        # BUGFIX: construct the Model unconditionally (see class docstring).
        model = Model(input=inputs, output=output)

        if self.optimiser == 'adam':
            # allow user-defined hyper-parameters for ADAM because it is
            # our preferred optimiser
            optimiser = Adam(lr=self.lr, beta_1=self.beta1,
                             beta_2=self.beta2, epsilon=self.epsilon,
                             clipnorm=self.clipnorm)
            model.compile(optimiser, {'output': 'categorical_crossentropy'})
        else:
            model.compile(self.optimiser, {'output': 'categorical_crossentropy'})

        if self.weights is not None:
            logger.info("... with weights defined in %s", self.weights)
            # Initialise the weights of the model, keeping a backup copy.
            shutil.copyfile("%s/weights.hdf5" % self.weights,
                            "%s/weights.hdf5.bak" % self.weights)
            model.load_weights("%s/weights.hdf5" % self.weights)

        #plot(model, to_file="model.png")

        return model

    def buildHSNActivations(self, use_image=True):
        '''
        Build a truncated copy of the captioning model whose output is the
        recurrent layer's hidden states (used to serialise the final hidden
        state to disk); weights are partially loaded from a full checkpoint.
        '''
        logger.info('Building Keras model...')

        text_input = Input(shape=(self.max_t, self.vocab_size), name='text')
        text_mask = Masking(mask_value=0., name='text_mask')(text_input)

        # Word embeddings
        wemb = TimeDistributed(Dense(output_dim=self.embed_size,
                                     input_dim=self.vocab_size,
                                     W_regularizer=l2(self.l2reg)),
                               name="w_embed")(text_mask)
        drop_wemb = Dropout(self.dropin, name="wemb_drop")(wemb)

        # Embed -> Hidden (same NOTE about input_dim as in buildKerasModel)
        emb_to_hidden = TimeDistributed(Dense(output_dim=self.hidden_size,
                                              input_dim=self.vocab_size,
                                              W_regularizer=l2(self.l2reg)),
                                        name='wemb_to_hidden')(drop_wemb)

        if use_image:
            # Image 'embedding'
            logger.info('Using image features: %s', use_image)
            img_input = Input(shape=(self.max_t, 4096), name='img')
            img_emb = TimeDistributed(Dense(output_dim=self.hidden_size,
                                            input_dim=4096,
                                            W_regularizer=l2(self.l2reg)),
                                      name='img_emb')(img_input)
            img_drop = Dropout(self.dropin, name='img_embed_drop')(img_emb)

        # Input nodes for the recurrent layer
        rnn_input_dim = self.hidden_size
        if use_image:
            recurrent_inputs = [emb_to_hidden, img_drop]
            recurrent_inputs_names = ['emb_to_hidden', 'img_drop']
            inputs = [text_input, img_input]
        merged_input = Merge(mode='sum')(recurrent_inputs)

        # Recurrent layer (output of the model)
        if self.gru:
            logger.info("Building a GRU with recurrent inputs %s", recurrent_inputs_names)
            rnn = GRU(output_dim=self.hidden_size,
                      input_dim=rnn_input_dim,
                      return_sequences=True,
                      W_regularizer=l2(self.l2reg),
                      U_regularizer=l2(self.l2reg),
                      name='rnn')(merged_input)
        else:
            logger.info("Building an LSTM with recurrent inputs %s", recurrent_inputs_names)
            rnn = LSTM(output_dim=self.hidden_size,
                       input_dim=rnn_input_dim,
                       return_sequences=True,
                       W_regularizer=l2(self.l2reg),
                       U_regularizer=l2(self.l2reg),
                       name='rnn')(merged_input)

        # BUGFIX: construct the Model unconditionally (see class docstring).
        model = Model(input=[text_input, img_input], output=rnn)

        if self.optimiser == 'adam':
            # allow user-defined hyper-parameters for ADAM because it is
            # our preferred optimiser
            optimiser = Adam(lr=self.lr, beta_1=self.beta1,
                             beta_2=self.beta2, epsilon=self.epsilon,
                             clipnorm=self.clipnorm)
            print(model.get_config())
            model.compile(optimiser, {'rnn': 'categorical_crossentropy'})
        else:
            model.compile(self.optimiser, {'rnn': 'categorical_crossentropy'})

        if self.weights is not None:
            logger.info("... with weights defined in %s", self.weights)
            # Initialise the weights of the model from a full-model
            # checkpoint, keeping a backup copy of the file.
            shutil.copyfile("%s/weights.hdf5" % self.weights,
                            "%s/weights.hdf5.bak" % self.weights)
            f = h5py.File("%s/weights.hdf5" % self.weights)
            self.partial_load_weights(model, f)
            f.close()

        #plot(model, to_file="model.png")

        return model

    def partial_load_weights(self, model, f):
        '''
        Keras does not seem to support partially loading weights from one
        model into another model. This function achieves the same purpose so
        we can serialise the final RNN hidden state to disk.

        ``f`` is an open h5py File in Keras's "new" weight-file layout; the
        checkpoint's final (output) layer is deliberately skipped.

        TODO: find / engineer a more elegant and general approach
        '''
        flattened_layers = model.layers
        # new file format
        filtered_layers = []
        for layer in flattened_layers:
            weights = layer.weights
            if weights:
                filtered_layers.append(layer)
        flattened_layers = filtered_layers

        layer_names = [n.decode('utf8') for n in f.attrs['layer_names']]
        filtered_layer_names = []
        for name in layer_names[:-1]:  # -1 so we clip out the output layer
            g = f[name]
            weight_names = [n.decode('utf8') for n in g.attrs['weight_names']]
            if len(weight_names):
                filtered_layer_names.append(name)
        layer_names = filtered_layer_names
        if len(layer_names) != len(flattened_layers):
            raise Exception('You are trying to load a weight file '
                            'containing ' + str(len(layer_names)) +
                            ' layers into a model with ' +
                            str(len(flattened_layers)) + ' layers.')

        # we batch weight value assignments in a single backend call
        # which provides a speedup in TensorFlow.
        weight_value_tuples = []
        for k, name in enumerate(layer_names):
            g = f[name]
            weight_names = [n.decode('utf8') for n in g.attrs['weight_names']]
            weight_values = [g[weight_name] for weight_name in weight_names]
            layer = flattened_layers[k]
            symbolic_weights = layer.weights
            if len(weight_values) != len(symbolic_weights):
                raise Exception('Layer #' + str(k) +
                                ' (named "' + layer.name +
                                '" in the current model) was found to '
                                'correspond to layer ' + name +
                                ' in the save file. '
                                'However the new layer ' + layer.name +
                                ' expects ' + str(len(symbolic_weights)) +
                                ' weights, but the saved weights have ' +
                                str(len(weight_values)) +
                                ' elements.')
            weight_value_tuples += zip(symbolic_weights, weight_values)
        K.batch_set_value(weight_value_tuples)
class MRNN:
    '''
    Multimodal RNN captioning model written against the legacy Keras Graph
    API.  NOTE(review): ``Graph`` and ``TimeDistributedDense`` used below are
    not imported anywhere in this file, so buildKerasModel currently raises
    NameError — consistent with the TODO.

    TODO: port this model architecture to Keras 1.0.7
    '''
    def __init__(self, embed_size, hidden_size, vocab_size, dropin, optimiser,
                 l2reg, hsn_size=512, weights=None, gru=False,
                 clipnorm=-1, batch_size=None, t=None, lr=0.001):
        self.max_t = t  # Expected timesteps. Needed to build the Theano graph

        # Model hyperparameters
        self.vocab_size = vocab_size  # size of word vocabulary
        self.embed_size = embed_size  # number of units in a word embedding
        self.hsn_size = hsn_size  # size of the source hidden vector
        self.hidden_size = hidden_size  # number of units in first LSTM
        self.gru = gru  # gru recurrent layer? (false = lstm)
        self.dropin = dropin  # prob. of dropping input units
        self.l2reg = l2reg  # weight regularisation penalty

        # Optimiser hyperparameters (beta/epsilon only used for ADAM)
        self.optimiser = optimiser  # optimisation method
        self.lr = lr
        self.beta1 = 0.9
        self.beta2 = 0.999
        self.epsilon = 1e-8
        self.clipnorm = clipnorm

        self.weights = weights  # initialise with checkpointed weights?

    def buildKerasModel(self, use_sourcelang=False, use_image=True):
        '''
        Define the exact structure of your model here. We create an image
        description generation model by merging the VGG image features with
        a word embedding model, with an LSTM over the sequences.

        The order in which these appear below (text, image) is _IMMUTABLE_.
        (Needs to match up with input to model.fit.)
        '''
        logger.info('Building Keras model...')
        logger.info('Using image features: %s', use_image)
        logger.info('Using source language features: %s', use_sourcelang)

        model = Graph()
        model.add_input('text', input_shape=(self.max_t, self.vocab_size))
        model.add_node(Masking(mask_value=0.), input='text', name='text_mask')

        # Word embeddings
        model.add_node(TimeDistributedDense(output_dim=self.embed_size,
                                            input_dim=self.vocab_size,
                                            W_regularizer=l2(self.l2reg)),
                       name="w_embed", input='text_mask')
        model.add_node(Dropout(self.dropin),
                       name="w_embed_drop",
                       input="w_embed")

        # Embed -> Hidden
        model.add_node(TimeDistributedDense(output_dim=self.hidden_size,
                                            input_dim=self.embed_size,
                                            W_regularizer=l2(self.l2reg)),
                       name='embed_to_hidden', input='w_embed_drop')

        recurrent_inputs = 'embed_to_hidden'

        # Source language input (optionally summed into the RNN input)
        if use_sourcelang:
            model.add_input('source', input_shape=(self.max_t, self.hsn_size))
            model.add_node(Masking(mask_value=0.),
                           input='source',
                           name='source_mask')
            model.add_node(TimeDistributedDense(output_dim=self.hidden_size,
                                                input_dim=self.hsn_size,
                                                W_regularizer=l2(self.l2reg)),
                           name="s_embed",
                           input="source_mask")
            model.add_node(Dropout(self.dropin),
                           name="s_embed_drop",
                           input="s_embed")
            recurrent_inputs = ['embed_to_hidden', 's_embed_drop']

        # Recurrent layer
        if self.gru:
            model.add_node(GRU(output_dim=self.hidden_size,
                               input_dim=self.hidden_size,
                               return_sequences=True), name='rnn',
                           input=recurrent_inputs)
        else:
            model.add_node(LSTM(output_dim=self.hidden_size,
                                input_dim=self.hidden_size,
                                return_sequences=True), name='rnn',
                           input=recurrent_inputs)

        # Image 'embedding' — merged OUTSIDE the recurrent layer (multimodal
        # layer below), unlike the NIC model above.
        model.add_input('img', input_shape=(self.max_t, 4096))
        model.add_node(Masking(mask_value=0.),
                       input='img', name='img_mask')

        model.add_node(TimeDistributedDense(output_dim=self.hidden_size,
                                            input_dim=4096,
                                            W_regularizer=l2(self.l2reg)),
                       name='i_embed', input='img_mask')
        model.add_node(Dropout(self.dropin), name='i_embed_drop', input='i_embed')

        # Multimodal layer outside the recurrent layer: sum of RNN output,
        # image embedding, and the word projection (residual-style).
        model.add_node(TimeDistributedDense(output_dim=self.hidden_size,
                                            input_dim=self.hidden_size,
                                            W_regularizer=l2(self.l2reg)),
                       name='m_layer',
                       inputs=['rnn','i_embed_drop', 'embed_to_hidden'],
                       merge_mode='sum')

        # Per-timestep softmax over the vocabulary.
        model.add_node(TimeDistributedDense(output_dim=self.vocab_size,
                                            input_dim=self.hidden_size,
                                            W_regularizer=l2(self.l2reg),
                                            activation='softmax'),
                       name='output',
                       input='m_layer',
                       create_output=True)

        if self.optimiser == 'adam':
            # allow user-defined hyper-parameters for ADAM because it is
            # our preferred optimiser
            optimiser = Adam(lr=self.lr, beta_1=self.beta1,
                             beta_2=self.beta2, epsilon=self.epsilon,
                             clipnorm=self.clipnorm)
            model.compile(optimiser, {'output': 'categorical_crossentropy'})
        else:
            model.compile(self.optimiser, {'output': 'categorical_crossentropy'})

        if self.weights is not None:
            logger.info("... with weights defined in %s", self.weights)
            # Initialise the weights of the model, keeping a backup copy.
            shutil.copyfile("%s/weights.hdf5" % self.weights,
                            "%s/weights.hdf5.bak" % self.weights)
            model.load_weights("%s/weights.hdf5" % self.weights)

        #plot(model, to_file="model.png")

        return model
| {
"content_hash": "d3d805d4cafd9445b2b03a19c7995bb5",
"timestamp": "",
"source": "github",
"line_count": 436,
"max_line_length": 92,
"avg_line_length": 46.06651376146789,
"alnum_prop": 0.5268608414239482,
"repo_name": "elliottd/GroundedTranslation",
"id": "5db8ef57f8ffa52031424c1e653d1423fbc2bd4d",
"size": "20085",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "355"
},
{
"name": "Matlab",
"bytes": "3415"
},
{
"name": "Perl",
"bytes": "9498"
},
{
"name": "Python",
"bytes": "202526"
}
],
"symlink_target": ""
} |
from __future__ import division, absolute_import, print_function, unicode_literals
import logging
from datetime import datetime
from string import Template
from collections import defaultdict
from .messages import VERBATIM, MESSAGES, STATS
def eid_to_datetime(eid, tz=None):
    """Convert an IRCCloud event id (microseconds since the Unix epoch)
    to a datetime in timezone *tz* (local time when tz is None)."""
    return datetime.fromtimestamp(eid // 1000000, tz)
class TextLogRenderer(object):
    """Render IRCCloud log events to human-readable text.

    Each event is a dict carrying at least an ``eid`` (microsecond
    timestamp) and a ``type``; :meth:`render_line` maps every known type
    to one formatted text line prefixed with the event's timestamp.
    """
    def __init__(self, tz=None):
        """:param tz: optional tzinfo used when formatting event timestamps."""
        self.log = logging.getLogger(__name__)
        self.tz = tz
    def render_buffer(self, lines):
        """ Take an iterable list of log events and yield human-readable text strings """
        for line in lines:
            try:
                yield self.render_line(line.body)
            except KeyError:
                # A malformed event is logged and skipped so that rendering
                # of the remaining buffer continues.
                self.log.exception("Rendering exception")
    def render_line(self, line):
        """ Render a single log event to a string. """
        time = eid_to_datetime(line['eid'], self.tz)
        msg = "[%s] " % (time.strftime('%Y-%m-%d %H:%M:%S'))
        if line['type'] == 'buffer_msg':
            msg += "<%s> %s" % (line.get('from', line.get('server')), line['msg'])
            return msg
        if line['type'] == 'buffer_me_msg':
            msg += "— %s %s" % (line['from'], line['msg'])
            return msg
        # Everything below is an event/notice line: pick an arrow prefix for
        # join/part/quit, a '*' for all other event types.
        if line['type'] in ['joined_channel', 'you_joined_channel']:
            msg += '→ '
        elif line['type'] in ['parted_channel', 'you_parted_channel']:
            msg += '← '
        elif line['type'] == 'quit':
            msg += '⇐ '
        else:
            msg += '* '
        if line['type'] in VERBATIM:
            try:
                msg += line['msg']
            except KeyError:
                # Logger.warn is a deprecated alias of Logger.warning.
                self.log.warning("Log type %s has no attribute 'msg'", line['type'])
        elif line['type'] in MESSAGES:
            temp = Template(MESSAGES[line['type']])
            # defaultdict supplies '' for placeholders missing from the event
            # instead of raising KeyError during substitution.
            msg += temp.safe_substitute(defaultdict(lambda: '', line))
        elif line['type'] in STATS:
            if 'parts' in line:
                msg += line['parts'] + ": "
            msg += line['msg']
        elif line['type'] == 'user_channel_mode':
            msg += '%s set %s %s' % (line.get('from', line.get('server')), line['diff'], line['nick'])
        elif line['type'] == 'channel_query':
            if line['query_type'] == 'timestamp':
                msg += 'channel timestamp is %s' % line['timestamp']
            elif line['query_type'] == 'mode':
                msg += 'channel mode is %s' % line['newmode']
            else:
                self.log.warning('Unknown channel_query type: %s', line['query_type'])
        elif line['type'] == 'channel_mode':
            msg += 'Channel mode set to %s by ' % line['diff']
            if 'from' in line:
                msg += line['from']
            else:
                msg += 'the server %s' % line['server']
        elif line['type'] == 'motd_response':
            msg += "\n".join(line['lines'])
        elif line['type'] in ['cap_ls', 'cap_req', 'cap_ack']:
            if line['type'] == 'cap_ls':
                msg += 'Available'
            if line['type'] == 'cap_req':
                msg += 'Requested'
            if line['type'] == 'cap_ack':
                msg += 'Acknowledged'
            msg += ' capabilities: %s' % ' | '.join(line['caps'])
        elif line['type'] == 'unknown_umode':
            if 'flag' in line:
                msg += line['flag'] + " "
            msg += line['msg']
        elif line['type'] == 'time':
            msg += 'Server time: %s' % line['time_string']
            if 'time_stamp' in line:
                msg += ' (%s)' % line['time_stamp']
            msg += ' - %s' % line['time_server']
        else:
            # Unknown event type: best effort — keep any message text and
            # record the unknown type for triage.
            if 'msg' in line:
                msg += line['msg']
            self.log.warning('Unknown message type (%s)', line['type'])
        return msg
| {
"content_hash": "fe65b0f30e111af2bf81a3d22b50d14e",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 102,
"avg_line_length": 39.88775510204081,
"alnum_prop": 0.49143003325658735,
"repo_name": "irccloud/irccloud-python-client",
"id": "017f01f3a2e21f7a0873734d587575bc8ff1321a",
"size": "3932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "irccloud/client/log_render.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20895"
}
],
"symlink_target": ""
} |
from abc import ABC, abstractmethod
class Index(ABC):
    """Abstract interface for a record index used during search.

    Concrete implementations must support adding documents (``index``),
    removing them (``unindex``), preparing for queries (``initSearch``)
    and querying (``search``).
    """
    @abstractmethod
    def __init__(self):  # pragma: no cover
        pass
    @abstractmethod
    def index(self, doc):  # pragma: no cover
        pass
    @abstractmethod
    def unindex(self, doc):  # pragma: no cover
        pass
    # The no-cover pragma belongs on the ``def`` line (as for the sibling
    # methods); on the decorator line it excluded the wrong statement.
    @abstractmethod
    def search(self, doc, threshold=0):  # pragma: no cover
        pass
    @abstractmethod
    def initSearch(self):  # pragma: no cover
        pass
| {
"content_hash": "e11380ebcb0aae605d5d57ae88162e1a",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 47,
"avg_line_length": 20.391304347826086,
"alnum_prop": 0.605543710021322,
"repo_name": "datamade/dedupe",
"id": "eb36dd265c3784b34f462b4fa228e032a1120af3",
"size": "469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dedupe/index.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "190850"
},
{
"name": "Shell",
"bytes": "1224"
}
],
"symlink_target": ""
} |
"""
Neural network - nsp
"""
import numpy as np
def two_norm(dWi, dWo):
    """Calculate the two norm of the gradient.

    :param dWi: array of input-to-hidden gradient components.
    :param dWo: array of hidden-to-output gradient components.
    :return: square root of the total sum of squared entries of both arrays.
    """
    # np.sum already reduces over every axis, so one call per array is
    # enough; the original's outer np.sum over the scalar was a no-op.
    return np.sqrt(np.sum(np.power(dWi, 2)) + np.sum(np.power(dWo, 2)))
def weight_norm(W):
    """Calculate the squared sum of weights.

    :param W: weight array of any shape.
    :return: sum over all entries of ``W`` squared element-wise.
    """
    squared_weights = np.power(W, 2)
    return np.sum(squared_weights)
| {
"content_hash": "7c850852fa6982c2be27b6e8cecad321",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 53,
"avg_line_length": 21.875,
"alnum_prop": 0.5657142857142857,
"repo_name": "aaskov/nsp",
"id": "c45bc0d7cad73295ee10eb8ebed842fa23ecd386",
"size": "375",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nn/nn_norm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39972"
}
],
"symlink_target": ""
} |
"""
.. py:currentmodule:: FileFormat.Results.PhirhozGenerated
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
MCXRay phirhoz generated result file.
"""
# Script information for the file.
__author__ = "Hendrix Demers (hendrix.demers@mail.mcgill.ca)"
__version__ = ""
__date__ = ""
__copyright__ = "Copyright (c) 2012 Hendrix Demers"
__license__ = ""
# Subversion informations for the file.
__svnRevision__ = "$Revision$"
__svnDate__ = "$Date$"
__svnId__ = "$Id$"
# Standard library modules.
# Third party modules.
# Local modules.
# Project modules
import pymcxray.FileFormat.Results.ModelParameters as ModelParameters
import pymcxray.FileFormat.Results.SimulationParameters as SimulationParameters
import pymcxray.FileFormat.Results.MicroscopeParameters as MicroscopeParameters
import pymcxray.FileFormat.Results.ElectronParameters as ElectronParameters
import pymcxray.FileFormat.Results.PhirhozRegion as PhirhozRegion
# Globals and constants variables.
KEY_PHIRHOZ_GENERATED = "phirhozGenerated"
KEY_NUMBER_REGIONS = "Number of Regions"
KEY_PHIRHOZ_CHARACTERISTIC = "Characteristic"


class PhirhozGenerated(object):
    """Reader for an MCXRay "phirhoz generated" result file.

    The file is parsed section by section (models, simulation, microscope,
    electron parameters, then one block per region); each section parser
    returns the number of lines it consumed.
    """
    def __init__(self):
        # Every parsed section is stored in this single mapping and exposed
        # through the properties below.
        self._parameters = {}

    def read(self, filepath):
        """Read and parse the whole result file at *filepath*.

        :param filepath: path of the phirhoz generated result file.
        """
        # Context manager closes the file even if a section parser raises
        # (the previous open(...).readlines() leaked the handle).
        with open(filepath, 'r') as resultFile:
            lines = resultFile.readlines()
        lineIndex = 0
        lineIndex += self.readSolidSimulationModels(lines[lineIndex:])
        lineIndex += self.readSimulationParameters(lines[lineIndex:])
        lineIndex += self.readMicroscope(lines[lineIndex:])
        lineIndex += self.readElectron(lines[lineIndex:])
        lineIndex += self.readRegions(lines[lineIndex:])

    def readSolidSimulationModels(self, lines):
        """Parse the model parameters section; return lines consumed."""
        self.modelParameters = ModelParameters.ModelParameters()
        lineIndex = self.modelParameters.readFromLines(lines)
        return lineIndex

    def readSimulationParameters(self, lines):
        """Parse the simulation parameters section; return lines consumed."""
        self.simulationParameters = SimulationParameters.SimulationParameters()
        lineIndex = self.simulationParameters.readFromLines(lines)
        return lineIndex

    def readMicroscope(self, lines):
        """Parse the microscope parameters section; return lines consumed."""
        self.microscopeParameters = MicroscopeParameters.MicroscopeParameters()
        lineIndex = self.microscopeParameters.readFromLines(lines)
        return lineIndex

    def readElectron(self, lines):
        """Parse the electron parameters section; return lines consumed."""
        self.electronParameters = ElectronParameters.ElectronParameters()
        lineIndex = self.electronParameters.readFromLines(lines)
        return lineIndex

    def readRegions(self, lines):
        """Parse the per-region phirhoz blocks; return lines consumed.

        :raises ValueError: if the "Number of Regions" header is not found.
        """
        indexLine = 0
        for line in lines:
            indexLine += 1
            if line.strip().startswith(KEY_NUMBER_REGIONS):
                indexNumberRegions = len(KEY_NUMBER_REGIONS)
                self.numberRegions = int(line[indexNumberRegions:])
                break
        else:
            # for/else: only reached when no line matched the header.
            # (Also fixes the "liens" typo in the original message.)
            message = "Cannot find the section header in the lines: %s" % (KEY_NUMBER_REGIONS)
            raise ValueError(message)
        self._regions = {}
        for _indexRegion in range(self.numberRegions):
            region = PhirhozRegion.PhirhozRegion(self.simulationParameters.numberEnergyWindows, self.simulationParameters.numberLayersZ)
            indexLine += region.readFromLines(lines[indexLine:])
            self._regions[region.regionID] = region
        assert len(self._regions) == self.numberRegions
        return indexLine

    def getCharacteristicPhiRhoZ(self, regionID):
        """Return the characteristic phirhoz distribution for *regionID*."""
        return self._regions[regionID].characteristicPhirhoz

    @property
    def modelParameters(self):
        return self._parameters[ModelParameters.KEY_MODEL_PARAMETERS]

    @modelParameters.setter
    def modelParameters(self, modelParameters):
        self._parameters[ModelParameters.KEY_MODEL_PARAMETERS] = modelParameters

    @property
    def simulationParameters(self):
        return self._parameters[SimulationParameters.KEY_SIMULATION_PARAMETERS]

    @simulationParameters.setter
    def simulationParameters(self, simulationParameters):
        self._parameters[SimulationParameters.KEY_SIMULATION_PARAMETERS] = simulationParameters

    @property
    def microscopeParameters(self):
        return self._parameters[MicroscopeParameters.KEY_MICROSCOPE_PARAMETERS]

    @microscopeParameters.setter
    def microscopeParameters(self, microscopeParameters):
        self._parameters[MicroscopeParameters.KEY_MICROSCOPE_PARAMETERS] = microscopeParameters

    @property
    def electronParameters(self):
        return self._parameters[ElectronParameters.KEY_ELECTRON_PARAMETERS]

    @electronParameters.setter
    def electronParameters(self, electronParameters):
        self._parameters[ElectronParameters.KEY_ELECTRON_PARAMETERS] = electronParameters

    @property
    def numberRegions(self):
        return self._parameters[KEY_NUMBER_REGIONS]

    @numberRegions.setter
    def numberRegions(self, numberRegions):
        self._parameters[KEY_NUMBER_REGIONS] = numberRegions
| {
"content_hash": "47436c01d1c0b1d6b112c96b70b55d2c",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 136,
"avg_line_length": 35.709219858156025,
"alnum_prop": 0.7000993048659384,
"repo_name": "drix00/pymcxray",
"id": "8df0bc55037bc98c534ce5f4608460a6caa0f038",
"size": "5058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymcxray/FileFormat/Results/PhirhozGenerated.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2299"
},
{
"name": "Python",
"bytes": "933760"
}
],
"symlink_target": ""
} |
import contextlib
import mock
from oslo.config import cfg
from webob import exc
import webtest
from neutron.api import extensions
from neutron.api.extensions import PluginAwareExtensionManager
from neutron.api.v2 import attributes
from neutron.common import config
from neutron.common.test_lib import test_config
from neutron import context
from neutron.db import api as db_api
from neutron.db import db_base_plugin_v2
from neutron import manager
from neutron.plugins.nicira.dbexts import nicira_networkgw_db
from neutron.plugins.nicira.extensions import nvp_networkgw as networkgw
from neutron import quota
from neutron.tests import base
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import test_extensions
# Shorthand aliases for helpers shared across the API test modules.
_uuid = test_api_v2._uuid
_get_path = test_api_v2._get_path
class TestExtensionManager(object):
    """Minimal extension manager exposing only the network gateway extension."""
    def get_resources(self):
        # Add the resources to the global attribute map
        # This is done here as the setup process won't
        # initialize the main API router which extends
        # the global attribute map
        attributes.RESOURCE_ATTRIBUTE_MAP.update(
            networkgw.RESOURCE_ATTRIBUTE_MAP)
        return networkgw.Nvp_networkgw.get_resources()
    def get_actions(self):
        # No extra actions beyond what the extension itself declares.
        return []
    def get_request_extensions(self):
        # No request extensions are needed for these tests.
        return []
class NetworkGatewayExtensionTestCase(base.BaseTestCase):
    """API-layer tests for the network gateway extension.

    The plugin is fully mocked (mock.patch with autospec), so these tests
    verify only request routing, payload marshalling and HTTP status codes
    through the WSGI extension middleware.
    """
    def setUp(self):
        super(NetworkGatewayExtensionTestCase, self).setUp()
        plugin = '%s.%s' % (networkgw.__name__,
                            networkgw.NetworkGatewayPluginBase.__name__)
        self._resource = networkgw.RESOURCE_NAME.replace('-', '_')
        # Ensure existing ExtensionManager is not used
        extensions.PluginAwareExtensionManager._instance = None
        # Create the default configurations
        args = ['--config-file', test_api_v2.etcdir('neutron.conf.test')]
        config.parse(args=args)
        # Update the plugin and extensions path
        self.setup_coreplugin(plugin)
        self.addCleanup(cfg.CONF.reset)
        # Replace the plugin class with an autospec mock; every test then
        # stubs return values on self.plugin.return_value.
        _plugin_patcher = mock.patch(plugin, autospec=True)
        self.plugin = _plugin_patcher.start()
        self.addCleanup(_plugin_patcher.stop)
        # Instantiate mock plugin and enable extensions
        manager.NeutronManager.get_plugin().supported_extension_aliases = (
            [networkgw.EXT_ALIAS])
        ext_mgr = TestExtensionManager()
        PluginAwareExtensionManager._instance = ext_mgr
        self.ext_mdw = test_extensions.setup_extensions_middleware(ext_mgr)
        self.api = webtest.TestApp(self.ext_mdw)
        quota.QUOTAS._driver = None
        cfg.CONF.set_override('quota_driver', 'neutron.quota.ConfDriver',
                              group='QUOTAS')
    def test_network_gateway_create(self):
        # POST with a valid body must reach the plugin and return 201.
        nw_gw_id = _uuid()
        data = {self._resource: {'name': 'nw-gw',
                                 'tenant_id': _uuid(),
                                 'devices': [{'id': _uuid(),
                                              'interface_name': 'xxx'}]}}
        return_value = data[self._resource].copy()
        return_value.update({'id': nw_gw_id})
        instance = self.plugin.return_value
        instance.create_network_gateway.return_value = return_value
        res = self.api.post_json(_get_path(networkgw.COLLECTION_NAME), data)
        instance.create_network_gateway.assert_called_with(
            mock.ANY, network_gateway=data)
        self.assertEqual(res.status_int, exc.HTTPCreated.code)
        self.assertTrue(self._resource in res.json)
        nw_gw = res.json[self._resource]
        self.assertEqual(nw_gw['id'], nw_gw_id)
    def _test_network_gateway_create_with_error(
        self, data, error_code=exc.HTTPBadRequest.code):
        # Helper: POST an invalid body and assert the expected error code.
        res = self.api.post_json(_get_path(networkgw.COLLECTION_NAME), data,
                                 expect_errors=True)
        self.assertEqual(res.status_int, error_code)
    def test_network_gateway_create_invalid_device_spec(self):
        data = {self._resource: {'name': 'nw-gw',
                                 'tenant_id': _uuid(),
                                 'devices': [{'id': _uuid(),
                                              'invalid': 'xxx'}]}}
        self._test_network_gateway_create_with_error(data)
    def test_network_gateway_create_extra_attr_in_device_spec(self):
        data = {self._resource: {'name': 'nw-gw',
                                 'tenant_id': _uuid(),
                                 'devices': [{'id': _uuid(),
                                              'interface_name': 'xxx',
                                              'extra_attr': 'onetoomany'}]}}
        self._test_network_gateway_create_with_error(data)
    def test_network_gateway_update(self):
        nw_gw_name = 'updated'
        data = {self._resource: {'name': nw_gw_name}}
        nw_gw_id = _uuid()
        return_value = {'id': nw_gw_id,
                        'name': nw_gw_name}
        instance = self.plugin.return_value
        instance.update_network_gateway.return_value = return_value
        res = self.api.put_json(_get_path('%s/%s' % (networkgw.COLLECTION_NAME,
                                                     nw_gw_id)),
                                data)
        instance.update_network_gateway.assert_called_with(
            mock.ANY, nw_gw_id, network_gateway=data)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        self.assertTrue(self._resource in res.json)
        nw_gw = res.json[self._resource]
        self.assertEqual(nw_gw['id'], nw_gw_id)
        self.assertEqual(nw_gw['name'], nw_gw_name)
    def test_network_gateway_delete(self):
        nw_gw_id = _uuid()
        instance = self.plugin.return_value
        res = self.api.delete(_get_path('%s/%s' % (networkgw.COLLECTION_NAME,
                                                   nw_gw_id)))
        instance.delete_network_gateway.assert_called_with(mock.ANY,
                                                           nw_gw_id)
        self.assertEqual(res.status_int, exc.HTTPNoContent.code)
    def test_network_gateway_get(self):
        nw_gw_id = _uuid()
        return_value = {self._resource: {'name': 'test',
                                         'devices':
                                         [{'id': _uuid(),
                                           'interface_name': 'xxx'}],
                                         'id': nw_gw_id}}
        instance = self.plugin.return_value
        instance.get_network_gateway.return_value = return_value
        res = self.api.get(_get_path('%s/%s' % (networkgw.COLLECTION_NAME,
                                                nw_gw_id)))
        instance.get_network_gateway.assert_called_with(mock.ANY,
                                                        nw_gw_id,
                                                        fields=mock.ANY)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
    def test_network_gateway_list(self):
        nw_gw_id = _uuid()
        return_value = [{self._resource: {'name': 'test',
                                          'devices':
                                          [{'id': _uuid(),
                                            'interface_name': 'xxx'}],
                                          'id': nw_gw_id}}]
        instance = self.plugin.return_value
        instance.get_network_gateways.return_value = return_value
        res = self.api.get(_get_path(networkgw.COLLECTION_NAME))
        instance.get_network_gateways.assert_called_with(mock.ANY,
                                                         fields=mock.ANY,
                                                         filters=mock.ANY)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
    def test_network_gateway_connect(self):
        # connect_network is exposed as a member action via PUT.
        nw_gw_id = _uuid()
        nw_id = _uuid()
        gw_port_id = _uuid()
        mapping_data = {'network_id': nw_id,
                        'segmentation_type': 'vlan',
                        'segmentation_id': '999'}
        return_value = {'connection_info': {
                        'network_gateway_id': nw_gw_id,
                        'port_id': gw_port_id,
                        'network_id': nw_id}}
        instance = self.plugin.return_value
        instance.connect_network.return_value = return_value
        res = self.api.put_json(_get_path('%s/%s/connect_network' %
                                          (networkgw.COLLECTION_NAME,
                                           nw_gw_id)),
                                mapping_data)
        instance.connect_network.assert_called_with(mock.ANY,
                                                    nw_gw_id,
                                                    mapping_data)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
        nw_conn_res = res.json['connection_info']
        self.assertEqual(nw_conn_res['port_id'], gw_port_id)
        self.assertEqual(nw_conn_res['network_id'], nw_id)
    def test_network_gateway_disconnect(self):
        nw_gw_id = _uuid()
        nw_id = _uuid()
        mapping_data = {'network_id': nw_id}
        instance = self.plugin.return_value
        res = self.api.put_json(_get_path('%s/%s/disconnect_network' %
                                          (networkgw.COLLECTION_NAME,
                                           nw_gw_id)),
                                mapping_data)
        instance.disconnect_network.assert_called_with(mock.ANY,
                                                       nw_gw_id,
                                                       mapping_data)
        self.assertEqual(res.status_int, exc.HTTPOk.code)
class NetworkGatewayDbTestCase(test_db_plugin.NeutronDbPluginV2TestCase):
    """Unit tests for Network Gateway DB support."""
    def setUp(self):
        # Route all requests through the small test plugin defined below.
        test_config['plugin_name_v2'] = '%s.%s' % (
            __name__, TestNetworkGatewayPlugin.__name__)
        ext_mgr = TestExtensionManager()
        test_config['extension_manager'] = ext_mgr
        self.resource = networkgw.RESOURCE_NAME.replace('-', '_')
        super(NetworkGatewayDbTestCase, self).setUp()
    def _create_network_gateway(self, fmt, tenant_id, name=None,
                                devices=None, arg_list=None, **kwargs):
        # Helper: build and issue a create request; returns the raw response.
        data = {self.resource: {'tenant_id': tenant_id,
                                'devices': devices}}
        if name:
            data[self.resource]['name'] = name
        for arg in arg_list or ():
            # Arg must be present and not empty
            if arg in kwargs and kwargs[arg]:
                data[self.resource][arg] = kwargs[arg]
        nw_gw_req = self.new_create_request(networkgw.COLLECTION_NAME,
                                            data, fmt)
        if (kwargs.get('set_context') and tenant_id):
            # create a specific auth context for this request
            nw_gw_req.environ['neutron.context'] = context.Context(
                '', tenant_id)
        return nw_gw_req.get_response(self.ext_api)
    # NOTE(review): the tenant_id default below is evaluated once at import
    # time, so every test shares the same tenant UUID — presumably
    # intentional here, but worth confirming.
    @contextlib.contextmanager
    def _network_gateway(self, name='gw1', devices=None,
                         fmt='json', tenant_id=_uuid()):
        # Context manager: create a gateway, yield it, then delete it.
        if not devices:
            devices = [{'id': _uuid(), 'interface_name': 'xyz'}]
        res = self._create_network_gateway(fmt, tenant_id, name=name,
                                           devices=devices)
        network_gateway = self.deserialize(fmt, res)
        if res.status_int >= 400:
            raise exc.HTTPClientError(code=res.status_int)
        yield network_gateway
        self._delete(networkgw.COLLECTION_NAME,
                     network_gateway[self.resource]['id'])
    def _gateway_action(self, action, network_gateway_id, network_id,
                        segmentation_type, segmentation_id=None,
                        expected_status=exc.HTTPOk.code):
        # Helper: issue a connect_network/disconnect_network member action
        # and assert the expected HTTP status.
        connection_data = {'network_id': network_id,
                           'segmentation_type': segmentation_type}
        if segmentation_id:
            connection_data['segmentation_id'] = segmentation_id
        req = self.new_action_request(networkgw.COLLECTION_NAME,
                                      connection_data,
                                      network_gateway_id,
                                      "%s_network" % action)
        res = req.get_response(self.ext_api)
        self.assertEqual(res.status_int, expected_status)
        return self.deserialize('json', res)
    def _test_connect_and_disconnect_network(self, segmentation_type,
                                             segmentation_id=None):
        # Connect a network, verify the returned connection info and the
        # gateway port, then disconnect and verify the port is gone.
        with self._network_gateway() as gw:
            with self.network() as net:
                body = self._gateway_action('connect',
                                            gw[self.resource]['id'],
                                            net['network']['id'],
                                            segmentation_type,
                                            segmentation_id)
                self.assertTrue('connection_info' in body)
                connection_info = body['connection_info']
                for attr in ('network_id', 'port_id',
                             'network_gateway_id'):
                    self.assertTrue(attr in connection_info)
                # fetch port and confirm device_id
                gw_port_id = connection_info['port_id']
                port_body = self._show('ports', gw_port_id)
                self.assertEqual(port_body['port']['device_id'],
                                 gw[self.resource]['id'])
                # Clean up - otherwise delete will fail
                body = self._gateway_action('disconnect',
                                            gw[self.resource]['id'],
                                            net['network']['id'],
                                            segmentation_type,
                                            segmentation_id)
                # Check associated port has been deleted too
                body = self._show('ports', gw_port_id,
                                  expected_code=exc.HTTPNotFound.code)
    def test_create_network_gateway(self):
        name = 'test-gw'
        devices = [{'id': _uuid(), 'interface_name': 'xxx'},
                   {'id': _uuid(), 'interface_name': 'yyy'}]
        keys = [('devices', devices), ('name', name)]
        with self._network_gateway(name=name, devices=devices) as gw:
            for k, v in keys:
                self.assertEqual(gw[self.resource][k], v)
    def test_create_network_gateway_no_interface_name(self):
        # When a device omits interface_name the default 'breth0' is used.
        name = 'test-gw'
        devices = [{'id': _uuid()}]
        exp_devices = devices
        exp_devices[0]['interface_name'] = 'breth0'
        keys = [('devices', exp_devices), ('name', name)]
        with self._network_gateway(name=name, devices=devices) as gw:
            for k, v in keys:
                self.assertEqual(gw[self.resource][k], v)
    def _test_delete_network_gateway(self, exp_gw_count=0):
        name = 'test-gw'
        devices = [{'id': _uuid(), 'interface_name': 'xxx'},
                   {'id': _uuid(), 'interface_name': 'yyy'}]
        with self._network_gateway(name=name, devices=devices):
            # Nothing to do here - just let the gateway go
            pass
        # Verify nothing left on db
        session = db_api.get_session()
        gw_query = session.query(nicira_networkgw_db.NetworkGateway)
        dev_query = session.query(nicira_networkgw_db.NetworkGatewayDevice)
        self.assertEqual(exp_gw_count, gw_query.count())
        self.assertEqual(0, dev_query.count())
    def test_delete_network_gateway(self):
        self._test_delete_network_gateway()
    def test_update_network_gateway(self):
        with self._network_gateway() as gw:
            data = {self.resource: {'name': 'new_name'}}
            req = self.new_update_request(networkgw.COLLECTION_NAME,
                                          data,
                                          gw[self.resource]['id'])
            res = self.deserialize('json', req.get_response(self.ext_api))
            self.assertEqual(res[self.resource]['name'],
                             data[self.resource]['name'])
    def test_get_network_gateway(self):
        with self._network_gateway(name='test-gw') as gw:
            req = self.new_show_request(networkgw.COLLECTION_NAME,
                                        gw[self.resource]['id'])
            res = self.deserialize('json', req.get_response(self.ext_api))
            self.assertEqual(res[self.resource]['name'],
                             gw[self.resource]['name'])
    def test_list_network_gateways(self):
        with self._network_gateway(name='test-gw-1') as gw1:
            with self._network_gateway(name='test_gw_2') as gw2:
                req = self.new_list_request(networkgw.COLLECTION_NAME)
                res = self.deserialize('json', req.get_response(self.ext_api))
                key = self.resource + 's'
                self.assertEqual(len(res[key]), 2)
                self.assertEqual(res[key][0]['name'],
                                 gw1[self.resource]['name'])
                self.assertEqual(res[key][1]['name'],
                                 gw2[self.resource]['name'])
    def _test_list_network_gateway_with_multiple_connections(
        self, expected_gateways=1):
        # Connect the same network twice (different VLANs) and check the
        # listed gateway reports both ports.
        with self._network_gateway() as gw:
            with self.network() as net_1:
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 777)
                req = self.new_list_request(networkgw.COLLECTION_NAME)
                res = self.deserialize('json', req.get_response(self.ext_api))
                key = self.resource + 's'
                self.assertEqual(len(res[key]), expected_gateways)
                for item in res[key]:
                    self.assertIn('ports', item)
                    if item['id'] == gw[self.resource]['id']:
                        gw_ports = item['ports']
                self.assertEqual(len(gw_ports), 2)
                segmentation_ids = [555, 777]
                for gw_port in gw_ports:
                    self.assertEqual('vlan', gw_port['segmentation_type'])
                    self.assertIn(gw_port['segmentation_id'], segmentation_ids)
                    segmentation_ids.remove(gw_port['segmentation_id'])
                # Required cleanup
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 777)
    def test_list_network_gateway_with_multiple_connections(self):
        self._test_list_network_gateway_with_multiple_connections()
    def test_connect_and_disconnect_network(self):
        self._test_connect_and_disconnect_network('flat')
    def test_connect_and_disconnect_network_no_seg_type(self):
        self._test_connect_and_disconnect_network(None)
    def test_connect_and_disconnect_network_with_segmentation_id(self):
        self._test_connect_and_disconnect_network('vlan', 999)
    def test_connect_network_multiple_times(self):
        with self._network_gateway() as gw:
            with self.network() as net_1:
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 777)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 777)
    def test_connect_network_multiple_gateways(self):
        with self._network_gateway() as gw_1:
            with self._network_gateway() as gw_2:
                with self.network() as net_1:
                    self._gateway_action('connect',
                                         gw_1[self.resource]['id'],
                                         net_1['network']['id'],
                                         'vlan', 555)
                    self._gateway_action('connect',
                                         gw_2[self.resource]['id'],
                                         net_1['network']['id'],
                                         'vlan', 555)
                    self._gateway_action('disconnect',
                                         gw_1[self.resource]['id'],
                                         net_1['network']['id'],
                                         'vlan', 555)
                    self._gateway_action('disconnect',
                                         gw_2[self.resource]['id'],
                                         net_1['network']['id'],
                                         'vlan', 555)
    def test_connect_network_mapping_in_use_returns_409(self):
        with self._network_gateway() as gw:
            with self.network() as net_1:
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                with self.network() as net_2:
                    self._gateway_action('connect',
                                         gw[self.resource]['id'],
                                         net_2['network']['id'],
                                         'vlan', 555,
                                         expected_status=exc.HTTPConflict.code)
                # Clean up - otherwise delete will fail
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
    def test_connect_invalid_network_returns_400(self):
        with self._network_gateway() as gw:
            self._gateway_action('connect',
                                 gw[self.resource]['id'],
                                 'hohoho',
                                 'vlan', 555,
                                 expected_status=exc.HTTPBadRequest.code)
    def test_connect_unspecified_network_returns_400(self):
        with self._network_gateway() as gw:
            self._gateway_action('connect',
                                 gw[self.resource]['id'],
                                 None,
                                 'vlan', 555,
                                 expected_status=exc.HTTPBadRequest.code)
    def test_disconnect_network_ambiguous_returns_409(self):
        # Disconnecting without a segmentation id is ambiguous when two
        # connections exist for the same network.
        with self._network_gateway() as gw:
            with self.network() as net_1:
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 777)
                # This should raise
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan',
                                     expected_status=exc.HTTPConflict.code)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 777)
    def test_delete_active_gateway_port_returns_409(self):
        with self._network_gateway() as gw:
            with self.network() as net_1:
                body = self._gateway_action('connect',
                                            gw[self.resource]['id'],
                                            net_1['network']['id'],
                                            'vlan', 555)
                # fetch port id and try to delete it
                gw_port_id = body['connection_info']['port_id']
                self._delete('ports', gw_port_id,
                             expected_code=exc.HTTPConflict.code)
                body = self._gateway_action('disconnect',
                                            gw[self.resource]['id'],
                                            net_1['network']['id'],
                                            'vlan', 555)
    def test_delete_network_gateway_active_connections_returns_409(self):
        with self._network_gateway() as gw:
            with self.network() as net_1:
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'flat')
                self._delete(networkgw.COLLECTION_NAME,
                             gw[self.resource]['id'],
                             expected_code=exc.HTTPConflict.code)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'flat')
    def test_disconnect_non_existing_connection_returns_404(self):
        with self._network_gateway() as gw:
            with self.network() as net_1:
                self._gateway_action('connect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 999,
                                     expected_status=exc.HTTPNotFound.code)
                self._gateway_action('disconnect',
                                     gw[self.resource]['id'],
                                     net_1['network']['id'],
                                     'vlan', 555)
class TestNetworkGatewayPlugin(db_base_plugin_v2.NeutronDbPluginV2,
                               nicira_networkgw_db.NetworkGatewayMixin):
    """Simple plugin class for testing db support for network gateway ext."""
    supported_extension_aliases = ["network-gateway"]
    def delete_port(self, context, id, nw_gw_port_check=True):
        """Delete a port; when checking is enabled, first refuse removal of
        ports still attached to a network gateway."""
        if nw_gw_port_check:
            gw_port = self._get_port(context, id)
            self.prevent_network_gateway_port_deletion(context, gw_port)
        super(TestNetworkGatewayPlugin, self).delete_port(context, id)
| {
"content_hash": "930f96915f01e4a857e547361e89a257",
"timestamp": "",
"source": "github",
"line_count": 590,
"max_line_length": 79,
"avg_line_length": 47.9271186440678,
"alnum_prop": 0.47494430102203206,
"repo_name": "rickerc/neutron_audit",
"id": "53f826f1745ed78d74ae52d3b62add1f77d3d3bf",
"size": "28958",
"binary": false,
"copies": "1",
"ref": "refs/heads/cis-havana-staging",
"path": "neutron/tests/unit/nicira/test_networkgw.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "37307"
},
{
"name": "JavaScript",
"bytes": "67928"
},
{
"name": "Python",
"bytes": "7052151"
},
{
"name": "Shell",
"bytes": "8983"
},
{
"name": "XSLT",
"bytes": "50907"
}
],
"symlink_target": ""
} |
from atom.api import Str, Int, Float, Bool, Enum
from Instrument import Instrument
class DigitalAttenuator(Instrument):
    """Settings model for a three-channel digital attenuator instrument."""
    # Per-channel attenuation in dB; atom Float members whose .tag metadata
    # carries the UI description string.
    ch1Attenuation = Float(0.0).tag(desc="Ch 1 attenuation (dB)")
    ch2Attenuation = Float(0.0).tag(desc="Ch 2 attenuation (dB)")
    ch3Attenuation = Float(0.0).tag(desc="Ch 3 attenuation (dB)") | {
"content_hash": "eea3a0256d4efb52d9a4a9cae853cb89",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 62,
"avg_line_length": 38.75,
"alnum_prop": 0.7419354838709677,
"repo_name": "rmcgurrin/PyQLab",
"id": "05c8bbdb736775fb669449e1e334dd18d66a2531",
"size": "310",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "instruments/Attenuators.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "104179"
}
],
"symlink_target": ""
} |
"""Sphinx build configuration for the ExoCross documentation."""
project = 'ExoCross'
copyright = '2019, Sergey Yurchenko'
author = 'Sergey Yurchenko'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '1'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'bizstyle'
# html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself.  Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ExoCrossdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'ExoCross.tex', 'ExoCross Documentation',
     'Sergey Yurchenko', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'exocross', 'ExoCross Documentation',
     [author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'ExoCross', 'ExoCross Documentation',
     author, 'ExoCross', 'One line description of project.',
     'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# favicon
html_favicon = 'favicon.ico'
# -- Extension configuration ------------------------------------------------- | {
"content_hash": "901e1d66fe12fadd667f9749d825d2d4",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 78,
"avg_line_length": 28.575,
"alnum_prop": 0.6465441819772528,
"repo_name": "Trovemaster/exocross",
"id": "2db054aea40dfb3c3e9777efd0c971137fe291e6",
"size": "5286",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "234"
},
{
"name": "C++",
"bytes": "175"
},
{
"name": "CMake",
"bytes": "1144"
},
{
"name": "Fortran",
"bytes": "250560"
},
{
"name": "Makefile",
"bytes": "1552"
}
],
"symlink_target": ""
} |
# Setuptools packaging script for the "durabledict" distribution.
import os
import sys
import glob
# NOTE(review): os, sys and glob are not used in this excerpt — confirm they
# are needed (e.g. by code outside this view) before removing.
from setuptools import setup
try:
    import multiprocessing  # Seems to fix http://bugs.python.org/issue15881
except ImportError:
    pass
setup(
    name='durabledict',
    version='0.9.4',
    author='DISQUS',
    author_email='opensource@disqus.com',
    url='http://github.com/disqus/durabledict/',
    description='Dictionary-style access to different types of models.',
    packages=['durabledict'],
    zip_safe=False,
    # Backends exercised by the test suite (Django/redis/zookeeper), not
    # runtime requirements of the package itself.
    tests_require=[
        'Django<1.7',
        'nose',
        'mock',
        'redis',
        'kazoo',
    ],
    test_suite='nose.collector',
    include_package_data=True,
    classifiers=[
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'Operating System :: OS Independent',
        'Topic :: Software Development'
    ],
)
| {
"content_hash": "a037ab10855227431e456b381cc08405",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 76,
"avg_line_length": 23.666666666666668,
"alnum_prop": 0.6255868544600939,
"repo_name": "disqus/durabledict",
"id": "840d951b7918791d2bc2de1efb3bfa22ca63127f",
"size": "875",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "339"
},
{
"name": "Python",
"bytes": "47006"
}
],
"symlink_target": ""
} |
from azure.mgmt.web import WebSiteManagementClient
from cloudify_azure import (constants, utils)
from azure_sdk.common import AzureResource
class ServicePlan(AzureResource):
    """Thin wrapper over the Azure WebSiteManagementClient for App Service plans.

    Each operation logs its input, performs the SDK call, and (where a result
    exists) returns the plan as a plain ``dict``; logged output is passed
    through ``utils.secure_logging_content`` to avoid leaking secrets.
    """

    def __init__(self, azure_config, logger,
                 api_version=constants.API_VER_APP_SERVICE):
        super(ServicePlan, self).__init__(azure_config)
        self.logger = logger
        # NOTE(review): api_version is accepted but never forwarded to the
        # client — confirm whether relying on the SDK default is intended.
        self.client = WebSiteManagementClient(self.credentials,
                                              self.subscription_id)

    def get(self, group_name, plan_name):
        """Return the plan in ``group_name`` as a dict (falsy SDK result passes through)."""
        self.logger.info("Get plan...{0}".format(plan_name))
        result = self.client.app_service_plans.get(
            resource_group_name=group_name,
            name=plan_name)
        if result:
            result = result.as_dict()
        self.logger.info(
            'Get plan result: {0}'.format(
                utils.secure_logging_content(result)))
        return result

    def create_or_update(self, group_name, plan_name, params):
        """Create or update the plan and block until the async operation finishes."""
        self.logger.info("Create/Updating plan...{0}".format(plan_name))
        poller = \
            self.client.app_service_plans.create_or_update(
                resource_group_name=group_name,
                name=plan_name,
                app_service_plan=params,
            )
        poller.wait()
        result = poller.result().as_dict()
        self.logger.info(
            'Create plan result: {0}'.format(
                utils.secure_logging_content(result)))
        return result

    def delete(self, group_name, plan_name):
        """Delete the plan; returns nothing."""
        self.logger.info("Deleting plan...{0}".format(plan_name))
        self.client.app_service_plans.delete(
            resource_group_name=group_name,
            name=plan_name
        )
        self.logger.debug(
            'Deleted plan {0}'.format(plan_name))
| {
"content_hash": "1f7992e5279c600f11066b8cade422cb",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 72,
"avg_line_length": 37.1,
"alnum_prop": 0.5816711590296496,
"repo_name": "cloudify-cosmo/cloudify-azure-plugin",
"id": "29fd9ddd85605b144465a9a4a130b434511557bf",
"size": "2486",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure_sdk/resources/app_service/plan.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "710"
},
{
"name": "Python",
"bytes": "413092"
}
],
"symlink_target": ""
} |
"""Singleton mechanism"""
from __future__ import print_function, division
from .core import Registry
from .assumptions import ManagedProperties
from .sympify import sympify
class SingletonRegistry(Registry):
    """
    The registry for the singleton classes (accessible as ``S``).

    This class serves as two separate things.

    The first thing it is is the ``SingletonRegistry``. Several classes in
    SymPy appear so often that they are singletonized, that is, using some
    metaprogramming they are made so that they can only be instantiated once
    (see the :class:`sympy.core.singleton.Singleton` class for details). For
    instance, every time you create ``Integer(0)``, this will return the same
    instance, :class:`sympy.core.numbers.Zero`. All singleton instances are
    attributes of the ``S`` object, so ``Integer(0)`` can also be accessed as
    ``S.Zero``.

    Singletonization offers two advantages: it saves memory, and it allows
    fast comparison. It saves memory because no matter how many times the
    singletonized objects appear in expressions in memory, they all point to
    the same single instance in memory. The fast comparison comes from the
    fact that you can use ``is`` to compare exact instances in Python
    (usually, you need to use ``==`` to compare things). ``is`` compares
    objects by memory address, and is very fast. For instance

    >>> from sympy import S, Integer
    >>> a = Integer(0)
    >>> a is S.Zero
    True

    For the most part, the fact that certain objects are singletonized is an
    implementation detail that users shouldn't need to worry about. In SymPy
    library code, ``is`` comparison is often used for performance purposes
    The primary advantage of ``S`` for end users is the convenient access to
    certain instances that are otherwise difficult to type, like ``S.Half``
    (instead of ``Rational(1, 2)``).

    When using ``is`` comparison, make sure the argument is sympified. For
    instance,

    >>> 0 is S.Zero
    False

    This problem is not an issue when using ``==``, which is recommended for
    most use-cases:

    >>> 0 == S.Zero
    True

    The second thing ``S`` is is a shortcut for
    :func:`sympy.core.sympify.sympify`. :func:`sympy.core.sympify.sympify` is
    the function that converts Python objects such as ``int(1)`` into SymPy
    objects such as ``Integer(1)``. It also converts the string form of an
    expression into a SymPy expression, like ``sympify("x**2")`` ->
    ``Symbol("x")**2``. ``S(1)`` is the same thing as ``sympify(1)``
    (basically, ``S.__call__`` has been defined to call ``sympify``).

    This is for convenience, since ``S`` is a single letter. It's mostly
    useful for defining rational numbers. Consider an expression like ``x +
    1/2``. If you enter this directly in Python, it will evaluate the ``1/2``
    and give ``0.5`` (or just ``0`` in Python 2, because of integer division),
    because both arguments are ints (see also
    :ref:`tutorial-gotchas-final-notes`). However, in SymPy, you usually want
    the quotient of two integers to give an exact rational number. The way
    Python's evaluation works, at least one side of an operator needs to be a
    SymPy object for the SymPy evaluation to take over. You could write this
    as ``x + Rational(1, 2)``, but this is a lot more typing. A shorter
    version is ``x + S(1)/2``. Since ``S(1)`` returns ``Integer(1)``, the
    division will return a ``Rational`` type, since it will call
    ``Integer.__div__``, which knows how to return a ``Rational``.
    """
    __slots__ = []

    # Also allow things like S(5)
    __call__ = staticmethod(sympify)

    def __init__(self):
        self._classes_to_install = {}
        # Dict of classes that have been registered, but that have not have been
        # installed as an attribute of this SingletonRegistry.
        # Installation automatically happens at the first attempt to access the
        # attribute.
        # The purpose of this is to allow registration during class
        # initialization during import, but not trigger object creation until
        # actual use (which should not happen until after all imports are
        # finished).

    def register(self, cls):
        # Make sure a duplicate class overwrites the old one
        if hasattr(self, cls.__name__):
            delattr(self, cls.__name__)
        self._classes_to_install[cls.__name__] = cls

    def __getattr__(self, name):
        """Python calls __getattr__ if no attribute of that name was installed
        yet.

        This __getattr__ checks whether a class with the requested name was
        already registered but not installed; if no, raises an AttributeError.
        Otherwise, retrieves the class, calculates its singleton value, installs
        it as an attribute of the given name, and unregisters the class."""
        if name not in self._classes_to_install:
            raise AttributeError(
                "Attribute '%s' was not installed on SymPy registry %s" % (
                name, self))
        class_to_install = self._classes_to_install[name]
        value_to_install = class_to_install()
        self.__setattr__(name, value_to_install)
        del self._classes_to_install[name]
        return value_to_install

    def __repr__(self):
        return "S"
# The single global registry instance; ``S(...)`` also acts as ``sympify``.
S = SingletonRegistry()
class Singleton(ManagedProperties):
    """
    Metaclass for singleton classes.

    A singleton class has only one instance which is returned every time the
    class is instantiated. Additionally, this instance can be accessed through
    the global registry object ``S`` as ``S.<class_name>``.

    Examples
    ========

    >>> from sympy import S, Basic
    >>> from sympy.core.singleton import Singleton
    >>> from sympy.core.compatibility import with_metaclass
    >>> class MySingleton(with_metaclass(Singleton, Basic)):
    ...     pass
    >>> Basic() is Basic()
    False
    >>> MySingleton() is MySingleton()
    True
    >>> S.MySingleton is MySingleton()
    True

    Notes
    =====

    Instance creation is delayed until the first time the value is accessed.
    (SymPy versions before 1.0 would create the instance during class
    creation time, which would be prone to import cycles.)

    This metaclass is a subclass of ManagedProperties because that is the
    metaclass of many classes that need to be Singletons (Python does not allow
    subclasses to have a different metaclass than the superclass, except the
    subclass may use a subclassed metaclass).
    """
    _instances = {}
    "Maps singleton classes to their instances."

    def __new__(cls, *args, **kwargs):
        # Runs once per singleton *class* definition: register the new class
        # with the global registry so it can later be accessed as S.<name>.
        result = super(Singleton, cls).__new__(cls, *args, **kwargs)
        S.register(result)
        return result

    def __call__(self, *args, **kwargs):
        # Called when application code says SomeClass(), where SomeClass is a
        # class of which Singleton is the metaclass.
        # __call__ is invoked first, before __new__() and __init__().
        if self not in Singleton._instances:
            Singleton._instances[self] = \
                super(Singleton, self).__call__(*args, **kwargs)
                # Invokes the standard constructor of SomeClass.

            # Inject pickling support: a singleton is reconstructed from no
            # arguments on unpickle.  BUGFIX: this block previously appeared
            # after the ``return`` statement below and was therefore
            # unreachable, so ``__getnewargs__`` was never actually injected.
            def __getnewargs__(self):
                return ()
            self.__getnewargs__ = __getnewargs__

        return Singleton._instances[self]
| {
"content_hash": "17e1ea0f49cf9833d971a58fd2ad3c6d",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 80,
"avg_line_length": 40.89071038251366,
"alnum_prop": 0.6564212214352533,
"repo_name": "kaushik94/sympy",
"id": "8faf4724a75ce8e374ba4425f6c9dbba9a5f9543",
"size": "7483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympy/core/singleton.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "5094"
},
{
"name": "Python",
"bytes": "13553568"
},
{
"name": "Ruby",
"bytes": "304"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "4008"
},
{
"name": "TeX",
"bytes": "32356"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from girder import logger
from girder.exceptions import ValidationException
from girder.utility import setting_utilities
from girder_jobs.constants import JobStatus
from girder_jobs.models.job import Job
from celery.result import AsyncResult
from .celery import getCeleryApp
from .constants import PluginSettings
from .status import CustomJobStatus
from .utils import getWorkerApiUrl, jobInfoSpec
@setting_utilities.validator({
PluginSettings.BROKER,
PluginSettings.BACKEND
})
def validateSettings(doc):
    """Accept broker/backend settings without validating the URL values.

    Reinitialization of the celery app happens lazily: the cached app object
    is discarded here so the next use rebuilds it with the new settings.
    """
    global _celeryapp
    _celeryapp = None
@setting_utilities.validator({
PluginSettings.API_URL
})
def validateApiUrl(doc):
    """Reject a non-empty API URL value that lacks an http(s) scheme."""
    val = doc['value']
    # An empty/falsy value is allowed; only a non-empty value must carry a
    # recognized scheme.
    if val and not val.startswith(('http://', 'https://')):
        raise ValidationException('API URL must start with http:// or https://.', 'value')
@setting_utilities.validator(PluginSettings.DIRECT_PATH)
def _validateAutoCompute(doc):
if not isinstance(doc['value'], bool):
raise ValidationException('The direct path setting must be true or false.')
def validateJobStatus(event):
    """Accept this plugin's custom job status values during validation."""
    if not CustomJobStatus.isValid(event.info):
        return
    event.preventDefault().addResponse(True)
def validTransitions(event):
    """Report the custom valid state transitions for our job handlers."""
    handler = event.info['job']['handler']
    status = event.info['status']
    if handler == 'worker_handler':
        allowed = CustomJobStatus.validTransitionsWorker(status)
    elif handler == 'celery_handler':
        allowed = CustomJobStatus.validTransitionsCelery(status)
    else:
        # Unknown handler: leave the default transition rules in effect.
        allowed = None
    if allowed is not None:
        event.preventDefault().addResponse(allowed)
def schedule(event):
    """
    This is bound to the "jobs.schedule" event, and will be triggered any time
    a job is scheduled. This handler will process any job that has the
    handler field set to "worker_handler".
    """
    job = event.info
    if job['handler'] == 'worker_handler':
        # Fall back to the generic run task when the job doesn't name one.
        task = job.get('celeryTaskName', 'girder_worker.run')
        # Set the job status to queued
        Job().updateJob(job, status=JobStatus.QUEUED)
        # Send the task to celery
        asyncResult = getCeleryApp().send_task(
            task, job['args'], job['kwargs'], queue=job.get('celeryQueue'), headers={
                'jobInfoSpec': jobInfoSpec(job, job.get('token', None)),
                'apiUrl': getWorkerApiUrl()
            })
        # Record the task ID from celery.
        Job().updateJob(job, otherFields={
            'celeryTaskId': asyncResult.task_id
        })
        # Stop event propagation since we have taken care of scheduling.
        event.stopPropagation()
def cancel(event):
    """
    This is bound to the "jobs.cancel" event, and will be triggered any time
    a job is canceled. This handler will process any job that has the
    handler field set to "worker_handler" or "celery_handler".
    """
    job = event.info
    if job['handler'] in ['worker_handler', 'celery_handler']:
        # Stop event propagation and prevent default, we are using a custom state
        event.stopPropagation().preventDefault()
        celeryTaskId = job.get('celeryTaskId')
        if celeryTaskId is None:
            msg = ("Unable to cancel Celery task. Job '%s' doesn't have a Celery task id."
                   % job['_id'])
            # Fixed: ``Logger.warn`` is a deprecated alias of ``warning``
            # (removed in Python 3.13).
            logger.warning(msg)
            return
        should_revoke = False
        if job['status'] == JobStatus.INACTIVE:
            # Move inactive jobs directly to canceled state
            Job().updateJob(job, status=JobStatus.CANCELED)
            should_revoke = True
        elif job['status'] not in [CustomJobStatus.CANCELING, JobStatus.CANCELED,
                                   JobStatus.SUCCESS, JobStatus.ERROR]:
            # Give active jobs a chance to be canceled by their runner
            Job().updateJob(job, status=CustomJobStatus.CANCELING)
            should_revoke = True
        if should_revoke:
            # Send the revoke request.
            asyncResult = AsyncResult(celeryTaskId, app=getCeleryApp())
            asyncResult.revoke()
def attachParentJob(event):
    """Attach parentJob before a model is saved.

    Looks up the job whose celery task id matches this job's
    ``celeryParentTaskId`` and records its id as ``parentId``.
    """
    job = event.info
    if job.get('celeryParentTaskId'):
        celeryParentTaskId = job['celeryParentTaskId']
        parentJob = Job().findOne({'celeryTaskId': celeryParentTaskId})
        # Fixed: findOne returns None when no job matches, which previously
        # raised TypeError on the subscript below; skip linking in that case.
        if parentJob is not None:
            event.info['parentId'] = parentJob['_id']
def attachJobInfoSpec(event):
    """Attach jobInfoSpec after a model is saved."""
    job = event.info
    # Local jobs have a module key; they manage their own info spec.
    if job.get('module'):
        return
    Job().updateJob(job, otherFields={'jobInfoSpec': jobInfoSpec(job)})
| {
"content_hash": "733c83da75e6d22605573c6394950e4f",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 90,
"avg_line_length": 34.3986013986014,
"alnum_prop": 0.6594836348851393,
"repo_name": "girder/girder_worker",
"id": "49276d3dfd28fbe64b975ca3079e545c841c2ec7",
"size": "5661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "girder_worker/girder_plugin/event_handlers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "2021"
},
{
"name": "Dockerfile",
"bytes": "1322"
},
{
"name": "JavaScript",
"bytes": "8248"
},
{
"name": "Makefile",
"bytes": "1519"
},
{
"name": "Pug",
"bytes": "4115"
},
{
"name": "Python",
"bytes": "268215"
},
{
"name": "Shell",
"bytes": "8215"
},
{
"name": "Stylus",
"bytes": "1454"
}
],
"symlink_target": ""
} |
from .data import *
from .inference import *
| {
"content_hash": "449bdccc29affb9394c5e0364d404d24",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 24,
"avg_line_length": 22.5,
"alnum_prop": 0.7333333333333333,
"repo_name": "adrn/ebak",
"id": "33b77aeb49c83535728b78c09dcfc7fb16e677af",
"size": "45",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ebak/singleline/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "3935399"
},
{
"name": "Makefile",
"bytes": "417"
},
{
"name": "Python",
"bytes": "93320"
},
{
"name": "Shell",
"bytes": "1906"
},
{
"name": "TeX",
"bytes": "4561"
}
],
"symlink_target": ""
} |
import sys
import pytest
import tempfile
import numpy as np
from astropy import units as u
from astropy import convolution
from astropy.wcs import WCS
from astropy import wcs
from astropy.io import fits
# Optional dependency: tracemalloc gates the memory-usage test below.
try:
    import tracemalloc
    tracemallocOK = True
except ImportError:
    tracemallocOK = False
# The comparison of Quantities in test_memory_usage
# fail with older versions of numpy
# NOTE(review): distutils is deprecated (removed in Python 3.12) — consider
# packaging.version for the comparison.
from distutils.version import LooseVersion
NPY_VERSION_CHECK = LooseVersion(np.version.version) >= "1.13"
from radio_beam import beam, Beam
from .. import SpectralCube
from ..utils import WCSCelestialError
from .test_spectral_cube import cube_and_raw
from .test_projection import load_projection
from . import path, utilities
# Some tests are skipped on Windows (see skipif marks below).
WINDOWS = sys.platform == "win32"
def test_convolution(data_255_delta, use_dask):
    """Convolving a delta-function cube yields the deconvolved target kernel."""
    cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
    # 1" convolved with 1.5" -> 1.8027....
    target_beam = Beam(1.802775637731995*u.arcsec, 1.802775637731995*u.arcsec,
                       0*u.deg)
    conv_cube = cube.convolve_to(target_beam)
    # 5.5555e-4 deg = 2 arcsec pixel scale; the expected image is just the
    # 1.5" Gaussian kernel applied to the delta function.
    expected = convolution.Gaussian2DKernel((1.5*u.arcsec /
                                             beam.SIGMA_TO_FWHM /
                                             (5.555555555555e-4*u.deg)).decompose().value,
                                            x_size=5, y_size=5,
                                           )
    expected.normalize()
    np.testing.assert_almost_equal(expected.array,
                                   conv_cube.filled_data[0,:,:].value)
    # 2nd layer is all zeros
    assert np.all(conv_cube.filled_data[1,:,:] == 0.0)
def test_beams_convolution(data_455_delta_beams, use_dask):
    """Per-channel convolution of a varying-resolution cube matches each deconvolved kernel."""
    cube, data = cube_and_raw(data_455_delta_beams, use_dask=use_dask)
    # 1" convolved with 1.5" -> 1.8027....
    target_beam = Beam(1.802775637731995*u.arcsec, 1.802775637731995*u.arcsec,
                       0*u.deg)
    conv_cube = cube.convolve_to(target_beam)
    pixscale = wcs.utils.proj_plane_pixel_area(cube.wcs.celestial)**0.5*u.deg
    # Each channel has its own beam, so each expects a different kernel.
    for ii, bm in enumerate(cube.beams):
        expected = target_beam.deconvolve(bm).as_kernel(pixscale, x_size=5,
                                                        y_size=5)
        expected.normalize()
        np.testing.assert_almost_equal(expected.array,
                                       conv_cube.filled_data[ii,:,:].value)
def test_beams_convolution_equal(data_522_delta_beams, use_dask):
    """A channel whose beam already equals the target must pass through unchanged."""
    cube, data = cube_and_raw(data_522_delta_beams, use_dask=use_dask)
    # Only checking that the equal beam case is handled correctly.
    # Fake the beam in the first channel. Then ensure that the first channel
    # has NOT been convolved.
    target_beam = Beam(1.0 * u.arcsec, 1.0 * u.arcsec, 0.0 * u.deg)
    cube.beams.major[0] = target_beam.major
    cube.beams.minor[0] = target_beam.minor
    cube.beams.pa[0] = target_beam.pa
    conv_cube = cube.convolve_to(target_beam)
    np.testing.assert_almost_equal(cube.filled_data[0].value,
                                   conv_cube.filled_data[0].value)
@pytest.mark.parametrize('use_memmap', (True, False))
def test_reproject(use_memmap, data_adv, use_dask):
    """Reprojecting a cube to a modified WCS yields the requested shape and WCS."""
    pytest.importorskip('reproject')
    cube, data = cube_and_raw(data_adv, use_dask=use_dask)
    wcs_in = WCS(cube.header)
    wcs_out = wcs_in.deepcopy()
    # Change the celestial frame/projection but keep the spectral axis.
    wcs_out.wcs.ctype = ['GLON-SIN', 'GLAT-SIN', wcs_in.wcs.ctype[2]]
    wcs_out.wcs.crval = [134.37608, -31.939241, wcs_in.wcs.crval[2]]
    wcs_out.wcs.crpix = [2., 2., wcs_in.wcs.crpix[2]]
    header_out = cube.header
    header_out['NAXIS1'] = 4
    header_out['NAXIS2'] = 5
    header_out['NAXIS3'] = cube.shape[0]
    header_out.update(wcs_out.to_header())
    result = cube.reproject(header_out, use_memmap=use_memmap)
    assert result.shape == (cube.shape[0], 5, 4)
    # Check WCS in reprojected matches wcs_out
    assert wcs_out.wcs.compare(result.wcs.wcs)
    # And that the headers have equivalent WCS info.
    result_wcs_from_header = WCS(result.header)
    assert result_wcs_from_header.wcs.compare(wcs_out.wcs)
def test_spectral_smooth(data_522_delta, use_dask):
    """Spectral smoothing of a delta cube reproduces the 1D Gaussian kernel (both memmap paths)."""
    cube, data = cube_and_raw(data_522_delta, use_dask=use_dask)
    result = cube.spectral_smooth(kernel=convolution.Gaussian1DKernel(1.0), use_memmap=False)
    np.testing.assert_almost_equal(result[:,0,0].value,
                                   convolution.Gaussian1DKernel(1.0,
                                                                x_size=5).array,
                                   4)
    # Same computation through the memmap code path must agree.
    result = cube.spectral_smooth(kernel=convolution.Gaussian1DKernel(1.0), use_memmap=True)
    np.testing.assert_almost_equal(result[:,0,0].value,
                                   convolution.Gaussian1DKernel(1.0,
                                                                x_size=5).array,
                                   4)
def test_catch_kernel_with_units(data_522_delta, use_dask):
    """A smoothing kernel carrying a unit must be rejected with UnitsError."""
    # Passing a kernel with a unit should raise a u.UnitsError
    cube, data = cube_and_raw(data_522_delta, use_dask=use_dask)
    with pytest.raises(u.UnitsError,
                       match="The convolution kernel should be defined without a unit."):
        cube.spectral_smooth(kernel=convolution.Gaussian1DKernel(1.0 * u.one),
                             use_memmap=False)
@pytest.mark.skipif('WINDOWS')
def test_spectral_smooth_4cores(data_522_delta):
    """Parallel spectral smoothing matches the kernel; num_cores without parallel fails."""
    pytest.importorskip('joblib')
    cube, data = cube_and_raw(data_522_delta, use_dask=False)
    result = cube.spectral_smooth(kernel=convolution.Gaussian1DKernel(1.0), num_cores=4, use_memmap=True)
    np.testing.assert_almost_equal(result[:,0,0].value,
                                   convolution.Gaussian1DKernel(1.0,
                                                                x_size=5).array,
                                   4)
    # this is one way to test non-parallel mode
    result = cube.spectral_smooth(kernel=convolution.Gaussian1DKernel(1.0), num_cores=4, use_memmap=False)
    np.testing.assert_almost_equal(result[:,0,0].value,
                                   convolution.Gaussian1DKernel(1.0,
                                                                x_size=5).array,
                                   4)
    # num_cores = 4 is a contradiction with parallel=False, so we want to make
    # sure it fails
    with pytest.raises(ValueError,
                       match=("parallel execution was not requested, but "
                              "multiple cores were: these are incompatible "
                              "options.  Either specify num_cores=1 or "
                              "parallel=True")):
        result = cube.spectral_smooth(kernel=convolution.Gaussian1DKernel(1.0),
                                      num_cores=4, parallel=False)
    np.testing.assert_almost_equal(result[:,0,0].value,
                                   convolution.Gaussian1DKernel(1.0,
                                                                x_size=5).array,
                                   4)
def test_spectral_smooth_fail(data_522_delta_beams, use_dask):
    """Varying-resolution cubes must refuse spectral smoothing."""
    cube, data = cube_and_raw(data_522_delta_beams, use_dask=use_dask)
    with pytest.raises(AttributeError,
                       match=("VaryingResolutionSpectralCubes can't be "
                              "spectrally smoothed.  Convolve to a "
                              "common resolution with `convolve_to` before "
                              "attempting spectral smoothed.")):
        cube.spectral_smooth(kernel=convolution.Gaussian1DKernel(1.0))
def test_spectral_interpolate(data_522_delta, use_dask):
    """Interpolating onto channel midpoints halves the delta and leaves the WCS untouched."""
    cube, data = cube_and_raw(data_522_delta, use_dask=use_dask)
    orig_wcs = cube.wcs.deepcopy()
    # midpoint between each position
    sg = (cube.spectral_axis[1:] + cube.spectral_axis[:-1])/2.
    result = cube.spectral_interpolate(spectral_grid=sg)
    np.testing.assert_almost_equal(result[:,0,0].value,
                                   [0.0, 0.5, 0.5, 0.0])
    # The input cube's WCS must not be mutated by the operation.
    assert cube.wcs.wcs.compare(orig_wcs.wcs)
def test_spectral_interpolate_with_fillvalue(data_522_delta, use_dask):
    """Out-of-range spectral channels are filled with fill_value."""
    cube, data = cube_and_raw(data_522_delta, use_dask=use_dask)
    # Step one channel out of bounds.
    sg = ((cube.spectral_axis[0]) -
          (cube.spectral_axis[1] - cube.spectral_axis[0]) *
          np.linspace(1,4,4))
    result = cube.spectral_interpolate(spectral_grid=sg,
                                       fill_value=42)
    np.testing.assert_almost_equal(result[:,0,0].value,
                                   np.ones(4)*42)
def test_spectral_interpolate_fail(data_522_delta_beams, use_dask):
    """Varying-resolution cubes must refuse spectral interpolation."""
    cube, data = cube_and_raw(data_522_delta_beams, use_dask=use_dask)
    with pytest.raises(AttributeError,
                       match=("VaryingResolutionSpectralCubes can't be "
                              "spectrally interpolated.  Convolve to a "
                              "common resolution with `convolve_to` before "
                              "attempting spectral interpolation.")):
        cube.spectral_interpolate(5)
def test_spectral_interpolate_with_mask(data_522_delta, use_dask):
    """Masked channels become NaN after interpolation on a reversed spectral axis."""
    hdul = fits.open(data_522_delta)
    hdu = hdul[0]
    # Swap the velocity axis so indiff < 0 in spectral_interpolate
    hdu.header["CDELT3"] = - hdu.header["CDELT3"]
    cube = SpectralCube.read(hdu, use_dask=use_dask)
    mask = np.ones(cube.shape, dtype=bool)
    mask[:2] = False
    masked_cube = cube.with_mask(mask)
    orig_wcs = cube.wcs.deepcopy()
    # midpoint between each position
    sg = (cube.spectral_axis[1:] + cube.spectral_axis[:-1])/2.
    result = masked_cube.spectral_interpolate(spectral_grid=sg[::-1])
    # The output makes CDELT3 > 0 (reversed spectral axis) so the masked
    # portion are the final 2 channels.
    # Fixed: use np.nan — the np.NaN alias was removed in NumPy 2.0.
    np.testing.assert_almost_equal(result[:, 0, 0].value,
                                   [0.0, 0.5, np.nan, np.nan])
    assert cube.wcs.wcs.compare(orig_wcs.wcs)
    hdul.close()
def test_spectral_interpolate_reversed(data_522_delta, use_dask):
    """Interpolating onto a reversed spectral grid preserves that grid in the result."""
    cube, data = cube_and_raw(data_522_delta, use_dask=use_dask)
    orig_wcs = cube.wcs.deepcopy()
    # Reverse spectral axis
    sg = cube.spectral_axis[::-1]
    result = cube.spectral_interpolate(spectral_grid=sg)
    np.testing.assert_almost_equal(sg.value, result.spectral_axis.value)
def test_convolution_2D(data_55_delta):
    """Convolving a 2D delta projection reproduces the kernel and updates the beam."""
    proj, hdu = load_projection(data_55_delta)
    # 1" convolved with 1.5" -> 1.8027....
    target_beam = Beam(1.802775637731995*u.arcsec, 1.802775637731995*u.arcsec,
                       0*u.deg)
    conv_proj = proj.convolve_to(target_beam)
    expected = convolution.Gaussian2DKernel((1.5*u.arcsec /
                                             beam.SIGMA_TO_FWHM /
                                             (5.555555555555e-4*u.deg)).decompose().value,
                                            x_size=5, y_size=5,
                                           )
    expected.normalize()
    np.testing.assert_almost_equal(expected.array,
                                   conv_proj.value)
    assert conv_proj.beam == target_beam
    # Pass a kwarg to the convolution function
    conv_proj = proj.convolve_to(target_beam, nan_treatment='fill')
def test_nocelestial_convolution_2D_fail(data_255_delta, use_dask):
    """convolve_to on a projection lacking two celestial axes must raise."""
    cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
    # A moment along axis 1 collapses a spatial axis, leaving a spectral one.
    moment_map = cube.moment0(axis=1)
    test_beam = Beam(1.0 * u.arcsec)
    with pytest.raises(WCSCelestialError,
                       match="WCS does not contain two spatial axes."):
        moment_map.convolve_to(test_beam)
def test_reproject_2D(data_55):
    """Reprojecting a 2D projection keeps its beam and adopts the target WCS."""
    pytest.importorskip('reproject')
    proj, hdu = load_projection(data_55)
    wcs_in = WCS(proj.header)
    wcs_out = wcs_in.deepcopy()
    wcs_out.wcs.ctype = ['GLON-SIN', 'GLAT-SIN']
    wcs_out.wcs.crval = [134.37608, -31.939241]
    wcs_out.wcs.crpix = [2., 2.]
    header_out = proj.header
    header_out['NAXIS1'] = 4
    header_out['NAXIS2'] = 5
    header_out.update(wcs_out.to_header())
    result = proj.reproject(header_out)
    assert result.shape == (5, 4)
    assert result.beam == proj.beam
    # Check WCS in reprojected matches wcs_out
    assert wcs_out.wcs.compare(result.wcs.wcs)
    # And that the headers have equivalent WCS info.
    result_wcs_from_header = WCS(result.header)
    assert result_wcs_from_header.wcs.compare(wcs_out.wcs)
def test_nocelestial_reproject_2D_fail(data_255_delta, use_dask):
    """reproject on a projection lacking two celestial axes must raise."""
    pytest.importorskip('reproject')
    cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
    # Collapsing axis 1 removes one spatial axis, so reprojection is invalid.
    moment_map = cube.moment0(axis=1)
    with pytest.raises(WCSCelestialError,
                       match="WCS does not contain two spatial axes."):
        moment_map.reproject(cube.header)
@pytest.mark.parametrize('use_memmap', (True,False))
def test_downsample(use_memmap, data_255):
    """Downsampling by 2 along each axis averages pairs (with/without truncation)."""
    # FIXME: this test should be updated to use the use_dask fixture once
    # DaskSpectralCube.downsample_axis is fixed.
    cube, data = cube_and_raw(data_255, use_dask=False)
    dscube = cube.downsample_axis(factor=2, axis=0, use_memmap=use_memmap)
    expected = data.mean(axis=0)
    np.testing.assert_almost_equal(expected[None,:,:],
                                   dscube.filled_data[:].value)
    dscube = cube.downsample_axis(factor=2, axis=1, use_memmap=use_memmap)
    # With 5 rows and factor 2, the trailing partial bin keeps the last row.
    expected = np.array([data[:,:2,:].mean(axis=1),
                         data[:,2:4,:].mean(axis=1),
                         data[:,4:,:].mean(axis=1),  # just data[:,4,:]
                        ]).swapaxes(0,1)
    assert expected.shape == (2,3,5)
    assert dscube.shape == (2,3,5)
    np.testing.assert_almost_equal(expected,
                                   dscube.filled_data[:].value)
    # truncate=True drops the trailing partial bin instead.
    dscube = cube.downsample_axis(factor=2, axis=1, truncate=True,
                                  use_memmap=use_memmap)
    expected = np.array([data[:,:2,:].mean(axis=1),
                         data[:,2:4,:].mean(axis=1),
                        ]).swapaxes(0,1)
    np.testing.assert_almost_equal(expected,
                                   dscube.filled_data[:].value)
@pytest.mark.parametrize('use_memmap', (True, False))
def test_downsample_wcs(use_memmap, data_255):
    """Check that the celestial WCS shifts correctly after 2x2 downsampling."""
    # FIXME: this test should be updated to use the use_dask fixture once
    # DaskSpectralCube.downsample_axis is fixed.
    cube, data = cube_and_raw(data_255, use_dask=False)

    dscube = (cube
              .downsample_axis(factor=2, axis=1, use_memmap=use_memmap)
              .downsample_axis(factor=2, axis=2, use_memmap=use_memmap))

    # Pixel [0, 0] of the downsampled cube covers original pixels 0 and 1,
    # so its world coordinate maps back to pixel (0.5, 0.5) in the original.
    lon_new, lat_new = dscube.wcs.celestial.wcs_pix2world(0, 0, 0)
    old_pix = np.array(cube.wcs.celestial.wcs_world2pix(lon_new, lat_new, 0))
    np.testing.assert_almost_equal(old_pix, (0.5, 0.5))

    # Conversely, the original bottom-left pixel center (FITS convention,
    # origin=1) lands at pixel (0.75, 0.75) of the downsampled frame.
    lon_old, lat_old = cube.wcs.celestial.wcs_pix2world(1, 1, 1)
    new_pix = np.array(dscube.wcs.celestial.wcs_world2pix(lon_old, lat_old, 1))
    np.testing.assert_almost_equal(new_pix, (0.75, 0.75))
@pytest.mark.skipif('not tracemallocOK or (sys.version_info.major==3 and sys.version_info.minor<6) or not NPY_VERSION_CHECK')
def test_reproject_3D_memory():
    """Verify the memory footprint of 3D reprojection with ``tracemalloc``.

    Uses snapshot diffs to confirm that (a) reprojecting an unmasked cube
    allocates only the output-sized array, and (b) the extra filled copy
    created for a masked cube is released after reprojection finishes.
    The snapshot ordering below is load-bearing: each assertion compares
    against the immediately preceding snapshot.
    """
    pytest.importorskip('reproject')

    tracemalloc.start()

    snap1 = tracemalloc.take_snapshot()

    # create a 64 MB cube
    cube,_ = utilities.generate_gaussian_cube(shape=[200,200,200])
    sz = _.dtype.itemsize

    # check that cube is loaded into memory
    snap2 = tracemalloc.take_snapshot()
    diff = snap2.compare_to(snap1, 'lineno')
    diffvals = np.array([dd.size_diff for dd in diff])
    # at this point, the generated cube should still exist in memory
    assert diffvals.max()*u.B >= 200**3*sz*u.B

    wcs_in = cube.wcs
    wcs_out = wcs_in.deepcopy()
    wcs_out.wcs.ctype = ['GLON-SIN', 'GLAT-SIN', cube.wcs.wcs.ctype[2]]
    wcs_out.wcs.crval = [0.001, 0.001, cube.wcs.wcs.crval[2]]
    wcs_out.wcs.crpix = [2., 2., cube.wcs.wcs.crpix[2]]

    # Target header halves both spatial dimensions (100x100) but keeps the
    # full spectral axis.
    header_out = (wcs_out.to_header())
    header_out['NAXIS'] = 3
    header_out['NAXIS1'] = int(cube.shape[2]/2)
    header_out['NAXIS2'] = int(cube.shape[1]/2)
    header_out['NAXIS3'] = cube.shape[0]

    # First the unfilled reprojection test: new memory is allocated for
    # `result`, but nowhere else
    result = cube.reproject(header_out, filled=False)
    snap3 = tracemalloc.take_snapshot()
    diff = snap3.compare_to(snap2, 'lineno')
    diffvals = np.array([dd.size_diff for dd in diff])
    # result should have the same size as the input data, except smaller in two dims
    # make sure that's all that's allocated
    assert diffvals.max()*u.B >= 200*100**2*sz*u.B
    assert diffvals.max()*u.B < 200*110**2*sz*u.B

    # without masking the cube, nothing should change
    result = cube.reproject(header_out, filled=True)
    snap4 = tracemalloc.take_snapshot()
    diff = snap4.compare_to(snap3, 'lineno')
    diffvals = np.array([dd.size_diff for dd in diff])
    assert diffvals.max()*u.B <= 1*u.MB
    assert result.wcs.wcs.crval[0] == 0.001
    assert result.wcs.wcs.crpix[0] == 2.

    # masking the cube will force the fill to create a new in-memory copy
    mcube = cube.with_mask(cube > 0.1*cube.unit)
    # `_is_huge` would trigger a use_memmap
    assert not mcube._is_huge
    assert mcube.mask.any()

    # take a new snapshot because we're not testing the mask creation
    snap5 = tracemalloc.take_snapshot()
    tracemalloc.stop()
    tracemalloc.start() # stop/start so we can check peak mem use from here
    current_b4, peak_b4 = tracemalloc.get_traced_memory()
    result = mcube.reproject(header_out, filled=True)
    current_aftr, peak_aftr = tracemalloc.get_traced_memory()
    snap6 = tracemalloc.take_snapshot()
    diff = snap6.compare_to(snap5, 'lineno')
    diffvals = np.array([dd.size_diff for dd in diff])
    # a duplicate of the cube should have been created by filling masked vals
    # (this should be near-exact since 'result' should occupy exactly the
    # same amount of memory)
    assert diffvals.max()*u.B <= 1*u.MB #>= 200**3*sz*u.B
    # the peak memory usage *during* reprojection will have that duplicate,
    # but the memory gets cleaned up afterward
    assert (peak_aftr-peak_b4)*u.B >= (200**3*sz*u.B + 200*100**2*sz*u.B)
    assert result.wcs.wcs.crval[0] == 0.001
    assert result.wcs.wcs.crpix[0] == 2.
| {
"content_hash": "0185272fcda211ac34528d003c1dcf03",
"timestamp": "",
"source": "github",
"line_count": 522,
"max_line_length": 125,
"avg_line_length": 35.62068965517241,
"alnum_prop": 0.6075615790039798,
"repo_name": "low-sky/spectral-cube",
"id": "d832cd8c20e398c7ae3a40a055b62145a3f405d2",
"size": "18594",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spectral_cube/tests/test_regrid.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "128"
},
{
"name": "Python",
"bytes": "755139"
}
],
"symlink_target": ""
} |
"""Image Processing SciKit (Toolbox for SciPy)"""
import os.path as _osp
data_dir = _osp.abspath(_osp.join(_osp.dirname(__file__), 'data'))
from version import version as __version__
def _setup_test():
import functools
basedir = _osp.dirname(_osp.join(__file__, '../'))
args = ['', '--exe', '-w', '%s' % basedir]
try:
import nose as _nose
except ImportError:
print("Could not load nose. Unit tests not available.")
return None
else:
return functools.partial(_nose.run, 'scikits.image', argv=args)
# Expose a package-level ``test`` callable when nose is importable;
# otherwise delete the name so ``scikits.image.test`` does not exist.
test = _setup_test()
if test is None:
    del test
def get_log(name):
    """Return a console logger.

    Output may be sent to the logger using the `debug`, `info`, `warning`,
    `error` and `critical` methods.

    Parameters
    ----------
    name : str
        Name of the log.

    References
    ----------
    .. [1] Logging facility for Python,
           http://docs.python.org/library/logging.html
    """
    import logging
    import sys

    # Route root logging to stdout at DEBUG level; basicConfig is a no-op
    # if the root logger already has handlers.
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    logger = logging.getLogger(name)
    return logger
| {
"content_hash": "70e9438a610c77d72e689bf231ab039f",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 74,
"avg_line_length": 23.53191489361702,
"alnum_prop": 0.604882459312839,
"repo_name": "GaelVaroquaux/scikits.image",
"id": "35108d27bd1b73b2a649a1974e8d016cdbf0262b",
"size": "1106",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scikits/image/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "558584"
},
{
"name": "Shell",
"bytes": "3133"
}
],
"symlink_target": ""
} |
from astropy.coordinates import Angle, Latitude, Longitude
from astropy.io.misc.asdf.tags.unit.quantity import QuantityType
__all__ = ['AngleType', 'LatitudeType', 'LongitudeType']
class AngleType(QuantityType):
    """ASDF tag type that (de)serializes `astropy.coordinates.Angle`.

    Serialization is inherited from ``QuantityType``; deserialization
    re-wraps the resulting quantity as an ``Angle``. The class attributes
    below are the ASDF tag registry metadata and must not be changed
    without versioning the schema.
    """
    name = "coordinates/angle"
    types = [Angle]
    requires = ['astropy']
    version = "1.0.0"
    organization = 'astropy.org'
    standard = 'astropy'

    @classmethod
    def from_tree(cls, node, ctx):
        # Deserialize via the quantity machinery, then convert to Angle.
        return Angle(super().from_tree(node, ctx))
class LatitudeType(AngleType):
    """ASDF tag type for `astropy.coordinates.Latitude` values."""
    name = "coordinates/latitude"
    types = [Latitude]

    @classmethod
    def from_tree(cls, node, ctx):
        # Deserialize as an Angle, then narrow to Latitude (range-checked).
        return Latitude(super().from_tree(node, ctx))
class LongitudeType(AngleType):
    """ASDF tag type for `astropy.coordinates.Longitude` values.

    Unlike plain angles, a Longitude carries a ``wrap_angle``, which is
    stored alongside the value in the tree and restored on read.
    """
    name = "coordinates/longitude"
    types = [Longitude]

    @classmethod
    def from_tree(cls, node, ctx):
        wrap_angle = node['wrap_angle']
        return Longitude(super().from_tree(node, ctx), wrap_angle=wrap_angle)

    @classmethod
    def to_tree(cls, longitude, ctx):
        tree = super().to_tree(longitude, ctx)
        # Persist the wrap angle so round-tripping preserves it.
        tree['wrap_angle'] = longitude.wrap_angle
        return tree
| {
"content_hash": "985a18880a8cf197b9172e27d7921fd5",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 77,
"avg_line_length": 24.755555555555556,
"alnum_prop": 0.651705565529623,
"repo_name": "StuartLittlefair/astropy",
"id": "0051a26b36bb748caaad690db4e27cfb870bd52a",
"size": "1178",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "astropy/io/misc/asdf/tags/coordinates/angle.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11034753"
},
{
"name": "C++",
"bytes": "47001"
},
{
"name": "Cython",
"bytes": "78631"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Lex",
"bytes": "183333"
},
{
"name": "M4",
"bytes": "18757"
},
{
"name": "Makefile",
"bytes": "52457"
},
{
"name": "Python",
"bytes": "12224600"
},
{
"name": "Shell",
"bytes": "17024"
},
{
"name": "TeX",
"bytes": "853"
}
],
"symlink_target": ""
} |
import shipper
from livesettings import config_choice_values
def get_methods():
    """Instantiate a Shipper for every SingPost choice enabled in /settings/."""
    enabled_choices = config_choice_values('singpost',
                                           'SINGPOST_SHIPPING_CHOICES')
    return [shipper.Shipper(service_type=choice)
            for choice in enabled_choices]
| {
"content_hash": "304ba3b6909e38f964a1beefad012980",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 70,
"avg_line_length": 28.454545454545453,
"alnum_prop": 0.6964856230031949,
"repo_name": "rctay/satchmo-shipping-singpost",
"id": "d345f7fe2ed5fcfd31c537b291fd049f74226c10",
"size": "313",
"binary": false,
"copies": "1",
"ref": "refs/heads/v0.9-master",
"path": "__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "32183"
}
],
"symlink_target": ""
} |
"""Tests for tensorflow_datasets.core.features.image_feature."""
import numpy as np
import tensorflow as tf
from tensorflow_datasets import testing
from tensorflow_datasets.core import features as features_lib
class LabeledImageFeatureTest(testing.FeatureExpectationsTestCase):
    """Expectation tests for the `LabeledImage` feature connector."""

    def test_images(self):
        class_names = ['background', 'car', 'truck']
        image_shape = (28, 28, 1)
        rng = np.random.default_rng()
        img = rng.integers(256, size=image_shape, dtype=np.uint8)
        self.assertFeature(
            feature=features_lib.LabeledImage(
                labels=class_names,
                shape=image_shape,
            ),
            shape=image_shape,
            dtype=tf.uint8,
            tests=[
                # A plain numpy array should round-trip unchanged.
                testing.FeatureExpectationItem(value=img, expected=img),
            ],
            test_attributes=dict(
                num_classes=3,
                names=class_names,
                _use_colormap=True,
            ),
        )
| {
"content_hash": "3fd18376f121635c706c85e4445b34b0",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 67,
"avg_line_length": 27.647058823529413,
"alnum_prop": 0.5787234042553191,
"repo_name": "tensorflow/datasets",
"id": "bd54bcce45df18adb00276b74be1165d5ed5d824",
"size": "1552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow_datasets/core/features/labeled_image_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "728"
},
{
"name": "JavaScript",
"bytes": "13369"
},
{
"name": "NewLisp",
"bytes": "13940"
},
{
"name": "Perl",
"bytes": "520"
},
{
"name": "Python",
"bytes": "5398856"
},
{
"name": "Roff",
"bytes": "22095"
},
{
"name": "Ruby",
"bytes": "25669"
},
{
"name": "Shell",
"bytes": "3895"
},
{
"name": "Smalltalk",
"bytes": "20604"
},
{
"name": "TeX",
"bytes": "759"
}
],
"symlink_target": ""
} |
from nose.tools import eq_, ok_
from path import path
from pyavrutils import support, arduino
from pyavrutils.arduino import Arduino, ArduinoCompileError
import logging
# Sketchbook root: two directories above this test module.
root = path(__file__).parent.parent
# All example sketches discovered under the sketchbook.
examples = support.find_examples(root)
# (sketch filename, mcu) pairs known to fail compilation; these
# combinations are skipped by the generator loop below.
fails = [
    ('PWM.pde', 'atmega8'),
]
# NOTE(review): earlier variant of check_build kept for reference; it
# asserted that known-failing combinations raise ArduinoCompileError.
# def check_build(ex, hwpack, board):
#    cc = Arduino(hwpack=hwpack, board=board)
#    cc.extra_lib = root
#    print cc.hwpack, cc.board, ex
#    print (str(path(ex).name), cc.mcu_compiler())
#    if (str(path(ex).name), cc.mcu_compiler()) in fails:
#        class Dummy(TestCase):
#            def runTest(self):
#                pass
#        Dummy().assertRaises(ArduinoCompileError, cc.build, cc, ex)
#    else:
#        cc.build(ex)
#        assert cc.size().ok
def check_build(ex, hwpack, board):
    """Compile example sketch ``ex`` for ``hwpack``/``board``; assert size is OK.

    Python 2 module (uses the ``print`` statement).
    """
    cc = Arduino(hwpack=hwpack, board=board)
#    cc.extra_lib = root
    print cc.hwpack, cc.board, ex
    cc.build(ex)
    # size().ok is falsy when the compiled sketch exceeds the board limits
    assert cc.size().ok
def generate(func, params, labels=None):
if not labels:
labels = params
if not hasattr(func, '_index'):
func._index = 0
func._index += 1
cmd = '''def test_{func._index}_{labels}(): {func.__name__}({params})'''.format(func=func,
params=','.join(
['"%s"' % x for x in params]),
labels='_'.join(labels))
logging.debug('cmd:' + cmd)
return cmd
# def test_build():
#    for ex in examples:
#        for cc in arduino.targets():
#            cc.extra_lib = root
#            if cc.hwpack=='arduino':
#                yield check_build, ex, cc
# Generate one module-level test function per (example, board) pair so
# that nose collects them individually; known failures are skipped.
# Python 2 ``exec`` statement: each generated def lands in module scope.
for ex in examples:
    for cc in arduino.targets():
        if cc.hwpack == 'arduino':
            if (str(path(ex).name), cc.mcu_compiler()) not in fails:
                exec generate(check_build,
                              [ex, cc.hwpack, cc.board],
                              [ex.namebase, cc.hwpack, cc.board])
| {
"content_hash": "863acb3d9cd80efcf4b44b5e1e5e6b3b",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 114,
"avg_line_length": 34.32786885245902,
"alnum_prop": 0.5095510983763133,
"repo_name": "ponty/arduino-sketchbook",
"id": "24066367767a119621183567db229b27ff888dee",
"size": "2094",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libacoll/xtests/test_examples.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "9918"
},
{
"name": "C",
"bytes": "31590713"
},
{
"name": "C++",
"bytes": "6888240"
},
{
"name": "CMake",
"bytes": "708"
},
{
"name": "CSS",
"bytes": "43245"
},
{
"name": "Elixir",
"bytes": "391"
},
{
"name": "HTML",
"bytes": "1674634"
},
{
"name": "JavaScript",
"bytes": "22190"
},
{
"name": "Makefile",
"bytes": "7537"
},
{
"name": "Objective-C",
"bytes": "29132"
},
{
"name": "Perl",
"bytes": "2517"
},
{
"name": "Processing",
"bytes": "110533"
},
{
"name": "Python",
"bytes": "38030"
},
{
"name": "Shell",
"bytes": "1780"
},
{
"name": "XSLT",
"bytes": "2042"
}
],
"symlink_target": ""
} |
"""This module generates a CNF grammar from a PDA object"""
from sys import argv
from alphabet import createalphabet
from cfggenerator import CFGGenerator, CNFGenerator
from cfgpda import CfgPDA
from pda import PDAState
class IntersectionHandling():
    """
    After the intersection (product operation) there would be final
    transitions to non accepted states (according to the DFA
    accepted states). These transitions should be removed before
    generating the CFG
    """

    def __init__(self):
        pass

    def get(self, statediag, dfaaccepted):
        """
        # - Remove all the POP (type - 2) transitions to state 0,non DFA accepted
        #   for symbol @closing
        # - Generate the accepted transitions
        - Replace DFA accepted States with a push - pop symbol and two extra states
        Args:
            statediag (list): The states of the PDA
            dfaaccepted (list):The list of DFA accepted states
        Returns:
            list: A cleaned, smaller list of DFA states
        """
        newstatediag = {}
        # Wrapping state: a PUSH (type 1) of the synthetic '@wrapping'
        # symbol, inserted as state 0 of the new diagram.
        newstate = PDAState()
        newstate.id = 'AI,I' # BECAREFUL WHEN SIMPLIFYING...
        newstate.type = 1
        newstate.sym = '@wrapping'
        transitions = {}
        transitions[(0, 0)] = [0]
        newstate.trans = transitions
        i = 0
        newstatediag[i] = newstate
        # print 'accepted:'
        # print dfaaccepted
        for stateid in statediag:
            state = statediag[stateid]
            # print state.id
            if state.type == 2:
                # POP states whose DFA component (state.id[1] — product
                # state IDs are (pda, dfa) pairs) is DFA-accepted gain a
                # transition into the wrapping state.
                for state2id in dfaaccepted:
                    # print state.id[1]
                    if state.id[1] == state2id:
                        # print 'adding...'
                        state.trans['AI,I'] = ['@wrapping']
                        # print state.trans
                        break
            i = i + 1
            newstatediag[i] = state
        return newstatediag
class ReducePDA():
    """Use BFS to search for unreachable states and remove them"""

    def __init__(self):
        pass

    def bfs(self, graph, start):
        """
        Performs BFS operation for eliminating useless loop transitions
        Args:
            graph (PDA): the PDA object
            start (PDA state): The PDA initial state
        Returns:
            list: A cleaned, smaller list of DFA states
        """
        newstatediag = {}
        # maintain a queue of paths
        queue = []
        visited = []
        # push the first path into the queue
        queue.append(start)
        while queue:
            # get the first path from the queue
            state = queue.pop(0)
            # get the last node from the path
            # visited
            visited.append(state.id)
            # enumerate all adjacent nodes, construct a new path and push it
            # into the queue
            # NOTE(review): a state can be enqueued several times before it
            # is marked visited — harmless for correctness (membership test
            # below) but does redundant work on dense graphs.
            for key in state.trans:
                if state.trans[key] != []:
                    if key not in visited:
                        for nextstate in graph:
                            if graph[nextstate].id == key:
                                queue.append(graph[nextstate])
                                break
        # Keep only reachable states, renumbered with sequential keys.
        i = 0
        for state in graph:
            if graph[state].id in visited:
                newstatediag[i] = graph[state]
                i = i + 1
        return newstatediag

    def get(self, statediag):
        """
        Args:
            statediag (list): The states of the PDA
        Returns:
            list: A reduced list of states using BFS
        """
        if len(statediag) < 1:
            print 'PDA is empty and can not be reduced'
            return statediag
        # BFS from the first state; everything unreachable is dropped.
        newstatediag = self.bfs(statediag, statediag[0])
        return newstatediag
class SimplifyStateIDs():
    """
    Transform state IDs to a more simple form (sequencial numbers).
    Should be used after product operation (intersection)
    """

    def __init__(self):
        pass

    def get(self, statediag, accepted=None):
        """
        Replaces complex state IDs as generated from the product operation,
        into simple sequencial numbers. A dictionaty is maintained in order
        to map the existed IDs.
        Args:
            statediag (list): The states of the PDA
            accepted (list): the list of DFA accepted states
        Returns:
            list:
        """
        # The numbering below depends on traversal order: IDs are assigned
        # first-come-first-served while walking states and their transitions.
        count = 0
        statesmap = {}
        newstatediag = {}
        for state in statediag:
            # Simplify state IDs
            if statediag[state].id not in statesmap:
                statesmap[statediag[state].id] = count
                mapped = count
                count = count + 1
            else:
                mapped = statesmap[statediag[state].id]
            # Simplify transitions IDs
            transitions = {}
            for nextstate in statediag[state].trans:
                if nextstate not in statesmap:
                    statesmap[nextstate] = count
                    transmapped = count
                    count = count + 1
                else:
                    transmapped = statesmap[nextstate]
                transitions[transmapped] = statediag[state].trans[nextstate]
            newstate = PDAState()
            newstate.id = mapped
            newstate.type = statediag[state].type
            newstate.sym = statediag[state].sym
            newstate.trans = transitions
            newstatediag[mapped] = newstate
        newaccepted = None
        if accepted is not None:
            newaccepted = []
            for accepted_state in accepted :
                # Product-state IDs are (pda, dfa) pairs; (0, x) selects
                # states whose PDA component is the initial state 0.
                if (0, accepted_state) in statesmap:
                    newaccepted.append(statesmap[(0, accepted_state)])
        return newstatediag, count, newaccepted
class ReadReplace():
    """
    Removes all READ (type - 3) states and replaces them with PUSH (type - 1) and POP (type - 2).
    Should be used before PDA to CFG operation.
    """
    # Class-level defaults; all are re-bound per-instance in __init__.
    statediag = []
    quickresponse = {}
    quickresponse_types = {}
    toadd = []
    biggestid = 0

    def __init__(self, statediag=[], thebiggestid=None):
        """
        Find the biggest State ID
        Args:
            statediag (list): The states of the PDA
            thebiggestid (int): The binggest state identifier
        Returns:
            None
        """
        # NOTE(review): mutable default argument — ``replace_read`` mutates
        # ``self.statediag``, so calling with no argument would mutate the
        # shared default list. Callers in this file always pass statediag.
        self.statediag = []
        self.quickresponse = {}
        self.quickresponse_types = {}
        self.toadd = []
        self.biggestid = 0
        if thebiggestid is None:
            for state in statediag:
                if statediag[state].id > self.biggestid:
                    self.biggestid = statediag[state].id
        else:
            self.biggestid = thebiggestid
        self.statediag = statediag

    def nextstate(self):
        """
        Always return the biggest state ID + 1
        """
        self.biggestid = self.biggestid + 1
        return self.biggestid

    def _generate_state(self, trans):
        """
        Creates a new POP state (type - 2) with the same transitions.
        The POPed symbol is the unique number of the state.
        Args:
            trans (dict): Transition dictionary
        Returns:
            Int: The state identifier
        """
        state = PDAState()
        state.id = self.nextstate()
        state.type = 2
        # The state's own ID doubles as the stack symbol it pops.
        state.sym = state.id
        state.trans = trans.copy()
        self.toadd.append(state)
        return state.id

    def replace_read(self):
        """
        Replaces all READ (type - 3) states to a PUSH (type - 1) and a POP (type - 2).
        The actual state is replaced with the PUSH, and a new POP is created.
        """
        for statenum in self.statediag:
            state = self.statediag[statenum]
            if state.type == 3: # READ state
                state.type = 1
                # New POP state inherits the READ state's transitions; the
                # PUSH now pushes that POP state's ID and jumps to it.
                destination_and_symbol = self._generate_state(state.trans)
                state.sym = destination_and_symbol
                state.trans = {}
                state.trans[destination_and_symbol] = [0]
        # Append the newly created POP states after the existing ones.
        statenumber_identifier = len(self.statediag) + 1
        for state in self.toadd:
            self.statediag[statenumber_identifier] = state
            statenumber_identifier = statenumber_identifier + 1
        return self.statediag
class PdaCnf():
    """This class manages PDA to CNF generation"""
    # Class-level defaults; re-bound per-instance in __init__ below.
    rules = []
    statediag = []
    accepted = []

    def insert_start_to_accepting(self):
        """
        Insert the start rule S -> A0,0
        or alphabet -> AI,I (IF intersected, the DFA Accepted states
        will be eliminated by adding a wrapping state with push pop symbol)
        """
        self.rules.append('S: A0,0')

    def insert_self_to_empty_and_insert_all_intemediate(self, optimized):
        """
        For each state qi of the PDA, we add the rule Aii -> e
        For each triplet of states qi, qj and qk, we add the rule Aij -> Aik Akj.
        Args:
            optimized (bool): Enable or Disable optimization - Do not produce O(n^3)
        """
        for state_a in self.statediag:
            self.rules.append('A' +repr(state_a.id) +',' + repr(state_a.id) + ': @empty_set')
            # If CFG is not requested, avoid the following O(n^3) rule.
            # It can be solved and a string can be generated faster with BFS of DFS
            if optimized == 0:
                for state_b in self.statediag:
                    if state_b.id != state_a.id:
                        for state_c in self.statediag:
                            if state_c.id != state_a.id \
                                    and state_b.id != state_c.id:
                                self.rules.append('A' + repr(state_a.id)
                                                  + ',' + repr(state_c.id)
                                                  + ': A' + repr(state_a.id)
                                                  + ',' + repr(state_b.id)
                                                  + ' A' + repr(state_b.id)
                                                  + ',' + repr(state_c.id)
                                                  + '')

    def insert_symbol_pushpop(self):
        """
        For each stack symbol t E G, we look for a pair of states, qi and qj,
        such that the PDA in state qi can read some input a E S and push t
        on the stack and in state state qj can read some input b E S and pop t
        off the stack. In that case, we add the rule Aik -> a Alj b
        where (ql,t) E d(qi,a,e) and (qk,e) E d(qj,b,t).
        """
        for state_a in self.statediag:
            if state_a.type == 1:
                found = 0
                # Pair each PUSH state with every POP state for the same
                # stack symbol and emit the matching grammar rules.
                for state_b in self.statediag:
                    if state_b.type == 2 and state_b.sym == state_a.sym:
                        found = 1
                        for j in state_a.trans:
                            if state_a.trans[j] == [0]:
                                read_a = ''
                            else:
                                # ' ' encodes epsilon; rendered as '&'.
                                new = []
                                for selected_transition in state_a.trans[j]:
                                    if selected_transition == ' ':
                                        new.append('&')
                                    else:
                                        new.append(selected_transition)
                                read_a = " | ".join(new)
                            for i in state_b.trans:
                                if state_b.trans[i] == [0]:
                                    read_b = ''
                                else:
                                    new = []
                                    for selected_transition in state_b.trans[i]:
                                        if selected_transition == ' ':
                                            new.append('&')
                                        else:
                                            new.append(selected_transition)
                                    read_b = " | ".join(new)
                                self.rules.append(
                                    'A' + repr(state_a.id)
                                    + ',' + repr(i)
                                    + ':' + read_a
                                    + ' A' + repr(j)
                                    + ',' + repr(state_b.id)
                                    + ' ' + read_b)
                if found == 0:
                    # A special case is required for State 2, where the POPed symbols
                    # are part of the transitions array and not defined for "sym" variable.
                    for state_b in self.statediag:
                        if state_b.type == 2 and state_b.sym == 0:
                            for i in state_b.trans:
                                if state_a.sym in state_b.trans[i]:
                                    for j in state_a.trans:
                                        if state_a.trans[j] == [0]:
                                            read_a = ''
                                        else:
                                            read_a = " | ".join(
                                                state_a.trans[j])
                                        self.rules.append(
                                            'A' + repr(state_a.id)
                                            + ',' + repr(i)
                                            + ':' + read_a
                                            + ' A' + repr(j)
                                            + ',' + repr(state_b.id))
                                        # print
                                        # 'A'+`state_a.id`+','+`i`+':'+read_a+'
                                        # A'+`j`+','+`state_b.id`
                                    found = 1
                if found == 0:
                    print "ERROR: symbol " + repr(state_a.sym) \
                          + ". It was not found anywhere in the graph."

    def get_rules(self, optimized):
        """
        Args:
            optimized (bool): Enable or Disable optimization - Do not produce O(n^3)
        Return:
            list: The CFG rules
        """
        self.insert_start_to_accepting()
        # If CFG is not requested, avoid the following O(n^3) rule.
        # It can be solved and a string can be generated faster with BFS of DFS
        if optimized == 0:
            self.insert_self_to_empty_and_insert_all_intemediate(optimized)
        self.insert_symbol_pushpop()
        return self.rules

    def __init__(self, states, dfaaccepted=[]):
        # NOTE(review): mutable default argument for ``dfaaccepted`` — the
        # shared default list is only read here, so this is benign, but a
        # ``None`` sentinel would be safer.
        self.rules = []
        self.statediag = []
        self.accepted = []
        self.accepted = dfaaccepted
        for key in states:
            self.statediag.append(states[key])
def _read_file(fname):
    """
    Args:
        fname (str): Name of the grammar file to be parsed
    Return:
        list: The grammar rules
    """
    with open(fname) as input_file:
        return [line.strip('\n') for line in input_file.readlines()]
def main():
    """
    Function for PDA to CNF Operation
    :type argv: list
    :param argv: Parameters

    CLI pipeline: parse grammar -> simplify state IDs -> eliminate READ
    states -> BFS-reduce -> emit CFG rules (and optionally a string).
    Python 2 module (print statements).
    """
    if len(argv) < 3:
        print 'Usage for getting CFG: %s CFG_fileA CFG ' % argv[0]
        print 'Usage for getting STR: %s CFG_fileA STR ' \
              'Optimize[0 or 1] splitstring[0 or 1] ' % argv[0]
        print ''
        print 'For example: python pdacnf.py grammar.y STR 1 0'
        print '             python pdacnf.py grammar.y STR 1 1'
        print '             python pdacnf.py grammar.y CFG'
        return
    alphabet = createalphabet()
    mode = argv[2]
    optimized = 0
    splitstring = 0
    # STR mode takes two extra flags: optimization and string splitting.
    if mode == 'STR':
        optimized = int(argv[3])
        splitstring = int(argv[4])
    cfgtopda = CfgPDA(alphabet)
    print '* Parsing Grammar:',
    mma = cfgtopda.yyparse(argv[1])
    print 'OK'
    print ' - Total PDA states are ' + repr(len(mma.s))

    print '* Simplify State IDs:',
    simple_a = SimplifyStateIDs()
    mma.s, biggestid, newaccepted = simple_a.get(mma.s)
    if newaccepted:
        print 'OK'
    else:
        print 'OK'

    print '* Eliminate READ states:',
    replace = ReadReplace(mma.s, biggestid)
    mma.s = replace.replace_read()
    print 'OK'
    print ' - Total PDA states now are ' + repr(len(mma.s))
    # nextstate() increments before returning, hence the -1.
    maxstate = replace.nextstate() - 1

    print '* Reduce PDA:',
    simple_b = ReducePDA()
    mma.s = simple_b.get(mma.s)
    print 'OK'
    print ' - Total PDA states now are ' + repr(len(mma.s))

    print '* PDA to CFG transformation:',
    cnfgenerator = PdaCnf(mma.s)
    grammar = cnfgenerator.get_rules(optimized)
    print 'OK'
    print ' - Total CFG rules generated: ' + repr(len(grammar))
    if mode == 'STR':
        gen = CFGGenerator(CNFGenerator(grammar),
                           optimized=optimized,
                           splitstring=splitstring,
                           maxstate=maxstate)
        print gen.generate()
    else:
        print grammar
# For example, for given cfg:
#
# S: main
# main: 1 mainnew 1
# mainnew: u
# mainnew: main
#
# the generated grammar (optimized) would be:
#
# ['S: A0,0', 'A0,0: A1,2', 'A1,3: A2,2', 'A3,9: A11,2',
# 'A4,8: A12,2', 'A5,2: A18,18 u', 'A6,7: A13,2',
# 'A7,10: A14,2', 'A8,10: A15,2', 'A9,10: A16,2',
# 'A10,2: A19,19 1', 'A11,10: A2,2', 'A12,10: A2,2',
# 'A13,10: A2,2', 'A14,5: A2,2', 'A14,6: A2,2',
# 'A15,5: A2,2', 'A15,6: A2,2', 'A16,5: A2,2', 'A16,6: A2,2']
#
# and can be solved as following:
#
# S: A0,0
# S: A1,2
# S: (A1,3 A3,2)
# S: A2,2 (A3,9 A9,2)
# S: A11,2 (A9,10 A10,2)
# S: (A11,10 A10,2) (A16,2 A19,19 1)
# S: (A2,2 A19,19 1) ((A16,5 A5,2) 1)
# S: 1 (A2,2 (A18,18 u)) 1
# S: 1u1
# Script entry point: run the PDA-to-CNF CLI when executed directly.
if __name__ == '__main__':
    main()
| {
"content_hash": "15cae968b92861554bdf26c3f2d76d61",
"timestamp": "",
"source": "github",
"line_count": 510,
"max_line_length": 97,
"avg_line_length": 35.32941176470588,
"alnum_prop": 0.4758019758019758,
"repo_name": "GeorgeArgyros/symautomata",
"id": "eb5c7d3db9a4a134b6134585015c46f24c08fac1",
"size": "18018",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "symautomata/pdacnf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "57"
},
{
"name": "Python",
"bytes": "224672"
}
],
"symlink_target": ""
} |
"""This module contains GCI views helper modules.""" | {
"content_hash": "430928744fff43598c70f49a4adb9ade",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 52,
"avg_line_length": 52,
"alnum_prop": 0.75,
"repo_name": "SRabbelier/Melange",
"id": "f6ff9f052cbac943d036cac2e7311c5063863217",
"size": "639",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/soc/modules/gci/views/helper/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "400472"
},
{
"name": "C++",
"bytes": "20"
},
{
"name": "Java",
"bytes": "1496"
},
{
"name": "JavaScript",
"bytes": "1623582"
},
{
"name": "PHP",
"bytes": "1032"
},
{
"name": "Perl",
"bytes": "177565"
},
{
"name": "Python",
"bytes": "15317793"
},
{
"name": "Ruby",
"bytes": "59"
},
{
"name": "Shell",
"bytes": "15303"
}
],
"symlink_target": ""
} |
# Copyright (C) 2009, Hyves (Startphone Ltd.)
#
# This module is part of the Concurrence Framework and is released under
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
import sys
import os
import _mysql
PROXY_STATE = _mysql.PROXY_STATE
PACKET_READ_RESULT = _mysql.PACKET_READ_RESULT
SERVER_STATES = _mysql.SERVER_STATES
CLIENT_STATES = _mysql.CLIENT_STATES
READ_RESULT_STATES = _mysql.READ_RESULT_STATES
AUTH_RESULT_STATES = _mysql.AUTH_RESULT_STATES
COMMAND = _mysql.COMMAND
PacketReader = _mysql.PacketReader
PacketReadError = _mysql.PacketReadError
ProxyProtocol = _mysql.ProxyProtocol
from concurrence.io.buffered import BufferedWriter, BufferedReader
from concurrence.timer import Timeout
from concurrence.io import IOStream
class COMMAND:
    """MySQL client command bytes used when writing command packets.

    NOTE(review): this class shadows the earlier module-level binding
    ``COMMAND = _mysql.COMMAND`` — confirm which one importers should see.
    """
    QUIT = 0x01
    INITDB = 0x02
    QUERY = 0x03
    LIST = 0x04
    PING = 0x0e
class CAPS(object):
    # Client/server capability flag bits exchanged during the MySQL
    # handshake; each constant is a single bit of the capabilities mask.
    LONG_PASSWORD = 1 # new more secure passwords
    FOUND_ROWS = 2 #Found instead of affected rows
    LONG_FLAG = 4 #Get all column flags */
    CONNECT_WITH_DB = 8 # One can specify db on connect */
    NO_SCHEMA = 16 # /* Don't allow database.table.column */
    COMPRESS = 32 # Can use compression protocol */
    ODBC = 64 # Odbc client */
    LOCAL_FILES = 128 # Can use LOAD DATA LOCAL */
    IGNORE_SPACE= 256 # Ignore spaces before '(' */
    PROTOCOL_41 = 512 # New 4.1 protocol */
    INTERACTIVE = 1024 # This is an interactive client */
    SSL = 2048 #Switch to SSL after handshake */
    IGNORE_SIGPIPE = 4096 # IGNORE sigpipes */
    TRANSACTIONS = 8192 # Client knows about transactions */
    RESERVED = 16384 # Old flag for 4.1 protocol */
    SECURE_CONNECTION = 32768 # New 4.1 authentication */
    MULTI_STATEMENTS= 65536 # Enable/disable multi-stmt support */
    MULTI_RESULTS = 131072 # Enable/disable multi-results */

    # Bit-value -> canonical MySQL flag name, used only by dbg().
    __ALL__ = {LONG_PASSWORD: 'CLIENT_LONG_PASSWORD',
               FOUND_ROWS: 'CLIENT_FOUND_ROWS',
               LONG_FLAG: 'CLIENT_LONG_FLAG',
               CONNECT_WITH_DB: 'CLIENT_CONNECT_WITH_DB',
               NO_SCHEMA: 'CLIENT_NO_SCHEMA',
               COMPRESS: 'CLIENT_COMPRESS',
               ODBC: 'CLIENT_ODBC',
               LOCAL_FILES: 'CLIENT_LOCAL_FILES',
               IGNORE_SPACE: 'CLIENT_IGNORE_SPACE',
               PROTOCOL_41: 'CLIENT_PROTOCOL_41',
               INTERACTIVE: 'CLIENT_INTERACTIVE',
               SSL: 'CLIENT_SSL',
               IGNORE_SIGPIPE: 'CLIENT_IGNORE_SIGPIPE',
               TRANSACTIONS: 'CLIENT_TRANSACTIONS',
               RESERVED: 'CLIENT_RESERVED',
               SECURE_CONNECTION: 'CLIENT_SECURE_CONNECTION',
               MULTI_STATEMENTS: 'CLIENT_MULTI_STATEMENTS',
               MULTI_RESULTS: 'CLIENT_MULTI_RESULTS'}

    @classmethod
    def dbg(cls, caps):
        # Debug helper: print the name of every flag set in ``caps``
        # (Python 2 print statement).
        for value, name in cls.__ALL__.items():
            if caps & value:
                print name
def create_scramble_buff():
    """Return a 20-byte random string used as the handshake scramble buffer
    (see ``BufferedPacketWriter.write_greeting``).

    NOTE(review): ``random`` is not cryptographically secure; for an auth
    nonce ``os.urandom(20)`` would be preferable — confirm protocol impact.
    """
    import random
    return ''.join([chr(random.randint(0, 255)) for _ in xrange(20)])
class BufferedPacketWriter(BufferedWriter):
    """Writes MySQL wire-protocol packets into a buffer.

    Packets are built with ``start()``, payload writes, then ``finish()``,
    which back-patches the 4-byte header (3-byte length + packet number).
    """
    #TODO make writers really buffered
    def __init__(self, stream, buffer):
        BufferedWriter.__init__(self, stream, buffer)
        self.ERROR_TEMPLATE = "%s"

    def write_error(self, errno, errmsg):
        # ERR packet: 0xFF marker, little-endian 2-byte error code, message.
        self.buffer.write_byte(0xFF) #ERROR
        #ERROR CODE:
        self.buffer.write_byte((errno >> 0) & 0xFF)
        self.buffer.write_byte((errno >> 8) & 0xFF)
        #ERROR MSG:
        self.buffer.write_bytes(self.ERROR_TEMPLATE % errmsg)

    def write_ok(self, field_count, affected_rows, insert_id, server_status, warning_count, msg = ''):
        # OK packet. NOTE(review): affected_rows and insert_id are written
        # as single bytes here — confirm values above 250 are never passed,
        # since the protocol length-codes these fields.
        self.buffer.write_byte(field_count)
        self.buffer.write_byte(affected_rows)
        self.buffer.write_byte(insert_id)
        self.buffer.write_short(server_status) #server Status
        self.buffer.write_short(warning_count)
        if msg:
            self.buffer.write_bytes(msg)

    def write_greeting(self, scramble_buff, protocol_version, server_version, thread_id, server_caps, server_language, server_status):
        # Initial handshake: version info, then the scramble split into an
        # 8-byte part and the remainder, separated by fillers.
        self.buffer.write_byte(protocol_version)
        self.buffer.write_bytes(server_version + '\0')
        self.buffer.write_int(thread_id)
        self.buffer.write_bytes(scramble_buff[:8])
        self.buffer.write_byte(0) #filler
        self.buffer.write_short(server_caps)
        self.buffer.write_byte(server_language)
        self.buffer.write_short(server_status)
        self.buffer.write_bytes('\0' * 13) #filler
        self.buffer.write_bytes(scramble_buff[8:])

    def write_header(self, length, packet_number):
        # 3-byte payload length (length excludes the 4 header bytes) with
        # the packet number in the high byte, as one little-endian int.
        self.buffer.write_int((length - 4) | (packet_number << 24))

    def start(self):
        """starts building a packet"""
        self.start_position = self.buffer.position #remember start of header
        self.buffer.skip(4) #reserve room for header

    def finish(self, packet_number):
        """finishes packet by going back to start of packet and writing header and packetNumber"""
        position = self.buffer.position
        length = self.buffer.position - self.start_position
        #print length
        self.buffer.position = self.start_position
        self.write_header(length, packet_number)
        self.buffer.position = position

    def write_int(self, i):
        self.buffer.write_int(i)

    def write_lcb(self, b):
        # Length-coded binary; only the single-byte (< 251) form is
        # implemented, hence the assert.
        assert b < 128, "TODO larger numbers"
        self.buffer.write_byte(b)

    def write_lcs(self, s):
        # Length-coded string: length prefix followed by the raw bytes.
        self.write_lcb(len(s))
        self.buffer.write_bytes(s)
class BufferedPacketReader(BufferedReader):
    """Reads MySQL wire-protocol packets via the C-level ``PacketReader``.

    The generators below refill the underlying buffer from the stream
    whenever the reader reports no MORE data is available (Python 2:
    uses ``generator.next()``).
    """
    def __init__(self, stream, buffer):
        BufferedReader.__init__(self, stream, buffer)
        self.stream = stream
        self.buffer = buffer
        self.reader = PacketReader(buffer)

    def read_packets(self):
        # Infinite generator yielding one packet per completed read.
        reader = self.reader
        READ_RESULT_END = PACKET_READ_RESULT.END
        READ_RESULT_MORE = PACKET_READ_RESULT.MORE
        while True:
            read_result = reader.read_packet()
            if read_result & READ_RESULT_END:
                yield reader.packet
            if not (read_result & READ_RESULT_MORE):
                self.fill()

    def read_packet(self):
        # Convenience wrapper: pull a single packet from the generator.
        return self.read_packets().next()

    def read_length_coded_binary(self):
        return self.reader.read_length_coded_binary()

    def read_fields(self, field_count):
        #generator for rest of result packets
        packets = self.read_packets()
        #read field types
        fields = []
        reader = self.reader
        i = 0
        while i < field_count:
            _ = packets.next()
            fields.append(reader.read_field_type())
            i += 1
        #end of field types
        packet = packets.next()
        # 0xFE is the EOF marker terminating the field-definition packets.
        assert packet.read_byte() == 0xFE, "expected end of fields"
        return fields

    def read_rows(self, fields, row_count = 100):
        # Yield decoded rows in batches of ``row_count`` until EOF.
        reader = self.reader
        READ_RESULT_EOF = PACKET_READ_RESULT.EOF
        READ_RESULT_MORE = PACKET_READ_RESULT.MORE
        while True:
            read_result, rows = reader.read_rows(fields, row_count)
            for row in rows:
                yield row
            if read_result & READ_RESULT_EOF:
                break
            if not (read_result & READ_RESULT_MORE):
                self.fill()
| {
"content_hash": "67772630f4c090f5412f4b2ebfbbe605",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 134,
"avg_line_length": 34.867298578199055,
"alnum_prop": 0.615740111458475,
"repo_name": "toymachine/concurrence",
"id": "b5369045b592e9bef799097c008b853b904ae954",
"size": "7357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/concurrence/database/mysql/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "42241"
},
{
"name": "JavaScript",
"bytes": "19227"
},
{
"name": "Python",
"bytes": "408521"
},
{
"name": "Shell",
"bytes": "45"
}
],
"symlink_target": ""
} |
import datetime
from django import forms
from django.core import validators
from .models import Post, Comment
class PostForm(forms.ModelForm):
    """Create/edit form for Post.

    Applies Bootstrap widget classes and lets ``datetime_posted`` default to
    the current time when left blank.
    """
    def __init__(self, *args, **kwargs):
        super(PostForm, self).__init__(*args, **kwargs)
        # Bootstrap styling for every widget.
        for field in self.fields.values():
            field.widget.attrs["class"] = "input-block-level"
        self.fields["datetime_posted"].required = False

    def clean_datetime_posted(self):
        posted = self.cleaned_data["datetime_posted"]
        if posted in validators.EMPTY_VALUES:
            # Blank means "posted right now".
            return datetime.datetime.now()
        return posted

    class Meta:
        model = Post
        fields = ("title", "body", "datetime_posted")
class CommentForm(forms.ModelForm):
    """Comment entry form with a wide Bootstrap textarea."""
    def __init__(self, *args, **kwargs):
        super(CommentForm, self).__init__(*args, **kwargs)
        body_widget = self.fields["body"].widget
        body_widget.attrs["class"] = "input-xxlarge"

    class Meta:
        model = Comment
        fields = ("body", )
| {
"content_hash": "0858c8b9419c69c327bd1b1cd247180c",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 67,
"avg_line_length": 26.97222222222222,
"alnum_prop": 0.615859938208033,
"repo_name": "ukch/gae_simple_blog",
"id": "ef5456f66245c607e9e9561101e9cf268ad30cb3",
"size": "971",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "simpleblog/content/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "56264"
},
{
"name": "Python",
"bytes": "6670"
},
{
"name": "Shell",
"bytes": "720"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from rest_framework import serializers
from accounts.models import Account
class AccountSerializer(serializers.ModelSerializer):
    """Serializer exposing the public Account fields."""

    class Meta:
        model = Account
        fields = (
            'username',
            'email',
            'first_name',
            'last_name',
            'slug',
        )
| {
"content_hash": "1a5b3dace7ca500d3ef4e31bd3ab924d",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 73,
"avg_line_length": 26.09090909090909,
"alnum_prop": 0.7142857142857143,
"repo_name": "UserGraund/main_application",
"id": "64a655795c5b929e17568a1da3dd5e7c41df85e9",
"size": "287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "accounts/api/serializers/account.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "411"
},
{
"name": "Python",
"bytes": "13213"
},
{
"name": "Shell",
"bytes": "342"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from mopidy import listener
class AudioListener(listener.Listener):
    """
    Marker interface for recipients of events sent by the audio actor.
    Any Pykka actor that mixes in this class will receive calls to the methods
    defined here when the corresponding events happen in the core actor. This
    interface is used both for looking up what actors to notify of the events,
    and for providing default implementations for those listeners that are not
    interested in all events.
    """
    @staticmethod
    def send(event, **kwargs):
        """Helper to allow calling of audio listener events"""
        listener.send_async(AudioListener, event, **kwargs)
    def reached_end_of_stream(self):
        """
        Called whenever the end of the audio stream is reached.
        *MAY* be implemented by actor.
        """
        pass
    def stream_changed(self, uri):
        """
        Called whenever the audio stream changes.
        *MAY* be implemented by actor.
        :param string uri: URI the stream has started playing.
        """
        pass
    def position_changed(self, position):
        # Fixed: the parameter was previously named `position_changed`,
        # which shadowed the method name and would break keyword-based event
        # dispatch via `send(event, position=...)`.
        """
        Called whenever the position of the stream changes.
        *MAY* be implemented by actor.
        :param int position: Position in milliseconds.
        """
        pass
    def state_changed(self, old_state, new_state, target_state):
        """
        Called after the playback state have changed.
        Will be called for both immediate and async state changes in GStreamer.
        Target state is used to when we should be in the target state, but
        temporarily need to switch to an other state. A typical example of this
        is buffering. When this happens an event with
        `old=PLAYING, new=PAUSED, target=PLAYING` will be emitted. Once we have
        caught up a `old=PAUSED, new=PLAYING, target=None` event will be
        be generated.
        Regular state changes will not have target state set as they are final
        states which should be stable.
        *MAY* be implemented by actor.
        :param old_state: the state before the change
        :type old_state: string from :class:`mopidy.core.PlaybackState` field
        :param new_state: the state after the change
        :type new_state: string from :class:`mopidy.core.PlaybackState` field
        :param target_state: the intended state
        :type target_state: string from :class:`mopidy.core.PlaybackState`
            field or :class:`None` if this is a final state.
        """
        pass
| {
"content_hash": "d4c9ae232fff3b25eb54169cf6a6d671",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 79,
"avg_line_length": 34.61038961038961,
"alnum_prop": 0.6585365853658537,
"repo_name": "woutervanwijk/mopidy",
"id": "b272d15a8188daaa9a1218a4a14f4d16807c4d5f",
"size": "2665",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "mopidy/audio/listener.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "610"
},
{
"name": "JavaScript",
"bytes": "74911"
},
{
"name": "Python",
"bytes": "925399"
}
],
"symlink_target": ""
} |
import pygame
import pygame.font
import pygame.event
import pygame.draw
import string
import os
from pygame.locals import *
def get_key():
    """Block until a key is pressed and return its keycode.

    On a QUIT event (window closed) the helper processes are killed and the
    program exits via SystemExit instead of returning.
    """
    while True:
        event = pygame.event.poll()
        if event.type == KEYDOWN:
            return event.key
        if event.type == pygame.QUIT:
            # Tear down the companion processes before quitting.
            os.system("pkill LeapControlPane")
            os.system("pkill roscore")
            try:
                # rospy was referenced without being imported in the original,
                # which crashed with NameError on QUIT; import lazily and skip
                # the ROS shutdown when ROS is not available.
                import rospy
                rospy.signal_shutdown("KeyboardInterrupt")
            except ImportError:
                pass
            pygame.quit()
            # Previously the loop kept polling a quit pygame; exit instead.
            raise SystemExit
def display_box(screen, message):
    "Print a message in a box in the middle of the screen"
    font = pygame.font.Font(None, 18)
    cx = screen.get_width() / 2
    cy = screen.get_height() / 2
    # Filled inner box, then a white one-pixel border around it.
    pygame.draw.rect(screen, (145, 185, 255), (cx - 100, cy - 10, 200, 20), 0)
    pygame.draw.rect(screen, (255, 255, 255), (cx - 102, cy - 12, 204, 24), 1)
    if len(message) != 0:
        rendered = font.render(message, 1, (255, 255, 255))
        screen.blit(rendered, (cx - 100, cy - 10))
    pygame.display.flip()
def ask(screen, question):
    "ask(screen, question) -> answer"
    pygame.font.init()
    typed = []
    display_box(screen, question + ": " + string.join(typed, ""))
    while 1:
        key = get_key()
        if key == K_BACKSPACE:
            # Drop the last character.
            typed = typed[:-1]
        elif key == K_RETURN:
            break
        elif key == K_MINUS:
            typed.append("_")
        elif key <= 127:
            # Plain ASCII keys are appended verbatim.
            typed.append(chr(key))
        display_box(screen, question + ": " + string.join(typed, ""))
    return string.join(typed, "")
def main():
    """Demo entry point: open a small window and echo the entered name.

    NOTE(review): Python 2 print statement — this file targets Python 2.
    """
    screen = pygame.display.set_mode((320,240))
    print ask(screen, "Name") + " was entered"
| {
"content_hash": "53a50cab596517b7f8bf1e7de6de7fe4",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 71,
"avg_line_length": 26.901639344262296,
"alnum_prop": 0.6496039000609385,
"repo_name": "mlagunas/lmUR",
"id": "43f48706892f6eb3a6b7dffc3cb48b16a29d1e64",
"size": "1641",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GUI/inputBox.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "9895"
},
{
"name": "Python",
"bytes": "105703"
}
],
"symlink_target": ""
} |
from django.conf.urls import *
from localeurl.views import change_locale
# Old-style (pre-Django 1.10) URLconf: one named view used to switch the
# active locale.
urlpatterns = patterns('',
    url(r'^change/', change_locale, name='localeurl_change_locale'),
)
| {
"content_hash": "1a36f52d372869224fb701a0045f5574",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 65,
"avg_line_length": 28.166666666666668,
"alnum_prop": 0.7455621301775148,
"repo_name": "primepix/django-localeurl",
"id": "c360c204a0108320d49c5c2419dc3971cae41cc6",
"size": "169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "localeurl/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35525"
}
],
"symlink_target": ""
} |
"""Utility functions used across Superset"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import object
from datetime import date, datetime, time, timedelta
import decimal
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
import functools
import json
import logging
import os
import signal
import smtplib
import sys
import uuid
import zlib
import celery
from dateutil.parser import parse
from flask import flash, Markup, redirect, render_template, request, url_for
from flask_appbuilder._compat import as_unicode
from flask_appbuilder.const import (
FLAMSG_ERR_SEC_ACCESS_DENIED,
LOGMSG_ERR_SEC_ACCESS_DENIED,
PERMISSION_PREFIX,
)
from flask_babel import gettext as __
from flask_cache import Cache
import markdown as md
import numpy
import parsedatetime
from past.builtins import basestring
from pydruid.utils.having import Having
import pytz
import sqlalchemy as sa
from sqlalchemy import event, exc, select
from sqlalchemy.types import TEXT, TypeDecorator
# Quiet the markdown package's verbose logger.
logging.getLogger('MARKDOWN').setLevel(logging.INFO)
PY3K = sys.version_info >= (3, 0)  # True when running under Python 3
EPOCH = datetime(1970, 1, 1)  # naive datetime used as the Unix-epoch reference
DTTM_ALIAS = '__timestamp'  # canonical alias for the time column
class SupersetException(Exception):
    """Base class for all Superset-specific errors."""
class SupersetTimeoutException(SupersetException):
    """Raised when a block guarded by the ``timeout`` context manager overruns."""
class SupersetSecurityException(SupersetException):
    """Specialized SupersetException subtype (security-related, per its name)."""
class MetricPermException(SupersetException):
    """Specialized SupersetException subtype (metric permissions, per its name)."""
class NoDataException(SupersetException):
    """Specialized SupersetException subtype (empty result, per its name)."""
class SupersetTemplateException(SupersetException):
    """Specialized SupersetException subtype (templating errors, per its name)."""
def can_access(sm, permission_name, view_name, user):
    """Safe wrapper around FAB's access check: anonymous users fall back to
    the public-permission lookup instead of failing on missing perms/views."""
    if not user.is_anonymous():
        return sm._has_view_access(user, permission_name, view_name)
    return sm.is_item_public(permission_name, view_name)
def flasher(msg, severity=None):
    """Show msg via Flask's flash; outside a request context (flash raises
    RuntimeError) fall back to logging it instead."""
    try:
        flash(msg, severity)
    except RuntimeError:
        log = logging.error if severity == 'danger' else logging.info
        log(msg)
class memoized(object):  # noqa
    """Decorator that caches a function's return value per argument tuple.

    Repeated calls with the same (hashable) arguments return the cached
    value without re-evaluating the function; unhashable arguments bypass
    the cache entirely.
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}
    def __call__(self, *args):
        try:
            cached = args in self.cache
        except TypeError:
            # Unhashable arguments (e.g. a list) cannot be cached;
            # better to not cache than to blow up entirely.
            return self.func(*args)
        if cached:
            return self.cache[args]
        result = self.func(*args)
        self.cache[args] = result
        return result
    def __repr__(self):
        """Return the function's docstring."""
        return self.func.__doc__
    def __get__(self, obj, objtype):
        """Support instance methods."""
        return functools.partial(self.__call__, obj)
def js_string_to_python(item):
    """Map the JS literals 'null'/'undefined' to None; pass anything else through."""
    if item in ('null', 'undefined'):
        return None
    return item
def string_to_num(s):
    """Converts a string to an int/float

    Returns ``None`` if it can't be converted

    >>> string_to_num('5')
    5
    >>> string_to_num('5.2')
    5.2
    >>> string_to_num(10)
    10
    >>> string_to_num(10.1)
    10.1
    >>> string_to_num('-5')
    -5
    >>> string_to_num('this is not a string') is None
    True
    """
    if isinstance(s, (int, float)):
        return s
    # Previously only `s.isdigit()` was checked, so negative integers like
    # '-5' fell through to the float path (returning -5.0), and non-string
    # input crashed with AttributeError.  Try int first, then float.
    try:
        return int(s)
    except (ValueError, TypeError):
        pass
    try:
        return float(s)
    except (ValueError, TypeError):
        return None
class DimSelector(Having):
    """A pydruid Having clause emitting a Druid 'dimSelector' spec
    (filter on a dimension value inside a HAVING context).

    Requires ``dimension`` and ``value`` keyword arguments.
    """
    def __init__(self, **args):
        # Just a hack to prevent any exceptions
        Having.__init__(self, type='equalTo', aggregation=None, value=None)
        # Overwrite the base payload with the actual dimSelector spec.
        self.having = {
            'having': {
                'type': 'dimSelector',
                'dimension': args['dimension'],
                'value': args['value'],
            },
        }
def list_minus(l, minus):
    """Returns l without what is in minus

    >>> list_minus([1, 2, 3], [2])
    [1, 3]
    """
    return [item for item in l if item not in minus]
def parse_human_datetime(s):
    """
    Returns ``datetime.datetime`` from human readable strings

    >>> from datetime import date, timedelta
    >>> from dateutil.relativedelta import relativedelta
    >>> parse_human_datetime('2015-04-03')
    datetime.datetime(2015, 4, 3, 0, 0)
    >>> parse_human_datetime('2/3/1969')
    datetime.datetime(1969, 2, 3, 0, 0)
    >>> parse_human_datetime('now') <= datetime.now()
    True
    >>> parse_human_datetime('yesterday') <= datetime.now()
    True
    >>> date.today() - timedelta(1) == parse_human_datetime('yesterday').date()
    True
    >>> year_ago_1 = parse_human_datetime('one year ago').date()
    >>> year_ago_2 = (datetime.now() - relativedelta(years=1) ).date()
    >>> year_ago_1 == year_ago_2
    True
    """
    if not s:
        return None
    # First try strict-ish parsing via dateutil; fall back to the natural
    # language parser (parsedatetime) for strings like 'one year ago'.
    try:
        return parse(s)
    except Exception:
        pass
    try:
        cal = parsedatetime.Calendar()
        parsed_dttm, parsed_flags = cal.parseDT(s)
        # when time is not extracted, we 'reset to midnight'
        if parsed_flags & 2 == 0:
            parsed_dttm = parsed_dttm.replace(hour=0, minute=0, second=0)
        return dttm_from_timtuple(parsed_dttm.utctimetuple())
    except Exception as e:
        logging.exception(e)
        raise ValueError("Couldn't parse date string [{}]".format(s))
def dttm_from_timtuple(d):
    """Build a datetime from the first six fields of a time.struct_time."""
    return datetime(d.tm_year, d.tm_mon, d.tm_mday,
                    d.tm_hour, d.tm_min, d.tm_sec)
def parse_human_timedelta(s):
    """
    Returns ``datetime.timedelta`` from natural language time deltas

    >>> parse_human_datetime('now') <= datetime.now()
    True
    """
    cal = parsedatetime.Calendar()
    # Anchor at 'now' truncated to whole seconds so both operands agree.
    anchor = dttm_from_timtuple(datetime.now().timetuple())
    parsed = cal.parse(s, anchor)[0]
    resolved = datetime(parsed.tm_year, parsed.tm_mon, parsed.tm_mday,
                        parsed.tm_hour, parsed.tm_min, parsed.tm_sec)
    return resolved - anchor
class JSONEncodedDict(TypeDecorator):
    """Represents an immutable structure as a json-encoded string."""

    impl = TEXT

    def process_bind_param(self, value, dialect):
        # Serialize on the way into the database; None columns pass through.
        return None if value is None else json.dumps(value)

    def process_result_value(self, value, dialect):
        # Deserialize on the way out; None columns stay None.
        return None if value is None else json.loads(value)
def datetime_f(dttm):
    """Formats datetime to take less room when it is recent"""
    if dttm:
        iso = dttm.isoformat()
        now_iso = datetime.now().isoformat()
        if now_iso[:10] == iso[:10]:
            # Same day: show only the time part.
            iso = iso[11:]
        elif now_iso[:4] == iso[:4]:
            # Same year: drop the year.
            iso = iso[5:]
        dttm = iso
    return '<nobr>{}</nobr>'.format(dttm)
def base_json_conv(obj):
    """Convert common non-JSON-native scalars to JSON-friendly values.

    Returns None when no conversion rule applies (callers treat that as
    "not handled here").
    """
    conversions = (
        (numpy.int64, int),
        (numpy.bool_, bool),
        (set, list),
        (decimal.Decimal, float),
        (uuid.UUID, str),
        (timedelta, str),
    )
    for kind, convert in conversions:
        if isinstance(obj, kind):
            return convert(obj)
    return None
def json_iso_dttm_ser(obj):
    """
    json serializer that deals with dates

    >>> dttm = datetime(1970, 1, 1)
    >>> json.dumps({'dttm': dttm}, default=json_iso_dttm_ser)
    '{"dttm": "1970-01-01T00:00:00"}'
    """
    val = base_json_conv(obj)
    if val is not None:
        return val
    # datetime, date and time all expose isoformat(); collapse the branches.
    if isinstance(obj, (datetime, date, time)):
        return obj.isoformat()
    raise TypeError(
        'Unserializable object {} of type {}'.format(obj, type(obj)))
def datetime_to_epoch(dttm):
    """Milliseconds since the Unix epoch; handles both tz-aware and naive input
    by picking a matching epoch reference."""
    reference = pytz.utc.localize(EPOCH) if dttm.tzinfo else EPOCH
    return (dttm - reference).total_seconds() * 1000
def now_as_float():
    """Current UTC time as epoch milliseconds (float)."""
    return datetime_to_epoch(datetime.utcnow())
def json_int_dttm_ser(obj):
    """json serializer that converts dates to epoch milliseconds"""
    val = base_json_conv(obj)
    if val is not None:
        return val
    # Order matters: datetime is a subclass of date, so test it first.
    if isinstance(obj, datetime):
        return datetime_to_epoch(obj)
    if isinstance(obj, date):
        return (obj - EPOCH.date()).total_seconds() * 1000
    raise TypeError(
        'Unserializable object {} of type {}'.format(obj, type(obj)))
def json_dumps_w_dates(payload):
    """json.dumps with dates serialized as epoch milliseconds."""
    return json.dumps(payload, default=json_int_dttm_ser)
def error_msg_from_exception(e):
    """Translate exception into error message

    Database have different ways to handle exception. This function attempts
    to make sense of the exception object and construct a human readable
    sentence.

    TODO(bkyryliuk): parse the Presto error message from the connection
    created via create_engine.
    engine = create_engine('presto://localhost:3506/silver') -
    gives an e.message as the str(dict)
    presto.connect('localhost', port=3506, catalog='silver') - as a dict.
    The latter version is parsed correctly by this function.
    """
    msg = ''
    if hasattr(e, 'message'):
        payload = e.message
        if isinstance(payload, dict):
            msg = payload.get('message')
        elif payload:
            msg = '{}'.format(payload)
    # Fall back to the exception's own string form.
    return msg or '{}'.format(e)
def markdown(s, markup_wrap=False):
    """Render s (possibly None) to HTML with tables, fenced code and
    codehilite enabled; optionally wrap the result in a Markup object."""
    extensions = [
        'markdown.extensions.tables',
        'markdown.extensions.fenced_code',
        'markdown.extensions.codehilite',
    ]
    html = md.markdown(s or '', extensions)
    return Markup(html) if markup_wrap else html
def readfile(file_path):
    """Return the full text contents of file_path."""
    with open(file_path) as handle:
        return handle.read()
def generic_find_constraint_name(table, columns, referenced, db):
    """Utility to find a foreign-key constraint name in alembic migrations.

    Returns the name of the FK on *table* that references *referenced* with
    exactly *columns*, or None when no such constraint exists.
    """
    reflected = sa.Table(
        table, db.metadata, autoload=True, autoload_with=db.engine)
    for fk in reflected.foreign_key_constraints:
        if fk.referred_table.name == referenced and set(fk.column_keys) == columns:
            return fk.name
    return None
def get_datasource_full_name(database_name, datasource_name, schema=None):
    """Build the bracketed dotted name '[db].[schema].[table]'; the schema
    segment is omitted when schema is falsy."""
    parts = [database_name, schema, datasource_name] if schema \
        else [database_name, datasource_name]
    return '.'.join('[{}]'.format(part) for part in parts)
def get_schema_perm(database, schema):
    """Permission string '[database].[schema]'; None when schema is falsy."""
    if not schema:
        return None
    return '[{}].[{}]'.format(database, schema)
def validate_json(obj):
    """Raise SupersetException when obj is non-empty and not valid JSON;
    falsy input is accepted silently."""
    if not obj:
        return
    try:
        json.loads(obj)
    except Exception:
        raise SupersetException('JSON is not valid')
def table_has_constraint(table, name, db):
    """True when the reflected *table* carries a constraint called *name*."""
    reflected = sa.Table(
        table, db.metadata, autoload=True, autoload_with=db.engine)
    return any(constraint.name == name for constraint in reflected.constraints)
class timeout(object):
    """
    To be used in a ``with`` block and timeout its content.

    Uses SIGALRM, so it only works on the main thread of a Unix process;
    when signal registration fails (ValueError, e.g. off the main thread)
    the timeout is silently disabled and only logged.
    """
    def __init__(self, seconds=1, error_message='Timeout'):
        # seconds: alarm delay; error_message: payload for the raised exception.
        self.seconds = seconds
        self.error_message = error_message
    def handle_timeout(self, signum, frame):
        """SIGALRM handler: abort the guarded block."""
        logging.error('Process timed out')
        raise SupersetTimeoutException(self.error_message)
    def __enter__(self):
        try:
            signal.signal(signal.SIGALRM, self.handle_timeout)
            signal.alarm(self.seconds)
        except ValueError as e:
            # Not on the main thread (or otherwise unsupported): degrade to a no-op.
            logging.warning("timeout can't be used in the current context")
            logging.exception(e)
    def __exit__(self, type, value, traceback):
        try:
            # Cancel any pending alarm.
            signal.alarm(0)
        except ValueError as e:
            logging.warning("timeout can't be used in the current context")
            logging.exception(e)
def pessimistic_connection_handling(some_engine):
    """Install pessimistic disconnect handling on a SQLAlchemy engine:
    ping every connection with ``SELECT 1`` on checkout and transparently
    re-validate it when the pool handed us a stale/disconnected one."""
    @event.listens_for(some_engine, 'engine_connect')
    def ping_connection(connection, branch):
        if branch:
            # 'branch' refers to a sub-connection of a connection,
            # we don't want to bother pinging on these.
            return

        # turn off 'close with result'.  This flag is only used with
        # 'connectionless' execution, otherwise will be False in any case
        save_should_close_with_result = connection.should_close_with_result
        connection.should_close_with_result = False

        try:
            # run a SELECT 1.   use a core select() so that
            # the SELECT of a scalar value without a table is
            # appropriately formatted for the backend
            connection.scalar(select([1]))
        except exc.DBAPIError as err:
            # catch SQLAlchemy's DBAPIError, which is a wrapper
            # for the DBAPI's exception.  It includes a .connection_invalidated
            # attribute which specifies if this connection is a 'disconnect'
            # condition, which is based on inspection of the original exception
            # by the dialect in use.
            if err.connection_invalidated:
                # run the same SELECT again - the connection will re-validate
                # itself and establish a new connection.  The disconnect detection
                # here also causes the whole connection pool to be invalidated
                # so that all stale connections are discarded.
                connection.scalar(select([1]))
            else:
                raise
        finally:
            # restore 'close with result'
            connection.should_close_with_result = save_should_close_with_result
class QueryStatus(object):
    """Enum-type class for query statuses"""
    # String constants only; values are stored/compared verbatim elsewhere,
    # so they must not be changed.
    STOPPED = 'stopped'
    FAILED = 'failed'
    PENDING = 'pending'
    RUNNING = 'running'
    SCHEDULED = 'scheduled'
    SUCCESS = 'success'
    TIMED_OUT = 'timed_out'
def notify_user_about_perm_udate(
        granter, user, role, datasource, tpl_name, config):
    """Render the grant-notification template and email it to *user*,
    BCC'ing the granter; a real send only happens when the config enables
    EMAIL_NOTIFICATIONS (otherwise dryrun).

    NOTE(review): 'udate' in the name is a typo for 'update'; kept because
    renaming would break callers.
    """
    msg = render_template(tpl_name, granter=granter, user=user, role=role,
                          datasource=datasource)
    logging.info(msg)
    subject = __('[Superset] Access to the datasource %(name)s was granted',
                 name=datasource.full_name)
    send_email_smtp(user.email, subject, msg, config, bcc=granter.email,
                    dryrun=not config.get('EMAIL_NOTIFICATIONS'))
def send_email_smtp(to, subject, html_content, config, files=None,
                    dryrun=False, cc=None, bcc=None, mime_subtype='mixed'):
    """
    Send an email with html content, eg:
    send_email_smtp(
        'test@example.com', 'foo', '<b>Foo</b> bar',['/dev/null'], dryrun=True)
    """
    smtp_mail_from = config.get('SMTP_MAIL_FROM')
    to = get_email_address_list(to)

    msg = MIMEMultipart(mime_subtype)
    msg['Subject'] = subject
    msg['From'] = smtp_mail_from
    msg['To'] = ', '.join(to)
    recipients = list(to)
    if cc:
        cc = get_email_address_list(cc)
        msg['CC'] = ', '.join(cc)
        recipients += cc
    if bcc:
        # don't add bcc in header
        recipients += get_email_address_list(bcc)
    msg['Date'] = formatdate(localtime=True)

    msg.attach(MIMEText(html_content, 'html'))
    for fname in files or []:
        basename = os.path.basename(fname)
        with open(fname, 'rb') as f:
            msg.attach(
                MIMEApplication(
                    f.read(),
                    Content_Disposition="attachment; filename='%s'" % basename,
                    Name=basename))
    send_MIME_email(smtp_mail_from, recipients, msg, config, dryrun=dryrun)
def send_MIME_email(e_from, e_to, mime_msg, config, dryrun=False):
    """Deliver a prepared MIME message via SMTP using the connection settings
    from *config*; with dryrun=True the message is only logged."""
    if dryrun:
        logging.info('Dryrun enabled, email notification content is below:')
        logging.info(mime_msg.as_string())
        return
    host = config.get('SMTP_HOST')
    port = config.get('SMTP_PORT')
    user = config.get('SMTP_USER')
    password = config.get('SMTP_PASSWORD')
    smtp_cls = smtplib.SMTP_SSL if config.get('SMTP_SSL') else smtplib.SMTP
    smtp = smtp_cls(host, port)
    if config.get('SMTP_STARTTLS'):
        smtp.starttls()
    if user and password:
        smtp.login(user, password)
    logging.info('Sent an alert email to ' + str(e_to))
    smtp.sendmail(e_from, e_to, mime_msg.as_string())
    smtp.quit()
def get_email_address_list(address_string):
    """Normalize a recipient spec into a list: split strings on ',' (or ';'),
    leave non-string input (already a list) untouched."""
    if isinstance(address_string, basestring):
        for separator in (',', ';'):
            if separator in address_string:
                return address_string.split(separator)
        return [address_string]
    return address_string
def has_access(f):
    """
    Use this decorator to enable granular security permissions to your
    methods. Permissions will be associated to a role, and roles are
    associated to users.

    By default the permission's name is the methods name.

    Forked from the flask_appbuilder.security.decorators
    TODO(bkyryliuk): contribute it back to FAB
    """
    if hasattr(f, '_permission_name'):
        permission_str = f._permission_name
    else:
        permission_str = f.__name__
    def wraps(self, *args, **kwargs):
        # Re-read _permission_name at call time; it is attached to f below,
        # after this closure is defined.
        permission_str = PERMISSION_PREFIX + f._permission_name
        if self.appbuilder.sm.has_access(permission_str,
                                         self.__class__.__name__):
            return f(self, *args, **kwargs)
        else:
            logging.warning(
                LOGMSG_ERR_SEC_ACCESS_DENIED.format(permission_str,
                                                    self.__class__.__name__))
            flash(as_unicode(FLAMSG_ERR_SEC_ACCESS_DENIED), 'danger')
        # adds next arg to forward to the original path once user is logged in.
        return redirect(
            url_for(
                self.appbuilder.sm.auth_view.__class__.__name__ + '.login',
                next=request.path))
    f._permission_name = permission_str
    return functools.update_wrapper(wraps, f)
def choicify(values):
    """Pair each value with itself, producing (value, value) choice tuples."""
    return [(option, option) for option in values]
def setup_cache(app, cache_config):
    """Setup the flask-cache on a flask app; returns None when caching is
    disabled (no config, or CACHE_TYPE 'null')."""
    if not cache_config:
        return None
    if cache_config.get('CACHE_TYPE') == 'null':
        return None
    return Cache(app, config=cache_config)
def zlib_compress(data):
    """
    Compress things in a py2/3 safe fashion

    >>> json_str = '{"test": 1}'
    >>> blob = zlib_compress(json_str)
    """
    # On Python 3 a str must be encoded before compression; bytes (and all
    # Python 2 input) pass straight through.
    needs_encoding = PY3K and isinstance(data, str)
    payload = bytes(data, 'utf-8') if needs_encoding else data
    return zlib.compress(payload)
def zlib_decompress_to_string(blob):
    """
    Decompress things to a string in a py2/3 safe fashion

    >>> json_str = '{"test": 1}'
    >>> blob = zlib_compress(json_str)
    >>> got_str = zlib_decompress_to_string(blob)
    >>> got_str == json_str
    True
    """
    if not PY3K:
        return zlib.decompress(blob)
    # Python 3: accept either bytes or str input, always return str.
    raw = blob if isinstance(blob, bytes) else bytes(blob, 'utf-8')
    return zlib.decompress(raw).decode('utf-8')
_celery_app = None  # process-wide Celery app singleton
def get_celery_app(config):
    """Return the process-wide Celery app, creating it on first call from
    config['CELERY_CONFIG']."""
    global _celery_app
    if _celery_app:
        return _celery_app
    _celery_app = celery.Celery(config_source=config.get('CELERY_CONFIG'))
    return _celery_app
def merge_extra_filters(form_data):
    """Fold form_data['extra_filters'] into the rest of the form data.

    extra_filters are temporary/contextual filters external to the slice
    definition (e.g. emitted by the "Filter Box" visualization).  Entries
    whose column is one of the special ``__``-prefixed time keys are copied
    to the corresponding top-level form field; the rest are appended to
    form_data['filters'], skipping exact duplicates.  The extra_filters key
    is removed afterwards.
    """
    if 'extra_filters' not in form_data:
        return
    form_data.setdefault('filters', [])
    # __from / __to etc. target time boundaries; `__` prefix avoids clashes
    # with real columns named `from` or `to`.
    date_options = {
        '__from': 'since',
        '__to': 'until',
        '__time_col': 'granularity_sqla',
        '__time_grain': 'time_grain_sqla',
        '__time_origin': 'druid_time_origin',
        '__granularity': 'granularity',
    }

    def get_filter_key(f):
        return f['col'] + '__' + f['op']

    # Existing filters keyed on (column, operator) for duplicate detection.
    existing_filters = {
        get_filter_key(flt): flt['val'] for flt in form_data['filters']
    }

    def is_duplicate(candidate):
        key = get_filter_key(candidate)
        if key not in existing_filters:
            return False
        current_val = existing_filters[key]
        new_val = candidate['val']
        if isinstance(new_val, list):
            # Lists match when they hold the same values, order-insensitively.
            return (isinstance(current_val, list) and
                    sorted(current_val) == sorted(new_val))
        return new_val == current_val

    for extra in form_data['extra_filters']:
        target_field = date_options.get(extra['col'])
        if target_field:
            # Time option: copy the value up to the top-level form field.
            if extra.get('val'):
                form_data[target_field] = extra['val']
        elif extra['val'] and len(extra['val']):
            if not is_duplicate(extra):
                form_data['filters'].append(extra)
    # Remove extra filters from the form data since no longer needed.
    del form_data['extra_filters']
| {
"content_hash": "63cb32b12d75e307ee3a0748726975be",
"timestamp": "",
"source": "github",
"line_count": 723,
"max_line_length": 82,
"avg_line_length": 31.363762102351313,
"alnum_prop": 0.5998412418415946,
"repo_name": "alanmcruickshank/superset-dev",
"id": "469bbc26cfc7a216459a3e8c064fe092c1ff68e1",
"size": "22676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "superset/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "65422"
},
{
"name": "HTML",
"bytes": "101728"
},
{
"name": "JavaScript",
"bytes": "783366"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "815898"
},
{
"name": "Shell",
"bytes": "1367"
}
],
"symlink_target": ""
} |
import datetime
from decimal import Decimal
from django.db import models
from corehq import Domain
from corehq.apps.accounting import generator, tasks
from corehq.apps.accounting.models import (
BillingAccount,
Currency,
Subscription,
SubscriptionType,
BillingRecord,
Invoice,
SMALL_INVOICE_THRESHOLD,
)
from corehq.apps.accounting.tests.base_tests import BaseAccountingTest
from corehq.apps.accounting.utils import get_previous_month_date_range
class TestBillingAccount(BaseAccountingTest):
    """Smoke tests for BillingAccount creation and delete protection."""
    def setUp(self):
        super(TestBillingAccount, self).setUp()
        self.billing_contact = generator.arbitrary_web_user()
        self.dimagi_user = generator.arbitrary_web_user(is_dimagi=True)
        self.currency = generator.init_default_currency()
        self.billing_account = generator.billing_account(self.dimagi_user, self.billing_contact)
    def test_creation(self):
        # The generator should have produced a persisted account.
        self.assertIsNotNone(self.billing_account)
    def test_deletions(self):
        # Deleting the currency must fail while the account references it
        # (presumably an on_delete=PROTECT FK — confirm in models).
        self.assertRaises(models.ProtectedError, self.currency.delete)
    def tearDown(self):
        # Remove everything setUp created so tests stay isolated.
        self.billing_contact.delete()
        self.dimagi_user.delete()
        BillingAccount.objects.all().delete()
        Currency.objects.all().delete()
        super(TestBillingAccount, self).tearDown()
class TestSubscription(BaseAccountingTest):
    """Tests subscription (de)activation by date and delete protection."""
    def setUp(self):
        super(TestSubscription, self).setUp()
        self.billing_contact = generator.arbitrary_web_user()
        self.dimagi_user = generator.arbitrary_web_user(is_dimagi=True)
        self.domain = Domain(name='test')
        self.domain.save()
        self.currency = generator.init_default_currency()
        self.account = generator.billing_account(self.dimagi_user, self.billing_contact)
        # Subscription starting today for the generated account/domain.
        self.subscription, self.subscription_length = generator.generate_domain_subscription_from_date(
            datetime.date.today(), self.account, self.domain.name
        )
    def test_creation(self):
        self.assertIsNotNone(self.subscription)
    def test_no_activation(self):
        # Running activation 30 days before the start date must not activate.
        tasks.activate_subscriptions(based_on_date=self.subscription.date_start - datetime.timedelta(30))
        subscription = Subscription.objects.get(id=self.subscription.id)
        self.assertFalse(subscription.is_active)
    def test_activation(self):
        # Activation exactly on the start date takes effect.
        tasks.activate_subscriptions(based_on_date=self.subscription.date_start)
        subscription = Subscription.objects.get(id=self.subscription.id)
        self.assertTrue(subscription.is_active)
    def test_no_deactivation(self):
        # Deactivation 30 days before the end date must leave it active.
        tasks.activate_subscriptions(based_on_date=self.subscription.date_start)
        tasks.deactivate_subscriptions(based_on_date=self.subscription.date_end - datetime.timedelta(30))
        subscription = Subscription.objects.get(id=self.subscription.id)
        self.assertTrue(subscription.is_active)
    def test_deactivation(self):
        # Deactivation on the end date takes effect.
        tasks.deactivate_subscriptions(based_on_date=self.subscription.date_end)
        subscription = Subscription.objects.get(id=self.subscription.id)
        self.assertFalse(subscription.is_active)
    def test_deletions(self):
        # Objects referenced by the subscription are delete-protected.
        self.assertRaises(models.ProtectedError, self.account.delete)
        self.assertRaises(models.ProtectedError, self.subscription.plan_version.delete)
        self.assertRaises(models.ProtectedError, self.subscription.subscriber.delete)
    def tearDown(self):
        self.billing_contact.delete()
        self.dimagi_user.delete()
        self.domain.delete()
        generator.delete_all_subscriptions()
        generator.delete_all_accounts()
        super(TestSubscription, self).tearDown()
class TestBillingRecord(BaseAccountingTest):
    """Tests BillingRecord.should_send_email across invoice/subscription states."""
    def setUp(self):
        super(TestBillingRecord, self).setUp()
        self.billing_contact = generator.arbitrary_web_user()
        self.dimagi_user = generator.arbitrary_web_user(is_dimagi=True)
        self.domain = Domain(name='test')
        self.domain.save()
        self.invoice_start, self.invoice_end = get_previous_month_date_range()
        self.currency = generator.init_default_currency()
        self.account = generator.billing_account(self.dimagi_user, self.billing_contact)
        self.subscription, self.subscription_length = generator.generate_domain_subscription_from_date(
            datetime.date.today(), self.account, self.domain.name
        )
        # Visible (non-hidden) invoice covering last month.
        self.invoice = Invoice(
            subscription=self.subscription,
            date_start=self.invoice_start,
            date_end=self.invoice_end,
            is_hidden=False,
        )
        self.billing_record = BillingRecord(invoice=self.invoice)
    def test_should_send_email(self):
        # Default state: email goes out.
        self.assertTrue(self.billing_record.should_send_email)
    def test_should_send_email_contracted(self):
        # Contracted subscriptions only email above the small-invoice threshold.
        self.subscription.service_type = SubscriptionType.CONTRACTED
        self.assertTrue(self.billing_record.should_send_email)
        self.invoice.balance = Decimal(SMALL_INVOICE_THRESHOLD - 1)
        self.assertFalse(self.billing_record.should_send_email)
        self.invoice.balance = Decimal(SMALL_INVOICE_THRESHOLD + 1)
        self.assertTrue(self.billing_record.should_send_email)
    def test_should_send_email_autogenerate_credits(self):
        # Auto-credited subscriptions only email above the threshold too.
        self.subscription.auto_generate_credits = True
        self.assertFalse(self.billing_record.should_send_email)
        self.invoice.balance = Decimal(SMALL_INVOICE_THRESHOLD + 1)
        self.assertTrue(self.billing_record.should_send_email)
    def test_should_send_email_hidden(self):
        # Hidden invoices never trigger email.
        self.assertTrue(self.billing_record.should_send_email)
        self.invoice.is_hidden = True
        self.assertFalse(self.billing_record.should_send_email)
| {
"content_hash": "33cffecb0d59cb1526f4a4ba3f6adb12",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 105,
"avg_line_length": 40.11971830985915,
"alnum_prop": 0.7075653852905037,
"repo_name": "puttarajubr/commcare-hq",
"id": "e0a4d5c62983b841fbb031bf15ad42d14f18440c",
"size": "5697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/accounting/tests/test_models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "581878"
},
{
"name": "HTML",
"bytes": "2790361"
},
{
"name": "JavaScript",
"bytes": "2572023"
},
{
"name": "Makefile",
"bytes": "3999"
},
{
"name": "Python",
"bytes": "11275678"
},
{
"name": "Shell",
"bytes": "23890"
}
],
"symlink_target": ""
} |
# Classic mutual-exclusion pattern: a binary semaphore guards a shared counter.
# NOTE(review): `Semaphore` is not imported in this snippet - presumably it is
# provided by the surrounding environment (swampy's wait()/signal() API, not
# threading.Semaphore's acquire()/release()); confirm before reuse.
mutex = Semaphore(1)
counter = 0
## Thread code
# wait() acquires the semaphore (blocks while another thread holds it).
mutex.wait()
# critical section
counter += 1
# signal() releases the semaphore so the next waiting thread may enter.
mutex.signal()
| {
"content_hash": "37ea09ad3c958b267ba40e5092e3bfe3",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 20,
"avg_line_length": 11.1,
"alnum_prop": 0.6846846846846847,
"repo_name": "DoWhatILove/turtle",
"id": "c622009fe177ad3378a81988f7f2ffb1e84a7895",
"size": "130",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "programming/python/design/swampy/mutex.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "6651"
},
{
"name": "C",
"bytes": "51179"
},
{
"name": "C#",
"bytes": "2573153"
},
{
"name": "CSS",
"bytes": "15296"
},
{
"name": "Dockerfile",
"bytes": "512"
},
{
"name": "HTML",
"bytes": "19499"
},
{
"name": "JavaScript",
"bytes": "10918"
},
{
"name": "Jupyter Notebook",
"bytes": "1174432"
},
{
"name": "Makefile",
"bytes": "1309"
},
{
"name": "PowerShell",
"bytes": "32263"
},
{
"name": "Python",
"bytes": "310753"
},
{
"name": "Shell",
"bytes": "10285"
},
{
"name": "Vim script",
"bytes": "1757"
}
],
"symlink_target": ""
} |
from ..dojo_test_case import DojoTestCase
from dojo.tools.nsp.parser import NspParser
from dojo.models import Test
class TestNspParser(DojoTestCase):
    """Unit tests for the Node Security Platform (NSP) report parser."""

    def test_parse_none(self):
        # An empty report must produce no findings.
        parser = NspParser()
        with open("unittests/scans/nsp/none.json") as test_file:
            findings = parser.get_findings(test_file, Test())
        self.assertEqual(0, len(findings))

    def test_parse_ok(self):
        # A full scan yields 9 findings: 1 RCE and 8 ReDoS advisories.
        parser = NspParser()
        with open("unittests/scans/nsp/scan.json") as test_file:
            findings = parser.get_findings(test_file, Test())
        self.assertEqual(9, len(findings))
        # Count each type of finding to check afterwards
        codeExec = 0
        dos = 0
        for finding in findings:
            if finding.title.startswith("Remote Code Execution"):
                # Bug fix: assert on the CURRENT finding, not findings[0] -
                # the old code re-checked the first finding on every pass.
                self.assertEqual(finding.severity, "High")
                self.assertEqual(finding.references, "https://nodesecurity.io/advisories/521")
                codeExec += 1
            elif finding.title.startswith("Regular Expression Denial of Service"):
                self.assertEqual(finding.severity, "High")
                # References must be one of the known ReDoS advisories.
                self.assertIn(
                    finding.references,
                    (
                        "https://nodesecurity.io/advisories/106",
                        "https://nodesecurity.io/advisories/526",
                        "https://nodesecurity.io/advisories/534",
                        "https://nodesecurity.io/advisories/535",
                    ),
                )
                dos += 1
            else:
                self.fail("Unexpected NSP finding.")
        self.assertEqual(codeExec, 1)
        self.assertEqual(dos, 8)
| {
"content_hash": "25ae37e72494002dfde5babaa8de24fb",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 98,
"avg_line_length": 41.5609756097561,
"alnum_prop": 0.5997652582159625,
"repo_name": "rackerlabs/django-DefectDojo",
"id": "81d661499ebe2a21e1a852c20de99d4062925fda",
"size": "1704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "unittests/tools/test_nsp_parser.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "18132"
},
{
"name": "Groff",
"bytes": "91"
},
{
"name": "HTML",
"bytes": "666571"
},
{
"name": "JavaScript",
"bytes": "6393"
},
{
"name": "Python",
"bytes": "524728"
},
{
"name": "Shell",
"bytes": "20558"
},
{
"name": "XSLT",
"bytes": "6624"
}
],
"symlink_target": ""
} |
import sublime, sublime_plugin, webbrowser
try:
from .github import *
except ValueError:
from github import *
class GithubPullsCommand(GithubWindowCommand):
  """Open the current repository's GitHub pull-requests page in the browser."""
  @with_repo
  def run(self, repo):
    # `with_repo` resolves the repository for the active view and injects it.
    webbrowser.open_new_tab(repo.pulls_url())
| {
"content_hash": "c755cb7f80655d82154799c686da8c84",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 49,
"avg_line_length": 23.363636363636363,
"alnum_prop": 0.7120622568093385,
"repo_name": "robotwholearned/dotfilesCustom",
"id": "fb91e83db09ffa321a8dc885da5f43ab0a9a1e3f",
"size": "257",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Sublime Text 2/Packages/Github Tools/github_pulls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "535082"
},
{
"name": "Shell",
"bytes": "2305"
}
],
"symlink_target": ""
} |
import os
import hashlib
from PIL import Image
class Robohash(object):
    """
    Robohash is a quick way of generating unique avatars for a site.
    The original use-case was to create somewhat memorable images to represent a RSA key.
    """

    def __init__(self, string, hashcount=11, ignoreext=True):
        """
        Creates our Robohasher.

        Takes in the string to make a Robohash out of; ``hashcount`` controls
        how many selection "slots" the digest is split into.
        """
        # Optionally remove an images extension before hashing.
        if ignoreext is True:
            string = self._remove_exts(string)
        string = string.encode('utf-8')
        # SHA-512 yields 128 hex chars - plenty to slice into independent
        # selection slots.  (Renamed local: `hash` shadowed the builtin.)
        digest = hashlib.sha512()
        digest.update(string)
        self.hexdigest = digest.hexdigest()
        self.hasharray = []
        # Start the slot iterator at 4; earlier slots are reserved:
        # 0 = Color, 1 = Set, 2 = bgset, 3 = BG
        self.iter = 4
        self._create_hashes(hashcount)
        self.resourcedir = os.path.dirname(__file__) + '/'
        # Get the list of backgrounds and robot sets shipped with the package.
        self.sets = self._listdirs(self.resourcedir + 'sets')
        self.bgsets = self._listdirs(self.resourcedir + 'backgrounds')
        # Get the colors available in set1 (the only color-selectable set).
        self.colors = self._listdirs(self.resourcedir + 'sets/set1')
        self.format = 'png'

    def _remove_exts(self, string):
        """
        Strip a trailing image extension from *string*, remembering it.

        A detected extension is stored in ``self.format`` so the output image
        can be produced in the same format.  This ensures /Bear.png and
        /Bear.bmp hash to the same robot, rendered in different formats.
        """
        if string.lower().endswith(('.png', '.gif', '.jpg', '.bmp', '.jpeg', '.ppm', '.datauri')):
            format = string[string.rfind('.') + 1:len(string)]
            if format.lower() == 'jpg':
                # PIL spells this format 'jpeg'.
                format = 'jpeg'
            self.format = format
            string = string[0:string.rfind('.')]
        return string

    def _create_hashes(self, count):
        """
        Break ``self.hexdigest`` into *count* integer slots.

        Each slot is an independent chunk of the digest, later used
        (mod list length) for deterministic pseudo-random choices.
        """
        for i in range(0, count):
            # Get 1/count of the hash per slot.
            blocksize = int(len(self.hexdigest) / count)
            currentstart = (1 + i) * blocksize - blocksize
            currentend = (1 + i) * blocksize
            self.hasharray.append(int(self.hexdigest[currentstart:currentend], 16))

    def _listdirs(self, path):
        """Return the names of the immediate subdirectories of *path*."""
        return [d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))]

    def _get_list_of_files(self, path):
        """
        Choose one file from each subdirectory of *path*.

        Consumes one hash slot (``self.iter``) per directory so every choice
        is deterministic for a given input string.
        """
        chosen_files = []
        # Get a list of all subdirectories, skipping hidden ones.
        directories = []
        for root, dirs, files in os.walk(path, topdown=False):
            for name in dirs:
                # Bug fix: was `name[:1] is not '.'` - an identity comparison
                # against a string literal that only worked via CPython
                # small-string interning (SyntaxWarning on modern Pythons).
                if not name.startswith('.'):
                    directories.append(os.path.join(root, name))
        # Go through each directory in the list, and choose one file from
        # each; add it to our master list of robot parts.
        for directory in directories:
            files_in_dir = []
            for imagefile in os.listdir(directory):
                files_in_dir.append(os.path.join(directory, imagefile))
            # Use the next hash slot to choose which file.
            element_in_list = self.hasharray[self.iter] % len(files_in_dir)
            chosen_files.append(files_in_dir[element_in_list])
            self.iter += 1
        return chosen_files

    def assemble(self, roboset=None, color=None, format=None, bgset=None, sizex=300, sizey=300):
        """
        Build our Robot!

        Stores the assembled image on ``self.img`` and the chosen output
        format on ``self.format``.
        """
        # Allow users to manually specify a robot 'set' that they like.
        # Ensure that this is one of the allowed choices, or allow all.
        # If they don't set one, take the first entry from sets above.
        if roboset == 'any':
            roboset = self.sets[self.hasharray[1] % len(self.sets)]
        elif roboset in self.sets:
            roboset = roboset
        else:
            roboset = self.sets[0]
        # Only set1 is setup to be color-selectable.  The others don't have
        # enough pieces in various colors.
        if roboset == 'set1':
            if color in self.colors:
                roboset = 'set1/' + color
            else:
                randomcolor = self.colors[self.hasharray[0] % len(self.colors)]
                roboset = 'set1/' + randomcolor
        # If they specified a background, ensure it's legal, then use it.
        if bgset in self.bgsets:
            bgset = bgset
        elif bgset == 'any':
            bgset = self.bgsets[self.hasharray[2] % len(self.bgsets)]
        else:
            # Bug fix: an unknown bgset name previously fell through
            # unchanged and crashed on os.listdir below; treat it as
            # "no background" instead.  (None stays None, as before.)
            bgset = None
        # If we set a format based on extension earlier, use that. Otherwise, PNG.
        if format is None:
            format = self.format
        # Each directory in a set is one robot piece (eyes, nose, mouth, ...).
        # Directory names carry two numbers: the part before '#' fixes the
        # selection order across OSes; the part after '#' is the paste order -
        # e.g. the head must go down BEFORE the eyes, or the eyes are hidden.
        roboparts = self._get_list_of_files(self.resourcedir + 'sets/' + roboset)
        # Sort by the second number so pieces are pasted bottom-up.
        roboparts.sort(key=lambda x: x.split("#")[1])
        if bgset is not None:
            bglist = []
            backgrounds = os.listdir(self.resourcedir + 'backgrounds/' + bgset)
            backgrounds.sort()
            for ls in backgrounds:
                if not ls.startswith("."):
                    bglist.append(self.resourcedir + 'backgrounds/' + bgset + "/" + ls)
            background = bglist[self.hasharray[3] % len(bglist)]
        # Paste in each piece of the robot at full resolution.
        roboimg = Image.open(roboparts[0])
        roboimg = roboimg.resize((1024, 1024))
        for png in roboparts:
            img = Image.open(png)
            img = img.resize((1024, 1024))
            roboimg.paste(img, (0, 0), img)
        # BMP has no alpha channel, so flatten the image.
        if format == 'bmp':
            r, g, b, a = roboimg.split()
            roboimg = Image.merge("RGB", (r, g, b))
        if bgset is not None:
            bg = Image.open(background)
            bg = bg.resize((1024, 1024))
            bg.paste(roboimg, (0, 0), roboimg)
            roboimg = bg
        # NOTE(review): Image.ANTIALIAS is a deprecated alias removed in
        # Pillow 10; switch to Image.LANCZOS once the Pillow floor allows.
        self.img = roboimg.resize((sizex, sizey), Image.ANTIALIAS)
        self.format = format
| {
"content_hash": "86bd64d7b4813e90774582b6dd4f2685",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 107,
"avg_line_length": 38.1578947368421,
"alnum_prop": 0.5801379310344827,
"repo_name": "bev-a-tron/Robohash",
"id": "e4251d2d8d2f3cd589a291f78f024becfb3b68ca",
"size": "7304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "robohash/robohash.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "30229"
},
{
"name": "HTML",
"bytes": "25195"
},
{
"name": "JavaScript",
"bytes": "66657"
},
{
"name": "Python",
"bytes": "35019"
},
{
"name": "Shell",
"bytes": "204"
}
],
"symlink_target": ""
} |
from .. import mfpackage
from ..data.mfdatautil import ArrayTemplateGenerator, ListTemplateGenerator
class ModflowGwtdisv(mfpackage.MFPackage):
    """
    ModflowGwtdisv defines a disv package within a gwt6 model.
    Parameters
    ----------
    model : MFModel
        Model that this package is a part of. Package is automatically
        added to model when it is initialized.
    loading_package : bool
        Do not set this parameter. It is intended for debugging and internal
        processing purposes only.
    length_units : string
        * length_units (string) is the length units used for this model. Values
          can be "FEET", "METERS", or "CENTIMETERS". If not specified, the
          default is "UNKNOWN".
    nogrb : boolean
        * nogrb (boolean) keyword to deactivate writing of the binary grid
          file.
    xorigin : double
        * xorigin (double) x-position of the origin used for model grid
          vertices. This value should be provided in a real-world coordinate
          system. A default value of zero is assigned if not specified. The
          value for XORIGIN does not affect the model simulation, but it is
          written to the binary grid file so that postprocessors can locate the
          grid in space.
    yorigin : double
        * yorigin (double) y-position of the origin used for model grid
          vertices. This value should be provided in a real-world coordinate
          system. If not specified, then a default value equal to zero is used.
          The value for YORIGIN does not affect the model simulation, but it is
          written to the binary grid file so that postprocessors can locate the
          grid in space.
    angrot : double
        * angrot (double) counter-clockwise rotation angle (in degrees) of the
          model grid coordinate system relative to a real-world coordinate
          system. If not specified, then a default value of 0.0 is assigned.
          The value for ANGROT does not affect the model simulation, but it is
          written to the binary grid file so that postprocessors can locate the
          grid in space.
    nlay : integer
        * nlay (integer) is the number of layers in the model grid.
    ncpl : integer
        * ncpl (integer) is the number of cells per layer. This is a constant
          value for the grid and it applies to all layers.
    nvert : integer
        * nvert (integer) is the total number of (x, y) vertex pairs used to
          characterize the horizontal configuration of the model grid.
    top : [double]
        * top (double) is the top elevation for each cell in the top model
          layer.
    botm : [double]
        * botm (double) is the bottom elevation for each cell.
    idomain : [integer]
        * idomain (integer) is an optional array that characterizes the
          existence status of a cell. If the IDOMAIN array is not specified,
          then all model cells exist within the solution. If the IDOMAIN value
          for a cell is 0, the cell does not exist in the simulation. Input and
          output values will be read and written for the cell, but internal to
          the program, the cell is excluded from the solution. If the IDOMAIN
          value for a cell is 1, the cell exists in the simulation. If the
          IDOMAIN value for a cell is -1, the cell does not exist in the
          simulation. Furthermore, the first existing cell above will be
          connected to the first existing cell below. This type of cell is
          referred to as a "vertical pass through" cell.
    vertices : [iv, xv, yv]
        * iv (integer) is the vertex number. Records in the VERTICES block must
          be listed in consecutive order from 1 to NVERT. This argument is an
          index variable, which means that it should be treated as zero-based
          when working with FloPy and Python. Flopy will automatically subtract
          one when loading index variables and add one when writing index
          variables.
        * xv (double) is the x-coordinate for the vertex.
        * yv (double) is the y-coordinate for the vertex.
    cell2d : [icell2d, xc, yc, ncvert, icvert]
        * icell2d (integer) is the CELL2D number. Records in the CELL2D block
          must be listed in consecutive order from the first to the last. This
          argument is an index variable, which means that it should be treated
          as zero-based when working with FloPy and Python. Flopy will
          automatically subtract one when loading index variables and add one
          when writing index variables.
        * xc (double) is the x-coordinate for the cell center.
        * yc (double) is the y-coordinate for the cell center.
        * ncvert (integer) is the number of vertices required to define the
          cell. There may be a different number of vertices for each cell.
        * icvert (integer) is an array of integer values containing vertex
          numbers (in the VERTICES block) used to define the cell. Vertices
          must be listed in clockwise order. Cells that are connected must
          share vertices. This argument is an index variable, which means that
          it should be treated as zero-based when working with FloPy and
          Python. Flopy will automatically subtract one when loading index
          variables and add one when writing index variables.
    filename : String
        File name for this package.
    pname : String
        Package name for this package.
    parent_file : MFPackage
        Parent package file that references this package. Only needed for
        utility packages (mfutl*). For example, mfutllaktab package must have
        a mfgwflak package parent_file.
    """
    # Template generators let callers construct array/list input in the
    # layout this package expects, keyed by (model, package, block, var).
    top = ArrayTemplateGenerator(("gwt6", "disv", "griddata", "top"))
    botm = ArrayTemplateGenerator(("gwt6", "disv", "griddata", "botm"))
    idomain = ArrayTemplateGenerator(("gwt6", "disv", "griddata", "idomain"))
    vertices = ListTemplateGenerator(("gwt6", "disv", "vertices", "vertices"))
    cell2d = ListTemplateGenerator(("gwt6", "disv", "cell2d", "cell2d"))
    # Identification strings used by the MF6 package framework.
    package_abbr = "gwtdisv"
    _package_type = "disv"
    dfn_file_name = "gwt-disv.dfn"
    # NOTE: this class is auto-generated from the dfn file above; the `dfn`
    # metadata below drives the generic MFPackage read/write machinery and
    # should not be edited by hand.
    dfn = [
        [
            "header",
        ],
        [
            "block options",
            "name length_units",
            "type string",
            "reader urword",
            "optional true",
        ],
        [
            "block options",
            "name nogrb",
            "type keyword",
            "reader urword",
            "optional true",
        ],
        [
            "block options",
            "name xorigin",
            "type double precision",
            "reader urword",
            "optional true",
        ],
        [
            "block options",
            "name yorigin",
            "type double precision",
            "reader urword",
            "optional true",
        ],
        [
            "block options",
            "name angrot",
            "type double precision",
            "reader urword",
            "optional true",
        ],
        [
            "block dimensions",
            "name nlay",
            "type integer",
            "reader urword",
            "optional false",
        ],
        [
            "block dimensions",
            "name ncpl",
            "type integer",
            "reader urword",
            "optional false",
        ],
        [
            "block dimensions",
            "name nvert",
            "type integer",
            "reader urword",
            "optional false",
        ],
        [
            "block griddata",
            "name top",
            "type double precision",
            "shape (ncpl)",
            "reader readarray",
        ],
        [
            "block griddata",
            "name botm",
            "type double precision",
            "shape (nlay, ncpl)",
            "reader readarray",
            "layered true",
        ],
        [
            "block griddata",
            "name idomain",
            "type integer",
            "shape (nlay, ncpl)",
            "reader readarray",
            "layered true",
            "optional true",
        ],
        [
            "block vertices",
            "name vertices",
            "type recarray iv xv yv",
            "reader urword",
            "optional false",
        ],
        [
            "block vertices",
            "name iv",
            "type integer",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
            "numeric_index true",
        ],
        [
            "block vertices",
            "name xv",
            "type double precision",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
        ],
        [
            "block vertices",
            "name yv",
            "type double precision",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
        ],
        [
            "block cell2d",
            "name cell2d",
            "type recarray icell2d xc yc ncvert icvert",
            "reader urword",
            "optional false",
        ],
        [
            "block cell2d",
            "name icell2d",
            "type integer",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
            "numeric_index true",
        ],
        [
            "block cell2d",
            "name xc",
            "type double precision",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
        ],
        [
            "block cell2d",
            "name yc",
            "type double precision",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
        ],
        [
            "block cell2d",
            "name ncvert",
            "type integer",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
        ],
        [
            "block cell2d",
            "name icvert",
            "type integer",
            "shape (ncvert)",
            "in_record true",
            "tagged false",
            "reader urword",
            "optional false",
            "numeric_index true",
        ],
    ]
    def __init__(
        self,
        model,
        loading_package=False,
        length_units=None,
        nogrb=None,
        xorigin=None,
        yorigin=None,
        angrot=None,
        nlay=None,
        ncpl=None,
        nvert=None,
        top=None,
        botm=None,
        idomain=None,
        vertices=None,
        cell2d=None,
        filename=None,
        pname=None,
        parent_file=None,
    ):
        """Construct the gwt6 disv package and register it with *model*."""
        super().__init__(
            model, "disv", filename, pname, loading_package, parent_file
        )
        # set up variables: build_mfdata wires each value into the package's
        # data structures using the dfn metadata above.
        self.length_units = self.build_mfdata("length_units", length_units)
        self.nogrb = self.build_mfdata("nogrb", nogrb)
        self.xorigin = self.build_mfdata("xorigin", xorigin)
        self.yorigin = self.build_mfdata("yorigin", yorigin)
        self.angrot = self.build_mfdata("angrot", angrot)
        self.nlay = self.build_mfdata("nlay", nlay)
        self.ncpl = self.build_mfdata("ncpl", ncpl)
        self.nvert = self.build_mfdata("nvert", nvert)
        self.top = self.build_mfdata("top", top)
        self.botm = self.build_mfdata("botm", botm)
        self.idomain = self.build_mfdata("idomain", idomain)
        self.vertices = self.build_mfdata("vertices", vertices)
        self.cell2d = self.build_mfdata("cell2d", cell2d)
        self._init_complete = True
| {
"content_hash": "c2d1bded0a4a8ae8cec37b761218ec28",
"timestamp": "",
"source": "github",
"line_count": 332,
"max_line_length": 79,
"avg_line_length": 35.78915662650602,
"alnum_prop": 0.5507490321494698,
"repo_name": "jentjr/flopy",
"id": "63b4d8c2d489472b51d1cde5b2585f318b65a20f",
"size": "12024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flopy/mf6/modflow/mfgwtdisv.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "832"
},
{
"name": "CSS",
"bytes": "321"
},
{
"name": "Makefile",
"bytes": "634"
},
{
"name": "Python",
"bytes": "6353118"
},
{
"name": "Shell",
"bytes": "292"
}
],
"symlink_target": ""
} |
'''
Control virtual machines via Salt
'''
# Import Salt libs
import salt.client
import salt.output
import salt.utils.virt
def _determine_hyper(data, omit=''):
'''
Determine what the most resource free hypervisor is based on the given
data
'''
# This is just checking for the hyper with the most free ram, this needs
# to be much more complicated.
hyper = ''
bestmem = 0
bestcpu = 0
for hv_, comps in data.items():
if hv_ == omit:
continue
if not isinstance(comps, dict):
continue
if comps.get('freemem', 0) > bestmem:
bestmem = comps['freemem']
hyper = hv_
return hyper
def _find_vm(name, data, quiet=False):
'''
Scan the query data for the named vm
'''
for hv_ in data:
# Check if data is a dict, and not '"virt.full_info" is not available.'
if not isinstance(data[hv_], dict):
continue
if name in data[hv_].get('vm_info', {}):
ret = {hv_: {name: data[hv_]['vm_info'][name]}}
if not quiet:
salt.output.display_output(
ret,
'nested',
__opts__)
return ret
return {}
def query(hyper=None, quiet=False):
    '''
    Query the virtual machines

    Fans ``virt.full_info`` out to every minion carrying the grain
    ``virtual:physical`` and merges the replies into one dict keyed by
    minion id, optionally restricted to a single hypervisor.
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    for info in client.cmd_iter('virtual:physical',
                                'virt.full_info', expr_form='grain'):
        if not info:
            continue
        if not isinstance(info, dict):
            continue
        chunk = {}
        # Each iterator item is {minion_id: {'ret': ...}}.
        # NOTE: dict.keys()[0] is Python 2 only (keys() is a view in 3.x).
        id_ = info.keys()[0]
        if hyper:
            if hyper != id_:
                continue
        # Skip malformed or error replies.
        if not isinstance(info[id_], dict):
            continue
        if 'ret' not in info[id_]:
            continue
        if not isinstance(info[id_]['ret'], dict):
            continue
        chunk[id_] = info[id_]['ret']
        ret.update(chunk)
        if not quiet:
            salt.output.display_output(chunk, 'virt_query', __opts__)
    return ret
def list(hyper=None, quiet=False):
    '''
    List the virtual machines on each hyper

    NOTE: shadows the builtin ``list``; the name is kept because it is this
    runner's public CLI command name.
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    for info in client.cmd_iter('virtual:physical',
                                'virt.vm_info', expr_form='grain'):
        if not info:
            continue
        if not isinstance(info, dict):
            continue
        chunk = {}
        # Each iterator item is {minion_id: {'ret': ...}} (py2 keys() index).
        id_ = info.keys()[0]
        if hyper:
            if hyper != id_:
                continue
        # Skip malformed or error replies.
        if not isinstance(info[id_], dict):
            continue
        if 'ret' not in info[id_]:
            continue
        if not isinstance(info[id_]['ret'], dict):
            continue
        data = {}
        # Group vm names by their reported state, e.g. {'running': [...]}.
        for k, v in info[id_]['ret'].items():
            if v['state'] in data:
                data[v['state']].append(k)
            else:
                data[v['state']] = [k]
        chunk[id_] = data
        ret.update(chunk)
        if not quiet:
            salt.output.display_output(chunk, 'virt_list', __opts__)
    return ret
def next_hyper():
    '''
    Return the hypervisor to use for the next autodeployed vm
    '''
    hyper = _determine_hyper(query(quiet=True))
    # Print for the CLI user as well as returning for programmatic callers.
    print(hyper)
    return hyper
def hyper_info(hyper=None):
    '''
    Return information about the hypervisors connected to this master
    '''
    data = query(hyper, quiet=True)
    # Strip the per-vm details; only hypervisor-level info is wanted here.
    for id_ in data:
        if 'vm_info' in data[id_]:
            data[id_].pop('vm_info')
    salt.output.display_output(data, 'nested', __opts__)
    return data
def init(
        name,
        cpu,
        mem,
        image,
        hyper=None,
        seed=True,
        nic='default',
        install=True):
    '''
    Initialize a new vm

    Picks a hypervisor automatically (most free memory) unless ``hyper`` is
    given; returns 'good' on success, 'fail' otherwise.
    '''
    print('Searching for Hypervisors')
    data = query(hyper, quiet=True)
    # Check if the name is already deployed.
    # Bug fix: the loop used `for hyper in data`, clobbering the `hyper`
    # parameter - so the `hyper is None` auto-selection below could never
    # trigger whenever any hypervisor reported in.
    for hv_ in data:
        if 'vm_info' in data[hv_]:
            if name in data[hv_]['vm_info']:
                print('Virtual machine {0} is already deployed'.format(name))
                return 'fail'
    if hyper is None:
        hyper = _determine_hyper(data)
    if hyper not in data or not hyper:
        print('Hypervisor {0} was not found'.format(hyper))
        return 'fail'
    if seed:
        # Pre-authorize the new minion's key so it can connect immediately.
        print('Minion will be preseeded')
        kv = salt.utils.virt.VirtKey(hyper, name, __opts__)
        kv.authorize()
    client = salt.client.LocalClient(__opts__['conf_file'])
    print('Creating VM {0} on hypervisor {1}'.format(name, hyper))
    cmd_ret = client.cmd_iter(
        hyper,
        'virt.init',
        [
            name,
            cpu,
            mem,
            image,
            'seed={0}'.format(seed),
            'nic={0}'.format(nic),
            'install={0}'.format(install),
        ],
        timeout=600)
    ret = next(cmd_ret)
    if not ret:
        print('VM {0} was not initialized.'.format(name))
        return 'fail'
    print('VM {0} initialized on hypervisor {1}'.format(name, hyper))
    return 'good'
def vm_info(name, quiet=False):
    '''
    Return the information on the named vm
    '''
    # Query every hypervisor, then pick out the requested vm.
    data = query(quiet=True)
    return _find_vm(name, data, quiet)
def reset(name):
    '''
    Force power down and restart an existing vm
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    data = vm_info(name, quiet=True)
    if not data:
        print('Failed to find vm {0} to reset'.format(name))
        return 'fail'
    # vm_info returns {hyper: {name: info}} (py2 keys() indexing).
    hyper = data.keys()[0]
    cmd_ret = client.cmd_iter(
        hyper,
        'virt.reset',
        [name],
        timeout=600)
    # Merge the per-minion results.
    for comp in cmd_ret:
        ret.update(comp)
    print('Reset VM {0}'.format(name))
    # NOTE(review): unlike start/purge/pause this returns the raw result
    # dict rather than 'good' - confirm whether callers rely on that.
    return ret
def start(name):
    '''
    Start a named virtual machine
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    data = vm_info(name, quiet=True)
    if not data:
        print('Failed to find vm {0} to start'.format(name))
        return 'fail'
    # vm_info returns {hyper: {name: info}} (py2 keys() indexing).
    hyper = data.keys()[0]
    if data[hyper][name]['state'] == 'running':
        print('VM {0} is already running'.format(name))
        return 'bad state'
    cmd_ret = client.cmd_iter(
        hyper,
        'virt.start',
        [name],
        timeout=600)
    # Drain the iterator so the command actually executes.
    for comp in cmd_ret:
        ret.update(comp)
    print('Started VM {0}'.format(name))
    return 'good'
def force_off(name):
    '''
    Force power down the named virtual machine

    Returns 'good' on success, 'fail' when the vm is unknown and
    'bad state' when it is already shut down.
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    data = vm_info(name, quiet=True)
    if not data:
        print('Failed to find vm {0} to destroy'.format(name))
        return 'fail'
    # vm_info returns {hyper: {name: info}} (py2 keys() indexing).
    hyper = data.keys()[0]
    if data[hyper][name]['state'] == 'shutdown':
        print('VM {0} is already shutdown'.format(name))
        # Fixed formatting: was `return'bad state'` (no space); same value,
        # now consistent with the sibling runners.
        return 'bad state'
    cmd_ret = client.cmd_iter(
        hyper,
        'virt.destroy',
        [name],
        timeout=600)
    # Drain the iterator so the command actually executes.
    for comp in cmd_ret:
        ret.update(comp)
    print('Powered off VM {0}'.format(name))
    return 'good'
def purge(name):
    '''
    Destroy the named vm
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    data = vm_info(name, quiet=True)
    if not data:
        print('Failed to find vm {0} to purge'.format(name))
        return 'fail'
    # vm_info returns {hyper: {name: info}} (py2 keys() indexing).
    hyper = data.keys()[0]
    cmd_ret = client.cmd_iter(
        hyper,
        'virt.purge',
        [name, True],
        timeout=600)
    # Drain the iterator so the command actually executes.
    for comp in cmd_ret:
        ret.update(comp)
    print('Purged VM {0}'.format(name))
    return 'good'
def pause(name):
    '''
    Pause the named vm
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    data = vm_info(name, quiet=True)
    if not data:
        print('Failed to find VM {0} to pause'.format(name))
        return 'fail'
    # vm_info returns {hyper: {name: info}} (py2 keys() indexing).
    hyper = data.keys()[0]
    if data[hyper][name]['state'] == 'paused':
        print('VM {0} is already paused'.format(name))
        return 'bad state'
    cmd_ret = client.cmd_iter(
        hyper,
        'virt.pause',
        [name],
        timeout=600)
    # Drain the iterator so the command actually executes.
    for comp in cmd_ret:
        ret.update(comp)
    print('Paused VM {0}'.format(name))
    return 'good'
def resume(name):
    '''
    Resume a paused vm

    Returns 'good' on success, 'not found' when the vm is unknown and
    'bad state' when it is not currently paused.
    '''
    ret = {}
    client = salt.client.LocalClient(__opts__['conf_file'])
    data = vm_info(name, quiet=True)
    if not data:
        # Bug fix: message said "to pause" (copy-paste from pause()).
        print('Failed to find VM {0} to resume'.format(name))
        return 'not found'
    # vm_info returns {hyper: {name: info}} (py2 keys() indexing).
    hyper = data.keys()[0]
    if data[hyper][name]['state'] != 'paused':
        print('VM {0} is not paused'.format(name))
        return 'bad state'
    cmd_ret = client.cmd_iter(
        hyper,
        'virt.resume',
        [name],
        timeout=600)
    # Drain the iterator so the command actually executes.
    for comp in cmd_ret:
        ret.update(comp)
    print('Resumed VM {0}'.format(name))
    return 'good'
def migrate(name, target=''):
    '''
    Migrate a vm from one hypervisor to another. This routine will just start
    the migration and display information on how to look up the progress
    '''
    client = salt.client.LocalClient(__opts__['conf_file'])
    data = query(quiet=True)
    origin_data = _find_vm(name, data, quiet=True)
    try:
        # py2 keys() indexing; raises IndexError when the vm was not found.
        origin_hyper = origin_data.keys()[0]
    except IndexError:
        print('Named vm {0} was not found to migrate'.format(name))
        return ''
    disks = origin_data[origin_hyper][name]['disks']
    # NOTE(review): this check can never fire - an empty origin_data already
    # raised IndexError above, and the disks lookup precedes it.
    if not origin_data:
        print('Named vm {0} was not found to migrate'.format(name))
        return ''
    if not target:
        # Pick the most resource-free hypervisor, excluding the origin.
        target = _determine_hyper(data, origin_hyper)
    if target not in data:
        print('Target hypervisor {0} not found'.format(origin_data))
        return ''
    # Pre-seed the non-shared disks on the target, then kick off migration.
    client.cmd(target, 'virt.seed_non_shared_migrate', [disks, True])
    # Python 2 print statement; shows the async job id for progress lookup.
    print client.cmd_async(origin_hyper,
            'virt.migrate_non_shared',
            [name, target])
| {
"content_hash": "1d3edd059c4ace34640ac178ed5f1ce6",
"timestamp": "",
"source": "github",
"line_count": 376,
"max_line_length": 79,
"avg_line_length": 27.10904255319149,
"alnum_prop": 0.5302658687334445,
"repo_name": "MadeiraCloud/salt",
"id": "01742124c5fbc8aa84018c8fd478d2a318f9ff17",
"size": "10217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sources/salt/runners/virt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "10058"
},
{
"name": "Makefile",
"bytes": "1815"
},
{
"name": "Python",
"bytes": "4530204"
},
{
"name": "Shell",
"bytes": "169676"
}
],
"symlink_target": ""
} |
import os
import sys
sys.path.insert(0, os.path.abspath('lib'))
from fource import __version__, __author__
try:
    from setuptools import setup, find_packages
except ImportError:
    print("Fource needs setuptools in order to build. Install it using"
          " your package manager (usually python-setuptools) or via pip (pip"
          " install setuptools).")
    sys.exit(1)

# PyPI trove classifiers (https://pypi.org/classifiers/).
# Fixed: "License :: OSI Approved :: MIT License" was listed twice.
CLASSIFIERS = [
    "Programming Language :: Python",
    "Operating System :: OS Independent",
    "License :: OSI Approved :: MIT License",
    "Intended Audience :: Developers",
    "Development Status :: 4 - Beta",
    "Environment :: Console",
    "Topic :: Utilities",
]

# read requirements: one specifier per line, whitespace stripped.
fname = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(fname) as f:
    requires = [line.strip() for line in f]

setup(name = 'fource',
      version = __version__,
      description = 'Fully automated status board application',
      author = __author__,
      author_email = 'hi@fource.in',
      url = 'http://fource.in/',
      license = 'MIT',
      install_requires = requires,
      package_dir = { '': 'lib' },
      packages = find_packages('lib'),
      classifiers = CLASSIFIERS,
      scripts = [
          'bin/fource',
      ],
      data_files = [],
      )
| {
"content_hash": "10aaa14b08e77190fe2c45ef220720b7",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 79,
"avg_line_length": 28.48936170212766,
"alnum_prop": 0.6131441374159821,
"repo_name": "fource/fource",
"id": "77f0ac3c38c0507231ab6c7454daafb8f636134d",
"size": "1470",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "158"
},
{
"name": "Perl",
"bytes": "19142"
},
{
"name": "Python",
"bytes": "27735"
},
{
"name": "Ruby",
"bytes": "9689"
},
{
"name": "Shell",
"bytes": "21373"
}
],
"symlink_target": ""
} |
# Django app-loading hook: points at the custom AppConfig for this app.
# NOTE(review): ``default_app_config`` is deprecated in Django 3.2+ (the
# config is auto-detected there) -- confirm the project's Django version.
default_app_config = 'info_birthdays.apps.InfoBirthdaysConfig'
| {
"content_hash": "757f7e28b0ab9a8ceb4f5ea2e4c7025e",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 62,
"avg_line_length": 63,
"alnum_prop": 0.8253968253968254,
"repo_name": "ojarva/home-info-display",
"id": "6927d2cf649b950ce222e3fb10190cb1dc3adadd",
"size": "63",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homedisplay/info_birthdays/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "22171"
},
{
"name": "CoffeeScript",
"bytes": "115283"
},
{
"name": "HTML",
"bytes": "51598"
},
{
"name": "JavaScript",
"bytes": "9902"
},
{
"name": "Python",
"bytes": "310675"
},
{
"name": "Shell",
"bytes": "1617"
}
],
"symlink_target": ""
} |
import pathlib
from ...helpers import article
from .._helpers import _read, register
# Bibliographic reference attached to the cubature schemes defined below.
source = article(
    authors=["C.R. Morrow", "T.N.L. Patterson"],
    title="The Construction of Algebraic Cubature Formulae by the Distribution of Nodes Along Selected Lines",
    journal="SIAM J. Numer. Anal.",
    volume="22",
    number="6",
    year="1985",
    pages="1178–1190",
    url="https://doi.org/10.1137/0722071",
)

# Directory holding the JSON data files shipped alongside this module.
this_dir = pathlib.Path(__file__).resolve().parent
def morrow_patterson_1():
    """Load the first Morrow-Patterson scheme from its bundled JSON file."""
    scheme_file = this_dir / "morrow_patterson_1.json"
    return _read(scheme_file, source)
def morrow_patterson_2():
    """Load the second Morrow-Patterson scheme from its bundled JSON file."""
    scheme_file = this_dir / "morrow_patterson_2.json"
    return _read(scheme_file, source)
# Expose both schemes through the package-level scheme registry.
register([morrow_patterson_1, morrow_patterson_2])
| {
"content_hash": "5409e3246a374adec5f8ef03dca8377e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 110,
"avg_line_length": 25.214285714285715,
"alnum_prop": 0.6770538243626062,
"repo_name": "nschloe/quadpy",
"id": "add43f79c80cea5a5ff4b1487ad30202b4e68ae5",
"size": "708",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/quadpy/c2/_morrow_patterson/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "992"
},
{
"name": "Python",
"bytes": "897850"
}
],
"symlink_target": ""
} |
import os

from PIL import Image

# Collect the 8x8 glyph bitmaps.  BUG FIX: os.listdir() returns entries in
# arbitrary, platform-dependent order, which made the glyph order in the
# output strip nondeterministic -- sort the names to fix it.
files = sorted(
    filename for filename in os.listdir("./font_standard")
    if filename.endswith(".bmp")
)

# Paste each 8x8 glyph side by side into one horizontal strip image.
im = Image.new('RGB', (8 * len(files), 8), "#ffffff")
for index, file in enumerate(files):
    bitmap_char = Image.open("./font_standard/" + file)
    im.paste(bitmap_char, (8 * index, 0))

im.save("standard_font.bmp")
print("ok")
| {
"content_hash": "77f2b52f9116cbed0a59865269c69c8e",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 55,
"avg_line_length": 22.38888888888889,
"alnum_prop": 0.6327543424317618,
"repo_name": "rex64/unnamed-dungeon-crawler",
"id": "bec6c9dfaf29b86a382e3bf8e6c6174d432709fd",
"size": "403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/font_make.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5821"
},
{
"name": "C++",
"bytes": "92976"
},
{
"name": "CMake",
"bytes": "2085"
},
{
"name": "Lua",
"bytes": "79764"
},
{
"name": "Shell",
"bytes": "102"
}
],
"symlink_target": ""
} |
import os.path
import mock
import pytest
import simplejson
from pyramid_swagger.ingest import API_DOCS_FILENAME
from pyramid_swagger.ingest import create_bravado_core_config
from pyramid_swagger.ingest import _load_resource_listing
from pyramid_swagger.ingest import generate_resource_listing
from pyramid_swagger.ingest import get_swagger_schema
from pyramid_swagger.ingest import get_swagger_spec
from pyramid_swagger.ingest import get_resource_listing
from pyramid_swagger.ingest import ingest_resources
from pyramid_swagger.ingest import ApiDeclarationNotFoundError
from pyramid_swagger.ingest import ResourceListingGenerationError
from pyramid_swagger.ingest import ResourceListingNotFoundError
from pyramid_swagger.tween import SwaggerFormat
def test_proper_error_on_missing_resource_listing():
    """A missing api_docs.json raises an error that names the expected file."""
    filename = 'tests/sample_schemas/missing_resource_listing/api_docs.json'
    with pytest.raises(ResourceListingNotFoundError) as exc:
        _load_resource_listing(filename)
    # BUG FIX: inspect the exception itself -- str(ExceptionInfo) is
    # deprecated in modern pytest (it yields the raise location, not the
    # message); str(exc.value) is the supported spelling.
    assert filename in str(exc.value)
    assert 'must be named {0}'.format(API_DOCS_FILENAME) in str(exc.value)
def test_proper_error_on_missing_api_declaration():
    """Referencing a nonexistent api declaration raises and names the file."""
    with pytest.raises(ApiDeclarationNotFoundError) as exc:
        ingest_resources(
            {'sample_resource': 'fake/sample_resource.json'},
            'fake',
        )
    # BUG FIX: str(exc.value) gives the message; str(exc) is deprecated in
    # modern pytest and may not contain it.
    assert 'fake/sample_resource.json' in str(exc.value)
# Mock arguments are injected bottom-up: the decorator closest to the
# function (Spec.from_dict) becomes the first parameter.
@mock.patch('pyramid_swagger.ingest.build_http_handlers',
            return_value={'file': mock.Mock()})
@mock.patch('os.path.abspath', return_value='/bar/foo/swagger.json')
@mock.patch('pyramid_swagger.ingest.Spec.from_dict')
def test_get_swagger_spec_passes_absolute_url(
    mock_spec, mock_abs, mock_http_handlers,
):
    """The spec loader must hand bravado-core a file:// URL built from the
    absolute path of the configured schema directory."""
    get_swagger_spec({'pyramid_swagger.schema_directory': 'foo/'})
    mock_abs.assert_called_once_with('foo/swagger.json')
    expected_url = "file:///bar/foo/swagger.json"
    mock_spec.assert_called_once_with(mock.ANY, config=mock.ANY,
                                      origin_url=expected_url)
def test_get_swagger_schema_default():
    """With only a schema directory configured, ingestion uses the defaults."""
    config = {
        'pyramid_swagger.schema_directory': 'tests/sample_schemas/good_app/',
    }
    schema = get_swagger_schema(config)
    assert len(schema.pyramid_endpoints) == 4
    assert schema.resource_validators
def test_get_swagger_schema_no_validation():
    """Disabling spec validation lets a known-bad schema ingest quietly."""
    config = {
        'pyramid_swagger.schema_directory': 'tests/sample_schemas/bad_app/',
        'pyramid_swagger.enable_swagger_spec_validation': False,
    }
    # Not raising is the assertion: validation of the bad schema was skipped.
    get_swagger_schema(config)
def test_generate_resource_listing():
    """API paths discovered in the schema directory are appended to the listing."""
    result = generate_resource_listing(
        'tests/sample_schemas/good_app/',
        {'swaggerVersion': 1.2},
    )
    assert result == {
        'swaggerVersion': 1.2,
        'apis': [
            {'path': '/no_models'},
            {'path': '/other_sample'},
            {'path': '/sample'},
        ],
    }
def test_generate_resource_listing_with_existing_listing():
    """Generation must refuse to clobber an 'apis' key that already exists."""
    listing = {
        'apis': [{'path': '/something'}]
    }
    with pytest.raises(ResourceListingGenerationError) as exc:
        generate_resource_listing('tests/sample_schemas/good_app/', listing)
    # BUG FIX: str(exc.value) gives the message; str(exc) is deprecated in
    # modern pytest and may not contain it.
    assert 'Generating a listing would override' in str(exc.value)
@mock.patch('pyramid_swagger.ingest.generate_resource_listing', autospec=True)
@mock.patch('pyramid_swagger.ingest._load_resource_listing', autospec=True)
def test_get_resource_listing_generated(mock_load, mock_generate):
    """When generation is enabled, the loaded listing is fed to the generator
    and the generator's result is returned unchanged."""
    docs_dir = '/api_docs'
    result = get_resource_listing(docs_dir, True)
    mock_generate.assert_called_once_with(docs_dir, mock_load.return_value)
    assert result == mock_generate.return_value
def test_get_resource_listing_default():
    """Without generation, the listing is read verbatim from api_docs.json."""
    docs_dir = 'tests/sample_schemas/good_app/'
    listing = get_resource_listing(docs_dir, False)
    with open(os.path.join(docs_dir, 'api_docs.json')) as handle:
        expected = simplejson.load(handle)
    assert listing == expected
def test_create_bravado_core_config_with_defaults():
    """An empty pyramid config maps to bravado-core's minimal defaults."""
    result = create_bravado_core_config({})
    assert result == {'use_models': False}
def test_create_bravado_core_config_non_empty():
    """Each pyramid_swagger.* setting maps onto its bravado-core key."""
    custom_format = mock.Mock(spec=SwaggerFormat)
    settings = {
        'pyramid_swagger.enable_request_validation': True,
        'pyramid_swagger.enable_response_validation': False,
        'pyramid_swagger.enable_swagger_spec_validation': True,
        'pyramid_swagger.use_models': True,
        'pyramid_swagger.user_formats': [custom_format],
    }
    result = create_bravado_core_config(settings)
    assert result == {
        'validate_requests': True,
        'validate_responses': False,
        'validate_swagger_spec': True,
        'use_models': True,
        'formats': [custom_format],
    }
| {
"content_hash": "3a849e239aaa59c6808b32413a65286f",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 78,
"avg_line_length": 35.869565217391305,
"alnum_prop": 0.7002020202020202,
"repo_name": "analogue/pyramid_swagger",
"id": "bf39756ba97885626041778077a352dc35cde290",
"size": "4974",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/ingest_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "190"
},
{
"name": "Python",
"bytes": "108530"
}
],
"symlink_target": ""
} |
import os
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import RedirectView
# Absolute paths used below to serve static assets from <repo>/website/static.
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
ROOT_DIR = os.path.dirname(THIS_DIR)
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
    # Serve static files through Django (development convenience; production
    # deployments normally let the web server handle /static/).
    # NOTE(review): string view references like 'django.views.static.serve'
    # were removed in Django 1.10 -- confirm the pinned Django version.
    url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
        'document_root': ROOT_DIR+'/website/static',
    }, name='static'),
    url(r'^(assets/ico/)?favicon\.ico$', RedirectView.as_view(url='/static/images/favicon.ico')),
    # BUG FIX: escape the dot -- r'^robots.txt$' let any character match in
    # that position (e.g. '/robotsXtxt').
    url(r'^robots\.txt$', RedirectView.as_view(url='/static/robots.txt')),
    url(r'^', include('website.frontend.urls')),

    # Uncomment the admin/doc line below and add 'django.contrib.admindocs'
    # to INSTALLED_APPS to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    # (r'^admin/(.*)', admin.site.root),
]
| {
"content_hash": "7afcc7f23631c5014fef1fc605e92d83",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 97,
"avg_line_length": 40.42307692307692,
"alnum_prop": 0.6641294005708849,
"repo_name": "catcosmo/newsdiffs",
"id": "e394f22c9c34b07079fe5f33e214caeac57e4e35",
"size": "1051",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "website/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "340"
},
{
"name": "CSS",
"bytes": "16227"
},
{
"name": "HTML",
"bytes": "81337"
},
{
"name": "JavaScript",
"bytes": "198821"
},
{
"name": "Python",
"bytes": "173027"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.