text stringlengths 4 1.02M | meta dict |
|---|---|
from setuptools import setup

# Single source of truth for the release version; it is used both for the
# package metadata and for the tarball download URL.
VERSION = '0.1.5'

setup(
    name='vse',
    version=VERSION,
    author='Marcin K. Paszkiewicz',
    author_email='mkpaszkiewicz@gmail.com',
    description='A visual search engine using local features descriptors and bag of words, based on OpenCV',
    url='https://github.com/mkpaszkiewicz/vse',
    download_url='https://github.com/mkpaszkiewicz/vse/tarball/' + VERSION,
    packages=['vse'],
    keywords=['visual search engine computer vision local descriptors BoW'],
    install_requires=[
        'NumPy',
    ],
)
| {
"content_hash": "77a51d4208448e25bfa2c23a67ec43d6",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 110,
"avg_line_length": 36.733333333333334,
"alnum_prop": 0.6569872958257713,
"repo_name": "mkpaszkiewicz/vse",
"id": "3b905aae1ac140a07cd4f38590bfc82fd583aa37",
"size": "551",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27783"
}
],
"symlink_target": ""
} |
import types
import logging
import io
# Deprecated, remove, everything is parsed in app.py
from pywps import namespaces
# Indicates whether incoming requests should be validated against the WPS
# XSD schema (used by parse_xml below); disabled by default.
VALIDATE = False
class Request:
    """Base class for WPS requests.

    Instances are populated either from HTTP GET key-value pairs
    (:meth:`set_from_url`) or from a POST XML document (:meth:`set_from_xml`).
    """
    # Default negotiation attributes; `request` was declared twice in the
    # original — the duplicate has been removed.
    version = None
    request = None  # lower-cased request name, e.g. 'getcapabilities'
    service = "wps"
    validate = False
    lang = "en"

    def set_from_url(self, pairs):
        """Set values of this request based on url key-value pairs.

        `pairs` must use lower-cased keys; 'request' is mandatory (KeyError
        otherwise), 'version' and 'language' are optional.
        """
        if "version" in pairs:
            self.version = pairs["version"]
        self.request = pairs["request"].lower()
        if "language" in pairs:
            self.lang = pairs["language"]

    def set_from_xml(self, root):
        """Set values of this request based on an lxml/ElementTree root node."""
        global namespaces
        # Strip the WPS namespace prefix from the tag to get the request name.
        self.request = root.tag.lower().replace("{%s}" % namespaces["wps"], "")
        if "language" in root.attrib.keys():
            self.lang = root.attrib["language"]

    def is_valid(self):
        """Returns self-control of the request - if all necessary variables
        are set."""
        return True
def get_request(data):
    """returns request object in for of key-value pairs (for HTTP GET) or Tree
    (POST)
    """
    if isinstance(data, str):
        # HTTP GET: data is the raw query string
        return __get_request_from_url(parse_params(data))
    if isinstance(data, io.IOBase):
        # HTTP POST: data is a file-like object carrying the XML payload
        return __get_request_from_xml(parse_xml(data))
    # unsupported payload type
    return None
def parse_xml(data):
    """Parse the request body with lxml.objectify and return the tree root."""
    from lxml import etree
    from lxml import objectify
    logging.debug("Continuing with lxml xml etree parser")
    # Optional XSD validation, controlled by the module-level VALIDATE flag.
    schema = None
    if VALIDATE:
        schema = etree.XMLSchema(file="pywps/resources/schemas/wps_all.xsd")
    parser = objectify.makeparser(
        remove_blank_text=True,
        remove_comments=True,
        schema=schema,
    )
    return objectify.parse(data, parser).getroot()
def parse_params(data):
    """Parse a URL query string into a dict with lower-cased keys.

    Values keep their original case; on duplicate keys the last one wins.
    """
    logging.debug("Continuing with urllib.parse parser")
    params = {}
    for pair in data.split("&"):
        key, value = pair.split("=", 1)
        params[key.lower()] = value
    return params
def __get_request_from_url(pairs):
    """return Request object based on url key-value pairs params"""
    # Normalise all keys to lowercase before dispatching.
    pairs = {key.lower(): value for key, value in pairs.items()}
    request = None
    kind = pairs.get("request")
    if kind is not None:
        kind = kind.lower()
        if kind == "getcapabilities":
            from pywps.request import getcapabilities
            request = getcapabilities.GetCapabilities()
        elif kind == "describeprocess":
            from pywps.request import describeprocess
            request = describeprocess.DescribeProcess()
        elif kind == "execute":
            from pywps.request import execute
            request = execute.Execute()
    if request:
        request.set_from_url(pairs)
    # return request, whatever it may be (None for unknown request names)
    return request
def __get_request_from_xml(root):
    """return Request object based on xml etree root node"""
    global namespaces
    wps_ns = namespaces["wps"]
    request = None
    if root.tag == "{%s}GetCapabilities" % wps_ns:
        from pywps.request import getcapabilities
        request = getcapabilities.GetCapabilities()
    elif root.tag == "{%s}DescribeProcess" % wps_ns:
        from pywps.request import describeprocess
        request = describeprocess.DescribeProcess()
    elif root.tag == "{%s}Execute" % wps_ns:
        from pywps.request import execute
        request = execute.Execute()
    if request:
        request.set_from_xml(root)
    # return request, whatever it may be (None for unrecognised tags)
    return request
| {
"content_hash": "3d089d487a9890b04aae6216a683bbd6",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 78,
"avg_line_length": 28.04225352112676,
"alnum_prop": 0.6180311401305877,
"repo_name": "doclements/pywps-4",
"id": "72fe58cad9b639557c166a1b516898bec8c1a143",
"size": "3982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pywps/request/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "120283"
}
],
"symlink_target": ""
} |
"""
Module handles tasks in the to-do list
"""
class TodoTask:
    """
    Class represents a task in the to-do list
    """

    # Values stored in `completionStatus` to mark a task done / not done.
    TASK_DONE = 'done'
    TASK_UNDONE = 'todo'

    def __init__(self, description, position=None, completionStatus=TASK_UNDONE):
        """
        Initialize a to-do list task item

        :param description: human-readable text of the task
        :param position: ordering position in the list, or None if unset
        :param completionStatus: TASK_DONE or TASK_UNDONE (default)
        """
        self.description = description
        self.position = position
        self.completionStatus = completionStatus
def __str__(self):
"""
Return a human readable str representation of this task
"""
return self.description | {
"content_hash": "49e27b00c5092a9d94eadc6dd6f9ec64",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 81,
"avg_line_length": 19.766666666666666,
"alnum_prop": 0.5902192242833052,
"repo_name": "mes32/mfnd",
"id": "4580ea0b3a66718a8dd2cea4b84f060f1eb51c9a",
"size": "616",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mfnd/todotask.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "33394"
}
],
"symlink_target": ""
} |
import docker
from kippo.core.config import config
# Docker connection settings, read once at import time from the
# [docker] section of the kippo configuration.
server_uri=config().get('docker', 'uri')
docker_version=config().get('docker','version')
docker_name=config().get('docker','image_name')
def start_container(host_port=22):
    """Create and start a honeypot container from the configured image.

    :param host_port: host port bound to the container's SSH port 22
        (default 22 preserves the original hard-coded behaviour)
    :return: the Id string of the started container
    """
    client = docker.Client(base_url=server_uri, version=docker_version)
    container_id = str(client.create_container(image=docker_name)['Id'])
    # Container port 22 is exposed on the (now configurable) host port.
    client.start(container_id, port_bindings={22: host_port})
    return container_id
def stop_container(Id):
    """Stop the docker container identified by *Id*."""
    client = docker.Client(base_url=server_uri, version=docker_version)
    client.stop(Id)
| {
"content_hash": "57000f01f67238c0d4ff640b94e68a9d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 66,
"avg_line_length": 27.31578947368421,
"alnum_prop": 0.7071290944123314,
"repo_name": "coolhacks/docker-hacks",
"id": "8564193cc367b06c2c965475d46cc36e509f701a",
"size": "519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/dockpot/kippo/core/honeydocker.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "23973"
},
{
"name": "CSS",
"bytes": "1227"
},
{
"name": "Groff",
"bytes": "37179"
},
{
"name": "HTML",
"bytes": "50500"
},
{
"name": "Nginx",
"bytes": "3121"
},
{
"name": "Python",
"bytes": "10073"
},
{
"name": "Shell",
"bytes": "35683"
}
],
"symlink_target": ""
} |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.core.exceptions import ComponentIsNotRunning
from resource_management.core.logger import Logger
from resource_management.libraries.script import Script
from commands import Commands
class Indexing(Script):
    """Ambari lifecycle handlers for the Metron Indexing component."""

    def _commands(self, env, params_module):
        """Bind *params_module* into the Ambari env and wrap it in Commands."""
        env.set_params(params_module)
        return Commands(params_module)

    def install(self, env):
        import params
        commands = self._commands(env, params)
        commands.setup_repo()
        Logger.info('Install RPM packages')
        self.install_packages(env)

    def start(self, env, upgrade_type=None):
        import params
        commands = self._commands(env, params)
        # One-time initialisation of the Kafka topics before the first start.
        if not commands.is_configured():
            commands.init_kafka_topics()
            commands.set_configured()
        commands.start_indexing_topology()

    def stop(self, env, upgrade_type=None):
        import params
        commands = self._commands(env, params)
        commands.stop_indexing_topology()

    def status(self, env):
        import status_params
        commands = self._commands(env, status_params)
        # Ambari expects this exception when the component is down.
        if not commands.is_topology_active():
            raise ComponentIsNotRunning()

    def restart(self, env):
        import params
        commands = self._commands(env, params)
        commands.restart_indexing_topology()

    def kafkabuild(self, env, upgrade_type=None):
        import params
        commands = self._commands(env, params)
        commands.init_kafka_topics()
# Ambari invokes this script directly; Script.execute() dispatches to the
# lifecycle method named on the command line (install/start/stop/...).
if __name__ == "__main__":
    Indexing().execute()
| {
"content_hash": "07c5f079c5971d820807e30408e26183",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 72,
"avg_line_length": 32.66197183098591,
"alnum_prop": 0.6938335489435101,
"repo_name": "charlesporter/incubator-metron",
"id": "bfae19a2fba8e8f4005e0e9dd9a2a157bedfeb54",
"size": "2319",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metron-deployment/packaging/ambari/src/main/resources/common-services/INDEXING/0.2.0BETA/package/scripts/indexing_master.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "5912"
},
{
"name": "Bro",
"bytes": "5291"
},
{
"name": "C",
"bytes": "28835"
},
{
"name": "C++",
"bytes": "14044"
},
{
"name": "CMake",
"bytes": "5383"
},
{
"name": "CSS",
"bytes": "645635"
},
{
"name": "HTML",
"bytes": "26741"
},
{
"name": "Java",
"bytes": "2442215"
},
{
"name": "JavaScript",
"bytes": "147038"
},
{
"name": "Makefile",
"bytes": "4851"
},
{
"name": "OpenEdge ABL",
"bytes": "48753"
},
{
"name": "Python",
"bytes": "109343"
},
{
"name": "Ruby",
"bytes": "25644"
},
{
"name": "Shell",
"bytes": "81419"
}
],
"symlink_target": ""
} |
"""
EntryInstances
--------------
Every instance referenced in an xpath expression needs to be added to the relevant entry,
so that CommCare knows what data to load when. This includes case list calculations,
form display conditions, etc.
HQ knows about a particular set of instances (locations, reports, etc.).
There's factory-based code dealing with these "known" instances
When a new feature involves any kind of XPath calculation, it needs to be scanned for instances.
Instances are used to reference data beyond the scope of the current XML document.
Examples are the commcare session, casedb, lookup tables, mobile reports, case search data etc.
Instances are added into the suite file in ``<entry>`` elements and directly in the form XML. This is
done in post processing of the suite file in ``corehq.apps.app_manager.suite_xml.post_process.instances``.
How instances work
------------------
When running applications instances are initialized for the current context using an instance declaration
which ties the instance ID to the actual instance model:
<instance id="my-instance" ref="jr://fixture/my-fixture" />
This allows using the fixture with the specified ID:
instance('my-instance')path/to/node
From the mobile code point of view the ID is completely user defined and only used to 'register'
the instance in current context. The index 'ref' is used to determine which instance is attached
to the given ID.
Instances in CommCare HQ
------------------------
In CommCare HQ we allow app builders to reference instance in many places in the application
but don't require that the app builder define the full instance declaration.
When 'building' the app we rely on instance ID conventions to enable the build process to
determine what 'ref' to use for the instances used in the app.
For static instances like 'casedb' the instance ID must match a pre-defined name. For example
* casedb
* commcaresession
* groups
Other instances use a namespaced convention: "type:sub-type". For example:
* commcare-reports:<uuid>
* item-list:<fixture name>
Custom instances
----------------
There are two places in app builder where users can define custom instances:
* in a form using the 'CUSTOM_INSTANCES' plugin
* in 'Lookup Table Selection' case search properties under 'Advanced Lookup Table Options'
"""
import html
import re
from collections import defaultdict
from django.utils.translation import gettext as _
from memoized import memoized
from corehq import toggles
from corehq.apps.app_manager import id_strings
from corehq.apps.app_manager.exceptions import (
DuplicateInstanceIdError,
UnknownInstanceError,
)
from corehq.apps.app_manager.suite_xml.contributors import PostProcessor
from corehq.apps.app_manager.suite_xml.xml_models import Instance
from corehq.apps.app_manager.util import (
module_offers_search,
module_uses_inline_search,
)
from corehq.util.timer import time_method
class EntryInstances(PostProcessor):
    """Adds instance declarations to the suite file
    See docs/apps/instances.rst"""

    # srcs that are resolved by factories but must never be emitted into an
    # <entry> as <instance> declarations.
    IGNORED_INSTANCES = {
        'jr://instance/remote',
        'jr://instance/search-input',
    }

    @time_method()
    def update_suite(self):
        # Process every <entry> and every <remote-request> in the suite.
        for entry in self.suite.entries:
            self.add_entry_instances(entry)
        for remote_request in self.suite.remote_requests:
            self.add_entry_instances(remote_request)

    def add_entry_instances(self, entry):
        """Collect all xpaths reachable from *entry*, resolve the instances
        they reference, and attach the required <instance> declarations."""
        xpaths = self._get_all_xpaths_for_entry(entry)
        known_instances, unknown_instance_ids = get_all_instances_referenced_in_xpaths(self.app, xpaths)
        custom_instances, unknown_instance_ids = self._get_custom_instances(
            entry,
            known_instances,
            unknown_instance_ids
        )
        all_instances = known_instances | custom_instances
        self.require_instances(entry, instances=all_instances, instance_ids=unknown_instance_ids)

    def _get_all_xpaths_for_entry(self, entry):
        """Gather every xpath expression attached to *entry*: datum nodesets
        and functions, query data/prompt expressions, post expressions,
        menu/command relevancy conditions, detail xpaths, assertions and
        stack frame expressions."""
        relevance_by_menu, menu_by_command = self._get_menu_relevance_mapping()
        details_by_id = self._get_detail_mapping()
        detail_ids = set()
        xpaths = set()
        for datum in entry.all_datums:
            detail_ids.add(datum.detail_confirm)
            detail_ids.add(datum.detail_select)
            detail_ids.add(datum.detail_inline)
            detail_ids.add(datum.detail_persistent)
            xpaths.add(datum.nodeset)
            xpaths.add(datum.function)
        for query in entry.queries:
            xpaths.update({data.ref for data in query.data})
            for prompt in query.prompts:
                if prompt.itemset:
                    xpaths.add(prompt.itemset.nodeset)
                if prompt.required:
                    xpaths.add(prompt.required.test)
                if prompt.default_value:
                    xpaths.add(prompt.default_value)
                for validation in prompt.validations:
                    xpaths.add(validation.test)
        if entry.post:
            if entry.post.relevant:
                xpaths.add(entry.post.relevant)
            for data in entry.post.data:
                xpaths.update(
                    xp for xp in [data.ref, data.nodeset, data.exclude] if xp
                )
        # detail_ids may contain None entries from unset datum attributes.
        details = [details_by_id[detail_id] for detail_id in detail_ids if detail_id]
        entry_id = entry.command.id
        if entry_id in menu_by_command:
            menu_id = menu_by_command[entry_id]
            relevances = relevance_by_menu[menu_id]
            xpaths.update(relevances)
        for detail in details:
            xpaths.update(detail.get_all_xpaths())
        for assertion in getattr(entry, 'assertions', []):
            xpaths.add(assertion.test)
        if entry.stack:
            for frame in entry.stack.frames:
                xpaths.update(frame.get_xpaths())
        # datum.nodeset / datum.function may have been None.
        xpaths.discard(None)
        return xpaths

    @memoized
    def _get_detail_mapping(self):
        # detail id -> detail element, computed once per suite.
        return {detail.id: detail for detail in self.suite.details}

    @memoized
    def _get_menu_relevance_mapping(self):
        # Returns (menu id -> list of relevancy xpaths,
        #          command id -> owning menu id).
        relevance_by_menu = defaultdict(list)
        menu_by_command = {}
        for menu in self.suite.menus:
            for command in menu.commands:
                menu_by_command[command.id] = menu.id
                if command.relevant:
                    relevance_by_menu[menu.id].append(command.relevant)
            if menu.relevant:
                relevance_by_menu[menu.id].append(menu.relevant)
        return relevance_by_menu, menu_by_command

    def _get_custom_instances(self, entry, known_instances, required_instances):
        """Collect user-defined instances for *entry* (form-level custom
        instances and case-search itemset instances).

        Returns (custom instances to add, remaining unknown instance ids).
        Raises DuplicateInstanceIdError when a custom instance reuses a
        known id with a different src.
        """
        if entry.command.id not in self._form_module_by_command_id:
            return set(), required_instances
        known_instance_ids = {instance.id: instance for instance in known_instances}
        form, module = self._form_module_by_command_id[entry.command.id]
        custom_instances = []
        if hasattr(entry, 'form'):
            custom_instances.extend(
                Instance(id=instance.instance_id, src=instance.instance_path)
                for instance in form.custom_instances
            )
        if entry.queries:
            custom_instances.extend([
                Instance(id=prop.itemset.instance_id, src=prop.itemset.instance_uri)
                for prop in module.search_config.properties
                if prop.itemset.instance_id
            ])
        # sorted list to prevent intermittent test failures
        custom_instances = set(sorted(custom_instances, key=lambda i: i.id))
        for instance in list(custom_instances):
            existing = known_instance_ids.get(instance.id)
            if existing:
                if existing.src != instance.src:
                    raise DuplicateInstanceIdError(
                        _("Duplicate custom instance in {}: {}").format(entry.command.id, instance.id))
                # we already have this one, so we can ignore it
                custom_instances.discard(instance)
            # Remove custom instances from required instances, but add them even if they aren't referenced anywhere
            required_instances.discard(instance.id)
        return custom_instances, required_instances

    @property
    @memoized
    def _form_module_by_command_id(self):
        """Map the command ID to the form and module.
        Module must be included since ``form.get_module()`` does not return the correct
        module for ``ShadowModule`` forms
        """
        by_command = {}
        for module in self.app.get_modules():
            # search commands have no form, hence (None, module)
            if module_offers_search(module) and not module_uses_inline_search(module):
                by_command[id_strings.search_command(module)] = (None, module)
            for form in module.get_suite_forms():
                by_command[id_strings.form_command(form, module)] = (form, module)
        return by_command

    @staticmethod
    def require_instances(entry, instances=(), instance_ids=()):
        """Append missing <instance> declarations to *entry* and raise
        UnknownInstanceError for any id that could not be resolved."""
        used = {(instance.id, instance.src) for instance in entry.instances}
        instance_order_updated = EntryInstances.update_instance_order(entry)
        for instance in instances:
            if instance.src in EntryInstances.IGNORED_INSTANCES:
                continue
            if (instance.id, instance.src) not in used:
                entry.instances.append(
                    # it's important to make a copy,
                    # since these can't be reused
                    Instance(id=instance.id, src=instance.src)
                )
                if not instance_order_updated:
                    instance_order_updated = EntryInstances.update_instance_order(entry)
        covered_ids = {instance_id for instance_id, _ in used}
        for instance_id in instance_ids:
            if instance_id not in covered_ids:
                raise UnknownInstanceError(
                    "Instance reference not recognized: {} in XPath \"{}\""
                    # to get xpath context to show in this error message
                    # make instance_id a unicode subclass with an xpath property
                    .format(instance_id, getattr(instance_id, 'xpath', "(XPath Unknown)")))
        # Keep the declarations in a deterministic (sorted-by-id) order.
        sorted_instances = sorted(entry.instances, key=lambda instance: instance.id)
        if sorted_instances != entry.instances:
            entry.instances = sorted_instances

    @staticmethod
    def update_instance_order(entry):
        """Make sure the first instance gets inserted right after the command.
        Once you "suggest" a placement to eulxml, it'll follow your lead and place
        the rest of them there too"""
        if entry.instances:
            instance_node = entry.node.find('instance')
            command_node = entry.node.find('command')
            entry.node.remove(instance_node)
            entry.node.insert(entry.node.index(command_node) + 1, instance_node)
            return True
_factory_map = {}
def get_instance_factory(instance_name):
    """Get the instance factory for an instance name (ID).
    This relies on a naming convention for instances: "scheme:id"
    See docs/apps/instances.rst"""
    # partition() never raises: with no ':' the scheme is the whole name.
    scheme, _sep, _rest = instance_name.partition(':')
    return _factory_map.get(scheme, null_factory)
def null_factory(app, instance_name):
    # Fallback for unregistered schemes: yields no Instance, so the caller
    # records the id as unknown (see get_all_instances_referenced_in_xpaths).
    return None
class register_factory(object):
    """Decorator that registers a factory callable in ``_factory_map``
    for one or more instance-id schemes."""

    def __init__(self, *schemes):
        self.schemes = schemes

    def __call__(self, fn):
        for scheme in self.schemes:
            _factory_map[scheme] = fn
        return fn
# Well-known instance ids whose src is fixed; consumed by preset_instances.
INSTANCE_KWARGS_BY_ID = {
    'groups': dict(id='groups', src='jr://fixture/user-groups'),
    'reports': dict(id='reports', src='jr://fixture/commcare:reports'),
    'ledgerdb': dict(id='ledgerdb', src='jr://instance/ledgerdb'),
    'casedb': dict(id='casedb', src='jr://instance/casedb'),
    'commcaresession': dict(id='commcaresession', src='jr://instance/session'),
    'registry': dict(id='registry', src='jr://instance/remote'),
    'selected_cases': dict(id='selected_cases', src='jr://instance/selected-entities'),
    'search_selected_cases': dict(id='search_selected_cases', src='jr://instance/selected-entities'),
}
@register_factory(*list(INSTANCE_KWARGS_BY_ID.keys()))
def preset_instances(app, instance_name):
    """Build the fixed-src Instance for one of the well-known instance ids."""
    return Instance(**INSTANCE_KWARGS_BY_ID[instance_name])
@memoized
@register_factory('item-list', 'schedule', 'indicators', 'commtrack')
def generic_fixture_instances(app, instance_name):
    # NOTE(review): decorators apply bottom-up, so register_factory stores the
    # *unmemoized* function in _factory_map; @memoized only affects direct
    # calls through this module attribute. Confirm whether that is intended.
    return Instance(id=instance_name, src='jr://fixture/{}'.format(instance_name))
@register_factory('search-input')
def search_input_instances(app, instance_name):
    # This src is in EntryInstances.IGNORED_INSTANCES: it resolves the id
    # as known but is never emitted as an <instance> declaration.
    return Instance(id=instance_name, src='jr://instance/search-input')
@register_factory('results')
def remote_instances(app, instance_name):
    # This src is in EntryInstances.IGNORED_INSTANCES: it resolves the id
    # as known but is never emitted as an <instance> declaration.
    return Instance(id=instance_name, src='jr://instance/remote')
@register_factory('commcare')
def commcare_fixture_instances(app, instance_name):
    # Only 'commcare:reports' is recognized, and only with the MOBILE_UCR
    # toggle on; any other 'commcare:*' name implicitly returns None.
    if instance_name == 'commcare:reports' and toggles.MOBILE_UCR.enabled(app.domain):
        return Instance(id=instance_name, src='jr://fixture/{}'.format(instance_name))
def _commcare_reports_instances(app, instance_name, prefix):
    """Shared factory for mobile-UCR report fixtures.

    Returns None (id treated as unknown by the caller) when the name does
    not start with *prefix* or the MOBILE_UCR toggle is off.
    """
    from corehq.apps.app_manager.suite_xml.features.mobile_ucr import (
        get_uuids_by_instance_id,
    )
    if instance_name.startswith(prefix) and toggles.MOBILE_UCR.enabled(app.domain):
        instance_id = instance_name[len(prefix):]
        # Map the report's instance id to its first UUID; fall back to the
        # raw id when no mapping exists.
        uuid = get_uuids_by_instance_id(app).get(instance_id, [instance_id])[0]
        return Instance(id=instance_name, src='jr://fixture/{}{}'.format(prefix, uuid))
@register_factory('commcare-reports')
def commcare_reports_fixture_instances(app, instance_name):
    # Thin wrapper binding the 'commcare-reports:' prefix.
    return _commcare_reports_instances(app, instance_name, 'commcare-reports:')
@register_factory('commcare-reports-filters')
def commcare_reports_filters_instances(app, instance_name):
    # Thin wrapper binding the 'commcare-reports-filters:' prefix.
    return _commcare_reports_instances(app, instance_name, 'commcare-reports-filters:')
@register_factory('locations')
def location_fixture_instances(app, instance_name):
    from corehq.apps.locations.models import LocationFixtureConfiguration
    # Domains still on the hierarchical fixture (toggle on, flat sync off)
    # get the legacy commtrack-namespaced src; everyone else the flat one.
    if (toggles.HIERARCHICAL_LOCATION_FIXTURE.enabled(app.domain)
            and not LocationFixtureConfiguration.for_domain(app.domain).sync_flat_fixture):
        return Instance(id=instance_name, src='jr://fixture/commtrack:{}'.format(instance_name))
    return Instance(id=instance_name, src='jr://fixture/{}'.format(instance_name))
def get_all_instances_referenced_in_xpaths(app, xpaths):
    """Resolve every instance('...') reference found in *xpaths*.

    Returns (set of resolved Instance objects, set of unresolved ids).
    Unresolved ids carry the originating xpath on an ``xpath`` attribute
    so error messages can show context.
    """
    class UnicodeWithContext(str):
        # str subclass so an attribute can be attached to the id
        pass

    instances = set()
    unknown_instance_ids = set()
    for xpath in set(xpaths):
        if not xpath:
            continue
        for instance_name in get_instance_names(xpath):
            factory = get_instance_factory(instance_name)
            instance = factory(app, instance_name)
            if instance:
                instances.add(instance)
            else:
                tagged_name = UnicodeWithContext(instance_name)
                tagged_name.xpath = xpath
                unknown_instance_ids.add(tagged_name)
    return instances, unknown_instance_ids
# Matches instance('<id>') / instance("<id>") references in an xpath.
instance_re = re.compile(r"""instance\(['"]([\w\-:]+)['"]\)""", re.UNICODE)


def get_instance_names(xpath):
    """Return the set of instance ids referenced in *xpath*.

    The expression is HTML-unescaped first, since xpaths may arrive
    entity-encoded (e.g. &quot; for quotes).
    """
    return set(instance_re.findall(html.unescape(xpath)))
| {
"content_hash": "fa990e57711403ce75ea748f3934e9c9",
"timestamp": "",
"source": "github",
"line_count": 401,
"max_line_length": 115,
"avg_line_length": 38.60847880299252,
"alnum_prop": 0.654502002325281,
"repo_name": "dimagi/commcare-hq",
"id": "975fb0808354deb94e3c5134afe9fe9413967ed4",
"size": "15482",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/app_manager/suite_xml/post_process/instances.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
} |
import contextlib
import functools
import os
from logging import getLogger
from time import time
import aspectlib
import pytest
import hunter
from hunter.actions import RETURN_VALUE
from hunter.actions import ColorStreamAction
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
logger = getLogger(__name__)
pytest_plugins = ('pytester',)
def nothin(x):
    # Identity function, stacked as a no-op decorator on baz below.
    return x
def bar():
    # Indirection so the probed call to baz() happens one frame deeper.
    baz()
# The stacked decorators are identity functions (each returns baz unchanged);
# the benchmark target itself just does a little string/path work.
@nothin
@nothin
@nothin
@nothin
def baz():
    for i in range(10):
        os.path.join('a', str(i))
foo = 1
def brief_probe(qualname, *actions, **kwargs):
    # Monkeypatch *qualname* with hunter.wrap-based tracing via aspectlib.
    return aspectlib.weave(qualname, functools.partial(hunter.wrap, actions=actions, **kwargs))
def fast_probe(qualname, *actions, **filters):
    """Weave a minimal-overhead hunter tracer around *qualname*."""
    def tracing_decorator(func):
        @functools.wraps(func)
        def tracing_wrapper(*args, **kwargs):
            # create the Tracer manually to avoid spending time in likely useless things like:
            # - loading PYTHONHUNTERCONFIG
            # - setting up the clear_env_var or thread_support options
            # - atexit cleanup registration
            with hunter.Tracer().trace(hunter.When(hunter.Query(**filters), *actions)):
                return func(*args, **kwargs)
        return tracing_wrapper
    return aspectlib.weave(qualname, tracing_decorator)  # this does the monkeypatch
@contextlib.contextmanager
def no_probe(*args, **kwargs):
    # Baseline for the benchmark: accepts any arguments, traces nothing.
    yield
# Benchmark bar() under each probe implementation (no_probe is the baseline),
# discarding the VarsPrinter output to devnull.
@pytest.mark.parametrize('impl', [fast_probe, brief_probe, no_probe])
def test_probe(impl, benchmark):
    with open(os.devnull, 'w') as stream:
        with impl(
            '%s.baz' % __name__,
            hunter.VarsPrinter('foo', stream=stream),
            kind='return',
            depth=0,
        ):
            benchmark(bar)
class ProfileAction(ColorStreamAction):
    """Hunter action that times each call and reports the duration on return,
    distinguishing normal returns from propagated exceptions."""
    # using ColorStreamAction brings this more in line with the other actions
    # (stream option, coloring and such, see the other examples for colors)
    def __init__(self, **kwargs):
        # frame_id -> (start time, pending exception or None)
        self.timings = {}
        super(ProfileAction, self).__init__(**kwargs)

    def __call__(self, event):
        """Handle one hunter event ('call' starts a timer; 'exception' stores
        the exception; 'return' reports the elapsed time)."""
        current_time = time()
        # include event.builtin in the id so we don't have problems
        # with Python reusing frame objects from the previous call for builtin calls
        frame_id = id(event.frame), str(event.builtin)
        if event.kind == 'call':
            self.timings[frame_id] = current_time, None
        elif frame_id in self.timings:
            start_time, exception = self.timings.pop(frame_id)
            # try to find a complete function name for display
            function_object = event.function_object
            if event.builtin:
                function = '<builtin>.{}'.format(event.arg.__name__)
            elif function_object:
                if hasattr(function_object, '__qualname__'):
                    function = '{}.{}'.format(
                        function_object.__module__,
                        function_object.__qualname__,
                    )
                else:
                    # pre-__qualname__ Pythons: fall back to __name__
                    function = '{}.{}'.format(function_object.__module__, function_object.__name__)
            else:
                function = event.function
            if event.kind == 'exception':
                # store the exception
                # (there will be a followup 'return' event in which we deal with it)
                self.timings[frame_id] = start_time, event.arg
            elif event.kind == 'return':
                delta = current_time - start_time
                if event.instruction == RETURN_VALUE:
                    # exception was discarded
                    self.output(
                        '{fore(BLUE)}{} returned: {}. Duration: {:.4f}s{RESET}\n',
                        function,
                        event.arg,
                        delta,
                    )
                else:
                    # frame unwound without a normal return: exception propagated
                    self.output(
                        '{fore(RED)}{} raised exception: {}. Duration: {:.4f}s{RESET}\n',
                        function,
                        exception,
                        delta,
                    )
# Exercise ProfileAction in both modes: explicit kind filtering and
# hunter's profile mode (which also sees builtin calls, hence the extra
# '<builtin>.repr' line and the None exception placeholders).
@pytest.mark.parametrize(
    'options',
    [{'kind__in': ['call', 'return', 'exception']}, {'profile': True}],
    ids=['kind__in=call,return,exception', 'profile=True'],
)
def test_profile(LineMatcher, options):
    stream = StringIO()
    with hunter.trace(action=ProfileAction(stream=stream), **options):
        from sample8errors import notsilenced
        from sample8errors import silenced1
        from sample8errors import silenced3
        from sample8errors import silenced4

        silenced1()
        print('Done silenced1')
        silenced3()
        print('Done silenced3')
        silenced4()
        print('Done silenced4')
        try:
            notsilenced()
        except ValueError:
            print('Done not silenced')
    lm = LineMatcher(stream.getvalue().splitlines())
    if 'profile' in options:
        lm.fnmatch_lines(
            [
                'sample8errors.error raised exception: None. Duration: ?.????s',
                'sample8errors.silenced1 returned: None. Duration: ?.????s',
                'sample8errors.error raised exception: None. Duration: ?.????s',
                'sample8errors.silenced3 returned: mwhahaha. Duration: ?.????s',
                'sample8errors.error raised exception: None. Duration: ?.????s',
                '<builtin>.repr raised exception: None. Duration: ?.????s',
                'sample8errors.silenced4 returned: None. Duration: ?.????s',
                'sample8errors.error raised exception: None. Duration: ?.????s',
                'sample8errors.notsilenced raised exception: None. Duration: ?.????s',
            ]
        )
    else:
        lm.fnmatch_lines(
            [
                'sample8errors.error raised exception: (*RuntimeError*, *). Duration: ?.????s',
                'sample8errors.silenced1 returned: None. Duration: ?.????s',
                'sample8errors.error raised exception: (*RuntimeError*, *). Duration: ?.????s',
                'sample8errors.silenced3 returned: mwhahaha. Duration: ?.????s',
                'sample8errors.error raised exception: (*RuntimeError*, *). Duration: ?.????s',
                'sample8errors.silenced4 returned: None. Duration: ?.????s',
                'sample8errors.error raised exception: (*RuntimeError*, *). Duration: ?.????s',
                'sample8errors.notsilenced raised exception: (*ValueError(RuntimeError*, *). Duration: ?.????s',
            ]
        )
| {
"content_hash": "a08434eadb2d8be915463fa3b4e19875",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 112,
"avg_line_length": 35.06417112299465,
"alnum_prop": 0.5621473234710996,
"repo_name": "ionelmc/python-hunter",
"id": "a482af4225a445d7f88f14f006d61f6b43931ab0",
"size": "6557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_cookbook.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Cython",
"bytes": "50327"
},
{
"name": "Python",
"bytes": "265849"
}
],
"symlink_target": ""
} |
import os
import pytest
import stat
import testinfra.utils.ansible_runner
# Run these tests against every host in the Molecule inventory.
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
# Define fixture for dynamic ansible role variables.
# @see https://github.com/philpep/testinfra/issues/345#issuecomment-409999558
@pytest.fixture
def ansible_role_vars(host):
    # Include variables from ansible variable files.
    # Paths are relative to the scenario directory.
    # Later files override earlier ones: role defaults, then role vars,
    # then prepare vars, then scenario vars.
    ansible_vars = host.ansible(
        "include_vars",
        ("file=../../defaults/main.yml"
         " name=role_defaults"))["ansible_facts"]["role_defaults"]
    ansible_vars.update(host.ansible(
        "include_vars",
        ("file=../../vars/main.yml"
         " name=role_vars"))["ansible_facts"]["role_vars"])
    ansible_vars.update(host.ansible(
        "include_vars",
        ("file=../resources/prepare-vars.yml"
         " name=prepare_vars"))["ansible_facts"]["prepare_vars"])
    ansible_vars.update(host.ansible(
        "include_vars",
        ("file=./scenario-vars.yml"
         " name=scenario_vars"))["ansible_facts"]["scenario_vars"])
    return ansible_vars
def test_wp_config_file_exists(host, ansible_role_vars):
    """wp-config.php must exist with the configured group and group-read bit."""
    config_file = host.file(ansible_role_vars['wp_install_dir'] + '/wp-config.php')
    assert config_file.exists
    # Group ownership must match the role configuration.
    assert config_file.group == ansible_role_vars['wp_core_group']
    # The group-read permission bit must be set.
    assert (config_file.mode & stat.S_IRGRP) != 0
def test_wp_config_file_contains_salts(host, ansible_role_vars):
    """wp-config.php must define the AUTH_KEY salt."""
    f = host.file(ansible_role_vars['wp_install_dir'] + '/wp-config.php')
    # Raw string: "\s" is an invalid escape in a plain Python string and
    # triggers a SyntaxWarning on modern Python; the pattern bytes passed
    # to grep are unchanged.
    assert f.contains(r"^define(\s*'AUTH_KEY'")
def test_wp_config_file_contains_db_credentials(host, ansible_role_vars):
    """wp-config.php must define the DB name, user, password and host."""
    f = host.file(ansible_role_vars['wp_install_dir'] + '/wp-config.php')
    # Raw strings: "\s" is an invalid escape in plain Python strings and
    # triggers a SyntaxWarning on modern Python; pattern bytes are unchanged.
    assert f.contains((
        r"^define('DB_NAME',\s*'"
        + ansible_role_vars['wp_db_name']
        + "');"
    ))
    assert f.contains((
        r"^define('DB_USER',\s*'"
        + ansible_role_vars['wp_db_user']
        + "');"
    ))
    assert f.contains((
        r"^define('DB_PASSWORD',\s*'"
        + ansible_role_vars['wp_db_password']
        + "');"
    ))
    assert f.contains((
        r"^define('DB_HOST',\s*'"
        + ansible_role_vars['wp_db_host']
        + "');"
    ))
def test_wp_config_file_contains_db_table_prefix(host, ansible_role_vars):
    """wp-config.php must set the configured database table prefix."""
    f = host.file(ansible_role_vars['wp_install_dir'] + '/wp-config.php')
    # Raw string fixes the invalid "\s" escape sequence (value unchanged).
    assert f.contains((
        r"^$table_prefix\s*=\s*'"
        + ansible_role_vars['wp_table_prefix']
        + "';"
    ))
| {
"content_hash": "3d4bdc1e84974f87d92e88175eb2178a",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 77,
"avg_line_length": 28.9010989010989,
"alnum_prop": 0.6064638783269962,
"repo_name": "kentr/drupal-vm",
"id": "d1011692f191f9838aad8cf04619f703622f08a4",
"size": "2630",
"binary": false,
"copies": "1",
"ref": "refs/heads/customized",
"path": "provisioning/roles/kentr.wordpress/molecule/default/tests/test_default.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1248"
},
{
"name": "JavaScript",
"bytes": "5611"
},
{
"name": "Makefile",
"bytes": "710"
},
{
"name": "PHP",
"bytes": "4682"
},
{
"name": "Python",
"bytes": "17877"
},
{
"name": "Ruby",
"bytes": "6412"
},
{
"name": "Shell",
"bytes": "31752"
},
{
"name": "TSQL",
"bytes": "4618"
}
],
"symlink_target": ""
} |
""" Character BLueprint task """
from sqlalchemy.exc import SQLAlchemyError
from lazyblacksmith.extension.esipy import esiclient
from lazyblacksmith.extension.esipy.operations import get_characters_blueprints
from lazyblacksmith.models import Blueprint, TokenScope, User, db
from lazyblacksmith.utils.models import (get_token_update_esipy,
inc_fail_token_scope,
update_token_state)
from ... import celery_app, logger
@celery_app.task(name="update_character_blueprints")
def task_update_character_blueprints(character_id):
    """Refresh the stored blueprints for the given character_id.

    Pulls every blueprint page from ESI, inserts newly seen blueprints,
    accumulates runs for copies (BPCs), deletes blueprints that no longer
    exist in game, then records the new token/cache state.
    (Original docstring said "skills" -- copy/paste error.)
    """
    character = User.query.get(character_id)
    if character is None:
        return

    # get token
    token = get_token_update_esipy(
        character_id=character_id,
        scope=TokenScope.SCOPE_CHAR_BLUEPRINTS
    )

    # get current blueprints
    bps = Blueprint.query.filter_by(
        character_id=character_id
    ).filter_by(
        corporation=False
    ).all()

    blueprints = {}

    for bp in bps:
        key = "%s-%d-%d-%d" % (
            bp.item_id,
            bp.original,
            bp.material_efficiency,
            bp.time_efficiency
        )
        # reset runs to 0 so BPC totals can be rebuilt from the API data
        if not bp.original:
            bp.total_runs = 0
        blueprints[key] = bp

    # set of known blueprints
    blueprint_init_list = set(blueprints.keys())
    blueprint_updated_list = set()

    # get the first page to have the page number
    op_blueprint = get_characters_blueprints(
        character_id=character_id,
        page=1
    )

    bp_one = esiclient.request(op_blueprint)

    if bp_one.status != 200:
        # BUG FIX: the original passed ``bp_one.status_code`` while this
        # function reads ``bp_one.status`` everywhere else; the failure
        # path would itself raise AttributeError. NOTE(review): confirm
        # against the EsiPy response API.
        inc_fail_token_scope(token, bp_one.status)
        logger.error(
            'Request failed [%s, %s, %d]: %s',
            op_blueprint[0].url,
            op_blueprint[0].query,
            bp_one.status,
            bp_one.raw,
        )
        return

    # prepare all other pages
    total_page = bp_one.header['X-Pages'][0]
    operations = []
    for page in range(2, total_page + 1):
        operations.append(get_characters_blueprints(
            character_id=character_id,
            page=page
        ))

    # query all other pages and add the first page
    bp_list = esiclient.multi_request(operations)

    # parse the response and save everything
    for _, response in [(op_blueprint[0], bp_one)] + bp_list:
        for blueprint in response.data:
            # quantity == -2 marks a blueprint copy in ESI data
            original = (blueprint.quantity != -2)
            runs = blueprint.runs
            me = blueprint.material_efficiency
            te = blueprint.time_efficiency
            item_id = blueprint.type_id
            key = "%s-%d-%d-%d" % (item_id, original, me, te)

            if key not in blueprint_updated_list:
                blueprint_updated_list.add(key)

            if key not in blueprints:
                blueprints[key] = Blueprint(
                    item_id=item_id,
                    original=original,
                    total_runs=runs,
                    material_efficiency=me,
                    time_efficiency=te,
                    character_id=character_id,
                )
                try:
                    db.session.add(blueprints[key])
                    db.session.commit()
                except SQLAlchemyError:
                    db.session.rollback()
                    logger.error(
                        "Error while trying to add blueprint id: %d",
                        item_id
                    )
                # BUG FIX: ``continue`` used to live inside the except
                # block only, so a successfully inserted copy fell through
                # to the increment below and its first stack of runs was
                # counted twice (it is already stored at creation).
                continue

            if not original:
                blueprints[key].total_runs += runs

    # delete every blueprint that has not been seen in the API response
    for key in blueprint_init_list - blueprint_updated_list:
        db.session.delete(blueprints[key])
    try:
        db.session.commit()
    except SQLAlchemyError:
        db.session.rollback()
        logger.error(
            "Error while trying to delete unused blueprints"
        )

    # update the token and the cache-expiry state
    update_token_state(token, bp_one.header['Expires'][0])
| {
"content_hash": "6c38b3a04e58b5bc3662656cc74b3146",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 79,
"avg_line_length": 30.514705882352942,
"alnum_prop": 0.5614457831325301,
"repo_name": "Kyria/LazyBlacksmith",
"id": "c4692362e7ca85ee5d925af4f8b23c5e9a510f93",
"size": "4176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lbtasks/tasks/blueprint/character.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "2005"
},
{
"name": "HTML",
"bytes": "219613"
},
{
"name": "JavaScript",
"bytes": "402713"
},
{
"name": "Mako",
"bytes": "557"
},
{
"name": "Python",
"bytes": "192854"
},
{
"name": "SCSS",
"bytes": "226990"
},
{
"name": "Shell",
"bytes": "1707"
}
],
"symlink_target": ""
} |
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1Eviction(object):
    """Eviction of a pod from its node, subject to policies and constraints.

    NOTE: This class is auto generated by the swagger code generator
    program; only its style has been refreshed -- behavior is unchanged.
    """

    # Attribute name -> attribute type.
    swagger_types = {
        'api_version': 'str',
        'delete_options': 'V1DeleteOptions',
        'kind': 'str',
        'metadata': 'V1ObjectMeta'
    }

    # Attribute name -> JSON key in the API definition.
    attribute_map = {
        'api_version': 'apiVersion',
        'delete_options': 'deleteOptions',
        'kind': 'kind',
        'metadata': 'metadata'
    }

    def __init__(self, api_version=None, delete_options=None, kind=None,
                 metadata=None):
        """Build a V1beta1Eviction, keeping only the arguments provided."""
        self._api_version = None
        self._delete_options = None
        self._kind = None
        self._metadata = None
        self.discriminator = None

        # Route every supplied value through its property setter.
        supplied = (('api_version', api_version),
                    ('delete_options', delete_options),
                    ('kind', kind),
                    ('metadata', metadata))
        for attr_name, value in supplied:
            if value is not None:
                setattr(self, attr_name, value)

    @property
    def api_version(self):
        """str: versioned schema of this representation of an object.

        Servers should convert recognized schemas to the latest internal
        value, and may reject unrecognized values. More info:
        https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """Set the api_version of this V1beta1Eviction."""
        self._api_version = api_version

    @property
    def delete_options(self):
        """V1DeleteOptions: DeleteOptions may be provided."""
        return self._delete_options

    @delete_options.setter
    def delete_options(self, delete_options):
        """Set the delete_options of this V1beta1Eviction."""
        self._delete_options = delete_options

    @property
    def kind(self):
        """str: REST resource this object represents.

        Servers may infer this from the endpoint the client submits
        requests to. Cannot be updated. In CamelCase. More info:
        https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """Set the kind of this V1beta1Eviction."""
        self._kind = kind

    @property
    def metadata(self):
        """V1ObjectMeta: describes the pod that is being evicted."""
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """Set the metadata of this V1beta1Eviction."""
        self._metadata = metadata

    def to_dict(self):
        """Return the model properties as a plain dict."""
        def _convert(value):
            # Mirror the generated serialization rules exactly:
            # lists and dicts are converted one level deep only.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: (item.to_dict()
                              if hasattr(item, "to_dict") else item)
                        for key, item in value.items()}
            return value

        return {attr: _convert(getattr(self, attr))
                for attr in self.swagger_types}

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two evictions are equal when all their attributes match."""
        return (isinstance(other, V1beta1Eviction)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
| {
"content_hash": "9d58d9da82f0390505ddae8a8c985a71",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 281,
"avg_line_length": 30.927884615384617,
"alnum_prop": 0.5970775687859474,
"repo_name": "mbohlool/client-python",
"id": "b774b495607addfb8cb542d32af32ec3a08c795d",
"size": "6450",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1beta1_eviction.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import division
import _init_paths
from model.config import cfg
from datasets.factory import get_imdb
from model.test import test_net
import os
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="1"
os.environ['TF_CPP_MIN_LOG_LEVEL']='2'
import tensorflow as tf
import numpy as np
import collections
from random import shuffle
import random
from nets.vgg16_noBN import vgg16
from tensorflow.python import pywrap_tensorflow
from prune_with_classification_guidance import detect_diff_all
def choose_filters(current_sum, layer_ind, heatmap_all_ind=None, random_seed=None,
                   MAGNITUTE=False, RANDOM=False, CLASSIFICATION_BASED=False,
                   COMBINED=False):
    """Return an ordering of filter indices to prune for layer ``layer_ind``.

    Exactly one selection mode flag is expected to be set; the first
    matching mode wins, in the order: classification-guided, random,
    magnitude, combined. (``MAGNITUTE`` keeps the original misspelling for
    caller compatibility.)

    Fixes vs. original: single-argument ``print(...)`` calls (identical
    output under Python 2, valid under Python 3) and ``list(range(...))``
    so ``shuffle`` keeps working on Python 3.
    """
    if CLASSIFICATION_BASED:
        print('choose pruning guided by classification mode')
        # NOTE(review): this mode keys on 'Nth_layer' while COMBINED keys on
        # 'Nth_acts' with a different offset -- confirm against the heatmap
        # dict produced by detect_diff_all.
        return heatmap_all_ind['%dth_layer' % (layer_ind - 1)]
    if RANDOM:
        print('choose random pruning mode')
        random.seed(random_seed)
        print('using random seed %d' % random_seed)
        shuffled_list = list(range(len(current_sum)))
        shuffle(shuffled_list)
        return shuffled_list
    if MAGNITUTE:
        print('choosing magnitute mode')
        # Smallest-magnitude filters come first (pruned first).
        return np.argsort(current_sum)
    if COMBINED:
        clas_index = heatmap_all_ind['%dth_acts' % (layer_ind + 1)]
        random.seed(100)
        random_index = list(range(len(current_sum)))
        shuffle(random_index)
        # Prune only filters picked by BOTH the random and the
        # classification-based selections (first 256 of each).
        common_set = set(random_index[:256]) & set(clas_index[:256])
        print('there is a common set between the random pruning and the classification-based methods with length %d' % len(common_set))
        return list(common_set)
def filter(dic, old_filter_num, new_filter_num, heatmap_path=None, random_seed=None,
           RANDOM=False, MAGNITUTE=False, CLASSIFICATION_BASED=False, COMBINED=False):
    """Prune ``dic`` from ``old_filter_num`` to ``new_filter_num`` filters.

    ``dic`` is an OrderedDict of {name_scope: {'weights'/'biases':
    {'value': ndarray, 'Momentum': ndarray or None}}}; it is modified in
    place and also returned. (This function shadows the builtin ``filter``;
    the name is kept for caller compatibility.)

    Fixes vs. original:
    * the 0th-layer branch indexed ``b_momentum[ind]`` using the loop
      variable leaked from the ``diff`` comprehension (last layer) instead
      of index 0;
    * the write-back loop advanced one counter per *name* (weights AND
      biases), so scope k was assigned weights[2k] / biases[2k+1], mixing
      layers up -- it now indexes per scope.
    """
    # Flatten the nested dict into per-layer lists (one entry per scope).
    biases = []
    weights = []
    w_momentum = []
    b_momentum = []
    name_scopes = []
    for name_scope in dic:
        name_scopes.append(name_scope)
        for name in dic[name_scope]:
            if name.startswith('weights'):
                weights.append(dic[name_scope][name]['value'])
                w_momentum.append(dic[name_scope][name]['Momentum'])
            elif name.startswith('biases'):
                biases.append(dic[name_scope][name]['value'])
                b_momentum.append(dic[name_scope][name]['Momentum'])

    # Number of filters to drop in each conv layer.
    diff = [(old_filter_num[ind] - new_filter_num[ind])
            for ind in range(len(old_filter_num))]

    heatmap_all_ind = {}
    if CLASSIFICATION_BASED or COMBINED:
        heatmap_all_ind = detect_diff_all(heatmap_path)

    current_ind = 0
    pre_ind = 0
    if diff[0] != 0:  # prune the 0th layer (output filters, axis 3)
        current_sum = np.sum(weights[0], axis=(0, 1, 2))
        current_ind = choose_filters(current_sum, 0, heatmap_all_ind,
                                     random_seed,
                                     RANDOM=RANDOM, MAGNITUTE=MAGNITUTE,
                                     CLASSIFICATION_BASED=CLASSIFICATION_BASED,
                                     COMBINED=COMBINED)
        weights[0] = np.delete(weights[0], current_ind[:diff[0]], axis=3)
        biases[0] = np.delete(biases[0], current_ind[:diff[0]], axis=0)
        if w_momentum[0] is not None:
            w_momentum[0] = np.delete(w_momentum[0],
                                      current_ind[:diff[0]],
                                      axis=3)
            # BUG FIX: original wrote ``b_momentum[ind]`` here, where
            # ``ind`` was the leaked comprehension variable (last layer).
            b_momentum[0] = np.delete(b_momentum[0],
                                      current_ind[:diff[0]],
                                      axis=0)
        pre_ind = current_ind
        current_ind = None
    for ind in range(1, len(old_filter_num)):  # remaining conv layers
        if diff[ind - 1] != 0:
            # Drop the input channels fed by the filters removed from the
            # previous layer (axis 2).
            weights[ind] = np.delete(weights[ind], pre_ind[:diff[ind - 1]],
                                     axis=2)
        if diff[ind] == 0:
            pre_ind = None
        if diff[ind] != 0:
            current_sum = np.sum(weights[ind], axis=(0, 1, 2))
            current_ind = choose_filters(current_sum, ind, heatmap_all_ind,
                                         random_seed,
                                         RANDOM=RANDOM, MAGNITUTE=MAGNITUTE,
                                         CLASSIFICATION_BASED=CLASSIFICATION_BASED,
                                         COMBINED=COMBINED)
            print(len(current_ind))
            weights[ind] = np.delete(weights[ind], current_ind[:diff[ind]],
                                     axis=3)
            biases[ind] = np.delete(biases[ind], current_ind[:diff[ind]],
                                    axis=0)
            if w_momentum[ind] is not None:
                w_momentum[ind] = np.delete(w_momentum[ind],
                                            current_ind[:diff[ind]],
                                            axis=3)
                b_momentum[ind] = np.delete(b_momentum[ind],
                                            current_ind[:diff[ind]],
                                            axis=0)
            pre_ind = current_ind
            current_ind = None

    # Write the (possibly pruned) arrays back to their own name scope.
    # BUG FIX: indexed per scope instead of per name (see docstring).
    for scope_ind, name_scope in enumerate(dic):
        for name in dic[name_scope]:
            if name.startswith('weights'):
                dic[name_scope][name]['value'] = weights[scope_ind]
                dic[name_scope][name]['Momentum'] = w_momentum[scope_ind]
            elif name.startswith('biases'):
                dic[name_scope][name]['value'] = biases[scope_ind]
                dic[name_scope][name]['Momentum'] = b_momentum[scope_ind]
    return dic
def prune_net_for_training(tfmodel,weights_path,old_filter_num,new_filter_num,
                           heatmap_path=None,random_seed = None,
                           SAVE=False,RANDOM=False,MAGNITUTE=False,
                           CLASSIFICATION_BASED=False,COMBINED=False):
    '''
    Load all weights/biases (and Momentum slots, when present) from the
    checkpoint ``tfmodel``, prune them down to ``new_filter_num`` filters
    per conv layer via ``filter``, and return the pruned weights as an
    OrderedDict. When ``SAVE`` is set, also dump the dict to the npy file
    ``weights_path``. The selection-mode flags are forwarded to
    ``choose_filters``.
    '''
    reader = pywrap_tensorflow.NewCheckpointReader(tfmodel)
    # set config
    tfconfig = tf.ConfigProto(allow_soft_placement=True)
    tfconfig.gpu_options.allow_growth=True
    # load the network (TRAIN mode, full un-pruned filter counts, so the
    # graph's variable name scopes match the checkpoint's tensor names)
    name_scopes = []
    with tf.Graph().as_default() as g1:
        with tf.Session(config=tfconfig, graph=g1).as_default() as sess:
            #load network
            net = vgg16(batch_size=1)
            filter_num = [64,64,128,128,256,256,256,512,512,512,512,512,512,512]
            net.create_architecture(sess,'TRAIN',21,tag='default',
                                    anchor_scales=[8,16,32],
                                    filter_num=filter_num)
            # for var in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):
            #     print(var.name,var.get_shape())
            saver = tf.train.Saver()
            saver.restore(sess,tfmodel)
            print 'Loaded network {:s}'.format(tfmodel)
            #get the weights from ckpt file
            # dic maps name_scope -> {'weights'/'biases':
            #                         {'value': ndarray, 'Momentum': ndarray|None}}
            dic = collections.OrderedDict()
            all_variables = []
            for item in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES):
                name_list = item.name[:-2].split('/')
                name_scopes.append('/'.join(name_list[:-1]))
            for name_scope in name_scopes:
                for name in ('weights','biases'):#
                    tensor_name = '{}/{}'.format(name_scope,name)
                    momentum_name = '{}/Momentum'.format(tensor_name)
                    # NOTE(review): the bare excepts below fall back to
                    # Momentum=None when the checkpoint has no Momentum
                    # slot for a tensor, but they also hide any other
                    # reader error -- catching a narrower exception would
                    # be safer.
                    if name_scope not in dic:
                        try:
                            dic[name_scope] = \
                            {name:{'value':reader.get_tensor(tensor_name),
                                   'Momentum':reader.get_tensor(momentum_name)}}
                        except:
                            dic[name_scope] = \
                            {name:{'value':reader.get_tensor(tensor_name),
                                   'Momentum':None}}
                    else:
                        dic[name_scope][name] = \
                        {'value':reader.get_tensor(tensor_name),
                         'Momentum':None}
                        try:
                            dic[name_scope][name]['Momentum'] = \
                            reader.get_tensor(momentum_name)
                        except:
                            continue
    # filter the weights
    if RANDOM:
        print 'using random seed %d'%random_seed
    dic = filter(dic,old_filter_num,new_filter_num,heatmap_path,random_seed,
                 RANDOM,MAGNITUTE,CLASSIFICATION_BASED,COMBINED)
    # raise NotImplementedError
    # display the filtered weights' shape
    # for name_scope in dic:
    #     for name in dic[name_scope]:
    #         try:
    #             print 'After filtering, the variable {}/{} has shape {}'.format(
    #                 name_scope, name, dic[name_scope][name]['Momentum'].shape)
    #         except:
    #             print 'After filtering, the variable {}/{}/Momentum is: '.format(
    #                 name_scope, name, dic[name_scope][name]['Momentum'])
    # save the pruned weights to npy file
    if SAVE:
        np.save(weights_path, dic)
        print 'The weights are saved in {}'.format(weights_path)
    return dic
if __name__ == '__main__':
    # Below are variables that needs checking every time
    num_classes = 21
    old_filter_num = (64,64,128,128,256,256,256,512,512,512,512,512,512,512)
    new_filter_num = (64,64,128,128,256,256,256,512,512,512,512,512,512,512)
    names = ('weights','biases')
    # Defined path for loading original weights from ckpy files
    demonet = 'vgg16_faster_rcnn_iter_70000.ckpt'
    dataset = 'voc_2007_trainval'
    tfmodel = os.path.join('../output','vgg16',dataset, 'default', demonet)
    if not os.path.isfile(tfmodel + '.meta'):
        raise IOError('{:s} not found'.format(tfmodel + '.meta'))
    # Defined path for saving pruned weights to npy files
    weights_name = 'combined_prune_conv3_to64_with_momentum.npy'
    folder_path = '../output/pruning/'
    if not os.path.exists(folder_path):
        os.makedirs(folder_path)
    weights_path = os.path.join(folder_path,weights_name)
    # Only useful when classification-based method is chosen
    heatmap_path = './activations_res/res.npy'
    heatmap_all_ind = {}
    # seed = 100
    # weights_dic = prune_net_for_training(tfmodel,
    #                                      weights_path,
    #                                      old_filter_num,
    #                                      new_filter_num,
    #                                      heatmap_path='./activations_res/res.npy',
    #                                      random_seed = seed,
    #                                      SAVE=False,RANDOM=True,MAGNITUTE=False,
    #                                      CLASSIFICATION_BASED=False,
    #                                      COMBINED=False)
    #
    #
    # '''
    # load the new weights to a new graph,
    # test the pruned network
    # '''
    # # set config
    # tfconfig = tf.ConfigProto(allow_soft_placement=True)
    # tfconfig.gpu_options.allow_growth=True
    # with tf.Graph().as_default() as g2:
    #     with tf.Session(config=tfconfig,graph=g2).as_default() as sess:
    #         #load the new graph
    #         net = vgg16(batch_size=1)
    #         net.create_architecture(sess,'TEST',num_classes,tag='default',
    #                                 anchor_scales = [8,16,32],
    #                                 filter_num = new_filter_num)
    #
    #         # load the new weights from npy file
    #         # weights_dic = np.load(weights_path).item()
    #
    #         for name_scope in weights_dic:
    #             with tf.variable_scope(name_scope,reuse = True):
    #                 for name in weights_dic[name_scope]:
    #                     var = tf.get_variable(name)
    #                     sess.run(var.assign(weights_dic[name_scope][name]['value']))
    #         print 'assigned pruned weights to the pruned model'
    #
    #         # test the new model
    #         imdb = get_imdb('voc_2007_test')
    #         filename = 'demo_pruning/experiments/random/random_seed%d'%seed
    #         experiment_setup = 'prune_layer3_to64'
    #         test_net(sess, net, imdb, filename,
    #                  experiment_setup=experiment_setup,
    #                  max_per_image=100)
    # For each random seed: prune the checkpoint weights, build a TEST
    # graph with the pruned filter counts, assign the pruned arrays into
    # it, and evaluate on voc_2007_test.
    seeds = [200,300,400,500,600,700,800] # random seed for random method
    for seed in seeds:
        weights_dic = prune_net_for_training(tfmodel,
                                             weights_path,
                                             old_filter_num,
                                             new_filter_num,
                                             heatmap_path='./activations_res/res.npy',
                                             random_seed = seed,
                                             SAVE=False,RANDOM=True,MAGNITUTE=False,
                                             CLASSIFICATION_BASED=False,
                                             COMBINED=False)

        '''
        load the new weights to a new graph,
        test the pruned network
        '''
        # set config
        tfconfig = tf.ConfigProto(allow_soft_placement=True)
        tfconfig.gpu_options.allow_growth=True
        with tf.Graph().as_default() as g2:
            with tf.Session(config=tfconfig,graph=g2).as_default() as sess:
                #load the new graph
                net = vgg16(batch_size=1)
                net.create_architecture(sess,'TEST',num_classes,tag='default',
                                        anchor_scales = [8,16,32],
                                        filter_num = new_filter_num)
                # load the new weights from npy file
                # weights_dic = np.load(weights_path).item()
                for name_scope in weights_dic:
                    with tf.variable_scope(name_scope,reuse = True):
                        for name in weights_dic[name_scope]:
                            var = tf.get_variable(name)
                            sess.run(var.assign(weights_dic[name_scope][name]['value']))
                print 'assigned pruned weights to the pruned model'
                # test the new model
                imdb = get_imdb('voc_2007_test')
                filename = 'demo_pruning/experiments/random/random_seed_variation'
                experiment_setup = 'prune_layer3_to64_seed%d'%seed
                test_net(sess, net, imdb, filename,
                         experiment_setup=experiment_setup,
                         max_per_image=100)
| {
"content_hash": "731907cd5cc79ab297e6c47556ab525f",
"timestamp": "",
"source": "github",
"line_count": 333,
"max_line_length": 132,
"avg_line_length": 44.351351351351354,
"alnum_prop": 0.5219039880831471,
"repo_name": "shuang1330/tf-faster-rcnn",
"id": "7088edd7aa5a8beaf80255c8d5565b6b2f54f180",
"size": "14792",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/prune.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "146"
},
{
"name": "Cuda",
"bytes": "5064"
},
{
"name": "Matlab",
"bytes": "1821"
},
{
"name": "Python",
"bytes": "321187"
},
{
"name": "Roff",
"bytes": "1195"
},
{
"name": "Shell",
"bytes": "6647"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class RequestStatistics(Model):
    """Aggregate counts describing how a Text Analytics request was processed.

    :param documents_count: Number of documents submitted in the request.
    :type documents_count: int
    :param valid_documents_count: Number of valid documents; excludes empty,
     over-size-limit or non-supported-language documents.
    :type valid_documents_count: int
    :param erroneous_documents_count: Number of invalid documents; includes
     empty, over-size-limit or non-supported-language documents.
    :type erroneous_documents_count: int
    :param transactions_count: Number of transactions for the request.
    :type transactions_count: long
    """

    _attribute_map = {
        'documents_count': {'key': 'documentsCount', 'type': 'int'},
        'valid_documents_count': {'key': 'validDocumentsCount', 'type': 'int'},
        'erroneous_documents_count': {'key': 'erroneousDocumentsCount', 'type': 'int'},
        'transactions_count': {'key': 'transactionsCount', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        super(RequestStatistics, self).__init__(**kwargs)
        # Pull each statistic out of kwargs, defaulting to None when absent.
        for stat_name in ('documents_count', 'valid_documents_count',
                          'erroneous_documents_count', 'transactions_count'):
            setattr(self, stat_name, kwargs.get(stat_name))
| {
"content_hash": "e98d9f58962c84e30a22c75b01f604b2",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 87,
"avg_line_length": 46.03225806451613,
"alnum_prop": 0.6867554309740714,
"repo_name": "Azure/azure-sdk-for-python",
"id": "b8cc26575238da2d78af2980347da8680a9d1c94",
"size": "1901",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/cognitiveservices/azure-cognitiveservices-language-textanalytics/azure/cognitiveservices/language/textanalytics/models/request_statistics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import MySQLdb

# Throwaway connectivity check: read every temperature value from the
# brewing database and dump the result to stdout (Python 2 script).
db = MySQLdb.connect(user="beeruser", db="beerdb")
c = db.cursor()
# NOTE(review): ``id`` shadows the builtin of the same name; it is only
# used by the commented-out parameterized query below.
id = 1
#c.execute("""SELECT Value from Temperature where ID = %s""", (id,))
c.execute("""SELECT Value from Temperature""")
print c.fetchall()
| {
"content_hash": "a575abeac3db6d6e293cfc6f7ed99b51",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 68,
"avg_line_length": 19.083333333333332,
"alnum_prop": 0.6681222707423581,
"repo_name": "Wollert/beer",
"id": "a3f43a3e7424a6ce70624ff89e2091f372ab22b7",
"size": "229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test_mysql.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "75747"
}
],
"symlink_target": ""
} |
import copy
import mock
import netaddr
from webob import exc
from nova.api.openstack.compute import hypervisors \
as hypervisors_v21
from nova.cells import utils as cells_utils
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
from nova.tests import uuidsentinel as uuids
# Canned fixtures shared by the test cases below: two fake compute hosts
# (compute1/compute2), their nova-compute services, and four fake servers
# spread across them.
TEST_HYPERS = [
    dict(id=1,
         service_id=1,
         host="compute1",
         vcpus=4,
         memory_mb=10 * 1024,
         local_gb=250,
         vcpus_used=2,
         memory_mb_used=5 * 1024,
         local_gb_used=125,
         hypervisor_type="xen",
         hypervisor_version=3,
         hypervisor_hostname="hyper1",
         free_ram_mb=5 * 1024,
         free_disk_gb=125,
         current_workload=2,
         running_vms=2,
         cpu_info='cpu_info',
         disk_available_least=100,
         host_ip=netaddr.IPAddress('1.1.1.1')),
    dict(id=2,
         service_id=2,
         host="compute2",
         vcpus=4,
         memory_mb=10 * 1024,
         local_gb=250,
         vcpus_used=2,
         memory_mb_used=5 * 1024,
         local_gb_used=125,
         hypervisor_type="xen",
         hypervisor_version=3,
         hypervisor_hostname="hyper2",
         free_ram_mb=5 * 1024,
         free_disk_gb=125,
         current_workload=2,
         running_vms=2,
         cpu_info='cpu_info',
         disk_available_least=100,
         host_ip=netaddr.IPAddress('2.2.2.2'))]
TEST_SERVICES = [
    objects.Service(id=1,
                    host="compute1",
                    binary="nova-compute",
                    topic="compute_topic",
                    report_count=5,
                    disabled=False,
                    disabled_reason=None,
                    availability_zone="nova"),
    objects.Service(id=2,
                    host="compute2",
                    binary="nova-compute",
                    topic="compute_topic",
                    report_count=5,
                    disabled=False,
                    disabled_reason=None,
                    availability_zone="nova"),
    ]

# ComputeNode objects are built BEFORE the 'service' entry is added to the
# TEST_HYPERS dicts, so the constructor only sees ComputeNode fields.
TEST_HYPERS_OBJ = [objects.ComputeNode(**hyper_dct)
                   for hyper_dct in TEST_HYPERS]

TEST_HYPERS[0].update({'service': TEST_SERVICES[0]})
TEST_HYPERS[1].update({'service': TEST_SERVICES[1]})

TEST_SERVERS = [dict(name="inst1", uuid=uuids.instance_1, host="compute1"),
                dict(name="inst2", uuid=uuids.instance_2, host="compute2"),
                dict(name="inst3", uuid=uuids.instance_3, host="compute1"),
                dict(name="inst4", uuid=uuids.instance_4, host="compute2")]
def fake_compute_node_get_all(context):
    # Stub: return every fake hypervisor regardless of context.
    return TEST_HYPERS_OBJ
def fake_compute_node_search_by_hypervisor(context, hypervisor_re):
    # Stub: pretend every fake hypervisor matches the search pattern.
    return TEST_HYPERS_OBJ
def fake_compute_node_get(context, compute_id):
    """Stub compute_node_get: find a fake hypervisor by its integer id."""
    wanted = int(compute_id)
    match = next((node for node in TEST_HYPERS_OBJ if node.id == wanted), None)
    if match is None:
        raise exception.ComputeHostNotFound(host=compute_id)
    return match
def fake_service_get_by_compute_host(context, host):
    """Stub: return the fake service bound to ``host`` (None when absent)."""
    return next((svc for svc in TEST_SERVICES if svc.host == host), None)
def fake_compute_node_statistics(context):
    """Stub: aggregate resource statistics across every fake hypervisor."""
    summed_keys = (
        'vcpus', 'memory_mb', 'local_gb', 'vcpus_used', 'memory_mb_used',
        'local_gb_used', 'free_ram_mb', 'free_disk_gb', 'current_workload',
        'running_vms', 'disk_available_least',
    )
    # Every key is the attribute sum over all hypervisors, except 'count'
    # which is simply how many hypervisors there are.
    totals = {key: sum(getattr(hyper, key) for hyper in TEST_HYPERS_OBJ)
              for key in summed_keys}
    totals['count'] = len(TEST_HYPERS_OBJ)
    return totals
def fake_instance_get_all_by_host(context, host):
    """Stub: build fake instance objects for the servers hosted on ``host``."""
    return [fake_instance.fake_instance_obj(context, **server)
            for server in TEST_SERVERS
            if server['host'] == host]
class HypervisorsTestV21(test.NoDBTestCase):
# copying the objects locally so the cells testcases can provide their own
TEST_HYPERS_OBJ = copy.deepcopy(TEST_HYPERS_OBJ)
TEST_SERVICES = copy.deepcopy(TEST_SERVICES)
TEST_SERVERS = copy.deepcopy(TEST_SERVERS)
DETAIL_HYPERS_DICTS = copy.deepcopy(TEST_HYPERS)
del DETAIL_HYPERS_DICTS[0]['service_id']
del DETAIL_HYPERS_DICTS[1]['service_id']
del DETAIL_HYPERS_DICTS[0]['host']
del DETAIL_HYPERS_DICTS[1]['host']
DETAIL_HYPERS_DICTS[0].update({'state': 'up',
'status': 'enabled',
'service': dict(id=1, host='compute1',
disabled_reason=None)})
DETAIL_HYPERS_DICTS[1].update({'state': 'up',
'status': 'enabled',
'service': dict(id=2, host='compute2',
disabled_reason=None)})
INDEX_HYPER_DICTS = [
dict(id=1, hypervisor_hostname="hyper1",
state='up', status='enabled'),
dict(id=2, hypervisor_hostname="hyper2",
state='up', status='enabled')]
def _get_request(self, use_admin_context):
return fakes.HTTPRequest.blank('', use_admin_context=use_admin_context)
def _set_up_controller(self):
self.controller = hypervisors_v21.HypervisorsController()
self.controller.servicegroup_api.service_is_up = mock.MagicMock(
return_value=True)
def setUp(self):
super(HypervisorsTestV21, self).setUp()
self._set_up_controller()
self.rule_hyp_show = "os_compute_api:os-hypervisors"
self.stubs.Set(self.controller.host_api, 'compute_node_get_all',
fake_compute_node_get_all)
self.stubs.Set(self.controller.host_api, 'service_get_by_compute_host',
fake_service_get_by_compute_host)
self.stubs.Set(self.controller.host_api,
'compute_node_search_by_hypervisor',
fake_compute_node_search_by_hypervisor)
self.stubs.Set(self.controller.host_api, 'compute_node_get',
fake_compute_node_get)
self.stub_out('nova.db.compute_node_statistics',
fake_compute_node_statistics)
def test_view_hypervisor_nodetail_noservers(self):
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], False)
self.assertEqual(result, self.INDEX_HYPER_DICTS[0])
def test_view_hypervisor_detail_noservers(self):
result = self.controller._view_hypervisor(
self.TEST_HYPERS_OBJ[0], self.TEST_SERVICES[0], True)
self.assertEqual(result, self.DETAIL_HYPERS_DICTS[0])
def test_view_hypervisor_servers(self):
result = self.controller._view_hypervisor(self.TEST_HYPERS_OBJ[0],
self.TEST_SERVICES[0],
False, self.TEST_SERVERS)
expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
expected_dict.update({'servers': [
dict(name="inst1", uuid=uuids.instance_1),
dict(name="inst2", uuid=uuids.instance_2),
dict(name="inst3", uuid=uuids.instance_3),
dict(name="inst4", uuid=uuids.instance_4)]})
self.assertEqual(result, expected_dict)
def test_index(self):
req = self._get_request(True)
result = self.controller.index(req)
self.assertEqual(result, dict(hypervisors=self.INDEX_HYPER_DICTS))
def test_index_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_detail(self):
req = self._get_request(True)
result = self.controller.detail(req)
self.assertEqual(result, dict(hypervisors=self.DETAIL_HYPERS_DICTS))
def test_detail_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.detail, req)
def test_show_noid(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, '3')
def test_show_non_integer_id(self):
req = self._get_request(True)
self.assertRaises(exc.HTTPNotFound, self.controller.show, req, 'abc')
def test_show_withid(self):
req = self._get_request(True)
result = self.controller.show(req, self.TEST_HYPERS_OBJ[0].id)
self.assertEqual(result, dict(hypervisor=self.DETAIL_HYPERS_DICTS[0]))
def test_show_non_admin(self):
req = self._get_request(False)
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.show, req,
self.TEST_HYPERS_OBJ[0].id)
def test_uptime_noid(self):
    # Unknown hypervisor id maps to 404.
    req = self._get_request(True)
    self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req, '3')

def test_uptime_notimplemented(self):
    # Drivers without uptime support propagate HTTPNotImplemented
    # straight through the controller.
    def fake_get_host_uptime(context, hyp):
        raise exc.HTTPNotImplemented()

    self.stubs.Set(self.controller.host_api, 'get_host_uptime',
                   fake_get_host_uptime)
    req = self._get_request(True)
    self.assertRaises(exc.HTTPNotImplemented,
                      self.controller.uptime, req,
                      self.TEST_HYPERS_OBJ[0].id)

def test_uptime_implemented(self):
    # A successful call merges an 'uptime' key into the index view.
    def fake_get_host_uptime(context, hyp):
        return "fake uptime"

    self.stubs.Set(self.controller.host_api, 'get_host_uptime',
                   fake_get_host_uptime)
    req = self._get_request(True)
    result = self.controller.uptime(req, self.TEST_HYPERS_OBJ[0].id)
    expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS[0])
    expected_dict.update({'uptime': "fake uptime"})
    self.assertEqual(result, dict(hypervisor=expected_dict))

def test_uptime_non_integer_id(self):
    req = self._get_request(True)
    self.assertRaises(exc.HTTPNotFound, self.controller.uptime, req, 'abc')

def test_uptime_non_admin(self):
    req = self._get_request(False)
    self.assertRaises(exception.PolicyNotAuthorized,
                      self.controller.uptime, req,
                      self.TEST_HYPERS_OBJ[0].id)

def test_uptime_hypervisor_down(self):
    # An unavailable compute service surfaces as a 400 to the caller.
    def fake_get_host_uptime(context, hyp):
        raise exception.ComputeServiceUnavailable(host='dummy')

    self.stubs.Set(self.controller.host_api, 'get_host_uptime',
                   fake_get_host_uptime)
    req = self._get_request(True)
    self.assertRaises(exc.HTTPBadRequest,
                      self.controller.uptime, req,
                      self.TEST_HYPERS_OBJ[0].id)
def test_search(self):
    # Hypervisor-name substring search returns index-style dicts.
    req = self._get_request(True)
    result = self.controller.search(req, 'hyper')
    self.assertEqual(result, dict(hypervisors=self.INDEX_HYPER_DICTS))

def test_search_non_admin(self):
    req = self._get_request(False)
    self.assertRaises(exception.PolicyNotAuthorized,
                      self.controller.search, req,
                      self.TEST_HYPERS_OBJ[0].id)

def test_search_non_exist(self):
    # An empty search result maps to 404.
    def fake_compute_node_search_by_hypervisor_return_empty(context,
                                                            hypervisor_re):
        return []

    self.stubs.Set(self.controller.host_api,
                   'compute_node_search_by_hypervisor',
                   fake_compute_node_search_by_hypervisor_return_empty)
    req = self._get_request(True)
    self.assertRaises(exc.HTTPNotFound, self.controller.search, req, 'a')

@mock.patch.object(objects.InstanceList, 'get_by_host',
                   side_effect=fake_instance_get_all_by_host)
def test_servers(self, mock_get):
    req = self._get_request(True)
    result = self.controller.servers(req, 'hyper')
    expected_dict = copy.deepcopy(self.INDEX_HYPER_DICTS)
    expected_dict[0].update({'servers': [
        dict(uuid=uuids.instance_1),
        dict(uuid=uuids.instance_3)]})
    expected_dict[1].update({'servers': [
        dict(uuid=uuids.instance_2),
        dict(uuid=uuids.instance_4)]})
    # The API also returns server names; strip them so the comparison
    # only pins the uuids listed above.
    for output in result['hypervisors']:
        servers = output['servers']
        for server in servers:
            del server['name']
    self.assertEqual(result, dict(hypervisors=expected_dict))

def test_servers_non_id(self):
    # No hypervisor matching the given id/pattern -> 404.
    def fake_compute_node_search_by_hypervisor_return_empty(context,
                                                            hypervisor_re):
        return []

    self.stubs.Set(self.controller.host_api,
                   'compute_node_search_by_hypervisor',
                   fake_compute_node_search_by_hypervisor_return_empty)
    req = self._get_request(True)
    self.assertRaises(exc.HTTPNotFound,
                      self.controller.servers,
                      req, '115')

def test_servers_non_admin(self):
    req = self._get_request(False)
    self.assertRaises(exception.PolicyNotAuthorized,
                      self.controller.servers, req,
                      self.TEST_HYPERS_OBJ[0].id)

def test_servers_with_non_integer_hypervisor_id(self):
    def fake_compute_node_search_by_hypervisor_return_empty(context,
                                                            hypervisor_re):
        return []

    self.stubs.Set(self.controller.host_api,
                   'compute_node_search_by_hypervisor',
                   fake_compute_node_search_by_hypervisor_return_empty)
    req = self._get_request(True)
    self.assertRaises(exc.HTTPNotFound,
                      self.controller.servers, req, 'abc')

def test_servers_with_no_server(self):
    # A hypervisor with no instances still appears, just without a
    # 'servers' entry.
    def fake_instance_get_all_by_host_return_empty(context, hypervisor_re):
        return []

    self.stubs.Set(self.controller.host_api, 'instance_get_all_by_host',
                   fake_instance_get_all_by_host_return_empty)
    req = self._get_request(True)
    result = self.controller.servers(req, self.TEST_HYPERS_OBJ[0].id)
    self.assertEqual(result, dict(hypervisors=self.INDEX_HYPER_DICTS))
def test_statistics(self):
    # Statistics are aggregated across both fake hypervisors.
    req = self._get_request(True)
    result = self.controller.statistics(req)
    self.assertEqual(result, dict(hypervisor_statistics=dict(
        count=2,
        vcpus=8,
        memory_mb=20 * 1024,
        local_gb=500,
        vcpus_used=4,
        memory_mb_used=10 * 1024,
        local_gb_used=250,
        free_ram_mb=10 * 1024,
        free_disk_gb=250,
        current_workload=4,
        running_vms=4,
        disk_available_least=200)))

def test_statistics_non_admin(self):
    req = self._get_request(False)
    self.assertRaises(exception.PolicyNotAuthorized,
                      self.controller.statistics, req)
# Cell path prefix used to wrap every id/host into 'cell1!<item>' form.
_CELL_PATH = 'cell1'


class CellHypervisorsTestV21(HypervisorsTestV21):
    # Re-run the whole HypervisorsTestV21 suite in cells mode: the fixture
    # objects are wrapped in cell-aware proxies and every id/host in the
    # expected dicts is rewritten with the cell path.
    TEST_HYPERS_OBJ = [cells_utils.ComputeNodeProxy(obj, _CELL_PATH)
                       for obj in TEST_HYPERS_OBJ]
    TEST_SERVICES = [cells_utils.ServiceProxy(obj, _CELL_PATH)
                     for obj in TEST_SERVICES]

    TEST_SERVERS = [dict(server,
                         host=cells_utils.cell_with_item(_CELL_PATH,
                                                         server['host']))
                    for server in TEST_SERVERS]

    # Deep-copy first so the parent class's expected dicts are not mutated,
    # then rewrite ids and service ids/hosts with the cell path.
    DETAIL_HYPERS_DICTS = copy.deepcopy(HypervisorsTestV21.DETAIL_HYPERS_DICTS)
    DETAIL_HYPERS_DICTS = [dict(hyp,
                                id=cells_utils.cell_with_item(_CELL_PATH,
                                                              hyp['id']),
                                service=dict(hyp['service'],
                                             id=cells_utils.cell_with_item(
                                                 _CELL_PATH,
                                                 hyp['service']['id']),
                                             host=cells_utils.cell_with_item(
                                                 _CELL_PATH,
                                                 hyp['service']['host'])))
                           for hyp in DETAIL_HYPERS_DICTS]

    INDEX_HYPER_DICTS = copy.deepcopy(HypervisorsTestV21.INDEX_HYPER_DICTS)
    INDEX_HYPER_DICTS = [dict(hyp,
                              id=cells_utils.cell_with_item(_CELL_PATH,
                                                            hyp['id']))
                         for hyp in INDEX_HYPER_DICTS]

    @classmethod
    def fake_compute_node_get_all(cls, context):
        # Cell-aware replacement for the module-level fake.
        return cls.TEST_HYPERS_OBJ

    @classmethod
    def fake_compute_node_search_by_hypervisor(cls, context, hypervisor_re):
        return cls.TEST_HYPERS_OBJ

    @classmethod
    def fake_compute_node_get(cls, context, compute_id):
        for hyper in cls.TEST_HYPERS_OBJ:
            if hyper.id == compute_id:
                return hyper
        raise exception.ComputeHostNotFound(host=compute_id)

    @classmethod
    def fake_service_get_by_compute_host(cls, context, host):
        # NOTE(review): falls through and returns None when no service
        # matches -- confirm callers tolerate that.
        for service in cls.TEST_SERVICES:
            if service.host == host:
                return service

    @classmethod
    def fake_instance_get_all_by_host(cls, context, host):
        results = []
        for inst in cls.TEST_SERVERS:
            if inst['host'] == host:
                results.append(inst)
        return results

    def setUp(self):
        # Enable cells API mode before the parent setUp wires the
        # controller, then stub host_api with the cell-aware fakes.
        self.flags(enable=True, cell_type='api', group='cells')
        super(CellHypervisorsTestV21, self).setUp()
        self.stubs.Set(self.controller.host_api, 'compute_node_get_all',
                       self.fake_compute_node_get_all)
        self.stubs.Set(self.controller.host_api, 'service_get_by_compute_host',
                       self.fake_service_get_by_compute_host)
        self.stubs.Set(self.controller.host_api,
                       'compute_node_search_by_hypervisor',
                       self.fake_compute_node_search_by_hypervisor)
        self.stubs.Set(self.controller.host_api, 'compute_node_get',
                       self.fake_compute_node_get)
        self.stubs.Set(self.controller.host_api, 'compute_node_statistics',
                       fake_compute_node_statistics)
        self.stubs.Set(self.controller.host_api, 'instance_get_all_by_host',
                       self.fake_instance_get_all_by_host)
| {
"content_hash": "b40e13939b3a7a89b710bd39b9527e97",
"timestamp": "",
"source": "github",
"line_count": 498,
"max_line_length": 79,
"avg_line_length": 38.29317269076305,
"alnum_prop": 0.5581541688515994,
"repo_name": "bigswitch/nova",
"id": "6be2584574f955bbf0114509f8d731a419e7f510",
"size": "19710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/tests/unit/api/openstack/compute/test_hypervisors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17220528"
},
{
"name": "Shell",
"bytes": "36658"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
} |
from setuptools import setup

# Package metadata collected in one mapping so it can be inspected or
# reused; setup() receives exactly the same keyword arguments as before.
PACKAGE_INFO = dict(
    name='Overtime-Calculator',
    version='0.1',
    description='Python application to help calculate work overtime surplus/deficit.',
    author='Christian Chavez',
    author_email='x10an14@gmail.com',
    # url='https://www.non-existing.com',
    setup_requires=['pytest-runner'],
    tests_require=['pytest', 'hypothesis'],
    packages=['src'],
)

setup(**PACKAGE_INFO)
| {
"content_hash": "93fefa0cc4bff3145dfa05ed99f71d2b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 86,
"avg_line_length": 30.153846153846153,
"alnum_prop": 0.6785714285714286,
"repo_name": "x10an14/overtime-calculator",
"id": "8e23b9b0036bb9ad7acd81c50dba87f2ab1864c6",
"size": "415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "overtime_calculator/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1131"
},
{
"name": "Python",
"bytes": "32107"
}
],
"symlink_target": ""
} |
"""Iteratively build Rule objects from RuleData objects."""
__all__ = [
'build_rules',
]
import itertools
import iga.context
import iga.filetype
from iga.core import KeyedSets
from iga.core import group
from iga.label import Label
from iga.rule import Rule
def build_rules(package, rule_datas,
                *, _cxt=None, _get_file_type=iga.filetype.get):
    """Build Rule objects from a list of RuleData iteratively.

    Seeds each rule's inputs from filesystem globs, derives outputs, then
    propagates outputs between rules until a fixed point is reached (no
    rule gains any new input).  Returns the list of Rule objects.
    """
    srcdir = (_cxt or iga.context.current())['source']
    rules = [Rule.make(rule_data) for rule_data in rule_datas]
    # Glob source directory.
    for rule, rule_data in zip(rules, rule_datas):
        rule.inputs.update(glob_keyed_sets(
            rule.inputs.keys(),
            rule_data.input_patterns,
            srcdir,
            package,
            _get_file_type=_get_file_type,
        ))
    # Make outputs from inputs.
    for rule in rules:
        rule.outputs.update(rule.rule_type.make_outputs(rule.inputs))
    # Iteratively update inputs from other rules' outputs.  added_outputs
    # holds only what changed last round, so the loop terminates once no
    # rule produces anything new.
    added_outputs = {rule.name: rule.outputs for rule in rules}
    while added_outputs:
        added_inputs = []
        for rule, rule_data in zip(rules, rule_datas):
            adding = KeyedSets(rule.inputs.keys())
            # Gather outputs from other rules (never a rule's own).
            for name, outputs in added_outputs.items():
                if name != rule.name:
                    adding.update(outputs)
            # Match against this rule's input_patterns.
            adding = match_keyed_sets(adding, rule_data.input_patterns)
            # Remove labels that are already there.
            adding.difference_update(rule.inputs)
            # If it's still non-empty, then changed is True.
            if adding:
                added_inputs.append((rule, adding))
        # Update inputs and make outputs from newly-added inputs.
        added_outputs = {}
        for rule, adding in added_inputs:
            rule.inputs.update(adding)
            outputs = rule.rule_type.make_outputs(adding)
            if outputs:
                rule.outputs.update(outputs)
                added_outputs[rule.name] = outputs
    return rules
def glob_keyed_sets(keys, patterns, from_dir, package,
                    *, _get_file_type):
    """Glob *patterns* under from_dir/package and group the resulting
    labels by file type into a KeyedSets over *keys*."""
    base = from_dir / package
    labels = (
        _path_to_label(path, from_dir, package)
        for pattern in patterns
        for path in pattern.glob(base)
    )
    result = KeyedSets(keys)
    result.update(group(labels, key=_get_file_type))
    return result
def match_keyed_sets(ksets, patterns):
    """Return a new KeyedSets holding the labels of *ksets* whose target
    matches at least one of *patterns*."""
    matched = KeyedSets(ksets.keys())
    for key in ksets:
        matched[key].update(
            label
            for pattern in patterns
            for label in ksets[key]
            if pattern.match(label.target)
        )
    return matched
def _path_to_label(path, root, package):
    """Convert *path* (which lives under root/package) into a Label."""
    return Label.make(package, path.relative_to(root / package))
| {
"content_hash": "53c8e95bd9bc28f211c65a4a89638ef3",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 76,
"avg_line_length": 33.98863636363637,
"alnum_prop": 0.6128385155466399,
"repo_name": "clchiou/iga",
"id": "848ffb6ee219d43371d76b5b0f7451fe6f87117f",
"size": "2991",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iga/build_rules.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "160"
},
{
"name": "C++",
"bytes": "86"
},
{
"name": "Python",
"bytes": "57726"
},
{
"name": "Shell",
"bytes": "155"
}
],
"symlink_target": ""
} |
# Register the grid factory on the global JumpScale namespace so callers
# can reach it as ``j.core.grid``.
from JumpScale import j
from .GridFactory import GridFactory

# Ensure the ``j.core`` namespace exists before attaching the factory.
j.base.loader.makeAvailable(j, 'core')
j.core.grid = GridFactory()
| {
"content_hash": "814b1430b9b996f56dd66b522245ebe0",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 38,
"avg_line_length": 32,
"alnum_prop": 0.7890625,
"repo_name": "Jumpscale/jumpscale6_core",
"id": "31ed2ade20af9027231470a15b7ddadc32efc750",
"size": "128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/JumpScale/grid/grid/__init__.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "3681"
},
{
"name": "HTML",
"bytes": "11738"
},
{
"name": "JavaScript",
"bytes": "70132"
},
{
"name": "Lua",
"bytes": "2162"
},
{
"name": "Python",
"bytes": "5848017"
},
{
"name": "Shell",
"bytes": "7692"
}
],
"symlink_target": ""
} |
from random import choice
import sys
def generateModel(text, order):
    """Build an order-``order`` Markov model from *text*.

    Returns a dict mapping each fragment of ``order`` characters to a
    dict of {next_character: occurrence_count}.  An empty dict is
    returned when the text is shorter than ``order + 1`` characters.
    """
    model = {}
    for i in range(len(text) - order):
        fragment = text[i:i + order]
        next_letter = text[i + order]
        # setdefault/get replace the original double membership checks;
        # the resulting counts are identical.
        counts = model.setdefault(fragment, {})
        counts[next_letter] = counts.get(next_letter, 0) + 1
    return model
def getNextCharacter(model, fragment):
    """Pick the next character after *fragment*, weighted by the observed
    frequencies stored in *model*."""
    weighted = [
        letter
        for letter, count in model[fragment].items()
        for _ in range(count)
    ]
    return choice(weighted)
def generateText(text, order, length):
    """Generate and print ``length - order`` characters of Markov text
    seeded with the first *order* characters of *text*."""
    model = generateModel(text, order)
    fragment = text[:order]
    pieces = []
    for _ in range(length - order):
        nxt = getNextCharacter(model, fragment)
        pieces.append(nxt)
        # Slide the window forward by one character.
        fragment = fragment[1:] + nxt
    print("".join(pieces))
# Placeholder corpus; replace with real training text for useful output.
text = "some sample text"

if __name__ == "__main__":
    # Usage: markov.py <order> <length>
    generateText(text, int(sys.argv[1]), int(sys.argv[2]))
| {
"content_hash": "a8660b348a0966340767ab444af95e34",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 57,
"avg_line_length": 26.54054054054054,
"alnum_prop": 0.6995926680244399,
"repo_name": "HiroIshikawa/21playground",
"id": "793b6d10efb3ba9f685454cc41e2613340e90c0d",
"size": "982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "merkov/markov.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "51720"
},
{
"name": "CSS",
"bytes": "57775"
},
{
"name": "HTML",
"bytes": "40205"
},
{
"name": "JavaScript",
"bytes": "73667"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "38714409"
},
{
"name": "Shell",
"bytes": "30454"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import numpy as np
from . import Writer
import pyspeckit
import warnings
# Resolve a FITS handler: prefer astropy.io.fits, fall back to the
# standalone pyfits package; record availability in ``fitscheck``.
fitscheck = True
try:
    import astropy.io.fits as pyfits
except ImportError:
    # The fallback import needs its own try/except: an exception raised
    # inside an except handler is NOT caught by a sibling except clause of
    # the same try, so the original second handler was unreachable and the
    # module crashed when neither library was installed.
    try:
        import pyfits
    except ImportError:
        fitscheck = False
class write_fits(Writer):
    """Writer that serializes a pyspeckit Spectrum to a FITS file."""

    def write_data(self, filename=None, newsuffix='out', clobber=True,
                   tolerance=1e-8, write_error=True, **kwargs):
        """
        Write spectrum to fits file.

        Parameters
        ----------
        filename : str or None
            Output path; defaults to '<fileprefix>_<newsuffix>.fits'.
        newsuffix : str
            Suffix used when *filename* is not given.
        clobber : bool
            Overwrite an existing file.
        tolerance : float
            Linearity tolerance passed to the x-array when deriving a
            WCS header.
        write_error : bool
            Also store the error array alongside the data.

        Raises
        ------
        ImportError
            If neither astropy.io.fits nor pyfits could be imported.
        """
        if not fitscheck:
            raise ImportError("Could not import FITS handler (astropy.io.fits or pyfits).")
        if filename is None:
            fn = "{0}_{1}.fits".format(self.Spectrum.fileprefix, newsuffix)
        else:
            fn = filename
        # NOTE: mutates the spectrum's own header object in place.
        header = self.Spectrum.header
        header['ORIGIN'] = 'pyspeckit version %s' % pyspeckit.__version__
        header['OBJECT'] = self.Spectrum.specname
        unit = self.Spectrum.unit or self.Spectrum.header.get('BUNIT')
        if unit is not None:
            header['BUNIT'] = unit
        #header_nowcs = wcs_utils.strip_wcs_from_header(header)
        #header.insert(2, pyfits.Card(keyword='NAXIS', value=1))
        #header.insert(3, pyfits.Card(keyword='NAXIS1', value=len(self.Spectrum)))
        # Generate a WCS header from the X-array
        if self.Spectrum.xarr._make_header(tolerance=tolerance):
            # Linear x-axis: encode it as WCS keywords and store only the
            # data (+ error) rows.  NOTE: iteritems() ties this to Python 2.
            for k,v in self.Spectrum.xarr.wcshead.iteritems():
                if v is not None:
                    try:
                        header[k] = v
                    except ValueError:
                        try:
                            #v is a Quantity
                            header[k] = v.value
                        except AttributeError:
                            #v is a Unit
                            header[k] = v.to_string()
            if write_error:
                data = np.array( [self.Spectrum.data, self.Spectrum.error] )
            else:
                data = self.Spectrum.data
            print("Writing a FITS-standard (linear-x-axis) spectrum to %s" % (fn))
        else:
            # Nonlinear x-axis: the x-array itself is stored as the first
            # data row, and deceptive WCS keywords are dropped.
            # if no header, overwrite header parameters that would be deceptive
            for k,v in self.Spectrum.xarr.wcshead.iteritems():
                if v is None:
                    if header.get(k): del header[k]
                else:
                    header[k] = v
            if write_error:
                data = np.array( [self.Spectrum.xarr, self.Spectrum.data, self.Spectrum.error] )
            else:
                data = np.array( [self.Spectrum.xarr, self.Spectrum.data] )
            warnings.warn("Writing a nonlinear X-axis spectrum to %s (header keywords are not FITS-compliant)" % (fn))
        try:
            HDU = pyfits.PrimaryHDU(data=data, header=header)
        except AttributeError:
            # Rebuild the header card-by-card when the header object is
            # incompatible with this pyfits version.
            print("Strange header error. Attempting workaround.")
            HDU = pyfits.PrimaryHDU(data=data,
                                    header=pyfits.Header([pyfits.card.Card(k,v)
                                                          for k,v in
                                                          header.iteritems()]))
        HDU.verify('fix')
        HDU.writeto(fn, clobber=clobber, output_verify='fix', **kwargs)
| {
"content_hash": "30e7d6e0e37c71d89c69ef1dcb77c707",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 118,
"avg_line_length": 39.54216867469879,
"alnum_prop": 0.5274223034734917,
"repo_name": "bsipocz/pyspeckit",
"id": "5fb970699ed2337f933905f1b2c5adfab35ccb68",
"size": "3282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyspeckit/spectrum/writers/fits_writer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "53"
},
{
"name": "Python",
"bytes": "1187252"
},
{
"name": "Shell",
"bytes": "313"
}
],
"symlink_target": ""
} |
import os, sys
import numpy as np
import cPickle
from fnmatch import fnmatch
import glob, os
filename = '/home/michalis/Documents/sequence-learning/data/user_1/session_3/robot_1'
def getUsers(root):
    """Return the id suffixes of the ``user_<id>`` entries found in *root*."""
    # os.listdir yields bare names; the split('/') step is kept for exact
    # parity with the original behavior.
    return [entry.split('/')[-1].split('_')[-1] for entry in os.listdir(root)]
'''
EEG file parser
'''
def ReadSessionFile(filename):
    """Parse a robot EEG session file into keyed lists of token rows.

    Each line has the form ``<key> <v1> <v2> ...``; the value tokens are
    appended to data[key].  data['h'] is seeded with a sentinel row so
    that every 'eeg' row can be paired with the most recent 'h' row in
    data['h_eeg']; the sentinel is removed (np.delete) before returning,
    which also converts data['h'] to an ndarray.
    """
    data = {'h_eeg': [], 'h': [[1, 1, 1, 1]]}
    with open(filename, "r") as f:
        txt = f.readlines()
    # Fixes vs. original: the dead no-op statement ``data['h'][-1]`` and
    # the missing-parentheses ``f.close`` (a no-op attribute access) are
    # removed; the line is split once instead of twice; blank lines are
    # skipped instead of raising IndexError.
    for line in txt:
        tokens = line.split()
        if not tokens:
            continue
        key, value = tokens[0], tokens[1:]
        if key not in data:
            data[key] = []
        if key == 'eeg':
            # Pair this EEG sample with the latest head row seen so far.
            data['h_eeg'].append(data['h'][-1])
        data[key].append(value)
    data['h'] = np.delete(data['h'], 0, 0)
    return data
def main(argv):
    """Split users into train/test folds and export per-round EEG data.

    argv[0]: root directory containing user_<id>/session_<n>/ data.
    argv[1]: destination directory for the generated fold_<i>/ trees.
    NOTE: Python 2 only (bare print statements below).
    """
    folds = 10
    root = argv[0]
    destination = argv[1]
    train_percent = 0.8
    user_list = getUsers(root)
    for i in range(folds):
        # Output directories for this fold.
        train_success = ('/').join((destination,'fold_'+str(i),'train','success'))
        train_fail=('/').join((destination,'fold_'+str(i),'train','fail'))
        test_success = ('/').join((destination,'fold_'+str(i),'test','success'))
        test_fail=('/').join((destination,'fold_'+str(i),'test','fail'))
        if not os.path.exists(train_success):
            os.makedirs(train_success)
        if not os.path.exists(train_fail):
            os.makedirs(train_fail)
        if not os.path.exists(test_success):
            os.makedirs(test_success)
        if not os.path.exists(test_fail):
            os.makedirs(test_fail)
        # Fresh random train/test split of users for every fold.
        user_list_randperm = np.random.permutation(user_list)
        train_users = user_list_randperm[:int(train_percent*len(user_list))]
        test_users = user_list_randperm[int(train_percent*len(user_list)):]
        print
        print i
        print user_list_randperm
        print train_users,test_users
        #load eeg data for each user
        for user in user_list_randperm:
            for session in os.listdir(('/').join((root,"user_"+user))):
                #read logfile and load labels for all turns in the session
                with open(('/').join((root,"user_"+user,session,"logfile")),"r") as f:
                    logdata = f.readlines()
                f.close
                #for every round of this session
                for j,line in enumerate(logdata):
                    # get WIN-LOSE label
                    label = line.split(" ")[4]
                    #open the EEG_robot file that corresponds to the ith round of the session
                    data = ReadSessionFile(('/').join((root,"user_"+user,session,"robot_"+str(j+1))))
                    if user in train_users: # if user belongs to training in this fold
                        if label == '1':
                            save = ('/').join((train_success,user+'_'+session.split('_')[-1]+'_'+str(j+1)))
                        else:
                            save = ('/').join((train_fail,user+'_'+session.split('_')[-1]+'_'+str(j+1)))
                    else: #if user belongs to testing
                        if label == '1':
                            save = ('/').join((test_success,user+'_'+session.split('_')[-1]+'_'+str(j+1)))
                        else:
                            save = ('/').join((test_fail,user+'_'+session.split('_')[-1]+'_'+str(j+1)))
                    # Convert parsed lists to arrays, then bundle every
                    # band/score channel into one .npz file per round.
                    for key in data.keys():
                        data[key] = np.array(data[key])
                    np.savez(save,h=data['h'],h_eeg=data['h_eeg'],c=data['c'],raw=data['eeg'],
                        a=data['a'], b=data['b'], g=data['g'], d=data['d'], t=data['t'] ,
                        Aa=data['Aa'], Ab=data['Ab'], Ag=data['Ag'], Ad=data['Ad'], At=data['At'],
                        ascore=data['as'], bscore=data['bs'],gscore=data['gs'],dscore=data['ds'],tscore=data['ts'])
if __name__ == "__main__":
    # Usage: EEG_parser.py <root_dir> <destination_dir>
    #ReadSessionFile(filename)
    main(sys.argv[1:])
    #getUsers(root)
    #LoadEEGDirs(root)
"content_hash": "2c9bc59a4fdecb936f253197db53c454",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 120,
"avg_line_length": 34.07377049180328,
"alnum_prop": 0.4943468847726726,
"repo_name": "MikeMpapa/EEG-Sequence-Learning",
"id": "61c84aa107a15b653a2682cd05ac618ea068dfbf",
"size": "4157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EEG_parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27721"
}
],
"symlink_target": ""
} |
"""@package test_op
@author: Zosia Sobocinska
@date Nov 1, 2014
"""
import unittest
from richtext import op
class OpLengthTest(unittest.TestCase):
    """Verify op.length() for each of the four operation kinds."""

    def test_delete(self):
        # A delete op spans the number of characters it removes.
        length = op.length({'delete': 5})
        self.assertEqual(length, 5)

    def test_retain(self):
        # A retain op spans the number of characters it keeps.
        length = op.length({'retain': 2})
        self.assertEqual(length, 2)

    def test_insert_text(self):
        # Inserted text contributes its own character count.
        length = op.length({'insert': 'text'})
        self.assertEqual(length, 4)

    def test_insert_embed(self):
        # A non-string insert (embed) always counts as one character.
        length = op.length({'insert': 2})
        self.assertEqual(length, 1)
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    # Run the suite when this module is executed directly.
    unittest.main()
| {
"content_hash": "605ea0b978240cb4559070c0067d45d9",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 58,
"avg_line_length": 22.73076923076923,
"alnum_prop": 0.6209813874788495,
"repo_name": "zosiakropka/rich-text-py",
"id": "8f4b9d6c89efd5f0c4bd855dfa2e2ee84ca2d4d6",
"size": "591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_op.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21077"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
import os
import phillydata
# PyPI trove classifiers advertised by setup() below.
CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Environment :: Web Environment',
    'Framework :: Django',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: BSD License',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    'Topic :: Software Development',
]
# Resolve the long description from the README that sits next to setup.py.
_readme_path = os.path.join(os.path.dirname(__file__), 'README.rst')

setup(
    author="Eric Brelsford",
    author_email="eric@596acres.org",
    name='django-phillydata',
    version=phillydata.__version__,
    description=('A set of Django apps for loading and storing data regarding '
                 'the city of Philadelphia.'),
    long_description=open(_readme_path).read(),
    url='https://github.com/ebrelsford/django-phillydata/',
    license='BSD License',
    platforms=['OS Independent'],
    classifiers=CLASSIFIERS,
    install_requires=[
        'Django>=1.3.1',
        'pyproj==1.9.3',
        'django-reversion==1.6.6',
        'django-reversion-compare==0.3.5',
    ],
    packages=find_packages(),
    include_package_data=True,
)
| {
"content_hash": "50461596ca6ea1b96e9b76d427128973",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 88,
"avg_line_length": 30.307692307692307,
"alnum_prop": 0.6387478849407784,
"repo_name": "ebrelsford/django-phillydata",
"id": "93cc64f2f3bf0b3c13412b4973a9969d924c5a40",
"size": "1182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "359"
},
{
"name": "Python",
"bytes": "150372"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import Pyro4.naming
# A bank client.
class client(object):
    """A bank client that exercises every remote bank operation once."""

    def __init__(self, name):
        # Account holder's name; also used as the account id on the bank.
        self.name = name

    def doBusiness(self, bank):
        """Run a scripted sequence of operations against *bank* (a proxy)."""
        print("\n*** %s is doing business with %s:" % (self.name, bank.name()))
        print("Creating account")
        try:
            bank.createAccount(self.name)
        except ValueError as x:
            # The account may already exist from a previous run; recreate it.
            print("Failed: %s" % x)
            print("Removing account and trying again")
            bank.deleteAccount(self.name)
            bank.createAccount(self.name)
        print("Deposit money")
        bank.deposit(self.name, 200.00)
        print("Deposit money")
        bank.deposit(self.name, 500.75)
        print("Balance=%.2f" % bank.balance(self.name))
        print("Withdraw money")
        bank.withdraw(self.name, 400.00)
        print("Withdraw money (overdraw)")
        try:
            bank.withdraw(self.name, 400.00)
        except ValueError as x:
            # Overdrawing is expected to be rejected by the bank.
            print("Failed: %s" % x)
        print("End balance=%.2f" % bank.balance(self.name))
        print("Withdraw money from non-existing account")
        try:
            bank.withdraw('GOD', 2222.22)
            print("!!! Succeeded?!? That is an error")
        except KeyError as x:
            print("Failed as expected: %s" % x)
        print("Deleting non-existing account")
        try:
            bank.deleteAccount('GOD')
            print("!!! Succeeded?!? That is an error")
        except KeyError as x:
            print("Failed as expected: %s" % x)
# Locate the Pyro name server and discover every registered bank.
ns = Pyro4.naming.locateNS()
# list the available banks by looking in the NS for the given prefix path
banknames = [name for name in ns.list(prefix="example.banks.")]
if not banknames:
    raise RuntimeError('There are no banks to do business with!')
banks = []  # list of banks (proxies)
print()
for name in banknames:
    print("Contacting bank: %s" % name)
    uri = ns.lookup(name)
    banks.append(Pyro4.core.Proxy(uri))
# Different clients that do business with all banks
irmen = client('Irmen')
suzy = client('Suzy')
for bank in banks:
    irmen.doBusiness(bank)
    suzy.doBusiness(bank)
# List all accounts
print()
for bank in banks:
    print("The accounts in the %s:" % bank.name())
    accounts = bank.allAccounts()
    for name in accounts.keys():
        print("  %s : %.2f" % (name, accounts[name]))
| {
"content_hash": "977ae4d5f0f919bb060b96f164b1649d",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 79,
"avg_line_length": 30.32051282051282,
"alnum_prop": 0.5987315010570825,
"repo_name": "irmen/Pyro4",
"id": "92ade28a2bbd128d76623e78e4d7f953e8a8b50b",
"size": "2535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/banks/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1283"
},
{
"name": "Python",
"bytes": "618799"
},
{
"name": "Shell",
"bytes": "2394"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import re
from netmiko.ssh_connection import BaseSSHConnection
class JuniperSSH(BaseSSHConnection):
    """Implement methods for interacting with Juniper Networks devices.

    Subclass of SSHConnection. Disables `enable()` and `check_enable_mode()`
    methods. Overrides several methods for Juniper-specific compatibility.
    """

    def session_preparation(self):
        """Prepare the session after the connection has been established.

        Disable paging (the '--more--' prompts).
        Set the base prompt for interaction ('>').
        """
        self.disable_paging(command="set cli screen-length 0\n")
        self.set_base_prompt()

    def config_mode(self, config_command='configure'):
        """Enter configuration mode.

        Checks to see if the session is already in configuration mode first.
        Raises `ValueError` if the session was unable to enter configuration
        mode.
        """
        # Call parent class with specific command for entering config mode
        return super(JuniperSSH, self).config_mode(config_command=config_command)

    def exit_config_mode(self, exit_config='exit configuration-mode'):
        """Exit configuration mode.

        Check if we're in configuration mode. If we are, exit configuration
        mode. If we aren't, do nothing.  Junos asks for confirmation when
        uncommitted changes exist; answer 'yes' so they are discarded.
        """
        output = ""
        if self.check_config_mode():
            output = self.send_command(exit_config, strip_prompt=False,
                                       strip_command=False)
            if 'Exit with uncommitted changes?' in output:
                output += self.send_command('yes', strip_prompt=False,
                                            strip_command=False)
            if self.check_config_mode():
                raise ValueError("Failed to exit configuration mode")
        return output

    def check_config_mode(self, check_string=']'):
        """Check if the device is in configuration mode.  Returns a boolean."""
        # Call parent class with Juniper check_string
        return super(JuniperSSH, self).check_config_mode(check_string=check_string)

    def commit(self, confirm=False, confirm_delay=None, check=False, comment='',
               and_quit=False, delay_factor=10):
        """
        Commit the candidate configuration.

        Commit the entered configuration. Raise an error and return the failure
        if the commit fails.

        Automatically enters configuration mode

        default:
            command_string = commit
        check and (confirm or confirm_dely or comment):
            Exception
        confirm_delay and no confirm:
            Exception
        confirm:
            confirm_delay option
            comment option
            command_string = commit confirmed or commit confirmed <confirm_delay>
        check:
            command_string = commit check
        """
        if check and (confirm or confirm_delay or comment):
            raise ValueError("Invalid arguments supplied with commit check")
        if confirm_delay and not confirm:
            # FIX: the original message wrongly said "both confirm and check"
            # (copy-paste from the guard above), hiding the actual problem.
            raise ValueError("Invalid arguments supplied to commit method: "
                             "confirm_delay requires confirm")

        # Select proper command string based on arguments provided
        command_string = 'commit'
        commit_marker = 'commit complete'
        if check:
            command_string = 'commit check'
            commit_marker = 'configuration check succeeds'
        elif confirm:
            if confirm_delay:
                command_string = 'commit confirmed ' + str(confirm_delay)
            else:
                command_string = 'commit confirmed'
            commit_marker = 'commit confirmed will be automatically rolled back in'

        # wrap the comment in quotes
        if comment:
            if '"' in comment:
                raise ValueError("Invalid comment contains double quote")
            comment = '"{0}"'.format(comment)
            command_string += ' comment ' + comment

        if and_quit:
            command_string += ' and-quit'

        # Enter config mode (if necessary)
        output = self.config_mode()
        output += self.send_command(command_string, strip_prompt=False,
                                    strip_command=False,
                                    delay_factor=delay_factor)

        if commit_marker not in output:
            raise ValueError("Commit failed with the following errors:\n\n{0}"
                             .format(output))
        return output

    def strip_prompt(self, *args, **kwargs):
        """Strip the trailing router prompt from the output."""
        # Call the superclass strip_prompt method
        a_string = super(JuniperSSH, self).strip_prompt(*args, **kwargs)
        # Call additional method to strip some context items
        return self.strip_context_items(a_string)

    @staticmethod
    def strip_context_items(a_string):
        """Strip Juniper-specific output.

        Juniper will also put a configuration context:
        [edit]

        and various chassis contexts:
        {master:0}, {backup:1}

        This method removes those lines.
        """
        strings_to_strip = [
            r'\[edit.*\]',
            r'\{master:.*\}',
            r'\{backup:.*\}',
            r'\{line.*\}',
            r'\{primary.*\}',
            r'\{secondary.*\}',
        ]
        response_list = a_string.split('\n')
        last_line = response_list[-1]
        for pattern in strings_to_strip:
            if re.search(pattern, last_line):
                return "\n".join(response_list[:-1])
        return a_string
| {
"content_hash": "474cf55fa50573d88e7ee16f28fba7bc",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 98,
"avg_line_length": 32.578947368421055,
"alnum_prop": 0.6000718003949022,
"repo_name": "mzbenami/netmiko",
"id": "0cfa1db4d2f7ebd15d548adf692351950424d7eb",
"size": "5571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "netmiko/juniper/juniper_ssh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "110794"
},
{
"name": "Shell",
"bytes": "1201"
}
],
"symlink_target": ""
} |
import urllib.parse
from random import randint
from app.models import db, Joke
from app.bot.constants import *
from app.bot.reply_generator import ReplyGenerator
from app.bot.rule import Rule, transition
# FSM state identifiers used by the @transition decorator.
# NOTE(review): STATE_NEW uses the short value 'new' while STATE_COLD_JOKE
# stores its own constant name -- confirm this asymmetry is intentional.
STATE_NEW = 'new'
STATE_COLD_JOKE = 'STATE_COLD_JOKE'
class ColdJokeIntentionRule(Rule):
    """Replies to a cold-joke intention with a random joke from the DB."""

    @transition(STATE_NEW, {'NLP_decision': STATE_COLD_JOKE}, STATE_NEW)
    def rule_cold_joke(self, bot, user, msg, **template_params):
        """Send one randomly chosen joke to *user*.

        Returns True when a joke was sent, False when the table is empty
        (so other rules may handle the message).
        """
        cnt = Joke.query.filter_by().count()
        if not cnt:
            # Empty table: the original randint(1, 0) raised ValueError.
            return False
        # The original Joke.query.get(randint(1, cnt)) assumed contiguous
        # primary keys and returned None after any deletion; a random
        # OFFSET is id-agnostic.
        joke = Joke.query.offset(randint(0, cnt - 1)).first()
        bot.bot_send_message(user.id, {"text": joke.content})
        return True
| {
"content_hash": "9824724df24213452b7c7ccc97ba315f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 72,
"avg_line_length": 29.952380952380953,
"alnum_prop": 0.6931637519872814,
"repo_name": "tmt514/mtsa-dishes-translator",
"id": "437143b6e79bee0190df3c2eccf89759bcbe7858",
"size": "629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/bot/intention_rules/cold_joke_intention_rules.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "97745"
},
{
"name": "Shell",
"bytes": "1950"
}
],
"symlink_target": ""
} |
import numpy.testing as npt
import sys
import os
import pkg_resources
from pymatbridge import messenger
ROOT = __file__
# Resolve the repo-level config.ini relative to this test module's location.
CONFIG = os.path.realpath(os.path.join(ROOT, '../../../../config.ini'))
# Resolved once at import time so it can drive the skipif decorators below.
# (A leftover debug print(CONFIG) was removed here.)
BIN = messenger.get_matlab_bin(config=CONFIG)
def test_config():
    """The repo-level config.ini must exist at the resolved path."""
    config_exists = os.path.isfile(CONFIG)
    npt.assert_equal(config_exists, True)
# BUG FIX: identity comparison with None ("is None"), not "==" (PEP 8 / E711).
# NOTE(review): numpy.testing no longer ships `decorators` in modern NumPy;
# consider migrating to pytest.mark.skipif.
@npt.decorators.skipif(BIN is None, 'No Matlab Installed')
def test_matlab_bin():
    """The detected Matlab bin directory must exist and contain mex/mexext."""
    npt.assert_equal(os.path.isdir(BIN), True)
    # List the directory once instead of twice; accept Unix and Windows names.
    listing = os.listdir(BIN)
    mexext = any(m == 'mexext' or m == 'mexext.exe' for m in listing)
    mex = any(m == 'mex' or m == 'mex.exe' for m in listing)
    npt.assert_equal(mexext, True)
    npt.assert_equal(mex, True)
# BUG FIX: identity comparison with None ("is None"), not "==" (PEP 8 / E711).
@npt.decorators.skipif(BIN is None, 'No Matlab Installed')
def test_matlab_env():
    """ARCH reported by Matlab's environment must match the messenger dir suffix."""
    matlab = os.path.join(messenger.get_matlab_bin(config=CONFIG), 'matlab')
    env = messenger.get_matlab_env(matlab=matlab)
    arch = env['ARCH']
    # Compare only the trailing bitness marker (e.g. '64').
    npt.assert_equal(arch.endswith(messenger.get_messenger_dir()[-2:]), True)
| {
"content_hash": "49fb76020a6113366991599ad4f08110",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 82,
"avg_line_length": 30.454545454545453,
"alnum_prop": 0.672636815920398,
"repo_name": "jjangsangy/python-matlab-bridge",
"id": "e957735e9edcdccf177db8dc6a12172f538167bb",
"size": "1005",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymatbridge/messenger/tests/test_env.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4333"
},
{
"name": "Matlab",
"bytes": "31609"
},
{
"name": "Python",
"bytes": "63906"
}
],
"symlink_target": ""
} |
import ninjag
from ninjag.tk.ioTK import read_all
def test():
    """Generated ninja file for in5.yaml must match the stored solution."""
    src = "input/in5.yaml"
    generated = "output/out5.ninja"
    expected = "solution/sol5.ninja"
    ninjag.main(generated, [src])
    assert read_all(generated) == read_all(expected)
| {
"content_hash": "24b8f32819b289cdcfb8c21549d342ee",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 38,
"avg_line_length": 25.333333333333332,
"alnum_prop": 0.6611842105263158,
"repo_name": "yuhangwang/ninjag-python",
"id": "2c5309ce61bfb9cf7d40253926a1a9361e9c5577",
"size": "304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/frontend/build_dep/test_5.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24739"
}
],
"symlink_target": ""
} |
from geoportal.templatetags import geoportal_tags
| {
"content_hash": "44d57cfba714a4a22d2454a08bfcc406",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 49,
"avg_line_length": 50,
"alnum_prop": 0.88,
"repo_name": "brutasse/django-geoportail",
"id": "3d284fb223369f197311d5915c5587f951440eb0",
"size": "50",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geoportal/templatetags/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "9868"
},
{
"name": "Python",
"bytes": "31984"
}
],
"symlink_target": ""
} |
"""
model.py
Copyright (c) 2010 En-Ran Zhou, Liang-Heng Chen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import logging
import random
import re
import string
from google.appengine.ext import db, search
def normalize_keywords(keywords):
    """Split a comma-separated keyword string into a cleaned keyword list.

    Each keyword is stripped, internal whitespace runs are collapsed to a
    single space, and empty entries are dropped.

    :param keywords: comma-separated keyword string, e.g. "a,  b  c , ,d"
    :return: list of normalized keyword strings (always a list)
    """
    # BUG FIX: use a raw string for the regex, and build real lists instead of
    # map/filter — under Python 3 the logging join below consumed the filter
    # iterator, so the function returned an exhausted iterator.
    cleaned = [re.sub(r'\s+', ' ', k.strip()) for k in keywords.split(',')]
    cleaned = [k for k in cleaned if k]
    logging.info('keywords: %s', ', '.join(cleaned))
    return cleaned
class Note(search.SearchableModel):
    """A user-owned note, full-text searchable via GAE's SearchableModel."""
    # mandatory
    owner = db.UserProperty(required=True, auto_current_user_add=True)
    create_time = db.DateTimeProperty(required=True, auto_now_add=True)
    title = db.StringProperty(required=True)
    # optional
    content = db.TextProperty()
    keywords = db.StringListProperty()
    priority = db.IntegerProperty()
    progress = db.IntegerProperty()
    def build_keyword(self):
        """Derive self.keywords from a leading "[kw1, kw2, ...]" tag in the title.

        Leaves keywords untouched when the title has no bracketed prefix.
        """
        sre = re.match(r'^\[(.*?)\]', self.title.strip())
        if sre is not None:
            self.keywords = normalize_keywords(sre.group(1))
    def before_put(self):
        # Hook run before every datastore write; keeps keywords in sync.
        self.build_keyword()
    def after_put(self):
        # Hook run after every datastore write; intentionally a no-op for now.
        pass
    def put(self):
        """Persist the note, running the before/after hooks around the write."""
        self.before_put()
        search.SearchableModel.put(self)
        self.after_put()
| {
"content_hash": "1a45ad43a2cd15d107db6ca74984c4ba",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 77,
"avg_line_length": 31.73972602739726,
"alnum_prop": 0.7138541217091066,
"repo_name": "zhouer/sagenote",
"id": "dbe1b6e5419b4932cd58b1387d893d070e0f1177",
"size": "2357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/note.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "285"
},
{
"name": "HTML",
"bytes": "2063"
},
{
"name": "JavaScript",
"bytes": "3692"
},
{
"name": "Python",
"bytes": "12766"
}
],
"symlink_target": ""
} |
__author__ = 'AVIRAM'
#sys.path.insert(0, 'lib') #we need this line in order to make libraries imported from lib folder work properly
import requests #Used for http requests
import symbol
# Parallel lookup tables: symbol name -> point value (same index in each list).
symbol2=["SP","NSDQ","DAX","GOLD","C.OIL"]
priceForIndex=[50,20,25,500,100]
def calculatingRisk(symbolInput, enterPri, stopPri, vol, type):
    """Return the cash risk of a position.

    :param symbolInput: symbol name looked up in the data store
    :param enterPri: entry price
    :param stopPri: stop price
    :param vol: position volume
    :param type: 1 for long, 0 for short
    """
    if type == 1:
        # long: risk is the distance down to the stop
        point_delta = enterPri - stopPri
    else:
        # short: risk is the distance up to the stop
        point_delta = stopPri - enterPri
    # Per-point cash value comes from the data store.
    point_value = symbol.Symbol.getPrice(symbolInput)
    risk = point_delta * vol * point_value
    return risk
#print(calculatingRisk("SP",100,90,1,1))
| {
"content_hash": "5ce08d40a761a20a0606b08acd2a9db8",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 112,
"avg_line_length": 29.925925925925927,
"alnum_prop": 0.6608910891089109,
"repo_name": "yaakov300/ForexApp",
"id": "269302360b68e09c4659e6fdde085640a02a36e8",
"size": "808",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models/CalculateRisk.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15476"
},
{
"name": "HTML",
"bytes": "40036"
},
{
"name": "JavaScript",
"bytes": "7134"
},
{
"name": "Python",
"bytes": "820106"
}
],
"symlink_target": ""
} |
import datetime
import factory
import factory.fuzzy
from pytz import UTC
from ...contract_mgt.tests.factories import SupportContractFactory
class CostCenterFactory(factory.django.DjangoModelFactory):
    """Builds opex_mgt.CostCenter rows with randomized identifier fields."""
    # total_invoice_amount
    # total_accrual_amount
    support_contract = factory.SubFactory(SupportContractFactory)
    account_code = factory.fuzzy.FuzzyText(length=12)
    no = factory.fuzzy.FuzzyText(length=12)
    account_code_cost_center = factory.fuzzy.FuzzyText(length=12)
    division = factory.fuzzy.FuzzyText(length=12)
    # must change to section selection
    section = factory.fuzzy.FuzzyText(length=12)
    class Meta:
        model = 'opex_mgt.CostCenter'
class OpexAccrualFactory(factory.django.DjangoModelFactory):
    """Builds opex_mgt.OpexAccrual rows with a random amount and 2008 date."""
    # total_invoice_amount
    # total_accrual_amount
    cost_center = factory.SubFactory(CostCenterFactory)
    amount = factory.fuzzy.FuzzyDecimal(1.11,999999.99,2)
    # Random timezone-aware datetime within calendar year 2008.
    date = factory.fuzzy.FuzzyDateTime(datetime.datetime(2008, 1, 1, tzinfo=UTC),
                                       datetime.datetime(2009, 1, 1, tzinfo=UTC))
    class Meta:
        model = 'opex_mgt.OpexAccrual'
| {
"content_hash": "3666ccd6d53c68110a6650796e645038",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 89,
"avg_line_length": 29.358974358974358,
"alnum_prop": 0.7109170305676856,
"repo_name": "mpdevilleres/tbpc_app",
"id": "2ae20e4272df41b3ceab90e97dba62a9993d3612",
"size": "1145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tbpc/opex_mgt/tests/factories.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "97568"
},
{
"name": "Nginx",
"bytes": "1096"
},
{
"name": "Python",
"bytes": "233826"
},
{
"name": "Shell",
"bytes": "8861"
}
],
"symlink_target": ""
} |
import os
import socket
import random
import time
import shutil
from celery import Celery
import subprocess
from vpp import log
from vpp.config import CONF
from vpp.storage.api import get_storage_api
# Celery application for minion workers; broker/backend come from vpp config.
minion = Celery(
    backend=CONF.celery_backend_url,
    broker=CONF.celery_broker_url,
)
# Ack only after a task finishes so work is redelivered if a worker dies.
minion.conf.task_acks_late=True
# Prefetch one task at a time: transcodes are long-running and uneven.
minion.conf.worker_prefetch_multiplier = 1
minion.conf.task_default_queue = 'default'
# Route transcode tasks to their dedicated queue.
minion.conf.task_routes = {
    'vpp.minion.transcode.transcode_ffmpeg': {'queue': 'transcode'}
}
HOSTNAME = socket.gethostname()
# Per-host log and scratch directories (created lazily inside the task).
minion_log_dir = os.path.join("/var/log/vpp_minion", HOSTNAME)
minion_base_task_dir = os.path.join("/var/run/vpp_minion", HOSTNAME)
storage_api = get_storage_api()
@minion.task(bind=True)
def transcode_ffmpeg(self, task):
    """Download source files, speed them up 2x with ffmpeg, upload the results.

    :param task: a json format task description with keys ``task_id``,
        ``in.dir``/``in.files`` (remote sources) and
        ``out.dir``/``out.files`` (remote destinations, pairwise with in.files)
    """
    print("ffmpeg minion: received task: %s" % task)
    # Per-host log/scratch dirs are created lazily on the first task.
    if not os.path.exists(minion_log_dir):
        os.makedirs(minion_log_dir)
    if not os.path.exists(minion_base_task_dir):
        os.makedirs(minion_base_task_dir)
    task_id = task["task_id"]
    # One log file per task so failures can be inspected individually.
    log_file = os.path.join(minion_log_dir, task_id + ".log")
    LOG = log.get_logger(__name__, log_file)
    LOG.info("on host [%s]: received task: %s" % (HOSTNAME, task))
    task_dir = os.path.join(minion_base_task_dir, task_id)
    if not os.path.exists(task_dir):
        os.makedirs(task_dir)
    remote_src_dir = task["in"]["dir"]
    remote_src_file_names = task["in"]["files"]
    remote_dst_dir = task["out"]["dir"]
    remote_dst_file_names = task["out"]["files"]
    for f_src, f_dst in zip(remote_src_file_names, remote_dst_file_names):
        remote_src_file = os.path.join(remote_src_dir, f_src)
        remote_dst_file = os.path.join(remote_dst_dir, f_dst)
        local_src_file = os.path.join(task_dir, f_src)
        local_dst_file = os.path.join(task_dir, "speedup_x2_" + f_dst)
        try:
            storage_api.download(remote_src_file, local_src_file)
            LOG.info("file %s downloaded from storage server, saved to %s" %
                     (remote_src_file, local_src_file))
        except Exception as e:
            LOG.error("download file %s failed: %s" % (remote_src_file, e))
            return
        # do processing: double the speed of both the audio and video streams
        cmd = 'ffmpeg -y -i %s -strict experimental -filter:a "atempo=2.0" ' \
              '-filter:v "setpts=0.5*PTS" %s' % (local_src_file, local_dst_file)
        proc = subprocess.Popen(cmd,
                                shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)
        # Stream ffmpeg output into the task log until EOF.
        while True:
            line = proc.stdout.readline()
            LOG.info(line)
            if not line:
                break
        # BUG FIX: the child process was never reaped and its exit status was
        # never checked, leaving a zombie and silently uploading a
        # missing/corrupt output file on transcode failure.
        returncode = proc.wait()
        if returncode != 0:
            LOG.error("ffmpeg exited with code %s for %s" %
                      (returncode, local_src_file))
            return
        try:
            storage_api.upload(local_dst_file, remote_dst_file)
            LOG.info("uploaded file %s to storage server, saved to %s" %
                     (local_dst_file, remote_dst_file))
        except Exception as e:
            LOG.error("upload file %s failed: %s" % (local_src_file, e))
            return
    # cleanup
    try:
        shutil.rmtree(task_dir)
        if CONF.dry_run.remove_minion_log_file:
            os.remove(log_file)
    except Exception as e:
        LOG.error("removing task dir[%s]: %s" % (task_dir, e))
| {
"content_hash": "15f3588f24b4d26e7d85c03282a6efd7",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 78,
"avg_line_length": 31.62962962962963,
"alnum_prop": 0.5930913348946136,
"repo_name": "ArthurChiao/videoplusplus",
"id": "f3c0d10fbda56a47944bf0adf553e93c80d2897b",
"size": "4027",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vpp/minion/transcode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6711"
},
{
"name": "Python",
"bytes": "134178"
},
{
"name": "Shell",
"bytes": "28195"
},
{
"name": "XSLT",
"bytes": "1335"
}
],
"symlink_target": ""
} |
def poly_to_line_layer(ds, poly_name, line_name):
    """Creates a line layer from a polygon layer.

    Each polygon ring becomes one line feature; attribute values are copied
    from the source polygon onto every derived line.

    :param ds: writable ogr.DataSource containing the polygon layer
    :param poly_name: name of the source polygon layer
    :param line_name: name of the line layer to (re)create
    """
    # Delete the line layer if it exists so we start fresh.
    if ds.GetLayer(line_name):
        ds.DeleteLayer(line_name)
    # Get the polygon layer and its spatial reference.
    poly_lyr = ds.GetLayer(poly_name)
    sr = poly_lyr.GetSpatialRef()
    # Create a line layer with the same SR as the polygons
    # and copy the field definitions from the polygons to
    # the line layer.
    line_lyr = ds.CreateLayer(line_name, sr, ogr.wkbLineString)
    line_lyr.CreateFields(poly_lyr.schema)
    # Create a feature to use over and over.
    line_feat = ogr.Feature(line_lyr.GetLayerDefn())
    # Loop through all of the polygons.
    for poly_feat in poly_lyr:
        # Copy the attribute values from the polygon to the new feature.
        # Iterating items() avoids the keys()+per-key-lookup idiom.
        atts = poly_feat.items()
        for fld_name, value in atts.items():
            line_feat.SetField(fld_name, value)
        # Loop through the rings in the polygon.
        poly_geom = poly_feat.geometry()
        for i in range(poly_geom.GetGeometryCount()):
            ring = poly_geom.GetGeometryRef(i)
            # Create a new line using the ring's vertices.
            line_geom = ogr.Geometry(ogr.wkbLineString)
            for coords in ring.GetPoints():
                line_geom.AddPoint(*coords)
            # Insert the new line feature.
            line_feat.SetGeometry(line_geom)
            line_lyr.CreateFeature(line_feat)
| {
"content_hash": "ac1cdaaf49c7910488c39ba0eca0f6ea",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 63,
"avg_line_length": 36.53658536585366,
"alnum_prop": 0.6328437917222964,
"repo_name": "cgarrard/osgeopy-code",
"id": "1a5c8f8195195a4a7556518bb3c3495f0fea7122",
"size": "1498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Chapter6/listing6_2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "187780"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import *
from django.contrib import admin
from django.views.generic.simple import redirect_to
from django.conf import settings
from oscar.app import Shop
from oscar.apps.product import ProductApplication
from shop.product.views import MyItemDetailView
# Register all installed apps' admin modules.
admin.autodiscover()
# Wire Oscar's shop app with a customized product detail view.
shop_app = Shop(product_app=ProductApplication(detail_view=MyItemDetailView))
urlpatterns = patterns('',
    (r'^admin/', include(admin.site.urls)),
    (r'', include(shop_app.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.MEDIA_ROOT}),
) | {
"content_hash": "59a6d108db6d9c4aeb8069e80cdc2351",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 77,
"avg_line_length": 27.958333333333332,
"alnum_prop": 0.7228017883755589,
"repo_name": "aykut/django-oscar",
"id": "efeafa14b513289f9eed869a684e47a15cc2c2bf",
"size": "671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/demo/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
ipython -i --pdb scripts/train_model.py -- --model cropped_dec21_3 --data 128_20151029 --use_cropped --as_grey --overwrite --no_test
"""
import numpy as np
from lasagne.layers import dnn
import lasagne as nn
import theano.tensor as T
import theano
from utils.nolearn_net import NeuralNet
from nolearn.lasagne.handlers import SaveWeights
from nolearn_utils.iterators import (
ShuffleBatchIteratorMixin,
RandomFlipBatchIteratorMixin,
AffineTransformBatchIteratorMixin,
AdjustGammaBatchIteratorMixin,
make_iterator
)
from nolearn_utils.hooks import (
SaveTrainingHistory,
PlotTrainingHistory,
EarlyStopping,
StepDecay
)
from utils import TrainSplit
# from utils.layers import batch_norm
# from utils.iterators import PairBatchIteratorMixin
# from utils.nonlinearities import low_temperature_softmax
# from utils.layers import TiedDropoutLayer
from utils.layer_macros import residual_block, conv2dbn
def float32(k):
    """Cast *k* to a float32 ndarray (Theano shared variables expect float32).

    Uses asarray().astype() instead of the deprecated ``np.cast`` mapping,
    which was removed in NumPy 2.0; behavior (always returns a new float32
    array, 0-d for scalars) is unchanged.
    """
    return np.asarray(k).astype(np.float32)
# Output artifacts for this training run (weights, history, history plot).
model_fname = './models/cropped_dec21_3.pkl'
model_accuracy_fname = './models/cropped_dec21_3_accuracy.pkl'
model_history_fname = './models/cropped_dec21_3_history.pkl'
model_graph_fname = './models/cropped_dec21_3_history.png'
# Input size and training hyperparameters.
image_size = 256
batch_size = 16
n_classes = 447
# Training batches are shuffled and augmented (flips, affine, gamma).
train_iterator_mixins = [
    ShuffleBatchIteratorMixin,
    RandomFlipBatchIteratorMixin,
    AffineTransformBatchIteratorMixin,
    AdjustGammaBatchIteratorMixin,
]
TrainIterator = make_iterator('TrainIterator', train_iterator_mixins)
# Test batches get no augmentation.
test_iterator_mixins = [
]
TestIterator = make_iterator('TestIterator', test_iterator_mixins)
train_iterator_kwargs = dict(
    batch_size=batch_size,
    flip_horizontal_p=0.5,
    flip_vertical_p=0.5,
    affine_p=1.,
    affine_scale_choices=np.linspace(0.5, 1.5, 11),
    # affine_shear_choices=np.linspace(-0.5, 0.5, 11),
    affine_translation_choices=np.arange(-64, 64, 1),
    # affine_rotation_choices=np.arange(0, 360, 1),
    adjust_gamma_p=0.5,
    adjust_gamma_chocies=np.linspace(0.5, 1.5, 11)
)
train_iterator = TrainIterator(**train_iterator_kwargs)
test_iterator_kwargs = dict(
    batch_size=batch_size,
)
test_iterator = TestIterator(**test_iterator_kwargs)
# Per-epoch hooks: checkpoint best weights, persist/plot history, early stop.
save_weights = SaveWeights(model_fname, only_best=True, pickle=False)
save_training_history = SaveTrainingHistory(model_history_fname)
plot_training_history = PlotTrainingHistory(model_graph_fname)
early_stopping = EarlyStopping(patience=100)
# Shared kwargs for every conv layer in the network below.
conv_kwargs = dict(
    pad='same',
    nonlinearity=nn.nonlinearities.very_leaky_rectify
)
pool_kwargs = dict(
    pool_size=2,
)
# ResNet-style tower: a strided 7x7 stem, then five groups of residual
# blocks; each group after the first halves the spatial size with a
# stride-2 first block.  The "# NxN" comments give the feature-map size
# entering each group.
l = nn.layers.InputLayer(name='in', shape=(None, 3, image_size, image_size))
# 256x256
l = conv2dbn(
    l, name='l1c1', num_filters=32, filter_size=(7, 7), stride=2,
    **conv_kwargs
)
# 128x128
for i in range(3):
    l = residual_block(
        l, name='2c%s' % i,
        # bottleneck=False,
        num_filters=48, filter_size=(3, 3),
        num_layers=2,
        **conv_kwargs
    )
# 128x128
for i in range(3):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='3c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=64, filter_size=(3, 3), stride=actual_stride,
        num_layers=2,
        **conv_kwargs
    )
# 64x64
for i in range(3):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='4c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=80, filter_size=(3, 3), stride=actual_stride,
        num_layers=2,
        **conv_kwargs
    )
# 32x32
for i in range(3):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='5c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=96, filter_size=(3, 3), stride=actual_stride,
        num_layers=2,
        **conv_kwargs
    )
# 16x16
for i in range(3):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='6c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=128, filter_size=(3, 3), stride=actual_stride,
        num_layers=2,
        **conv_kwargs
    )
# 8x8
# 8
# Global average pooling + dropout + softmax classifier head.
l = nn.layers.dnn.Pool2DDNNLayer(l, name='gp', pool_size=8, mode='average_inc_pad')
l = nn.layers.DropoutLayer(l, name='gpdrop', p=0.5)
l = nn.layers.DenseLayer(l, name='out', num_units=n_classes, nonlinearity=nn.nonlinearities.softmax)
# nolearn wrapper: SGD with Nesterov momentum, learning rate decayed from
# 1e-1 to 1e-3 over training, 15% stratified-off validation split.
net = NeuralNet(
    layers=l,
    regression=False,
    use_label_encoder=False,
    objective_l2=1e-6,
    update=nn.updates.nesterov_momentum,
    update_learning_rate=theano.shared(float32(1e-1)),
    train_split=TrainSplit(0.15, random_state=42, stratify=False),
    batch_iterator_train=train_iterator,
    batch_iterator_test=test_iterator,
    on_epoch_finished=[
        save_weights,
        save_training_history,
        plot_training_history,
        early_stopping,
        StepDecay('update_learning_rate', start=1e-1, stop=1e-3)
    ],
    verbose=10,
    max_epochs=2500,
)
| {
"content_hash": "b226984433718975a615b7b83b27e77a",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 132,
"avg_line_length": 25.755208333333332,
"alnum_prop": 0.6736097067745197,
"repo_name": "felixlaumon/kaggle-right-whale",
"id": "1e91072f4c0b2b06b0948b246c7e236eba4c20b9",
"size": "4945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model_definitions/cropped_dec21_3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "115699"
},
{
"name": "Makefile",
"bytes": "630"
},
{
"name": "Python",
"bytes": "139871"
}
],
"symlink_target": ""
} |
import json
import re
import __ids__
import options
import survey_exceptions as se
import blocks
# Generator producing unique question ids with a "q_" prefix.
__qGen__ = __ids__.IdGenerator("q_")
# Canonical question-type names (documented in the module docstring below).
__likert__ = "likert"
__checkbox__ = "checkbox"
__oneof__ = "oneof"
__instruction__ = "instruction"
__freetext__ = "freetext"
qTypes = [__likert__, __checkbox__, __oneof__, __instruction__, __freetext__]
"""
Question types include:
- likert : Questions are presented on a scale. These questions typically have 4-7 options available. Their relative order must be maintained. They are presented to the user with teh HTML `radio` input.
- checkbox : The options for checkbox questions may be presented in any order. They are presented to the user with the HTML `check` input.
- oneof : These questions have unordered, exclusive options. They are presented to the user with the HTML `radio` input.
- freetext : These question have no options associated with them. Instead, they are presented as an HTML `textarea`. Freetext questions may contain a default value, to be displayed in the text box, or they may require validation against a regular expression.
- instruction : These questions have no options associated with them. They are purely instructional. They do not return any data.
"""
class Question:
    """
    Contains the components of a survey question. SurveyMan presents questions one at a time.
    """

    def __init__(self, qType, qText, options=None, shuffle=True, freetext=None, breakoff=True):
        """
        Creates a Question object with a unique id.
        Question type, text, and a list of options must be specified
        (option list may be empty or omitted).
        Shuffling is allowed by default; user must specify otherwise.

        :param qType: One of "likert", "checkbox", "oneof", "freetext", or "instructional"
        :param qText: The text to display
        :param options: The list of options associated with this question, if applicable.
        :param shuffle: Boolean to permit shuffling.
        :param freetext: Boolean, regular expression, or default string. Only use this if qType is "freetext"
        :param breakoff: Boolean indicating whether breakoff is permitted at this question.
        :return:
        """
        self.qId = __qGen__.generateID()
        if qType not in qTypes:
            raise se.NoSuchQuestionTypeException("%s not in {%s}" % (qType, ",".join(qTypes)))
        else:
            self.qType = qType
        self.qText = qText
        assert type(shuffle) is bool, type(shuffle)
        self.shuffle = shuffle
        self.branching = False
        self.branch_map = None
        self.block = blocks.Block(contents=[self])
        self.breakoff = breakoff
        self.freetext = freetext
        # BUG FIX: the signature previously used a mutable default (options=[]),
        # so every Question created without options shared one list and
        # add_option() leaked options across instances.
        self.options = options if options is not None else []
        assert (freetext is not True or len(self.options) == 0)

    def add_option(self, o):
        """
        Adds o to the end of the question's option list. If type(o) is 'str', then this function creates an Option with
        this text before adding it.

        :param o: Option to add to this questions' option list.
        """
        if self.qType in [__instruction__, __freetext__]:
            raise se.QuestionTypeException("Questions of type %s cannot have options." % self.qType)
        if type(o) is str:
            # Prefer an HTML option; fall back to plain text if o is not HTML.
            try:
                self.options.append(options.HTMLOption(o))
            except se.HTMLValidationException:
                self.options.append(options.TextOption(o))
        else:
            self.options.append(o)

    def add_option_by_index(self, index, o):
        """
        Adds o at the desired index in the question's option list. If type(o) is 'str', then this function creates an
        Option with this text before adding it. This method will pad with empty options if an option is added beyond the
        current list.

        :param index: The target index for where o should be inserted.
        :param o: Either the text or html of a survey object, or an option object.
        """
        if self.qType in [__instruction__, __freetext__]:
            raise se.QuestionTypeException("Questions of type %s cannot have options." % self.qType)
        if index > len(self.options):
            # Pad with empty options so the insert lands at the requested index.
            for i in range(len(self.options), index):
                self.options.append(options.Option(""))
        if type(o) is str:
            try:
                self.options.insert(index, options.HTMLOption(o))
            except se.HTMLValidationException:
                self.options.insert(index, options.TextOption(o))
        else:
            self.options.insert(index, o)

    def __eq__(self, other):
        """
        Returns true if self and other have the same id.

        :param other: Question to compare self to.
        """
        # BUG FIX: previously compared against other.qid, an attribute that
        # does not exist (the field is qId), raising AttributeError.
        return isinstance(other, Question) and self.qId == other.qId

    def __hash__(self):
        # Keep hashing consistent with __eq__ (equality is by qId).
        return hash(self.qId)

    def __str__(self):
        text = "Question ID: " + str(self.qId) + " Question type: " + self.qType + "\n"
        text = text + self.qText + "\n"
        for o in self.options:
            text = text + "\t" + str(o) + "\n"
        return text

    def __repr__(self):
        # BUG FIX: '%b' is not a valid %-format code and Option objects cannot
        # be passed to str.join directly; use %s and stringify each option.
        return "Question(%s, %s, options=[%s], shuffle=%s, freetext=%s, breakoff=%s)" % (
            self.qType, self.qText, ",".join(str(o) for o in self.options),
            self.shuffle, self.freetext, self.breakoff)

    def jsonize(self):
        """
        Returns JSON representation of the question

        :return: A JSON object according to the `Question Schema <http://surveyman.github.io/Schemata/survey_question.json>`_.
        """
        __id__ = "id"
        __qtext__ = "qtext"
        __options__ = "options"
        __branchMap__ = "branchMap"
        __freetext_key__ = "freetext"
        __answer__ = "answer"
        __randomize__ = "randomize"
        __ordered__ = "ordered"
        __exclusive__ = "exclusive"
        __permitBreakoff__ = "permitBreakoff"
        output = {__id__: self.qId, __qtext__: self.qText, __permitBreakoff__: self.breakoff}
        # BUG FIX: type names were compared with `is` (identity), which only
        # worked because CPython interns identifier-like string literals;
        # equality comparison is the correct, portable check.
        if self.qType == __instruction__:
            return json.dumps(output)
        if self.qType == __freetext__:
            if (type(self.freetext) is bool and self.freetext) or type(self.freetext) is str:
                output[__freetext_key__] = self.freetext
            elif type(self.freetext) is type(re.compile("")):
                # Patterns are serialized in SurveyMan's #{...} notation.
                output[__freetext_key__] = str("#{%s}" % self.freetext.pattern)
            return json.dumps(output)
        output[__options__] = [json.loads(o.jsonize()) for o in self.options]
        output[__randomize__] = self.shuffle
        output[__ordered__] = self.qType == __likert__
        output[__exclusive__] = self.qType in [__likert__, __oneof__]
        if self.branch_map is not None:
            output[__branchMap__] = json.loads(self.branch_map.jsonize())
        return json.dumps(output)
class Instruction(Question):
    """
    Instructional convenience class
    """
    def __init__(self, qText):
        """Create an instruction-only question (no options; returns no data)."""
        Question.__init__(self, __instruction__, qText)
class FreeText(Question):
    """
    Freetext convenience class
    """
    def __init__(self, qText, regex=None, default=None):
        """Convenient initialization of a Freetext question.

        Freetext questions cannot have both a regular expression and a
        default value associated with them.

        :param qText: Question text
        :param regex: String or Pattern object for freetext contents to validate against
        :param default: Default text appearing in a freetext box.
        """
        if regex is not None and default is not None:
            raise se.QuestionTypeException("Freetext questions cannot have both a regex and a default value.")
        if regex is None:
            # No validation pattern: either a default value or plain freetext.
            freetext_value = default if default is not None else True
            Question.__init__(self, __freetext__, qText, freetext=freetext_value)
            return
        pattern_type = type(re.compile(""))
        if type(regex) is str:
            compiled = re.compile(regex)
        elif type(regex) is pattern_type:
            compiled = regex
        else:
            raise se.QuestionTypeException("Unknown regular expression type: %s (recognized values are %s and %s)" %
                                           (type(regex), str, pattern_type))
        Question.__init__(self, __freetext__, qText, freetext=compiled)
class RadioButtonQuestion(Question):
    """ Convenience class for radio button questions.
    """
    def __init__(self, qText, options=None):
        """Create a "oneof" question with shuffled, exclusive options.

        BUG FIX: the default was a shared mutable list (options=[]), so all
        RadioButtonQuestions created without options shared one option list.
        """
        Question.__init__(self, __oneof__, qText, options if options is not None else [],
                          shuffle=True, freetext=False, breakoff=True)
| {
"content_hash": "d33f66d8295d9166487ae756eab449ee",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 259,
"avg_line_length": 41.12676056338028,
"alnum_prop": 0.6150684931506849,
"repo_name": "SurveyMan/SMPy",
"id": "555eb6edcd3e81062babeb4f2673fb3c3f7e9a0c",
"size": "8760",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "surveyman/survey/questions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "9486"
},
{
"name": "Makefile",
"bytes": "466"
},
{
"name": "Python",
"bytes": "110622"
}
],
"symlink_target": ""
} |
import pathlib
import shutil
import tempfile
import typing
import pytest
from airflow import models
from ruamel import yaml
from scripts import generate_dag, generate_terraform
PROJECT_ROOT = generate_dag.PROJECT_ROOT
SAMPLE_YAML_PATHS = {
"dataset": PROJECT_ROOT / "samples" / "dataset.yaml",
"pipeline": PROJECT_ROOT / "samples" / "pipeline.yaml",
}
@pytest.fixture
def dataset_path() -> typing.Iterator[pathlib.Path]:
    """Yield a temporary dataset directory under datasets/ (removed on teardown)."""
    with tempfile.TemporaryDirectory(
        dir=generate_dag.DATASETS_PATH, suffix="_dataset"
    ) as dir_path:
        yield pathlib.Path(dir_path)
@pytest.fixture
def pipeline_path(dataset_path, suffix="_pipeline") -> typing.Iterator[pathlib.Path]:
    """Yield a temporary pipeline directory under <dataset>/pipelines/.

    NOTE(review): ``suffix`` is a defaulted parameter on a fixture — confirm
    the installed pytest version accepts non-fixture parameters with defaults
    here rather than trying to resolve ``suffix`` as a fixture.
    """
    pipelines_dir = dataset_path / "pipelines"
    pipelines_dir.mkdir(parents=True, exist_ok=True)
    with tempfile.TemporaryDirectory(dir=pipelines_dir, suffix=suffix) as dir_path:
        yield pathlib.Path(dir_path)
def all_pipelines() -> typing.Iterator[typing.Tuple[pathlib.Path, pathlib.Path]]:
    """Yield (dataset_dir, pipeline_dir) pairs for every pipeline in the repo."""
    for ds_dir in generate_terraform.list_subdirs(generate_dag.DATASETS_PATH):
        pipeline_dirs = generate_terraform.list_subdirs(ds_dir / "pipelines")
        for pl_dir in pipeline_dirs:
            yield ds_dir, pl_dir
def test_all_dag_ids_are_unique():
    """Every pipeline's namespaced DAG id must be unique across all datasets."""
    dag_ids = set()
    for dataset_path, pipeline_path in all_pipelines():
        generate_dag.CustomYAMLTags(dataset_path.name)
        # BUG FIX: use a context manager instead of yaml.load(open(...)) so
        # the file handle is closed deterministically rather than leaked.
        with open(pipeline_path / "pipeline.yaml") as config_file:
            dag_config = yaml.load(config_file)
        config_dag_id = generate_dag.dag_init(dag_config)["dag_id"]
        namespaced_id = generate_dag.namespaced_dag_id(config_dag_id, dataset_path.name)
        assert namespaced_id not in dag_ids
        dag_ids.add(namespaced_id)
# Alias exposes the pipeline fixture under a second name so one test can
# request two temporary pipeline directories at once.
# NOTE(review): verify pytest treats this alias as an independent fixture
# instance rather than reusing the first one.
pipeline_path_2 = pipeline_path
def test_non_unique_dag_id_will_fail_validation(
    dataset_path: pathlib.Path,
    pipeline_path: pathlib.Path,
    pipeline_path_2: pathlib.Path,
):
    """Two pipelines sharing one pipeline.yaml must produce a duplicate DAG id."""
    shutil.copyfile(
        SAMPLE_YAML_PATHS["dataset"], dataset_path / "pipelines" / "dataset.yaml"
    )
    # Install the same sample pipeline.yaml into both pipeline dirs.
    shutil.copyfile(SAMPLE_YAML_PATHS["pipeline"], pipeline_path / "pipeline.yaml")
    shutil.copyfile(SAMPLE_YAML_PATHS["pipeline"], pipeline_path_2 / "pipeline.yaml")
    dag_ids = set()
    all_unique = True
    for dataset_path, pipeline_path in all_pipelines():
        generate_dag.CustomYAMLTags(dataset_path.name)
        # BUG FIX: use a context manager instead of yaml.load(open(...)) so
        # the file handle is closed deterministically rather than leaked.
        with open(pipeline_path / "pipeline.yaml") as config_file:
            dag_config = yaml.load(config_file)
        config_dag_id = generate_dag.dag_init(dag_config)["dag_id"]
        namespaced_id = generate_dag.namespaced_dag_id(config_dag_id, dataset_path.name)
        if namespaced_id in dag_ids:
            all_unique = False
            break
        dag_ids.add(namespaced_id)
    assert not all_unique
def test_check_all_dag_ids_must_be_prepended_with_dataset_name():
    """Each generated *_dag.py must declare its dataset-namespaced dag_id."""
    for ds_path, pl_path in all_pipelines():
        dag_source = (pl_path / f"{pl_path.name}_dag.py").read_text()
        expected_id = f"{ds_path.name}.{pl_path.name}"
        assert f'dag_id="{expected_id}"' in dag_source
def test_check_all_dags_have_no_import_errors():
    """Every generated DAG must load into a DagBag without import errors."""
    for dataset_path, pipeline_path in all_pipelines():
        dagbag = models.DagBag(dag_folder=str(pipeline_path))
        assert (
            generate_dag.namespaced_dag_id(pipeline_path.name, dataset_path.name)
            in dagbag.dag_ids
        )
        # Idiomatic emptiness check instead of len(...) == 0.
        assert not dagbag.import_errors
| {
"content_hash": "3edb79172cc8370f7f0b2089af515eab",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 88,
"avg_line_length": 32.65384615384615,
"alnum_prop": 0.6772673733804476,
"repo_name": "GoogleCloudPlatform/public-datasets-pipelines",
"id": "d68b865a4f9452fbcb2d85ac8e82a24ae966eaea",
"size": "3794",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/test_checks_for_all_dags.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "101888"
},
{
"name": "HCL",
"bytes": "678082"
},
{
"name": "Jinja",
"bytes": "12539"
},
{
"name": "Jupyter Notebook",
"bytes": "655592"
},
{
"name": "Python",
"bytes": "4784376"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2019-2022 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
import functools
from pathlib import Path
from tempfile import _RandomNameSequence
from textwrap import dedent
import pytest
import zipfile
from flexmock import flexmock
from atomic_reactor.constants import PLUGIN_EXPORT_OPERATOR_MANIFESTS_KEY
from atomic_reactor.plugins.check_and_set_platforms import CheckAndSetPlatformsPlugin
from atomic_reactor.plugins.export_operator_manifests import ExportOperatorManifestsPlugin
from atomic_reactor.plugin import PluginFailedException, PluginsRunner
from atomic_reactor.utils import retries
from tests.constants import TEST_IMAGE
from tests.mock_env import MockEnv
from tests.util import FAKE_CSV
pytestmark = pytest.mark.usefixtures('user_params')
CONTAINER_ID = 'mocked'
PLATFORMS = ["aarch64", "x86_64", "s390x", "ppc64le"]
def mock_source_contents(
    repo_dir: Path,
    has_appregistry_label=False, appregistry_label=False,
    has_bundle_label=True, bundle_label=True
) -> None:
    """Populate repo_dir with a minimal operator repo.

    Writes a Dockerfile plus a manifests/ dir containing one fake
    ClusterServiceVersion. Each LABEL line is emitted only when its
    matching has_* flag is set; its value is the lower-cased str of the
    corresponding *_label flag.
    """
    appregistry_line = ''
    if has_appregistry_label:
        appregistry_line = 'LABEL com.redhat.delivery.appregistry={}'.format(
            str(appregistry_label).lower())
    bundle_line = ''
    if has_bundle_label:
        bundle_line = 'LABEL com.redhat.delivery.operator.bundle={}'.format(
            str(bundle_label).lower())

    manifests_dir = repo_dir / 'manifests'
    manifests_dir.mkdir()
    (manifests_dir / 'operator.clusterserviceversion.yaml').write_text(FAKE_CSV)

    dockerfile_lines = [
        'From fedora:30',
        appregistry_line,
        bundle_line,
        'CMD /bin/cowsay moo',
    ]
    (repo_dir / 'Dockerfile').write_text('\n'.join(dockerfile_lines))
def extract_manifests_dir(cmd, empty=False, has_archive=True, change_csv_content=False,
                          multiple_csv=False) -> None:
    """Fake of the manifest-extraction command: write files where it would.

    The target directory is parsed from the last element of ``cmd`` (the
    text after the final ':').  Files written when not empty:

        <target>/
            stub.yml
            operator.clusterserviceversion.yaml
            another_dir/
                yayml.yml

    :param list cmd: the command to extract manifests
    :param bool empty: create the directory tree but no files
    :param bool has_archive: when False, stop after creating another_dir
    :param bool change_csv_content: append extra content to the fake CSV
    :param bool multiple_csv: also write an extra CSV file
    """
    target_dir = Path(cmd[-1].split(':')[-1])
    nested_dir = target_dir / 'another_dir'
    nested_dir.mkdir()
    if not has_archive:
        return
    if empty:
        return
    (target_dir / 'stub.yml').write_text('')
    (nested_dir / 'yayml.yml').write_text('')
    csv_content = FAKE_CSV
    if change_csv_content:
        csv_content += dedent('''\
            customresourcedefinitions:
            ''')
    (target_dir / 'operator.clusterserviceversion.yaml').write_text(csv_content)
    if multiple_csv:
        (target_dir / 'extra.csv.yaml').write_text(dedent('''\
            apiVersion: operators.coreos.com/v1alpha1
            kind: ClusterServiceVersion
            metadata: {}
            spec:
              install: {}
            '''))
def mock_env(workflow, has_appregistry_label=False, appregistry_label=False,
             has_bundle_label=True, bundle_label=True, has_archive=True, scratch=False,
             empty_archive=False, change_csv_content=False,
             multiple_csv=False) -> PluginsRunner:
    """Build a PluginsRunner set up to exercise ExportOperatorManifestsPlugin.

    Writes a mock operator repo into the workflow's source dir, seeds the
    platform-check plugin result, and stubs both the manifest-extraction
    command and tempfile's random name sequence so runs are deterministic.
    """
    mock_source_contents(
        Path(workflow.source.path),
        has_appregistry_label=has_appregistry_label,
        appregistry_label=appregistry_label,
        has_bundle_label=has_bundle_label,
        bundle_label=bundle_label,
    )
    env = MockEnv(workflow).for_plugin(ExportOperatorManifestsPlugin.key)
    env = env.set_scratch(scratch)
    env = env.set_plugin_result(CheckAndSetPlatformsPlugin.key, PLATFORMS)
    env.workflow.build_dir.init_build_dirs(PLATFORMS, env.workflow.source)
    env.workflow.data.tag_conf.add_unique_image(TEST_IMAGE)
    # Replace the real extraction command with the filesystem fake above.
    fake_extract = functools.partial(
        extract_manifests_dir,
        empty=empty_archive,
        multiple_csv=multiple_csv,
        has_archive=has_archive,
        change_csv_content=change_csv_content,
    )
    flexmock(retries).should_receive("run_cmd").replace_with(fake_extract)
    # Pin tempfile's random name sequence for stable generated paths.
    flexmock(_RandomNameSequence).should_receive("__next__").and_return('abcdef12')
    return env.create_runner()
class TestExportOperatorManifests(object):
    """Tests for ExportOperatorManifestsPlugin: skip conditions, archive
    creation, and error handling for bad or missing manifests."""
    @pytest.mark.parametrize('has_appregistry_label', [True, False])
    @pytest.mark.parametrize('appregistry_label', [True, False])
    @pytest.mark.parametrize('has_bundle_label', [True, False])
    @pytest.mark.parametrize('bundle_label', [True, False])
    def test_skip(self, workflow, caplog, has_appregistry_label, appregistry_label,
                  has_bundle_label, bundle_label):
        """Plugin runs only when an operator label is present and true; otherwise skips."""
        runner = mock_env(
            workflow, has_appregistry_label=has_appregistry_label,
            has_bundle_label=has_bundle_label, bundle_label=bundle_label,
            appregistry_label=appregistry_label
        )
        # Skip is expected when neither label is both present and set to true.
        if any([
            not (
                (has_appregistry_label and appregistry_label) or
                (has_bundle_label and bundle_label)
            )
        ]):
            result = runner.run()
            assert 'Operator manifests label not set in Dockerfile. Skipping' in caplog.text
            assert result[PLUGIN_EXPORT_OPERATOR_MANIFESTS_KEY] is None
        else:
            runner.run()
    @pytest.mark.parametrize('scratch', [True, False])
    def test_export_archive(self, workflow, scratch):
        """The plugin zips the extracted manifests regardless of scratch mode."""
        runner = mock_env(workflow, scratch=scratch)
        result = runner.run()
        archive = result[PLUGIN_EXPORT_OPERATOR_MANIFESTS_KEY]
        assert archive
        assert archive.split('/')[-1] == 'operator_manifests.zip'
        assert zipfile.is_zipfile(archive)
        # Contents must match exactly what extract_manifests_dir laid down.
        expected = ['stub.yml', 'operator.clusterserviceversion.yaml', 'another_dir/yayml.yml']
        with zipfile.ZipFile(archive, 'r') as z:
            assert len(z.namelist()) == len(expected)
            assert sorted(z.namelist()) == sorted(expected)
    def test_csv_is_changed_in_built_image(self, workflow):
        """CSV content in the image differing from the source fails the build."""
        runner = mock_env(workflow, change_csv_content=True)
        with pytest.raises(PluginFailedException, match='have different content'):
            runner.run()
    def test_multiple_csv_files_inside_built_image(self, workflow):
        """More than one ClusterServiceVersion file in the image is an error."""
        runner = mock_env(workflow, multiple_csv=True)
        with pytest.raises(PluginFailedException, match='but contains more'):
            runner.run()
    @pytest.mark.parametrize('has_archive', [True, False, None])
    def test_no_archive(self, workflow, caplog, has_archive):
        """Failure to extract manifest files from the image fails the plugin."""
        runner = mock_env(workflow, has_archive=has_archive)
        if has_archive:
            runner.run()
        else:
            with pytest.raises(PluginFailedException) as exc:
                runner.run()
            if not has_archive:
                assert 'Could not extract operator manifest files' in caplog.text
                assert 'Could not extract operator manifest files' in str(exc.value)
    @pytest.mark.parametrize('empty_archive', [True, False])
    @pytest.mark.parametrize('has_bundle_label', [True, False])
    def test_empty_manifests_dir(self, workflow, caplog, empty_archive, has_bundle_label):
        """Bundle builds require a CSV, so an empty manifests dir must fail them."""
        runner = mock_env(workflow, empty_archive=empty_archive,
                          has_bundle_label=has_bundle_label, has_appregistry_label=True)
        if empty_archive and has_bundle_label:
            with pytest.raises(PluginFailedException) as exc:
                runner.run()
            assert 'Missing ClusterServiceVersion in operator manifests' in str(exc.value)
        else:
            runner.run()
            if has_bundle_label:
                assert 'Archiving operator manifests' in caplog.text
            else:
                assert 'Operator manifests label not set in Dockerfile. Skipping' in caplog.text
| {
"content_hash": "ab527b25aa500506aec61481135e3c70",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 97,
"avg_line_length": 39.205479452054796,
"alnum_prop": 0.6390635918937806,
"repo_name": "projectatomic/atomic-reactor",
"id": "be77040e1fec8e2bc5e164c09457c3662e61a509",
"size": "8586",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/plugins/test_export_operator_manifests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "1988"
},
{
"name": "Python",
"bytes": "1981631"
},
{
"name": "Shell",
"bytes": "8544"
}
],
"symlink_target": ""
} |
'''
Simple text file config; one key=value pair per line.
Global settings will be stored in the user's home directory in a hidden file.
Settings can also be stored in the directory of the calling script,
or explicitly passed to the TPAMClient constructor. Global settings
will be overridden by directory settings, which will be overridden
by arguments passed to the constructor.
Example settings file:
----------------------
tpam_host=acme-tpamPrimary-1
tpam_user=tpam_admin
tpam_key=/absolute/path/to/clikey
This module also provides functions to create config files, via
set_global_config() and set_local_config()
'''
import os
import error
import utils
def parse_config(cfg, cfg_opts):
    """Parse a .tccfg file and update the cfg_opts dictionary in place.

    Each meaningful line looks like ``key=value``.  Keys are treated as
    case-insensitive and normalized to lower case; values keep their
    original case, which matters for ``tpam_key`` file paths on
    case-sensitive filesystems (validate() checks the path with
    os.path.isfile).  Lines without an '=' are skipped.

    Fixes two defects in the previous version: the whole line (value
    included) was lower-cased, and any line whose value contained an
    extra '=' was silently dropped because split() had no maxsplit.
    """
    with open(cfg, 'r') as my_cfg:
        for line in my_cfg:
            try:
                # Split on the first '=' only so values may contain '='.
                k, v = line.split('=', 1)
            except ValueError:
                # Malformed line (no '='): ignore it.
                # TODO - log config file syntax error
                continue
            cfg_opts[k.strip().lower()] = v.strip()
def get_config(cfg_opts):
    """Merge .tccfg settings into cfg_opts: home directory first, then cwd."""
    cfg_file = ".tccfg"
    search_dirs = [os.path.expanduser('~'), os.path.abspath(os.curdir)]
    for directory in search_dirs:
        candidate = os.path.join(directory, cfg_file)
        try:
            parse_config(candidate, cfg_opts)
        except IOError:
            # Both locations are optional; a missing file is not an error.
            pass
    return cfg_opts
def validate(cfg_opts):
    """Raise TPAMConfigError unless host, user, and key are all usable."""
    # tpam_host must be present and resolvable.
    if "tpam_host" not in cfg_opts:
        raise error.TPAMConfigError("No value for 'tpam_host' has been specified")
    if not utils.is_valid_host(cfg_opts["tpam_host"]):
        raise error.TPAMConfigError("Unable to resolve tpam_host %s" % cfg_opts["tpam_host"])
    # tpam_user must be present.
    if "tpam_user" not in cfg_opts:
        raise error.TPAMConfigError("No value for 'tpam_user' has been specified")
    # tpam_key must be present and point at a readable file.
    if "tpam_key" not in cfg_opts:
        raise error.TPAMConfigError("No value for 'tpam_key' has been specified")
    if not os.path.isfile(cfg_opts["tpam_key"]):
        raise error.TPAMConfigError("tpam_key %s not found or cannot be read" % cfg_opts["tpam_key"])
def set_local_config(**kwargs):
    """Write a .tccfg file into the current working directory."""
    _set_config(os.path.abspath(os.curdir), kwargs)
def set_global_config(**kwargs):
    """Write a .tccfg file into the user's home directory."""
    _set_config(os.path.expanduser('~'), kwargs)
def _set_config(fpath, kwargs):
    """Shared writer behind set_local_config() and set_global_config()."""
    target = os.path.join(fpath, ".tccfg")
    try:
        with open(target, "w") as cfg:
            # Host and user are written verbatim, in this fixed order.
            for key in ("tpam_host", "tpam_user"):
                if key in kwargs:
                    cfg.write("%s=%s\n" % (key, kwargs[key]))
            # The key path gets '~' expanded and normalized before writing.
            if "tpam_key" in kwargs:
                kwargs["tpam_key"] = expand_user_path(kwargs["tpam_key"])
                cfg.write("%s=%s\n" % ("tpam_key", os.path.normpath(kwargs["tpam_key"])))
    except IOError as e:
        print("Could not write .tccfg file: %s" % e)
def expand_user_path(fpath):
    """Expand a leading '~' to the user's home directory; other paths pass through."""
    if not fpath.startswith('~'):
        return fpath
    return os.path.expanduser('~') + fpath[1:]
| {
"content_hash": "61263dccb5d346b00894cc10261eb706",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 105,
"avg_line_length": 35.07920792079208,
"alnum_prop": 0.6223539373412362,
"repo_name": "k4otix/python-tpam",
"id": "60dad0c1ab2583257dcea5efd84341bc3cd39f55",
"size": "3543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17601"
}
],
"symlink_target": ""
} |
from .browser import main
# Delegate to the package's browser entry point when run as a script/module.
if __name__ == '__main__':
    main()
| {
"content_hash": "270c2f167a66fef42a218c2aac4dba3a",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 26,
"avg_line_length": 9.714285714285714,
"alnum_prop": 0.5147058823529411,
"repo_name": "ya790206/call_seq_browser",
"id": "920bb827131af1f09a3f14d8957a0925c9061d51",
"size": "68",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "call_seq_browser/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "12389"
}
],
"symlink_target": ""
} |
"3-value logic (i.e. the way that boolean ops on nulls propagate up in the expression tree in SQL). doesn't rhyme with 'evil' but should."
class ThreeVL:
    """Implementation of SQL's three-valued logic (true / false / unknown).

    Warning: use == and != for comparing python values, not for 3VL
    comparison. Caveat emptor.
    """
    # todo(awinter): is there any downside to using python True/False/None to make this work?

    def __init__(self, value):
        """value is one of 't' (true), 'f' (false), 'u' (unknown)."""
        if value not in ('t', 'f', 'u'):
            raise ValueError(value)
        self.value = value

    def __repr__(self):
        return "<3vl %s>" % self.value

    def __eq__(self, other):
        """Equality against bools and other ThreeVLs; everything else compares unequal."""
        if not isinstance(other, (bool, ThreeVL)):
            return False
        return self.value == other.value if isinstance(other, ThreeVL) else {True: 't', False: 'f'}[other] == self.value

    # bugfix: this method used to be named __neq__, which Python never calls;
    # the inequality hook is __ne__. The old name is kept as an alias in case
    # anything invoked it directly.
    def __ne__(self, other):
        return not self == other

    __neq__ = __ne__

    def __bool__(self):
        # if self.value=='u': raise ValueError("can't cast 3VL 'unknown' to bool") # I think this is okay at top level
        return self.value == 't'

    @staticmethod
    def test(item):
        "this is the top-level output to SQL 'where' tests. At this level, 'u' *is* false"
        if not isinstance(item, (bool, ThreeVL)):
            raise TypeError(type(item))  # todo(awinter): test this on whereclause testing an int
        return item if isinstance(item, bool) else item.value == 't'

    # note below: the 3vl comparisons return a 3vl OR a bool
    @staticmethod
    def nein(item):
        "this is 'not' but not is a keyword so it's 'nein'"
        if not isinstance(item, (bool, ThreeVL)):
            raise TypeError(type(item))
        return not item if isinstance(item, bool) else ThreeVL(dict(t='f', f='t', u='u')[item.value])

    @staticmethod
    def andor(operator, left, right):
        """Kleene and/or over bools and ThreeVLs; returns a bool or a ThreeVL."""
        # todo(awinter): does sql cast values to bools? e.g. nonempty strings, int 0 vs 1
        # is this the right one? https://en.wikipedia.org/wiki/Three-valued_logic#Kleene_logic
        if operator not in ('and', 'or'):
            raise ValueError('unk_operator', operator)
        vals = left, right
        if not all(isinstance(item, (bool, ThreeVL)) for item in vals):
            raise TypeError(list(map(type, vals)))
        if ThreeVL('u') in vals:
            # Kleene logic with an unknown operand:
            #   or  -> True when either side is True, else unknown
            #   and -> False when either side is False, else unknown
            # bugfix: 'or' over (unknown, False) previously fell through to the
            # 'and' branch and returned False, but NULL OR FALSE is NULL in SQL.
            if operator == 'or':
                return True if True in vals else ThreeVL('u')
            return False if False in vals else ThreeVL('u')
        left, right = list(map(bool, vals))
        return (left and right) if operator == 'and' else (left or right)

    @staticmethod
    def compare(operator, left, right):
        "this could be replaced by overloading but I want == to return a bool for 'in' use"
        # todo(awinter): what about nested 3vl like "(a=b)=(c=d)". is that allowed by sql? It will choke here if there's a null involved.
        if left is None or right is None:
            # Any comparison with NULL is unknown.
            return ThreeVL('u')
        elif operator == '=':
            return left == right
        elif operator == '!=':
            return left != right
        elif operator == '>':
            return left > right
        elif operator == '<':
            return left < right
        else:
            raise ValueError('unk operator in compare', operator)
| {
"content_hash": "4dc977572614cc0c84b70f12d276ca04",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 138,
"avg_line_length": 41.166666666666664,
"alnum_prop": 0.6477732793522267,
"repo_name": "abe-winter/pg13-py",
"id": "05f1ddb4e409e39f283261fb81ceea7dbaa778ea",
"size": "2964",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pg13/threevl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "154232"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from multiprocessing import Process
from random import choice
from vizdoom import *
def play(game):
    """Initialize the given DoomGame, run 10 random-action episodes, then close it."""
    game.init()
    actions = [[True, False, False], [False, True, False], [False, False, True]]
    for _ in range(10):
        game.new_episode()
        while not game.is_episode_finished():
            game.make_action(choice(actions))
    game.close()
def player1():
    """Async player at ticrate 70 — twice Doom's default of 35 tics/second."""
    game = DoomGame()
    game.load_config('../config/basic.cfg')
    game.set_mode(Mode.ASYNC_PLAYER)
    game.set_ticrate(70)
    play(game)
def player2():
    """Async player at ticrate 17 — roughly half Doom's default speed."""
    game = DoomGame()
    game.load_config('../config/basic.cfg')
    game.set_mode(Mode.ASYNC_PLAYER)
    game.set_ticrate(17)
    play(game)
if __name__ == '__main__':
    # Run player1 in a child process and player2 in this one, so the two
    # async players run concurrently at their different ticrates.
    p1 = Process(target=player1)
    p1.start()
    player2()
| {
"content_hash": "f2afee9c8614010b0b9e1170dc72fc87",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 84,
"avg_line_length": 19.836734693877553,
"alnum_prop": 0.6306584362139918,
"repo_name": "jeffery-do/Vizdoombot",
"id": "5eea41aa0257d8d9bea816e77e943a0aa0bd1b33",
"size": "995",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/python/ticrate.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "465717"
},
{
"name": "C++",
"bytes": "219269"
},
{
"name": "CSS",
"bytes": "7132"
},
{
"name": "Cuda",
"bytes": "232079"
},
{
"name": "FORTRAN",
"bytes": "9868"
},
{
"name": "HTML",
"bytes": "7089"
},
{
"name": "JavaScript",
"bytes": "23881"
},
{
"name": "Jupyter Notebook",
"bytes": "16254"
},
{
"name": "Makefile",
"bytes": "214"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "37513702"
},
{
"name": "Shell",
"bytes": "3838"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a `user` ForeignKey (to debate.UserProfile) on DebateTopic."""
    dependencies = [
        ('debate', '0007_auto_20170807_2314'),
    ]
    operations = [
        migrations.AddField(
            model_name='debatetopic',
            name='user',
            # default=0 backfills existing rows during the migration;
            # preserve_default=False keeps that default out of the final model state.
            field=models.ForeignKey(default=0, on_delete=django.db.models.deletion.CASCADE, to='debate.UserProfile'),
            preserve_default=False,
        ),
    ]
| {
"content_hash": "9fc7bd18b864181e5f82d6e77b85e014",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 117,
"avg_line_length": 25.7,
"alnum_prop": 0.632295719844358,
"repo_name": "steventimberman/masterDebater",
"id": "a9e762265fa320141062aa01b3b9a7303ea94f6a",
"size": "587",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "debate/migrations/0008_debatetopic_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "673"
},
{
"name": "CSS",
"bytes": "83414"
},
{
"name": "HTML",
"bytes": "696030"
},
{
"name": "JavaScript",
"bytes": "176225"
},
{
"name": "Makefile",
"bytes": "148"
},
{
"name": "Python",
"bytes": "11809652"
},
{
"name": "Shell",
"bytes": "3230"
}
],
"symlink_target": ""
} |
Creating PDF books from XML text content.
| {
"content_hash": "a46a73b75811c73cb90350c4f4db31fd",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 41,
"avg_line_length": 42,
"alnum_prop": 0.8095238095238095,
"repo_name": "ActiveState/code",
"id": "b66c5f53559af77717dfe81e6021b6c590513eaf",
"size": "42",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "recipes/Python/578561_XML_PDF_book_ElementTree/recipe-578561.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "35894"
},
{
"name": "C",
"bytes": "56048"
},
{
"name": "C++",
"bytes": "90880"
},
{
"name": "HTML",
"bytes": "11656"
},
{
"name": "Java",
"bytes": "57468"
},
{
"name": "JavaScript",
"bytes": "181218"
},
{
"name": "PHP",
"bytes": "250144"
},
{
"name": "Perl",
"bytes": "37296"
},
{
"name": "Perl 6",
"bytes": "9914"
},
{
"name": "Python",
"bytes": "17387779"
},
{
"name": "Ruby",
"bytes": "40233"
},
{
"name": "Shell",
"bytes": "190732"
},
{
"name": "Tcl",
"bytes": "674650"
}
],
"symlink_target": ""
} |
from cmdb import models
class AssetHandler(object):
    """Persist a collected asset report into the CMDB models.

    ``asset_data`` is a dict of collected facts; the keys read here are
    'essential_information', 'cpu_information', 'memory_information',
    'interfaces_information' and 'disk_information' (shapes assumed from
    the usage below — confirm against the collector).
    """

    def __init__(self, request, asset_data, management_ip=None, project_id=0, created_by='auto'):
        self.asset_data = asset_data
        self.created_by = created_by
        self.request = request
        self.project_id = project_id
        self.management_ip = management_ip

    def log_handler(self, event_type, component=None, detail=""):
        """Write an EventLog entry for the asset currently being created."""
        # Attribute the event to the authenticated user when available.
        user = self.request.user.username if self.request.user else None
        new_log_obj = models.EventLog(asset=self.asset_obj,
                                      event_type=event_type,
                                      operater=user,
                                      component=component,
                                      detail=detail)
        new_log_obj.save()

    def create_asset(self, asset_type="server"):
        """Create the Asset row, then invoke the creator matching its type.

        Dispatches to ``_create_<asset_type>()``; the default keeps the
        previous behavior of creating a server.
        """
        manufactory_obj = self._create_or_update_manufactory()
        project_obj = models.BusinessUnit.objects.filter(id=self.project_id).first()
        # bugfix: asset_type was hard-coded to "server" here, ignoring the
        # asset_type argument that the dispatch below relies on.
        self.asset_obj = models.Asset(asset_type=asset_type,
                                      name=self.asset_data.get("essential_information").get("hostname"),
                                      sn=self.asset_data.get("essential_information").get("SN"),
                                      manufactory=manufactory_obj,
                                      status=0,
                                      business_unit=project_obj,
                                      management_ip=self.management_ip
                                      )
        self.asset_obj.save()
        func = getattr(self, '_create_%s' % asset_type)
        create_obj = func()
        return create_obj

    def _create_server(self):
        """Create every component record belonging to a server asset."""
        self._create_server_info()
        self._create_cpu_info()
        self._create_ram_info()
        self._create_nic_info()
        self._create_disk_info()

    def _create_server_info(self):
        """Create the Server row with OS/model details and log the event."""
        essential = self.asset_data.get("essential_information")
        server_obj = models.Server(asset=self.asset_obj,
                                   created_by=self.created_by,
                                   kernel_release=essential.get("kernel_release"),
                                   os_type=essential.get("os_type"),
                                   os_distribution=essential.get("os_distribution"),
                                   model=essential.get("model"),
                                   os_release=essential.get("os_release"),
                                   )
        logmsg = "创建服务器%s" % self.asset_obj.name
        self.log_handler(0, detail=logmsg)
        server_obj.save()

    def _create_cpu_info(self):
        """Create the CPU row and log the addition."""
        cpu_info = self.asset_data.get("cpu_information")
        cpu_obj = models.CPU(asset=self.asset_obj,
                             model=cpu_info.get("cpu_model"),
                             count=cpu_info.get("cpu_count"),
                             core_count=cpu_info.get("cpu_core_count"),
                             )
        cpu_obj.save()
        logmsg = "新增CPU,型号:%s,数量(个): %s" % (cpu_obj.model, cpu_obj.count)
        self.log_handler(2, detail=logmsg)

    def _create_nic_info(self):
        """Create a NIC row per physical interface, skipping loopback."""
        for nic_info in self.asset_data.get("interfaces_information"):
            if not (nic_info.get("name") == "lo" or nic_info.get("ip_address") == "127.0.0.1"):
                nic_obj = models.NIC(asset=self.asset_obj,
                                     name=nic_info.get("name"),
                                     netmask=nic_info.get("netmask"),
                                     ip_address=nic_info.get("ip_address"),
                                     ip_address_v6=nic_info.get("ip_address_v6"),
                                     mac_address=nic_info.get("macaddress")
                                     )
                nic_obj.save()
                logmsg = "新增网卡,网卡名:%s IP地址:%s" % (nic_obj.name, nic_obj.ip_address)
                self.log_handler(2, detail=logmsg)

    def _create_disk_info(self):
        """Create a Disk row per reported disk and log each addition."""
        for disk_info in self.asset_data.get("disk_information"):
            disk_obj = models.Disk(asset=self.asset_obj,
                                   name=disk_info.get("name"),
                                   capacity=disk_info.get("capacity"),
                                   )
            disk_obj.save()
            logmsg = "新增硬盘,硬盘名:%s 容量(MB):%s" % (disk_obj.name, disk_obj.capacity)
            self.log_handler(2, detail=logmsg)

    def _create_ram_info(self):
        """Create the RAM row and log the addition."""
        ram_obj = models.RAM(asset=self.asset_obj,
                             capacity=self.asset_data.get("memory_information").get("capacity")
                             )
        ram_obj.save()
        logmsg = "新增内存,容量:%s MB" % ram_obj.capacity
        self.log_handler(2, detail=logmsg)

    def _create_or_update_manufactory(self):
        """Return the Manufactory matching the reported name, creating it if absent."""
        manufactory = self.asset_data.get("essential_information").get("manufactory")
        manufactory_obj = models.Manufactory.objects.filter(name=manufactory).first()
        if manufactory_obj:
            return manufactory_obj
        new_manufatory = models.Manufactory(name=manufactory)
        new_manufatory.save()
        return new_manufatory
| {
"content_hash": "90d495b4d41cf8c57df76183751be68c",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 119,
"avg_line_length": 48.96491228070175,
"alnum_prop": 0.5078824793980652,
"repo_name": "ZhangXiaoyu-Chief/sandwich",
"id": "e9a167bbf87643cabaef59c8dfac77361f52e858",
"size": "5708",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/libs/asset_handler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "3890774"
},
{
"name": "CoffeeScript",
"bytes": "162"
},
{
"name": "HTML",
"bytes": "1343081"
},
{
"name": "JavaScript",
"bytes": "16719368"
},
{
"name": "PHP",
"bytes": "11744"
},
{
"name": "Python",
"bytes": "127323"
},
{
"name": "Shell",
"bytes": "444"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import os
import sys
from .xcspec import *
class XCSpecBuildSystem(xcspec):
    """xcspec subclass representing a build-system spec; adds no behavior yet."""

    def __init__(self, spec_data):
        super(XCSpecBuildSystem, self).__init__(spec_data)
"content_hash": "e714d53af94fd5512abf116fd50d3fcd",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 59,
"avg_line_length": 21.6,
"alnum_prop": 0.6851851851851852,
"repo_name": "samdmarshall/xcparse",
"id": "3943cb8e95a094130a6fe934b200acbdffafe065",
"size": "216",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "xcparse/Xcode/BuildSystem/XCSpec/XCSpecBuildSystem.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "260"
},
{
"name": "C++",
"bytes": "270"
},
{
"name": "Objective-C",
"bytes": "1783"
},
{
"name": "Objective-C++",
"bytes": "309"
},
{
"name": "Python",
"bytes": "228296"
},
{
"name": "Shell",
"bytes": "42"
},
{
"name": "Swift",
"bytes": "945"
}
],
"symlink_target": ""
} |
import unittest
from ParamSklearn.components.classification.gaussian_nb import \
GaussianNB
from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit
import sklearn.metrics
class GaussianNBComponentTest(unittest.TestCase):
    """Fit the GaussianNB component repeatedly and check accuracy stays at ~0.96."""

    def test_default_configuration(self):
        for _ in range(10):
            predictions, targets = _test_classifier(GaussianNB)
            accuracy = sklearn.metrics.accuracy_score(predictions, targets)
            self.assertAlmostEqual(0.95999999999999996, accuracy)

    def test_default_configuration_iterative_fit(self):
        for _ in range(10):
            predictions, targets = _test_classifier_iterative_fit(GaussianNB)
            accuracy = sklearn.metrics.accuracy_score(predictions, targets)
            self.assertAlmostEqual(0.95999999999999996, accuracy)
"content_hash": "d60643106ddf60b49010499208b96f43",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 40.24,
"alnum_prop": 0.5705765407554672,
"repo_name": "automl/paramsklearn",
"id": "574c1c49ab452424c836c004e764fdc342961efc",
"size": "1006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/components/classification/test_gaussian_nb.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6722"
},
{
"name": "Makefile",
"bytes": "6796"
},
{
"name": "Python",
"bytes": "560048"
}
],
"symlink_target": ""
} |
def client_query_w_array_params() -> None:
    """Run a query parameterized with a scalar @gender and an array @states."""
    # [START bigquery_query_params_arrays]
    from google.cloud import bigquery

    # Construct a BigQuery client object.
    client = bigquery.Client()

    query = """
    SELECT name, sum(number) as count
    FROM `bigquery-public-data.usa_names.usa_1910_2013`
    WHERE gender = @gender
    AND state IN UNNEST(@states)
    GROUP BY name
    ORDER BY count DESC
    LIMIT 10;
    """
    # Bind the named parameters referenced in the query text above.
    query_parameters = [
        bigquery.ScalarQueryParameter("gender", "STRING", "M"),
        bigquery.ArrayQueryParameter("states", "STRING", ["WA", "WI", "WV", "WY"]),
    ]
    job_config = bigquery.QueryJobConfig(query_parameters=query_parameters)
    query_job = client.query(query, job_config=job_config)  # Make an API request.

    for row in query_job:
        print("{}: \t{}".format(row.name, row.count))
    # [END bigquery_query_params_arrays]
| {
"content_hash": "0f62b00f57801b22bd8c2a7cd199a3b3",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 87,
"avg_line_length": 32.42857142857143,
"alnum_prop": 0.6024229074889867,
"repo_name": "googleapis/python-bigquery",
"id": "66971318216cf8ed70823647bead5c59b16d7008",
"size": "1485",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/client_query_w_array_params.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "2520564"
},
{
"name": "Shell",
"bytes": "31939"
}
],
"symlink_target": ""
} |
"""
Given an unsorted array of integers, find the length of longest increasing subsequence.
Example:
Input: [10,9,2,5,3,7,101,18]
Output: 4
Explanation: The longest increasing subsequence is [2,3,7,101], therefore the length is 4.
Note:
There may be more than one LIS combination, it is only necessary for you to return the length.
Your algorithm should run in O(n2) complexity.
"""
class Solution:
    """Length of the longest strictly increasing subsequence via top-down DP."""

    def innerLengthOfLIS(self, nums, prev_pos, cur_pos, middle_result):
        """Best LIS length over nums[cur_pos:], given the index of the last taken element.

        middle_result is a (len+1) x len memo table indexed by
        [prev_pos + 1][cur_pos]; -1 marks an uncomputed entry.
        """
        if cur_pos == len(nums):
            return 0
        cached = middle_result[prev_pos + 1][cur_pos]
        if cached >= 0:
            return cached
        take = 0
        # nums[cur_pos] may extend the subsequence only if strictly larger.
        if prev_pos < 0 or nums[prev_pos] < nums[cur_pos]:
            take = 1 + self.innerLengthOfLIS(nums, cur_pos, cur_pos + 1, middle_result)
        skip = self.innerLengthOfLIS(nums, prev_pos, cur_pos + 1, middle_result)
        best = take if take > skip else skip
        middle_result[prev_pos + 1][cur_pos] = best
        return best

    def lengthOfLIS(self, nums):
        """Return the length of the longest increasing subsequence of nums."""
        memo = [[-1] * len(nums) for _ in range(len(nums) + 1)]
        return self.innerLengthOfLIS(nums, -1, 0, memo)
if __name__ == "__main__":
    # Ad-hoc smoke tests: (input, expected LIS length) pairs.
    tests = [
        # ([10,9,2,5,3,7,101,18], 4),
        # ([4,10,4,3,8,9], 3),
        ([10,9,2,5,3,4], 3)
    ]
    s = Solution()
    for t in tests:
        print(t[0], t[1], s.lengthOfLIS(t[0]))
print(t[0], t[1], s.lengthOfLIS(t[0])) | {
"content_hash": "aec490cd148119f155b481b965b7ae57",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 96,
"avg_line_length": 32.82608695652174,
"alnum_prop": 0.6139072847682119,
"repo_name": "caoxudong/code_practice",
"id": "9732113fc562d5a2cae1687d40b5dd6a3eecdabb",
"size": "1510",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leetcode/300_longest_increasing_subsequence.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5500"
},
{
"name": "Java",
"bytes": "12718"
},
{
"name": "Python",
"bytes": "94051"
},
{
"name": "Shell",
"bytes": "748"
}
],
"symlink_target": ""
} |
"""Generates *_test.html files from *_test.js files.
Usage:
$ cd packages/grpc-web
$ python3 ./scripts/gen_test_htmls.py
"""
import os
import re
from string import Template
import common
# The directories containing JS tests.
DIRECTORIES_WITH_TESTS = ["../../javascript"]
TEST_HTML_TEMPLATE_FILE = './scripts/template_test_html.txt'
def main():
    """Render a Closure test HTML wrapper for every *_test.js under the test dirs."""
    template = Template(common.read_file(TEST_HTML_TEMPLATE_FILE))
    for directory in DIRECTORIES_WITH_TESTS:
        for js_file_path in common.get_files_with_suffix(directory, "_test.js"):
            _gen_test_html(js_file_path, template)
def _gen_test_html(js_file_path: str, template: Template):
    """Generate a Closure test wrapper HTML file for one *_test.js file.

    The output name flattens the JS path, e.g.
    ../../javascript/net/grpc/web/foo_test.js becomes
    javascript__net__grpc__web__foo_test.html.
    """
    relative = js_file_path
    while relative.startswith('../'):
        relative = relative[3:]
    test_file_name = os.path.splitext(relative.replace('/', '__'))[0] + '.html'

    # Fill the template with the Closure package declared in the JS file.
    package_name = _extract_closure_package(js_file_path)
    generated_html = template.substitute(package=package_name)

    common.write_file(common.GENERATED_TEST_BASE_PATH + test_file_name,
                      generated_html)
def _extract_closure_package(js_file_path) -> str:
    """Return the package named by goog.provide() or goog.module() in the file."""
    source = common.read_file(js_file_path)
    match = re.search(r"goog\.(provide|module)\([\n\s]*'(.+)'\);", source)
    if match is None:
        raise ValueError("goog.provide() or goog.module() not found in file")
    return match.group(2)
# Script entry point.
if __name__ == "__main__":
    main()
| {
"content_hash": "eadbe2cfe4259dc09922fe3af08f6113",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 79,
"avg_line_length": 31.841269841269842,
"alnum_prop": 0.6600199401794616,
"repo_name": "grpc/grpc-web",
"id": "09621342b267fa68287357d3da5e85b941605ccc",
"size": "2603",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/grpc-web/scripts/gen_test_htmls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "70132"
},
{
"name": "Dockerfile",
"bytes": "16715"
},
{
"name": "HTML",
"bytes": "6664"
},
{
"name": "Java",
"bytes": "71525"
},
{
"name": "JavaScript",
"bytes": "182361"
},
{
"name": "Makefile",
"bytes": "3479"
},
{
"name": "Python",
"bytes": "17399"
},
{
"name": "Shell",
"bytes": "15282"
},
{
"name": "Starlark",
"bytes": "2250"
},
{
"name": "TypeScript",
"bytes": "14539"
},
{
"name": "Zig",
"bytes": "18380"
}
],
"symlink_target": ""
} |
import os
from pathlib import Path
from typing import Optional
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.names import AbstractNameDefinition, ModuleName
from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter
from jedi.inference import compiled
from jedi.inference.base_value import TreeValue
from jedi.inference.names import SubModuleName
from jedi.inference.helpers import values_from_qualified_names
from jedi.inference.compiled import create_simple_object
from jedi.inference.base_value import ValueSet
from jedi.inference.context import ModuleContext
class _ModuleAttributeName(AbstractNameDefinition):
    """Name for synthetic module attributes such as ``__file__``/``__name__``."""
    api_type = 'instance'

    def __init__(self, parent_module, string_name, string_value=None):
        self.parent_context = parent_module
        self.string_name = string_name
        self._string_value = string_value

    def infer(self):
        state = self.parent_context.inference_state
        if self._string_value is None:
            # Concrete value unknown; infer it as "some string".
            return compiled.get_string_value_set(state)
        return ValueSet(
            [create_simple_object(state, self._string_value)]
        )
class SubModuleDictMixin:
    @inference_state_method_cache()
    def sub_modules_dict(self):
        """Map submodule names to ``SubModuleName`` objects.

        Only populated when this module is a package; lists the modules
        found in the package's directory. In the case of an import like
        ``from x.`` only submodules matter, not the module's variables.
        """
        if not self.is_package():
            return {}
        module_names = self.inference_state.compiled_subprocess.iter_module_names(
            self.py__path__()
        )
        # Each entry is a relative import from the current module.
        return {name: SubModuleName(self.as_context(), name)
                for name in module_names}
class ModuleMixin(SubModuleDictMixin):
    """Shared behavior for module-like values (tree modules, stubs, ...)."""
    _module_name_class = ModuleName

    def get_filters(self, origin_scope=None):
        # Order matters: tree/global names first, then submodules, then the
        # synthetic module attributes, then names pulled in by star imports.
        yield MergedFilter(
            ParserTreeFilter(
                parent_context=self.as_context(),
                origin_scope=origin_scope
            ),
            GlobalNameFilter(self.as_context()),
        )
        yield DictFilter(self.sub_modules_dict())
        yield DictFilter(self._module_attributes_dict())
        yield from self.iter_star_filters()

    def py__class__(self):
        # A module's "class" is types.ModuleType.
        c, = values_from_qualified_names(self.inference_state, 'types', 'ModuleType')
        return c

    def is_module(self):
        return True

    def is_stub(self):
        return False

    @property  # type: ignore[misc]
    @inference_state_method_cache()
    def name(self):
        # The module's own name is its last dotted-path component.
        return self._module_name_class(self, self.string_names[-1])

    @inference_state_method_cache()
    def _module_attributes_dict(self):
        """Build the dict of synthetic attributes (__package__ etc.)."""
        names = ['__package__', '__doc__', '__name__']
        # All the additional module attributes are strings.
        dct = dict((n, _ModuleAttributeName(self, n)) for n in names)
        path = self.py__file__()
        if path is not None:
            # __file__ gets its concrete value when the path is known.
            dct['__file__'] = _ModuleAttributeName(self, '__file__', str(path))
        return dct

    def iter_star_filters(self):
        # Yield the first (name) filter of every star-imported module.
        for star_module in self.star_imports():
            f = next(star_module.get_filters(), None)
            assert f is not None
            yield f

    # I'm not sure if the star import cache is really that effective anymore
    # with all the other really fast import caches. Recheck. Also we would need
    # to push the star imports into InferenceState.module_cache, if we reenable this.
    @inference_state_method_cache([])
    def star_imports(self):
        """Resolve ``from x import *`` statements to module values,
        recursively including the star imports of tree modules."""
        from jedi.inference.imports import Importer

        modules = []
        module_context = self.as_context()
        for i in self.tree_node.iter_imports():
            if i.is_star_import():
                new = Importer(
                    self.inference_state,
                    import_path=i.get_paths()[-1],
                    module_context=module_context,
                    level=i.level
                ).follow()

                for module in new:
                    if isinstance(module, ModuleValue):
                        # Star imports are transitive for tree modules.
                        modules += module.star_imports()
                modules += new
        return modules

    def get_qualified_names(self):
        """
        A module doesn't have a qualified name, but it's important to note that
        it's reachable and not `None`. With this information we can add
        qualified names on top for all value children.
        """
        return ()
class ModuleValue(ModuleMixin, TreeValue):
    """A module backed by a parsed syntax tree (as opposed to a compiled one)."""
    api_type = 'module'

    def __init__(self, inference_state, module_node, code_lines, file_io=None,
                 string_names=None, is_package=False):
        super().__init__(
            inference_state,
            parent_context=None,
            tree_node=module_node
        )
        self.file_io = file_io
        if file_io is None:
            # In-memory module (e.g. interactive code) — no file on disk.
            self._path: Optional[Path] = None
        else:
            self._path = file_io.path
        self.string_names = string_names  # Optional[Tuple[str, ...]]
        self.code_lines = code_lines
        self._is_package = is_package

    def is_stub(self):
        if self._path is not None and self._path.suffix == '.pyi':
            # Currently this is the way how we identify stubs when e.g. goto is
            # used in them. This could be changed if stubs would be identified
            # sooner and used as StubModuleValue.
            return True
        return super().is_stub()

    def py__name__(self):
        # Dotted module name, or None when the module has no name
        # (e.g. interactive code).
        if self.string_names is None:
            return None
        return '.'.join(self.string_names)

    def py__file__(self) -> Optional[Path]:
        """
        In contrast to Python's __file__ can be None.
        """
        if self._path is None:
            return None

        return self._path.absolute()

    def is_package(self):
        return self._is_package

    def py__package__(self):
        # Mirrors Python's __package__: the package itself for packages,
        # the parent package for plain modules, [] when nameless.
        if self.string_names is None:
            return []

        if self._is_package:
            return self.string_names
        return self.string_names[:-1]

    def py__path__(self):
        """
        In case of a package, this returns Python's __path__ attribute, which
        is a list of paths (strings).
        Returns None if the module is not a package.
        """
        if not self._is_package:
            return None

        # A namespace package is typically auto generated and ~10 lines long.
        first_few_lines = ''.join(self.code_lines[:50])
        # these are strings that need to be used for namespace packages,
        # the first one is ``pkgutil``, the second ``pkg_resources``.
        options = ('declare_namespace(__name__)', 'extend_path(__path__')
        if options[0] in first_few_lines or options[1] in first_few_lines:
            # It is a namespace, now try to find the rest of the
            # modules on sys_path or whatever the search_path is.
            paths = set()
            for s in self.inference_state.get_sys_path():
                other = os.path.join(s, self.name.string_name)
                if os.path.isdir(other):
                    paths.add(other)
            if paths:
                return list(paths)
            # Nested namespace packages will not be supported. Nobody ever
            # asked for it and in Python 3 they are there without using all the
            # crap above.

        # Default to the of this file.
        file = self.py__file__()
        assert file is not None  # Shouldn't be a package in the first place.
        return [os.path.dirname(file)]

    def _as_context(self):
        return ModuleContext(self)

    def __repr__(self):
        return "<%s: %s@%s-%s is_stub=%s>" % (
            self.__class__.__name__, self.py__name__(),
            self.tree_node.start_pos[0], self.tree_node.end_pos[0],
            self.is_stub()
        )
| {
"content_hash": "fe304a47032485e8094fd39bee85ee7a",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 95,
"avg_line_length": 35.29565217391304,
"alnum_prop": 0.590169992609017,
"repo_name": "glenngillen/dotfiles",
"id": "6461cb4bcd06eea355b02c08b962c014f16b3d9d",
"size": "8118",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": ".vscode/extensions/ms-python.python-2022.2.1924087327/pythonFiles/lib/jedilsp/jedi/inference/value/module.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "3634"
},
{
"name": "Shell",
"bytes": "4225"
},
{
"name": "Vim script",
"bytes": "16306"
}
],
"symlink_target": ""
} |
"""
* *******************************************************
* Copyright (c) VMware, Inc. 2016-2018. All Rights Reserved.
* SPDX-License-Identifier: MIT
* *******************************************************
*
* DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
* EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
* WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
* NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""
__author__ = 'VMware, Inc.'
import re
from samples.vsphere.common.vim.inventory import get_datastore_mo
from samples.vsphere.common.vim import datastore_file
datastore_path_regex = re.compile(r'\[(.+)\]\s?(.*)')


def parse_datastore_path(datastore_path):
    """Split a ``[datastore] relative/path`` string into its two parts.

    Returns a ``(datastore_name, path)`` tuple, or ``(None, None)`` when
    the string does not have the expected format.
    """
    match = datastore_path_regex.match(datastore_path)
    if match is None:
        return None, None
    return match.group(1), match.group(2)
def detect_directory(context, description, datacenter_name, datastore_path):
    """Return True when *datastore_path* exists on its datastore and is a
    directory; print a diagnostic and return False otherwise.

    Raises:
        Exception: when the datastore named in the path cannot be found.
    """
    datastore_name, _ = parse_datastore_path(datastore_path)
    datastore_mo = get_datastore_mo(context.client,
                                    context.service_instance._stub,
                                    datacenter_name,
                                    datastore_name)
    if not datastore_mo:
        raise Exception("Could not find datastore '{}'".format(datastore_name))

    entry = datastore_file.File(datastore_mo).list(datastore_path)
    if len(entry) == 0:
        print("Failed to detect {} directory '{}'".format(description,
                                                          datastore_path))
        return False
    if entry.type != datastore_file.FOLDER:
        print("Path '{}' is not a directory".format(datastore_path))
        return False
    return True
def create_directory(context, description, datacenter_name, datastore_path):
    """Create the directory at *datastore_path* in the given datacenter,
    unless it already exists.

    Raises an Exception when the datastore named in the path cannot be
    found; *description* is only used in the printed progress messages.
    """
    (datastore_name, path) = parse_datastore_path(datastore_path)
    datastore_mo = get_datastore_mo(context.client,
                                    context.service_instance._stub,
                                    datacenter_name,
                                    datastore_name)
    if not datastore_mo:
        raise Exception("Could not find datastore '{}'".format(datastore_name))

    dsfile = datastore_file.File(datastore_mo)
    if not dsfile.exists(datastore_path):
        print("Creating {} directory '{}'".format(description, datastore_path))
        # parent=True: create missing intermediate directories as well.
        dsfile.mkdir(path, parent=True)
    else:
        # TODO Need to check that this is actually a directory.
        print("{} directory '{}' exists.".format(description, datastore_path))
def delete_directory(context, description, datacenter_name, datastore_path):
    """Delete the directory at *datastore_path* in the given datacenter.

    Silently returns when the datastore or the path does not exist.
    """
    (datastore_name, path) = parse_datastore_path(datastore_path)
    datastore_mo = get_datastore_mo(context.client,
                                    context.service_instance._stub,
                                    datacenter_name,
                                    datastore_name)
    if not datastore_mo:
        return

    dsfile = datastore_file.File(datastore_mo)
    if dsfile.exists(datastore_path):
        print("Deleting {} directory '{}'.".format(description, datastore_path))
        # NOTE(review): directories use delete2() while delete_file() uses
        # delete() — presumably the recursive variant; confirm against the
        # datastore_file.File API.
        dsfile.delete2(path)
def detect_file(context, description, datacenter_name, datastore_path):
    """Return True when *datastore_path* exists on its datastore and is a
    plain file; print a diagnostic and return False otherwise.

    Raises:
        Exception: when the datastore named in the path cannot be found.
    """
    datastore_name, _ = parse_datastore_path(datastore_path)
    datastore_mo = get_datastore_mo(context.client,
                                    context.service_instance._stub,
                                    datacenter_name,
                                    datastore_name)
    if not datastore_mo:
        raise Exception("Could not find datastore '{}'".format(datastore_name))

    entry = datastore_file.File(datastore_mo).list(datastore_path)
    if len(entry) == 0:
        print("Failed to detect {} file '{}'".
              format(description, datastore_path))
        return False
    if entry.type != datastore_file.FILE:
        print("Path '{}' is not a file".format(datastore_path))
        return False
    return True
def delete_file(client, service_instance,
                description, datacenter_name, datastore_path):
    """Delete the file at *datastore_path* in the given datacenter.

    Silently returns when the datastore or the path does not exist.

    NOTE(review): unlike the sibling helpers this one takes ``client`` and
    ``service_instance`` directly instead of a ``context`` object — verify
    whether the signatures should be unified.
    """
    (datastore_name, path) = parse_datastore_path(datastore_path)
    datastore_mo = get_datastore_mo(client,
                                    service_instance._stub,
                                    datacenter_name,
                                    datastore_name)
    if not datastore_mo:
        return

    dsfile = datastore_file.File(datastore_mo)
    if dsfile.exists(datastore_path):
        print("Deleting {} file '{}'.".format(description, datastore_path))
        dsfile.delete(path)
| {
"content_hash": "bf57efc5243c341846aa235c01307c33",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 80,
"avg_line_length": 39.93700787401575,
"alnum_prop": 0.5972003154574133,
"repo_name": "tianhao64/vsphere-automation-sdk-python",
"id": "5db48a7f11258a61f9861694b383f27d8274adc9",
"size": "5072",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/vsphere/common/vim/file.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1596"
},
{
"name": "Python",
"bytes": "11861"
}
],
"symlink_target": ""
} |
import numpy as np
import numpy.testing as nt
from nose.tools import assert_true, assert_false
import nibabel as nib
from nibabel.tmpdirs import TemporaryDirectory
from dipy.data import get_data
from dipy.workflows.mask import MaskFlow
def test_mask():
    """Exercise MaskFlow on a small test volume: an invalid bound range must
    produce no output, while a valid run must yield a uint8 mask matching
    the input volume's shape and affine."""
    with TemporaryDirectory() as out_dir:
        data_path, _, _ = get_data('small_25')
        vol_img = nib.load(data_path)
        volume = vol_img.get_data()

        mask_flow = MaskFlow()

        # ub (9) below lb (10) — presumably an invalid range that makes the
        # flow bail out without writing outputs; confirm in MaskFlow.run.
        mask_flow.run(data_path, 10, out_dir=out_dir, ub=9)
        assert_false(mask_flow.last_generated_outputs)

        # Valid run: only the lower bound is given.
        mask_flow.run(data_path, 10, out_dir=out_dir)
        mask_path = mask_flow.last_generated_outputs['out_mask']
        mask_img = nib.load(mask_path)
        mask_data = mask_img.get_data()
        assert_true(mask_data.shape == volume.shape)
        nt.assert_array_almost_equal(mask_img.get_affine(),
                                     vol_img.get_affine())
        assert_true(mask_data.dtype == np.uint8)
if __name__ == '__main__':
    # Allow running this test module directly, outside the test runner.
    test_mask()
| {
"content_hash": "d24ac86066faed62a026998de10a1192",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 64,
"avg_line_length": 31.272727272727273,
"alnum_prop": 0.627906976744186,
"repo_name": "villalonreina/dipy",
"id": "18ddf9fb0cec1896c95ff1f4df8d155a0b23c168",
"size": "1032",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dipy/workflows/tests/test_masking.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2932"
},
{
"name": "Makefile",
"bytes": "3639"
},
{
"name": "Python",
"bytes": "3100258"
}
],
"symlink_target": ""
} |
import os
import tempfile
import logging
import subprocess
logging.basicConfig(level=logging.DEBUG)
def call_binary(arguments, home=None):
    """Run an external command and report whether it succeeded.

    Args:
        arguments: Command line as a list (program followed by its arguments).
        home: Optional value for the HOME environment variable; the rest of
            the current environment is passed through unchanged.

    Returns:
        True if the process exited with status 0, False otherwise.
    """
    env = dict(os.environ)
    if home:
        env['HOME'] = home
    logging.info("Running: %s", ' '.join(arguments))
    logging.info("Env: %s", env)
    # subprocess.run() waits for the process and closes the pipes for us.
    # The previous Popen/communicate/wait combination called wait()
    # redundantly after communicate() and could leak pipe handles when an
    # exception occurred between Popen() and communicate().
    completed = subprocess.run(
        arguments,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=env
    )
    if completed.stderr:
        logging.error(completed.stderr)
    logging.info(completed.stdout)
    return completed.returncode == 0
def convert_pdfupload_to_html(pdfupload):
    """Write uploaded PDF bytes to a temporary file and convert it to HTML.

    Args:
        pdfupload: Raw PDF content as bytes.

    Returns:
        Path of the generated HTML file, or None if the conversion failed.
    """
    original_pdf = tempfile.NamedTemporaryFile(suffix='.pdf', delete=False)
    original_pdf.write(pdfupload)
    original_pdf.close()
    # Defect fix: this was a stray debug print(); use logging like the
    # rest of the module.
    logging.debug(original_pdf.name)
    converted_html_path, _ = original_pdf.name.rsplit('.', 1)
    converted_html_path += '.html'
    if convert_to_html(original_pdf.name, converted_html_path):
        return converted_html_path
def convert_to_html(pdf_file, output_file):
    """Convert a PDF to HTML with pdf2htmlEX and strip inline <script> blocks.

    Args:
        pdf_file: Path of the source PDF.
        output_file: Desired path of the HTML output; its directory is used
            as pdf2htmlEX's destination directory.

    Returns:
        The output file path on success, None otherwise.
    """
    output_path = os.path.dirname(output_file)
    arguments = ['pdf2htmlEX',
                 '--embed-javascript', '0',
                 # '--embed-font', '0',
                 # '--decompose-ligature', '1',
                 '--optimize-text', '1',
                 # '--proof', '1',
                 '--dest-dir', output_path,
                 pdf_file]
    home_path = os.path.dirname(output_file)
    if not call_binary(arguments, home=home_path):
        return None
    if not os.path.exists(output_file):
        return None
    # Re-write the generated HTML, dropping every <script> element so that
    # no JavaScript survives into the redaction pipeline.
    # Defect fix: the file was previously read via open(...).read() without
    # ever closing the handle; use context managers for both passes.
    with open(output_file) as infile:
        contents = infile.read()
    in_script = False
    with open(output_file, 'w') as outfile:
        for line in contents.splitlines():
            if '<script>' in line and '</script>' in line:
                continue
            if '<script>' in line:
                in_script = True
                continue
            if '</script>' in line:
                in_script = False
                continue
            if in_script:
                continue
            outfile.write(line + '\n')
    return output_file
def apply_redactions(html_file, redactions, output_type='pdf',
                     pdf_engine='wkhtmltopdf'):
    """Apply *redactions* to an HTML file via PhantomJS, then render the
    final document.

    With pdf_engine='phantomjs' PhantomJS renders the PDF directly; with
    'wkhtmltopdf' (the default) PhantomJS only produces intermediate HTML
    which wkhtmltopdf then converts. Returns the path of the produced file,
    or None when any step fails.
    """
    # PhantomJS output suffix depends on whether it is the final renderer.
    phantom_suffix = 'html' if pdf_engine == 'wkhtmltopdf' else 'pdf'
    phantom_f = tempfile.NamedTemporaryFile(suffix='.%s' % phantom_suffix, delete=False)
    phantom_f.close()
    phantom_out = phantom_f.name
    home_path = os.path.dirname(phantom_out)

    bin_path = os.path.join(os.path.dirname(__file__), 'bin', 'phantom_redact.js')
    print(bin_path)
    arguments = ['phantomjs', bin_path, html_file, phantom_out, redactions]
    if pdf_engine != 'phantomjs':
        # Extra flag tells the script to emit HTML instead of a PDF.
        arguments.append('html')
    print(' '.join(arguments))
    phantom_return = call_binary(arguments, home=home_path)
    if not phantom_return:
        return
    if not os.path.exists(phantom_out):
        return
    if pdf_engine == 'phantomjs':
        # PhantomJS already produced the final PDF.
        return phantom_out
    # Second stage: render the redacted HTML to the requested output type.
    output_f = tempfile.NamedTemporaryFile(suffix='.%s' % output_type, delete=False)
    output_f.close()
    output_file = output_f.name
    arguments = ['wkhtmltopdf', '--print-media-type', phantom_out, output_file]
    print(' '.join(arguments))
    wkhtmltopdf_return = call_binary(arguments, home=home_path)
    if not wkhtmltopdf_return:
        return
    if not os.path.exists(output_file):
        return
    return output_file
| {
"content_hash": "38f65cc1641bf6ebfa37845ca43da452",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 88,
"avg_line_length": 29.912280701754387,
"alnum_prop": 0.5818181818181818,
"repo_name": "stefanw/froide-redact",
"id": "f28983a178d003c3aac5d658299fc929dd38871f",
"size": "3410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "froide_redact/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "3623"
},
{
"name": "JavaScript",
"bytes": "343377"
},
{
"name": "Python",
"bytes": "7317"
}
],
"symlink_target": ""
} |
__author__ = 'popka'
import urlparse
import Utils.utils as utils
from selenium.common.exceptions import WebDriverException, TimeoutException, NoAlertPresentException, StaleElementReferenceException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import StaleElementReferenceException, ElementNotVisibleException, WebDriverException
from selenium.webdriver.support import expected_conditions as EC
import re
import urllib
class Page(object):
    """Base page object: wraps a selenium driver plus helpers for crawling
    a site, filling forms and detecting reflected XSS payloads."""
    PATH = ''
    FORM = "form"
    LINKS = 'a'
    INPUT = 'input'
    TEXT_AREA = 'textarea'
    BUTTON = 'button'
    SUBMIT_INPUT = 'input[type="submit"]'
    KEY = 'abcd'      # marker text typed into every input field
    HASH = "#SMILE"   # location.hash value signaling a fired XSS payload

    def __init__(self, driver, url):
        self.driver = driver
        self.url = url

    def open(self):
        """Open self.PATH relative to the base URL and wait for the DOM."""
        url = urlparse.urljoin(self.url, self.PATH)
        self.driver.get(url)
        utils.wait_for_document_ready(self.driver)

    def open_without_wait(self):
        """Open the page without waiting for document-ready."""
        url = urlparse.urljoin(self.url, self.PATH)
        self.driver.get(url)
        #utils.wait_for_document_ready(self.driver)

    def fill_all_input(self, text):
        '''
        Fills every input and textarea on the page with the given text.
        '''
        input = self.driver.find_elements_by_tag_name(self.INPUT)
        for i in input:
            try:
                i.send_keys(text)
            except StaleElementReferenceException:
                pass

        input = self.driver.find_elements_by_tag_name(self.TEXT_AREA)
        for i in input:
            try:
                i.send_keys(text)
            except StaleElementReferenceException:
                pass

    def get_all_forms(self):
        '''
        Returns a list of all forms on the page.
        :return:
        '''
        forms = self.driver.find_elements_by_tag_name(self.FORM)
        return forms

    def get_all_button(self):
        '''
        Returns a list of all buttons and submit inputs on the page.
        :return:
        '''
        button = []
        button += self.driver.find_elements_by_css_selector(self.BUTTON)
        button += self.driver.find_elements_by_css_selector(self.SUBMIT_INPUT)
        return button

    def get_inner_links(self):
        '''
        Returns the links on the page that stay inside the site's domain.
        :return:
        '''
        all_links = self.driver.find_elements_by_tag_name(self.LINKS)
        domain = self.url[self.url.index("://")+3:]  # domain name
        domain = domain[:domain.index("/")]
        regular = re.compile('\.+\w+$', re.IGNORECASE)  # matches trailing extensions like ".doc"
        inside_links = []
        for link in all_links:
            href = link.get_attribute('href')
            if href is not None:
                if (domain in href):  # are we still within the site?
                    # Subdomain support could be made configurable
                    # ('.'+domain not in href), but that would also need a
                    # check for "www".
                    if '#' in href:  # if there is a '#', cut it off
                        href = href[0:href.index('#')]
                    # take the part of the URL after the domain name
                    href_without_domain = href[href.index(domain)+len(domain):]
                    # look for a trailing extension such as ".doc"
                    extensions = regular.findall(href_without_domain)
                    if len(extensions) == 0:  # no extension at all
                        inside_links.append(href)
                    else:
                        extensions = extensions[0]
                        # extend this allow-list based on experience
                        if extensions=='.html' or extensions=='.htm' or extensions == '.xml':
                            inside_links.append(href)
                        # print(href)
        return inside_links

    def try_page(self):
        """
        For every form: fill all inputs with the marker and submit it.
        If submitting triggers ajax (not implemented yet) or redirects to
        a URL with parameters (contains '?'), record that URL.
        Returns the list of interesting URLs.
        """
        self.open()
        urls_with_parameters = []
        forms = self.get_all_forms()
        forms_count = len(forms)  # number of candidate forms on the page
        for i in xrange(forms_count):
            # Re-open the page so the form list is fresh for each attempt.
            self.open()
            forms = self.get_all_forms()
            try:
                self.fill_all_input(self.KEY)
                forms[i].submit()
                utils.wait_for_ajax_complete(self.driver)
                utils.wait_for_head_load(self.driver)  # waiting for <head> is enough; revisit
                if '?' in self.driver.current_url:
                    decoded_url = urllib.unquote(self.driver.current_url).decode('utf8')
                    urls_with_parameters.append(decoded_url)
            except (StaleElementReferenceException, ElementNotVisibleException, WebDriverException):
                pass
        return urls_with_parameters

    def check_xss(self):
        """Return True when location.hash equals the XSS marker."""
        try:
            hash = self.driver.execute_script("return location.hash")
            return self.HASH == hash
        except TimeoutException:
            return False

    def get_alert_text_and_close(self):
        """Accept the current alert and return its text."""
        alert = self.driver.switch_to_alert()
        text = alert.text
        alert.accept()
        return text

    def find_element_by_tag_and_attributes(self, tag, attr):
        """Return True when some element with the given tag carries every
        attribute/value pair in *attr* (substring match on values)."""
        elements = self.driver.find_elements_by_tag_name(tag)
        for el in elements:
            count_of_hit_attribute = 0
            for attribute in attr:
                # NOTE(review): attrib/attr_attr are computed but never
                # used — likely debugging leftovers.
                attrib = el.get_attribute(attribute)
                attr_attr = attr[attribute]
                if (el.get_attribute(attribute) is not None):
                    if (attr[attribute] in el.get_attribute(attribute)):
                        count_of_hit_attribute += 1
            if (count_of_hit_attribute == len(attr)):
                return True
return False | {
"content_hash": "15c8ecc67c0bb013f63f051f635c33f4",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 132,
"avg_line_length": 32.33879781420765,
"alnum_prop": 0.578573842514363,
"repo_name": "MyBeautyTeam/xss-checker",
"id": "d96d40e2e2588dd186f89bfff81701f94cd17605",
"size": "6591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PageClass/Page.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20968"
},
{
"name": "Shell",
"bytes": "366"
}
],
"symlink_target": ""
} |
import packet_names
from packet import Packet,PacketType, PacketDest
class Stats:
    """Accumulates per-type and per-direction statistics for captured
    game/control packets and pretty-prints a report."""

    def __init__(self, verbose=False):
        self.verbose = verbose
        self.records = 0   # total valid packets seen
        self.control = 0   # count of control packets
        self.game = 0      # count of game packets
        self.invalid = 0   # packets whose type could not be parsed
        self.unknown = 0   # packets flagged as unknown by the parser
        self.game_types = [0]*len(packet_names.game_packet_names)
        self.control_types = [0]*len(packet_names.control_packet_names)

        # network stats
        self.to_client = 0
        self.to_server = 0
        self.size_accum = 0  # total bytes across all valid packets
        # Per packet id: [count toward destination 0, count toward destination 1].
        self.game_dst = [[0,0] for i in range(len(packet_names.game_packet_names))]
        self.control_dst = [[0,0] for i in range(len(packet_names.control_packet_names))]

    def add(self, dst, data):
        """Classify one raw packet (*data*) heading toward *dst* and
        update all counters; invalid packets only bump ``invalid``."""
        ptype, pid, unknown, _ = Packet.get_type(data)

        if not ptype:
            self.invalid += 1
            return

        self.records += 1
        self.size_accum += len(data)

        if ptype == PacketType.Control:
            self.control += 1
            self.control_types[pid] += 1
            self.control_dst[pid][dst.value] += 1
        elif ptype == PacketType.Game:
            self.game += 1
            self.game_types[pid] += 1
            self.game_dst[pid][dst.value] += 1
        else:
            raise RuntimeError("Unsupported packet type: " + str(ptype))

        if dst == PacketDest.Server:
            self.to_server += 1
        elif dst == PacketDest.Client:
            self.to_client += 1
        else:
            raise RuntimeError("Unsupported packet destination: " + str(dst))

        if unknown:
            self.unknown += 1

    # combine two Stats objects
    # NOTE(review): this mutates and returns `self` rather than building a
    # new object, so `a + b` changes `a` — unusual semantics for `+`
    # (more like `__iadd__`); confirm callers rely on this before changing.
    def __add__(self, other):
        self.records += other.records
        self.control += other.control
        self.game += other.game
        self.invalid += other.invalid
        self.unknown += other.unknown
        for i,v in enumerate(other.game_types):
            self.game_types[i] += v
        for i,v in enumerate(other.control_types):
            self.control_types[i] += v

        # network stats
        self.to_client += other.to_client
        self.to_server += other.to_server
        self.size_accum += other.size_accum
        for i,v in enumerate(other.game_dst):
            j = self.game_dst[i]
            self.game_dst[i] = [j[0] + v[0], j[1] + v[1]]
        for i,v in enumerate(other.control_dst):
            j = self.control_dst[i]
            self.control_dst[i] = [j[0] + v[0], j[1] + v[1]]

        return self

    def stats(self):
        """Return the scalar counters and per-type tallies as a dict."""
        return {
            "records" : self.records,
            "control" : self.control,
            "game" : self.game,
            "invalid" : self.invalid,
            "unknown" : self.unknown,
            "game_types" : self.game_types,
            "control_types" : self.control_types
        }

    def pp(self):
        """Pretty-print the collected statistics to stdout: totals,
        frequency tables, never-seen packet types and per-type direction."""
        # Format [id, count, name] triples as a numbered list.
        def fmtlist(a, showAmt=False):
            newArr = []
            for i,v in enumerate(a):
                amt = ""
                if showAmt:
                    amt = "%d " % v[1]
                newArr += ["%d. %s%s (0x%02x)" % (i+1, amt, v[2], v[0])]
            return "\n".join(newArr)

        # Format [id, count, name, dest] rows with a readable destination.
        def fmtlistDest(a):
            newArr = []
            for i, (pid,v,n,d) in enumerate(a):
                dst = ""
                if d == PacketDest.Server.value:
                    dst = "Server"
                elif d == PacketDest.Client.value:
                    dst = "Client"
                else:
                    dst = "Either"
                newArr += ["%d. %s (0x%02x) -> %s" % (i+1, n, pid, dst)]
            return "\n".join(newArr)

        unseenControlList = []
        unseenGameList = []
        controlFreq = []
        gameFreq = []
        singleDstControl = []
        singleDstGame = []

        # Build [id, count, name] rows for every known packet id.
        for i,v in enumerate(self.control_types):
            controlFreq += [[i, v, Packet.get_name_by_id(PacketType.Control, i)]]
        for i,v in enumerate(self.game_types):
            gameFreq += [[i, v, Packet.get_name_by_id(PacketType.Game, i)]]

        # Sorted descending by observed count.
        controlFreqS = sorted(controlFreq, key=lambda x: x[1], reverse=True)
        gameFreqS = sorted(gameFreq, key=lambda x: x[1], reverse=True)

        # Known (non-"unknown") packet types that never appeared.
        for (i, v, n) in gameFreqS:
            if v == 0 and not Packet.is_unknown(PacketType.Game, i):
                unseenGameList += [[i, v, n]]
        for (i, v, n) in controlFreqS:
            if v == 0 and not Packet.is_unknown(PacketType.Control, i):
                unseenControlList += [[i, v, n]]

        # Tag each seen type with the destination it was (not) observed on.
        # NOTE(review): when the Server-direction count is zero the row is
        # tagged with PacketDest.Server — confirm whether the arrow in the
        # report means "only toward" or "never toward" that destination.
        for i,dst in enumerate(self.game_dst):
            (i, v, n) = gameFreq[i]
            if v > 0:  # did we have packets of this type?
                if dst[PacketDest.Server.value] == 0:
                    singleDstGame += [[i, v, n, PacketDest.Server.value]]
                elif dst[PacketDest.Client.value] == 0:
                    singleDstGame += [[i, v, n, PacketDest.Client.value]]
                else:
                    singleDstGame += [[i, v, n, 2]]
        for i,dst in enumerate(self.control_dst):
            (i, v, n) = controlFreq[i]
            if v > 0:  # did we have packets of this type?
                if dst[PacketDest.Server.value] == 0:
                    singleDstControl += [[i, v, n, PacketDest.Server.value]]
                elif dst[PacketDest.Client.value] == 0:
                    singleDstControl += [[i, v, n, PacketDest.Client.value]]
                else:
                    singleDstControl += [[i, v, n, 2]]

        singleDstControl = sorted(singleDstControl, key=lambda x: x[3])
        singleDstGame = sorted(singleDstGame, key=lambda x: x[3])

        #####################
        statistics = \
"""\
Records: %d
- Control: %d
- Game: %d
Invalid: %d
Unknown: %d
""" % (self.records, self.control, self.game, self.invalid, self.unknown)
        #####################
        frequency = \
"""\
Frequency (highest to lowest)
== Game ==
%s
== Control ==
%s
""" % (fmtlist(gameFreqS, True), fmtlist(controlFreqS, True))
        #####################
        unseen = \
"""\
Unseen packets
== Game (%d) ==
%s
== Control (%d) ==
%s
""" % (len(unseenGameList), fmtlist(unseenGameList),
       len(unseenControlList), fmtlist(unseenControlList))
        #####################
        singleDest = \
"""\
Packet Destination
== Game (%d) ==
%s
== Control (%d) ==
%s
""" % (len(singleDstGame), fmtlistDest(singleDstGame),
       len(singleDstControl), fmtlistDest(singleDstControl))
        #####################

        print(statistics)
        print(frequency)
        print(unseen)
        print(singleDest)
| {
"content_hash": "dcf2a25f7ebab09b1eea81e267d1752f",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 89,
"avg_line_length": 28.23175965665236,
"alnum_prop": 0.5033444816053512,
"repo_name": "psforever/gcapy",
"id": "cf428fd99c694966c8ae0eab0cd5e3137bdebdd2",
"size": "6578",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gcapy/stats.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49419"
}
],
"symlink_target": ""
} |
"""
Given a config file, run a given workflow
"""
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
from optparse import make_option
import IPython
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
    """Run the workflow described by a config file; with --shell, drop into
    an IPython session seeded with the results afterwards."""
    args = 'config_file'
    option_list = BaseCommand.option_list + (make_option('-s', '--shell',
        action = "store_true",
        # Defect fix: the help text contained a stray quote ('afterwards".').
        help='Whether or not to load a shell afterwards.'),)

    def command(self, *args, **options):
        """Load the config, run it through a NaiveWorkflow, and optionally
        open the interactive shell."""
        config_file = args[0]
        wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
        wrapper.run()

        if 'shell' in options and options['shell']:
            ns = {
                'flow' : wrapper.workflow,
                'tasks' : wrapper.workflow.tasks
            }
            # Defect fix: the banner parts were implicitly concatenated with
            # no separators, running the three sentences together; join them
            # with explicit newlines instead.
            IPython.embed(user_ns=ns, banner2=(
                "This shell session has been started in the namespace for your project. "
                "You can access the following variables:\n"
                "flow - The workflow that just ran.\n"
                "tasks - the completed tasks that the workflow ran."))
| {
"content_hash": "8b1f5c34258715cba2ee9765c4c9f9cd",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 161,
"avg_line_length": 37.432432432432435,
"alnum_prop": 0.5797833935018051,
"repo_name": "VikParuchuri/percept",
"id": "5d28a40f5f4a33bef7ab6b89810cfd3900947ce6",
"size": "1385",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "percept/workflows/commands/run_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "81776"
},
{
"name": "Shell",
"bytes": "5098"
}
],
"symlink_target": ""
} |
import math
import numpy as np
from dp_accounting import accountant
from dp_accounting import common
class LaplaceMechanism:
  """Wraps a vector-valued function with Laplace-mechanism output noise.

  Given f(x) = (Z[1], ..., Z[k]), the wrapped function returns
  (Z[1] + Y[1], ..., Z[k] + Y[k]) where each Y[i] is drawn independently
  from Lap(x | delta_f / epsilon), the distribution with density
  Lap(x | b) = (1 / 2b) exp(-|x| / b).

  See section 3.3 of Dwork and Roth.
  """

  def __init__(self, f, delta_f, epsilon, random_state=None):
    """Instantiates a LaplaceMechanism.

    Args:
      f: A function which takes as input a database and which returns as output
        a numpy array.
      delta_f: The sensitivity parameter, e.g., the maximum value by which the
        function can change for two databases that differ by only one row.
      epsilon: Differential privacy parameter.
      random_state: Optional instance of numpy.random.RandomState that is
        used to seed the random number generator.
    """
    self._func = f
    self._delta_f = delta_f
    self._epsilon = epsilon
    self._random_state = random_state or np.random.RandomState()

  def __call__(self, x):
    true_answer = self._func(x)
    noise = self._random_state.laplace(
        size=true_answer.shape, scale=self._delta_f / self._epsilon)
    return true_answer + noise
class GeometricMechanism:
  """Wraps a vector-valued function with geometric-mechanism output noise.

  Given f(x) = (Z[1], ..., Z[k]), the wrapped function returns
  (Z[1] + Y[1], ..., Z[k] + Y[k]) where each Y[i] follows the discrete
  Laplace distribution DL(k | alpha) with alpha = exp(-epsilon / delta_f):

    DL(k | alpha) = (1 - alpha) / (1 + alpha) * alpha ^ |k|

  defined on the integers. See:
  Inusah, Seidu, and Tomasz J. Kozubowski. "A discrete analogue of the
  Laplace distribution." Journal of statistical planning and inference
  136.3 (2006): 1090-1102.

  The geometric mechanism was defined in:
  Ghosh, Arpita, Tim Roughgarden, and Mukund Sundararajan.
  "Universally utility-maximizing privacy mechanisms."
  SIAM Journal on Computing 41.6 (2012): 1673-1693.

  Do not confuse the mechanism with the geometric distribution
  (Pr(X=k | p) = p * (1-p)^(k-1)); the connection is that the difference
  of two independent geometric(p) variables is discrete-Laplace with
  parameter 1-p, which is exactly how the noise is sampled below.
  """

  def __init__(self, f, delta_f, epsilon, random_state=None):
    """Instantiates a geometric mechanism.

    Args:
      f: A function which takes as input a database and which returns as output
        a numpy array.
      delta_f: The sensitivity parameter, e.g., the maximum value by which the
        function can change for two databases that differ by only one row.
      epsilon: Differential privacy parameter.
      random_state: Optional instance of numpy.random.RandomState that is
        used to seed the random number generator.
    """
    self._func = f
    self._delta_f = delta_f
    self._epsilon = epsilon
    self._random_state = random_state or np.random.RandomState()

  def __call__(self, x):
    true_answer = self._func(x)
    p_geometric = 1 - math.exp(-self._epsilon / self._delta_f)
    # Difference of two iid geometric draws yields discrete-Laplace noise.
    pos = self._random_state.geometric(size=true_answer.shape, p=p_geometric)
    neg = self._random_state.geometric(size=true_answer.shape, p=p_geometric)
    return true_answer + pos - neg
class GaussianMechanism:
    """Wraps a query function with (continuous) Gaussian output perturbation.

    For f(x) = (Z[1], ..., Z[k]) the wrapped callable returns
    (Z[1] + Y[1], ..., Z[k] + Y[k]) with each Y[i] drawn independently from
    the zero-mean normal distribution with density
        N(x | sigma) = exp(-0.5 x^2 / sigma^2) / (sigma * sqrt(2 * pi)),
    where sigma is chosen so the mechanism is (epsilon, delta)-differentially
    private. See Appendix A of Dwork and Roth.
    """

    def __init__(
        self, f, delta_f, epsilon, delta, num_queries=1, random_state=None):
        """Instantiates a gaussian mechanism.

        Args:
          f: A function which takes as input a database and which returns as
            output a numpy array.
          delta_f: The sensitivity parameter, i.e. the maximum value by which
            the function can change for two databases that differ by only one
            row.
          epsilon: Differential privacy parameter.
          delta: Differential privacy parameter.
          num_queries: Number of queries for which the mechanism is used; the
            noise level is calibrated so that answering up to this many
            queries is (epsilon, delta)-differentially private.
          random_state: Optional instance of numpy.random.RandomState that is
            used to seed the random number generator.
        """
        self._func = f
        self._delta_f = delta_f
        # Calibrate the noise scale to the full privacy budget.
        self._sigma = accountant.get_smallest_gaussian_noise(
            common.DifferentialPrivacyParameters(epsilon, delta),
            num_queries, sensitivity=delta_f)
        if random_state is None:
            random_state = np.random.RandomState()
        self._random_state = random_state

    def __call__(self, x):
        """Returns the noised query answer for database x."""
        answer = self._func(x)
        noise = self._random_state.normal(scale=self._sigma, size=answer.shape)
        return answer + noise
class DiscreteGaussianMechanism:
    """Transforms a function using the discrete gaussian mechanism.

    If f(x) = (Z[1], Z[2], ..., Z[k]), then returns a function that computes
    (Z'[1], Z'[2], ..., Z'[k]), where
    Z'[i] = Z[i] + Y[i],
    Y[i] ~ N_Z(x | sigma),
    and N_Z(x | sigma) is given by the probability mass function defined on the
    integers such that N_Z(x | sigma) is proportional to
    exp(-0.5 x^2 / sigma^2) / (sigma * sqrt(2 * pi)) for all integers x.

    See:
    Clément L. Canonne, Gautam Kamath, Thomas Steinke. "The Discrete Gaussian for
    Differential Privacy" Advances in Neural Information Processing Systems 33
    (NeurIPS 2020).
    """

    def __init__(
        self, f, delta_f, epsilon, delta, num_queries=1, random_state=None):
        """Instantiates a discrete gaussian mechanism.

        Args:
          f: A function which takes as input a database and which returns as output
            a numpy array.
          delta_f: The sensitivity parameter, e.g., the maximum value by which the
            function can change for two databases that differ by only one row.
          epsilon: Differential privacy parameter.
          delta: Differential privacy parameter.
          num_queries: The number of queries for which the mechanism is used. Note
            that the constructed mechanism will be (epsilon, delta)-differentially
            private when answering (no more than) num_queries queries.
          random_state: Optional instance of numpy.random.RandomState that is
            used to seed the random number generator.
        """
        self._func = f
        self._delta_f = delta_f
        # This is only an estimate parameter using the continuous Gaussian as a
        # a proxy. It is known that the two parameters are almost the same for
        # a large regime of parameter; see Figure 1 in Canonne et al.'s paper.
        # TODO: add a more rigorous parameter calculation based on privacy loss
        # distributions.
        self._sigma = accountant.get_smallest_gaussian_noise(
            common.DifferentialPrivacyParameters(epsilon, delta),
            num_queries, sensitivity=delta_f)
        self._random_state = random_state or np.random.RandomState()

    def __call__(self, x):
        # Returns f(x) plus i.i.d. discrete Gaussian noise, one sample per
        # output cell. Each sample is produced by the rejection sampler below,
        # so the RNG consumption per cell is variable (loops until acceptance).
        def sample_discrete_gaussian(*unused):
            # Use rejection sampling of discrete Laplace distribution (Algorithm 3 in
            # Canonne et al.) to sample a discrete Gaussian random variable.
            # The index arguments from np.fromfunction are ignored: every cell
            # gets an independent draw.
            t = math.floor(self._sigma) + 1
            while True:
                # Generate discrete laplace with parameter t
                p_geometric = 1 - math.exp(-1/t)
                y1 = self._random_state.geometric(p=p_geometric)
                y2 = self._random_state.geometric(p=p_geometric)
                y = y1 - y2
                sigma_sq = self._sigma**2
                # Acceptance probability of the candidate y (Canonne et al.,
                # Algorithm 3); accepted samples follow the discrete Gaussian.
                p_bernoulli = math.exp(-(abs(y) - sigma_sq/t)**2 * 0.5 / sigma_sq)
                if self._random_state.binomial(1, p_bernoulli) == 1:
                    return y
        z = self._func(x)
        # otypes=[float] fixes the vectorized output dtype up front; the noise
        # array has the same shape as the query answer z.
        return z + np.fromfunction(
            np.vectorize(sample_discrete_gaussian, otypes=[float]), z.shape)
| {
"content_hash": "6bb1cff2a93840634e920f6763413d07",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 79,
"avg_line_length": 38.839622641509436,
"alnum_prop": 0.6633470974010202,
"repo_name": "world-federation-of-advertisers/cardinality_estimation_evaluation_framework",
"id": "dfe3e7ed5bfae798a6955bb4b27cc1605e09f059",
"size": "8853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/common/noisers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "30965"
},
{
"name": "Jupyter Notebook",
"bytes": "124222"
},
{
"name": "Python",
"bytes": "650173"
}
],
"symlink_target": ""
} |
from jsonrpc import ServiceProxy
import sys
import string

# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======

# Connect to the local eclipseCoin JSON-RPC server on port 5078; embed the
# credentials in the URL only when they were filled in above.
if rpcpass == "":
    access = ServiceProxy("http://127.0.0.1:5078")
else:
    access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:5078")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a eclipseCoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a eclipseCoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| {
"content_hash": "2a74b8a49b62f5bc8c8ef83af2eba062",
"timestamp": "",
"source": "github",
"line_count": 324,
"max_line_length": 79,
"avg_line_length": 24.209876543209877,
"alnum_prop": 0.6620346761856196,
"repo_name": "ECLIeclipse/ECLI-eClipse",
"id": "9f3b4b26c15e41243ec48843fd0a5d6f38afd643",
"size": "7844",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/bitrpc/bitrpc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "32873"
},
{
"name": "C++",
"bytes": "2606098"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "18284"
},
{
"name": "HTML",
"bytes": "50615"
},
{
"name": "Makefile",
"bytes": "102375"
},
{
"name": "NSIS",
"bytes": "6022"
},
{
"name": "Objective-C",
"bytes": "1052"
},
{
"name": "Objective-C++",
"bytes": "5864"
},
{
"name": "Python",
"bytes": "69729"
},
{
"name": "QMake",
"bytes": "14726"
},
{
"name": "Shell",
"bytes": "13173"
}
],
"symlink_target": ""
} |
"""
EasyBuild support for building and installing GCC, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
@author: Toon Willems (Ghent University)
@author: Ward Poelmans (Ghent University)
"""
import os
import re
import shutil
from copy import copy
from distutils.version import LooseVersion
from vsc.utils.missing import any
import easybuild.tools.environment as env
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.modules import get_software_root
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import check_os_dependency, get_os_name, get_os_type, get_shared_lib_ext, get_platform_name
class EB_GCC(ConfigureMake):
    """
    Self-contained build of GCC.
    Uses system compiler for initial build, then bootstraps.
    """

    @staticmethod
    def extra_options():
        """Define custom easyconfig parameters that are specific to GCC."""
        gcc_vars = {
            'languages': [[], "List of languages to build GCC for (--enable-languages)", CUSTOM],
            'withlto': [True, "Enable LTO support", CUSTOM],
            'withcloog': [False, "Build GCC with CLooG support", CUSTOM],
            'withppl': [False, "Build GCC with PPL support", CUSTOM],
            'withisl': [False, "Build GCC with ISL support", CUSTOM],
            'pplwatchdog': [False, "Enable PPL watchdog", CUSTOM],
            'clooguseisl': [False, "Use ISL with CLooG or not", CUSTOM],
            'multilib': [False, "Build multilib gcc (both i386 and x86_64)", CUSTOM],
        }
        return ConfigureMake.extra_options(gcc_vars)
def __init__(self, *args, **kwargs):
super(EB_GCC, self).__init__(*args, **kwargs)
self.stagedbuild = False
if LooseVersion(self.version) >= LooseVersion("4.8.0") and self.cfg['clooguseisl'] and not self.cfg['withisl']:
raise EasyBuildError("Using ISL bundled with CLooG is unsupported in >=GCC-4.8.0. "
"Use a seperate ISL: set withisl=True")
# I think ISL without CLooG has no purpose in GCC < 5.0.0 ...
if LooseVersion(self.version) < LooseVersion("5.0.0") and self.cfg['withisl'] and not self.cfg['withcloog']:
raise EasyBuildError("Activating ISL without CLooG is pointless")
# unset some environment variables that are known to may cause nasty build errors when bootstrapping
self.cfg.update('unwanted_env_vars', ['CPATH', 'C_INCLUDE_PATH', 'CPLUS_INCLUDE_PATH', 'OBJC_INCLUDE_PATH'])
# ubuntu needs the LIBRARY_PATH env var to work apparently (#363)
if get_os_name() not in ['ubuntu', 'debian']:
self.cfg.update('unwanted_env_vars', ['LIBRARY_PATH'])
self.platform_lib = get_platform_name(withversion=True)
def create_dir(self, dirname):
"""
Create a dir to build in.
"""
dirpath = os.path.join(self.cfg['start_dir'], dirname)
try:
os.mkdir(dirpath)
os.chdir(dirpath)
self.log.debug("Created dir at %s" % dirpath)
return dirpath
except OSError, err:
raise EasyBuildError("Can't use dir %s to build in: %s", dirpath, err)
    def prep_extra_src_dirs(self, stage, target_prefix=None):
        """
        Prepare extra (optional) source directories, so GCC will build these as well.

        Copies unpacked GMP/MPFR/MPC (and optionally ISL/PPL/CLooG) sources into
        the GCC source tree (or into target_prefix), unless a matching module is
        already loaded, in which case only a --with-X configure option is added.
        May flip self.stagedbuild to True as a side effect.

        Returns a dict with keys 'configopts' (extra configure options string),
        'names' and 'versions' (per-directory name/version mappings).
        """
        if LooseVersion(self.version) >= LooseVersion('4.5'):
            known_stages = ["stage1", "stage2", "stage3"]
            if stage not in known_stages:
                raise EasyBuildError("Incorrect argument for prep_extra_src_dirs, should be one of: %s", known_stages)
            configopts = ''
            if stage == "stage2":
                # no MPFR/MPC needed in stage 2
                extra_src_dirs = ["gmp"]
            else:
                extra_src_dirs = ["gmp", "mpfr", "mpc"]
            # list of the extra dirs that are needed depending on the 'with%s' option
            # the order is important: keep CLooG last!
            self.with_dirs = ["isl", "ppl", "cloog"]
            # add optional ones that were selected (e.g. CLooG, PPL, ...)
            for x in self.with_dirs:
                if self.cfg['with%s' % x]:
                    extra_src_dirs.append(x)
            # see if modules are loaded
            # if module is available, just use the --with-X GCC configure option
            for extra in copy(extra_src_dirs):
                envvar = get_software_root(extra)
                if envvar:
                    configopts += " --with-%s=%s" % (extra, envvar)
                    extra_src_dirs.remove(extra)
                elif extra in self.with_dirs and stage in ["stage1", "stage3"]:
                    # building CLooG or PPL or ISL requires a recent compiler
                    # our best bet is to do a 3-staged build of GCC, and
                    # build CLooG/PPL/ISL with the GCC we're building in stage 2
                    # then (bootstrap) build GCC in stage 3
                    # also, no need to stage cloog/ppl/isl in stage3 (may even cause troubles)
                    self.stagedbuild = True
                    extra_src_dirs.remove(extra)
            # try and find source directories with given prefixes
            # these sources should be included in list of sources in .eb spec file,
            # so EasyBuild can unpack them in the build dir
            found_src_dirs = []
            versions = {}
            names = {}
            all_dirs = os.listdir(self.builddir)
            for d in all_dirs:
                for sd in extra_src_dirs:
                    if d.startswith(sd):
                        found_src_dirs.append({
                            'source_dir': d,
                            'target_dir': sd
                        })
                        # expected format: get_name[-subname]-get_version
                        ds = os.path.basename(d).split('-')
                        name = '-'.join(ds[0:-1])
                        names.update({sd: name})
                        ver = ds[-1]
                        versions.update({sd: ver})
            # we need to find all dirs specified, or else...
            if not len(found_src_dirs) == len(extra_src_dirs):
                raise EasyBuildError("Couldn't find all source dirs %s: found %s from %s",
                                     extra_src_dirs, found_src_dirs, all_dirs)
            # copy to a dir with name as expected by GCC build framework
            for d in found_src_dirs:
                src = os.path.join(self.builddir, d['source_dir'])
                if target_prefix:
                    dst = os.path.join(target_prefix, d['target_dir'])
                else:
                    dst = os.path.join(self.cfg['start_dir'], d['target_dir'])
                if not os.path.exists(dst):
                    try:
                        shutil.copytree(src, dst)
                    except OSError, err:
                        raise EasyBuildError("Failed to copy src %s to dst %s: %s", src, dst, err)
                    self.log.debug("Copied %s to %s, so GCC can build %s" % (src, dst, d['target_dir']))
                else:
                    self.log.debug("No need to copy %s to %s, it's already there." % (src, dst))
        else:
            # in versions prior to GCC v4.5, there's no support for extra source dirs, so return only empty info
            configopts = ''
            names = {}
            versions = {}
        return {
            'configopts': configopts,
            'names': names,
            'versions': versions
        }
def run_configure_cmd(self, cmd):
"""
Run a configure command, with some extra checking (e.g. for unrecognized options).
"""
(out, ec) = run_cmd("%s %s" % (self.cfg['preconfigopts'], cmd), log_all=True, simple=False)
if ec != 0:
raise EasyBuildError("Command '%s' exited with exit code != 0 (%s)", cmd, ec)
# configure scripts tend to simply ignore unrecognized options
# we should be more strict here, because GCC is very much a moving target
unknown_re = re.compile("WARNING: unrecognized options")
unknown_options = unknown_re.findall(out)
if unknown_options:
raise EasyBuildError("Unrecognized options found during configure: %s", unknown_options)
    def configure_step(self):
        """
        Configure for GCC build:
        - prepare extra source dirs (GMP, MPFR, MPC, ...)
        - create obj dir to build in (GCC doesn't like to be built in source dir)
        - add configure and make options, according to .eb spec file
        - decide whether or not to do a staged build (which is required to enable PPL/CLooG support)
        - set platform_lib based on config.guess output
        """
        # self.configopts will be reused in a 3-staged build,
        # configopts is only used in first configure
        self.configopts = self.cfg['configopts']
        # I) prepare extra source dirs, e.g. for GMP, MPFR, MPC (if required), so GCC can build them
        # (this may flip self.stagedbuild to True as a side effect)
        stage1_info = self.prep_extra_src_dirs("stage1")
        configopts = stage1_info['configopts']
        # II) update config options
        # enable specified language support
        if self.cfg['languages']:
            self.configopts += " --enable-languages=%s" % ','.join(self.cfg['languages'])
        # enable link-time-optimization (LTO) support, if desired
        if self.cfg['withlto']:
            self.configopts += " --enable-lto"
        # configure for a release build
        self.configopts += " --enable-checking=release "
        # enable multilib: allow both 32 and 64 bit
        if self.cfg['multilib']:
            glibc_32bit = [
                "glibc.i686",  # Fedora, RedHat-based
                "libc6-dev-i386",  # Debian-based
                "gcc-c++-32bit",  # OpenSuSE, SLES
            ]
            if not any([check_os_dependency(dep) for dep in glibc_32bit]):
                raise EasyBuildError("Using multilib requires 32-bit glibc (install one of %s, depending on your OS)",
                                     ', '.join(glibc_32bit))
            self.configopts += " --enable-multilib --with-multilib-list=m32,m64"
        else:
            self.configopts += " --disable-multilib"
        # build both static and dynamic libraries (???)
        self.configopts += " --enable-shared=yes --enable-static=yes "
        # use POSIX threads
        self.configopts += " --enable-threads=posix "
        # use GOLD as default linker, enable plugin support
        self.configopts += " --enable-gold=default --enable-plugins "
        self.configopts += " --enable-ld --with-plugin-ld=ld.gold"
        # enable bootstrap build for self-containment (unless for staged build)
        if not self.stagedbuild:
            configopts += " --enable-bootstrap"
        else:
            configopts += " --disable-bootstrap"
        if self.stagedbuild:
            #
            # STAGE 1: configure GCC build that will be used to build PPL/CLooG
            #
            self.log.info("Starting with stage 1 of 3-staged build to enable CLooG and/or PPL, ISL support...")
            self.stage1installdir = os.path.join(self.builddir, 'GCC_stage1_eb')
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {'p': self.stage1installdir}
        else:
            # unstaged build, so just run standard configure/make/make install
            # set prefixes
            self.log.info("Performing regular GCC build...")
            configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {'p': self.installdir}
        # III) create obj dir to build in, and change to it
        # GCC doesn't like to be built in the source dir
        if self.stagedbuild:
            self.stage1prefix = self.create_dir("stage1_obj")
        else:
            self.create_dir("obj")
        # IV) actual configure, but not on default path
        cmd = "../configure %s %s" % (self.configopts, configopts)
        # instead of relying on uname, we run the same command GCC uses to
        # determine the platform
        out, ec = run_cmd("../config.guess", simple=False)
        if ec == 0:
            self.platform_lib = out.rstrip()
        self.run_configure_cmd(cmd)
def build_step(self):
if self.stagedbuild:
# make and install stage 1 build of GCC
paracmd = ''
if self.cfg['parallel']:
paracmd = "-j %s" % self.cfg['parallel']
cmd = "%s make %s %s" % (self.cfg['prebuildopts'], paracmd, self.cfg['buildopts'])
run_cmd(cmd, log_all=True, simple=True)
cmd = "make install %s" % (self.cfg['installopts'])
run_cmd(cmd, log_all=True, simple=True)
# register built GCC as compiler to use for stage 2/3
path = "%s/bin:%s" % (self.stage1installdir, os.getenv('PATH'))
env.setvar('PATH', path)
ld_lib_path = "%(dir)s/lib64:%(dir)s/lib:%(val)s" % {
'dir': self.stage1installdir,
'val': os.getenv('LD_LIBRARY_PATH')
}
env.setvar('LD_LIBRARY_PATH', ld_lib_path)
#
# STAGE 2: build GMP/PPL/CLooG for stage 3
#
# create dir to build GMP/PPL/CLooG in
stage2dir = "stage2_stuff"
stage2prefix = self.create_dir(stage2dir)
# prepare directories to build GMP/PPL/CLooG
stage2_info = self.prep_extra_src_dirs("stage2", target_prefix=stage2prefix)
configopts = stage2_info['configopts']
# build PPL and CLooG (GMP as dependency)
for lib in ["gmp"] + self.with_dirs:
self.log.debug("Building %s in stage 2" % lib)
if lib == "gmp" or self.cfg['with%s' % lib]:
libdir = os.path.join(stage2prefix, lib)
try:
os.chdir(libdir)
except OSError, err:
raise EasyBuildError("Failed to change to %s: %s", libdir, err)
if lib == "gmp":
cmd = "./configure --prefix=%s " % stage2prefix
cmd += "--with-pic --disable-shared --enable-cxx"
elif lib == "ppl":
self.pplver = LooseVersion(stage2_info['versions']['ppl'])
cmd = "./configure --prefix=%s --with-pic -disable-shared " % stage2prefix
# only enable C/C++ interfaces (Java interface is sometimes troublesome)
cmd += "--enable-interfaces='c c++' "
# enable watchdog (or not)
if self.pplver <= LooseVersion("0.11"):
if self.cfg['pplwatchdog']:
cmd += "--enable-watchdog "
else:
cmd += "--disable-watchdog "
elif self.cfg['pplwatchdog']:
raise EasyBuildError("Enabling PPL watchdog only supported in PPL <= v0.11 .")
# make sure GMP we just built is found
cmd += "--with-gmp=%s " % stage2prefix
elif lib == "isl":
cmd = "./configure --prefix=%s --with-pic --disable-shared " % stage2prefix
cmd += "--with-gmp=system --with-gmp-prefix=%s " % stage2prefix
elif lib == "cloog":
self.cloogname = stage2_info['names']['cloog']
self.cloogver = LooseVersion(stage2_info['versions']['cloog'])
v0_15 = LooseVersion("0.15")
v0_16 = LooseVersion("0.16")
cmd = "./configure --prefix=%s --with-pic --disable-shared " % stage2prefix
# use ISL or PPL
if self.cfg['clooguseisl']:
if self.cfg['withisl']:
self.log.debug("Using external ISL for CLooG")
cmd += "--with-isl=system --with-isl-prefix=%s " % stage2prefix
elif self.cloogver >= v0_16:
self.log.debug("Using bundled ISL for CLooG")
cmd += "--with-isl=bundled "
else:
raise EasyBuildError("Using ISL is only supported in CLooG >= v0.16 (detected v%s).",
self.cloogver)
else:
if self.cloogname == "cloog-ppl" and self.cloogver >= v0_15 and self.cloogver < v0_16:
cmd += "--with-ppl=%s " % stage2prefix
else:
errormsg = "PPL only supported with CLooG-PPL v0.15.x (detected v%s)" % self.cloogver
errormsg += "\nNeither using PPL or ISL-based ClooG, I'm out of options..."
raise EasyBuildError(errormsg)
# make sure GMP is found
if self.cloogver >= v0_15 and self.cloogver < v0_16:
cmd += "--with-gmp=%s " % stage2prefix
elif self.cloogver >= v0_16:
cmd += "--with-gmp=system --with-gmp-prefix=%s " % stage2prefix
else:
raise EasyBuildError("Don't know how to specify location of GMP to configure of CLooG v%s.",
self.cloogver)
else:
raise EasyBuildError("Don't know how to configure for %s", lib)
# configure
self.run_configure_cmd(cmd)
# build and 'install'
cmd = "make %s" % paracmd
run_cmd(cmd, log_all=True, simple=True)
cmd = "make install"
run_cmd(cmd, log_all=True, simple=True)
if lib == "gmp":
# make sure correct GMP is found
libpath = os.path.join(stage2prefix, 'lib')
incpath = os.path.join(stage2prefix, 'include')
cppflags = os.getenv('CPPFLAGS', '')
env.setvar('CPPFLAGS', "%s -L%s -I%s " % (cppflags, libpath, incpath))
#
# STAGE 3: bootstrap build of final GCC (with PPL/CLooG support)
#
# create new obj dir and change into it
self.create_dir("stage3_obj")
# reconfigure for stage 3 build
self.log.info("Stage 2 of 3-staged build completed, continuing with stage 2 "
"(with CLooG and/or PPL, ISL support enabled)...")
stage3_info = self.prep_extra_src_dirs("stage3")
configopts = stage3_info['configopts']
configopts += " --prefix=%(p)s --with-local-prefix=%(p)s" % {'p': self.installdir}
# enable bootstrapping for self-containment
configopts += " --enable-bootstrap "
# PPL config options
if self.cfg['withppl']:
# for PPL build and CLooG-PPL linking
for lib in ["lib64", "lib"]:
path = os.path.join(self.stage1installdir, lib, "libstdc++.a")
if os.path.exists(path):
libstdcxxpath = path
break
configopts += "--with-host-libstdcxx='-static-libgcc %s -lm' " % libstdcxxpath
configopts += "--with-ppl=%s " % stage2prefix
if self.pplver <= LooseVersion("0.11"):
if self.cfg['pplwatchdog']:
configopts += "--enable-watchdog "
else:
configopts += "--disable-watchdog "
# CLooG config options
if self.cfg['withcloog']:
configopts += "--with-cloog=%s " % stage2prefix
if self.cfg['clooguseisl'] and self.cloogver >= LooseVersion("0.16") and LooseVersion(self.version) < LooseVersion("4.8.0"):
configopts += "--enable-cloog-backend=isl "
if self.cfg['withisl']:
configopts += "--with-isl=%s " % stage2prefix
# configure
cmd = "../configure %s %s" % (self.configopts, configopts)
self.run_configure_cmd(cmd)
# build with bootstrapping for self-containment
self.cfg.update('buildopts', 'bootstrap')
# call standard build_step
super(EB_GCC, self).build_step()
# make install is just standard install_step, nothing special there
def sanity_check_step(self):
"""
Custom sanity check for GCC
"""
os_type = get_os_type()
sharedlib_ext = get_shared_lib_ext()
common_infix = os.path.join('gcc', self.platform_lib, self.version)
bin_files = ["gcov"]
lib_files = []
if LooseVersion(self.version) >= LooseVersion('4.2'):
# libgomp was added in GCC 4.2.0
["libgomp.%s" % sharedlib_ext, "libgomp.a"]
if os_type == 'Linux':
lib_files.extend(["libgcc_s.%s" % sharedlib_ext])
# libmudflap is replaced by asan (see release notes gcc 4.9.0)
if LooseVersion(self.version) < LooseVersion("4.9.0"):
lib_files.extend(["libmudflap.%s" % sharedlib_ext, "libmudflap.a"])
else:
lib_files.extend(["libasan.%s" % sharedlib_ext, "libasan.a"])
libexec_files = []
dirs = ['lib/%s' % common_infix]
if not self.cfg['languages']:
# default languages are c, c++, fortran
bin_files = ["c++", "cpp", "g++", "gcc", "gcov", "gfortran"]
lib_files.extend(["libstdc++.%s" % sharedlib_ext, "libstdc++.a"])
libexec_files = ['cc1', 'cc1plus', 'collect2', 'f951']
if 'c' in self.cfg['languages']:
bin_files.extend(['cpp', 'gcc'])
if 'c++' in self.cfg['languages']:
bin_files.extend(['c++', 'g++'])
dirs.append('include/c++/%s' % self.version)
lib_files.extend(["libstdc++.%s" % sharedlib_ext, "libstdc++.a"])
if 'fortran' in self.cfg['languages']:
bin_files.append('gfortran')
lib_files.extend(['libgfortran.%s' % sharedlib_ext, 'libgfortran.a'])
if self.cfg['withlto']:
libexec_files.extend(['lto1', 'lto-wrapper'])
if os_type in ['Linux']:
libexec_files.append('liblto_plugin.%s' % sharedlib_ext)
bin_files = ["bin/%s" % x for x in bin_files]
libdirs64 = ['lib64']
libdirs32 = ['lib', 'lib32']
libdirs = libdirs64 + libdirs32
if self.cfg['multilib']:
# with multilib enabled, both lib and lib64 should be there
lib_files64 = [os.path.join(libdir, x) for libdir in libdirs64 for x in lib_files]
lib_files32 = [tuple([os.path.join(libdir, x) for libdir in libdirs32]) for x in lib_files]
lib_files = lib_files64 + lib_files32
else:
# lib64 on SuSE and Darwin, lib otherwise
lib_files = [tuple([os.path.join(libdir, x) for libdir in libdirs]) for x in lib_files]
# lib on SuSE, libexec otherwise
libdirs = ['libexec', 'lib']
libexec_files = [tuple([os.path.join(libdir, common_infix, x) for libdir in libdirs]) for x in libexec_files]
custom_paths = {
'files': bin_files + lib_files + libexec_files,
'dirs': dirs,
}
super(EB_GCC, self).sanity_check_step(custom_paths=custom_paths)
def make_module_req_guess(self):
"""
Make sure all GCC libs are in LD_LIBRARY_PATH
"""
guesses = super(EB_GCC, self).make_module_req_guess()
guesses.update({
'PATH': ['bin'],
'LD_LIBRARY_PATH': ['lib', 'lib64',
'lib/gcc/%s/%s' % (self.platform_lib, self.cfg['version'])],
'MANPATH': ['man', 'share/man']
})
return guesses
| {
"content_hash": "32f00800eef81e2c5bb47dd9900f35cb",
"timestamp": "",
"source": "github",
"line_count": 549,
"max_line_length": 140,
"avg_line_length": 44.970856102003644,
"alnum_prop": 0.5289805176394345,
"repo_name": "ULHPC/modules",
"id": "42f14077fe63a47f956c8bad8e1a404b47ca5d60",
"size": "25747",
"binary": false,
"copies": "10",
"ref": "refs/heads/devel",
"path": "easybuild/easybuild-easyblocks/easybuild/easyblocks/g/gcc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groff",
"bytes": "36174"
},
{
"name": "Perl",
"bytes": "34780"
},
{
"name": "Python",
"bytes": "2711250"
},
{
"name": "Ruby",
"bytes": "932"
},
{
"name": "Shell",
"bytes": "51560"
}
],
"symlink_target": ""
} |
'''
This holds onto the UpdateObject class.
It allows us to easily abstract away different update types and provide a uniform
way to interpret the results through a set of basic actionable functions.
'''
from __future__ import print_function
import time
from twisted.internet import defer, reactor
from twisted.python.failure import Failure
from paradrop.base import nexus, settings
from paradrop.base.output import out
from paradrop.core import plan
from paradrop.core.chute.builder import build_chute, rebuild_chute
from paradrop.core.chute.chute import Chute
from paradrop.core.chute.chute_storage import ChuteStorage
from paradrop.core.agent.http import PDServerRequest
from paradrop.core.plan import executionplan
from paradrop.core.plan import hostconfig
from paradrop.core.plan import name
from paradrop.core.plan import resource
from paradrop.core.plan import router
from paradrop.core.plan import runtime
from paradrop.core.plan import snap
from paradrop.core.plan import state
from paradrop.core.plan import struct
from paradrop.core.plan import traffic
# Examples of update objects, first from local installation, second from cloud
# update.
#
# {'web': {'port': 8080}, 'workdir': '/tmp/tmpo67yf7', 'name': 'go-hello-world', 'deferred': <Deferred at 0x7f46dc5fae18>, 'updateType': 'create', 'state': 'running', 'tok': 1524765733, 'updateClass': 'CHUTE', 'version': 'x1524765733', 'change_id': 2}
#
# {u'web': {u'port': u'8080'}, u'name': u'go-hello-world', 'deferred': <Deferred at 0x7f46dc5adfc8>, 'updateType': u'update', 'state': 'running', 'tok': 1524765792, 'updateClass': u'CHUTE', u'version': 1, 'external': {'chute_id': u'5ae20aa29ca0e4049ca2fff3', 'update_id': u'5ae2145f9ca0e4049ca3017d', 'version_id': u'5ae20ab59ca0e4049ca2fff8'}, 'change_id': 3, u'download': {u'url': u'https://github.com/ParadropLabs/go-hello-world', u'checkout': u'master'}}
class UpdateObject(object):
    """
    The base UpdateObject class, covers a few basic methods but otherwise all
    the intelligence exists in the inherited classes.

    All update information passed by the API server is contained as variables
    of this class such as update.updateType, update.updateClass, etc...

    By default, the following variables should be utilized:
        responses : an array of messages any module can choose to append
                    warnings or errors to.
        failure   : the module that chose to fail this update can set a string
                    message to return to the user in the failure variable. It
                    should be very clear as to why the failure occurred, but
                    if the user wants more information they may find it in the
                    responses variable which may contain debug information,
                    etc...
    """
    # Plan modules consulted during execution planning; overridden by
    # subclasses (see UpdateChute, UpdateRouter, UpdateSnap below).
    updateModuleList = []
    def __init__(self, obj):
        self.change_id = None
        self.pkg = None
        # Pull in all the keys from the obj identified
        self.__dict__.update(obj)
        # Any module can add notes and warnings here
        self.responses = []
        # In case of a failure, the final message about failure goes here
        self.failure = None
        # Each update gets its own plan map
        self.plans = plan.plangraph.PlanMap(self.name)
        # Grab a reference to our storage system
        self.chuteStor = ChuteStorage()
        # Build new Chute object.
        self.new = build_chute(obj)
        # Grab the old version if it exists
        self.old = self.chuteStor.getChute(self.name)
        # Save a timestamp from when the update object was created.
        self.createdTime = time.time()
        # Set to True if this update is delegated to an external program (e.g.
        # pdinstall). In that case, the external program will be responsible
        # for reporting on the completion status of the update.
        self.delegated = False
        # Store progress messages so that they can be retrieved by API.
        self.messages = []
        self.message_observers = []
        self.completed = False
        # Cache for passing intermediate values between plan functions.
        # Previously, this was all done in the chute object, but the
        # functionality extends to other operations such as a node
        # configuration change.
        self.cache = {}
        # Set by the execute function on the first call and used to detect
        # whether its new or has been resumed.
        self.execute_called = False
    def __repr__(self):
        return "<Update({}) :: {} - {} @ {}>".format(self.updateClass, self.name, self.updateType, self.tok)
    def __str__(self):
        return "<Update({}) :: {}>".format(self.updateClass, self.name)
    def started(self):
        """
        This function should be called when the updated object is dequeued and
        execution is about to begin.

        Sends a notification to the pdserver if this is a tracked update.
        """
        # TODO Look into this.
        # This might happen during router initialization. If nexus.core is
        # None, we do not know the router's identity, so we cannot publish any
        # messages.
        if nexus.core is None:
            return
        # The external field is set for updates from pdserver but not for
        # locally-initiated (sideloaded) updates.
        if not self.execute_called and hasattr(self, 'external'):
            update_id = self.external['update_id']
            request = PDServerRequest('/api/routers/{router_id}/updates/' + str(update_id))
            request.patch({'op': 'replace', 'path': '/started', 'value': True})
    def progress(self, message):
        """
        Record a progress message and fan it out to the local API client,
        the pdserver (when this is a tracked update), the WAMP session, and
        any registered message observers.
        """
        # Echo the message to the local API client, if one is attached.
        if self.pkg is not None:
            self.pkg.request.write(message + '\n')
        # TODO Look into this.
        # This might happen during router initialization. If nexus.core is
        # None, we do not know the router's identity, so we cannot publish any
        # messages.
        if nexus.core is None:
            return
        data = {
            'time': time.time(),
            'message': message
        }
        def handleError(error):
            print("Error sending message: {}".format(error.getErrorMessage()))
        # The external field is set for updates from pdserver but not for
        # locally-initiated (sideloaded) updates.
        update_id = None
        if hasattr(self, 'external'):
            update_id = self.external['update_id']
            request = PDServerRequest('/api/routers/{}/updates/{}/messages'
                                      .format(nexus.core.info.pdid, update_id))
            d = request.post(**data)
            d.addErrback(handleError)
        session = getattr(nexus.core, 'session', None)
        if session is not None:
            data['update_id'] = update_id
            # Catch the occasional Exception due to connectivity failure. We
            # don't want to fail a chute installation just because we had problems
            # sending the log messages.
            try:
                session.publish(session.uriPrefix + 'updateProgress', data)
            except Exception as error:
                out.warn("Publish failed: {} {}".format(error.__class__, error))
        # Send messages to internal consumers (e.g. open websocket connections)
        self.messages.append(data)
        for observer in self.message_observers:
            observer.on_message(data)
    def complete(self, **kwargs):
        """
        Signal to the API server that any action we need to perform is
        complete and the API server can finish its connection with the
        client that initiated the API request.
        """
        # Save a timestamp from when we finished execution.
        self.endTime = time.time()
        if(settings.DEBUG_MODE):
            kwargs['responses'] = self.responses
        # Set our results
        self.result = kwargs
        # Detach the Deferred before firing so a re-entrant call cannot fire
        # it twice through self.deferred.
        d = None
        if hasattr(self, 'deferred'):
            d = self.deferred
            self.deferred = None
        try:
            message = "Completed {} operation on chute {}: {}".format(
                self.updateType, self.new.name,
                "success" if kwargs['success'] else "failure")
            out.usage(message, chute=self.new.name, updateType=self.updateType,
                      createdTime=self.createdTime, startTime=self.startTime,
                      endTime=self.endTime, **kwargs)
        except Exception as e:
            out.exception(e, True)
            if d:
                reactor.callFromThread(d.errback, Failure(e))
        # NOTE(review): if the try block above raised before `message` was
        # bound, the dict below raises NameError; and when d.errback has been
        # fired above, the d.callback at the end of this method fires the same
        # Deferred a second time.  Confirm whether this path is reachable.
        # Last message to send to observers.
        msg = {
            'time': self.endTime,
            'message': message
        }
        self.messages.append(msg)
        # Mark the update as complete and notify any observers. Observers
        # should call remove_message_observer in their on_complete handler.
        self.completed = True
        for observer in self.message_observers:
            observer.on_message(msg)
            observer.on_complete()
        if 'message' in kwargs:
            self.progress(kwargs['message'])
        if d:
            reactor.callFromThread(d.callback, self)
    def execute(self):
        """
        The function that actually walks through the main process required to create the chute.
        It follows the executeplan module through the paces of:
        1) Generate the plans for each plan module
        2) Prioritize the plans
        3) Execute the plans

        If at any point we fail then this function will directly take care of completing
        the update process with an error state and will close the API connection.
        """
        if not self.execute_called:
            # Save a timestamp from when we started execution.
            self.startTime = time.time()
            # Generate the plans we need to setup the chute
            if(executionplan.generatePlans(self)):
                out.warn('Failed to generate plans\n')
                self.complete(success=False, message=self.failure)
                return
            # Aggregate those plans
            executionplan.aggregatePlans(self)
            # Planning is done exactly once; see execute_called in __init__.
            self.execute_called = True
        # Execute on those plans
        exec_result = executionplan.executePlans(self)
        if isinstance(exec_result, defer.Deferred):
            # Execution is paused; presumably the caller resumes this update
            # when the Deferred fires (see execute_called) -- TODO confirm.
            return exec_result
        elif exec_result is True:
            # Getting here means we need to abort what we did
            res = executionplan.abortPlans(self)
            # Did aborting also fail? This is bad!
            if(res):
                ###################################################################################
                # Getting here means the abort system thinks it wasn't able to get the system
                # back into the state it was in prior to this update.
                ###################################################################################
                out.err('TODO: What do we do when we fail during abort?\n')
                pass
            # Report the failure back to the user
            self.complete(success=False, message=self.failure)
            return
        # Respond to the API server to let them know the result
        self.complete(success=True, message='Chute {} {} success'.format(
            self.name, self.updateType))
    def add_message_observer(self, observer):
        # Replay message history so a late subscriber sees everything.
        for msg in self.messages:
            observer.on_message(msg)
        self.message_observers.append(observer)
        # If the update is already complete, send the complete event. Other
        # observers would have already received this.
        if self.completed:
            observer.on_complete()
    def remove_message_observer(self, observer):
        # Raises ValueError if the observer was never registered.
        self.message_observers.remove(observer)
    def has_chute_build(self):
        """
        Check whether this update involves building a chute.
        """
        return False
    def cache_get(self, key, default=None):
        """
        Get a value from the cache or the default value if it does not exist.
        """
        return self.cache.get(key, default)
    def cache_set(self, key, value):
        """
        Set a value in the cache.
        """
        self.cache[key] = value
# This gives the new chute state if an update of a given type succeeds.
# Update types not listed here fall back to Chute.STATE_INVALID (see
# UpdateChute.__init__).
NEW_CHUTE_STATE = {
    'create': Chute.STATE_RUNNING,
    'update': Chute.STATE_RUNNING,
    'start': Chute.STATE_RUNNING,
    'restart': Chute.STATE_RUNNING,
    'delete': Chute.STATE_STOPPED,
    'stop': Chute.STATE_STOPPED
}
class UpdateChute(UpdateObject):
    """
    Update operations that act on a chute: create, update, start, stop, etc.
    """
    # Plan modules consulted, in this order, during execution planning.
    updateModuleList = [
        name,
        state,
        struct,
        resource,
        traffic,
        runtime
    ]

    def __init__(self, obj, reuse_existing=False):
        """
        Create an update object that affects a chute.

        Args:
            obj (dict): update specification.
            reuse_existing (bool): use the existing Chute object from storage,
                e.g. when restarting a chute without changes.
        """
        update_type = obj.get('updateType', None)
        # TODO: Remove this if unused. It is not the update that has a running
        # state but rather the chute.
        obj['state'] = NEW_CHUTE_STATE.get(update_type, Chute.STATE_INVALID)
        super(UpdateChute, self).__init__(obj)
        if reuse_existing:
            # Restart-without-changes: keep the stored chute object as-is.
            self.new = self.old
        elif self.old is not None:
            # Going from one version of a chute to another: fill in any
            # missing values in the new chute using the old chute's spec.
            previous_spec = self.old.create_specification()
            self.new = rebuild_chute(previous_spec, obj)
            self.new.state = NEW_CHUTE_STATE.get(update_type, Chute.STATE_INVALID)

    def has_chute_build(self):
        """Return True when this update type requires building the chute."""
        return self.updateType in ["create", "update"]
class UpdateRouter(UpdateObject):
    """
    Updates specifically tailored to router (host) configuration changes.
    """
    # List of all modules that need to be called during execution planning
    #
    # All of the modules listed in UpdateChute perform an extensive series of
    # steps that are largely irrelevant for host config updates. Therefore, we
    # use a different module here.
    updateModuleList = [
        hostconfig,
        router
    ]
class UpdateSnap(UpdateObject):
    """
    Updates specifically tailored to installing snaps.
    """
    # List of all modules that need to be called during execution planning
    #
    # All of the modules listed in UpdateChute perform an extensive series of
    # steps that are largely irrelevant for host config updates. Therefore, we
    # use a different module here.
    updateModuleList = [
        snap
    ]
###################################################################################################
# Module functions and variables
###################################################################################################
# Maps the 'updateClass' field of an update specification to the class that
# handles it; consumed by parse() below.
UPDATE_CLASSES = {
    "CHUTE": UpdateChute,
    "ROUTER": UpdateRouter,
    "SNAP": UpdateSnap
}
def parse(obj):
    """
    Determine the update type from obj's 'updateClass' field and return an
    instance of the matching UpdateObject subclass.

    Raises an Exception when the updateClass value is not recognized.
    """
    update_class = UPDATE_CLASSES.get(obj.get('updateClass', None), None)
    if update_class is None:
        raise Exception('BadUpdateType', 'updateClass is invalid, must be one of: %s' % ", ".join(UPDATE_CLASSES))
    return update_class(obj)
| {
"content_hash": "8c85c5e54241f5dbb1fb0035dd6da6bc",
"timestamp": "",
"source": "github",
"line_count": 419,
"max_line_length": 458,
"avg_line_length": 37.3054892601432,
"alnum_prop": 0.6133964557609878,
"repo_name": "ParadropLabs/Paradrop",
"id": "ed70642c75c6b2d7b8fd35b13e387f93e194891c",
"size": "15631",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "paradrop/daemon/paradrop/core/update/update_object.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "148071"
},
{
"name": "Dockerfile",
"bytes": "10449"
},
{
"name": "HTML",
"bytes": "554"
},
{
"name": "Makefile",
"bytes": "1665"
},
{
"name": "Python",
"bytes": "1049444"
},
{
"name": "Shell",
"bytes": "9897"
}
],
"symlink_target": ""
} |
class Table(object):
    """An ordered collection of columns; tables concatenate with ``+``."""

    def __init__(self, columns=()):
        self.columns = columns

    def __add__(self, other):
        # Concatenating the column sequences is the monoidal operation;
        # slicing keeps the result the same sequence type as the inputs.
        combined = self.columns[:] + other.columns[:]
        return Table(combined)
class Column(object):
    """A single table column: a header string and a cell-rendering callable."""

    def __init__(self, header, cell):
        self.header = header
        self.cell = cell
        # Free-form rendering attributes, filled in later (see set_attr).
        self.attrs = {}

    def max_width(self, data):
        """Return the widest rendered width among the header and all cells."""
        widths = [len(self.cell(item)) for item in data]
        # The header always participates, so an empty data set yields its width.
        widths.append(len(self.header))
        return max(widths)
def set_attr(table, key, value):
    """
    Set an attribute on all columns of a table.

    :param table: table whose columns should carry the attribute
    :param key: name of the attribute
    :param value: value of the attribute
    :returns: new table; the input table's columns are left unmodified
    """
    import copy

    def _with_attr(column):
        # Bug fix: the previous implementation mutated the shared Column
        # objects' attrs dicts in place, so the "new" table aliased and
        # modified the input table.  Shallow-copy each column and give it
        # its own attrs dict so the function really returns a new table.
        new_column = copy.copy(column)
        new_column.attrs = dict(column.attrs)
        new_column.attrs[key] = value
        return new_column

    return Table(tuple(_with_attr(col) for col in table.columns))
| {
"content_hash": "922488b83342d28cef0ce08d9cf7bcd6",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 76,
"avg_line_length": 25.636363636363637,
"alnum_prop": 0.5709219858156028,
"repo_name": "lubomir/monoidal-tables",
"id": "245cacd8aca7e618c421d7c5608cb148681f082a",
"size": "872",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "monoidal_tables/types.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7778"
}
],
"symlink_target": ""
} |
import math
import types
import string
from panda3d.core import *
from DirectUtil import *
from direct.showbase.DirectObject import DirectObject
from direct.task import Task
from DirectGlobals import DIRECT_NO_MOD
from DirectCameraControl import DirectCameraControl
from DirectManipulation import DirectManipulationControl
from DirectSelection import SelectionRay, COA_ORIGIN, SelectedNodePaths
from DirectGrid import DirectGrid
#from DirectGeometry import *
from DirectLights import DirectLights
from direct.cluster.ClusterClient import createClusterClient, DummyClusterClient
from direct.cluster.ClusterServer import ClusterServer
## from direct.tkpanels import Placer
## from direct.tkwidgets import Slider
## from direct.tkwidgets import SceneGraphExplorer
from direct.gui import OnscreenText
from direct.showbase import Loader
from direct.interval.IntervalGlobal import *
class DirectSession(DirectObject):
# post this to the bboard to make sure DIRECT doesn't turn on
DIRECTdisablePost = 'disableDIRECT'
    def __init__(self):
        """
        Build the DIRECT session: global hooks, display regions, camera and
        manipulation controls, on-screen readouts, optional VRPN input
        devices, the action/modifier/key/mouse event tables, the optional Tk
        panel, and the cluster client/server.
        """
        # Establish a global pointer to the direct object early on
        # so dependant classes can access it in their code
        __builtins__["direct"] = base.direct = self
        # These come early since they are used later on
        self.group = render.attachNewNode('DIRECT')
        self.font = TextNode.getDefaultFont()
        self.fEnabled = 0
        self.fEnabledLight = 0
        self.fScaleWidgetByCam = 0 # [gjeon] flag for scaling widget by distance from the camera
        self.fIgnoreDirectOnlyKeyMap = 0 # [gjeon] to skip old direct controls in new LE
        self.drList = DisplayRegionList()
        self.iRayList = [x.iRay for x in self.drList]
        self.dr = self.drList[0]
        self.win = base.win
        self.camera = base.camera
        self.cam = base.cam
        self.camNode = base.camNode
        self.trueCamera = self.camera
        self.iRay = self.dr.iRay
        self.coaMode = COA_ORIGIN
        self.cameraControl = DirectCameraControl()
        self.manipulationControl = DirectManipulationControl()
        self.useObjectHandles()
        self.grid = DirectGrid()
        self.grid.disable()
        self.lights = DirectLights(base.direct.group)
        # Create some default lights
        self.lights.createDefaultLights()
        # But turn them off
        self.lights.allOff()
        # Initialize the collection of selected nodePaths
        self.selected = SelectedNodePaths()
        # Ancestry of currently selected object
        self.ancestry = []
        self.ancestryIndex = 0
        self.activeParent = None
        # On-screen readout showing the currently selected node path.
        self.selectedNPReadout = OnscreenText.OnscreenText(
            pos = (-1.0, -0.9), bg=Vec4(1, 1, 1, 1),
            scale = 0.05, align = TextNode.ALeft,
            mayChange = 1, font = self.font)
        # Make sure readout is never lit or drawn in wireframe
        useDirectRenderStyle(self.selectedNPReadout)
        self.selectedNPReadout.reparentTo(hidden)
        # On-screen readout showing the current reparent target.
        self.activeParentReadout = OnscreenText.OnscreenText(
            pos = (-1.0, -0.975), bg=Vec4(1, 1, 1, 1),
            scale = 0.05, align = TextNode.ALeft,
            mayChange = 1, font = self.font)
        # Make sure readout is never lit or drawn in wireframe
        useDirectRenderStyle(self.activeParentReadout)
        self.activeParentReadout.reparentTo(hidden)
        # On-screen readout for transient DIRECT status messages.
        self.directMessageReadout = OnscreenText.OnscreenText(
            pos = (-1.0, 0.9), bg=Vec4(1, 1, 1, 1),
            scale = 0.05, align = TextNode.ALeft,
            mayChange = 1, font = self.font)
        # Make sure readout is never lit or drawn in wireframe
        useDirectRenderStyle(self.directMessageReadout)
        self.directMessageReadout.reparentTo(hidden)
        # Create a vrpn client vrpn-server or default
        self.deviceManager = None
        self.joybox = None
        self.radamec = None
        self.fastrak = []
        if base.config.GetBool('want-vrpn', 0):
            from direct.directdevices import DirectDeviceManager
            self.deviceManager = DirectDeviceManager.DirectDeviceManager()
            # Automatically create any devices specified in config file
            joybox = base.config.GetString('vrpn-joybox-device', '')
            radamec = base.config.GetString('vrpn-radamec-device', '')
            fastrak = base.config.GetString('vrpn-fastrak-device', '')
            if joybox:
                from direct.directdevices import DirectJoybox
                self.joybox = DirectJoybox.DirectJoybox(joybox)
            if radamec:
                from direct.directdevices import DirectRadamec
                self.radamec = DirectRadamec.DirectRadamec(radamec)
            if fastrak:
                from direct.directdevices import DirectFastrak
                # parse string into format device:N where N is the sensor name
                # NOTE(review): string.split() exists only on Python 2; on
                # Python 3 this must become fastrak.split().
                fastrak = string.split(fastrak)
                for i in range(len(fastrak))[1:]:
                    self.fastrak.append(DirectFastrak.DirectFastrak(fastrak[0] + ':' + fastrak[i]))
        # Modifier-key and mouse-button state flags.
        self.fControl = 0
        self.fAlt = 0
        self.fShift = 0
        self.fMouse1 = 0 # [gjeon] to update alt key information while mouse1 is pressed
        self.fMouse2 = 0
        self.fMouse3 = 0
        # Scratch transform values and last intersection point.
        self.pos = VBase3()
        self.hpr = VBase3()
        self.scale = VBase3()
        self.hitPt = Point3(0.0)
        # Lists for managing undo/redo operations
        self.undoList = []
        self.redoList = []
        # One run through the context task to init everything
        self.drList.updateContext()
        for dr in self.drList:
            dr.camUpdate()
        # Event table of [eventName, handler, *extraArgs] entries; hooked up
        # by enableActionEvents and torn down by disableActionEvents.
        self.actionEvents = [
            ['select', self.select],
            ['DIRECT-select', self.selectCB],
            ['deselect', self.deselect],
            ['deselectAll', self.deselectAll],
            ['DIRECT-preDeselectAll', self.deselectAllCB],
            ['highlightAll', self.selected.highlightAll],
            ['preRemoveNodePath', self.deselect],
            # Scene graph explorer functions
            ['SGE_Select', self.select],
            ['SGE_Deselect', self.deselect],
            ['SGE_Set Reparent Target', self.setActiveParent],
            ['SGE_Reparent', self.reparent],
            ['SGE_WRT Reparent', lambda np, s=self: s.reparent(np, fWrt = 1)],
            ['SGE_Flash', self.flash],
            ['SGE_Isolate', self.isolate],
            ['SGE_Toggle Vis', self.toggleVis],
            ['SGE_Show All', self.showAllDescendants],
            ['SGE_Fit', self.fitOnNodePath],
            ['SGE_Delete', self.removeNodePath],
            ['SGE_Set Name', self.getAndSetName],
            ['DIRECT-delete', self.removeAllSelected],
            ['DIRECT-Undo', self.undo],
            ['DIRECT-Redo', self.redo],
            ['DIRECT-OOBE', self.oobe],
            ['DIRECT-toggleWidgetVis', self.toggleWidgetVis],
            ['DIRECT-toggleWireframe', base.toggleWireframe],
            ['DIRECT-toggleVisAll', self.selected.toggleVisAll],
            ['DIRECT-toggleTexture', base.toggleTexture],
            ['DIRECT-upAncestry', self.upAncestry],
            ['DIRECT-downAncestry', self.downAncestry],
            ['DIRECT-toggleBackface', base.toggleBackface],
            ['DIRECT-flash', self.flash],
            # NOTE(review): "Ligths" typo is load-bearing; hotKeyMap's 'l'
            # entry below emits the same misspelled event name.
            ['DIRECT-toggleLigths', self.lights.toggle],
            ['DIRECT-toggleCOALock', self.cameraControl.toggleCOALock],
            ['DIRECT-setActiveParent', self.doSetActiveParent],
            ['DIRECT-doWrtReparent', self.doWrtReparent],
            ['DIRECT-doReparent', self.doReparent],
            ['DIRECT-doSelect', self.doSelect],
            ]
        if base.wantTk:
            from direct.tkpanels import Placer
            from direct.tkwidgets import Slider
            from direct.tkwidgets import SceneGraphExplorer
            self.actionEvents.extend([
                ['SGE_Place', Placer.place],
                ['SGE_Set Color', Slider.rgbPanel],
                ['SGE_Explore', SceneGraphExplorer.explore],])
        self.modifierEvents = ['control', 'control-up', 'control-repeat',
                               'shift', 'shift-up', 'shift-repeat',
                               'alt', 'alt-up', 'alt-repeat',
                               ]
        # a-z, 0-9, punctuation, plus their control- and shift- chords.
        keyList = [chr(i) for i in range(97, 123)]
        keyList.extend([chr(i) for i in range(48, 58)])
        keyList.extend(["`", "-", "=", "[", "]", ";", "'", ",", ".", "/", "\\"])
        self.specialKeys = ['escape', 'delete', 'page_up', 'page_down', 'enter']
        def addCtrl(a):
            return "control-%s"%a
        def addShift(a):
            return "shift-%s"%a
        self.keyEvents = keyList[:]
        self.keyEvents.extend(list(map(addCtrl, keyList)))
        self.keyEvents.extend(list(map(addShift, keyList)))
        self.keyEvents.extend(self.specialKeys)
        self.mouseEvents = ['mouse1', 'mouse1-up',
                            'shift-mouse1', 'shift-mouse1-up',
                            'control-mouse1', 'control-mouse1-up',
                            'alt-mouse1', 'alt-mouse1-up',
                            'mouse2', 'mouse2-up',
                            'shift-mouse2', 'shift-mouse2-up',
                            'control-mouse2', 'control-mouse2-up',
                            'alt-mouse2', 'alt-mouse2-up',
                            'mouse3', 'mouse3-up',
                            'shift-mouse3', 'shift-mouse3-up',
                            'control-mouse3', 'control-mouse3-up',
                            'alt-mouse3', 'alt-mouse3-up',
                            ]
        # Key bindings for classic DIRECT only; presumably skipped when
        # fIgnoreDirectOnlyKeyMap is set (confirm in inputHandler).
        self.directOnlyKeyMap = {
            'u': ('Orbit Upright Camera', 'DIRECT-orbitUprightCam'),
            'shift-u': ('Upright Camera', 'DIRECT-uprightCam'),
            '1': ('Move Camera to View 1', 'DIRECT-spwanMoveToView-1'),
            '2': ('Move Camera to View 2', 'DIRECT-spwanMoveToView-2'),
            '3': ('Move Camera to View 3', 'DIRECT-spwanMoveToView-3'),
            '4': ('Move Camera to View 4', 'DIRECT-spwanMoveToView-4'),
            '5': ('Move Camera to View 5', 'DIRECT-spwanMoveToView-5'),
            '6': ('Move Camera to View 6', 'DIRECT-spwanMoveToView-6'),
            '7': ('Move Camera to View 7', 'DIRECT-spwanMoveToView-7'),
            '8': ('Move Camera to View 8', 'DIRECT-spwanMoveToView-8'),
            '9': ('Rotate Camera About widget 90 degrees Counterclockwise', 'DIRECT-swingCamAboutWidget-0'),
            '0': ('Rotate Camera About widget 90 degrees Clockwise', 'DIRECT-swingCamAboutWidget-1'),
            '`': ('Remove ManipulateCameraTask', 'DIRECT-removeManipulateCameraTask'),
            '=': ('Zoom In', 'DIRECT-zoomInCam'),
            'shift-=': ('Zoom In', 'DIRECT-zoomInCam'),
            'shift-_': ('Zoom Out', 'DIRECT-zoomOutCam'),
            '-': ('Zoom Out', 'DIRECT-zoomOutCam'),
            'o': ('Toggle OOBE', 'DIRECT-OOBE'),
            '[': ('DIRECT-Undo', 'DIRECT-Undo'),
            'shift-[': ('DIRECT-Undo', 'DIRECT-Undo'),
            ']': ('DIRECT-Redo', 'DIRECT-Redo'),
            'shift-]': ('DIRECT-Redo', 'DIRECT-Redo'),
            }
        # Hot keys active whenever DIRECT is enabled: (description, event).
        self.hotKeyMap = {
            'c': ('Center Camera', 'DIRECT-centerCamIn'),
            'f': ('Fit on Widget', 'DIRECT-fitOnWidget'),
            'h': ('Move Camera to ', 'DIRECT-homeCam'),
            'shift-v': ('Toggle Marker', 'DIRECT-toggleMarkerVis'),
            'm': ('Move to fit', 'DIRECT-moveToFit'),
            'n': ('Pick Next COA', 'DIRECT-pickNextCOA'),
            'delete': ('Delete', 'DIRECT-delete'),
            '.': ('Scale Up Widget', 'DIRECT-widgetScaleUp'),
            ',': ('Scale Down Widget', 'DIRECT-widgetScaleDown'),
            'page_up': ('Up Ancestry', 'DIRECT-upAncestry'),
            'page_down': ('Down Ancestry', 'DIRECT-downAncestry'),
            'escape': ('Deselect All', 'deselectAll'),
            'v': ('Toggle Manipulating Widget', 'DIRECT-toggleWidgetVis'),
            'b': ('Toggle Backface', 'DIRECT-toggleBackface'),
            'control-f': ('Flash', 'DIRECT-flash'),
            'l': ('Toggle lights', 'DIRECT-toggleLigths'),
            'shift-l': ('Toggle COA Lock', 'DIRECT-toggleCOALock'),
            'p': ('Set Active Parent', 'DIRECT-setActiveParent'),
            'r': ('Wrt Reparent', 'DIRECT-doWrtReparent'),
            'shift-r': ('Reparent', 'DIRECT-doReparent'),
            's': ('Select', 'DIRECT-doSelect'),
            't': ('Toggle Textures', 'DIRECT-toggleTexture'),
            'shift-a': ('Toggle Vis all', 'DIRECT-toggleVisAll'),
            'w': ('Toggle Wireframe', 'DIRECT-toggleWireframe'),
            'control-z': ('Undo', 'LE-Undo'),
            'shift-z' : ('Redo', 'LE-Redo'),
            'control-d': ('Duplicate', 'LE-Duplicate'),
            'control-l': ('Make Live', 'LE-MakeLive'),
            'control-n': ('New Scene', 'LE-NewScene'),
            'control-s': ('Save Scene', 'LE-SaveScene'),
            'control-o': ('Open Scene', 'LE-OpenScene'),
            'control-q': ('Quit', 'LE-Quit'),
            }
        # NOTE(review): attribute name is misspelled ("speical"); kept as-is
        # for compatibility with any existing callers.
        self.speicalKeyMap = {
            'enter': 'DIRECT-enter',
        }
        self.passThroughKeys = ['v','b','l','p', 'r', 'shift-r', 's', 't','shift-a', 'w']
        if base.wantTk:
            from direct.showbase import TkGlobal
            from direct.tkpanels import DirectSessionPanel
            self.panel = DirectSessionPanel.DirectSessionPanel(parent = tkroot)
        try:
            # Has the clusterMode been set externally (i.e. via the
            # bootstrap application?
            self.clusterMode = clusterMode
        except NameError:
            # Has the clusterMode been set via a config variable?
            self.clusterMode = base.config.GetString("cluster-mode", '')
        if self.clusterMode == 'client':
            self.cluster = createClusterClient()
        elif self.clusterMode == 'server':
            self.cluster = ClusterServer(base.camera, base.cam)
        else:
            self.cluster = DummyClusterClient()
        __builtins__['cluster'] = self.cluster
    def addPassThroughKey(self,key):
        # Register an extra pass-through key; presumably consulted by
        # inputHandler like the defaults set in __init__ -- TODO confirm.
        self.passThroughKeys.append(key)
    def enable(self):
        """
        Turn the DIRECT session on: context tasks, camera fly, object
        manipulation, and event hooks.  No-op when already enabled or when
        DIRECTdisablePost has been posted to the bboard.  In "light" mode
        (fEnabledLight) camera fly, manipulation, and key events are skipped.
        """
        # don't enable DIRECT if someone has posted DIRECTdisablePost
        if bboard.has(DirectSession.DIRECTdisablePost):
            return
        if self.fEnabled:
            return
        # Make sure old tasks are shut down
        self.disable()
        # Start all display region context tasks
        self.drList.spawnContextTask()
        if not self.fEnabledLight:
            # Turn on mouse Flying
            self.cameraControl.enableMouseFly()
            # Turn on object manipulation
            self.manipulationControl.enableManipulation()
        # Make sure list of selected items is reset
        self.selected.reset()
        # Accept appropriate hooks
        if not self.fEnabledLight:
            self.enableKeyEvents()
        self.enableMouseEvents()
        self.enableActionEvents()
        self.enableModifierEvents()
        # Set flag
        self.fEnabled = 1
    def enableLight(self):
        # Lightweight enable: enable() skips mouse fly, manipulation, and key
        # events when fEnabledLight is set.
        self.fEnabledLight = 1
        self.enable()
    def disable(self):
        """
        Turn the DIRECT session off: stop context tasks, camera fly and
        manipulation, deselect everything, and unhook all events and tasks.
        """
        # Shut down all display region context tasks
        self.drList.removeContextTask()
        # Turn off camera fly
        self.cameraControl.disableMouseFly()
        # Turn off object manipulation
        self.deselectAll()
        self.manipulationControl.disableManipulation()
        self.disableKeyEvents()
        self.disableModifierEvents()
        self.disableMouseEvents()
        self.disableActionEvents()
        # Kill tasks
        taskMgr.remove('flashNodePath')
        taskMgr.remove('hideDirectMessage')
        taskMgr.remove('hideDirectMessageLater')
        # Set flag
        self.fEnabled = 0
def toggleDirect(self):
if self.fEnabled:
self.disable()
else:
self.enable()
    def minimumConfiguration(self):
        """
        Put DIRECT into a minimal state: no context task, no camera fly, no
        key or action events -- but mouse and modifier events keep flowing.
        """
        # Remove context task
        self.drList.removeContextTask()
        # Turn off camera fly
        self.cameraControl.disableMouseFly()
        # Ignore keyboard and action events
        self.disableKeyEvents()
        self.disableActionEvents()
        # But let mouse events pass through
        self.enableMouseEvents()
        self.enableModifierEvents()
    def oobe(self):
        """
        Toggle OOBE ("out of body experience") mode: detach the view from the
        true camera so the scene (and a camera marker) can be observed from a
        third-person vantage point.  Lerps the view out on one call and back
        in on the next; beginOOBE/endOOBE finish each transition.
        """
        # If oobeMode was never set, set it to false and create the
        # structures we need to implement OOBE.
        try:
            self.oobeMode
        # NOTE(review): bare except also swallows KeyboardInterrupt;
        # AttributeError is the intended case here.
        except:
            self.oobeMode = 0
            self.oobeCamera = hidden.attachNewNode('oobeCamera')
            self.oobeVis = loader.loadModel('models/misc/camera')
            if self.oobeVis:
                self.oobeVis.node().setFinal(1)
        if self.oobeMode:
            # Position a target point to lerp the oobe camera to
            base.direct.cameraControl.camManipRef.setPosHpr(self.trueCamera, 0, 0, 0, 0, 0, 0)
            ival = self.oobeCamera.posHprInterval(
                2.0, pos = Point3(0), hpr = Vec3(0),
                other = base.direct.cameraControl.camManipRef,
                blendType = 'easeInOut')
            ival = Sequence(ival, Func(self.endOOBE), name = 'oobeTransition')
            ival.start()
        else:
            # Place camera marker at true camera location
            self.oobeVis.reparentTo(self.trueCamera)
            # Remove any transformation on the models arc
            self.oobeVis.clearMat()
            # Make oobeCamera be a sibling of wherever camera is now.
            cameraParent = self.camera.getParent()
            # Prepare oobe camera
            self.oobeCamera.reparentTo(cameraParent)
            self.oobeCamera.setPosHpr(self.trueCamera, 0, 0, 0, 0, 0, 0)
            # Put camera under new oobe camera
            self.cam.reparentTo(self.oobeCamera)
            # Position a target point to lerp the oobe camera to
            base.direct.cameraControl.camManipRef.setPos(
                self.trueCamera, Vec3(-2, -20, 5))
            base.direct.cameraControl.camManipRef.lookAt(self.trueCamera)
            ival = self.oobeCamera.posHprInterval(
                2.0, pos = Point3(0), hpr = Vec3(0),
                other = base.direct.cameraControl.camManipRef,
                blendType = 'easeInOut')
            ival = Sequence(ival, Func(self.beginOOBE), name = 'oobeTransition')
            ival.start()
    def beginOOBE(self):
        """Finish the transition into OOBE mode; run at the interval's end."""
        # Make sure we've reached our final destination
        self.oobeCamera.setPosHpr(base.direct.cameraControl.camManipRef, 0, 0, 0, 0, 0, 0)
        base.direct.camera = self.oobeCamera
        self.oobeMode = 1
    def endOOBE(self):
        """Finish the transition out of OOBE mode; run at the interval's end."""
        # Make sure we've reached our final destination
        self.oobeCamera.setPosHpr(self.trueCamera, 0, 0, 0, 0, 0, 0)
        # Disable OOBE mode.
        self.cam.reparentTo(self.trueCamera)
        base.direct.camera = self.trueCamera
        # Get rid of ancillary node paths
        self.oobeVis.reparentTo(hidden)
        self.oobeCamera.reparentTo(hidden)
        self.oobeMode = 0
    def destroy(self):
        """Shut the DIRECT session down."""
        self.disable()
    def reset(self):
        """Bring the DIRECT session back up."""
        self.enable()
# EVENT FUNCTIONS
def enableActionEvents(self):
for event in self.actionEvents:
self.accept(event[0], event[1], extraArgs = event[2:])
def enableModifierEvents(self):
for event in self.modifierEvents:
self.accept(event, self.inputHandler, [event])
def enableKeyEvents(self):
for event in self.keyEvents:
self.accept(event, self.inputHandler, [event])
def enableMouseEvents(self):
for event in self.mouseEvents:
self.accept(event, self.inputHandler, [event])
def disableActionEvents(self):
for event, method in self.actionEvents:
self.ignore(event)
def disableModifierEvents(self):
for event in self.modifierEvents:
self.ignore(event)
def disableKeyEvents(self):
for event in self.keyEvents:
self.ignore(event)
def disableMouseEvents(self):
for event in self.mouseEvents:
self.ignore(event)
    def inputHandler(self, input):
        """Central dispatcher for all DIRECT keyboard/mouse events.

        input -- raw event name; in multi-view mode it arrives prefixed with
                 a camera name, which is stripped before key-map dispatch.

        Responsibilities: in multi-view mode, pick the window/camera the
        event belongs to and retarget self.cam/dr/iRay/mouseWatcher to it;
        translate hot keys into messenger events; track mouse-button and
        shift/control/alt state flags; forward pass-through keys when
        running as a cluster client.
        """
        if not hasattr(self, 'oobeMode') or self.oobeMode == 0:
            # [gjeon] change current camera dr, iRay, mouseWatcher accordingly to support multiple windows
            if base.direct.manipulationControl.fMultiView:
                # handling orphan events
                if self.fMouse1 and 'mouse1' not in input or\
                   self.fMouse2 and 'mouse2' not in input or\
                   self.fMouse3 and 'mouse3' not in input:
                    if input.endswith('-up') or\
                       input not in self.modifierEvents:
                        # to handle orphan events
                        return
                if (self.fMouse1 == 0 and 'mouse1-up' in input) or\
                   (self.fMouse2 == 0 and 'mouse2-up' in input) or\
                   (self.fMouse3 == 0 and 'mouse3-up' in input):
                    # to handle orphan events
                    return
                if (self.fMouse1 or self.fMouse2 or self.fMouse3) and\
                   input[4:7] != base.direct.camera.getName()[:3] and\
                   input.endswith('-up'):
                    # to handle orphan events
                    return
                # Find which window the mouse is currently over
                winCtrl = None
                possibleWinCtrls = []
                for cWinCtrl in base.winControls:
                    if cWinCtrl.mouseWatcher.node().hasMouse():
                        possibleWinCtrls.append(cWinCtrl)
                if len(possibleWinCtrls) == 1:
                    winCtrl = possibleWinCtrls[0]
                elif len(possibleWinCtrls) > 1:
                    # Disambiguate by matching the camera-name prefix baked
                    # into the event (chars 4:7) against each window's camera
                    for cWinCtrl in possibleWinCtrls:
                        if (input.endswith('-up') and\
                            not input in self.modifierEvents and\
                            not input in self.mouseEvents) or\
                           (input in self.mouseEvents):
                            if input[4:7] == cWinCtrl.camera.getName()[:3]:
                                winCtrl = cWinCtrl
                        else:
                            if input[4:7] != cWinCtrl.camera.getName()[:3]:
                                winCtrl = cWinCtrl
                if winCtrl is None:
                    return
                if input not in self.modifierEvents:
                    # Retarget the session at the window that owns this event
                    self.win = winCtrl.win
                    self.camera = winCtrl.camera
                    self.trueCamera = self.camera
                    self.cam = NodePath(winCtrl.camNode)
                    self.camNode = winCtrl.camNode
                    if hasattr(winCtrl, 'grid'):
                        base.direct.grid = winCtrl.grid
                    base.direct.dr = base.direct.drList[base.camList.index(NodePath(winCtrl.camNode))]
                    base.direct.iRay = base.direct.dr.iRay
                    base.mouseWatcher = winCtrl.mouseWatcher
                    base.mouseWatcherNode = winCtrl.mouseWatcher.node()
                    base.direct.dr.mouseUpdate()
                    LE_showInOneCam(self.selectedNPReadout, self.camera.getName())
                base.direct.widget = base.direct.manipulationControl.widgetList[base.camList.index(NodePath(winCtrl.camNode))]
                input = input[8:] # get rid of camera prefix
        # Synthesize an alt- prefix while alt is held down
        if self.fAlt and 'alt' not in input and not input.endswith('-up'):
            input = 'alt-' + input
        if input.endswith('-repeat'):
            input = input[:-7]
        # Deal with keyboard and mouse input
        if input in self.hotKeyMap.keys():
            keyDesc = self.hotKeyMap[input]
            messenger.send(keyDesc[1])
        # NOTE(review): 'speicalKeyMap' is misspelled, but it matches the
        # attribute name defined elsewhere in this file; do not "fix" it here.
        elif input in self.speicalKeyMap.keys():
            messenger.send(self.speicalKeyMap[input])
        elif input in self.directOnlyKeyMap.keys():
            if self.fIgnoreDirectOnlyKeyMap:
                return
            keyDesc = self.directOnlyKeyMap[input]
            messenger.send(keyDesc[1])
        elif input == 'mouse1-up':
            self.fMouse1 = 0 # [gjeon] to update alt key information while mouse1 is pressed
            messenger.send('DIRECT-mouse1Up')
        elif input.find('mouse1') != -1:
            self.fMouse1 = 1 # [gjeon] to update alt key information while mouse1 is pressed
            modifiers = self.getModifiers(input, 'mouse1')
            messenger.send('DIRECT-mouse1', sentArgs = [modifiers])
        elif input == 'mouse2-up':
            self.fMouse2 = 0
            messenger.send('DIRECT-mouse2Up')
        elif input.find('mouse2') != -1:
            self.fMouse2 = 1
            modifiers = self.getModifiers(input, 'mouse2')
            messenger.send('DIRECT-mouse2', sentArgs = [modifiers])
        elif input == 'mouse3-up':
            self.fMouse3 = 0
            messenger.send('DIRECT-mouse3Up')
        elif input.find('mouse3') != -1:
            self.fMouse3 = 1
            modifiers = self.getModifiers(input, 'mouse3')
            messenger.send('DIRECT-mouse3', sentArgs = [modifiers])
        elif input == 'shift':
            self.fShift = 1
        elif input == 'shift-up':
            self.fShift = 0
        elif input == 'control':
            self.fControl = 1
            # [gjeon] to update control key information while mouse1 is pressed
            if self.fMouse1:
                modifiers = DIRECT_NO_MOD
                modifiers |= DIRECT_CONTROL_MOD
                messenger.send('DIRECT-mouse1', sentArgs = [modifiers])
        elif input == 'control-up':
            self.fControl = 0
        elif input == 'alt':
            if self.fAlt:
                return
            self.fAlt = 1
            # [gjeon] to update alt key information while mouse1 is pressed
            if self.fMouse1:
                modifiers = DIRECT_NO_MOD
                modifiers |= DIRECT_ALT_MOD
                messenger.send('DIRECT-mouse1', sentArgs = [modifiers])
            elif self.fMouse2:
                modifiers = DIRECT_NO_MOD
                modifiers |= DIRECT_ALT_MOD
                messenger.send('DIRECT-mouse2', sentArgs = [modifiers])
            elif self.fMouse3:
                modifiers = DIRECT_NO_MOD
                modifiers |= DIRECT_ALT_MOD
                messenger.send('DIRECT-mouse3', sentArgs = [modifiers])
        elif input == 'alt-up':
            self.fAlt = 0
        #Pass along certain events if this display is a cluster client
        if self.clusterMode == 'client':
            if input in self.passThroughKeys:
                self.cluster('messenger.send("%s")' % input, 0)
    def doSetActiveParent(self):
        """Use the most recently selected node as the reparent target."""
        if self.selected.last:
            self.setActiveParent(self.selected.last)
    def doReparent(self):
        """Reparent the most recently selected node to the active parent."""
        if self.selected.last:
            self.reparent(self.selected.last)
    def doWrtReparent(self):
        """Reparent the last selection, preserving its world transform
        (fWrt makes reparent() use wrtReparentTo)."""
        if self.selected.last:
            self.reparent(self.selected.last, fWrt = 1)
    def doSelect(self):
        """Re-select the most recently selected node path, if any."""
        if self.selected.last:
            self.select(self.selected.last)
def getModifiers(self, input, base):
modifiers = DIRECT_NO_MOD
modifierString = input[: input.find(base)]
if modifierString.find('shift') != -1:
modifiers |= DIRECT_SHIFT_MOD
if modifierString.find('control') != -1:
modifiers |= DIRECT_CONTROL_MOD
if modifierString.find('alt') != -1:
modifiers |= DIRECT_ALT_MOD
return modifiers
    def gotShift(self, modifiers):
        """Return nonzero if the shift bit is set in modifiers."""
        return modifiers & DIRECT_SHIFT_MOD
    def gotControl(self, modifiers):
        """Return nonzero if the control bit is set in modifiers."""
        return modifiers & DIRECT_CONTROL_MOD
    def gotAlt(self, modifiers):
        """Return nonzero if the alt bit is set in modifiers."""
        return modifiers & DIRECT_ALT_MOD
    def setFScaleWidgetByCam(self, flag):
        """Toggle automatic widget resizing based on camera distance;
        starts/stops the DIRECTWidgetResize task accordingly."""
        self.fScaleWidgetByCam = flag
        if flag:
            taskMgr.add(self.widgetResizeTask, 'DIRECTWidgetResize')
        else:
            taskMgr.remove('DIRECTWidgetResize')
    def widgetResizeTask(self, state):
        """Per-frame task: keep manipulation widgets at a constant apparent
        size relative to the camera / viewport.

        NOTE(review): this method uses the bare global `direct` while most of
        the file uses `base.direct` — presumably the same object; confirm.
        """
        if not taskMgr.hasTaskNamed('resizeObjectHandles'):
            dnp = self.selected.last
            if dnp:
                if self.manipulationControl.fMultiView:
                    # First three views are orthographic; the fourth (index 3)
                    # is perspective and scales with distance and FOV.
                    for i in range(3):
                        sf = 30.0 * direct.drList[i].orthoFactor
                        self.manipulationControl.widgetList[i].setDirectScalingFactor(sf)
                    nodeCamDist = Vec3(dnp.getPos(base.camList[3])).length()
                    sf = 0.075 * nodeCamDist * math.tan(deg2Rad(direct.drList[3].fovV))
                    self.manipulationControl.widgetList[3].setDirectScalingFactor(sf)
                else:
                    nodeCamDist = Vec3(dnp.getPos(direct.camera)).length()
                    sf = 0.075 * nodeCamDist * math.tan(deg2Rad(direct.drList.getCurrentDr().fovV))
                    self.widget.setDirectScalingFactor(sf)
        return Task.cont
    def select(self, nodePath, fMultiSelect = 0,
               fSelectTag = 1, fResetAncestry = 1, fLEPane=0, fUndo=1):
        """Request selection of nodePath by broadcasting 'DIRECT-select';
        the actual work happens in selectCB (wired up elsewhere)."""
        messenger.send('DIRECT-select', [nodePath, fMultiSelect, fSelectTag, fResetAncestry, fLEPane, fUndo])
    def selectCB(self, nodePath, fMultiSelect = 0,
                 fSelectTag = 1, fResetAncestry = 1, fLEPane = 0, fUndo=1):
        """Perform the actual selection: update readout and ancestry, show
        and size the manipulation widget, retarget the camera's center of
        action, and start the follow task.  Broadcasts the various
        DIRECT_selectedNodePath* messages when done."""
        dnp = self.selected.select(nodePath, fMultiSelect, fSelectTag)
        if dnp:
            messenger.send('DIRECT_preSelectNodePath', [dnp])
            if fResetAncestry:
                # Update ancestry
                self.ancestry = dnp.getAncestors()
                self.ancestryIndex = 0
            # Update the selectedNPReadout
            self.selectedNPReadout.reparentTo(aspect2d)
            self.selectedNPReadout.setText(
                'Selected:' + dnp.getName())
            # Show the manipulation widget
            if self.manipulationControl.fMultiView:
                for widget in self.manipulationControl.widgetList:
                    widget.showWidget()
            else:
                self.widget.showWidget()
            editTypes = self.manipulationControl.getEditTypes([dnp])
            if (editTypes & EDIT_TYPE_UNEDITABLE == EDIT_TYPE_UNEDITABLE):
                self.manipulationControl.disableWidgetMove()
            else:
                self.manipulationControl.enableWidgetMove()
            # Update camera controls coa to this point
            # Coa2Camera = Coa2Dnp * Dnp2Camera
            mCoa2Camera = dnp.mCoa2Dnp * dnp.getMat(self.camera)
            row = mCoa2Camera.getRow(3)
            coa = Vec3(row[0], row[1], row[2])
            self.cameraControl.updateCoa(coa)
            # Adjust widgets size
            # This uses the additional scaling factor used to grow and
            # shrink the widget
            if not self.fScaleWidgetByCam: # [gjeon] for not scaling widget by distance from camera
                if self.manipulationControl.fMultiView:
                    for widget in self.manipulationControl.widgetList:
                        widget.setScalingFactor(dnp.getRadius())
                else:
                    self.widget.setScalingFactor(dnp.getRadius())
            # Spawn task to have object handles follow the selected object
            taskMgr.remove('followSelectedNodePath')
            t = Task.Task(self.followSelectedNodePathTask)
            t.dnp = dnp
            taskMgr.add(t, 'followSelectedNodePath')
            # Send an message marking the event
            messenger.send('DIRECT_selectedNodePath', [dnp])
            messenger.send('DIRECT_selectedNodePath_fMulti_fTag', [dnp, fMultiSelect, fSelectTag])
            messenger.send('DIRECT_selectedNodePath_fMulti_fTag_fLEPane', [dnp, fMultiSelect, fSelectTag, fLEPane])
    def followSelectedNodePathTask(self, state):
        """Per-frame task: keep the manipulation widget glued to the
        selected node's center of action (state.dnp set by selectCB)."""
        mCoa2Render = state.dnp.mCoa2Dnp * state.dnp.getMat(render)
        decomposeMatrix(mCoa2Render,
                        self.scale, self.hpr, self.pos,
                        CSDefault)
        self.widget.setPosHpr(self.pos, self.hpr)
        return Task.cont
    def deselect(self, nodePath):
        """Deselect nodePath: hide widgets and the readout, stop the follow
        task, clear ancestry, and broadcast DIRECT_deselectedNodePath."""
        dnp = self.selected.deselect(nodePath)
        if dnp:
            # Hide the manipulation widget
            if self.manipulationControl.fMultiView:
                for widget in self.manipulationControl.widgetList:
                    widget.hideWidget()
            else:
                self.widget.hideWidget()
            self.selectedNPReadout.reparentTo(hidden)
            self.selectedNPReadout.setText(' ')
            taskMgr.remove('followSelectedNodePath')
            self.ancestry = []
            # Send an message marking the event
            messenger.send('DIRECT_deselectedNodePath', [dnp])
    def deselectAll(self):
        """Request deselection of everything; handled by deselectAllCB."""
        messenger.send('DIRECT-preDeselectAll')
    def deselectAllCB(self):
        """Deselect every selected node, hide widgets/readout, stop the
        follow task, and broadcast DIRECT_deselectAll."""
        self.selected.deselectAll()
        # Hide the manipulation widget
        if self.manipulationControl.fMultiView:
            for widget in self.manipulationControl.widgetList:
                widget.hideWidget()
        else:
            self.widget.hideWidget()
        self.selectedNPReadout.reparentTo(hidden)
        self.selectedNPReadout.setText(' ')
        taskMgr.remove('followSelectedNodePath')
        messenger.send('DIRECT_deselectAll')
def setActiveParent(self, nodePath = None):
# Record new parent
self.activeParent = nodePath
# Update the activeParentReadout
self.activeParentReadout.reparentTo(aspect2d)
self.activeParentReadout.setText(
'Active Reparent Target:' + nodePath.getName())
# Alert everyone else
messenger.send('DIRECT_activeParent', [self.activeParent])
    def reparent(self, nodePath = None, fWrt = 0):
        """Reparent nodePath to the current active parent.

        fWrt -- if nonzero, use wrtReparentTo so the node keeps its world
                transform.  No-op unless both nodePath and activeParent are
                set and the move would not create a cycle.
        """
        if (nodePath and self.activeParent and
            self.isNotCycle(nodePath, self.activeParent)):
            oldParent = nodePath.getParent()
            if fWrt:
                nodePath.wrtReparentTo(self.activeParent)
            else:
                nodePath.reparentTo(self.activeParent)
            # Alert everyone else
            messenger.send('DIRECT_reparent',
                           [nodePath, oldParent, self.activeParent])
            # [gjeon] for others who needs fWrt information
            messenger.send('DIRECT_reparent_fWrt',
                           [nodePath, oldParent, self.activeParent, fWrt])
def isNotCycle(self, nodePath, parent):
if nodePath == parent:
print 'DIRECT.reparent: Invalid parent'
return 0
elif parent.hasParent():
return self.isNotCycle(nodePath, parent.getParent())
else:
return 1
    def flash(self, nodePath = 'None Given'):
        """ Highlight an object by setting it red for a few seconds

        nodePath -- node to flash; defaults to the current selection.
        The original color (or None if unset) is stashed on the restore
        task, and flashDone puts it back via setUponDeath.
        """
        # Clean up any existing task
        taskMgr.remove('flashNodePath')
        # Spawn new task if appropriate
        if nodePath == 'None Given':
            # If nothing specified, try selected node path
            nodePath = self.selected.last
        if nodePath:
            if nodePath.hasColor():
                doneColor = nodePath.getColor()
                # Flash with the complementary color (full alpha)
                flashColor = VBase4(1) - doneColor
                flashColor.setW(1)
            else:
                doneColor = None
                flashColor = VBase4(1, 0, 0, 1)
            # Temporarily set node path color
            nodePath.setColor(flashColor)
            # Clean up color in a few seconds
            t = taskMgr.doMethodLater(DIRECT_FLASH_DURATION,
                                      # This is just a dummy task
                                      self.flashDummy,
                                      'flashNodePath',)
            t.nodePath = nodePath
            t.doneColor = doneColor
            # This really does all the work
            t.setUponDeath(self.flashDone)
    def flashDummy(self, state):
        """Placeholder task body for flash(); returning Task.done triggers
        the uponDeath handler (flashDone), which does the real work."""
        # Real work is done in upon death function
        return Task.done
def flashDone(self, state):
# Return node Path to original state
if state.nodePath.isEmpty():
# Node path doesn't exist anymore, bail
return
if state.doneColor:
state.nodePath.setColor(state.doneColor)
else:
state.nodePath.clearColor()
    def fitOnNodePath(self, nodePath = 'None Given'):
        """Select nodePath (default: current selection) and, shortly after,
        fit the camera onto its manipulation widget."""
        if nodePath == 'None Given':
            # If nothing specified, try selected node path
            nodePath = self.selected.last
        base.direct.select(nodePath)
        # Delay the fit so the selection/widget update lands first
        def fitTask(state, self = self):
            self.cameraControl.fitOnWidget()
            return Task.done
        taskMgr.doMethodLater(0.1, fitTask, 'manipulateCamera')
    def isolate(self, nodePath = 'None Given'):
        """ Show a node path and hide its siblings """
        # First kill the flashing task to avoid complications
        taskMgr.remove('flashNodePath')
        # Use currently selected node path if node selected
        if nodePath == 'None Given':
            nodePath = self.selected.last
        # Do we have a node path?
        if nodePath:
            # Yes, show everything in level
            self.showAllDescendants(nodePath.getParent())
            # Now hide all of this node path's siblings
            for sib in nodePath.getParent().getChildren():
                if sib.node() != nodePath.node():
                    sib.hide()
    def toggleVis(self, nodePath = 'None Given'):
        """ Toggle visibility of node path """
        # First kill the flashing task to avoid complications
        taskMgr.remove('flashNodePath')
        if nodePath == 'None Given':
            # If nothing specified, try selected node path
            nodePath = self.selected.last
        if nodePath:
            # Now toggle node path's visibility state
            if nodePath.isHidden():
                nodePath.show()
            else:
                nodePath.hide()
    def removeNodePath(self, nodePath = 'None Given'):
        """Remove nodePath (default: current selection) from the scene graph.

        NOTE(review): uses NodePath.remove(); newer Panda3D renamed this to
        removeNode() — confirm against the engine version in use.
        """
        if nodePath == 'None Given':
            # If nothing specified, try selected node path
            nodePath = self.selected.last
        if nodePath:
            nodePath.remove()
    def removeAllSelected(self):
        """Remove every currently selected node path from the scene graph."""
        self.selected.removeAll()
    def showAllDescendants(self, nodePath = None):
        """ Show the level and its descendants """
        if nodePath is None:
            nodePath = base.render
        # NOTE(review): nodePath is a NodePath, so this isinstance check
        # against CollisionNode looks like it is always False — the intent
        # was presumably to test nodePath.node(); confirm before changing.
        if not isinstance(nodePath, CollisionNode):
            nodePath.show()
        for child in nodePath.getChildren():
            self.showAllDescendants(child)
def upAncestry(self):
if self.ancestry:
l = len(self.ancestry)
i = self.ancestryIndex + 1
if i < l:
np = self.ancestry[i]
name = np.getName()
if (name != 'render') and (name != 'renderTop'):
self.ancestryIndex = i
self.select(np, 0, 0)
self.flash(np)
def downAncestry(self):
if self.ancestry:
l = len(self.ancestry)
i = self.ancestryIndex - 1
if i >= 0:
np = self.ancestry[i]
name = np.getName()
if (name != 'render') and (name != 'renderTop'):
self.ancestryIndex = i
self.select(np, 0, 0)
self.flash(np)
    def getAndSetName(self, nodePath):
        """ Prompt user for new node path name """
        # Python 2 Tkinter module; renamed tkinter.simpledialog in Python 3
        from tkSimpleDialog import askstring
        newName = askstring('Node Path: ' + nodePath.getName(),
                            'Enter new name:')
        if newName:
            nodePath.setName(newName)
            messenger.send('DIRECT_nodePathSetName', [nodePath, newName])
# UNDO REDO FUNCTIONS
    def pushUndo(self, nodePathList, fResetRedo = 1):
        """Record the current transforms of nodePathList on the undo stack.

        fResetRedo -- clear the redo stack (the normal case for a fresh
                      user action); redo() passes 0 to preserve it.
        """
        # Assemble group of changes
        undoGroup = []
        for nodePath in nodePathList:
            t = nodePath.getTransform()
            undoGroup.append([nodePath, t])
        # Now record group
        self.undoList.append(undoGroup)
        # Truncate list (keep at most the 25 most recent groups)
        self.undoList = self.undoList[-25:]
        # Alert anyone who cares
        messenger.send('DIRECT_pushUndo')
        if fResetRedo and (nodePathList != []):
            self.redoList = []
            messenger.send('DIRECT_redoListEmpty')
def popUndoGroup(self):
# Get last item
undoGroup = self.undoList[-1]
# Strip last item off of undo list
self.undoList = self.undoList[:-1]
# Update state of undo button
if not self.undoList:
messenger.send('DIRECT_undoListEmpty')
# Return last item
return undoGroup
def pushRedo(self, nodePathList):
# Assemble group of changes
redoGroup = []
for nodePath in nodePathList:
t = nodePath.getTransform()
redoGroup.append([nodePath, t])
# Now record redo group
self.redoList.append(redoGroup)
# Truncate list
self.redoList = self.redoList[-25:]
# Alert anyone who cares
messenger.send('DIRECT_pushRedo')
def popRedoGroup(self):
# Get last item
redoGroup = self.redoList[-1]
# Strip last item off of redo list
self.redoList = self.redoList[:-1]
# Update state of redo button
if not self.redoList:
messenger.send('DIRECT_redoListEmpty')
# Return last item
return redoGroup
    def undo(self):
        """Pop the newest undo group, push the affected nodes' current
        transforms onto the redo stack, then restore the recorded poses."""
        if self.undoList:
            # Get last item off of redo list
            undoGroup = self.popUndoGroup()
            # Record redo information
            nodePathList = [x[0] for x in undoGroup]
            self.pushRedo(nodePathList)
            # Now undo xform for group
            for pose in undoGroup:
                # Undo xform
                pose[0].setTransform(pose[1])
            # Alert anyone who cares
            messenger.send('DIRECT_undo', [nodePathList])
    def redo(self):
        """Pop the newest redo group, push the affected nodes' current
        transforms back onto the undo stack (without clearing redo), then
        reapply the recorded poses."""
        if self.redoList:
            # Get last item off of redo list
            redoGroup = self.popRedoGroup()
            # Record undo information
            nodePathList = [x[0] for x in redoGroup]
            self.pushUndo(nodePathList, fResetRedo = 0)
            # Redo xform
            for pose in redoGroup:
                pose[0].setTransform(pose[1])
            # Alert anyone who cares
            messenger.send('DIRECT_redo', [nodePathList])
# UTILITY FUNCTIONS
    def message(self, text):
        """Show text in the on-screen message readout for a few seconds.

        NOTE(review): the 'hideDirectMessageLater' removal appears to be a
        leftover — the delayed task is scheduled under the name
        'hideDirectMessage' (see hideDirectMessageLater) — confirm.
        """
        taskMgr.remove('hideDirectMessage')
        taskMgr.remove('hideDirectMessageLater')
        self.directMessageReadout.reparentTo(aspect2d)
        self.directMessageReadout.setText(text)
        self.hideDirectMessageLater()
    def hideDirectMessageLater(self):
        """Schedule the message readout to be hidden after 3 seconds."""
        taskMgr.doMethodLater(3.0, self.hideDirectMessage, 'hideDirectMessage')
    def hideDirectMessage(self, state):
        """Task body: hide the message readout and finish."""
        self.directMessageReadout.reparentTo(hidden)
        return Task.done
    def useObjectHandles(self):
        """Make the manipulation control's object handles the active widget."""
        self.widget = self.manipulationControl.objectHandles
        self.widget.reparentTo(base.direct.group)
    def hideSelectedNPReadout(self):
        """Hide the 'Selected:' on-screen readout."""
        self.selectedNPReadout.reparentTo(hidden)
    def hideActiveParentReadout(self):
        """Hide the 'Active Reparent Target:' on-screen readout."""
        self.activeParentReadout.reparentTo(hidden)
    def toggleWidgetVis(self):
        """Toggle visibility of the active manipulation widget."""
        self.widget.toggleWidget()
    def setCOAMode(self, mode):
        """Record the COA (center-of-action) mode flag."""
        self.coaMode = mode
    def isEnabled(self):
        """Return the enabled flag for the DIRECT session."""
        return self.fEnabled
def addUnpickable(self, item):
for iRay in self.iRayList:
iRay.addUnpickable(item)
def removeUnpickable(self, item):
for iRay in self.iRayList:
iRay.removeUnpickable(item)
class DisplayRegionContext(DirectObject):
    """Per-camera helper tracking one display region's lens parameters and
    normalized mouse coordinates for the DIRECT tools.

    Assumes a single display region per camera (see the note in
    DisplayRegionList.__init__()).
    """
    # Counter used to build a unique lens-change event name per instance
    regionCount = 0
    def __init__(self, cam):
        self.cam = cam
        self.camNode = self.cam.node()
        self.camLens = self.camNode.getLens()
        # set lens change callback
        changeEvent = 'dr%d-change-event' % DisplayRegionContext.regionCount
        DisplayRegionContext.regionCount += 1
        self.camLens.setChangeEvent(changeEvent)
        self.accept(changeEvent, self.camUpdate)
        self.iRay = SelectionRay(self.cam)
        self.nearVec = Vec3(0)
        self.mouseX = 0.0
        self.mouseY = 0.0
        # Scale factor applied to the film size of orthographic lenses
        self.orthoFactor = 0.1
        # A Camera node can have more than one display region
        # associated with it. Here I assume that there is only
        # one display region per camera, since we are defining a
        # display region on a per-camera basis. See note in
        # DisplayRegionList.__init__()
        try:
            # Older Panda3D API name; falls back to the newer one below
            self.dr = self.camNode.getDr(0)
        except:
            self.dr = self.camNode.getDisplayRegion(0)
        left = self.dr.getLeft()
        right = self.dr.getRight()
        bottom = self.dr.getBottom()
        top = self.dr.getTop()
        # Map window-wide mouse coords into this region's -1..1 range
        self.originX = left+right-1
        self.originY = top+bottom-1
        self.scaleX = 1.0/(right-left)
        self.scaleY = 1.0/(top-bottom)
        self.setOrientation()
        self.camUpdate()
    def __getitem__(self, key):
        # Allow dict-style access to attributes (e.g. drc['near'])
        return self.__dict__[key]
    def setOrientation(self):
        """Detect whether the display is rotated ~90 degrees (sideways)."""
        # MRM This assumes orientation is set on transform above cam
        hpr = self.cam.getHpr()
        if hpr[2] < 135 and hpr[2]>45 or hpr[2]>225 and hpr[2]<315:
            self.isSideways = 1
        elif hpr[2] > -135 and hpr[2] < -45 or hpr[2] < -225 and hpr[2] > -315:
            self.isSideways = 1
        else:
            self.isSideways = 0
    # The following take into consideration sideways displays
    def getHfov(self):
        """Horizontal FOV, swapped with vertical when sideways."""
        if self.isSideways:
            return self.camLens.getVfov()
        else:
            return self.camLens.getHfov()
    def getVfov(self):
        """Vertical FOV, swapped with horizontal when sideways."""
        if self.isSideways:
            return self.camLens.getHfov()
        else:
            return self.camLens.getVfov()
    def setHfov(self, hfov):
        """Set horizontal FOV (applied to the vertical axis when sideways)."""
        if self.isSideways:
            self.camLens.setFov(self.camLens.getHfov(), hfov)
        else:
            self.camLens.setFov(hfov, self.camLens.getVfov())
    def setVfov(self, vfov):
        """Set vertical FOV (applied to the horizontal axis when sideways)."""
        if self.isSideways:
            self.camLens.setFov(vfov, self.camLens.getVfov())
        else:
            self.camLens.setFov(self.camLens.getHfov(), vfov)
    def setFov(self, hfov, vfov):
        """Set both FOV components, swapping axes when sideways."""
        if self.isSideways:
            self.camLens.setFov(vfov, hfov)
        else:
            self.camLens.setFov(hfov, vfov)
    def getWidth(self):
        """Window width in pixels (640 if the size is unknown)."""
        prop = base.direct.win.getProperties()
        if prop.hasSize():
            return prop.getXSize()
        else:
            return 640
    def getHeight(self):
        """Window height in pixels (480 if the size is unknown)."""
        prop = base.direct.win.getProperties()
        if prop.hasSize():
            return prop.getYSize()
        else:
            return 480
    def updateFilmSize(self, width, height):
        """Resize the lens film; orthographic lenses are scaled down by
        orthoFactor first."""
        if self.camLens.__class__.__name__ == "OrthographicLens":
            width *= self.orthoFactor
            height *= self.orthoFactor
        self.camLens.setFilmSize(width, height)
    def camUpdate(self, lens = None):
        """Refresh cached lens values (called on the lens-change event)."""
        # Window Data
        self.near = self.camLens.getNear()
        self.far = self.camLens.getFar()
        self.fovH = self.camLens.getHfov()
        self.fovV = self.camLens.getVfov()
        self.nearWidth = math.tan(deg2Rad(self.fovH * 0.5)) * self.near * 2.0
        self.nearHeight = math.tan(deg2Rad(self.fovV * 0.5)) * self.near * 2.0
        self.left = -self.nearWidth * 0.5
        self.right = self.nearWidth * 0.5
        self.top = self.nearHeight * 0.5
        self.bottom = -self.nearHeight * 0.5
    def mouseUpdate(self):
        """Refresh this region's normalized mouse position, per-frame delta,
        and the corresponding point on the near plane (nearVec)."""
        # Mouse Data
        # Last frame
        self.mouseLastX = self.mouseX
        self.mouseLastY = self.mouseY
        # Values for this frame
        # This ranges from -1 to 1
        if base.mouseWatcherNode and base.mouseWatcherNode.hasMouse():
            self.mouseX = base.mouseWatcherNode.getMouseX()
            self.mouseY = base.mouseWatcherNode.getMouseY()
            self.mouseX = (self.mouseX-self.originX)*self.scaleX
            self.mouseY = (self.mouseY-self.originY)*self.scaleY
        # Delta percent of window the mouse moved
        self.mouseDeltaX = self.mouseX - self.mouseLastX
        self.mouseDeltaY = self.mouseY - self.mouseLastY
        self.nearVec.set((self.nearWidth*0.5) * self.mouseX,
                         self.near,
                         (self.nearHeight*0.5) * self.mouseY)
class DisplayRegionList(DirectObject):
    """Container managing one DisplayRegionContext per camera, with bulk
    lens setters and a per-frame mouse-update task."""
    def __init__(self):
        self.displayRegionList = []
        i = 0
        # Things are funky if we are oobe
        if (hasattr(base, 'oobeMode') and base.oobeMode):
            # assume we only have one cam at this point
            drc = DisplayRegionContext(base.cam)
            self.displayRegionList.append(drc)
        else:
            # MRM: Doesn't properly handle multiple camera groups anymore
            # Assumes everything is under main camera
            # This is following the old way of setting up
            # display regions. A display region is set up for
            # each camera node in the scene graph. This was done
            # so that only display regions in the scene graph are
            # considered. The right way to do this is to set up
            # a display region for each real display region, and then
            # keep track of which are currently active (e.g. use a flag)
            # processing only them.
            for camIndex in range(len(base.camList)):
                cam = base.camList[camIndex]
                if cam.getName()=='<noname>':
                    cam.setName('Camera%d' % camIndex)
                drc = DisplayRegionContext(cam)
                self.displayRegionList.append(drc)
        # Keep every region's mouse data fresh on DIRECT mouse events
        self.accept("DIRECT-mouse1", self.mouseUpdate)
        self.accept("DIRECT-mouse2", self.mouseUpdate)
        self.accept("DIRECT-mouse3", self.mouseUpdate)
        self.accept("DIRECT-mouse1Up", self.mouseUpdate)
        self.accept("DIRECT-mouse2Up", self.mouseUpdate)
        self.accept("DIRECT-mouse3Up", self.mouseUpdate)
        self.tryToGetCurrentDr = True
    def __getitem__(self, index):
        return self.displayRegionList[index]
    def __len__(self):
        return len(self.displayRegionList)
    def updateContext(self):
        """Force one immediate mouse/context update outside the task."""
        self.contextTask(None)
    def setNearFar(self, near, far):
        """Set near and far planes on every region's lens."""
        for dr in self.displayRegionList:
            dr.camLens.setNearFar(near, far)
    def setNear(self, near):
        """Set the near plane on every region's lens."""
        for dr in self.displayRegionList:
            dr.camLens.setNear(near)
    def setFar(self, far):
        """Set the far plane on every region's lens."""
        for dr in self.displayRegionList:
            dr.camLens.setFar(far)
    def setFov(self, hfov, vfov):
        """Set both FOV components on every region."""
        for dr in self.displayRegionList:
            dr.setFov(hfov, vfov)
    def setHfov(self, fov):
        """Set the horizontal FOV on every region."""
        for dr in self.displayRegionList:
            dr.setHfov(fov)
    def setVfov(self, fov):
        """Set the vertical FOV on every region."""
        for dr in self.displayRegionList:
            dr.setVfov(fov)
    def mouseUpdate(self, modifiers = DIRECT_NO_MOD):
        """Refresh mouse data for every region."""
        for dr in self.displayRegionList:
            dr.mouseUpdate()
        #base.direct.dr = self.getCurrentDr()
    def getCurrentDr(self):
        """Return the region currently under the mouse (falls back to the
        first region, or to base.direct.dr when lookup is disabled)."""
        if not self.tryToGetCurrentDr:
            return base.direct.dr
        for dr in self.displayRegionList:
            if (dr.mouseX >= -1.0 and dr.mouseX <= 1.0 and
                dr.mouseY >= -1.0 and dr.mouseY <= 1.0):
                return dr
        return self.displayRegionList[0]
    def start(self):
        """(Re)start the per-frame context update task."""
        # First shutdown any existing task
        self.stop()
        # Start a new context task
        self.spawnContextTask()
    def stop(self):
        """Stop the per-frame context update task."""
        # Kill the existing context task
        taskMgr.remove('DIRECTContextTask')
    def spawnContextTask(self):
        taskMgr.add(self.contextTask, 'DIRECTContextTask')
    def removeContextTask(self):
        taskMgr.remove('DIRECTContextTask')
    def contextTask(self, state):
        """Per-frame task body: refresh mouse data for all regions."""
        # Window Data
        self.mouseUpdate()
        # hack to test movement
        return Task.cont
    def addDisplayRegionContext(self, cam):
        """Create and track a context for a newly added camera."""
        self.displayRegionList.append(DisplayRegionContext(cam))
    def removeDisplayRegionContext(self, cam):
        """Drop the context associated with cam, if any."""
        for drc in self.displayRegionList:
            if drc.cam == cam:
                self.displayRegionList.remove(drc)
                break
| {
"content_hash": "5f4be184d0473e693eaa91bf912536a5",
"timestamp": "",
"source": "github",
"line_count": 1322,
"max_line_length": 130,
"avg_line_length": 39.726172465960666,
"alnum_prop": 0.5777257321299364,
"repo_name": "mgracer48/panda3d",
"id": "2fb18b5806f167440f938cd4125fd61498bff330",
"size": "52518",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "direct/src/directtools/DirectSession.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4004"
},
{
"name": "C",
"bytes": "6395157"
},
{
"name": "C++",
"bytes": "31241851"
},
{
"name": "Emacs Lisp",
"bytes": "166274"
},
{
"name": "Groff",
"bytes": "3106"
},
{
"name": "HTML",
"bytes": "8081"
},
{
"name": "Java",
"bytes": "3777"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Logos",
"bytes": "5504"
},
{
"name": "MAXScript",
"bytes": "1745"
},
{
"name": "NSIS",
"bytes": "91955"
},
{
"name": "Nemerle",
"bytes": "4403"
},
{
"name": "Objective-C",
"bytes": "30065"
},
{
"name": "Objective-C++",
"bytes": "300394"
},
{
"name": "Perl",
"bytes": "206982"
},
{
"name": "Perl6",
"bytes": "30636"
},
{
"name": "Puppet",
"bytes": "2627"
},
{
"name": "Python",
"bytes": "5530563"
},
{
"name": "Rebol",
"bytes": "421"
},
{
"name": "Shell",
"bytes": "55940"
},
{
"name": "Visual Basic",
"bytes": "136"
}
],
"symlink_target": ""
} |
"""Tools for interacting with a GTFS database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
def get_shape_dicts(route_short_name, septa_fn):
    """Returns dicts that are useful for mapping from a route_short_name and block_id to a list of coordinates representing the route.

    route_short_name -- GTFS route short name, e.g. "47"
    septa_fn -- SQLAlchemy URL of a sqlite file with the GTFS data in it
                (to create this file, use https://github.com/jarondl/pygtfs.git)

    Returns:
    route_block_to_shape -- (route_short_name, block_id) -> shape_id
    shape_to_path -- shape_id -> [(lat, lon), ...]
    """
    e = create_engine(septa_fn)
    Session = sessionmaker(bind = e)
    s = Session()
    # Fix: close the session in a finally block so the connection is not
    # leaked if a query raises.
    try:
        route_block_to_shape = {}
        # One representative trip (and thus one shape) per block on the route
        q = "SELECT routes.route_short_name, trips.block_id, trips.shape_id \
            FROM routes INNER JOIN trips \
            ON routes.route_id == trips.route_id \
            WHERE routes.route_short_name == :rsn \
            GROUP BY trips.block_id"
        results = s.execute(q, {"rsn": route_short_name})
        for r in results:
            route_block_to_shape[(r.route_short_name, r.block_id)] = r.shape_id

        shape_to_path = {}
        for s_id in set(route_block_to_shape.values()):
            q = "SELECT shapes.shape_pt_lat, shapes.shape_pt_lon \
                FROM shapes \
                WHERE shapes.shape_id == :s_id"
            results = s.execute(q, {'s_id': s_id})
            shape_to_path[s_id] = [tuple(r) for r in results]
    finally:
        s.close()
    return route_block_to_shape, shape_to_path
def test_get_shape_dicts():
    """Smoke test: print every shape and its path for route 47.

    Requires a local GTFS sqlite file; adjust septa_fn below.
    """
    route_short_name = "47"
    #XXX change me
    septa_fn = "sqlite:////Users/lewfish/data/septa/septa_bus.sqlite"
    route_block_to_shape, shape_to_path = get_shape_dicts(route_short_name, septa_fn)
    for s_id, path in shape_to_path.iteritems():
        print s_id
        print path
        print
if __name__ == "__main__":
    # Fix: the module defines test_get_shape_dicts, not test_get_shape_maps;
    # the old call raised NameError when the script was run directly.
    test_get_shape_dicts()
| {
"content_hash": "39d3a99f4cfb9c8a3a0c87acc69c797d",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 134,
"avg_line_length": 32.47457627118644,
"alnum_prop": 0.6221294363256785,
"repo_name": "lewfish/septa-bus-prediction",
"id": "4d59671f2134a9c8038221ef7c051755386018de",
"size": "1916",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lewfish/septaprediction/gtfs.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "5984"
}
],
"symlink_target": ""
} |
import sys
import os
# When run as a script, make the package's parent importable unless the test
# already lives inside the installed pygame.tests package.
if __name__ == '__main__':
    pkg_dir = os.path.split(os.path.abspath(__file__))[0]
    parent_dir, pkg_name = os.path.split(pkg_dir)
    is_pygame_pkg = (pkg_name == 'tests' and
                     os.path.split(parent_dir)[1] == 'pygame')
    if not is_pygame_pkg:
        sys.path.insert(0, parent_dir)
else:
    is_pygame_pkg = __name__.startswith('pygame.tests.')
import unittest
if is_pygame_pkg:
from pygame.tests.test_utils import example_path
else:
from test.test_utils import example_path
import pygame
from pygame import mixer
from pygame.compat import xrange_, unicode_, as_bytes, geterror, bytes_
################################### CONSTANTS ##################################
FREQUENCIES = [11025, 22050, 44100, 48000]  # sample rates (Hz) to exercise
SIZES = [-16, -8, 8, 16]  # sample sizes; negative means signed
CHANNELS = [1, 2]  # mono and stereo
BUFFERS = [3024]  # mixer buffer sizes to exercise
############################## MODULE LEVEL TESTS ##############################
class MixerModuleTest(unittest.TestCase):
    def test_init__keyword_args(self):
        """mixer.init() should accept its configuration as keyword args and
        report them back (size compared by magnitude) via get_init()."""
        # Fails on a Mac; probably older SDL_mixer
        ## Probably don't need to be so exhaustive. Besides being slow the repeated
        ## init/quit calls may be causing problems on the Mac.
        ## configs = ( {'frequency' : f, 'size' : s, 'channels': c }
        ##             for f in FREQUENCIES
        ##             for s in SIZES
        ##             for c in CHANNELS )
        #### configs = [{'frequency' : 44100, 'size' : 16, 'channels' : 1}]
        configs = [{'frequency' : 22050, 'size' : -16, 'channels' : 2}]
        for kw_conf in configs:
            mixer.init(**kw_conf)
            mixer_conf = mixer.get_init()
            self.assertEquals(
                # Not all "sizes" are supported on all systems.
                (mixer_conf[0], abs(mixer_conf[1]), mixer_conf[2]),
                (kw_conf['frequency'],
                 abs(kw_conf['size']),
                 kw_conf['channels'])
            )
            mixer.quit()
    def todo_test_pre_init__keyword_args(self):
        """mixer.pre_init() should accept keyword args that a following
        init() then honors (disabled: see comment below)."""
        # Fails on Mac; probably older SDL_mixer
        ## Probably don't need to be so exhaustive. Besides being slow the repeated
        ## init/quit calls may be causing problems on the Mac.
        ## configs = ( {'frequency' : f, 'size' : s, 'channels': c }
        ##             for f in FREQUENCIES
        ##             for s in SIZES
        ##             for c in CHANNELS )
        configs = [{'frequency' : 44100, 'size' : 16, 'channels' : 1}]
        for kw_conf in configs:
            mixer.pre_init(**kw_conf)
            mixer.init()
            mixer_conf = mixer.get_init()
            self.assertEquals(
                # Not all "sizes" are supported on all systems.
                (mixer_conf[0], abs(mixer_conf[1]), mixer_conf[2]),
                (kw_conf['frequency'],
                 abs(kw_conf['size']),
                 kw_conf['channels'])
            )
            mixer.quit()
    def todo_test_pre_init__zero_values(self):
        """pre_init() args of 0 should reset to the built-in defaults
        (22050, -16, 2).  No way to check buffer size though."""
        # Ensure that argument values of 0 are replaced with
        # default values. No way to check buffer size though.
        mixer.pre_init(44100, -8, 1)  # Non default values
        mixer.pre_init(0, 0, 0)       # Should reset to default values
        mixer.init()
        try:
            self.failUnlessEqual(mixer.get_init(), (22050, -16, 2))
        finally:
            mixer.quit()
    def todo_test_init__zero_values(self):
        """init() args of 0 should fall back to the values set earlier by
        pre_init().  No way to check buffer size though."""
        # Ensure that argument values of 0 are replaced with
        # preset values. No way to check buffer size though.
        mixer.pre_init(44100, 8, 1)  # None default values
        mixer.init(0, 0, 0)
        try:
            self.failUnlessEqual(mixer.get_init(), (44100, 8, 1))
        finally:
            mixer.quit()
            # Restore the default pre_init state for later tests
            mixer.pre_init(0, 0, 0, 0)
    def test_get_init__returns_exact_values_used_for_init(self):
        """get_init() should echo the exact init() configuration.

        Currently short-circuited by the `return` below — everything after
        it is intentionally dead until the SDL_mixer bug is fixed.
        """
        return
        # fix in 1.9 - I think it's a SDL_mixer bug.
        # TODO: When this bug is fixed, testing through every combination
        #       will be too slow so adjust as necessary, at the moment it
        #       breaks the loop after first failure
        configs = []
        for f in FREQUENCIES:
            for s in SIZES:
                for c in CHANNELS:
                    configs.append ((f,s,c))
        print (configs)
        for init_conf in configs:
            print (init_conf)
            f,s,c = init_conf
            if (f,s) == (22050,16):continue
            mixer.init(f,s,c)
            mixer_conf = mixer.get_init()
            import time
            time.sleep(0.1)
            mixer.quit()
            time.sleep(0.1)
            if init_conf != mixer_conf:
                continue
            self.assertEquals(init_conf, mixer_conf)
    def test_get_init__returns_None_if_mixer_not_initialized(self):
        """get_init() is None before mixer.init() has been called."""
        self.assert_(mixer.get_init() is None)
    def test_get_num_channels__defaults_eight_after_init(self):
        """A fresh mixer.init() should provide 8 playback channels."""
        mixer.init()
        num_channels = mixer.get_num_channels()
        self.assert_(num_channels == 8)
        mixer.quit()
    def test_set_num_channels(self):
        """set_num_channels() should be reflected by get_num_channels()."""
        mixer.init()
        for i in xrange_(1, mixer.get_num_channels() + 1):
            mixer.set_num_channels(i)
            self.assert_(mixer.get_num_channels() == i)
        mixer.quit()
    def test_quit(self):
        """ get_num_channels() Should throw pygame.error if uninitialized
        after mixer.quit() """
        mixer.init()
        mixer.quit()
        self.assertRaises (
            pygame.error, mixer.get_num_channels,
        )
    def test_sound_args(self):
        """Exercise every accepted (and rejected) Sound() argument form:
        positional vs. keyword, file vs. buffer vs. array sources,
        str/unicode/bytes paths, and the exact TypeError messages for bad
        combinations."""
        def get_bytes(snd):
            # Raw sample data, used to prove two Sounds loaded identically.
            return snd.get_raw()
        mixer.init()
        try:
            sample = as_bytes('\x00\xff') * 24
            wave_path = example_path(os.path.join('data', 'house_lo.wav'))
            uwave_path = unicode_(wave_path)
            bwave_path = uwave_path.encode(sys.getfilesystemencoding())
            snd = mixer.Sound(file=wave_path)
            self.assert_(snd.get_length() > 0.5)
            snd_bytes = get_bytes(snd)
            self.assert_(len(snd_bytes) > 1000)
            # A path may be passed positionally or as file=, str or
            # unicode; all must yield the same sample data.
            self.assert_(get_bytes(mixer.Sound(wave_path)) == snd_bytes)
            self.assert_(get_bytes(mixer.Sound(file=uwave_path)) == snd_bytes)
            self.assert_(get_bytes(mixer.Sound(uwave_path)) == snd_bytes)
            # Exactly one source is allowed: zero, or more than one,
            # raises TypeError with this message.
            arg_emsg = 'Sound takes either 1 positional or 1 keyword argument'
            try:
                mixer.Sound()
            except TypeError:
                self.assertEqual(str(geterror()), arg_emsg)
            else:
                self.fail("no exception")
            try:
                mixer.Sound(wave_path, buffer=sample)
            except TypeError:
                self.assertEqual(str(geterror()), arg_emsg)
            else:
                self.fail("no exception")
            try:
                mixer.Sound(sample, file=wave_path)
            except TypeError:
                self.assertEqual(str(geterror()), arg_emsg)
            else:
                self.fail("no exception")
            try:
                mixer.Sound(buffer=sample, file=wave_path)
            except TypeError:
                self.assertEqual(str(geterror()), arg_emsg)
            else:
                self.fail("no exception")
            try:
                mixer.Sound(foobar=sample)
            except TypeError:
                emsg = "Unrecognized keyword argument 'foobar'"
                self.assertEqual(str(geterror()), emsg)
            else:
                self.fail("no exception")
            # Empty *args/**kwargs expansions are harmless.
            snd = mixer.Sound(wave_path, **{})
            self.assertEqual(get_bytes(snd), snd_bytes)
            snd = mixer.Sound(*[], **{'file': wave_path})
            try:
                snd = mixer.Sound([])
            except TypeError:
                emsg = 'Unrecognized argument (type list)'
                self.assertEqual(str(geterror()), emsg)
            else:
                self.fail("no exception")
            try:
                snd = mixer.Sound(buffer=[])
            except TypeError:
                emsg = 'Expected object with buffer interface: got a list'
                self.assertEqual(str(geterror()), emsg)
            else:
                self.fail("no exception")
            # A path that does not exist is a pygame.error, not TypeError.
            ufake_path = unicode_('12345678')
            self.assertRaises(pygame.error, mixer.Sound, ufake_path)
            try:
                mixer.Sound(buffer=unicode_('something'))
            except TypeError:
                emsg = 'Unicode object not allowed as buffer object'
                self.assertEqual(str(geterror()), emsg)
            else:
                self.fail("no exception")
            # buffer= accepts raw bytes; bytes file paths also work.
            self.assertEqual(get_bytes(mixer.Sound(buffer=sample)), sample)
            self.assertEqual(get_bytes(mixer.Sound(sample)), sample)
            self.assertEqual(get_bytes(mixer.Sound(file=bwave_path)), snd_bytes)
            self.assertEqual(get_bytes(mixer.Sound(bwave_path)), snd_bytes)
            # array= copies from another Sound but cannot be combined with
            # any other source argument.
            snd = mixer.Sound(wave_path)
            try:
                mixer.Sound(wave_path, array=snd)
            except TypeError:
                self.assertEqual(str(geterror()), arg_emsg)
            else:
                self.fail("no exception")
            try:
                mixer.Sound(buffer=sample, array=snd)
            except TypeError:
                self.assertEqual(str(geterror()), arg_emsg)
            else:
                self.fail("no exception")
            snd2 = mixer.Sound(array=snd)
            self.assertEqual(snd.get_raw(), snd2.get_raw())
        finally:
            mixer.quit()
    def test_array_keyword(self):
        """Sound(array=...) must accept every numpy dtype/channel layout
        compatible with the current mixer format and reject the rest,
        driven through all (format, channels) mixer configurations."""
        # If we don't have a real sound card don't do this test because it will fail.
        if os.environ.get('SDL_AUDIODRIVER') == 'disk':
            return
        try:
            from numpy import (array, arange, zeros,
                               int8, uint8,
                               int16, uint16,
                               int32, uint32)
        except ImportError:
            return
        freq = 22050
        format_list = [-8, 8, -16, 16]
        channels_list = [1, 2]
        # For each mixer format, collect the (channels, array) samples that
        # should convert: unsigned arrays for positive (unsigned) formats,
        # signed arrays for negative (signed) formats.
        a_lists = dict((f, []) for f in format_list)
        a32u_mono = arange(0, 256, 1, uint32)
        a16u_mono = a32u_mono.astype(uint16)
        a8u_mono = a32u_mono.astype(uint8)
        au_list_mono = [(1, a) for a in [a8u_mono, a16u_mono, a32u_mono]]
        for format in format_list:
            if format > 0:
                a_lists[format].extend(au_list_mono)
        a32s_mono = arange(-128, 128, 1, int32)
        a16s_mono = a32s_mono.astype(int16)
        a8s_mono = a32s_mono.astype(int8)
        as_list_mono = [(1, a) for a in [a8s_mono, a16s_mono, a32s_mono]]
        for format in format_list:
            if format < 0:
                a_lists[format].extend(as_list_mono)
        # Stereo variants: the second channel mirrors the first.
        a32u_stereo = zeros([a32u_mono.shape[0], 2], uint32)
        a32u_stereo[:,0] = a32u_mono
        a32u_stereo[:,1] = 255 - a32u_mono
        a16u_stereo = a32u_stereo.astype(uint16)
        a8u_stereo = a32u_stereo.astype(uint8)
        au_list_stereo = [(2, a)
                          for a in [a8u_stereo, a16u_stereo, a32u_stereo]]
        for format in format_list:
            if format > 0:
                a_lists[format].extend(au_list_stereo)
        a32s_stereo = zeros([a32s_mono.shape[0], 2], int32)
        a32s_stereo[:,0] = a32s_mono
        a32s_stereo[:,1] = -1 - a32s_mono
        a16s_stereo = a32s_stereo.astype(int16)
        a8s_stereo = a32s_stereo.astype(int8)
        as_list_stereo = [(2, a)
                          for a in [a8s_stereo, a16s_stereo, a32s_stereo]]
        for format in format_list:
            if format < 0:
                a_lists[format].extend(as_list_stereo)
        for format in format_list:
            for channels in channels_list:
                try:
                    mixer.init(freq, format, channels)
                except pygame.error:
                    # Some formats (e.g. 16) may not be supported.
                    continue
                try:
                    __, f, c = mixer.get_init()
                    if f != format or c != channels:
                        # Some formats (e.g. -8) may not be supported.
                        continue
                    # A mono/stereo mismatch (c != channels) must fail.
                    for c, a in a_lists[format]:
                        self._test_array_argument(format, a, c == channels)
                finally:
                    mixer.quit()
    def _test_array_argument(self, format, a, test_pass):
        """Build a Sound from numpy array *a* and verify the samples
        round-trip; when *test_pass* is false the (dtype, format) pair is
        incompatible and construction must raise ValueError."""
        from numpy import array, all as all_
        try:
            snd = mixer.Sound(array=a)
        except ValueError:
            if not test_pass:
                return
            self.fail("Raised ValueError: Format %i, dtype %s" %
                      (format, a.dtype))
        if not test_pass:
            self.fail("Did not raise ValueError: Format %i, dtype %s" %
                      (format, a.dtype))
        a2 = array(snd)
        a3 = a.astype(a2.dtype)
        # Align the source values with the mixer's sample width before
        # comparing.
        lshift = abs(format) - 8 * a.itemsize
        if lshift >= 0:
            # This is asymmetric with respect to downcasting.
            a3 <<= lshift
        self.assert_(all_(a2 == a3),
                     "Format %i, dtype %s" % (format, a.dtype))
    def _test_array_interface_fail(self, a):
        """Assert that Sound(array=a) rejects incompatible array *a*."""
        self.assertRaises(ValueError, mixer.Sound, array=a)
    def test_array_interface(self):
        """A Sound exposes __array_interface__ describing its raw sample
        buffer (native-endian signed 16-bit mono here)."""
        mixer.init(22050, -16, 1)
        try:
            snd = mixer.Sound(as_bytes('\x00\x7f') * 20)
            d = snd.__array_interface__
            self.assertTrue(isinstance(d, dict))
            if pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN:
                typestr = '<i2'
            else:
                typestr = '>i2'
            self.assertEqual(d['typestr'], typestr)
            self.assertEqual(d['shape'], (20,))
            self.assertEqual(d['strides'], (2,))
            # The data pointer addresses the Sound's own sample memory and
            # is writable (second element False).
            self.assertEqual(d['data'], (snd._samples_address, False))
        finally:
            mixer.quit()
    # Only expose the new-buffer-protocol tests when this pygame build
    # supports it.
    if pygame.HAVE_NEWBUF:
        def test_newbuf(self):
            self.NEWBUF_test_newbuf()
        # Bind the buffer-protocol helpers as a class attribute, so the
        # NEWBUF_* methods can reach them as self.buftools.
        if is_pygame_pkg:
            from pygame.tests.test_utils import buftools
        else:
            from test.test_utils import buftools
    def NEWBUF_test_newbuf(self):
        """Run the buffer-export checks for both mono and stereo mixers."""
        mixer.init(22050, -16, 1)
        try:
            self.NEWBUF_export_check()
        finally:
            mixer.quit()
        mixer.init(22050, -16, 2)
        try:
            self.NEWBUF_export_check()
        finally:
            mixer.quit()
def NEWBUF_export_check(self):
freq, fmt, channels = mixer.get_init()
if channels == 1:
ndim = 1
else:
ndim = 2
itemsize = abs(fmt) // 8
formats = {8: 'B', -8: 'b',
16: '=H', -16: '=h',
32: '=I', -32: '=i', # 32 and 64 for future consideration
64: '=Q', -64: '=q'}
format = formats[fmt]
buftools = self.buftools
Exporter = buftools.Exporter
Importer = buftools.Importer
is_lil_endian = pygame.get_sdl_byteorder() == pygame.LIL_ENDIAN
fsys, frev = ('<', '>') if is_lil_endian else ('>', '<')
shape = (10, channels)[:ndim]
strides = (channels * itemsize, itemsize)[2 - ndim:]
exp = Exporter(shape, format=frev + 'i')
snd = mixer.Sound(array=exp)
buflen = len(exp) * itemsize * channels
imp = Importer(snd, buftools.PyBUF_SIMPLE)
self.assertEqual(imp.ndim, 0)
self.assertTrue(imp.format is None)
self.assertEqual(imp.len, buflen)
self.assertEqual(imp.itemsize, itemsize)
self.assertTrue(imp.shape is None)
self.assertTrue(imp.strides is None)
self.assertTrue(imp.suboffsets is None)
self.assertFalse(imp.readonly)
self.assertEqual(imp.buf, snd._samples_address)
imp = Importer(snd, buftools.PyBUF_WRITABLE)
self.assertEqual(imp.ndim, 0)
self.assertTrue(imp.format is None)
self.assertEqual(imp.len, buflen)
self.assertEqual(imp.itemsize, itemsize)
self.assertTrue(imp.shape is None)
self.assertTrue(imp.strides is None)
self.assertTrue(imp.suboffsets is None)
self.assertFalse(imp.readonly)
self.assertEqual(imp.buf, snd._samples_address)
imp = Importer(snd, buftools.PyBUF_FORMAT)
self.assertEqual(imp.ndim, 0)
self.assertEqual(imp.format, format)
self.assertEqual(imp.len, buflen)
self.assertEqual(imp.itemsize, itemsize)
self.assertTrue(imp.shape is None)
self.assertTrue(imp.strides is None)
self.assertTrue(imp.suboffsets is None)
self.assertFalse(imp.readonly)
self.assertEqual(imp.buf, snd._samples_address)
imp = Importer(snd, buftools.PyBUF_ND)
self.assertEqual(imp.ndim, ndim)
self.assertTrue(imp.format is None)
self.assertEqual(imp.len, buflen)
self.assertEqual(imp.itemsize, itemsize)
self.assertEqual(imp.shape, shape)
self.assertTrue(imp.strides is None)
self.assertTrue(imp.suboffsets is None)
self.assertFalse(imp.readonly)
self.assertEqual(imp.buf, snd._samples_address)
imp = Importer(snd, buftools.PyBUF_STRIDES)
self.assertEqual(imp.ndim, ndim)
self.assertTrue(imp.format is None)
self.assertEqual(imp.len, buflen)
self.assertEqual(imp.itemsize, itemsize)
self.assertEqual(imp.shape, shape)
self.assertEqual(imp.strides, strides)
self.assertTrue(imp.suboffsets is None)
self.assertFalse(imp.readonly)
self.assertEqual(imp.buf, snd._samples_address)
imp = Importer(snd, buftools.PyBUF_FULL_RO)
self.assertEqual(imp.ndim, ndim)
self.assertEqual(imp.format, format)
self.assertEqual(imp.len, buflen)
self.assertEqual(imp.itemsize, 2)
self.assertEqual(imp.shape, shape)
self.assertEqual(imp.strides, strides)
self.assertTrue(imp.suboffsets is None)
self.assertFalse(imp.readonly)
self.assertEqual(imp.buf, snd._samples_address)
imp = Importer(snd, buftools.PyBUF_FULL_RO)
self.assertEqual(imp.ndim, ndim)
self.assertEqual(imp.format, format)
self.assertEqual(imp.len, buflen)
self.assertEqual(imp.itemsize, itemsize)
self.assertEqual(imp.shape, exp.shape)
self.assertEqual(imp.strides, strides)
self.assertTrue(imp.suboffsets is None)
self.assertFalse(imp.readonly)
self.assertEqual(imp.buf, snd._samples_address)
imp = Importer(snd, buftools.PyBUF_C_CONTIGUOUS)
self.assertEqual(imp.ndim, ndim)
self.assertTrue(imp.format is None)
self.assertEqual(imp.strides, strides)
imp = Importer(snd, buftools.PyBUF_ANY_CONTIGUOUS)
self.assertEqual(imp.ndim, ndim)
self.assertTrue(imp.format is None)
self.assertEqual(imp.strides, strides)
if (ndim == 1):
imp = Importer(snd, buftools.PyBUF_F_CONTIGUOUS)
self.assertEqual(imp.ndim, 1)
self.assertTrue(imp.format is None)
self.assertEqual(imp.strides, strides)
else:
self.assertRaises(BufferError, Importer, snd,
buftools.PyBUF_F_CONTIGUOUS)
def test_get_raw(self):
from ctypes import pythonapi, c_void_p, py_object
try:
Bytes_FromString = pythonapi.PyBytes_FromString
except:
Bytes_FromString = pythonapi.PyString_FromString
Bytes_FromString.restype = c_void_p
Bytes_FromString.argtypes = [py_object]
mixer.init()
try:
samples = as_bytes('abcdefgh') # keep byte size a multiple of 4
snd = mixer.Sound(buffer=samples)
raw = snd.get_raw()
self.assertTrue(isinstance(raw, bytes_))
self.assertNotEqual(snd._samples_address, Bytes_FromString(samples))
self.assertEqual(raw, samples)
finally:
mixer.quit()
def todo_test_fadeout(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.fadeout:
# pygame.mixer.fadeout(time): return None
# fade out the volume on all sounds before stopping
#
# This will fade out the volume on all active channels over the time
# argument in milliseconds. After the sound is muted the playback will
# stop.
#
self.fail()
def todo_test_find_channel(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.find_channel:
# pygame.mixer.find_channel(force=False): return Channel
# find an unused channel
#
# This will find and return an inactive Channel object. If there are
# no inactive Channels this function will return None. If there are no
# inactive channels and the force argument is True, this will find the
# Channel with the longest running Sound and return it.
#
# If the mixer has reserved channels from pygame.mixer.set_reserved()
# then those channels will not be returned here.
#
self.fail()
def todo_test_get_busy(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.get_busy:
# pygame.mixer.get_busy(): return bool
# test if any sound is being mixed
#
# Returns True if the mixer is busy mixing any channels. If the mixer
# is idle then this return False.
#
self.fail()
def todo_test_init(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.init:
# pygame.mixer.init(frequency=22050, size=-16, channels=2,
# buffer=3072): return None
#
# initialize the mixer module
#
# Initialize the mixer module for Sound loading and playback. The
# default arguments can be overridden to provide specific audio
# mixing. The size argument represents how many bits are used for each
# audio sample. If the value is negative then signed sample values
# will be used. Positive values mean unsigned audio samples will be
# used.
#
# The channels argument is used to specify whether to use mono or
# stereo. 1 for mono and 2 for stereo. No other values are supported.
#
# The buffer argument controls the number of internal samples used in
# the sound mixer. The default value should work for most cases. It
# can be lowered to reduce latency, but sound dropout may occur. It
# can be raised to larger values to ensure playback never skips, but
# it will impose latency on sound playback. The buffer size must be a
# power of two.
#
# Some platforms require the pygame.mixer module to be initialized
# after the display modules have initialized. The top level
# pygame.init() takes care of this automatically, but cannot pass any
# arguments to the mixer init. To solve this, mixer has a function
# pygame.mixer.pre_init() to set the proper defaults before the
# toplevel init is used.
#
# It is safe to call this more than once, but after the mixer is
# initialized you cannot change the playback arguments without first
# calling pygame.mixer.quit().
#
self.fail()
def todo_test_pause(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.pause:
# pygame.mixer.pause(): return None
# temporarily stop playback of all sound channels
#
# This will temporarily stop all playback on the active mixer
# channels. The playback can later be resumed with
# pygame.mixer.unpause()
#
self.fail()
def todo_test_pre_init(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.pre_init:
# pygame.mixer.pre_init(frequency=0, size=0, channels=0,
# buffersize=0): return None
#
# preset the mixer init arguments
#
# Any nonzero arguments change the default values used when the real
# pygame.mixer.init() is called. The best way to set custom mixer
# playback values is to call pygame.mixer.pre_init() before calling
# the top level pygame.init().
#
self.fail()
def todo_test_set_reserved(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.set_reserved:
# pygame.mixer.set_reserved(count): return None
# reserve channels from being automatically used
#
# The mixer can reserve any number of channels that will not be
# automatically selected for playback by Sounds. If sounds are
# currently playing on the reserved channels they will not be stopped.
#
# This allows the application to reserve a specific number of channels
# for important sounds that must not be dropped or have a guaranteed
# channel to play on.
#
self.fail()
def todo_test_stop(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.stop:
# pygame.mixer.stop(): return None
# stop playback of all sound channels
#
# This will stop all playback of all active mixer channels.
self.fail()
def todo_test_unpause(self):
# __doc__ (as of 2008-08-02) for pygame.mixer.unpause:
# pygame.mixer.unpause(): return None
# resume paused playback of sound channels
#
# This will resume all active sound channels after they have been paused.
self.fail()
############################## CHANNEL CLASS TESTS #############################
class ChannelTypeTest(unittest.TestCase):
    """Placeholder tests for pygame.mixer.Channel.

    Every method below is a stub: the ``todo_`` prefix keeps the standard
    runner from collecting it, its docstring records the documented
    contract of the corresponding Channel API, and it fails until a real
    test replaces it.
    """

    def todo_test_Channel(self):
        """Channel(id) -> Channel: wrap one of the mixer's channels
        (id in 0..get_num_channels()) for fine-grained playback control;
        a channel plays at most one Sound at a time."""
        self.fail()

    def todo_test_fadeout(self):
        """Channel.fadeout(time) -> None: fade this channel's sound out
        over *time* milliseconds, then stop playback."""
        self.fail()

    def todo_test_get_busy(self):
        """Channel.get_busy() -> bool: True while the channel is actively
        mixing sound, False when idle."""
        self.fail()

    def todo_test_get_endevent(self):
        """Channel.get_endevent() -> type: the event type posted when
        playback stops, or pygame.NOEVENT if none is set."""
        self.fail()

    def todo_test_get_queue(self):
        """Channel.get_queue() -> Sound: the queued Sound, if any; a
        sound leaves the queue once it starts playing."""
        self.fail()

    def todo_test_get_sound(self):
        """Channel.get_sound() -> Sound: the Sound currently playing on
        this channel, or None when idle."""
        self.fail()

    def todo_test_get_volume(self):
        """Channel.get_volume() -> value: the channel volume for the
        current sound, not accounting for the stereo separation set via
        Channel.set_volume(left, right); combines with the Sound's own
        volume when mixed."""
        self.fail()

    def todo_test_pause(self):
        """Channel.pause() -> None: temporarily stop playback on this
        channel; resume later with Channel.unpause()."""
        self.fail()

    def todo_test_play(self):
        """Channel.play(Sound, loops=0, maxtime=0, fade_ms=0) -> None:
        play a Sound on this specific channel, stopping whatever it was
        playing; loops is the repeat count after the first play (-1 =
        forever), maxtime cuts playback off after that many milliseconds,
        fade_ms fades the sound in -- all as in Sound.play()."""
        self.fail()

    def todo_test_queue(self):
        """Channel.queue(Sound) -> None: schedule one Sound to start when
        the current one finishes naturally; it plays immediately on an
        idle channel and is cleared by any stop()/play() call."""
        self.fail()

    def todo_test_set_endevent(self):
        """Channel.set_endevent([type]) -> None: post *type* to the event
        queue each time playback finishes on this channel (and
        immediately if stop()/play() interrupts it); for play(n) the
        event arrives once, after n+1 plays.  With no argument, stop
        sending end events."""
        self.fail()

    def todo_test_set_volume(self):
        """Channel.set_volume(value) / set_volume(left, right) -> None:
        set the playing channel's volume (0.0-1.0); the two-argument form
        sets left/right speakers in stereo mode.  Multiplies with
        Sound.set_volume() (e.g. 0.6 * 0.5 = 30%); reset when the channel
        starts a new sound."""
        self.fail()

    def todo_test_stop(self):
        """Channel.stop() -> None: stop playback, leaving the channel
        free for new sounds."""
        self.fail()

    def todo_test_unpause(self):
        """Channel.unpause() -> None: resume playback on a paused
        channel."""
        self.fail()
############################### SOUND CLASS TESTS ##############################
class SoundTypeTest(unittest.TestCase):
    """Placeholder tests for pygame.mixer.Sound.

    Each stub documents the contract of one Sound method and fails until
    a real test replaces it (the ``todo_`` prefix hides it from the
    default collector).
    """

    def todo_test_fadeout(self):
        """Sound.fadeout(time) -> None: fade the sound out over *time*
        milliseconds on every channel playing it, then stop."""
        self.fail()

    def todo_test_get_length(self):
        """Sound.get_length() -> seconds: the duration of this Sound."""
        self.fail()

    def todo_test_get_num_channels(self):
        """Sound.get_num_channels() -> count: how many active channels
        are currently playing this sound."""
        self.fail()

    def todo_test_get_volume(self):
        """Sound.get_volume() -> value: the playback volume for this
        Sound, 0.0-1.0."""
        self.fail()

    def todo_test_play(self):
        """Sound.play(loops=0, maxtime=0, fade_ms=0) -> Channel: start
        playback on an available channel, forcibly taking one (possibly
        cutting off another sound) if needed.  loops is the repeat count
        after the first play (-1 = loop forever), maxtime stops playback
        after that many milliseconds, fade_ms ramps the volume up from
        zero; returns the Channel selected."""
        self.fail()

    def todo_test_set_volume(self):
        """Sound.set_volume(value) -> None: set the playback volume
        (0.0-1.0), affecting current and future playback of this
        Sound."""
        self.fail()

    def todo_test_stop(self):
        """Sound.stop() -> None: stop this sound on all active
        channels."""
        self.fail()
##################################### MAIN #####################################
# Run the whole mixer test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "2d3aedb7aecf2553c1643b2ef07949ba",
"timestamp": "",
"source": "github",
"line_count": 1035,
"max_line_length": 85,
"avg_line_length": 37.215458937198065,
"alnum_prop": 0.5646451009917441,
"repo_name": "motion3/dino_rumble",
"id": "c9a08d0b228fa1fdd53487b8fdc2980e3fe1ae0f",
"size": "38518",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "pygame/tests/mixer_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "9659"
},
{
"name": "Java",
"bytes": "5793"
},
{
"name": "Python",
"bytes": "1429560"
}
],
"symlink_target": ""
} |
import json
import six
from six.moves.urllib.parse import urlencode
def _totext(val):
    """
    Py2/Py3 compatible coercion to the official "text type" (unicode in
    Py2, str in Py3).  Binary strings are decoded as UTF-8; text values
    and non-string objects are returned unchanged.
    """
    if isinstance(val, six.binary_type):
        return val.decode('utf-8')
    if isinstance(val, six.text_type):
        return val
    return val
def _tobytes(val):
    """
    Py2/Py3 compatible coercion to the official "binary type" (str in
    Py2, bytes in Py3).  Text is UTF-8 encoded; any other object is
    converted to text first, then encoded.
    """
    if isinstance(val, six.binary_type):
        return val
    if not isinstance(val, six.text_type):
        val = six.text_type(val)
    return val.encode('utf-8')
def _unicode_urlencode(params):
    """
    URL-encode *params* (a dict or a list of key/value pairs), JSON
    encoding any list values and UTF-8 encoding unicode keys/values.
    Note: when given a list, list-valued entries are replaced in place.
    """
    if isinstance(params, dict):
        params = list(six.iteritems(params))
    for index, pair in enumerate(params):
        if isinstance(pair[1], list):
            params[index] = (pair[0], json.dumps(pair[1]),)
    encoded_pairs = [(_tobytes(key), _tobytes(value)) for key, value in params]
    return urlencode(encoded_pairs)
| {
"content_hash": "55fd8874da34f543604a051ea0d92fe3",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 92,
"avg_line_length": 29.979591836734695,
"alnum_prop": 0.6446562287270252,
"repo_name": "cooncesean/mixpanel-query-py",
"id": "f5cf1f4af9b36db3200714d0b716389c45961ed2",
"size": "1469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mixpanel_query/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "45081"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from frog.models import (
Gallery,
Image,
Video,
Tag,
UserPref,
GallerySubscription,
VideoQueue,
ReleaseNotes,
Marmoset,
Group,
SiteConfig,
Badge)
class GalleryAdmin(admin.ModelAdmin):
    # Columns shown in the gallery change list.
    list_display = ("title", "parent", "owner", "security")
class ImageAdmin(admin.ModelAdmin):
    # Columns shown in the image change list; thumbnail_tag renders a preview.
    list_display = ("title", "guid", "author", "thumbnail_tag")
class VideoAdmin(admin.ModelAdmin):
    """Admin for Video assets with a bulk action to (re)queue encoding."""

    # Columns shown in the video change list; thumbnail_tag renders a preview.
    list_display = (
        "title",
        "guid",
        "author",
        "width",
        "height",
        "thumbnail_tag",
    )
    actions = ["queue"]

    def queue(self, request, queryset):
        """Admin action: add each selected video to the encode queue.

        get_or_create makes the action idempotent -- already-queued
        videos are not duplicated.
        """
        for obj in queryset:
            queuedvideo = VideoQueue.objects.get_or_create(video=obj)[0]
            queuedvideo.save()

    # Human-readable label shown in the admin "Action" dropdown.
    queue.short_description = "Add selected videos to the encode queue"
class TagAdmin(admin.ModelAdmin):
    # Columns shown in the tag change list.
    list_display = ("name", "parent")
    # Sidebar filter on the boolean "artist" flag.
    list_filter = ("artist",)
class GallerySubscriptionAdmin(admin.ModelAdmin):
    # Columns shown in the subscription change list.
    list_display = ("user", "gallery", "frequency")
    # Sidebar filter by notification frequency.
    list_filter = ("frequency",)
class VideoQueueAdmin(admin.ModelAdmin):
    # Columns shown in the encode-queue change list.
    list_display = ("video", "status")
    # Sidebar filter by queue status.
    list_filter = ("status",)
class GroupAdmin(admin.ModelAdmin):
    # "child_count" is the computed column defined below.
    list_display = ("title", "child_count")
    def child_count(self, obj):
        # Number of items grouped under this Group, rendered as text.
        return str(len(obj.children))
# Hook every model into the default admin site; models without a custom
# ModelAdmin get the stock one.
admin.site.register(Gallery, GalleryAdmin)
admin.site.register(Image, ImageAdmin)
admin.site.register(Video, VideoAdmin)
admin.site.register(Tag, TagAdmin)
admin.site.register(UserPref)
admin.site.register(GallerySubscription, GallerySubscriptionAdmin)
admin.site.register(VideoQueue, VideoQueueAdmin)
admin.site.register(ReleaseNotes)
admin.site.register(Group, GroupAdmin)
admin.site.register(Marmoset)
admin.site.register(SiteConfig)
admin.site.register(Badge)
| {
"content_hash": "1f5262d171a630b19ce5e2672b119e71",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 72,
"avg_line_length": 23.670886075949365,
"alnum_prop": 0.6823529411764706,
"repo_name": "theiviaxx/Frog",
"id": "14d82244fc71151f52f4082c157ff4f9c8bd3399",
"size": "3156",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frog/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "22225"
},
{
"name": "JavaScript",
"bytes": "57292"
},
{
"name": "Python",
"bytes": "215494"
}
],
"symlink_target": ""
} |
from __future__ import with_statement
import urlparse
import urllib2
import time
import shutil
import os
from os import path as p
try:
import json
except ImportError:
import simplejson as json
# download.json lives next to this script; destination paths inside it
# are resolved relative to its directory (basedir).
scriptdir = p.dirname(__file__)
sourcefile = p.join(scriptdir, "download.json")
basedir = p.dirname(p.abspath(sourcefile))
def download(match=None, usemodtime=True):
    """Fetch the resource groups listed in download.json (Python 2 code).

    match -- either an exact group name (download only that group) or a
        substring filter applied to individual resource URLs.
    usemodtime -- when true, send If-Modified-Since for files that already
        exist instead of skipping them outright.  NOTE(review): the value
        is passed through _unrepr, which eval()s strings -- presumably so
        "True"/"False" can arrive from a task runner; do not feed it
        untrusted input.
    """
    usemodtime = _unrepr(usemodtime)
    with open(sourcefile) as f:
        downloads = json.load(f)
    if match and match in downloads:
        # An exact group name was given: restrict to that group and
        # disable URL substring filtering.
        groupname = match
        match = None
    else:
        groupname = None
    for name, group in downloads.items():
        if groupname and groupname != name:
            continue
        print "[Group: %s]" % name
        base = group.get('base')
        destdir = p.normpath(p.join(basedir, group.get('destination', "")))
        headers = (group.get('headers') or {}).items()
        download = group['download']
        # Resolve each URL against the group's base and pair it with its
        # destination path.
        resources = [(urlparse.urljoin(base, url), p.join(destdir, dest))
                     for url, dest in download.items()]
        for url, dest in resources:
            if match and match not in url:
                continue
            _http_get(url, dest, usemodtime=usemodtime, headers=headers)
# Turn a string into the Python value it spells (e.g. "True" -> True);
# non-strings pass through.  NOTE(review): uses eval() -- never call this
# with untrusted input.
_unrepr = lambda v: eval(v) if isinstance(v, str) else v
def _http_get(url, dest, usemodtime=True, headers=()):
if p.isdir(dest):
dest = p.join(dest, url.rsplit('/', 1)[-1])
print "Downloading <%(url)s> to <%(dest)s> ..."%vars(),
req = urllib2.Request(url)
for header, value in headers:
req.add_header(header, value)
if p.exists(dest):
if not usemodtime:
print "Destination exists, skipping."
return
modstamp = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
time.gmtime(os.stat(dest).st_mtime))
req.add_header('If-Modified-Since', modstamp)
try:
res = urllib2.urlopen(req)
except Exception, e:
print e
return
print res.info()
print "Done."
with file(dest, 'w') as out:
shutil.copyfileobj(res, out)
| {
"content_hash": "7fa67c73578ea8c5387f759c58cb1206",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 75,
"avg_line_length": 30.75,
"alnum_prop": 0.599713055954089,
"repo_name": "rinfo/rdl",
"id": "07d1915b5ad7fdd0e7ab19e4e81d29381527692c",
"size": "2091",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage/project/download.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "29127"
},
{
"name": "Groovy",
"bytes": "485442"
},
{
"name": "HTML",
"bytes": "248599"
},
{
"name": "Java",
"bytes": "293050"
},
{
"name": "JavaScript",
"bytes": "176236"
},
{
"name": "Perl",
"bytes": "4959"
},
{
"name": "Python",
"bytes": "121584"
},
{
"name": "Shell",
"bytes": "20714"
},
{
"name": "XSLT",
"bytes": "41154"
}
],
"symlink_target": ""
} |
# cx_Freeze packaging script for the "cidadeBela" application: builds a
# Win32 GUI executable from cidadeBela.pyw with a custom icon.
# NOTE(review): source and icon paths are hard-coded to one developer's
# machine; the commented-out lines look like leftovers from earlier build
# attempts.
from cx_Freeze import setup, Executable
#import cx_Freeze
setup(
      name = "cidadeBela",
      author = "Paulo Mateus, Gabriela Valentim, Samara Oliveira",
      author_email = "mateus.moura@hotmail.com",
      #url = "http://cx-oracletools.sourceforge.net",
      version = "0.3",
      description = "Programa cidadeBela",
      long_description = "Trabalho dos P4 de informática",
      license = "Leia LICENSE.txt",
      #build_exe = "C:\\Documents and Settings\\Mateus\\Desktop\\cidadeBela\\Programa",
      #target_dir = "C:\\Documents and Settings\\Mateus\\Desktop\\cidadeBela\\Programa",
      #cmdclass = dict(build_exe = build_exe),
      #options = ['Teste' = 'Teste'],
      executables = [Executable(
            #"C:\\Documents and Settings\\Mateus\\Desktop\\cidadeBela\\Trabalho\\cidadeBela\\janelaGrafica.py",
            "C:\\Documents and Settings\\Mateus\\Desktop\\cidadeBela\\Trabalho\\trabalho\\cidadeBela.pyw",
            #"E:\\Programação\\trabalho\\cidadeBela.pyw",
            base = "Win32GUI",
            icon = "C:\\Documents and Settings\\Mateus\\Desktop\\cidadeBela\\Trabalho\\cidadeBela\\icone.ico")]
            #shortcutName = "cidadeBela.ico",
            #shortcutDir = "C:\\Documents and Settings\\Mateus\\Desktop\\cidadeBela\\Trabalho\\cidadeBela\\")]
      #build_exe = "C:\\Documents and Settings\\Mateus\\Desktop\\cidadeBela\\Programa")]
)
"content_hash": "a993011013caa67c869c23353ff1f8bd",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 119,
"avg_line_length": 44.90625,
"alnum_prop": 0.6214335421016005,
"repo_name": "SrMouraSilva/Academic-Projects",
"id": "2d0ad3e093cd34160a44fd5f3bc6db9d2c8eac86",
"size": "1502",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Técnico/P3/Introdução à programação/CidadeBela/Programa/Executável/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "20"
},
{
"name": "C",
"bytes": "165353"
},
{
"name": "Java",
"bytes": "78856"
},
{
"name": "Makefile",
"bytes": "5182"
},
{
"name": "Python",
"bytes": "173791"
},
{
"name": "Ruby",
"bytes": "19695"
},
{
"name": "Tcl",
"bytes": "1365582"
}
],
"symlink_target": ""
} |
import argparse
import os
import sys
from google.datacatalog_connectors.rdbms import\
datacatalog_cli
from .scrape import metadata_scraper
class SQLServer2DatacatalogCli(datacatalog_cli.DatacatalogCli):
    """CLI for the SQLServer-to-Google-Data-Catalog connector.

    Specializes the generic RDBMS connector CLI with the SQLServer
    scraper, SQLServer-specific argument names, and the bundled config
    file locations.
    """
    def _get_metadata_scraper(self):
        """Return the scraper class used to read SQLServer metadata."""
        return metadata_scraper.MetadataScraper
    def _get_host_arg(self, args):
        """Return the database host from the parsed args."""
        return args.sqlserver_host
    def _get_connection_args(self, args):
        """Map parsed args onto the scraper's connection-argument dict."""
        return {
            'database': args.sqlserver_database,
            'host': args.sqlserver_host,
            'user': args.sqlserver_user,
            'pass': args.sqlserver_pass
        }
    def _get_entry_group_id(self, args):
        """Entry group for created entries; defaults to 'sqlserver'."""
        return args.datacatalog_entry_group_id or 'sqlserver'
    def _get_metadata_definition_path(self):
        """Absolute path of the bundled metadata definition JSON."""
        return os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'config/metadata_definition.json')
    def _get_query_path(self, args):
        """Absolute path of the bundled metadata-scraping SQL query."""
        return os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'config/metadata_query.sql')
    def _get_connector_config_path(self):
        """Absolute path of the directory holding the connector config."""
        return os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'config')
    def _parse_args(self, argv):
        """Define and parse the connector's command-line arguments."""
        parser = argparse.ArgumentParser(
            description='Command line to sync sqlserver '
            'metadata to Datacatalog')
        parser.add_argument('--datacatalog-project-id',
                            help='Your Google Cloud project ID',
                            required=True)
        parser.add_argument(
            '--datacatalog-location-id',
            help='Location ID to be used for your Google Cloud Datacatalog',
            required=True)
        parser.add_argument('--datacatalog-entry-group-id',
                            help='Entry group ID to be used for your Google '
                            'Cloud Datacatalog')
        parser.add_argument('--datacatalog-entry-resource-url-prefix',
                            help='Entry resource URL prefix '
                            'used in the ingested Data Catalog Entries')
        parser.add_argument(
            '--sqlserver-host',
            help='Your sqlserver server host, this is required even'
            ' for the raw_metadata_csv,'
            ' so we are able to map the created entries'
            ' resource with the sqlserver host',
            required=True)
        parser.add_argument('--sqlserver-user',
                            help='Your sqlserver credentials user')
        parser.add_argument('--sqlserver-pass',
                            help='Your sqlserver credentials password')
        parser.add_argument('--sqlserver-database',
                            help='Your sqlserver database name')
        parser.add_argument(
            '--raw-metadata-csv',
            help='Your raw metadata as a csv file, '
            'can be either a local os GCS '
            'path (If supplied ignores the sqlserver server credentials)')
        parser.add_argument('--service-account-path',
                            help='Local Service Account path '
                            '(Can be suplied as '
                            'GOOGLE_APPLICATION_CREDENTIALS env '
                            'var)')
        parser.add_argument('--enable-monitoring',
                            help='Enables monitoring metrics on the connector')
        return parser.parse_args(argv)
def main():
    """Console-script entry point: run the connector CLI with the
    command-line arguments, minus the program name."""
    argv = sys.argv
    cli_argv = argv[1:] if len(argv) > 0 else argv
    SQLServer2DatacatalogCli().run(cli_argv)
| {
"content_hash": "8e555ebf938878bb83961cae8a520705",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 79,
"avg_line_length": 39.943820224719104,
"alnum_prop": 0.5656821378340365,
"repo_name": "GoogleCloudPlatform/datacatalog-connectors-rdbms",
"id": "a54ed7cbd25a7c698fe6a43d5761ca5dd5ec15f9",
"size": "4151",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "google-datacatalog-sqlserver-connector/src/google/datacatalog_connectors/sqlserver/datacatalog_cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "9250"
},
{
"name": "Python",
"bytes": "457511"
},
{
"name": "Shell",
"bytes": "19222"
}
],
"symlink_target": ""
} |
from new import instancemethod
import hashlib
import cPickle
from twisted.internet.defer import succeed, maybeDeferred, inlineCallbacks
from txweb2.dav.util import allDataFromStream
from txweb2.stream import MemoryStream
from txweb2.http_headers import Headers
from twistedcaldav.cache import MemcacheResponseCache, CacheStoreNotifier
from twistedcaldav.cache import MemcacheChangeNotifier
from twistedcaldav.cache import PropfindCacheMixin
from twistedcaldav.test.util import InMemoryMemcacheProtocol
from twistedcaldav.test.util import TestCase
from txdav.xml import element
def _newCacheToken(self):
called = getattr(self, '_called', 0)
token = 'token%d' % (called,)
setattr(self, '_called', called + 1)
return token
class StubDirectoryRecord(object):
    """Minimal directory-record double, identified solely by its uid."""

    def __init__(self, uid):
        self.uid = uid

    def cacheToken(self):
        """
        Generate a token that can be uniquely used to identify the state of this record for use
        in a cache.
        """
        return hash((type(self).__name__, self.uid))
class StubDirectory(object):
    """Fake directory service: every short-name lookup yields a stub record."""

    def oldNameToRecordType(self, oldName):
        # Identity mapping -- the old name already is the record type here.
        return oldName

    def recordWithShortName(self, recordType, recordName):
        # The record type is ignored; any name resolves to a fresh record.
        return StubDirectoryRecord(recordName)
class StubSiteResource(object):
    """Fake site root resource handing out a StubDirectory."""

    def __init__(self):
        self.directory = StubDirectory()

    def getDirectory(self):
        # Always the same directory instance created at construction time.
        return self.directory
class StubSite(object):
    """Fake site object exposing a single stub root resource."""

    def __init__(self):
        self.resource = StubSiteResource()
class StubPrincipal(object):
    """Fake authenticated principal wrapping a principal-URL string."""

    def __init__(self, user):
        self.user = user

    def principalURL(self):
        # The stub's "URL" is simply the raw string it was built with.
        return self.user

    def principalElement(self):
        # WebDAV <principal><href>...</href></principal> for the user.
        return element.Principal(element.HRef.fromString(self.user))
class StubRequest(object):
    """Fake HTTP request carrying just the attributes the cache code reads."""

    # Shared uri -> resource table; tests populate and clear it per case.
    resources = {}

    def __init__(self, method, uri, authnUser, depth='1', body=None):
        if body is None:
            body = "foobar"
        self.method = method
        self.uri = uri
        self.authnUser = StubPrincipal(authnUser)
        self.headers = Headers({'depth': depth})
        self.body = body
        self.stream = MemoryStream(body)
        self.site = StubSite()

    def locateResource(self, uri):
        assert uri[0] == '/', "URI path didn't begin with '/': %s" % (uri,)
        # Deferred lookup against the class-level resource table; unknown
        # URIs resolve to None.
        return succeed(self.resources.get(uri))
class StubResponse(object):
    """Fake HTTP response with a memory-backed body stream."""

    def __init__(self, code, headers, body):
        self.code = code
        self.body = body
        self.headers = Headers(headers)
        self.stream = MemoryStream(body)
class StubURLResource(object):
    """Fake resource that knows its URL and, optionally, a directory record.

    The 'record' attribute is only present when one was supplied, matching
    real resources where only principals carry directory records.
    """

    def __init__(self, url, record=None):
        self._url = url
        if record is not None:
            self.record = record

    def url(self):
        return self._url
class MemCacheChangeNotifierTests(TestCase):
    """Tests for MemcacheChangeNotifier: its cache token is stored in
    memcache and replaced every time changed() fires."""
    def setUp(self):
        TestCase.setUp(self)
        self.memcache = InMemoryMemcacheProtocol()
        self.ccn = MemcacheChangeNotifier(
            StubURLResource(':memory:'),
            cachePool=self.memcache)
        # Replace token generation with the deterministic counter defined
        # above so tests can assert exact values ('token0', 'token1', ...).
        self.ccn._newCacheToken = instancemethod(_newCacheToken,
                                                 self.ccn,
                                                 MemcacheChangeNotifier)
    def assertToken(self, expectedToken):
        """Assert the token stored for the ':memory:' resource in the fake
        memcache equals expectedToken."""
        token = self.memcache._cache['cacheToken::memory:'][1]
        self.assertEquals(token, expectedToken)
    def test_cacheTokenPropertyIsProvisioned(self):
        """The first changed() call provisions the initial token."""
        d = self.ccn.changed()
        d.addCallback(lambda _: self.assertToken('token0'))
        return d
    def test_changedChangesToken(self):
        """Each subsequent changed() call replaces the stored token."""
        d = self.ccn.changed()
        d.addCallback(lambda _: self.ccn.changed())
        d.addCallback(lambda _: self.assertToken('token1'))
        return d
    def tearDown(self):
        # Cancel the fake memcache's pending expiration timers so the
        # reactor is left clean, then drop the cached protocol reference.
        for call in self.memcache._timeouts.itervalues():
            call.cancel()
        MemcacheChangeNotifier._memcacheProtocol = None
class BaseCacheTestMixin(object):
    """Cache-behaviour tests shared by response cache implementations.

    Subclasses must provide:
      self.rc -- the response cache under test,
      self.tokens -- a uri -> token dict consulted by the patched
        _tokenForURI, and
      self.expected_response -- the (code, headers, body) tuple pre-seeded
        in the cache for cdaboo's PROPFIND request.
    """
    def setUp(self):
        # uri -> resource table used by StubRequest.locateResource.  The
        # principal resources carry directory records whose cache tokens
        # take part in cache validation; the /calendars/users/... entry is
        # an alias (multi-homed URI) for cdaboo's canonical calendar home.
        StubRequest.resources = {
            '/calendars/__uids__/cdaboo/': StubURLResource(
                '/calendars/__uids__/cdaboo/'),
            '/calendars/users/cdaboo/': StubURLResource(
                '/calendars/__uids__/cdaboo/'),
            '/principals/__uids__/cdaboo/': StubURLResource(
                '/principals/__uids__/cdaboo/', record=StubDirectoryRecord('cdaboo')),
            '/calendars/__uids__/dreid/': StubURLResource(
                '/calendars/__uids__/dreid/'),
            '/principals/__uids__/dreid/': StubURLResource(
                '/principals/__uids__/dreid/', record=StubDirectoryRecord('dreid'))}
    def tearDown(self):
        StubRequest.resources = {}
    def assertResponse(self, response, expected):
        """Assert response matches the (code, headers, body) tuple expected;
        returns a Deferred that compares the streamed body."""
        self.assertNotEquals(response, None, "Got None instead of a response.")
        self.assertEquals(response.code, expected[0])
        self.assertEquals(set(response.headers.getAllRawHeaders()),
                          set(expected[1].getAllRawHeaders()))
        d = allDataFromStream(response.stream)
        d.addCallback(self.assertEquals, expected[2])
        return d
    def test_getResponseForRequestMultiHomedRequestURI(self):
        """A request arriving via an aliased request URI is a cache miss."""
        request = StubRequest(
            'PROPFIND',
            '/calendars/users/cdaboo/',
            '/principals/__uids__/cdaboo/')
        d = self.rc.getResponseForRequest(request)
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForRequestURINotFound(self):
        """A request URI with no resource is a cache miss."""
        request = StubRequest(
            'PROPFIND',
            '/calendars/__uids__/wsanchez/',
            '/calendars/__uids__/dreid/')
        d = self.rc.getResponseForRequest(request)
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForRequestMultiHomedPrincipalURI(self):
        """An aliased principal URI is a cache miss."""
        request = StubRequest(
            'PROPFIND',
            '/calendars/__uids__/cdaboo/',
            '/principals/users/cdaboo/')
        d = self.rc.getResponseForRequest(request)
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForRequestNotInCache(self):
        """A request that was never cached is a cache miss."""
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/dreid/',
                '/principals/__uids__/dreid/'))
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForRequestInCache(self):
        """The pre-seeded request is served from the cache."""
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '/principals/__uids__/cdaboo/'))
        d.addCallback(self.assertResponse, self.expected_response)
        return d
    def test_getResponseForRequestPrincipalTokenChanged(self):
        """Changing the principal's token invalidates the cached entry."""
        self.tokens['/principals/__uids__/cdaboo/'] = 'principalToken1'
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '/principals/__uids__/cdaboo/'))
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForRequestUriTokenChanged(self):
        """Changing the request URI's token invalidates the cached entry."""
        self.tokens['/calendars/__uids__/cdaboo/'] = 'uriToken1'
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '/principals/__uids__/cdaboo/'))
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForRequestChildTokenChanged(self):
        """Changing a child resource's token invalidates the cached entry."""
        self.tokens['/calendars/__uids__/cdaboo/calendars/'] = 'childToken1'
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '/principals/__uids__/cdaboo/'))
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForDepthZero(self):
        """A Depth header different from the cached request's is a miss."""
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '/principals/__uids__/cdaboo/',
                depth='0'))
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForBody(self):
        """A request body different from the cached request's is a miss."""
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '/principals/__uids__/cdaboo/',
                body='bazbax'))
        d.addCallback(self.assertEquals, None)
        return d
    def test_getResponseForUnauthenticatedRequest(self):
        """Unauthenticated requests are never served from the cache."""
        d = self.rc.getResponseForRequest(StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '{DAV:}unauthenticated',
                body='bazbax'))
        d.addCallback(self.assertEquals, None)
        return d
    def test_cacheUnauthenticatedResponse(self):
        """Responses to unauthenticated requests can still be stored."""
        expected_response = StubResponse(401, {}, "foobar")
        d = self.rc.cacheResponseForRequest(
            StubRequest('PROPFIND',
                        '/calendars/__uids__/cdaboo/',
                        '{DAV:}unauthenticated'),
            expected_response)
        d.addCallback(self.assertResponse,
                      (expected_response.code,
                       expected_response.headers,
                       expected_response.body))
        return d
    def test_cacheResponseForRequest(self):
        """A stored response is returned for a subsequent equal request."""
        expected_response = StubResponse(200, {}, "Foobar")
        def _assertResponse(ign):
            # Re-issue an identical request and expect the cached response.
            d1 = self.rc.getResponseForRequest(StubRequest(
                    'PROPFIND',
                    '/principals/__uids__/dreid/',
                    '/principals/__uids__/dreid/'))
            d1.addCallback(self.assertResponse,
                           (expected_response.code,
                            expected_response.headers,
                            expected_response.body))
            return d1
        d = self.rc.cacheResponseForRequest(
            StubRequest('PROPFIND',
                        '/principals/__uids__/dreid/',
                        '/principals/__uids__/dreid/'),
            expected_response)
        d.addCallback(_assertResponse)
        return d
    def test_recordHashChangeInvalidatesCache(self):
        """A changed directory-record cache token invalidates the entry."""
        StubRequest.resources[
            '/principals/__uids__/cdaboo/'].record = StubDirectoryRecord('cdaboo-changed')
        d = self.rc.getResponseForRequest(
            StubRequest(
                'PROPFIND',
                '/calendars/__uids__/cdaboo/',
                '/principals/__uids__/cdaboo/'))
        d.addCallback(self.assertEquals, None)
        return d
class MemcacheResponseCacheTests(BaseCacheTestMixin, TestCase):
    """Run the shared cache tests against MemcacheResponseCache backed by
    an in-memory fake memcache protocol."""
    def setUp(self):
        super(MemcacheResponseCacheTests, self).setUp()
        memcacheStub = InMemoryMemcacheProtocol()
        self.rc = MemcacheResponseCache(None, cachePool=memcacheStub)
        self.tokens = {}
        self.tokens['/calendars/__uids__/cdaboo/'] = 'uriToken0'
        self.tokens['/calendars/__uids__/cdaboo/calendars/'] = 'childToken0'
        self.tokens['/principals/__uids__/cdaboo/'] = 'principalToken0'
        self.tokens['/principals/__uids__/dreid/'] = 'principalTokenX'
        def _getToken(uri, cachePoolHandle=None):
            return succeed(self.tokens.get(uri))
        # Route token lookups to the in-test dict instead of memcache.
        self.rc._tokenForURI = _getToken
        self.expected_response = (200, Headers({}), "Foo")
        # Cache keys are md5("method:principal:uri:depth:hash(body)").
        expected_key = hashlib.md5(':'.join([str(t) for t in (
            'PROPFIND',
            '/principals/__uids__/cdaboo/',
            '/calendars/__uids__/cdaboo/',
            '1',
            hash('foobar'),
        )])).hexdigest()
        # Pre-seed one pickled cache entry: (principal token, record cache
        # token, uri token, child-token map, (code, headers, body)).
        memcacheStub._cache[expected_key] = (
            0, #flags
            cPickle.dumps((
                'principalToken0',
                StubDirectoryRecord('cdaboo').cacheToken(),
                'uriToken0',
                {'/calendars/__uids__/cdaboo/calendars/': 'childToken0'},
                (self.expected_response[0],
                 dict(list(self.expected_response[1].getAllRawHeaders())),
                 self.expected_response[2]))))
        self.memcacheStub = memcacheStub
    def tearDown(self):
        # Cancel the fake memcache's pending expiration timers.
        for call in self.memcacheStub._timeouts.itervalues():
            call.cancel()
    def test_givenURIsForKeys(self):
        """Cache keys are computed from the request URI exactly as given
        (here the /calendars/users/... alias), not the canonical URI."""
        expected_response = (200, Headers({}), "Foobarbaz")
        _key = (
            'PROPFIND',
            '/principals/__uids__/cdaboo/',
            '/calendars/users/cdaboo/',
            '1',
            hash('foobar'),
        )
        expected_key = hashlib.md5(':'.join([str(t) for t in _key])).hexdigest()
        self.memcacheStub._cache[expected_key] = (
            0, #flags
            cPickle.dumps((
                'principalToken0',
                StubDirectoryRecord('cdaboo').cacheToken(),
                'uriToken0',
                {'/calendars/__uids__/cdaboo/calendars/': 'childToken0'},
                (expected_response[0],
                 dict(list(expected_response[1].getAllRawHeaders())),
                 expected_response[2]))))
        d = self.rc.getResponseForRequest(
            StubRequest('PROPFIND',
                        '/calendars/users/cdaboo/',
                        '/principals/__uids__/cdaboo/'))
        d.addCallback(self.assertResponse, expected_response)
        return d
class StubResponseCacheResource(object):
    """In-memory response-cache double: stores responses keyed by request."""

    def __init__(self):
        self.cache = {}
        # The resource acts as its own response cache.
        self.responseCache = self

    def getResponseForRequest(self, request):
        # None when the request was never cached (the original returned
        # None implicitly by falling off the end).
        return self.cache.get(request)

    def cacheResponseForRequest(self, request, response):
        self.cache[request] = response
        return response
class TestRenderMixin(object):
    """Renderer double that stamps a DAV header onto its canned response."""

    davHeaders = ('foo',)

    def renderHTTP(self, request):
        canned = self.response
        canned.headers.setHeader('dav', self.davHeaders)
        return canned
class TestCachingResource(PropfindCacheMixin, TestRenderMixin):
    """Cacheable resource double: PropfindCacheMixin's caching wrapped
    around TestRenderMixin's canned response."""
    def __init__(self, response):
        self.response = response
class TestCacheStoreNotifier(TestCase):
    """Tests for CacheStoreNotifier's URI fan-out."""
    @inlineCallbacks
    def test_notify_home_child(self):
        """
        Verify that L{CacheStoreNotifier.notify} will generate notifications for homes and home childs.
        """
        class StubCacheStoreNotifierFactory(object):
            # Collects the URIs passed to changed().
            def __init__(self):
                self.results = set()
            def changed(self, uri):
                self.results.add(uri)
                return succeed(None)
        class StubCacheResource(object):
            # Resource double exposing only notifierID().
            def __init__(self, notifierID):
                self.nid = notifierID
            def notifierID(self):
                return self.nid
        # notifierID tuple -> expected changed() URIs.  A child id of the
        # form "user/collection" must notify both the home URI and the
        # child collection URI.
        data = (
            (("CalDAV", "user01"), ("/calendars/__uids__/user01/",),),
            (("CalDAV", "user01/calendar"), ("/calendars/__uids__/user01/", "/calendars/__uids__/user01/calendar/",),),
            (("CardDAV", "user01"), ("/addressbooks/__uids__/user01/",),),
            (("CardDAV", "user01/addressbook"), ("/addressbooks/__uids__/user01/", "/addressbooks/__uids__/user01/addressbook/",),),
        )
        for item, results in data:
            factory = StubCacheStoreNotifierFactory()
            notifier = CacheStoreNotifier(factory, StubCacheResource(item))
            yield notifier.notify()
            self.assertEqual(factory.results, set(results))
class PropfindCacheMixinTests(TestCase):
    """
    Test the PropfindCacheMixin
    """
    def setUp(self):
        TestCase.setUp(self)
        self.resource = TestCachingResource(StubResponse(200, {}, "foobar"))
        self.responseCache = StubResponseCacheResource()
    def test_DAVHeaderCached(self):
        """
        Test that the DAV header set in renderHTTP is cached.
        """
        def _checkCache(response):
            # Both the live response and the cached copy must carry the
            # DAV header that TestRenderMixin.renderHTTP sets.
            self.assertEquals(response.headers.getHeader('dav'),
                              ('foo',))
            self.assertEquals(
                self.responseCache.cache[request].headers.getHeader('dav'),
                ('foo',))
        request = StubRequest('PROPFIND', '/', '/')
        request.resources['/'] = self.responseCache
        d = maybeDeferred(self.resource.renderHTTP, request)
        d.addCallback(_checkCache)
        return d
    def test_onlyCachePropfind(self):
        """
        Test that we only cache the result of a propfind request.
        """
        def _checkCache(response):
            # A GET must not be stored in the response cache.
            self.assertEquals(self.responseCache.getResponseForRequest(request),
                              None)
        request = StubRequest('GET', '/', '/')
        request.resources['/'] = self.responseCache
        d = maybeDeferred(self.resource.renderHTTP, request)
        d.addCallback(_checkCache)
        return d
| {
"content_hash": "be25f8193293c727e985b37b2cfdbfee",
"timestamp": "",
"source": "github",
"line_count": 585,
"max_line_length": 132,
"avg_line_length": 28.805128205128206,
"alnum_prop": 0.5767016794255534,
"repo_name": "trevor/calendarserver",
"id": "2562280daf724c450d4437c5cb78c015a0682be0",
"size": "17458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twistedcaldav/test/test_cache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4214"
},
{
"name": "D",
"bytes": "13143"
},
{
"name": "JavaScript",
"bytes": "76566"
},
{
"name": "Python",
"bytes": "9260291"
},
{
"name": "Shell",
"bytes": "78964"
}
],
"symlink_target": ""
} |
import numpy as np
def operations(h, w):
"""
Takes two inputs, h and w, and makes two Numpy arrays A and B of size
h x w, and returns A, B, and s, the sum of A and B.
Arg:
h - an integer describing the height of A and B
w - an integer describing the width of A and B
Returns (in this order):
A - a randomly-generated h x w Numpy array.
B - a randomly-generated h x w Numpy array.
s - the sum of A and B.
"""
A = np.random.random([h,w])
B = np.random.random([h,w])
s = A + B
return [A,B,s]
| {
"content_hash": "ee3458d53e1f8ff160f77c5d0c3a86e2",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 73,
"avg_line_length": 29.526315789473685,
"alnum_prop": 0.5882352941176471,
"repo_name": "xunilrj/sandbox",
"id": "dfe608daf2af8fd26758bd6c8f63067da6c6edcd",
"size": "561",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "courses/MITx/MITx 6.86x Machine Learning with Python-From Linear Models to Deep Learning/project0/operations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "235"
},
{
"name": "ASP.NET",
"bytes": "110"
},
{
"name": "Assembly",
"bytes": "28409"
},
{
"name": "Asymptote",
"bytes": "22978"
},
{
"name": "C",
"bytes": "1022035"
},
{
"name": "C#",
"bytes": "474510"
},
{
"name": "C++",
"bytes": "33387716"
},
{
"name": "CMake",
"bytes": "1288737"
},
{
"name": "CSS",
"bytes": "49690"
},
{
"name": "Common Lisp",
"bytes": "858"
},
{
"name": "Coq",
"bytes": "6200"
},
{
"name": "Dockerfile",
"bytes": "2912"
},
{
"name": "Elixir",
"bytes": "34"
},
{
"name": "Erlang",
"bytes": "8204"
},
{
"name": "F#",
"bytes": "33187"
},
{
"name": "Fortran",
"bytes": "20472"
},
{
"name": "GDB",
"bytes": "701"
},
{
"name": "GLSL",
"bytes": "7478"
},
{
"name": "Go",
"bytes": "8971"
},
{
"name": "HTML",
"bytes": "6469462"
},
{
"name": "Handlebars",
"bytes": "8236"
},
{
"name": "Haskell",
"bytes": "18581"
},
{
"name": "Java",
"bytes": "120539"
},
{
"name": "JavaScript",
"bytes": "5055335"
},
{
"name": "Jupyter Notebook",
"bytes": "1849172"
},
{
"name": "LLVM",
"bytes": "43431"
},
{
"name": "MATLAB",
"bytes": "462980"
},
{
"name": "Makefile",
"bytes": "1622666"
},
{
"name": "Objective-C",
"bytes": "2001"
},
{
"name": "PostScript",
"bytes": "45490"
},
{
"name": "PowerShell",
"bytes": "192867"
},
{
"name": "Python",
"bytes": "726138"
},
{
"name": "R",
"bytes": "31364"
},
{
"name": "Roff",
"bytes": "5700"
},
{
"name": "Ruby",
"bytes": "5865"
},
{
"name": "Rust",
"bytes": "797104"
},
{
"name": "Sage",
"bytes": "654"
},
{
"name": "Scala",
"bytes": "42383"
},
{
"name": "Shell",
"bytes": "154039"
},
{
"name": "TLA",
"bytes": "16779"
},
{
"name": "TSQL",
"bytes": "3412"
},
{
"name": "TeX",
"bytes": "6989202"
},
{
"name": "TypeScript",
"bytes": "8845"
},
{
"name": "Visual Basic .NET",
"bytes": "1090"
},
{
"name": "WebAssembly",
"bytes": "70321"
},
{
"name": "q",
"bytes": "13889"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class ShowlegendValidator(_plotly_utils.basevalidators.BooleanValidator):
def __init__(self, plotly_name="showlegend", parent_name="scatter3d", **kwargs):
super(ShowlegendValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
role=kwargs.pop("role", "info"),
**kwargs
)
| {
"content_hash": "2098cf04806074e919da13bcae237f78",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 84,
"avg_line_length": 37.833333333333336,
"alnum_prop": 0.6189427312775331,
"repo_name": "plotly/python-api",
"id": "ee956414f96bc460e097eb37f0b10152d5738bed",
"size": "454",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/scatter3d/_showlegend.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2009 Marian Tietz
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
"""
import inspect
def types (**type_dict):
	"""Decorator factory adding runtime type checks to named parameters.

	Usage: @types(param=str) or @types(param=(int, float)).  Each keyword
	names a parameter of the decorated function and maps it to a type (or
	tuple of types); a TypeError with a descriptive message is raised when
	a call supplies a value of the wrong type.  Parameters absent from a
	call are skipped and left to Python's normal argument handling.
	"""
	def decorate (fun):
		def typecheck_decorator (*args, **kwargs):
			argspec = inspect.getargspec (fun)
			parameters = argspec[0]
			check_dict = {}
			# make dict out of tuple parameters and update
			# them with values from kwargs
			for i in range (len(args[:len(parameters)])):
				check_dict[parameters[i]] = args[i]
			check_dict.update (kwargs)
			for t_param,t_type in type_dict.items():
				def raise_error (origin_name, foreign_name):
					# Report which parameter failed, the expected type
					# name and the actual type name.
					raise TypeError,\
						"Parameter '%s' of function '%s' must "\
						"be '%s'. ('%s' given)." % (
							t_param,
							fun.func_name,
							origin_name,
							foreign_name)
				try:
					foreign = check_dict[t_param]
					foreign_type = type (check_dict[t_param])
				except KeyError:
					# skip, this happens if an argument is not
					# given, let python handle this.
					continue
				# FIXME redundant code here...
				# NOTE(review): when `foreign` is an instance (not a
				# class), issubclass() below raises its own TypeError
				# instead of the formatted one -- confirm intended.
				if type (t_type) == tuple:
					# more than one type given
					if (not isinstance(foreign, t_type)
					and (not issubclass(foreign, t_type))):
						typelist_name = " or ".join (
							[n.__name__ for n in t_type])
						raise_error (typelist_name, foreign_type.__name__)
				elif (type (t_type) == type
				or type(t_type).__name__ == "GObjectMeta"):
					# one type to check
					if (not isinstance(foreign, t_type)
					and (foreign == type and not issubclass(foreign, t_type))):
						raise_error (t_type.__name__, foreign_type.__name__)
				else:
					# no valid type-type
					raise TypeError, "Only tuple or type allowed for "\
						"named parameters of function types ('%s' given)." % (
							type (t_type).__name__)
			return fun (*args, **kwargs)
		return typecheck_decorator
	return decorate
| {
"content_hash": "8d2a57c8a0eb23338feb753745f2d5f3",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 74,
"avg_line_length": 32.891304347826086,
"alnum_prop": 0.6916721744877726,
"repo_name": "sushi-irc/tekka",
"id": "ba672e69dee69ab4cdf759d91b3667cbef9e82fe",
"size": "3042",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tekka/typecheck.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "483168"
}
],
"symlink_target": ""
} |
from calvin.actor.actor import Actor, manage, condition, calvinlib
class Mustache(Actor):
    """
    Formats string based on template and incoming dictionary
    Format string uses \"{{access.key.path}}\" to access dict
    Also refer to Mustache documentation (http://mustache.github.io/)
    Inputs:
      dict :
    Outputs:
      text : formatted string
    """
    # NOTE(review): the docstring above appears to double as the actor's
    # port documentation in Calvin -- left byte-identical on purpose.
    @manage(['fmt'])
    def init(self, fmt):
        # fmt is the Mustache template string; it is the only state listed
        # in @manage and therefore the only state migrated with the actor.
        self.fmt = fmt
        self.setup()
    def setup(self):
        # Acquire the mustache rendering library through calvinlib.
        self.mustache = calvinlib.use("mustache")
    def did_migrate(self):
        # The library handle is not managed state, so it must be
        # re-acquired after migrating to a new runtime.
        self.setup()
    @condition(['dict'], ['text'])
    def action(self, d):
        # Render the template against the incoming dictionary token and
        # emit the result on the 'text' port.
        text = self.mustache.render(self.fmt, d)
        return (text, )
    action_priority = (action, )
    requires = ["mustache"]
    # Self-test fixture consumed by the actor test framework.
    test_kwargs = {'fmt': "City:{{city}} Country:{{country}} Weather:{{weather}} Temperature:{{temperature}} Humidity:{{humidity}} Pressure:{{pressure}}"}
    test_set = [
        {
            'inports': {'dict': [{'city': u'Manchester', 'temperature': 7.84, 'country': u'GB', 'humidity': 87, 'pressure': 1033, 'weather': u'overcast clouds'}]},
            'outports': {'text': ["City:Manchester Country:GB Weather:overcast clouds Temperature:7.84 Humidity:87 Pressure:1033"]}
        }
    ]
| {
"content_hash": "25b4d6f0c722c8fc3b06607e88ce29bb",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 163,
"avg_line_length": 29,
"alnum_prop": 0.6003134796238244,
"repo_name": "EricssonResearch/calvin-base",
"id": "90c15c756b171b76ae628b8dc3174dc3139909b1",
"size": "1881",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "calvin/actorstore/systemactors/text/Mustache.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "769"
},
{
"name": "Dockerfile",
"bytes": "612"
},
{
"name": "HTML",
"bytes": "24571"
},
{
"name": "JavaScript",
"bytes": "78325"
},
{
"name": "Makefile",
"bytes": "816"
},
{
"name": "Python",
"bytes": "3291484"
},
{
"name": "Shell",
"bytes": "37140"
}
],
"symlink_target": ""
} |
import os
import shutil
import datetime
import json
import sys
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
import unittest2 as unittest
else:
import unittest
from pyquery import PyQuery
from tempfile import NamedTemporaryFile
from Cookie import SimpleCookie
import urlparse
from django.core.urlresolvers import reverse as urlreverse
from django.conf import settings
import debug # pyflakes:ignore
from ietf.doc.models import ( Document, DocAlias, DocRelationshipName, RelatedDocument, State,
DocEvent, BallotPositionDocEvent, LastCallDocEvent, WriteupDocEvent, NewRevisionDocEvent,
save_document_in_history )
from ietf.group.models import Group
from ietf.meeting.models import Meeting, Session, SessionPresentation
from ietf.name.models import SessionStatusName
from ietf.person.models import Person
from ietf.utils.mail import outbox
from ietf.utils.test_data import make_test_data
from ietf.utils.test_utils import login_testing_unauthorized, unicontent
from ietf.utils.test_utils import TestCase
class SearchTests(TestCase):
def test_search(self):
draft = make_test_data()
base_url = urlreverse("doc_search")
# only show form, no search yet
r = self.client.get(base_url)
self.assertEqual(r.status_code, 200)
# no match
r = self.client.get(base_url + "?activedrafts=on&name=thisisnotadocumentname")
self.assertEqual(r.status_code, 200)
self.assertTrue("no documents match" in str(r.content).lower())
r = self.client.get(base_url + "?rfcs=on&name=xyzzy")
self.assertEqual(r.status_code, 200)
self.assertTrue("no documents match" in unicontent(r).lower())
r = self.client.get(base_url + "?olddrafts=on&name=bar")
self.assertEqual(r.status_code, 200)
self.assertTrue("no documents match" in unicontent(r).lower())
r = self.client.get(base_url + "?olddrafts=on&name=foo")
self.assertEqual(r.status_code, 200)
self.assertTrue("draft-foo-mars-test" in unicontent(r).lower())
# find by rfc/active/inactive
draft.set_state(State.objects.get(type="draft", slug="rfc"))
r = self.client.get(base_url + "?rfcs=on&name=%s" % draft.name)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
draft.set_state(State.objects.get(type="draft", slug="active"))
r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.name)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
draft.set_state(State.objects.get(type="draft", slug="expired"))
r = self.client.get(base_url + "?olddrafts=on&name=%s" % draft.name)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
draft.set_state(State.objects.get(type="draft", slug="active"))
# find by title
r = self.client.get(base_url + "?activedrafts=on&name=%s" % draft.title.split()[0])
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
# find by author
r = self.client.get(base_url + "?activedrafts=on&by=author&author=%s" % draft.authors.all()[0].person.name_parts()[1])
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
# find by group
r = self.client.get(base_url + "?activedrafts=on&by=group&group=%s" % draft.group.acronym)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
# find by area
r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
# find by area
r = self.client.get(base_url + "?activedrafts=on&by=area&area=%s" % draft.group.parent_id)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
# find by AD
r = self.client.get(base_url + "?activedrafts=on&by=ad&ad=%s" % draft.ad_id)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
# find by IESG state
r = self.client.get(base_url + "?activedrafts=on&by=state&state=%s&substate=" % draft.get_state("draft-iesg").pk)
self.assertEqual(r.status_code, 200)
self.assertTrue(draft.title in unicontent(r))
def test_search_for_name(self):
    """The search-for-name view redirects to the best-matching document,
    or falls back to the search page when nothing matches.

    Refactored: the repeated GET/assert-302/compare-path sequence is
    factored into local helpers so each case is a single line.
    """
    draft = make_test_data()
    save_document_in_history(draft)
    prev_rev = draft.rev
    draft.rev = "%02d" % (int(prev_rev) + 1)
    draft.save()

    def search_for(name):
        # GET the view, assert the redirect, and return the target path
        r = self.client.get(urlreverse("doc_search_for_name", kwargs=dict(name=name)))
        self.assertEqual(r.status_code, 302)
        return urlparse.urlparse(r["Location"]).path

    def assert_redirects_to_doc(name, **doc_view_kwargs):
        self.assertEqual(search_for(name),
                         urlreverse("doc_view", kwargs=doc_view_kwargs))

    # exact match
    assert_redirects_to_doc(draft.name, name=draft.name)
    # prefix match
    assert_redirects_to_doc("-".join(draft.name.split("-")[:-1]), name=draft.name)
    # non-prefix match
    assert_redirects_to_doc("-".join(draft.name.split("-")[1:]), name=draft.name)

    # other doctypes than drafts
    doc = Document.objects.get(name='charter-ietf-mars')
    assert_redirects_to_doc('charter-ietf-ma', name=doc.name)
    for prefix in ['conflict-review-', 'status-change-', 'agenda-', 'minutes-', 'slides-']:
        doc = Document.objects.filter(name__startswith=prefix).first()
        assert_redirects_to_doc("-".join(doc.name.split("-")[:-1]), name=doc.name)

    # match with revision
    assert_redirects_to_doc(draft.name + "-" + prev_rev, name=draft.name, rev=prev_rev)
    # match with non-existing revision falls back to the unversioned view
    assert_redirects_to_doc(draft.name + "-09", name=draft.name)
    # match with revision and extension
    assert_redirects_to_doc(draft.name + "-" + prev_rev + ".txt", name=draft.name, rev=prev_rev)

    # no match: redirect to the search page with the query preserved
    r = self.client.get(urlreverse("doc_search_for_name", kwargs=dict(name="draft-ietf-doesnotexist-42")))
    self.assertEqual(r.status_code, 302)
    parsed = urlparse.urlparse(r["Location"])
    self.assertEqual(parsed.path, urlreverse("doc_search"))
    self.assertEqual(urlparse.parse_qs(parsed.query)["name"][0], "draft-ietf-doesnotexist-42")
def test_frontpage(self):
    """Smoke-test the front page: it renders and contains the search box."""
    make_test_data()
    r = self.client.get("/")
    self.assertEqual(r.status_code, 200)
    # assertIn reports the actual content on failure, unlike assertTrue(x in y)
    self.assertIn("Document Search", unicontent(r))
def test_drafts_pages(self):
    """The per-AD listing and the last-call listing both show the test draft."""
    draft = make_test_data()

    # AD-specific listing includes the draft's title
    r = self.client.get(urlreverse("docs_for_ad", kwargs=dict(name=draft.ad.full_name_as_key())))
    self.assertEqual(r.status_code, 200)
    self.assertIn(draft.title, unicontent(r))

    # once in IESG last call, the draft appears on the last-call page
    draft.set_state(State.objects.get(type="draft-iesg", slug="lc"))
    r = self.client.get(urlreverse("drafts_in_last_call"))
    self.assertEqual(r.status_code, 200)
    self.assertIn(draft.title, unicontent(r))
def test_indexes(self):
    """The all-drafts and active-drafts index pages both list the test draft."""
    draft = make_test_data()

    # the all-drafts index lists documents by name
    r = self.client.get(urlreverse("index_all_drafts"))
    self.assertEqual(r.status_code, 200)
    self.assertIn(draft.name, unicontent(r))

    # the active-drafts index lists documents by title
    r = self.client.get(urlreverse("index_active_drafts"))
    self.assertEqual(r.status_code, 200)
    self.assertIn(draft.title, unicontent(r))
def test_ajax_search_docs(self):
    """The select2 search endpoint returns matching Document and DocAlias records."""
    draft = make_test_data()
    doc_alias = draft.docalias_set.get()

    # (model_name, query string, expected primary key of the first hit)
    cases = [
        ("document", draft.name, draft.pk),
        ("docalias", doc_alias.name, doc_alias.pk),
    ]
    for model_name, query, expected_pk in cases:
        url = urlreverse("ajax_select2_search_docs", kwargs={
            "model_name": model_name,
            "doc_type": "draft",
        })
        r = self.client.get(url, dict(q=query))
        self.assertEqual(r.status_code, 200)
        data = json.loads(r.content)
        self.assertEqual(data[0]["id"], expected_pk)
class DocDraftTestCase(TestCase):
    """Tests of the draft document view against a realistic plain-text draft.

    A full RFC-style internet-draft (``draft_text``) is written to a temporary
    directory in setUp() so the view can render actual document content.
    """

    # NOTE(review): the leading whitespace of this RFC-format text appears to
    # have been lost upstream; the literal is preserved byte-for-byte as found.
    draft_text = """
Martian Special Interest Group (mars) P. Man
Internet-Draft March 21, 2015
Intended status: Informational
Expires: September 22, 2015
Optimizing Martian Network Topologies
draft-ietf-mars-test-02.txt
Abstract
Techniques for achieving near-optimal Martian networks.
Status of This Memo
This Internet-Draft is submitted in full conformance with the
provisions of BCP 78 and BCP 79.
Internet-Drafts are working documents of the Internet Engineering
Task Force (IETF). Note that other groups may also distribute
working documents as Internet-Drafts. The list of current Internet-
Drafts is at http://datatracker.ietf.org/drafts/current/.
Internet-Drafts are draft documents valid for a maximum of six months
and may be updated, replaced, or obsoleted by other documents at any
time. It is inappropriate to use Internet-Drafts as reference
material or to cite them other than as "work in progress."
This Internet-Draft will expire on September 22, 2015.
Copyright Notice
Copyright (c) 2015 IETF Trust and the persons identified as the
document authors. All rights reserved.
This document is subject to BCP 78 and the IETF Trust's Legal
Provisions Relating to IETF Documents
(http://trustee.ietf.org/license-info) in effect on the date of
publication of this document. Please review these documents
carefully, as they describe your rights and restrictions with respect
to this document. Code Components extracted from this document must
include Simplified BSD License text as described in Section 4.e of
the Trust Legal Provisions and are provided without warranty as
described in the Simplified BSD License.
This document may contain material from IETF Documents or IETF
Contributions published or made publicly available before November
10, 2008. The person(s) controlling the copyright in some of this
Man Expires September 22, 2015 [Page 1]
Internet-Draft Optimizing Martian Network Topologies March 2015
material may not have granted the IETF Trust the right to allow
modifications of such material outside the IETF Standards Process.
Without obtaining an adequate license from the person(s) controlling
the copyright in such materials, this document may not be modified
outside the IETF Standards Process, and derivative works of it may
not be created outside the IETF Standards Process, except to format
it for publication as an RFC or to translate it into languages other
than English.
Table of Contents
1. Introduction . . . . . . . . . . . . . . . . . . . . . . . . 2
2. Security Considerations . . . . . . . . . . . . . . . . . . . 2
3. IANA Considerations . . . . . . . . . . . . . . . . . . . . . 2
4. Acknowledgements . . . . . . . . . . . . . . . . . . . . . . 3
5. Normative References . . . . . . . . . . . . . . . . . . . . 3
Author's Address . . . . . . . . . . . . . . . . . . . . . . . . 3
1. Introduction
This document describes how to make the Martian networks work. The
methods used in Earth do not directly translate to the efficent
networks on Mars, as the topographical differences caused by planets.
For example the avian carriers, cannot be used in the Mars, thus
RFC1149 ([RFC1149]) cannot be used in Mars.
Some optimizations can be done because Mars is smaller than Earth,
thus the round trip times are smaller. Also as Mars has two moons
instead of only one as we have in Earth, we can use both Deimos and
Phobos when using reflecting radio links off the moon.
The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
"SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
document are to be interpreted as described in [RFC2119].
2. Security Considerations
As Martians are known to listen all traffic in Mars, all traffic in
the Mars MUST be encrypted.
3. IANA Considerations
There is no new IANA considerations in this document.
Man Expires September 22, 2015 [Page 2]
Internet-Draft Optimizing Martian Network Topologies March 2015
4. Acknowledgements
This document is created in the IETF-92 CodeSprint in Dallas, TX.
5. Normative References
[RFC1149] Waitzman, D., "Standard for the transmission of IP
datagrams on avian carriers", RFC 1149, April 1990.
[RFC2119] Bradner, S., "Key words for use in RFCs to Indicate
Requirement Levels", BCP 14, RFC 2119, March 1997.
Author's Address
Plain Man
Deimos street
Mars City MARS-000000
Mars
Email: aliens@example.mars
Man Expires September 22, 2015 [Page 3]
"""

    def setUp(self):
        # Point the draft settings at a scratch directory and drop the draft
        # text there so the document view can find and render it.
        # NOTE(review): the file is saved as -01 while the text names -02 —
        # confirm that is intentional (the fixture draft's current rev is 01).
        self.id_dir = os.path.abspath("tmp-id-dir")
        if not os.path.exists(self.id_dir):
            os.mkdir(self.id_dir)
        settings.INTERNET_DRAFT_PATH = self.id_dir
        settings.INTERNET_ALL_DRAFTS_ARCHIVE_DIR = self.id_dir
        f = open(os.path.join(self.id_dir, 'draft-ietf-mars-test-01.txt'), 'w')
        f.write(self.draft_text)
        f.close()

    def tearDown(self):
        # remove the scratch directory created in setUp()
        shutil.rmtree(self.id_dir)

    def test_document_draft(self):
        """Exercise the draft view across states, query params, and cookies."""
        draft = make_test_data()

        # these tests aren't testing all attributes yet, feel free to
        # expand them

        # active draft: summary view by default ("Deimos street" is body text)
        draft.set_state(State.objects.get(type="draft", slug="active"))
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Active Internet-Draft" in unicontent(r))
        self.assertTrue("Show full document text" in unicontent(r))
        self.assertFalse("Deimos street" in unicontent(r))

        # any include_text value (0, 1, or garbage) forces the full text
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)) + "?include_text=0")
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Active Internet-Draft" in unicontent(r))
        self.assertFalse("Show full document text" in unicontent(r))
        self.assertTrue("Deimos street" in unicontent(r))

        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)) + "?include_text=foo")
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Active Internet-Draft" in unicontent(r))
        self.assertFalse("Show full document text" in unicontent(r))
        self.assertTrue("Deimos street" in unicontent(r))

        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)) + "?include_text=1")
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Active Internet-Draft" in unicontent(r))
        self.assertFalse("Show full document text" in unicontent(r))
        self.assertTrue("Deimos street" in unicontent(r))

        # full_draft cookie "on" shows the full text
        self.client.cookies = SimpleCookie({'full_draft': 'on'})
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Active Internet-Draft" in unicontent(r))
        self.assertFalse("Show full document text" in unicontent(r))
        self.assertTrue("Deimos street" in unicontent(r))

        # cookie "off" (or any unrecognized value) reverts to the summary
        self.client.cookies = SimpleCookie({'full_draft': 'off'})
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Active Internet-Draft" in unicontent(r))
        self.assertTrue("Show full document text" in unicontent(r))
        self.assertFalse("Deimos street" in unicontent(r))

        self.client.cookies = SimpleCookie({'full_draft': 'foo'})
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Active Internet-Draft" in unicontent(r))
        self.assertTrue("Show full document text" in unicontent(r))
        self.assertFalse("Deimos street" in unicontent(r))

        # expired draft
        draft.set_state(State.objects.get(type="draft", slug="expired"))
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Expired Internet-Draft" in unicontent(r))

        # replaced draft: the replacement is linked from the view
        draft.set_state(State.objects.get(type="draft", slug="repl"))

        replacement = Document.objects.create(
            name="draft-ietf-replacement",
            time=datetime.datetime.now(),
            type_id="draft",
            title="Replacement Draft",
            stream_id=draft.stream_id, group_id=draft.group_id, abstract=draft.stream, rev=draft.rev,
            pages=draft.pages, intended_std_level_id=draft.intended_std_level_id,
            shepherd_id=draft.shepherd_id, ad_id=draft.ad_id, expires=draft.expires,
            notify=draft.notify, note=draft.note)
        DocAlias.objects.create(name=replacement.name, document=replacement)
        rel = RelatedDocument.objects.create(source=replacement,
                                             target=draft.docalias_set.get(name__startswith="draft"),
                                             relationship_id="replaces")

        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("Replaced Internet-Draft" in unicontent(r))
        self.assertTrue(replacement.name in unicontent(r))
        rel.delete()

        # draft published as RFC: draft and BCP names redirect, RFC name renders
        draft.set_state(State.objects.get(type="draft", slug="rfc"))
        draft.std_level_id = "bcp"
        draft.save()

        DocEvent.objects.create(doc=draft, type="published_rfc", by=Person.objects.get(name="(System)"))

        rfc_alias = DocAlias.objects.create(name="rfc123456", document=draft)
        bcp_alias = DocAlias.objects.create(name="bcp123456", document=draft)

        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=draft.name)))
        self.assertEqual(r.status_code, 302)
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=bcp_alias.name)))
        self.assertEqual(r.status_code, 302)

        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=rfc_alias.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("RFC 123456" in unicontent(r))
        self.assertTrue(draft.name in unicontent(r))

        # naked RFC: an RFC that never existed as a draft
        rfc = Document.objects.create(
            name="rfc1234567",
            type_id="draft",
            title="RFC without a Draft",
            stream_id="ise",
            group=Group.objects.get(type="individ"),
            std_level_id="ps")
        DocAlias.objects.create(name=rfc.name, document=rfc)
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=rfc.name)))
        self.assertEqual(r.status_code, 200)
        self.assertTrue("RFC 1234567" in unicontent(r))

        # unknown draft
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name="draft-xyz123")))
        self.assertEqual(r.status_code, 404)

    def test_document_primary_and_history_views(self):
        """Both current and historic revisions of each doctype render."""
        make_test_data()

        # Ensure primary views of both current and historic versions of documents works
        for docname in ["draft-imaginary-independent-submission",
                        "conflict-review-imaginary-irtf-submission",
                        "status-change-imaginary-mid-review",
                        "charter-ietf-mars",
                        "agenda-42-mars",
                        "minutes-42-mars",
                        "slides-42-mars-1",
                       ]:
            doc = Document.objects.get(name=docname)

            # give it some history
            save_document_in_history(doc)
            doc.rev = "01"
            doc.save()

            # current revision renders directly
            r = self.client.get(urlreverse("doc_view", kwargs=dict(name=doc.name)))
            self.assertEqual(r.status_code, 200)
            self.assertTrue("%s-01" % docname in unicontent(r))

            # asking for the current rev explicitly redirects to the plain view
            r = self.client.get(urlreverse("doc_view", kwargs=dict(name=doc.name, rev="01")))
            self.assertEqual(r.status_code, 302)

            # the historic revision renders from the history record
            r = self.client.get(urlreverse("doc_view", kwargs=dict(name=doc.name, rev="00")))
            self.assertEqual(r.status_code, 200)
            self.assertTrue("%s-00" % docname in unicontent(r))
class DocTestCase(TestCase):
    """Tests for assorted document views: charters, ballots, feeds, writeups.

    Fixes: test_rfc_feed used ``assertTrue(r.status_code, 200)``, which always
    passes (200 became the *msg* argument) — changed to assertEqual.  The
    Django-private ``r._headers`` access is replaced by the public
    ``r["Location"]`` API, and ``assertTrue(x in y)`` is replaced throughout
    by assertIn/assertNotIn for useful failure messages.
    """

    def test_document_charter(self):
        make_test_data()
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name="charter-ietf-mars")))
        self.assertEqual(r.status_code, 200)

    def test_document_conflict_review(self):
        make_test_data()
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name='conflict-review-imaginary-irtf-submission')))
        self.assertEqual(r.status_code, 200)

    def test_document_material(self):
        """A slides document attached to a session renders."""
        draft = make_test_data()
        doc = Document.objects.create(
            name="slides-testteam-test-slides",
            rev="00",
            title="Test Slides",
            group=draft.group,
            type_id="slides"
        )
        doc.set_state(State.objects.get(type="slides", slug="active"))
        DocAlias.objects.create(name=doc.name, document=doc)
        session = Session.objects.create(
            name="session-42-mars-1",
            meeting=Meeting.objects.get(number='42'),
            group=Group.objects.get(acronym='mars'),
            status=SessionStatusName.objects.create(slug='scheduled', name='Scheduled'),
            modified=datetime.datetime.now(),
            requested_by=Person.objects.get(user__username="marschairman"),
            type_id="session",
        )
        SessionPresentation.objects.create(session=session, document=doc, rev=doc.rev)

        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)

    def test_document_ballot(self):
        """Ballot positions render, and positions on old revisions are marked."""
        doc = make_test_data()
        ballot = doc.active_ballot()

        save_document_in_history(doc)

        pos = BallotPositionDocEvent.objects.create(
            doc=doc,
            ballot=ballot,
            type="changed_ballot_position",
            pos_id="yes",
            comment="Looks fine to me",
            comment_time=datetime.datetime.now(),
            ad=Person.objects.get(user__username="ad"),
            by=Person.objects.get(name="(System)"))

        r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)
        self.assertIn(pos.comment, unicontent(r))

        # test with ballot_id
        r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name, ballot_id=ballot.pk)))
        self.assertEqual(r.status_code, 200)
        self.assertIn(pos.comment, unicontent(r))

        # test popup too while we're at it
        r = self.client.get(urlreverse("ietf.doc.views_doc.ballot_popup", kwargs=dict(name=doc.name, ballot_id=ballot.pk)))
        self.assertEqual(r.status_code, 200)

        # Now simulate a new revision and make sure positions on older revisions are marked as such
        oldrev = doc.rev
        e = NewRevisionDocEvent.objects.create(doc=doc, rev='%02d' % (int(doc.rev) + 1), type='new_revision', by=Person.objects.get(name="(System)"))
        save_document_in_history(doc)
        doc.rev = e.rev
        doc.save()
        r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)
        self.assertIn('(%s for -%s)' % (pos.comment_time.strftime('%Y-%m-%d'), oldrev), unicontent(r))

    def test_document_ballot_needed_positions(self):
        """'Needs more YES or NO' hints track intended status and referenced RFCs."""
        make_test_data()

        # draft: standards-track needs more positions, informational does not
        doc = Document.objects.get(name='draft-ietf-mars-test')
        r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
        self.assertIn('more YES or NO', unicontent(r))
        Document.objects.filter(pk=doc.pk).update(intended_std_level='inf')
        r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
        self.assertNotIn('more YES or NO', unicontent(r))

        # status change
        doc = Document.objects.get(name='status-change-imaginary-mid-review')
        iesgeval_pk = str(State.objects.get(slug='iesgeval', type__slug='statchg').pk)
        self.client.login(username='ad', password='ad+password')
        r = self.client.post(urlreverse('ietf.doc.views_status_change.change_state', kwargs=dict(name=doc.name)), dict(new_state=iesgeval_pk))
        self.assertEqual(r.status_code, 302)
        # follow the redirect via the public header API instead of r._headers
        r = self.client.get(r["Location"])
        self.assertIn(">IESG Evaluation<", unicontent(r))

        doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9998'), relationship_id='tohist')
        r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
        self.assertNotIn('Needs a YES', unicontent(r))
        self.assertNotIn('more YES or NO', unicontent(r))
        doc.relateddocument_set.create(target=DocAlias.objects.get(name='rfc9999'), relationship_id='tois')
        r = self.client.get(urlreverse("ietf.doc.views_doc.document_ballot", kwargs=dict(name=doc.name)))
        self.assertIn('more YES or NO', unicontent(r))

    def test_document_json(self):
        doc = make_test_data()
        r = self.client.get(urlreverse("ietf.doc.views_doc.document_json", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)

    def test_writeup(self):
        """The writeup page shows both approval and writeup ballot texts."""
        doc = make_test_data()

        appr = WriteupDocEvent.objects.create(
            doc=doc,
            desc="Changed text",
            type="changed_ballot_approval_text",
            text="This is ballot approval text.",
            by=Person.objects.get(name="(System)"))

        notes = WriteupDocEvent.objects.create(
            doc=doc,
            desc="Changed text",
            type="changed_ballot_writeup_text",
            text="This is ballot writeup notes.",
            by=Person.objects.get(name="(System)"))

        url = urlreverse('doc_writeup', kwargs=dict(name=doc.name))
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn(appr.text, unicontent(r))
        self.assertIn(notes.text, unicontent(r))

    def test_history(self):
        doc = make_test_data()

        e = DocEvent.objects.create(
            doc=doc,
            desc="Something happened.",
            type="added_comment",
            by=Person.objects.get(name="(System)"))

        url = urlreverse('doc_history', kwargs=dict(name=doc.name))
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn(e.desc, unicontent(r))

    def test_document_feed(self):
        doc = make_test_data()

        e = DocEvent.objects.create(
            doc=doc,
            desc="Something happened.",
            type="added_comment",
            by=Person.objects.get(name="(System)"))

        r = self.client.get("/feed/document-changes/%s/" % doc.name)
        self.assertEqual(r.status_code, 200)
        self.assertIn(e.desc, unicontent(r))

    def test_last_call_feed(self):
        doc = make_test_data()
        doc.set_state(State.objects.get(type="draft-iesg", slug="lc"))

        LastCallDocEvent.objects.create(
            doc=doc,
            desc="Last call",
            type="sent_last_call",
            by=Person.objects.get(user__username="secretary"),
            expires=datetime.date.today() + datetime.timedelta(days=7))

        r = self.client.get("/feed/last-call/")
        self.assertEqual(r.status_code, 200)
        self.assertIn(doc.name, unicontent(r))

    def test_rfc_feed(self):
        make_test_data()
        r = self.client.get("/feed/rfc/")
        # was assertTrue(r.status_code, 200): that always passed, because 200
        # was interpreted as the assertion message, not a comparison value
        self.assertEqual(r.status_code, 200)

    def test_state_help(self):
        url = urlreverse('state_help', kwargs=dict(type="draft-iesg"))
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn(State.objects.get(type="draft-iesg", slug="lc").name, unicontent(r))

    def test_document_nonietf_pubreq_button(self):
        """'Request publication' only appears for non-IETF-stream documents."""
        doc = make_test_data()

        self.client.login(username='iab-chair', password='iab-chair+password')
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)
        self.assertNotIn("Request publication", unicontent(r))

        # once the document is on the IAB stream, the button appears
        Document.objects.filter(pk=doc.pk).update(stream='iab')
        r = self.client.get(urlreverse("doc_view", kwargs=dict(name=doc.name)))
        self.assertEqual(r.status_code, 200)
        self.assertIn("Request publication", unicontent(r))
class AddCommentTestCase(TestCase):
    """Tests of the add-comment view: form rendering, posting, notifications."""

    def test_add_comment(self):
        draft = make_test_data()
        url = urlreverse('doc_add_comment', kwargs=dict(name=draft.name))
        login_testing_unauthorized(self, "secretary", url)

        # normal get: the comment form is present
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        q = PyQuery(r.content)
        self.assertEqual(len(q('form textarea[name=comment]')), 1)

        # post a comment; it should create one DocEvent and one notification mail
        # (the original "# request resurrect" comment was stale/copy-pasted)
        events_before = draft.docevent_set.count()
        mailbox_before = len(outbox)

        r = self.client.post(url, dict(comment="This is a test."))
        self.assertEqual(r.status_code, 302)

        self.assertEqual(draft.docevent_set.count(), events_before + 1)
        self.assertEqual("This is a test.", draft.latest_event().desc)
        self.assertEqual("added_comment", draft.latest_event().type)
        self.assertEqual(len(outbox), mailbox_before + 1)
        self.assertIn("Comment added", outbox[-1]['Subject'])
        self.assertIn(draft.name, outbox[-1]['Subject'])
        self.assertIn('draft-ietf-mars-test@', outbox[-1]['To'])

        # Make sure we can also do it as IANA
        self.client.login(username="iana", password="iana+password")

        # normal get
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        q = PyQuery(r.content)
        self.assertEqual(len(q('form textarea[name=comment]')), 1)
class TemplateTagTest(unittest.TestCase):
    """Runs the doctests embedded in the ietf_filters template-tag module."""

    def test_template_tags(self):
        import doctest
        from ietf.doc.templatetags import ietf_filters

        failure_count, test_count = doctest.testmod(ietf_filters)
        self.assertEqual(failure_count, 0)
class ReferencesTest(TestCase):
    """Tests that document references show up on both reference pages.

    Fixes: the deprecated ``assertEquals`` alias is replaced by assertEqual,
    and ``assertTrue(x in y)`` by assertIn.
    """

    def test_references(self):
        make_test_data()
        doc1 = Document.objects.get(name='draft-ietf-mars-test')
        doc2 = DocAlias.objects.get(name='draft-imaginary-independent-submission')

        RelatedDocument.objects.get_or_create(source=doc1, target=doc2, relationship=DocRelationshipName.objects.get(slug='refnorm'))

        # outgoing references of doc1 include doc2
        url = urlreverse('doc_references', kwargs=dict(name=doc1.name))
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn(doc2.name, unicontent(r))

        # incoming references of doc2 include doc1
        url = urlreverse('doc_referenced_by', kwargs=dict(name=doc2.name))
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn(doc1.name, unicontent(r))
class EmailAliasesTests(TestCase):
    """Tests of the document email alias listing and expansion views.

    Fix: ``assertTrue(all([...]))`` gave no hint of which alias was missing;
    each alias is now checked individually with assertIn.
    """

    def setUp(self):
        make_test_data()
        # Fake draft alias file in the format produced by the alias scripts;
        # DRAFT_VIRTUAL_PATH is pointed at it for the duration of the test.
        self.doc_alias_file = NamedTemporaryFile(delete=False)
        self.doc_alias_file.write("""# Generated by hand at 2015-02-12_16:26:45
virtual.ietf.org anything
draft-ietf-mars-test@ietf.org xfilter-draft-ietf-mars-test
expand-draft-ietf-mars-test@virtual.ietf.org mars-author@example.com, mars-collaborator@example.com
draft-ietf-mars-test.authors@ietf.org xfilter-draft-ietf-mars-test.authors
expand-draft-ietf-mars-test.authors@virtual.ietf.org mars-author@example.mars, mars-collaborator@example.mars
draft-ietf-mars-test.chairs@ietf.org xfilter-draft-ietf-mars-test.chairs
expand-draft-ietf-mars-test.chairs@virtual.ietf.org mars-chair@example.mars
draft-ietf-mars-test.all@ietf.org xfilter-draft-ietf-mars-test.all
expand-draft-ietf-mars-test.all@virtual.ietf.org mars-author@example.mars, mars-collaborator@example.mars, mars-chair@example.mars
draft-ietf-ames-test@ietf.org xfilter-draft-ietf-ames-test
expand-draft-ietf-ames-test@virtual.ietf.org ames-author@example.com, ames-collaborator@example.com
draft-ietf-ames-test.authors@ietf.org xfilter-draft-ietf-ames-test.authors
expand-draft-ietf-ames-test.authors@virtual.ietf.org ames-author@example.ames, ames-collaborator@example.ames
draft-ietf-ames-test.chairs@ietf.org xfilter-draft-ietf-ames-test.chairs
expand-draft-ietf-ames-test.chairs@virtual.ietf.org ames-chair@example.ames
draft-ietf-ames-test.all@ietf.org xfilter-draft-ietf-ames-test.all
expand-draft-ietf-ames-test.all@virtual.ietf.org ames-author@example.ames, ames-collaborator@example.ames, ames-chair@example.ames
""")
        self.doc_alias_file.close()
        self.saved_draft_virtual_path = settings.DRAFT_VIRTUAL_PATH
        settings.DRAFT_VIRTUAL_PATH = self.doc_alias_file.name

    def tearDown(self):
        settings.DRAFT_VIRTUAL_PATH = self.saved_draft_virtual_path
        os.unlink(self.doc_alias_file.name)

    def testAliases(self):
        # the per-document alias page requires login: anonymous GET redirects
        url = urlreverse('doc_specific_email_aliases', kwargs=dict(name="draft-ietf-mars-test"))
        r = self.client.get(url)
        self.assertEqual(r.status_code, 302)

        url = urlreverse('ietf.doc.views_doc.email_aliases', kwargs=dict())
        login_testing_unauthorized(self, "plain", url)
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        content = unicontent(r)
        for alias in ['mars-test@', 'mars-test.authors@', 'mars-test.chairs@',
                      'ames-test@', 'ames-test.authors@', 'ames-test.chairs@']:
            self.assertIn(alias, content)

    def testExpansions(self):
        url = urlreverse('ietf.doc.views_doc.document_email', kwargs=dict(name="draft-ietf-mars-test"))
        r = self.client.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertIn('draft-ietf-mars-test.all@ietf.org', unicontent(r))
        self.assertIn('ballot_saved', unicontent(r))
| {
"content_hash": "e204d28826282d19e8c8426d79af3cd1",
"timestamp": "",
"source": "github",
"line_count": 897,
"max_line_length": 142,
"avg_line_length": 42.05016722408027,
"alnum_prop": 0.644820912537448,
"repo_name": "wpjesus/codematch",
"id": "12bd84888b2ecd0331aeb136e3ed279956b298f3",
"size": "37719",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "ietf/doc/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "139492"
},
{
"name": "CSS",
"bytes": "733662"
},
{
"name": "Groff",
"bytes": "2349"
},
{
"name": "HTML",
"bytes": "2149789"
},
{
"name": "JavaScript",
"bytes": "1003699"
},
{
"name": "Makefile",
"bytes": "3407"
},
{
"name": "Perl",
"bytes": "17323"
},
{
"name": "PostScript",
"bytes": "35"
},
{
"name": "PowerShell",
"bytes": "468"
},
{
"name": "Python",
"bytes": "4536908"
},
{
"name": "Shell",
"bytes": "74113"
},
{
"name": "TeX",
"bytes": "2556"
}
],
"symlink_target": ""
} |
import numpy as np
import random
class BasePlant(object):
def __init__(self, branch_id, leaf_id):
self.branch_id = branch_id
self.leaf_id = leaf_id
self.set_colors()
self.randomize_colors()
def randomize_colors(self):
self.branch_col = self.branch_col.astype(float)
self.leaf_col = self.leaf_col.astype(float)
self.branch_col /= np.max(self.branch_col)
self.leaf_col /= np.max(self.leaf_col)
self.branch_col += np.random.rand(3)/2
self.leaf_col += np.random.rand(3)/2
self.branch_col /= np.max(self.branch_col)
self.leaf_col /= np.max(self.leaf_col)
self.branch_col = np.abs(self.branch_col)
self.leaf_col = np.abs(self.leaf_col)
def choose_move(self, moves):
return random.choice(moves)
def set_colors(self):
self.branch_col = np.array([139, 69, 19])
self.leaf_col = np.array([0, 100, 0])
class Brancher(BasePlant):
    """Prefers a branching move (type_ == 1) whenever one is on offer."""

    def choose_move(self, moves):
        # first branching move if any, otherwise fall back to random choice
        branching = next((m for m in moves if m.type_ == 1), None)
        if branching is not None:
            return branching
        return random.choice(moves)

    def set_colors(self):
        # blue branches, green leaves
        self.branch_col = np.array([0, 0, 200])
        self.leaf_col = np.array([0, 200, 0])
class LeftPlant(BasePlant):
    """Always grows toward the smallest column coordinate (leftmost move)."""

    def choose_move(self, moves):
        # min() returns the first minimal element, matching the original
        # left-to-right scan with a strict comparison
        return min(moves, key=lambda move: move.coords[1])

    def set_colors(self):
        self.branch_col = np.array([100, 0, 100])
        self.leaf_col = np.array([100, 100, 0])
class RightPlant(BasePlant):
    """Always grows toward the largest column coordinate (rightmost move)."""

    def choose_move(self, moves):
        # max() returns the first maximal element, matching the original scan
        return max(moves, key=lambda move: move.coords[1])

    def set_colors(self):
        # NOTE(review): this palette is identical to LeftPlant's — looks like
        # a copy-paste; confirm whether RightPlant should have its own colors
        self.branch_col = np.array([100, 0, 100])
        self.leaf_col = np.array([100, 100, 0])
class UpPlant(BasePlant):
    """Always grows toward the smallest row coordinate (upward move)."""

    def choose_move(self, moves):
        # min() returns the first minimal element, matching the original scan
        return min(moves, key=lambda move: move.coords[0])

    def set_colors(self):
        self.branch_col = np.array([100, 100, 0])
        self.leaf_col = np.array([50, 50, 200])
| {
"content_hash": "d1aa50c46c5377e2a7d6d004308d9cf7",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 55,
"avg_line_length": 24.445652173913043,
"alnum_prop": 0.5504668741662961,
"repo_name": "ndiamant/arboreum",
"id": "b52236771ed0e232b305c1f4ca769b6e375fdc3f",
"size": "2249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fast_arboreum/greenhouse.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9500"
}
],
"symlink_target": ""
} |
import setuptools
import versioneer
# reStructuredText long description rendered on the PyPI project page.
LONG_DESCRIPTION = """
**aospy**: automated gridded climate data analysis and management
A framework that enables automated calculations using gridded climate data.
Following some basic description of where your data lives and defining any
functions of variables stored in that data you want to compute, aospy enables
you to fire off an arbitrary number of calculations using that data.
Important links
---------------
- HTML documentation: http://aospy.readthedocs.io/en/latest
- Mailing list: https://groups.google.com/d/forum/aospy
- Issue tracker: https://github.com/spencerahill/aospy/issues
- Source code: https://github.com/spencerahill/aospy
"""

# Package metadata; version and build commands are derived from git tags by
# versioneer (see versioneer.get_version / get_cmdclass).
setuptools.setup(
    name="aospy",
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    packages=setuptools.find_packages(),
    author="aospy Developers",
    author_email="aospy@googlegroups.com",
    description="Automated gridded climate data analysis and management",
    long_description=LONG_DESCRIPTION,
    # Runtime dependencies with minimum versions.
    install_requires=['numpy >= 1.7',
                      'scipy >= 0.16',
                      'pandas >= 0.15.0',
                      'netCDF4 >= 1.2',
                      'toolz >= 0.7.2',
                      'dask >= 0.14',
                      'distributed >= 1.17.1',
                      'xarray >= 0.14.1',
                      'cloudpickle >= 0.2.1',
                      'cftime >= 1.0.0'],
    tests_require=['pytest >= 3.3'],
    # Ship the netCDF fixtures used by the test suite.
    package_data={'aospy': ['test/data/netcdf/*.nc']},
    scripts=['aospy/examples/aospy_main.py',
             'aospy/examples/example_obj_lib.py'],
    license="Apache",
    keywords="climate science netcdf xarray",
    url="https://github.com/spencerahill/aospy",
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Scientific/Engineering :: Atmospheric Science'
    ]
)
| {
"content_hash": "96a6ab404ace830cb7bee04ffec02af5",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 77,
"avg_line_length": 37.813559322033896,
"alnum_prop": 0.6154190945764231,
"repo_name": "spencerahill/aospy",
"id": "ec65e73b749f8e01f1c44ff1f2b938550700d702",
"size": "2231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "105431"
},
{
"name": "Python",
"bytes": "363321"
}
],
"symlink_target": ""
} |
from pythonforandroid.recipe import PythonRecipe
class IpaddressRecipe(PythonRecipe):
    # Build recipe for the `ipaddress` package (presumably the Python-2
    # backport of the py3 stdlib module, given the python2 dependency --
    # confirm against the package's PyPI page).
    name = 'ipaddress'
    version = '1.0.15'
    # The {version} placeholder is interpolated by the recipe download
    # machinery when fetching the source tarball.
    url = 'https://pypi.python.org/packages/source/i/ipaddress/ipaddress-{version}.tar.gz'
    depends = ['python2']


# Module-level instance picked up by python-for-android's recipe loader.
recipe = IpaddressRecipe()
| {
"content_hash": "11fa7382e7d4ca63e598e13e17b03bc9",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 87,
"avg_line_length": 22.416666666666668,
"alnum_prop": 0.7472118959107806,
"repo_name": "bob-the-hamster/python-for-android",
"id": "69410f6d97a4c69579afa969481f11cea5d120b0",
"size": "269",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pythonforandroid/recipes/ipaddress/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "44223"
},
{
"name": "C++",
"bytes": "491"
},
{
"name": "CMake",
"bytes": "250"
},
{
"name": "CSS",
"bytes": "3487"
},
{
"name": "HTML",
"bytes": "8073"
},
{
"name": "Java",
"bytes": "367857"
},
{
"name": "Makefile",
"bytes": "24764"
},
{
"name": "Python",
"bytes": "1163920"
},
{
"name": "Shell",
"bytes": "19542"
}
],
"symlink_target": ""
} |
from datetime import date
from django.test import TestCase, override_settings
from django.contrib.auth.models import User
from django.test.client import Client
from molo.core.tests.base import MoloTestCaseMixin
from molo.profiles.models import UserProfile
from gem.admin import GemUserAdmin, download_as_csv_gem
from gem.models import GemUserProfile
from molo.profiles.task import send_export_email
from django.conf import settings
from django.core import mail
from gem.tasks import send_export_email_gem
class TestFrontendUsersAdminView(TestCase, MoloTestCaseMixin):
    """Admin user-list view: front-end user visibility and CSV export.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12's unittest) with ``assertEqual``.
    """

    def setUp(self):
        self.mk_main()
        # Plain front-end user that should appear in the admin listing.
        self.user = User.objects.create_user(
            username='tester',
            email='tester@example.com',
            password='0000',
            is_staff=False)
        # Staff account used to authenticate against the admin in every test.
        self.superuser = User.objects.create_superuser(
            username='superuser',
            email='admin@example.com',
            password='0000',
            is_staff=True)
        self.client = Client()
        self.client.login(username='superuser', password='0000')

    def test_staff_users_are_not_shown(self):
        """The 'frontend' user-type filter must hide staff accounts."""
        response = self.client.get(
            '/admin/auth/user/?usertype=frontend'
        )
        self.assertContains(response, self.user.username)
        self.assertNotContains(response, self.superuser.email)

    def test_export_csv(self):
        """POSTing the user list queues the export and redirects (302)."""
        profile = self.user.profile
        profile.alias = 'The Alias'
        profile.date_of_birth = date(1985, 1, 1)
        profile.mobile_number = '+27784667723'
        profile.save()
        gem_profile = self.user.gem_profile
        gem_profile.gender = 'f'
        gem_profile.save()
        response = self.client.post('/admin/auth/user/')
        self.assertEqual(response.status_code, 302)

    def test_send_export_email(self):
        """The export email carries the expected subject and CSV attachment."""
        send_export_email_gem(self.user.email, {})
        message = list(mail.outbox)[0]
        self.assertEqual(message.to, [self.user.email])
        self.assertEqual(
            message.subject, 'Molo export: ' + settings.SITE_NAME)
        self.assertEqual(
            message.attachments[0],
            ('Molo_export_GEM.csv',
             'username,alias,first_name,last_name,date_of_birth,email,mobile_'
             'number,is_active,date_joined,last_login,gender\r\ntester,,,,,t'
             'ester@example.com,,1,' + str(
                 self.user.date_joined.strftime("%Y-%m-%d %H:%M:%S")) +
             ',,\r\n',
             'text/csv'))

    def test_export_csv_no_gem_profile(self):
        """Export must not crash for users that lack a GemUserProfile."""
        GemUserProfile.objects.all().delete()
        self.assertEqual(GemUserProfile.objects.all().count(), 0)
        response = self.client.post('/admin/auth/user/')
        self.assertEqual(response.status_code, 302)
class ModelsTestCase(TestCase, MoloTestCaseMixin):
    """Unit tests for the ``download_as_csv_gem`` admin action.

    Fixes: deprecated ``assertEquals`` alias (removed in Python 3.12's
    unittest) replaced with ``assertEqual``; the local variable that
    shadowed the module-level ``datetime.date`` import is renamed.
    """

    def setUp(self):
        self.mk_main()
        self.user = User.objects.create_user(
            username='tester',
            email='tester@example.com',
            password='tester')

    @override_settings(CELERY_ALWAYS_EAGER=True)
    def test_download_csv(self):
        """A populated profile is rendered as one CSV data row."""
        profile = self.user.profile
        profile.alias = 'The Alias'
        profile.mobile_number = '+27784667723'
        profile.save()
        # Renamed from `date`, which shadowed the datetime.date import.
        joined = str(self.user.date_joined.strftime("%Y-%m-%d %H:%M"))
        gem_profile = self.user.gem_profile
        gem_profile.gender = 'f'
        gem_profile.date_of_birth = joined
        gem_profile.save()
        response = download_as_csv_gem(GemUserAdmin(UserProfile, self.site),
                                       None,
                                       User.objects.all())
        expected_output = (
            'Content-Type: text/csv\r\nContent-Disposition: attachment;filen'
            'ame=export.csv\r\n\r\nusername,email,first_name,last_name,is_sta'
            'ff,date_joined,alias,mobile_number,date_of_birth,gender\r\nte'
            'ster,tester@example.com,,,False,' + joined + ',The Alias,+277'
            '84667723,,f\r\n')
        self.assertEqual(str(response), expected_output)

    @override_settings(CELERY_ALWAYS_EAGER=True)
    def test_download_csv_no_gem_profile(self):
        """Without a GemUserProfile the export contains only the header row."""
        gem_profile = self.user.gem_profile
        gem_profile.delete()
        response = download_as_csv_gem(GemUserAdmin(UserProfile, self.site),
                                       None,
                                       User.objects.all())
        expected_output = (
            'Content-Type: text/csv\r\nContent-Disposition: attachment;file'
            'name=export.csv\r\n\r\nusername,email,first_name,last_name,is_st'
            'aff,date_joined,alias,mobile_number,date_of_birth,gender\r\n')
        self.assertEqual(str(response), expected_output)
| {
"content_hash": "a3966505e126162df811141c7e74a12d",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 78,
"avg_line_length": 39.53781512605042,
"alnum_prop": 0.6129649309245484,
"repo_name": "Mitso/springstertestapp",
"id": "12216c2e433732a737f298a2a666988127d2162d",
"size": "4729",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gem/tests/test_admin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "128710"
},
{
"name": "HTML",
"bytes": "138391"
},
{
"name": "JavaScript",
"bytes": "9716"
},
{
"name": "Python",
"bytes": "183268"
},
{
"name": "Shell",
"bytes": "563"
}
],
"symlink_target": ""
} |
import datetime
from oslo_config import cfg
from oslo_log import log as logging
from testtools import matchers
from vitrage.common.constants import DatasourceOpts as DSOpts
from vitrage.common.constants import DatasourceProperties as DSProps
from vitrage.common.constants import EdgeLabel
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import UpdateMethod
from vitrage.common.constants import VertexProperties as VProps
from vitrage.datasources.cinder.volume import CINDER_VOLUME_DATASOURCE
from vitrage.datasources.cinder.volume.transformer \
import CinderVolumeTransformer
from vitrage.datasources.heat.stack import HEAT_STACK_DATASOURCE
from vitrage.datasources.heat.stack.transformer import HeatStackTransformer
from vitrage.datasources.nova.instance import NOVA_INSTANCE_DATASOURCE
from vitrage.datasources.nova.instance.transformer import InstanceTransformer
from vitrage.datasources import transformer_base as tbase
from vitrage.datasources.transformer_base import TransformerBase
from vitrage.tests import base
from vitrage.tests.mocks import mock_driver as mock_sync
LOG = logging.getLogger(__name__)
class TestHeatStackTransformer(base.BaseTest):
    """Unit tests for the Heat stack transformer (vertex/neighbor creation)."""

    # Datasource options registered for the tests; PUSH update method.
    OPTS = [
        cfg.StrOpt(DSOpts.UPDATE_METHOD,
                   default=UpdateMethod.PUSH),
    ]

    # noinspection PyAttributeOutsideInit,PyPep8Naming
    @classmethod
    def setUpClass(cls):
        super(TestHeatStackTransformer, cls).setUpClass()
        cls.transformers = {}
        cls.conf = cfg.ConfigOpts()
        cls.conf.register_opts(cls.OPTS, group=HEAT_STACK_DATASOURCE)
        # Heat stacks reference instances and volumes, so those transformers
        # must also be registered in the shared `transformers` dict.
        cls.transformers[HEAT_STACK_DATASOURCE] = \
            HeatStackTransformer(cls.transformers)
        cls.transformers[CINDER_VOLUME_DATASOURCE] = \
            CinderVolumeTransformer(cls.transformers)
        cls.transformers[NOVA_INSTANCE_DATASOURCE] = \
            InstanceTransformer(cls.transformers)

    def test_create_placeholder_vertex(self):
        """Placeholder vertices carry id/type/category/timestamp and the
        deterministic uuid derived from the transformer key."""
        LOG.debug('Heat Stack transformer test: Create placeholder vertex')

        # Tests setup
        stack_id = 'Stack123'
        timestamp = datetime.datetime.utcnow()
        properties = {
            VProps.ID: stack_id,
            VProps.VITRAGE_TYPE: HEAT_STACK_DATASOURCE,
            VProps.VITRAGE_CATEGORY: EntityCategory.RESOURCE,
            VProps.VITRAGE_SAMPLE_TIMESTAMP: timestamp
        }
        transformer = self.transformers[HEAT_STACK_DATASOURCE]

        # Test action
        placeholder = \
            transformer.create_neighbor_placeholder_vertex(**properties)

        # Test assertions
        # The vertex id must equal the uuid derived from the key fields.
        observed_uuid = placeholder.vertex_id
        expected_key = tbase.build_key(transformer._key_values(
            HEAT_STACK_DATASOURCE,
            stack_id))
        expected_uuid = \
            TransformerBase.uuid_from_deprecated_vitrage_id(expected_key)
        self.assertEqual(expected_uuid, observed_uuid)
        observed_time = placeholder.get(VProps.VITRAGE_SAMPLE_TIMESTAMP)
        self.assertEqual(timestamp, observed_time)
        observed_type = placeholder.get(VProps.VITRAGE_TYPE)
        self.assertEqual(HEAT_STACK_DATASOURCE, observed_type)
        observed_entity_id = placeholder.get(VProps.ID)
        self.assertEqual(stack_id, observed_entity_id)
        observed_vitrage_category = placeholder.get(VProps.VITRAGE_CATEGORY)
        self.assertEqual(EntityCategory.RESOURCE, observed_vitrage_category)
        vitrage_is_placeholder = placeholder.get(VProps.VITRAGE_IS_PLACEHOLDER)
        self.assertTrue(vitrage_is_placeholder)

    def test_key_values(self):
        """Key fields are (category, datasource type, entity id), in order."""
        LOG.debug('Heat Stack transformer test: get key values')

        # Test setup
        volume_type = HEAT_STACK_DATASOURCE
        volume_id = '12345'
        transformer = self.transformers[HEAT_STACK_DATASOURCE]

        # Test action
        observed_key_fields = transformer._key_values(volume_type,
                                                      volume_id)

        # Test assertions
        self.assertEqual(EntityCategory.RESOURCE, observed_key_fields[0])
        self.assertEqual(HEAT_STACK_DATASOURCE, observed_key_fields[1])
        self.assertEqual(volume_id, observed_key_fields[2])

    def test_snapshot_transform(self):
        """Snapshot events yield a stack vertex plus its two neighbors."""
        LOG.debug('Heat Stack transformer test: transform entity event '
                  'snapshot')

        # Test setup
        spec_list = \
            mock_sync.simple_stack_generators(stack_num=3,
                                              instance_and_volume_num=7,
                                              snapshot_events=7)
        static_events = mock_sync.generate_random_events_list(spec_list)

        for event in static_events:
            # Test action
            wrapper = self.transformers[HEAT_STACK_DATASOURCE].transform(
                event)

            # Test assertions
            vertex = wrapper.vertex
            self._validate_stack_vertex_props(vertex, event)
            neighbors = wrapper.neighbors
            self._validate_neighbors(neighbors, vertex.vertex_id, event)

    def test_update_transform(self):
        """Update events yield the same vertex/neighbor structure."""
        # NOTE(review): this generates snapshot_events exactly like the test
        # above -- confirm whether update-style mock events were intended.
        LOG.debug('Heat Stack transformer test: transform entity event '
                  'update')

        # Test setup
        spec_list = \
            mock_sync.simple_stack_generators(stack_num=3,
                                              instance_and_volume_num=7,
                                              snapshot_events=7)
        static_events = mock_sync.generate_random_events_list(spec_list)

        for event in static_events:
            # Test action
            wrapper = self.transformers[HEAT_STACK_DATASOURCE].transform(
                event)

            # Test assertions
            vertex = wrapper.vertex
            self._validate_stack_vertex_props(vertex, event)
            neighbors = wrapper.neighbors
            self._validate_neighbors(neighbors, vertex.vertex_id, event)

    def _validate_stack_vertex_props(self, vertex, event):
        """Check the stack vertex against the raw event, handling the
        different field names used by update vs snapshot events."""
        is_update_event = tbase.is_update_event(event)
        self.assertEqual(EntityCategory.RESOURCE,
                         vertex[VProps.VITRAGE_CATEGORY])
        self.assertEqual(event[DSProps.ENTITY_TYPE],
                         vertex[VProps.VITRAGE_TYPE])
        # Update events carry 'stack_identity'/'state'; snapshots 'id'/'stack_status'.
        id_field_path = 'stack_identity' if is_update_event else 'id'
        self.assertEqual(
            tbase.extract_field_value(event, id_field_path),
            vertex[VProps.ID])
        self.assertEqual(event[DSProps.SAMPLE_DATE],
                         vertex[VProps.VITRAGE_SAMPLE_TIMESTAMP])
        name_field_path = 'stack_name'
        self.assertEqual(
            tbase.extract_field_value(event, name_field_path),
            vertex[VProps.NAME])
        state_field_path = 'state' if is_update_event else 'stack_status'
        self.assertEqual(
            tbase.extract_field_value(event, state_field_path),
            vertex[VProps.STATE])
        self.assertFalse(vertex[VProps.VITRAGE_IS_PLACEHOLDER])
        self.assertFalse(vertex[VProps.VITRAGE_IS_DELETED])

    def _validate_neighbors(self, neighbors, stack_vertex_id, event):
        """Expect exactly one instance neighbor and one volume neighbor."""
        self.assertThat(neighbors, matchers.HasLength(2))
        instance_id = event['resources'][0]['physical_resource_id']
        self._validate_neighbor(neighbors[0],
                                instance_id,
                                NOVA_INSTANCE_DATASOURCE,
                                stack_vertex_id)
        instance_id = event['resources'][1]['physical_resource_id']
        self._validate_neighbor(neighbors[1],
                                instance_id,
                                CINDER_VOLUME_DATASOURCE,
                                stack_vertex_id)

    def _validate_neighbor(self,
                           instance_neighbor,
                           instance_id,
                           datasource_type,
                           stack_vertex_id):
        """Check a single neighbor placeholder vertex and its COMPRISED edge
        (stack -> neighbor)."""
        # validate neighbor vertex
        self.assertEqual(EntityCategory.RESOURCE,
                         instance_neighbor.vertex[VProps.VITRAGE_CATEGORY])
        self.assertEqual(datasource_type,
                         instance_neighbor.vertex[VProps.VITRAGE_TYPE])
        self.assertEqual(instance_id, instance_neighbor.vertex[VProps.ID])
        self.assertTrue(
            instance_neighbor.vertex[VProps.VITRAGE_IS_PLACEHOLDER])
        self.assertFalse(instance_neighbor.vertex[VProps.VITRAGE_IS_DELETED])

        # Validate neighbor edge
        edge = instance_neighbor.edge
        self.assertEqual(edge.target_id, instance_neighbor.vertex.vertex_id)
        self.assertEqual(edge.source_id, stack_vertex_id)
        self.assertEqual(edge.label, EdgeLabel.COMPRISED)
| {
"content_hash": "fafdfbbae9f02e730f7286b631a5ad07",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 79,
"avg_line_length": 39.93119266055046,
"alnum_prop": 0.6351522113727742,
"repo_name": "openstack/vitrage",
"id": "6139bf4918db3266d9fc728b1c2e903f536c8055",
"size": "9278",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vitrage/tests/unit/datasources/heat/test_heat_stack_transformer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "26541"
},
{
"name": "Mako",
"bytes": "896"
},
{
"name": "Python",
"bytes": "2074427"
},
{
"name": "Shell",
"bytes": "17668"
}
],
"symlink_target": ""
} |
from collections import defaultdict
from sympy import Mod, Mul
from devito.exceptions import InvalidOperator
from devito.ir.clusters import Queue
from devito.ir.support import Forward, SEQUENTIAL
from devito.tools import (DefaultOrderedDict, frozendict, is_integer,
indices_to_sections, timed_pass)
from devito.types import (CustomDimension, Ge, Le, Lock, WaitLock, WithLock,
FetchWait, FetchWaitPrefetch, Delete, normalize_syncs)
__all__ = ['Tasker', 'Streaming']
class Asynchronous(Queue):

    """
    Abstract base for Queue passes that attach synchronization operations
    (SyncOps) to Clusters. Subclasses define what `key` selects.
    """

    def __init__(self, key):
        # `key` must be callable; its exact signature/meaning is defined by
        # the concrete subclass (Tasker, Streaming).
        assert callable(key)
        self.key = key
        super().__init__()
class Tasker(Asynchronous):

    """
    Create asynchronous Clusters, or "tasks".

    Parameters
    ----------
    key : callable, optional
        A Cluster `c` becomes an asynchronous task only if `key(c)` returns True

    Notes
    -----
    From an implementation viewpoint, an asynchronous Cluster is a Cluster
    with attached suitable SyncOps, such as WaitLock, WithLock, etc.
    """

    @timed_pass(name='tasker')
    def process(self, clusters):
        """Entry point; timing for the pass is recorded under 'tasker'."""
        return super().process(clusters)

    def callback(self, clusters, prefix):
        """Attach WaitLock/WithLock SyncOps along the innermost Dimension of
        `prefix`, provided all Clusters are SEQUENTIAL along it."""
        if not prefix:
            return clusters
        d = prefix[-1].dim
        if not all(SEQUENTIAL in c.properties[d] for c in clusters):
            return clusters
        locks = {}
        waits = defaultdict(list)
        tasks = defaultdict(list)
        for c0 in clusters:
            if not self.key(c0):
                # Not a candidate asynchronous task
                continue
            # Prevent future writes to interfere with a task by waiting on a lock
            may_require_lock = set(c0.scope.reads)
            # Sort for deterministic code generation
            may_require_lock = sorted(may_require_lock, key=lambda i: i.name)
            protected = defaultdict(set)
            for c1 in clusters:
                # NOTE(review): offset is 1 when c1 appears at or before c0 in
                # the Cluster sequence -- presumably to shift the logical lock
                # index of writes that happen no later than the task itself.
                offset = int(clusters.index(c1) <= clusters.index(c0))
                for f in may_require_lock:
                    try:
                        writes = c1.scope.writes[f]
                    except KeyError:
                        # No read-write dependency, ignore
                        continue
                    try:
                        # Stepping/Modulo indices: one lock slot per allocated
                        # entry along `d`.
                        if all(w.aindices[d].is_Stepping for w in writes) or \
                           all(w.aindices[d].is_Modulo for w in writes):
                            size = f.shape_allocated[d]
                            assert is_integer(size)
                            ld = CustomDimension(name='ld', symbolic_size=size, parent=d)
                        elif all(w[d] == 0 for w in writes):
                            # Special case, degenerates to scalar lock
                            raise KeyError
                        else:
                            # Functions over non-stepping Dimensions need no lock
                            continue
                    except KeyError:
                        # Would degenerate to a scalar, but we rather use a lock
                        # of size 1 for simplicity
                        ld = CustomDimension(name='ld', symbolic_size=1)
                    # One Lock per protected Function, reused across Clusters.
                    lock = locks.setdefault(f, Lock(name='lock%d' % len(locks),
                                                    dimensions=ld, target=f))
                    for w in writes:
                        try:
                            index = w[d]
                            logical_index = index + offset
                        except TypeError:
                            # Non-integer index: only valid for size-1 locks.
                            assert ld.symbolic_size == 1
                            index = 0
                            logical_index = 0
                        if logical_index in protected[f]:
                            continue
                        waits[c1].append(WaitLock(lock[index]))
                        protected[f].add(logical_index)
            # Taskify `c0`
            for f in protected:
                lock = locks[f]
                indices = sorted({r[d] for r in c0.scope.reads[f]})
                if indices == [None]:
                    # `lock` is protecting a Function which isn't defined over `d`
                    # E.g., `d=time` and the protected function is `a(x, y)`
                    assert lock.size == 1
                    indices = [0]
                tasks[c0].extend(WithLock(lock[i]) for i in indices)
        # Rebuild only the Clusters that received SyncOps.
        processed = []
        for c in clusters:
            if waits[c] or tasks[c]:
                processed.append(c.rebuild(syncs={d: waits[c] + tasks[c]}))
            else:
                processed.append(c)
        return processed
class Streaming(Asynchronous):

    """
    Tag Clusters with the FetchWait, FetchWaitPrefetch and Delete SyncOps to
    stream Functions in and out the process memory.

    Parameters
    ----------
    key : callable, optional
        Return the Functions that need to be streamed in a given Cluster.
    """

    @timed_pass(name='streaming')
    def process(self, clusters):
        """Entry point; timing for the pass is recorded under 'streaming'."""
        return super().process(clusters)

    def callback(self, clusters, prefix):
        """Bind fetch SyncOps to the first Cluster accessing each streamed
        (Function, index) pair and delete SyncOps to the last one."""
        if not prefix:
            return clusters
        it = prefix[-1]
        d = it.dim
        direction = it.direction
        # Parent Dimension (if any): used as the sync Dimension in the
        # fixed-size CustomDimension case below.
        try:
            pd = prefix[-2].dim
        except IndexError:
            pd = None
        # What are the stream-able Dimensions?
        # 0) all sequential Dimensions
        # 1) all CustomDimensions of fixed (i.e. integer) size, which
        #    implies a bound on the amount of streamed data
        if all(SEQUENTIAL in c.properties[d] for c in clusters):
            make_fetch = lambda f, i, s, cb: FetchWaitPrefetch(f, d, direction, i, s, cb)
            make_delete = lambda f, i, s, cb: Delete(f, d, direction, i, s, cb)
            syncd = d
        elif d.is_Custom and is_integer(it.size):
            make_fetch = lambda f, i, s, cb: FetchWait(f, d, direction, i, it.size, cb)
            make_delete = lambda f, i, s, cb: Delete(f, d, direction, i, it.size, cb)
            syncd = pd
        else:
            return clusters
        # Map (Function, index along d) -> first/last Cluster accessing it.
        first_seen = {}
        last_seen = {}
        for c in clusters:
            candidates = self.key(c)
            if not candidates:
                continue
            for i in c.scope.accesses:
                f = i.function
                if f in candidates:
                    k = (f, i[d])
                    first_seen.setdefault(k, c)
                    last_seen[k] = c
        if not first_seen:
            return clusters
        # Bind fetches and deletes to Clusters
        sync_ops = defaultdict(list)
        callbacks = [(frozendict(first_seen), make_fetch),
                     (frozendict(last_seen), make_delete)]
        for seen, callback in callbacks:
            # Regroup as Cluster -> Function -> [indices] before sectioning.
            mapper = defaultdict(lambda: DefaultOrderedDict(list))
            for (f, v), c in seen.items():
                mapper[c][f].append(v)
            for c, m in mapper.items():
                for f, v in m.items():
                    for i, s in indices_to_sections(v):
                        next_cbk = make_next_cbk(c.guards.get(d), d, direction)
                        sync_ops[c].append(callback(f, i, s, next_cbk))
        # Attach SyncOps to Clusters
        processed = []
        for c in clusters:
            v = sync_ops.get(c)
            if v is not None:
                processed.append(c.rebuild(syncs=normalize_syncs(c.syncs, {syncd: v})))
            else:
                processed.append(c)
        return processed
# Utilities
def make_next_cbk(rel, d, direction):
    """
    Build a callable mapping a symbol `s` to a sympy.Relational that states,
    symbolically, whether the next fetch/prefetch will be executed.

    `rel` is the guard (if any) attached to the Cluster along `d`; only the
    ConditionalDimension-style guard ``Mod(..., v) == 0`` is supported.
    """
    forward = direction is Forward

    # Unguarded case: the next iteration exists while `s` is in bounds.
    if rel is None:
        if forward:
            return lambda s: Le(s, d.symbolic_max)
        return lambda s: Ge(s, d.symbolic_min)

    # Only case we know how to deal with, today, is the one induced
    # by a ConditionalDimension with structured condition (e.g. via `factor`)
    if not (rel.is_Equality and rel.rhs == 0 and isinstance(rel.lhs, Mod)):
        raise InvalidOperator("Unable to understand data streaming pattern")
    _, v = rel.lhs.args

    if forward:
        # Round `s` up to the nearest multiple of `v` before the bound check
        return lambda s: Le(Mul(((s + v - 1) / v), v, evaluate=False), d.symbolic_max)
    # Round `s` down to the nearest multiple of `v` before the bound check
    return lambda s: Ge(Mul((s / v), v, evaluate=False), d.symbolic_min)
| {
"content_hash": "e0f1b97e1ef264af1b3d2ffeec944086",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 90,
"avg_line_length": 35.4734693877551,
"alnum_prop": 0.5208836727649292,
"repo_name": "opesci/devito",
"id": "23d278d45bb71b1201813297c2e6ee665da7463d",
"size": "8691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "devito/passes/clusters/asynchrony.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "812"
},
{
"name": "Python",
"bytes": "1683413"
},
{
"name": "Shell",
"bytes": "3900"
}
],
"symlink_target": ""
} |
"""
scanFAAM.py
===========
Holds the scanFAAM function that is used to work out the first and last
index of real data (i.e. non-missing data in a load of FAAM flight data).
It is used in conjunction with nappy to reduce the size of output files
by missing out beginning and end periods that hold only misssing values.
Usage
=====
scanFAAM.py -f <filename> [-m <missing_value>]
Where:
------
filename - path to a FAAM NetCDF file
missing_value - missing value to use for variables.
"""
import os, sys, cdms, getopt
def scanFAAM(fileName=None, vars=None, nth=4, missingValuesToUse=(-9999., -32767.)):
"""
Scans every 'nth' variable in the list of variables (or found in the
file and gets the first and last index of the first (time) dimension
that holds real (non-missing) values.
"""
if type(missingValuesToUse)!=type((1,2)):
missingValuesToUse=(missingValuesToUse,)
startList=[]
endList=[]
start=None
end=None
if not fileName and not vars:
raise "You must provide either a file name or a list of cdms variables."
if fileName:
f=cdms.open(fileName)
vars=f.listvariables()
for var in vars:
if type(var)!=type(""):
id=var.id
else:
id=var
if id[-4:]=="FLAG" or id=="Time":
continue
if type(var)==type(""):
var=f(var)
step=1000
while (start, end)==(None, None):
(start, end)=findMissing(var, step, missingValuesToUse)
step=step/2
startList.append(start)
endList.append(end)
print "Start/End index: %s %s:%s" % (id, start, end)
startMin=min(startList)
endMax=max(endList)
return (startMin, endMax)
def findMissing(var, step, missingValuesToUse):
    """
    Returns the (start, end) tuple for a given variable where
    they are indices of an array where missing values end and begin.

    Scans forward from index 0 with stride `step` for `start` and backward
    from the last index for `end`; either may be returned as None when no
    missing value is hit at that stride.
    """
    start=None
    end=None
    i0=0
    sh=var.shape
    # Scan only along the first (time) dimension.
    iend=sh[0]-1
    print var.id, step
    # NOTE(review): when several missing values are supplied, a later value
    # can overwrite the (start, end) found for an earlier one -- confirm
    # this is the intended precedence.
    for miss in missingValuesToUse:
        for i in range(i0, iend, step):
            if var[i][0]==miss:
                start=i
                break
        for e in range(iend, i0, -step):
            if var[e][0]==miss:
                end=e
                break
    return (start, end)
if __name__=="__main__":
    # Command line: scanFAAM.py -f <filename> [-m <missing_value>]
    opts, _extra = getopt.getopt(sys.argv[1:], "f:m:")

    fileName = None
    missingValue = None
    for flag, value in opts:
        if flag == "-f":
            fileName = value
        elif flag == "-m":
            missingValue = float(value)

    scanFAAM(fileName=fileName, missingValuesToUse=(missingValue,))
| {
"content_hash": "1fc14ccbf92bf06a37e27bc03788fb45",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 84,
"avg_line_length": 25.146788990825687,
"alnum_prop": 0.581904414447282,
"repo_name": "eufarn7sp/egads-eufar",
"id": "6ff0d9aa2dc1bd7dd8ed525703fb91abe6dbdc99",
"size": "2764",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "egads/thirdparty/nappy/contrib/aircraft/scanFAAM.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "309547"
}
],
"symlink_target": ""
} |
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
sys.path.append('/home/carlos/ws_scalable/ws_scalable/')
sys.path.append('/home/carlos/ws_scalable/env/')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ws_scalable'
copyright = u'2014, Carlos'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1'
# The full version, including alpha/beta/rc tags.
release = '1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ws_scalabledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ws_scalable.tex', u'ws\\_scalable Documentation',
u'Carlos', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ws_scalable', u'ws_scalable Documentation',
[u'Carlos'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# One Texinfo document is produced per tuple below.
texinfo_documents = [
  ('index', 'ws_scalable', u'ws_scalable Documentation',
   u'Carlos', 'ws_scalable', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| {
"content_hash": "129c8975176362cc30d4664592b737d2",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 79,
"avg_line_length": 31.692,
"alnum_prop": 0.7054146156758804,
"repo_name": "ccarvalheira/wsep",
"id": "331cf3dc12b5b9d9274c9f273de25923c21399a8",
"size": "8347",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "confs/conf.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Perl",
"bytes": "6895"
},
{
"name": "Python",
"bytes": "73216"
},
{
"name": "Shell",
"bytes": "2048"
}
],
"symlink_target": ""
} |
"""
Provides various authentication policies.
"""
from __future__ import unicode_literals
import base64
import binascii
from django.contrib.auth import authenticate, get_user_model
from django.middleware.csrf import CsrfViewMiddleware
from django.utils.six import text_type
from django.utils.translation import ugettext_lazy as _
from rest_framework import HTTP_HEADER_ENCODING, exceptions
from rest_framework.authentication import TokenAuthentication as DefaultTokenAuthentication
from django.contrib.auth import get_user_model
class TokenAuthentication(DefaultTokenAuthentication):
    """DRF token authentication that also accepts the token via the
    custom ``X-User-Token`` request header by rewriting it into the
    standard ``Authorization`` header before delegating to the default
    implementation."""

    def authenticate(self, request, *args, **kwargs):
        custom_token = request.META.get('HTTP_X_USER_TOKEN')
        if custom_token:
            # Mirror the custom header into the header DRF actually reads.
            request.META['HTTP_AUTHORIZATION'] = 'Token %s' % custom_token
        return super(TokenAuthentication, self).authenticate(request, *args, **kwargs)
class EmailBackend(object):
    """Django auth backend that treats the supplied username as an email
    address when looking up the user."""

    def authenticate(self, username=None, password=None, **kwargs):
        """Return the matching active user if the password checks out, else None."""
        user_model = get_user_model()
        try:
            candidate = user_model.objects.get(email=username)
        except user_model.DoesNotExist:
            return None
        # Only active users with a matching password may authenticate.
        if getattr(candidate, 'is_active', False) and candidate.check_password(password):
            return candidate
        return None

    def get_user(self, user_id):
        """Return the user with primary key *user_id*, or None if absent."""
        user_model = get_user_model()
        try:
            return user_model.objects.get(pk=user_id)
        except user_model.DoesNotExist:
            return None
| {
"content_hash": "710c57452399c4be49524b1d84a10b0c",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 93,
"avg_line_length": 31.163265306122447,
"alnum_prop": 0.6836935166994106,
"repo_name": "scottbecker/autolims",
"id": "9512a2db08574e0ea7550e927ca2b2e7c95a2c0a",
"size": "1527",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "autolims/authentication.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "99408"
},
{
"name": "HTML",
"bytes": "17180"
},
{
"name": "JavaScript",
"bytes": "205964"
},
{
"name": "Python",
"bytes": "340397"
}
],
"symlink_target": ""
} |
import pygame as pg
from random import uniform, choice, randint
from settings import *
from tilemap import collide_hit_rect
vec = pg.math.Vector2
def collide_with_walls(sprite, group, dir):
    """Resolve collisions between *sprite* and *group* along one axis.

    *dir* is 'x' or 'y'; resolving one axis at a time lets sprites slide
    along walls.  The sprite's ``pos``, ``vel`` and ``hit_rect`` are
    adjusted in place.
    """
    if dir == 'x':
        hits = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
        if hits:
            wall_rect = hits[0].rect
            half_width = sprite.hit_rect.width / 2
            if wall_rect.centerx > sprite.hit_rect.centerx:
                # Wall is to the right: push the sprite to its left edge.
                sprite.pos.x = wall_rect.left - half_width
            if wall_rect.centerx < sprite.hit_rect.centerx:
                # Wall is to the left: push the sprite to its right edge.
                sprite.pos.x = wall_rect.right + half_width
            sprite.vel.x = 0
            sprite.hit_rect.centerx = sprite.pos.x
    if dir == 'y':
        hits = pg.sprite.spritecollide(sprite, group, False, collide_hit_rect)
        if hits:
            wall_rect = hits[0].rect
            half_height = sprite.hit_rect.height / 2
            if wall_rect.centery > sprite.hit_rect.centery:
                sprite.pos.y = wall_rect.top - half_height
            if wall_rect.centery < sprite.hit_rect.centery:
                sprite.pos.y = wall_rect.bottom + half_height
            sprite.vel.y = 0
            sprite.hit_rect.centery = sprite.pos.y
class Player(pg.sprite.Sprite):
    """Player sprite: rotates with left/right, drives with up/down and
    fires bullets with the space bar."""
    def __init__(self, game, x, y):
        self._layer = PLAYER_LAYER
        self.groups = game.all_sprites
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.player_img
        self.rect = self.image.get_rect()
        self.rect.center = (x, y)
        # Collisions use hit_rect, not the (rotating) image rect.
        self.hit_rect = PLAYER_HIT_RECT
        self.hit_rect.center = self.rect.center
        self.vel = vec(0, 0)
        self.pos = vec(x, y)
        self.rot = 0
        # Timestamp (ms) of the last bullet fired; throttles the fire rate.
        self.last_shot = 0
        self.health = PLAYER_HEALTH
    def get_keys(self):
        """Translate held keys into rotation speed, velocity and shooting."""
        self.rot_speed = 0
        self.vel = vec(0, 0)
        keys = pg.key.get_pressed()
        if keys[pg.K_LEFT] or keys[pg.K_a]:
            self.rot_speed = PLAYER_ROT_SPEED
        if keys[pg.K_RIGHT] or keys[pg.K_d]:
            self.rot_speed = -PLAYER_ROT_SPEED
        if keys[pg.K_UP] or keys[pg.K_w]:
            # Velocity vectors are rotated by -rot to match screen coordinates.
            self.vel = vec(PLAYER_SPEED, 0).rotate(-self.rot)
        if keys[pg.K_DOWN] or keys[pg.K_s]:
            # Reversing is half speed.
            self.vel = vec(-PLAYER_SPEED / 2, 0).rotate(-self.rot)
        if keys[pg.K_SPACE]:
            now = pg.time.get_ticks()
            if now - self.last_shot > BULLET_RATE:
                self.last_shot = now
                dir = vec(1, 0).rotate(-self.rot)
                pos = self.pos + BARREL_OFFSET.rotate(-self.rot)
                Bullet(self.game, pos, dir)
                # Firing pushes the player backwards (recoil).
                self.vel = vec(-KICKBACK, 0).rotate(-self.rot)
                MuzzleFlash(self.game, pos)
    def update(self):
        """Process input, rotate the image, move and resolve wall collisions."""
        self.get_keys()
        # Keep rotation wrapped into [0, 360).
        self.rot = (self.rot + self.rot_speed * self.game.dt) % 360
        self.image = pg.transform.rotate(self.game.player_img, self.rot)
        self.rect = self.image.get_rect()
        self.rect.center = self.pos
        self.pos += self.vel * self.game.dt
        # Collide one axis at a time so the player can slide along walls.
        self.hit_rect.centerx = self.pos.x
        collide_with_walls(self, self.game.walls, 'x')
        self.hit_rect.centery = self.pos.y
        collide_with_walls(self, self.game.walls, 'y')
        self.rect.center = self.hit_rect.center
class Mob(pg.sprite.Sprite):
    """Enemy sprite that steers towards the player while avoiding
    other mobs, and dies when its health reaches zero."""
    def __init__(self, game, x, y):
        self._layer = MOB_LAYER
        self.groups = game.all_sprites, game.mobs
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.mob_img
        self.rect = self.image.get_rect()
        self.rect.center = (x, y)
        # copy() so each mob gets its own collision rect instance.
        self.hit_rect = MOB_HIT_RECT.copy()
        self.hit_rect.center = self.rect.center
        self.pos = vec(x, y)
        self.vel = vec(0, 0)
        self.acc = vec(0, 0)
        self.rect.center = self.pos
        self.rot = 0
        self.health = MOB_HEALTH
        # Each mob moves at one of a few preset speeds for variety.
        self.speed = choice(MOB_SPEEDS)
    def avoid_mobs(self):
        """Add a repulsion term to acc for each nearby mob (keeps them spread out)."""
        for mob in self.game.mobs:
            if mob != self:
                dist = self.pos - mob.pos
                if 0 < dist.length() < AVOID_RADIUS:
                    self.acc += dist.normalize()
    def update(self):
        """Steer towards the player, integrate motion and resolve collisions."""
        # Face the player.
        self.rot = (self.game.player.pos - self.pos).angle_to(vec(1, 0))
        self.image = pg.transform.rotate(self.game.mob_img, self.rot)
        # self.rect = self.image.get_rect()
        self.rect.center = self.pos
        self.acc = vec(1, 0).rotate(-self.rot)
        self.avoid_mobs()
        self.acc.scale_to_length(self.speed)
        # Velocity-proportional drag term (simple friction).
        self.acc += self.vel * -1
        self.vel += self.acc * self.game.dt
        # Kinematic integration: x += v*dt + 0.5*a*dt^2.
        self.pos += self.vel * self.game.dt + 0.5 * self.acc * self.game.dt ** 2
        self.hit_rect.centerx = self.pos.x
        collide_with_walls(self, self.game.walls, 'x')
        self.hit_rect.centery = self.pos.y
        collide_with_walls(self, self.game.walls, 'y')
        self.rect.center = self.hit_rect.center
        if self.health <= 0:
            self.kill()
    def draw_health(self):
        """Draw a colour-coded health bar onto the mob's image when damaged."""
        if self.health > 60:
            col = GREEN
        elif self.health > 30:
            col = YELLOW
        else:
            col = RED
        width = int(self.rect.width * self.health / MOB_HEALTH)
        self.health_bar = pg.Rect(0, 0, width, 7)
        if self.health < MOB_HEALTH:
            pg.draw.rect(self.image, col, self.health_bar)
class Bullet(pg.sprite.Sprite):
    """A projectile that travels in a fixed direction with a small random
    spread, dying on wall impact or when its lifetime expires."""

    def __init__(self, game, pos, dir):
        self._layer = BULLET_LAYER
        self.groups = game.all_sprites, game.bullets
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.bullet_img
        self.rect = self.image.get_rect()
        self.hit_rect = self.rect
        self.pos = vec(pos)
        self.rect.center = pos
        # Random angular spread makes the gun slightly inaccurate.
        spread_angle = uniform(-GUN_SPREAD, GUN_SPREAD)
        self.vel = dir.rotate(spread_angle) * BULLET_SPEED
        self.spawn_time = pg.time.get_ticks()

    def update(self):
        self.pos += self.vel * self.game.dt
        self.rect.center = self.pos
        expired = pg.time.get_ticks() - self.spawn_time > BULLET_LIFETIME
        if pg.sprite.spritecollideany(self, self.game.walls) or expired:
            self.kill()
class Wall(pg.sprite.Sprite):
    """A solid wall sprite placed on the tile grid (grid coords -> pixels)."""

    def __init__(self, game, x, y):
        self._layer = WALL_LAYER
        self.groups = game.all_sprites, game.walls
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = game.wall_img
        self.rect = self.image.get_rect()
        self.x = x
        self.y = y
        # Convert tile coordinates to pixel coordinates.
        self.rect.topleft = (x * TILESIZE, y * TILESIZE)
class Obstacle(pg.sprite.Sprite):
    """An invisible collision rectangle (pixel coordinates), used for map
    geometry that blocks movement but is never drawn."""

    def __init__(self, game, x, y, w, h):
        # Only joins the walls group: it collides but is not rendered.
        self.groups = game.walls
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.rect = pg.Rect(x, y, w, h)
        self.hit_rect = self.rect
        self.x = x
        self.y = y
        # pg.Rect is already positioned at (x, y); kept for explicitness.
        self.rect.x = x
        self.rect.y = y
class MuzzleFlash(pg.sprite.Sprite):
    """A short-lived, randomly sized flash image shown where a bullet spawns."""

    def __init__(self, game, pos):
        self._layer = EFFECTS_LAYER
        self.groups = game.all_sprites
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        flash_size = randint(20, 50)
        flash_img = choice(game.gun_flashes)
        self.image = pg.transform.scale(flash_img, (flash_size, flash_size))
        self.rect = self.image.get_rect()
        self.pos = pos
        self.rect.center = pos
        self.spawn_time = pg.time.get_ticks()

    def update(self):
        age = pg.time.get_ticks() - self.spawn_time
        if age > FLASH_DURATION:
            self.kill()
| {
"content_hash": "6430453aa9505df0bc6a6194dd10f709",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 80,
"avg_line_length": 37.5678391959799,
"alnum_prop": 0.5593900481540931,
"repo_name": "kidscancode/gamedev",
"id": "5f8fa99cb2539c11b42ed90a069c7ce09e871a56",
"size": "7476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tutorials/tilemap/part 15/sprites.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1690942"
}
],
"symlink_target": ""
} |
import time
import json
class Task():
    """A queue task serialized as the JSON array ``[0, <time>, <data>, 0]``.

    ``data`` must already be a JSON-encoded string; it is spliced into the
    serialized array verbatim (not re-encoded), preserving its formatting.
    """

    def __init__(self, data, task_time=None):
        """Store the payload and timestamp.

        Args:
            data: JSON string payload for the task.
            task_time: Unix timestamp for the task; defaults to now.

        Raises:
            TypeError: if ``data`` is not a string.
        """
        if not isinstance(data, str):
            raise TypeError("Data must be a JSON string")
        self.data = data
        # Bug fix: compare against None instead of truthiness, so the
        # valid timestamp 0 (the epoch) is not silently replaced by "now".
        if task_time is None:
            self.time = int(time.time())
        else:
            self.time = task_time

    def get_json(self):
        """Return the serialized task string.

        The placeholder is unique in the dumped array, so the single
        replace splices ``self.data`` in without re-encoding it.
        """
        return json.dumps([0, self.time, "{{placeholder}}", 0]).replace("\"{{placeholder}}\"", self.data)
| {
"content_hash": "eea73d0b26be7c36556248cf6612ce67",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 105,
"avg_line_length": 24.05263157894737,
"alnum_prop": 0.5536105032822757,
"repo_name": "joshuakarjala/redisq-py",
"id": "6f6f45ea34931822dd9d9aadda613af541987c98",
"size": "481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "redisq/task.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2791"
}
],
"symlink_target": ""
} |
'''Runs the basic test suite through a cross compiler.
This is now just a wrapper around run_project_tests.py with specific arguments
'''
import argparse
import subprocess
from mesonbuild import mesonlib
from mesonbuild.coredata import version as meson_version
def runtests(cross_file, failfast):
    """Run the 'common' project tests with the ninja backend for *cross_file*.

    Returns the exit status of the run_project_tests.py invocation.
    """
    cmd = mesonlib.python_command + ['run_project_tests.py', '--backend', 'ninja']
    if failfast:
        cmd.append('--failfast')
    cmd += ['--only', 'common']
    cmd += ['--cross-file', cross_file]
    return subprocess.call(cmd)
def main():
    """Parse the command line and run the cross-compilation test suite."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--failfast', action='store_true')
    arg_parser.add_argument('cross_file')
    parsed = arg_parser.parse_args()
    return runtests(parsed.cross_file, parsed.failfast)
if __name__ == '__main__':
    # Print a version banner, then exit with the test suite's return code.
    print('Meson build system', meson_version, 'Cross Tests')
    raise SystemExit(main())
| {
"content_hash": "ff14d9de38d47c209470d286b8b49639",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 160,
"avg_line_length": 34.11538461538461,
"alnum_prop": 0.6967305524239008,
"repo_name": "becm/meson",
"id": "1e67876ab66d9621bd46145e1c8e85eaa6a000b9",
"size": "1504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run_cross_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "4190"
},
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C",
"bytes": "167971"
},
{
"name": "C#",
"bytes": "1130"
},
{
"name": "C++",
"bytes": "51171"
},
{
"name": "CMake",
"bytes": "27103"
},
{
"name": "Cuda",
"bytes": "7454"
},
{
"name": "D",
"bytes": "5313"
},
{
"name": "Dockerfile",
"bytes": "1960"
},
{
"name": "Emacs Lisp",
"bytes": "919"
},
{
"name": "Fortran",
"bytes": "11539"
},
{
"name": "Genie",
"bytes": "341"
},
{
"name": "HTML",
"bytes": "117"
},
{
"name": "Inno Setup",
"bytes": "354"
},
{
"name": "Java",
"bytes": "2570"
},
{
"name": "JavaScript",
"bytes": "136"
},
{
"name": "LLVM",
"bytes": "75"
},
{
"name": "Lex",
"bytes": "139"
},
{
"name": "Meson",
"bytes": "454262"
},
{
"name": "Objective-C",
"bytes": "1235"
},
{
"name": "Objective-C++",
"bytes": "381"
},
{
"name": "PowerShell",
"bytes": "2242"
},
{
"name": "Python",
"bytes": "2912935"
},
{
"name": "Roff",
"bytes": "569"
},
{
"name": "Rust",
"bytes": "1079"
},
{
"name": "Shell",
"bytes": "6800"
},
{
"name": "Swift",
"bytes": "1152"
},
{
"name": "Vala",
"bytes": "10025"
},
{
"name": "Verilog",
"bytes": "709"
},
{
"name": "Vim script",
"bytes": "9919"
},
{
"name": "Yacc",
"bytes": "50"
}
],
"symlink_target": ""
} |
'''author@esilgard'''
'''
test 1 - make sure that the path to the nlp_engine is valid (the python script exists)
'''
test_num = '1'
test_desc = 'make sure that the path to the nlp_engine is valid'


def get(engine_path):
    """Check that *engine_path* names a readable file (the nlp_engine script).

    Args:
        engine_path: Filesystem path to the nlp_engine python script.

    Returns:
        tuple: (test number, 'Pass' or 'Fail', test description).
    """
    try:
        # The context manager guarantees the file handle is closed
        # (the original left the handle open).
        with open(engine_path, 'r') as script_file:
            script_file.read()
        return test_num, 'Pass', test_desc
    except (IOError, OSError, UnicodeDecodeError):
        # Narrowed from a bare except: only unreadable/inaccessible
        # files count as failure; other errors propagate.
        return test_num, 'Fail', test_desc
| {
"content_hash": "be92ad9a2fef9bf476519a61cb1fbae6",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 86,
"avg_line_length": 21.5,
"alnum_prop": 0.6253229974160207,
"repo_name": "esilgard/argos_nlp_tests",
"id": "0dc057eec2c3fd619178b509ae158f680673e67d",
"size": "547",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tests/test1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "10749"
}
],
"symlink_target": ""
} |
from basescript import BaseScript
class Adder(BaseScript):
    """Example BaseScript subclass: prints the sum of two fixed numbers
    and one number taken from the command line."""

    # The following specifies the script description so that it be used
    # as a part of the usage doc when --help option is used during running.
    DESC = "Adds numbers"

    def __init__(self):
        super(Adder, self).__init__()
        self.a = 10
        self.b = 20

    def define_args(self, parser):
        # Register the positional operand to be parsed from argv.
        parser.add_argument("c", type=int, help="Number to add")

    def run(self):
        self.log.info("Starting run of script ...")
        total = self.a + self.b + self.args.c
        print(total)
        self.log.info("Script is done")
if __name__ == "__main__":
    # Entry point; start() is inherited from BaseScript (defined elsewhere).
    Adder().start()
| {
"content_hash": "1a1183d0978565c2229b21a0c1b28c9f",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 75,
"avg_line_length": 24.884615384615383,
"alnum_prop": 0.5965996908809892,
"repo_name": "deep-compute/basescript",
"id": "0eb57ccf097b53339a403a5fe6478d3ffe4c3fd0",
"size": "647",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/adder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22832"
}
],
"symlink_target": ""
} |
from __future__ import annotations
from .apibible import ApiBible
from .biblegateway import BibleGateway
# Names exported by ``from <package>.services import *``.
__all__ = ('ApiBible', 'BibleGateway')
| {
"content_hash": "f553147df31f1dca4de07f3f9d3d4068",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 38,
"avg_line_length": 24.333333333333332,
"alnum_prop": 0.7534246575342466,
"repo_name": "bryanforbes/Erasmus",
"id": "9a1a37f3c38baa0aa8c5e8c7db9fc07bbf76a3e3",
"size": "146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "erasmus/services/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Fluent",
"bytes": "20011"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "523473"
}
],
"symlink_target": ""
} |
import os, glob
from distutils.core import setup
def find_packages(base=None):
    """Return dotted module names for every ``.py`` file under ``servequnit``.

    Args:
        base: Directory containing the ``servequnit`` package.  Defaults to
            the directory holding this setup script (backward compatible).

    Returns:
        list: Dotted names, e.g. ``['servequnit', 'servequnit.server']``.
        A package's ``__init__.py`` maps to the package directory itself.
    """
    here = base if base is not None else os.path.dirname(os.path.realpath(__file__))
    packages = []
    for name in glob.glob(os.path.join(here, "servequnit", "**.py")):
        if os.path.basename(name) == "__init__.py":
            # The package's __init__.py stands for the package directory.
            name = os.path.dirname(name)
        else:
            name = os.path.splitext(name)[0]
        # Bug fix: the original computed this relative dotted name but never
        # appended it, so the function always returned an empty list.
        dotted = name.replace(here, "").lstrip(os.path.sep).replace(os.path.sep, ".")
        packages.append(dotted)
    return packages
# Register the distribution with distutils.  long_description is read from
# the README at import time, so this script must run from the project root.
# NOTE(review): py_modules is populated from find_packages(); confirm the
# produced names match what distutils expects for py_modules.
setup(name="servequnit", version="1.0.0",
      description="Run browser-based javascript unit tests.",
      long_description=open('README.rst').read(), license="MIT",
      author="James Webber", author_email="bunkerprivate@gmail.com",
      packages=['servequnit'], py_modules=find_packages(),
      scripts=['scripts/servequnit',],
      url="http://github.com/bnkr/servequnit",)
| {
"content_hash": "aa43a5a3ad6bc5da41170f214238e109",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 68,
"avg_line_length": 35.166666666666664,
"alnum_prop": 0.6208530805687204,
"repo_name": "bnkr/servequnit",
"id": "12a0035d1ff2cbe168c1ba82a2d5cd87a2512db3",
"size": "862",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "193"
},
{
"name": "Python",
"bytes": "53980"
},
{
"name": "Shell",
"bytes": "177"
}
],
"symlink_target": ""
} |
"""Config flow for Plum Lightpad."""
from __future__ import annotations
import logging
from typing import Any
from aiohttp import ContentTypeError
from requests.exceptions import ConnectTimeout, HTTPError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
from .utils import load_plum
_LOGGER = logging.getLogger(__name__)
class PlumLightpadConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Config flow for Plum Lightpad integration."""

    VERSION = 1

    def _show_form(self, errors=None):
        """Render the credentials form, optionally with validation errors."""
        data_schema = vol.Schema(
            {
                vol.Required(CONF_USERNAME): str,
                vol.Required(CONF_PASSWORD): str,
            }
        )
        return self.async_show_form(
            step_id="user", data_schema=data_schema, errors=errors or {}
        )

    async def async_step_user(
        self, user_input: ConfigType | None = None
    ) -> dict[str, Any]:
        """Handle a flow initialized by the user or redirected to by import."""
        if not user_input:
            return self._show_form()

        username = user_input[CONF_USERNAME]
        password = user_input[CONF_PASSWORD]

        # load Plum just so we know username/password work
        try:
            await load_plum(username, password, self.hass)
        except (ContentTypeError, ConnectTimeout, HTTPError) as ex:
            _LOGGER.error("Unable to connect/authenticate to Plum cloud: %s", str(ex))
            return self._show_form({"base": "cannot_connect"})

        # One entry per cloud account.
        await self.async_set_unique_id(username)
        self._abort_if_unique_id_configured()

        entry_data = {CONF_USERNAME: username, CONF_PASSWORD: password}
        return self.async_create_entry(title=username, data=entry_data)

    async def async_step_import(
        self, import_config: ConfigType | None
    ) -> dict[str, Any]:
        """Import a config entry from configuration.yaml."""
        return await self.async_step_user(import_config)
| {
"content_hash": "91a66c93d72809962308c34f0a2b85ba",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 86,
"avg_line_length": 31.636363636363637,
"alnum_prop": 0.6537356321839081,
"repo_name": "adrienbrault/home-assistant",
"id": "40432810cc5fa282e3c082181fcf52fcbc926122",
"size": "2088",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/plum_lightpad/config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "32021043"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
} |
"""Unittest for pyreringconfig."""
__author__ = 'mwu@google.com (Mingyu Wu)'
import getpass
import unittest
from lib import mock_filesystemhandlerextend
from lib import pyreringconfig
class PyreRingConfigTest(unittest.TestCase):
  """Unit test cases for the PyreRingConfig class.

  This will only test the PyreRingConfig class, those module level methods are
  not tested currently.
  """

  def setUp(self):
    """Create a mock filesystem and init PyreRingConfig with it."""
    self.filesystem = (
        mock_filesystemhandlerextend.MockFileSystemHandlerExtend())
    self.filesystem.fake_env_vars['HOSTNAME'] = 'fake_machine'
    self.settings = pyreringconfig.PyreRingConfig(self.filesystem)

  def _PopulateFileSystem(self, temp_file_system):
    """Assistant method to populate mock file system with a dict."""
    for key, value in temp_file_system.iteritems():
      self.filesystem.WriteToFile(key, value)

  def testEmptySettingsAfterInit(self):
    """As default the settings dict should be created and empty."""
    self.assertEqual(len(self.settings.settings), 0)

  def testCreateConfFileIfNotExist(self):
    """Test conf file should be created if not exist."""
    temp_file_system = {
        '/tmp/placeholder': ''
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp/', {})
    self.assertTrue(self.filesystem.CheckFile('/tmp/conf/pyrering.conf'))

  def testDefaultSettings(self):
    """test default values for settings if nothing specified."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ''
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp/', {})
    self.assertEqual(self.settings.settings['root_dir'], '/tmp')
    self.assertEqual(self.settings.settings['report_dir'],
                     '/tmp/reports')
    self.assertEqual(self.settings.settings['conf_file'],
                     '/tmp/conf/pyrering.conf')
    self.assertEqual(self.settings.settings['host_name'], 'fake_machine')
    self.assertEqual(self.settings.settings['tester'], getpass.getuser())
    self.assertEqual(self.settings.settings['project_name'],
                     '<YOUR PROJECT NAME>')
    self.assertEqual(self.settings.settings['default_suite'], 'default_suite')
    # Due to the limitation of current mock_filesystem. This value can't be
    # tested. The test will set source_dir to current path. But I haven't mock
    # the os.path.abspath yet. I can't get a predictable value to test. It will
    # change based on the current location to run pyrering.
    #self.assertEqual(self.settings.settings['source_dir'],
    #                 '<YOUR TEST SCRIPT TOP DIRECTORY>')
    self.assertFalse(self.settings.settings['sendmail'])
    self.assertEqual(self.settings.settings['email_recipients'],
                     getpass.getuser())
    self.assertEqual(self.settings.settings['log_file'], 'pyrering.log')
    # Bug fix: assertEqual was called with a single argument here, which
    # raises TypeError at runtime.  Assuming the 'file_errors' default is
    # falsy like 'sendmail'/'reset' -- NOTE(review): confirm against the
    # defaults in pyreringconfig.
    self.assertFalse(self.settings.settings['file_errors'])
    self.assertFalse(self.settings.settings['reset'])
    self.assertEqual(self.settings.settings['runner'], 'baserunner')
    self.assertEqual(self.settings.settings['FATAL_STRING'], '')
    self.assertEqual(self.settings.settings['header_file'], 'header_info.txt')
    self.assertFalse(self.settings.settings['skip_setup'])

  def testFileSettingsOverWriteDefaultSettings(self):
    """Config file take over default values."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['source_dir = "/tmp/source_dir"\n',
                                    'project_name = "unittest project"\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp', {})
    self.assertEqual(self.settings.settings['source_dir'], '/tmp/source_dir')
    self.assertEqual(self.settings.settings['project_name'],
                     'unittest project')

  def testUserSettingsOverWriteDefaultSettings(self):
    """User settings take over all other settings."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['source_dir = "/tmp/source_dir"\n',
                                    'project_name = "unittest project"\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    user_settings = {'project_name': 'user_name'}
    self.settings.Populate('/tmp', user_settings)
    self.assertEqual(self.settings.settings['project_name'], 'user_name')

  def testConfigFileUnOverWriteableConfig(self):
    """Config file can't take over 'host_name' and 'tester' values.

    'host_name' and 'tester' values are gathered at run time. It can't be taken
    over by config file. Another string not tested here 'time'. It is not
    actually used yet.
    """
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['host_name = "nonexist"\n',
                                    'tester = "nonuser"\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp', {})
    self.assertEqual(self.settings.settings['host_name'], 'fake_machine')
    self.assertEqual(self.settings.settings['tester'], getpass.getuser())

  def testUserSettingsOverWriteUnOverWriteableConfig(self):
    """User settings over write anything.

    Including those not overwriteable settings by config file.
    """
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['host_name = "nonexist"\n',
                                    'tester = "nonuser"\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    user_settings = {'host_name': 'user_host', 'tester': 'user_tester'}
    self.settings.Populate('/tmp', user_settings)
    self.assertEqual(self.settings.settings['host_name'], 'user_host')
    self.assertEqual(self.settings.settings['tester'], 'user_tester')

  def testSomeStringsTreatedAsBooleanTrue(self):
    """sendmail and reset should be treated as boolean correctly."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['sendmail = True\n',
                                    'reset = True\n',
                                    'skip_setup = True\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp', {})
    self.assertTrue(self.settings.settings['sendmail'])
    self.assertTrue(self.settings.settings['reset'])
    self.assertTrue(self.settings.settings['skip_setup'])

  def testOneTreatedAsBooleanTrue(self):
    """sendmail and reset should be treated as boolean correctly."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['sendmail = 1\n',
                                    'reset = 1\n',
                                    'skip_setup = 1\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp', {})
    self.assertTrue(self.settings.settings['sendmail'])
    self.assertTrue(self.settings.settings['reset'])
    self.assertTrue(self.settings.settings['skip_setup'])

  def testSomeStringsTreatedAsBooleanFalse(self):
    """sendmail and reset should be treated as boolean correctly."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['sendmail = False\n',
                                    'reset = False\n',
                                    'skip_setup = False\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp', {})
    self.assertFalse(self.settings.settings['sendmail'])
    self.assertFalse(self.settings.settings['reset'])
    self.assertFalse(self.settings.settings['skip_setup'])

  def testZeroTreatedAsBooleanFalse(self):
    """sendmail and reset should be treated as boolean correctly."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/conf/pyrering.conf': ['sendmail = 0\n',
                                    'reset = 0\n',
                                    'skip_setup = 0\n',
                                   ]
        }
    self._PopulateFileSystem(temp_file_system)
    self.settings.Populate('/tmp', {})
    self.assertFalse(self.settings.settings['sendmail'])
    self.assertFalse(self.settings.settings['reset'])
    self.assertFalse(self.settings.settings['skip_setup'])

  def testUserCanSpecifyConfigFile(self):
    """User can use nondefault config file."""
    temp_file_system = {
        '/tmp/placeholder': '',
        '/tmp/newconfig/pyrering_new.conf': ['project_name = my_project']
        }
    self._PopulateFileSystem(temp_file_system)
    user_settings = {'conf_file': '/tmp/newconfig/pyrering_new.conf'}
    self.settings.Populate('/tmp', user_settings)
    self.assertEqual(self.settings.settings['project_name'], 'my_project')
if __name__ == '__main__':
  # Run every test case defined in this module.
  unittest.main()
| {
"content_hash": "02a57dc0d2117740505648479088cf39",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 79,
"avg_line_length": 42.413953488372094,
"alnum_prop": 0.6168439521877399,
"repo_name": "kdlucas/pyrering",
"id": "d6abc4d6f62cb7a3dc43bc6c3075dee2666f6cb0",
"size": "9732",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/pyreringconfig_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "212695"
},
{
"name": "Shell",
"bytes": "1546"
}
],
"symlink_target": ""
} |
import os
import sys
import unittest
import logging
import tempfile
import shutil
import contextlib
import argparse
from mock import patch, Mock
import test_mic
import vocabcompiler
import g2p
import brain
import saschapath
import tts
import diagnose
from stt import TranscriptionMode
DEFAULT_PROFILE = {
'prefers_email': False,
'location': '08544',
'timezone': 'US/Eastern',
'phone_number': '012344321'
}
class TestVocabCompiler(unittest.TestCase):
    """Checks phrase extraction from modules and from the keyword file."""

    def testPhraseExtraction(self):
        mock_module = Mock()
        mock_module.WORDS = ['MOCK']
        fake_get_modules = classmethod(lambda cls: [mock_module])
        with patch.object(brain.Brain, 'get_modules', fake_get_modules):
            extracted_phrases = vocabcompiler.get_all_phrases()
        self.assertEqual(['MOCK'], extracted_phrases)

    def testKeywordPhraseExtraction(self):
        with tempfile.TemporaryFile() as f:
            # We can't use mock_open here, because it doesn't seem to work
            # with the 'for line in f' syntax
            f.write("MOCK\n")
            f.seek(0)
            open_target = '%s.open' % vocabcompiler.__name__
            with patch(open_target, return_value=f, create=True):
                extracted_phrases = vocabcompiler.get_keyword_phrases()
        self.assertEqual(['MOCK'], extracted_phrases)
class TestVocabulary(unittest.TestCase):
    """Exercises the full compile lifecycle of a vocabulary backend.

    Subclasses override VOCABULARY to test concrete backends with the
    same scenario.
    """
    # Backend under test; the dummy does no real language-model work.
    VOCABULARY = vocabcompiler.DummyVocabulary
    @contextlib.contextmanager
    def do_in_tempdir(self):
        # Provide a scratch directory and remove it afterwards.
        tempdir = tempfile.mkdtemp()
        yield tempdir
        shutil.rmtree(tempdir)
    def testVocabulary(self):
        phrases = ['GOOD BAD UGLY']
        with self.do_in_tempdir() as tempdir:
            self.vocab = self.VOCABULARY(path=tempdir)
            # Before compiling: no revision, not compiled, no match.
            self.assertIsNone(self.vocab.compiled_revision)
            self.assertFalse(self.vocab.is_compiled)
            self.assertFalse(self.vocab.matches_phrases(phrases))
            # We're now testing error handling. To avoid flooding the
            # output with error messages that are catched anyway,
            # we'll temporarly disable logging. Otherwise, error log
            # messages and traceback would be printed so that someone
            # might think that tests failed even though they succeeded.
            logging.disable(logging.ERROR)
            # Failures while creating dirs or writing files must propagate.
            with self.assertRaises(OSError):
                with patch('os.makedirs', side_effect=OSError('test')):
                    self.vocab.compile(phrases)
            with self.assertRaises(OSError):
                with patch('%s.open' % vocabcompiler.__name__,
                           create=True,
                           side_effect=OSError('test')):
                    self.vocab.compile(phrases)
            class StrangeCompilationError(Exception):
                pass
            # Compilation errors propagate, even if cleanup (os.remove)
            # also fails afterwards.
            with patch.object(self.vocab, '_compile_vocabulary',
                              side_effect=StrangeCompilationError('test')):
                with self.assertRaises(StrangeCompilationError):
                    self.vocab.compile(phrases)
                with self.assertRaises(StrangeCompilationError):
                    with patch('os.remove',
                               side_effect=OSError('test')):
                        self.vocab.compile(phrases)
            # Re-enable logging again
            logging.disable(logging.NOTSET)
            # A successful compile flips all the state checked above.
            self.vocab.compile(phrases)
            self.assertIsInstance(self.vocab.compiled_revision, str)
            self.assertTrue(self.vocab.is_compiled)
            self.assertTrue(self.vocab.matches_phrases(phrases))
            # Recompiling (and force-recompiling) must be safe no-ops/reruns.
            self.vocab.compile(phrases)
            self.vocab.compile(phrases, force=True)
class TestPocketsphinxVocabulary(TestVocabulary):
    """Runs the generic vocabulary scenario against the pocketsphinx backend
    and additionally checks the decoder keyword arguments it produces."""

    VOCABULARY = vocabcompiler.PocketsphinxVocabulary

    def testVocabulary(self):
        super(TestPocketsphinxVocabulary, self).testVocabulary()
        decoder_kwargs = self.vocab.decoder_kwargs
        self.assertIsInstance(decoder_kwargs, dict)
        self.assertIn('lm', decoder_kwargs)
        self.assertIn('dict', decoder_kwargs)
class TestPatchedPocketsphinxVocabulary(TestPocketsphinxVocabulary):
    """Same pocketsphinx scenario, but with cmuclmtk and the G2P converter
    replaced by fakes so no external tools are required."""
    def testVocabulary(self):
        def write_test_vocab(text, output_file):
            # Stand-in for cmuclmtk.text2vocab: one word per line.
            with open(output_file, "w") as f:
                for word in text.split(' '):
                    f.write("%s\n" % word)
        def write_test_lm(text, output_file, **kwargs):
            # Stand-in for cmuclmtk.text2lm: writes a dummy model file.
            with open(output_file, "w") as f:
                f.write("TEST")
        class DummyG2P(object):
            # Minimal fake of PhonetisaurusG2P with canned pronunciations.
            def __init__(self, *args, **kwargs):
                pass
            @classmethod
            def get_config(self, *args, **kwargs):
                return {}
            def translate(self, *args, **kwargs):
                return {'GOOD': ['G UH D',
                                 'G UW D'],
                        'BAD': ['B AE D'],
                        'UGLY': ['AH G L IY']}
        # create=True because vocabcompiler may import cmuclmtk lazily.
        with patch('vocabcompiler.cmuclmtk',
                   create=True) as mocked_cmuclmtk:
            mocked_cmuclmtk.text2vocab = write_test_vocab
            mocked_cmuclmtk.text2lm = write_test_lm
            with patch('vocabcompiler.PhonetisaurusG2P', DummyG2P):
                super(TestPatchedPocketsphinxVocabulary,
                      self).testVocabulary()
class TestMic(unittest.TestCase):
    """Transcription tests against the bundled audio clips."""

    def setUp(self):
        # Consistency fix: this module imports ``saschapath`` (not
        # ``jasperpath``), so referencing ``jasperpath`` here raised a
        # NameError at runtime.  Use the imported module for data paths.
        # NOTE(review): confirm ``saschapath.data`` exposes the same
        # audio files as the original jasperpath helper.
        self.jasper_clip = saschapath.data('audio', 'jasper.wav')
        self.time_clip = saschapath.data('audio', 'time.wav')

        from stt import PocketSphinxSTT
        self.stt = PocketSphinxSTT(**PocketSphinxSTT.get_config())

    def testTranscribeJasper(self):
        """
        Does Jasper recognize his name (i.e., passive listen)?
        """
        with open(self.jasper_clip, mode="rb") as f:
            transcription = self.stt.transcribe(f,
                                                mode=TranscriptionMode.KEYWORD)
        self.assertIn("JASPER", transcription)

    def testTranscribe(self):
        """
        Does Jasper recognize 'time' (i.e., active listen)?
        """
        with open(self.time_clip, mode="rb") as f:
            transcription = self.stt.transcribe(f)
        self.assertIn("TIME", transcription)
class TestG2P(unittest.TestCase):
    """Exercises the Phonetisaurus grapheme-to-phoneme converter."""
    def setUp(self):
        config = g2p.PhonetisaurusG2P.get_config()
        self.g2pconverter = g2p.PhonetisaurusG2P(**config)
        self.words = ['GOOD', 'BAD', 'UGLY']
    def testTranslateWord(self):
        # Translating a single word must yield an entry keyed by that word.
        for word in self.words:
            translation = self.g2pconverter.translate(word)
            self.assertIn(word, translation.keys())
    def testTranslateWords(self):
        # A batch translation must contain an entry for every word asked for.
        results = self.g2pconverter.translate(self.words).keys()
        for word in self.words:
            self.assertIn(word, results)
class TestPatchedG2P(TestG2P):
    """Runs the G2P tests with subprocess and the executable check patched
    out, so no phonetisaurus binary or FST model is needed."""
    class DummyProc(object):
        """Fake subprocess.Popen result yielding canned phonetisaurus output."""
        def __init__(self, *args, **kwargs):
            self.returncode = 0
        def communicate(self):
            return ("GOOD\t9.20477\t<s> G UH D </s>\n" +
                    "GOOD\t14.4036\t<s> G UW D </s>\n" +
                    "GOOD\t16.0258\t<s> G UH D IY </s>\n" +
                    "BAD\t0.7416\t<s> B AE D </s>\n" +
                    "BAD\t12.5495\t<s> B AA D </s>\n" +
                    "BAD\t13.6745\t<s> B AH D </s>\n" +
                    "UGLY\t12.572\t<s> AH G L IY </s>\n" +
                    "UGLY\t17.9278\t<s> Y UW G L IY </s>\n" +
                    "UGLY\t18.9617\t<s> AH G L AY </s>\n", "")
    def setUp(self):
        with patch('g2p.diagnose.check_executable',
                   return_value=True):
            with tempfile.NamedTemporaryFile() as f:
                conf = g2p.PhonetisaurusG2P.get_config().items()
                with patch.object(g2p.PhonetisaurusG2P, 'get_config',
                                  classmethod(lambda cls: dict(
                                      conf + [('fst_model', f.name)]))):
                    # Fixed: super(self.__class__, self) recurses forever in
                    # any subclass (self.__class__ is always the leaf class);
                    # name this class explicitly instead.
                    super(TestPatchedG2P, self).setUp()
    def testTranslateWord(self):
        with patch('subprocess.Popen',
                   return_value=TestPatchedG2P.DummyProc()):
            super(TestPatchedG2P, self).testTranslateWord()
    def testTranslateWords(self):
        with patch('subprocess.Popen',
                   return_value=TestPatchedG2P.DummyProc()):
            super(TestPatchedG2P, self).testTranslateWords()
class TestDiagnose(unittest.TestCase):
    """Sanity checks for the diagnose helper module."""
    def testPythonImportCheck(self):
        # A stdlib module must be reported importable; a made-up package
        # name must not be.
        existing_module = "os"
        bogus_module = "nonexistant_package"
        self.assertTrue(diagnose.check_python_import(existing_module))
        self.assertFalse(diagnose.check_python_import(bogus_module))
class TestModules(unittest.TestCase):
    """Conversation tests for the bundled Jasper modules.

    Each test feeds a query (and optional follow-up inputs) through a
    module's handle() via a fake Mic and inspects the spoken outputs.
    """
    def setUp(self):
        self.profile = DEFAULT_PROFILE
        self.send = False
    def runConversation(self, query, inputs, module):
        """Generic method for spoofing conversation.

        Arguments:
        query -- The initial input to the server.
        inputs -- Additional input, if conversation is extended.

        Returns:
        The server's responses, in a list.
        """
        self.assertTrue(module.isValid(query))
        mic = test_mic.Mic(inputs)
        module.handle(query, mic, self.profile)
        return mic.outputs
    def testLife(self):
        from modules import Life
        query = "What is the meaning of life?"
        inputs = []
        outputs = self.runConversation(query, inputs, Life)
        self.assertEqual(len(outputs), 1)
        self.assertTrue("42" in outputs[0])
    def testJoke(self):
        from modules import Joke
        query = "Tell me a joke."
        inputs = ["Who's there?", "Random response"]
        outputs = self.runConversation(query, inputs, Joke)
        self.assertEqual(len(outputs), 3)
        # Fixed: use a context manager so the jokes file is closed instead
        # of leaking an open file handle.
        with open(jasperpath.data('text', 'JOKES.txt'), 'r') as f:
            allJokes = f.read()
        self.assertTrue(outputs[2] in allJokes)
    def testTime(self):
        from modules import Time
        query = "What time is it?"
        inputs = []
        self.runConversation(query, inputs, Time)
    @unittest.skipIf(not diagnose.check_network_connection(),
                     "No internet connection")
    def testGmail(self):
        # Skipped silently when no Gmail credentials are configured.
        key = 'gmail_password'
        if key not in self.profile or not self.profile[key]:
            return
        from modules import Gmail
        query = "Check my email"
        inputs = []
        self.runConversation(query, inputs, Gmail)
    @unittest.skipIf(not diagnose.check_network_connection(),
                     "No internet connection")
    def testHN(self):
        from modules import HN
        query = "find me some of the top hacker news stories"
        if self.send:
            inputs = ["the first and third"]
        else:
            inputs = ["no"]
        outputs = self.runConversation(query, inputs, HN)
        self.assertTrue("front-page articles" in outputs[1])
    @unittest.skipIf(not diagnose.check_network_connection(),
                     "No internet connection")
    def testNews(self):
        from modules import News
        query = "find me some of the top news stories"
        if self.send:
            inputs = ["the first"]
        else:
            inputs = ["no"]
        outputs = self.runConversation(query, inputs, News)
        self.assertTrue("top headlines" in outputs[1])
    @unittest.skipIf(not diagnose.check_network_connection(),
                     "No internet connection")
    def testWeather(self):
        from modules import Weather
        query = "what's the weather like tomorrow"
        inputs = []
        outputs = self.runConversation(query, inputs, Weather)
        self.assertTrue(
            "can't see that far ahead" in outputs[0]
            or "Tomorrow" in outputs[0])
class TestTTS(unittest.TestCase):
    """Smoke test for the TTS engine registry."""
    def testTTS(self):
        # Looking up the dummy engine by slug, instantiating it, and
        # speaking through it must not raise.
        engine_class = tts.get_engine_by_slug('dummy-tts')
        engine = engine_class()
        engine.say('This is a test.')
class TestBrain(unittest.TestCase):
    """Tests for module dispatch and error logging in brain.Brain."""
    @staticmethod
    def _emptyBrain():
        # A Brain wired to a fake mic that records output instead of speaking.
        mic = test_mic.Mic([])
        profile = DEFAULT_PROFILE
        return brain.Brain(mic, profile)
    def testLog(self):
        """Does Brain correctly log errors when raised by modules?"""
        my_brain = TestBrain._emptyBrain()
        unclear = my_brain.modules[-1]
        with patch.object(unclear, 'handle') as mocked_handle:
            with patch.object(my_brain._logger, 'error') as mocked_loggingcall:
                mocked_handle.side_effect = KeyError('foo')
                my_brain.query("zzz gibberish zzz")
                self.assertTrue(mocked_loggingcall.called)
    def testSortByPriority(self):
        """Does Brain sort modules by priority?"""
        my_brain = TestBrain._emptyBrain()
        # Fixed: filter() returns an iterator on Python 3, which sorted()
        # would exhaust, making the comparison below meaningless; a list
        # comprehension behaves identically on Python 2 and stays correct.
        priorities = [m for m in my_brain.modules if hasattr(m, 'PRIORITY')]
        target = sorted(priorities, key=lambda m: m.PRIORITY, reverse=True)
        self.assertEqual(target, priorities)
    def testPriority(self):
        """Does Brain correctly send query to higher-priority module?"""
        my_brain = TestBrain._emptyBrain()
        hn_module = 'HN'
        # Fixed: indexing a filter() object breaks on Python 3; materialize
        # the match list first.
        hn = [m for m in my_brain.modules if m.__name__ == hn_module][0]
        with patch.object(hn, 'handle') as mocked_handle:
            my_brain.query(["hacker news"])
            self.assertTrue(mocked_handle.called)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Test suite for the Jasper client code.')
    parser.add_argument('--light', action='store_true',
                        help='runs a subset of the tests (only requires ' +
                        'Python dependencies)')
    parser.add_argument('--debug', action='store_true',
                        help='show debug messages')
    args = parser.parse_args()
    logging.basicConfig()
    logger = logging.getLogger()
    if args.debug:
        logger.setLevel(logging.DEBUG)
    # Change CWD to jasperpath.LIB_PATH
    os.chdir(jasperpath.LIB_PATH)
    # Tests that only need pure Python always run.
    test_cases = [TestBrain, TestModules, TestDiagnose, TestTTS,
                  TestVocabCompiler, TestVocabulary]
    # --light swaps in mocked variants of the tests that would otherwise
    # need external tools, audio files, or the pocketsphinx stack.
    if args.light:
        test_cases.append(TestPatchedG2P)
        test_cases.append(TestPatchedPocketsphinxVocabulary)
    else:
        test_cases.append(TestG2P)
        test_cases.append(TestPocketsphinxVocabulary)
        test_cases.append(TestMic)
    suite = unittest.TestSuite()
    for test_case in test_cases:
        suite.addTests(unittest.TestLoader().loadTestsFromTestCase(test_case))
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    # Propagate failure through the process exit code (useful for CI).
    if not result.wasSuccessful():
        sys.exit("Tests failed")
| {
"content_hash": "02ae82ed6ca30246d3b2ffd0f38a273e",
"timestamp": "",
"source": "github",
"line_count": 422,
"max_line_length": 79,
"avg_line_length": 34.964454976303315,
"alnum_prop": 0.5901728227719417,
"repo_name": "tdmike/SASCHA",
"id": "71cdab69125a4236b00aca045da52fb1cd5a8870",
"size": "14801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sascha/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "7143"
},
{
"name": "Arduino",
"bytes": "28602"
},
{
"name": "C",
"bytes": "9133"
},
{
"name": "C++",
"bytes": "166918"
},
{
"name": "CSS",
"bytes": "56479"
},
{
"name": "Elixir",
"bytes": "391"
},
{
"name": "JavaScript",
"bytes": "5155"
},
{
"name": "PHP",
"bytes": "17610"
},
{
"name": "Processing",
"bytes": "106955"
},
{
"name": "Python",
"bytes": "109227"
},
{
"name": "Shell",
"bytes": "7183"
},
{
"name": "XSLT",
"bytes": "2042"
}
],
"symlink_target": ""
} |
"""
Tests for file operations.
"""
from os.path import dirname, join
from unittest import TestCase
from nose.tools import eq_, ok_
from confab.files import _import
class TestImport(TestCase):
    """Behaviour of confab.files._import for present, broken, and missing
    modules, including the module_path attribute on ImportError."""
    def setUp(self):
        self.dir_name = join(dirname(__file__), 'data/default')
    def test_import_empty(self):
        # An empty module file still imports successfully.
        ok_(_import("empty", self.dir_name))
    def test_import_simple(self):
        simple = _import("simple", self.dir_name)
        ok_(simple)
        eq_("bar", simple.foo)
    def test_import_not_found(self):
        with self.assertRaises(ImportError) as e:
            _import("missing", self.dir_name)
        # module_path not set: the module itself could not be found
        eq_(None, getattr(e.exception, 'module_path', None))
    def test_import_broken(self):
        with self.assertRaises(ImportError) as e:
            _import("broken", self.dir_name)
        # module_path was set: the module was found but had an import error
        eq_(join(self.dir_name, 'broken.py'), e.exception.module_path)
| {
"content_hash": "8921c4dbcf6f820e81a868e3ddc3d6f6",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 75,
"avg_line_length": 30.285714285714285,
"alnum_prop": 0.6339622641509434,
"repo_name": "locationlabs/confab",
"id": "2353ffec0958de980c5920d9a3d992af972c6767",
"size": "1060",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "confab/tests/test_files.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "119227"
}
],
"symlink_target": ""
} |
import logging
import logging.handlers
import unittest
from unittest.mock import patch
from ecs_scheduler import env, __version__
@patch('ecs_scheduler.env.triggers')
class InitTests(unittest.TestCase):
    """Tests for env.init: trigger setup and logging configuration driven by
    ECSS_LOG_LEVEL / ECSS_LOG_FOLDER environment variables."""
    @patch.object(logging, 'basicConfig')
    @patch.dict('os.environ', clear=True)
    def test(self, fake_log, triggers):
        # init() must always initialize the trigger subsystem.
        env.init()
        triggers.init.assert_called_with()
    @patch.object(logging, 'basicConfig')
    @patch.dict('os.environ', clear=True)
    def test_with_no_predefined_vars(self, fake_log, triggers):
        env.init()
        # With no env vars set, logging uses no explicit level and a single
        # stream handler.
        fake_log.assert_called_with(
            level=None,
            handlers=unittest.mock.ANY,
            format='%(levelname)s:%(name)s:%(asctime)s %(message)s'
        )
        pos_args, expected_args = fake_log.call_args
        expected_handlers = expected_args['handlers']
        self.assertEqual(1, len(expected_handlers))
        self.assertIsInstance(expected_handlers[0], logging.StreamHandler)
    @patch.object(logging, 'basicConfig')
    @patch.dict('os.environ', {'ECSS_LOG_LEVEL': 'INFO'})
    def test_sets_loglevel_if_specified(self, fake_log, triggers):
        env.init()
        fake_log.assert_called_with(
            level=logging.INFO,
            handlers=unittest.mock.ANY,
            format='%(levelname)s:%(name)s:%(asctime)s %(message)s'
        )
    @patch.object(logging, 'basicConfig')
    @patch('os.path.abspath', side_effect=lambda p: '/abs/path/' + p)
    @patch('os.makedirs')
    @patch.dict(
        'os.environ', {'ECSS_LOG_FOLDER': 'foo/bar/testlog'}, clear=True
    )
    def test_sets_logfile(self, fake_makedirs, abspath, fake_log, triggers):
        # Setting ECSS_LOG_FOLDER adds a rotating file handler (5 MB, one
        # backup) alongside the stream handler, creating the folder first.
        with patch.object(
            logging.handlers, 'RotatingFileHandler',
            spec=logging.handlers.RotatingFileHandler
        ) as fake_file_handler:
            env.init()
        fake_makedirs.assert_called_with(
            '/abs/path/foo/bar/testlog', exist_ok=True
        )
        fake_file_handler.assert_called_with(
            'foo/bar/testlog/app.log', maxBytes=5*1024*1024, backupCount=1
        )
        pos_args, expected_args = fake_log.call_args
        expected_handlers = expected_args['handlers']
        self.assertEqual(2, len(expected_handlers))
        self.assertIsInstance(expected_handlers[1],
                              logging.handlers.RotatingFileHandler)
    @patch.object(logging, 'basicConfig')
    @patch('os.path.abspath', side_effect=lambda p: '/abs/path/' + p)
    @patch('os.makedirs')
    @patch.dict(
        'os.environ',
        {
            'ECSS_LOG_FOLDER': 'foo/bar/{HOSTNAME}/testlog',
            'HOSTNAME': 'testhost',
        },
        clear=True
    )
    def test_sets_logfile_with_env_vars(
        self, fake_makedirs, abspath, fake_log, triggers
    ):
        # {VAR} placeholders in ECSS_LOG_FOLDER are expanded from the
        # environment before the path is used.
        with patch.object(
            logging.handlers, 'RotatingFileHandler',
            spec=logging.handlers.RotatingFileHandler
        ) as fake_file_handler:
            env.init()
        fake_makedirs.assert_called_with(
            '/abs/path/foo/bar/testhost/testlog', exist_ok=True
        )
        fake_file_handler.assert_called_with(
            'foo/bar/testhost/testlog/app.log',
            maxBytes=5*1024*1024,
            backupCount=1
        )
        pos_args, expected_args = fake_log.call_args
        expected_handlers = expected_args['handlers']
        self.assertEqual(2, len(expected_handlers))
        self.assertIsInstance(
            expected_handlers[1], logging.handlers.RotatingFileHandler
        )
class GetVarTests(unittest.TestCase):
    """Tests for env.get_var: ECSS_-prefixed lookup, defaults, {VAR}
    formatting, and the required flag."""
    @patch.dict('os.environ', {'ECSS_FOO': 'foobar'})
    def test_return_var(self):
        self.assertEqual('foobar', env.get_var('FOO'))
    @patch.dict('os.environ', clear=True)
    def test_return_missing_var(self):
        self.assertIsNone(env.get_var('FOO'))
    @patch.dict('os.environ', clear=True)
    def test_return_default_var(self):
        self.assertEqual('def_foo', env.get_var('FOO', default='def_foo'))
    @patch.dict(
        'os.environ', {'ECSS_FOO': 'foo{BAZ}bar', 'BAZ': '12'}, clear=True
    )
    def test_applies_environ_to_formatted_value(self):
        # {BAZ} placeholders are expanded from the environment.
        self.assertEqual('foo12bar', env.get_var('FOO'))
    @patch.dict('os.environ', {'ECSS_FOO': 'foobar'})
    def test_return_required_var(self):
        self.assertEqual('foobar', env.get_var('FOO', required=True))
    @patch.dict('os.environ', clear=True)
    def test_raises_on_missing_required_var(self):
        with self.assertRaises(KeyError):
            env.get_var('FOO', required=True)
    @patch.dict(
        'os.environ', {'ECSS_FOO': 'foo{BAZ}bar', 'BAZ': '12'}, clear=True
    )
    def test_applies_environ_to_formatted_required_value(self):
        self.assertEqual('foo12bar', env.get_var('FOO', required=True))
@patch('setuptools_scm.get_version')
class GetVersionTests(unittest.TestCase):
    """env.get_version prefers setuptools_scm and falls back to the
    hard-coded package __version__ when scm lookup fails."""
    def test_get_from_setuptools(self, scm):
        self.assertIs(scm.return_value, env.get_version())
    def test_get_fallsback_to_hardcoded_if_scm_fails(self, scm):
        scm.side_effect = LookupError
        self.assertEqual(__version__, env.get_version())
| {
"content_hash": "17119cbf1d6d04bcbccb7db1cdd7929d",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 78,
"avg_line_length": 32.83536585365854,
"alnum_prop": 0.6064995357474466,
"repo_name": "drmonkeysee/ecs-scheduler",
"id": "9b0916ce18bfca18ff8540837480e0ed566e7b9f",
"size": "5385",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "test/test_env.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "724"
},
{
"name": "Makefile",
"bytes": "901"
},
{
"name": "Python",
"bytes": "232285"
},
{
"name": "Shell",
"bytes": "181"
}
],
"symlink_target": ""
} |
"""
Service Support module: views
"""
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.db.models import Q
from anaf.core.conf import settings
from anaf.core.rendering import render_to_response, render_string_template, render_to_string
from anaf.core.decorators import mylogin_required, handle_response_format, require_response_format
from anaf.core.views import user_denied
from anaf.core.models import Object, ModuleSetting
from anaf.services.models import Ticket, TicketRecord, TicketStatus, TicketQueue, Service, \
ServiceLevelAgreement, ServiceAgent
from anaf.services.forms import SettingsForm, MassActionForm, TicketForm, TicketStatusForm, \
TicketRecordForm, QueueForm, ServiceForm, ServiceLevelAgreementForm, \
AgentForm, FilterForm, SLAFilterForm, AgentFilterForm
from anaf.identities.models import Contact
def _get_filter_query(args, model=Ticket):
    """Creates a query to filter Tickets based on FilterForm arguments"""
    query = Q()
    # Each non-empty GET parameter that matches a model attribute becomes
    # an AND-ed <field>__id lookup.
    for name in args:
        if args[name] and hasattr(model, name):
            lookup = {str(name + '__id'): int(args[name])}
            query &= Q(**lookup)
    return query
def _get_default_context(request):
    """Returns default context for all views as dict()"""
    visible_queues = Object.filter_by_request(
        request, TicketQueue.objects.filter(active=True, parent__isnull=True))
    visible_statuses = Object.filter_by_request(request, TicketStatus.objects)
    try:
        agent = request.user.profile.serviceagent_set.all()[0]
    except Exception:
        # The current user has no ServiceAgent record.
        agent = None
    return {
        'statuses': visible_statuses,
        'queues': visible_queues,
        'agent': agent,
        'massform': MassActionForm(request.user.profile),
    }
def _process_mass_form(f):
    """Pre-process request to handle mass action form for Tasks and Milestones"""
    def wrap(request, *args, **kwargs):
        """Apply the submitted MassActionForm to every selected ticket,
        then call the wrapped view."""
        if 'massform' in request.POST:
            # Each selected ticket arrives as its own 'mass-ticket*' field
            # whose value is the ticket's primary key.
            for key in request.POST:
                if 'mass-ticket' in key:
                    try:
                        ticket = Ticket.objects.get(pk=request.POST[key])
                        form = MassActionForm(
                            request.user.profile, request.POST, instance=ticket)
                        if form.is_valid() and request.user.profile.has_permission(ticket, mode='w'):
                            form.save()
                    except Exception:
                        # Best-effort: a bad id or invalid form skips this
                        # ticket rather than failing the whole request.
                        pass
        return f(request, *args, **kwargs)
    # Preserve the wrapped view's metadata for introspection.
    wrap.__doc__ = f.__doc__
    wrap.__name__ = f.__name__
    return wrap
@handle_response_format
@mylogin_required
@_process_mass_form
def index(request, response_format='html'):
    """All available tickets, filtered by the request's GET parameters."""
    # Unless the user explicitly filters on a status, tickets whose status
    # is marked hidden are excluded.
    if request.GET:
        if 'status' in request.GET and request.GET['status']:
            query = _get_filter_query(request.GET)
        else:
            query = Q(status__hidden=False) & _get_filter_query(request.GET)
        tickets = Object.filter_by_request(
            request, Ticket.objects.filter(query))
    else:
        tickets = Object.filter_by_request(
            request, Ticket.objects.filter(status__hidden=False))
    filters = FilterForm(request.user.profile, '', request.GET)
    context = _get_default_context(request)
    context.update({'tickets': tickets,
                    'filters': filters, })
    return render_to_response('services/index', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
@_process_mass_form
def index_assigned(request, response_format='html'):
    """Tickets assigned to current user (who must be a service agent)."""
    context = _get_default_context(request)
    agent = context['agent']
    if agent:
        query = Q(assigned=agent)
        # Same hidden-status convention as index().
        if request.GET:
            if 'status' in request.GET and request.GET['status']:
                query = query & _get_filter_query(request.GET)
            else:
                query = query & Q(
                    status__hidden=False) & _get_filter_query(request.GET)
        else:
            query = query & Q(status__hidden=False)
        tickets = Object.filter_by_request(
            request, Ticket.objects.filter(query))
    else:
        return user_denied(request, "You are not a Service Support Agent.", response_format=response_format)
    filters = FilterForm(request.user.profile, 'assigned', request.GET)
    context.update({'tickets': tickets,
                    'filters': filters})
    return render_to_response('services/index_assigned', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
@_process_mass_form
def index_owned(request, response_format='html'):
    """Tickets owned by current user (matched via the caller's related user)."""
    context = _get_default_context(request)
    query = Q(caller__related_user=request.user.profile)
    # Same hidden-status convention as index().
    if request.GET:
        if 'status' in request.GET and request.GET['status']:
            query = query & _get_filter_query(request.GET)
        else:
            query = query & Q(
                status__hidden=False) & _get_filter_query(request.GET)
    else:
        query = query & Q(status__hidden=False)
    tickets = Object.filter_by_request(request, Ticket.objects.filter(query))
    filters = FilterForm(request.user.profile, 'caller', request.GET)
    context.update({'tickets': tickets,
                    'filters': filters})
    return render_to_response('services/index_owned', context,
                              context_instance=RequestContext(request), response_format=response_format)
#
# Ticket Statuses
#
@handle_response_format
@mylogin_required
@_process_mass_form
def status_view(request, status_id, response_format='html'):
    """Tickets filtered by a given status."""
    status = get_object_or_404(TicketStatus, pk=status_id)
    if not request.user.profile.has_permission(status):
        return user_denied(request, message="You don't have access to this Ticket Status")
    query = Q(status=status)
    if request.GET:
        query = query & _get_filter_query(request.GET)
    tickets = Object.filter_by_request(request, Ticket.objects.filter(query))
    filters = FilterForm(request.user.profile, 'status', request.GET)
    context = _get_default_context(request)
    context.update({'status': status,
                    'filters': filters,
                    'tickets': tickets})
    return render_to_response('services/status_view', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def status_edit(request, status_id, response_format='html'):
    """Edit a TicketStatus; write permission or 'anaf' admin rights required."""
    status = get_object_or_404(TicketStatus, pk=status_id)
    if not request.user.profile.has_permission(status, mode='w') \
            and not request.user.profile.is_admin('anaf'):
        return user_denied(request, "You don't have access to this Ticket Status", response_format)
    if request.POST:
        if 'cancel' not in request.POST:
            form = TicketStatusForm(
                request.user.profile, request.POST, instance=status)
            if form.is_valid():
                status = form.save()
                return HttpResponseRedirect(reverse('services_status_view', args=[status.id]))
        else:
            # Cancel: back to the status view without saving.
            return HttpResponseRedirect(reverse('services_status_view', args=[status.id]))
    else:
        form = TicketStatusForm(request.user.profile, instance=status)
    context = _get_default_context(request)
    context.update({'form': form,
                    'status': status})
    return render_to_response('services/status_edit', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def status_delete(request, status_id, response_format='html'):
    """Delete (or move to trash) a TicketStatus."""
    status = get_object_or_404(TicketStatus, pk=status_id)
    if not request.user.profile.has_permission(status, mode='w'):
        return user_denied(request, "You don't have access to this Ticket Status", response_format)
    if request.POST:
        if 'delete' in request.POST:
            # 'trash' soft-deletes by flagging, otherwise delete outright.
            if 'trash' in request.POST:
                status.trash = True
                status.save()
            else:
                status.delete()
            return HttpResponseRedirect(reverse('services_settings_view'))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_status_view', args=[status.id]))
    context = _get_default_context(request)
    context.update({'status': status})
    return render_to_response('services/status_delete', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def status_add(request, response_format='html'):
    """Create a new TicketStatus (module administrators only)."""
    if not request.user.profile.is_admin('anaf.services'):
        return user_denied(request,
                           message="You don't have administrator access to the Service Support module")
    if request.POST:
        if 'cancel' not in request.POST:
            status = TicketStatus()
            form = TicketStatusForm(
                request.user.profile, request.POST, instance=status)
            if form.is_valid():
                status = form.save()
                # Record the creating user on the new object.
                status.set_user_from_request(request)
                return HttpResponseRedirect(reverse('services_status_view', args=[status.id]))
        else:
            return HttpResponseRedirect(reverse('services_settings_view'))
    else:
        form = TicketStatusForm(request.user.profile)
    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('services/status_add', context,
                              context_instance=RequestContext(request), response_format=response_format)
#
# Queues
#
@handle_response_format
@mylogin_required
@_process_mass_form
def queue_view(request, queue_id, response_format='html'):
    """Show a queue: its tickets (hidden statuses excluded unless filtered)
    and its direct sub-queues."""
    queue = get_object_or_404(TicketQueue, pk=queue_id)
    if not request.user.profile.has_permission(queue):
        return user_denied(request, message="You don't have access to this Queue")
    query = Q(queue=queue)
    if request.GET:
        if 'status' in request.GET and request.GET['status']:
            query = query & _get_filter_query(request.GET)
        else:
            query = query & Q(
                status__hidden=False) & _get_filter_query(request.GET)
    else:
        query = query & Q(status__hidden=False)
    tickets = Object.filter_by_request(request, Ticket.objects.filter(query))
    filters = FilterForm(request.user.profile, 'queue', request.GET)
    subqueues = Object.filter_by_request(
        request, TicketQueue.objects.filter(parent=queue))
    context = _get_default_context(request)
    context.update({'queue': queue,
                    'subqueues': subqueues,
                    'filters': filters,
                    'tickets': tickets})
    return render_to_response('services/queue_view', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def queue_edit(request, queue_id, response_format='html'):
    """Edit a TicketQueue (write permission required)."""
    queue = get_object_or_404(TicketQueue, pk=queue_id)
    if not request.user.profile.has_permission(queue, mode='w'):
        return user_denied(request, message="You don't have access to this Queue")
    if request.POST:
        if 'cancel' not in request.POST:
            form = QueueForm(
                request.user.profile, request.POST, instance=queue)
            if form.is_valid():
                queue = form.save()
                return HttpResponseRedirect(reverse('services_queue_view', args=[queue.id]))
        else:
            # Cancel: back to the queue view without saving.
            return HttpResponseRedirect(reverse('services_queue_view', args=[queue.id]))
    else:
        form = QueueForm(request.user.profile, instance=queue)
    context = _get_default_context(request)
    context.update({'queue': queue, 'form': form})
    return render_to_response('services/queue_edit', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def queue_delete(request, queue_id, response_format='html'):
    """Delete (or move to trash) a TicketQueue, showing affected tickets
    and sub-queues on the confirmation page."""
    queue = get_object_or_404(TicketQueue, pk=queue_id)
    if not request.user.profile.has_permission(queue, mode='w'):
        return user_denied(request, message="You don't have access to this Queue")
    if request.POST:
        if 'delete' in request.POST:
            # 'trash' soft-deletes by flagging, otherwise delete outright.
            if 'trash' in request.POST:
                queue.trash = True
                queue.save()
            else:
                queue.delete()
            return HttpResponseRedirect(reverse('services_settings_view'))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_queue_view', args=[queue.id]))
    query = Q(queue=queue) & Q(status__hidden=False)
    tickets = Object.filter_by_request(request, Ticket.objects.filter(query))
    subqueues = Object.filter_by_request(
        request, TicketQueue.objects.filter(parent=queue))
    context = _get_default_context(request)
    context.update({'queue': queue,
                    'subqueues': subqueues,
                    'tickets': tickets})
    return render_to_response('services/queue_delete', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def queue_add(request, response_format='html'):
    """Create a new TicketQueue (module administrators only)."""
    if not request.user.profile.is_admin('anaf.services'):
        return user_denied(request,
                           message="You don't have administrator access to the Service Support module")
    if request.POST:
        if 'cancel' not in request.POST:
            queue = TicketQueue()
            form = QueueForm(
                request.user.profile, request.POST, instance=queue)
            if form.is_valid():
                queue = form.save()
                # Record the creating user on the new object.
                queue.set_user_from_request(request)
                return HttpResponseRedirect(reverse('services_queue_view', args=[queue.id]))
        else:
            return HttpResponseRedirect(reverse('services_settings_view'))
    else:
        form = QueueForm(request.user.profile)
    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('services/queue_add', context,
                              context_instance=RequestContext(request), response_format=response_format)
#
# Tickets
#
@handle_response_format
@mylogin_required
def ticket_view(request, ticket_id, response_format='html'):
    """Show a Ticket; users with write access may also post an update
    (a manual TicketRecord) from the same page."""
    context = _get_default_context(request)
    agent = context['agent']
    profile = request.user.profile
    ticket = get_object_or_404(Ticket, pk=ticket_id)
    if not profile.has_permission(ticket):
        return user_denied(request, message="You don't have access to this Ticket")
    # Viewing the ticket marks its originating message as read by this user.
    if ticket.message:
        ticket.message.read_by.add(profile)
    if profile.has_permission(ticket, mode='w'):
        if request.POST:
            record = TicketRecord(sender=profile.get_contact())
            record.record_type = 'manual'
            if ticket.message:
                record.message = ticket.message
            form = TicketRecordForm(
                agent, ticket, request.POST, instance=record)
            if form.is_valid():
                record = form.save()
                record.save()
                record.set_user_from_request(request)
                # Link the record to the ticket and bump its update time.
                record.about.add(ticket)
                ticket.set_last_updated()
                return HttpResponseRedirect(reverse('services_ticket_view', args=[ticket.id]))
        else:
            form = TicketRecordForm(agent, ticket)
    else:
        # Read-only access: no update form at all.
        form = None
    context.update({'ticket': ticket, 'record_form': form})
    return render_to_response('services/ticket_view', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def ticket_edit(request, ticket_id, response_format='html'):
    """Edit a Ticket (write permission required)."""
    context = _get_default_context(request)
    agent = context['agent']
    ticket = get_object_or_404(Ticket, pk=ticket_id)
    if not request.user.profile.has_permission(ticket, mode='w'):
        return user_denied(request, message="You don't have access to this Ticket")
    if request.POST:
        if 'cancel' not in request.POST:
            form = TicketForm(
                request.user.profile, None, agent, request.POST, instance=ticket)
            if form.is_valid():
                ticket = form.save()
                return HttpResponseRedirect(reverse('services_ticket_view', args=[ticket.id]))
        else:
            # Cancel: back to the ticket view without saving.
            return HttpResponseRedirect(reverse('services_ticket_view', args=[ticket.id]))
    else:
        form = TicketForm(
            request.user.profile, None, agent, instance=ticket)
    context.update({'form': form,
                    'ticket': ticket})
    return render_to_response('services/ticket_edit', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def ticket_set_status(request, ticket_id, status_id, response_format='html'):
    """Quick-set a ticket's status, then render the ticket view.

    Requires write access to the ticket and read access to the status.
    """
    ticket = get_object_or_404(Ticket, pk=ticket_id)
    if not request.user.profile.has_permission(ticket, mode='w'):
        return user_denied(request, message="You don't have access to this Ticket")
    status = get_object_or_404(TicketStatus, pk=status_id)
    if not request.user.profile.has_permission(status):
        return user_denied(request, message="You don't have access to this Ticket Status")
    # Only save when the status actually changes.
    if ticket.status != status:
        ticket.status = status
        ticket.save()
    return ticket_view(request, ticket_id, response_format)
@handle_response_format
@mylogin_required
def ticket_delete(request, ticket_id, response_format='html'):
    """Delete (or move to trash) a Ticket."""
    ticket = get_object_or_404(Ticket, pk=ticket_id)
    if not request.user.profile.has_permission(ticket, mode='w'):
        return user_denied(request, message="You don't have access to this Ticket")
    if request.POST:
        if 'delete' in request.POST:
            # 'trash' soft-deletes by flagging, otherwise delete outright.
            if 'trash' in request.POST:
                ticket.trash = True
                ticket.save()
            else:
                ticket.delete()
            return HttpResponseRedirect(reverse('services_index'))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_ticket_view', args=[ticket.id]))
    context = _get_default_context(request)
    context.update({'ticket': ticket})
    return render_to_response('services/ticket_delete', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def ticket_add(request, queue_id=None, response_format='html'):
    """Create a new Ticket, optionally pre-bound to a queue.

    For non-agents, defaults for status/queue/priority/service come from
    the queue (when given) or from module settings, falling back to the
    first visible status/queue.  All fallback lookups are best-effort.

    Fixed: the bare 'except:' clauses were narrowed to 'except Exception:'
    so SystemExit/KeyboardInterrupt are no longer swallowed; the
    best-effort behaviour is otherwise unchanged.
    """
    context = _get_default_context(request)
    agent = context['agent']
    profile = request.user.profile
    queue = None
    if queue_id:
        queue = get_object_or_404(TicketQueue, pk=queue_id)
        # Without write access the queue is ignored rather than denied.
        if not profile.has_permission(queue, mode='w'):
            queue = None
    if request.POST:
        if 'cancel' not in request.POST:
            ticket = Ticket(creator=profile)
            if not agent:
                if queue:
                    ticket.queue = queue
                    if queue.default_ticket_status:
                        ticket.status = queue.default_ticket_status
                    else:
                        try:
                            conf = ModuleSetting.get_for_module(
                                'anaf.services', 'default_ticket_status')[0]
                            ticket.status = TicketStatus.objects.get(
                                pk=long(conf.value))
                        except Exception:
                            if 'statuses' in context:
                                try:
                                    ticket.status = context['statuses'][0]
                                except Exception:
                                    pass
                    ticket.priority = queue.default_ticket_priority
                    ticket.service = queue.default_service
                else:
                    try:
                        conf = ModuleSetting.get_for_module(
                            'anaf.services', 'default_ticket_status')[0]
                        ticket.status = TicketStatus.objects.get(
                            pk=long(conf.value))
                    except Exception:
                        if 'statuses' in context:
                            try:
                                ticket.status = context['statuses'][0]
                            except Exception:
                                pass
                    try:
                        conf = ModuleSetting.get_for_module(
                            'anaf.services', 'default_ticket_queue')[0]
                        ticket.queue = TicketQueue.objects.get(
                            pk=long(conf.value))
                    except Exception:
                        if 'queues' in context:
                            try:
                                ticket.queue = context['queues'][0]
                            except Exception:
                                pass
            # Best-effort: the user may have no Contact record.
            try:
                ticket.caller = profile.get_contact()
            except Exception:
                pass
            form = TicketForm(
                profile, queue, agent, request.POST, instance=ticket)
            if form.is_valid():
                ticket = form.save()
                ticket.set_user_from_request(request)
                return HttpResponseRedirect(reverse('services_ticket_view', args=[ticket.id]))
        else:
            return HttpResponseRedirect(reverse('services'))
    else:
        form = TicketForm(request.user.profile, queue, agent)
    context.update({'form': form, 'queue': queue})
    return render_to_response('services/ticket_add', context,
                              context_instance=RequestContext(request), response_format=response_format)
#
# Services
#
@handle_response_format
@mylogin_required
def service_catalogue(request, response_format='html'):
    """Display the catalogue of top-level Services visible to the user."""
    top_level = Service.objects.filter(parent__isnull=True)
    services = Object.filter_by_request(request, top_level)
    filters = FilterForm(request.user.profile, '', request.GET)
    context = _get_default_context(request)
    context.update({'services': services, 'filters': filters})
    return render_to_response('services/service_catalogue', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def service_view(request, service_id, response_format='html'):
    """Show a single Service; requires read permission or module admin rights."""
    service = get_object_or_404(Service, pk=service_id)
    profile = request.user.profile
    if not (profile.has_permission(service) or profile.is_admin('anaf')):
        return user_denied(request, message="You don't have access to this Service")
    context = _get_default_context(request)
    context.update({'service': service})
    return render_to_response('services/service_view', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def service_edit(request, service_id, response_format='html'):
    """Edit an existing Service; requires write permission or admin rights."""
    service = get_object_or_404(Service, pk=service_id)
    profile = request.user.profile
    if not (profile.has_permission(service, mode='w') or profile.is_admin('anaf')):
        return user_denied(request, message="You don't have access to this Service")

    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_service_view', args=[service.id]))
        form = ServiceForm(profile, request.POST, instance=service)
        if form.is_valid():
            service = form.save()
            return HttpResponseRedirect(reverse('services_service_view', args=[service.id]))
    else:
        form = ServiceForm(profile, instance=service)

    # An invalid POST re-renders the bound form with its errors.
    context = _get_default_context(request)
    context.update({'form': form, 'service': service})
    return render_to_response('services/service_edit', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def service_delete(request, service_id, response_format='html'):
    """Delete (or move to trash) a Service; requires write permission or admin rights."""
    service = get_object_or_404(Service, pk=service_id)
    profile = request.user.profile
    if not (profile.has_permission(service, mode='w') or profile.is_admin('anaf')):
        return user_denied(request, message="You don't have access to this Service")

    if request.POST:
        if 'delete' in request.POST:
            if 'trash' in request.POST:
                # Soft delete: keep the row, mark it as trashed.
                service.trash = True
                service.save()
            else:
                service.delete()
            return HttpResponseRedirect(reverse('services_service_catalogue'))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_service_view', args=[service.id]))

    context = _get_default_context(request)
    context.update({'service': service})
    return render_to_response('services/service_delete', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def service_add(request, response_format='html'):
    """Create a new Service; restricted to module administrators."""
    profile = request.user.profile
    if not profile.is_admin('anaf.services'):
        return user_denied(
            request,
            message="You don't have administrator access to the Service Support module")

    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services'))
        form = ServiceForm(profile, request.POST, instance=Service())
        if form.is_valid():
            service = form.save()
            service.set_user_from_request(request)
            return HttpResponseRedirect(reverse('services_service_view', args=[service.id]))
    else:
        form = ServiceForm(profile)

    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('services/service_add', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
#
# ServiceLevelAgreements
#
@handle_response_format
@mylogin_required
def sla_index(request, response_format='html'):
    """List Service Level Agreements, optionally filtered via GET parameters."""
    if request.GET:
        queryset = ServiceLevelAgreement.objects.filter(
            _get_filter_query(request.GET, ServiceLevelAgreement))
    else:
        queryset = ServiceLevelAgreement.objects
    slas = Object.filter_by_request(request, queryset)
    filters = SLAFilterForm(request.user.profile, '', request.GET)
    context = _get_default_context(request)
    context.update({'slas': slas, 'filters': filters})
    return render_to_response('services/sla_index', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def sla_view(request, sla_id, response_format='html'):
    """Show a single Service Level Agreement; requires read permission."""
    sla = get_object_or_404(ServiceLevelAgreement, pk=sla_id)
    if not request.user.profile.has_permission(sla):
        return user_denied(
            request,
            message="You don't have access to this Service Level Agreement")
    context = _get_default_context(request)
    context.update({'sla': sla})
    return render_to_response('services/sla_view', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def sla_edit(request, sla_id, response_format='html'):
    """Edit a Service Level Agreement; requires write permission."""
    sla = get_object_or_404(ServiceLevelAgreement, pk=sla_id)
    profile = request.user.profile
    if not profile.has_permission(sla, mode='w'):
        return user_denied(
            request,
            message="You don't have access to this Service Level Agreement")

    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_sla_view', args=[sla.id]))
        form = ServiceLevelAgreementForm(profile, request.POST, instance=sla)
        if form.is_valid():
            sla = form.save()
            return HttpResponseRedirect(reverse('services_sla_view', args=[sla.id]))
    else:
        form = ServiceLevelAgreementForm(profile, instance=sla)

    context = _get_default_context(request)
    context.update({'sla': sla, 'form': form})
    return render_to_response('services/sla_edit', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def sla_delete(request, sla_id, response_format='html'):
    """Delete (or trash) a Service Level Agreement; requires write permission."""
    sla = get_object_or_404(ServiceLevelAgreement, pk=sla_id)
    if not request.user.profile.has_permission(sla, mode='w'):
        return user_denied(
            request,
            message="You don't have access to this Service Level Agreement")

    if request.POST:
        if 'delete' in request.POST:
            if 'trash' in request.POST:
                # Soft delete: keep the row, mark it as trashed.
                sla.trash = True
                sla.save()
            else:
                sla.delete()
            return HttpResponseRedirect(reverse('services_sla_index'))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_sla_view', args=[sla.id]))

    context = _get_default_context(request)
    context.update({'sla': sla})
    return render_to_response('services/sla_delete', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def sla_add(request, response_format='html'):
    """Create a new Service Level Agreement; restricted to module administrators."""
    profile = request.user.profile
    if not profile.is_admin('anaf.services'):
        return user_denied(
            request,
            message="You don't have administrator access to the Service Support module")

    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services'))
        form = ServiceLevelAgreementForm(
            profile, request.POST, instance=ServiceLevelAgreement())
        if form.is_valid():
            sla = form.save()
            sla.set_user_from_request(request)
            return HttpResponseRedirect(reverse('services_sla_view', args=[sla.id]))
    else:
        form = ServiceLevelAgreementForm(profile)

    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('services/sla_add', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
#
# Settings
#
@handle_response_format
@mylogin_required
def settings_view(request, response_format='html'):
    """Display the Service Support module settings.

    Shows the default ticket status and queue, the caller e-mail
    notification flag, and a rendered example of the notification e-mail.
    Restricted to module administrators.

    Fix: the two bare ``except:`` clauses (ticket.status probe and custom
    template rendering) are narrowed to ``except Exception:`` so that
    SystemExit/KeyboardInterrupt are no longer swallowed.
    """
    if not request.user.profile.is_admin('anaf.services'):
        return user_denied(request,
                           message="You don't have administrator access to the Service Support module")
    # default ticket status
    try:
        conf = ModuleSetting.get_for_module(
            'anaf.services', 'default_ticket_status')[0]
        default_ticket_status = TicketStatus.objects.get(pk=long(conf.value))
    except Exception:
        default_ticket_status = None
    # default queue
    try:
        conf = ModuleSetting.get_for_module(
            'anaf.services', 'default_ticket_queue')[0]
        default_ticket_queue = TicketQueue.objects.get(pk=long(conf.value))
    except Exception:
        default_ticket_queue = None
    # notify ticket caller by email
    try:
        conf = ModuleSetting.get_for_module(
            'anaf.services', 'send_email_to_caller')[0]
        send_email_to_caller = conf.value
    except Exception:
        send_email_to_caller = settings.ANAF_SEND_EMAIL_TO_CALLER
    # notification template
    send_email_example = ''
    try:
        conf = ModuleSetting.get_for_module(
            'anaf.services', 'send_email_template')[0]
        send_email_template = conf.value
    except Exception:
        send_email_template = None
    queues = TicketQueue.objects.filter(trash=False, parent__isnull=True)
    statuses = TicketStatus.objects.filter(trash=False)
    if send_email_to_caller:
        # Render example e-mail: prefer a real ticket, otherwise synthesize
        # a throwaway one (never saved) just for the preview.
        try:
            ticket = Object.filter_by_request(
                request, Ticket.objects.filter(status__hidden=False, caller__isnull=False))[0]
        except IndexError:
            ticket = Ticket(reference='REF123', name='New request')
        if not ticket.caller:
            try:
                caller = Object.filter_by_request(request, Contact.objects)[0]
            except IndexError:
                caller = Contact(name='John Smith')
            ticket.caller = caller
        try:
            # Accessing .status on an unsaved ticket may raise; ensure one is set.
            ticket.status
        except Exception:
            try:
                ticket.status = statuses[0]
            except IndexError:
                ticket.status = TicketStatus(name='Open')
        if send_email_template:
            try:
                send_email_example = render_string_template(
                    send_email_template, {'ticket': ticket})
            except Exception:
                # Broken custom template - fall back to the built-in one.
                send_email_example = render_to_string(
                    'services/emails/notify_caller', {'ticket': ticket}, response_format='html')
        else:
            send_email_example = render_to_string(
                'services/emails/notify_caller', {'ticket': ticket}, response_format='html')
    context = _get_default_context(request)
    context.update({'settings_queues': queues,
                    'settings_statuses': statuses,
                    'default_ticket_status': default_ticket_status,
                    'default_ticket_queue': default_ticket_queue,
                    'send_email_to_caller': send_email_to_caller,
                    'send_email_example': send_email_example})
    return render_to_response('services/settings_view', context,
                              context_instance=RequestContext(request), response_format=response_format)
@handle_response_format
@mylogin_required
def settings_edit(request, response_format='html'):
    """Edit module settings; restricted to module administrators."""
    profile = request.user.profile
    if not profile.is_admin('anaf.services'):
        return user_denied(
            request,
            message="You don't have administrator access to the Service Support module")

    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_settings_view'))
        form = SettingsForm(profile, request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse('services_settings_view'))
    else:
        form = SettingsForm(profile)

    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('services/settings_edit', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
#
# Agents
#
@handle_response_format
@mylogin_required
def agent_index(request, response_format='html'):
    """List Service Agents, optionally filtered; restricted to module admins."""
    if not request.user.profile.is_admin('anaf.services'):
        return user_denied(
            request,
            message="You don't have administrator access to the Service Support module")
    if request.GET:
        queryset = ServiceAgent.objects.filter(
            _get_filter_query(request.GET, ServiceAgent))
    else:
        queryset = ServiceAgent.objects
    agents = Object.filter_by_request(request, queryset)
    filters = AgentFilterForm(request.user.profile, '', request.GET)
    context = _get_default_context(request)
    context.update({'agents': agents, 'filters': filters})
    return render_to_response('services/agent_index', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def agent_view(request, agent_id, response_format='html'):
    """Show a single Service Agent; requires read permission."""
    agent = get_object_or_404(ServiceAgent, pk=agent_id)
    if not request.user.profile.has_permission(agent):
        return user_denied(request, message="You don't have access to this Service Agent")
    context = _get_default_context(request)
    # Template expects the key 'view_agent' (context already holds 'agent').
    context.update({'view_agent': agent})
    return render_to_response('services/agent_view', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def agent_edit(request, agent_id, response_format='html'):
    """Edit a Service Agent.

    Fix: the permission check now requires *write* access (``mode='w'``),
    consistent with agent_delete and every other ``*_edit`` view in this
    module — the original only required read access, letting read-only
    users open and submit the edit form.
    """
    view_agent = get_object_or_404(ServiceAgent, pk=agent_id)
    if not request.user.profile.has_permission(view_agent, mode='w'):
        return user_denied(request, message="You don't have access to this Service Agent")
    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_agent_view', args=[view_agent.id]))
        form = AgentForm(
            request.user.profile, request.POST, instance=view_agent)
        if form.is_valid():
            view_agent = form.save()
            return HttpResponseRedirect(reverse('services_agent_view', args=[view_agent.id]))
    else:
        form = AgentForm(request.user.profile, instance=view_agent)
    # Invalid POST re-renders the bound form with its errors.
    context = _get_default_context(request)
    context.update({'form': form, 'view_agent': view_agent})
    return render_to_response('services/agent_edit', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def agent_delete(request, agent_id, response_format='html'):
    """Delete (or trash) a Service Agent; requires write permission."""
    agent = get_object_or_404(ServiceAgent, pk=agent_id)
    if not request.user.profile.has_permission(agent, mode='w'):
        return user_denied(request, message="You don't have access to this Service Agent")

    if request.POST:
        if 'delete' in request.POST:
            if 'trash' in request.POST:
                # Soft delete: keep the row, mark it as trashed.
                agent.trash = True
                agent.save()
            else:
                agent.delete()
            return HttpResponseRedirect(reverse('services_agent_index'))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_agent_view', args=[agent.id]))

    context = _get_default_context(request)
    context.update({'view_agent': agent})
    return render_to_response('services/agent_delete', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@handle_response_format
@mylogin_required
def agent_add(request, response_format='html'):
    """Create a new Service Agent; restricted to module administrators."""
    profile = request.user.profile
    if not profile.is_admin('anaf.services'):
        return user_denied(
            request,
            message="You don't have administrator access to the Service Support module")

    if request.POST:
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse('services_agent_index'))
        form = AgentForm(profile, request.POST, instance=ServiceAgent())
        if form.is_valid():
            new_agent = form.save()
            new_agent.set_user_from_request(request)
            return HttpResponseRedirect(reverse('services_agent_view', args=[new_agent.id]))
    else:
        form = AgentForm(profile)

    context = _get_default_context(request)
    context.update({'form': form})
    return render_to_response('services/agent_add', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@mylogin_required
def widget_index(request, response_format='html'):
    """Widget: every ticket whose status is not hidden."""
    active = Ticket.objects.filter(status__hidden=False)
    tickets = Object.filter_by_request(request, active)
    context = _get_default_context(request)
    context.update({'tickets': tickets})
    return render_to_response('services/widgets/index', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
@mylogin_required
def widget_index_assigned(request, response_format='html'):
    """Widget: open tickets assigned to the requesting agent."""
    context = _get_default_context(request)
    agent = context['agent']
    if not agent:
        return user_denied(request, "You are not a Service Support Agent.")
    tickets = Object.filter_by_request(
        request, Ticket.objects.filter(assigned=agent, status__hidden=False))
    context.update({'tickets': tickets})
    return render_to_response('services/widgets/index_assigned', context,
                              context_instance=RequestContext(request),
                              response_format=response_format)
#
# AJAX lookups
#
@require_response_format(['json'])
@mylogin_required
def ajax_ticket_lookup(request, response_format='json'):
    """AJAX: up to 10 tickets whose name contains the 'term' GET parameter."""
    if request.GET and 'term' in request.GET:
        matches = Ticket.objects.filter(
            name__icontains=request.GET['term'])[:10]
    else:
        matches = []
    return render_to_response('services/ajax_ticket_lookup',
                              {'tickets': matches},
                              context_instance=RequestContext(request),
                              response_format=response_format)
@require_response_format(['json'])
@mylogin_required
def ajax_agent_lookup(request, response_format='json'):
    """AJAX: agents whose user name or contact name contains the 'term' parameter."""
    if request.GET and 'term' in request.GET:
        term = request.GET['term']
        matches = ServiceAgent.objects.filter(
            Q(related_user__name__icontains=term) |
            Q(related_user__contact__name__icontains=term))
    else:
        matches = []
    return render_to_response('services/ajax_agent_lookup',
                              {'agents': matches},
                              context_instance=RequestContext(request),
                              response_format=response_format)
| {
"content_hash": "ddcab340f02c685b0f5a21ee20d9157b",
"timestamp": "",
"source": "github",
"line_count": 1194,
"max_line_length": 108,
"avg_line_length": 36.75460636515913,
"alnum_prop": 0.6186396262960009,
"repo_name": "tovmeod/anaf",
"id": "36ae5731a84fcd999208ad5aebc5832d900d3fdc",
"size": "43885",
"binary": false,
"copies": "1",
"ref": "refs/heads/drf",
"path": "anaf/services/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "400736"
},
{
"name": "HTML",
"bytes": "1512873"
},
{
"name": "JavaScript",
"bytes": "2136807"
},
{
"name": "PHP",
"bytes": "25856"
},
{
"name": "Python",
"bytes": "2045934"
},
{
"name": "Shell",
"bytes": "18005"
},
{
"name": "TSQL",
"bytes": "147855"
}
],
"symlink_target": ""
} |
import os
from flask import Flask
from flask_login import LoginManager
from flask_openid import OpenID
def project_relative_location(*args):
    """Return an absolute path for *args*, resolved relative to this file's directory."""
    base_dir = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(base_dir, *args)
app = Flask(__name__)
# Database: honour DATABASE_URL when set, otherwise fall back to a local SQLite file.
default_db = 'sqlite:///{}'.format(project_relative_location('data', 'tv_show.db'))
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL', default_db)
# Optional connection-pool size override (string from the environment, cast to int).
pool_size = os.environ.get('DB_CONNECTION_POOL_SIZE', None)
if pool_size:
    app.config['SQLALCHEMY_POOL_SIZE'] = int(pool_size)
# NOTE(review): the secret key is hard-coded in source; it should be loaded
# from the environment for any non-development deployment.
app.config['SECRET_KEY'] = 'LAjRXSzZDY8LSfTVxKWwJYou6rb2Y3OB'
app.config['UPLOAD_FOLDER'] = project_relative_location('data', 'uploads')
# Flask-Login: unauthenticated users are redirected to the 'login' view.
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Flask-OpenID with an on-disk association/nonce store.
oid = OpenID(app, project_relative_location('data', 'oid-tmp'))
# Single-user credentials supplied via environment variables.
user_email = os.environ.get('USER_EMAIL', 'invalid_email')
user_password_hash = os.environ.get('USER_PASSWORD_HASH', '')
user_nickname = os.environ.get('USER_NICKNAME', 'Intruder')
# Only this one e-mail address is permitted to log in.
allowed_emails = [user_email]
user_information = {
    user_email: {'password_hash': user_password_hash, 'nickname': user_nickname}
}
| {
"content_hash": "b1d44f840256b2b6bdc539caee260571",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 83,
"avg_line_length": 32.138888888888886,
"alnum_prop": 0.7225583405358686,
"repo_name": "fahadshaon/tv_tracker",
"id": "d9aa2915cad6ea47332f9ed905fff3f8ccf498ab",
"size": "1157",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11815"
},
{
"name": "JavaScript",
"bytes": "6828"
},
{
"name": "Python",
"bytes": "22420"
}
],
"symlink_target": ""
} |
"""
===========================
Calculate SHORE scalar maps
===========================
We show how to calculate two SHORE-based scalar maps: return to origin
probability (rtop) [Descoteaux2011]_ and mean square displacement (msd)
[Wu2007]_, [Wu2008]_ on your data. SHORE can be used with any multiple b-value
dataset like multi-shell or DSI.
First import the necessary modules:
"""
import nibabel as nib
import numpy as np
import matplotlib.pyplot as plt
from dipy.data import fetch_taiwan_ntu_dsi, read_taiwan_ntu_dsi, get_sphere
from dipy.data import get_data, dsi_voxels
from dipy.reconst.shore import ShoreModel
"""
Download and read the data for this tutorial.
"""
fetch_taiwan_ntu_dsi()  # downloads the dataset to the local dipy data dir (no-op if cached)
img, gtab = read_taiwan_ntu_dsi()
"""
img contains a nibabel Nifti1Image object (data) and gtab contains a GradientTable
object (gradient information e.g. b-values). For example, to read the b-values
it is possible to write print(gtab.bvals).
Load the raw diffusion data and the affine.
"""
# NOTE(review): img.get_data() is deprecated in newer nibabel releases
# (get_fdata() is the replacement) - confirm the pinned nibabel version.
data = img.get_data()
affine = img.affine
print('data.shape (%d, %d, %d, %d)' % data.shape)
"""
Instantiate the Model.
"""
asm = ShoreModel(gtab)
"""
Let's just use only one slice only from the data.
"""
# Hard-coded crop of the middle axial slice keeps the example fast.
dataslice = data[30:70, 20:80, data.shape[2] // 2]
"""
Fit the signal with the model and calculate the SHORE coefficients.
"""
asmfit = asm.fit(dataslice)
"""
Calculate the analytical rtop on the signal
that corresponds to the integral of the signal.
"""
print('Calculating... rtop_signal')
rtop_signal = asmfit.rtop_signal()
"""
Now we calculate the analytical rtop on the propagator,
that corresponds to its central value.
"""
print('Calculating... rtop_pdf')
rtop_pdf = asmfit.rtop_pdf()
"""
In theory, these two measures must be equal,
to show that we calculate the mean square error on this two measures.
"""
mse = np.sum((rtop_signal - rtop_pdf) ** 2) / rtop_signal.size
print("mse = %f" % mse)
"""
mse = 0.000000
Let's calculate the analytical mean square displacement on the propagator.
"""
print('Calculating... msd')
msd = asmfit.msd()
"""
Show the maps and save them to a file.
"""
fig = plt.figure(figsize=(6, 6))
ax1 = fig.add_subplot(2, 2, 1, title='rtop_signal')
ax1.set_axis_off()
ind = ax1.imshow(rtop_signal.T, interpolation='nearest', origin='lower')
plt.colorbar(ind)
ax2 = fig.add_subplot(2, 2, 2, title='rtop_pdf')
ax2.set_axis_off()
ind = ax2.imshow(rtop_pdf.T, interpolation='nearest', origin='lower')
plt.colorbar(ind)
ax3 = fig.add_subplot(2, 2, 3, title='msd')
ax3.set_axis_off()
ind = ax3.imshow(msd.T, interpolation='nearest', origin='lower', vmin=0)
plt.colorbar(ind)
plt.savefig('SHORE_maps.png')
"""
.. figure:: SHORE_maps.png
:align: center
rtop and msd calculated using the SHORE model.
References
----------
.. [Descoteaux2011] Descoteaux M. et al., "Multiple q-shell diffusion
propagator imaging", Medical Image Analysis, vol 15, No. 4, p. 603-621,
2011.
.. [Wu2007] Wu Y. et al., "Hybrid diffusion imaging", NeuroImage, vol 36, p.
617-629, 2007.
.. [Wu2008] Wu Y. et al., "Computation of Diffusion Function Measures in
q-Space Using Magnetic Resonance Hybrid Diffusion Imaging", IEEE
TRANSACTIONS ON MEDICAL IMAGING, vol. 27, No. 6, p. 858-865, 2008.
.. include:: ../links_names.inc
"""
| {
"content_hash": "40bf8da6effae406156c38d6b6091baa",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 82,
"avg_line_length": 24.931297709923665,
"alnum_prop": 0.6956521739130435,
"repo_name": "nilgoyyou/dipy",
"id": "82fa5f618f1e6d9ae6adbed0476bd1f97899e962",
"size": "3266",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "doc/examples/reconst_shore_metrics.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "2844"
},
{
"name": "Makefile",
"bytes": "3639"
},
{
"name": "Python",
"bytes": "2968209"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.