text stringlengths 4 1.02M | meta dict |
|---|---|
import unittest
import numpy as np
from plotting import path_figure
from fixtures import robot_arm1
class TestPlotting(unittest.TestCase):
    """Tests for plotting.path_figure."""

    def setUp(self):
        self.robot_arm = robot_arm1
        joint_count = len(self.robot_arm.lengths)
        destination_count = len(self.robot_arm.destinations[0])
        self.theta_matrix = np.arange(
            joint_count * destination_count
        ).reshape((joint_count, destination_count))

    def test_plot_pure_functon(self):
        # Snapshot the inputs so any mutation can be detected afterwards.
        destinations_before = self.robot_arm.destinations.copy()
        thetas_before = self.theta_matrix.copy()

        path_figure(self.theta_matrix, self.robot_arm, show=False)

        # path_figure is expected to be pure: arguments must be untouched.
        np.testing.assert_array_equal(
            destinations_before, self.robot_arm.destinations)
        np.testing.assert_array_equal(thetas_before, self.theta_matrix)
| {
"content_hash": "f320f5c4cb995051bd0310f825dc66f2",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 89,
"avg_line_length": 37.12,
"alnum_prop": 0.6885775862068966,
"repo_name": "JakobGM/robotarm-optimization",
"id": "04f1a753487de85a1b20e57c0927fdc466542015",
"size": "928",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_plotting.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61319"
}
],
"symlink_target": ""
} |
import os
def match(command, settings):
    """Match a command that failed with 'command not found' although the
    named file exists (it just was not invoked with an explicit ./ path).

    Fix: an empty ``command.script`` previously raised IndexError from
    ``split()[0]``; it now simply does not match.
    """
    parts = command.script.split()
    if not parts:
        return False
    return (os.path.exists(parts[0])
            and 'command not found' in command.stderr)
def get_new_command(command, settings):
    """Prepend './' so the existing script is run via an explicit path."""
    return u'./{0}'.format(command.script)
| {
"content_hash": "1639be23096f1d6f5d5672a870360af3",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 54,
"avg_line_length": 21.09090909090909,
"alnum_prop": 0.6810344827586207,
"repo_name": "dionyziz/thefuck",
"id": "4ceac489ded0d6acb9543172aec391e67351f8d5",
"size": "232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "thefuck/rules/has_exists_script.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23047"
}
],
"symlink_target": ""
} |
from tempest.lib.common.utils import data_utils
from functional.tests.identity.v2 import test_identity
class ProjectTests(test_identity.IdentityTests):
    """Functional tests for the identity v2 ``project`` CLI commands."""

    def test_project_create(self):
        project_name = data_utils.rand_name('TestProject')
        description = data_utils.rand_name('description')
        create_cmd = (
            'project create '
            '--description %(description)s '
            '--enable '
            '--property k1=v1 '
            '--property k2=v2 '
            '%(name)s' % {'description': description,
                          'name': project_name})
        output = self.openstack(create_cmd)
        self.addCleanup(
            self.openstack,
            'project delete %s' % project_name
        )
        # The two custom properties must show up as extra fields.
        listing = self.parse_show(output)
        expected = list(self.PROJECT_FIELDS) + ['k1', 'k2']
        self.assert_show_fields(listing, expected)
        project = self.parse_show_as_object(output)
        self.assertEqual('v1', project['k1'])
        self.assertEqual('v2', project['k2'])

    def test_project_delete(self):
        project_name = self._create_dummy_project(add_clean_up=False)
        output = self.openstack(
            'project delete %s' % project_name)
        # A successful delete prints nothing.
        self.assertEqual(0, len(output))

    def test_project_list(self):
        output = self.openstack('project list')
        listing = self.parse_listing(output)
        self.assert_table_structure(listing, test_identity.BASIC_LIST_HEADERS)

    def test_project_set(self):
        project_name = self._create_dummy_project()
        new_project_name = data_utils.rand_name('NewTestProject')
        set_cmd = (
            'project set '
            '--name %(new_name)s '
            '--disable '
            '--property k0=v0 '
            '%(name)s' % {'new_name': new_project_name,
                          'name': project_name})
        output = self.openstack(set_cmd)
        self.assertEqual(0, len(output))
        # check project details
        output = self.openstack(
            'project show %s' % new_project_name
        )
        listing = self.parse_show(output)
        expected = list(self.PROJECT_FIELDS) + ['properties']
        self.assert_show_fields(listing, expected)
        project = self.parse_show_as_object(output)
        self.assertEqual(new_project_name, project['name'])
        self.assertEqual('False', project['enabled'])
        self.assertEqual("k0='v0'", project['properties'])

    def test_project_show(self):
        project_name = self._create_dummy_project()
        output = self.openstack(
            'project show %s' % project_name
        )
        listing = self.parse_show(output)
        expected = list(self.PROJECT_FIELDS) + ['properties']
        self.assert_show_fields(listing, expected)
| {
"content_hash": "bd20926f33ba12dd6e8dac9081cf3d95",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 76,
"avg_line_length": 38.32432432432432,
"alnum_prop": 0.5863892806770099,
"repo_name": "redhat-openstack/python-openstackclient",
"id": "e9580ecfb675eaf62ed3fcfa6f6ac1c8a0952904",
"size": "3406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master-patches",
"path": "functional/tests/identity/v2/test_project.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2229284"
},
{
"name": "Shell",
"bytes": "591"
}
],
"symlink_target": ""
} |
import sublime, sublime_plugin
import subprocess
import re, os, os.path
import itertools
from User.build import *
from User.output import *
import traceback
def make_build_system_name(system, variant):
    """Join a build-system name and a variant name, dropping a trailing '.'."""
    base = system[:-1] if system.endswith('.') else system
    return "{} - {}".format(base, variant)
class Window:
    """Thin proxy around a sublime window adding helper queries.

    Attribute access for anything not defined here is delegated to the
    wrapped window object.
    """

    def __init__(self, window):
        self.window = window

    def __getattr__(self, name):
        # Fall through to the wrapped window for unknown attributes.
        return getattr(self.window, name)

    def get_working_dir(self):
        """Return the first build system's expanded "working_dir", or None."""
        for system in self.project_data()["build_systems"]:
            if "working_dir" in system:
                return sublime.expand_variables(
                    system["working_dir"], self.extract_variables())
        return None
class MinionClearViewCommand(sublime_plugin.TextCommand):
    """Erase the entire contents of the view."""

    def run(self, edit):
        whole_buffer = sublime.Region(0, self.view.size())
        self.view.erase(edit, whole_buffer)
class MinionPanelAppendCommand(sublime_plugin.TextCommand):
    """Append text to the view, auto-scrolling only when the end of the
    buffer was already visible before the insert."""

    def run(self, edit, text):
        at_bottom = self.view.visible_region().end() == self.view.size()
        self.view.insert(edit, self.view.size(), text)
        if at_bottom:
            self.view.show(self.view.size())
class MinionBuildCommand(sublime_plugin.WindowCommand):
    """Run a project build system, asking the user to pick one on first use."""

    def __init__(self, *args):
        super().__init__(*args)
        # Name of the build system chosen by the user; None until picked.
        self.build_system = None

    def build_systems(self):
        """Return {name: config} for every build system and its variants.

        Variants are keyed as "<system> - <variant>"; the plain keys of a
        system are merged into one config dict.
        """
        window = self.window
        build_systems_data = self.window.project_data()["build_systems"]
        build_systems_data = expand_variables_ex(
            build_systems_data, window.extract_variables())
        build_systems = {}
        for data in build_systems_data:
            for key, value in data.items():
                if key == "variants":
                    for variant in value:
                        name = make_build_system_name(
                            data["name"], variant["name"])
                        build_systems[name] = variant
                else:
                    try:
                        build_systems[data["name"]][key] = value
                    except KeyError:
                        build_systems[data["name"]] = {key: value}
        return build_systems

    def run_build(self, config):
        self.window.run_command("save_all")
        # minion_generic_build expects the command line under "command".
        config["command"] = config["cmd"]
        self.window.run_command(
            "minion_generic_build",
            {"config": config})

    def is_project_opened(self):
        return self.window.project_data() is not None

    def run_file_build(self):
        view = self.window.active_view()
        if view.file_name().endswith(".cpp"):
            print("Not implemented")
        else:
            print("Not implemented")

    def is_latex_project(self):
        # project_data() is None when no project is open; previously this
        # raised TypeError from `"latex" in None`.
        data = self.window.project_data()
        return data is not None and "latex" in data

    def build_latex_project(self):
        self.window.run_command("minion_build_latex")

    def run(self):
        if self.is_latex_project():
            self.window.run_command("save_all")
            self.build_latex_project()
        elif self.is_project_opened():
            window = self.window
            OutputView.request().clear()
            build_systems = self.build_systems()
            if self.build_system in build_systems:
                self.run_build(build_systems[self.build_system])
            else:
                build_system_names = sorted(build_systems.keys())

                def on_done(index):
                    # Sublime passes -1 when the quick panel is cancelled;
                    # previously this silently ran the *last* build system.
                    if index < 0:
                        return
                    self.build_system = build_system_names[index]
                    self.run_build(build_systems[self.build_system])

                window.show_quick_panel(build_system_names, on_done)
        else:
            self.run_file_build()
class MinionCancellBuildCommand(sublime_plugin.WindowCommand):
    """Cancel the build currently run by minion_generic_build.

    (Running the generic build command with no config cancels it.)
    """

    def run(self):
        self.window.run_command("minion_generic_build")
class MinionFocusSublimeCommand(sublime_plugin.WindowCommand):
    """Raise the Sublime Text window via wmctrl, retrying a few times."""

    def window_name(self):
        """Reconstruct the window title wmctrl should search for."""
        active_view = self.window.active_view()
        project = os.path.basename(self.window.project_file_name())
        project = project.replace(".sublime-project", "")
        if active_view and active_view.file_name():
            home = os.path.expanduser("~")
            shown_path = active_view.file_name().replace(home, "~")
            return "{} ({}) - Sublime Text".format(shown_path, project)
        return "untitled ({}) - Sublime Text".format(project)

    def run(self, depth=3):
        subprocess.check_call(["wmctrl", "-a", self.window_name()])
        if depth:
            # Retry shortly in case the window manager ignored the request.
            args = ("minion_focus_sublime", {"depth": depth - 1})
            sublime.set_timeout_async(
                lambda: self.window.run_command(*args), 333)
def get_working_dir():
    """Best-effort working directory.

    Prefer the first build system's expanded "working_dir" when a project
    is open; otherwise fall back to the active file's directory.

    Fix: when there is no active view or the active view is an unsaved
    buffer, ``os.path.dirname(None)`` raised TypeError; return None instead.
    """
    window = sublime.active_window()
    if window.project_data():
        build_systems = window.project_data()["build_systems"]
        for build_system in build_systems:
            if "working_dir" in build_system:
                return sublime.expand_variables(
                    build_system["working_dir"], window.extract_variables())
    view = window.active_view()
    if view and view.file_name():
        return os.path.dirname(view.file_name())
    return None
class MinionCommand(sublime_plugin.WindowCommand):
    """Forward a build config to minion_generic_build, defaulting the
    working directory to the active file's folder."""

    def __init__(self, window):
        super().__init__(window)

    def _active_view_dir(self):
        active_view = self.window.active_view()
        return os.path.dirname(active_view.file_name())

    def _makefile_exists(self):
        pass

    def run(self, **kwargs):
        # Missing or empty working_dir falls back to the active file's folder.
        if kwargs.get("working_dir", "") == "":
            kwargs["working_dir"] = self._active_view_dir()
        self.window.run_command(
            "minion_generic_build",
            {"config": kwargs})
class MinionFormatCommand(sublime_plugin.WindowCommand):
    """Format the active C++ file in place with clang-format."""

    def __init__(self, window):
        super().__init__(window)

    def _get_syntax(self):
        """Return the base name of the active view's syntax definition."""
        active_view = self.window.active_view()
        syntax_path = active_view.settings().get('syntax')
        return os.path.splitext(os.path.basename(syntax_path))[0]

    def _active_file(self):
        return self.window.active_view().file_name()

    def run(self, **kwargs):
        # Only C++ buffers are formatted.
        if self._get_syntax() != "C++":
            return
        self.window.run_command("save")
        subprocess.call(
            ["clang-format-3.8", "-i", "-style=Google", self._active_file()])
        self.window.active_view().set_status(
            "minion-format", "clang-format-3.8: DONE...")

        def erase_status():
            self.window.active_view().erase_status("minion-format")

        # Clear the status message after a short delay.
        sublime.set_timeout(erase_status, 4096)
class MinionDetectCpp(sublime_plugin.EventListener):
    """Switch extension-less plain-text files that look like C++ to the
    C++ syntax on load and after save."""

    @staticmethod
    def is_cpp(view):
        pass

    def detect_cpp(self, view):
        extension = os.path.splitext(view.file_name())[1]
        plain_text = (view.settings().get('syntax')
                      == "Packages/Text/Plain text.tmLanguage")
        # Heuristic: a preprocessor directive marks the file as C++.
        if (extension == "" and plain_text and
                not view.find("#pragma|#include", 0).empty()):
            view.set_syntax_file("Packages/C++/C++.tmLanguage")

    def on_load(self, view):
        self.detect_cpp(view)

    def on_post_save(self, view):
        self.detect_cpp(view)
def walk_project_files(path):
    """Yield every file under *path*, skipping hidden and ignored directories.

    Fix: IGNORE was the *string* ".git", so ``file not in IGNORE`` was a
    substring test that wrongly skipped any directory whose name is a
    substring of ".git" (e.g. "g", "it"). It is now a set of exact names.
    Also drops the unused ``full_path`` variable.
    """
    IGNORE = {".git"}
    stack = [path]
    while stack:
        top = stack.pop()
        for entry in os.listdir(top):
            full = os.path.join(top, entry)
            if os.path.isdir(full):
                if not entry.startswith(".") and entry not in IGNORE:
                    stack.append(full)
            else:
                yield full
def is_source(path):
    """Return True if *path* names a C/C++ source file."""
    if not path:
        return False
    return path.endswith((".cpp", ".c", ".cxx"))
def is_header(path):
    """Return True if *path* names a C/C++ header.

    A file with no extension counts as a header when "#pragma" appears in
    its first 32 lines.

    Fixes: the file handle was opened and never closed (leak) — now uses a
    ``with`` block; ``readlines()`` materialized the whole file before
    ``islice`` — now iterates lazily. Unreadable files return False instead
    of propagating the open error.
    """
    if not path:
        return False
    ext = os.path.splitext(path)[1]
    if ext != "":
        return ext in (".hpp", ".h", ".hxx")
    try:
        with open(path, "r") as file:
            for line in itertools.islice(file, 32):
                if "#pragma" in line:
                    return True
    except Exception:
        # Unreadable or binary files are not headers.
        return False
    return False
class MinionToggleHeader(sublime_plugin.WindowCommand):
    """Toggle between a source file and its matching header."""

    @staticmethod
    def find_the_other(window, path, predicate):
        """Return the first project file matching *predicate* with the same
        base name as *path*, or None.

        Fix: when the window exposes neither "project_path" nor "folder",
        ``project_path`` was left unbound and raised UnboundLocalError;
        now returns None.
        """
        variables = window.extract_variables()
        if "project_path" in variables:
            project_path = variables["project_path"]
        elif "folder" in variables:
            project_path = variables["folder"]
        else:
            return None
        source_base = os.path.splitext(os.path.basename(path))[0]
        for entry in walk_project_files(project_path):
            if predicate(entry):
                header_base = os.path.splitext(os.path.basename(entry))[0]
                if header_base == source_base:
                    return entry
        return None

    def toggle(self, view):
        target = None
        if is_header(view.file_name()):
            target = MinionToggleHeader.find_the_other(
                self.window, view.file_name(), is_source)
        elif is_source(view.file_name()):
            target = MinionToggleHeader.find_the_other(
                self.window, view.file_name(), is_header)
        if target:
            self.window.open_file(target)
            self.window.status_message(
                "Toggle to {}...".format(os.path.basename(target)))
        else:
            self.window.status_message("Cannot toggle...")

    def run(self, **kwargs):
        self.toggle(self.window.active_view())
class MinionBuildCurrentFile(sublime_plugin.WindowCommand):
    """Build the object file for the current source file (or the matching
    source of the current header) via make."""

    def make(self, view):
        target = view.file_name()
        if is_header(target):
            # Build the companion source file instead of the header itself.
            target = MinionToggleHeader.find_the_other(
                self.window, target, is_source)
        if is_source(target):
            working_dir = self.window.extract_variables()["project_path"]
            dirname, basename = os.path.split(os.path.splitext(target)[0])
            object_file = os.path.relpath(
                os.path.join(dirname, "build", basename) + ".o", working_dir)
            build_config = {
                "working_dir": working_dir,
                "cmd": ["make", object_file]}
            self.window.run_command(
                "minion_generic_build", {"config": build_config})
            self.window.status_message(
                "Building {}...".format(os.path.basename(target)))

    def run(self, **kwargs):
        if self.window.active_view().is_dirty():
            self.window.run_command("save")
        self.make(self.window.active_view())
| {
"content_hash": "4af1968f7cdaedc8aec7d3ebefb5f4e9",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 103,
"avg_line_length": 31.02686567164179,
"alnum_prop": 0.5793727150279007,
"repo_name": "ciechowoj/minion",
"id": "24b2b0cd1786d5688ee655481cf27b0351c175c8",
"size": "10394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "minion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61628"
}
],
"symlink_target": ""
} |
class DeploymentModificationState(object):
    """Lifecycle states of a deployment modification."""
    STARTED = 'started'
    FINISHED = 'finished'
    ROLLEDBACK = 'rolledback'

    # All known states, and the states a modification can terminate in.
    STATES = [STARTED, FINISHED, ROLLEDBACK]
    END_STATES = [FINISHED, ROLLEDBACK]
class SnapshotState(object):
    """Lifecycle states of a snapshot."""
    CREATED = 'created'
    FAILED = 'failed'
    CREATING = 'creating'
    UPLOADED = 'uploaded'

    # All known states, and the states a snapshot can terminate in.
    STATES = [CREATED, FAILED, CREATING, UPLOADED]
    END_STATES = [CREATED, FAILED, UPLOADED]
class ExecutionState(object):
    """Lifecycle states of a workflow execution."""
    TERMINATED = 'terminated'
    FAILED = 'failed'
    CANCELLED = 'cancelled'
    PENDING = 'pending'
    STARTED = 'started'
    CANCELLING = 'cancelling'
    FORCE_CANCELLING = 'force_cancelling'

    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED,
              CANCELLING, FORCE_CANCELLING]
    END_STATES = [TERMINATED, FAILED, CANCELLED]
    # Everything that is not an end state counts as still active.
    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
| {
"content_hash": "c26167adef7ddc2888bb0dce062e4205",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 74,
"avg_line_length": 28.09375,
"alnum_prop": 0.6718576195773082,
"repo_name": "isaac-s/cloudify-manager",
"id": "3ea36d123edad7b50f0074bbffaa475821302880",
"size": "1538",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rest-service/manager_rest/storage/models_states.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "4067"
},
{
"name": "Mako",
"bytes": "541"
},
{
"name": "Python",
"bytes": "1793118"
},
{
"name": "Ruby",
"bytes": "40193"
},
{
"name": "Shell",
"bytes": "41526"
}
],
"symlink_target": ""
} |
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_sphere_description1263_all_of
except ImportError:
bt_sphere_description1263_all_of = sys.modules[
"onshape_client.oas.models.bt_sphere_description1263_all_of"
]
try:
from onshape_client.oas.models import bt_surface_description1564
except ImportError:
bt_surface_description1564 = sys.modules[
"onshape_client.oas.models.bt_surface_description1564"
]
try:
from onshape_client.oas.models import bt_vector3d389
except ImportError:
bt_vector3d389 = sys.modules["onshape_client.oas.models.bt_vector3d389"]
class BTSphereDescription1263(ModelComposed):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # Allowed enum values for the "type" surface-kind discriminator field.
    allowed_values = {
        ("type",): {
            "PLANE": "PLANE",
            "CYLINDER": "CYLINDER",
            "CONE": "CONE",
            "SPHERE": "SPHERE",
            "TORUS": "TORUS",
            "SPUN": "SPUN",
            "SWEEP": "SWEEP",
            "OFFSET": "OFFSET",
            "BLEND": "BLEND",
            "BSURFACE": "BSURFACE",
            "OTHER": "OTHER",
            "UNKNOWN": "UNKNOWN",
        },
    }

    # No per-attribute validations are generated for this model.
    validations = {}

    # Additional (unknown) properties are not accepted.
    additional_properties_type = None

    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "bt_type": (str,),  # noqa: E501
            "origin": (bt_vector3d389.BTVector3d389,),  # noqa: E501
            "radius": (float,),  # noqa: E501
            "type": (str,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        # This composed model has no discriminator of its own.
        return None

    # Maps python attribute names to their JSON keys in the API payload.
    attribute_map = {
        "bt_type": "btType",  # noqa: E501
        "origin": "origin",  # noqa: E501
        "radius": "radius",  # noqa: E501
        "type": "type",  # noqa: E501
    }

    # Internal bookkeeping attributes that must never be treated as
    # model properties.
    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
            "_composed_instances",
            "_var_name_to_model_instances",
            "_additional_properties_model_instances",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """bt_sphere_description1263.BTSphereDescription1263 - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _from_server (bool): True if the data is from the server
                                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            bt_type (str): [optional] # noqa: E501
            origin (bt_vector3d389.BTVector3d389): [optional] # noqa: E501
            radius (float): [optional] # noqa: E501
            type (str): [optional] # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        constant_args = {
            "_check_type": _check_type,
            "_path_to_item": _path_to_item,
            "_from_server": _from_server,
            "_configuration": _configuration,
        }
        required_args = {}
        # remove args whose value is Null because they are unset
        required_arg_names = list(required_args.keys())
        for required_arg_name in required_arg_names:
            if required_args[required_arg_name] is nulltype.Null:
                del required_args[required_arg_name]
        model_args = {}
        model_args.update(required_args)
        model_args.update(kwargs)
        # Distribute the incoming values over the composed (allOf) schemas.
        composed_info = validate_get_composed_info(constant_args, model_args, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        unused_args = composed_info[3]

        for var_name, var_value in required_args.items():
            setattr(self, var_name, var_value)
        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name in unused_args
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and not self._additional_properties_model_instances
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)

    @staticmethod
    def _composed_schemas():
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error beause the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        return {
            "anyOf": [],
            "allOf": [
                bt_sphere_description1263_all_of.BTSphereDescription1263AllOf,
                bt_surface_description1564.BTSurfaceDescription1564,
            ],
            "oneOf": [],
        }
| {
"content_hash": "3960271abebac6ec48f67690e41a09b1",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 89,
"avg_line_length": 35.54054054054054,
"alnum_prop": 0.5779467680608364,
"repo_name": "onshape-public/onshape-clients",
"id": "b7974c2db15749c7b29d5b962b7b84118feb6370",
"size": "7907",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/onshape_client/oas/models/bt_sphere_description1263.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4873"
},
{
"name": "Go",
"bytes": "59674"
},
{
"name": "HTML",
"bytes": "3851790"
},
{
"name": "JavaScript",
"bytes": "2217"
},
{
"name": "Makefile",
"bytes": "559"
},
{
"name": "Python",
"bytes": "7560009"
},
{
"name": "Shell",
"bytes": "3475"
},
{
"name": "TypeScript",
"bytes": "1412661"
}
],
"symlink_target": ""
} |
'''
based on solution 2 of the link: http://www.cnblogs.com/grandyang/p/4635425.html
'''
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    """LeetCode 234: decide whether a singly linked list is a palindrome
    using O(1) extra space by reversing its second half in place."""

    # @param {ListNode} head
    # @return {boolean}
    def isPalindrome(self, head):
        if not head or not head.next:
            return True
        # Advance `mid` to the node just before the second half.
        mid = head
        runner = head
        while runner.next and runner.next.next:
            mid = mid.next
            runner = runner.next.next
        # Reverse the second half in place via head insertion after `mid`.
        tail = mid.next
        while tail.next:
            moved = tail.next
            tail.next = moved.next
            moved.next = mid.next
            mid.next = moved
        # Walk the reversed half and the front half in lockstep.
        front = head
        node = mid
        while node.next:
            node = node.next
            if node.val != front.val:
                return False
            front = front.next
        return True
| {
"content_hash": "006f76e3a6d1e1b586761071b3e2570a",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 82,
"avg_line_length": 24.10810810810811,
"alnum_prop": 0.5179372197309418,
"repo_name": "Chasego/codi",
"id": "cf0f65f6cbd23fcd9221b5f94da143102f5f6bf4",
"size": "892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leetcode/234-Palindrome-Linked-List/PalLinkedList_002.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9684"
},
{
"name": "HTML",
"bytes": "48035"
},
{
"name": "Java",
"bytes": "102385"
},
{
"name": "JavaScript",
"bytes": "2001"
},
{
"name": "Python",
"bytes": "284844"
}
],
"symlink_target": ""
} |
class Authentication:
    """Holds a user's credentials and the session token issued for them."""

    def __init__(self, userid, password):
        self._userid = userid
        self._password = password
        self._token = None  # assigned after a successful login

    def get_token(self):
        """Return the current session token (None before login)."""
        return self._token

    def set_token(self, token):
        """Store the session token issued by the server."""
        self._token = token
| {
"content_hash": "ac4aabf09ad541000523fbd03075a939",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 39,
"avg_line_length": 20.5,
"alnum_prop": 0.6382113821138211,
"repo_name": "brython-dev/brython-in-the-classroom",
"id": "8333449e97a8acb90e81e5bc9ec9d194ca98df4d",
"size": "246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyschool/static/libs/FileSystem/Authentication.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "107273"
},
{
"name": "HTML",
"bytes": "38548"
},
{
"name": "JavaScript",
"bytes": "8789"
},
{
"name": "Python",
"bytes": "58999"
}
],
"symlink_target": ""
} |
import unittest
import rover.config
from StringIO import StringIO
class RepoInfoTest(unittest.TestCase):
    """Unit tests for rover.config.RepoInfo parsing.

    Fix: ``except Exception, x`` is a syntax error under Python 3; the
    ``except ... as x`` form is valid from Python 2.6 onward and behaves
    identically.
    """

    def test_github_repo_info(self):
        repo = rover.config.RepoInfo("github, git, git://github.com/")
        self.assertEqual("github", repo.name)
        self.assertEqual("git", repo.vcs)
        self.assertEqual("git://github.com/", repo.uri)

    def test_repo_info_for_comment(self):
        "RepoInfo() for a commented line fails"
        try:
            repo = rover.config.RepoInfo(" # comment line")
        except Exception as x:
            self.assertEqual('Cannot initialize RepoInfo for commented line',
                             str(x))
        else:
            self.fail("Commented repo line should have thrown an exception")

    def test_repo_info_on_blank(self):
        "RepoInfo() for a blank line fails"
        try:
            repo = rover.config.RepoInfo(" ")
        except Exception as x:
            self.assertEqual('Cannot initialize RepoInfo for blank line',
                             str(x))
        else:
            self.fail("Blank repo line should have thrown an exception")
BASIC_REPOS_TEST_CASE = """
github, git, git://github.com/
# verification comment
tigris, svn, svn://tigris.com/
sourceforge, cvs, :pserver:cvs.sourceforge.net:2401/cvsroot/
"""
class ParseRepoTest(unittest.TestCase):
    """Tests for rover.config.parse_repos."""

    def test_empty_repofile(self):
        parsed = rover.config.parse_repos(StringIO(''))
        self.assertEqual([], parsed)

    def test_basic_parse_repos(self):
        parsed = rover.config.parse_repos(StringIO(BASIC_REPOS_TEST_CASE))
        # Three repos survive; the comment line is dropped.
        self.assertEqual(3, len(parsed))
        github, tigris, sourceforge = parsed
        self.assertEqual('github', github.name)
        self.assertEqual('git', github.vcs)
        self.assertEqual('git://github.com/', github.uri)
        self.assertEqual('svn://tigris.com/', tigris.uri)
        self.assertEqual('sourceforge', sourceforge.name)
| {
"content_hash": "f2d3f689e30c4c0cd7923c0718445d3e",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 78,
"avg_line_length": 32.689655172413794,
"alnum_prop": 0.6239451476793249,
"repo_name": "amplify-education/rover",
"id": "2e64f7444d9e08579f3cd7d487210c947222adc2",
"size": "3007",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/config_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "127411"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui
# Qt string-compat shim: QString.fromUtf8 is absent in newer PyQt API
# versions, so fall back to the identity function.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

# Translation helper: older PyQt4 requires the UnicodeUTF8 encoding
# argument; newer APIs dropped it.
try:
    _encoding = QtGui.QApplication.UnicodeUTF8

    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MnemonicGenerated(object):
    """pyuic4-style UI class for the "mnemonic generated" dialog.

    The widget tree, geometry, object names, stylesheets and translated
    strings are unchanged from the generated original; the 45 repetitive
    palette-brush statements and the repeated font construction have been
    folded into the data-driven helpers ``_build_palette`` and
    ``_make_font``.
    """

    def _build_palette(self):
        """Return the dialog's black-on-white palette.

        The Active and Inactive colour groups are identical; the Disabled
        group differs only in the three text roles, which turn gray.
        """
        black = (0, 0, 0)
        white = (255, 255, 255)
        dark_gray = (127, 127, 127)
        mid_gray = (170, 170, 170)
        tooltip_yellow = (255, 255, 220)
        normal_roles = (
            (QtGui.QPalette.WindowText, black),
            (QtGui.QPalette.Button, white),
            (QtGui.QPalette.Light, white),
            (QtGui.QPalette.Midlight, white),
            (QtGui.QPalette.Dark, dark_gray),
            (QtGui.QPalette.Mid, mid_gray),
            (QtGui.QPalette.Text, black),
            (QtGui.QPalette.BrightText, white),
            (QtGui.QPalette.ButtonText, black),
            (QtGui.QPalette.Base, white),
            (QtGui.QPalette.Window, white),
            (QtGui.QPalette.Shadow, black),
            (QtGui.QPalette.AlternateBase, white),
            (QtGui.QPalette.ToolTipBase, tooltip_yellow),
            (QtGui.QPalette.ToolTipText, black),
        )
        gray_text_roles = (QtGui.QPalette.WindowText,
                           QtGui.QPalette.Text,
                           QtGui.QPalette.ButtonText)
        disabled_roles = tuple(
            (role, dark_gray if role in gray_text_roles else color)
            for role, color in normal_roles
        )
        palette = QtGui.QPalette()
        for group, roles in ((QtGui.QPalette.Active, normal_roles),
                             (QtGui.QPalette.Inactive, normal_roles),
                             (QtGui.QPalette.Disabled, disabled_roles)):
            for role, rgb in roles:
                brush = QtGui.QBrush(QtGui.QColor(*rgb))
                brush.setStyle(QtCore.Qt.SolidPattern)
                palette.setBrush(group, role, brush)
        return palette

    def _make_font(self, family=None, point_size=None, bold=False, italic=False):
        """Build a QFont; bold fonts also get the explicit weight 75 that
        the UI generator emits."""
        font = QtGui.QFont()
        if family is not None:
            font.setFamily(_fromUtf8(family))
        if point_size is not None:
            font.setPointSize(point_size)
        if bold:
            font.setBold(True)
            font.setWeight(75)
        if italic:
            font.setItalic(True)
        return font

    def setupUi(self, MnemonicGenerated):
        """Create and lay out all child widgets on *MnemonicGenerated*."""
        MnemonicGenerated.setObjectName(_fromUtf8("MnemonicGenerated"))
        MnemonicGenerated.resize(831, 353)
        MnemonicGenerated.setPalette(self._build_palette())

        # Storj logo, top-left.
        self.label_3 = QtGui.QLabel(MnemonicGenerated)
        self.label_3.setGeometry(QtCore.QRect(50, 10, 131, 61))
        self.label_3.setText(_fromUtf8(""))
        self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8(":/resources/storj-logo-horizontal.png")))
        self.label_3.setScaledContents(True)
        self.label_3.setObjectName(_fromUtf8("label_3"))

        # Dialog title label.
        self.file_name = QtGui.QLabel(MnemonicGenerated)
        self.file_name.setGeometry(QtCore.QRect(210, 10, 591, 61))
        self.file_name.setFont(self._make_font(family="Lato", point_size=11, bold=True))
        self.file_name.setStyleSheet(_fromUtf8("QLabel{\n"
"color: #2683ff;\n"
"}\n"
""))
        self.file_name.setObjectName(_fromUtf8("file_name"))

        # Text box that displays the generated mnemonic phrase.
        self.mnemonic_edit = QtGui.QPlainTextEdit(MnemonicGenerated)
        self.mnemonic_edit.setGeometry(QtCore.QRect(40, 80, 751, 96))
        self.mnemonic_edit.setFont(self._make_font(point_size=18, bold=True, italic=True))
        self.mnemonic_edit.setObjectName(_fromUtf8("mnemonic_edit"))

        # Yellow "copy to clipboard" button.
        self.copy_to_clipboard = QtGui.QPushButton(MnemonicGenerated)
        self.copy_to_clipboard.setGeometry(QtCore.QRect(590, 200, 221, 41))
        self.copy_to_clipboard.setFont(self._make_font(family="Lato", point_size=12, bold=True))
        self.copy_to_clipboard.setStyleSheet(_fromUtf8("QPushButton {\n"
" background-color: #F2C90F;\n"
" border: 1px solid #F2C90F;\n"
" color: #fff;\n"
" border-radius: 7px;\n"
"}\n"
"QPushButton:hover{\n"
" background-color: #F2C90F;\n"
" border-color: #F2C90F;\n"
"}\n"
"QPushButton:active {\n"
" background-color: #F2C90F;\n"
" border-color: #F2C90F\n"
"}"))
        self.copy_to_clipboard.setObjectName(_fromUtf8("copy_to_clipboard"))

        # Warning text about saving the key in a secure location.
        self.file_name_2 = QtGui.QLabel(MnemonicGenerated)
        self.file_name_2.setGeometry(QtCore.QRect(20, 200, 561, 91))
        self.file_name_2.setFont(self._make_font(family="Lato", point_size=11, bold=True))
        self.file_name_2.setStyleSheet(_fromUtf8("QLabel{\n"
"color: #2683ff;\n"
"}\n"
""))
        self.file_name_2.setObjectName(_fromUtf8("file_name_2"))

        # Green confirmation button spanning the bottom of the dialog.
        self.apply_bt = QtGui.QPushButton(MnemonicGenerated)
        self.apply_bt.setGeometry(QtCore.QRect(20, 300, 791, 41))
        self.apply_bt.setFont(self._make_font(family="Lato", point_size=12, bold=True))
        self.apply_bt.setStyleSheet(_fromUtf8("QPushButton:hover{\n"
" background-color: #83bf20;\n"
" border-color: #83bf20;\n"
"}\n"
"QPushButton:active {\n"
" background-color: #93cc36;\n"
" border-color: #93cc36;\n"
"}\n"
"QPushButton{\n"
" background-color: #88c425;\n"
" border: 1px solid #88c425;\n"
" color: #fff;\n"
" border-radius: 7px;\n"
"}"))
        self.apply_bt.setObjectName(_fromUtf8("apply_bt"))

        # Blue "generate new key" button.
        self.generate_new_key = QtGui.QPushButton(MnemonicGenerated)
        self.generate_new_key.setGeometry(QtCore.QRect(590, 250, 221, 41))
        self.generate_new_key.setFont(self._make_font(family="Lato", point_size=12, bold=True))
        self.generate_new_key.setStyleSheet(_fromUtf8("QPushButton {\n"
" background-color: #2683ff;\n"
" border: 1px solid #2683ff;\n"
" color: #fff;\n"
" border-radius: 7px;\n"
"}\n"
"QPushButton:hover{\n"
" background-color: #2274e2;\n"
" border-color: #2274e2;\n"
"}\n"
"QPushButton:active{\n"
" background-color: #4393ff;\n"
" border-color: #4393ff;\n"
"}"))
        self.generate_new_key.setObjectName(_fromUtf8("generate_new_key"))

        self.retranslateUi(MnemonicGenerated)
        QtCore.QMetaObject.connectSlotsByName(MnemonicGenerated)

    def retranslateUi(self, MnemonicGenerated):
        """Set all user-visible strings (kept verbatim from the .ui file)."""
        MnemonicGenerated.setWindowTitle(_translate("MnemonicGenerated", "Generate mnemonic key - Storj GUI Client", None))
        self.file_name.setText(_translate("MnemonicGenerated", "<html><head/><body><p><span style=\" font-size:18pt;\">Your generated encryption key - mnemonic</span></p></body></html>", None))
        self.copy_to_clipboard.setText(_translate("MnemonicGenerated", "COPY TO CLIPBOARD", None))
        self.file_name_2.setText(_translate("MnemonicGenerated", "<html><head/><body><p align=\"center\">Please save your key in secure location! </p><p align=\"center\">Without correct encryption key your files cannot be recovered!</p><p align=\"center\"><span style=\" text-decoration: underline;\">Warning! Lost encrytpion key cannot be recovered!</span></p></body></html>", None))
        self.apply_bt.setText(_translate("MnemonicGenerated", "I SAVED MY ENCRYPTION KEY IN SECURE LOCATION, LET\'S GO!", None))
        self.generate_new_key.setText(_translate("MnemonicGenerated", "GENERATE NEW KEY", None))
import resources_rc
| {
"content_hash": "b9539ef460f5fd9bf3fdee69f8003f77",
"timestamp": "",
"source": "github",
"line_count": 280,
"max_line_length": 384,
"avg_line_length": 51.010714285714286,
"alnum_prop": 0.6806693271721627,
"repo_name": "lakewik/storj-gui-client",
"id": "2f91c24c90299fc9386f3c4eeaaa28e1b71e4624",
"size": "14490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "UI/qt_interfaces/generated_mnemonic_ui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "279"
},
{
"name": "Python",
"bytes": "2470041"
}
],
"symlink_target": ""
} |
"""Tests for an actual dns resolution."""
import logging
import unittest
import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
# Arbitrary method path; GenericHandler below serves every method.
_METHOD = '/ANY/METHOD'
# The handler echoes the request back, so request and response are equal.
_REQUEST = b'\x00\x00\x00'
_RESPONSE = _REQUEST
class GenericHandler(grpc.GenericRpcHandler):
    """Routes every incoming RPC to a unary-unary echo handler."""

    def service(self, unused_handler_details):
        def echo(request, unused_context):
            # Return the request bytes unchanged.
            return request

        return grpc.unary_unary_rpc_method_handler(echo)
class DNSResolverTest(unittest.TestCase):
    """Starts a local echo server and reaches it via a DNS-resolved name."""

    def setUp(self):
        self._server = test_common.test_server()
        self._server.add_generic_rpc_handlers((GenericHandler(),))
        self._port = self._server.add_insecure_port('[::]:0')
        self._server.start()

    def tearDown(self):
        self._server.stop(None)

    def test_connect_loopback(self):
        # NOTE(https://github.com/grpc/grpc/issues/18422)
        # In short, Gevent + C-Ares = Segfault. The C-Ares driver is not
        # supported by custom io manager like "gevent"
        # NOTE(b/201064791): use loopback46.unittest.grpc.io since
        # it returns the expected responses even when DNS64 dns servers
        # are used on the test worker (and for purposes of this
        # test the use of loopback4 vs loopback46 makes no difference).
        target = 'loopback46.unittest.grpc.io:%d' % self._port
        with grpc.insecure_channel(target) as channel:
            multi_callable = channel.unary_unary(_METHOD)
            response = multi_callable(_REQUEST, timeout=10)
            self.assertEqual(response, _RESPONSE)
# Script entry point: configure basic logging, then run the suite.
if __name__ == '__main__':
    logging.basicConfig()
    unittest.main(verbosity=2)
| {
"content_hash": "5d1fbea9fc83de672cbed5a752300fcd",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 72,
"avg_line_length": 31,
"alnum_prop": 0.6260454002389486,
"repo_name": "stanley-cheung/grpc",
"id": "f4196aaaac16963a4545bd1250218fb21de859d1",
"size": "2254",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/python/grpcio_tests/tests/unit/_dns_resolver_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "5444"
},
{
"name": "Batchfile",
"bytes": "37697"
},
{
"name": "C",
"bytes": "1340616"
},
{
"name": "C#",
"bytes": "113402"
},
{
"name": "C++",
"bytes": "17499842"
},
{
"name": "CMake",
"bytes": "29311"
},
{
"name": "CSS",
"bytes": "1519"
},
{
"name": "Cython",
"bytes": "258997"
},
{
"name": "Dockerfile",
"bytes": "181867"
},
{
"name": "Go",
"bytes": "34794"
},
{
"name": "HTML",
"bytes": "14"
},
{
"name": "Java",
"bytes": "14329"
},
{
"name": "JavaScript",
"bytes": "5572"
},
{
"name": "Objective-C",
"bytes": "724877"
},
{
"name": "Objective-C++",
"bytes": "79586"
},
{
"name": "PHP",
"bytes": "488004"
},
{
"name": "PowerShell",
"bytes": "5008"
},
{
"name": "Python",
"bytes": "3829879"
},
{
"name": "Ruby",
"bytes": "649843"
},
{
"name": "Shell",
"bytes": "774868"
},
{
"name": "Starlark",
"bytes": "874094"
},
{
"name": "Swift",
"bytes": "7487"
},
{
"name": "XSLT",
"bytes": "9846"
}
],
"symlink_target": ""
} |
import binascii
import struct
import logging
logger = logging.getLogger(__name__)
import string
from counterpartylib.lib import (config, exceptions, util, message_type)
from . import rps
# Payload layout: move (big-endian uint16), random (16 raw bytes), then
# the two 32-byte transaction hashes identifying the rps match.
# move random rps_match_id
FORMAT = '>H16s32s32s'
LENGTH = 2 + 16 + 32 + 32  # fixed payload size implied by FORMAT
ID = 81  # message-type identifier packed ahead of the payload
def initialise(db):
    """Create the rpsresolves table and its lookup indexes if missing.

    All statements use IF NOT EXISTS, so calling this repeatedly is safe.
    """
    ddl_statements = (
        '''CREATE TABLE IF NOT EXISTS rpsresolves(
                      tx_index INTEGER PRIMARY KEY,
                      tx_hash TEXT UNIQUE,
                      block_index INTEGER,
                      source TEXT,
                      move INTEGER,
                      random TEXT,
                      rps_match_id TEXT,
                      status TEXT,
                      FOREIGN KEY (tx_index, tx_hash, block_index) REFERENCES transactions(tx_index, tx_hash, block_index))
                   ''',
        '''CREATE INDEX IF NOT EXISTS
                      block_index_idx ON rpsresolves (block_index)
                   ''',
        '''CREATE INDEX IF NOT EXISTS
                      source_idx ON rpsresolves (source)
                   ''',
        '''CREATE INDEX IF NOT EXISTS
                      rps_match_id_idx ON rpsresolves (rps_match_id)
                   ''',
    )
    cursor = db.cursor()
    for statement in ddl_statements:
        cursor.execute(statement)
def validate (db, source, move, random, rps_match_id):
    """Check a move/random reveal against its stored rps match.

    Returns a ``(txn, rps_match, problems)`` triple: ``txn`` is 0 when
    *source* is the match's tx0 address and 1 when it is tx1 (None if
    validation stopped before that point), ``rps_match`` is the matching
    row (or None), and ``problems`` is a list of human-readable errors
    (empty when the reveal is valid).
    """
    problems = []
    rps_match = None

    # Type/format checks that do not need database access.
    if not isinstance(move, int):
        problems.append('move must be a integer')
        return None, None, problems

    if not all(c in string.hexdigits for c in random):
        problems.append('random must be an hexadecimal string')
        return None, None, problems

    random_bytes = binascii.unhexlify(random)
    if len(random_bytes) != 16:
        problems.append('random must be 16 bytes in hexadecimal format')
        return None, None, problems

    # Fetch the rps match this reveal refers to; id is unique, so more
    # than one row indicates database corruption.
    cursor = db.cursor()
    rps_matches = list(cursor.execute('''SELECT * FROM rps_matches WHERE id = ?''', (rps_match_id,)))
    cursor.close()
    if len(rps_matches) == 0:
        problems.append('no such rps match')
        return None, rps_match, problems
    elif len(rps_matches) > 1:
        assert False

    rps_match = rps_matches[0]

    # The move must lie in [1, possible_moves].
    if move<1:
        problems.append('move must be greater than 0')
    elif move > rps_match['possible_moves']:
        problems.append('move must be lower than {}'.format(rps_match['possible_moves']))

    if source not in [rps_match['tx0_address'], rps_match['tx1_address']]:
        problems.append('invalid source address')
        return None, rps_match, problems

    # Work out which side of the match is revealing, and which statuses
    # still allow that side to resolve.
    if rps_match['tx0_address'] == source:
        txn = 0
        rps_match_status = ['pending', 'pending and resolved']
    else:
        txn = 1
        rps_match_status = ['pending', 'resolved and pending']

    # Recompute the commitment hash from the reveal and compare it with
    # the hash recorded when the match was opened.
    move_random_hash = util.dhash(random_bytes + int(move).to_bytes(2, byteorder='big'))
    move_random_hash = binascii.hexlify(move_random_hash).decode('utf-8')
    if rps_match['tx{}_move_random_hash'.format(txn)] != move_random_hash:
        problems.append('invalid move or random value')
        return txn, rps_match, problems

    # Finally, the match must still be in a state this side may resolve.
    if rps_match['status'] == 'expired':
        problems.append('rps match expired')
    elif rps_match['status'].startswith('concluded'):
        problems.append('rps match concluded')
    elif rps_match['status'].startswith('invalid'):
        problems.append('rps match invalid')
    elif rps_match['status'] not in rps_match_status:
        problems.append('rps already resolved')

    return txn, rps_match, problems
def compose (db, source, move, random, rps_match_id):
    """Build the ``(source, destinations, data)`` tuple for a rpsresolve
    transaction, raising ComposeError when validate() reports problems.
    """
    tx0_hash, tx1_hash = util.parse_id(rps_match_id)
    txn, rps_match, problems = validate(db, source, move, random, rps_match_id)
    if problems: raise exceptions.ComposeError(problems)

    # Warn if down to the wire.
    time_left = rps_match['match_expire_index'] - util.CURRENT_BLOCK_INDEX
    if time_left < 4:
        logger.warning('Only {} blocks until that rps match expires. The conclusion might not make into the blockchain in time.'.format(time_left))

    # Serialise the payload: message-type id followed by the packed
    # move/random/tx-hash fields (see FORMAT above).
    tx0_hash_bytes = binascii.unhexlify(bytes(tx0_hash, 'utf-8'))
    tx1_hash_bytes = binascii.unhexlify(bytes(tx1_hash, 'utf-8'))
    random_bytes = binascii.unhexlify(bytes(random, 'utf-8'))
    data = message_type.pack(ID)
    data += struct.pack(FORMAT, move, random_bytes, tx0_hash_bytes, tx1_hash_bytes)
    return (source, [], data)
def parse (db, tx, message):
    """Parse a rpsresolve transaction and apply it to the database.

    Unpacks *message*, validates it via validate(), records the attempt
    in the rpsresolves table and, once both players have revealed,
    settles the match through resolve_game().
    """
    cursor = db.cursor()

    # Unpack message.
    try:
        if len(message) != LENGTH:
            raise exceptions.UnpackError
        move, random, tx0_hash_bytes, tx1_hash_bytes = struct.unpack(FORMAT, message)
        tx0_hash, tx1_hash = binascii.hexlify(tx0_hash_bytes).decode('utf-8'), binascii.hexlify(tx1_hash_bytes).decode('utf-8')
        rps_match_id = util.make_id(tx0_hash, tx1_hash)
        random = binascii.hexlify(random).decode('utf-8')
        status = 'valid'
    except (exceptions.UnpackError, struct.error) as e:
        move, random, tx0_hash, tx1_hash, rps_match_id = None, None, None, None, None
        status = 'invalid: could not unpack'

    if status == 'valid':
        txn, rps_match, problems = validate(db, tx['source'], move, random, rps_match_id)
        if problems:
            rps_match = None
            status = 'invalid: ' + '; '.join(problems)

    # Add parsed transaction to message-type–specific table.
    rpsresolves_bindings = {
        'tx_index': tx['tx_index'],
        'tx_hash': tx['tx_hash'],
        'block_index': tx['block_index'],
        'source': tx['source'],
        'move': move,
        'random': random,
        'rps_match_id': rps_match_id,
        'status': status
    }

    if status == 'valid':
        rps_match_status = 'concluded'
        if rps_match['status'] == 'pending':
            # Only one side has revealed so far; record which one.
            rps_match_status = 'resolved and pending' if txn==0 else 'pending and resolved'

        if rps_match_status == 'concluded':
            # Both reveals are in: fetch the opponent's valid reveal and
            # settle the match.
            counter_txn = 0 if txn == 1 else 1
            counter_source = rps_match['tx{}_address'.format(counter_txn)]
            sql = '''SELECT * FROM rpsresolves WHERE rps_match_id = ? AND source = ? AND status = ?'''
            counter_games = list(cursor.execute(sql, (rps_match_id, counter_source, 'valid')))
            assert len(counter_games) == 1
            counter_game = counter_games[0]
            winner = resolve_game(db, rpsresolves_bindings, counter_game)

            if winner == 0:
                rps_match_status = 'concluded: tie'
            elif winner == counter_game['tx_index']:
                rps_match_status = 'concluded: {} player wins'.format('first' if counter_txn == 0 else 'second')
            else:
                rps_match_status = 'concluded: {} player wins'.format('first' if txn == 0 else 'second')

        rps.update_rps_match_status(db, rps_match, rps_match_status, tx['block_index'])

    sql = '''INSERT INTO rpsresolves VALUES (:tx_index, :tx_hash, :block_index, :source, :move, :random, :rps_match_id, :status)'''
    cursor.execute(sql, rpsresolves_bindings)

    cursor.close()
# https://en.wikipedia.org/wiki/Rock-paper-scissors#Additional_weapons:
def resolve_game(db, resovlerps1, resovlerps2):
    """Return the winning reveal's ``tx_index``, or 0 for a tie.

    Implements the extended rock-paper-scissors rule (see link above):
    when both moves share parity the lower move wins, otherwise the
    higher one does. *db* is unused but kept for caller compatibility.
    """
    first_move = resovlerps1['move']
    second_move = resovlerps2['move']
    if first_move == second_move:
        return 0  # identical moves: tie
    parity_match = (first_move % 2) == (second_move % 2)
    first_wins = first_move < second_move if parity_match else first_move > second_move
    return resovlerps1['tx_index'] if first_wins else resovlerps2['tx_index']
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| {
"content_hash": "4c453318f83023bfdff6cac7c855d708",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 147,
"avg_line_length": 38.765306122448976,
"alnum_prop": 0.6079231376678074,
"repo_name": "CounterpartyXCP/counterparty-lib",
"id": "c56f21c9e951824b9a2a2cbb117f8415cb8d6835",
"size": "7621",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "counterpartylib/lib/messages/rpsresolve.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1751"
},
{
"name": "Python",
"bytes": "733063"
},
{
"name": "Shell",
"bytes": "1258"
}
],
"symlink_target": ""
} |
import os
import shutil
import tempfile
import unittest
import logging
from oyProjectManager.models.auth import User
class ConfigTester(unittest.TestCase):
    """Tests for the oyProjectManager system configuration.

    Each test points OYPROJECTMANAGER_PATH at a temporary folder and
    writes a config.py there; config.Config() is then expected to pick
    the values up (or fail in a controlled way).
    """

    def setUp(self):
        """Create a temp config folder and point the env var at it."""
        logger = logging.getLogger("oyProjectManager")
        logger.setLevel(logging.DEBUG)

        # we need a temp directory to be specified as our config folder
        self.temp_config_folder = tempfile.mkdtemp()
        os.environ["OYPROJECTMANAGER_PATH"] = self.temp_config_folder
        self.config_full_path = os.path.join(self.temp_config_folder, "config.py")

    def tearDown(self):
        """Remove the temporary config folder."""
        shutil.rmtree(self.temp_config_folder)

    def _write_config(self, *lines):
        """Write *lines* to the temporary config.py.

        Uses a ``with`` block so the handle is always closed, even if a
        write fails (the original open()/close() pairs leaked the handle
        on error).
        """
        with open(self.config_full_path, "w") as config_file:
            config_file.writelines(lines)

    def test_config_variable_updates_with_user_config(self):
        """A known setting (database_url) is overridden by the user
        config."""
        test_value = ".test_value.db"
        self._write_config("#-*- coding: utf-8 -*-\n",
                           'database_url = "' + test_value + '"\n')

        # importing config should pick up the user value
        from oyProjectManager import config
        conf = config.Config()
        self.assertEqual(test_value, conf.database_url)

    def test_config_variable_doesnt_create_new_variables_with_user_config(self):
        """Unknown names in the user config do not become attributes."""
        test_value = ".test_value.db"
        self._write_config("#-*- coding: utf-8 -*-\n",
                           'test_value = "' + test_value + '"\n')

        from oyProjectManager import config
        conf = config.Config()
        self.assertRaises(KeyError, getattr, conf, "test_value")

    def test_env_variable_with_vars_module_import_with_shortcuts(self):
        """OYPROJECTMANAGER_PATH may contain $-style env-var shortcuts."""
        splits = os.path.split(self.temp_config_folder)
        var1 = splits[0]
        var2 = os.path.sep.join(splits[1:])

        os.environ["var1"] = var1
        os.environ["var2"] = var2
        os.environ["OYPROJECTMANAGER_PATH"] = "$var1/$var2"

        test_value = "sqlite3:///.test_value.db"
        self._write_config("#-*- coding: utf-8 -*-\n",
                           'database_url = "' + test_value + '"\n')

        from oyProjectManager import config
        conf = config.Config()
        self.assertEqual(test_value, conf.database_url)

    def test_env_variable_with_deep_vars_module_import_with_shortcuts(self):
        """Shortcuts that expand to further shortcuts are also resolved."""
        splits = os.path.split(self.temp_config_folder)
        var1 = splits[0]
        var2 = os.path.sep.join(splits[1:])
        var3 = os.path.join("$var1", "$var2")

        os.environ["var1"] = var1
        os.environ["var2"] = var2
        os.environ["var3"] = var3
        os.environ["OYPROJECTMANAGER_PATH"] = "$var3"

        test_value = "sqlite:///.test_value.db"
        self._write_config("#-*- coding: utf-8 -*-\n",
                           'database_url = "' + test_value + '"\n')

        from oyProjectManager import config
        conf = config.Config()
        self.assertEqual(test_value, conf.database_url)

    def test_non_existing_path_in_environment_variable(self):
        """A non-existing config path is handled gracefully (no raise)."""
        os.environ["OYPROJECTMANAGER_PATH"] = "/tmp/non_existing_path"
        from oyProjectManager import config
        config.Config()

    def test_syntax_error_in_settings_file(self):
        """A syntax error in config.py surfaces as a RuntimeError."""
        test_value = ".test_value.db"
        # note the missing closing quote -> SyntaxError inside config.py
        self._write_config("#-*- coding: utf-8 -*-\n",
                           'database_file_name = "' + test_value + '\n')

        from oyProjectManager import config
        self.assertRaises(RuntimeError, config.Config)
| {
"content_hash": "b076af66a3d2187dcadbd0054c5afda7",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 82,
"avg_line_length": 40.338235294117645,
"alnum_prop": 0.5589986632640661,
"repo_name": "code-google-com/oyprojectmanager",
"id": "df90c632541a27bf4d3fab5e7db85a8b5dd3c81c",
"size": "8436",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/config/test_config.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Perl",
"bytes": "171"
},
{
"name": "Python",
"bytes": "1091868"
}
],
"symlink_target": ""
} |
"""Test the setban rpc call."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
connect_nodes,
p2p_port
)
class SetBanTests(BitcoinTestFramework):
    """Functional test for the setban RPC and the noban permission."""

    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        self.extra_args = [[], []]

    def _node1_peer_permissions(self):
        """Permissions node 1 granted to its first (only) peer."""
        return self.nodes[1].getpeerinfo()[0]['permissions']

    def run_test(self):
        # A plain connection grants no noban permission.
        connect_nodes(self.nodes[0], self.nodes[1])
        assert('noban' not in self._node1_peer_permissions())

        # Ban node 0 on node 1; the reconnect attempt must be dropped.
        self.nodes[1].setban("127.0.0.1", "add")
        with self.nodes[1].assert_debug_log(expected_msgs=['dropped (banned)\n'], timeout=5):
            self.restart_node(1, [])
            self.nodes[0].addnode("127.0.0.1:" + str(p2p_port(1)), "onetry")

        # With noban permission (via whitelist) the ban is bypassed.
        self.restart_node(1, ['-whitelist=127.0.0.1'])
        connect_nodes(self.nodes[0], self.nodes[1])
        assert('noban' in self._node1_peer_permissions())

        # Lifting the ban lets node 0 back in without any permission.
        self.nodes[1].setban("127.0.0.1", "remove")
        self.restart_node(1, [])
        connect_nodes(self.nodes[0], self.nodes[1])
        assert('noban' not in self._node1_peer_permissions())
# Run the test directly when executed as a script.
if __name__ == '__main__':
    SetBanTests().main()
| {
"content_hash": "a969cbe8a78650eb8cabe0ec1e2deda0",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 93,
"avg_line_length": 34.666666666666664,
"alnum_prop": 0.6069711538461539,
"repo_name": "cculianu/bitcoin-abc",
"id": "89b7c1d079306ff3790f97a927bbfe2ca6bb7c27",
"size": "1878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/functional/rpc_setban.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28456"
},
{
"name": "C",
"bytes": "676074"
},
{
"name": "C++",
"bytes": "5385212"
},
{
"name": "HTML",
"bytes": "20970"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "192408"
},
{
"name": "Makefile",
"bytes": "112555"
},
{
"name": "Objective-C",
"bytes": "123566"
},
{
"name": "Objective-C++",
"bytes": "7251"
},
{
"name": "PHP",
"bytes": "4085"
},
{
"name": "Python",
"bytes": "1027736"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Ruby",
"bytes": "740"
},
{
"name": "Shell",
"bytes": "59432"
}
],
"symlink_target": ""
} |
import logging
import os
import re
import tempfile
import zipfile
from django import forms
from django.template.defaultfilters import slugify
from django.core.files import File
from dali.gallery.models import Gallery, Picture
class ZipFileForm(forms.Form):
    """Admin form that bulk-imports every image inside an uploaded zip
    file into a chosen Gallery.
    """
    gallery = forms.ModelChoiceField(queryset=Gallery.objects.all())
    zip_file = forms.FileField()

    # Browsers/platforms report several MIME types for zip uploads.
    valid_content_types = ('application/zip', 'application/x-zip',
                           'application/x-zip-compressed', 'application/x-compress',
                           'application/x-compressed', 'multipart/x-zip')
    valid_file_extensions = ('zip',)

    def clean_zip_file(self):
        """
        Returns the zip file as an UploadedFile if valid, else raises
        a ValidationError.
        """
        zf = self.cleaned_data['zip_file']
        # Both the file extension and the reported content type must match.
        if _file_ext(zf.name) in ZipFileForm.valid_file_extensions \
                and zf.content_type in ZipFileForm.valid_content_types:
            return zf
        raise forms.ValidationError("A zip file is required:)")

    def save(self):
        """
        Add all images in zip_file to gallery.

        Returns a dict with 'valid' and 'invalid' lists of the archive
        member names that were imported or rejected.  (The old docstring
        wrongly claimed a list was returned.)
        """
        try:
            # Renamed from ``zip`` -- the old name shadowed the builtin.
            archive = zipfile.ZipFile(self.cleaned_data['zip_file'])
        except zipfile.BadZipfile:
            raise forms.ValidationError("The zip file is corrupted:(")

        files = {'valid': [], 'invalid': []}

        for filename in archive.namelist():
            name = _normalize_name(filename)
            slug = _unique_slug(name)
            pic = Picture(name=name, slug=slug,
                          gallery=self.cleaned_data['gallery'])

            tf = tempfile.NamedTemporaryFile('wb+')
            try:
                tf.write(archive.read(filename))
                try:
                    pic.original.save(filename, File(tf))
                    files['valid'].append(filename)
                except IOError:
                    files['invalid'].append(filename)
            finally:
                # Always release the temp file, even if reading the
                # archive member raises (the old code leaked it then).
                tf.close()

        archive.close()
        return files
def _file_ext(filename):
"""Returns the extension of the filename as a lower case string."""
return os.path.splitext(filename)[1][1:].lower()
def _normalize_name(name):
"""
Returns a normalized name for the image.
Removes the extension from the end.
Removes leading paths.
Replace underscores "_" with spaces " ".
"""
logging.debug("name is [%s]", name)
# Remove leading paths
name = os.path.split(name)[1]
# Remove all extentions
# Seem to get many .jpg.jgp from Photoshop exports
try:
period = name.index('.')
if period > 0:
name = name[:period]
else:
name = name[1:]
except ValueError:
pass # Do nothing:)
name = name.replace('_', ' ')
logging.debug("resulting name is [%s]", name)
return name
def _unique_slug(slug):
    """Return a slug derived from *slug* that no existing Picture uses.

    Appends "-1", "-2", ... to the slugified base until a free slug
    is found.
    """
    base = slugify(slug)
    candidate = base
    suffix = 1
    while True:
        try:
            Picture.objects.get(slug=candidate)
        except Picture.DoesNotExist:
            # Nobody owns this slug yet -- it is safe to use.
            return candidate
        candidate = "%s-%d" % (base, suffix)
        suffix += 1
| {
"content_hash": "14912fce0968f909f9e4f19cfa314c1a",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 85,
"avg_line_length": 29.60185185185185,
"alnum_prop": 0.5877385048482953,
"repo_name": "varikin/dali",
"id": "0bfa96e38af7d22cbd934c4cc6380ecfe81158cd",
"size": "3197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dali/gallery/admin/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "18209"
},
{
"name": "Python",
"bytes": "61378"
}
],
"symlink_target": ""
} |
import ljd.ast.nodes as nodes
import ljd.ast.traverse as traverse
def mark_locals(ast):
    """Rename slot identifiers in *ast* to the local-variable names
    recorded in the bytecode's debug information."""
    traverse.traverse(_LocalsMarker(), ast)
def mark_local_definitions(ast):
    """Tag assignments in *ast* that introduce fresh local variables as
    ``Assignment.T_LOCAL_DEFINITION``."""
    traverse.traverse(_LocalDefinitionsMarker(), ast)
class _LocalsMarker(traverse.Visitor):
    """Visitor that renames slot identifiers to the local-variable names
    recorded in the function's debug information.

    Identifier nodes are queued per slot in ``pending_slots`` until an
    instruction address is reached where the debug info can resolve them.
    States are stacked so nested function definitions are independent.
    """

    class _State():
        def __init__(self):
            # slot number -> list of identifier nodes awaiting a name
            self.pending_slots = {}
            self.debuginfo = None
            self.addr = -1

    def __init__(self):
        self._states = []

    # ##

    def _push_state(self):
        self._states.append(_LocalsMarker._State())

    def _pop_state(self):
        self._states.pop()

    def _state(self):
        return self._states[-1]

    def _process_slots(self, addr):
        """Assign debug-info names to all pending slots visible at *addr*."""
        debuginfo = self._state().debuginfo

        cleanup = []

        # BUG FIX: the loop variable used to be named ``nodes``, which
        # shadowed the ``ljd.ast.nodes`` module imported at file top.
        for slot, pending_nodes in self._state().pending_slots.items():
            varinfo = debuginfo.lookup_local_name(addr, slot)

            if varinfo is None:
                continue

            cleanup.append(slot)

            if varinfo.type == varinfo.T_INTERNAL:
                # Compiler-internal variables keep their slot form.
                continue

            for node in pending_nodes:
                node.name = varinfo.name
                node.type = node.T_LOCAL
                setattr(node, "_varinfo", varinfo)

        for slot in cleanup:
            del self._state().pending_slots[slot]

    def _reset_slot(self, slot):
        self._state().pending_slots.pop(slot, None)

    def _reset_all(self, slots):
        for slot in slots:
            if isinstance(slot, nodes.Identifier):
                self._reset_slot(slot.slot)

    # ##

    def visit_function_definition(self, node):
        self._push_state()
        self._state().debuginfo = node._debuginfo

    def leave_function_definition(self, node):
        # One past the last instruction: flush everything still pending.
        addr = node._instructions_count
        self._process_slots(addr)
        self._pop_state()

    # ##

    def visit_variables_list(self, node):
        # Last chance for a local = local + 1 type assignments
        self._process_slots(self._state().addr)

        self._reset_all(node.contents)

    def visit_identifiers_list(self, node):
        self._reset_all(node.contents)

    def visit_numeric_loop_warp(self, node):
        self._reset_slot(node.index.slot)

    def visit_identifier(self, node):
        if node.type == nodes.Identifier.T_SLOT:
            queue = self._state().pending_slots
            slots = queue.setdefault(node.slot, [])
            slots.append(node)

    # ##

    def _process_worthy_node(self, node):
        addr = getattr(node, "_addr", None)

        if not isinstance(node, nodes.Identifier) and addr is not None:
            # Addresses must be visited in non-decreasing order.
            assert self._state().addr <= addr
            self._state().addr = addr
            self._process_slots(addr)

    # We need to process slots twice as it could be the last
    # statement in the function/block and it could be an assignment
    # as well so we need to process slots before the reset
    def _leave_node(self, handler, node):
        traverse.Visitor._leave_node(self, handler, node)
        self._process_worthy_node(node)

    def _visit_node(self, handler, node):
        self._process_worthy_node(node)
        traverse.Visitor._visit_node(self, handler, node)
class _LocalDefinitionsMarker(traverse.Visitor):
    """Visitor that flags assignments creating fresh local variables.

    ``known_locals`` tracks, per slot, the varinfo most recently seen for
    that slot.  An assignment whose destination slots have no still-live
    varinfo is marked as a local definition.
    """

    class _State():
        def __init__(self):
            # One entry per possible slot number (LuaJIT uses at most 255).
            self.known_locals = [None] * 255
            self.addr = 0

    def __init__(self):
        self._states = []

    def _push_state(self):
        self._states.append(_LocalDefinitionsMarker._State())

    def _pop_state(self):
        self._states.pop()

    def _state(self):
        return self._states[-1]

    def _update_known_locals(self, local, addr):
        """Record *local*'s varinfo for its slot.

        Returns True when the slot already held a varinfo that is still
        alive at *addr*, i.e. the variable was already known.
        """
        varinfo = self._state().known_locals[local.slot]

        self._state().known_locals[local.slot] = getattr(local,
                                                         "_varinfo",
                                                         None)

        if varinfo is None:
            return False

        if varinfo.end_addr <= addr:
            # The previous variable's scope already ended at this address.
            return False

        return True

    # ##

    def visit_function_definition(self, node):
        self._push_state()

        # Arguments are locals known from the very start of the function.
        for local in node.arguments.contents:
            if not isinstance(local, nodes.Vararg):
                self._update_known_locals(local, 1)

    def leave_function_definition(self, node):
        self._pop_state()

    def visit_iterator_for(self, node):
        addr = node._addr

        for local in node.identifiers.contents:
            if local.type == nodes.Identifier.T_LOCAL:
                self._update_known_locals(local, addr)

    def visit_numeric_for(self, node):
        addr = node._addr

        if node.variable.type == nodes.Identifier.T_LOCAL:
            self._update_known_locals(node.variable, addr)

    # ##

    def visit_assignment(self, node):
        dst = node.destinations.contents[0]
        addr = self._state().addr

        if not isinstance(dst, nodes.Identifier):
            return

        if dst.type != nodes.Identifier.T_LOCAL:
            return

        known_slot = self._update_known_locals(dst, addr)

        # All destinations of a single assignment must agree on whether
        # they were already known.
        for slot in node.destinations.contents[1:]:
            if not isinstance(slot, nodes.Identifier):
                return

            if slot.type != nodes.Identifier.T_LOCAL:
                return

            also_known = self._update_known_locals(slot, addr)

            assert known_slot == also_known

        if not known_slot:
            node.type = nodes.Assignment.T_LOCAL_DEFINITION

    def _visit(self, node):
        # Keep the current instruction address in sync while traversing.
        node_addr = getattr(node, "_addr", -1)

        if node_addr >= 0:
            self._state().addr = node_addr

        traverse.Visitor._visit(self, node)
| {
"content_hash": "04e8f60ae7432fd2ae5a71a8cfd8c287",
"timestamp": "",
"source": "github",
"line_count": 216,
"max_line_length": 65,
"avg_line_length": 21.99074074074074,
"alnum_prop": 0.6791578947368421,
"repo_name": "mrexodia/ljd",
"id": "00fdb26ffbb76d69a9b2c00dca531e232a0f24fa",
"size": "4823",
"binary": false,
"copies": "2",
"ref": "refs/heads/bugfix",
"path": "ljd/ast/locals.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Lua",
"bytes": "34162"
},
{
"name": "Python",
"bytes": "180511"
}
],
"symlink_target": ""
} |
import sys, os
from datetime import datetime
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinxcontrib.phpdomain', 'sphinxcontrib_phpautodoc']
primary_domain = 'php'

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Services_Twilio'
# BUG FIX: the Python 2-only unicode() builtin crashed this file under
# Python 3; str() gives the same result on both interpreters here.
copyright = str(datetime.utcnow().year) + u', Twilio Inc'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '4.3'
# The full version, including alpha/beta/rc tags.
release = '4.3.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------

# Make the bundled "_themes" directory importable so Sphinx can load the
# custom "kr" theme package.
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
html_theme = 'kr'

from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
# startinline=True highlights PHP snippets that lack the <?php opening tag.
lexers['php'] = PhpLexer(startinline=True)
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Services_Twiliodoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Services_Twilio.tex', u'Services\\_Twilio Documentation',
u'Neuman Vong', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'services_twilio', u'Services_Twilio Documentation',
[u'Neuman Vong'], 1)
]
| {
"content_hash": "81ea660260e7382aa2b3e99dc03b7fab",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 80,
"avg_line_length": 32.66197183098591,
"alnum_prop": 0.7089262613195343,
"repo_name": "iGaskin/TMS_Alert",
"id": "ef215b92e88981827ca1c0158fcd59bee1748e1b",
"size": "7383",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "php/vendor/twilio/sdk/docs/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "125108"
},
{
"name": "HTML",
"bytes": "7096"
},
{
"name": "JavaScript",
"bytes": "14694"
},
{
"name": "PHP",
"bytes": "10163"
}
],
"symlink_target": ""
} |
import base64
from Crypto.Cipher import AES
from Crypto import Random
from Crypto.Hash import SHA256
BS = 16  # AES block size in bytes


# PEP 8 (E731): these were lambdas bound to names; proper defs allow
# docstrings and clearer tracebacks.
def pad(s):
    """Apply PKCS#7-style padding so len(s) becomes a multiple of BS.

    A full block of padding is added when the input is already aligned.
    """
    padding = BS - len(s) % BS
    return s + padding * chr(padding)


def unpad(s):
    """Strip PKCS#7-style padding added by pad().

    NOTE(review): padding bytes are not validated, so corrupted or
    wrongly-keyed plaintext is silently truncated rather than rejected.
    """
    return s[:-ord(s[len(s) - 1:])]
class AESCipher:
    """AES-CBC wrapper producing base64-encoded (IV + ciphertext) blobs.

    NOTE(review): CBC without a MAC provides no integrity protection and
    the padding is not validated on decrypt -- confirm callers
    authenticate messages separately.
    """

    def __init__(self, key):
        # Key is used as-is; AES accepts only 16/24/32 byte keys.
        self.key = key

    def encrypt(self, raw):
        """Pad and encrypt *raw*; return base64(iv + ciphertext)."""
        raw = pad(raw)
        # Fresh random IV per message, prepended to the ciphertext.
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(raw))

    def decrypt(self, enc):
        """Decrypt base64(iv + ciphertext) produced by encrypt()."""
        enc = base64.b64decode(enc)
        # First 16 bytes are the IV written by encrypt().
        iv = enc[:16]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return unpad(cipher.decrypt(enc[16:]))
| {
"content_hash": "b64235e08c1faf12ccecc4cf958b4bb1",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 62,
"avg_line_length": 26.26923076923077,
"alnum_prop": 0.595900439238653,
"repo_name": "tghack/tg17hack",
"id": "0b204b977e5307345c9aa72ec7362539da1b725d",
"size": "683",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crypto/transmission_control/src/aes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "30793"
},
{
"name": "C++",
"bytes": "2368"
},
{
"name": "CMake",
"bytes": "1635"
},
{
"name": "Go",
"bytes": "9715"
},
{
"name": "HTML",
"bytes": "612"
},
{
"name": "JavaScript",
"bytes": "685"
},
{
"name": "Makefile",
"bytes": "2151"
},
{
"name": "Python",
"bytes": "20742"
},
{
"name": "Shell",
"bytes": "3072"
}
],
"symlink_target": ""
} |
"""Nova base exception handling.
Includes decorator for re-raising Nova-type exceptions.
SHOULD include dedicated exception logging.
"""
import functools
import inspect
import sys
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
import six
import webob.exc
from webob import util as woutil
from nova.i18n import _, _LE
from nova import safe_utils
LOG = logging.getLogger(__name__)
# Operator knob: turn msg_fmt interpolation errors (missing/mismatched
# %-keywords) into hard failures instead of logged warnings.
exc_log_opts = [
    cfg.BoolOpt('fatal_exception_format_errors',
                default=False,
                help='Make exception message format errors fatal'),
]

CONF = cfg.CONF
CONF.register_opts(exc_log_opts)
class ConvertedException(webob.exc.WSGIHTTPException):
    """WSGI HTTP exception built from a numeric status code.

    Guarantees a non-empty title, since HTTP's Status-Line requires both
    the numeric code and its reason phrase.
    """

    def __init__(self, code, title="", explanation=""):
        self.code = code
        # There is a strict rule about constructing status line for HTTP:
        # '...Status-Line, consisting of the protocol version followed by a
        # numeric status code and its associated textual phrase, with each
        # element separated by SP characters'
        # (http://www.faqs.org/rfcs/rfc2616.html)
        # 'code' and 'title' can not be empty because they correspond
        # to numeric status code and its associated text
        if not title:
            try:
                title = woutil.status_reasons[self.code]
            except KeyError:
                # Unknown code: log it and fall back to the generic
                # reason for its class (2xx/4xx/5xx...).
                LOG.error(_LE("Improper or unknown HTTP status code used: "
                              "%d"), code)
                title = woutil.status_generic_reasons[self.code // 100]
        self.title = title
        self.explanation = explanation
        super(ConvertedException, self).__init__()
def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict."""
return {k: v for k, v in six.iteritems(original) if "_pass" not in k}
def wrap_exception(notifier=None, get_notifier=None):
    """This decorator wraps a method to catch any exceptions that may
    get thrown. It also optionally sends the exception to the notification
    system.

    The exception is always re-raised (via save_and_reraise_exception);
    notification happens only when a notifier or notifier factory was
    supplied.
    """
    def inner(f):
        def wrapped(self, context, *args, **kw):
            # Don't store self or context in the payload, it now seems to
            # contain confidential information.
            try:
                return f(self, context, *args, **kw)
            except Exception as e:
                # Re-raises the original exception once the block exits.
                with excutils.save_and_reraise_exception():
                    if notifier or get_notifier:
                        payload = dict(exception=e)
                        # Unwrap decorators so getcallargs sees the real
                        # signature of f.
                        wrapped_func = safe_utils.get_wrapped_function(f)
                        call_dict = inspect.getcallargs(wrapped_func, self,
                                                        context, *args, **kw)
                        # self can't be serialized and shouldn't be in the
                        # payload
                        call_dict.pop('self', None)
                        cleansed = _cleanse_dict(call_dict)
                        payload.update({'args': cleansed})
                        # If f has multiple decorators, they must use
                        # functools.wraps to ensure the name is
                        # propagated.
                        event_type = f.__name__
                        (notifier or get_notifier()).error(context,
                                                           event_type,
                                                           payload)
        return functools.wraps(f)(wrapped)
    return inner
class NovaException(Exception):
    """Base Nova Exception

    To correctly use this class, inherit from it and define
    a 'msg_fmt' property. That msg_fmt will get printf'd
    with the keyword arguments provided to the constructor.
    """
    msg_fmt = _("An unknown exception occurred.")
    # Default HTTP status code used when converting to an API response.
    code = 500
    headers = {}
    # Whether the message is safe to expose to API users.
    safe = False

    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        # Make the status code available to msg_fmt unless the caller
        # supplied an explicit 'code' keyword.
        if 'code' not in self.kwargs:
            try:
                self.kwargs['code'] = self.code
            except AttributeError:
                pass

        if not message:
            try:
                message = self.msg_fmt % kwargs
            except Exception:
                exc_info = sys.exc_info()
                # kwargs doesn't match a variable in the message
                # log the issue and the kwargs
                LOG.exception(_LE('Exception in string format operation'))
                for name, value in six.iteritems(kwargs):
                    LOG.error("%s: %s" % (name, value))  # noqa

                if CONF.fatal_exception_format_errors:
                    six.reraise(*exc_info)
                else:
                    # at least get the core message out if something happened
                    message = self.msg_fmt

        self.message = message
        super(NovaException, self).__init__(message)

    def format_message(self):
        # NOTE(mrodden): use the first argument to the python Exception object
        # which should be our full NovaException message, (see __init__)
        return self.args[0]
# -- Concrete exception types -------------------------------------------------
# Each subclass only customizes msg_fmt (and optionally code/ec2_code);
# all formatting behavior lives in NovaException.


class EncryptionFailure(NovaException):
    msg_fmt = _("Failed to encrypt text: %(reason)s")


class DecryptionFailure(NovaException):
    msg_fmt = _("Failed to decrypt text: %(reason)s")


class RevokeCertFailure(NovaException):
    msg_fmt = _("Failed to revoke certificate for %(project_id)s")


class VirtualInterfaceCreateException(NovaException):
    msg_fmt = _("Virtual Interface creation failed")


class VirtualInterfaceMacAddressException(NovaException):
    msg_fmt = _("Creation of virtual interface with "
                "unique mac address failed")


class VirtualInterfacePlugException(NovaException):
    msg_fmt = _("Virtual interface plugin failed")


class GlanceConnectionFailed(NovaException):
    msg_fmt = _("Connection to glance host %(server)s failed: "
                "%(reason)s")


class CinderConnectionFailed(NovaException):
    msg_fmt = _("Connection to cinder host failed: %(reason)s")


class Forbidden(NovaException):
    # ec2_code maps this exception onto the EC2 API error vocabulary.
    ec2_code = 'AuthFailure'
    msg_fmt = _("Not authorized.")
    code = 403


class AdminRequired(Forbidden):
    msg_fmt = _("User does not have admin privileges")


class PolicyNotAuthorized(Forbidden):
    msg_fmt = _("Policy doesn't allow %(action)s to be performed.")
class VolumeLimitExceeded(Forbidden):
msg_fmt = _("Volume resource quota exceeded")
class ImageNotActive(NovaException):
# NOTE(jruzicka): IncorrectState is used for volumes only in EC2,
# but it still seems like the most appropriate option.
ec2_code = 'IncorrectState'
msg_fmt = _("Image %(image_id)s is not active.")
class ImageNotAuthorized(NovaException):
msg_fmt = _("Not authorized for image %(image_id)s.")
class Invalid(NovaException):
msg_fmt = _("Unacceptable parameters.")
code = 400
class InvalidBDM(Invalid):
msg_fmt = _("Block Device Mapping is Invalid.")
class InvalidBDMSnapshot(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get snapshot %(id)s.")
class InvalidBDMVolume(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get volume %(id)s.")
class UnsupportedBDMVolumeAuthMethod(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"%(auth_method)s is unsupported.")
class InvalidBDMImage(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get image %(id)s.")
class InvalidBDMBootSequence(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"Boot sequence for the instance "
"and image/block device mapping "
"combination is not valid.")
class InvalidBDMLocalsLimit(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"You specified more local devices than the "
"limit allows")
class InvalidBDMEphemeralSize(InvalidBDM):
msg_fmt = _("Ephemeral disks requested are larger than "
"the instance type allows.")
class InvalidBDMSwapSize(InvalidBDM):
msg_fmt = _("Swap drive requested is larger than instance type allows.")
class InvalidBDMFormat(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"%(details)s")
class InvalidBDMForLegacy(InvalidBDM):
msg_fmt = _("Block Device Mapping cannot "
"be converted to legacy format. ")
class InvalidBDMVolumeNotBootable(InvalidBDM):
msg_fmt = _("Block Device %(id)s is not bootable.")
class InvalidAttribute(Invalid):
msg_fmt = _("Attribute not supported: %(attr)s")
class ValidationError(Invalid):
msg_fmt = "%(detail)s"
class VolumeAttachFailed(Invalid):
msg_fmt = _("Volume %(volume_id)s could not be attached. "
"Reason: %(reason)s")
class VolumeUnattached(Invalid):
ec2_code = 'IncorrectState'
msg_fmt = _("Volume %(volume_id)s is not attached to anything")
class VolumeNotCreated(NovaException):
msg_fmt = _("Volume %(volume_id)s did not finish being created"
" even after we waited %(seconds)s seconds or %(attempts)s"
" attempts. And its status is %(volume_status)s.")
class VolumeEncryptionNotSupported(Invalid):
msg_fmt = _("Volume encryption is not supported for %(volume_type)s "
"volume %(volume_id)s")
class InvalidKeypair(Invalid):
ec2_code = 'InvalidKeyPair.Format'
msg_fmt = _("Keypair data is invalid: %(reason)s")
class InvalidRequest(Invalid):
msg_fmt = _("The request is invalid.")
class InvalidInput(Invalid):
msg_fmt = _("Invalid input received: %(reason)s")
class InvalidVolume(Invalid):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Invalid volume: %(reason)s")
class InvalidVolumeAccessMode(Invalid):
msg_fmt = _("Invalid volume access mode: %(access_mode)s")
class InvalidMetadata(Invalid):
msg_fmt = _("Invalid metadata: %(reason)s")
class InvalidMetadataSize(Invalid):
msg_fmt = _("Invalid metadata size: %(reason)s")
class InvalidPortRange(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("Invalid port range %(from_port)s:%(to_port)s. %(msg)s")
class InvalidIpProtocol(Invalid):
msg_fmt = _("Invalid IP protocol %(protocol)s.")
class InvalidContentType(Invalid):
msg_fmt = _("Invalid content type %(content_type)s.")
class InvalidAPIVersionString(Invalid):
msg_fmt = _("API Version String %(version)s is of invalid format. Must "
"be of format MajorNum.MinorNum.")
class VersionNotFoundForAPIMethod(Invalid):
msg_fmt = _("API version %(version)s is not supported on this method.")
class InvalidGlobalAPIVersion(Invalid):
msg_fmt = _("Version %(req_ver)s is not supported by the API. Minimum "
"is %(min_ver)s and maximum is %(max_ver)s.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("%(err)s")
class InvalidAggregateAction(Invalid):
msg_fmt = _("Unacceptable parameters.")
code = 400
class InvalidAggregateActionAdd(InvalidAggregateAction):
msg_fmt = _("Cannot add host to aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidAggregateActionDelete(InvalidAggregateAction):
msg_fmt = _("Cannot remove host from aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidAggregateActionUpdate(InvalidAggregateAction):
msg_fmt = _("Cannot update aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidAggregateActionUpdateMeta(InvalidAggregateAction):
msg_fmt = _("Cannot update metadata of aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidGroup(Invalid):
msg_fmt = _("Group not valid. Reason: %(reason)s")
class InvalidSortKey(Invalid):
msg_fmt = _("Sort key supplied was not valid.")
class InvalidStrTime(Invalid):
msg_fmt = _("Invalid datetime string: %(reason)s")
class InvalidName(Invalid):
msg_fmt = _("An invalid 'name' value was provided. "
"The name must be: %(reason)s")
class InstanceInvalidState(Invalid):
msg_fmt = _("Instance %(instance_uuid)s in %(attr)s %(state)s. Cannot "
"%(method)s while the instance is in this state.")
class InstanceNotRunning(Invalid):
msg_fmt = _("Instance %(instance_id)s is not running.")
class InstanceNotInRescueMode(Invalid):
msg_fmt = _("Instance %(instance_id)s is not in rescue mode")
class InstanceNotRescuable(Invalid):
msg_fmt = _("Instance %(instance_id)s cannot be rescued: %(reason)s")
class InstanceNotReady(Invalid):
msg_fmt = _("Instance %(instance_id)s is not ready")
class InstanceSuspendFailure(Invalid):
msg_fmt = _("Failed to suspend instance: %(reason)s")
class InstanceResumeFailure(Invalid):
msg_fmt = _("Failed to resume instance: %(reason)s")
class InstancePowerOnFailure(Invalid):
msg_fmt = _("Failed to power on instance: %(reason)s")
class InstancePowerOffFailure(Invalid):
msg_fmt = _("Failed to power off instance: %(reason)s")
class InstanceRebootFailure(Invalid):
msg_fmt = _("Failed to reboot instance: %(reason)s")
class InstanceTerminationFailure(Invalid):
msg_fmt = _("Failed to terminate instance: %(reason)s")
class InstanceDeployFailure(Invalid):
msg_fmt = _("Failed to deploy instance: %(reason)s")
class MultiplePortsNotApplicable(Invalid):
msg_fmt = _("Failed to launch instances: %(reason)s")
class InvalidFixedIpAndMaxCountRequest(Invalid):
msg_fmt = _("Failed to launch instances: %(reason)s")
class ServiceUnavailable(Invalid):
msg_fmt = _("Service is unavailable at this time.")
class ComputeResourcesUnavailable(ServiceUnavailable):
msg_fmt = _("Insufficient compute resources: %(reason)s.")
class HypervisorUnavailable(NovaException):
msg_fmt = _("Connection to the hypervisor is broken on host: %(host)s")
class ComputeServiceUnavailable(ServiceUnavailable):
msg_fmt = _("Compute service of %(host)s is unavailable at this time.")
class ComputeServiceInUse(NovaException):
msg_fmt = _("Compute service of %(host)s is still in use.")
class UnableToMigrateToSelf(Invalid):
msg_fmt = _("Unable to migrate instance (%(instance_id)s) "
"to current host (%(host)s).")
class InvalidHypervisorType(Invalid):
msg_fmt = _("The supplied hypervisor type of is invalid.")
class HypervisorTooOld(Invalid):
msg_fmt = _("This compute node's hypervisor is older than the minimum "
"supported version: %(version)s.")
class DestinationHypervisorTooOld(Invalid):
msg_fmt = _("The instance requires a newer hypervisor version than "
"has been provided.")
class ServiceTooOld(Invalid):
msg_fmt = _("This service is older (v%(thisver)i) than the minimum "
"(v%(minver)i) version of the rest of the deployment. "
"Unable to continue.")
class DestinationDiskExists(Invalid):
msg_fmt = _("The supplied disk path (%(path)s) already exists, "
"it is expected not to exist.")
class InvalidDevicePath(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is invalid.")
class DevicePathInUse(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is in use.")
code = 409
class DeviceIsBusy(Invalid):
msg_fmt = _("The supplied device (%(device)s) is busy.")
class InvalidCPUInfo(Invalid):
msg_fmt = _("Unacceptable CPU info: %(reason)s")
class InvalidIpAddressError(Invalid):
msg_fmt = _("%(address)s is not a valid IP v4/6 address.")
class InvalidVLANTag(Invalid):
msg_fmt = _("VLAN tag is not appropriate for the port group "
"%(bridge)s. Expected VLAN tag is %(tag)s, "
"but the one associated with the port group is %(pgroup)s.")
class InvalidVLANPortGroup(Invalid):
msg_fmt = _("vSwitch which contains the port group %(bridge)s is "
"not associated with the desired physical adapter. "
"Expected vSwitch is %(expected)s, but the one associated "
"is %(actual)s.")
class InvalidDiskFormat(Invalid):
msg_fmt = _("Disk format %(disk_format)s is not acceptable")
class InvalidDiskInfo(Invalid):
msg_fmt = _("Disk info file is invalid: %(reason)s")
class DiskInfoReadWriteFail(Invalid):
msg_fmt = _("Failed to read or write disk info file: %(reason)s")
class ImageUnacceptable(Invalid):
msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
class ImageBadRequest(Invalid):
msg_fmt = _("Request of image %(image_id)s got BadRequest response: "
"%(response)s")
class InstanceUnacceptable(Invalid):
msg_fmt = _("Instance %(instance_id)s is unacceptable: %(reason)s")
class InvalidEc2Id(Invalid):
msg_fmt = _("Ec2 id %(ec2_id)s is unacceptable.")
class InvalidUUID(Invalid):
msg_fmt = _("Expected a uuid but received %(uuid)s.")
class InvalidID(Invalid):
msg_fmt = _("Invalid ID received %(id)s.")
class ConstraintNotMet(NovaException):
msg_fmt = _("Constraint not met.")
code = 412
class NotFound(NovaException):
msg_fmt = _("Resource could not be found.")
code = 404
class AgentBuildNotFound(NotFound):
msg_fmt = _("No agent-build associated with id %(id)s.")
class AgentBuildExists(NovaException):
msg_fmt = _("Agent-build with hypervisor %(hypervisor)s os %(os)s "
"architecture %(architecture)s exists.")
class VolumeNotFound(NotFound):
ec2_code = 'InvalidVolume.NotFound'
msg_fmt = _("Volume %(volume_id)s could not be found.")
class UndefinedRootBDM(NovaException):
msg_fmt = _("Undefined Block Device Mapping root: BlockDeviceMappingList "
"contains Block Device Mappings from multiple instances.")
class BDMNotFound(NotFound):
msg_fmt = _("No Block Device Mapping with id %(id)s.")
class VolumeBDMNotFound(NotFound):
msg_fmt = _("No volume Block Device Mapping with id %(volume_id)s.")
class VolumeBDMIsMultiAttach(Invalid):
msg_fmt = _("Block Device Mapping %(volume_id)s is a multi-attach volume"
" and is not valid for this operation.")
class VolumeBDMPathNotFound(VolumeBDMNotFound):
msg_fmt = _("No volume Block Device Mapping at path: %(path)s")
class DeviceDetachFailed(NovaException):
    """Raised when detaching a device from an instance fails.

    Expects ``device`` and ``reason`` format kwargs.
    """
    # Fix: the original string ended with a stray ")" after %(reason)s,
    # producing an unbalanced parenthesis in the user-facing message.
    msg_fmt = _("Device detach failed for %(device)s: %(reason)s")
class DeviceNotFound(NotFound):
msg_fmt = _("Device '%(device)s' not found.")
class SnapshotNotFound(NotFound):
ec2_code = 'InvalidSnapshot.NotFound'
msg_fmt = _("Snapshot %(snapshot_id)s could not be found.")
class DiskNotFound(NotFound):
msg_fmt = _("No disk at %(location)s")
class VolumeDriverNotFound(NotFound):
msg_fmt = _("Could not find a handler for %(driver_type)s volume.")
class InvalidImageRef(Invalid):
msg_fmt = _("Invalid image href %(image_href)s.")
class AutoDiskConfigDisabledByImage(Invalid):
msg_fmt = _("Requested image %(image)s "
"has automatic disk resize disabled.")
class ImageNotFound(NotFound):
msg_fmt = _("Image %(image_id)s could not be found.")
class PreserveEphemeralNotSupported(Invalid):
msg_fmt = _("The current driver does not support "
"preserving ephemeral partitions.")
# NOTE(jruzicka): ImageNotFound is not a valid EC2 error code.
class ImageNotFoundEC2(ImageNotFound):
msg_fmt = _("Image %(image_id)s could not be found. The nova EC2 API "
"assigns image ids dynamically when they are listed for the "
"first time. Have you listed image ids since adding this "
"image?")
class ProjectNotFound(NotFound):
msg_fmt = _("Project %(project_id)s could not be found.")
class StorageRepositoryNotFound(NotFound):
msg_fmt = _("Cannot find SR to read/write VDI.")
class InstanceMappingNotFound(NotFound):
msg_fmt = _("Instance %(uuid)s has no mapping to a cell.")
class NetworkDuplicated(Invalid):
msg_fmt = _("Network %(network_id)s is duplicated.")
class NetworkDhcpReleaseFailed(NovaException):
msg_fmt = _("Failed to release IP %(address)s with MAC %(mac_address)s")
class NetworkInUse(NovaException):
msg_fmt = _("Network %(network_id)s is still in use.")
class NetworkSetHostFailed(NovaException):
msg_fmt = _("Network set host failed for network %(network_id)s.")
class NetworkNotCreated(Invalid):
msg_fmt = _("%(req)s is required to create a network.")
class LabelTooLong(Invalid):
msg_fmt = _("Maximum allowed length for 'label' is 255.")
class InvalidIntValue(Invalid):
msg_fmt = _("%(key)s must be an integer.")
class InvalidCidr(Invalid):
msg_fmt = _("%(cidr)s is not a valid IP network.")
class InvalidAddress(Invalid):
msg_fmt = _("%(address)s is not a valid IP address.")
class AddressOutOfRange(Invalid):
msg_fmt = _("%(address)s is not within %(cidr)s.")
class DuplicateVlan(NovaException):
msg_fmt = _("Detected existing vlan with id %(vlan)d")
code = 409
class CidrConflict(NovaException):
msg_fmt = _('Requested cidr (%(cidr)s) conflicts '
'with existing cidr (%(other)s)')
code = 409
class NetworkHasProject(NetworkInUse):
msg_fmt = _('Network must be disassociated from project '
'%(project_id)s before it can be deleted.')
class NetworkNotFound(NotFound):
msg_fmt = _("Network %(network_id)s could not be found.")
class PortNotFound(NotFound):
msg_fmt = _("Port id %(port_id)s could not be found.")
class NetworkNotFoundForBridge(NetworkNotFound):
msg_fmt = _("Network could not be found for bridge %(bridge)s")
class NetworkNotFoundForUUID(NetworkNotFound):
msg_fmt = _("Network could not be found for uuid %(uuid)s")
class NetworkNotFoundForCidr(NetworkNotFound):
msg_fmt = _("Network could not be found with cidr %(cidr)s.")
class NetworkNotFoundForInstance(NetworkNotFound):
msg_fmt = _("Network could not be found for instance %(instance_id)s.")
class NoNetworksFound(NotFound):
msg_fmt = _("No networks defined.")
class NoMoreNetworks(NovaException):
msg_fmt = _("No more available networks.")
class NetworkNotFoundForProject(NetworkNotFound):
msg_fmt = _("Either network uuid %(network_uuid)s is not present or "
"is not assigned to the project %(project_id)s.")
class NetworkAmbiguous(Invalid):
msg_fmt = _("More than one possible network found. Specify "
"network ID(s) to select which one(s) to connect to.")
class NetworkRequiresSubnet(Invalid):
msg_fmt = _("Network %(network_uuid)s requires a subnet in order to boot"
" instances on.")
class ExternalNetworkAttachForbidden(Forbidden):
msg_fmt = _("It is not allowed to create an interface on "
"external network %(network_uuid)s")
class NetworkMissingPhysicalNetwork(NovaException):
msg_fmt = _("Physical network is missing for network %(network_uuid)s")
class VifDetailsMissingVhostuserSockPath(Invalid):
msg_fmt = _("vhostuser_sock_path not present in vif_details"
" for vif %(vif_id)s")
class VifDetailsMissingMacvtapParameters(Invalid):
msg_fmt = _("Parameters %(missing_params)s not present in"
" vif_details for vif %(vif_id)s. Check your Neutron"
" configuration to validate that the macvtap parameters are"
" correct.")
class DatastoreNotFound(NotFound):
msg_fmt = _("Could not find the datastore reference(s) which the VM uses.")
class PortInUse(Invalid):
msg_fmt = _("Port %(port_id)s is still in use.")
class PortRequiresFixedIP(Invalid):
msg_fmt = _("Port %(port_id)s requires a FixedIP in order to be used.")
class PortNotUsable(Invalid):
msg_fmt = _("Port %(port_id)s not usable for instance %(instance)s.")
class PortNotUsableDNS(Invalid):
msg_fmt = _("Port %(port_id)s not usable for instance %(instance)s. "
"Value %(value)s assigned to dns_name attribute does not "
"match instance's hostname %(hostname)s")
class PortNotFree(Invalid):
msg_fmt = _("No free port available for instance %(instance)s.")
class PortBindingFailed(Invalid):
msg_fmt = _("Binding failed for port %(port_id)s, please check neutron "
"logs for more information.")
class FixedIpExists(NovaException):
msg_fmt = _("Fixed IP %(address)s already exists.")
class FixedIpNotFound(NotFound):
msg_fmt = _("No fixed IP associated with id %(id)s.")
class FixedIpNotFoundForAddress(FixedIpNotFound):
msg_fmt = _("Fixed IP not found for address %(address)s.")
class FixedIpNotFoundForInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s has zero fixed IPs.")
class FixedIpNotFoundForNetworkHost(FixedIpNotFound):
msg_fmt = _("Network host %(host)s has zero fixed IPs "
"in network %(network_id)s.")
class FixedIpNotFoundForSpecificInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s doesn't have fixed IP '%(ip)s'.")
class FixedIpNotFoundForNetwork(FixedIpNotFound):
msg_fmt = _("Fixed IP address (%(address)s) does not exist in "
"network (%(network_uuid)s).")
class FixedIpAssociateFailed(NovaException):
msg_fmt = _("Fixed IP associate failed for network: %(net)s.")
class FixedIpAlreadyInUse(NovaException):
msg_fmt = _("Fixed IP address %(address)s is already in use on instance "
"%(instance_uuid)s.")
class FixedIpAssociatedWithMultipleInstances(NovaException):
msg_fmt = _("More than one instance is associated with fixed IP address "
"'%(address)s'.")
class FixedIpInvalid(Invalid):
msg_fmt = _("Fixed IP address %(address)s is invalid.")
class NoMoreFixedIps(NovaException):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("No fixed IP addresses available for network: %(net)s")
class NoFixedIpsDefined(NotFound):
msg_fmt = _("Zero fixed IPs could be found.")
class FloatingIpExists(NovaException):
msg_fmt = _("Floating IP %(address)s already exists.")
class FloatingIpNotFound(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating IP not found for ID %(id)s.")
class FloatingIpDNSExists(Invalid):
msg_fmt = _("The DNS entry %(name)s already exists in domain %(domain)s.")
class FloatingIpNotFoundForAddress(FloatingIpNotFound):
msg_fmt = _("Floating IP not found for address %(address)s.")
class FloatingIpNotFoundForHost(FloatingIpNotFound):
msg_fmt = _("Floating IP not found for host %(host)s.")
class FloatingIpMultipleFoundForAddress(NovaException):
msg_fmt = _("Multiple floating IPs are found for address %(address)s.")
class FloatingIpPoolNotFound(NotFound):
msg_fmt = _("Floating IP pool not found.")
safe = True
class NoMoreFloatingIps(FloatingIpNotFound):
msg_fmt = _("Zero floating IPs available.")
safe = True
class FloatingIpAssociated(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating IP %(address)s is associated.")
class FloatingIpNotAssociated(NovaException):
msg_fmt = _("Floating IP %(address)s is not associated.")
class NoFloatingIpsDefined(NotFound):
msg_fmt = _("Zero floating IPs exist.")
class NoFloatingIpInterface(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Interface %(interface)s not found.")
class FloatingIpAllocateFailed(NovaException):
msg_fmt = _("Floating IP allocate failed.")
class FloatingIpAssociateFailed(NovaException):
msg_fmt = _("Floating IP %(address)s association has failed.")
class FloatingIpBadRequest(Invalid):
ec2_code = "UnsupportedOperation"
msg_fmt = _("The floating IP request failed with a BadRequest")
class CannotDisassociateAutoAssignedFloatingIP(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Cannot disassociate auto assigned floating IP")
class KeypairNotFound(NotFound):
ec2_code = 'InvalidKeyPair.NotFound'
msg_fmt = _("Keypair %(name)s not found for user %(user_id)s")
class ServiceNotFound(NotFound):
msg_fmt = _("Service %(service_id)s could not be found.")
class ServiceBinaryExists(NovaException):
msg_fmt = _("Service with host %(host)s binary %(binary)s exists.")
class ServiceTopicExists(NovaException):
msg_fmt = _("Service with host %(host)s topic %(topic)s exists.")
class HostNotFound(NotFound):
msg_fmt = _("Host %(host)s could not be found.")
class ComputeHostNotFound(HostNotFound):
msg_fmt = _("Compute host %(host)s could not be found.")
class ComputeHostNotCreated(HostNotFound):
msg_fmt = _("Compute host %(name)s needs to be created first"
" before updating.")
class HostBinaryNotFound(NotFound):
msg_fmt = _("Could not find binary %(binary)s on host %(host)s.")
class InvalidReservationExpiration(Invalid):
msg_fmt = _("Invalid reservation expiration %(expire)s.")
class InvalidQuotaValue(Invalid):
msg_fmt = _("Change would make usage less than 0 for the following "
"resources: %(unders)s")
class InvalidQuotaMethodUsage(Invalid):
msg_fmt = _("Wrong quota method %(method)s used on resource %(res)s")
class QuotaNotFound(NotFound):
msg_fmt = _("Quota could not be found")
class QuotaExists(NovaException):
msg_fmt = _("Quota exists for project %(project_id)s, "
"resource %(resource)s")
class QuotaResourceUnknown(QuotaNotFound):
msg_fmt = _("Unknown quota resources %(unknown)s.")
class ProjectUserQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for user %(user_id)s in project %(project_id)s "
"could not be found.")
class ProjectQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for project %(project_id)s could not be found.")
class QuotaClassNotFound(QuotaNotFound):
msg_fmt = _("Quota class %(class_name)s could not be found.")
class QuotaUsageNotFound(QuotaNotFound):
msg_fmt = _("Quota usage for project %(project_id)s could not be found.")
class ReservationNotFound(QuotaNotFound):
msg_fmt = _("Quota reservation %(uuid)s could not be found.")
class OverQuota(NovaException):
msg_fmt = _("Quota exceeded for resources: %(overs)s")
class SecurityGroupNotFound(NotFound):
msg_fmt = _("Security group %(security_group_id)s not found.")
class SecurityGroupNotFoundForProject(SecurityGroupNotFound):
msg_fmt = _("Security group %(security_group_id)s not found "
"for project %(project_id)s.")
class SecurityGroupNotFoundForRule(SecurityGroupNotFound):
msg_fmt = _("Security group with rule %(rule_id)s not found.")
class SecurityGroupExists(Invalid):
ec2_code = 'InvalidGroup.Duplicate'
msg_fmt = _("Security group %(security_group_name)s already exists "
"for project %(project_id)s.")
class SecurityGroupExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is already associated"
" with the instance %(instance_id)s")
class SecurityGroupNotExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is not associated with"
" the instance %(instance_id)s")
class SecurityGroupDefaultRuleNotFound(Invalid):
    """Lookup failure for a security group default rule.

    Expects a ``rule_id`` format kwarg.
    """
    # Fix: the original used "(%rule_id)s", which is not a named mapping
    # key — "%r" is a repr conversion, so %-formatting with the kwargs
    # dict rendered the whole dict's repr instead of the rule id. The
    # correct printf-style named placeholder is "%(rule_id)s".
    msg_fmt = _("Security group default rule (%(rule_id)s) not found.")
class SecurityGroupCannotBeApplied(Invalid):
msg_fmt = _("Network requires port_security_enabled and subnet associated"
" in order to apply security groups.")
class SecurityGroupRuleExists(Invalid):
ec2_code = 'InvalidPermission.Duplicate'
msg_fmt = _("Rule already exists in group: %(rule)s")
class NoUniqueMatch(NovaException):
msg_fmt = _("No Unique Match Found.")
code = 409
class MigrationNotFound(NotFound):
msg_fmt = _("Migration %(migration_id)s could not be found.")
class MigrationNotFoundByStatus(MigrationNotFound):
msg_fmt = _("Migration not found for instance %(instance_id)s "
"with status %(status)s.")
class MigrationNotFoundForInstance(MigrationNotFound):
msg_fmt = _("Migration %(migration_id)s not found for instance "
"%(instance_id)s")
class InvalidMigrationState(Invalid):
msg_fmt = _("Migration %(migration_id)s state of instance "
"%(instance_uuid)s is %(state)s. Cannot %(method)s while the "
"migration is in this state.")
class ConsoleLogOutputException(NovaException):
msg_fmt = _("Console log output could not be retrieved for instance "
"%(instance_id)s. Reason: %(reason)s")
class ConsolePoolNotFound(NotFound):
msg_fmt = _("Console pool %(pool_id)s could not be found.")
class ConsolePoolExists(NovaException):
msg_fmt = _("Console pool with host %(host)s, console_type "
"%(console_type)s and compute_host %(compute_host)s "
"already exists.")
class ConsolePoolNotFoundForHostType(NotFound):
msg_fmt = _("Console pool of type %(console_type)s "
"for compute host %(compute_host)s "
"on proxy host %(host)s not found.")
class ConsoleNotFound(NotFound):
msg_fmt = _("Console %(console_id)s could not be found.")
class ConsoleNotFoundForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s could not be found.")
class ConsoleNotAvailable(NotFound):
msg_fmt = _("Guest does not have a console available.")
class ConsoleNotFoundInPoolForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s "
"in pool %(pool_id)s could not be found.")
class ConsoleTypeInvalid(Invalid):
msg_fmt = _("Invalid console type %(console_type)s")
class ConsoleTypeUnavailable(Invalid):
msg_fmt = _("Unavailable console type %(console_type)s.")
class ConsolePortRangeExhausted(NovaException):
msg_fmt = _("The console port range %(min_port)d-%(max_port)d is "
"exhausted.")
class FlavorNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s could not be found.")
class FlavorNotFoundByName(FlavorNotFound):
msg_fmt = _("Flavor with name %(flavor_name)s could not be found.")
class FlavorAccessNotFound(NotFound):
msg_fmt = _("Flavor access not found for %(flavor_id)s / "
"%(project_id)s combination.")
class FlavorExtraSpecUpdateCreateFailed(NovaException):
msg_fmt = _("Flavor %(id)s extra spec cannot be updated or created "
"after %(retries)d retries.")
class CellNotFound(NotFound):
msg_fmt = _("Cell %(cell_name)s doesn't exist.")
class CellExists(NovaException):
msg_fmt = _("Cell with name %(name)s already exists.")
class CellRoutingInconsistency(NovaException):
msg_fmt = _("Inconsistency in cell routing: %(reason)s")
class CellServiceAPIMethodNotFound(NotFound):
msg_fmt = _("Service API method not found: %(detail)s")
class CellTimeout(NotFound):
msg_fmt = _("Timeout waiting for response from cell")
class CellMaxHopCountReached(NovaException):
msg_fmt = _("Cell message has reached maximum hop count: %(hop_count)s")
class NoCellsAvailable(NovaException):
msg_fmt = _("No cells available matching scheduling criteria.")
class CellsUpdateUnsupported(NovaException):
msg_fmt = _("Cannot update cells configuration file.")
class InstanceUnknownCell(NotFound):
msg_fmt = _("Cell is not known for instance %(instance_uuid)s")
class SchedulerHostFilterNotFound(NotFound):
msg_fmt = _("Scheduler Host Filter %(filter_name)s could not be found.")
class FlavorExtraSpecsNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s has no extra specs with "
"key %(extra_specs_key)s.")
class ComputeHostMetricNotFound(NotFound):
msg_fmt = _("Metric %(name)s could not be found on the compute "
"host node %(host)s.%(node)s.")
class FileNotFound(NotFound):
msg_fmt = _("File %(file_path)s could not be found.")
class SwitchNotFoundForNetworkAdapter(NotFound):
msg_fmt = _("Virtual switch associated with the "
"network adapter %(adapter)s not found.")
class NetworkAdapterNotFound(NotFound):
msg_fmt = _("Network adapter %(adapter)s could not be found.")
class ClassNotFound(NotFound):
msg_fmt = _("Class %(class_name)s could not be found: %(exception)s")
class InstanceTagNotFound(NotFound):
msg_fmt = _("Instance %(instance_id)s has no tag '%(tag)s'")
class RotationRequiredForBackup(NovaException):
msg_fmt = _("Rotation param is required for backup image_type")
class KeyPairExists(NovaException):
ec2_code = 'InvalidKeyPair.Duplicate'
msg_fmt = _("Key pair '%(key_name)s' already exists.")
class InstanceExists(NovaException):
msg_fmt = _("Instance %(name)s already exists.")
class FlavorExists(NovaException):
msg_fmt = _("Flavor with name %(name)s already exists.")
class FlavorIdExists(NovaException):
msg_fmt = _("Flavor with ID %(flavor_id)s already exists.")
class FlavorAccessExists(NovaException):
msg_fmt = _("Flavor access already exists for flavor %(flavor_id)s "
"and project %(project_id)s combination.")
class InvalidSharedStorage(NovaException):
msg_fmt = _("%(path)s is not on shared storage: %(reason)s")
class InvalidLocalStorage(NovaException):
msg_fmt = _("%(path)s is not on local storage: %(reason)s")
class StorageError(NovaException):
msg_fmt = _("Storage error: %(reason)s")
class MigrationError(NovaException):
msg_fmt = _("Migration error: %(reason)s")
class MigrationPreCheckError(MigrationError):
msg_fmt = _("Migration pre-check error: %(reason)s")
class MigrationSchedulerRPCError(MigrationError):
msg_fmt = _("Migration select destinations error: %(reason)s")
class MalformedRequestBody(NovaException):
msg_fmt = _("Malformed message body: %(reason)s")
# NOTE(johannes): NotFound should only be used when returning a 404
# error to the caller is the appropriate response
class ConfigNotFound(NovaException):
msg_fmt = _("Could not find config at %(path)s")
class PasteAppNotFound(NovaException):
msg_fmt = _("Could not load paste app '%(name)s' from %(path)s")
class CannotResizeToSameFlavor(NovaException):
msg_fmt = _("When resizing, instances must change flavor!")
class ResizeError(NovaException):
msg_fmt = _("Resize error: %(reason)s")
class CannotResizeDisk(NovaException):
msg_fmt = _("Server disk was unable to be resized because: %(reason)s")
class FlavorMemoryTooSmall(NovaException):
msg_fmt = _("Flavor's memory is too small for requested image.")
class FlavorDiskTooSmall(NovaException):
msg_fmt = _("The created instance's disk would be too small.")
class FlavorDiskSmallerThanImage(FlavorDiskTooSmall):
msg_fmt = _("Flavor's disk is too small for requested image. Flavor disk "
"is %(flavor_size)i bytes, image is %(image_size)i bytes.")
class FlavorDiskSmallerThanMinDisk(FlavorDiskTooSmall):
msg_fmt = _("Flavor's disk is smaller than the minimum size specified in "
"image metadata. Flavor disk is %(flavor_size)i bytes, "
"minimum size is %(image_min_disk)i bytes.")
class VolumeSmallerThanMinDisk(FlavorDiskTooSmall):
msg_fmt = _("Volume is smaller than the minimum size specified in image "
"metadata. Volume size is %(volume_size)i bytes, minimum "
"size is %(image_min_disk)i bytes.")
class InsufficientFreeMemory(NovaException):
msg_fmt = _("Insufficient free memory on compute node to start %(uuid)s.")
class NoValidHost(NovaException):
msg_fmt = _("No valid host was found. %(reason)s")
class MaxRetriesExceeded(NoValidHost):
msg_fmt = _("Exceeded maximum number of retries. %(reason)s")
class QuotaError(NovaException):
ec2_code = 'ResourceLimitExceeded'
msg_fmt = _("Quota exceeded: code=%(code)s")
# NOTE(cyeoh): 413 should only be used for the ec2 API
# The error status code for out of quota for the nova api should be
# 403 Forbidden.
code = 413
safe = True
class TooManyInstances(QuotaError):
msg_fmt = _("Quota exceeded for %(overs)s: Requested %(req)s,"
" but already used %(used)s of %(allowed)s %(overs)s")
class FloatingIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of floating IPs exceeded")
class FixedIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of fixed IPs exceeded")
class MetadataLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of metadata items exceeds %(allowed)d")
class OnsetFileLimitExceeded(QuotaError):
msg_fmt = _("Personality file limit exceeded")
class OnsetFilePathLimitExceeded(OnsetFileLimitExceeded):
msg_fmt = _("Personality file path too long")
class OnsetFileContentLimitExceeded(OnsetFileLimitExceeded):
msg_fmt = _("Personality file content too long")
class KeypairLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of key pairs exceeded")
class SecurityGroupLimitExceeded(QuotaError):
ec2_code = 'SecurityGroupLimitExceeded'
msg_fmt = _("Maximum number of security groups or rules exceeded")
class PortLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of ports exceeded")
class AggregateError(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s: action '%(action)s' "
"caused an error: %(reason)s.")
class AggregateNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s could not be found.")
class AggregateNameExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_name)s already exists.")
class AggregateHostNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no host %(host)s.")
class AggregateMetadataNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no metadata with "
"key %(metadata_key)s.")
class AggregateHostExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s already has host %(host)s.")
class FlavorCreateFailed(NovaException):
msg_fmt = _("Unable to create flavor")
class InstancePasswordSetFailed(NovaException):
msg_fmt = _("Failed to set admin password on %(instance)s "
"because %(reason)s")
safe = True
class InstanceNotFound(NotFound):
ec2_code = 'InvalidInstanceID.NotFound'
msg_fmt = _("Instance %(instance_id)s could not be found.")
class InstanceInfoCacheNotFound(NotFound):
msg_fmt = _("Info cache for instance %(instance_uuid)s could not be "
"found.")
class InvalidAssociation(NotFound):
ec2_code = 'InvalidAssociationID.NotFound'
msg_fmt = _("Invalid association.")
class MarkerNotFound(NotFound):
msg_fmt = _("Marker %(marker)s could not be found.")
class InvalidInstanceIDMalformed(Invalid):
msg_fmt = _("Invalid id: %(instance_id)s (expecting \"i-...\")")
ec2_code = 'InvalidInstanceID.Malformed'
class InvalidVolumeIDMalformed(Invalid):
    """Raised for a malformed EC2 volume id.

    Expects a ``volume_id`` format kwarg.
    """
    # Fix: the message previously said 'expecting "i-..."', copy-pasted
    # from InvalidInstanceIDMalformed. EC2 volume identifiers use the
    # "vol-" prefix, not the instance "i-" prefix.
    msg_fmt = _("Invalid id: %(volume_id)s (expecting \"vol-...\")")
    ec2_code = 'InvalidVolumeID.Malformed'
class CouldNotFetchImage(NovaException):
msg_fmt = _("Could not fetch image %(image_id)s")
class CouldNotUploadImage(NovaException):
msg_fmt = _("Could not upload image %(image_id)s")
class TaskAlreadyRunning(NovaException):
msg_fmt = _("Task %(task_name)s is already running on host %(host)s")
class TaskNotRunning(NovaException):
msg_fmt = _("Task %(task_name)s is not running on host %(host)s")
class InstanceIsLocked(InstanceInvalidState):
msg_fmt = _("Instance %(instance_uuid)s is locked")
class ConfigDriveInvalidValue(Invalid):
msg_fmt = _("Invalid value for Config Drive option: %(option)s")
class ConfigDriveUnsupportedFormat(Invalid):
msg_fmt = _("Config drive format '%(format)s' is not supported.")
class ConfigDriveMountFailed(NovaException):
msg_fmt = _("Could not mount vfat config drive. %(operation)s failed. "
"Error: %(error)s")
class ConfigDriveUnknownFormat(NovaException):
msg_fmt = _("Unknown config drive format %(format)s. Select one of "
"iso9660 or vfat.")
class ConfigDriveNotFound(NotFound):
msg_fmt = _("Instance %(instance_uuid)s requires config drive, but it "
"does not exist.")
class InterfaceAttachFailed(Invalid):
msg_fmt = _("Failed to attach network adapter device to "
"%(instance_uuid)s")
class InterfaceAttachFailedNoNetwork(InterfaceAttachFailed):
msg_fmt = _("No specific network was requested and none are available "
"for project '%(project_id)s'.")
class InterfaceDetachFailed(Invalid):
msg_fmt = _("Failed to detach network adapter device from "
"%(instance_uuid)s")
class InstanceUserDataTooLarge(NovaException):
msg_fmt = _("User data too large. User data must be no larger than "
"%(maxsize)s bytes once base64 encoded. Your data is "
"%(length)d bytes")
class InstanceUserDataMalformed(NovaException):
msg_fmt = _("User data needs to be valid base 64.")
class InstanceUpdateConflict(NovaException):
msg_fmt = _("Conflict updating instance %(instance_uuid)s. "
"Expected: %(expected)s. Actual: %(actual)s")
class UnknownInstanceUpdateConflict(InstanceUpdateConflict):
msg_fmt = _("Conflict updating instance %(instance_uuid)s, but we were "
"unable to determine the cause")
class UnexpectedTaskStateError(InstanceUpdateConflict):
pass
class UnexpectedDeletingTaskStateError(UnexpectedTaskStateError):
pass
class InstanceActionNotFound(NovaException):
msg_fmt = _("Action for request_id %(request_id)s on instance"
" %(instance_uuid)s not found")
class InstanceActionEventNotFound(NovaException):
msg_fmt = _("Event %(event)s not found for action id %(action_id)s")
class CryptoCAFileNotFound(FileNotFound):
msg_fmt = _("The CA file for %(project)s could not be found")
class CryptoCRLFileNotFound(FileNotFound):
msg_fmt = _("The CRL file for %(project)s could not be found")
class InstanceRecreateNotSupported(Invalid):
msg_fmt = _('Instance recreate is not supported.')
class DBNotAllowed(NovaException):
msg_fmt = _('%(binary)s attempted direct database access which is '
'not allowed by policy')
class UnsupportedVirtType(Invalid):
msg_fmt = _("Virtualization type '%(virt)s' is not supported by "
"this compute driver")
class UnsupportedHardware(Invalid):
msg_fmt = _("Requested hardware '%(model)s' is not supported by "
"the '%(virt)s' virt driver")
class Base64Exception(NovaException):
msg_fmt = _("Invalid Base 64 data for file %(path)s")
class BuildAbortException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s aborted: %(reason)s")
class RescheduledException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s was re-scheduled: "
"%(reason)s")
class ShadowTableExists(NovaException):
msg_fmt = _("Shadow table with name %(name)s already exists.")
class InstanceFaultRollback(NovaException):
    """Signals that an instance fault triggered a rollback.

    The exception that caused the fault is retained on
    ``inner_exception`` so callers can inspect or re-raise it.
    """

    def __init__(self, inner_exception=None):
        # Keep the causing exception available to handlers.
        self.inner_exception = inner_exception
        msg = _("Instance rollback performed due to: %s") % inner_exception
        super(InstanceFaultRollback, self).__init__(msg)
class OrphanedObjectError(NovaException):
msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object')
class ObjectActionError(NovaException):
msg_fmt = _('Object action %(action)s failed because: %(reason)s')
class CoreAPIMissing(NovaException):
msg_fmt = _("Core API extensions are missing: %(missing_apis)s")
class AgentError(NovaException):
msg_fmt = _('Error during following call to agent: %(method)s')
class AgentTimeout(AgentError):
msg_fmt = _('Unable to contact guest agent. '
'The following call timed out: %(method)s')
class AgentNotImplemented(AgentError):
msg_fmt = _('Agent does not support the call: %(method)s')
class InstanceGroupNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s could not be found.")
class InstanceGroupIdExists(NovaException):
msg_fmt = _("Instance group %(group_uuid)s already exists.")
class InstanceGroupMemberNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no member with "
"id %(instance_id)s.")
class InstanceGroupPolicyNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no policy %(policy)s.")
class InstanceGroupSaveException(NovaException):
msg_fmt = _("%(field)s should not be part of the updates.")
class PluginRetriesExceeded(NovaException):
msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.")
class ImageDownloadModuleError(NovaException):
msg_fmt = _("There was an error with the download module %(module)s. "
"%(reason)s")
class ImageDownloadModuleMetaDataError(ImageDownloadModuleError):
msg_fmt = _("The metadata for this location will not work with this "
"module %(module)s. %(reason)s.")
class ImageDownloadModuleNotImplementedError(ImageDownloadModuleError):
msg_fmt = _("The method %(method_name)s is not implemented.")
class ImageDownloadModuleConfigurationError(ImageDownloadModuleError):
msg_fmt = _("The module %(module)s is misconfigured: %(reason)s.")
class SignatureVerificationError(NovaException):
msg_fmt = _("Signature verification for the image "
"failed: %(reason)s.")
class ResourceMonitorError(NovaException):
msg_fmt = _("Error when creating resource monitor: %(monitor)s")
class PciDeviceWrongAddressFormat(NovaException):
msg_fmt = _("The PCI address %(address)s has an incorrect format.")
class PciDeviceInvalidAddressField(NovaException):
msg_fmt = _("Invalid PCI Whitelist: "
"The PCI address %(address)s has an invalid %(field)s.")
class PciDeviceInvalidDeviceName(NovaException):
msg_fmt = _("Invalid PCI Whitelist: "
"The PCI whitelist can specify devname or address,"
" but not both")
class PciDeviceNotFoundById(NotFound):
msg_fmt = _("PCI device %(id)s not found")
class PciDeviceNotFound(NotFound):
msg_fmt = _("PCI Device %(node_id)s:%(address)s not found.")
class PciDeviceInvalidStatus(Invalid):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is %(status)s "
"instead of %(hopestatus)s")
class PciDeviceVFInvalidStatus(Invalid):
msg_fmt = _(
"Not all Virtual Functions of PF %(compute_node_id)s:%(address)s "
"are free.")
class PciDevicePFInvalidStatus(Invalid):
msg_fmt = _(
"Physical Function %(compute_node_id)s:%(address)s, related to VF"
" %(compute_node_id)s:%(vf_address)s is %(status)s "
"instead of %(hopestatus)s")
class PciDeviceInvalidOwner(Invalid):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is owned by %(owner)s "
"instead of %(hopeowner)s")
class PciDeviceRequestFailed(NovaException):
msg_fmt = _(
"PCI device request %(requests)s failed")
class PciDevicePoolEmpty(NovaException):
msg_fmt = _(
"Attempt to consume PCI device %(compute_node_id)s:%(address)s "
"from empty pool")
class PciInvalidAlias(Invalid):
msg_fmt = _("Invalid PCI alias definition: %(reason)s")
class PciRequestAliasNotDefined(NovaException):
msg_fmt = _("PCI alias %(alias)s is not defined")
class MissingParameter(NovaException):
ec2_code = 'MissingParameter'
msg_fmt = _("Not enough parameters: %(reason)s")
code = 400
class PciConfigInvalidWhitelist(Invalid):
msg_fmt = _("Invalid PCI devices Whitelist config %(reason)s")
# Cannot be templated, msg needs to be constructed when raised.
class InternalError(NovaException):
ec2_code = 'InternalError'
msg_fmt = "%(err)s"
class PciDevicePrepareFailed(NovaException):
msg_fmt = _("Failed to prepare PCI device %(id)s for instance "
"%(instance_uuid)s: %(reason)s")
class PciDeviceDetachFailed(NovaException):
msg_fmt = _("Failed to detach PCI device %(dev)s: %(reason)s")
class PciDeviceUnsupportedHypervisor(NovaException):
msg_fmt = _("%(type)s hypervisor does not support PCI devices")
class KeyManagerError(NovaException):
msg_fmt = _("Key manager error: %(reason)s")
class VolumesNotRemoved(Invalid):
    msg_fmt = _("Failed to remove volume(s): (%(reason)s)")
# Guest device / video configuration errors.
class InvalidVideoMode(Invalid):
    msg_fmt = _("Provided video model (%(model)s) is not supported.")
class RngDeviceNotExist(Invalid):
    msg_fmt = _("The provided RNG device path: (%(path)s) is not "
                "present on the host.")
class RequestedVRamTooHigh(NovaException):
    msg_fmt = _("The requested amount of video memory %(req_vram)d is higher "
                "than the maximum allowed by flavor %(max_vram)d.")
class InvalidWatchdogAction(Invalid):
    msg_fmt = _("Provided watchdog action (%(action)s) is not supported.")
# Live-migration preconditions and version-compatibility errors.
class NoLiveMigrationForConfigDriveInLibVirt(NovaException):
    msg_fmt = _("Live migration of instances with config drives is not "
                "supported in libvirt unless libvirt instance path and "
                "drive data is shared across compute nodes.")
class LiveMigrationWithOldNovaNotSafe(NovaException):
    msg_fmt = _("Host %(server)s is running an old version of Nova, "
                "live migrations involving that version may cause data loss. "
                "Upgrade Nova on %(server)s and try again.")
class LiveMigrationWithOldNovaNotSupported(NovaException):
    msg_fmt = _("Live migration with API v2.25 requires all the Mitaka "
                "upgrade to be complete before it is available.")
class LiveMigrationURINotAvailable(NovaException):
    msg_fmt = _('No live migration URI configured and no default available '
                'for "%(virt_type)s" hypervisor virtualization type.')
class UnshelveException(NovaException):
    msg_fmt = _("Error during unshelve instance %(instance_id)s: %(reason)s")
# vCPU topology validation errors (from image properties vs. flavor limits).
class ImageVCPULimitsRangeExceeded(Invalid):
    msg_fmt = _("Image vCPU limits %(sockets)d:%(cores)d:%(threads)d "
                "exceeds permitted %(maxsockets)d:%(maxcores)d:%(maxthreads)d")
class ImageVCPUTopologyRangeExceeded(Invalid):
    msg_fmt = _("Image vCPU topology %(sockets)d:%(cores)d:%(threads)d "
                "exceeds permitted %(maxsockets)d:%(maxcores)d:%(maxthreads)d")
class ImageVCPULimitsRangeImpossible(Invalid):
    msg_fmt = _("Requested vCPU limits %(sockets)d:%(cores)d:%(threads)d "
                "are impossible to satisfy for vcpus count %(vcpus)d")
class InvalidArchitectureName(Invalid):
    msg_fmt = _("Architecture name '%(arch)s' is not recognised")
# Guest NUMA topology validation errors.
class ImageNUMATopologyIncomplete(Invalid):
    msg_fmt = _("CPU and memory allocation must be provided for all "
                "NUMA nodes")
class ImageNUMATopologyForbidden(Forbidden):
    msg_fmt = _("Image property '%(name)s' is not permitted to override "
                "NUMA configuration set against the flavor")
class ImageNUMATopologyAsymmetric(Invalid):
    msg_fmt = _("Asymmetric NUMA topologies require explicit assignment "
                "of CPUs and memory to nodes in image or flavor")
class ImageNUMATopologyCPUOutOfRange(Invalid):
    msg_fmt = _("CPU number %(cpunum)d is larger than max %(cpumax)d")
class ImageNUMATopologyCPUDuplicates(Invalid):
    msg_fmt = _("CPU number %(cpunum)d is assigned to two nodes")
class ImageNUMATopologyCPUsUnassigned(Invalid):
    msg_fmt = _("CPU number %(cpuset)s is not assigned to any node")
class ImageNUMATopologyMemoryOutOfRange(Invalid):
    msg_fmt = _("%(memsize)d MB of memory assigned, but expected "
                "%(memtotal)d MB")
class InvalidHostname(Invalid):
    msg_fmt = _("Invalid characters in hostname '%(hostname)s'")
class NumaTopologyNotFound(NotFound):
    msg_fmt = _("Instance %(instance_uuid)s does not specify a NUMA topology")
class MigrationContextNotFound(NotFound):
    msg_fmt = _("Instance %(instance_uuid)s does not specify a migration "
                "context.")
# Serial console port allocation errors.
class SocketPortRangeExhaustedException(NovaException):
    msg_fmt = _("Not able to acquire a free port for %(host)s")
class SocketPortInUseException(NovaException):
    msg_fmt = _("Not able to bind %(host)s:%(port)d, %(error)s")
class ImageSerialPortNumberInvalid(Invalid):
    msg_fmt = _("Number of serial ports '%(num_ports)s' specified in "
                "'%(property)s' isn't valid.")
class ImageSerialPortNumberExceedFlavorValue(Invalid):
    msg_fmt = _("Forbidden to exceed flavor value of number of serial "
                "ports passed in image meta.")
class InvalidImageConfigDrive(Invalid):
    msg_fmt = _("Image's config drive option '%(config_drive)s' is invalid")
class InvalidHypervisorVirtType(Invalid):
    msg_fmt = _("Hypervisor virtualization type '%(hv_type)s' is not "
                "recognised")
class InvalidVirtualMachineMode(Invalid):
    msg_fmt = _("Virtual machine mode '%(vmmode)s' is not recognised")
class InvalidToken(Invalid):
    msg_fmt = _("The token '%(token)s' is invalid or has expired")
class InvalidConnectionInfo(Invalid):
    msg_fmt = _("Invalid Connection Info")
# Guest-agent / quiesce capability errors.
class InstanceQuiesceNotSupported(Invalid):
    msg_fmt = _('Quiescing is not supported in instance %(instance_id)s')
class QemuGuestAgentNotEnabled(Invalid):
    msg_fmt = _('QEMU guest agent is not enabled')
class SetAdminPasswdNotSupported(Invalid):
    msg_fmt = _('Set admin password is not supported')
# Huge-page / memory page size configuration errors.
class MemoryPageSizeInvalid(Invalid):
    msg_fmt = _("Invalid memory page size '%(pagesize)s'")
class MemoryPageSizeForbidden(Invalid):
    msg_fmt = _("Page size %(pagesize)s forbidden against '%(against)s'")
class MemoryPageSizeNotSupported(Invalid):
    msg_fmt = _("Page size %(pagesize)s is not supported by the host.")
# CPU pinning errors.
class CPUPinningNotSupported(Invalid):
    msg_fmt = _("CPU pinning is not supported by the host: "
                "%(reason)s")
class CPUPinningInvalid(Invalid):
    msg_fmt = _("Cannot pin/unpin cpus %(requested)s from the following "
                "pinned set %(pinned)s")
class CPUPinningUnknown(Invalid):
    msg_fmt = _("CPU set to pin/unpin %(requested)s must be a subset of "
                "known CPU set %(cpuset)s")
class ImageCPUPinningForbidden(Forbidden):
    msg_fmt = _("Image property 'hw_cpu_policy' is not permitted to override "
                "CPU pinning policy set against the flavor")
class ImageCPUThreadPolicyForbidden(Forbidden):
    msg_fmt = _("Image property 'hw_cpu_thread_policy' is not permitted to "
                "override CPU thread pinning policy set against the flavor")
class UnsupportedPolicyException(Invalid):
    msg_fmt = _("ServerGroup policy is not supported: %(reason)s")
class CellMappingNotFound(NotFound):
    msg_fmt = _("Cell %(uuid)s has no mapping.")
class NUMATopologyUnsupported(Invalid):
    msg_fmt = _("Host does not support guests with NUMA topology set")
class MemoryPagesUnsupported(Invalid):
    msg_fmt = _("Host does not support guests with custom memory page sizes")
# Versioned-object field errors.
class EnumFieldInvalid(Invalid):
    msg_fmt = _('%(typename)s in %(fieldname)s is not an instance of Enum')
class EnumFieldUnset(Invalid):
    msg_fmt = _('%(fieldname)s missing field type')
class InvalidImageFormat(Invalid):
    msg_fmt = _("Invalid image format '%(format)s'")
class UnsupportedImageModel(Invalid):
    msg_fmt = _("Image model '%(image)s' is not supported")
class HostMappingNotFound(Invalid):
    msg_fmt = _("Host '%(name)s' is not mapped to any cell")
# Realtime / CPU thread policy configuration errors.
class RealtimeConfigurationInvalid(Invalid):
    msg_fmt = _("Cannot set realtime policy in a non dedicated "
                "cpu pinning policy")
class CPUThreadPolicyConfigurationInvalid(Invalid):
    msg_fmt = _("Cannot set cpu thread pinning policy in a non dedicated "
                "cpu pinning policy")
class RequestSpecNotFound(NotFound):
    msg_fmt = _("RequestSpec not found for instance %(instance_uuid)s")
class UEFINotSupported(Invalid):
    msg_fmt = _("UEFI is not supported")
class TriggerCrashDumpNotSupported(Invalid):
    msg_fmt = _("Triggering crash dump is not supported")
class UnsupportedHostCPUControlPolicy(Invalid):
    msg_fmt = _("Requested CPU control policy not supported by host")
class RealtimePolicyNotSupported(Invalid):
    msg_fmt = _("Realtime policy not supported by hypervisor")
class RealtimeMaskNotFoundOrInvalid(Invalid):
    msg_fmt = _("Realtime policy needs vCPU(s) mask configured with at least "
                "1 RT vCPU and 1 ordinary vCPU. See hw:cpu_realtime_mask "
                "or hw_cpu_realtime_mask")
class OsInfoNotFound(NotFound):
    msg_fmt = _("No configuration information found for operating system "
                "%(os_name)s")
class BuildRequestNotFound(NotFound):
    msg_fmt = _("BuildRequest not found for instance %(uuid)s")
class InvalidReservedMemoryPagesOption(Invalid):
    msg_fmt = _("The format of the option 'reserved_memory_pages' is invalid. "
                "(found '%(conf)s') Please refer to the nova "
                "config-reference.")
| {
"content_hash": "bf8b81b7a136cb9165d8807aa7f6f4f7",
"timestamp": "",
"source": "github",
"line_count": 2108,
"max_line_length": 79,
"avg_line_length": 29.25189753320683,
"alnum_prop": 0.6707750190551871,
"repo_name": "CEG-FYP-OpenStack/scheduler",
"id": "d12b42d0b76d2fc895d2a833b35c1a67c3fa6584",
"size": "62395",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/exception.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17233936"
},
{
"name": "Shell",
"bytes": "36943"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
} |
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404, render, redirect
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
from app.ldaputils import get_attrs_of_sciper
from members.forms2 import MembershipAddForm, MembershipImportForm, MembershipImportListForm
from generic.datatables import generic_list_json
from users.models import TruffeUser
import json
import re
import string
import uuid
@login_required
def membership_add(request, pk):
    """Add a user to a MemberSet; unknown usernames are created from LDAP."""
    from members.models import MemberSet, MemberSetLogging
    memberset = get_object_or_404(MemberSet, pk=pk)
    if not memberset.rights_can('EDIT', request.user):
        raise Http404
    done = False
    done_user = None
    if request.method == 'POST':
        form = MembershipAddForm(request.user, memberset, request.POST)
        if form.is_valid():
            # Look up the user by username; if unknown, create the account
            # with name/email attributes fetched from LDAP.
            try:
                user = TruffeUser.objects.get(username=form.cleaned_data['user'])
            except TruffeUser.DoesNotExist:
                user = TruffeUser(username=form.cleaned_data['user'], is_active=True)
                user.last_name, user.first_name, user.email = get_attrs_of_sciper(user.username)
                user.save()
            if memberset.membership_set.filter(user=user, end_date=None).exists():
                # Already an active member (end_date is None): nothing to do.
                pass
            else:
                membership = form.save(commit=False)
                membership.group = memberset
                membership.user = user
                membership.save()
                MemberSetLogging(who=request.user, what='edited', object=memberset, extra_data='{"edited": {"%s": ["None", "Membre"]}}' % (user.get_full_name(),)).save()
            done_user = user
            done = True
            # Fresh, empty form so another member can be added right away.
            form = MembershipAddForm(request.user, memberset)
    else:
        form = MembershipAddForm(request.user, memberset)
    return render(request, 'members/membership/add.html', {'form': form, 'done': done, 'done_user': done_user, 'group': memberset})
@login_required
def membership_delete(request, pk):
    """Delete a membership"""
    from members.models import Membership, MemberSetLogging
    membership = get_object_or_404(Membership, pk=pk)
    if not membership.group.rights_can('DELETE', request.user):
        raise Http404
    if request.method == 'POST':
        # Memberships are never hard-deleted: setting end_date closes the
        # membership while keeping its history.
        membership.end_date = now()
        membership.save()
        MemberSetLogging(who=request.user, what='edited', object=membership.group, extra_data='{"edited": {"%s": ["Membre", "None"]}}' % (membership.user.get_full_name(),)).save()
        messages.success(request, _(u'Membre retiré !'))
        return redirect('members.views.memberset_show', membership.group.pk)
    # GET: render the confirmation page.
    return render(request, 'members/membership/delete.html', {'membership': membership})
@login_required
def membership_toggle_fees(request, pk):
    """Flip the 'payed_fees' flag of a membership and log the change."""
    from members.models import Membership, MemberSetLogging
    membership = get_object_or_404(Membership, pk=pk)
    if not membership.group.rights_can('EDIT', request.user):
        raise Http404
    membership.payed_fees = not membership.payed_fees
    membership.save()
    # At this point 'not membership.payed_fees' is the pre-toggle value, so
    # the log records the old -> new transition.
    MemberSetLogging(who=request.user, what='edited', object=membership.group,
                     extra_data='{"edited": {"Cotisation %s": ["%s ", "%s"]}}' % (membership.user.get_full_name(), not membership.payed_fees, membership.payed_fees)).save()
    messages.success(request, _(u'Cotisation mise à jour !'))
    return redirect('members.views.memberset_show', membership.group.pk)
@login_required
@csrf_exempt
def membership_list_json(request, pk):
    """Display the list of members, json call for the list"""
    from members.models import MemberSet, Membership
    # Get the MemberSet
    memberset = get_object_or_404(MemberSet, pk=pk)
    # Check user access
    if not memberset.rights_can('SHOW', request.user):
        raise Http404
    # Filter by group and check they are still in the group
    # (end_date=None means the membership is currently active).
    filter2 = lambda x: x.filter(group=memberset, end_date=None)
    # Delegate pagination/sorting/search to the generic datatables helper.
    return generic_list_json(request, Membership, ['user', 'start_date', 'payed_fees', 'group', 'pk'], 'members/membership/list_json.html', bonus_data={'handle_fees': memberset.handle_fees}, filter_fields=['user__first_name', 'user__last_name', 'user__username'], bonus_filter_function=filter2, columns_mapping={'user': 'user__first_name'})
@login_required
def export_members(request, pk):
    """Export the active members of a MemberSet as a downloadable JSON file.

    The payload is a list of ``(username, payed_fees)`` pairs for
    memberships with ``end_date=None``; :func:`import_members` consumes
    this format.
    """
    from members.models import MemberSet
    # Fix: use get_object_or_404 like every other view in this module;
    # MemberSet.objects.get raised an unhandled DoesNotExist (HTTP 500) for
    # an unknown pk.
    memberset = get_object_or_404(MemberSet, pk=pk)
    if not memberset.rights_can('EDIT', request.user):
        raise Http404
    list_users = map(lambda mship: (mship.user.username, mship.payed_fees), memberset.membership_set.filter(end_date=None))
    # Fix: 'content_type' replaces the long-deprecated 'mimetype' kwarg
    # (removed in Django 1.7); behaviour is identical on older versions.
    response = HttpResponse(json.dumps(list_users), content_type='application/force-download')
    # Sanitize the group name and timestamp so the filename only contains
    # safe characters.
    response['Content-Disposition'] = 'attachment; filename=export_%s_%s.json' % (filter(lambda x: x in string.ascii_letters + string.digits, memberset.name), filter(lambda x: x in string.ascii_letters + string.digits + '._', str(now())),)
    return response
@login_required
def import_members(request, pk):
    """Import members into a MemberSet from an uploaded JSON file.

    Accepts the format produced by export_members: a list whose entries
    are either a bare username/sciper or a [username, payed_fees] pair.
    Collects per-user status messages in ``logs`` for display.
    """
    from members.models import MemberSet, Membership, MemberSetLogging
    memberset = get_object_or_404(MemberSet, pk=pk)
    if not memberset.rights_can('EDIT', request.user):
        raise Http404
    logs = []
    if request.method == 'POST':
        form = MembershipImportForm(request.user, memberset, request.POST, request.FILES)
        if form.is_valid():
            edition_extra_data = {}
            try:
                imp_file = json.loads(request.FILES['imported'].read())
                for user_data in imp_file:
                    # Entry is either a bare username or a
                    # [username, payed_fees] pair; anything else is skipped.
                    if isinstance(user_data, (int, str, unicode)):
                        username = str(user_data)
                        fees = False
                    elif type(user_data) is list:
                        username = user_data[0]
                        fees = len(user_data) > 1 and user_data[1]
                    else:
                        continue
                    try:
                        user = TruffeUser.objects.get(username=username)
                    except TruffeUser.DoesNotExist:
                        # Only 6-digit scipers can be auto-created from LDAP.
                        if re.match('^\d{6}$', username):
                            user = TruffeUser(username=username, is_active=True)
                            user.last_name, user.first_name, user.email = get_attrs_of_sciper(username)
                            user.save()
                        else:
                            logs.append(('danger', username, _(u'Impossible de créer l\'utilisateur')))
                            user = None
                    if user:
                        if memberset.membership_set.filter(user=user, end_date=None).exists():
                            logs.append(('warning', user, _(u'L\'utilisateur est déjà membre de ce groupe')))
                        else:
                            # Copy the fees status if asked
                            payed_fees = form.cleaned_data.get('copy_fees_status', False) and fees
                            Membership(group=memberset, user=user, payed_fees=payed_fees).save()
                            logs.append(('success', user, _(u'Utilisateur ajouté avec succès')))
                            edition_extra_data[user.get_full_name()] = ["None", "Membre"]
                MemberSetLogging(who=request.user, what='edited', object=memberset, extra_data=json.dumps({'edited': edition_extra_data})).save()
            except ValueError:
                # json.loads failed: report and abort the whole import.
                logs.append(('danger', _(u'ERREUR'), _(u'Le fichier ne peut pas être lu correctement, l\'import a été annulé')))
    else:
        form = MembershipImportForm(request.user, memberset)
    # Sort so messages of the same severity are grouped together.
    logs.sort(key=lambda x: x[0])
    return render(request, 'members/membership/import.html', {'form': form, 'logs': logs, 'group': memberset, 'display_list_panel': True})
@login_required
def import_members_list(request, pk):
    """Import members into a MemberSet from a pasted list of usernames.

    One username per line in the form's 'data' field; all created
    memberships get the fee status selected in the form.
    """
    from members.models import MemberSet, Membership, MemberSetLogging
    memberset = get_object_or_404(MemberSet, pk=pk)
    if not memberset.rights_can('EDIT', request.user):
        raise Http404
    logs = []
    if request.method == 'POST':
        form = MembershipImportListForm(request.POST, request.FILES)
        if form.is_valid():
            edition_extra_data = {}
            for username in form.cleaned_data['data'].split('\n'):
                username = username.strip()
                if username:
                    try:
                        user = TruffeUser.objects.get(username=username)
                    except TruffeUser.DoesNotExist:
                        # Only 6-digit scipers can be auto-created from LDAP.
                        if re.match('^\d{6}$', username):
                            user = TruffeUser(username=username, is_active=True)
                            user.last_name, user.first_name, user.email = get_attrs_of_sciper(username)
                            user.save()
                        else:
                            logs.append(('danger', username, _(u'Impossible de créer l\'utilisateur')))
                            user = None
                    if user:
                        if memberset.membership_set.filter(user=user, end_date=None).exists():
                            logs.append(('warning', user, _(u'L\'utilisateur est déjà membre de ce groupe')))
                        else:
                            Membership(group=memberset, user=user, payed_fees=form.cleaned_data['fee_status']).save()
                            logs.append(('success', user, _(u'Utilisateur ajouté avec succès')))
                            edition_extra_data[user.get_full_name()] = ["None", "Membre"]
            MemberSetLogging(who=request.user, what='edited', object=memberset, extra_data=json.dumps({'edited': edition_extra_data})).save()
    else:
        form = MembershipImportListForm()
    # Sort so messages of the same severity are grouped together.
    logs.sort(key=lambda x: x[0])
    return render(request, 'members/membership/import_list.html', {'form': form, 'logs': logs, 'group': memberset, 'display_list_panel': True})
@login_required
def memberset_info_api(request, pk):
    """Show API connection info for a MemberSet; POST regenerates the key."""
    from members.models import MemberSet, MemberSetLogging
    memberset = get_object_or_404(MemberSet, pk=pk)
    if not memberset.rights_can('EDIT', request.user):
        raise Http404
    key_changed = False
    if request.method == 'POST':
        # Rotating the secret immediately invalidates any client still
        # using the previous key (memberset_api compares it verbatim).
        memberset.api_secret_key = str(uuid.uuid4())
        memberset.save()
        key_changed = True
        MemberSetLogging(who=request.user, what='edited', object=memberset, extra_data=json.dumps({'edited': {'api_secret_key': ['', 'Key changed']}})).save()
    return render(request, 'members/memberset/info_api.html', {'obj': memberset, 'key_changed': key_changed, 'website_path': settings.WEBSITE_PATH})
@csrf_exempt
def memberset_api(request, pk):
    """Key-authenticated JSON API for reading and modifying a MemberSet.

    Authentication: the group's secret key, via the ``X-Truffe2-Key``
    header or a ``?key=`` query parameter. Any auth failure (missing key,
    no key configured, mismatch) is reported as a plain 404.

    Methods:
      * GET    -- list active members.
      * PUT    -- add or update one member ({'member': {'sciper': ...}}).
      * DELETE -- close one membership.
      * POST   -- full sync: members absent from the posted list are removed.
    """
    from members.models import MemberSet, MemberSetLogging, Membership
    key = request.META.get('HTTP_X_TRUFFE2_KEY', request.GET.get('key'))
    if not key:
        raise Http404
    memberset = get_object_or_404(MemberSet, pk=pk)
    if not memberset.api_secret_key:
        raise Http404
    if key != memberset.api_secret_key:
        raise Http404
    # API-driven changes are attributed to the system user in the logs.
    system_user = TruffeUser.objects.get(pk=settings.SYSTEM_USER_PK)
    result = {'error': 'WRONG_METHOD'}
    if request.method == 'GET':
        result = []
        for member in memberset.membership_set.filter(end_date=None):
            data = {
                'sciper': member.user.username,
                'added_date': str(member.start_date)
            }
            if memberset.handle_fees:
                data['payed_fees'] = member.payed_fees
            result.append(data)
        result = {'members': result}
    if request.method in ['PUT', 'POST', 'DELETE']:
        # All mutating methods require a JSON body.
        try:
            body_data = json.loads(request.body)
        except:
            r = HttpResponse(json.dumps({'error': 'JSON_PARSE_ERROR'}))
            r.content_type = 'application/json'
            return r
        if request.method == 'PUT':
            if 'member' not in body_data:
                result = {'error': 'MISSING_MEMBER'}
            else:
                if 'sciper' not in body_data['member']:
                    result = {'error': 'MISSING_SCIPER'}
                else:
                    # Unknown users are created from LDAP; a missing email
                    # means the sciper could not be resolved.
                    try:
                        user = TruffeUser.objects.get(username=body_data['member']['sciper'])
                    except TruffeUser.DoesNotExist:
                        user = TruffeUser(username=body_data['member']['sciper'], is_active=True)
                        user.last_name, user.first_name, user.email = get_attrs_of_sciper(user.username)
                        if not user.email:
                            result = {'error': 'WRONG_SCIPER'}
                            user = None
                        else:
                            user.save()
                    if user:
                        membership, created = Membership.objects.get_or_create(user=user, group=memberset, end_date=None)
                        result = {'result': 'ALREADY_OK'}
                        if memberset.handle_fees and 'payed_fees' in body_data['member'] and membership.payed_fees != body_data['member']['payed_fees']:
                            membership.payed_fees = body_data['member']['payed_fees']
                            membership.save()
                            result = {'result': 'UPDATED_FEE'}
                            if not created:
                                MemberSetLogging(who=system_user, what='edited', object=memberset,
                                                 extra_data='{"edited": {"Cotisation %s": ["%s ", "%s"]}}' % (membership.user.get_full_name(), not membership.payed_fees, membership.payed_fees)).save()
                        if created:
                            MemberSetLogging(who=system_user, what='edited', object=memberset, extra_data='{"edited": {"%s": ["None", "Membre"]}}' % (user.get_full_name(),)).save()
                            result = {'result': 'CREATED'}
        if request.method == 'DELETE':
            if 'member' not in body_data:
                result = {'error': 'MISSING_MEMBER'}
            else:
                if 'sciper' not in body_data['member']:
                    result = {'error': 'MISSING_SCIPER'}
                else:
                    try:
                        user = TruffeUser.objects.get(username=body_data['member']['sciper'])
                    except TruffeUser.DoesNotExist:
                        user = None
                        result = {'error': 'UNKNOWN_USER'}
                    if user:
                        # Memberships are closed (end_date set), never deleted.
                        membership = Membership.objects.filter(group=memberset, user=user, end_date=None).first()
                        if membership:
                            membership.end_date = now()
                            membership.save()
                            MemberSetLogging(who=system_user, what='edited', object=memberset, extra_data='{"edited": {"%s": ["Membre", "None"]}}' % (membership.user.get_full_name(),)).save()
                            result = {'result': 'REMOVED'}
                        else:
                            result = {'result': 'ALREADY_OK'}
        if request.method == 'POST':
            if 'members' not in body_data:
                result = {'error': 'MISSING_MEMBER'}
            else:
                added = []
                updated = []
                already_ok = []
                errors = []
                deleted = []
                # Primary keys of memberships confirmed by this sync; any
                # active membership not listed here is closed afterwards.
                members_ok = []
                for member_data in body_data['members']:
                    if 'sciper' not in member_data:
                        errors.append({'sciper': '?', 'error': 'MISSING_SCIPER'})
                    else:
                        try:
                            user = TruffeUser.objects.get(username=member_data['sciper'])
                        except TruffeUser.DoesNotExist:
                            user = TruffeUser(username=member_data['sciper'], is_active=True)
                            user.last_name, user.first_name, user.email = get_attrs_of_sciper(user.username)
                            if not user.email:
                                errors.append({'sciper': member_data['sciper'], 'error': 'WRONG_SCIPER'})
                                user = None
                            else:
                                user.save()
                        if user:
                            membership, created = Membership.objects.get_or_create(user=user, group=memberset, end_date=None)
                            result = 'ALREADY_OK'
                            if memberset.handle_fees and 'payed_fees' in member_data and membership.payed_fees != member_data['payed_fees']:
                                membership.payed_fees = member_data['payed_fees']
                                membership.save()
                                result = 'UPDATED_FEE'
                                if not created:
                                    MemberSetLogging(who=system_user, what='edited', object=memberset,
                                                     extra_data='{"edited": {"Cotisation %s": ["%s ", "%s"]}}' % (membership.user.get_full_name(), not membership.payed_fees, membership.payed_fees)).save()
                            if created:
                                MemberSetLogging(who=system_user, what='edited', object=memberset, extra_data='{"edited": {"%s": ["None", "Membre"]}}' % (user.get_full_name(),)).save()
                                result = 'CREATED'
                            if result == 'ALREADY_OK':
                                already_ok.append(member_data['sciper'])
                            elif result == 'UPDATED_FEE':
                                updated.append(member_data['sciper'])
                            elif result == 'CREATED':
                                added.append(member_data['sciper'])
                            members_ok.append(membership.pk)
                # Close every active membership not present in the posted list.
                for old_membership in Membership.objects.filter(group=memberset, end_date=None).exclude(pk__in=members_ok):
                    old_membership.end_date = now()
                    old_membership.save()
                    MemberSetLogging(who=system_user, what='edited', object=memberset, extra_data='{"edited": {"%s": ["Membre", "None"]}}' % (old_membership.user.get_full_name(),)).save()
                    deleted.append(old_membership.user.username)
                result = {
                    'created': added,
                    'updated': updated,
                    'already_ok': already_ok,
                    'deleted': deleted,
                    'errors': errors,
                }
    r = HttpResponse(json.dumps(result))
    r.content_type = 'application/json'
    return r
| {
"content_hash": "e9e58000302a0ab695c0791876994fc4",
"timestamp": "",
"source": "github",
"line_count": 467,
"max_line_length": 340,
"avg_line_length": 39.974304068522486,
"alnum_prop": 0.5620848510820655,
"repo_name": "agepoly/truffe2",
"id": "fe021492f8e3ca9a6d6112b7cf45250a58828336",
"size": "18709",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "truffe2/members/views.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15982"
},
{
"name": "ApacheConf",
"bytes": "45"
},
{
"name": "CSS",
"bytes": "551150"
},
{
"name": "HTML",
"bytes": "692880"
},
{
"name": "JavaScript",
"bytes": "2096877"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "2473222"
}
],
"symlink_target": ""
} |
"""
Copyright 2021 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
""" Kitchen environment for long horizon manipulation """
import collections
from typing import Dict, Sequence
import gym
from dm_control.mujoco import engine
from gym import spaces
import numpy as np
import copy
from adept_envs.components.robot import RobotComponentBuilder, RobotState
from adept_envs.franka.base_env import BaseFrankaEnv
from adept_envs.utils.resources import get_asset_path
from adept_envs.simulation.sim_scene import SimBackend
import pickle
ASSET_PATH = 'adept_envs/franka/assets/franka_cabinet_slider_knob_switch_toaster.xml'
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
# Class to represent a graph
class Graph:
    """Dense adjacency-matrix graph with Dijkstra shortest-path search.

    Edge weights are read from ``graph[u][v]``; a falsy entry (0/None)
    means "no edge". Used by the environment to plan goal sequences over
    the goal-transition matrix.
    """

    def minDistance(self, dist, queue):
        """Return the vertex in *queue* with the smallest tentative distance.

        Returns -1 when *queue* is empty or every queued vertex is still
        at infinite distance (i.e. unreachable from the source).
        """
        minimum = float("Inf")
        min_index = -1
        # Linear scan: pick the queued vertex with the minimal dist value.
        for i in range(len(dist)):
            if dist[i] < minimum and i in queue:
                minimum = dist[i]
                min_index = i
        return min_index

    def printPath(self, parent, j):
        """Print the path from the source to vertex *j*, space separated."""
        # Base case: j is the source (no parent recorded).
        if parent[j] == -1:
            print(j, end=' ')
            return
        self.printPath(parent, parent[j])
        print(j, end=' ')

    def printSolution(self, dist, parent):
        """Print distance and path from vertex 0 to every other vertex."""
        src = 0
        print("Vertex \t\tDistance from Source\tPath")
        for i in range(1, len(dist)):
            print("\n%d --> %d \t\t%d \t\t\t\t\t" % (src, i, dist[i]))
            self.printPath(parent, i)

    def dijkstra(self, graph, src, excluded):
        """Single-source shortest paths over an adjacency matrix.

        Args:
            graph: square matrix; graph[u][v] is the edge weight, falsy
                meaning no edge.
            src: index of the source vertex.
            excluded: per-vertex list; vertices whose entry is None are
                skipped entirely (not searched, empty path returned).

        Returns:
            A list of paths (lists of vertex indices) from ``src`` to every
            vertex; excluded vertices get an empty list.
        """
        row = len(graph)
        col = len(graph[0])
        # dist[i] holds the shortest known distance from src to i.
        dist = [float("Inf")] * row
        # parent[] encodes the shortest-path tree; -1 marks the root.
        parent = [-1] * row
        dist[src] = 0
        # Only non-excluded vertices participate in the search.
        queue = [i for i in range(row) if excluded[i] is not None]
        while queue:
            u = self.minDistance(dist, queue)
            # Bug fix: on a disconnected graph every remaining queued vertex
            # is unreachable and minDistance returns -1; previously this
            # crashed on queue.remove(-1) (or wrapped around via graph[-1]).
            if u == -1:
                break
            queue.remove(u)
            # Relax all edges leaving u towards still-queued vertices.
            for i in range(col):
                if graph[u][i] and i in queue:
                    if dist[u] + graph[u][i] < dist[i]:
                        dist[i] = dist[u] + graph[u][i]
                        parent[i] = u
        # Reconstruct src -> i paths by walking the parent chain backwards.
        goal_paths = [[] for _ in range(len(dist))]
        for i in range(len(dist)):
            if excluded[i] is not None:
                curr_path = []
                curr_idx = i
                while parent[curr_idx] != -1:
                    curr_path = [curr_idx] + curr_path
                    curr_idx = parent[curr_idx]
                curr_path = [src] + curr_path
                goal_paths[i] = curr_path
        return goal_paths
# Keys of the observation dict concatenated (in this order) into the flat
# observation returned by step()/reset() (see the __init__ docstring below).
DEFAULT_OBSERVATION_KEYS = (
    'qp',
    'obj_qp',
    'mocap_pos',
    # 'mocap_quat',
    'goal'
)
import sys
sys.path.append(".")
class FrankaCabinetSliderKnobSwitchToaster(BaseFrankaEnv):
# Number of degrees of freedom of all objects.
N_DOF_OBJ = 6
    def __init__(self,
                 asset_path: str = ASSET_PATH,
                 observation_keys: Sequence[str] = DEFAULT_OBSERVATION_KEYS,
                 frame_skip: int = 40,
                 use_raw_actions: bool = False,
                 camera_settings=dict(
                     distance=2.5,
                     azimuth=66,
                     elevation=-35,),
                 attempt_limit=5,
                 eval_mode=False,
                 reset_frequency=1,
                 learned_model=False,
                 learned_model_path=None,
                 counts_enabled=False,
                 graph_search=False,
                 smoothing_factor=1000,
                 idx_completion=False,
                 **kwargs):
        """Initializes the environment.

        Args:
          asset_path: The XML model file to load.
          observation_keys: The keys in `get_obs_dict` to concatenate as the
            observations returned by `step` and `reset`.
          frame_skip: The number of simulation steps per environment step.
          use_raw_actions: If True, `_preprocess_action` passes actions
            through unchanged.
          attempt_limit: Maximum attempts per goal (see self.attempt_counter).
          reset_frequency: How often `reset` performs a full reset
            (consumed via self._reset_counter).
          learned_model / learned_model_path: Optionally load a pretrained
            goal-selection MLP (see learned_goal_select).
          counts_enabled: Enables visit-count accumulation in update_counts.
          graph_search: Enables Dijkstra-based goal selection in
            learned_goal_select.
          smoothing_factor: Scales the initial transition-probability matrix.
          idx_completion: Stored flag; semantics not visible in this chunk —
            TODO confirm against the reward/completion code.
        """
        self._eval_mode = eval_mode
        self.idx_completion = idx_completion
        self._reset_counter = 0
        self._reset_frequency = reset_frequency
        self._current_idx = 1
        self._goal_idx = 1
        self._counts_enabled = counts_enabled
        self.attempt_limit = attempt_limit
        self.attempt_counter = 0
        super().__init__(
            sim_model=get_asset_path(asset_path),
            observation_keys=observation_keys,
            frame_skip=frame_skip,
            camera_settings=camera_settings,
            sim_backend=SimBackend.DM_CONTROL,
            **kwargs)
        self.goal = np.zeros(64)
        self.use_raw_actions = use_raw_actions
        # Keyframe 0 of the MuJoCo model provides the initial state.
        self.init_qpos = self.sim.model.key_qpos[0].copy()
        self.init_qvel = self.sim.model.key_qvel[0].copy()
        # self.labeled_goals = pickle.load(open('sim_slider_cabinet_labeled_goals.pkl', 'rb'))
        # self.adjacency_matrix = pickle.load(open('sim_slider_cabinet_adjacency_matrix.pkl', 'rb'))
        # self._counts = np.zeros(self.adjacency_matrix.shape[0])
        self.midpoint_pos = np.array([-0.440, 0.152, 2.226])
        self.range = np.array([0.035, 0.035, 0.02])
        # Workspace bounds for the mocap end-effector target.
        self.mocap_pos_clip_lower = np.array([-0.85, 0., 1.8])
        self.mocap_pos_clip_upper = np.array([0.55, 0.5, 2.7])
        # NOTE(review): these pickles are loaded from the current working
        # directory — running from another directory will fail; confirm the
        # expected launch directory.
        self.goal_matrix = pickle.load(open('./goal_matrix_6elements_onehot_uniformsim_1elem.pkl', 'rb'))
        # self.goal_matrix = np.zeros((64, 64))
        #
        # # TODO: Fix this according to true transition matrix
        #
        # for i in range(64):
        #     curr_idx = np.binary_repr(i)
        #     curr_idx = '0'*(6 - len(curr_idx)) + curr_idx
        #     curr_idx = np.array([int(curr_idx[k]) for k in range(len(curr_idx))])
        #     for j in range(6):
        #         new_idx = copy.deepcopy(curr_idx)
        #         new_idx[j] = (new_idx[j] + 1) % 2
        #         new_idx_val = np.sum(np.array([2**i for i in range(6)])[::-1]*new_idx)
        #         self.goal_matrix[i, new_idx_val] = 1
        self.smoothing_factor = smoothing_factor
        self.transition_prob = self.goal_matrix.copy()
        self._current_state = 0
        # One counter per goal index (64 = 2^6 binary element states).
        self._counts = np.zeros(64,)
        self._goals = pickle.load(open('./end_states_6elements_onehot_uniformsim_1elem.pkl', 'rb'))
        self._goals_val = pickle.load(open('./end_states_6elements_onehot_uniformsim_vals_1elem.pkl', 'rb'))
        self.eps = 0.4
        # self.smoothing_factor = smoothing_factor
        # self.transition_prob = self.goal_matrix.copy()
        self.density = np.zeros((64,))
        self.transition_prob *= self.smoothing_factor
        self.edge_visitation = self.transition_prob.copy()
        self.goal = self._goals[self._goal_idx]
        self.commanded_start = -1
        self.commanded_goal = -1
        self.learned_model = learned_model
        self.learned_model_path = learned_model_path
        self.model = None
        if self.learned_model:
            # NOTE(review): neither `Mlp` nor `torch` is imported in this
            # module's visible imports — confirm they are injected via the
            # sys.path manipulation above, otherwise this branch raises
            # NameError.
            self.model = Mlp(input_size=3,
                             output_size=64,
                             hidden_sizes=(256, 256, 256))
            dat = torch.load(self.learned_model_path)
            state_dict = dat.state_dict()
            self.model.load_state_dict(state_dict)
            print("LOADED IN MODEL SUCCESSFULLY")
        self.g = Graph()
        self.graph_search = graph_search
    def learned_goal_select(self, goal_selected):
        """Possibly override the proposed goal index and update visit counts.

        With a learned model, samples a goal from the model's count-penalized
        softmax; with graph_search, follows the first step of a planned path.
        Otherwise returns *goal_selected* unchanged (after counting it).
        """
        if self.learned_model:
            print("IN LEARNED MODEL")
            # Object joint values only (elements 2:8 of the flat observation).
            o = self._get_obs(self.get_obs_dict())[2:8]
            input_x = torch.Tensor(o)[None, :]
            # Softmax over model logits, down-weighted by exp(-counts) to
            # favour rarely-visited goals.
            output_x = torch.nn.Softmax()(self.model(input_x)*torch.Tensor(np.exp(-self._counts))).detach().numpy()[0]
            goal_selected = np.random.choice(range(64), p=output_x)
            print("LEARNED LIKELIHOOD PREDICTIONS " + str(output_x))
        # Updating counts
        curr_count = np.zeros((64,))
        curr_count[goal_selected] += 1
        self.update_counts(curr_count)
        if self.graph_search:
            # NOTE(review): select_goal is defined outside this chunk —
            # presumably returns (goal_state, goal_path_list, goal_path);
            # verify its contract. We take the next vertex on the path.
            gs, gpl, gp = self.select_goal()
            if len(gpl) > 1:
                goal_selected = gpl[1]
            else:
                goal_selected = gpl[0]
        return goal_selected
def update_counts(self, new_counts):
if self._counts_enabled:
self._counts += new_counts
    def _configure_robot(self, builder: RobotComponentBuilder):
        """Configures the robot component."""
        # No configuration beyond the base Franka arm/gripper setup.
        super()._configure_robot(builder)
def _preprocess_action(self, action: np.ndarray) -> np.ndarray:
""" If using raw actions, there is no need to do any processing to the action array."""
if self.use_raw_actions:
return action
else:
return super()._preprocess_action(action)
    def _reset(self):
        # Intentionally a no-op: resets are driven by reset()/reset_arm_state()
        # below rather than this base-class hook.
        pass
def reset_arm_state(self, start_idx):
    """Hard-resets the sim and teleports the scene to a stored start state.

    Args:
        start_idx: Index into self._goals_val; elements [2:8] of that entry
            hold the object configuration to restore.
    """
    self.last_action = None
    self.sim.reset()
    self.sim.forward()
    """Resets the environment."""
    self.robot.set_state({
        'arm': RobotState(
            qpos=self.init_qpos[0:self.N_DOF_ARM],
            qvel=np.zeros(self.N_DOF_ARM)),
        'gripper': RobotState(
            qpos=self.init_qpos[self.N_DOF_ARM:self.N_DOF_ARM +
                                self.N_DOF_GRIPPER],
            qvel=np.zeros(self.N_DOF_GRIPPER))
    })
    new_qpos = self.init_qpos.copy()
    # Canonical arm joint configuration used for every reset.
    new_qpos[:7] = np.array(
        [-2.64311209, -1.76372997, -0.23182923, -2.1470029, 2.55216266, -0.44102682, -0.01343831])
    new_qpos[7:9] = np.array([0.1, 0.1])
    new_qpos[9:15] = self._goals_val[start_idx][2:8]  # Reset to a particular state
    self.sim.data.qpos[:] = new_qpos.copy()
    self.sim.data.mocap_pos[:] = np.array([-0.16922002, 0.07353752, 2.57067996])
    # Let the physics settle, then command the gripper open.
    for _ in range(100):
        self.sim.step()
    self.robot.step({
        'gripper': 1 * np.ones(2)
    }, True)
def reset_arm_only(self):
    """Hard-resets the sim and re-poses the arm while keeping object state."""
    # Snapshot the full qpos so object joints survive the sim reset.
    new_qpos = self.sim.data.qpos.copy()
    self.last_action = None
    self.sim.reset()
    self.sim.forward()
    """Resets the environment."""
    self.robot.set_state({
        'arm': RobotState(
            qpos=self.init_qpos[0:self.N_DOF_ARM],
            qvel=np.zeros(self.N_DOF_ARM)),
        'gripper': RobotState(
            qpos=self.init_qpos[self.N_DOF_ARM:self.N_DOF_ARM +
                                self.N_DOF_GRIPPER],
            qvel=np.zeros(self.N_DOF_GRIPPER))
    })
    # Only arm (7 DOF) and gripper (2 DOF) entries are overwritten;
    # object joints retain their pre-reset values.
    new_qpos[:7] = np.array(
        [-2.64311209, -1.76372997, -0.23182923, -2.1470029, 2.55216266, -0.44102682, -0.01343831])
    new_qpos[7:9] = np.array([0.1, 0.1])
    self.sim.data.qpos[:] = new_qpos.copy()
    self.sim.data.mocap_pos[:] = np.array([-0.16922002, 0.07353752, 2.57067996])
    # Let the physics settle, then command the gripper open.
    for _ in range(100):
        self.sim.step()
    self.robot.step({
        'gripper': 1 * np.ones(2)
    }, True)
def reset(self):
    """Resets the environment, supporting reset-free operation.

    Behavior:
      * On every call after the first (when graph search is enabled), the
        outcome of the previous episode is logged into the transition
        graph, the empirical transition statistics, and the visit density.
      * A *full* reset (teleport to a sampled or commanded start state)
        happens on the first call, in eval mode, or every
        ``_reset_frequency`` episodes. Otherwise only the arm is re-posed
        and the objects keep their state, with the commanded goal switched
        according to progress.

    Returns:
        The initial observation after resetting.
    """
    if self._reset_counter > 0 and self.graph_search:
        current_end_state = self.check_goal_completion(self.get_obs_dict()['obj_qp'])
        # _current_idx is the state at the start of the episode,
        # current_end_state is the state at the end of the episode, and
        # _goal_idx is the state commanded during the episode.
        self.update_graph(self._current_idx, current_end_state)
        self.update_transition_prob(init_state=self._current_state,
                                    end_state=current_end_state,
                                    commanded_goal_state=self._goal_idx)
        self.update_densities(current_end_state)
        print("TRANSITION PROBS " + str(self.transition_prob))
        print("DENSITY " + str(self.density))
    if self._reset_counter == 0 or self._eval_mode \
            or (self._reset_frequency != -1 and self._reset_counter % self._reset_frequency == 0):
        """ Resets the environment. """
        print("RESETTING FULL ENVIRONMENT:")
        # TODO: Deal with the case with no density
        if self.commanded_start == -1:
            # Sample a start state that exists and has outgoing edges.
            start_goal = None
            valid_transitions = 0.
            while start_goal is None or valid_transitions == 0.:
                start_idx = np.random.randint(64)
                start_goal = self._goals[start_idx]
                valid_transitions = np.sum(self.goal_matrix[start_idx])
        else:
            start_idx = self.commanded_start
        print("START IDX" + str(start_idx))
        viable_goals = np.where(self.goal_matrix[start_idx] > 0)[0]
        if self.commanded_goal == -1:
            goal_idx = np.random.choice(viable_goals)
        else:
            goal_idx = self.commanded_goal
        self.reset_arm_state(start_idx)
        self._goal_idx = goal_idx
        self._current_state = -1
        self.attempt_counter = 0
    else:
        print("DOING a reset free skip:")
        self.reset_arm_only()
        # More complex goal switching mechanism.
        current_state = self.check_goal_completion(self.get_obs_dict()['obj_qp'])
        # If the state is unrecognized (-1), keep commanding the last goal.
        # If the state changed (or the attempt budget ran out), record the
        # new state and pick the next goal to command.
        if (current_state != -1 and current_state != self._current_state) or \
                (current_state != -1 and self.attempt_counter > self.attempt_limit):
            if np.sum(self.goal_matrix[current_state]) == 0.:
                # Dead end: no outgoing edges, so teleport to a freshly
                # sampled start state instead.
                start_goal = None
                valid_transitions = 0.
                while start_goal is None or valid_transitions == 0.:
                    start_idx = np.random.randint(64)
                    start_goal = self._goals[start_idx]
                    valid_transitions = np.sum(self.goal_matrix[start_idx])
                viable_goals = np.where(self.goal_matrix[start_idx] > 0)[0]
                goal_idx = np.random.choice(viable_goals)
                self.reset_arm_state(start_idx)
                self._goal_idx = goal_idx
                self._current_state = -1
                self.attempt_counter = 0
            else:
                viable_goals = np.where(self.goal_matrix[current_state] > 0)[0]
                goal_selected = np.random.choice(viable_goals)
                goal_selected = self.learned_goal_select(goal_selected)
                # Fix: the original assigned _current_state twice in a row;
                # the redundant duplicate assignment has been removed.
                self._current_state = current_state
                self._goal_idx = goal_selected
                self.attempt_counter = 0
        # If the state did not change, switch the commanded goal with
        # probability eps.
        elif current_state != -1 and current_state == self._current_state:
            self.attempt_counter += 1
            if np.random.rand() < self.eps:
                if np.sum(self.goal_matrix[current_state]) == 0.:
                    start_goal = None
                    valid_transitions = 0.
                    while start_goal is None or valid_transitions == 0.:
                        start_idx = np.random.randint(64)
                        start_goal = self._goals[start_idx]
                        valid_transitions = np.sum(self.goal_matrix[start_idx])
                    viable_goals = np.where(self.goal_matrix[start_idx] > 0)[0]
                    goal_idx = np.random.choice(viable_goals)
                    self.reset_arm_state(start_idx)
                    self._goal_idx = goal_idx
                    self.attempt_counter = 0
                else:
                    # Fix: corrected typo "Selexting" in the log message.
                    print("Selecting a new goal, tried too many times")
                    viable_goals = np.where(self.goal_matrix[current_state] > 0)[0]
                    goal_selected = np.random.choice(viable_goals)
                    goal_selected = self.learned_goal_select(goal_selected)
                    self._goal_idx = goal_selected
                    self.attempt_counter = 0
    self._reset_counter += 1
    print("EVAL MODE IS " + str(self._eval_mode))
    print("Current goal is %d" % self._goal_idx)
    # TODO(review): _current_idx and _current_state appear to duplicate
    # each other; consider consolidating them.
    self._current_idx = self.check_goal_completion(self.get_obs_dict()['obj_qp'][None, :].squeeze(axis=0))
    self.goal = self._goals[self._goal_idx]
    obs_dict = self.get_obs_dict()
    return self._get_obs(obs_dict)
#
# def reset(self):
# """Resets the environment.
#
# Args:
# state: The state to reset to. This must match with the state space
# of the environment.
#
# Returns:
# The initial observation of the environment after resetting.
# """
# if self.attempt_counter >= self.attempt_limit:
# self.reset_counter = 0
#
# self.last_action = None
# self.sim.reset()
# self.sim.forward()
# """Resets the environment."""
# self.robot.set_state({
# 'arm': RobotState(
# qpos=self.init_qpos[0:self.N_DOF_ARM],
# qvel=np.zeros(self.N_DOF_ARM)),
# 'gripper': RobotState(
# qpos=self.init_qpos[self.N_DOF_ARM:self.N_DOF_ARM +
# self.N_DOF_GRIPPER],
# qvel=np.zeros(self.N_DOF_GRIPPER))
# })
#
# new_qpos = self.init_qpos.copy()
# new_qpos[:7] = np.array([-2.64311209, -1.76372997, -0.23182923, -2.1470029 , 2.55216266, -0.44102682, -0.01343831])
# new_qpos[7:9] = np.array([0.1, 0.1])
#
# self.sim.data.qpos[:] = new_qpos.copy()
# self.sim.data.mocap_pos[:] = np.array([-0.30164342, 0.15300494, 2.59070813])
# for _ in range(100):
# self.sim.step()
# self.robot.step({
# 'gripper': 1*np.ones(2)
# }, True)
#
# obs_dict = self.get_obs_dict()
# return self._get_obs(obs_dict)
@property
def action_space(self):
    """Continuous 5-D action space with each dimension in [-1, 1]."""
    bounds = gym.spaces.Box(-1, 1, shape=(5,))
    return bounds
def _step(self, action: np.ndarray):
    """Applies an action to the robot.

    The first three action components move the mocap target relative to
    its current position (scaled by self.range and clipped to the
    workspace bounds); the last two command the gripper.
    """
    # TODO: How to deal with goal changing?
    # Raw actions skip denormalization inside robot.step.
    denormalize = False if self.use_raw_actions else True
    current_pos = self.sim.data.mocap_pos.copy()
    new_pos = current_pos + action[:3]*self.range
    new_pos = np.clip(new_pos, self.mocap_pos_clip_lower, self.mocap_pos_clip_upper)
    self.sim.data.mocap_pos[:] = new_pos.copy()
    self.robot.step({
        'gripper': action[-2:]
    }, denormalize)
def get_obs_dict(self):
    """Returns the current observation of the environment.

    Returns:
        A dictionary of observation values. This should be an ordered
        dictionary if `observation_keys` isn't set.
    """
    arm_state = self.robot.get_state('arm')  # NOTE: currently unused below.
    gripper_state = self.robot.get_state('gripper')
    # obj_state = self.robot.get_state('object')
    obs_dict = collections.OrderedDict((
        ('t', self.robot.time),
        # Only gripper joints are exposed under 'qp'/'qv'.
        ('qp', np.concatenate([gripper_state.qpos])),
        ('qv', np.concatenate([gripper_state.qvel])),
        # Object joints occupy the tail of the full qpos vector.
        ('obj_qp', self.sim.data.qpos[-self.N_DOF_OBJ:]),
        ('mocap_pos', self.sim.data.mocap_pos.copy()),
        ('mocap_quat', self.sim.data.mocap_quat.copy()),
        ('goal', self.goal),
    ))
    return obs_dict
def compute_path(self, current_state):
    """Returns shortest paths from current_state to every goal (Dijkstra)."""
    return self.g.dijkstra(self.goal_matrix, current_state, self._goals)
def update_graph(self, init_state, goal_state):
    """Records an observed init_state -> goal_state transition as an edge."""
    self.goal_matrix[init_state][goal_state] = 1
def update_transition_prob(self, init_state, end_state, commanded_goal_state):
    """Tallies success statistics for the commanded edge.

    The numerator (transition_prob) grows only when the episode actually
    ended in the commanded goal; the denominator (edge_visitation) grows
    on every attempt. (The original's explicit `+= 0.` failure branch was
    a no-op and has been folded away.)
    """
    if end_state == commanded_goal_state:
        self.transition_prob[init_state][commanded_goal_state] += 1.
    self.edge_visitation[init_state][commanded_goal_state] += 1.
def update_densities(self, goal_state):
    """Increments the visit count for the state the episode ended in."""
    self.density[goal_state] += 1
def select_goal(self):
    """Picks the goal whose commanded path best flattens the visit density.

    For each candidate goal, the expected extra density contributed by
    walking the Dijkstra path to it is added to the current density; the
    candidate minimizing total-variation distance to the uniform
    distribution over the 64 states wins.

    Returns:
        Tuple of (best goal index, path to that goal, all computed paths).
    """
    current_state = self.check_goal_completion(self.get_obs_dict()['obj_qp'])
    goal_paths = self.compute_path(current_state)
    density_diffs = [-np.inf for i in range(64)]
    # TODO: Deal with the cases with no path?
    for i in range(64):
        if i == current_state:
            # Never command the state we are already in.
            density_diffs[i] = -np.inf
            continue
        if self._goals[i] is None:
            continue
        added_density = np.zeros(64)
        weighted_prob = 1
        curr_path_idx = current_state
        for gs in goal_paths[i][1:]:
            # Chain empirical success probabilities along the path.
            # NOTE(review): divides by edge_visitation — presumably kept
            # nonzero by the smoothing_factor initialization; verify.
            weighted_prob *= (self.transition_prob[curr_path_idx][gs]/self.edge_visitation[curr_path_idx][gs])
            added_density[gs] = weighted_prob
            curr_path_idx = gs
        new_density = self.density + added_density
        new_density = new_density / new_density.sum()
        # Negative TVD to the uniform distribution (higher is flatter).
        density_diffs[i] = -np.sum(np.abs(new_density - np.ones(64) * (1 / 64.)))
    new_goal = np.argmax(np.array(density_diffs))
    return new_goal, goal_paths[new_goal], goal_paths
def set_goal(self, goal):
    """Overrides the currently commanded goal vector."""
    self.goal = goal
def get_goal(self):
    """Returns the currently commanded goal vector."""
    return self.goal
def get_score_dict(
        self,
        obs_dict: Dict[str, np.ndarray],
        reward_dict: Dict[str, np.ndarray],
) -> Dict[str, np.ndarray]:
    """Returns a standardized measure of success for the environment."""
    # This environment reports no scoring metrics.
    return collections.OrderedDict()
# Only include goal
@property
def goal_space(self):
    """Box spanning the first half of the observation dimensions."""
    obs_space = self.observation_space
    len_obs = obs_space.low.shape[0]
    env_lim = np.abs(obs_space.low[0])
    return spaces.Box(low=-env_lim, high=env_lim, shape=(len_obs // 2,))
def render(self, mode='human'):
    """Renders the scene; returns an 84x84 RGB array in 'rgb_array' mode."""
    if mode == 'rgb_array':
        camera = engine.MovableCamera(self.sim, 84, 84)
        # Fixed external viewpoint over the workspace.
        camera.set_pose(
            distance=2.2, lookat=[-0.2, .5, 2.1], azimuth=70, elevation=-35)
        img = camera.render()
        return img
    else:
        super().render()
def check_goal_completion(self, curr_pos):
    """Discretizes the 6 object DOFs into a 6-bit state index (MSB first).

    Each element maps to 1 when above its per-element upper threshold and
    to 0 when below its lower threshold; in-between values keep the
    default 0.
    """
    # Old things for runs
    # max_objs = np.array([-0.2, -0.2, 0.2, 0.75, 1.1, -0.07])
    # min_objs = np.array([-0.6, -0.6, 0.08, 0.1, 0.1, -0.21])
    max_objs = np.array([-0.25, -0.25, 0.2, 0.75, 1.1, -0.07])
    min_objs = np.array([-0.6, -0.6, 0.08, 0.1, 0.1, -0.21])
    if len(curr_pos.shape) > 1:
        curr_pos = curr_pos.squeeze(axis=0)
    bits = np.array([0, 0, 0, 0, 0, 0])
    for j, (hi, lo) in enumerate(zip(max_objs, min_objs)):
        if curr_pos[j] > hi:
            bits[j] = 1
        elif curr_pos[j] < lo:
            bits[j] = 0  # Redundant with the zero init; kept for clarity.
    weights = np.array([2 ** i for i in range(6)])[::-1]
    return np.sum(weights * bits)
def get_reward_dict(
        self,
        action: np.ndarray,
        obs_dict: Dict[str, np.ndarray],
) -> Dict[str, np.ndarray]:
    """Computes reward terms for the current step.

    In idx-completion mode the reward is a 0/1 indicator that the
    discretized object state equals the commanded goal. Otherwise a dense
    shaping reward is computed for the *first* element whose bit differs
    between the current and goal state: a normalized element-position
    error plus an arm-to-target distance term.
    """
    if self.idx_completion:
        """Returns a standardized measure of success for the environment."""
        curr_idx = self.check_goal_completion(obs_dict['obj_qp'])
        rd = collections.OrderedDict((
            ('ee_slider', np.array([np.float(curr_idx == self._goal_idx)])),
        ))
    else:
        # Current world positions of each element's interaction site.
        self.switch1_arm_pos = self.sim.data.site_xpos[self.sim.model.site_name2id('light_site')]
        self.switch2_arm_pos = self.sim.data.site_xpos[self.sim.model.site_name2id('light_site2')]
        self.slider_arm_pos = self.sim.data.site_xpos[self.sim.model.site_name2id('slide_site')]
        self.cabinet_arm_pos = self.sim.data.site_xpos[self.sim.model.site_name2id('hinge_site2')]
        self.toaster_arm_pos = self.sim.data.site_xpos[self.sim.model.site_name2id('toaster_site')]
        self.tray_arm_pos = self.sim.data.site_xpos[self.sim.model.site_name2id('tray_site')]
        # Joint values corresponding to "open"/"closed" for each element.
        self.switch1_open_pos = -0.
        self.switch2_open_pos = -0.
        self.slider_open_pos = 0.4
        self.cabinet_open_pos = 1.4
        self.toaster_open_pos = 1.5
        self.tray_open_pos = 0.0
        self.switch1_closed_pos = -0.7
        self.switch2_closed_pos = -0.7
        self.slider_closed_pos = 0.0
        self.cabinet_closed_pos = 0.
        self.toaster_closed_pos = 0.
        self.tray_closed_pos = -0.25
        # Compare goal and current state bit-by-bit (6-bit binary codes).
        goal_idx = np.binary_repr(self._goal_idx)
        curr_idx = np.binary_repr(self._current_idx)
        goal_idx = '0'*(6 - len(goal_idx)) + goal_idx
        curr_idx = '0'*(6 - len(curr_idx)) + curr_idx
        assert len(goal_idx) == len(curr_idx)
        i = 0
        # The first differing bit selects the element to manipulate.
        for i in range(len(curr_idx)):
            if goal_idx[i] != curr_idx[i]:
                break
        if goal_idx[i] == '1':
            goal_open = True
        else:
            goal_open = False
        # Full open<->closed travel per element, used to normalize errors.
        normalized_dist = [
            np.linalg.norm(self.switch1_open_pos - self.switch1_closed_pos),
            np.linalg.norm(self.switch2_open_pos - self.switch2_closed_pos),
            np.linalg.norm(self.slider_open_pos - self.slider_closed_pos),
            np.linalg.norm(self.cabinet_open_pos - self.cabinet_closed_pos),
            np.linalg.norm(self.toaster_open_pos - self.toaster_closed_pos),
            np.linalg.norm(self.tray_open_pos - self.tray_closed_pos)]
        # NOTE(review): the open/closed arm position lists are identical;
        # presumably intentional (the grasp site is the same either way),
        # but worth confirming.
        open_arm_positions = [self.switch1_arm_pos,
                              self.switch2_arm_pos,
                              self.slider_arm_pos,
                              self.cabinet_arm_pos,
                              self.toaster_arm_pos,
                              self.tray_arm_pos]
        closed_arm_positions = [self.switch1_arm_pos,
                                self.switch2_arm_pos,
                                self.slider_arm_pos,
                                self.cabinet_arm_pos,
                                self.toaster_arm_pos,
                                self.tray_arm_pos]
        open_element_positions = [self.switch1_open_pos,
                                  self.switch2_open_pos,
                                  self.slider_open_pos,
                                  self.cabinet_open_pos,
                                  self.toaster_open_pos,
                                  self.tray_open_pos]
        closed_element_positions = [self.switch1_closed_pos,
                                    self.switch2_closed_pos,
                                    self.slider_closed_pos,
                                    self.cabinet_closed_pos,
                                    self.toaster_closed_pos,
                                    self.tray_closed_pos]
        if goal_open:
            arm_target_pos = closed_arm_positions[i]
            element_target_pos = open_element_positions[i]
        else:
            arm_target_pos = open_arm_positions[i]
            element_target_pos = closed_element_positions[i]
        delta_pos = np.array([0, -0.2, 0])
        # Fix: the original used `arm_target_pos += delta_pos`, an in-place
        # add on a row *view* of sim.data.site_xpos, which wrote the offset
        # back into simulator-derived state on every reward evaluation.
        # The out-of-place add keeps the site positions untouched.
        arm_target_pos = arm_target_pos + delta_pos
        arm_error = obs_dict['mocap_pos'] - arm_target_pos
        slider_error = obs_dict['obj_qp'][:, i] - element_target_pos
        normalizer = normalized_dist[i]
        rd = collections.OrderedDict((
            ('ee_slider', np.array([-20*np.float(np.linalg.norm(slider_error))/normalizer])),
            ('arm_dist', np.array([-np.float(np.linalg.norm(arm_error))])),
        ))
    return rd
"content_hash": "c516a1c50f1bec8afb5ef1e3788065c2",
"timestamp": "",
"source": "github",
"line_count": 753,
"max_line_length": 126,
"avg_line_length": 41.091633466135455,
"alnum_prop": 0.5325770796974986,
"repo_name": "google-research/DBAP-simulation",
"id": "8b5ccbd1303a4364301fba867db246da8ccd4506",
"size": "30942",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "adept_envs/franka/franka_cabinet_slider_knob_switch_toaster.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from . import packer
def define():
    """Registers the built-in packet table schemas with the packer."""
    # Ownership/permission change notification.
    perm = packer.TableDef('_permission')
    perm.define('uint16', 'id')
    perm.define('uint8', 'state')

    # Object destruction notification.
    destroy = packer.TableDef('_destroy')
    destroy.define('uint16', 'id')

    # Base game object: id plus pos_* and rot_* fields (four rot
    # components — presumably a quaternion).
    game_object = packer.TableDef('_GameObject')
    game_object.define('uint16', 'id')
    for field in ('pos_x', 'pos_y', 'pos_z',
                  'rot_x', 'rot_y', 'rot_z', 'rot_w'):
        game_object.define('float', field)

    # Rigid-body variant: extends _GameObject with lv_*/av_* fields
    # (presumably linear/angular velocity).
    rigid = packer.TableDef('_RigidGameObject', template=game_object)
    for field in ('lv_x', 'lv_y', 'lv_z', 'av_x', 'av_y', 'av_z'):
        rigid.define('float', field)
"content_hash": "b8aa6203df4a0a443e04043b1cdefd32",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 69,
"avg_line_length": 31.357142857142858,
"alnum_prop": 0.6309794988610479,
"repo_name": "pqftgs/bge-netplay",
"id": "4e4ed38bffae0105c2a2ee4e532e31d83787e023",
"size": "878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "netplay/builtin_tables.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "196298"
}
],
"symlink_target": ""
} |
"""A module for extracting properties from Python dicts.
A property is a string that represents a value in a JSON-serializable
dict. For example, "x.y" matches 1 in {'x': {'y': 1, 'z': 2}, 'y': [1,
2, 3]}.
See PropertySelector and PropertyGetter's docstrings for example
usage.
The grammar for properties is as follows:
path
::= primary
::= primary '.' path
primary
::= attribute
::= attribute '[' ']'
::= attribute '[' index ']'
index
::= Any non-negative integer. Integers beginning with 0 are
interpreted as base-10.
attribute
:= Any non-empty sequence of characters; The special characters
'[', ']', and '.' may appear if they are preceded by '\'.
The literal '\' may appear if it is itself preceded by a '\'.
There are three operators in the language of properties:
'.': Attribute access which allows one to select the key of
a dict.
'[]': List operator which allows one to apply the rest of the
property to each element of a list.
'[INDEX]': List access which allows one to select an element of
a list.
"""
import collections
import copy
from googlecloudsdk.core.util import tokenizer
class Error(Exception):
    """Base class for exceptions raised by this module.

    Catching this type catches every error this module can raise.
    """
class IllegalProperty(Error):
    """Raised for properties that are syntactically incorrect."""
class ConflictingProperties(Error):
    """Raised when a property conflicts with another.

    Examples of conflicting properties:

      - "a.b" and "a[0].b"
      - "a[0].b" and "a[].b"
    """
class _Key(str):
    # Marker type for a dict-key component of a parsed property.
    pass
class _Index(int):
    # Marker type for a list-index component (e.g. "[3]") of a parsed property.
    pass
class _Slice(object):
def __eq__(self, other):
return type(self) == type(other)
def __hash__(self):
return 0
def _Parse(prop):
    """Parses a property string into a list of components.

    Args:
        prop: The property string, e.g. "x.y[0].z" or "a[].b".

    Returns:
        A list of _Key, _Index, and _Slice components in path order.

    Raises:
        IllegalProperty: If prop does not conform to the property grammar.
    """
    tokens = tokenizer.Tokenize(prop, ['[', ']', '.'])
    tokens = [token for token in tokens if token]
    if not tokens:
        raise IllegalProperty('illegal property: {0}'.format(prop))
    res = []
    while tokens:
        # Every path component must begin with an attribute name.
        if not isinstance(tokens[0], tokenizer.Literal):
            raise IllegalProperty('illegal property: {0}'.format(prop))
        res.append(_Key(tokens[0]))
        tokens = tokens[1:]
        # At this point, we expect to be either at the end of the input
        # stream or we expect to see a "." or "[".
        # We've reached the end of the input stream.
        if not tokens:
            break
        if not isinstance(tokens[0], tokenizer.Separator):
            raise IllegalProperty('illegal property: {0}'.format(prop))
        if isinstance(tokens[0], tokenizer.Separator) and tokens[0] == '[':
            if len(tokens) < 2:
                raise IllegalProperty('illegal property: {0}'.format(prop))
            tokens = tokens[1:]
            # Handles list slices (i.e., "[]").
            if (isinstance(tokens[0], tokenizer.Separator) and
                    tokens[0] == ']'):
                res.append(_Slice())
                tokens = tokens[1:]
            # Handles index accesses (e.g., "[1]").
            elif (isinstance(tokens[0], tokenizer.Literal) and
                  tokens[0].isdigit() and
                  len(tokens) >= 2 and
                  isinstance(tokens[1], tokenizer.Separator) and
                  tokens[1] == ']'):
                res.append(_Index(tokens[0]))
                tokens = tokens[2:]
            else:
                raise IllegalProperty('illegal property: {0}'.format(prop))
        # We've reached the end of input.
        if not tokens:
            break
        # We expect a "."; we also expect that the "." is not the last
        # token in the input.
        if (len(tokens) > 1 and
                isinstance(tokens[0], tokenizer.Separator) and
                tokens[0] == '.'):
            tokens = tokens[1:]
            continue
        else:
            raise IllegalProperty('illegal property: {0}'.format(prop))
    return res
def _GetProperty(obj, components):
"""Grabs a property from obj."""
if obj is None:
return None
elif not components:
return obj
elif (isinstance(components[0], _Key) and
isinstance(obj, dict)):
return _GetProperty(obj.get(components[0]), components[1:])
elif (isinstance(components[0], _Index) and isinstance(obj, list) and
components[0] < len(obj)):
return _GetProperty(obj[components[0]], components[1:])
elif (isinstance(components[0], _Slice) and
isinstance(obj, list)):
return [_GetProperty(item, components[1:]) for item in obj]
else:
return None
def _DictToOrderedDict(obj):
"""Recursively converts a JSON-serializable dict to an OrderedDict."""
if isinstance(obj, dict):
new_obj = collections.OrderedDict(sorted(obj.items()))
for key, value in new_obj.iteritems():
new_obj[key] = _DictToOrderedDict(value)
return new_obj
elif isinstance(obj, list):
return [_DictToOrderedDict(item) for item in obj]
else:
return copy.deepcopy(obj)
def _Filter(obj, properties):
    """Retains the data specified by properties in a JSON-serializable dict.

    Args:
        obj: The JSON-serializable structure to filter.
        properties: A list of parsed properties (component lists).

    Returns:
        The filtered structure, or None when nothing matched.
    """
    # If any property is empty, then the client wants everything, so
    # return obj without filtering it.
    if not all(properties):
        return _DictToOrderedDict(obj)
    # Group the remaining property tails by their first component.
    head_to_tail = collections.OrderedDict()
    for prop in properties:
        if prop:
            head, tail = prop[0], prop[1:]
            if head in head_to_tail:
                head_to_tail[head].append(tail)
            else:
                head_to_tail[head] = [tail]
    # TODO(user): Whenever head_to_tail is constructed, ensure that
    # all keys are either of type _Key or of type _Index|_Slice. On
    # failure, raise an exception saying that "property x conflicts with
    # property y".
    if isinstance(obj, dict):
        filtered_obj = collections.OrderedDict()
        # Fix: .items() replaces the Python-2-only .iteritems() so this
        # works on Python 3 as well.
        for key, value in head_to_tail.items():
            if key in obj:
                # Note that the keys are converted to strings. This is
                # necessary because the keys are of type _Key and we want to
                # avoid leaking implementation details.
                if all(value):
                    res = _Filter(obj[key], value)
                    if res is not None:
                        filtered_obj[str(key)] = res
                else:
                    # An exhausted tail means "take the whole subtree".
                    filtered_obj[str(key)] = _DictToOrderedDict(obj[key])
        if filtered_obj:
            return filtered_obj
        else:
            return None
    elif isinstance(obj, list):
        if not head_to_tail:
            return obj
        indices = set([])
        for key in head_to_tail:
            if isinstance(key, _Index) and key < len(obj):
                indices.add(key)
        slice_tail = head_to_tail.get(_Slice())
        if slice_tail:
            # '[]' applies to every element; explicit indices contribute
            # their extra tails on top of the slice's tails.
            res = []
            for i, item in enumerate(obj):
                if i in indices:
                    properties = head_to_tail[i] + slice_tail
                else:
                    properties = slice_tail
                res.append(_Filter(item, properties))
        else:
            res = [None] * len(obj)
            for index in indices:
                properties = head_to_tail[index]
                if all(properties):
                    res[index] = _Filter(obj[index], properties)
                else:
                    res[index] = _DictToOrderedDict(obj[index])
        # If all items are None, return None, otherwise return a list.
        if [item for item in res if item is not None]:
            return res
        else:
            return None
    else:
        return _DictToOrderedDict(obj)
    return None  # Unreachable; retained from the original for safety.
def _ApplyTransformation(components, func, obj):
    """Applies the given function to the property pointed to by components.

    For example:

        obj = {'x': {'y': 1, 'z': 2}, 'y': [1, 2, 3]}
        _ApplyTransformation(_Parse('x.y'), lambda x: x * 2, obj)

    results in obj becoming:

        {'x': {'y': 2, 'z': 2}, 'y': [1, 2, 3]}

    Args:
        components: A parsed property.
        func: The function to apply.
        obj: A JSON-serializable dict to apply the function to (modified
            in place).
    """
    if isinstance(obj, dict) and isinstance(components[0], _Key):
        val = obj.get(components[0])
        if val is None:
            # Missing key: nothing to transform.
            return
        if len(components) == 1:
            obj[components[0]] = func(val)
        else:
            _ApplyTransformation(components[1:], func, val)
    elif isinstance(obj, list) and isinstance(components[0], _Index):
        idx = components[0]
        if idx > len(obj) - 1:
            # Index out of range: nothing to transform.
            return
        if len(components) == 1:
            obj[idx] = func(obj[idx])
        else:
            _ApplyTransformation(components[1:], func, obj[idx])
    elif isinstance(obj, list) and isinstance(components[0], _Slice):
        # '[]' applies the remainder of the property to every element.
        for i, val in enumerate(obj):
            if len(components) == 1:
                obj[i] = func(val)
            else:
                _ApplyTransformation(components[1:], func, val)
class PropertySelector(object):
    """Extracts and/or transforms values in JSON-serializable dicts.

    For example:

        selector = PropertySelector(
            properties=['x.y', 'y[0]'],
            transformations=[
                ('x.y', lambda x: x + 5),
                ('y[]', lambda x: x * 5),
            ])
        selector.Apply({'x': {'y': 1, 'z': 2}, 'y': [1, 2, 3]})

    returns:

        collections.OrderedDict([
            ('x', collections.OrderedDict([('y', 6)])),
            ('y', [5])
        ])

    Items are extracted in the order requested. Transformations are
    applied in the order they appear.
    """

    def __init__(self, properties=None, transformations=None):
        """Creates a new PropertySelector with the given properties."""
        self._compiled_properties = (
            [_Parse(p) for p in properties] if properties else None)
        self._compiled_transformations = (
            [(_Parse(p), func) for p, func in transformations]
            if transformations else None)
        self.properties = properties
        self.transformations = transformations

    def Apply(self, obj):
        """An OrderedDict resulting from filtering and transforming obj."""
        if self._compiled_properties:
            res = _Filter(obj, self._compiled_properties) or collections.OrderedDict()
        else:
            res = _DictToOrderedDict(obj)
        for compiled_property, func in (self._compiled_transformations or []):
            _ApplyTransformation(compiled_property, func, res)
        return res
class PropertyGetter(object):
    """Extracts a single field from JSON-serializable dicts.

    For example:

        getter = PropertyGetter('x.y')
        getter.Get({'x': {'y': 1, 'z': 2}, 'y': [1, 2, 3]})

    returns 1.
    """

    def __init__(self, p):
        """Creates a new PropertyGetter with the given property."""
        self._compiled_property = _Parse(p)

    def Get(self, obj):
        """Returns the property in obj or None if the property does not exist."""
        value = _GetProperty(obj, self._compiled_property)
        # Deep-copy so callers cannot mutate the source structure.
        return copy.deepcopy(value)
| {
"content_hash": "51239a58469e5c942b2379cb77716e5d",
"timestamp": "",
"source": "github",
"line_count": 386,
"max_line_length": 80,
"avg_line_length": 27.183937823834196,
"alnum_prop": 0.6200324025540837,
"repo_name": "KaranToor/MA450",
"id": "c0e773f1befe884f09e5cd6f70aa8d204999d1ed",
"size": "11088",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/.install/.backup/lib/googlecloudsdk/api_lib/compute/property_selector.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
} |
"""Test the SlimProto Player config flow."""
from unittest.mock import AsyncMock
from homeassistant.components.slimproto.const import DEFAULT_NAME, DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from tests.common import MockConfigEntry
async def test_full_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None:
    """Test the full user configuration flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    # The integration has no user-input step, so a single init call should
    # immediately create a config entry with empty data and set it up once.
    assert result.get("type") == FlowResultType.CREATE_ENTRY
    assert result.get("title") == DEFAULT_NAME
    assert result.get("data") == {}
    assert len(mock_setup_entry.mock_calls) == 1
async def test_already_configured(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test abort if SlimProto Player is already configured."""
    # Pre-register an existing entry; the flow must then refuse a second one.
    mock_config_entry.add_to_hass(hass)
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result.get("type") == FlowResultType.ABORT
    assert result.get("reason") == "single_instance_allowed"
| {
"content_hash": "c21357a46487eefdca00245e47ee44e3",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 88,
"avg_line_length": 33.55263157894737,
"alnum_prop": 0.7231372549019608,
"repo_name": "mezz64/home-assistant",
"id": "15ea5434fc58cf25910491aa90260a3451a490dd",
"size": "1275",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/slimproto/test_config_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52481895"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
"""
Import related utilities and helper functions.
"""
import sys
import traceback
def import_class(import_str):
    """Returns a class from a string including module and class."""
    module_path, _sep, attr_name = import_str.rpartition('.')
    __import__(module_path)
    module = sys.modules[module_path]
    try:
        return getattr(module, attr_name)
    except AttributeError:
        # Module imported fine but the attribute is missing: surface it
        # as an ImportError with the captured traceback for context.
        raise ImportError('Class %s cannot be found (%s)' %
                          (attr_name,
                           traceback.format_exception(*sys.exc_info())))
def import_object(import_str, *args, **kwargs):
    """Import a class and return an instance of it."""
    cls = import_class(import_str)
    return cls(*args, **kwargs)
def import_object_ns(name_space, import_str, *args, **kwargs):
    """Tries to import object from default namespace.

    Imports a class and return an instance of it, first by trying
    to find the class in a default namespace, then failing back to
    a full path if not found in the default namespace.
    """
    qualified_name = "%s.%s" % (name_space, import_str)
    try:
        # Instantiation stays inside the try so an ImportError raised by
        # the constructor also triggers the full-path fallback, matching
        # the original behavior.
        return import_class(qualified_name)(*args, **kwargs)
    except ImportError:
        return import_class(import_str)(*args, **kwargs)
def import_module(import_str):
    """Import a module."""
    # __import__ returns the top-level package, so the fully-qualified
    # module is looked up via sys.modules instead.
    __import__(import_str)
    return sys.modules[import_str]
def import_versioned_module(version, submodule=None):
    """Import glance.v<version>[.<submodule>] and return the module."""
    parts = ['glance.v%s' % version]
    if submodule:
        parts.append(submodule)
    return import_module('.'.join(parts))
def try_import(import_str, default=None):
    """Try to import a module and if it fails return default."""
    try:
        module = import_module(import_str)
    except ImportError:
        return default
    return module
| {
"content_hash": "7d3d4602af5f8b7e272a21e9dbae71fd",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 72,
"avg_line_length": 29.724137931034484,
"alnum_prop": 0.6438515081206496,
"repo_name": "tanglei528/glance",
"id": "b01a7cb46a20ccb56d1d858fc6ed95a5e401a365",
"size": "2361",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "glance/openstack/common/importutils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3193082"
},
{
"name": "Shell",
"bytes": "7168"
}
],
"symlink_target": ""
} |
from libra.repository.mongodb.mongodb import Repository
from libra.repository.mongodb.orm import Collection, Property, PropertyDict
# Names re-exported as the package's public API.
__all__ = (
    "Repository",
    "Collection",
    "Property",
    "PropertyDict"
)
| {
"content_hash": "c00542d5146167bd3a31024c0807b396",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 75,
"avg_line_length": 24.22222222222222,
"alnum_prop": 0.7201834862385321,
"repo_name": "pitomba/libra",
"id": "3c216aa98484acf70a33185e750feaa7da44b6fb",
"size": "218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libra/repository/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1545"
},
{
"name": "Python",
"bytes": "20998"
},
{
"name": "Ruby",
"bytes": "235"
}
],
"symlink_target": ""
} |
"""
Fitmarket
Mali broj ljudi - donori - dijele dnevna mjerenja svoje težine. Iz dnevne težine jednog donora određujemo vrijednosti dviju dionica: - dionica X ima vrijednost koja odgovara težini donora na taj dan. - inverzna dionica ~X ima vrijednost (150 kg - X). Primjetimo da: - kako X raste, ~X pada. - X + ~X = 150 kg Svaki igrač počinje igru sa 10,000 kg raspoloživog novca. Igrač koristi taj novac za trgovanje dionicama. Ukupna vrijednost igrača je zbroj rapoloživog novca i aktualne vrijednosti svih dionica koje posjeduje. Cilj igre je maksimizirati ukupnu vrijednost dobrim predviđanjem kretanja vrijednosti dionica. Na primjer, u prvom danu igrac kupi 125 dionica \"X\" za 80 kg. U drugom danu, dionica naraste na 82 kg. Ako igrac proda sve dionice \"X\", zaradio je 2 kg * 125 = 250 kg! Igra ne dopušta donoru da trguje vlastitim dionicama.
OpenAPI spec version: 1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class StockWithCount(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.

    Model pairing a stock (``stream_name``), its latest value in kg
    (``latest_weight``) and the number of shares held (``count``).
    """
    def __init__(self, stream_name=None, latest_weight=None, count=None):
        """
        StockWithCount - a model defined in Swagger

        :param str stream_name: name of the stock's stream
        :param float latest_weight: latest stock value (kg)
        :param float count: number of shares held
        """
        # Attribute name -> declared swagger type.
        self.swagger_types = {
            'stream_name': 'str',
            'latest_weight': 'float',
            'count': 'float'
        }
        # Attribute name -> JSON key in the API definition.
        self.attribute_map = {
            'stream_name': 'stream_name',
            'latest_weight': 'latest_weight',
            'count': 'count'
        }
        # NOTE: direct assignment bypasses the property setters, so None is
        # accepted at construction time even though the setters reject it.
        self._stream_name = stream_name
        self._latest_weight = latest_weight
        self._count = count
    @property
    def stream_name(self):
        """
        Gets the stream_name of this StockWithCount.

        :return: The stream_name of this StockWithCount.
        :rtype: str
        """
        return self._stream_name
    @stream_name.setter
    def stream_name(self, stream_name):
        """
        Sets the stream_name of this StockWithCount.

        :param stream_name: The stream_name of this StockWithCount.
        :type: str
        :raises ValueError: if stream_name is None
        """
        if stream_name is None:
            raise ValueError("Invalid value for `stream_name`, must not be `None`")
        self._stream_name = stream_name
    @property
    def latest_weight(self):
        """
        Gets the latest_weight of this StockWithCount.

        :return: The latest_weight of this StockWithCount.
        :rtype: float
        """
        return self._latest_weight
    @latest_weight.setter
    def latest_weight(self, latest_weight):
        """
        Sets the latest_weight of this StockWithCount.

        :param latest_weight: The latest_weight of this StockWithCount.
        :type: float
        :raises ValueError: if latest_weight is None
        """
        if latest_weight is None:
            raise ValueError("Invalid value for `latest_weight`, must not be `None`")
        self._latest_weight = latest_weight
    @property
    def count(self):
        """
        Gets the count of this StockWithCount.

        :return: The count of this StockWithCount.
        :rtype: float
        """
        return self._count
    @count.setter
    def count(self, count):
        """
        Sets the count of this StockWithCount.

        :param count: The count of this StockWithCount.
        :type: float
        :raises ValueError: if count is None
        """
        if count is None:
            raise ValueError("Invalid value for `count`, must not be `None`")
        self._count = count
    def to_dict(self):
        """
        Returns the model properties as a dict, recursing into nested
        models, lists and dicts.
        """
        result = {}
        # dict.items() works on both Python 2 and 3; previously this went
        # through six.iteritems for the same iteration.
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal.

        Fix: comparing against a non-StockWithCount (e.g. ``model == 5``)
        used to raise AttributeError on ``other.__dict__``; returning
        NotImplemented lets Python fall back to the reflected comparison
        and finally identity, so such comparisons are simply unequal.
        """
        if not isinstance(other, StockWithCount):
            return NotImplemented
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| {
"content_hash": "46d2bf118f3b3bb49f318a8676bc104e",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 850,
"avg_line_length": 31.774725274725274,
"alnum_prop": 0.5868926162891233,
"repo_name": "brahle/fitmarket-python-api",
"id": "41be6cb8c6d50df36396c07326e511bfe3892086",
"size": "5812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fitmarket_api/models/stock_with_count.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "110321"
},
{
"name": "Shell",
"bytes": "1630"
}
],
"symlink_target": ""
} |
import unittest
import mock
from django.forms import ValidationError
from ..forms import ResetForm
from .. import strings
class ResetFormTestCase(unittest.TestCase):
    """Unit tests for ResetForm.clean(): the success path and the
    unknown-email validation failure."""
    @mock.patch('authentication.forms.forms.Form.clean')
    @mock.patch('authentication.forms.User')
    def test_clean_should_return_super(self, user_class, clean):
        # setup: exactly one user matches the submitted address
        form = ResetForm()
        address = 'an@example.com'
        form.cleaned_data = dict(email=address)
        user_class.objects.filter.return_value.count.return_value = 1
        # action
        result = form.clean()
        # assert: lookup filtered on username, super().clean() value returned
        self.assertDictEqual(dict(username=address),
                             user_class.objects.filter.call_args[1])
        self.assertEqual(id(clean.return_value), id(result))
    @mock.patch('authentication.forms.forms')
    @mock.patch('authentication.forms.User')
    def test_clean_should_raise_validation_error_when_user_not_found(
            self, user_class, forms_module):
        # setup: no user matches the submitted address
        form = ResetForm()
        address = 'an@example.com'
        form.cleaned_data = dict(email=address)
        user_class.objects.filter.return_value.count.return_value = 0
        forms_module.ValidationError = ValidationError
        # action: clean() must raise for an unknown email
        with self.assertRaises(ValidationError) as cm:
            form.clean()
        # assert: correct lookup and the expected error message
        self.assertDictEqual(dict(username=address),
                             user_class.objects.filter.call_args[1])
        self.assertEqual(strings.RESET_FORM_EMAIL_DOESNT_EXIST % address,
                         cm.exception.message)
| {
"content_hash": "3b403e0ae83668577998270e9fef1c78",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 71,
"avg_line_length": 33.06382978723404,
"alnum_prop": 0.6550836550836551,
"repo_name": "hellhovnd/dentexchange",
"id": "8f67df1fab33aaa4cbd5e4367502ec785427f057",
"size": "1577",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dentexchange/apps/authentication/tests/test_reset_form.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6611"
},
{
"name": "HTML",
"bytes": "145589"
},
{
"name": "JavaScript",
"bytes": "23966"
},
{
"name": "Python",
"bytes": "563289"
},
{
"name": "Shell",
"bytes": "2274"
}
],
"symlink_target": ""
} |
import os
import pytest
from engines import BingEngine, DuckgoEngine, FarooEngine, YandexEngine
from engines.base import EngineBase, ResultItemBase
# Engine instances shared by every test below. API credentials are read
# from the environment, so these variables must be set before the test
# session starts (missing ones raise KeyError at import time).
bing = BingEngine(api_key=os.environ["BING_API_KEY"])
duckduckgo = DuckgoEngine()
faroo = FarooEngine(api_key=os.environ["FAROO_API_KEY"])
yandex = YandexEngine(
    api_key=os.environ["YANDEX_API_KEY"],
    username=os.environ["YANDEX_USER_NAME"]
)
@pytest.fixture(scope="module")
def engine():
    """Module-scoped Aggregator instance shared by every test here."""
    from aggregator import Aggregator
    return Aggregator()
@pytest.fixture(scope="module")
def results(engine):
    # Live search results via the shared aggregator; performs network I/O.
    return engine.search("python")
def test_add(engine):
    # Registering two distinct engines grows the registry to two entries.
    engine.add_engine(bing)
    engine.add_engine(duckduckgo)
    assert len(engine._engines) == 2
    engine._engines = {}  # reset the shared module-scoped fixture state
def test_adds(engine):
    # add_engines() registers a whole list of engines at once.
    engine.add_engines([duckduckgo, yandex, faroo])
    assert len(engine._engines) == 3
    engine._engines = {}  # reset the shared module-scoped fixture state
def test_add_fail(engine):
    # Registering the same engine twice must raise.
    engine.add_engine(faroo)
    with pytest.raises(Exception):
        engine.add_engine(faroo)
    engine._engines = {}  # reset the shared module-scoped fixture state
def test_remove_engine(engine):
    # remove_engine() takes the engine's name, not the instance.
    engine.add_engine(faroo)
    engine.remove_engine('faroo')
    assert len(engine._engines) == 0
def test_get_a_single_engine(engine):
    # The aggregator supports name-based indexing; items are EngineBase.
    engine.add_engine(faroo)
    assert isinstance(engine['faroo'], EngineBase)
    engine._engines = {}  # reset the shared module-scoped fixture state
def test_set_weights(engine):
    """set_weights() must assign each named engine its configured weight."""
    engine.add_engines([duckduckgo, yandex, faroo])
    expected = {
        "duckduckgo": 0.3,
        "yandex": 0.9,
        "faroo": 0.1
    }
    engine.set_weights(expected)
    for engine_name, weight in expected.items():
        assert engine[engine_name].weight == weight
engine._engines = {} | {
"content_hash": "812c62e5efde321ab888cc8d963cceaa",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 71,
"avg_line_length": 27.241935483870968,
"alnum_prop": 0.670219064535228,
"repo_name": "axknightroad/metasearch",
"id": "8621b0c32aef2d024f54cf4e013110f303da56e7",
"size": "1689",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_aggregator.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
"""\
Update files with AWS metadata
"""
import json
import logging
import transaction
from pyramid.paster import get_app
from pyramid.threadlocal import manager
from pyramid.testing import DummyRequest
EPILOG = __doc__
logger = logging.getLogger(__name__)
def run(app, files):
    """Update every item in the /file collection from *files*.

    *files* maps uuid (str) -> {'file_size': ..., 's3_file_name': ...}.
    Items found in *files* get their file_size refreshed and an
    ``external`` sheet pointing at the encode-files S3 bucket; items
    missing from *files* are still re-saved with upgraded properties.
    """
    root = app.root_factory(app)
    collection = root['file']
    # Push a dummy request onto pyramid's threadlocal stack so code that
    # relies on the current request works during item.update().
    dummy_request = DummyRequest(root=root, registry=app.registry, _stats={})
    manager.push({'request': dummy_request, 'registry': app.registry})
    for i, uuid in enumerate(collection):
        item = root.get_by_uuid(uuid)
        dummy_request.context = item
        properties = item.upgrade_properties()
        sheets = None
        value = files.get(str(uuid))
        if value is not None:
            properties['file_size'] = value['file_size']
            sheets = {
                'external': {
                    'service': 's3',
                    'bucket': 'encode-files',
                    'key': value['s3_file_name'],
                },
            }
        item.update(properties, sheets=sheets)
        # Progress logging every 100 items.
        if (i + 1) % 100 == 0:
            logger.info('Updated %d', i + 1)
def main():
    """Parse CLI args, load the Pyramid app, and migrate file metadata.

    The transaction is committed only on a clean run without --abort;
    any exception (or --abort) rolls it back.
    """
    import argparse
    parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
        description="Migrate files to AWS", epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument('--app-name', help="Pyramid app name in configfile")
    parser.add_argument('--abort', action='store_true', help="Rollback transaction")
    parser.add_argument('files_processed', type=argparse.FileType('rb'), help="path to json file")
    parser.add_argument('config_uri', help="path to configfile")
    args = parser.parse_args()
    logging.basicConfig()
    app = get_app(args.config_uri, args.app_name)
    # Loading app will have configured from config file. Reconfigure here:
    logging.getLogger('encoded').setLevel(logging.DEBUG)
    files_processed = json.load(args.files_processed)
    # Keep only entries that were processed cleanly upstream.
    good_files = {v['uuid']: v for v in files_processed
                  if 'errors' not in v and 'blacklisted' not in v}
    raised = False
    try:
        run(app, good_files)
    except Exception:
        # Remember the failure so the finally block rolls back, then re-raise.
        raised = True
        raise
    finally:
        if raised or args.abort:
            transaction.abort()
            logger.info('Rolled back.')
        else:
            transaction.commit()
# Script entry point.
if __name__ == '__main__':
    main()
| {
"content_hash": "97849e83f6d51a28d8368df96a8aaee4",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 115,
"avg_line_length": 31.76923076923077,
"alnum_prop": 0.6146085552865214,
"repo_name": "4dn-dcic/fourfront",
"id": "8e6b658db92bda02ee4c9b05f394acb09eb62dc7",
"size": "2478",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/encoded/commands/migrate_files_aws.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Common Workflow Language",
"bytes": "15818"
},
{
"name": "Dockerfile",
"bytes": "6312"
},
{
"name": "HTML",
"bytes": "11048"
},
{
"name": "JavaScript",
"bytes": "2106661"
},
{
"name": "Makefile",
"bytes": "9079"
},
{
"name": "PLpgSQL",
"bytes": "12067"
},
{
"name": "Python",
"bytes": "1758496"
},
{
"name": "SCSS",
"bytes": "224522"
},
{
"name": "Shell",
"bytes": "19014"
}
],
"symlink_target": ""
} |
import discord
from discord.ext import commands
from cogs.utils import checks
from __main__ import set_cog
from .utils.dataIO import dataIO
from .utils.chat_formatting import pagify, box
import importlib
import traceback
import logging
import asyncio
import threading
import datetime
import glob
import os
import aiohttp
log = logging.getLogger("chronoxia.owner")
class CogNotFoundError(Exception):
    """Raised when the requested cog file does not exist in cogs/."""
    pass
class CogLoadError(Exception):
    """Raised when a cog file exists but fails to import or load."""
    pass
class NoSetupError(CogLoadError):
    """Raised when a cog module lacks a setup() entry point."""
    pass
class CogUnloadError(Exception):
    """Raised when a cog cannot be unloaded safely."""
    pass
class OwnerUnloadWithoutReloadError(CogUnloadError):
    """Raised when unloading the owner cog outside of a reload."""
    pass
class Owner:
    """All owner-only commands that relate to debug bot operations.
    """
    def __init__(self, bot):
        # Bot instance this cog is attached to.
        self.bot = bot
        # Guards against concurrent "set owner" console prompts.
        self.setowner_lock = False
        # Persisted list of globally disabled command names.
        self.file_path = "data/chronoxia/disabled_commands.json"
        self.disabled_commands = dataIO.load_json(self.file_path)
        # Dedicated HTTP session (used e.g. to download a new avatar).
        self.session = aiohttp.ClientSession(loop=self.bot.loop)
    def __unload(self):
        # Called by discord.py when the cog is unloaded; close the aiohttp
        # session so its connector is not leaked.
        self.session.close()
    @commands.command()
    @checks.is_owner()
    async def load(self, *, cog_name: str):
        """Loads a cog
        Example: load mod"""
        # Accept both "mod" and "cogs.mod" forms.
        module = cog_name.strip()
        if "cogs." not in module:
            module = "cogs." + module
        try:
            self._load_cog(module)
        except CogNotFoundError:
            await self.bot.say("That cog could not be found.")
        except CogLoadError as e:
            log.exception(e)
            traceback.print_exc()
            await self.bot.say("There was an issue loading the cog. Check"
                               " your console or logs for more information.")
        except Exception as e:
            log.exception(e)
            traceback.print_exc()
            await self.bot.say('Cog was found and possibly loaded but '
                               'something went wrong. Check your console '
                               'or logs for more information.')
        else:
            # Persist the cog as enabled and re-apply the disabled-command
            # list, which loading a cog would otherwise reset.
            set_cog(module, True)
            await self.disable_commands()
            await self.bot.say("The cog has been loaded.")
    @commands.group(invoke_without_command=True)
    @checks.is_owner()
    async def unload(self, *, cog_name: str):
        """Unloads a cog
        Example: unload mod"""
        module = cog_name.strip()
        if "cogs." not in module:
            module = "cogs." + module
        # Only disable autoloading if the cog file actually exists on disk.
        if not self._does_cogfile_exist(module):
            await self.bot.say("That cog file doesn't exist. I will not"
                               " turn off autoloading at start just in case"
                               " this isn't supposed to happen.")
        else:
            set_cog(module, False)
        try: # No matter what we should try to unload it
            self._unload_cog(module)
        except OwnerUnloadWithoutReloadError:
            await self.bot.say("I cannot allow you to unload the Owner plugin"
                               " unless you are in the process of reloading.")
        except CogUnloadError as e:
            log.exception(e)
            traceback.print_exc()
            await self.bot.say('Unable to safely unload that cog.')
        else:
            await self.bot.say("The cog has been unloaded.")
    @unload.command(name="all")
    @checks.is_owner()
    async def unload_all(self):
        """Unloads all cogs"""
        cogs = self._list_cogs()
        # Track cogs that failed to unload so they can be reported.
        still_loaded = []
        for cog in cogs:
            set_cog(cog, False)
            try:
                self._unload_cog(cog)
            except OwnerUnloadWithoutReloadError:
                # The owner cog deliberately refuses to unload here.
                pass
            except CogUnloadError as e:
                log.exception(e)
                traceback.print_exc()
                still_loaded.append(cog)
        if still_loaded:
            still_loaded = ", ".join(still_loaded)
            await self.bot.say("I was unable to unload some cogs: "
                               "{}".format(still_loaded))
        else:
            await self.bot.say("All cogs are now unloaded.")
    @checks.is_owner()
    @commands.command(name="reload")
    async def _reload(self, *, cog_name: str):
        """Reloads a cog
        Example: reload audio"""
        module = cog_name.strip()
        if "cogs." not in module:
            module = "cogs." + module
        try:
            # reloading=True allows even the owner cog to be unloaded.
            self._unload_cog(module, reloading=True)
        except:
            # The cog may simply not be loaded yet; that's fine.
            pass
        try:
            self._load_cog(module)
        except CogNotFoundError:
            await self.bot.say("That cog cannot be found.")
        except NoSetupError:
            await self.bot.say("That cog does not have a setup function.")
        except CogLoadError as e:
            log.exception(e)
            traceback.print_exc()
            await self.bot.say("That cog could not be loaded. Check your"
                               " console or logs for more information.")
        else:
            # Persist as enabled and re-apply the disabled-command list.
            set_cog(module, True)
            await self.disable_commands()
            await self.bot.say("The cog has been reloaded.")
    @commands.command(name="cogs")
    @checks.is_owner()
    async def _show_cogs(self):
        """Shows loaded/unloaded cogs"""
        # This function assumes that all cogs are in the cogs folder,
        # which is currently true.
        # Extracting filename from __module__ Example: cogs.owner
        loaded = [c.__module__.split(".")[1] for c in self.bot.cogs.values()]
        # What's in the folder but not loaded is unloaded
        unloaded = [c.split(".")[1] for c in self._list_cogs()
                    if c.split(".")[1] not in loaded]
        if not unloaded:
            unloaded = ["None"]
        # diff-style code block: "+" lines render green, "-" lines red.
        msg = ("+ Loaded\n"
               "{}\n\n"
               "- Unloaded\n"
               "{}"
               "".format(", ".join(sorted(loaded)),
                         ", ".join(sorted(unloaded)))
               )
        for page in pagify(msg, [" "], shorten_by=16):
            await self.bot.say(box(page.lstrip(" "), lang="diff"))
    @commands.command(pass_context=True, hidden=True)
    @checks.is_owner()
    async def debug(self, ctx, *, code):
        """Evaluates code"""
        # Predicate for wait_for_message: only "more" continues paging.
        def check(m):
            if m.content.strip().lower() == "more":
                return True
        author = ctx.message.author
        channel = ctx.message.channel
        code = code.strip('` ')
        result = None
        # Expose convenient globals to the evaluated expression.
        global_vars = globals().copy()
        global_vars['bot'] = self.bot
        global_vars['ctx'] = ctx
        global_vars['message'] = ctx.message
        global_vars['author'] = ctx.message.author
        global_vars['channel'] = ctx.message.channel
        global_vars['server'] = ctx.message.server
        try:
            # SECURITY: arbitrary eval — acceptable only because this
            # command is hidden and owner-restricted.
            result = eval(code, global_vars, locals())
        except Exception as e:
            await self.bot.say(box('{}: {}'.format(type(e).__name__, str(e)),
                                   lang="py"))
            return
        if asyncio.iscoroutine(result):
            result = await result
        result = str(result)
        if not ctx.message.channel.is_private:
            # Redact credentials from output posted to public channels.
            censor = (self.bot.settings.email,
                      self.bot.settings.password,
                      self.bot.settings.token)
            r = "[EXPUNGED]"
            for w in censor:
                if w is None or w == "":
                    continue
                result = result.replace(w, r)
                result = result.replace(w.lower(), r)
                result = result.replace(w.upper(), r)
        result = list(pagify(result, shorten_by=16))
        for i, page in enumerate(result):
            # Pause every 4 pages and wait for "more" before continuing.
            if i != 0 and i % 4 == 0:
                last = await self.bot.say("There are still {} messages. "
                                          "Type `more` to continue."
                                          "".format(len(result) - (i+1)))
                msg = await self.bot.wait_for_message(author=author,
                                                      channel=channel,
                                                      check=check,
                                                      timeout=10)
                if msg is None:
                    try:
                        await self.bot.delete_message(last)
                    except:
                        pass
                    finally:
                        break
            await self.bot.say(box(page, lang="py"))
    @commands.group(name="set", pass_context=True)
    async def _set(self, ctx):
        """Changes Chronoxia's global settings."""
        # Group command: with no subcommand, just show the help text.
        if ctx.invoked_subcommand is None:
            await self.bot.send_cmd_help(ctx)
            return
    @_set.command(pass_context=True)
    async def owner(self, ctx):
        """Sets owner"""
        if self.bot.settings.no_prompt is True:
            await self.bot.say("Console interaction is disabled. Start Chronoxia "
                               "without the `--no-prompt` flag to use this "
                               "command.")
            return
        if self.setowner_lock:
            await self.bot.say("A set owner command is already pending.")
            return
        if self.bot.settings.owner is not None:
            await self.bot.say(
                "The owner is already set. Remember that setting the owner "
                "to someone else other than who hosts the bot has security "
                "repercussions and is *NOT recommended*. Proceed at your own risk."
            )
            await asyncio.sleep(3)
        await self.bot.say("Confirm in the console that you're the owner.")
        self.setowner_lock = True
        # Console input() blocks, so the confirmation runs in a thread to
        # keep the event loop responsive.
        t = threading.Thread(target=self._wait_for_answer,
                             args=(ctx.message.author,))
        t.start()
    @_set.command()
    @checks.is_owner()
    async def defaultmodrole(self, *, role_name: str):
        """Sets the default mod role name
        This is used if a server-specific role is not set"""
        # Persist immediately so the change survives a restart.
        self.bot.settings.default_mod = role_name
        self.bot.settings.save_settings()
        await self.bot.say("The default mod role name has been set.")
    @_set.command()
    @checks.is_owner()
    async def defaultadminrole(self, *, role_name: str):
        """Sets the default admin role name
        This is used if a server-specific role is not set"""
        # Persist immediately so the change survives a restart.
        self.bot.settings.default_admin = role_name
        self.bot.settings.save_settings()
        await self.bot.say("The default admin role name has been set.")
    @_set.command(pass_context=True)
    @checks.is_owner()
    async def prefix(self, ctx, *prefixes):
        """Sets chrono's global prefixes
        Accepts multiple prefixes separated by a space. Enclose in double
        quotes if a prefix contains spaces.
        Example: set prefix ! $ ? "two words" """
        if prefixes == ():
            await self.bot.send_cmd_help(ctx)
            return
        # Longest-first ordering so longer prefixes match before their
        # own substrings.
        self.bot.settings.prefixes = sorted(prefixes, reverse=True)
        self.bot.settings.save_settings()
        log.debug("Setting global prefixes to:\n\t{}"
                  "".format(self.bot.settings.prefixes))
        p = "prefixes" if len(prefixes) > 1 else "prefix"
        await self.bot.say("Global {} set".format(p))
    @_set.command(pass_context=True, no_pm=True)
    @checks.serverowner_or_permissions(administrator=True)
    async def serverprefix(self, ctx, *prefixes):
        """Sets chrono's prefixes for this server
        Accepts multiple prefixes separated by a space. Enclose in double
        quotes if a prefix contains spaces.
        Example: set serverprefix ! $ ? "two words"
        Issuing this command with no parameters will reset the server
        prefixes and the global ones will be used instead."""
        server = ctx.message.server
        if prefixes == ():
            # No arguments: clear the server override, fall back to globals.
            self.bot.settings.set_server_prefixes(server, [])
            self.bot.settings.save_settings()
            current_p = ", ".join(self.bot.settings.prefixes)
            await self.bot.say("Server prefixes reset. Current prefixes: "
                               "`{}`".format(current_p))
            return
        # Longest-first so longer prefixes match before their substrings.
        prefixes = sorted(prefixes, reverse=True)
        self.bot.settings.set_server_prefixes(server, prefixes)
        self.bot.settings.save_settings()
        log.debug("Setting server's {} prefixes to:\n\t{}"
                  "".format(server.id, self.bot.settings.prefixes))
        p = "Prefixes" if len(prefixes) > 1 else "Prefix"
        await self.bot.say("{} set for this server.\n"
                           "To go back to the global prefixes, do"
                           " `{}set serverprefix` "
                           "".format(p, prefixes[0]))
    @_set.command(pass_context=True)
    @checks.is_owner()
    async def name(self, ctx, *, name):
        """Sets chrono's name"""
        name = name.strip()
        if name != "":
            try:
                # Discord rate-limits username changes (see error message).
                await self.bot.edit_profile(self.bot.settings.password,
                                            username=name)
            except:
                await self.bot.say("Failed to change name. Remember that you"
                                   " can only do it up to 2 times an hour."
                                   "Use nicknames if you need frequent "
                                   "changes. {}set nickname"
                                   "".format(ctx.prefix))
            else:
                await self.bot.say("Done.")
        else:
            await self.bot.send_cmd_help(ctx)
    @_set.command(pass_context=True, no_pm=True)
    @checks.is_owner()
    async def nickname(self, ctx, *, nickname=""):
        """Sets chrono's nickname
        Leaving this empty will remove it."""
        nickname = nickname.strip()
        if nickname == "":
            # None clears the nickname on this server.
            nickname = None
        try:
            await self.bot.change_nickname(ctx.message.server.me, nickname)
            await self.bot.say("Done.")
        except discord.Forbidden:
            await self.bot.say("I cannot do that, I lack the "
                               "\"Change Nickname\" permission.")
    @_set.command(pass_context=True)
    @checks.is_owner()
    async def game(self, ctx, *, game=None):
        """Sets chrono's playing status
        Leaving this empty will clear it."""
        server = ctx.message.server
        # Preserve the current online/idle/dnd status while changing the game.
        current_status = server.me.status if server is not None else None
        if game:
            game = game.strip()
            await self.bot.change_presence(game=discord.Game(name=game),
                                           status=current_status)
            log.debug('Status set to "{}" by owner'.format(game))
        else:
            await self.bot.change_presence(game=None, status=current_status)
            log.debug('status cleared by owner')
        await self.bot.say("Done.")
    @_set.command(pass_context=True)
    @checks.is_owner()
    async def status(self, ctx, *, status=None):
        """Sets chrono's status
        Statuses:
            online
            idle
            dnd
            invisible"""
        # Allowed status names -> discord.Status values.
        statuses = {
                    "online"    : discord.Status.online,
                    "idle"      : discord.Status.idle,
                    "dnd"       : discord.Status.dnd,
                    "invisible" : discord.Status.invisible
                   }
        server = ctx.message.server
        # Preserve the current playing status while changing presence.
        current_game = server.me.game if server is not None else None
        if status is None:
            await self.bot.change_presence(status=discord.Status.online,
                                           game=current_game)
            await self.bot.say("Status reset.")
        else:
            status = statuses.get(status.lower(), None)
            if status:
                await self.bot.change_presence(status=status,
                                               game=current_game)
                await self.bot.say("Status changed.")
            else:
                # Unrecognized status name: show the help text.
                await self.bot.send_cmd_help(ctx)
    @_set.command(pass_context=True)
    @checks.is_owner()
    async def stream(self, ctx, streamer=None, *, stream_title=None):
        """Sets chrono's streaming status
        Leaving both streamer and stream_title empty will clear it."""
        server = ctx.message.server
        # Preserve the current online/idle/dnd status.
        current_status = server.me.status if server is not None else None
        if stream_title:
            stream_title = stream_title.strip()
            # Accept a bare channel name and expand it to a full URL.
            if "twitch.tv/" not in streamer:
                streamer = "https://www.twitch.tv/" + streamer
            # type=1 marks the presence as "Streaming".
            game = discord.Game(type=1, url=streamer, name=stream_title)
            await self.bot.change_presence(game=game, status=current_status)
            log.debug('Owner has set streaming status and url to "{}" and {}'.format(stream_title, streamer))
        elif streamer is not None:
            # A streamer without a title is incomplete input.
            await self.bot.send_cmd_help(ctx)
            return
        else:
            await self.bot.change_presence(game=None, status=current_status)
            log.debug('stream cleared by owner')
        await self.bot.say("Done.")
    @_set.command()
    @checks.is_owner()
    async def avatar(self, url):
        """Sets chrono's avatar"""
        try:
            # Download the image bytes with the cog's shared session.
            async with self.session.get(url) as r:
                data = await r.read()
            await self.bot.edit_profile(self.bot.settings.password, avatar=data)
            await self.bot.say("Done.")
            log.debug("changed avatar")
        except Exception as e:
            await self.bot.say("Error, check your console or logs for "
                               "more information.")
            log.exception(e)
            traceback.print_exc()
    @_set.command(name="token")
    @checks.is_owner()
    async def _token(self, token):
        """Sets chrono's login token"""
        # Sanity check only; real Discord tokens are longer than 50 chars.
        if len(token) < 50:
            await self.bot.say("Invalid token.")
        else:
            self.bot.settings.token = token
            self.bot.settings.save_settings()
            # Takes effect on the next login.
            await self.bot.say("Token set. Restart me.")
            log.debug("Token changed.")
    @commands.command()
    @checks.is_owner()
    async def shutdown(self, silently : bool=False):
        """Shuts down chrono"""
        wave = "\N{WAVING HAND SIGN}"
        skin = "\N{EMOJI MODIFIER FITZPATRICK TYPE-3}"
        try: # We don't want missing perms to stop our shutdown
            if not silently:
                await self.bot.say("Shutting down... " + wave + skin)
        except:
            pass
        await self.bot.shutdown()
    @commands.command()
    @checks.is_owner()
    async def restart(self, silently : bool=False):
        """Attempts to restart chrono
        Makes chrono quit with exit code 26
        The restart is not guaranteed: it must be dealt
        with by the process manager in use"""
        try: # Missing send permissions must not block the restart.
            if not silently:
                await self.bot.say("Restarting...")
        except:
            pass
        await self.bot.shutdown(restart=True)
    @commands.group(name="command", pass_context=True)
    @checks.is_owner()
    async def command_disabler(self, ctx):
        """Disables/enables commands
        With no subcommands returns the disabled commands list"""
        if ctx.invoked_subcommand is None:
            await self.bot.send_cmd_help(ctx)
            if self.disabled_commands:
                # DM the disabled list inside an ```xl code block.
                msg = "Disabled commands:\n```xl\n"
                for cmd in self.disabled_commands:
                    msg += "{}, ".format(cmd)
                msg = msg.strip(", ")
                await self.bot.whisper("{}```".format(msg))
    @command_disabler.command()
    async def disable(self, *, command):
        """Disables commands/subcommands"""
        comm_obj = await self.get_command(command)
        # get_command() uses KeyError as a "not found" sentinel and False
        # for owner-restricted commands.
        if comm_obj is KeyError:
            await self.bot.say("That command doesn't seem to exist.")
        elif comm_obj is False:
            await self.bot.say("You cannot disable owner restricted commands.")
        else:
            comm_obj.enabled = False
            comm_obj.hidden = True
            # Persist so the command stays disabled across restarts.
            self.disabled_commands.append(command)
            dataIO.save_json(self.file_path, self.disabled_commands)
            await self.bot.say("Command has been disabled.")
    @command_disabler.command()
    async def enable(self, *, command):
        """Enables commands/subcommands"""
        if command in self.disabled_commands:
            # Persist the removal first, then try to re-enable live.
            self.disabled_commands.remove(command)
            dataIO.save_json(self.file_path, self.disabled_commands)
            await self.bot.say("Command enabled.")
        else:
            await self.bot.say("That command is not disabled.")
            return
        try:
            comm_obj = await self.get_command(command)
            comm_obj.enabled = True
            comm_obj.hidden = False
        except: # In case it was in the disabled list but not currently loaded
            pass # No point in even checking what returns
    async def get_command(self, command):
        """Resolve a space-separated (sub)command path to its object.

        Returns the command object on success, the KeyError class as a
        "not found" sentinel, or False for owner-restricted commands.
        """
        command = command.split()
        try:
            comm_obj = self.bot.commands[command[0]]
            if len(command) > 1:
                # Walk down the subcommand chain.
                command.pop(0)
                for cmd in command:
                    comm_obj = comm_obj.commands[cmd]
        except KeyError:
            return KeyError
        for check in comm_obj.checks:
            if hasattr(check, "__name__") and check.__name__ == "is_owner_check":
                return False
        return comm_obj
    async def disable_commands(self): # runs at boot
        # Re-apply the persisted disabled-command list; commands that are
        # not currently loaded resolve to a sentinel and are skipped.
        for cmd in self.disabled_commands:
            cmd_obj = await self.get_command(cmd)
            try:
                cmd_obj.enabled = False
                cmd_obj.hidden = True
            except:
                pass
    @commands.command()
    @checks.is_owner()
    async def join(self, invite_url: discord.Invite=None):
        """Joins new server"""
        # Bot accounts cannot accept invites; they must be added via OAuth2.
        if hasattr(self.bot.user, 'bot') and self.bot.user.bot is True:
            # Check to ensure they're using updated discord.py
            msg = ("I have a **BOT** tag, so I must be invited with an OAuth2"
                   " link:\nFor more information: "
                   "https://lsomers984.github.io/"
                   "chrono-Docs/chrono_guide_bot_accounts/#bot-invites")
            await self.bot.say(msg)
            if hasattr(self.bot, 'oauth_url'):
                await self.bot.whisper("Here's my OAUTH2 link:\n{}".format(
                    self.bot.oauth_url))
            return
        if invite_url is None:
            await self.bot.say("I need a Discord Invite link for the "
                               "server you want me to join.")
            return
        try:
            await self.bot.accept_invite(invite_url)
            await self.bot.say("Server joined.")
            log.debug("We just joined {}".format(invite_url))
        except discord.NotFound:
            await self.bot.say("The invite was invalid or expired.")
        except discord.HTTPException:
            await self.bot.say("I wasn't able to accept the invite."
                               " Try again.")
    @commands.command(pass_context=True, no_pm=True)
    @checks.is_owner()
    async def leave(self, ctx):
        """Leaves server"""
        message = ctx.message
        # Require an explicit "yes" from the invoker before leaving.
        await self.bot.say("Are you sure you want me to leave this server?"
                           " Type yes to confirm.")
        response = await self.bot.wait_for_message(author=message.author)
        if response.content.lower().strip() == "yes":
            await self.bot.say("Alright. Bye :wave:")
            log.debug('Leaving "{}"'.format(message.server.name))
            await self.bot.leave_server(message.server)
        else:
            await self.bot.say("Ok I'll stay here then.")
    @commands.command(pass_context=True)
    @checks.is_owner()
    async def servers(self, ctx):
        """Lists and allows to leave servers"""
        owner = ctx.message.author
        servers = sorted(list(self.bot.servers),
                         key=lambda s: s.name.lower())
        msg = ""
        for i, server in enumerate(servers):
            msg += "{}: {}\n".format(i, server.name)
        msg += "\nTo leave a server just type its number."
        for page in pagify(msg, ['\n']):
            await self.bot.say(page)
        # Keep prompting until a valid number arrives or the wait times out
        # (wait_for_message returns None on timeout, ending the loop).
        while msg is not None:
            msg = await self.bot.wait_for_message(author=owner, timeout=15)
            try:
                msg = int(msg.content)
                await self.leave_confirmation(servers[msg], owner, ctx)
                break
            except (IndexError, ValueError, AttributeError):
                # Out-of-range number, non-numeric reply, or timeout (None).
                pass
    async def leave_confirmation(self, server, owner, ctx):
        """Ask *owner* to confirm leaving *server*; leave on "yes"/"y"."""
        await self.bot.say("Are you sure you want me "
                           "to leave {}? (yes/no)".format(server.name))
        msg = await self.bot.wait_for_message(author=owner, timeout=15)
        if msg is None:
            # Timed out waiting for an answer.
            await self.bot.say("I guess not.")
        elif msg.content.lower().strip() in ("yes", "y"):
            await self.bot.leave_server(server)
            # Only confirm if we didn't just leave the channel we'd reply in.
            if server != ctx.message.server:
                await self.bot.say("Done.")
        else:
            await self.bot.say("Alright then.")
    @commands.command(pass_context=True)
    @commands.cooldown(1, 60, commands.BucketType.user)
    async def contact(self, ctx, *, message : str):
        """Sends message to the owner"""
        if self.bot.settings.owner is None:
            await self.bot.say("I have no owner set.")
            return
        owner = discord.utils.get(self.bot.get_all_members(),
                                  id=self.bot.settings.owner)
        author = ctx.message.author
        # Note where the message came from so the owner has context.
        if ctx.message.channel.is_private is False:
            server = ctx.message.server
            source = ", server **{}** ({})".format(server.name, server.id)
        else:
            source = ", direct message"
        sender = "From **{}** ({}){}:\n\n".format(author, author.id, source)
        message = sender + message
        try:
            await self.bot.send_message(owner, message)
        except discord.errors.InvalidArgument:
            # owner resolved to None (not in any shared server).
            await self.bot.say("I cannot send your message, I'm unable to find"
                               " my owner... *sigh*")
        except discord.errors.HTTPException:
            await self.bot.say("Your message is too long.")
        except:
            await self.bot.say("I'm unable to deliver your message. Sorry.")
        else:
            await self.bot.say("Your message has been sent.")
    @commands.command()
    async def info(self):
        """Shows info about chrono"""
        author_repo = "https://github.com/lsomers984"
        chrono_repo = author_repo + "/Chronoxia---Final-Year-Project"
        dpy_repo = "https://github.com/Rapptz/discord.py"
        python_url = "https://www.python.org/"
        since = datetime.datetime(2016, 1, 2, 0, 0)
        days_since = (datetime.datetime.utcnow() - since).days
        # Markdown links for the embed fields.
        dpy_version = "[{}]({})".format(discord.__version__, dpy_repo)
        py_version = "[{}.{}.{}]({})".format(*os.sys.version_info[:3],
                                             python_url)
        owner_set = self.bot.settings.owner is not None
        owner = self.bot.settings.owner if owner_set else None
        if owner:
            # Try the member cache first, then a direct API lookup.
            owner = discord.utils.get(self.bot.get_all_members(), id=owner)
            if not owner:
                try:
                    owner = await self.bot.get_user_info(self.bot.settings.owner)
                except:
                    owner = None
        if not owner:
            owner = "Unknown"
        about = (
            "This is an instance of [Chronoxia, an open source Discord bot]({}) "
            "created by [lsomers984//W0lfstorm]({}) and improved by many.\n\n"
            "chrono is backed by a passionate community who contributes and "
            "creates content for everyone to enjoy."
            "and help us improve!\n\n"
            "".format(chrono_repo, author_repo))
        embed = discord.Embed(colour=discord.Colour.purple())
        embed.add_field(name="Instance owned by", value=str(owner))
        embed.add_field(name="Python", value=py_version)
        embed.add_field(name="discord.py", value=dpy_version)
        embed.add_field(name="About Chronoxia", value=about, inline=False)
        embed.set_footer(text="Bringing joy since 02 Jan 2016 (over "
                         "{} days ago!)".format(days_since))
        try:
            await self.bot.say(embed=embed)
        except discord.HTTPException:
            await self.bot.say("I need the `Embed links` permission "
                               "to send this")
@commands.command()
async def uptime(self):
"""Shows chrono's uptime"""
since = self.bot.uptime.strftime("%Y-%m-%d %H:%M:%S")
passed = self.get_bot_uptime()
await self.bot.say("Been up for: **{}** (since {} UTC)"
"".format(passed, since))
    @commands.command()
    async def version(self):
        """Shows chrono's current version"""
        # _get_version does blocking work (os.popen on git); run it off the
        # event loop and cap the wait at 10 seconds.
        response = self.bot.loop.run_in_executor(None, self._get_version)
        result = await asyncio.wait_for(response, timeout=10)
        try:
            await self.bot.say(embed=result)
        except discord.HTTPException:
            # Sending an embed requires the "Embed Links" channel permission.
            await self.bot.say("I need the `Embed links` permission "
                               "to send this")
@commands.command(pass_context=True)
@checks.is_owner()
async def traceback(self, ctx, public: bool=False):
"""Sends to the owner the last command exception that has occurred
If public (yes is specified), it will be sent to the chat instead"""
if not public:
destination = ctx.message.author
else:
destination = ctx.message.channel
if self.bot._last_exception:
for page in pagify(self.bot._last_exception):
await self.bot.send_message(destination, box(page, lang="py"))
else:
await self.bot.say("No exception has occurred yet.")
def _load_cog(self, cogname):
if not self._does_cogfile_exist(cogname):
raise CogNotFoundError(cogname)
try:
mod_obj = importlib.import_module(cogname)
importlib.reload(mod_obj)
self.bot.load_extension(mod_obj.__name__)
except SyntaxError as e:
raise CogLoadError(*e.args)
except:
raise
def _unload_cog(self, cogname, reloading=False):
if not reloading and cogname == "cogs.owner":
raise OwnerUnloadWithoutReloadError(
"Can't unload the owner plugin :P")
try:
self.bot.unload_extension(cogname)
except:
raise CogUnloadError
def _list_cogs(self):
cogs = [os.path.basename(f) for f in glob.glob("cogs/*.py")]
return ["cogs." + os.path.splitext(f)[0] for f in cogs]
def _does_cogfile_exist(self, module):
if "cogs." not in module:
module = "cogs." + module
if module not in self._list_cogs():
return False
return True
    def _wait_for_answer(self, author):
        # Console-side confirmation for an in-chat "set owner" request:
        # blocks on stdin, so it is expected to run off the event loop.
        print(author.name + " requested to be set as owner. If this is you, "
              "type 'yes'. Otherwise press enter.")
        print()
        print("*DO NOT* set anyone else as owner. This has security "
              "repercussions.")
        choice = "None"
        # NOTE(review): the loop keeps prompting only while the input equals
        # the sentinel string "None"; in practice the user is prompted exactly
        # once unless they literally type "None".
        while choice.lower() != "yes" and choice == "None":
            choice = input("> ")
        if choice == "yes":
            # Persist the new owner and unhide the command again.
            self.bot.settings.owner = author.id
            self.bot.settings.save_settings()
            print(author.name + " has been set as owner.")
            self.setowner_lock = False
            self.owner.hidden = True
        else:
            print("The set owner request has been ignored.")
            self.setowner_lock = False
def _get_version(self):
if not os.path.isdir(".git"):
msg = "This instance of chrono hasn't been installed with git."
e = discord.Embed(title=msg,
colour=discord.Colour.red())
return e
commands = " && ".join((
r'git config --get remote.origin.url', # Remote URL
r'git rev-list --count HEAD', # Number of commits
r'git rev-parse --abbrev-ref HEAD', # Branch name
r'git show -s -n 3 HEAD --format="%cr|%s|%H"' # Last 3 commits
))
result = os.popen(commands).read()
url, ncommits, branch, commits = result.split("\n", 3)
if url.endswith(".git"):
url = url[:-4]
if url.startswith("git@"):
domain, _, resource = url[4:].partition(':')
url = 'https://{}/{}'.format(domain, resource)
repo_name = url.split("/")[-1]
embed = discord.Embed(title="Updates of " + repo_name,
description="Last three updates",
colour=discord.Colour.red(),
url="{}/tree/{}".format(url, branch))
for line in commits.split('\n'):
if not line:
continue
when, commit, chash = line.split("|")
commit_url = url + "/commit/" + chash
content = "[{}]({}) - {} ".format(chash[:6], commit_url, commit)
embed.add_field(name=when, value=content, inline=False)
embed.set_footer(text="Total commits: " + ncommits)
return embed
def get_bot_uptime(self, *, brief=False):
# Courtesy of Danny
now = datetime.datetime.utcnow()
delta = now - self.bot.uptime
hours, remainder = divmod(int(delta.total_seconds()), 3600)
minutes, seconds = divmod(remainder, 60)
days, hours = divmod(hours, 24)
if not brief:
if days:
fmt = '{d} days, {h} hours, {m} minutes, and {s} seconds'
else:
fmt = '{h} hours, {m} minutes, and {s} seconds'
else:
fmt = '{h}h {m}m {s}s'
if days:
fmt = '{d}d ' + fmt
return fmt.format(d=days, h=hours, m=minutes, s=seconds)
def check_files():
    """Make sure the disabled-commands data file exists, creating it empty
    (an empty JSON list) when it does not."""
    path = "data/chronoxia/disabled_commands.json"
    if not os.path.isfile(path):
        print("Creating empty disabled_commands.json...")
        dataIO.save_json(path, [])
def setup(bot):
    """Extension entry point: ensure data files exist, then attach the cog."""
    check_files()
    bot.add_cog(Owner(bot))
| {
"content_hash": "10c5a6b56e75d89187f63e2884799483",
"timestamp": "",
"source": "github",
"line_count": 929,
"max_line_length": 109,
"avg_line_length": 37.29817007534984,
"alnum_prop": 0.5436652236652236,
"repo_name": "lsomers984/Chronoxia---Final-Year-Project",
"id": "949adcdb3531c4565426d446ee1bd0503ac692bc",
"size": "34650",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cogs/owner.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1022"
},
{
"name": "Python",
"bytes": "365227"
}
],
"symlink_target": ""
} |
"""Classes for stats-related testing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import contextlib
import mock
from grr_response_core.lib.util import precondition
from grr_response_core.stats import metrics
from grr_response_core.stats import stats_collector_instance
class StatsDeltaAssertionContext(object):
  """Context manager asserting a metric changes by an exact delta.

  Wrap the code under test; on exit the metric's value (or, for
  distributions, its event count) must have changed by exactly `delta`.
  """

  def __init__(self, test, delta, metric, fields=None):
    precondition.AssertType(metric, metrics.AbstractMetric)
    self.test = test
    self.metric = metric
    self.fields = fields
    self.delta = delta

  def _CurrentCount(self):
    # Distributions report a struct whose `.count` member holds the number of
    # recorded events; plain counters/gauges are already numbers.
    value = self.metric.GetValue(fields=self.fields)
    return getattr(value, "count", value)

  def __enter__(self):
    self.prev_count = self._CurrentCount()

  def __exit__(self, unused_type, unused_value, unused_traceback):
    actual = self._CurrentCount() - self.prev_count
    self.test.assertEqual(
        actual, self.delta,
        "%s (fields=%s) expected to change with delta=%d, but changed by %d. "
        "Metric has field values %s." %
        (self.metric.name, self.fields, self.delta, actual,
         self.metric.GetFields()))
class StatsTestMixin(object):
  """Mixin for stats-related assertions."""
  # pylint: disable=invalid-name
  def assertStatsCounterDelta(self, delta, metric, fields=None):
    # Returns a context manager: use as
    # `with self.assertStatsCounterDelta(1, some_metric): ...`.
    return StatsDeltaAssertionContext(self, delta, metric, fields=fields)
  # pylint: enable=invalid-name
class StatsCollectorTestMixin(object):
  """Mixin for setting up a StatsCollector with metrics."""
  @contextlib.contextmanager
  def SetUpStatsCollector(self, collector):
    # Reset the process-wide singleton state so this test starts clean; the
    # patch is reverted automatically via addCleanup when the test ends.
    patcher = mock.patch.multiple(
        stats_collector_instance, _metadatas=[], _stats_singleton=None)
    patcher.start()
    self.addCleanup(patcher.stop)
    # NOTE(review): callers register metrics during this `yield`; only after
    # the with-block does the collector get installed and captured below.
    yield
    stats_collector_instance.Set(collector)
    self.collector = stats_collector_instance.Get()
| {
"content_hash": "4e600c700a6150449a8abe3542e158bb",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 78,
"avg_line_length": 32.21212121212121,
"alnum_prop": 0.7121354656632173,
"repo_name": "dunkhong/grr",
"id": "7391fc2dfe4e5ce43f403341e4391fb0fd1c7db6",
"size": "2148",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "grr/test_lib/stats_test_lib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "Batchfile",
"bytes": "882"
},
{
"name": "C",
"bytes": "11321"
},
{
"name": "C++",
"bytes": "54535"
},
{
"name": "CSS",
"bytes": "36745"
},
{
"name": "Dockerfile",
"bytes": "1822"
},
{
"name": "HCL",
"bytes": "8451"
},
{
"name": "HTML",
"bytes": "193751"
},
{
"name": "JavaScript",
"bytes": "12795"
},
{
"name": "Jupyter Notebook",
"bytes": "199190"
},
{
"name": "Makefile",
"bytes": "3139"
},
{
"name": "PowerShell",
"bytes": "1984"
},
{
"name": "Python",
"bytes": "7430923"
},
{
"name": "Roff",
"bytes": "444"
},
{
"name": "Shell",
"bytes": "49155"
},
{
"name": "Standard ML",
"bytes": "8172"
},
{
"name": "TSQL",
"bytes": "10560"
},
{
"name": "TypeScript",
"bytes": "56756"
}
],
"symlink_target": ""
} |
import sys, os
sys.path.insert(0, os.path.abspath('../../'))
# NOTE(review): the insert above prepends the directory two levels up
# (presumably the repository root) so the in-tree sphinx_bootstrap_theme
# package is imported below — confirm against the repo layout.
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# Disabled: , 'sphinx.ext.intersphinx'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Demo'
copyright = u'2011-2016, Ryan Roemer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# Both version strings track the theme package itself.
version = sphinx_bootstrap_theme.__version__
# The full version, including alpha/beta/rc tags.
release = sphinx_bootstrap_theme.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# Enable todo output
todo_include_todos = True
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    # Navigation bar title. (Default: ``project`` value)
    'navbar_title': "Demo",
    # Tab name for entire site. (Default: "Site")
    'navbar_site_name': "Site",
    # Tab name for the current pages TOC. (Default: "Page")
    'navbar_pagenav_name': "Page",
    # A list of tuples containing pages or urls to link to.
    # Valid tuples should be in the following forms:
    # (name, page) # a link to a page
    # (name, "/aa/bb", 1) # a link to an arbitrary relative url
    # (name, "http://example.com", True) # arbitrary absolute url
    # Note the "1" or "True" value above as the third argument to indicate
    # an arbitrary url.
    # 'navbar_links': [
    # ("Examples", "examples"),
    # ("Link", "http://example.com", True),
    # ],
    # Global TOC depth for "site" navbar tab. (Default: 1)
    # Switching to -1 shows all levels.
    'globaltoc_depth': 2,
    # Include hidden TOCs in Site navbar?
    #
    # Note: If this is "false", you cannot have mixed ``:hidden:`` and
    # non-hidden ``toctree`` directives in the same page, or else the build
    # will break.
    #
    # Values: "true" (default) or "false"
    'globaltoc_includehidden': "true",
    # HTML navbar class (Default: "navbar") to attach to <div> element.
    # For black navbar, do "navbar navbar-inverse"
    'navbar_class': "navbar",
    # Fix navigation bar to top of page?
    # Values: "true" (default) or "false"
    'navbar_fixed_top': "true",
    # Location of link to source.
    # Options are "nav" (default), "footer" or anything else to exclude.
    'source_link_position': "nav",
    # Bootswatch (http://bootswatch.com/) theme.
    #
    # Options are nothing (default) or the name of a valid theme such
    # such as "cosmo" or "sandstone".
    #
    # Example themes:
    # * flatly
    # * sandstone (v3 only)
    # * united
    # * yeti (v3 only)
    'bootswatch_theme': "sandstone",
    # Choose Bootstrap version.
    # Values: "3" (default) or "2" (in quotes)
    'bootstrap_version': "3",
}
# Add any paths that contain custom themes here, relative to this directory.
# ``get_html_theme_path`` returns a list, so you can concatenate with
# any other theme directories you would like.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = "Demo"
# (Optional) Logo. Should be small enough to fit the navbar (ideally 24x24).
# Path should be relative to the ``_static`` files directory.
#html_logo = "demologo.png"
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# NOTE(review): the key "sidebar" applies these templates only to a document
# named "sidebar" — if the intent was all pages, the key would be "**".
html_sidebars = {'sidebar': ['localtoc.html', 'sourcelink.html', 'searchbox.html']}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MyProjectDoc'
# # Custom style overrides
# def setup(app):
# app.add_stylesheet('my-styles.css') # may also be an URL
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'MyProject.tex', u'My Project',
     u'Ryan Roemer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'myproject', u'My Project',
     [u'Ryan Roemer'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'MyProject', u'My Project',
     u'Ryan Roemer', 'MyProject', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
# NOTE(review): this uses the legacy unnamed-key intersphinx form; newer Sphinx
# prefers named mappings like {'python': ('https://docs.python.org/3', None)}.
intersphinx_mapping = {'http://docs.python.org/': None}
| {
"content_hash": "4c0127377b907f3adaa23812dae815c2",
"timestamp": "",
"source": "github",
"line_count": 307,
"max_line_length": 100,
"avg_line_length": 33.153094462540714,
"alnum_prop": 0.6817645902927884,
"repo_name": "EricFromCanada/sphinx-bootstrap-theme",
"id": "99c9883c3cd14897db5fec6e2ce4170c57f3e7bc",
"size": "10202",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "demo/source/conf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "13956"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Makefile",
"bytes": "432"
},
{
"name": "Python",
"bytes": "2854"
}
],
"symlink_target": ""
} |
import datetime
from django.test import TestCase
from django.utils import timezone
from oscar.test import factories
from oscar.apps.offer import models
class TestActiveOfferManager(TestCase):
    """Exercises which ConditionalOffers the ``active`` manager returns."""

    def test_includes_offers_in_date_range(self):
        moment = timezone.now()
        one_day = datetime.timedelta(days=1)
        factories.create_offer(start=moment - one_day, end=moment + one_day)
        self.assertEqual(len(models.ConditionalOffer.active.all()), 1)

    def test_includes_offers_with_null_start_date(self):
        # An open-ended start with a future end is considered active.
        end = timezone.now() + datetime.timedelta(days=1)
        factories.create_offer(start=None, end=end)
        self.assertEqual(len(models.ConditionalOffer.active.all()), 1)

    def test_includes_offers_with_null_end_date(self):
        # A past start with an open-ended end is considered active.
        start = timezone.now() - datetime.timedelta(days=1)
        factories.create_offer(start=start, end=None)
        self.assertEqual(len(models.ConditionalOffer.active.all()), 1)

    def test_includes_offers_with_null_start_and_end_date(self):
        factories.create_offer(start=None, end=None)
        self.assertEqual(len(models.ConditionalOffer.active.all()), 1)

    def test_filters_out_expired_offers(self):
        # Create offer that is available but with the wrong status
        moment = timezone.now()
        factories.create_offer(start=moment - datetime.timedelta(days=3),
                               end=moment - datetime.timedelta(days=1))
        self.assertEqual(len(models.ConditionalOffer.active.all()), 0)

    def test_filters_out_offers_yet_to_start(self):
        # Create offer that is available but with the wrong status
        moment = timezone.now()
        factories.create_offer(start=moment + datetime.timedelta(days=1),
                               end=moment + datetime.timedelta(days=3))
        self.assertEqual(len(models.ConditionalOffer.active.all()), 0)

    def test_filters_out_suspended_offers(self):
        # Create offer that is available but with the wrong status
        factories.create_offer(
            status=models.ConditionalOffer.SUSPENDED)
        self.assertEqual(len(models.ConditionalOffer.active.all()), 0)
| {
"content_hash": "f1111c7c96b50a7267036f6feff22cca",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 66,
"avg_line_length": 38.55384615384615,
"alnum_prop": 0.6779728651237031,
"repo_name": "sasha0/django-oscar",
"id": "ce610371b43c42699d292f3b1005c34461e5c978",
"size": "2506",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/integration/offer/test_manager.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "387941"
},
{
"name": "Dockerfile",
"bytes": "544"
},
{
"name": "HTML",
"bytes": "518624"
},
{
"name": "JavaScript",
"bytes": "344864"
},
{
"name": "Makefile",
"bytes": "4290"
},
{
"name": "Python",
"bytes": "1957797"
},
{
"name": "Shell",
"bytes": "1643"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import sys
import argparse
from greatfet.utils import GreatFETArgumentParser
def main():
    """Command-line entry point: load an M0 loadable onto a GreatFET board."""
    # Set up a simple argument parser.
    parser = GreatFETArgumentParser(description="Utility for loading runtime extensions on to a GreatFET board.")
    parser.add_argument('--m0', dest="m0", type=argparse.FileType('rb'), metavar='<filename>',
                        help="loads the provided loadable file to run on the GreatFET's m0 core")
    args = parser.parse_args()
    # Validate arguments before touching hardware: previously the device was
    # located even when we were only going to print usage and exit, and the
    # loadable was then checked twice.
    if not args.m0:
        parser.print_help()
        sys.exit(-1)
    log_function = parser.get_log_function()
    device = parser.find_specified_device()
    data = args.m0.read()
    log_function("Loading {} byte loadable onto the M0 coprocessor.\n".format(len(data)))
    device.m0.run_loadable(data)
| {
"content_hash": "13c3e16906d875ed7cd50cf012dc2354",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 113,
"avg_line_length": 30.77777777777778,
"alnum_prop": 0.6787003610108303,
"repo_name": "greatscottgadgets/greatfet",
"id": "20cd67e418bb2f5d2e044ab03f7af65ba2052322",
"size": "889",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "host/greatfet/commands/greatfet_loadable.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "823"
},
{
"name": "C",
"bytes": "546689"
},
{
"name": "C++",
"bytes": "457321"
},
{
"name": "CMake",
"bytes": "20175"
},
{
"name": "Dockerfile",
"bytes": "601"
},
{
"name": "Makefile",
"bytes": "8176"
},
{
"name": "Python",
"bytes": "541237"
},
{
"name": "Shell",
"bytes": "2236"
}
],
"symlink_target": ""
} |
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._server_vulnerability_assessments_operations import (
build_create_or_update_request,
build_delete_request,
build_get_request,
build_list_by_server_request,
)
# typing.Literal only exists from Python 3.8; older interpreters fall back to
# the typing_extensions backport.
if sys.version_info >= (3, 8):
    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
else:
    from typing_extensions import Literal  # type: ignore  # pylint: disable=ungrouped-imports
T = TypeVar("T")
# Signature of the optional `cls` response-transformer callback that every
# operation accepts: (pipeline_response, deserialized, response_headers) -> Any.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ServerVulnerabilityAssessmentsOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.
        Instead, you should access the following operations through
        :class:`~azure.mgmt.sql.aio.SqlManagementClient`'s
        :attr:`server_vulnerability_assessments` attribute.
    """
    # Alias so callers can reach the generated models via the operations class.
    models = _models
    def __init__(self, *args, **kwargs) -> None:
        # Wired up by the generated client: receives (client, config,
        # serializer, deserializer), either positionally or as keywords.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        server_name: str,
        vulnerability_assessment_name: Union[str, _models.VulnerabilityAssessmentName],
        **kwargs: Any
    ) -> _models.ServerVulnerabilityAssessment:
        """Gets the server's vulnerability assessment.
        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal. Required.
        :type resource_group_name: str
        :param server_name: The name of the server for which the vulnerability assessment is defined.
         Required.
        :type server_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment. "default"
         Required.
        :type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ServerVulnerabilityAssessment or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.ServerVulnerabilityAssessment
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to exception types; callers can extend or
        # override this mapping through kwargs["error_map"].
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", "2020-11-01-preview")
        )  # type: Literal["2020-11-01-preview"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ServerVulnerabilityAssessment]
        request = build_get_request(
            resource_group_name=resource_group_name,
            server_name=server_name,
            vulnerability_assessment_name=vulnerability_assessment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        # Send the request through the client's pipeline (policies + transport).
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize("ServerVulnerabilityAssessment", pipeline_response)
        if cls:
            # Hand the raw pipeline response to the caller-supplied transformer.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}"}  # type: ignore
    # Typing-only overload (model-typed body); the runtime implementation
    # follows the overloads and dispatches on the parameter type.
    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        server_name: str,
        vulnerability_assessment_name: Union[str, _models.VulnerabilityAssessmentName],
        parameters: _models.ServerVulnerabilityAssessment,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.ServerVulnerabilityAssessment:
        """Creates or updates the server's vulnerability assessment. Learn more about setting SQL
        vulnerability assessment with managed identity -
        https://docs.microsoft.com/azure/azure-sql/database/sql-database-vulnerability-assessment-storage.
        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal. Required.
        :type resource_group_name: str
        :param server_name: The name of the server for which the vulnerability assessment is defined.
         Required.
        :type server_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment. "default"
         Required.
        :type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
        :param parameters: The requested resource. Required.
        :type parameters: ~azure.mgmt.sql.models.ServerVulnerabilityAssessment
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ServerVulnerabilityAssessment or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.ServerVulnerabilityAssessment
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    # Typing-only overload (raw IO body); the runtime implementation follows
    # the overloads and dispatches on the parameter type.
    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        server_name: str,
        vulnerability_assessment_name: Union[str, _models.VulnerabilityAssessmentName],
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.ServerVulnerabilityAssessment:
        """Creates or updates the server's vulnerability assessment. Learn more about setting SQL
        vulnerability assessment with managed identity -
        https://docs.microsoft.com/azure/azure-sql/database/sql-database-vulnerability-assessment-storage.
        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal. Required.
        :type resource_group_name: str
        :param server_name: The name of the server for which the vulnerability assessment is defined.
         Required.
        :type server_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment. "default"
         Required.
        :type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
        :param parameters: The requested resource. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ServerVulnerabilityAssessment or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.ServerVulnerabilityAssessment
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def create_or_update(
        self,
        resource_group_name: str,
        server_name: str,
        vulnerability_assessment_name: Union[str, _models.VulnerabilityAssessmentName],
        parameters: Union[_models.ServerVulnerabilityAssessment, IO],
        **kwargs: Any
    ) -> _models.ServerVulnerabilityAssessment:
        """Creates or updates the server's vulnerability assessment. Learn more about setting SQL
        vulnerability assessment with managed identity -
        https://docs.microsoft.com/azure/azure-sql/database/sql-database-vulnerability-assessment-storage.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal. Required.
        :type resource_group_name: str
        :param server_name: The name of the server for which the vulnerability assessment is defined.
         Required.
        :type server_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment. "default"
         Required.
        :type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
        :param parameters: The requested resource. Is either a model type or a IO type. Required.
        :type parameters: ~azure.mgmt.sql.models.ServerVulnerabilityAssessment or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ServerVulnerabilityAssessment or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.ServerVulnerabilityAssessment
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # HTTP status codes that map to specific azure-core exception types.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", "2020-11-01-preview")
        )  # type: Literal["2020-11-01-preview"]
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ServerVulnerabilityAssessment]

        content_type = content_type or "application/json"
        # Raw IO/bytes bodies are streamed as-is; model instances are JSON-serialized.
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "ServerVulnerabilityAssessment")

        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            server_name=server_name,
            vulnerability_assessment_name=vulnerability_assessment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.create_or_update.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) both carry the resource in the body.
        if response.status_code == 200:
            deserialized = self._deserialize("ServerVulnerabilityAssessment", pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize("ServerVulnerabilityAssessment", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}"}  # type: ignore
    @distributed_trace_async
    async def delete(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        server_name: str,
        vulnerability_assessment_name: Union[str, _models.VulnerabilityAssessmentName],
        **kwargs: Any
    ) -> None:
        """Removes the server's vulnerability assessment.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal. Required.
        :type resource_group_name: str
        :param server_name: The name of the server for which the vulnerability assessment is defined.
         Required.
        :type server_name: str
        :param vulnerability_assessment_name: The name of the vulnerability assessment. "default"
         Required.
        :type vulnerability_assessment_name: str or ~azure.mgmt.sql.models.VulnerabilityAssessmentName
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # HTTP status codes that map to specific azure-core exception types.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", "2020-11-01-preview")
        )  # type: Literal["2020-11-01-preview"]
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_delete_request(
            resource_group_name=resource_group_name,
            server_name=server_name,
            vulnerability_assessment_name=vulnerability_assessment_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.delete.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # A successful delete has an empty body; invoke cls if supplied, else return None.
        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}"}  # type: ignore
    @distributed_trace
    def list_by_server(
        self, resource_group_name: str, server_name: str, **kwargs: Any
    ) -> AsyncIterable["_models.ServerVulnerabilityAssessment"]:
        """Lists the vulnerability assessment policies associated with a server.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal. Required.
        :type resource_group_name: str
        :param server_name: The name of the server. Required.
        :type server_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ServerVulnerabilityAssessment or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.sql.models.ServerVulnerabilityAssessment]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", "2020-11-01-preview")
        )  # type: Literal["2020-11-01-preview"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ServerVulnerabilityAssessmentListResult]

        # HTTP status codes that map to specific azure-core exception types.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the templated list URL; subsequent pages follow
            # the service-provided nextLink verbatim.
            if not next_link:
                request = build_list_by_server_request(
                    resource_group_name=resource_group_name,
                    server_name=server_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_by_server.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
            else:
                request = HttpRequest("GET", next_link)
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand (next_link, items) back to the pager.
            deserialized = self._deserialize("ServerVulnerabilityAssessmentListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch a single page, surfacing ARM errors as HttpResponseError.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list_by_server.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/vulnerabilityAssessments"}  # type: ignore
| {
"content_hash": "25424ec09673e8f2cb99f913ed83b67e",
"timestamp": "",
"source": "github",
"line_count": 442,
"max_line_length": 225,
"avg_line_length": 46.66289592760181,
"alnum_prop": 0.6641939393939394,
"repo_name": "Azure/azure-sdk-for-python",
"id": "8fb133825cc959e947a072926fdd86d022485165",
"size": "21125",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/sql/azure-mgmt-sql/azure/mgmt/sql/aio/operations/_server_vulnerability_assessments_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import argparse
import sys
import numpy as np
import cv2
from ds_utils.video_utils import generate_video
# see https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_video/py_lucas_kanade/py_lucas_kanade.html
def optical_flow_kanade(input_video_path):
    """Track sparse Shi-Tomasi corners through a video with Lucas-Kanade optical flow.

    Detects up to 100 corners in the first frame, tracks them frame-to-frame,
    and draws each point's trail (in a per-point random color) on top of the video.

    :param input_video_path: path to the input video file
    :return: list of BGR frames with the tracked point trails drawn on them
    """
    cap = cv2.VideoCapture(input_video_path)

    # params for ShiTomasi corner detection
    feature_params = dict(maxCorners=100,
                          qualityLevel=0.3,
                          minDistance=7,
                          blockSize=7)

    # Parameters for lucas kanade optical flow
    lk_params = dict(winSize=(15, 15),
                     maxLevel=2,
                     criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 0.03))

    # One random color per tracked corner (indexed in lockstep with the points)
    color = np.random.randint(0, 255, (100, 3))

    # Take first frame and find corners in it
    _, old_frame = cap.read()
    old_gray = cv2.cvtColor(old_frame, cv2.COLOR_BGR2GRAY)
    p0 = cv2.goodFeaturesToTrack(old_gray, mask=None, **feature_params)

    # Persistent canvas the trails accumulate on across frames
    mask = np.zeros_like(old_frame)

    frames = []
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            break
        frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

        # calculate optical flow
        p1, st, err = cv2.calcOpticalFlowPyrLK(old_gray, frame_gray, p0, None, **lk_params)

        # Keep only the points that were successfully tracked (status == 1)
        good_new = p1[st == 1]
        good_old = p0[st == 1]

        # Draw the tracks.  The flow coordinates are floats, but OpenCV's
        # drawing functions (cv2.line / cv2.circle) require integer pixel
        # coordinates (a TypeError under OpenCV >= 4.x otherwise).
        for i, (new, old) in enumerate(zip(good_new, good_old)):
            a, b = (int(v) for v in new.ravel())
            c, d = (int(v) for v in old.ravel())
            mask = cv2.line(mask, (a, b), (c, d), color[i].tolist(), 2)
            frame = cv2.circle(frame, (a, b), 5, color[i].tolist(), -1)
        img = cv2.add(frame, mask)

        # Now update the previous frame and previous points
        old_gray = frame_gray.copy()
        p0 = good_new.reshape(-1, 1, 2)
        frames.append(img)
    cap.release()
    return frames
def dense_optical_flow(input_video_path):
    """Compute Farneback dense optical flow for every frame of a video.

    The per-pixel flow is visualized in HSV space: hue encodes the flow
    direction, value encodes the (normalized) flow magnitude, saturation is
    kept at maximum.

    :param input_video_path: path to the input video file
    :return: list of BGR frames visualizing the dense flow field
    """
    cap = cv2.VideoCapture(input_video_path)
    ret, frame1 = cap.read()
    prev_gray = cv2.cvtColor(frame1, cv2.COLOR_BGR2GRAY)

    # HSV canvas reused for every frame; full saturation throughout.
    hsv = np.zeros_like(frame1)
    hsv[..., 1] = 255

    frames = []
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            break
        # NOTE: renamed from `next` to avoid shadowing the builtin.
        curr_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        flow = cv2.calcOpticalFlowFarneback(prev_gray, curr_gray, None, 0.5, 3, 15, 3, 5, 1.2, 0)
        mag, ang = cv2.cartToPolar(flow[..., 0], flow[..., 1])
        # Map angle (radians) into OpenCV's hue range [0, 180)
        hsv[..., 0] = ang * 180 / np.pi / 2
        hsv[..., 2] = cv2.normalize(mag, None, 0, 255, cv2.NORM_MINMAX)
        frames.append(cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR))
        prev_gray = curr_gray
    cap.release()
    return frames
def show_video(frames):
    """Play the given frames in an OpenCV window; ESC stops playback early."""
    ESC = 27
    for current in frames:
        cv2.imshow('frame2', current)
        if cv2.waitKey(30) & 0xff == ESC:
            break
def main(_=None):
    """CLI entry point: compute optical flow for a video, then save or display it.

    With ``--out`` the result is written to a video file; otherwise it is
    played back in an OpenCV window.
    """
    parser = argparse.ArgumentParser(description='Optical Flow')
    parser.add_argument('-i', '--input', required=True, help="input path")
    parser.add_argument('-o', '--out', help="output path")
    # argparse converts the value, so no manual int() cast is needed later.
    parser.add_argument('--fps', type=int, default=24)
    parser.add_argument('--codec', default='mp4v')
    parser.add_argument('--method', default='dense', help="'dense' or 'kanade'")
    args = parser.parse_args()

    method = args.method
    out_path = args.out

    if method == 'dense':
        frames = dense_optical_flow(args.input)
    elif method == 'kanade':
        frames = optical_flow_kanade(args.input)
    else:
        # An unknown method is a usage error: report on stderr, exit non-zero
        # (the original exited with status 0, which hides the failure).
        print(f'No such method: {method}', file=sys.stderr)
        sys.exit(1)

    if out_path:
        generate_video(out_path, frames[0].shape[:2][::-1], lambda i: frames[i],
                       len(frames), fps=args.fps, is_color=True, codec=args.codec)
    else:
        show_video(frames)
# Run the CLI only when executed as a script (not when imported as a module).
if __name__ == "__main__":
    main(sys.argv[1:])
| {
"content_hash": "f5dd5036b06dd79b473409045914c96c",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 120,
"avg_line_length": 31.08695652173913,
"alnum_prop": 0.5636363636363636,
"repo_name": "5agado/data-science-learning",
"id": "c46b424d9dc899eccfb3c0ed7065ba76227d1773",
"size": "4290",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "graphics/optical_flow/optical_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "565"
},
{
"name": "Jupyter Notebook",
"bytes": "2011939"
},
{
"name": "Python",
"bytes": "550056"
}
],
"symlink_target": ""
} |
# Auto-generated console-script wrapper for the `pybabel` command of Babel 1.3.
__requires__ = 'Babel==1.3'
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Resolve the 'pybabel' console_scripts entry point (pinned to Babel 1.3)
    # and exit with whatever status its main() returns.
    sys.exit(
        load_entry_point('Babel==1.3', 'console_scripts', 'pybabel')()
    )
| {
"content_hash": "bd8d4e74d91080437a5151c9098d2ec7",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 70,
"avg_line_length": 25.125,
"alnum_prop": 0.5920398009950248,
"repo_name": "IuryAlves/Novos-Bandeirantes",
"id": "6df27e1514771a0f60f8f033a18d01643f981f7b",
"size": "347",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "backend/venv/Scripts/pybabel-script.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2077"
},
{
"name": "CSS",
"bytes": "7833"
},
{
"name": "HTML",
"bytes": "80335"
},
{
"name": "JavaScript",
"bytes": "28350"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "105323"
},
{
"name": "Shell",
"bytes": "905"
}
],
"symlink_target": ""
} |
"""SCons.Tool.g++
Tool-specific initialization for g++.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/g++.py 2014/01/04 01:12:18 root"
import os.path
import re
import subprocess
import SCons.Tool
import SCons.Util
# 'c++' is not a valid Python identifier, so the sibling generic C++ tool
# module cannot be imported with a plain ``import`` statement; use __import__.
cplusplus = __import__('c++', globals(), locals(), [])

# Compiler executables this tool probes for, in order of preference.
compilers = ['g++']
def generate(env):
    """Add Builders and construction variables for g++ to an Environment."""
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # g++ shares most of its configuration with the generic C++ tool.
    cplusplus.generate(env)

    env['CXX'] = env.Detect(compilers)

    # platform specific settings
    if env['PLATFORM'] == 'aix':
        env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc')
        env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
        env['SHOBJSUFFIX'] = '$OBJSUFFIX'
    elif env['PLATFORM'] == 'hpux':
        env['SHOBJSUFFIX'] = '.pic.o'
    elif env['PLATFORM'] == 'sunos':
        env['SHOBJSUFFIX'] = '.pic.o'

    # determine compiler version
    if env['CXX']:
        pipe = SCons.Action._subproc(env, [env['CXX'], '--version'],
                                     stdin='devnull',
                                     stderr='devnull',
                                     stdout=subprocess.PIPE)
        if pipe.wait() != 0:
            return

        # -dumpversion was added in GCC 3.0. As long as we're supporting
        # GCC versions older than that, we should use --version and a
        # regular expression.
        line = pipe.stdout.readline()
        # Under Python 3 the pipe yields bytes; decode before matching a str
        # pattern, otherwise re.search() raises TypeError.
        if not isinstance(line, str):
            line = line.decode()
        match = re.search(r'[0-9]+(\.[0-9]+)+', line)
        if match:
            env['CXXVERSION'] = match.group(0)
def exists(env):
    """Return a truthy value (the executable path) if *env* can find g++."""
    return env.Detect(compilers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| {
"content_hash": "bf50441d4357ed9608ca3ca2bded5587",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 113,
"avg_line_length": 36.05555555555556,
"alnum_prop": 0.6533127889060092,
"repo_name": "Distrotech/scons",
"id": "c9ec40a25e9c74f04849f8cc5e83cef50e974f3d",
"size": "3245",
"binary": false,
"copies": "2",
"ref": "refs/heads/distrotech-scons",
"path": "build/scons/engine/SCons/Tool/g++.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "259"
},
{
"name": "JavaScript",
"bytes": "17316"
},
{
"name": "Perl",
"bytes": "45214"
},
{
"name": "Python",
"bytes": "12517068"
},
{
"name": "Shell",
"bytes": "20589"
}
],
"symlink_target": ""
} |
"""Recursive feature elimination for feature ranking"""
import numpy as np
import numbers
from joblib import Parallel, effective_n_jobs
from ..utils.metaestimators import if_delegate_has_method
from ..utils.metaestimators import _safe_split
from ..utils._tags import _safe_tags
from ..utils.validation import check_is_fitted
from ..utils.fixes import delayed
from ..base import BaseEstimator
from ..base import MetaEstimatorMixin
from ..base import clone
from ..base import is_classifier
from ..model_selection import check_cv
from ..model_selection._validation import _score
from ..metrics import check_scoring
from ._base import SelectorMixin
from ._base import _get_feature_importances
def _rfe_single_fit(rfe, estimator, X, y, train, test, scorer):
    """
    Return the score for a fit across one fold.
    """
    X_train, y_train = _safe_split(estimator, X, y, train)
    X_test, y_test = _safe_split(estimator, X, y, test, train)

    def step_score(est, features):
        # Score the estimator on the held-out fold, restricted to `features`.
        return _score(est, X_test[:, features], y_test, scorer)

    return rfe._fit(X_train, y_train, step_score).scores_
class RFE(SelectorMixin, MetaEstimatorMixin, BaseEstimator):
    """Feature ranking with recursive feature elimination.

    Given an external estimator that assigns weights to features (e.g., the
    coefficients of a linear model), the goal of recursive feature elimination
    (RFE) is to select features by recursively considering smaller and smaller
    sets of features. First, the estimator is trained on the initial set of
    features and the importance of each feature is obtained either through
    any specific attribute or callable.
    Then, the least important features are pruned from current set of features.
    That procedure is recursively repeated on the pruned set until the desired
    number of features to select is eventually reached.

    Read more in the :ref:`User Guide <rfe>`.

    Parameters
    ----------
    estimator : ``Estimator`` instance
        A supervised learning estimator with a ``fit`` method that provides
        information about feature importance
        (e.g. `coef_`, `feature_importances_`).

    n_features_to_select : int or float, default=None
        The number of features to select. If `None`, half of the features are
        selected. If integer, the parameter is the absolute number of features
        to select. If float between 0 and 1, it is the fraction of features to
        select.

        .. versionchanged:: 0.24
           Added float values for fractions.

    step : int or float, default=1
        If greater than or equal to 1, then ``step`` corresponds to the
        (integer) number of features to remove at each iteration.
        If within (0.0, 1.0), then ``step`` corresponds to the percentage
        (rounded down) of features to remove at each iteration.

    verbose : int, default=0
        Controls verbosity of output.

    importance_getter : str or callable, default='auto'
        If 'auto', uses the feature importance either through a `coef_`
        or `feature_importances_` attributes of estimator.

        Also accepts a string that specifies an attribute name/path
        for extracting feature importance (implemented with `attrgetter`).
        For example, give `regressor_.coef_` in case of
        :class:`~sklearn.compose.TransformedTargetRegressor` or
        `named_steps.clf.feature_importances_` in case of
        class:`~sklearn.pipeline.Pipeline` with its last step named `clf`.

        If `callable`, overrides the default feature importance getter.
        The callable is passed with the fitted estimator and it should
        return importance for each feature.

        .. versionadded:: 0.24

    Attributes
    ----------
    classes_ : ndarray of shape (n_classes,)
        The classes labels. Only available when `estimator` is a classifier.

    estimator_ : ``Estimator`` instance
        The fitted estimator used to select features.

    n_features_ : int
        The number of selected features.

    n_features_in_ : int
        Number of features seen during :term:`fit`. Only defined if the
        underlying estimator exposes such an attribute when fit.

        .. versionadded:: 0.24

    ranking_ : ndarray of shape (n_features,)
        The feature ranking, such that ``ranking_[i]`` corresponds to the
        ranking position of the i-th feature. Selected (i.e., estimated
        best) features are assigned rank 1.

    support_ : ndarray of shape (n_features,)
        The mask of selected features.

    Examples
    --------
    The following example shows how to retrieve the 5 most informative
    features in the Friedman #1 dataset.

    >>> from sklearn.datasets import make_friedman1
    >>> from sklearn.feature_selection import RFE
    >>> from sklearn.svm import SVR
    >>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
    >>> estimator = SVR(kernel="linear")
    >>> selector = RFE(estimator, n_features_to_select=5, step=1)
    >>> selector = selector.fit(X, y)
    >>> selector.support_
    array([ True,  True,  True,  True,  True, False, False, False, False,
           False])
    >>> selector.ranking_
    array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])

    Notes
    -----
    Allows NaN/Inf in the input if the underlying estimator does as well.

    See Also
    --------
    RFECV : Recursive feature elimination with built-in cross-validated
        selection of the best number of features.
    SelectFromModel : Feature selection based on thresholds of importance
        weights.
    SequentialFeatureSelector : Sequential cross-validation based feature
        selection. Does not rely on importance weights.

    References
    ----------
    .. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
           for cancer classification using support vector machines",
           Mach. Learn., 46(1-3), 389--422, 2002.
    """

    def __init__(self, estimator, *, n_features_to_select=None, step=1,
                 verbose=0, importance_getter='auto'):
        self.estimator = estimator
        self.n_features_to_select = n_features_to_select
        self.step = step
        self.importance_getter = importance_getter
        self.verbose = verbose

    @property
    def _estimator_type(self):
        # Mirror the wrapped estimator so RFE is treated as a classifier /
        # regressor interchangeably with it.
        return self.estimator._estimator_type

    @property
    def classes_(self):
        """Class labels of the fitted underlying estimator (classifiers only)."""
        return self.estimator_.classes_

    def fit(self, X, y):
        """Fit the RFE model and then the underlying estimator on the selected
        features.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The training input samples.

        y : array-like of shape (n_samples,)
            The target values.

        Returns
        -------
        self : object
            Fitted estimator.
        """
        return self._fit(X, y)

    def _fit(self, X, y, step_score=None):
        """Run the recursive elimination loop; shared by ``fit`` and RFECV."""
        # Parameter step_score controls the calculation of self.scores_
        # step_score is not exposed to users
        # and is used when implementing RFECV
        # self.scores_ will not be calculated when calling _fit through fit
        tags = self._get_tags()
        X, y = self._validate_data(
            X, y, accept_sparse="csc",
            ensure_min_features=2,
            force_all_finite=not tags.get("allow_nan", True),
            multi_output=True
        )
        error_msg = ("n_features_to_select must be either None, a "
                     "positive integer representing the absolute "
                     "number of features or a float in (0.0, 1.0] "
                     "representing a percentage of features to "
                     f"select. Got {self.n_features_to_select}")

        # Initialization
        n_features = X.shape[1]
        if self.n_features_to_select is None:
            n_features_to_select = n_features // 2
        elif self.n_features_to_select < 0:
            raise ValueError(error_msg)
        elif isinstance(self.n_features_to_select, numbers.Integral):  # int
            n_features_to_select = self.n_features_to_select
        elif self.n_features_to_select > 1.0:  # float > 1
            raise ValueError(error_msg)
        else:  # float
            n_features_to_select = int(n_features * self.n_features_to_select)

        # Resolve fractional step into an absolute per-iteration removal count.
        if 0.0 < self.step < 1.0:
            step = int(max(1, self.step * n_features))
        else:
            step = int(self.step)
        if step <= 0:
            raise ValueError("Step must be >0")

        support_ = np.ones(n_features, dtype=bool)
        ranking_ = np.ones(n_features, dtype=int)

        if step_score:
            self.scores_ = []

        # Elimination
        while np.sum(support_) > n_features_to_select:
            # Remaining features
            features = np.arange(n_features)[support_]

            # Rank the remaining features
            estimator = clone(self.estimator)
            if self.verbose > 0:
                print("Fitting estimator with %d features." % np.sum(support_))

            estimator.fit(X[:, features], y)

            # Get importance and rank them
            importances = _get_feature_importances(
                estimator, self.importance_getter, transform_func="square",
            )
            ranks = np.argsort(importances)

            # for sparse case ranks is matrix
            ranks = np.ravel(ranks)

            # Eliminate the worse features; never overshoot the target count.
            threshold = min(step, np.sum(support_) - n_features_to_select)

            # Compute step score on the previous selection iteration
            # because 'estimator' must use features
            # that have not been eliminated yet
            if step_score:
                self.scores_.append(step_score(estimator, features))
            support_[features[ranks][:threshold]] = False
            ranking_[np.logical_not(support_)] += 1

        # Set final attributes
        features = np.arange(n_features)[support_]
        self.estimator_ = clone(self.estimator)
        self.estimator_.fit(X[:, features], y)

        # Compute step score when only n_features_to_select features left
        if step_score:
            self.scores_.append(step_score(self.estimator_, features))
        self.n_features_ = support_.sum()
        self.support_ = support_
        self.ranking_ = ranking_
        return self

    @if_delegate_has_method(delegate='estimator')
    def predict(self, X):
        """Reduce X to the selected features and then predict using the
        underlying estimator.

        Parameters
        ----------
        X : array of shape [n_samples, n_features]
            The input samples.

        Returns
        -------
        y : array of shape [n_samples]
            The predicted target values.
        """
        check_is_fitted(self)
        return self.estimator_.predict(self.transform(X))

    @if_delegate_has_method(delegate='estimator')
    def score(self, X, y):
        """Reduce X to the selected features and then return the score of the
        underlying estimator.

        Parameters
        ----------
        X : array of shape [n_samples, n_features]
            The input samples.

        y : array of shape [n_samples]
            The target values.
        """
        check_is_fitted(self)
        return self.estimator_.score(self.transform(X), y)

    def _get_support_mask(self):
        # Boolean mask of the features kept by the elimination loop.
        check_is_fitted(self)
        return self.support_

    @if_delegate_has_method(delegate='estimator')
    def decision_function(self, X):
        """Compute the decision function of ``X``.

        Parameters
        ----------
        X : {array-like or sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.

        Returns
        -------
        score : array, shape = [n_samples, n_classes] or [n_samples]
            The decision function of the input samples. The order of the
            classes corresponds to that in the attribute :term:`classes_`.
            Regression and binary classification produce an array of shape
            [n_samples].
        """
        check_is_fitted(self)
        return self.estimator_.decision_function(self.transform(X))

    @if_delegate_has_method(delegate='estimator')
    def predict_proba(self, X):
        """Predict class probabilities for X.

        Parameters
        ----------
        X : {array-like or sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.

        Returns
        -------
        p : array of shape (n_samples, n_classes)
            The class probabilities of the input samples. The order of the
            classes corresponds to that in the attribute :term:`classes_`.
        """
        check_is_fitted(self)
        return self.estimator_.predict_proba(self.transform(X))

    @if_delegate_has_method(delegate='estimator')
    def predict_log_proba(self, X):
        """Predict class log-probabilities for X.

        Parameters
        ----------
        X : array of shape [n_samples, n_features]
            The input samples.

        Returns
        -------
        p : array of shape (n_samples, n_classes)
            The class log-probabilities of the input samples. The order of the
            classes corresponds to that in the attribute :term:`classes_`.
        """
        check_is_fitted(self)
        return self.estimator_.predict_log_proba(self.transform(X))

    def _more_tags(self):
        # Propagate NaN support from the wrapped estimator; RFE itself is not
        # expected to reach good scores on the common-test toy problems.
        return {
            'poor_score': True,
            'allow_nan': _safe_tags(self.estimator, key='allow_nan'),
            'requires_y': True,
        }
class RFECV(RFE):
"""Feature ranking with recursive feature elimination and cross-validated
selection of the best number of features.
See glossary entry for :term:`cross-validation estimator`.
Read more in the :ref:`User Guide <rfe>`.
Parameters
----------
estimator : ``Estimator`` instance
A supervised learning estimator with a ``fit`` method that provides
information about feature importance either through a ``coef_``
attribute or through a ``feature_importances_`` attribute.
step : int or float, default=1
If greater than or equal to 1, then ``step`` corresponds to the
(integer) number of features to remove at each iteration.
If within (0.0, 1.0), then ``step`` corresponds to the percentage
(rounded down) of features to remove at each iteration.
Note that the last iteration may remove fewer than ``step`` features in
order to reach ``min_features_to_select``.
min_features_to_select : int, default=1
The minimum number of features to be selected. This number of features
will always be scored, even if the difference between the original
feature count and ``min_features_to_select`` isn't divisible by
``step``.
.. versionadded:: 0.20
cv : int, cross-validation generator or an iterable, default=None
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 5-fold cross-validation,
- integer, to specify the number of folds.
- :term:`CV splitter`,
- An iterable yielding (train, test) splits as arrays of indices.
For integer/None inputs, if ``y`` is binary or multiclass,
:class:`~sklearn.model_selection.StratifiedKFold` is used. If the
estimator is a classifier or if ``y`` is neither binary nor multiclass,
:class:`~sklearn.model_selection.KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
.. versionchanged:: 0.22
``cv`` default value of None changed from 3-fold to 5-fold.
scoring : string, callable or None, default=None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
verbose : int, default=0
Controls verbosity of output.
n_jobs : int or None, default=None
Number of cores to run in parallel while fitting across folds.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
.. versionadded:: 0.18
importance_getter : str or callable, default='auto'
If 'auto', uses the feature importance either through a `coef_`
or `feature_importances_` attributes of estimator.
Also accepts a string that specifies an attribute name/path
for extracting feature importance.
For example, give `regressor_.coef_` in case of
:class:`~sklearn.compose.TransformedTargetRegressor` or
`named_steps.clf.feature_importances_` in case of
:class:`~sklearn.pipeline.Pipeline` with its last step named `clf`.
If `callable`, overrides the default feature importance getter.
The callable is passed with the fitted estimator and it should
return importance for each feature.
.. versionadded:: 0.24
Attributes
----------
classes_ : ndarray of shape (n_classes,)
The classes labels. Only available when `estimator` is a classifier.
estimator_ : ``Estimator`` instance
The fitted estimator used to select features.
grid_scores_ : ndarray of shape (n_subsets_of_features,)
The cross-validation scores such that
``grid_scores_[i]`` corresponds to
the CV score of the i-th subset of features.
n_features_ : int
The number of selected features with cross-validation.
n_features_in_ : int
Number of features seen during :term:`fit`. Only defined if the
underlying estimator exposes such an attribute when fit.
.. versionadded:: 0.24
ranking_ : ndarray of shape (n_features,)
The feature ranking, such that `ranking_[i]`
corresponds to the ranking
position of the i-th feature.
Selected (i.e., estimated best)
features are assigned rank 1.
support_ : ndarray of shape (n_features,)
The mask of selected features.
Notes
-----
The size of ``grid_scores_`` is equal to
``ceil((n_features - min_features_to_select) / step) + 1``,
where step is the number of features removed at each iteration.
Allows NaN/Inf in the input if the underlying estimator does as well.
Examples
--------
The following example shows how to retrieve the 5 a-priori unknown
informative features in the Friedman #1 dataset.
>>> from sklearn.datasets import make_friedman1
>>> from sklearn.feature_selection import RFECV
>>> from sklearn.svm import SVR
>>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
>>> estimator = SVR(kernel="linear")
>>> selector = RFECV(estimator, step=1, cv=5)
>>> selector = selector.fit(X, y)
>>> selector.support_
array([ True, True, True, True, True, False, False, False, False,
False])
>>> selector.ranking_
array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])
See Also
--------
RFE : Recursive feature elimination.
References
----------
.. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
for cancer classification using support vector machines",
Mach. Learn., 46(1-3), 389--422, 2002.
"""
def __init__(self, estimator, *, step=1, min_features_to_select=1,
             cv=None, scoring=None, verbose=0, n_jobs=None,
             importance_getter='auto'):
    # Per scikit-learn convention, __init__ only stores the constructor
    # arguments verbatim (in signature order); all validation is
    # deferred to ``fit``.
    self.estimator = estimator
    self.step = step
    self.min_features_to_select = min_features_to_select
    self.cv = cv
    self.scoring = scoring
    self.verbose = verbose
    self.n_jobs = n_jobs
    self.importance_getter = importance_getter
def fit(self, X, y, groups=None):
    """Fit the RFE model and automatically tune the number of selected
    features.

    A fresh :class:`RFE` is scored on every CV fold for each candidate
    number of features; the best-scoring candidate is then refitted on
    the whole dataset.

    Parameters
    ----------
    X : {array-like, sparse matrix} of shape (n_samples, n_features)
        Training vector, where `n_samples` is the number of samples and
        `n_features` is the total number of features.

    y : array-like of shape (n_samples,)
        Target values (integers for classification, real numbers for
        regression).

    groups : array-like of shape (n_samples,) or None, default=None
        Group labels for the samples used while splitting the dataset into
        train/test set. Only used in conjunction with a "Group" :term:`cv`
        instance (e.g., :class:`~sklearn.model_selection.GroupKFold`).

        .. versionadded:: 0.20

    Returns
    -------
    self : object
        Fitted estimator.
    """
    tags = self._get_tags()
    # NaN/Inf pass through only when the wrapped estimator's tags
    # declare NaN support.
    X, y = self._validate_data(
        X, y, accept_sparse="csr", ensure_min_features=2,
        force_all_finite=not tags.get('allow_nan', True),
        multi_output=True
    )

    # Initialization
    cv = check_cv(self.cv, y, classifier=is_classifier(self.estimator))
    scorer = check_scoring(self.estimator, scoring=self.scoring)
    n_features = X.shape[1]

    # A fractional ``step`` means "remove this share of the features per
    # iteration"; an integral one is an absolute per-iteration count.
    if 0.0 < self.step < 1.0:
        step = int(max(1, self.step * n_features))
    else:
        step = int(self.step)
    if step <= 0:
        raise ValueError("Step must be >0")

    # Build an RFE object, which will evaluate and score each possible
    # feature count, down to self.min_features_to_select
    rfe = RFE(estimator=self.estimator,
              n_features_to_select=self.min_features_to_select,
              importance_getter=self.importance_getter,
              step=self.step, verbose=self.verbose)

    # Determine the number of subsets of features by fitting across
    # the train folds and choosing the "features_to_select" parameter
    # that gives the least averaged error across all folds.

    # Note that joblib raises a non-picklable error for bound methods
    # even if n_jobs is set to 1 with the default multiprocessing
    # backend.
    # This branching is done so that to
    # make sure that user code that sets n_jobs to 1
    # and provides bound methods as scorers is not broken with the
    # addition of n_jobs parameter in version 0.18.
    if effective_n_jobs(self.n_jobs) == 1:
        parallel, func = list, _rfe_single_fit
    else:
        parallel = Parallel(n_jobs=self.n_jobs)
        func = delayed(_rfe_single_fit)

    scores = parallel(
        func(rfe, self.estimator, X, y, train, test, scorer)
        for train, test in cv.split(X, y, groups))

    # Sum the per-fold score curves; normalization by the number of
    # splits happens below when ``grid_scores_`` is assigned.
    scores = np.sum(scores, axis=0)
    # Arg-max over the reversed curve so that ties are broken in favor
    # of the largest index, i.e. (per the formula below) the candidate
    # that keeps the fewest features.
    scores_rev = scores[::-1]
    argmax_idx = len(scores) - np.argmax(scores_rev) - 1
    n_features_to_select = max(
        n_features - (argmax_idx * step),
        self.min_features_to_select)

    # Re-execute an elimination with best_k over the whole set
    rfe = RFE(estimator=self.estimator,
              n_features_to_select=n_features_to_select, step=self.step,
              importance_getter=self.importance_getter,
              verbose=self.verbose)

    rfe.fit(X, y)

    # Set final attributes
    self.support_ = rfe.support_
    self.n_features_ = rfe.n_features_
    self.ranking_ = rfe.ranking_
    # Refit a clone of the estimator on the selected features only.
    self.estimator_ = clone(self.estimator)
    self.estimator_.fit(self.transform(X), y)

    # Fixing a normalization error, n is equal to get_n_splits(X, y) - 1
    # here, the scores are normalized by get_n_splits(X, y)
    self.grid_scores_ = scores[::-1] / cv.get_n_splits(X, y, groups)
    return self
| {
"content_hash": "9e030644b2c317df617f2a27e161790e",
"timestamp": "",
"source": "github",
"line_count": 643,
"max_line_length": 79,
"avg_line_length": 37.54121306376361,
"alnum_prop": 0.6181283400306558,
"repo_name": "kevin-intel/scikit-learn",
"id": "b6db0e9444524385705411b023075070566e8aa0",
"size": "24324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sklearn/feature_selection/_rfe.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "394787"
},
{
"name": "C++",
"bytes": "140225"
},
{
"name": "Makefile",
"bytes": "1579"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "6394128"
},
{
"name": "Shell",
"bytes": "9250"
}
],
"symlink_target": ""
} |
import os
import sys

if __name__ == "__main__":
    # Standard Django management entry point: point Django at this
    # project's settings module (unless the environment already set one),
    # then dispatch the CLI sub-command (runserver, migrate, ...) given
    # on the command line.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nonhumanuser.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
| {
"content_hash": "4fdf09cff004352149ef940bac337044",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 76,
"avg_line_length": 25.88888888888889,
"alnum_prop": 0.7167381974248928,
"repo_name": "bane138/nonhumanuser",
"id": "73af4696643e9b8445ec19282925fdf34306b952",
"size": "255",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "97404"
},
{
"name": "HTML",
"bytes": "40972"
},
{
"name": "JavaScript",
"bytes": "161253"
},
{
"name": "Python",
"bytes": "101304"
},
{
"name": "Shell",
"bytes": "248"
}
],
"symlink_target": ""
} |
"""Tests for streams.py."""
import gc
import os
import socket
import sys
import unittest
from unittest import mock
try:
import ssl
except ImportError:
ssl = None
import asyncio
from asyncio import test_utils
class StreamReaderTests(test_utils.TestCase):
DATA = b'line1\nline2\nline3\n'
def setUp(self):
    # Give every test its own event loop so state cannot leak between
    # tests. set_event_loop comes from test_utils.TestCase — presumably
    # it installs/tracks the loop for cleanup; confirm there.
    self.loop = asyncio.new_event_loop()
    self.set_event_loop(self.loop)
def tearDown(self):
    # just in case if we have transport close callbacks
    test_utils.run_briefly(self.loop)

    self.loop.close()
    # Collect now so transports/protocols are finalized deterministically
    # before the next test starts.
    gc.collect()
    super().tearDown()
@mock.patch('asyncio.streams.events')
def test_ctor_global_loop(self, m_events):
    # Without an explicit loop argument, StreamReader must fall back to
    # events.get_event_loop() (patched here, so no real loop is created).
    stream = asyncio.StreamReader()
    self.assertIs(stream._loop, m_events.get_event_loop.return_value)
def _basetest_open_connection(self, open_connection_fut):
    # Shared body for the TCP and UNIX-socket open_connection tests:
    # do a plain HTTP/1.0 exchange against the test server and exercise
    # both readline() and read()-to-EOF on the reply.
    reader, writer = self.loop.run_until_complete(open_connection_fut)
    writer.write(b'GET / HTTP/1.0\r\n\r\n')
    f = reader.readline()
    data = self.loop.run_until_complete(f)
    self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')
    f = reader.read()
    data = self.loop.run_until_complete(f)
    self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
    writer.close()
def test_open_connection(self):
with test_utils.run_test_server() as httpd:
conn_fut = asyncio.open_connection(*httpd.address,
loop=self.loop)
self._basetest_open_connection(conn_fut)
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_open_unix_connection(self):
with test_utils.run_test_unix_server() as httpd:
conn_fut = asyncio.open_unix_connection(httpd.address,
loop=self.loop)
self._basetest_open_connection(conn_fut)
def _basetest_open_connection_no_loop_ssl(self, open_connection_fut):
try:
reader, writer = self.loop.run_until_complete(open_connection_fut)
finally:
asyncio.set_event_loop(None)
writer.write(b'GET / HTTP/1.0\r\n\r\n')
f = reader.read()
data = self.loop.run_until_complete(f)
self.assertTrue(data.endswith(b'\r\n\r\nTest message'))
writer.close()
@unittest.skipIf(ssl is None, 'No ssl module')
def test_open_connection_no_loop_ssl(self):
with test_utils.run_test_server(use_ssl=True) as httpd:
conn_fut = asyncio.open_connection(
*httpd.address,
ssl=test_utils.dummy_ssl_context(),
loop=self.loop)
self._basetest_open_connection_no_loop_ssl(conn_fut)
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_open_unix_connection_no_loop_ssl(self):
with test_utils.run_test_unix_server(use_ssl=True) as httpd:
conn_fut = asyncio.open_unix_connection(
httpd.address,
ssl=test_utils.dummy_ssl_context(),
server_hostname='',
loop=self.loop)
self._basetest_open_connection_no_loop_ssl(conn_fut)
def _basetest_open_connection_error(self, open_connection_fut):
reader, writer = self.loop.run_until_complete(open_connection_fut)
writer._protocol.connection_lost(ZeroDivisionError())
f = reader.read()
with self.assertRaises(ZeroDivisionError):
self.loop.run_until_complete(f)
writer.close()
test_utils.run_briefly(self.loop)
def test_open_connection_error(self):
with test_utils.run_test_server() as httpd:
conn_fut = asyncio.open_connection(*httpd.address,
loop=self.loop)
self._basetest_open_connection_error(conn_fut)
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_open_unix_connection_error(self):
with test_utils.run_test_unix_server() as httpd:
conn_fut = asyncio.open_unix_connection(httpd.address,
loop=self.loop)
self._basetest_open_connection_error(conn_fut)
def test_feed_empty_data(self):
    # Feeding an empty byte string must be a no-op on the buffer.
    reader = asyncio.StreamReader(loop=self.loop)
    reader.feed_data(b'')
    self.assertEqual(b'', reader._buffer)
def test_feed_nonempty_data(self):
    # Fed bytes are retained verbatim in the internal buffer until read.
    reader = asyncio.StreamReader(loop=self.loop)
    reader.feed_data(self.DATA)
    self.assertEqual(self.DATA, reader._buffer)
def test_read_zero(self):
    # read(0) completes immediately with b'' and leaves the buffer
    # untouched.
    reader = asyncio.StreamReader(loop=self.loop)
    reader.feed_data(self.DATA)
    result = self.loop.run_until_complete(reader.read(0))
    self.assertEqual(b'', result)
    self.assertEqual(self.DATA, reader._buffer)
def test_read(self):
# Read bytes.
stream = asyncio.StreamReader(loop=self.loop)
read_task = asyncio.Task(stream.read(30), loop=self.loop)
def cb():
stream.feed_data(self.DATA)
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(self.DATA, data)
self.assertEqual(b'', stream._buffer)
def test_read_line_breaks(self):
# Read bytes without line breaks.
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'line1')
stream.feed_data(b'line2')
data = self.loop.run_until_complete(stream.read(5))
self.assertEqual(b'line1', data)
self.assertEqual(b'line2', stream._buffer)
def test_read_eof(self):
# Read bytes, stop at eof.
stream = asyncio.StreamReader(loop=self.loop)
read_task = asyncio.Task(stream.read(1024), loop=self.loop)
def cb():
stream.feed_eof()
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(b'', data)
self.assertEqual(b'', stream._buffer)
def test_read_until_eof(self):
# Read all bytes until eof.
stream = asyncio.StreamReader(loop=self.loop)
read_task = asyncio.Task(stream.read(-1), loop=self.loop)
def cb():
stream.feed_data(b'chunk1\n')
stream.feed_data(b'chunk2')
stream.feed_eof()
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(b'chunk1\nchunk2', data)
self.assertEqual(b'', stream._buffer)
def test_read_exception(self):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'line\n')
data = self.loop.run_until_complete(stream.read(2))
self.assertEqual(b'li', data)
stream.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.read(2))
def test_readline(self):
# Read one line. 'readline' will need to wait for the data
# to come from 'cb'
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'chunk1 ')
read_task = asyncio.Task(stream.readline(), loop=self.loop)
def cb():
stream.feed_data(b'chunk2 ')
stream.feed_data(b'chunk3 ')
stream.feed_data(b'\n chunk4')
self.loop.call_soon(cb)
line = self.loop.run_until_complete(read_task)
self.assertEqual(b'chunk1 chunk2 chunk3 \n', line)
self.assertEqual(b' chunk4', stream._buffer)
def test_readline_limit_with_existing_data(self):
# Read one line. The data is in StreamReader's buffer
# before the event loop is run.
stream = asyncio.StreamReader(limit=3, loop=self.loop)
stream.feed_data(b'li')
stream.feed_data(b'ne1\nline2\n')
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
# The buffer should contain the remaining data after exception
self.assertEqual(b'line2\n', stream._buffer)
stream = asyncio.StreamReader(limit=3, loop=self.loop)
stream.feed_data(b'li')
stream.feed_data(b'ne1')
stream.feed_data(b'li')
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
# No b'\n' at the end. The 'limit' is set to 3. So before
# waiting for the new data in buffer, 'readline' will consume
# the entire buffer, and since the length of the consumed data
# is more than 3, it will raise a ValueError. The buffer is
# expected to be empty now.
self.assertEqual(b'', stream._buffer)
def test_at_eof(self):
stream = asyncio.StreamReader(loop=self.loop)
self.assertFalse(stream.at_eof())
stream.feed_data(b'some data\n')
self.assertFalse(stream.at_eof())
self.loop.run_until_complete(stream.readline())
self.assertFalse(stream.at_eof())
stream.feed_data(b'some data\n')
stream.feed_eof()
self.loop.run_until_complete(stream.readline())
self.assertTrue(stream.at_eof())
def test_readline_limit(self):
# Read one line. StreamReaders are fed with data after
# their 'readline' methods are called.
stream = asyncio.StreamReader(limit=7, loop=self.loop)
def cb():
stream.feed_data(b'chunk1')
stream.feed_data(b'chunk2')
stream.feed_data(b'chunk3\n')
stream.feed_eof()
self.loop.call_soon(cb)
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
# The buffer had just one line of data, and after raising
# a ValueError it should be empty.
self.assertEqual(b'', stream._buffer)
stream = asyncio.StreamReader(limit=7, loop=self.loop)
def cb():
stream.feed_data(b'chunk1')
stream.feed_data(b'chunk2\n')
stream.feed_data(b'chunk3\n')
stream.feed_eof()
self.loop.call_soon(cb)
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
self.assertEqual(b'chunk3\n', stream._buffer)
def test_readline_nolimit_nowait(self):
# All needed data for the first 'readline' call will be
# in the buffer.
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(self.DATA[:6])
stream.feed_data(self.DATA[6:])
line = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'line1\n', line)
self.assertEqual(b'line2\nline3\n', stream._buffer)
def test_readline_eof(self):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'some data')
stream.feed_eof()
line = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'some data', line)
def test_readline_empty_eof(self):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_eof()
line = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'', line)
def test_readline_read_byte_count(self):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(self.DATA)
self.loop.run_until_complete(stream.readline())
data = self.loop.run_until_complete(stream.read(7))
self.assertEqual(b'line2\nl', data)
self.assertEqual(b'ine3\n', stream._buffer)
def test_readline_exception(self):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'line\n')
data = self.loop.run_until_complete(stream.readline())
self.assertEqual(b'line\n', data)
stream.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readline())
self.assertEqual(b'', stream._buffer)
def test_readexactly_zero_or_less(self):
# Read exact number of bytes (zero or less).
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(self.DATA)
data = self.loop.run_until_complete(stream.readexactly(0))
self.assertEqual(b'', data)
self.assertEqual(self.DATA, stream._buffer)
data = self.loop.run_until_complete(stream.readexactly(-1))
self.assertEqual(b'', data)
self.assertEqual(self.DATA, stream._buffer)
def test_readexactly(self):
# Read exact number of bytes.
stream = asyncio.StreamReader(loop=self.loop)
n = 2 * len(self.DATA)
read_task = asyncio.Task(stream.readexactly(n), loop=self.loop)
def cb():
stream.feed_data(self.DATA)
stream.feed_data(self.DATA)
stream.feed_data(self.DATA)
self.loop.call_soon(cb)
data = self.loop.run_until_complete(read_task)
self.assertEqual(self.DATA + self.DATA, data)
self.assertEqual(self.DATA, stream._buffer)
def test_readexactly_eof(self):
# Read exact number of bytes (eof).
stream = asyncio.StreamReader(loop=self.loop)
n = 2 * len(self.DATA)
read_task = asyncio.Task(stream.readexactly(n), loop=self.loop)
def cb():
stream.feed_data(self.DATA)
stream.feed_eof()
self.loop.call_soon(cb)
with self.assertRaises(asyncio.IncompleteReadError) as cm:
self.loop.run_until_complete(read_task)
self.assertEqual(cm.exception.partial, self.DATA)
self.assertEqual(cm.exception.expected, n)
self.assertEqual(str(cm.exception),
'18 bytes read on a total of 36 expected bytes')
self.assertEqual(b'', stream._buffer)
def test_readexactly_exception(self):
stream = asyncio.StreamReader(loop=self.loop)
stream.feed_data(b'line\n')
data = self.loop.run_until_complete(stream.readexactly(2))
self.assertEqual(b'li', data)
stream.set_exception(ValueError())
self.assertRaises(
ValueError, self.loop.run_until_complete, stream.readexactly(2))
def test_exception(self):
stream = asyncio.StreamReader(loop=self.loop)
self.assertIsNone(stream.exception())
exc = ValueError()
stream.set_exception(exc)
self.assertIs(stream.exception(), exc)
def test_exception_waiter(self):
stream = asyncio.StreamReader(loop=self.loop)
@asyncio.coroutine
def set_err():
stream.set_exception(ValueError())
t1 = asyncio.Task(stream.readline(), loop=self.loop)
t2 = asyncio.Task(set_err(), loop=self.loop)
self.loop.run_until_complete(asyncio.wait([t1, t2], loop=self.loop))
self.assertRaises(ValueError, t1.result)
def test_exception_cancel(self):
stream = asyncio.StreamReader(loop=self.loop)
t = asyncio.Task(stream.readline(), loop=self.loop)
test_utils.run_briefly(self.loop)
t.cancel()
test_utils.run_briefly(self.loop)
# The following line fails if set_exception() isn't careful.
stream.set_exception(RuntimeError('message'))
test_utils.run_briefly(self.loop)
self.assertIs(stream._waiter, None)
def test_start_server(self):
class MyServer:
def __init__(self, loop):
self.server = None
self.loop = loop
@asyncio.coroutine
def handle_client(self, client_reader, client_writer):
data = yield from client_reader.readline()
client_writer.write(data)
def start(self):
sock = socket.socket()
sock.bind(('127.0.0.1', 0))
self.server = self.loop.run_until_complete(
asyncio.start_server(self.handle_client,
sock=sock,
loop=self.loop))
return sock.getsockname()
def handle_client_callback(self, client_reader, client_writer):
task = asyncio.Task(client_reader.readline(), loop=self.loop)
def done(task):
client_writer.write(task.result())
task.add_done_callback(done)
def start_callback(self):
sock = socket.socket()
sock.bind(('127.0.0.1', 0))
addr = sock.getsockname()
sock.close()
self.server = self.loop.run_until_complete(
asyncio.start_server(self.handle_client_callback,
host=addr[0], port=addr[1],
loop=self.loop))
return addr
def stop(self):
if self.server is not None:
self.server.close()
self.loop.run_until_complete(self.server.wait_closed())
self.server = None
@asyncio.coroutine
def client(addr):
reader, writer = yield from asyncio.open_connection(
*addr, loop=self.loop)
# send a line
writer.write(b"hello world!\n")
# read it back
msgback = yield from reader.readline()
writer.close()
return msgback
# test the server variant with a coroutine as client handler
server = MyServer(self.loop)
addr = server.start()
msg = self.loop.run_until_complete(asyncio.Task(client(addr),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
# test the server variant with a callback as client handler
server = MyServer(self.loop)
addr = server.start_callback()
msg = self.loop.run_until_complete(asyncio.Task(client(addr),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_start_unix_server(self):
class MyServer:
def __init__(self, loop, path):
self.server = None
self.loop = loop
self.path = path
@asyncio.coroutine
def handle_client(self, client_reader, client_writer):
data = yield from client_reader.readline()
client_writer.write(data)
def start(self):
self.server = self.loop.run_until_complete(
asyncio.start_unix_server(self.handle_client,
path=self.path,
loop=self.loop))
def handle_client_callback(self, client_reader, client_writer):
task = asyncio.Task(client_reader.readline(), loop=self.loop)
def done(task):
client_writer.write(task.result())
task.add_done_callback(done)
def start_callback(self):
self.server = self.loop.run_until_complete(
asyncio.start_unix_server(self.handle_client_callback,
path=self.path,
loop=self.loop))
def stop(self):
if self.server is not None:
self.server.close()
self.loop.run_until_complete(self.server.wait_closed())
self.server = None
@asyncio.coroutine
def client(path):
reader, writer = yield from asyncio.open_unix_connection(
path, loop=self.loop)
# send a line
writer.write(b"hello world!\n")
# read it back
msgback = yield from reader.readline()
writer.close()
return msgback
# test the server variant with a coroutine as client handler
with test_utils.unix_socket_path() as path:
server = MyServer(self.loop, path)
server.start()
msg = self.loop.run_until_complete(asyncio.Task(client(path),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
# test the server variant with a callback as client handler
with test_utils.unix_socket_path() as path:
server = MyServer(self.loop, path)
server.start_callback()
msg = self.loop.run_until_complete(asyncio.Task(client(path),
loop=self.loop))
server.stop()
self.assertEqual(msg, b"hello world!\n")
@unittest.skipIf(sys.platform == 'win32', "Don't have pipes")
def test_read_all_from_pipe_reader(self):
# See Tulip issue 168. This test is derived from the example
# subprocess_attach_read_pipe.py, but we configure the
# StreamReader's limit so that twice the limit is less than the size
# of the data written. Also we must explicitly attach a child
# watcher to the event loop.
code = """\
import os, sys
fd = int(sys.argv[1])
os.write(fd, b'data')
os.close(fd)
"""
rfd, wfd = os.pipe()
args = [sys.executable, '-c', code, str(wfd)]
pipe = open(rfd, 'rb', 0)
reader = asyncio.StreamReader(loop=self.loop, limit=1)
protocol = asyncio.StreamReaderProtocol(reader, loop=self.loop)
transport, _ = self.loop.run_until_complete(
self.loop.connect_read_pipe(lambda: protocol, pipe))
watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(self.loop)
try:
asyncio.set_child_watcher(watcher)
create = asyncio.create_subprocess_exec(*args,
pass_fds={wfd},
loop=self.loop)
proc = self.loop.run_until_complete(create)
self.loop.run_until_complete(proc.wait())
finally:
asyncio.set_child_watcher(None)
os.close(wfd)
data = self.loop.run_until_complete(reader.read(-1))
self.assertEqual(data, b'data')
def test_streamreader_constructor(self):
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
# Tulip issue #184: Ensure that StreamReaderProtocol constructor
# retrieves the current loop if the loop parameter is not set
reader = asyncio.StreamReader()
self.assertIs(reader._loop, self.loop)
def test_streamreaderprotocol_constructor(self):
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
# Tulip issue #184: Ensure that StreamReaderProtocol constructor
# retrieves the current loop if the loop parameter is not set
reader = mock.Mock()
protocol = asyncio.StreamReaderProtocol(reader)
self.assertIs(protocol._loop, self.loop)
if __name__ == '__main__':
    # Allow running this test module directly: python test_streams.py
    unittest.main()
| {
"content_hash": "6eb59db8ac4d60d403b610ea5d620b99",
"timestamp": "",
"source": "github",
"line_count": 641,
"max_line_length": 78,
"avg_line_length": 36.441497659906396,
"alnum_prop": 0.5849137377456227,
"repo_name": "munyirik/python",
"id": "2273049b81577c0ded9e200cd8a26e90ff58adb8",
"size": "23359",
"binary": false,
"copies": "14",
"ref": "refs/heads/develop",
"path": "cpython/Lib/test/test_asyncio/test_streams.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "470920"
},
{
"name": "Batchfile",
"bytes": "35551"
},
{
"name": "C",
"bytes": "17872871"
},
{
"name": "C#",
"bytes": "1231"
},
{
"name": "C++",
"bytes": "356072"
},
{
"name": "CSS",
"bytes": "2839"
},
{
"name": "Common Lisp",
"bytes": "24481"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "Groff",
"bytes": "254942"
},
{
"name": "HTML",
"bytes": "130698"
},
{
"name": "JavaScript",
"bytes": "10616"
},
{
"name": "Makefile",
"bytes": "25026"
},
{
"name": "Objective-C",
"bytes": "33182"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PostScript",
"bytes": "13803"
},
{
"name": "PowerShell",
"bytes": "1420"
},
{
"name": "Prolog",
"bytes": "557"
},
{
"name": "Python",
"bytes": "24911704"
},
{
"name": "R",
"bytes": "5378"
},
{
"name": "Shell",
"bytes": "437386"
},
{
"name": "TeX",
"bytes": "323102"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
"""leastsq_fit.py
This example shows how to fit a PDF to data via the method of
least-squares.
"""
import matplotlib.pyplot as plt
import numpy as np
import statspy as sp
import scipy.stats
# Define the true PDF (gaussian signal on top of an exponentially falling bkg)
# The statspy declaration strings name both the PF and its parameters.
pdf_true_sig = sp.PF("pdf_true_sig=norm(x;mu_true=125,sigma_true=10)")
pdf_true_bkg = sp.PF("pdf_true_bkg=expon(x;offset_true=50,lambda_true=20)")
# 95% background / 5% signal mixture
pdf_true = 0.95 * pdf_true_bkg + 0.05 * pdf_true_sig
# Sample data from the true PDF
nexp = 2000 # number of expected events
# Actual number of observed events fluctuates around nexp (Poisson draw)
nobs = scipy.stats.poisson.rvs(nexp, size=1)[0]
data = pdf_true.rvs(size=nobs)
fig = plt.figure()
fig.patch.set_color('w')
ax = fig.add_subplot(111)
# Build histogram of the data
ydata, bins, patches = ax.hist(data, 30, range=[50, 200], log=True,
                               facecolor='green', alpha=0.75, label='Data')
xdata = 0.5*(bins[1:]+bins[:-1]) # Bin centers
dx = bins[1:] - bins[:-1] # Bin widths
# Define the background and signal PFs
pdf_fit_bkg = sp.PF("pdf_fit_bkg=expon(x;offset=50,lambda=10)")
sp.get_obj("lambda").label = "\\lambda"
offset = sp.get_obj('offset')
offset.const = True # Fix parameter value
#pdf_fit_bkg.norm.value = nobs
#sidebands = np.logical_or((xdata < 100), (xdata > 150))
#pdf_fit_bkg.leastsq_fit(xdata, ydata, dx=dx, cond=sidebands)
pdf_fit_sig = sp.PF("pdf_fit_sig=norm(x;mu=120,sigma=20)")
sp.get_obj("mu").label = "\\mu"
sp.get_obj("sigma").label = "\\sigma"
# Total fit model = background + signal; its overall normalization floats.
pdf_fit = pdf_fit_bkg + pdf_fit_sig
pdf_fit.name = 'pdf_fit'
pdf_fit.norm.const = False # Fit total rate to data
pdf_fit.norm.label = 'Norm'
pdf_fit.norm.value = nobs
pdf_fit_sig.norm.label = 'frac(sig)'
# Least square fit to the data (whole data range)
params, pcov, chi2min, pvalue = pdf_fit.leastsq_fit(xdata, ydata, dx=dx)
# Evaluate the fitted PF at bin centers, scaled to counts per bin
yfit = pdf_fit(xdata) * dx
eyfit = pdf_fit.dF(xdata) * dx # Get error bars on the fitted PF
ax.plot(xdata, yfit, 'r--', linewidth=2, label='Fitted PF')
ax.fill_between(xdata, yfit-eyfit, yfit+eyfit, facecolor='y')
ax.plot(xdata, pdf_true(xdata) * dx * nexp, 'b:', linewidth=2, label='True PF')
# Plot
ax.set_xlabel('x')
ax.set_ylabel('Evts / %3.2f' % dx[0])
ax.set_xlim(50, 200)
ax.set_ylim(0.1, nexp)
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles[::-1], labels[::-1])
# Text box summarizing chi2/ndf, p-value and fitted parameter values
# NOTE(review): "\D" and "\c" in the string below are not valid Python
# escapes (SyntaxWarning on modern Python); they survive literally, which
# is what the mathtext rendering relies on here.
fittxt = "Fit results:\n"
fittxt += "$\Delta\chi^{2}/ndf = %3.2f$\n" % (chi2min/(len(ydata)-len(params)))
fittxt += "$p-value = %4.3f$\n" % pvalue
for par in params:
    fittxt += "$%s = %3.2f \\pm %3.2f$\n" % (par.label, par.value, par.unc)
ax.text(0.05, 0.05, fittxt, transform=ax.transAxes, fontsize=14,
        verticalalignment='bottom', bbox={"boxstyle":"square","fc":"w"})
plt.show()
fig.savefig('leastsq_fit.png', dpi=fig.dpi)
| {
"content_hash": "8d8e3e58719c571100982a56d77c830c",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 79,
"avg_line_length": 37.21917808219178,
"alnum_prop": 0.6650717703349283,
"repo_name": "bruneli/statspy",
"id": "585879986de39448045f42c49d579802bd78164e",
"size": "2739",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/leastsq_fit.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "150620"
},
{
"name": "Shell",
"bytes": "5107"
}
],
"symlink_target": ""
} |
"""Test legalize pass"""
import numpy as np
import tvm
from tvm import te
from tvm import relay
from tvm.contrib import graph_executor
from tvm.relay import transform, analysis
from tvm.relay.testing.temp_op_attr import TempOpAttr
def alpha_equal(x, y):
    """Compare the "main" functions of two modules for structural equality.

    Besides structural equality, also verifies that the hash function
    respects equality, i.e. equal functions produce equal hashes.
    """
    lhs = x["main"]
    rhs = y["main"]
    if not tvm.ir.structural_equal(lhs, rhs):
        return False
    return tvm.ir.structural_hash(lhs) == tvm.ir.structural_hash(rhs)
def run_opt_pass(expr, passes):
    """Wrap ``expr`` in an IRModule, apply ``passes``, and return the result.

    ``passes`` may be a single pass or a list of passes.  If ``expr`` was a
    relay.Function the transformed function is returned, otherwise only the
    transformed function's body expression.
    """
    if not isinstance(passes, list):
        passes = [passes]
    pipeline = tvm.transform.Sequential(passes)
    module = tvm.IRModule.from_expr(expr)
    with tvm.transform.PassContext(opt_level=3):
        module = pipeline(module)
    main_fn = module["main"]
    if isinstance(expr, relay.Function):
        return main_fn
    return main_fn.body
def test_qnn_legalize():
    """Test directly replacing an operator with a new one"""
    def before():
        # A module containing a single identity-like qnn.requantize.
        x = relay.var("x", shape=(1, 64, 56, 56), dtype="int8")
        y = relay.qnn.op.requantize(
            x,
            input_scale=relay.const(1, "float32"),
            input_zero_point=relay.const(0, "int32"),
            output_scale=relay.const(1, "float32"),
            output_zero_point=relay.const(0, "int32"),
            out_dtype="int8",
        )
        y = relay.Function([x], y)
        return y
    def legalize_qnn_requantize(attrs, inputs, types):
        # Custom legalization: prepend a no-op add before the requantize.
        data = inputs[0]
        data = relay.add(relay.const(0, "int8"), data)
        y = relay.qnn.op.requantize(
            data,
            input_scale=relay.const(1, "float32"),
            input_zero_point=relay.const(0, "int32"),
            output_scale=relay.const(1, "float32"),
            output_zero_point=relay.const(0, "int32"),
            out_dtype="int8",
        )
        return y
    def expected():
        # The graph after QNN Legalize with the custom rule above.
        x = relay.var("x", shape=(1, 64, 56, 56), dtype="int8")
        y = relay.add(relay.const(0, "int8"), x)
        z = relay.qnn.op.requantize(
            y,
            input_scale=relay.const(1, "float32"),
            input_zero_point=relay.const(0, "int32"),
            output_scale=relay.const(1, "float32"),
            output_zero_point=relay.const(0, "int32"),
            out_dtype="int8",
        )
        z = relay.Function([x], z)
        return z
    a = before()
    # Register the custom rule only for the duration of this test.
    with TempOpAttr("qnn.requantize", "FTVMQnnLegalize", legalize_qnn_requantize):
        # Check that Relay Legalize does not change the graph.
        a = run_opt_pass(a, relay.transform.Legalize())
        b = run_opt_pass(before(), transform.InferType())
        assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
        # Check that QNN Legalize modifies the graph.
        a = run_opt_pass(a, relay.qnn.transform.Legalize())
        b = run_opt_pass(expected(), transform.InferType())
        assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_qnn_legalize_qnn_conv2d():
    """Check target-dependent legalization of qnn.conv2d.

    Depending on the target's fast-int8 support and on the data/kernel
    dtype combination, QNN Legalize either leaves the graph unchanged,
    inserts casts while keeping qnn.conv2d, or lowers qnn away entirely.
    """
    def _get_mod(data_dtype, kernel_dtype):
        # Build an IRModule containing one qnn.conv2d with the given dtypes.
        data_shape = (1, 64, 256, 256)
        kernel_shape = (128, 64, 3, 3)
        data = relay.var("data", shape=data_shape, dtype=data_dtype)
        kernel = relay.var("kernel", shape=kernel_shape, dtype=kernel_dtype)
        func = relay.qnn.op.conv2d(
            data,
            kernel,
            input_zero_point=relay.const(1, "int32"),
            kernel_zero_point=relay.const(1, "int32"),
            input_scale=relay.const(1.0, "float32"),
            kernel_scale=relay.const(1.0, "float32"),
            kernel_size=(3, 3),
            channels=kernel_shape[0],
            strides=(1, 1),
            dilation=(1, 1),
            out_dtype="int32",
            data_layout="NCHW",
            kernel_layout="OIHW",
        )
        mod = relay.Function(relay.analysis.free_vars(func), func)
        mod = tvm.IRModule.from_expr(mod)
        return mod
    # Check uint8 x uint8 and int8 x int8 transformation
    for dtype in ("uint8", "int8"):
        mod = _get_mod(dtype, dtype)
        #############################################################
        # Check transformations for platforms with fast Int8 support.
        #############################################################
        # Check that Intel VNNI gets picked up.
        with tvm.target.Target("llvm -mcpu=skylake-avx512"):
            mod = relay.transform.InferType()(mod)
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert "cast" in legalized_mod.astext() and "qnn.conv2d" in legalized_mod.astext()
        # Since same dtype, there should not be any transformation
        with tvm.target.Target(
            "llvm -device=arm_cpu -mtriple=aarch64-linux-gnu -mattr=+v8.2a,+dotprod"
        ):
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert tvm.ir.structural_equal(mod, legalized_mod)
        ################################################################
        # Check transformations for platforms without fast Int8 support.
        ################################################################
        # Older Intel versions.
        with tvm.target.Target("llvm"):
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
        # Older ARM vesions.
        with tvm.target.Target("llvm -device=arm_cpu -mtriple=aarch64-linux-gnu"):
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
    # Check uint8 x int8 transformation
    mod = _get_mod("uint8", "int8")
    #############################################################
    # Check transformations for platforms with fast Int8 support.
    #############################################################
    # Check no transformation for Intel VNNI.
    with tvm.target.Target("llvm -mcpu=skylake-avx512"):
        mod = relay.transform.InferType()(mod)
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert tvm.ir.structural_equal(mod, legalized_mod)
    # ARM - so check that transformation has happened.
    with tvm.target.Target(
        "llvm -device=arm_cpu -mtriple=aarch64-linux-gnu -mattr=+v8.2a,+dotprod"
    ):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn.conv2d" in legalized_mod.astext()
    ################################################################
    # Check transformations for platforms without fast Int8 support.
    ################################################################
    # Older Intel versions.
    with tvm.target.Target("llvm"):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
    # Older ARM vesions.
    with tvm.target.Target("llvm -device=arm_cpu -mtriple=aarch64-linux-gnu"):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
    ###########################################
    # Check transformations for CUDA platforms.
    ###########################################
    with tvm.target.Target("cuda"):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn" in legalized_mod.astext()
def test_qnn_legalize_qnn_dense():
    """Check target-dependent legalization of qnn.dense.

    Mirrors test_qnn_legalize_qnn_conv2d: the expected outcome depends on
    the target's fast-int8 support and the data/kernel dtype combination.
    """
    def _get_mod(data_dtype, kernel_dtype):
        # Build an IRModule containing one qnn.dense with the given dtypes.
        data_shape = (10, 3)
        kernel_shape = (20, 3)
        data = relay.var("data", shape=data_shape, dtype=data_dtype)
        kernel = relay.var("kernel", shape=kernel_shape, dtype=kernel_dtype)
        func = relay.qnn.op.dense(
            data,
            kernel,
            input_zero_point=relay.const(1, "int32"),
            kernel_zero_point=relay.const(1, "int32"),
            input_scale=relay.const(1, "float32"),
            kernel_scale=relay.const(1, "float32"),
            units=kernel_shape[0],
            out_dtype="int32",
        )
        mod = relay.Function(relay.analysis.free_vars(func), func)
        mod = tvm.IRModule.from_expr(mod)
        return mod
    # Check uint8 x uint8 and int8 x int8 transformation
    for dtype in ("uint8", "int8"):
        mod = _get_mod(dtype, dtype)
        #############################################################
        # Check transformations for platforms with fast Int8 support.
        #############################################################
        # Check that Intel VNNI gets picked up.
        with tvm.target.Target("llvm -mcpu=skylake-avx512"):
            mod = relay.transform.InferType()(mod)
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert "cast" in legalized_mod.astext() and "qnn.dense" in legalized_mod.astext()
        # Since same dtype, there should not be any transformation
        with tvm.target.Target(
            "llvm -device=arm_cpu -mtriple=aarch64-linux-gnu -mattr=+v8.2a,+dotprod"
        ):
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert tvm.ir.structural_equal(mod, legalized_mod)
        ################################################################
        # Check transformations for platforms without fast Int8 support.
        ################################################################
        # Older Intel versions.
        with tvm.target.Target("llvm"):
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
        # Older ARM vesions.
        with tvm.target.Target("llvm -device=arm_cpu -mtriple=aarch64-linux-gnu"):
            legalized_mod = relay.qnn.transform.Legalize()(mod)
            assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
    # Check uint8 x int8 transformation
    mod = _get_mod("uint8", "int8")
    #############################################################
    # Check transformations for platforms with fast Int8 support.
    #############################################################
    # Check no transformation for Intel VNNI.
    with tvm.target.Target("llvm -mcpu=skylake-avx512"):
        mod = relay.transform.InferType()(mod)
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert tvm.ir.structural_equal(mod, legalized_mod)
    # ARM - so check that transformation has happened.
    with tvm.target.Target(
        "llvm -device=arm_cpu -mtriple=aarch64-linux-gnu -mattr=+v8.2a,+dotprod"
    ):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn.dense" in legalized_mod.astext()
    ################################################################
    # Check transformations for platforms without fast Int8 support.
    ################################################################
    # Older Intel versions.
    with tvm.target.Target("llvm"):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
    # Older ARM vesions.
    with tvm.target.Target("llvm -device=arm_cpu -mtriple=aarch64-linux-gnu"):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn" not in legalized_mod.astext()
    ###########################################
    # Check transformations for CUDA platforms.
    ###########################################
    with tvm.target.Target("cuda"):
        legalized_mod = relay.qnn.transform.Legalize()(mod)
        assert "cast" in legalized_mod.astext() and "qnn" in legalized_mod.astext()
# Allow running this test file directly, outside of the pytest runner.
if __name__ == "__main__":
    test_qnn_legalize()
    test_qnn_legalize_qnn_conv2d()
    test_qnn_legalize_qnn_dense()
| {
"content_hash": "5e174a9ddad75fedc34aa9d023c88ce9",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 99,
"avg_line_length": 41.91549295774648,
"alnum_prop": 0.5512432795698925,
"repo_name": "dmlc/tvm",
"id": "a30cd1e73e3f7b0cd3a4111a117885345b67b1cc",
"size": "12689",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "tests/python/relay/test_pass_qnn_legalize.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "6112"
},
{
"name": "C",
"bytes": "92947"
},
{
"name": "C++",
"bytes": "5765945"
},
{
"name": "CMake",
"bytes": "74045"
},
{
"name": "Go",
"bytes": "112384"
},
{
"name": "HTML",
"bytes": "8625"
},
{
"name": "Java",
"bytes": "171101"
},
{
"name": "JavaScript",
"bytes": "49803"
},
{
"name": "Makefile",
"bytes": "55807"
},
{
"name": "Objective-C",
"bytes": "15241"
},
{
"name": "Objective-C++",
"bytes": "46673"
},
{
"name": "Python",
"bytes": "7183810"
},
{
"name": "Rust",
"bytes": "181961"
},
{
"name": "Scala",
"bytes": "202148"
},
{
"name": "Shell",
"bytes": "97271"
},
{
"name": "Tcl",
"bytes": "53645"
},
{
"name": "Verilog",
"bytes": "30605"
}
],
"symlink_target": ""
} |
__author__ = 'vladimir'
from collections import OrderedDict
import traceback
import ujson
from flask import Blueprint, request
from . import *
from .database import update_query, select_query
user_blueprint = Blueprint("user", __name__, url_prefix="user")
# NOTE: despite the name, this endpoint does NOT return the existing
# profile when the user already exists -- it answers with c_USER_EXISTS.
@user_blueprint.route("/create/", methods=["POST"])
def create():
    """Create a new user profile from the JSON POST body.

    Expected JSON keys: email (required), username, name, about,
    isAnonymous.  Responds with a JSON envelope
    {"response": profile_or_error, "code": status}.
    """
    try:
        params = request.json
    except Exception:
        print "user.create params exception:\n{0}".format(traceback.format_exc())
        return ujson.dumps({"code": c_BAD_REQUEST, "response": "invalid json"})
    email = params.get("email", None)
    username = params.get("username", None)
    name = params.get("name", None)
    about = params.get("about", None)
    # Missing/None isAnonymous coerces to 0 (False).
    isAnonymous = int(bool(params.get("isAnonymous")))
    if email:
        user = get_user_profile(email)
        code = c_OK
        if not user:
            print "Creating user '{0}'".format(email)
            # row_id is the AUTO_INCREMENT id of the freshly inserted row.
            row_id = update_query(
                "INSERT INTO `user` (`username`, `email`, `name`, `about`, `isAnonymous`) VALUES (%s, %s, %s, %s, %s)",
                (username, email, name, about, isAnonymous),
                verbose=False
            )
            # Echo back the profile without a second SELECT; a new user has
            # no followers/following/subscriptions yet.
            user = {
                "id": row_id,
                "email": email,
                "username": username,
                "name": name,
                "about": about,
                "isAnonymous": isAnonymous,
                "followers": [],
                "following": [],
                "subscriptions": [],
            }
        else:
            user = "this user already exist"
            code = c_USER_EXISTS
    else:
        user = "invalid request"
        code = c_INVALID_REQUEST_PARAMS
    return ujson.dumps({
        "response": user,
        "code": code,
    })
@user_blueprint.route("/details/", methods=["GET"])
def detail():
    """Return the full profile for the user named by the `user` query arg."""
    email = request.args.get("user", None)
    if email:
        print "Detailing user '{0}'".format(email)
        user = get_user_profile(email)
        code = c_OK
        if not user:
            user = "user not found"
            code = c_NOT_FOUND
    else:
        user = "invalid request"
        code = c_INVALID_REQUEST_PARAMS
    return ujson.dumps({
        "response": user,
        "code": code,
    })
@user_blueprint.route("/updateProfile/", methods=["POST"])
def update():
    """Update a user's name/about fields from the JSON POST body.

    Expects JSON keys: user (email, required) plus name and/or about.
    NOTE(review): the UPDATE runs before existence is checked, so a
    request for an unknown email issues a no-op UPDATE and then
    returns c_NOT_FOUND.
    """
    try:
        params = request.json
    except Exception:
        print "user.updateProfile params exception:\n{0}".format(traceback.format_exc())
        return ujson.dumps({"code": c_BAD_REQUEST, "response": "invalid json"})
    email = params.get("user", None)
    name = params.get("name", None)
    about = params.get("about", None)
    if email and (name or about):
        print "Updating user '{0}'".format(email)
        update_query(
            "UPDATE `user` SET name = %s, about = %s WHERE email = %s",
            (name, about, email),
            verbose=False
        )
        # Re-read the profile so the response reflects the stored state.
        user = get_user_profile(email)
        code = c_OK
        if not user:
            user = "Not found"
            code = c_NOT_FOUND
    else:
        user = "Invalid request"
        code = c_INVALID_REQUEST_PARAMS
    return ujson.dumps({
        "response": user,
        "code": code,
    })
# BUG: blocks transaction
# NOTE(review): the SELECT-then-INSERT below is not atomic; two concurrent
# requests can both pass the duplicate check -- confirm whether a unique
# index on (follower, followee) backs this up.
@user_blueprint.route("/follow/", methods=["POST"])
def follow():
    """Record that `follower` follows `followee` (both emails in JSON body).

    Responds with the follower's refreshed profile on success.
    """
    try:
        params = request.json
    except Exception:
        print "user.follow params exception:\n{0}".format(traceback.format_exc())
        return ujson.dumps({"code": c_BAD_REQUEST, "response": "invalid json"})
    follower = params.get("follower", None)  # it's ME :)
    followee = params.get("followee", None)
    if follower and followee:
        # print "{0} following {1}".format(follower, followee) # FOLLOWER FOLLOWS FOLLOWEE
        # Guard against inserting the same (follower, followee) pair twice.
        test_q = select_query(
            "SELECT f.`follower`, f.`followee` FROM `follower` f WHERE f.`follower`=%s AND f.`followee`=%s",
            (follower, followee),
            verbose=False
        )
        if len(test_q) == 0:
            user = get_user_profile(follower)
            if user:
                update_query(
                    "INSERT INTO `follower` (`follower`, `followee`) VALUES (%s, %s)",
                    (follower, followee),
                    verbose=False
                )
                code = c_OK
            else:
                user = "Not found"
                code = c_NOT_FOUND
        else:
            user = "This pair already exists"
            code = c_INVALID_REQUEST_PARAMS
    else:
        user = "Invalid request"
        code = c_INVALID_REQUEST_PARAMS
    return ujson.dumps({
        "response": user,
        "code": code,
    })
@user_blueprint.route("/unfollow/", methods=["POST"])
def unfollow():
    """Delete the (follower, followee) relation given in the JSON body.

    Deleting a pair that does not exist is a silent no-op; the response
    is the follower's refreshed profile either way.
    """
    try:
        params = request.json
    except Exception:
        print "user.unfollow params exception:\n{0}".format(traceback.format_exc())
        return ujson.dumps({"code": c_BAD_REQUEST, "response": "invalid json"})
    follower = params.get("follower", None)  # it's ME :)
    followee = params.get("followee", None)
    if follower and followee:
        # print "{0} unfollowing {1}".format(follower, followee) # FOLLOWER FOLLOWS FOLLOWEE
        update_query(
            "DELETE FROM `follower` WHERE `follower`.`follower` = %s AND `follower`.`followee` = %s LIMIT 1",
            (follower, followee),
            verbose=False
        )
        user = get_user_profile(follower)
        code = c_OK
        if not user:
            user = "Not found"
            code = c_NOT_FOUND
    else:
        user = "Invalid request"
        code = c_INVALID_REQUEST_PARAMS
    return ujson.dumps({
        "response": user,
        "code": code,
    })
@user_blueprint.route("/listFollowers/", methods=["GET"])
def list_followers():
    """List full profiles of the users who follow the given user.

    Query args: user (required email), limit (int), since_id (int),
    order ("asc"/"desc", default "desc").  Responds with a JSON envelope
    {"response": [profiles...], "code": status}.
    """
    email = request.args.get("user", None)
    # optional
    limit = get_int_or_none(request.args.get("limit", None))
    since_id = get_int_or_none(request.args.get("since_id", None))
    order = request.args.get("order", "desc")
    if email and order in ("asc", "desc"):
        SQL = """SELECT u.`id`, u.`username`, u.`email`, u.`name`, u.`about`, u.`isAnonymous`,
        flwr.`followee`, flwe.`follower`, sub.`thread` FROM `user` u
        LEFT JOIN `follower` flwr ON flwr.`follower` = u.`email`
        LEFT JOIN `follower` flwe ON flwe.`followee` = u.`email`
        LEFT JOIN `subscription` sub ON sub.`user` = u.`email`
        WHERE u.`email` IN (SELECT f.`follower` FROM `follower` f WHERE f.`followee` = %s)"""
        # BUG FIX: a stray trailing comma used to make this ((email,),) --
        # a tuple nested inside a tuple -- so the %s placeholders never
        # lined up with the parameter sequence passed to the DB driver.
        params = (email, )
        if since_id:
            SQL += " AND u.`id` >= %s"
            params += (since_id, )
        # `order` is whitelisted above, so interpolating it is safe.
        SQL += " ORDER BY u.`name` {0}".format(order.upper())
        if limit and limit > 0:
            SQL += " LIMIT %s"
            params += (limit, )
        r = select_query(SQL, params, verbose=False)
        code = c_OK
        if len(r) > 0:
            # Collapse joined rows into one profile dict per user.
            user = prepare_profiles(r)
        else:
            user = []
    else:
        user = "Invalid request"
        code = c_INVALID_REQUEST_PARAMS
    # print "*"*50, "\nFollowers: {0}\n".format(repr(user)), "*"*50
    return ujson.dumps({
        "response": user,
        "code": code,
    })
@user_blueprint.route("/listFollowing/", methods=["GET"])
def list_following():
    """List full profiles of the users the given user follows.

    Query args: user (required email), limit (int), since_id (int),
    order ("asc"/"desc", default "desc").  Responds with a JSON envelope
    {"response": [profiles...], "code": status}.
    """
    email = request.args.get("user", None)
    # optional
    limit = get_int_or_none(request.args.get("limit", None))
    since_id = get_int_or_none(request.args.get("since_id", None))
    order = request.args.get("order", "desc")
    if email and order in ("asc", "desc"):
        SQL = """SELECT u.`id`, u.`username`, u.`email`, u.`name`, u.`about`, u.`isAnonymous`,
        flwr.`followee`, flwe.`follower`, sub.`thread` FROM `user` u
        LEFT JOIN `follower` flwr ON flwr.`follower` = u.`email`
        LEFT JOIN `follower` flwe ON flwe.`followee` = u.`email`
        LEFT JOIN `subscription` sub ON sub.`user` = u.`email`
        WHERE u.`email` IN (SELECT f.`followee` FROM `follower` f WHERE f.`follower` = %s)"""
        # BUG FIX: a stray trailing comma used to make this ((email,),) --
        # a tuple nested inside a tuple -- so the %s placeholders never
        # lined up with the parameter sequence passed to the DB driver.
        params = (email, )
        if since_id:
            SQL += " AND u.`id` >= %s"
            params += (since_id, )
        # `order` is whitelisted above, so interpolating it is safe.
        SQL += " ORDER BY u.`name` {0}".format(order.upper())
        if limit and limit > 0:
            SQL += " LIMIT %s"
            params += (limit, )
        r = select_query(SQL, params, verbose=False)
        code = c_OK
        if len(r) > 0:
            # Collapse joined rows into one profile dict per user.
            user = prepare_profiles(r)
        else:
            user = []
    else:
        user = "Invalid request"
        code = c_INVALID_REQUEST_PARAMS
    # print "*"*50, "\nFollowing: {0}\n".format(repr(user)), "*"*50
    return ujson.dumps({
        "response": user,
        "code": code,
    })
@user_blueprint.route("/listPosts/", methods=["GET"])
def list_posts():
    """List the posts authored by the given user.

    Query args: user (required email), limit (int), since (date string),
    order ("asc"/"desc", default "desc").  Post dates are normalized via
    get_date() before serialization.
    """
    email = request.args.get("user", None)
    # optional
    limit = get_int_or_none(request.args.get("limit", None))
    since = request.args.get("since", None)
    order = request.args.get("order", "desc")
    if email and order in ("asc", "desc"):
        SQL = "SELECT * FROM `post` p WHERE p.`user` = %s"
        # BUG FIX: a stray trailing comma used to make this ((email,),) --
        # a tuple nested inside a tuple -- so the %s placeholders never
        # lined up with the parameter sequence passed to the DB driver.
        params = (email, )
        if since:
            SQL += " AND p.`date` >= %s"
            params += (since, )
        # `order` is whitelisted above, so interpolating it is safe.
        SQL += " ORDER BY p.`date` {0}".format(order.upper())
        if limit and limit > 0:
            SQL += " LIMIT %s"
            params += (limit, )
        user = select_query(SQL, params, verbose=False)
        code = c_OK
        if len(user) > 0:
            for item in user:
                item["date"] = get_date(item["date"])
        else:
            user = []
    else:
        user = "Invalid request"
        code = c_INVALID_REQUEST_PARAMS
    # print "*"*50, "\nUserPosts: {0}\n".format(repr(user)), "*"*50
    return ujson.dumps({
        "response": user,
        "code": code,
    })
# ######## HELPERS ########
def prepare_profiles(query, limit=None):
    """Assemble user profile dicts from joined query rows.

    Each row carries a user's base columns plus the (possibly NULL)
    values contributed by the follower/followee/subscription LEFT JOINs.
    Rows sharing an email are merged into one profile.  When ``limit``
    is truthy, at most ``limit`` distinct profiles are created and join
    values for users beyond the limit are dropped.  First-seen row order
    is preserved in the returned list.
    """
    profiles = OrderedDict()
    for row in query:
        email = row["email"]
        if email not in profiles and (not limit or len(profiles) < limit):
            profiles[email] = {
                "id": row["id"],
                "username": row["username"],
                "email": email,
                "name": row["name"],
                "about": row["about"],
                "isAnonymous": bool(row["isAnonymous"]),
                "followers": [],
                "following": [],
                "subscriptions": [],
            }
        profile = profiles.get(email)
        if profile is None:
            # User was cut off by `limit`; ignore its join values too.
            continue
        # Followers/following may repeat across joined rows (no dedup,
        # matching the original behavior); subscriptions are deduped.
        if row.get("follower"):
            profile["followers"].append(row["follower"])
        if row.get("followee"):
            profile["following"].append(row["followee"])
        thread = row.get("thread")
        if thread and thread not in profile["subscriptions"]:
            profile["subscriptions"].append(thread)
    return list(profiles.values())
def get_user_profile(email):
    """Return full profile + subscribers + followers + following"""
    # One row per (follower x followee x subscription) combination; the
    # base user columns repeat on every row.
    r = select_query(
        """SELECT u.`id`, u.`username`, u.`email`, u.`name`, u.`about`, u.`isAnonymous`,
        flwr.`followee`, flwe.`follower`, sub.`thread` FROM `user` u
        LEFT JOIN `follower` flwr ON flwr.`follower` = u.`email`
        LEFT JOIN `follower` flwe ON flwe.`followee` = u.`email`
        LEFT JOIN `subscription` sub ON sub.`user` = u.`email`
        WHERE `email`=%s
        """,
        (email, ),
        verbose=False
    )
    if len(r) > 0:
        # Use the first row for the scalar columns, then fold the join
        # columns from every row into lists.
        user = r[0]
        user["isAnonymous"] = bool(user["isAnonymous"])
        user["followers"] = []
        user["following"] = []
        user["subscriptions"] = []  # TODO: update inserting subscr, don't show until...
        # NOTE(review): unlike prepare_profiles(), none of these lists is
        # deduplicated here, so cross-join rows can yield repeats.
        for line in r:
            if "follower" in line and line["follower"]:
                user["followers"].append(line["follower"])
            if "followee" in line and line["followee"]:
                user["following"].append(line["followee"])
            if "thread" in line and line["thread"]:
                user["subscriptions"].append(line["thread"])
        # Drop the raw join columns of the first row; they are now
        # represented by the lists above.
        del user["followee"]
        del user["follower"]
        del user["thread"]
        # print "*"*50, "\nUserProfile: {0}\n".format(repr(user)), "*"*50
        return user
    else:
        # No such user.
        return None
| {
"content_hash": "fc6fb5e4c99a25506a294faf7156221a",
"timestamp": "",
"source": "github",
"line_count": 365,
"max_line_length": 119,
"avg_line_length": 34.704109589041096,
"alnum_prop": 0.5337491118654772,
"repo_name": "d3QUone/db_api",
"id": "737415d4c14a1b4ba78ad39c322a45ef76625f87",
"size": "12667",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blueprints/user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "64484"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Options-only migration for the NotableSupporter model.

    Renames its human-readable names to "First signatory"/"First
    signatories" and orders instances by the `_order` column.
    AlterModelOptions changes Django's model state only -- no database
    schema change is performed.
    """

    dependencies = [
        ('rpocore', '0004_auto_20160920_1232'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='notablesupporter',
            options={'ordering': ('_order',), 'verbose_name': 'First signatory', 'verbose_name_plural': 'First signatories'},
        ),
    ]
| {
"content_hash": "6e71ff65d65dae0f29881a0139fa1779",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 125,
"avg_line_length": 25.235294117647058,
"alnum_prop": 0.62004662004662,
"repo_name": "frmwrk123/rpo-website",
"id": "568ab879668e21c2896c5fed08a0cf3e83f45efd",
"size": "500",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rpocore/migrations/0005_auto_20160921_1342.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6018"
},
{
"name": "HTML",
"bytes": "427154"
},
{
"name": "JavaScript",
"bytes": "320006"
},
{
"name": "Python",
"bytes": "75732"
},
{
"name": "Shell",
"bytes": "291"
}
],
"symlink_target": ""
} |
from editor.app import app

if __name__ == "__main__":
    # Only start the development server when this file is executed
    # directly; importing the module (e.g. from a WSGI runner or a test)
    # no longer launches the server as an import side effect.
    app.run()
| {
"content_hash": "bf5a77b9e1da20a5574cc4c3c1f65a2a",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 26,
"avg_line_length": 12.666666666666666,
"alnum_prop": 0.7368421052631579,
"repo_name": "ludwinas/editore",
"id": "13a48eb489c705c4c811c6dd7e5e87098a3f6fdc",
"size": "63",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11308"
},
{
"name": "Python",
"bytes": "4496"
}
],
"symlink_target": ""
} |
# Public surface of the `computation` package.
#
# NOTE(review): `__all__` is out of sync with the imports below -- it lists
# modules that are currently commented out (fileio, laplacianSmoothing, lar,
# matrixutil_accel, smooth, visualization, import_library) and omits the
# modules that ARE imported (data_preparation, objToVolume).  For a package,
# `from ... import *` attempts to import every submodule named in `__all__`,
# so confirm which set is intended before relying on star-imports.
__all__ = [
    'fileio', 'laplacianSmoothing', 'lar', 'matrixutil_accel',
    'pklzToSmoothObj', 'smooth',
    'visualization', 'import_library'
]
# Implicit relative imports (Python 2 style).
import data_preparation
import pklzToSmoothObj
import objToVolume
# import fileio
# import laplacianSmoothing
# import lar
# import matrixutil_accel
# import matrixutil_no_accel
# import pklzToSmoothObj
# import smooth
# import step_calcchains_serial_tobinary_filter_proc_lisa
# import step_generatebordermtx
# import step_loadmodel
# import step_mergemesh
# import step_remove_boxes_iner_faces
# import step_squaremesh
# import step_triangularmesh
# import visualization
# import import_library
| {
"content_hash": "5367a001e733cb66f2258cef06fd23d9",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 62,
"avg_line_length": 26.833333333333332,
"alnum_prop": 0.7732919254658385,
"repo_name": "mjirik/larVolumeToObj",
"id": "87f1eaef2dc71d67bb39b2c4d7468f7d05c9d200",
"size": "644",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "larVolumeToObj/computation/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "253243"
},
{
"name": "Shell",
"bytes": "34815"
}
],
"symlink_target": ""
} |
import six
from page_sets.system_health import platforms
from page_sets.system_health import story_tags
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry.util import wpr_modes
# Extra wait time after the page has loaded required by the loading metric. We
# use it in all benchmarks to avoid divergence between benchmarks.
# TODO(petrcermak): Switch the memory benchmarks to use it as well.
_WAIT_TIME_AFTER_LOAD = 10
class _SystemHealthSharedState(shared_page_state.SharedPageState):
  """Shared state which enables disabling stories on individual platforms.

  This should be used only to disable the stories permanently. For
  disabling stories temporarily use story expectations in ./expectations.py.
  """

  def CanRunOnBrowser(self, browser_info, page):
    # Stories explicitly marked as unsupported cannot run on WebView.
    webview_blocked = (browser_info.browser_type.startswith('android-webview')
                       and page.WEBVIEW_NOT_SUPPORTED)
    if webview_blocked:
      return False
    # WebGL-tagged stories additionally require WebGL support.
    if page.TAGS and story_tags.WEBGL in page.TAGS:
      return browser_info.HasWebGLSupport()
    return True
class _MetaSystemHealthStory(type):
"""Metaclass for SystemHealthStory."""
@property
def ABSTRACT_STORY(cls):
"""Class field marking whether the class is abstract.
If true, the story will NOT be instantiated and added to a System Health
story set. This field is NOT inherited by subclasses (that's why it's
defined on the metaclass).
"""
return cls.__dict__.get('ABSTRACT_STORY', False)
class SystemHealthStory(
    six.with_metaclass(_MetaSystemHealthStory, page_module.Page)):
  """Abstract base class for System Health user stories."""

  # The full name of a single page story has the form CASE:GROUP:PAGE:[VERSION]
  # (e.g. 'load:search:google' or 'load:search:google:2018').
  NAME = NotImplemented
  URL = NotImplemented
  ABSTRACT_STORY = True
  # Skip the login flow in replay mode
  # If you want to replay the login flow in your story, set SKIP_LOGIN to False
  SKIP_LOGIN = True
  SUPPORTED_PLATFORMS = platforms.ALL_PLATFORMS
  TAGS = []
  PLATFORM_SPECIFIC = False
  WEBVIEW_NOT_SUPPORTED = False

  def __init__(self, story_set, take_memory_measurement,
               extra_browser_args=None):
    # CASE and GROUP become grouping keys; the optional [VERSION] suffix
    # stays attached to the PAGE part (maxsplit=2).
    case, group, _ = self.NAME.split(':', 2)
    tags = []
    found_year_tag = False
    # Validate the declared tags: all must be known, and there must be
    # exactly one year tag.
    for t in self.TAGS:  # pylint: disable=not-an-iterable
      assert t in story_tags.ALL_TAGS
      tags.append(t.name)
      if t in story_tags.YEAR_TAGS:
        # Assert that this is the first year tag.
        assert not found_year_tag, (
            "%s has more than one year tag found." % self.__class__.__name__)
        found_year_tag = True
    # Assert that there is one year tag.
    assert found_year_tag, (
        "%s needs exactly one year tag." % self.__class__.__name__)
    super(SystemHealthStory, self).__init__(
        shared_page_state_class=_SystemHealthSharedState,
        page_set=story_set, name=self.NAME, url=self.URL, tags=tags,
        grouping_keys={'case': case, 'group': group},
        platform_specific=self.PLATFORM_SPECIFIC,
        extra_browser_args=extra_browser_args)
    self._take_memory_measurement = take_memory_measurement

  @classmethod
  def GetStoryDescription(cls):
    # Prefer the subclass docstring; fall back to the generated one.
    if cls.__doc__:
      return cls.__doc__
    return cls.GenerateStoryDescription()

  @classmethod
  def GenerateStoryDescription(cls):
    """ Subclasses of SystemHealthStory can override this to auto generate
    their story description.
    However, it's recommended to use the Python docstring to describe the user
    stories instead and this should only be used for very repetitive cases.
    """
    return None

  def _Measure(self, action_runner):
    # Memory benchmarks dump memory; all others simply wait so the
    # loading metric sees a consistent post-load window.
    if self._take_memory_measurement:
      action_runner.MeasureMemory(deterministic_mode=True)
    else:
      action_runner.Wait(_WAIT_TIME_AFTER_LOAD)

  def _Login(self, action_runner):
    # Hook for subclasses whose story requires logging in.
    pass

  def _DidLoadDocument(self, action_runner):
    # Hook for subclasses to interact with the page after load.
    pass

  def RunNavigateSteps(self, action_runner):
    # Login is skipped during WPR replay unless the story opts in
    # by setting SKIP_LOGIN = False.
    if not (self.SKIP_LOGIN and self.wpr_mode == wpr_modes.WPR_REPLAY):
      self._Login(action_runner)
    super(SystemHealthStory, self).RunNavigateSteps(action_runner)

  def RunPageInteractions(self, action_runner):
    action_runner.tab.WaitForDocumentReadyStateToBeComplete()
    self._DidLoadDocument(action_runner)
    self._Measure(action_runner)
| {
"content_hash": "9a25f240b42a20b9578ad99ead092569",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 79,
"avg_line_length": 35.21138211382114,
"alnum_prop": 0.7083814361579311,
"repo_name": "nwjs/chromium.src",
"id": "c4c38fec743e710c59dbe2b689a1782239cba03b",
"size": "4472",
"binary": false,
"copies": "7",
"ref": "refs/heads/nw70",
"path": "tools/perf/page_sets/system_health/system_health_story.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import socket
import time
import settings
from utils.timing import Timer
from utils.logger import log
class IRCBot:
"""
Sends and receives messages to and from IRC channels.
"""
    def __init__(self, bot_name, owner_name, oauth):
        """
        :param bot_name: str - The bot's username
        :param owner_name: str - The owner's username
        :param oauth: str - The bot's oauth
        """
        self.nickname = bot_name
        self.owner_name = owner_name
        self.oauth = oauth
        # No message sent yet: anchoring the cooldown at 0 means the
        # first send_raw() call never waits.
        self.last_message_send_time = 0
        # Initializing socket
        # A short timeout keeps recv_raw() from blocking forever when the
        # channel is quiet (see settings.IRC_POLL_TIMEOUT).
        self.irc_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.irc_sock.settimeout(settings.IRC_POLL_TIMEOUT)
def send_raw_instant(self, msg_str):
"""
Sends a raw IRC message with no rate-limiting concerns.
:param msg_str:
:return:
"""
log('> ' + msg_str)
self.irc_sock.send(bytes(msg_str + '\r\n', 'UTF-8'))
def send_raw(self, msg_str):
"""
Sends a raw IRC message with post-delay to be consistent with rate-limiting.
:param msg_str: str - The raw IRC message to be sent
"""
# Wait until the cooldown is over
required_wait_time = settings.IRC_SEND_COOLDOWN - (time.time() - self.last_message_send_time)
if required_wait_time > 0:
time.sleep(required_wait_time)
self.send_raw_instant(msg_str)
# Block further messages until we set the send_msg_cooldown event
self.last_message_send_time = time.time()
def recv_raw(self):
"""
Receives a raw IRC message.
:return: str - The raw IRC message received
"""
try:
buf = self.irc_sock.recv(settings.IRC_RECV_SIZE)
total_data = buf
if not buf:
raise Exception('Socket connection broken.')
# Keep trying to pull until there's nothing left.
while len(buf) == settings.IRC_RECV_SIZE:
buf = self.irc_sock.recv(settings.IRC_RECV_SIZE)
total_data += buf
# Sometimes there's a delay between different parts of the message
time.sleep(settings.IRC_CHUNK_DELAY)
if not buf:
raise Exception('Socket connection broken.')
return str(total_data, encoding='UTF-8').strip('\r\n')
except socket.timeout:
# We quickly time out if there's no messages to receive as set by socket set timeout in the init
return None
def connect(self):
"""
Connect to the IRC server.
"""
log('Connecting to IRC service...')
self.irc_sock.connect((settings.IRC_SERVER, settings.IRC_PORT))
self.send_raw_instant('PASS ' + self.oauth)
self.send_raw_instant('NICK ' + self.nickname)
def send_pong(self, server):
"""
Send a keep-alive message when prompted with a ping.
:param server: str - IRC server that sent a PING
"""
# Guaranteed to be at least two string tokens from the check in the main run loop
self.send_raw_instant('PONG ' + server)
def handle_msg(self, raw_msg):
"""
Given an arbitrary IRC message, handle it as necessary.
:param raw_msg: str - The IRC raw message
"""
if raw_msg:
log(raw_msg)
lower_msg = raw_msg.lower()
if lower_msg.startswith('ping '):
self.send_pong(raw_msg.split()[1])
def run(self):
"""
Core update loop for the bot. Checks for completed timer callbacks and then handles input.
"""
while True:
# Check to see if any timers completed and activate their callbacks
Timer.check_timers()
raw_msgs = self.recv_raw()
# We return None if we timed out on the receive in settings.IRC_POLL_TIMEOUT seconds to check our timers
# or if we failed to receive messages
if raw_msgs is None:
continue
# Splitting on \r\n allows reading of multiple commands with one recv
for raw_msg in raw_msgs.split('\r\n'):
self.handle_msg(raw_msg)
raise RuntimeError('Exited execution loop.')
| {
"content_hash": "04a0ef9cceeda0731f6cc9d363c629f9",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 116,
"avg_line_length": 33.94488188976378,
"alnum_prop": 0.5801438181396428,
"repo_name": "Xelaadryth/Xelabot",
"id": "91f5f316d918554da696cbb2dc096e8d20285a1a",
"size": "4311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/irc_bot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "215"
},
{
"name": "Python",
"bytes": "143528"
}
],
"symlink_target": ""
} |
"""
Easy-to-use UMFPACK interface
=============================
.. currentmodule:: scikits.umfpack
The following functions can be used for LU decompositions and solving
equation systems:
.. autosummary::
:toctree: reference/
spsolve
splu
UmfpackLU
"""
from __future__ import division, print_function, absolute_import
from warnings import warn
import sys
import numpy as np
from numpy import asarray
from scipy.sparse import (isspmatrix_csc, isspmatrix_csr, isspmatrix,
SparseEfficiencyWarning, csc_matrix, hstack)
from .umfpack import UmfpackContext, UMFPACK_A
# Maps (value dtype, index dtype) -> UMFPACK routine-family code:
# d = double, z = complex double; i = int32 indices, l = int64 indices.
_families = {
    (np.float64, np.int32): 'di',
    (np.complex128, np.int32): 'zi',
    (np.float64, np.int64): 'dl',
    (np.complex128, np.int64): 'zl'
}
__all__ = ['spsolve', 'splu', 'UmfpackLU']
# Python 3 compatibility shim: solve_sparse below iterates with xrange.
if sys.version_info[0] >= 3:
    xrange = range
def spsolve(A, b):
    """Solve the sparse linear system Ax=b, where b may be a vector or a matrix.

    Parameters
    ----------
    A : ndarray or sparse matrix
        The square matrix A will be converted into CSC or CSR form
    b : ndarray or sparse matrix
        The matrix or vector representing the right hand side of the equation.

    Returns
    -------
    x : ndarray or sparse matrix
        the solution of the sparse linear equation.
        If b is a vector, then x is a vector of size A.shape[0]
        If b is a matrix, then x is a matrix of size (A.shape[0],)+b.shape[1:]
    """
    solution = UmfpackLU(A).solve(b)
    # Match the scipy.sparse.spsolve quirk: a single-column 2-D b yields a
    # flattened 1-D result.
    is_column_vector = b.ndim == 2 and b.shape[1] == 1
    return solution.ravel() if is_column_vector else solution
def splu(A):
    """
    Compute the LU decomposition of a sparse, square matrix.

    Parameters
    ----------
    A : sparse matrix
        Sparse matrix to factorize. Should be in CSR or CSC format.

    Returns
    -------
    invA : scikits.umfpack.UmfpackLU
        Object, which has a ``solve`` method.

    Notes
    -----
    This function uses the UMFPACK library.
    """
    # Thin convenience wrapper: all the factorization work happens inside
    # UmfpackLU.__init__.
    factorization = UmfpackLU(A)
    return factorization
class UmfpackLU(object):
    """
    LU factorization of a sparse matrix.

    Factorization is represented as::

        Pr * (R^-1) * A * Pc = L * U

    Parameters
    ----------
    A : csc_matrix or csr_matrix
        Matrix to decompose

    Attributes
    ----------
    shape
    nnz
    perm_c
    perm_r
    L
    U
    R

    Methods
    -------
    solve
    solve_sparse

    Examples
    --------
    The LU decomposition can be used to solve matrix equations. Consider:

    >>> import numpy as np
    >>> from scipy.sparse import csc_matrix
    >>> from scikits import umfpack
    >>> A = csc_matrix([[1,2,0,4],[1,0,0,1],[1,0,2,1],[2,2,1,0.]])

    This can be solved for a given right-hand side:

    >>> lu = umfpack.splu(A)
    >>> b = np.array([1, 2, 3, 4])
    >>> x = lu.solve(b)
    >>> A.dot(x)
    array([ 1.,  2.,  3.,  4.])

    The ``lu`` object also contains an explicit representation of the
    decomposition. The permutations are represented as mappings of
    indices:

    >>> lu.perm_r
    array([0, 2, 1, 3], dtype=int32)
    >>> lu.perm_c
    array([2, 0, 1, 3], dtype=int32)

    The L and U factors are sparse matrices in CSC format:

    >>> lu.L.A
    array([[ 1. ,  0. ,  0. ,  0. ],
           [ 0. ,  1. ,  0. ,  0. ],
           [ 0. ,  0. ,  1. ,  0. ],
           [ 1. ,  0.5,  0.5,  1. ]])
    >>> lu.U.A
    array([[ 2.,  0.,  1.,  4.],
           [ 0.,  2.,  1.,  1.],
           [ 0.,  0.,  1.,  1.],
           [ 0.,  0.,  0., -5.]])

    The permutation matrices can be constructed:

    >>> Pr = csc_matrix((4, 4))
    >>> Pr[lu.perm_r, np.arange(4)] = 1
    >>> Pc = csc_matrix((4, 4))
    >>> Pc[np.arange(4), lu.perm_c] = 1

    Similarly for the row scalings:

    >>> R = csc_matrix((4, 4))
    >>> R.setdiag(lu.R)

    We can reassemble the original matrix:

    >>> (Pr.T * R * (lu.L * lu.U) * Pc.T).A
    array([[ 1.,  2.,  0.,  4.],
           [ 1.,  0.,  0.,  1.],
           [ 1.,  0.,  2.,  1.],
           [ 2.,  2.,  1.,  0.]])
    """
    def __init__(self, A):
        if not (isspmatrix_csc(A) or isspmatrix_csr(A)):
            A = csc_matrix(A)
            warn('spsolve requires A be CSC or CSR matrix format',
                 SparseEfficiencyWarning)
        A.sort_indices()
        A = A.asfptype()  # upcast to a floating point format
        M, N = A.shape
        if (M != N):
            raise ValueError("matrix must be square (has shape %s)" % ((M, N),))
        # Select the UMFPACK routine family from the value/index dtypes.
        f_type = np.sctypeDict[A.dtype.name]
        i_type = np.sctypeDict[A.indices.dtype.name]
        try:
            family = _families[(f_type, i_type)]
        except KeyError:
            msg = 'only float64 or complex128 matrices with int32 or int64' \
                  ' indices are supported! (got: matrix: %s, indices: %s)' \
                  % (f_type, i_type)
            raise ValueError(msg)
        self.umf = UmfpackContext(family)
        self.umf.numeric(A)
        self._A = A
        # The explicit factors are computed lazily by _compute_lu().
        self._L = None
        self._U = None
        self._P = None
        self._Q = None
        self._R = None
    def solve(self, b):
        """
        Solve linear equation A x = b for x

        Parameters
        ----------
        b : ndarray
            Right-hand side of the matrix equation. Can be vector or a matrix.

        Returns
        -------
        x : ndarray
            Solution to the matrix equation
        """
        if isspmatrix(b):
            b = b.toarray()
        if b.shape[0] != self._A.shape[1]:
            raise ValueError("Shape of b is not compatible with that of A")
        b_arr = asarray(b, dtype=self._A.dtype).reshape(b.shape[0], -1)
        x = np.zeros((self._A.shape[0], b_arr.shape[1]), dtype=self._A.dtype)
        # Solve column by column; autoTranspose handles CSR-stored input.
        for j in range(b_arr.shape[1]):
            x[:,j] = self.umf.solve(UMFPACK_A, self._A, b_arr[:,j], autoTranspose=True)
        return x.reshape((self._A.shape[0],) + b.shape[1:])
    def solve_sparse(self, B):
        """
        Solve linear equation of the form A X = B. Where B and X are sparse matrices.

        Parameters
        ----------
        B : any scipy.sparse matrix
            Right-hand side of the matrix equation.
            Note: it will be converted to csc_matrix via `.tocsc()`.

        Returns
        -------
        X : csc_matrix
            Solution to the matrix equation as a csc_matrix
        """
        B = B.tocsc()
        cols = list()
        for j in xrange(B.shape[1]):
            col = self.solve(B[:,j])
            cols.append(csc_matrix(col))
        return hstack(cols)
    def _compute_lu(self):
        # Lazily extract the explicit L/U/P/Q/R factors on first access.
        if self._L is None:
            self._L, self._U, self._P, self._Q, self._R, do_recip = self.umf.lu(self._A)
            if do_recip:
                with np.errstate(divide='ignore'):
                    np.reciprocal(self._R, out=self._R)
            # Conform to scipy.sparse.splu convention on permutation matrices
            self._P = self._P[self._P]
    @property
    def shape(self):
        """
        Shape of the original matrix as a tuple of ints.
        """
        return self._A.shape
    @property
    def L(self):
        """
        Lower triangular factor with unit diagonal as a
        `scipy.sparse.csc_matrix`.
        """
        self._compute_lu()
        return self._L
    @property
    def U(self):
        """
        Upper triangular factor as a `scipy.sparse.csc_matrix`.
        """
        self._compute_lu()
        return self._U
    @property
    def R(self):
        """
        Row scaling factors, as a 1D array.
        """
        self._compute_lu()
        return self._R
    @property
    def perm_c(self):
        """
        Permutation Pc represented as an array of indices.

        The column permutation matrix can be reconstructed via:

        >>> Pc = np.zeros((n, n))
        >>> Pc[np.arange(n), perm_c] = 1
        """
        self._compute_lu()
        return self._Q
    @property
    def perm_r(self):
        """
        Permutation Pr represented as an array of indices.

        The row permutation matrix can be reconstructed via:

        >>> Pr = np.zeros((n, n))
        >>> Pr[perm_r, np.arange(n)] = 1
        """
        self._compute_lu()
        return self._P
    @property
    def nnz(self):
        """
        Combined number of nonzeros in L and U: L.nnz + U.nnz
        """
        # Bug fix: the factors are computed lazily, so make sure they exist
        # before dereferencing them. Previously, accessing `nnz` before any
        # of L/U/R/perm_* raised AttributeError on the None placeholders.
        self._compute_lu()
        return self._L.nnz + self._U.nnz
| {
"content_hash": "e97fc8e82da1fce2c0a280757e8dd517",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 88,
"avg_line_length": 25.044510385756677,
"alnum_prop": 0.5225118483412322,
"repo_name": "scikit-umfpack/scikit-umfpack",
"id": "5e5fc6fdb73942043f89d2c48d261ad225a468dc",
"size": "8440",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "scikits/umfpack/interface.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "15161"
},
{
"name": "Python",
"bytes": "65101"
}
],
"symlink_target": ""
} |
from south.db import db
from django.db import models
from laws.models import *
class Migration:
    """South schema migration: adds the denormalized ``votes_count`` column
    to the ``laws_vote`` table (and drops it again on rollback)."""
    def forwards(self, orm):
        # Adding field 'Vote.votes_count'
        db.add_column('laws_vote', 'votes_count', orm['laws.vote:votes_count'])
    def backwards(self, orm):
        # Deleting field 'Vote.votes_count'
        db.delete_column('laws_vote', 'votes_count')
    # Frozen ORM snapshot generated by South at migration-creation time.
    # Do not edit by hand; later migrations supersede it.
    models = {
        'laws.membervotingstatistics': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'member': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Member']"})
        },
        'laws.partyvotingstatistics': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'party': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'voting_statistics'", 'unique': 'True', 'to': "orm['mks.Party']"})
        },
        'laws.vote': {
            'against_party': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'controversy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'full_text': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'full_text_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'importance': ('django.db.models.fields.FloatField', [], {}),
            'meeting_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'src_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'src_url': ('django.db.models.fields.URLField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
            'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'time': ('django.db.models.fields.DateTimeField', [], {}),
            'time_string': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
            'vote_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'votes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mks.Member']", 'blank': 'True'}),
            'votes_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
        },
        'laws.voteaction': {
            'against_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'against_opposition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'against_party': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
            'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'vote': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['laws.Vote']"})
        },
        'mks.member': {
            'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}),
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'parties': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['mks.Party']"}),
            'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
        },
        'mks.party': {
            'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['laws']
| {
"content_hash": "a3fa6475e5457724a65aa6bc6b9d4546",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 170,
"avg_line_length": 72.14772727272727,
"alnum_prop": 0.5383524964561348,
"repo_name": "otadmor/Open-Knesset",
"id": "c16c354c709546af85776d0e8f88bebb1634deb5",
"size": "6350",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "laws/migrations/0004_add_votes_count_to_vote.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "346228"
},
{
"name": "HTML",
"bytes": "689823"
},
{
"name": "JavaScript",
"bytes": "214741"
},
{
"name": "Python",
"bytes": "4115356"
},
{
"name": "Shell",
"bytes": "203"
}
],
"symlink_target": ""
} |
from zerver.lib.test_classes import WebhookTestCase
class IFTTTHookTests(WebhookTestCase):
    """Exercises the IFTTT webhook integration against stored fixtures."""

    STREAM_NAME = 'ifttt'
    URL_TEMPLATE = "/api/v1/external/ifttt?stream={stream}&api_key={api_key}"
    FIXTURE_DIR_NAME = 'ifttt'

    def test_ifttt_when_subject_and_body_are_correct(self) -> None:
        self.send_and_test_stream_message(
            'correct_subject_and_body',
            u"Email sent from email@email.com",
            u"Email subject: Subject")

    def test_ifttt_when_topic_and_body_are_correct(self) -> None:
        self.send_and_test_stream_message(
            'correct_topic_and_body',
            u"Email sent from email@email.com",
            u"Email subject: Subject")

    def _post_fixture(self, fixture_name):
        # Shared helper: POST the named fixture to the webhook URL as JSON.
        self.url = self.build_webhook_url()
        body = self.get_body(fixture_name)
        return self.client_post(self.url, body, content_type='application/json')

    def test_ifttt_when_topic_is_missing(self) -> None:
        result = self._post_fixture('invalid_payload_with_missing_topic')
        self.assert_json_error(result, "Topic can't be empty")

    def test_ifttt_when_content_is_missing(self) -> None:
        result = self._post_fixture('invalid_payload_with_missing_content')
        self.assert_json_error(result, "Content can't be empty")
| {
"content_hash": "bb5b313059cf257a1d7e038cdd0c2adf",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 103,
"avg_line_length": 51.464285714285715,
"alnum_prop": 0.6897987508674531,
"repo_name": "rishig/zulip",
"id": "f13615c6eee1d599e537fb2e3506fbbc741afcbc",
"size": "1465",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "zerver/webhooks/ifttt/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "394414"
},
{
"name": "Dockerfile",
"bytes": "2939"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "721392"
},
{
"name": "JavaScript",
"bytes": "3050898"
},
{
"name": "Perl",
"bytes": "398763"
},
{
"name": "Puppet",
"bytes": "71261"
},
{
"name": "Python",
"bytes": "6870363"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "119762"
},
{
"name": "TypeScript",
"bytes": "14100"
}
],
"symlink_target": ""
} |
"""Module for direct audit relationships mixin."""
from ggrc.models.audit import Audit
from ggrc.models.relationship import Relationship
class AuditRelationship(object):
  # pylint: disable=too-few-public-methods
  """Mixin for mandatory link to an Audit via Relationships."""

  # Import/export column aliases: the "Audit" column is mandatory on import
  # and ignored on update; filtering delegates to _filter_by_audit below.
  _aliases = {
      "audit": {
          "display_name": "Audit",
          "mandatory": True,
          "filter_by": "_filter_by_audit",
          "ignore_on_update": True,
      },
  }

  @classmethod
  def _filter_by_audit(cls, predicate):
    """Get filter for objects related to an Audit.

    Relationships are undirected, so both orientations are checked:
    (cls -> Audit) and (Audit -> cls). `predicate` is applied to the
    related Audit's slug, and the two EXISTS subqueries are OR-ed.
    """
    return Relationship.query.filter(
        Relationship.source_type == cls.__name__,
        Relationship.source_id == cls.id,
        Relationship.destination_type == Audit.__name__,
    ).join(Audit, Relationship.destination_id == Audit.id).filter(
        predicate(Audit.slug)
    ).exists() | Relationship.query.filter(
        Relationship.destination_type == cls.__name__,
        Relationship.destination_id == cls.id,
        Relationship.source_type == Audit.__name__,
    ).join(Audit, Relationship.source_id == Audit.id).filter(
        predicate(Audit.slug)
    ).exists()
| {
"content_hash": "4750592fd8a59fc847d396988360f380",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 66,
"avg_line_length": 32.47222222222222,
"alnum_prop": 0.6381522668947819,
"repo_name": "AleksNeStu/ggrc-core",
"id": "1726b05977ccbcc0ef59486cfdae501695bee532",
"size": "1282",
"binary": false,
"copies": "3",
"ref": "refs/heads/release/0.10-Raspberry",
"path": "src/ggrc/models/mixins/audit_relationship.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "221201"
},
{
"name": "HTML",
"bytes": "1055542"
},
{
"name": "JavaScript",
"bytes": "1872353"
},
{
"name": "Makefile",
"bytes": "7044"
},
{
"name": "Mako",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "2700938"
},
{
"name": "Shell",
"bytes": "31273"
}
],
"symlink_target": ""
} |
"""
The flask application package.
"""
from flask import Flask
app = Flask(__name__)
import $safeprojectname$.views
| {
"content_hash": "622eb237e25621deb75ca3347094707e",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 30,
"avg_line_length": 14.222222222222221,
"alnum_prop": 0.65625,
"repo_name": "Habatchii/PTVS",
"id": "da9151615ac2974c028a73d1ac941a4f2acd2d94",
"size": "128",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Python/Samples/PollsFlask/PollsFlask/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "113"
},
{
"name": "Batchfile",
"bytes": "3044"
},
{
"name": "C",
"bytes": "469713"
},
{
"name": "C#",
"bytes": "13408651"
},
{
"name": "C++",
"bytes": "193038"
},
{
"name": "CSS",
"bytes": "7456"
},
{
"name": "HTML",
"bytes": "56626"
},
{
"name": "JavaScript",
"bytes": "85715"
},
{
"name": "Objective-C",
"bytes": "1140"
},
{
"name": "PowerShell",
"bytes": "83521"
},
{
"name": "Python",
"bytes": "2699245"
},
{
"name": "Smarty",
"bytes": "8611"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
} |
import wx
from wx.lib.scrolledpanel import ScrolledPanel
class EditHotKeyDialog(wx.Dialog):
    """Modal dialog for reassigning one hot key in base.direct.hotKeyMap."""
    def __init__(self, parent, id, title, key):
        # `key` is the hot-key string currently being edited (e.g. 'shift-a').
        wx.Dialog.__init__(self, parent, id, title, size=(250, 240))
        self.currKey = key
        self.panel = wx.Panel(self, -1)
        self.updateUI()
        vbox = wx.BoxSizer(wx.VERTICAL)
        vbox.Add(self.panel, 1, wx.EXPAND, 0)
        self.SetSizer(vbox)
        self.Layout()
    def updateUI(self):
        """Build the dialog widgets and preset them from the current key."""
        vbox = wx.BoxSizer(wx.VERTICAL)
        self.label = wx.StaticText(self.panel, label='')
        vbox.Add(self.label)
        self.modifierRadio = wx.RadioBox(self.panel, -1, "", choices=['None', 'Shift', 'Control'], majorDimension=1, style=wx.RA_SPECIFY_ROWS)
        self.modifierRadio.Bind(wx.EVT_RADIOBOX, self.onChangeModifier)
        vbox.Add(self.modifierRadio)
        itemPanel = wx.Panel(self.panel)
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        # Drop-down of special keys; the empty first entry means "use the
        # typed character in keyEntry instead".
        keyList = ['']
        keyList.extend(base.direct.specialKeys)
        self.specialKeyCombo = wx.Choice(itemPanel, -1, choices=keyList)
        self.specialKeyCombo.Bind(wx.EVT_CHOICE, self.onChangeSpecialKey)
        self.keyEntry = wx.TextCtrl(itemPanel, -1, size=(30, 20))
        button = wx.Button(itemPanel, -1, 'Apply', size=(50, 20))
        button.Bind(wx.EVT_BUTTON, self.onApply)
        hbox.Add(self.specialKeyCombo)
        hbox.Add(self.keyEntry)
        hbox.Add(button)
        itemPanel.SetSizer(hbox)
        vbox.Add(itemPanel)
        self.panel.SetSizer(vbox)
        # Preset the widgets to reflect the key being edited.
        keyDesc = base.direct.hotKeyMap[self.currKey]
        self.label.SetLabel(keyDesc[0])
        if 'shift' in self.currKey:
            self.modifierRadio.SetStringSelection('Shift')
            self.specialKeyCombo.Enable(False)
            keyStr = self.currKey[len('shift-'):]
        elif 'control' in self.currKey:
            self.modifierRadio.SetStringSelection('Control')
            self.specialKeyCombo.Enable(False)
            keyStr = self.currKey[len('control-'):]
        else:
            self.modifierRadio.SetStringSelection('None')
            self.specialKeyCombo.Enable(True)
            keyStr = self.currKey
        if keyStr in base.direct.specialKeys:
            self.keyEntry.SetValue('')
            self.keyEntry.Enable(False)
            self.specialKeyCombo.SetStringSelection(keyStr)
        else:
            self.specialKeyCombo.SetStringSelection('')
            self.keyEntry.SetValue(keyStr)
    def onChangeModifier(self, evt):
        # Special keys cannot be combined with a modifier: the combo is only
        # enabled when no modifier is selected.
        if evt.GetString() == 'None':
            self.specialKeyCombo.Enable(True)
        else:
            self.specialKeyCombo.SetStringSelection('')
            self.specialKeyCombo.Enable(False)
        self.keyEntry.Enable(True)
    def onChangeSpecialKey(self, evt):
        # Choosing a special key clears and disables the one-character entry.
        if evt.GetString() != '':
            self.keyEntry.SetValue('')
            self.keyEntry.Enable(False)
        else:
            self.keyEntry.Enable(True)
    def onApply(self, evt):
        """Commit the new binding: resolve conflicts, rewrite hotKeyMap."""
        modifier = self.modifierRadio.GetStringSelection()
        if modifier == 'Shift':
            prefix = 'shift-'
        elif modifier == 'Control':
            prefix = 'control-'
        else:
            prefix = ''
        specialKey = self.specialKeyCombo.GetStringSelection()
        if specialKey == '':
            newKeyStr= prefix + self.keyEntry.GetValue().lower()
        else:
            newKeyStr = specialKey
        if newKeyStr != self.currKey:
            if newKeyStr in list(base.direct.hotKeyMap.keys()):
                print('a hotkey is to be overridden with %s' % newKeyStr)
                oldKeyDesc = base.direct.hotKeyMap[newKeyStr]
                msg = 'The hotkey is already assigned to %s\n'%oldKeyDesc[0] +\
                      'Do you want to override this?'
                dialog = wx.MessageDialog(None, msg, 'Hot Key exists!',
                                          wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
                result = dialog.ShowModal()
                if result == wx.ID_YES:
                    # Overwrite: keep the displaced binding under a
                    # '__removed__'-prefixed key so it is not lost entirely.
                    base.direct.hotKeyMap[newKeyStr] = base.direct.hotKeyMap[self.currKey]
                    base.direct.hotKeyMap['__removed__' + newKeyStr] = oldKeyDesc
                    del base.direct.hotKeyMap[self.currKey]
            else:
                base.direct.hotKeyMap[newKeyStr] = base.direct.hotKeyMap[self.currKey]
                del base.direct.hotKeyMap[self.currKey]
        self.Destroy()
class HotKeyPanel(ScrolledPanel):
    """Scrollable list of all hot keys, one row per key, with Edit buttons."""
    def __init__(self, parent):
        ScrolledPanel.__init__(self, parent, -1)
        self.parent = parent
        self.updateUI()
    def updateUI(self):
        """(Re)build one row per entry in base.direct.hotKeyMap, sorted."""
        vbox = wx.BoxSizer(wx.VERTICAL)
        keys = list(base.direct.hotKeyMap.keys())
        keys.sort()
        for key in keys:
            keyDesc = base.direct.hotKeyMap[key]
            itemPanel = wx.Panel(self)
            sizer = wx.BoxSizer(wx.HORIZONTAL)
            space = wx.StaticText(itemPanel, label='', size=(10,20))
            hotKey = wx.StaticText(itemPanel, label=key, size=(100, 20))
            desc = wx.StaticText(itemPanel, label=keyDesc[0], size=(380, 20))
            button = wx.Button(itemPanel, -1, 'Edit', size=(40, 20))
            # Default-argument trick binds the current `key` for this row;
            # a plain closure would late-bind to the last loop value.
            button.Bind(wx.EVT_BUTTON, lambda p0 = None, p1 = key: self.onEdit(p0, p1))
            sizer.Add(button)
            sizer.Add(space)
            sizer.Add(hotKey)
            sizer.Add(desc, 1, wx.EXPAND)
            itemPanel.SetSizer(sizer)
            vbox.Add(itemPanel)
        self.SetSizer(vbox)
        self.Layout()
        self.SetupScrolling(self, scroll_y=True, rate_y=20)
        self.parent.parent.updateMenu()
    def onEdit(self, evt, key):
        """Open the modal edit dialog for `key`, then rebuild the list."""
        # Suspend the editor's own key bindings while the dialog is open.
        base.le.ui.bindKeyEvents(False)
        editUI = EditHotKeyDialog(self, -1, 'Edit Hot Key', key)
        editUI.ShowModal()
        editUI.Destroy()
        # Tear down the old rows before repopulating via updateUI().
        sizer = self.GetSizer()
        if sizer is not None:
            sizer.DeleteWindows()
        self.SetSizer(None)
        base.le.ui.bindKeyEvents(True)
        self.updateUI()
class HotKeyUI(wx.Dialog):
    """Top-level dialog that hosts the scrollable HotKeyPanel."""
    def __init__(self, parent, id, title):
        wx.Dialog.__init__(self, parent, id, title, size=(550, 500))
        self.parent = parent
        panel = HotKeyPanel(self)
        vbox = wx.BoxSizer(wx.VERTICAL)
        vbox.Add(panel, 1, wx.EXPAND, 0)
        self.SetSizer(vbox)
        self.Layout()
| {
"content_hash": "72bf5829d66711433259116eb6c88f5b",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 142,
"avg_line_length": 39.135802469135804,
"alnum_prop": 0.583596214511041,
"repo_name": "tobspr/panda3d",
"id": "8cde68b0ebf1edbd9897c82e033676edf561c00c",
"size": "6340",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "direct/src/leveleditor/HotKeyUI.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4004"
},
{
"name": "C",
"bytes": "6724918"
},
{
"name": "C++",
"bytes": "25480688"
},
{
"name": "Emacs Lisp",
"bytes": "229264"
},
{
"name": "Groff",
"bytes": "3106"
},
{
"name": "HTML",
"bytes": "8081"
},
{
"name": "Java",
"bytes": "3113"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "Logos",
"bytes": "5504"
},
{
"name": "MAXScript",
"bytes": "1745"
},
{
"name": "NSIS",
"bytes": "92320"
},
{
"name": "Nemerle",
"bytes": "4403"
},
{
"name": "Objective-C",
"bytes": "28865"
},
{
"name": "Objective-C++",
"bytes": "257446"
},
{
"name": "Perl",
"bytes": "206982"
},
{
"name": "Perl6",
"bytes": "30484"
},
{
"name": "Puppet",
"bytes": "2627"
},
{
"name": "Python",
"bytes": "5537773"
},
{
"name": "R",
"bytes": "421"
},
{
"name": "Shell",
"bytes": "55940"
},
{
"name": "Visual Basic",
"bytes": "136"
}
],
"symlink_target": ""
} |
import numpy as np
from Coupling import Coupling
class Coupling2DCavities3D(Coupling):
    """
    Coupling for cavity3D to cavity transmission.
    """
    @property
    def impedance_from(self):
        """
        Choses the right impedance of subsystem_from.
        Applies boundary conditions correction as well.
        """
        return self.subsystem_from.impedance
    @property
    def impedance_to(self):
        """
        Choses the right impedance of subsystem_from.
        Applies boundary conditions correction as well.
        """
        return self.subsystem_to.impedance
    @property
    def tau(self):
        """
        Transmission coefficient.
        """
        # NOTE(review): hard-coded constant; a physical transmission
        # coefficient would normally depend on frequency/geometry -- confirm
        # whether this is a placeholder.
        return 0.1
    @property
    def clf(self):
        """
        Coupling loss factor for transmission from a 3D cavity to a 3D cavity.
        .. math:: \\eta_{12} = \\frac{c S}{8 \\pi f V} \\tau_{12}
        See BAC, equation 3.14
        """
        return self.subsystem_from.soundspeed_group * self.area / (8.0 * np.pi * self.frequency.center * self.subsystem_from.volume) * self.tau | {
"content_hash": "258829c7f0682743a6df4f583beca0c6",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 143,
"avg_line_length": 26.3953488372093,
"alnum_prop": 0.5762114537444933,
"repo_name": "FRidh/Sea",
"id": "4d8bca982016257f4542686e00328e03c100f8d0",
"size": "1135",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Sea/model/couplings/Coupling2DCavities3D.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "220852"
},
{
"name": "Shell",
"bytes": "5106"
}
],
"symlink_target": ""
} |
import logging
import sys
import re
from osmfiltering.filters import BaseFilter, VersionIncrementor
from osmfiltering.filtering import run
from osmfiltering.writing import OSMWriter, OSCWriter
class PhoneFilter(BaseFilter):
    """Normalizes French phone/fax tags to the '+33 X XX XX XX XX' form.

    Elements whose numbers cannot be parsed are tagged "check"; elements
    whose numbers were rewritten are tagged "modified".
    """

    def __init__(self):
        # Raw strings: "\d" / "\+" in plain literals emit invalid-escape
        # DeprecationWarnings on Python 3.6+ (the patterns are unchanged).
        # Greedily captures the last 9 digits, the first being non-zero.
        self.parsable_re = re.compile(r".*([1-9]).*?(\d).*?(\d).*?(\d).*?(\d).*?(\d).*?(\d).*?(\d).*?(\d)$")
        # Matches a number already in well-formed international French form.
        self.france_re = re.compile(r"^\+33 ?[1-9]( ?[0-9]){8}$")

    def test_phone(self, number):
        """Classify `number`: True if already well formed, a normalized
        '+33 ...' replacement string if it could be parsed, False otherwise."""
        if self.france_re.match(number):
            return True
        match = self.parsable_re.search(number)
        if match:
            return "+33 {0} {1}{2} {3}{4} {5}{6} {7}{8}".format(*match.groups())
        else:
            return False

    def act(self, e, tags):
        """Check every phone-like tag on element `e`, rewriting parsable bad
        values in place and recording the outcome marker in `tags`."""
        for tag in ('phone', 'contact:phone', 'fax', 'contact:fax'):
            if tag in e.tags:  # idiomatic membership test (was e.tags.keys())
                new_phone = self.test_phone(e.tags[tag])
                if new_phone is True:
                    continue  # already well formed, nothing to do
                if new_phone is False:
                    tags.append("check")
                else:
                    tags.append("modified")
                    e.tags[tag] = new_phone
        return (e, tags)

    def node(self, e, tags):
        return self.act(e, tags)

    def way(self, e, tags):
        return self.act(e, tags)

    def relation(self, e, tags):
        return self.act(e, tags)
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # argv: 1 = input .osm, 2 = output .osc ("modified"), 3 = output ("check")
    input_file = open(sys.argv[1], "rb")
    output_file = open(sys.argv[2], "wb")
    bad_phone_file = open(sys.argv[3], "wb")
    writer = OSCWriter(output_file, "modified")
    check_writer = OSMWriter(bad_phone_file, "check")
    writer.initialize_document()
    check_writer.initialize_document()
    # Filter chain: normalize phones, bump versions, then fan out to writers.
    filters = (PhoneFilter(), VersionIncrementor(), writer, check_writer)
    try:
        run(input_file, output_file, filters, threads=20)
    finally:
        # NOTE(review): check_writer is never finalized and bad_phone_file is
        # never closed here -- confirm whether that is intended.
        writer.finalize_document()
        input_file.close()
        output_file.close()
        logging.debug("Closed files")
| {
"content_hash": "37cf997252a8a167686a62afbef31084",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 107,
"avg_line_length": 30.417910447761194,
"alnum_prop": 0.5564278704612365,
"repo_name": "gileri/osmfiltering",
"id": "21bf2f6944f486a0eba9b4a828d7727ade6479fd",
"size": "2062",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/phone.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7551"
}
],
"symlink_target": ""
} |
import web
from urls import urls
from config import *
from app.api.log import *
# WSGI application mapping the URL patterns from urls.py to handler classes.
app = web.application(urls, globals())
if __name__ == "__main__":
    # Register the log handler before serving requests.
    # NOTE(review): logclass/cLogs arrive via the star import from
    # app.api.log — presumably a global processor registry; confirm there.
    logclass.append(cLogs())
    app.run()
| {
"content_hash": "b880bbed11c67afc4e119f2120420b97",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 38,
"avg_line_length": 18.5,
"alnum_prop": 0.6756756756756757,
"repo_name": "selboo/squid-api",
"id": "fb839259056772023b6cf5fd9a86e451e3299b6c",
"size": "222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "run.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4580"
}
],
"symlink_target": ""
} |
from gcp_common import BaseTest, event_data
class BigQueryDataSetTest(BaseTest):

    def test_query(self):
        """Resolve a dataset from a create event and verify its fields."""
        session_factory = self.replay_flight_data('bq-dataset-query')
        policy = self.load_policy(
            {'name': 'bq-get', 'resource': 'gcp.bq-dataset'},
            session_factory=session_factory)
        event = event_data('bq-dataset-create.json')
        dataset = policy.resource_manager.get_resource(event)
        self.assertEqual(
            dataset['datasetReference']['datasetId'], 'devxyz')
        self.assertTrue('access' in dataset)
        self.assertEqual(dataset['labels'], {'env': 'dev'})
class BigQueryJobTest(BaseTest):

    def test_query(self):
        """List jobs and verify state plus job-reference fields."""
        project_id = 'cloud-custodian'
        session_factory = self.replay_flight_data(
            'bq-job-query', project_id=project_id)
        policy = self.load_policy(
            {'name': 'bq-job-get', 'resource': 'gcp.bq-job'},
            session_factory=session_factory)
        resources = policy.run()
        self.assertEqual(len(resources), 1)
        job = resources[0]
        self.assertEqual(job['status']['state'], 'DONE')
        self.assertEqual(job['jobReference']['location'], 'US')
        self.assertEqual(job['jobReference']['projectId'], project_id)

    def test_job_get(self):
        """Resolve a job from an audit-log event in gcp-audit mode."""
        project_id = 'cloud-custodian'
        expected_job_id = 'bquxjob_4c28c9a7_16958c2791d'
        expected_location = 'US'
        session_factory = self.replay_flight_data(
            'bq-job-get', project_id=project_id)
        policy = self.load_policy({
            'name': 'bq-job-get',
            'resource': 'gcp.bq-job',
            'mode': {
                'type': 'gcp-audit',
                'methods': ['google.cloud.bigquery.v2.JobService.InsertJob'],
            },
        }, session_factory=session_factory)
        execution_mode = policy.get_execution_mode()
        resolved = execution_mode.run(event_data('bq-job-create.json'), None)
        reference = resolved[0]['jobReference']
        self.assertEqual(reference['jobId'], expected_job_id)
        self.assertEqual(reference['location'], expected_location)
        self.assertEqual(reference['projectId'], project_id)
        self.assertEqual(
            resolved[0]['id'],
            "{}:{}.{}".format(project_id, expected_location, expected_job_id))
class BigQueryTableTest(BaseTest):

    def test_query(self):
        """List tables and check the first result's reference and type."""
        project_id = 'cloud-custodian'
        session_factory = self.replay_flight_data(
            'bq-table-query', project_id=project_id)
        policy = self.load_policy(
            {'name': 'bq-table-query', 'resource': 'gcp.bq-table'},
            session_factory=session_factory)
        tables = policy.run()
        self.assertIn('tableReference', tables[0].keys())
        self.assertEqual('TABLE', tables[0]['type'])

    def test_table_get(self):
        """Resolve a table from an audit-log event in gcp-audit mode."""
        project_id = 'cloud-custodian'
        session_factory = self.replay_flight_data(
            'bq-table-get', project_id=project_id)
        policy = self.load_policy({
            'name': 'bq-table-get',
            'resource': 'gcp.bq-table',
            'mode': {
                'type': 'gcp-audit',
                'methods': ['google.cloud.bigquery.v2.TableService.InsertTable'],
            },
        }, session_factory=session_factory)
        execution_mode = policy.get_execution_mode()
        resolved = execution_mode.run(event_data('bq-table-create.json'), None)
        self.assertIn('tableReference', resolved[0].keys())
| {
"content_hash": "bbc428140d6fd02121241377e3cf1b34",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 87,
"avg_line_length": 38.71764705882353,
"alnum_prop": 0.5739896687936797,
"repo_name": "capitalone/cloud-custodian",
"id": "fc111ad3be2b333e01ff4a2d5fdc300ae2509ad2",
"size": "3414",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/c7n_gcp/tests/test_bigquery.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2190"
},
{
"name": "Go",
"bytes": "135995"
},
{
"name": "HTML",
"bytes": "31"
},
{
"name": "Makefile",
"bytes": "9378"
},
{
"name": "Python",
"bytes": "3693572"
},
{
"name": "Shell",
"bytes": "2294"
}
],
"symlink_target": ""
} |
"""Support for Rflink Cover devices."""
import logging
import voluptuous as vol
from homeassistant.components.cover import PLATFORM_SCHEMA, CoverDevice
from homeassistant.const import CONF_NAME, CONF_TYPE, STATE_OPEN
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.restore_state import RestoreEntity
from . import (
CONF_ALIASES,
CONF_DEVICE_DEFAULTS,
CONF_DEVICES,
CONF_FIRE_EVENT,
CONF_GROUP,
CONF_GROUP_ALIASES,
CONF_NOGROUP_ALIASES,
CONF_SIGNAL_REPETITIONS,
DEVICE_DEFAULTS_SCHEMA,
RflinkCommand,
)
_LOGGER = logging.getLogger(__name__)

# Rflink pushes state via events; no parallel polling is required.
PARALLEL_UPDATES = 0

# Cover control styles: "standard" sends UP/DOWN as-is, "inverted" swaps
# them (used for e.g. KlikAanKlikUit "newkaku" covers, see below).
TYPE_STANDARD = "standard"
TYPE_INVERTED = "inverted"

# Platform configuration: shared device defaults plus per-device options
# keyed by Rflink device id (name, type, aliases, grouping, repetitions).
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(
            CONF_DEVICE_DEFAULTS, default=DEVICE_DEFAULTS_SCHEMA({})
        ): DEVICE_DEFAULTS_SCHEMA,
        vol.Optional(CONF_DEVICES, default={}): vol.Schema(
            {
                cv.string: {
                    vol.Optional(CONF_NAME): cv.string,
                    vol.Optional(CONF_TYPE): vol.Any(TYPE_STANDARD, TYPE_INVERTED),
                    vol.Optional(CONF_ALIASES, default=[]): vol.All(
                        cv.ensure_list, [cv.string]
                    ),
                    vol.Optional(CONF_GROUP_ALIASES, default=[]): vol.All(
                        cv.ensure_list, [cv.string]
                    ),
                    vol.Optional(CONF_NOGROUP_ALIASES, default=[]): vol.All(
                        cv.ensure_list, [cv.string]
                    ),
                    vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean,
                    vol.Optional(CONF_SIGNAL_REPETITIONS): vol.Coerce(int),
                    vol.Optional(CONF_GROUP, default=True): cv.boolean,
                }
            }
        ),
    }
)
def entity_type_for_device_id(device_id):
    """Return entity class for protocol of a given device_id.

    Async friendly.
    """
    # KlikAanKlikUit ("newkaku") covers have their controls inverted;
    # every other protocol gets the standard behaviour.
    protocol = device_id.split("_")[0]
    if protocol == "newkaku":
        return TYPE_INVERTED
    return TYPE_STANDARD
def entity_class_for_type(entity_type):
    """Translate entity type to entity class.

    Async friendly.
    """
    if entity_type == TYPE_INVERTED:
        # Cover with open/close commands inverted, like KAKU/COCO ASUN-650.
        return InvertedRflinkCover
    # Default cover implementation (also for unknown types).
    return RflinkCover
def devices_from_config(domain_config):
    """Parse configuration and add Rflink cover devices."""
    devices = []
    for device_id, device_conf in domain_config[CONF_DEVICES].items():
        # The type key is popped so it is not forwarded to the entity
        # constructor; when absent it is derived from the device id's
        # protocol prefix.
        if CONF_TYPE in device_conf:
            entity_type = device_conf.pop(CONF_TYPE)
        else:
            entity_type = entity_type_for_device_id(device_id)
        entity_class = entity_class_for_type(entity_type)
        entity_kwargs = dict(domain_config[CONF_DEVICE_DEFAULTS], **device_conf)
        devices.append(entity_class(device_id, **entity_kwargs))
    return devices
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Rflink cover platform."""
    # Entities come purely from YAML config; discovery_info is unused.
    async_add_entities(devices_from_config(config))
class RflinkCover(RflinkCommand, CoverDevice, RestoreEntity):
    """Rflink entity which can switch on/stop/off (eg: cover)."""

    async def async_added_to_hass(self):
        """Restore RFLink cover state (OPEN/CLOSE)."""
        await super().async_added_to_hass()
        old_state = await self.async_get_last_state()
        if old_state is not None:
            # Restored state string equal to STATE_OPEN means "was open".
            self._state = old_state.state == STATE_OPEN

    def _handle_event(self, event):
        """Adjust state if Rflink picks up a remote command for this device."""
        # A remote command supersedes any queued command repetitions.
        self.cancel_queued_send_commands()
        command = event["command"]
        if command in ["on", "allon", "up"]:
            self._state = True
        elif command in ["off", "alloff", "down"]:
            self._state = False

    @property
    def should_poll(self):
        """No polling available in RFlink cover."""
        return False

    @property
    def is_closed(self):
        """Return if the cover is closed."""
        return not self._state

    @property
    def assumed_state(self):
        """Return True because covers can be stopped midway."""
        return True

    async def async_close_cover(self, **kwargs):
        """Turn the device close."""
        await self._async_handle_command("close_cover")

    async def async_open_cover(self, **kwargs):
        """Turn the device open."""
        await self._async_handle_command("open_cover")

    async def async_stop_cover(self, **kwargs):
        """Turn the device stop."""
        await self._async_handle_command("stop_cover")
class InvertedRflinkCover(RflinkCover):
    """Rflink cover that has inverted open/close commands."""

    async def _async_send_command(self, cmd, repetitions):
        """Will invert only the UP/DOWN commands."""
        _LOGGER.debug("Getting command: %s for Rflink device: %s", cmd, self._device_id)
        if cmd == "UP":
            inverted_cmd = "DOWN"
        elif cmd == "DOWN":
            inverted_cmd = "UP"
        else:
            inverted_cmd = cmd
        await super()._async_send_command(inverted_cmd, repetitions)
| {
"content_hash": "c411197563631dbccff85d43a54550f0",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 88,
"avg_line_length": 32.58479532163743,
"alnum_prop": 0.6136037329504667,
"repo_name": "postlund/home-assistant",
"id": "794542cb9d4c824ca75e1bd4a2063dfcad138eba",
"size": "5572",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/rflink/cover.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20215859"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
} |
"""Test class of 3PAR Client handling CPG."""
from test import HPE3ParClient_base as hpe3parbase
from hpe3parclient import exceptions
DOMAIN = 'UNIT_TEST_DOMAIN'
# CPG names carry the shared test timestamp so repeated runs against the
# same array do not collide on leftover objects.
CPG_NAME1 = 'CPG1_UNIT_TEST' + hpe3parbase.TIME
CPG_NAME2 = 'CPG2_UNIT_TEST' + hpe3parbase.TIME
class HPE3ParClientCPGTestCase(hpe3parbase.HPE3ParClientBaseTestCase):
    """Tests for CPG create/get/delete and space queries.

    Runs against a (possibly recorded) 3PAR WSAPI session provided by the
    base class; ``self.cl`` is the client under test.
    """

    def setUp(self):
        super(HPE3ParClientCPGTestCase, self).setUp()

    def tearDown(self):
        # Best-effort cleanup: depending on which test ran, the CPGs may
        # or may not exist, so deletion failures are deliberately ignored.
        try:
            self.cl.deleteCPG(CPG_NAME1)
        except Exception:
            pass
        try:
            self.cl.deleteCPG(CPG_NAME2)
        except Exception:
            pass
        # very last, tear down base class
        super(HPE3ParClientCPGTestCase, self).tearDown()

    def test_1_create_CPG(self):
        """Create two CPGs (second with an explicit RAID layout) and verify."""
        self.printHeader('create_CPG')
        # add one
        optional = self.CPG_OPTIONS
        name = CPG_NAME1
        self.cl.createCPG(name, optional)
        # check
        cpg1 = self.cl.getCPG(name)
        self.assertIsNotNone(cpg1)
        cpgName = cpg1['name']
        self.assertEqual(name, cpgName)
        # add another
        name = CPG_NAME2
        optional2 = optional.copy()
        # Layout options depend on what the test target supports.
        if self.CPG_LDLAYOUT_HA is None:
            more_optional = {'LDLayout': {'RAIDType': 2}}
        else:
            if self.DISK_TYPE is None:
                more_optional = {'LDLayout': {'RAIDType': 2,
                                              'HA': self.CPG_LDLAYOUT_HA}}
            else:
                more_optional = {'LDLayout': {'RAIDType': 2,
                                              'HA': self.CPG_LDLAYOUT_HA,
                                              'diskPatterns':
                                              [{'diskType': self.DISK_TYPE}]}}
        optional2.update(more_optional)
        self.cl.createCPG(name, optional2)
        # check
        cpg2 = self.cl.getCPG(name)
        self.assertIsNotNone(cpg2)
        cpgName = cpg2['name']
        self.assertEqual(name, cpgName)
        self.printFooter('create_CPG')

    def test_1_create_CPG_badDomain(self):
        """createCPG with a nonexistent domain must raise HTTPNotFound."""
        self.printHeader('create_CPG_badDomain')
        if self.DISK_TYPE is None:
            optional = {'domain': 'BAD_DOMAIN'}
        else:
            optional = {'LDLayout': {'diskPatterns': [{
                'diskType': self.DISK_TYPE}]}, 'domain': 'BAD_DOMAIN'}
        self.assertRaises(exceptions.HTTPNotFound, self.cl.createCPG,
                          CPG_NAME1, optional)
        self.printFooter('create_CPG_badDomain')

    def test_1_create_CPG_dup(self):
        """Creating the same CPG twice must raise HTTPConflict."""
        self.printHeader('create_CPG_dup')
        optional = self.CPG_OPTIONS
        name = CPG_NAME1
        self.cl.createCPG(name, optional)
        self.assertRaises(exceptions.HTTPConflict, self.cl.createCPG,
                          CPG_NAME1, optional)
        self.printFooter('create_CPG_dup')

    def test_1_create_CPG_badParams(self):
        """An unknown top-level option must raise HTTPBadRequest."""
        self.printHeader('create_CPG_badParams')
        optional = {'domainBad': 'UNIT_TEST'}
        self.assertRaises(exceptions.HTTPBadRequest, self.cl.createCPG,
                          CPG_NAME1, optional)
        self.printFooter('create_CPG_badParams')

    def test_1_create_CPG_badParams2(self):
        """An unknown LDLayout option must raise HTTPBadRequest."""
        self.printHeader('create_CPG_badParams2')
        optional = {'domain': 'UNIT_TEST'}
        more_optional = {'LDLayout': {'RAIDBadType': 1}}
        optional.update(more_optional)
        self.assertRaises(exceptions.HTTPBadRequest, self.cl.createCPG,
                          CPG_NAME1, optional)
        self.printFooter('create_CPG_badParams2')

    def test_2_get_CPG_bad(self):
        """getCPG on a nonexistent name must raise HTTPNotFound."""
        self.printHeader('get_CPG_bad')
        self.assertRaises(exceptions.HTTPNotFound, self.cl.getCPG, 'BadName')
        self.printFooter('get_CPG_bad')

    def test_2_get_CPGs(self):
        """getCPGs must include a CPG created just before."""
        self.printHeader('get_CPGs')
        optional = self.CPG_OPTIONS
        name = CPG_NAME1
        self.cl.createCPG(name, optional)
        cpgs = self.cl.getCPGs()
        self.assertGreater(len(cpgs), 0, 'getCPGs failed with no CPGs')
        self.assertTrue(self.findInDict(cpgs['members'], 'name', CPG_NAME1))
        self.printFooter('get_CPGs')

    def test_3_delete_CPG_nonExist(self):
        """deleteCPG on a nonexistent name must raise HTTPNotFound."""
        self.printHeader('delete_CPG_nonExist')
        self.assertRaises(exceptions.HTTPNotFound, self.cl.deleteCPG,
                          'NonExistCPG')
        self.printFooter('delete_CPG_nonExist')

    def test_3_delete_CPGs(self):
        """Create a CPG, delete it via the listing, and verify it is gone."""
        self.printHeader('delete_CPGs')
        # add one
        optional = self.CPG_OPTIONS
        self.cl.createCPG(CPG_NAME1, optional)
        cpg = self.cl.getCPG(CPG_NAME1)
        # NOTE(review): assertTrue here treats CPG_NAME1 as the failure
        # message, not a comparison — assertEqual was likely intended.
        self.assertTrue(cpg['name'], CPG_NAME1)
        cpgs = self.cl.getCPGs()
        if cpgs and cpgs['total'] > 0:
            for cpg in cpgs['members']:
                if cpg['name'] == CPG_NAME1:
                    # pprint.pprint("Deleting CPG %s " % cpg['name'])
                    self.cl.deleteCPG(cpg['name'])
        # check
        self.assertRaises(exceptions.HTTPNotFound, self.cl.getCPG, CPG_NAME1)
        self.printFooter('delete_CPGs')

    def test_4_get_cpg_available_space(self):
        """getCPGAvailableSpace must return data for an existing CPG."""
        self.printHeader('get_cpg_available_space')
        optional = self.CPG_OPTIONS
        name = CPG_NAME1
        self.cl.createCPG(name, optional)
        cpg1 = self.cl.getCPGAvailableSpace(name)
        self.assertIsNotNone(cpg1)
        self.printFooter('get_cpg_available_space')

    def test_4_get_cpg_available_space_bad_cpg(self):
        """getCPGAvailableSpace on a bad name must raise HTTPNotFound."""
        self.printHeader('get_cpg_available_space_bad_cpg')
        self.assertRaises(
            exceptions.HTTPNotFound,
            self.cl.getCPGAvailableSpace,
            'BadName')
        self.printFooter('get_cpg_available_space_bad_cpg')
# testing
# suite = unittest.TestLoader().loadTestsFromTestCase(HPE3ParClientCPGTestCase)
# unittest.TextTestRunner(verbosity=2).run(suite)
| {
"content_hash": "fa683e755310b816f523a3a905df0f73",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 79,
"avg_line_length": 31.29255319148936,
"alnum_prop": 0.586775454699983,
"repo_name": "hpe-storage/python-3parclient",
"id": "9bdf39b8f1a9b595a3855b547ac891568844a706",
"size": "6489",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/test_HPE3ParClient_CPG.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "747190"
},
{
"name": "Tcl",
"bytes": "3246"
}
],
"symlink_target": ""
} |
from argparse import ArgumentParser
from sys import stderr
from . import MIN_GETDENTS_BUFF_SIZE, getdents
from .formatters import FORMATTERS
def parse_args(args, prog):
    """Parse CLI options; return (path, buffer size, formatter callable)."""
    parser = ArgumentParser(prog=prog, description='Print directory contents.')
    parser.add_argument('path', metavar='PATH')
    parser.add_argument(
        '-b', '--buffer-size', metavar='N', type=int, default=32768,
        help='Buffer size (in bytes) to allocate when iterating over directory',
    )
    parser.add_argument(
        '-o', '--output-format', metavar='NAME', default='plain',
        choices=list(FORMATTERS),
        help='Output format: %s' % ', '.join(sorted(FORMATTERS)),
    )
    options = parser.parse_args(args)
    if options.buffer_size < MIN_GETDENTS_BUFF_SIZE:
        # parser.error() prints the message and exits the process.
        parser.error('Minimum buffer size is %s' % MIN_GETDENTS_BUFF_SIZE)
    formatter = FORMATTERS[options.output_format]
    return options.path, options.buffer_size, formatter
def main(args=None, prog=None):
    """CLI entry point; return the process exit status (0 on success)."""
    path, buff_size, formatter = parse_args(args, prog)
    try:
        formatter(getdents(path, buff_size=buff_size))
    except MemoryError:
        print('Not enough memory to allocate', buff_size, 'bytes of data',
              file=stderr)
        return 3
    except FileNotFoundError as err:
        print(err, file=stderr)
        return 4
    except NotADirectoryError as err:
        print(err, file=stderr)
        return 5
    except PermissionError as err:
        print(err, file=stderr)
        return 6
    except OSError as err:
        # Catch-all for other OS errors; must come after its subclasses
        # above so they keep their distinct exit codes.
        print(err, file=stderr)
        return 7
    return 0
| {
"content_hash": "ebeb64cf8fa5adbc2974cef9d7c8c461",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 78,
"avg_line_length": 25.923076923076923,
"alnum_prop": 0.6017804154302671,
"repo_name": "ZipFile/python-getdents",
"id": "118ce0b770604b09a3ed09a746829ef40b0e3411",
"size": "1685",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "getdents/cli.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "5129"
},
{
"name": "Python",
"bytes": "14698"
}
],
"symlink_target": ""
} |
import requests, json, base64
placeID = 2322
personID = 72898
uri = "/api/core/v3/members/places/{}".format(placeID)
base_url = "https://geonet.esri.com"
url = base_url + uri
user = "username"
password = "password"
auth = "Basic " + base64.encodestring('%s:%s' % (user, password)).replace("\n","");
headers = { "Content-Type": "application/json", "Authorization": auth }
data = json.dumps(
{
"person" : "https://example.jiveon.com/api/core/v3/people/{}".format(personID),
"state" : "member"
})
try:
req = requests.post(url, data=data, headers=headers )
print "Member Created"
except:
print req.text
| {
"content_hash": "f5ad0d503de451956f05f0f9c16f20bf",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 85,
"avg_line_length": 22.821428571428573,
"alnum_prop": 0.6416275430359938,
"repo_name": "thales007/Jive-Community-Management",
"id": "9f30ea3451a495f51d5e79d5fb973d6e08bd6b79",
"size": "954",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CreateGroupMember.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10929"
}
],
"symlink_target": ""
} |
import datetime, os
from email.utils import parseaddr
from django import forms
from django.conf import settings
from django.db.models import Q
from django.forms.util import ErrorList
from django.core.validators import email_re
from django.template.loader import render_to_string
from ietf.liaisons.accounts import (can_add_outgoing_liaison, can_add_incoming_liaison,
get_person_for_user, is_secretariat, is_sdo_liaison_manager)
from ietf.liaisons.utils import IETFHM
from ietf.liaisons.widgets import (FromWidget, ReadOnlyWidget, ButtonWidget,
ShowAttachmentsWidget, RelatedLiaisonWidget)
from ietf.liaisons.models import LiaisonStatement, LiaisonStatementPurposeName
from ietf.liaisons.proxy import LiaisonDetailProxy
from ietf.group.models import Group, Role
from ietf.person.models import Person, Email
from ietf.doc.models import Document
class LiaisonForm(forms.Form):
    """Base form for submitting a liaison statement.

    Subclasses (incoming/outgoing/edit) configure the From, Reply-to and
    To fields by overriding the set_*_field hooks; this class implements
    the shared validation and persistence logic.
    """

    person = forms.ModelChoiceField(Person.objects.all())
    from_field = forms.ChoiceField(widget=FromWidget, label=u'From')
    replyto = forms.CharField(label=u'Reply to')
    organization = forms.ChoiceField()
    to_poc = forms.CharField(widget=ReadOnlyWidget, label="POC", required=False)
    response_contact = forms.CharField(required=False, max_length=255)
    technical_contact = forms.CharField(required=False, max_length=255)
    cc1 = forms.CharField(widget=forms.Textarea, label="CC", required=False, help_text='Please insert one email address per line')
    purpose = forms.ChoiceField()
    purpose_text = forms.CharField(widget=forms.Textarea, label='Other purpose')
    deadline_date = forms.DateField(label='Deadline')
    # Pass the callable, not its result: date.today() evaluated here would
    # freeze the default at process start-up, so long-running servers
    # would pre-fill a stale submission date.
    submitted_date = forms.DateField(label='Submission date', initial=datetime.date.today)
    title = forms.CharField(label=u'Title')
    body = forms.CharField(widget=forms.Textarea, required=False)
    attachments = forms.CharField(label='Attachments', widget=ShowAttachmentsWidget, required=False)
    attach_title = forms.CharField(label='Title', required=False)
    attach_file = forms.FileField(label='File', required=False)
    attach_button = forms.CharField(label='',
                                    widget=ButtonWidget(label='Attach', show_on='id_attachments',
                                                        require=['id_attach_title', 'id_attach_file'],
                                                        required_label='title and file'),
                                    required=False)
    related_to = forms.ModelChoiceField(LiaisonStatement.objects.all(), label=u'Related Liaison', widget=RelatedLiaisonWidget, required=False)

    # Rendering order and grouping of the fields, consumed by
    # get_fieldsets(); subclasses replace individual entries.
    fieldsets = [('From', ('from_field', 'replyto')),
                 ('To', ('organization', 'to_poc')),
                 ('Other email addresses', ('response_contact', 'technical_contact', 'cc1')),
                 ('Purpose', ('purpose', 'purpose_text', 'deadline_date')),
                 ('References', ('related_to', )),
                 ('Liaison Statement', ('title', 'submitted_date', 'body', 'attachments')),
                 ('Add attachment', ('attach_title', 'attach_file', 'attach_button')),
                 ]

    class Media:
        js = ("/js/jquery-1.5.1.min.js",
              "/js/jquery-ui-1.8.11.custom.min.js",
              "/js/liaisons.js", )
        css = {'all': ("/css/liaisons.css",
                       "/css/jquery-ui-themes/jquery-ui-1.8.11.custom.css")}

    def __init__(self, user, *args, **kwargs):
        self.user = user
        self.fake_person = None
        self.person = get_person_for_user(user)
        if kwargs.get('data', None):
            # The secretariat may submit on behalf of another person.
            if is_secretariat(self.user) and 'from_fake_user' in kwargs['data'].keys():
                self.fake_person = Person.objects.get(pk=kwargs['data']['from_fake_user'])
                kwargs['data'].update({'person': self.fake_person.pk})
            else:
                kwargs['data'].update({'person': self.person.pk})
        self.instance = kwargs.pop("instance", None)
        super(LiaisonForm, self).__init__(*args, **kwargs)
        # now copy in values from instance, like a ModelForm
        if self.instance:
            for name, field in self.fields.iteritems():
                try:
                    x = getattr(self.instance, name)
                    if name == "purpose": # proxy has a name-clash on purpose so help it
                        x = x.order
                    try:
                        x = x.pk # foreign keys need the .pk, not the actual object
                    except AttributeError:
                        pass
                    self.initial[name] = x
                except AttributeError:
                    # we have some fields on the form that aren't in the model
                    pass
        self.fields["purpose"].choices = [("", "---------")] + [(str(l.order), l.name) for l in LiaisonStatementPurposeName.objects.all()]
        self.hm = IETFHM
        self.set_from_field()
        self.set_replyto_field()
        self.set_organization_field()

    def __unicode__(self):
        return self.as_div()

    def get_post_only(self):
        """Whether the submitter may only post (not send); see subclasses."""
        return False

    def set_required_fields(self):
        """Adjust requiredness from the submitted purpose before cleaning."""
        purpose = self.data.get('purpose', None)
        # Purpose "5" needs free text (presumably the "Other" choice —
        # matches the 'Other purpose' field label); "1"/"2" need a deadline.
        if purpose == '5':
            self.fields['purpose_text'].required=True
        else:
            self.fields['purpose_text'].required=False
        if purpose in ['1', '2']:
            self.fields['deadline_date'].required=True
        else:
            self.fields['deadline_date'].required=False

    def reset_required_fields(self):
        self.fields['purpose_text'].required=True
        self.fields['deadline_date'].required=True

    def set_from_field(self):
        # Previously "assert NotImplemented", which is a truthy no-op and
        # never fires; fail loudly if a subclass forgets to override.
        raise NotImplementedError

    def set_replyto_field(self):
        self.fields['replyto'].initial = self.person.email()[1]

    def set_organization_field(self):
        # See set_from_field: was a no-op "assert NotImplemented".
        raise NotImplementedError

    def as_div(self):
        return render_to_string('liaisons/liaisonform.html', {'form': self})

    def get_fieldsets(self):
        """Yield {'name': ..., 'fields': [...]} groups for the template."""
        if not self.fieldsets:
            yield dict(name=None, fields=self)
        else:
            for fieldset, fields in self.fieldsets:
                fieldset_dict = dict(name=fieldset, fields=[])
                for field_name in fields:
                    if field_name in self.fields.keyOrder:
                        fieldset_dict['fields'].append(self[field_name])
                if not fieldset_dict['fields']:
                    # if there is no fields in this fieldset, we continue to next fieldset
                    continue
                yield fieldset_dict

    def full_clean(self):
        self.set_required_fields()
        super(LiaisonForm, self).full_clean()
        self.reset_required_fields()

    def has_attachments(self):
        """True when at least one attach_file_N/attach_title_N pair exists."""
        for key in self.files.keys():
            if key.startswith('attach_file_') and key.replace('file', 'title') in self.data.keys():
                return True
        return False

    def check_email(self, value):
        """Raise ValidationError if any comma-separated address is invalid."""
        if not value:
            return
        emails = value.split(',')
        for email in emails:
            name, addr = parseaddr(email)
            if not email_re.search(addr):
                raise forms.ValidationError('Invalid email address: %s' % addr)

    def clean_response_contact(self):
        value = self.cleaned_data.get('response_contact', None)
        self.check_email(value)
        return value

    def clean_technical_contact(self):
        value = self.cleaned_data.get('technical_contact', None)
        self.check_email(value)
        return value

    def clean_replyto(self):
        # Was named clean_reply_to, which Django never calls because the
        # field is "replyto" — so the reply-to address was not validated.
        value = self.cleaned_data.get('replyto', None)
        self.check_email(value)
        return value

    def clean(self):
        if not self.cleaned_data.get('body', None) and not self.has_attachments():
            self._errors['body'] = ErrorList([u'You must provide a body or attachment files'])
            self._errors['attachments'] = ErrorList([u'You must provide a body or attachment files'])
        return self.cleaned_data

    def get_from_entity(self):
        organization_key = self.cleaned_data.get('from_field')
        return self.hm.get_entity_by_key(organization_key)

    def get_to_entity(self):
        organization_key = self.cleaned_data.get('organization')
        return self.hm.get_entity_by_key(organization_key)

    def get_poc(self, organization):
        return ', '.join(u"%s <%s>" % i.email() for i in organization.get_poc())

    def clean_cc1(self):
        """Validate the one-address-per-line CC box; return comma-joined."""
        value = self.cleaned_data.get('cc1', '')
        result = []
        errors = []
        for address in value.split('\n'):
            address = address.strip()
            if not address:
                continue
            try:
                self.check_email(address)
            except forms.ValidationError:
                errors.append(address)
            result.append(address)
        if errors:
            raise forms.ValidationError('Invalid email addresses: %s' % ', '.join(errors))
        return ','.join(result)

    def get_cc(self, from_entity, to_entity):
        #Old automatic Cc code, now we retrive it from cleaned_data
        #persons = to_entity.get_cc(self.person)
        #persons += from_entity.get_from_cc(self.person)
        #return ', '.join(['%s <%s>' % i.email() for i in persons])
        cc = self.cleaned_data.get('cc1', '')
        return cc

    def save(self, *args, **kwargs):
        """Persist the statement and its attachments; return the instance."""
        l = self.instance
        if not l:
            l = LiaisonDetailProxy()

        l.title = self.cleaned_data["title"]
        l.purpose = LiaisonStatementPurposeName.objects.get(order=self.cleaned_data["purpose"])
        l.body = self.cleaned_data["body"].strip()
        l.deadline = self.cleaned_data["deadline_date"]
        l.related_to = self.cleaned_data["related_to"]
        l.reply_to = self.cleaned_data["replyto"]
        l.response_contact = self.cleaned_data["response_contact"]
        l.technical_contact = self.cleaned_data["technical_contact"]
        now = datetime.datetime.now()
        l.modified = now
        l.submitted = datetime.datetime.combine(self.cleaned_data["submitted_date"], now.time())
        if not l.approved:
            l.approved = now
        self.save_extra_fields(l)
        l.save() # we have to save here to make sure we get an id for the attachments
        self.save_attachments(l)
        return l

    def save_extra_fields(self, liaison):
        """Fill the From/To entity fields; overridden by subclasses."""
        from_entity = self.get_from_entity()
        liaison.from_name = from_entity.name
        liaison.from_group = from_entity.obj
        e = self.cleaned_data["person"].email_set.order_by('-active')
        if e:
            liaison.from_contact = e[0]

        organization = self.get_to_entity()
        liaison.to_name = organization.name
        liaison.to_group = organization.obj
        liaison.to_contact = self.get_poc(organization)

        liaison.cc = self.get_cc(from_entity, organization)

    def save_attachments(self, instance):
        """Create a liai-att Document per uploaded file and write it to disk."""
        written = instance.attachments.all().count()
        for key in self.files.keys():
            title_key = key.replace('file', 'title')
            if not key.startswith('attach_file_') or not title_key in self.data.keys():
                continue
            attached_file = self.files.get(key)
            extension=attached_file.name.rsplit('.', 1)
            if len(extension) > 1:
                extension = '.' + extension[1]
            else:
                extension = ''
            written += 1
            name = instance.name() + ("-attachment-%s" % written)
            attach, _ = Document.objects.get_or_create(
                name = name,
                defaults=dict(
                    title = self.data.get(title_key),
                    type_id = "liai-att",
                    external_url = name + extension, # strictly speaking not necessary, but just for the time being ...
                    )
                )
            instance.attachments.add(attach)
            attach_file = open(os.path.join(settings.LIAISON_ATTACH_PATH, attach.name + extension), 'w')
            attach_file.write(attached_file.read())
            attach_file.close()

    def clean_title(self):
        """Reject a title already used by a different statement."""
        title = self.cleaned_data.get('title', None)
        if self.instance and self.instance.pk:
            exclude_filter = {'pk': self.instance.pk}
        else:
            exclude_filter = {}
        exists = bool(LiaisonStatement.objects.exclude(**exclude_filter).filter(title__iexact=title).count())
        if exists:
            raise forms.ValidationError('A liaison statement with the same title has previously been submitted.')
        return title
class IncomingLiaisonForm(LiaisonForm):
    """Liaison form for statements submitted from an SDO to the IETF."""

    def set_from_field(self):
        # The secretariat may submit for any active SDO; everyone else only
        # for SDOs where they hold a liaison-manager or authorized role.
        if is_secretariat(self.user):
            sdos = Group.objects.filter(type="sdo", state="active")
        else:
            sdos = Group.objects.filter(
                type="sdo", state="active", role__person=self.person,
                role__name__in=("liaiman", "auth")).distinct()
        self.fields['from_field'].choices = [
            ('sdo_%s' % sdo.pk, sdo.name) for sdo in sdos.order_by("name")]
        self.fields['from_field'].widget.submitter = unicode(self.person)

    def set_replyto_field(self):
        matching = Email.objects.filter(
            person=self.person, role__group__state="active",
            role__name__in=["liaiman", "auth"])
        if matching:
            self.fields['replyto'].initial = matching[0].address
        else:
            self.fields['replyto'].initial = self.person.email_address()

    def set_organization_field(self):
        self.fields['organization'].choices = self.hm.get_all_incoming_entities()

    def get_post_only(self):
        # Sending (vs. only posting) requires secretariat status or an
        # "auth" role in the originating group.
        from_entity = self.get_from_entity()
        has_auth_role = Role.objects.filter(
            person=self.person, group=from_entity.obj, name="auth")
        if is_secretariat(self.user) or has_auth_role:
            return False
        return True

    def clean(self):
        if 'send' in self.data.keys() and self.get_post_only():
            self._errors['from_field'] = ErrorList([u'As an IETF Liaison Manager you can not send an incoming liaison statements, you only can post them'])
        return super(IncomingLiaisonForm, self).clean()
def liaison_manager_sdos(person):
    """Return the active SDO groups where *person* is liaison manager."""
    sdo_groups = Group.objects.filter(
        type="sdo", state="active", role__person=person, role__name="liaiman")
    return sdo_groups.distinct()
class OutgoingLiaisonForm(LiaisonForm):
to_poc = forms.CharField(label="POC", required=True)
approved = forms.BooleanField(label="Obtained prior approval", required=False)
other_organization = forms.CharField(label="Other SDO", required=True)
def get_to_entity(self):
organization_key = self.cleaned_data.get('organization')
organization = self.hm.get_entity_by_key(organization_key)
if organization_key == 'othersdo' and self.cleaned_data.get('other_organization', None):
organization.name=self.cleaned_data['other_organization']
return organization
def set_from_field(self):
if is_secretariat(self.user):
self.fields['from_field'].choices = self.hm.get_all_incoming_entities()
elif is_sdo_liaison_manager(self.person):
self.fields['from_field'].choices = self.hm.get_all_incoming_entities()
all_entities = []
for i in self.hm.get_entities_for_person(self.person):
all_entities += i[1]
if all_entities:
self.fields['from_field'].widget.full_power_on = [i[0] for i in all_entities]
self.fields['from_field'].widget.reduced_to_set = ['sdo_%s' % i.pk for i in liaison_manager_sdos(self.person)]
else:
self.fields['from_field'].choices = self.hm.get_entities_for_person(self.person)
self.fields['from_field'].widget.submitter = unicode(self.person)
self.fieldsets[0] = ('From', ('from_field', 'replyto', 'approved'))
def set_replyto_field(self):
e = Email.objects.filter(person=self.person, role__group__state="active", role__name__in=["ad", "chair"])
if e:
addr = e[0].address
else:
addr = self.person.email_address()
self.fields['replyto'].initial = addr
def set_organization_field(self):
# If the user is a liaison manager and is nothing more, reduce the To field to his SDOs
if not self.hm.get_entities_for_person(self.person) and is_sdo_liaison_manager(self.person):
self.fields['organization'].choices = [('sdo_%s' % i.pk, i.name) for i in liaison_manager_sdos(self.person)]
else:
self.fields['organization'].choices = self.hm.get_all_outgoing_entities()
self.fieldsets[1] = ('To', ('organization', 'other_organization', 'to_poc'))
def set_required_fields(self):
super(OutgoingLiaisonForm, self).set_required_fields()
organization = self.data.get('organization', None)
if organization == 'othersdo':
self.fields['other_organization'].required=True
else:
self.fields['other_organization'].required=False
def reset_required_fields(self):
super(OutgoingLiaisonForm, self).reset_required_fields()
self.fields['other_organization'].required=True
def get_poc(self, organization):
return self.cleaned_data['to_poc']
def save_extra_fields(self, liaison):
super(OutgoingLiaisonForm, self).save_extra_fields(liaison)
from_entity = self.get_from_entity()
needs_approval = from_entity.needs_approval(self.person)
if not needs_approval or self.cleaned_data.get('approved', False):
liaison.approved = datetime.datetime.now()
else:
liaison.approved = None
def clean_to_poc(self):
value = self.cleaned_data.get('to_poc', None)
self.check_email(value)
return value
    def clean_organization(self):
        """Validate that the chosen sender entity may address the chosen
        destination.

        Allowed when either (a) the from-entity is one the user has full
        privileges in, or (b) the destination is one of the user's managed
        SDOs.  Otherwise a ValidationError names the rejected destination.
        """
        to_code = self.cleaned_data.get('organization', None)
        from_code = self.cleaned_data.get('from_field', None)
        # Nothing to cross-check until both ends are present.
        if not to_code or not from_code:
            return to_code
        all_entities = []
        person = self.fake_person or self.person
        for i in self.hm.get_entities_for_person(person):
            all_entities += i[1]
        # If the from entity is one in which the user has full privileges the to entity could be anyone
        if from_code in [i[0] for i in all_entities]:
            return to_code
        sdo_codes = ['sdo_%s' % i.pk for i in liaison_manager_sdos(person)]
        if to_code in sdo_codes:
            return to_code
        entity = self.get_to_entity()
        entity_name = entity and entity.name or to_code
        if self.fake_person:
            raise forms.ValidationError('%s is not allowed to send a liaison to: %s' % (self.fake_person, entity_name))
        else:
            raise forms.ValidationError('You are not allowed to send a liaison to: %s' % entity_name)
class EditLiaisonForm(LiaisonForm):
    """Variant of LiaisonForm used when editing an existing liaison.

    The entity-choice widgets of the base form are replaced by plain text
    inputs pre-filled from the instance, and saving writes the free-text
    values straight back onto the liaison record.
    """
    from_field = forms.CharField(widget=forms.TextInput, label=u'From')
    replyto = forms.CharField(label=u'Reply to', widget=forms.TextInput)
    organization = forms.CharField(widget=forms.TextInput)
    to_poc = forms.CharField(widget=forms.TextInput, label="POC", required=False)
    cc1 = forms.CharField(widget=forms.TextInput, label="CC", required=False)

    class Meta:
        fields = ('from_raw_body', 'to_body', 'to_poc', 'cc1', 'last_modified_date', 'title',
                  'response_contact', 'technical_contact', 'purpose_text', 'body',
                  'deadline_date', 'purpose', 'replyto', 'related_to')

    def __init__(self, *args, **kwargs):
        super(EditLiaisonForm, self).__init__(*args, **kwargs)
        self.edit = True
        self.initial.update({'attachments': self.instance.uploads_set.all()})
        self.fields['submitted_date'].initial = self.instance.submitted_date

    def set_from_field(self):
        # Show the stored sender verbatim instead of building entity choices.
        self.fields['from_field'].initial = self.instance.from_body

    def set_replyto_field(self):
        self.fields['replyto'].initial = self.instance.replyto

    def set_organization_field(self):
        self.fields['organization'].initial = self.instance.to_body

    def save_extra_fields(self, liaison):
        # The free-text fields map directly onto the liaison record.
        liaison.from_name = self.cleaned_data.get('from_field')
        liaison.to_name = self.cleaned_data.get('organization')
        liaison.to_contact = self.cleaned_data['to_poc']
        liaison.cc = self.cleaned_data['cc1']
| {
"content_hash": "d92b6d89a3f72fa3f688e52b53d0529c",
"timestamp": "",
"source": "github",
"line_count": 474,
"max_line_length": 155,
"avg_line_length": 43.257383966244724,
"alnum_prop": 0.6060280920795942,
"repo_name": "mcr/ietfdb",
"id": "efe2c6eb4eaff4fca193ff257ba625a928555390",
"size": "20504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ietf/liaisons/formsREDESIGN.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "239198"
},
{
"name": "JavaScript",
"bytes": "450755"
},
{
"name": "Perl",
"bytes": "3223"
},
{
"name": "Python",
"bytes": "10286676"
},
{
"name": "Ruby",
"bytes": "3468"
},
{
"name": "Shell",
"bytes": "39950"
},
{
"name": "TeX",
"bytes": "23944"
}
],
"symlink_target": ""
} |
import sys
from typing import TYPE_CHECKING

# On Python < 3.7 there is no module-level __getattr__ (PEP 562), and type
# checkers need real imports to resolve names - so import eagerly in both
# cases.  Otherwise register the submodules/classes for lazy import.
if sys.version_info < (3, 7) or TYPE_CHECKING:
    from ._x import X
    from ._y import Y
    from ._z import Z
    from . import x
    from . import y
    from . import z
else:
    from _plotly_utils.importers import relative_import

    # relative_import wires up __getattr__/__dir__ so the listed names are
    # only imported on first access.
    __all__, __getattr__, __dir__ = relative_import(
        __name__, [".x", ".y", ".z"], ["._x.X", "._y.Y", "._z.Z"]
    )
| {
"content_hash": "47ad1b0ddc00cd00dd3c35252a06781f",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 65,
"avg_line_length": 25.375,
"alnum_prop": 0.5566502463054187,
"repo_name": "plotly/plotly.py",
"id": "40e571c2c8732d5419cd2e52a3dd520cf0cce217",
"size": "406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/graph_objs/surface/contours/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
import datetime
from dateutil.relativedelta import relativedelta
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import signals
from django.dispatch import receiver
from django.utils.dateformat import format
from django.utils.formats import date_format, get_format
from django.utils.translation import ugettext_lazy as _
from fragapy.common.models.adminlink import AdminLinkMixin
from model_utils.models import TimeStampedModel
from boris.classification import SEXES, NATIONALITIES, \
ETHNIC_ORIGINS, LIVING_CONDITIONS, ACCOMODATION_TYPES, EMPLOYMENT_TYPES, \
DRUG_APPLICATION_FREQUENCY, DRUG_APPLICATION_TYPES, \
DISEASES, DISEASE_TEST_RESULTS, EDUCATION_LEVELS, ANONYMOUS_TYPES, \
RISKY_BEHAVIOR_KIND, RISKY_BEHAVIOR_PERIODICITY, DRUGS
from boris.services.models import GroupCounselling, Encounter
from boris.services.models.k import _group_service_title
class IndexedStringEnum(models.Model, AdminLinkMixin):
    """Abstract base for enumeration-like models keyed by an indexed title."""
    title = models.CharField(max_length=100, verbose_name=_(u'Název'), db_index=True)

    def __unicode__(self):
        return self.title

    class Meta:
        abstract = True

    @staticmethod
    def autocomplete_search_fields():
        # Consumed by the admin autocomplete widget.
        return ('title__icontains',)
class Region(IndexedStringEnum):
    """A region (kraj) - top level of the geographic hierarchy."""
    class Meta:
        verbose_name = _(u'Kraj')
        verbose_name_plural = _(u'Kraje')
class District(IndexedStringEnum):
    """A district (okres) belonging to a Region."""
    region = models.ForeignKey(Region, verbose_name=_(u'Kraj'))

    class Meta:
        verbose_name = _(u'Okres')
        verbose_name_plural = _(u'Okresy')

    def __unicode__(self):
        # Include the region for disambiguation in listings.
        return u'%s, %s' % (self.title, unicode(self.region))
class Town(IndexedStringEnum):
    """A town (město) belonging to a District."""
    district = models.ForeignKey(District, verbose_name=_(u'Okres'))

    class Meta:
        verbose_name = _(u'Město')
        verbose_name_plural = _(u'Města')

    def __unicode__(self):
        return u'%s' % self.title
class GroupContactType(IndexedStringEnum):
    """Type of a group contact, displayed as '<key>) <title>'."""
    key = models.SmallIntegerField(verbose_name=_(u'Kód'))

    class Meta:
        verbose_name = _(u'Typ skupiny')
        verbose_name_plural = _(u'Typy skupin')

    def __unicode__(self):
        return u'%s) %s' % (self.key, unicode(self.title))
class Person(TimeStampedModel, AdminLinkMixin):
    """Common base table for people-like records (Client, Anonymous, ...).

    ``title`` and ``content_type`` are denormalised in :meth:`clean` so the
    concrete subclass instance can be recovered later via :meth:`cast`.
    """
    # title enables us to easily print subclass __unicode__ values from Person
    title = models.CharField(max_length=255, editable=False,
                             verbose_name=_(u'Název'), db_index=True)
    content_type = models.ForeignKey(ContentType, editable=False)

    class Meta:
        verbose_name = _(u'Osoba')
        verbose_name_plural = _(u'Osoby')

    @property
    def services(self):
        # Local import avoids a circular dependency with the services app.
        from boris.services.models.core import Service
        return Service.objects.filter(encounter__person=self)

    @property
    def first_contact_date(self):
        """Date of the earliest recorded encounter, or None when there is none."""
        try:
            return self.encounters.order_by('performed_on').values_list('performed_on', flat=True)[0]
        except IndexError:
            return None

    @property
    def last_contact_date(self):
        """Date of the most recent encounter, or None when there is none."""
        try:
            return self.encounters.order_by('-performed_on').values_list('performed_on', flat=True)[0]
        except IndexError:
            return None

    def __unicode__(self):
        return self.title

    def clean(self):
        # Denormalise the printable title and remember the concrete subclass.
        self.title = unicode(self)
        # @attention: instead of using get_for_model which doesn't respect
        # proxy models content types, use get_by_natural key as a workaround
        self.content_type = ContentType.objects.get_by_natural_key(
            self._meta.app_label, self._meta.object_name.lower())

    def cast(self):
        """
        When dealing with subclass that has been selected from base table,
        this will return the corresponding subclass instance.
        """
        try:
            return self.content_type.get_object_for_this_type(pk=self.pk)
        except ContentType.DoesNotExist:  # E.g. mock objects or some not-yet-saved objects.
            return self

    def is_default_service(self, service):
        """Returns True if ``service`` is default for this person, False otherwise"""
        return False

    @staticmethod
    def autocomplete_search_fields():
        return ('title__icontains',)
class PractitionerContact(models.Model, AdminLinkMixin):
    '''
    A simple model to capture the contacts with practitioners.
    (Formerly was a descendant of Person named "Practitioner".)
    '''
    users = models.ManyToManyField('auth.User', verbose_name=_('Kdo'))
    person_or_institution = models.CharField(max_length=255,
                                             verbose_name=_(u'Osoba nebo instituce'))
    town = models.ForeignKey('clients.Town', related_name='+', verbose_name=_(u'Město'))
    date = models.DateField(verbose_name=_(u'Kdy'))
    note = models.TextField(verbose_name=_(u'Poznámka'), blank=True)

    class Meta:
        verbose_name = _(u'Odborný kontakt')
        verbose_name_plural = _(u'Odborné kontakty')

    def __unicode__(self):
        # E.g. "<institution> v <town>, <localized date>".
        return _(u'%(person_or_institution)s v %(town)s, %(date)s') % {
            'person_or_institution': self.person_or_institution,
            'town': self.town,
            'date': date_format(self.date)
        }
class GroupContact(models.Model, AdminLinkMixin):
    '''
    A model for convenient work with group counselling service.

    Saving a GroupContact triggers the signal handlers below, which maintain
    one group-counselling Encounter per selected client.
    '''
    users = models.ManyToManyField('auth.User', verbose_name=_('Kdo'))
    clients = models.ManyToManyField('clients.Client', verbose_name=_('Klienti'))
    town = models.ForeignKey('clients.Town', related_name='+', verbose_name=_(u'Město'))
    date = models.DateField(verbose_name=_(u'Kdy'))
    note = models.TextField(verbose_name=_(u'Poznámka'), blank=True)
    # NOTE(review): default=1 assumes a GroupContactType row with pk 1 exists
    # in every deployment - confirm via fixtures/migrations.
    type = models.ForeignKey('clients.GroupContactType', related_name='+', verbose_name=_(u'Typ'),
                             default=1)

    class Meta:
        verbose_name = u'Skupinový kontakt'
        verbose_name_plural = u'Skupinové kontakty'

    def __unicode__(self):
        # hasattr guard tolerates instances where town is not set yet.
        town = self.town.title if hasattr(self, 'town') and self.town else '---'
        return u'Skupinový kontakt %s, %s, %s' % (self.type.title, town, self.date)
def __sync_many(e, instance, src_attr, target_attr):
    """Make the m2m relation ``e.<src_attr>`` mirror ``instance.<target_attr>``.

    Adds ids missing from the target relation, then removes ids that are no
    longer wanted; untouched memberships are left alone.
    """
    current = set(getattr(e, src_attr).values_list('id', flat=True))
    wanted = set(getattr(instance, target_attr).values_list('id', flat=True))
    for pk in wanted - current:
        getattr(e, src_attr).add(pk)
    for pk in current - wanted:
        getattr(e, src_attr).remove(pk)
def __get_or_create_encounter(client, instance, services):
    """Ensure *client* has an Encounter bound to GroupContact *instance*.

    The encounter's workers, date and place are re-synchronised with the
    group contact on every call; one service record per class in *services*
    is created (or refreshed) on the encounter.  Returns ``(encounter,
    created)`` like Django's ``get_or_create``.
    """
    e, created = Encounter.objects.get_or_create(person=client, is_by_phone=False, group_contact=instance,
                                                 defaults={'performed_on': instance.date, 'where': instance.town})
    __sync_many(e, instance, 'performed_by', 'users')
    e.performed_on = instance.date
    e.where = instance.town
    e.save()
    for service in services:
        # NOTE(review): 'service' is a service *class* here; the
        # service.service.model.real_content_type() chain comes from the
        # services app's proxy-model machinery - confirm against
        # boris.services.models.core before changing.
        ct = service.service.model.real_content_type()
        s, _ = service.objects.get_or_create(encounter=e, content_type=ct)
        s.title = _group_service_title(instance, service)
        s.save()
    return e, created
def __delete_group_encounters(encs, group_contact):
    """Strip group-counselling traces from *encs*.

    An encounter whose only service is the group-counselling one is deleted
    outright; otherwise only its group-counselling services are removed.
    """
    for enc in encs:
        all_services = enc.services.all()
        group_services = __filter_group_services(group_contact, all_services)
        if all_services.count() == 1 and group_services.exists():
            enc.delete()
        else:
            group_services.delete()
def __filter_group_services(group_contact, services):
    # Narrow *services* down to group-counselling records.
    # NOTE(review): group_contact is currently unused; kept for interface
    # symmetry with the call sites.
    return services.filter(content_type=GroupCounselling.real_content_type())
def __correct_contact(encs, instance):
    """Re-save group services and their encounters so denormalised data refreshes."""
    for enc in encs:
        group_services = __filter_group_services(instance, enc.services)
        for svc in group_services:
            svc.save()
        enc.save()
@receiver(signals.m2m_changed, sender=GroupContact.clients.through)
def create_group_encounters(sender, instance, action, *args, **kwargs):
    """m2m_changed handler: keep one group encounter per client of *instance*."""
    # group contact serves as a way to create many group counselling encounters at a time
    # this signal is used for creating encounters
    for client in instance.clients.all():
        __get_or_create_encounter(client, instance, [GroupCounselling])
    if instance.clients.exists():
        # this is needed because django sends signals pretty erratically :(
        delete_excess_group_encounters(sender, instance)
@receiver(signals.post_save, sender=GroupContact)
def delete_excess_group_encounters(sender, instance, *args, **kwargs):
    """post_save handler: drop encounters for clients removed from the group."""
    # this signal is used for deleting encounters for removed clients
    all_encs = Encounter.objects.filter(group_contact=instance)
    # People with an encounter but no longer among the group's clients.
    to_delete = set(all_encs.values_list('person', flat=True)) - set(instance.clients.values_list('pk', flat=True))
    for client_pk in to_delete:
        encs = all_encs.filter(person__pk=client_pk)
        __delete_group_encounters(encs, instance)
@receiver(signals.pre_delete, sender=GroupContact)
def save_group_contact3(sender, instance, using, signal, *args, **kwargs):
    """pre_delete handler: clean up the encounters this group contact generated."""
    encs = Encounter.objects.filter(group_contact=instance)
    __delete_group_encounters(encs, instance)
class Anonymous(Person):
    """An anonymous person, identified only by sex and drug-user type."""
    drug_user_type = models.PositiveSmallIntegerField(
        choices=ANONYMOUS_TYPES, verbose_name=_(u'Typ'))
    sex = models.PositiveSmallIntegerField(choices=SEXES, verbose_name=_(u'Pohlaví'))

    class Meta:
        verbose_name = _(u'Anonym')
        verbose_name_plural = _(u'Anonymové')
        # There is exactly one Anonymous record per (sex, type) combination.
        unique_together = ('sex', 'drug_user_type')

    def __unicode__(self):
        return u'%s - %s' % (self.get_sex_display(), self.get_drug_user_type_display())

    def is_default_service(self, service):
        """Returns True if ``service`` is default for this person, False otherwise"""
        return service.class_name() == 'Approach'
class ClientManager(models.Manager):
    """Manager adding age-based filtering for clients."""

    def filter_by_age(self, age_from=None, age_to=None):
        """Return clients whose current age lies in [age_from, age_to].

        Either bound may be None (unbounded).  Ages are computed from
        ``birthdate`` relative to today.
        """
        today = datetime.date.today()
        qs = self.all()
        if age_from is not None:
            # Born at most age_from years ago -> at least age_from years old.
            qs = qs.filter(birthdate__lte=today - relativedelta(years=age_from))
        if age_to is not None:
            # Born strictly after (age_to + 1) years ago -> at most age_to.
            qs = qs.filter(birthdate__gt=today - relativedelta(years=age_to + 1))
        return qs
class Client(Person):
    """A registered client, identified by a unique (upper-cased) code."""
    code = models.CharField(max_length=63, unique=True, verbose_name=_(u'Kód'))
    sex = models.PositiveSmallIntegerField(choices=SEXES, verbose_name=_(u'Pohlaví'))
    first_name = models.CharField(max_length=63, blank=True, null=True,
                                  verbose_name=_(u'Jméno'))
    last_name = models.CharField(max_length=63, blank=True, null=True,
                                 verbose_name=_(u'Příjmení'))
    birthdate = models.DateField(verbose_name=_(u'Datum narození'), blank=True, null=True,
                                 help_text=_(u'Pokud znáte pouze rok, zaškrtněte políčko `Známý pouze rok`.'))
    birthdate_year_only = models.BooleanField(default=False,
                                              verbose_name=_(u'Známý pouze rok'))
    town = models.ForeignKey(Town, verbose_name=_(u'Město'))
    primary_drug = models.PositiveSmallIntegerField(blank=True, null=True,
                                                    choices=DRUGS, verbose_name=_(u'Primární droga'))
    primary_drug_usage = models.PositiveSmallIntegerField(blank=True, null=True,
                                                          choices=DRUG_APPLICATION_TYPES, verbose_name=_(u'Způsob aplikace'))
    close_person = models.BooleanField(default=False,
                                       verbose_name=_(u'Osoba blízká (rodiče, sex. partneři apod.)'))
    phone_number = models.CharField(max_length=30, blank=True, null=True, verbose_name=_(u'Telefonní číslo'))
    email = models.EmailField(max_length=80, blank=True, null=True, verbose_name=_(u'E-mail'))

    class Meta:
        verbose_name = _(u'Klient')
        verbose_name_plural = _(u'Klienti')

    objects = ClientManager()

    def __unicode__(self):
        return self.code

    @property
    def age(self):
        """Return the client's age in years, if known."""
        return self.get_relative_age(datetime.date.today())

    def get_relative_age(self, relative_to):
        """Age in whole years as of *relative_to*; None when birthdate is unknown."""
        if not self.birthdate:
            return None
        # Tuple comparison subtracts 1 when the birthday hasn't occurred yet.
        age = relative_to.year - self.birthdate.year - ((relative_to.month, relative_to.day) < (self.birthdate.month, self.birthdate.day))
        return age

    @property
    def hygiene_report_code(self):
        """Code used in hygiene-station reports: 'YY0000/' + letters from the code."""
        # NOTE(review): code[5:8] yields only 2 characters when
        # len(code) == 7, although the guard admits it - confirm intended.
        code = (str(self.birthdate.year)[2:] if self.birthdate else '??') + '0000/'
        code += self.code[5:8].upper() if len(self.code) >= 7 else '???'
        return code

    def is_default_service(self, service):
        """Returns True if ``service`` is default for this person, False otherwise"""
        return service.class_name() == 'HarmReduction'

    def save(self, *args, **kwargs):
        # Codes are stored upper-case so they are effectively case-insensitive.
        if self.code:
            self.code = self.code.upper()
        super(Client, self).save(*args, **kwargs)
class Anamnesis(TimeStampedModel, AdminLinkMixin):
    """Income anamnesis - the intake questionnaire recorded for a client.

    One-to-one with :class:`Client`; aggregates drug-usage rows
    (:class:`DrugUsage`) and disease tests (:class:`DiseaseTest`) through
    lazily-cached properties.
    """
    client = models.OneToOneField(Client, verbose_name=_(u'Klient'))
    filled_when = models.DateField(verbose_name=_(u'Datum kontaktu'))
    filled_where = models.ForeignKey(Town, verbose_name=_(u'Město kontaktu'))
    author = models.ForeignKey(User, verbose_name=_(u'Vyplnil'))
    nationality = models.PositiveSmallIntegerField(choices=NATIONALITIES,
        default=NATIONALITIES.UNKNOWN, verbose_name=_(u'Státní příslušnost'))
    ethnic_origin = models.PositiveSmallIntegerField(choices=ETHNIC_ORIGINS,
        default=ETHNIC_ORIGINS.NOT_MONITORED, verbose_name=_(u'Etnická příslušnost'))
    living_condition = models.PositiveSmallIntegerField(choices=LIVING_CONDITIONS,
        default=LIVING_CONDITIONS.UNKNOWN, verbose_name=_(u'Bydlení (s kým klient žije)'))
    accomodation = models.PositiveSmallIntegerField(choices=ACCOMODATION_TYPES,
        default=ACCOMODATION_TYPES.UNKNOWN, verbose_name=_(u'Bydlení (kde klient žije)'))
    lives_with_junkies = models.NullBooleanField(verbose_name=_(u'Žije klient s osobou užívající drogy?'))
    employment = models.PositiveSmallIntegerField(choices=EMPLOYMENT_TYPES,
        default=EMPLOYMENT_TYPES.UNKNOWN, verbose_name=_(u'Zaměstnání / škola'))
    education = models.PositiveSmallIntegerField(choices=EDUCATION_LEVELS,
        default=EDUCATION_LEVELS.UNKNOWN, verbose_name=_(u'Vzdělání'))
    been_cured_before = models.BooleanField(verbose_name=_(u'Dříve léčen'), default=None)
    been_cured_currently = models.BooleanField(verbose_name=_(u'Nyní léčen'), default=None)

    @property
    def birth_year(self):
        # NOTE(review): Client does not define ``birth_year`` in this module
        # (only ``birthdate``) - verify this attribute exists elsewhere.
        return self.client.birth_year

    @property
    def client_code(self):
        return self.client.code

    @property
    def sex(self):
        return self.client.sex

    def __unicode__(self):
        return _(u'Anamnéza: %s') % self.client

    class Meta:
        verbose_name = _(u'Anamnéza')
        verbose_name_plural = _(u'Anamnézy')

    # BUGFIX: the original memoisation tested ``hasattr(self, '__drug_info')``
    # while assigning ``self.__drug_info``.  Inside a class body the
    # *assignment* is name-mangled to ``_Anamnesis__drug_info`` but the string
    # literal passed to hasattr() is not, so the check was always False and
    # every access re-ran the database queries.  Single-underscore cache
    # attributes make the memoisation actually work.

    @property
    def drug_info(self):
        """Drug-usage records for this anamnesis, primary drug first (cached)."""
        if not hasattr(self, '_drug_info_cache'):
            records = DrugUsage.objects.filter(anamnesis=self)
            # Sort key prefixes '1' for the primary drug, '2' otherwise,
            # then falls back to the pk as a string.
            self._drug_info_cache = sorted(list(records),
                key=lambda di: '%s%s' % (
                    '1' if di.is_primary else '2',
                    str(di.pk)
                ))
        return self._drug_info_cache

    @property
    def disease_test_results(self):
        """Mapping disease display name -> DiseaseTest (None when untested); cached."""
        if not hasattr(self, '_disease_test_results_cache'):
            results = dict((c[1], None) for c in DISEASE_TEST_RESULTS)
            for t in DiseaseTest.objects.filter(anamnesis=self):
                results[t.get_disease_display()] = t
            self._disease_test_results_cache = results
        return self._disease_test_results_cache

    @property
    def overall_first_try_age(self):
        """Youngest first-try age over all used drugs, or None; cached."""
        if not hasattr(self, '_overall_first_try_age_cache'):
            ages = [d.first_try_age for d in self.drug_info]
            self._overall_first_try_age_cache = min(ages) if ages else None
        return self._overall_first_try_age_cache

    @property
    def is_intravenous_user(self):
        """True when any drug is applied by vein or muscle injection."""
        return any([di.application in (DRUG_APPLICATION_TYPES.VEIN_INJECTION,
                                       DRUG_APPLICATION_TYPES.MUSCLE_INJECTION)
                    for di in self.drug_info])

    @property
    def intravenous_first_try_age(self):
        """Youngest first intravenous-use age, or None; cached."""
        if not hasattr(self, '_intravenous_first_try_age_cache'):
            ages = [d.first_try_iv_age for d in self.drug_info if d.first_try_iv_age]
            self._intravenous_first_try_age_cache = min(ages) if ages else None
        return self._intravenous_first_try_age_cache
class ClientNote(models.Model):
    """A free-text note an author attaches to a client, newest first."""
    author = models.ForeignKey(User, verbose_name=_(u'Autor'),
                               related_name='notes_added')
    client = models.ForeignKey(Client, verbose_name=_(u'Klient'),
                               related_name='notes')
    # NOTE: this field name shadows the stdlib 'datetime' module inside the
    # class body; the default is evaluated per-save (callable).
    datetime = models.DateTimeField(default=datetime.datetime.now,
                                    verbose_name=_(u'Datum a čas'))
    text = models.TextField(verbose_name=_(u'Text'))

    def __unicode__(self):
        return u"%s -> %s, %s" % (self.author, self.client,
                                  format(self.datetime, get_format('DATE_FORMAT')))

    class Meta:
        verbose_name = _(u'Poznámka')
        verbose_name_plural = _(u'Poznámky')
        ordering = ('-datetime', '-id')
class DrugUsage(models.Model):
    """One drug used by a client, as recorded on the anamnesis."""
    drug = models.PositiveSmallIntegerField(choices=DRUGS, verbose_name=_(u'Droga'))
    anamnesis = models.ForeignKey(Anamnesis, verbose_name=_(u'Anamnéza'))
    application = models.PositiveSmallIntegerField(choices=DRUG_APPLICATION_TYPES, null=True, blank=True,
                                                   verbose_name=_(u'Aplikace'))
    frequency = models.PositiveSmallIntegerField(choices=DRUG_APPLICATION_FREQUENCY, null=True, blank=True,
                                                 verbose_name=_(u'Četnost'))
    first_try_age = models.PositiveSmallIntegerField(null=True, blank=True,
                                                     verbose_name=_(u'První užití (věk)'))
    first_try_iv_age = models.PositiveSmallIntegerField(null=True, blank=True,
                                                        verbose_name=_(u'První i.v. užití (věk)'))
    first_try_application = models.PositiveSmallIntegerField(choices=DRUG_APPLICATION_TYPES, null=True, blank=True,
                                                             verbose_name=_(u'Způsob prvního užití'))
    was_first_illegal = models.NullBooleanField(verbose_name=_(u'První neleg. droga'))
    is_primary = models.BooleanField(verbose_name=_(u'Primární droga'), default=None)
    note = models.TextField(null=True, blank=True, verbose_name=_(u'Poznámka'))

    def __unicode__(self):
        return unicode(self.get_drug_display())

    class Meta:
        verbose_name = _(u'Užívaná droga')
        verbose_name_plural = _(u'Užívané drogy')
        # Each drug appears at most once per anamnesis.
        unique_together = ('drug', 'anamnesis')
class RiskyManners(models.Model):
    """Past vs. present frequency of one risky-behavior kind per anamnesis."""
    behavior = models.PositiveIntegerField(choices=RISKY_BEHAVIOR_KIND)
    anamnesis = models.ForeignKey(Anamnesis, verbose_name=_(u'Anamnéza'))
    periodicity_in_past = models.PositiveIntegerField(blank=True, null=True,
        choices=RISKY_BEHAVIOR_PERIODICITY,
        verbose_name=_(u'Jak často v minulosti'))
    periodicity_in_present = models.PositiveIntegerField(blank=True, null=True,
        choices=RISKY_BEHAVIOR_PERIODICITY,
        verbose_name=_(u'Jak často v přítomnosti'))

    def __unicode__(self):
        # NOTE(review): renders the raw integer; get_behavior_display() would
        # show the human-readable label - confirm before changing the output.
        return u'%s: %s' % (self.anamnesis.client, self.behavior)

    class Meta:
        verbose_name = _(u'Rizikové chování')
        verbose_name_plural = _(u'Riziková chování')
        unique_together = ('behavior', 'anamnesis')
class DiseaseTest(models.Model):
    """Result of one disease test attached to an anamnesis."""
    anamnesis = models.ForeignKey(Anamnesis)
    disease = models.PositiveSmallIntegerField(choices=DISEASES,
                                               verbose_name=_(u'Testované onemocnění'))
    result = models.SmallIntegerField(choices=DISEASE_TEST_RESULTS,
                                      default=DISEASE_TEST_RESULTS.UNKNOWN, verbose_name=_(u'Výsledek testu'))
    # The default is the callable, so it is evaluated at save time.
    date = models.DateField(verbose_name=_(u'Datum'), null=True, blank=True, default=datetime.datetime.now)

    def __unicode__(self):
        # NOTE(review): returns the raw integer code, not
        # get_disease_display() - confirm before changing the output.
        return unicode(self.disease)

    class Meta:
        verbose_name = _(u'Vyšetření onemocnění')
        verbose_name_plural = _(u'Vyšetření onemocnění')
def get_client_card_filename(instance, filename):
    """Build the upload path for a client's attachment:
    ``client_notes/<client pk>/<original filename>``."""
    directory = 'client_notes/{0}'.format(instance.client.pk)
    return '{0}/{1}'.format(directory, filename)
class ClientCard(models.Model):
    """File attachment belonging to a client's card."""
    client = models.ForeignKey(Client, related_name='client_card')
    file = models.FileField(upload_to=get_client_card_filename)

    class Meta:
        verbose_name = u'Přílohy'  # makes more sense from user's perspective
        verbose_name_plural = u'Přílohy'

    def save(self, *args, **kwargs):
        # When the file is replaced on an existing row, remove the previous
        # file from storage so attachments do not accumulate orphans.
        try:
            old_instance = ClientCard.objects.get(id=self.id)
            if old_instance.file != self.file:
                old_instance.file.delete(save=False)
        except ClientCard.DoesNotExist:
            # New row - nothing to clean up.
            pass
        super(ClientCard, self).save(*args, **kwargs)

    def delete(self, using=None):
        # Remove the stored file together with the database row.
        self.file.delete(save=False)
        super(ClientCard, self).delete(using=using)
class TerrainNotes(models.Model, AdminLinkMixin):
    """A field-work diary entry: who, where, when and what was noted."""
    users = models.ManyToManyField('auth.User', verbose_name=_('Kdo'))
    town = models.ForeignKey('clients.Town', related_name='+', verbose_name=_(u'Město'))
    date = models.DateField(verbose_name=_(u'Kdy'))
    note = models.TextField(verbose_name=_(u'Zápis'), blank=True)

    class Meta:
        verbose_name = _(u'Zápis z terénu')
        verbose_name_plural = _(u'Zápisy z terénu')
| {
"content_hash": "12465b1b8f42ff5102f0581e49ff7832",
"timestamp": "",
"source": "github",
"line_count": 544,
"max_line_length": 138,
"avg_line_length": 39.50919117647059,
"alnum_prop": 0.6587726236449076,
"repo_name": "fragaria/BorIS",
"id": "c3ee8763d242e6d471d8aab2aa8a2cabeca739b2",
"size": "21659",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boris/clients/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "302491"
},
{
"name": "HTML",
"bytes": "148721"
},
{
"name": "JavaScript",
"bytes": "208867"
},
{
"name": "Python",
"bytes": "396225"
}
],
"symlink_target": ""
} |
from neutron_lib.api.definitions import network as net_def
from neutron_lib.api.definitions import vlantransparent as vlan_apidef
from neutron.db import _resource_extend as resource_extend
@resource_extend.has_resource_extenders
class Vlantransparent_db_mixin(object):
    """Mixin class to add vlan transparent methods to db_base_plugin_v2."""

    @staticmethod
    @resource_extend.extends([net_def.COLLECTION_NAME])
    def _extend_network_dict_vlan_transparent(network_res, network_db):
        # Copy the persisted vlan_transparent flag onto the network API dict.
        network_res[vlan_apidef.VLANTRANSPARENT] = (
            network_db.vlan_transparent)
        return network_res
| {
"content_hash": "55c624b550c19fa77099ad1ce92227bd",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 75,
"avg_line_length": 38.5,
"alnum_prop": 0.7483766233766234,
"repo_name": "noironetworks/neutron",
"id": "066371027aa1ba18ed94e0775dfe7cdffc5d6fc7",
"size": "1254",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/db/vlantransparent_db.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "11420614"
},
{
"name": "Shell",
"bytes": "38791"
}
],
"symlink_target": ""
} |
import time
from airflow.models import DAG
from airflow.operators.dummy import DummyOperator
from airflow.utils.context import Context
from airflow.utils.timezone import datetime
class DummyWithOnKill(DummyOperator):
    """Operator used by task-kill tests: signals its start via a marker file
    and records that on_kill ran via a second marker file."""

    def execute(self, context: Context):
        import os

        self.log.info("Signalling that I am running")
        # signal to the test that we've started
        with open("/tmp/airflow_on_kill_running", "w") as f:
            f.write("ON_KILL_RUNNING")
        self.log.info("Signalled")

        # This runs extra processes, so that we can be sure that we correctly
        # tidy up all processes launched by a task when killing
        if not os.fork():
            os.system('sleep 10')
        time.sleep(10)

    def on_kill(self):
        # Leave a marker proving that the kill hook actually executed.
        self.log.info("Executing on_kill")
        with open("/tmp/airflow_on_kill_killed", "w") as f:
            f.write("ON_KILL_TEST")
        self.log.info("Executed on_kill")
# DAG used by tests that kill a running task: DummyWithOnKill signals its
# start via /tmp/airflow_on_kill_running and records that the on_kill hook
# ran via /tmp/airflow_on_kill_killed.
dag1 = DAG(dag_id='test_on_kill', start_date=datetime(2015, 1, 1))
dag1_task1 = DummyWithOnKill(task_id='task1', dag=dag1, owner='airflow')
| {
"content_hash": "dfef58b8a412ed2a3e94ebcfa7e90251",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 77,
"avg_line_length": 33.05555555555556,
"alnum_prop": 0.6630252100840336,
"repo_name": "mistercrunch/airflow",
"id": "e4000b8a9e7616eec9301ff4a3510d495880144e",
"size": "1977",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/dags/test_on_kill.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36341"
},
{
"name": "HTML",
"bytes": "99243"
},
{
"name": "JavaScript",
"bytes": "891460"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "773270"
},
{
"name": "Shell",
"bytes": "5659"
}
],
"symlink_target": ""
} |
import unittest
from model import Cell, Module, OutputType, CellType, Mux
import random
class CellTests (unittest.TestCase):
    """Unit tests for the Cell model: async/sync outputs, operator choice,
    input stability and clocked output history.

    NOTE(review): ``OutputType.async`` below is a syntax error on
    Python >= 3.7 where ``async`` is a hard keyword - this module only
    parses on older interpreters.
    """

    def setUp(self):
        self.c = Cell()

    def tearDown(self):
        pass

    # --- asynchronous (combinational) output -------------------------------

    def testAsyncOutputFalseWhenBothInputsFalse(self):
        self.c.driveInputs([False, False])
        self.assertFalse(self.c.asyncOutput())

    def testAsyncOutputFalseWhenOneInputFalse(self):
        self.c.driveInputs([True, False])
        self.assertFalse(self.c.asyncOutput())

    def testAsyncOutputTrueWhenBothInputsTrue(self):
        self.c.driveInputs([True, True])
        self.assertTrue(self.c.asyncOutput())

    # --- synchronous (clocked) output --------------------------------------

    def testSyncOutputResetsToFalse(self):
        self.assertFalse(self.c.syncOutput())

    def testSyncOutputFalseWhenBothInputsFalse(self):
        self.c.driveInputs([False, False])
        self.c.clk()
        self.assertFalse(self.c.syncOutput())

    def testSyncOutputTrueWhenBothInputsTrue(self):
        self.c.driveInputs([True, True])
        self.c.clk()
        self.assertTrue(self.c.syncOutput())

    def testSyncOutputUpdatesWith2ndClk(self):
        self.c.driveInputs([True, True])
        self.c.clk()
        self.c.driveInputs([False, False])
        self.c.clk()
        self.assertFalse(self.c.syncOutput())

    def testSyncOutputHolds(self):
        # Output must not change when clocked without new input values.
        self.c.driveInputs([True, True])
        self.c.clk()
        self.c.clk()
        self.assertTrue(self.c.syncOutput())

    # --- input stability ----------------------------------------------------

    def testAsyncStableWhenFalse(self):
        self.c.driveInputs([False, False])
        self.c.driveInputs([False, False])
        self.assertTrue(self.c.isStable())

    def testAsyncStableWhenBothTrue(self):
        self.c.driveInputs([True, True])
        self.c.driveInputs([True, True])
        self.assertTrue(self.c.isStable())

    def testAsyncStableWhenBothFalse(self):
        self.c.driveInputs([False, False])
        self.c.driveInputs([False, False])
        self.assertTrue(self.c.isStable())

    def testAsyncNotStableWhenAChanges(self):
        self.c.driveInputs([True, True])
        self.c.driveInputs([False, True])
        self.assertFalse(self.c.isStable())

    def testAsyncNotStableWhenBChanges(self):
        self.c.driveInputs([True, True])
        self.c.clk()
        self.c.driveInputs([True, False])
        self.c.clk()
        self.assertFalse(self.c.isStable())

    # --- configurable operator ---------------------------------------------

    def testCellCanBeOr(self):
        self.c.setOperator(CellType._or)
        self.c.driveInputs([False, True])
        self.assertTrue(self.c.asyncOutput())

    def testCellCanBeXor(self):
        self.c.setOperator(CellType._xor)
        self.c.driveInputs([True, True])
        self.assertFalse(self.c.asyncOutput())
        self.c.driveInputs([False, False])
        self.assertFalse(self.c.asyncOutput())
        self.c.driveInputs([False, True])
        self.assertTrue(self.c.asyncOutput())

    # --- output type selection ---------------------------------------------

    def testSetForAsyncOutput(self):
        self.c.setOutputType(OutputType.async)
        self.c.driveInputs([True, True])
        self.assertTrue(self.c.output())

    def testSetForSyncOutput(self):
        self.c.setOutputType(OutputType.sync)
        self.c.driveInputs([True, True])
        self.assertFalse(self.c.output())
        self.c.clk()
        self.assertTrue(self.c.output())

    def testGetOutputType(self):
        self.c.setOutputType(OutputType.sync)
        self.assertTrue(self.c.getOutputType() == OutputType.sync)

    # --- output history -----------------------------------------------------

    def testCellHistory(self):
        self.c.setOutputType(OutputType.sync)
        self.c.driveInputs([True, True])
        for i in range(50):
            if i == 49:
                self.c.driveInputs([False, False])
            self.c.clk()
            self.c.output()
        self.assertEqual(len(self.c.cellHistory()), 50)
        self.assertEqual(self.c.cellHistory(), [True] * 49 + [False])

    def testCellHistoryFixed(self):
        self.c.setOutputType(OutputType.sync)
        self.c.driveInputs([True, True])
        for i in range(50):
            self.c.clk()
            self.c.output()
        self.assertTrue(self.c.cellHistoryFixed())

    def testCellHistoryNotFixed(self):
        self.c.setOutputType(OutputType.sync)
        self.c.driveInputs([True, True])
        self.c.clk()
        self.c.output()
        self.c.driveInputs([False, True])
        self.c.clk()
        self.c.output()
        self.assertFalse(self.c.cellHistoryFixed())

    def testNoCellHistoryForAsync(self):
        self.c.setOutputType(OutputType.async)
        self.c.driveInputs([True, True])
        self.c.output()
        self.c.driveInputs([False, True])
        self.c.output()
        self.assertFalse(self.c.cellHistoryFixed())
class ModuleTests (unittest.TestCase):
    """Tests for Module: grid construction, flop conversion, output muxing
    and fixed-output/fixed-cell detection.

    Equality checks use assertEqual/assertIsNone rather than
    assertTrue(a == b) so failures report the mismatching values.
    """
    def setUp(self):
        self.m = Module()
    def tearDown(self):
        pass
    def depth(self):
        """Number of rows in the module's cell grid."""
        return len(self.m.cells)
    def width(self):
        """Number of cells in the first row of the grid."""
        return len(self.m.cells[0])
    def createGridAndTieCell0Input(self, wIn, wOut, width, depth=1, initValForCell0=False):
        """Build a wIn-input/wOut-output grid and tie cell 0 to a constant."""
        self.m.createGrid(wIn, wOut, width, depth)
        self.m.tieCell0([initValForCell0])
    def testInit4x1(self):
        self.createGridAndTieCell0Input(4, 4, 4, 1)
        self.assertEqual(self.depth(), 1)
        self.assertEqual(self.width(), 4)
    def testInitNxN(self):
        self.createGridAndTieCell0Input(7, 7, 7, 6)
        self.assertEqual(self.depth(), 6)
        self.assertEqual(self.width(), 7)
    def test2x1AndTiedLow(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1)
        self.m.driveInputs([True, True])
        self.assertEqual(self.m.sampleOutputs(), [False, False])
    def test2x1AndTiedHigh(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)
        self.m.driveInputs([True, True])
        self.assertEqual(self.m.sampleOutputs(), [True, True])
    def test3x1AndTiedHigh(self):
        self.createGridAndTieCell0Input(3, 3, 3, 1, True)
        self.m.driveInputs([True, True, False])
        self.assertEqual(self.m.sampleOutputs(), [True, True, False])
    def test2x2AndTiedHigh(self):
        self.createGridAndTieCell0Input(2, 2, 2, 2, True)
        self.m.driveInputs([True, True])
        self.assertEqual(self.m.sampleOutputs(), [True, True])
        self.m.driveInputs([True, False])
        self.assertEqual(self.m.sampleOutputs(), [False, False])
    def test3x2AndTiedHigh(self):
        self.createGridAndTieCell0Input(3, 3, 3, 2, True)
        self.m.driveInputs([True, True, True])
        self.assertEqual(self.m.sampleOutputs(), [True, True, True])
        self.m.driveInputs([True, False, True])
        self.assertEqual(self.m.sampleOutputs(), [False, False, False])
    def testFixNumberOfFlopsTo0(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(0)
        self.assertEqual(self.m.getNumFlops(), 0)
    def testFixNumberOfFlopsToLtWidth(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(17)
        self.assertEqual(self.m.getNumFlops(), 17)
    def testFixNumberOfFlopsToGtWidth(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(28)
        self.assertEqual(self.m.getNumFlops(), 28)
    def testFixNumberOfFlopsToMax(self):
        self.createGridAndTieCell0Input(25, 25, 25, 14, True)
        self.m.setNumFlops(25 * 14)
        self.assertEqual(self.m.getNumFlops(), 25 * 14)
    def test2x1FloppedAndTiedHigh(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)
        self.m.setNumFlops(2)
        self.m.driveInputs([True, True])
        # Flopped outputs need a clock edge before new values appear.
        self.m.clk()
        self.assertEqual(self.m.sampleOutputs(), [True, False])
        self.m.clk()
        self.assertEqual(self.m.sampleOutputs(), [True, True])
    def testOutputMuxOnlyExistsWhenOutputSmallerThanInputWidth(self):
        self.createGridAndTieCell0Input(2, 2, 2)
        self.assertIsNone(self.m.outputMux)
    def testOutputMuxForMoreInputsThanOutputs(self):
        self.createGridAndTieCell0Input(2, 1, 2)
        self.assertIsNotNone(self.m.outputMux)
    def testOutputSizeFor2Inputs1Output(self):
        self.createGridAndTieCell0Input(2, 1, 2)
        self.m.driveInputs([True, True])
        self.assertEqual(len(self.m.sampleOutputs()), 1)
    def testOutputFor2Inputs1Output(self):
        self.createGridAndTieCell0Input(2, 1, 2, 1, True)
        self.m.driveInputs([True, True])
        self.assertEqual(self.m.sampleOutputs(), [ True ])
    def testOutputFor3Inputs2Output(self):
        self.createGridAndTieCell0Input(3, 2, 3, 1, True)
        self.m.driveInputs([True, True, False])
        self.assertEqual(self.m.sampleOutputs(), [ True, False ])
    def testOutputFor4Inputs3Output(self):
        self.createGridAndTieCell0Input(4, 3, 4, 1, True)
        self.m.driveInputs([True, True, True, False])
        self.assertEqual(self.m.sampleOutputs(), [ True, True, False ])
    def testOutputFor5Inputs4Output(self):
        self.createGridAndTieCell0Input(5, 4, 5, 1, True)
        self.m.driveInputs([True, True, True, False, False])
        self.assertEqual(self.m.sampleOutputs(), [ True, True, False, False ])
    def testOutputFor8Inputs5Output(self):
        self.createGridAndTieCell0Input(8, 5, 8, 1, True)
        self.m.driveInputs([True] * 6 + [False, False])
        self.assertEqual(self.m.sampleOutputs(), [ True, True, True, False, False ])
    def testModuleHasFixedCells(self):
        self.createGridAndTieCell0Input(2, 2, 2)
        self.m.setNumFlops(2)
        self.m.driveInputs([True] * 2)
        self.m.clk()
        self.m.sampleOutputs()
        self.m.clk()
        self.m.sampleOutputs()
        self.assertTrue(self.m.moduleHasFixedCells())
    def testModuleHasNoFixedCells(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)
        self.m.cells[0][1].setOutputType(OutputType.sync)
        self.m.driveInputs([True] * 2)
        self.m.clk()
        self.m.sampleOutputs()
        self.m.driveInputs([False] * 2)
        self.m.clk()
        self.m.sampleOutputs()
        self.assertFalse(self.m.moduleHasFixedCells())
    def testOutputHistory(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)
        self.m.driveInputs([True, True])
        # Each sampleOutputs() call appends one entry to the history.
        self.m.sampleOutputs()
        self.m.sampleOutputs()
        self.m.sampleOutputs()
        self.assertEqual(len(self.m.outputHistory()), 3)
        self.assertEqual(self.m.outputHistory(), [ [True, True], [True, True], [True, True] ])
        self.assertTrue(self.m.outputsFixed())
    def testOutputsNotFixed(self):
        self.createGridAndTieCell0Input(2, 2, 2, 1, True)
        self.m.driveInputs([True, True])
        self.m.sampleOutputs()
        self.m.driveInputs([False, False])
        self.m.sampleOutputs()
        self.assertFalse(self.m.outputsFixed())
    def testOutputFor1Input2Outputs(self):
        self.createGridAndTieCell0Input(1, 2, 2, 1, True)
        self.m.driveInputs([True])
        self.assertEqual(self.m.sampleOutputs(), [ True, True ])
    def testOutputFor2Input4Outputs(self):
        self.createGridAndTieCell0Input(2, 4, 4, 1, True)
        self.m.driveInputs([True, True])
        self.assertEqual(self.m.sampleOutputs(), [ True, True ] * 2)
    def testOutputForLargerGridWidth(self):
        self.createGridAndTieCell0Input(2, 4, 6, 1, True)
        self.m.driveInputs([True, True])
        self.assertEqual(self.m.sampleOutputs(), [ True, True ] * 2)
class MuxTests (unittest.TestCase):
    """Unit tests covering Mux select decoding and asynchronous output."""
    def setUp(self):
        self.m = Mux()
    def tearDown(self):
        pass
    def checkSelect(self, inputs, expected):
        """Drive *inputs* onto the mux and verify the decoded select index."""
        self.m.driveInputs(inputs)
        self.assertEqual(self.m.inputSelect(), expected)
    def testInputSelect2InputSelect0(self):
        self.checkSelect([False, True], 0)
    def testInputSelect2InputSelect1(self):
        self.checkSelect([True, True], 1)
    def testInputSelect3InputSelect0(self):
        self.checkSelect([False, False, True], 0)
    def testInputSelect3InputSelect1(self):
        self.checkSelect([True, False, True], 1)
    def testInputSelect3InputSelect2(self):
        self.checkSelect([False, True, True], 2)
    def testInputSelect3InputSelectOverflow(self):
        # All-high inputs decode to the highest valid select index (2).
        self.checkSelect([True, True, True], 2)
    def testInputSelect4InputSelect3(self):
        self.checkSelect([True, True, True, False], 3)
    def test2InputSelect0(self):
        self.m.driveInputs([False, False])
        self.assertFalse(self.m.asyncOutput())
    def test2InputSelect1(self):
        self.m.driveInputs([True, True])
        self.assertTrue(self.m.asyncOutput())
    def test4InputSelect3(self):
        self.m.driveInputs([True, True, True, False])
        self.assertFalse(self.m.asyncOutput())
# Run the full test suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| {
"content_hash": "4942086505e2933a6b2ee651d565f0ea",
"timestamp": "",
"source": "github",
"line_count": 413,
"max_line_length": 91,
"avg_line_length": 29.055690072639226,
"alnum_prop": 0.694,
"repo_name": "nosnhojn/poser",
"id": "c61623b37bddc297ff4efcaa0c5fed293a59e6f0",
"size": "12000",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Forth",
"bytes": "114"
},
{
"name": "Python",
"bytes": "56809"
},
{
"name": "SystemVerilog",
"bytes": "7791"
},
{
"name": "Verilog",
"bytes": "30466"
}
],
"symlink_target": ""
} |
from os.path import exists
from setuptools import setup, find_packages
import versioneer
# Read runtime dependencies (one per line) from requirements.txt.
# A context manager closes the file promptly instead of leaking the handle
# until garbage collection; str.split already returns a list, so no extra
# list() wrapper is needed.
with open('requirements.txt') as requirements_file:
    requirements = requirements_file.read().strip().split('\n')

setup(name='topik',
      version=versioneer.get_version(),
      cmdclass=versioneer.get_cmdclass(),
      description='A Topic Modeling toolkit',
      url='http://github.com/ContinuumIO/topik/',
      author='Topik development team',
      author_email='msarahan@continuum.io',
      license='BSD',
      keywords='topic modeling lda nltk gensim pattern',
      packages=find_packages(),
      package_data={'topik': ['R/runLDAvis.R']},
      setup_requires=['setuptools-markdown'],
      # README.md is rendered into long_description by setuptools-markdown.
      long_description_markdown_filename='README.md',
      install_requires=requirements,
      entry_points={
          'console_scripts': ['topik = topik.simple_run.cli:run']
      },
      zip_safe=False)
| {
"content_hash": "3c6ed9049ddfa61d28c9da13f3f840ec",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 81,
"avg_line_length": 37.77272727272727,
"alnum_prop": 0.6594464500601684,
"repo_name": "ContinuumIO/topik",
"id": "0c854e1334f6e53bf0b3ebcf275179b9a21da894",
"size": "854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5004"
},
{
"name": "Python",
"bytes": "201024"
},
{
"name": "Shell",
"bytes": "1782"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from auvsi_suas.models.server_info import ServerInfo
class Migration(migrations.Migration):
    """Add a non-nullable ForeignKey MissionConfig.server_info -> ServerInfo."""
    # Ordered after the migration that added MissionConfig.is_active.
    dependencies = [('auvsi_suas', '0003_missionconfig_is_active')]
    operations = [
        migrations.AddField(
            model_name='missionconfig',
            name='server_info',
            field=models.ForeignKey(
                # Existing rows are pointed at ServerInfo pk=1 during the
                # schema change; preserve_default=False drops the default
                # from the model state afterwards.
                default=1,
                to='auvsi_suas.ServerInfo'),
            preserve_default=False),
    ]
| {
"content_hash": "74bd394dd584c912af22f81a99ab6a00",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 67,
"avg_line_length": 25.95,
"alnum_prop": 0.6223506743737958,
"repo_name": "transformation/utatuav-interop",
"id": "dabc373027af0cbeab31485a2edab56c314730c0",
"size": "543",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "auvsi/server/auvsi_suas/migrations/0004_missionconfig_server_info.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "7130"
},
{
"name": "CSS",
"bytes": "678"
},
{
"name": "HTML",
"bytes": "7598"
},
{
"name": "JavaScript",
"bytes": "861038"
},
{
"name": "Makefile",
"bytes": "7485"
},
{
"name": "Puppet",
"bytes": "12694"
},
{
"name": "Python",
"bytes": "398564"
},
{
"name": "Ruby",
"bytes": "1204"
},
{
"name": "Shell",
"bytes": "4242"
},
{
"name": "TeX",
"bytes": "5434"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the schema change.

        Repoints DatabaseResize.source_offer/target_offer from
        dbaas_cloudstack.CloudStackPack (restored by ``backwards``) to
        physical.Offering, and gives all three FK columns null=True with
        on_delete=SET_NULL.
        """
        # Changing field 'DatabaseCreate.database'
        db.alter_column(u'maintenance_databasecreate', 'database_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['logical.Database']))
        # Changing field 'DatabaseResize.target_offer'
        db.alter_column(u'maintenance_databaseresize', 'target_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['physical.Offering']))
        # Changing field 'DatabaseResize.source_offer'
        db.alter_column(u'maintenance_databaseresize', 'source_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['physical.Offering']))
    def backwards(self, orm):
        """Revert the schema change.

        Points the DatabaseResize offer FKs back at
        dbaas_cloudstack.CloudStackPack (keeping SET_NULL) and drops the
        on_delete behaviour from DatabaseCreate.database.
        """
        # Changing field 'DatabaseCreate.database'
        db.alter_column(u'maintenance_databasecreate', 'database_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['logical.Database']))
        # Changing field 'DatabaseResize.target_offer'
        db.alter_column(u'maintenance_databaseresize', 'target_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['dbaas_cloudstack.CloudStackPack'], on_delete=models.SET_NULL))
        # Changing field 'DatabaseResize.source_offer'
        db.alter_column(u'maintenance_databaseresize', 'source_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['dbaas_cloudstack.CloudStackPack'], on_delete=models.SET_NULL))
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'source_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Offering']"}),
'target_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'offerings'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | {
"content_hash": "c697cbc9c94257130968258c96c90170",
"timestamp": "",
"source": "github",
"line_count": 457,
"max_line_length": 227,
"avg_line_length": 97.54048140043764,
"alnum_prop": 0.568691672648959,
"repo_name": "globocom/database-as-a-service",
"id": "287690833be65d6c92c97a8760403e35648534f0",
"size": "44600",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dbaas/maintenance/migrations/0029_auto__chg_field_databasecreate_database__chg_field_databaseresize_targ.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "243568"
},
{
"name": "Dockerfile",
"bytes": "1372"
},
{
"name": "HTML",
"bytes": "310401"
},
{
"name": "JavaScript",
"bytes": "988830"
},
{
"name": "Makefile",
"bytes": "5199"
},
{
"name": "Python",
"bytes": "9674426"
},
{
"name": "Shell",
"bytes": "215115"
}
],
"symlink_target": ""
} |
from typing import MutableMapping, MutableSequence
import proto # type: ignore
# Register this module's enums with proto-plus under the
# google.cloud.essentialcontacts.v1 proto package; the manifest lists the
# names exported from this module.
__protobuf__ = proto.module(
    package="google.cloud.essentialcontacts.v1",
    manifest={
        "NotificationCategory",
        "ValidationState",
    },
)
class NotificationCategory(proto.Enum):
    r"""The notification categories that an essential contact can be
    subscribed to. Each notification will be categorized by the
    sender into one of the following categories. All contacts that
    are subscribed to that category will receive the notification.
    """
    # Default/unset category.
    NOTIFICATION_CATEGORY_UNSPECIFIED = 0
    # NOTE(review): values 1 and 4 are absent — presumably reserved or
    # deprecated in the upstream .proto definition; confirm against the
    # proto source before reusing those numbers.
    ALL = 2
    SUSPENSION = 3
    SECURITY = 5
    TECHNICAL = 6
    BILLING = 7
    LEGAL = 8
    PRODUCT_UPDATES = 9
    TECHNICAL_INCIDENTS = 10
class ValidationState(proto.Enum):
    r"""A contact's validation state indicates whether or not it is
    the correct contact to be receiving notifications for a
    particular resource.
    """
    # Default/unset state.
    VALIDATION_STATE_UNSPECIFIED = 0
    VALID = 1
    INVALID = 2
# Public API: exactly the names declared in the proto-plus manifest.
__all__ = tuple(sorted(__protobuf__.manifest))
| {
"content_hash": "cf2a2ff943fbcb61596be3e7aa7e1e02",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 68,
"avg_line_length": 25.975609756097562,
"alnum_prop": 0.692018779342723,
"repo_name": "googleapis/python-essential-contacts",
"id": "92ad009dfab552035c8bf62e5d534703ec3cd56e",
"size": "1665",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/essential_contacts_v1/types/enums.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "315423"
},
{
"name": "Shell",
"bytes": "30696"
}
],
"symlink_target": ""
} |
import datetime
import time
from django.template import RequestContext
from django.http import Http404, HttpResponse
from django.shortcuts import render_to_response
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from events.models import Event
from events.forms import CalendarYearMonthForm
from events.utils import export_ical
def events_month(request, year=None, month=None):
    """Render a month-grid calendar of events.

    ``year`` and ``month`` come from the URL (``month`` as a lowercase
    '%b' abbreviation, e.g. 'jan'); both default to today.  GET query
    parameters may override them via :class:`CalendarYearMonthForm`.
    Raises Http404 for unparseable or out-of-range dates.
    """
    if not year:
        year = str(datetime.date.today().year)
    if not month:
        month = datetime.date.today().strftime('%b').lower()
    if request.GET:
        new_data = request.GET.copy()
        form = CalendarYearMonthForm(new_data)
        if form.is_valid():
            if form.cleaned_data['year']:
                year = str(form.cleaned_data['year'])
            else:
                year = str(datetime.date.today().year)
            if form.cleaned_data['month']:
                month = form.cleaned_data['month']
    try:
        # Rebind ``month`` from the '%b' string to a date (day defaults to 1).
        month = datetime.date(*time.strptime(year+month, '%Y%b')[:3])
    except ValueError:
        raise Http404
    # NOTE(review): since ``month`` has day=1, this flag is only True when
    # today is the first of the displayed month — confirm that is intended.
    if month == datetime.date.today():
        today = True
    else:
        today = False
    # Years outside 1001-9998 broke the date arithmetic below; reject them.
    if int(year) <= 1000:
        raise Http404
    if int(year) >= 9999:
        raise Http404
    event_list = Event.objects.filter(start_date__year=month.year, start_date__month=month.month)
    first_day = month.replace(day=1)
    if first_day.month == 12:
        last_day = first_day.replace(year=first_day.year + 1, month=1)
    else:
        last_day = first_day.replace(month=first_day.month + 1)
    # Pad out to whole Monday-started weeks surrounding the month
    # (weekday() is 0 for Monday).
    first_weekday = first_day - datetime.timedelta(first_day.weekday())
    last_weekday = last_day + datetime.timedelta(7 - last_day.weekday())
    next_month = last_day + datetime.timedelta(1)
    if int(next_month.year) >= 9999:
        next_month = None
    prev_month = first_day - datetime.timedelta(1)
    if int(prev_month.year) <= 1000:
        prev_month = None
    month_cal = []
    week = []
    week_headers = []
    i = 0
    day = first_weekday
    while day <= last_weekday:
        if i < 7:
            # The first seven iterations double as the column headers.
            week_headers.append(day)
        cal_day = {}
        cal_day['day'] = day
        cal_day['events'] = event_list.filter(start_date=day).order_by('start_time')
        if day.month == month.month:
            cal_day['in_month'] = True
        else:
            cal_day['in_month'] = False
        if day == datetime.date.today():
            cal_day['today'] = True
        else:
            cal_day['today'] = False
        if day.weekday() == 6 or day.weekday() == 5:
            cal_day['weekend'] = True
        else:
            cal_day['weekend'] = False
        week.append(cal_day)
        if day.weekday() == 6:
            # Sunday closes out the current row of the grid.
            month_cal.append(week)
            week = []
        i += 1
        day += datetime.timedelta(1)
    years = range(1986, (datetime.date.today().year + 4))
    payload = {
        'calendar': month_cal,
        'headers': week_headers,
        'month': month,
        'next_month': next_month,
        'prev_month': prev_month,
        'today': today,
        'years': years,
        'is_archive': True,
    }
    return render_to_response('events/month.html', payload, context_instance=RequestContext(request))
def events_week(request, year=None, week=None):
    """Render a week view: seven day columns with hourly agendas.

    ``year`` and ``week`` (a '%U' week number) default to today's year
    and ISO week.  Raises Http404 when the pair cannot be parsed.
    """
    if not year:
        year = str(datetime.date.today().year)
    if not week:
        week = str(datetime.date.today().isocalendar()[1])
    try:
        # '%w' == 0 anchors the parsed date on the Sunday of that week.
        date = datetime.date(*time.strptime(year + '-0-' + week, '%Y-%w-%U')[:3])
    except ValueError:
        raise Http404
    # FIX: corrected user-facing spelling of 'Tuesday' and 'Wednesday'
    # (was 'Tuseday' / 'Wednsday').
    weekdays = [
        {'name': 'Sunday', 'number': 0},
        {'name': 'Monday', 'number': 1},
        {'name': 'Tuesday', 'number': 2},
        {'name': 'Wednesday', 'number': 3},
        {'name': 'Thursday', 'number': 4},
        {'name': 'Friday', 'number': 5},
        {'name': 'Saturday', 'number': 6},
    ]
    # Row labels: one datetime per hour of the reference (Sunday) day.
    hours = []
    start_time = datetime.datetime.combine(date, datetime.time(0, 0))
    end_time = datetime.datetime.combine(date, datetime.time(23, 0))
    now = start_time
    while now <= end_time:
        hours.append(now)
        now += datetime.timedelta(hours=1)
    for offset, weekday in enumerate(weekdays):
        weekday['day'] = date + datetime.timedelta(days=offset)
        morning = datetime.datetime.combine(weekday['day'], datetime.time(0, 0))
        evening = datetime.datetime.combine(weekday['day'], datetime.time(23, 0))
        # TODO: one query per day (and a filter per hour below); could be
        # collapsed into a single range query for the whole week.
        events = Event.objects.filter(start_date=weekday['day'])
        weekday['agenda'] = []
        now = morning
        while now <= evening:
            # One slot per hour: [now, now + 59 minutes].
            end = now + datetime.timedelta(minutes=59)
            agenda_hour = {
                'start': now,
                'end': end,
                'events': events.filter(start_time__range=(now.time(), end.time())).order_by('start_time'),
                # FIX: the original chained comparison
                # ``now.hour == datetime.now().hour <= 17`` only flagged the
                # current wall-clock hour.  Mark 09:00-17:59 as working
                # hours, consistent with events_day.
                'working_hours': 9 <= now.time().hour <= 17,
            }
            weekday['agenda'].append(agenda_hour)
            now += datetime.timedelta(hours=1)
    context_payload = {
        'weekdays': weekdays,
        'date': date,
        'is_archive': True,
        'hours': hours,
    }
    return render_to_response('events/week.html', context_payload, context_instance=RequestContext(request))
def events_year(request, year=None):
    """Render a year overview: twelve months flagged for having events.

    Preserves the original contract: when the year has no events at all,
    ``months`` stays an empty dict (the template renders no grid);
    otherwise it is a 12-element list of [first-of-month, has_events].
    """
    if not year:
        year = str(datetime.date.today().year)
    prev_year = int(year) - 1
    next_year = int(year) + 1
    event_months = Event.objects.filter(start_date__year=year).dates('start_date', 'month')
    months = {}
    if event_months:
        # FIX: build the 12-month grid once up front instead of the old
        # try/except-KeyError trick that rebuilt it from inside the loop.
        months = [[datetime.date(int(year), m, 1), False] for m in xrange(1, 13)]
        for event_month in event_months:
            months[event_month.month - 1][1] = True
    context_payload = {
        'months': months,
        'year': year,
        'next_year': next_year,
        'prev_year': prev_year
    }
    return render_to_response('events/year.html', context_payload, context_instance=RequestContext(request))
def events_archives(request):
    """Render the archive index: every year with events, flagged per month."""
    archive_dates = Event.objects.dates('start_date', 'month')
    years = {}
    for archive_date in archive_dates:
        # Lazily create a 12-entry [first-of-month, has_events] grid the
        # first time a year is seen, then flag the month that has events.
        if archive_date.year not in years:
            years[archive_date.year] = [
                [datetime.date(archive_date.year, m, 1), False]
                for m in xrange(1, 13)
            ]
        years[archive_date.year][archive_date.month - 1][1] = True
    context = {'years': years}
    return render_to_response('events/archives.html', context, context_instance=RequestContext(request))
def events_day(request, year=None, month=None, day=None):
    """Render an hour-by-hour agenda for a single day.

    URL parts default to today; ``month`` is a lowercase '%b'
    abbreviation (e.g. 'jan').  Raises Http404 on an unparseable date.
    """
    if not year:
        year = str(datetime.date.today().year)
    if not month:
        month = datetime.date.today().strftime('%b').lower()
    if not day:
        day = str(datetime.date.today().day)
    try:
        date = datetime.date(*time.strptime(year+month+day, '%Y%b%d')[:3])
    except ValueError:
        raise Http404
    next_day = date + datetime.timedelta(days=+1)
    prev_day = date + datetime.timedelta(days=-1)
    events = Event.objects.filter(start_date=date)
    # Events without a start time are rendered separately as "all day".
    all_day = events.filter(start_time__isnull=True)
    morning = datetime.datetime.combine(date, datetime.time(0, 0))
    evening = datetime.datetime.combine(date, datetime.time(23, 0))
    agenda = []
    now = morning
    while now <= evening:
        # One slot per hour: [now, now + 59 minutes].
        end = now + datetime.timedelta(minutes=59)
        agenda_hour = {}
        agenda_hour['start'] = now
        agenda_hour['end'] = end
        agenda_hour['events'] = events.filter(start_time__range=(now.time(), end.time())).order_by('start_time')
        # Highlight the current wall-clock hour (regardless of page date).
        if now.time().hour == datetime.datetime.now().time().hour:
            agenda_hour['now'] = True
        else:
            agenda_hour['now'] = False
        # Slots starting 09:00 through 17:00 get working-hours styling.
        if now.time().hour >= 9 and now.time().hour <= 17:
            agenda_hour['working_hours'] = True
        else:
            agenda_hour['working_hours'] = False
        agenda += [agenda_hour,]
        now += datetime.timedelta(hours=1)
    context_payload = {
        'events': events,
        'agenda': agenda,
        'next_day': next_day,
        'prev_day': prev_day,
        'date': date,
        'all_day': all_day,
        'is_archive': True,
    }
    return render_to_response('events/day.html', context_payload, context_instance=RequestContext(request))
def detail(request, year, month, day, slug):
    """Render the detail page for a single event on a given date.

    Raises Http404 when the date is unparseable or no matching event
    exists.
    """
    try:
        date = datetime.date(*time.strptime(year+month+day, '%Y%b%d')[:3])
    except ValueError:
        raise Http404
    try:
        event = Event.objects.get(start_date=date, slug__iexact=slug)
    except Event.DoesNotExist:
        # BUG FIX: .get() raises DoesNotExist, never IndexError, so the old
        # ``except IndexError`` handler could not fire and a missing event
        # produced a 500 instead of a 404.
        raise Http404
    return render_to_response('events/detail.html', { 'event': event, 'date': date }, context_instance=RequestContext(request))
def detail_ical(request, year, month, day, slug):
    """Export a single event as a downloadable iCalendar (.ics) file.

    Raises Http404 when the date is unparseable or no matching event
    exists.
    """
    try:
        date = datetime.date(*time.strptime(year+month+day, '%Y%b%d')[:3])
    except ValueError:
        raise Http404
    try:
        event = Event.objects.get(start_date=date, slug__iexact=slug)
    except Event.DoesNotExist:
        # BUG FIX: .get() raises DoesNotExist, never IndexError, so the old
        # handler could not fire and a missing event produced a 500.
        raise Http404
    icalendar = export_ical([event, ])
    response = HttpResponse(icalendar.as_string(), mimetype="text/calendar")
    # BUG FIX: Content-Disposition separates its parameters with ';'
    # (RFC 6266); the old value used ':' and broke the download filename.
    response['Content-Disposition'] = 'attachment; filename=%s-%s.ics' % (event.start_date.isoformat(), event.slug)
    return response
def ical(request):
    """Export all events within 30 days (past or future) of today as iCalendar."""
    today = datetime.date.today()
    thirty_days = datetime.timedelta(days=30)  # was misspelled THRIDY_DAYS
    future = today + thirty_days
    past = today - thirty_days
    event_list = Event.objects.filter(start_date__lte=future, start_date__gte=past)
    icalendar = export_ical(event_list)
    response = HttpResponse(icalendar.as_string(), mimetype="text/calendar")
    # BUG FIX: Content-Disposition separates its parameters with ';'
    # (RFC 6266); the old value used ':' and broke the download filename.
    response['Content-Disposition'] = 'attachment; filename=%s-%s.ics' % (future.isoformat(), past.isoformat())
    return response
def tag_list(request):
    """List every tag attached to events."""
    context = {
        'tags': Event.tags.all(),
        'is_archive': True
    }
    return render_to_response('events/tag_list.html', context, context_instance=RequestContext(request))
def tag_detail(request, slug, page=1):
    """Paginated list (10 per page) of events carrying the given tag."""
    tag = Event.tags.get(slug=slug)
    paginator = Paginator(Event.objects.filter(tags__in=[tag]), 10)
    try:
        events = paginator.page(page)
    except (EmptyPage, InvalidPage):
        # Out-of-range page numbers fall back to the last page.
        events = paginator.page(paginator.num_pages)
    return render_to_response(
        'events/tag_detail.html',
        {'tag': tag, 'events': events, 'is_archive': True},
        context_instance=RequestContext(request)
    )
def tag_detail_ical(request, slug):
    """Export the next 10 upcoming events for a tag as an iCalendar feed."""
    tag = Event.tags.get(slug=slug)
    event_list = Event.objects.upcoming(tags__in=[tag])[:10]
    icalendar = export_ical(event_list)
    response = HttpResponse(icalendar.as_string(), mimetype="text/calendar")
    # BUG FIX: Content-Disposition separates its parameters with ';'
    # (RFC 6266); the old value used ':' and broke the download filename.
    response['Content-Disposition'] = 'attachment; filename=%s.ics' % slug
return response | {
"content_hash": "b163f53eec515c20cdc8b0d4492b6a51",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 124,
"avg_line_length": 27.07859078590786,
"alnum_prop": 0.6703362690152121,
"repo_name": "asgardproject/asgard-calendar",
"id": "fe6dcc3a56afdbf45e1e08f64e4b0f746fe0fe8f",
"size": "9992",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "events/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "27425"
}
],
"symlink_target": ""
} |
import time
from datetime import timedelta, date
import mock
from django.core import mail
from django.template import defaultfilters
from django.utils.timezone import now, get_current_timezone
from tenant_extras.utils import TenantLanguage
from bluebottle.activities.messages import ParticipantWithdrewConfirmationNotification, \
TeamMemberWithdrewMessage, TeamMemberAddedMessage
from bluebottle.activities.messages import TeamMemberRemovedMessage, TeamCancelledTeamCaptainMessage, \
TeamCancelledMessage
from bluebottle.activities.models import Organizer, Activity
from bluebottle.activities.tests.factories import TeamFactory
from bluebottle.initiatives.tests.factories import InitiativeFactory, InitiativePlatformSettingsFactory
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.utils import BluebottleTestCase, CeleryTestCase, TriggerTestCase
from bluebottle.time_based.messages import (
ParticipantJoinedNotification, ParticipantChangedNotification,
ParticipantAppliedNotification, ParticipantRemovedNotification, ParticipantRemovedOwnerNotification,
NewParticipantNotification, TeamParticipantJoinedNotification, ParticipantAddedNotification,
ParticipantRejectedNotification, ParticipantAddedOwnerNotification, TeamSlotChangedNotification,
ParticipantWithdrewNotification, TeamParticipantAppliedNotification, TeamMemberJoinedNotification
)
from bluebottle.time_based.tests.factories import (
DateActivityFactory, PeriodActivityFactory,
DateParticipantFactory, PeriodParticipantFactory,
DateActivitySlotFactory, SlotParticipantFactory, TeamSlotFactory
)
class TimeBasedActivityTriggerTestCase():
    """Shared state-machine trigger tests for time-based activities.

    Mixin: concrete subclasses must also inherit a test-case base class
    and define ``factory`` (activity factory) and ``participant_factory``.
    """
    def setUp(self):
        super().setUp()
        # Restrict platform settings to exactly this activity type.
        self.settings = InitiativePlatformSettingsFactory.create(
            activity_types=[self.factory._meta.model.__name__.lower()]
        )
        self.user = BlueBottleUserFactory()
        self.initiative = InitiativeFactory(owner=self.user)
        self.activity = self.factory.create(initiative=self.initiative, review=False)
    def test_initial(self):
        # A freshly created activity has a 'new' organizer contribution.
        organizer = self.activity.contributors.instance_of(Organizer).get()
        self.assertEqual(organizer.status, 'new')
    def test_delete(self):
        # Deleting the activity fails its organizer contribution.
        self.activity.states.delete(save=True)
        organizer = self.activity.contributors.instance_of(Organizer).get()
        self.assertEqual(organizer.status, 'failed')
    def test_reject(self):
        # Rejecting a submitted activity fails the organizer and mails the owner.
        self.initiative.states.submit(save=True)
        self.activity.states.submit()
        self.activity.states.reject(save=True)
        organizer = self.activity.contributors.instance_of(Organizer).get()
        self.assertEqual(organizer.status, 'failed')
        self.assertEqual(
            mail.outbox[-1].subject,
            'Your activity "{}" has been rejected'.format(self.activity.title)
        )
    def test_submit_initiative(self):
        # Submitting the initiative cascades to the activity.
        self.initiative.states.submit(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'submitted')
    def test_submit_initiative_already_approved(self):
        # Activities submitted under an approved initiative open immediately.
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        activity = self.factory.create(initiative=self.initiative)
        activity.states.submit(save=True)
        self.assertEqual(activity.status, 'open')
    def test_submit_initiative_not_approved(self):
        # Without initiative approval the activity stays 'submitted'.
        self.initiative.states.submit(save=True)
        activity = self.factory.create(initiative=self.initiative)
        activity.states.submit(save=True)
        self.assertEqual(activity.status, 'submitted')
    def test_approve_initiative(self):
        # Approving the initiative opens the activity and succeeds the organizer.
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'open')
        organizer = self.activity.contributors.instance_of(Organizer).get()
        self.assertEqual(organizer.status, 'succeeded')
    def test_cancel(self):
        # Cancelling fails the organizer and mails the owner.
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.activity.states.cancel(save=True)
        self.assertEqual(self.activity.status, 'cancelled')
        organizer = self.activity.contributors.instance_of(Organizer).get()
        self.assertEqual(organizer.status, 'failed')
        self.assertEqual(
            mail.outbox[-1].subject,
            'Your activity "{}" has been cancelled'.format(self.activity.title)
        )
    def test_change_capacity(self):
        # Filling capacity flips the activity to 'full'; raising it reopens.
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create_batch(
            self.activity.capacity - 1,
            activity=self.activity,
            status='accepted'
        )
        self.activity.capacity = self.activity.capacity - 1
        self.activity.save()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.capacity = self.activity.capacity + 1
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
    # NOTE(review): missing the 'test_' prefix — this method is never
    # collected by the test runner; confirm whether it should be renamed
    # to test_change_registration_deadline.
    def change_registration_deadline(self):
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.activity.registration_deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'full')
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.registration_deadline = date.today() + timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
class DateActivityTriggerTestCase(TimeBasedActivityTriggerTestCase, BluebottleTestCase):
    """Trigger tests for date (slot-based) activities; inherits the shared suite."""
    factory = DateActivityFactory
    participant_factory = DateParticipantFactory
    def test_unset_capacity(self):
        """Switching slot selection to 'free' clears the activity-level capacity."""
        self.activity.slot_selection = 'free'
        self.activity.save()
        self.activity.refresh_from_db()
        self.assertIsNone(self.activity.capacity)
    def test_unset_registration_deadline(self):
        """Clearing a passed registration deadline reopens a 'full' activity."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        # A deadline in the past makes the activity 'full' (no more signups possible).
        self.activity.registration_deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'full')
        # Reload a fresh instance so removing the deadline registers as a change.
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.refresh_from_db()
        self.activity.registration_deadline = None
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
class PeriodActivityTriggerTestCase(TimeBasedActivityTriggerTestCase, BluebottleTestCase):
    """Trigger tests for period (deadline-based) activities; inherits the shared suite."""
    factory = PeriodActivityFactory
    participant_factory = PeriodParticipantFactory
    def test_unset_registration_deadline(self):
        """Clearing a passed registration deadline reopens a 'full' activity."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.activity.registration_deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'full')
        # Reload a fresh instance so removing the deadline registers as a change.
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.refresh_from_db()
        self.activity.registration_deadline = None
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
    def test_reopen(self):
        """Manually reopening an expired activity resets it to draft and clears the deadline."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'open')
        # Passing the deadline with no participants expires the activity.
        self.activity.deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'expired')
        self.activity.states.reopen_manually(save=True)
        self.assertEqual(self.activity.status, 'draft')
        self.assertIsNone(self.activity.deadline)
    def test_change_deadline(self):
        """Moving the deadline to the past expires the activity; to the future reopens it."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'open')
        self.activity.deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'expired')
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.deadline = date.today() + timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
    def test_change_deadline_future(self):
        """Moving the deadline further into the future keeps the activity open."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create(
            activity=self.activity,
            status='accepted'
        )
        self.assertEqual(self.activity.status, 'open')
        self.activity.deadline = date.today() + timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
    def test_change_deadline_with_contributors(self):
        """Passing the deadline with contributors succeeds the activity and its durations."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create(
            activity=self.activity,
        )
        self.assertEqual(self.activity.status, 'open')
        self.activity.deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'succeeded')
        for duration in self.activity.durations:
            self.assertEqual(duration.status, 'succeeded')
    def test_change_deadline_with_contributors_reopen(self):
        """Moving the deadline of a succeeded activity back to the future reopens it."""
        self.test_change_deadline_with_contributors()
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.deadline = date.today() + timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
    def test_change_deadline_with_contributors_cancel(self):
        """Cancelling a succeeded activity fails its contribution durations."""
        self.test_change_deadline_with_contributors()
        self.activity.states.cancel(save=True)
        self.assertEqual(self.activity.status, 'cancelled')
        for duration in self.activity.durations:
            self.assertEqual(duration.status, 'failed')
    def test_change_deadline_full(self):
        """A full activity succeeds when its deadline passes (no impact prompt) and refills when reopened."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create_batch(
            self.activity.capacity,
            activity=self.activity,
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.activity.deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'succeeded')
        self.assertEqual(
            mail.outbox[-1].subject,
            'Your activity "{}" has succeeded 🎉'.format(self.activity.title)
        )
        # Impact is disabled by default, so the mail must not ask for impact numbers.
        self.assertFalse(
            (
                'Head over to your activity page and enter the impact your activity made, '
                'so that everybody can see how effective your activity was'
            ) in mail.outbox[-1].body
        )
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.deadline = date.today() + timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'full')
    def test_change_deadline_full_enable_impact(self):
        """With impact enabled, the success mail asks the owner to enter impact numbers."""
        InitiativePlatformSettingsFactory.create(enable_impact=True)
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create_batch(
            self.activity.capacity,
            activity=self.activity,
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.activity.deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'succeeded')
        self.assertEqual(
            mail.outbox[-1].subject,
            'Your activity "{}" has succeeded 🎉'.format(self.activity.title)
        )
        self.assertTrue(
            (
                'Head over to your activity page and enter the impact your activity made, '
                'so that everybody can see how effective your activity was'
            ) in mail.outbox[-1].body
        )
    def test_change_start(self):
        """Moving the start date (past or future) does not change the open status."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.activity.start = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.start = date.today() + timedelta(days=2)
        self.activity.save()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'open')
    def test_change_start_notification(self):
        """Changing the start mails participants once; saving again without changes does not."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create(
            activity=self.activity,
        )
        mail.outbox = []
        self.activity.start = date.today() + timedelta(days=4)
        self.activity.save()
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue(
            'The activity starts on {start} and ends on {end}'.format(
                start=defaultfilters.date(self.activity.start),
                end=defaultfilters.date(self.activity.deadline)
            )
            in mail.outbox[-1].body
        )
        # A save without a date change must not trigger another notification.
        self.activity.save()
        self.assertEqual(len(mail.outbox), 1)
    def test_unset_start_notification(self):
        """Clearing the start date sends a mail saying the activity starts immediately."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create(
            activity=self.activity,
        )
        self.activity.start = None
        self.activity.save()
        self.assertTrue(
            'The activity starts immediately and ends on {end}'.format(
                end=defaultfilters.date(self.activity.deadline),
            )
            in mail.outbox[-1].body
        )
    def test_change_deadline_notification(self):
        """Changing the start far into the future mails the new start/end dates."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create(
            activity=self.activity,
        )
        self.activity.start = date.today() + timedelta(days=40)
        self.activity.save()
        self.assertTrue(
            'The activity starts on {start} and ends on {end}'.format(
                start=defaultfilters.date(self.activity.start),
                end=defaultfilters.date(self.activity.deadline),
            )
            in mail.outbox[-1].body
        )
    def test_unset_both_notification(self):
        """Clearing both start and deadline mails that the activity runs indefinitely."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create(
            activity=self.activity,
        )
        self.activity.start = None
        self.activity.deadline = None
        self.activity.save()
        self.assertTrue(
            'The activity starts immediately and runs indefinitely'
            in mail.outbox[-1].body
        )
    def test_unset_start(self):
        """Clearing the start date keeps the activity open."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.activity.start = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'open')
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.start = None
        self.activity.save()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'open')
    def test_change_start_after_registration_deadline(self):
        """Once full via a passed registration deadline, moving the start never reopens."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.activity.registration_deadline = date.today() - timedelta(days=4)
        self.activity.save()
        self.assertEqual(self.activity.status, 'full')
        self.activity.start = date.today() - timedelta(days=2)
        self.activity.save()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.activity.start = date.today() + timedelta(days=2)
        self.activity.save()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
    def test_change_start_after_full(self):
        """Once full via capacity, moving the start date never reopens the activity."""
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create_batch(
            self.activity.capacity,
            activity=self.activity,
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.activity.start = date.today() - timedelta(days=1)
        self.activity.save()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.activity = self.factory._meta.model.objects.get(pk=self.activity.pk)
        self.activity.start = date.today() + timedelta(days=2)
        self.activity.save()
        self.assertEqual(self.activity.status, 'full')
    def test_succeed_manually(self):
        """Manually succeeding sets deadline to yesterday, succeeds durations and mails participants."""
        self.activity.duration_period = 'weeks'
        self.activity.save()
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.participant_factory.create_batch(
            self.activity.capacity,
            activity=self.activity,
        )
        self.activity.refresh_from_db()
        self.activity.states.succeed_manually(save=True)
        self.assertEqual(self.activity.deadline, date.today() - timedelta(days=1))
        for duration in self.activity.durations:
            self.assertEqual(duration.status, 'succeeded')
        # One success mail per participant, sent last.
        for message in mail.outbox[-self.activity.capacity:]:
            self.assertEqual(
                message.subject,
                'The activity "{}" has succeeded 🎉'.format(self.activity.title)
            )
    def test_succeed_manually_review_new(self):
        """Manual success on a review activity also succeeds and mails (new) participants."""
        self.activity.duration_period = 'weeks'
        self.activity.save()
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.activity.review = True
        self.activity.save()
        self.participant_factory.create_batch(
            self.activity.capacity,
            activity=self.activity,
        )
        self.activity.refresh_from_db()
        mail.outbox = []
        self.activity.states.succeed_manually(save=True)
        self.assertEqual(self.activity.deadline, date.today() - timedelta(days=1))
        for duration in self.activity.durations:
            self.assertEqual(duration.status, 'succeeded')
        for message in mail.outbox[-self.activity.capacity:]:
            self.assertEqual(
                message.subject,
                'The activity "{}" has succeeded 🎉'.format(self.activity.title)
            )
    def test_reschedule_contributions(self):
        """Changing start/deadline (including unsetting them) reschedules all durations."""
        self.participant_factory.create_batch(5, activity=self.activity)
        self.assertEqual(len(self.activity.durations), 5)
        # Durations are datetimes; compare their dates in the current timezone.
        tz = get_current_timezone()
        for duration in self.activity.durations:
            self.assertEqual(duration.start.astimezone(tz).date(), self.activity.start)
            self.assertEqual(duration.end.astimezone(tz).date(), self.activity.deadline)
        self.activity.start = self.activity.start + timedelta(days=1)
        self.activity.save()
        for duration in self.activity.durations:
            self.assertEqual(duration.start.astimezone(tz).date(), self.activity.start)
            self.assertEqual(duration.end.astimezone(tz).date(), self.activity.deadline)
        self.activity.deadline = self.activity.deadline + timedelta(days=1)
        self.activity.save()
        for duration in self.activity.durations:
            self.assertEqual(duration.start.astimezone(tz).date(), self.activity.start)
            self.assertEqual(duration.end.astimezone(tz).date(), self.activity.deadline)
        # Unsetting the start keeps the previously scheduled start on the durations.
        current_start = self.activity.start
        self.activity.start = None
        self.activity.save()
        for duration in self.activity.durations:
            self.assertEqual(duration.start.astimezone(tz).date(), current_start)
            self.assertEqual(duration.end.astimezone(tz).date(), self.activity.deadline)
        # Unsetting the deadline leaves durations open-ended.
        self.activity.deadline = None
        self.activity.save()
        for duration in self.activity.durations:
            self.assertEqual(duration.start.astimezone(tz).date(), current_start)
            self.assertEqual(duration.end, None)
class DateActivitySlotTriggerTestCase(BluebottleTestCase):
    """Trigger tests for individual slots of a date activity."""
    def setUp(self):
        """Create an approved date activity with a single slot."""
        super().setUp()
        self.user = BlueBottleUserFactory()
        self.initiative = InitiativeFactory(owner=self.user)
        self.activity = DateActivityFactory.create(
            initiative=self.initiative,
            slots=[],
            review=False)
        self.slot = DateActivitySlotFactory.create(activity=self.activity)
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
    def assertStatus(self, obj, status):
        """Assert `obj.status` equals `status` after reloading it from the database."""
        obj.refresh_from_db()
        self.assertEqual(obj.status, status)
    def test_incomplete(self):
        """A slot without a start datetime is 'draft'."""
        self.slot.start = None
        self.slot.save()
        self.assertEqual(self.slot.status, 'draft')
    def test_complete(self):
        """Setting a future start on a draft slot opens it."""
        self.test_incomplete()
        self.slot.start = now() + timedelta(days=2)
        self.slot.save()
        self.assertEqual(self.slot.status, 'open')
    def test_start(self):
        """A slot whose start has passed (but not its end) is 'running'."""
        self.slot.start = now() - timedelta(hours=1)
        self.slot.save()
        self.assertEqual(self.slot.status, 'running')
    def test_finish_one_slot_no_participants(self):
        """Finishing the only slot without participants expires the activity."""
        self.slot.start = now() - timedelta(days=1)
        self.slot.save()
        self.assertStatus(self.slot, 'finished')
        self.assertStatus(self.activity, 'expired')
    def test_reschedule_one_slot_no_participants(self):
        """Rescheduling a finished slot to the future reopens slot and activity."""
        self.test_finish_one_slot_no_participants()
        self.slot.start = now() + timedelta(days=1)
        self.slot.save()
        self.assertStatus(self.slot, 'open')
        self.assertStatus(self.activity, 'open')
    def test_finish_one_slot_with_participants(self):
        """Finishing the only slot with a participant succeeds the activity."""
        DateParticipantFactory.create(activity=self.activity)
        self.slot.start = now() - timedelta(days=1)
        self.slot.save()
        self.assertStatus(self.slot, 'finished')
        self.assertStatus(self.activity, 'succeeded')
    def test_reschedule_one_slot_with_participants(self):
        """Rescheduling the finished slot reopens a succeeded activity."""
        self.test_finish_one_slot_with_participants()
        self.slot.start = now() + timedelta(days=1)
        self.slot.save()
        self.assertStatus(self.slot, 'open')
        self.assertStatus(self.activity, 'open')
    def test_finish_multiple_slots(self):
        """The activity only succeeds once all of its slots have finished."""
        self.slot2 = DateActivitySlotFactory.create(activity=self.activity)
        DateParticipantFactory.create(activity=self.activity)
        self.slot.start = now() - timedelta(days=1)
        self.slot.save()
        self.assertStatus(self.slot, 'finished')
        self.assertStatus(self.activity, 'open')
        self.slot2.start = now() - timedelta(days=1)
        self.slot2.save()
        self.assertStatus(self.slot2, 'finished')
        self.assertStatus(self.activity, 'succeeded')
    def test_reschedule_open(self):
        """A finished slot moved to a future start becomes 'open' again."""
        self.test_finish_one_slot_with_participants()
        self.slot.start = now() + timedelta(days=1)
        self.slot.save()
        self.assertStatus(self.slot, 'open')
    def test_reschedule_running(self):
        """A finished slot moved to a just-passed start becomes 'running'."""
        self.test_finish_one_slot_with_participants()
        self.slot.start = now() - timedelta(hours=1)
        self.slot.save()
        self.assertStatus(self.slot, 'running')
    def test_reset_slot_selection(self):
        """With only one slot left, slot selection falls back from 'free' to 'all'."""
        self.activity.slot_selection = 'free'
        self.activity.save()
        second_slot = DateActivitySlotFactory.create(activity=self.activity)
        third_slot = DateActivitySlotFactory.create(activity=self.activity)
        second_slot.delete()
        self.activity.refresh_from_db()
        # Two slots remain, so 'free' selection is still meaningful.
        self.assertEqual(self.activity.slot_selection, 'free')
        third_slot.delete()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.slot_selection, 'all')
    def test_reset_slot_selection_all(self):
        """Deleting down to one slot keeps 'all' selection as 'all'."""
        self.activity.save()
        second_slot = DateActivitySlotFactory.create(activity=self.activity)
        second_slot.delete()
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.slot_selection, 'all')
    def test_changed_single_date(self):
        """Changing the single slot's start mails the participant with the new date/time."""
        eng = BlueBottleUserFactory.create(primary_language='en')
        DateParticipantFactory.create(activity=self.activity, user=eng)
        mail.outbox = []
        self.slot.start = now() + timedelta(days=10)
        self.slot.save()
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            'The details of activity "{}" have changed'.format(self.activity.title)
        )
        # Render the expected date string with the recipient's language active.
        with TenantLanguage('en'):
            expected = '{} {} - {} ({})'.format(
                defaultfilters.date(self.slot.start),
                defaultfilters.time(self.slot.start.astimezone(get_current_timezone())),
                defaultfilters.time(self.slot.end.astimezone(get_current_timezone())),
                self.slot.start.astimezone(get_current_timezone()).strftime('%Z'),
            )
        self.assertTrue(expected in mail.outbox[0].body)
    def test_changed_multiple_dates(self):
        """With free slot selection, only participants of the changed slot are mailed."""
        # NOTE(review): slot_selection is set here but the activity is never
        # saved afterwards — confirm whether a save() is needed for the
        # 'free' selection to take effect.
        self.activity.slot_selection = 'free'
        eng = BlueBottleUserFactory.create(primary_language='en')
        participant = DateParticipantFactory.create(activity=self.activity, user=eng)
        SlotParticipantFactory.create(participant=participant, slot=self.slot)
        slot2 = DateActivitySlotFactory.create(activity=self.activity)
        other_participant = DateParticipantFactory.create(activity=self.activity)
        SlotParticipantFactory.create(slot=slot2, participant=other_participant)
        mail.outbox = []
        self.slot.start = now() + timedelta(days=10)
        self.slot.execute_triggers(user=self.user, send_messages=True)
        self.slot.save()
        # Only the participant of the changed slot gets a notification.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            'The details of activity "{}" have changed'.format(self.activity.title)
        )
        self.assertEqual(mail.outbox[0].to[0], participant.user.email)
        with TenantLanguage('en'):
            expected = '{} {} - {} ({})'.format(
                defaultfilters.date(self.slot.start),
                defaultfilters.time(self.slot.start.astimezone(get_current_timezone())),
                defaultfilters.time(self.slot.end.astimezone(get_current_timezone())),
                self.slot.start.astimezone(get_current_timezone()).strftime('%Z'),
            )
        self.assertTrue(expected in mail.outbox[0].body)
    def test_reschedule_contributions(self):
        """Changing slot start or duration reschedules all participant durations."""
        DateParticipantFactory.create_batch(5, activity=self.activity)
        for duration in self.slot.durations:
            self.assertEqual(duration.start, self.slot.start)
            self.assertEqual(duration.end, self.slot.start + self.slot.duration)
            self.assertEqual(duration.value, self.slot.duration)
        self.slot.start = self.slot.start + timedelta(days=1)
        self.slot.save()
        for duration in self.slot.durations:
            self.assertEqual(duration.start, self.slot.start)
            self.assertEqual(duration.end, self.slot.start + self.slot.duration)
            self.assertEqual(duration.value, self.slot.duration)
        self.slot.duration = self.slot.duration + timedelta(hours=1)
        self.slot.save()
        for duration in self.slot.durations:
            self.assertEqual(duration.start, self.slot.start)
            self.assertEqual(duration.end, self.slot.start + self.slot.duration)
            self.assertEqual(duration.value, self.slot.duration)
    def test_cancel(self):
        """Cancelling a slot mails the participant and the owner, naming the slot."""
        DateParticipantFactory.create(activity=self.activity)
        mail.outbox = []
        self.slot.title = 'Session 1'
        self.slot.states.cancel(save=True)
        self.assertEqual(self.slot.status, 'cancelled')
        # One mail to the participant, one to the owner.
        self.assertEqual(
            len(mail.outbox),
            2
        )
        self.assertEqual(
            mail.outbox[1].subject,
            'A slot for your activity "{}" has been cancelled'.format(self.activity.title)
        )
        self.assertTrue(
            'Session 1' in
            mail.outbox[1].body
        )
    def test_cancel_multiple_slots(self):
        """The activity is only cancelled when its last remaining slot is cancelled."""
        self.slot2 = DateActivitySlotFactory.create(activity=self.activity)
        self.slot.states.cancel(save=True)
        self.assertStatus(self.slot, 'cancelled')
        self.assertStatus(self.activity, 'open')
        self.slot2.states.cancel(save=True)
        self.assertStatus(self.slot2, 'cancelled')
        self.assertStatus(self.activity, 'cancelled')
    def test_cancel_multiple_slots_succeed(self):
        """Cancelling the remaining slot after one finished slot succeeds the activity."""
        self.slot2 = DateActivitySlotFactory.create(activity=self.activity)
        DateParticipantFactory.create(activity=self.activity)
        self.slot.start = now() - timedelta(days=1)
        self.slot.save()
        self.assertStatus(self.slot, 'finished')
        self.assertStatus(self.activity, 'open')
        self.slot2.states.cancel(save=True)
        self.assertStatus(self.slot2, 'cancelled')
        self.assertStatus(self.activity, 'succeeded')
    def test_cancel_with_cancelled_activity(self):
        """Cancelling a slot of an already-cancelled activity only mails the owner."""
        DateParticipantFactory.create(activity=self.activity)
        self.activity.states.cancel(save=True)
        mail.outbox = []
        self.slot.title = 'Session 3'
        self.slot.states.cancel(save=True)
        self.assertEqual(self.slot.status, 'cancelled')
        self.assertEqual(
            len(mail.outbox),
            1
        )
        self.assertEqual(
            mail.outbox[0].subject,
            'A slot for your activity "{}" has been cancelled'.format(self.activity.title)
        )
        self.assertTrue(
            'Session 3' in
            mail.outbox[0].body
        )
class ParticipantTriggerTestCase(object):
    def setUp(self):
        """Create settings, users and one reviewed + one unreviewed approved activity."""
        super().setUp()
        self.settings = InitiativePlatformSettingsFactory.create(
            activity_types=[self.factory._meta.model.__name__.lower()]
        )
        self.user = BlueBottleUserFactory()
        self.admin_user = BlueBottleUserFactory.create(is_staff=True)
        self.initiative = InitiativeFactory.create(owner=self.user)
        # 'review=False': participants are accepted automatically.
        self.activity = self.factory.create(
            preparation=timedelta(hours=1),
            initiative=self.initiative,
            review=False)
        # 'review=True': participants need explicit acceptance by the owner.
        self.review_activity = self.factory.create(
            preparation=timedelta(hours=4),
            initiative=self.initiative,
            review=True)
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        self.review_activity.refresh_from_db()
    def test_initial_added_through_admin(self):
        """A staff-added participant is auto-accepted; both parties are mailed and preparation succeeds."""
        mail.outbox = []
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=BlueBottleUserFactory.create(),
            as_user=self.admin_user
        )
        # Even on a review activity, an admin-added participant skips review.
        self.assertEqual(participant.status, 'accepted')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            'You have been added to the activity "{}" 🎉'.format(self.review_activity.title)
        )
        self.assertEqual(
            mail.outbox[1].subject,
            'A participant has been added to your activity "{}" 🎉'.format(self.review_activity.title)
        )
        self.assertTrue(self.review_activity.followers.filter(user=participant.user).exists())
        # The preparation contribution mirrors the activity's preparation time.
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.review_activity.preparation
        )
        self.assertEqual(
            prep.status,
            'succeeded'
        )
    def test_initial_added_through_admin_team(self):
        """On a team activity, a staff-added participant gets an open team they own."""
        self.review_activity.team_activity = Activity.TeamActivityChoices.teams
        self.review_activity.save()
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=BlueBottleUserFactory.create(),
            as_user=self.admin_user
        )
        self.assertTrue(participant.team)
        self.assertEqual(participant.team.owner, participant.user)
        self.assertEqual(participant.status, 'accepted')
        self.assertEqual(participant.team.status, 'open')
def test_initiate_team_invite(self):
self.activity.team_activity = Activity.TeamActivityChoices.teams
self.activity.save()
team_captain = self.participant_factory.create(
activity=self.activity,
user=BlueBottleUserFactory.create()
)
mail.outbox = []
participant = self.participant_factory.create(
activity=self.activity,
accepted_invite=team_captain.invite,
user=BlueBottleUserFactory.create()
)
self.assertEqual(participant.team, team_captain.team)
'New team member' in [message.subject for message in mail.outbox]
    def test_initiate_team_invite_review(self):
        """On a review activity, invitees of an accepted captain are auto-accepted."""
        self.activity.team_activity = Activity.TeamActivityChoices.teams
        self.activity.review = True
        self.activity.save()
        capt = BlueBottleUserFactory.create()
        team_captain = self.participant_factory.create(
            activity=self.activity,
            user=capt,
            as_user=capt
        )
        # Captain is reviewed and accepted before the invitee signs up.
        team_captain.states.accept(save=True)
        mail.outbox = []
        participant = self.participant_factory.create(
            activity=self.activity,
            accepted_invite=team_captain.invite,
            user=BlueBottleUserFactory.create()
        )
        self.assertEqual(participant.team, team_captain.team)
        self.assertEqual(participant.status, 'accepted')
    def test_initiate_team_invite_review_after_signup(self):
        """Accepting the captain after the invitee signed up also accepts the invitee."""
        self.activity.team_activity = Activity.TeamActivityChoices.teams
        self.activity.review = True
        self.activity.save()
        capt = BlueBottleUserFactory.create()
        team_captain = self.participant_factory.create(
            activity=self.activity,
            user=capt,
            as_user=capt
        )
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        # Invitee joins while the captain is still awaiting review.
        participant = self.participant_factory.create(
            activity=self.activity,
            accepted_invite=team_captain.invite,
            user=user,
            as_user=user
        )
        self.assertEqual(participant.team, team_captain.team)
        team_captain.states.accept(save=True)
        self.assertEqual(team_captain.status, 'accepted')
        self.assertEqual(team_captain.team.status, 'open')
        # Accepting the captain cascades acceptance to the team member.
        participant.refresh_from_db()
        self.assertEqual(participant.status, 'accepted')
    def test_initial_removed_through_admin(self):
        """A staff-removed participant ends up 'rejected' and both parties are mailed."""
        mail.outbox = []
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=BlueBottleUserFactory.create(),
            as_user=self.admin_user
        )
        mail.outbox = []
        # Run the transition with the admin as acting user so messages are sent.
        participant.states.remove()
        participant.execute_triggers(user=self.admin_user, send_messages=True)
        participant.save()
        self.assertEqual(participant.status, 'rejected')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            'You have been removed as participant for the activity "{}"'.format(self.review_activity.title)
        )
        self.assertEqual(
            mail.outbox[1].subject,
            'A participant has been removed from your activity "{}"'.format(self.review_activity.title)
        )
    def test_accept(self):
        """Accepting a self-signed-up participant mails them and succeeds their preparation."""
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=user,
            as_user=user
        )
        mail.outbox = []
        participant.states.accept(save=True)
        self.assertEqual(participant.status, 'accepted')
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            'You have been selected for the activity "{}" 🎉'.format(
                self.review_activity.title
            )
        )
        # Preparation time is booked as a succeeded contribution on acceptance.
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.review_activity.preparation
        )
        self.assertEqual(
            prep.status,
            'succeeded'
        )
    def test_accept_team(self):
        """Accepting a participant on a team review activity creates a team they own."""
        self.review_activity.team_activity = Activity.TeamActivityChoices.teams
        self.review_activity.save()
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=user,
            as_user=user
        )
        participant.states.accept(save=True)
        self.assertTrue(participant.team)
        self.assertEqual(participant.team.owner, participant.user)
    def test_initial_team_created(self):
        """Signing up for a team review activity immediately creates a team."""
        self.review_activity.team_activity = Activity.TeamActivityChoices.teams
        self.review_activity.save()
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=BlueBottleUserFactory.create()
        )
        self.assertIsNotNone(participant.team)
    def test_initial_no_review_team(self):
        """Signing up for a non-review team activity creates a team owned by the participant."""
        self.activity.team_activity = Activity.TeamActivityChoices.teams
        self.activity.save()
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.activity,
            user=user,
            as_user=user
        )
        self.assertTrue(participant.team)
        self.assertEqual(participant.team.owner, participant.user)
    def test_no_review_fill(self):
        """A non-review activity becomes 'full' when capacity is reached."""
        self.participant_factory.create_batch(
            self.activity.capacity, activity=self.activity
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
    def test_no_review_fill_cancel(self):
        """A full activity can still be cancelled."""
        self.participant_factory.create_batch(
            self.activity.capacity, activity=self.activity
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.activity.states.cancel(save=True)
        self.assertEqual(self.activity.status, 'cancelled')
    def test_review_fill(self):
        """A review activity only fills once all signups are explicitly accepted."""
        participants = self.participant_factory.create_batch(
            self.review_activity.capacity,
            activity=self.review_activity,
            user=BlueBottleUserFactory.create(),
            as_relation='user'
        )
        self.review_activity.refresh_from_db()
        # NOTE(review): this checks ``self.activity`` (the non-review one);
        # presumably ``self.review_activity`` was meant — confirm intent.
        self.assertEqual(self.activity.status, 'open')
        for participant in participants:
            user = participant.user
            user.save()
            participant.execute_triggers(user=user, send_messages=True)
            participant.save()
            participant.states.accept(save=True)
        self.review_activity.refresh_from_db()
        self.assertEqual(self.review_activity.status, 'full')
    def test_remove(self):
        """Removing a participant fails their contributions, reopens the activity and mails both parties."""
        self.participants = self.participant_factory.create_batch(
            self.activity.capacity, activity=self.activity
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        mail.outbox = []
        participant = self.participants[0]
        participant.states.remove(save=True)
        # The (non-preparation) time contribution fails on removal.
        self.assertEqual(
            participant.contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'failed'
        )
        # The preparation contribution also fails, but keeps its booked value.
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.activity.preparation
        )
        self.assertEqual(
            prep.status,
            'failed'
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'open')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            'You have been removed as participant for the activity "{}"'.format(
                self.activity.title
            )
        )
        self.assertEqual(
            mail.outbox[1].subject,
            'A participant has been removed from your activity "{}"'.format(
                self.activity.title
            )
        )
        self.assertFalse(self.activity.followers.filter(user=self.participants[0].user).exists())
    def test_remove_team(self):
        """Removing a team member fails their contribution and sends team-specific mails."""
        self.activity.team_activity = Activity.TeamActivityChoices.teams
        self.activity.save()
        team_captain = self.participant_factory.create(
            activity=self.activity,
            user=BlueBottleUserFactory.create()
        )
        participant = self.participant_factory.create(
            activity=self.activity,
            accepted_invite=team_captain.invite,
            user=BlueBottleUserFactory.create()
        )
        mail.outbox = []
        participant.states.remove(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(
            participant.contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'failed'
        )
        # Note: the comprehension variable shadows the imported ``mail`` module
        # inside the comprehension scope only; ``mail.outbox`` is evaluated first.
        subjects = [mail.subject for mail in mail.outbox]
        self.assertTrue(
            f"Your team participation in ‘{self.activity.title}’ has been cancelled" in subjects
        )
        self.assertTrue(
            f"Team member removed for ‘{self.activity.title}’" in subjects
        )
    def test_reject(self):
        """Rejecting a participant fails their contribution, mails them and stops them following."""
        users = BlueBottleUserFactory.create_batch(self.activity.capacity)
        self.participants = []
        # Build + trigger manually so each participant signs up as themselves.
        for user in users:
            participant = self.participant_factory.build(
                user=user,
                activity=self.review_activity,
            )
            participant.execute_triggers(user=user)
            participant.save()
            self.participants.append(
                participant
            )
        mail.outbox = []
        participant = self.participants[0]
        participant.states.reject(save=True)
        self.assertEqual(
            participant.contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'failed'
        )
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            'You have not been selected for the activity "{}"'.format(
                self.review_activity.title
            )
        )
        self.assertFalse(self.review_activity.followers.filter(user=participant.user).exists())
    def test_withdraw(self):
        """Withdrawing reopens a full activity, fails the contribution and mails both parties."""
        self.participants = self.participant_factory.create_batch(
            self.activity.capacity,
            activity=self.activity,
            user=BlueBottleUserFactory.create()
        )
        self.activity.refresh_from_db()
        # Filling to capacity transitions the activity to 'full'.
        self.assertEqual(self.activity.status, 'full')
        mail.outbox = []
        self.participants[0].states.withdraw(save=True)
        self.activity.refresh_from_db()
        # One spot freed -> activity reopens.
        self.assertEqual(self.activity.status, 'open')
        self.assertEqual(
            self.participants[0].contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'failed'
        )
        self.assertFalse(self.activity.followers.filter(user=self.participants[0].user).exists())
        subjects = [mail.subject for mail in mail.outbox]
        self.assertTrue(
            f'You have withdrawn from the activity "{self.activity.title}"' in subjects
        )
        self.assertTrue(
            f'A participant has withdrawn from your activity "{self.activity.title}"' in subjects
        )
def test_withdraw_team(self):
self.activity.team_activity = Activity.TeamActivityChoices.teams
self.activity.save()
team_captain = self.participant_factory.create(
activity=self.activity,
user=BlueBottleUserFactory.create()
)
participant = self.participant_factory.create(
activity=self.activity,
accepted_invite=team_captain.invite,
user=BlueBottleUserFactory.create()
)
mail.outbox = []
participant.states.withdraw(save=True)
self.activity.refresh_from_db()
self.assertEqual(
participant.contributions.
exclude(timecontribution__contribution_type='preparation').get().status,
'failed'
)
subjects = [mail.subject for mail in mail.outbox]
self.assertTrue(
f'You have withdrawn from the activity "{self.activity.title}"' in subjects
)
self.assertTrue(
f'A participant has withdrawn from your team for "{self.activity.title}"' in subjects
)
    def test_reapply_cancelled(self):
        # NOTE(review): despite its name, this test never calls `reapply` — it is an
        # exact duplicate of test_withdraw above. Confirm whether a reapply step
        # (and its assertions) is missing here.
        self.participants = self.participant_factory.create_batch(
            self.activity.capacity,
            activity=self.activity,
            user=BlueBottleUserFactory.create()
        )
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        mail.outbox = []
        self.participants[0].states.withdraw(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'open')
        self.assertEqual(
            self.participants[0].contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'failed'
        )
        self.assertFalse(self.activity.followers.filter(user=self.participants[0].user).exists())
        subjects = [mail.subject for mail in mail.outbox]
        self.assertTrue(
            f'You have withdrawn from the activity "{self.activity.title}"' in subjects
        )
        self.assertTrue(
            f'A participant has withdrawn from your activity "{self.activity.title}"' in subjects
        )
def test_withdraw_from_team(self):
self.activity.team_activity = Activity.TeamActivityChoices.teams
self.captain = self.participant_factory.create(
activity=self.activity,
user=BlueBottleUserFactory.create()
)
self.participant = self.participant_factory.create(
activity=self.activity,
user=BlueBottleUserFactory.create(),
team=self.captain.team
)
mail.outbox = []
self.participant.states.withdraw(save=True)
subjects = [mail.subject for mail in mail.outbox]
self.assertTrue(
f'You have withdrawn from the activity "{self.activity.title}"' in subjects
)
self.assertTrue(
f'A participant has withdrawn from your team for "{self.activity.title}"' in subjects
)
class DateParticipantTriggerTestCase(ParticipantTriggerTestCase, BluebottleTestCase):
    """Participant trigger tests specialised for date activities (contribution_type 'date')."""
    factory = DateActivityFactory
    participant_factory = DateParticipantFactory
    def test_type(self):
        """Date-activity participants get 'date' time contributions."""
        self.participants = self.participant_factory.create_batch(
            self.activity.capacity, activity=self.review_activity
        )
        self.assertEqual(
            self.participants[0].contributions.
            exclude(timecontribution__contribution_type='preparation').get().contribution_type,
            'date'
        )
    def test_reaccept(self):
        """Re-accepting a removed participant refills the activity and resets the contribution."""
        self.test_remove()
        self.participants[0].states.accept(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.assertEqual(
            self.participants[0].contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'new'
        )
        self.assertTrue(self.activity.followers.filter(user=self.participants[0].user).exists())
    def test_initial_no_review(self):
        """Joining a no-review activity accepts immediately and succeeds the preparation."""
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.activity,
            user=user,
            as_user=user
        )
        self.assertEqual(participant.status, 'accepted')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            'A new participant has joined your activity "{}" 🎉'.format(self.activity.title)
        )
        self.assertTrue(self.activity.followers.filter(user=participant.user).exists())
        self.assertEqual(
            self.activity.accepted_participants.get().
            contributions.exclude(timecontribution__contribution_type='preparation').get().status,
            'new'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.activity.preparation
        )
        self.assertEqual(
            prep.status,
            'succeeded'
        )
    def test_initial_review(self):
        """Applying to a review activity stays 'new'; preparation is 'new' until accepted."""
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=user,
            as_user=user
        )
        self.assertEqual(participant.status, 'new')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[1].subject,
            'You have a new participant for your activity "{}" 🎉'.format(
                self.review_activity.title
            )
        )
        self.assertTrue(self.review_activity.followers.filter(user=participant.user).exists())
        self.assertEqual(
            participant.contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'new'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.review_activity.preparation
        )
        self.assertEqual(
            prep.status,
            'new'
        )
    def test_reapply(self):
        """Reapplying after withdrawal refills the activity and resets the contribution."""
        self.test_withdraw()
        self.participants[0].states.reapply(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.assertEqual(
            self.participants[0].contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'new'
        )
        self.assertTrue(self.activity.followers.filter(user=self.participants[0].user).exists())
# NOTE(review): these patches replace the notifications' `delay` attribute with small
# integers — presumably the grouping delay (in seconds) used by the celery-batched
# mails, so the sleeps below can wait them out. Confirm the semantics of `delay`.
@mock.patch.object(
    ParticipantJoinedNotification, 'delay', 2
)
@mock.patch.object(
    ParticipantAppliedNotification, 'delay', 1
)
@mock.patch.object(
    ParticipantChangedNotification, 'delay', 2
)
class DateParticipantTriggerCeleryTestCase(CeleryTestCase):
    """Date-participant notifications delivered asynchronously through celery.

    Tests sleep past the patched notification delays before inspecting mail.outbox.
    """
    factory = DateActivityFactory
    participant_factory = DateParticipantFactory
    factories = CeleryTestCase.factories + [
        DateParticipantFactory, DateActivityFactory, InitiativeFactory
    ]
    def setUp(self):
        super().setUp()
        self.settings = InitiativePlatformSettingsFactory.create(
            activity_types=[self.factory._meta.model.__name__.lower()]
        )
        self.user = BlueBottleUserFactory()
        self.admin_user = BlueBottleUserFactory(is_staff=True)
        self.initiative = InitiativeFactory(
            owner=self.user,
            status='approved'
        )
        self.activity = self.factory.create(
            preparation=timedelta(hours=1),
            initiative=self.initiative,
            slot_selection='free',
            review=False
        )
        self.slots = DateActivitySlotFactory.create_batch(3, activity=self.activity)
        self.activity.refresh_from_db()
        self.participant = None
    def test_join_all(self):
        """slot_selection='all': joining registers for every slot; join mail lists all slot times."""
        mail.outbox = []
        self.activity.slot_selection = 'all'
        self.activity.save()
        user = BlueBottleUserFactory.create()
        self.participant_factory.create(
            activity=self.activity,
            user=user,
            as_user=user
        )
        time.sleep(4)  # wait past the patched notification delays
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            f'A new participant has joined your activity "{self.activity.title}" 🎉'
        )
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have joined the activity "{self.activity.title}"'
        )
        for slot in self.slots:
            # The joined mail renders each slot as "<date> <start> - <end> (<tz>)".
            expected = '{} {} - {} ({})'.format(
                defaultfilters.date(slot.start),
                defaultfilters.time(slot.start.astimezone(get_current_timezone())),
                defaultfilters.time(slot.end.astimezone(get_current_timezone())),
                slot.start.astimezone(get_current_timezone()).strftime('%Z'),
            )
            self.assertTrue(expected in mail.outbox[1].body)
    def test_join_free(self):
        """slot_selection='free': mails mention each slot the participant registered for."""
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.activity,
            user=user,
            as_user=user
        )
        self.slot_participants = [
            SlotParticipantFactory.create(slot=slot, participant=participant)
            for slot in self.slots
        ]
        time.sleep(3)
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            f'A new participant has joined your activity "{self.activity.title}" 🎉'
        )
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have joined the activity "{self.activity.title}"'
        )
        for slot in self.slots:
            self.assertTrue(slot.title in mail.outbox[1].body)
    def test_join_free_review(self):
        """With review enabled, joining sends 'applied' mails instead of 'joined'."""
        self.activity.review = True
        self.activity.save()
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.activity,
            user=user,
            as_user=user
        )
        self.slot_participants = [
            SlotParticipantFactory.create(slot=slot, participant=participant)
            for slot in self.slots
        ]
        time.sleep(3)
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            f'You have a new participant for your activity "{self.activity.title}" 🎉'
        )
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have applied to the activity "{self.activity.title}"'
        )
    def test_change_free(self):
        """Withdrawing from some (not all) slots sends one 'changed' mail listing remaining slots."""
        self.test_join_free()
        time.sleep(3)
        mail.outbox = []
        for slot_participant in self.slot_participants[:-1]:
            slot_participant.states.withdraw(save=True)
        time.sleep(3)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            f'You have changed your application on the activity "{self.activity.title}"'
        )
        for slot in self.slots[:-1]:
            self.assertTrue(slot.title not in mail.outbox[0].body)
        self.assertTrue(self.slots[2].title in mail.outbox[0].body)
    def test_withdraw_free(self):
        """Withdrawing from all slots sends full withdrawal mails to owner and participant."""
        self.test_join_free()
        time.sleep(3)
        mail.outbox = []
        for slot_participant in self.slot_participants:
            slot_participant.states.withdraw(save=True)
        time.sleep(3)
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            f'A participant has withdrawn from your activity "{self.activity.title}"'
        )
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have withdrawn from the activity "{self.activity.title}"'
        )
class PeriodParticipantTriggerTestCase(ParticipantTriggerTestCase, TriggerTestCase):
    """Participant trigger tests for period activities, including the team flows."""
    factory = PeriodActivityFactory
    participant_factory = PeriodParticipantFactory
    def test_initial_added_with_team_through_admin(self):
        """Adding a member to an existing team via an admin mails owner and new member."""
        captain = BlueBottleUserFactory.create(email='captain@example.com')
        team = TeamFactory.create(
            activity=self.activity,
            owner=captain
        )
        PeriodParticipantFactory.create(
            user=captain,
            activity=self.activity,
            team=team
        )
        mail.outbox = []
        self.activity.team_activity = 'teams'
        self.activity.save()
        participant = self.participant_factory.build(
            activity=self.activity,
            user=BlueBottleUserFactory.create(),
            team=team
        )
        # Triggers run as the admin, so the "added" (not "joined") notifications fire.
        participant.execute_triggers(user=self.admin_user, send_messages=True)
        participant.save()
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[1].subject,
            'A participant has been added to your activity "{}" 🎉'.format(self.activity.title)
        )
        self.assertEqual(
            mail.outbox[0].subject,
            'You have been added to a team for "{}" 🎉'.format(self.activity.title)
        )
    def test_join(self):
        """Joining a no-review activity mails both parties; preparation succeeds."""
        mail.outbox = []
        participant = self.participant_factory.create(
            activity=self.activity,
            user=BlueBottleUserFactory.create(),
            as_relation='user'
        )
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            f'A new participant has joined your activity "{self.activity.title}" 🎉'
        )
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have joined the activity "{self.activity.title}"'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.activity.preparation
        )
        self.assertEqual(
            prep.status,
            'succeeded'
        )
    def test_team_join(self):
        """Joining a team activity creates an open team and sends team-registration mails."""
        self.activity.team_activity = Activity.TeamActivityChoices.teams
        self.activity.save()
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.activity,
            user=user,
            as_user=user
        )
        self.assertStatus(participant, 'accepted')
        self.assertStatus(participant.team, 'open')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            f'A new team has joined "{self.activity.title}"'
        )
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have registered your team for "{self.activity.title}"'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.activity.preparation
        )
        self.assertEqual(
            prep.status,
            'succeeded'
        )
    def test_apply(self):
        """Applying to a review activity sends 'applied' mails; preparation stays 'new'."""
        mail.outbox = []
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=BlueBottleUserFactory.create(),
            as_relation='user'
        )
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have a new participant for your activity "{self.review_activity.title}" 🎉'
        )
        self.assertEqual(
            mail.outbox[0].subject,
            f'You have applied to the activity "{self.review_activity.title}"'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.review_activity.preparation
        )
        self.assertEqual(
            prep.status,
            'new'
        )
    def test_team_apply(self):
        """Team applying to a review activity sends team-applied mails; preparation stays 'new'."""
        self.review_activity.team_activity = Activity.TeamActivityChoices.teams
        self.review_activity.save()
        mail.outbox = []
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=BlueBottleUserFactory.create(),
            as_relation='user'
        )
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[1].subject,
            f'You have registered your team for "{self.review_activity.title}"'
        )
        self.assertEqual(
            mail.outbox[0].subject,
            f'A new team has applied to "{self.review_activity.title}"'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.review_activity.preparation
        )
        self.assertEqual(
            prep.status,
            'new'
        )
    def test_team_accept(self):
        """Accepting a team application mails the captain and succeeds the preparation."""
        self.review_activity.team_activity = Activity.TeamActivityChoices.teams
        self.review_activity.save()
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=BlueBottleUserFactory.create(),
            as_relation='user'
        )
        mail.outbox = []
        participant.states.accept(save=True)
        self.assertEqual(participant.status, 'accepted')
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(
            mail.outbox[0].subject,
            'Your team has been accepted for "{}"'.format(
                self.review_activity.title
            )
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.review_activity.preparation
        )
        self.assertEqual(
            prep.status,
            'succeeded'
        )
    def test_no_review_succeed(self):
        """Joining an expired activity succeeds it; contribution_type is 'period'."""
        self.activity.deadline = date.today() - timedelta(days=1)
        self.activity.save()
        self.assertEqual(self.activity.status, 'expired')
        participant = self.participant_factory.create(activity=self.activity)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'succeeded')
        self.assertEqual(
            participant.contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'succeeded'
        )
        self.assertEqual(
            participant.contributions.
            exclude(timecontribution__contribution_type='preparation').get().contribution_type,
            'period'
        )
    def test_stop(self):
        """Stopping a participant on a started activity sends the 'successful' mail."""
        participant = self.participant_factory.create(activity=self.activity)
        self.activity.start = date.today() - timedelta(days=1)
        self.activity.save()
        participant.states.stop(save=True)
        self.assertEqual(
            mail.outbox[-1].subject,
            'Your contribution to the activity "{}" is successful 🎉'.format(self.activity.title)
        )
    def test_join_participant(self):
        """Joining as oneself fires the new/joined notification effects."""
        user = BlueBottleUserFactory.create()
        self.model = self.participant_factory.build(
            activity=self.activity,
            user=user
        )
        with self.execute(user=user):
            self.assertNotificationEffect(NewParticipantNotification)
            self.assertNotificationEffect(ParticipantJoinedNotification)
    def test_add_participant(self):
        """Being added by staff fires the 'added' notifications instead of 'joined'."""
        user = BlueBottleUserFactory.create()
        self.model = self.participant_factory.build(
            activity=self.activity,
            user=user
        )
        staff = BlueBottleUserFactory.create(is_staff=True)
        with self.execute(user=staff):
            self.assertNotificationEffect(ParticipantAddedOwnerNotification)
            self.assertNotificationEffect(ParticipantAddedNotification)
    def test_start_team(self):
        """Starting a team (no review) fires none of the individual join notifications."""
        self.activity.team_activity = 'teams'
        self.activity.save()
        user = BlueBottleUserFactory.create()
        self.model = self.participant_factory.build(
            activity=self.activity,
            user=user
        )
        with self.execute(user=user):
            self.assertNoNotificationEffect(NewParticipantNotification)
            self.assertNoNotificationEffect(TeamParticipantJoinedNotification)
            self.assertNoNotificationEffect(ParticipantJoinedNotification)
    def test_apply_team(self):
        """Applying with a team (review on) fires only the team-applied notification."""
        self.activity.team_activity = 'teams'
        self.activity.review = True
        self.activity.save()
        user = BlueBottleUserFactory.create()
        self.model = self.participant_factory.build(
            activity=self.activity,
            user=user
        )
        with self.execute(user=user):
            self.assertNotificationEffect(TeamParticipantAppliedNotification)
            self.assertNoNotificationEffect(ParticipantJoinedNotification)
    def test_join_team_participant(self):
        """Joining via a captain's invite fires team-member messages, not participant ones."""
        self.activity.team_activity = 'teams'
        self.activity.save()
        user = BlueBottleUserFactory.create()
        captain = self.participant_factory.create(
            activity=self.activity,
            user=BlueBottleUserFactory.create()
        )
        self.model = self.participant_factory.build(
            accepted_invite=captain.invite,
            activity=self.activity,
            user=user
        )
        with self.execute(user=user, send_messages=True):
            self.assertNoNotificationEffect(NewParticipantNotification)
            self.assertNoNotificationEffect(ParticipantJoinedNotification)
            self.assertNoNotificationEffect(TeamParticipantJoinedNotification)
            self.assertNotificationEffect(TeamMemberJoinedNotification)
            self.assertNotificationEffect(TeamMemberAddedMessage)
    def test_remove_participant(self):
        """Removing an accepted participant notifies participant and owner."""
        self.model = self.participant_factory.create(
            activity=self.activity,
            status='accepted'
        )
        self.model.states.remove()
        with self.execute():
            self.assertNotificationEffect(ParticipantRemovedNotification)
            self.assertNotificationEffect(ParticipantRemovedOwnerNotification)
    def test_withdraw_team_participant(self):
        """A team member withdrawing notifies the team, not the activity owner."""
        self.activity.team_activity = 'teams'
        captain = BlueBottleUserFactory.create()
        team = TeamFactory.create(
            owner=captain,
            activity=self.activity
        )
        self.model = self.participant_factory.create(
            activity=self.activity,
            team=team,
            status='accepted'
        )
        self.model.states.withdraw()
        with self.execute():
            self.assertNoNotificationEffect(ParticipantWithdrewNotification)
            self.assertNotificationEffect(TeamMemberWithdrewMessage)
            self.assertNotificationEffect(ParticipantWithdrewConfirmationNotification)
    def test_remove_team_participant(self):
        """Removing a team member notifies the member and the team, not the owner."""
        self.activity.team_activity = 'teams'
        self.activity.save()
        team = TeamFactory.create(
            owner=BlueBottleUserFactory.create(),
            activity=self.activity
        )
        self.model = self.participant_factory.create(
            activity=self.activity,
            team=team,
            status='accepted'
        )
        self.model.states.remove()
        with self.execute():
            self.assertNotificationEffect(ParticipantRemovedNotification)
            self.assertNotificationEffect(TeamMemberRemovedMessage)
            self.assertNoNotificationEffect(ParticipantRemovedOwnerNotification)
    def test_remove_team_participant_by_captain(self):
        """When the captain removes a member, the captain is not notified about their own action."""
        self.activity.team_activity = 'teams'
        self.activity.save()
        captain = BlueBottleUserFactory.create()
        team = TeamFactory.create(
            owner=captain,
            activity=self.activity
        )
        self.model = self.participant_factory.create(
            activity=self.activity,
            team=team,
            status='accepted'
        )
        self.model.states.remove()
        with self.execute(user=captain):
            self.assertNotificationEffect(ParticipantRemovedNotification)
            self.assertNoNotificationEffect(TeamMemberRemovedMessage)
            self.assertNoNotificationEffect(ParticipantRemovedOwnerNotification)
    def test_initial_no_review(self):
        """Period activity: joining accepts immediately; contribution already 'succeeded'."""
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.activity,
            user=user,
            as_user=user
        )
        self.assertEqual(participant.status, 'accepted')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[0].subject,
            'A new participant has joined your activity "{}" 🎉'.format(self.activity.title)
        )
        self.assertTrue(self.activity.followers.filter(user=participant.user).exists())
        self.assertEqual(
            self.activity.accepted_participants.get().
            contributions.exclude(timecontribution__contribution_type='preparation').get().status,
            'succeeded'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.activity.preparation
        )
        self.assertEqual(
            prep.status,
            'succeeded'
        )
    def test_initial_review(self):
        """Period activity with review: participant stays 'new' but contribution is 'succeeded'."""
        mail.outbox = []
        user = BlueBottleUserFactory.create()
        participant = self.participant_factory.create(
            activity=self.review_activity,
            user=user,
            as_user=user
        )
        self.assertEqual(participant.status, 'new')
        self.assertEqual(len(mail.outbox), 2)
        self.assertEqual(
            mail.outbox[1].subject,
            'You have a new participant for your activity "{}" 🎉'.format(
                self.review_activity.title
            )
        )
        self.assertTrue(self.review_activity.followers.filter(user=participant.user).exists())
        self.assertEqual(
            participant.contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'succeeded'
        )
        prep = participant.preparation_contributions.first()
        self.assertEqual(
            prep.value,
            self.review_activity.preparation
        )
        self.assertEqual(
            prep.status,
            'new'
        )
    def test_reaccept(self):
        """Re-accepting after removal succeeds both the time and preparation contributions."""
        self.test_remove()
        self.participants[0].states.accept(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.assertEqual(
            self.participants[0].contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'succeeded'
        )
        self.assertEqual(
            self.participants[0].contributions.
            filter(timecontribution__contribution_type='preparation').get().status,
            'succeeded'
        )
        self.assertTrue(self.activity.followers.filter(user=self.participants[0].user).exists())
    def test_reapply(self):
        """Reapplying after withdrawal succeeds both the time and preparation contributions."""
        self.test_withdraw()
        self.participants[0].states.reapply(save=True)
        self.activity.refresh_from_db()
        self.assertEqual(self.activity.status, 'full')
        self.assertEqual(
            self.participants[0].contributions.
            exclude(timecontribution__contribution_type='preparation').get().status,
            'succeeded'
        )
        self.assertEqual(
            self.participants[0].contributions.
            filter(timecontribution__contribution_type='preparation').get().status,
            'succeeded'
        )
        self.assertTrue(self.activity.followers.filter(user=self.participants[0].user).exists())
class AllSlotParticipantTriggerTestCase(BluebottleTestCase):
    """Slot-participant triggers when slot_selection='all' (participants join every slot)."""
    def setUp(self):
        self.user = BlueBottleUserFactory.create()
        self.initiative = InitiativeFactory.create()
        # NOTE(review): capacity=True looks odd for what is presumably an integer
        # field — confirm whether a real capacity value was intended here.
        self.activity = DateActivityFactory.create(
            slots=[],
            capacity=True,
            slot_selection='all',
            initiative=self.initiative
        )
        self.slot1 = DateActivitySlotFactory.create(activity=self.activity)
        self.slot2 = DateActivitySlotFactory.create(activity=self.activity)
        self.initiative.states.submit(save=True)
        self.initiative.states.approve(save=True)
        self.activity.refresh_from_db()
        # With slot_selection='all', creating the participant auto-registers them
        # for both slots (see test_apply).
        self.participant = DateParticipantFactory.create(
            activity=self.activity
        )
        self.slot1_participant = self.participant.slot_participants.filter(slot=self.slot1).first()
        self.slot2_participant = self.participant.slot_participants.filter(slot=self.slot2).first()
        self.contribution1 = self.slot1_participant.contributions.first()
        self.contribution2 = self.slot2_participant.contributions.first()
    def assertStatus(self, obj, status):
        """Assert the fresh database status of `obj`."""
        obj.refresh_from_db()
        self.assertEqual(obj.status, status)
    def test_apply(self):
        """Joining registers for every slot with a 'new' contribution each."""
        self.assertStatus(self.slot1_participant, 'registered')
        self.assertStatus(self.contribution1, 'new')
        self.assertStatus(self.slot2_participant, 'registered')
        self.assertStatus(self.contribution2, 'new')
    def test_remove_participant(self):
        """Removing the participant fails all slot contributions but keeps registrations."""
        self.participant.states.remove(save=True)
        self.assertStatus(self.slot1_participant, 'registered')
        self.assertStatus(self.contribution1, 'failed')
        self.assertStatus(self.slot2_participant, 'registered')
        self.assertStatus(self.contribution2, 'failed')
    def test_remove_participant_from_slot(self):
        """Removing from a single slot fails only that slot's contribution."""
        self.slot1_participant.states.remove(save=True)
        self.assertEqual(self.slot1_participant.status, 'removed')
        self.assertStatus(self.contribution1, 'failed')
    def test_withdraw_from_slot(self):
        """Withdrawing from a single slot fails only that slot's contribution."""
        self.slot1_participant.states.withdraw(save=True)
        self.assertStatus(self.slot1_participant, 'withdrawn')
        self.assertStatus(self.contribution1, 'failed')
    def test_cancel_slot(self):
        """Cancelling a slot fails its contributions."""
        self.slot1.states.cancel(save=True)
        self.assertStatus(self.slot1_participant, 'registered')
        self.assertStatus(self.contribution1, 'failed')
    def test_finish_slot(self):
        """Finishing a slot succeeds its contributions."""
        self.slot1.states.finish(save=True)
        self.assertStatus(self.slot1_participant, 'registered')
        self.assertStatus(self.contribution1, 'succeeded')
    def test_reschedule_slot(self):
        """Rescheduling a finished slot resets its contributions to 'new'."""
        self.slot1.states.finish(save=True)
        self.assertStatus(self.slot1_participant, 'registered')
        self.assertStatus(self.contribution1, 'succeeded')
        self.slot1.states.reschedule(save=True)
        self.assertStatus(self.slot1_participant, 'registered')
        self.assertStatus(self.contribution1, 'new')
    def test_cancel_activity(self):
        """Cancelling the activity fails the slot contributions."""
        self.activity.states.cancel(save=True)
        self.assertStatus(self.slot1_participant, 'registered')
        self.assertStatus(self.contribution1, 'failed')
class FreeSlotParticipantTriggerTestCase(BluebottleTestCase):
def setUp(self):
self.user = BlueBottleUserFactory.create()
self.initiative = InitiativeFactory.create()
self.activity = DateActivityFactory.create(
slots=[],
capacity=None,
slot_selection='free',
initiative=self.initiative
)
self.slot1 = DateActivitySlotFactory.create(
activity=self.activity,
capacity=2
)
self.slot2 = DateActivitySlotFactory.create(
activity=self.activity,
capacity=1
)
self.initiative.states.submit(save=True)
self.initiative.states.approve(save=True)
self.activity.refresh_from_db()
self.participant = DateParticipantFactory.create(activity=self.activity)
def assertStatus(self, obj, status):
obj.refresh_from_db()
self.assertEqual(obj.status, status)
def test_apply(self):
self.assertEqual(
self.participant.slot_participants.count(),
0
)
slot_participant = SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
self.assertEqual(
self.participant.slot_participants.count(),
1
)
self.assertStatus(slot_participant, 'registered')
def test_withdraw_from_slot(self):
slot_participant = SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
slot_participant.states.withdraw(save=True)
self.assertStatus(slot_participant, 'withdrawn')
def test_withdraw_from_all_slots(self):
slot_participant1 = SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
slot_participant2 = SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
slot_participant1.states.withdraw(save=True)
self.assertStatus(self.participant, 'accepted')
self.assertStatus(slot_participant1, 'withdrawn')
slot_participant2.states.withdraw(save=True)
self.assertStatus(self.participant, 'withdrawn')
self.assertStatus(slot_participant2, 'withdrawn')
slot_participant1.states.reapply(save=True)
self.assertStatus(self.participant, 'accepted')
self.assertStatus(slot_participant1, 'registered')
slot_participant2.states.reapply(save=True)
self.assertStatus(self.participant, 'accepted')
self.assertStatus(slot_participant2, 'registered')
def test_remove_from_all_slots(self):
slot_participant1 = SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
slot_participant2 = SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
slot_participant1.states.remove(save=True)
self.assertStatus(self.participant, 'accepted')
self.assertStatus(slot_participant1, 'removed')
slot_participant2.states.remove(save=True)
self.assertStatus(self.participant, 'rejected')
self.assertStatus(slot_participant2, 'removed')
slot_participant1.states.accept(save=True)
self.assertStatus(self.participant, 'accepted')
self.assertStatus(slot_participant1, 'registered')
slot_participant2.states.accept(save=True)
self.assertStatus(self.participant, 'accepted')
self.assertStatus(slot_participant2, 'registered')
def test_fill_slot(self):
SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
self.assertStatus(self.slot1, 'open')
participant2 = DateParticipantFactory.create(activity=self.activity)
SlotParticipantFactory.create(slot=self.slot1, participant=participant2)
self.assertStatus(self.slot1, 'full')
self.assertStatus(self.activity, 'open')
SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
self.assertStatus(self.slot2, 'full')
self.assertStatus(self.activity, 'full')
def test_fill_slot_ignores_activity_capacity(self):
self.activity.capacity = 1
self.activity.save()
SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
self.assertStatus(self.slot1, 'open')
self.assertStatus(self.activity, 'open')
def test_unfill_slot(self):
self.slot_part = SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
self.assertStatus(self.slot2, 'full')
self.assertStatus(self.activity, 'open')
SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
participant2 = DateParticipantFactory.create(activity=self.activity)
SlotParticipantFactory.create(slot=self.slot1, participant=participant2)
self.assertStatus(self.slot1, 'full')
self.assertStatus(self.activity, 'full')
self.slot_part.states.withdraw(save=True)
self.assertStatus(self.slot2, 'open')
self.assertStatus(self.activity, 'open')
def test_extend_slot_unfills(self):
self.assertStatus(self.activity, 'open')
SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
participant2 = DateParticipantFactory.create(activity=self.activity)
SlotParticipantFactory.create(slot=self.slot1, participant=participant2)
participant2 = DateParticipantFactory.create(activity=self.activity)
SlotParticipantFactory.create(slot=self.slot2, participant=participant2)
self.assertStatus(self.slot1, 'full')
self.assertStatus(self.slot2, 'full')
self.assertStatus(self.activity, 'full')
self.slot1.capacity = 10
self.slot1.save()
self.assertStatus(self.slot1, 'open')
self.assertStatus(self.activity, 'open')
def test_cancel_open_slot_fills(self):
self.assertStatus(self.activity, 'open')
self.assertStatus(self.slot1, 'open')
SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
self.assertStatus(self.slot1, 'open')
self.assertStatus(self.slot2, 'full')
self.assertStatus(self.activity, 'open')
self.slot1.states.cancel(save=True)
self.assertStatus(self.activity, 'full')
self.slot3 = DateActivitySlotFactory.create(activity=self.activity)
self.assertStatus(self.activity, 'open')
self.slot3.delete()
self.assertStatus(self.activity, 'full')
def test_fill_new_slot(self):
self.slot_part = SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
self.assertStatus(self.slot2, 'full')
self.assertStatus(self.activity, 'open')
SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
participant2 = DateParticipantFactory.create(activity=self.activity)
SlotParticipantFactory.create(slot=self.slot1, participant=participant2)
self.assertStatus(self.slot1, 'full')
self.assertStatus(self.activity, 'full')
new_slot = DateActivitySlotFactory.create(
activity=self.activity,
capacity=1
)
self.assertStatus(self.activity, 'open')
new_slot.delete()
self.assertStatus(self.activity, 'full')
def test_expire_new_slot(self):
self.participant.delete()
self.slot1.start = now() - timedelta(days=1)
self.slot1.save()
self.assertStatus(self.slot1, 'finished')
self.assertStatus(self.activity, 'open')
self.slot2.start = now() - timedelta(days=1)
self.slot2.save()
self.assertStatus(self.slot2, 'finished')
self.assertStatus(self.activity, 'expired')
new_slot = DateActivitySlotFactory.create(
activity=self.activity,
capacity=1
)
self.assertStatus(self.activity, 'open')
new_slot.delete()
self.assertStatus(self.activity, 'expired')
def test_succeed_new_slot(self):
SlotParticipantFactory.create(slot=self.slot1, participant=self.participant)
self.slot1.start = now() - timedelta(days=1)
self.slot1.save()
self.assertStatus(self.slot1, 'finished')
self.assertStatus(self.activity, 'open')
SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
self.slot2.start = now() - timedelta(days=1)
self.slot2.save()
self.assertStatus(self.slot2, 'finished')
self.assertStatus(self.activity, 'succeeded')
new_slot = DateActivitySlotFactory.create(
activity=self.activity,
capacity=1
)
self.assertStatus(self.activity, 'open')
new_slot.delete()
self.assertStatus(self.activity, 'succeeded')
    def test_refill_slot(self):
        """Re-applying a withdrawn slot participation fills the slot again."""
        # Builds on test_unfill_slot (defined earlier in this class, outside
        # this view), which leaves self.slot_part withdrawn and slot2 open.
        self.test_unfill_slot()
        self.slot_part.states.reapply(save=True)
        self.assertStatus(self.slot2, 'full')
    def test_unfill_slot_remove(self):
        """Removing a participant from a full slot re-opens the slot."""
        self.slot_part = SlotParticipantFactory.create(slot=self.slot2, participant=self.participant)
        self.assertStatus(self.slot2, 'full')
        self.slot_part.states.remove(save=True)
        self.assertStatus(self.slot2, 'open')
    def test_refill_slot_remove(self):
        """Accepting a previously removed participant fills the slot again."""
        self.test_unfill_slot_remove()
        self.slot_part.states.accept(save=True)
        self.assertStatus(self.slot2, 'full')
class TeamSlotTriggerTestCase(TriggerTestCase):
    """Triggers fired when a TeamSlot on a team period activity is created
    or its start date changes."""
    def setUp(self):
        super().setUp()
        self.user = BlueBottleUserFactory()
        self.initiative = InitiativeFactory(owner=self.user)
        self.activity = PeriodActivityFactory.create(
            initiative=self.initiative,
            team_activity='teams',
            status='approved',
            review=False)
        self.participant = PeriodParticipantFactory.create(
            user=self.user,
            activity=self.activity
        )
    def assertStatus(self, obj, status):
        # Re-read from the database so status changes applied by triggers
        # (not on this in-memory instance) are visible.
        obj.refresh_from_db()
        self.assertEqual(obj.status, status)
    def test_set_date(self):
        """Setting or moving a future slot date sends the 'changed'
        notification and keeps the slot open."""
        self.assertTrue(self.participant.team)
        start = now() + timedelta(days=4)
        self.model = TeamSlotFactory.build(
            team=self.participant.team,
            activity=self.activity,
            start=start,
            duration=timedelta(hours=2)
        )
        with self.execute():
            self.assertNotificationEffect(TeamSlotChangedNotification)
            self.assertEqual(self.model.status, 'open')
        self.model.start = now() + timedelta(days=1)
        with self.execute():
            self.assertNotificationEffect(TeamSlotChangedNotification)
            self.assertEqual(self.model.status, 'open')
    def test_change_date(self):
        """Moving the slot into the past finishes slot and team without a
        notification; moving it back to the future re-opens both."""
        self.assertTrue(self.participant.team)
        start = now() + timedelta(days=4)
        self.model = TeamSlotFactory.build(
            team=self.participant.team,
            activity=self.activity,
            start=start,
            duration=timedelta(hours=2)
        )
        self.model.start = now() - timedelta(days=1)
        with self.execute():
            self.assertNoNotificationEffect(TeamSlotChangedNotification)
            self.assertEqual(self.model.status, 'finished')
            self.assertEqual(self.model.team.status, 'finished')
        self.model.start = now() + timedelta(days=3)
        with self.execute():
            self.assertNotificationEffect(TeamSlotChangedNotification)
            self.assertEqual(self.model.status, 'open')
            self.assertEqual(self.model.team.status, 'open')
class TeamReviewTriggerTestCase(TriggerTestCase):
    """Triggers for participants on a team activity that requires review."""
    def setUp(self):
        super().setUp()
        self.initiator = BlueBottleUserFactory()
        self.user = BlueBottleUserFactory()
        self.initiative = InitiativeFactory(owner=self.initiator)
        self.activity = PeriodActivityFactory.create(
            initiative=self.initiative,
            team_activity='teams',
            status='approved',
            review=True
        )
        self.model = PeriodParticipantFactory.create(
            user=self.user,
            activity=self.activity,
            as_relation='user'
        )
    def assertStatus(self, obj, status):
        # Refresh from the database so trigger-driven changes are observed.
        obj.refresh_from_db()
        self.assertEqual(obj.status, status)
    def test_reject(self):
        """Rejecting the team captain notifies the captain audience only —
        not the regular participant or team-member audiences."""
        self.assertTrue(self.model.team)
        self.assertEqual(
            self.model.team.owner,
            self.user
        )
        self.model.states.reject()
        with self.execute():
            self.assertNoNotificationEffect(ParticipantRejectedNotification)
            self.assertNoNotificationEffect(TeamCancelledMessage)
            self.assertNotificationEffect(TeamCancelledTeamCaptainMessage)
| {
"content_hash": "a60af88c60dcd449030c044dfdb103c6",
"timestamp": "",
"source": "github",
"line_count": 2557,
"max_line_length": 107,
"avg_line_length": 35.059835745013686,
"alnum_prop": 0.6352512047117616,
"repo_name": "onepercentclub/bluebottle",
"id": "f92856350e3dd39516e79ab0926145a005e764df",
"size": "89713",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bluebottle/time_based/tests/test_triggers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "41694"
},
{
"name": "HTML",
"bytes": "246695"
},
{
"name": "Handlebars",
"bytes": "63"
},
{
"name": "JavaScript",
"bytes": "139123"
},
{
"name": "PHP",
"bytes": "35"
},
{
"name": "PLpgSQL",
"bytes": "1369882"
},
{
"name": "PostScript",
"bytes": "2927"
},
{
"name": "Python",
"bytes": "4983116"
},
{
"name": "Rich Text Format",
"bytes": "39109"
},
{
"name": "SCSS",
"bytes": "99555"
},
{
"name": "Shell",
"bytes": "3068"
},
{
"name": "Smarty",
"bytes": "3814"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import unittest
from django.conf.urls import url
from django.core.urlresolvers import reverse
from django.db import connection
from django.forms import EmailField, IntegerField
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.html import HTMLParseError, parse_html
from django.test.utils import CaptureQueriesContext, override_settings
from django.utils import six
from .models import Person
class SkippingTestCase(TestCase):
    def test_skip_unless_db_feature(self):
        """A test that might be skipped is actually called."""
        # Total hack, but it works: "__class__" is an attribute that exists
        # on every features object, so the decorator must not skip.
        @skipUnlessDBFeature("__class__")
        def decorated():
            raise ValueError
        with self.assertRaises(ValueError):
            decorated()
class SkippingClassTestCase(TestCase):
    def test_skip_class_unless_db_feature(self):
        """Class-level DB-feature skip decorators must defer skipping to run
        time, not raise SkipTest when the suite is assembled."""
        # "__class__" exists on every features object, so this class is run.
        @skipUnlessDBFeature("__class__")
        class NotSkippedTests(unittest.TestCase):
            def test_dummy(self):
                return
        # ... and for the same reason this class is skipped at run time.
        @skipIfDBFeature("__class__")
        class SkippedTests(unittest.TestCase):
            def test_will_be_skipped(self):
                self.fail("We should never arrive here.")
        test_suite = unittest.TestSuite()
        test_suite.addTest(NotSkippedTests('test_dummy'))
        try:
            test_suite.addTest(SkippedTests('test_will_be_skipped'))
        except unittest.SkipTest:
            self.fail("SkipTest should not be raised at this stage")
        # Both tests run; exactly one of them is recorded as skipped.
        result = unittest.TextTestRunner(stream=six.StringIO()).run(test_suite)
        self.assertEqual(result.testsRun, 2)
        self.assertEqual(len(result.skipped), 1)
@override_settings(ROOT_URLCONF='test_utils.urls')
class AssertNumQueriesTests(TestCase):
    def test_assert_num_queries(self):
        # assertNumQueries must propagate exceptions raised by the callable
        # under test instead of swallowing them.
        def test_func():
            raise ValueError
        self.assertRaises(ValueError, self.assertNumQueries, 2, test_func)
    def test_assert_num_queries_with_client(self):
        """Queries issued through the test client are counted as well."""
        person = Person.objects.create(name='test')
        self.assertNumQueries(
            1,
            self.client.get,
            "/test_utils/get_person/%s/" % person.pk
        )
        self.assertNumQueries(
            1,
            self.client.get,
            "/test_utils/get_person/%s/" % person.pk
        )
        def test_func():
            self.client.get("/test_utils/get_person/%s/" % person.pk)
            self.client.get("/test_utils/get_person/%s/" % person.pk)
        self.assertNumQueries(2, test_func)
class AssertQuerysetEqualTests(TestCase):
    def setUp(self):
        self.p1 = Person.objects.create(name='p1')
        self.p2 = Person.objects.create(name='p2')
    def test_ordered(self):
        self.assertQuerysetEqual(
            Person.objects.all().order_by('name'),
            [repr(self.p1), repr(self.p2)]
        )
    def test_unordered(self):
        # with ordered=False the expected values may appear in any order
        self.assertQuerysetEqual(
            Person.objects.all().order_by('name'),
            [repr(self.p2), repr(self.p1)],
            ordered=False
        )
    def test_transform(self):
        # a custom transform replaces the default repr() comparison
        self.assertQuerysetEqual(
            Person.objects.all().order_by('name'),
            [self.p1.pk, self.p2.pk],
            transform=lambda x: x.pk
        )
    def test_undefined_order(self):
        # Using an unordered queryset with more than one ordered value
        # is an error.
        with self.assertRaises(ValueError):
            self.assertQuerysetEqual(
                Person.objects.all(),
                [repr(self.p1), repr(self.p2)]
            )
        # No error for one value.
        self.assertQuerysetEqual(
            Person.objects.filter(name='p1'),
            [repr(self.p1)]
        )
@override_settings(ROOT_URLCONF='test_utils.urls')
class CaptureQueriesContextManagerTests(TestCase):
    def setUp(self):
        self.person_pk = six.text_type(Person.objects.create(name='test').pk)
    def test_simple(self):
        with CaptureQueriesContext(connection) as captured_queries:
            Person.objects.get(pk=self.person_pk)
        self.assertEqual(len(captured_queries), 1)
        self.assertIn(self.person_pk, captured_queries[0]['sql'])
        # no queries in the block -> nothing captured
        with CaptureQueriesContext(connection) as captured_queries:
            pass
        self.assertEqual(0, len(captured_queries))
    def test_within(self):
        # captured queries are already visible inside the `with` block
        with CaptureQueriesContext(connection) as captured_queries:
            Person.objects.get(pk=self.person_pk)
            self.assertEqual(len(captured_queries), 1)
            self.assertIn(self.person_pk, captured_queries[0]['sql'])
    def test_nested(self):
        # an inner context only sees its own queries; the outer sees both
        with CaptureQueriesContext(connection) as captured_queries:
            Person.objects.count()
            with CaptureQueriesContext(connection) as nested_captured_queries:
                Person.objects.count()
        self.assertEqual(1, len(nested_captured_queries))
        self.assertEqual(2, len(captured_queries))
    def test_failure(self):
        # exceptions raised inside the block must propagate out
        with self.assertRaises(TypeError):
            with CaptureQueriesContext(connection):
                raise TypeError
    def test_with_client(self):
        with CaptureQueriesContext(connection) as captured_queries:
            self.client.get("/test_utils/get_person/%s/" % self.person_pk)
        self.assertEqual(len(captured_queries), 1)
        self.assertIn(self.person_pk, captured_queries[0]['sql'])
        with CaptureQueriesContext(connection) as captured_queries:
            self.client.get("/test_utils/get_person/%s/" % self.person_pk)
        self.assertEqual(len(captured_queries), 1)
        self.assertIn(self.person_pk, captured_queries[0]['sql'])
        with CaptureQueriesContext(connection) as captured_queries:
            self.client.get("/test_utils/get_person/%s/" % self.person_pk)
            self.client.get("/test_utils/get_person/%s/" % self.person_pk)
        self.assertEqual(len(captured_queries), 2)
        self.assertIn(self.person_pk, captured_queries[0]['sql'])
        self.assertIn(self.person_pk, captured_queries[1]['sql'])
@override_settings(ROOT_URLCONF='test_utils.urls')
class AssertNumQueriesContextManagerTests(TestCase):
    def test_simple(self):
        with self.assertNumQueries(0):
            pass
        with self.assertNumQueries(1):
            Person.objects.count()
        with self.assertNumQueries(2):
            Person.objects.count()
            Person.objects.count()
    def test_failure(self):
        # a count mismatch reports both the expectation and the captured SQL
        with self.assertRaises(AssertionError) as exc_info:
            with self.assertNumQueries(2):
                Person.objects.count()
        self.assertIn("1 queries executed, 2 expected", str(exc_info.exception))
        self.assertIn("Captured queries were", str(exc_info.exception))
        # unrelated exceptions propagate unchanged
        with self.assertRaises(TypeError):
            with self.assertNumQueries(4000):
                raise TypeError
    def test_with_client(self):
        person = Person.objects.create(name="test")
        with self.assertNumQueries(1):
            self.client.get("/test_utils/get_person/%s/" % person.pk)
        with self.assertNumQueries(1):
            self.client.get("/test_utils/get_person/%s/" % person.pk)
        with self.assertNumQueries(2):
            self.client.get("/test_utils/get_person/%s/" % person.pk)
            self.client.get("/test_utils/get_person/%s/" % person.pk)
@override_settings(ROOT_URLCONF='test_utils.urls')
class AssertTemplateUsedContextManagerTests(TestCase):
    def test_usage(self):
        with self.assertTemplateUsed('template_used/base.html'):
            render_to_string('template_used/base.html')
        with self.assertTemplateUsed(template_name='template_used/base.html'):
            render_to_string('template_used/base.html')
        # templates pulled in indirectly (include/extends) count as used
        with self.assertTemplateUsed('template_used/base.html'):
            render_to_string('template_used/include.html')
        with self.assertTemplateUsed('template_used/base.html'):
            render_to_string('template_used/extends.html')
        with self.assertTemplateUsed('template_used/base.html'):
            render_to_string('template_used/base.html')
            render_to_string('template_used/base.html')
    def test_nested_usage(self):
        # nested assertion contexts track usage independently
        with self.assertTemplateUsed('template_used/base.html'):
            with self.assertTemplateUsed('template_used/include.html'):
                render_to_string('template_used/include.html')
        with self.assertTemplateUsed('template_used/extends.html'):
            with self.assertTemplateUsed('template_used/base.html'):
                render_to_string('template_used/extends.html')
        with self.assertTemplateUsed('template_used/base.html'):
            with self.assertTemplateUsed('template_used/alternative.html'):
                render_to_string('template_used/alternative.html')
            render_to_string('template_used/base.html')
        with self.assertTemplateUsed('template_used/base.html'):
            render_to_string('template_used/extends.html')
            with self.assertTemplateNotUsed('template_used/base.html'):
                render_to_string('template_used/alternative.html')
            render_to_string('template_used/base.html')
    def test_not_used(self):
        with self.assertTemplateNotUsed('template_used/base.html'):
            pass
        with self.assertTemplateNotUsed('template_used/alternative.html'):
            pass
    def test_error_message(self):
        # the failure message starts with the expected template name and, if
        # other templates were rendered, lists them as well
        with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html'):
            with self.assertTemplateUsed('template_used/base.html'):
                pass
        with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html'):
            with self.assertTemplateUsed(template_name='template_used/base.html'):
                pass
        with six.assertRaisesRegex(self, AssertionError, r'^template_used/base\.html.*template_used/alternative\.html$'):
            with self.assertTemplateUsed('template_used/base.html'):
                render_to_string('template_used/alternative.html')
        with self.assertRaises(AssertionError) as cm:
            response = self.client.get('/test_utils/no_template_used/')
            self.assertTemplateUsed(response, 'template_used/base.html')
        self.assertEqual(cm.exception.args[0], "No templates used to render the response")
    def test_failure(self):
        with self.assertRaises(TypeError):
            with self.assertTemplateUsed():
                pass
        with self.assertRaises(AssertionError):
            with self.assertTemplateUsed(''):
                pass
        with self.assertRaises(AssertionError):
            with self.assertTemplateUsed(''):
                render_to_string('template_used/base.html')
        with self.assertRaises(AssertionError):
            with self.assertTemplateUsed(template_name=''):
                pass
        with self.assertRaises(AssertionError):
            with self.assertTemplateUsed('template_used/base.html'):
                render_to_string('template_used/alternative.html')
class HTMLEqualTests(TestCase):
    """Tests for the parse_html helper and the assertHTMLEqual /
    assertHTMLNotEqual / assertContains(html=True) assertions."""
    def test_html_parser(self):
        element = parse_html('<div><p>Hello</p></div>')
        self.assertEqual(len(element.children), 1)
        self.assertEqual(element.children[0].name, 'p')
        self.assertEqual(element.children[0].children[0], 'Hello')
        # unclosed and attribute-only tags still parse
        parse_html('<p>')
        parse_html('<p attr>')
        dom = parse_html('<p>foo')
        self.assertEqual(len(dom.children), 1)
        self.assertEqual(dom.name, 'p')
        self.assertEqual(dom[0], 'foo')
    def test_parse_html_in_script(self):
        parse_html('<script>var a = "<p" + ">";</script>')
        parse_html('''
        <script>
            var js_sha_link='<p>***</p>';
        </script>
        ''')
        # script content will be parsed to text
        dom = parse_html('''
            <script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script>
            ''')
        self.assertEqual(len(dom.children), 1)
        self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>")
    def test_self_closing_tags(self):
        self_closing_tags = ('br', 'hr', 'input', 'img', 'meta', 'spacer',
            'link', 'frame', 'base', 'col')
        # both "<tag>" and "<tag />" forms parse as a void element
        for tag in self_closing_tags:
            dom = parse_html('<p>Hello <%s> world</p>' % tag)
            self.assertEqual(len(dom.children), 3)
            self.assertEqual(dom[0], 'Hello')
            self.assertEqual(dom[1].name, tag)
            self.assertEqual(dom[2], 'world')
            dom = parse_html('<p>Hello <%s /> world</p>' % tag)
            self.assertEqual(len(dom.children), 3)
            self.assertEqual(dom[0], 'Hello')
            self.assertEqual(dom[1].name, tag)
            self.assertEqual(dom[2], 'world')
    def test_simple_equal_html(self):
        self.assertHTMLEqual('', '')
        self.assertHTMLEqual('<p></p>', '<p></p>')
        self.assertHTMLEqual('<p></p>', ' <p> </p> ')
        self.assertHTMLEqual(
            '<div><p>Hello</p></div>',
            '<div><p>Hello</p></div>')
        self.assertHTMLEqual(
            '<div><p>Hello</p></div>',
            '<div> <p>Hello</p> </div>')
        self.assertHTMLEqual(
            '<div>\n<p>Hello</p></div>',
            '<div><p>Hello</p></div>\n')
        self.assertHTMLEqual(
            '<div><p>Hello\nWorld !</p></div>',
            '<div><p>Hello World\n!</p></div>')
        # NOTE(review): this pair repeats the previous one verbatim — the
        # upstream original likely differed by a non-ASCII space lost in
        # transcoding; verify against the upstream source.
        self.assertHTMLEqual(
            '<div><p>Hello\nWorld !</p></div>',
            '<div><p>Hello World\n!</p></div>')
        self.assertHTMLEqual(
            '<p>Hello World !</p>',
            '<p>Hello World\n\n!</p>')
        self.assertHTMLEqual('<p> </p>', '<p></p>')
        self.assertHTMLEqual('<p/>', '<p></p>')
        self.assertHTMLEqual('<p />', '<p></p>')
        self.assertHTMLEqual('<input checked>', '<input checked="checked">')
        self.assertHTMLEqual('<p>Hello', '<p> Hello')
        self.assertHTMLEqual('<p>Hello</p>World', '<p>Hello</p> World')
    def test_ignore_comments(self):
        self.assertHTMLEqual(
            '<div>Hello<!-- this is a comment --> World!</div>',
            '<div>Hello World!</div>')
    def test_unequal_html(self):
        self.assertHTMLNotEqual('<p>Hello</p>', '<p>Hello!</p>')
        self.assertHTMLNotEqual('<p>foobar</p>', '<p>foo bar</p>')
        # NOTE(review): several literals below look identical on both sides —
        # the originals presumably contained HTML entities (e.g. &nbsp;)
        # lost in transcoding; verify against the upstream source.
        self.assertHTMLNotEqual('<p>foo bar</p>', '<p>foo bar</p>')
        self.assertHTMLNotEqual('<p>foo nbsp</p>', '<p>foo </p>')
        self.assertHTMLNotEqual('<p>foo #20</p>', '<p>foo </p>')
        self.assertHTMLNotEqual(
            '<p><span>Hello</span><span>World</span></p>',
            '<p><span>Hello</span>World</p>')
        self.assertHTMLNotEqual(
            '<p><span>Hello</span>World</p>',
            '<p><span>Hello</span><span>World</span></p>')
    def test_attributes(self):
        # attribute order and quoting style are irrelevant
        self.assertHTMLEqual(
            '<input type="text" id="id_name" />',
            '<input id="id_name" type="text" />')
        self.assertHTMLEqual(
            '''<input type='text' id="id_name" />''',
            '<input id="id_name" type="text" />')
        self.assertHTMLNotEqual(
            '<input type="text" id="id_name" />',
            '<input type="password" id="id_name" />')
    def test_complex_examples(self):
        self.assertHTMLEqual(
            """<tr><th><label for="id_first_name">First name:</label></th>
<td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th>
<td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th>
<td><input type="text" value="1940-10-9" name="birthday" id="id_birthday" /></td></tr>""",
            """
        <tr><th>
            <label for="id_first_name">First name:</label></th><td><input type="text" name="first_name" value="John" id="id_first_name" />
        </td></tr>
        <tr><th>
            <label for="id_last_name">Last name:</label></th><td><input type="text" name="last_name" value="Lennon" id="id_last_name" />
        </td></tr>
        <tr><th>
            <label for="id_birthday">Birthday:</label></th><td><input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
        </td></tr>
        """)
        self.assertHTMLEqual(
            """<!DOCTYPE html>
        <html>
        <head>
            <link rel="stylesheet">
            <title>Document</title>
            <meta attribute="value">
        </head>
        <body>
            <p>
            This is a valid paragraph
            <div> this is a div AFTER the p</div>
        </body>
        </html>""", """
        <html>
        <head>
            <link rel="stylesheet">
            <title>Document</title>
            <meta attribute="value">
        </head>
        <body>
            <p> This is a valid paragraph
            <!-- browsers would close the p tag here -->
            <div> this is a div AFTER the p</div>
            </p> <!-- this is invalid HTML parsing, but it should make no
                difference in most cases -->
        </body>
        </html>""")
    def test_html_contain(self):
        # equal html contains each other
        dom1 = parse_html('<p>foo')
        dom2 = parse_html('<p>foo</p>')
        self.assertTrue(dom1 in dom2)
        self.assertTrue(dom2 in dom1)
        dom2 = parse_html('<div><p>foo</p></div>')
        self.assertTrue(dom1 in dom2)
        self.assertTrue(dom2 not in dom1)
        self.assertFalse('<p>foo</p>' in dom2)
        self.assertTrue('foo' in dom2)
        # when a root element is used ...
        dom1 = parse_html('<p>foo</p><p>bar</p>')
        dom2 = parse_html('<p>foo</p><p>bar</p>')
        self.assertTrue(dom1 in dom2)
        dom1 = parse_html('<p>foo</p>')
        self.assertTrue(dom1 in dom2)
        dom1 = parse_html('<p>bar</p>')
        self.assertTrue(dom1 in dom2)
    def test_count(self):
        # equal html contains each other one time
        dom1 = parse_html('<p>foo')
        dom2 = parse_html('<p>foo</p>')
        self.assertEqual(dom1.count(dom2), 1)
        self.assertEqual(dom2.count(dom1), 1)
        dom2 = parse_html('<p>foo</p><p>bar</p>')
        self.assertEqual(dom2.count(dom1), 1)
        dom2 = parse_html('<p>foo foo</p><p>foo</p>')
        self.assertEqual(dom2.count('foo'), 3)
        # counting only matches text content, not tag/attribute names
        dom2 = parse_html('<p class="bar">foo</p>')
        self.assertEqual(dom2.count('bar'), 0)
        self.assertEqual(dom2.count('class'), 0)
        self.assertEqual(dom2.count('p'), 0)
        self.assertEqual(dom2.count('o'), 2)
        dom2 = parse_html('<p>foo</p><p>foo</p>')
        self.assertEqual(dom2.count(dom1), 2)
        dom2 = parse_html('<div><p>foo<input type=""></p><p>foo</p></div>')
        self.assertEqual(dom2.count(dom1), 1)
        dom2 = parse_html('<div><div><p>foo</p></div></div>')
        self.assertEqual(dom2.count(dom1), 1)
        dom2 = parse_html('<p>foo<p>foo</p></p>')
        self.assertEqual(dom2.count(dom1), 1)
        dom2 = parse_html('<p>foo<p>bar</p></p>')
        self.assertEqual(dom2.count(dom1), 0)
    def test_parsing_errors(self):
        with self.assertRaises(AssertionError):
            self.assertHTMLEqual('<p>', '')
        with self.assertRaises(AssertionError):
            self.assertHTMLEqual('', '<p>')
        with self.assertRaises(HTMLParseError):
            parse_html('</p>')
    def test_contains_html(self):
        response = HttpResponse('''<body>
        This is a form: <form action="" method="get">
        <input type="text" name="Hello" />
        </form></body>''')
        # html=True switches from substring search to semantic HTML search
        self.assertNotContains(response, "<input name='Hello' type='text'>")
        self.assertContains(response, '<form action="" method="get">')
        self.assertContains(response, "<input name='Hello' type='text'>", html=True)
        self.assertNotContains(response, '<form action="" method="get">', html=True)
        invalid_response = HttpResponse('''<body <bad>>''')
        with self.assertRaises(AssertionError):
            self.assertContains(invalid_response, '<p></p>')
        with self.assertRaises(AssertionError):
            self.assertContains(response, '<p "whats" that>')
    def test_unicode_handling(self):
        response = HttpResponse('<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>')
        self.assertContains(response, '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>', html=True)
class XMLEqualTests(TestCase):
    """Checks for assertXMLEqual / assertXMLNotEqual."""
    def test_simple_equal(self):
        first = "<elem attr1='a' attr2='b' />"
        second = "<elem attr1='a' attr2='b' />"
        self.assertXMLEqual(first, second)
    def test_simple_equal_unordered(self):
        # attribute order must not matter for XML equality
        first = "<elem attr1='a' attr2='b' />"
        second = "<elem attr2='b' attr1='a' />"
        self.assertXMLEqual(first, second)
    def test_simple_equal_raise(self):
        first = "<elem attr1='a' />"
        second = "<elem attr2='b' attr1='a' />"
        with self.assertRaises(AssertionError):
            self.assertXMLEqual(first, second)
    def test_simple_not_equal(self):
        first = "<elem attr1='a' attr2='c' />"
        second = "<elem attr1='a' attr2='b' />"
        self.assertXMLNotEqual(first, second)
    def test_simple_not_equal_raise(self):
        first = "<elem attr1='a' attr2='b' />"
        second = "<elem attr2='b' attr1='a' />"
        with self.assertRaises(AssertionError):
            self.assertXMLNotEqual(first, second)
    def test_parsing_errors(self):
        # invalid XML must make the assertion fail rather than crash
        invalid_xml = "<elem attr1='a attr2='b' />"
        well_formed = "<elem attr2='b' attr1='a' />"
        with self.assertRaises(AssertionError):
            self.assertXMLNotEqual(invalid_xml, well_formed)
    def test_comment_root(self):
        # comments preceding the root element are ignored
        first = "<?xml version='1.0'?><!-- comment1 --><elem attr1='a' attr2='b' />"
        second = "<?xml version='1.0'?><!-- comment2 --><elem attr2='b' attr1='a' />"
        self.assertXMLEqual(first, second)
class SkippingExtraTests(TestCase):
    """Skipped tests must not trigger fixture loading."""
    fixtures = ['should_not_be_loaded.json']
    # HACK: This depends on internals of our TestCase subclasses
    def __call__(self, result=None):
        # Detect fixture loading by counting SQL queries, should be zero
        with self.assertNumQueries(0):
            super(SkippingExtraTests, self).__call__(result)
    @unittest.skip("Fixture loading should not be performed for skipped tests.")
    def test_fixtures_are_skipped(self):
        pass
class AssertRaisesMsgTest(SimpleTestCase):
    def test_special_re_chars(self):
        """assertRaisesMessage shouldn't interpret RE special chars."""
        def raise_pattern_message():
            raise ValueError("[.*x+]y?")
        # the expected message is matched literally, so the regex
        # metacharacters above must not be interpreted
        self.assertRaisesMessage(ValueError, "[.*x+]y?", raise_pattern_message)
class AssertFieldOutputTests(SimpleTestCase):
    def test_assert_field_output(self):
        error_invalid = ['Enter a valid email address.']
        # matching cleaned value and matching errors pass
        self.assertFieldOutput(EmailField, {'a@a.com': 'a@a.com'}, {'aaa': error_invalid})
        # extra expected errors, a wrong expected value, or entirely
        # different expected errors must each fail
        with self.assertRaises(AssertionError):
            self.assertFieldOutput(EmailField, {'a@a.com': 'a@a.com'}, {'aaa': error_invalid + ['Another error']})
        with self.assertRaises(AssertionError):
            self.assertFieldOutput(EmailField, {'a@a.com': 'Wrong output'}, {'aaa': error_invalid})
        with self.assertRaises(AssertionError):
            self.assertFieldOutput(EmailField, {'a@a.com': 'a@a.com'}, {'aaa': ['Come on, gimme some well formatted data, dude.']})
    def test_custom_required_message(self):
        # a field's overridden 'required' message is honored
        class MyCustomField(IntegerField):
            default_error_messages = {
                'required': 'This is really required.',
            }
        self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None)
# for OverrideSettingsTests
def fake_view(request):
    # Placeholder view: only used as a reversing target in the URLconfs
    # below; it is never actually invoked.
    pass
class FirstUrls:
    # minimal URLconf-module stand-in: only `urlpatterns` is required
    urlpatterns = [url(r'first/$', fake_view, name='first')]
class SecondUrls:
    # minimal URLconf-module stand-in: only `urlpatterns` is required
    urlpatterns = [url(r'second/$', fake_view, name='second')]
class OverrideSettingsTests(TestCase):
    """
    #21518 -- If neither override_settings nor a settings_changed receiver
    clears the URL cache between tests, then one of these two test methods will
    fail.
    """
    @override_settings(ROOT_URLCONF=FirstUrls)
    def test_first(self):
        # reversing would fail if the other URLconf were still cached
        reverse('first')
    @override_settings(ROOT_URLCONF=SecondUrls)
    def test_second(self):
        reverse('second')
| {
"content_hash": "a39e0254c388b86c234a1590092960dc",
"timestamp": "",
"source": "github",
"line_count": 650,
"max_line_length": 162,
"avg_line_length": 37.863076923076925,
"alnum_prop": 0.5966437771728089,
"repo_name": "liavkoren/djangoDev",
"id": "e9f6da727904362f5e30c0f164996c5b5f2b32d8",
"size": "24651",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_utils/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52957"
},
{
"name": "JavaScript",
"bytes": "102668"
},
{
"name": "Python",
"bytes": "9336943"
},
{
"name": "Shell",
"bytes": "12137"
}
],
"symlink_target": ""
} |
'''
Created on Jun 8, 2011
@author: evan
'''
from kayako.core.lib import UnsetParameter
from kayako.core.object import KayakoObject
from kayako.exception import KayakoRequestError, KayakoResponseError
from lxml import etree
import base64
class TicketAttachment(KayakoObject):
    '''
    Kayako TicketAttachment API Object.

    ticketid     The unique numeric identifier of the ticket.
    ticketpostid The unique numeric identifier of the ticket post.
    filename     The file name for the attachment
    contents     The BASE64 encoded attachment contents
    filesize     Size of the attachment (returned by the API; not sent on add).
    filetype     Content type of the attachment (returned by the API; not sent on add).
    dateline     Creation timestamp (returned by the API; not sent on add).
    '''
    controller = '/Tickets/TicketAttachment'
    __parameters__ = [
        'id',
        'ticketid',
        'ticketpostid',
        'filename',
        'filesize',
        'filetype',
        'contents',
        'dateline',
    ]
    __required_add_parameters__ = ['ticketid', 'ticketpostid', 'filename', 'contents']
    __add_parameters__ = ['ticketid', 'ticketpostid', 'filename', 'contents']
    @classmethod
    def _parse_ticket_attachment(cls, ticket_attachment_tree):
        ''' Build a constructor kwargs dict from an <attachment> XML element. '''
        params = dict(
            id=cls._get_int(ticket_attachment_tree.find('id')),
            ticketid=cls._get_int(ticket_attachment_tree.find('ticketid')),
            ticketpostid=cls._get_int(ticket_attachment_tree.find('ticketpostid')),
            filename=cls._get_string(ticket_attachment_tree.find('filename')),
            filesize=cls._get_int(ticket_attachment_tree.find('filesize')),
            filetype=cls._get_string(ticket_attachment_tree.find('filetype')),
            contents=cls._get_string(ticket_attachment_tree.find('contents')),
            dateline=cls._get_date(ticket_attachment_tree.find('dateline')),
        )
        return params
    def _update_from_response(self, ticket_attachment_tree):
        ''' Refresh this object's fields from an <attachment> XML element,
        leaving any field absent from the response untouched. '''
        for int_node in ['id', 'ticketid', 'ticketpostid', 'filesize']:
            node = ticket_attachment_tree.find(int_node)
            if node is not None:
                setattr(self, int_node, self._get_int(node, required=False))
        for str_node in ['filename', 'filetype', 'contents']:
            node = ticket_attachment_tree.find(str_node)
            if node is not None:
                setattr(self, str_node, self._get_string(node))
        for date_node in ['dateline']:
            node = ticket_attachment_tree.find(date_node)
            if node is not None:
                setattr(self, date_node, self._get_date(node, required=False))
    @classmethod
    def get_all(cls, api, ticketid):
        '''
        Get all of the TicketAttachments for a ticket.
        Required:
            ticketid     The unique numeric identifier of the ticket.
        '''
        response = api._request('%s/ListAll/%s' % (cls.controller, ticketid), 'GET')
        tree = etree.parse(response)
        return [TicketAttachment(api, **cls._parse_ticket_attachment(ticket_attachment_tree)) for ticket_attachment_tree in tree.findall('attachment')]
    @classmethod
    def get(cls, api, ticketid, attachmentid):
        '''
        Get the TicketAttachment with the given id on the given ticket, or
        None if the server answers with HTTP 404 or without an <attachment>
        element.
        '''
        try:
            response = api._request('%s/%s/%s/' % (cls.controller, ticketid, attachmentid), 'GET')
        except KayakoResponseError, error:
            if 'HTTP Error 404' in str(error):
                return None
            else:
                raise
        tree = etree.parse(response)
        node = tree.find('attachment')
        if node is None:
            return None
        params = cls._parse_ticket_attachment(node)
        return TicketAttachment(api, **params)
    def add(self):
        '''
        Add this TicketAttachment.
        Requires:
            ticketid     The unique numeric identifier of the ticket.
            ticketpostid The unique numeric identifier of the ticket post.
            filename     The file name for the attachment
            contents     The BASE64 encoded attachment contents
        '''
        response = self._add(self.controller)
        tree = etree.parse(response)
        node = tree.find('attachment')
        self._update_from_response(node)
    def delete(self):
        ''' Delete this attachment from its ticket; ticketid must be set. '''
        if self.ticketid is None or self.ticketid is UnsetParameter:
            raise KayakoRequestError('Cannot delete a TicketAttachment without being attached to a ticket. The ID of the Ticket (ticketid) has not been specified.')
        self._delete('%s/%s/%s/' % (self.controller, self.ticketid, self.id))
    def get_contents(self):
        ''' Return the unencoded contents of this TicketAttachment. '''
        # implicitly returns None when there are no contents
        if self.contents:
            return base64.b64decode(self.contents)
    def set_contents(self, contents):
        '''
        Set this TicketAttachment's contents to Base 64 encoded data, or set the
        contents to nothing.
        '''
        if contents:
            self.contents = base64.b64encode(contents)
        else:
            self.contents = None
    def __str__(self):
        return '<TicketAttachment (%s): %s>' % (self.id, self.filename)
| {
"content_hash": "ad878853c829b9d7eaa9016f6df03f48",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 164,
"avg_line_length": 36.713235294117645,
"alnum_prop": 0.6144602443420789,
"repo_name": "sahilsehgal81/python-api-library",
"id": "0f138204b8da895ff46f32c1f3b9ecb7e830d3f9",
"size": "5287",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/kayako/objects/ticket/ticket_attachment.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "315429"
}
],
"symlink_target": ""
} |
import os
import time
from dataclasses import dataclass, field
from mashumaro.types import SerializableType
from pathlib import Path
from typing import (
Optional,
Union,
List,
Dict,
Any,
Sequence,
Tuple,
Iterator,
TypeVar,
)
from dbt.dataclass_schema import (
dbtClassMixin, ExtensibleDbtClassMixin
)
from dbt.clients.system import write_file
from dbt.contracts.files import FileHash, MAXIMUM_SEED_SIZE_NAME
from dbt.contracts.graph.unparsed import (
UnparsedNode, UnparsedDocumentation, Quoting, Docs,
UnparsedBaseNode, FreshnessThreshold, ExternalTable,
HasYamlMetadata, MacroArgument, UnparsedSourceDefinition,
UnparsedSourceTableDefinition, UnparsedColumn, TestDef,
ExposureOwner, ExposureType, MaturityType, MetricFilter
)
from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin
from dbt.exceptions import warn_or_error
from dbt import flags
from dbt.node_types import NodeType
from .model_config import (
NodeConfig,
SeedConfig,
TestConfig,
SourceConfig,
EmptySnapshotConfig,
SnapshotConfig,
)
@dataclass
class ColumnInfo(
    AdditionalPropertiesMixin,
    ExtensibleDbtClassMixin,
    Replaceable
):
    """Parsed, user-supplied metadata for a single column of a node."""
    name: str
    description: str = ''
    meta: Dict[str, Any] = field(default_factory=dict)
    data_type: Optional[str] = None
    quote: Optional[bool] = None
    tags: List[str] = field(default_factory=list)
    # catch-all bucket for extra properties (see AdditionalPropertiesMixin)
    _extra: Dict[str, Any] = field(default_factory=dict)
@dataclass
class HasFqn(dbtClassMixin, Replaceable):
    """Mixin for objects addressed by a fully-qualified name (path parts)."""
    fqn: List[str]
    def same_fqn(self, other: 'HasFqn') -> bool:
        """True if `other` has exactly the same fully-qualified name."""
        return self.fqn == other.fqn
@dataclass
class HasUniqueID(dbtClassMixin, Replaceable):
    """Mixin for objects identified by a manifest-wide unique id."""
    unique_id: str
@dataclass
class MacroDependsOn(dbtClassMixin, Replaceable):
    """Tracks the macro unique_ids an object depends on."""
    macros: List[str] = field(default_factory=list)
    # 'in' on lists is O(n) so this is O(n^2) for # of macros
    def add_macro(self, value: str):
        """Append a macro id, preserving insertion order and uniqueness."""
        if value not in self.macros:
            self.macros.append(value)
@dataclass
class DependsOn(MacroDependsOn):
    """Macro dependencies plus node dependencies."""
    nodes: List[str] = field(default_factory=list)
    def add_node(self, value: str):
        """Append a node unique_id, preserving order and uniqueness."""
        if value not in self.nodes:
            self.nodes.append(value)
@dataclass
class HasRelationMetadata(dbtClassMixin, Replaceable):
    """Database/schema coordinates shared by relational objects."""
    database: Optional[str]
    schema: str
    # Can't set database to None like it ought to be
    # because it messes up the subclasses and default parameters
    # so hack it here
    @classmethod
    def __pre_deserialize__(cls, data):
        """Default a missing 'database' key to None before deserialization."""
        data = super().__pre_deserialize__(data)
        if 'database' not in data:
            data['database'] = None
        return data
class ParsedNodeMixins(dbtClassMixin):
    """Derived predicates and patching behavior shared by parsed nodes."""
    resource_type: NodeType
    depends_on: DependsOn
    config: NodeConfig
    @property
    def is_refable(self):
        # True for resource types that can be the target of ref()
        # (see NodeType.refable())
        return self.resource_type in NodeType.refable()
    @property
    def should_store_failures(self):
        # node-level config takes precedence; fall back to the global flag
        return self.resource_type == NodeType.Test and (
            self.config.store_failures if self.config.store_failures is not None
            else flags.STORE_FAILURES
        )
    # will this node map to an object in the database?
    @property
    def is_relational(self):
        return (
            self.resource_type in NodeType.refable() or
            self.should_store_failures
        )
    @property
    def is_ephemeral(self):
        return self.config.materialized == 'ephemeral'
    @property
    def is_ephemeral_model(self):
        return self.is_refable and self.is_ephemeral
    @property
    def depends_on_nodes(self):
        return self.depends_on.nodes
    def patch(self, patch: 'ParsedNodePatch'):
        """Given a ParsedNodePatch, add the new information to the node."""
        # explicitly pick out the parts to update so we don't inadvertently
        # step on the model name or anything
        # Note: config should already be updated
        self.patch_path: Optional[str] = patch.file_id
        # update created_at so process_docs will run in partial parsing
        self.created_at = time.time()
        self.description = patch.description
        self.columns = patch.columns
        self.meta = patch.meta
        self.docs = patch.docs
    def get_materialization(self):
        """Return the configured materialization for this node."""
        return self.config.materialized
@dataclass
class ParsedNodeMandatory(
    UnparsedNode,
    HasUniqueID,
    HasFqn,
    HasRelationMetadata,
    Replaceable
):
    """The fields every parsed node must have (no defaults)."""
    alias: str
    checksum: FileHash
    config: NodeConfig = field(default_factory=NodeConfig)

    @property
    def identifier(self):
        # The relation identifier is the alias, not the node name.
        return self.alias
@dataclass
class NodeInfoMixin():
    """Mixin exposing runtime execution status for structured logging."""
    # runtime-only status (set during execution); stripped on serialization
    _event_status: Dict[str, Any] = field(default_factory=dict)

    @property
    def node_info(self):
        """Return a flat dict describing this node for log events."""
        node_info = {
            "node_path": getattr(self, 'path', None),
            "node_name": getattr(self, 'name', None),
            "unique_id": getattr(self, 'unique_id', None),
            "resource_type": str(getattr(self, 'resource_type', '')),
            "materialized": self.config.get('materialized'),
            "node_status": str(self._event_status.get('node_status')),
            "node_started_at": self._event_status.get("started_at"),
            "node_finished_at": self._event_status.get("finished_at")
        }
        return node_info
@dataclass
class ParsedNodeDefaults(NodeInfoMixin, ParsedNodeMandatory):
    """All optional/defaulted fields shared by parsed nodes."""
    tags: List[str] = field(default_factory=list)
    refs: List[List[str]] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
    description: str = field(default='')
    columns: Dict[str, ColumnInfo] = field(default_factory=dict)
    meta: Dict[str, Any] = field(default_factory=dict)
    docs: Docs = field(default_factory=Docs)
    patch_path: Optional[str] = None
    compiled_path: Optional[str] = None
    build_path: Optional[str] = None
    deferred: bool = False
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    created_at: float = field(default_factory=lambda: time.time())
    # raw config() call arguments; stripped on serialization
    config_call_dict: Dict[str, Any] = field(default_factory=dict)

    def write_node(self, target_path: str, subdirectory: str, payload: str):
        """Write ``payload`` under target_path/subdirectory/package/...,
        returning the full path written."""
        if (os.path.basename(self.path) ==
                os.path.basename(self.original_file_path)):
            # One-to-one relationship of nodes to files.
            path = self.original_file_path
        else:
            # Many-to-one relationship of nodes to files.
            path = os.path.join(self.original_file_path, self.path)
        full_path = os.path.join(
            target_path, subdirectory, self.package_name, path
        )
        write_file(full_path, payload)
        return full_path
# Type variable so the same_* comparison helpers compare two nodes of the
# same concrete ParsedNode subclass.
T = TypeVar('T', bound='ParsedNode')
@dataclass
class ParsedNode(ParsedNodeDefaults, ParsedNodeMixins, SerializableType):
    """Base class for all parsed manifest nodes.

    Handles (de)serialization dispatch to the concrete node classes and
    the ``same_*`` comparisons used for state:modified selection.
    """

    def _serialize(self):
        return self.to_dict()

    def __post_serialize__(self, dct):
        # Runtime-only bookkeeping must not leak into serialized artifacts.
        if 'config_call_dict' in dct:
            del dct['config_call_dict']
        if '_event_status' in dct:
            del dct['_event_status']
        return dct

    @classmethod
    def _deserialize(cls, dct: Dict[str, int]):
        # The serialized ParsedNodes do not differ from each other
        # in fields that would allow 'from_dict' to distinguish
        # between them, so dispatch on the serialized resource_type.
        # (The original chain also had a second, unreachable 'seed'
        # branch after 'operation'; it has been removed.)
        resource_type = dct['resource_type']
        if resource_type == 'model':
            return ParsedModelNode.from_dict(dct)
        elif resource_type == 'analysis':
            return ParsedAnalysisNode.from_dict(dct)
        elif resource_type == 'seed':
            return ParsedSeedNode.from_dict(dct)
        elif resource_type == 'rpc':
            return ParsedRPCNode.from_dict(dct)
        elif resource_type == 'sql':
            return ParsedSqlNode.from_dict(dct)
        elif resource_type == 'test':
            if 'test_metadata' in dct:
                return ParsedGenericTestNode.from_dict(dct)
            else:
                return ParsedSingularTestNode.from_dict(dct)
        elif resource_type == 'operation':
            return ParsedHookNode.from_dict(dct)
        elif resource_type == 'snapshot':
            return ParsedSnapshotNode.from_dict(dct)
        else:
            return cls.from_dict(dct)

    def _persist_column_docs(self) -> bool:
        """True if column descriptions should be persisted to the warehouse."""
        if hasattr(self.config, 'persist_docs'):
            assert isinstance(self.config, NodeConfig)
            return bool(self.config.persist_docs.get('columns'))
        return False

    def _persist_relation_docs(self) -> bool:
        """True if the relation description should be persisted."""
        if hasattr(self.config, 'persist_docs'):
            assert isinstance(self.config, NodeConfig)
            return bool(self.config.persist_docs.get('relation'))
        return False

    def same_body(self: T, other: T) -> bool:
        """Compare the raw SQL of two nodes."""
        return self.raw_sql == other.raw_sql

    def same_persisted_description(self: T, other: T) -> bool:
        """Compare descriptions, but only those that are persisted."""
        # the check on configs will handle the case where we have different
        # persist settings, so we only have to care about the cases where they
        # are the same..
        if self._persist_relation_docs():
            if self.description != other.description:
                return False

        if self._persist_column_docs():
            # assert other._persist_column_docs()
            column_descriptions = {
                k: v.description for k, v in self.columns.items()
            }
            other_column_descriptions = {
                k: v.description for k, v in other.columns.items()
            }
            if column_descriptions != other_column_descriptions:
                return False

        return True

    def same_database_representation(self, other: T) -> bool:
        """Compare the configured database/schema/alias of two nodes."""
        # compare the config representation, not the node's config value. This
        # compares the configured value, rather than the ultimate value (so
        # generate_*_name and unset values derived from the target are
        # ignored)
        keys = ('database', 'schema', 'alias')
        for key in keys:
            mine = self.unrendered_config.get(key)
            others = other.unrendered_config.get(key)
            if mine != others:
                return False
        return True

    def same_config(self, old: T) -> bool:
        """Compare the unrendered configs of two nodes."""
        return self.config.same_contents(
            self.unrendered_config,
            old.unrendered_config,
        )

    def same_contents(self: T, old: Optional[T]) -> bool:
        """True if nothing state:modified cares about has changed."""
        if old is None:
            return False

        return (
            self.same_body(old) and
            self.same_config(old) and
            self.same_persisted_description(old) and
            self.same_fqn(old) and
            self.same_database_representation(old) and
            True
        )
@dataclass
class ParsedAnalysisNode(ParsedNode):
    """A parsed analysis (compiled but never run as a model)."""
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Analysis]})
@dataclass
class ParsedHookNode(ParsedNode):
    """A parsed on-run-start/on-run-end hook operation."""
    resource_type: NodeType = field(
        metadata={'restrict': [NodeType.Operation]}
    )
    # position of the hook within its hook list, if any
    index: Optional[int] = None
@dataclass
class ParsedModelNode(ParsedNode):
    """A parsed model node."""
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Model]})
# TODO: rm?
@dataclass
class ParsedRPCNode(ParsedNode):
    """A node created from an RPC request (dbt server)."""
    resource_type: NodeType = field(metadata={'restrict': [NodeType.RPCCall]})
@dataclass
class ParsedSqlNode(ParsedNode):
    """A node created from an ad-hoc SQL operation."""
    resource_type: NodeType = field(metadata={'restrict': [NodeType.SqlOperation]})
def same_seeds(first: ParsedNode, second: ParsedNode) -> bool:
    """Compare two seed nodes by file checksum.

    For seeds, we check the hashes. If the hashes are different types,
    no match. If the hashes are both the same 'path' (the seed file was
    too large to hash), log a warning and assume they are the same.
    """
    result = first.checksum == second.checksum
    # if the current checksum is a path, we want to log a warning.
    if first.checksum.name == 'path':
        msg: str
        if second.checksum.name != 'path':
            msg = (
                f'Found a seed ({first.package_name}.{first.name}) '
                f'>{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was '
                f'<={MAXIMUM_SEED_SIZE_NAME}, so it has changed'
            )
        elif result:
            msg = (
                f'Found a seed ({first.package_name}.{first.name}) '
                f'>{MAXIMUM_SEED_SIZE_NAME} in size at the same path, dbt '
                f'cannot tell if it has changed: assuming they are the same'
            )
        else:
            # result is False here. The original code had a fourth branch
            # for "different checksum type", but it was unreachable: the
            # first condition already handles every non-'path' type.
            msg = (
                f'Found a seed ({first.package_name}.{first.name}) '
                f'>{MAXIMUM_SEED_SIZE_NAME} in size. The previous file was in '
                f'a different location, assuming it has changed'
            )
        warn_or_error(msg, node=first)
    return result
@dataclass
class ParsedSeedNode(ParsedNode):
    # keep this in sync with CompiledSeedNode!
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Seed]})
    config: SeedConfig = field(default_factory=SeedConfig)

    @property
    def empty(self):
        """Seeds are never empty."""
        return False

    def same_body(self: T, other: T) -> bool:
        # seeds compare by file checksum, not raw SQL
        return same_seeds(self, other)
@dataclass
class TestMetadata(dbtClassMixin, Replaceable):
    """Metadata identifying which generic test produced a test node."""
    name: str
    # kwargs are the args that are left in the test builder after
    # removing configs. They are set from the test builder when
    # the test node is created.
    kwargs: Dict[str, Any] = field(default_factory=dict)
    # package namespace for tests defined in installed packages
    namespace: Optional[str] = None
@dataclass
class HasTestMetadata(dbtClassMixin):
    """Mixin marking a node as carrying generic-test metadata."""
    test_metadata: TestMetadata
@dataclass
class ParsedSingularTestNode(ParsedNode):
    """A one-off data test defined as its own SQL file."""
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
    # Was not able to make mypy happy and keep the code working. We need to
    # refactor the various configs.
    config: TestConfig = field(default_factory=TestConfig)  # type: ignore

    @property
    def test_node_type(self):
        return 'singular'
@dataclass
class ParsedGenericTestNode(ParsedNode, HasTestMetadata):
    """A generic (schema yaml) test applied to a node or column."""
    # keep this in sync with CompiledGenericTestNode!
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Test]})
    column_name: Optional[str] = None
    file_key_name: Optional[str] = None
    # Was not able to make mypy happy and keep the code working. We need to
    # refactor the various configs.
    config: TestConfig = field(default_factory=TestConfig)  # type: ignore

    def same_contents(self, other) -> bool:
        # generic tests are regenerated from yaml, so only config and fqn
        # matter for state comparison (not the rendered SQL body)
        if other is None:
            return False

        return (
            self.same_config(other) and
            self.same_fqn(other) and
            True
        )

    @property
    def test_node_type(self):
        return 'generic'
@dataclass
class IntermediateSnapshotNode(ParsedNode):
    # at an intermediate stage in parsing, where we've built something better
    # than an unparsed node for rendering in parse mode, it's pretty possible
    # that we won't have critical snapshot-related information that is only
    # defined in config blocks. To fix that, we have an intermediate type that
    # uses a regular node config, which the snapshot parser will then convert
    # into a full ParsedSnapshotNode after rendering.
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Snapshot]})
    config: EmptySnapshotConfig = field(default_factory=EmptySnapshotConfig)
@dataclass
class ParsedSnapshotNode(ParsedNode):
    """A fully-parsed snapshot, with its snapshot config validated."""
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Snapshot]})
    config: SnapshotConfig
@dataclass
class ParsedPatch(HasYamlMetadata, Replaceable):
    """Common fields for patches parsed from schema yaml files."""
    name: str
    description: str
    meta: Dict[str, Any]
    docs: Docs
    config: Dict[str, Any]
# The parsed node update is only the 'patch', not the test. The test became a
# regular parsed node. Note that description and columns must be present, but
# may be empty.
@dataclass
class ParsedNodePatch(ParsedPatch):
    """A yaml patch applied to a node (adds columns on top of ParsedPatch)."""
    columns: Dict[str, ColumnInfo]
@dataclass
class ParsedMacroPatch(ParsedPatch):
    """A yaml patch applied to a macro (documents its arguments)."""
    arguments: List[MacroArgument] = field(default_factory=list)
@dataclass
class ParsedMacro(UnparsedBaseNode, HasUniqueID):
    """A parsed macro definition."""
    name: str
    macro_sql: str
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Macro]})
    # TODO: can macros even have tags?
    tags: List[str] = field(default_factory=list)
    # TODO: is this ever populated?
    depends_on: MacroDependsOn = field(default_factory=MacroDependsOn)
    description: str = ''
    meta: Dict[str, Any] = field(default_factory=dict)
    docs: Docs = field(default_factory=Docs)
    patch_path: Optional[str] = None
    arguments: List[MacroArgument] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    def patch(self, patch: ParsedMacroPatch):
        """Apply a yaml patch's documentation to this macro."""
        self.patch_path: Optional[str] = patch.file_id
        self.description = patch.description
        # update created_at so downstream processing reruns in partial parsing
        self.created_at = time.time()
        self.meta = patch.meta
        self.docs = patch.docs
        self.arguments = patch.arguments

    def same_contents(self, other: Optional['ParsedMacro']) -> bool:
        if other is None:
            return False
        # the only thing that makes one macro different from another with the
        # same name/package is its content
        return self.macro_sql == other.macro_sql
@dataclass
class ParsedDocumentation(UnparsedDocumentation, HasUniqueID):
    """A parsed {% docs %} block."""
    name: str
    block_contents: str

    @property
    def search_name(self):
        return self.name

    def same_contents(self, other: Optional['ParsedDocumentation']) -> bool:
        if other is None:
            return False
        # the only thing that makes one doc different from another with the
        # same name/package is its content
        return self.block_contents == other.block_contents
def normalize_test(testdef: TestDef) -> Dict[str, Any]:
    """Coerce a test definition to its dict form.

    A bare string test name becomes ``{name: {}}``; a dict definition is
    returned unchanged.
    """
    return {testdef: {}} if isinstance(testdef, str) else testdef
@dataclass
class UnpatchedSourceDefinition(UnparsedBaseNode, HasUniqueID, HasFqn):
    """A source table before yaml patches are applied.

    Wraps the unparsed source and one of its tables; quoting and tests
    combine source-level and table-level settings.
    """
    source: UnparsedSourceDefinition
    table: UnparsedSourceTableDefinition
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Source]})
    patch_path: Optional[Path] = None

    def get_full_source_name(self):
        return f'{self.source.name}_{self.table.name}'

    def get_source_representation(self):
        return f'source("{self.source.name}", "{self.table.name}")'

    @property
    def name(self) -> str:
        return self.get_full_source_name()

    @property
    def quote_columns(self) -> Optional[bool]:
        # table-level quoting overrides source-level quoting
        result = None
        if self.source.quoting.column is not None:
            result = self.source.quoting.column
        if self.table.quoting.column is not None:
            result = self.table.quoting.column
        return result

    @property
    def columns(self) -> Sequence[UnparsedColumn]:
        if self.table.columns is None:
            return []
        else:
            return self.table.columns

    def get_tests(
        self
    ) -> Iterator[Tuple[Dict[str, Any], Optional[UnparsedColumn]]]:
        """Yield (normalized test dict, column) pairs; column is None for
        table-level tests."""
        for test in self.tests:
            yield normalize_test(test), None

        for column in self.columns:
            if column.tests is not None:
                for test in column.tests:
                    yield normalize_test(test), column

    @property
    def tests(self) -> List[TestDef]:
        if self.table.tests is None:
            return []
        else:
            return self.table.tests
@dataclass
class ParsedSourceMandatory(
    UnparsedBaseNode,
    HasUniqueID,
    HasRelationMetadata,
    HasFqn,
):
    """The required (no-default) fields of a parsed source definition."""
    name: str
    source_name: str
    source_description: str
    loader: str
    identifier: str
    resource_type: NodeType = field(metadata={'restrict': [NodeType.Source]})
@dataclass
class ParsedSourceDefinition(
    NodeInfoMixin,
    ParsedSourceMandatory
):
    """A fully-parsed source table definition."""
    quoting: Quoting = field(default_factory=Quoting)
    loaded_at_field: Optional[str] = None
    freshness: Optional[FreshnessThreshold] = None
    external: Optional[ExternalTable] = None
    description: str = ''
    columns: Dict[str, ColumnInfo] = field(default_factory=dict)
    meta: Dict[str, Any] = field(default_factory=dict)
    source_meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: SourceConfig = field(default_factory=SourceConfig)
    patch_path: Optional[Path] = None
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    relation_name: Optional[str] = None
    created_at: float = field(default_factory=lambda: time.time())

    def __post_serialize__(self, dct):
        # _event_status is runtime-only bookkeeping; never serialize it.
        if '_event_status' in dct:
            del dct['_event_status']
        return dct

    def same_database_representation(
        self, other: 'ParsedSourceDefinition'
    ) -> bool:
        """True if both sources point at the same relation."""
        return (
            self.database == other.database and
            self.schema == other.schema and
            self.identifier == other.identifier and
            True
        )

    def same_quoting(self, other: 'ParsedSourceDefinition') -> bool:
        return self.quoting == other.quoting

    def same_freshness(self, other: 'ParsedSourceDefinition') -> bool:
        return (
            self.freshness == other.freshness and
            self.loaded_at_field == other.loaded_at_field and
            True
        )

    def same_external(self, other: 'ParsedSourceDefinition') -> bool:
        return self.external == other.external

    def same_config(self, old: 'ParsedSourceDefinition') -> bool:
        return self.config.same_contents(
            self.unrendered_config,
            old.unrendered_config,
        )

    def same_contents(self, old: Optional['ParsedSourceDefinition']) -> bool:
        # existing when it didn't before is a change!
        if old is None:
            return True

        # config changes are changes (because the only config is "enabled", and
        # enabling a source is a change!)
        # changing the database/schema/identifier is a change
        # messing around with external stuff is a change (uh, right?)
        # quoting changes are changes
        # freshness changes are changes, I guess
        # metadata/tags changes are not "changes"
        # patching/description changes are not "changes"
        return (
            self.same_database_representation(old) and
            self.same_fqn(old) and
            self.same_config(old) and
            self.same_quoting(old) and
            self.same_freshness(old) and
            self.same_external(old) and
            True
        )

    def get_full_source_name(self):
        return f'{self.source_name}_{self.name}'

    def get_source_representation(self):
        # Bug fix: this class has no ``source``/``table`` attributes (the
        # original body referenced ``self.source.name``/``self.table.name``,
        # which only exist on UnpatchedSourceDefinition and would raise
        # AttributeError here). Use source_name/name, matching
        # get_full_source_name and search_name.
        return f'source("{self.source_name}", "{self.name}")'

    @property
    def is_refable(self):
        return False

    @property
    def is_ephemeral(self):
        return False

    @property
    def is_ephemeral_model(self):
        return False

    @property
    def depends_on_nodes(self):
        return []

    @property
    def depends_on(self):
        return DependsOn(macros=[], nodes=[])

    @property
    def refs(self):
        return []

    @property
    def sources(self):
        return []

    @property
    def has_freshness(self):
        return bool(self.freshness) and self.loaded_at_field is not None

    @property
    def search_name(self):
        return f'{self.source_name}.{self.name}'
@dataclass
class ParsedExposure(UnparsedBaseNode, HasUniqueID, HasFqn):
    """A parsed exposure definition from a schema yaml file."""
    name: str
    type: ExposureType
    owner: ExposureOwner
    resource_type: NodeType = NodeType.Exposure
    description: str = ''
    maturity: Optional[MaturityType] = None
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    url: Optional[str] = None
    depends_on: DependsOn = field(default_factory=DependsOn)
    refs: List[List[str]] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property
    def depends_on_nodes(self):
        return self.depends_on.nodes

    @property
    def search_name(self):
        return self.name

    def same_depends_on(self, old: 'ParsedExposure') -> bool:
        # order-insensitive comparison of dependencies
        return set(self.depends_on.nodes) == set(old.depends_on.nodes)

    def same_description(self, old: 'ParsedExposure') -> bool:
        return self.description == old.description

    def same_maturity(self, old: 'ParsedExposure') -> bool:
        return self.maturity == old.maturity

    def same_owner(self, old: 'ParsedExposure') -> bool:
        return self.owner == old.owner

    def same_exposure_type(self, old: 'ParsedExposure') -> bool:
        return self.type == old.type

    def same_url(self, old: 'ParsedExposure') -> bool:
        return self.url == old.url

    def same_contents(self, old: Optional['ParsedExposure']) -> bool:
        # existing when it didn't before is a change!
        # metadata/tags changes are not "changes"
        if old is None:
            return True

        return (
            self.same_fqn(old) and
            self.same_exposure_type(old) and
            self.same_owner(old) and
            self.same_maturity(old) and
            self.same_url(old) and
            self.same_description(old) and
            self.same_depends_on(old) and
            True
        )
@dataclass
class ParsedMetric(UnparsedBaseNode, HasUniqueID, HasFqn):
    """A parsed metric definition from a schema yaml file."""
    model: str
    name: str
    description: str
    label: str
    type: str
    sql: Optional[str]
    timestamp: Optional[str]
    filters: List[MetricFilter]
    time_grains: List[str]
    dimensions: List[str]
    resource_type: NodeType = NodeType.Metric
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    sources: List[List[str]] = field(default_factory=list)
    depends_on: DependsOn = field(default_factory=DependsOn)
    refs: List[List[str]] = field(default_factory=list)
    created_at: float = field(default_factory=lambda: time.time())

    @property
    def depends_on_nodes(self):
        return self.depends_on.nodes

    @property
    def search_name(self):
        return self.name

    def same_model(self, old: 'ParsedMetric') -> bool:
        return self.model == old.model

    def same_dimensions(self, old: 'ParsedMetric') -> bool:
        return self.dimensions == old.dimensions

    def same_filters(self, old: 'ParsedMetric') -> bool:
        return self.filters == old.filters

    def same_description(self, old: 'ParsedMetric') -> bool:
        return self.description == old.description

    def same_label(self, old: 'ParsedMetric') -> bool:
        return self.label == old.label

    def same_type(self, old: 'ParsedMetric') -> bool:
        return self.type == old.type

    def same_sql(self, old: 'ParsedMetric') -> bool:
        return self.sql == old.sql

    def same_timestamp(self, old: 'ParsedMetric') -> bool:
        return self.timestamp == old.timestamp

    def same_time_grains(self, old: 'ParsedMetric') -> bool:
        return self.time_grains == old.time_grains

    def same_contents(self, old: Optional['ParsedMetric']) -> bool:
        # existing when it didn't before is a change!
        # metadata/tags changes are not "changes"
        if old is None:
            return True

        return (
            self.same_model(old) and
            self.same_dimensions(old) and
            self.same_filters(old) and
            self.same_description(old) and
            self.same_label(old) and
            self.same_type(old) and
            self.same_sql(old) and
            self.same_timestamp(old) and
            self.same_time_grains(old) and
            True
        )
# Union of every concrete node type that may appear in a manifest's
# ``nodes`` mapping. Keep in sync with ParsedNode._deserialize.
ManifestNodes = Union[
    ParsedAnalysisNode,
    ParsedSingularTestNode,
    ParsedHookNode,
    ParsedModelNode,
    ParsedRPCNode,
    ParsedSqlNode,
    ParsedGenericTestNode,
    ParsedSeedNode,
    ParsedSnapshotNode,
]
# Union of every parsed resource type (nodes plus docs, macros, sources,
# exposures, and metrics).
ParsedResource = Union[
    ParsedDocumentation,
    ParsedMacro,
    ParsedNode,
    ParsedExposure,
    ParsedMetric,
    ParsedSourceDefinition,
]
| {
"content_hash": "4e89b12246e3a6af2c5da2503b64d96e",
"timestamp": "",
"source": "github",
"line_count": 920,
"max_line_length": 83,
"avg_line_length": 31.089130434782607,
"alnum_prop": 0.6392210334941613,
"repo_name": "analyst-collective/dbt",
"id": "f51ff7dedd57bff55842a333d542b558be682805",
"size": "28602",
"binary": false,
"copies": "1",
"ref": "refs/heads/ct-117-readme_docs",
"path": "core/dbt/contracts/graph/parsed.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "769"
},
{
"name": "Python",
"bytes": "284372"
},
{
"name": "Shell",
"bytes": "625"
}
],
"symlink_target": ""
} |
import numpy as np
from gym.spaces import Box
from metaworld.envs.asset_path_utils import full_v1_path_for
from metaworld.envs.mujoco.sawyer_xyz.sawyer_xyz_env import SawyerXYZEnv, _assert_task_is_set
class SawyerButtonPressTopdownEnv(SawyerXYZEnv):
    """Sawyer task: press a button from above (top-down).

    Reward shaping combines a reach term (negative distance from the
    gripper to the button) with a press term that activates once the
    gripper is within 5 cm of the button.
    """

    def __init__(self):
        # workspace bounds for the hand and for button-box placement
        hand_low = (-0.5, 0.40, 0.05)
        hand_high = (0.5, 1, 0.5)
        obj_low = (-0.1, 0.8, 0.05)
        obj_high = (0.1, 0.9, 0.05)

        super().__init__(
            self.model_name,
            hand_low=hand_low,
            hand_high=hand_high,
        )

        self.init_config = {
            'obj_init_pos': np.array([0, 0.8, 0.05], dtype=np.float32),
            'hand_init_pos': np.array([0, 0.6, 0.2], dtype=np.float32),
        }
        self.goal = np.array([0, 0.88, 0.1])
        self.obj_init_pos = self.init_config['obj_init_pos']
        self.hand_init_pos = self.init_config['hand_init_pos']

        goal_low = self.hand_low
        goal_high = self.hand_high

        self._random_reset_space = Box(
            np.array(obj_low),
            np.array(obj_high),
        )
        self.goal_space = Box(np.array(goal_low), np.array(goal_high))

    @property
    def model_name(self):
        return full_v1_path_for('sawyer_xyz/sawyer_button_press_topdown.xml')

    @_assert_task_is_set
    def step(self, action):
        ob = super().step(action)
        reward, reachDist, pressDist = self.compute_reward(action, ob)
        info = {
            'reachDist': reachDist,
            'goalDist': pressDist,
            'epRew': reward,
            'pickRew': None,
            # success when the button is within 2 cm of fully pressed
            'success': float(pressDist <= 0.02)
        }
        return ob, reward, False, info

    @property
    def _target_site_config(self):
        return []

    def _get_pos_objects(self):
        # the button's current position, tracked via the 'buttonStart' site
        return self.data.site_xpos[self.model.site_name2id('buttonStart')]

    def _set_obj_xyz(self, pos):
        # qpos[9]/qvel[9] drive the button's joint
        # NOTE(review): assumes joint index 9 matches the XML model — confirm
        qpos = self.data.qpos.flat.copy()
        qvel = self.data.qvel.flat.copy()
        qpos[9] = pos
        qvel[9] = 0
        self.set_state(qpos, qvel)

    def reset_model(self):
        self._reset_hand()
        self._target_pos = self.goal.copy()

        if self.random_init:
            goal_pos = self._get_state_rand_vec()
            self.obj_init_pos = goal_pos
            # offset from the sampled box position to the button itself
            button_pos = goal_pos.copy()
            button_pos[1] += 0.08
            button_pos[2] += 0.07
            self._target_pos = button_pos

        self._target_pos[2] -= 0.02
        self.sim.model.body_pos[self.model.body_name2id('box')] = self.obj_init_pos
        self.sim.model.body_pos[self.model.body_name2id('button')] = self._target_pos
        self._set_obj_xyz(0)
        # re-read the actual goal from the model after placement
        self._target_pos = self._get_site_pos('hole')
        # maximum press distance, used to scale the press reward
        self.maxDist = np.abs(self.data.site_xpos[self.model.site_name2id('buttonStart')][2] - self._target_pos[2])
        self.target_reward = 1000*self.maxDist + 1000*2

        return self._get_obs()

    def _reset_hand(self):
        super()._reset_hand(10)
        rightFinger, leftFinger = self._get_site_pos('rightEndEffector'), self._get_site_pos('leftEndEffector')
        self.init_fingerCOM = (rightFinger + leftFinger)/2
        self.pickCompleted = False

    def compute_reward(self, actions, obs):
        """Return [reward, reachDist, pressDist] for the current state.

        reachDist: gripper-to-button distance; pressDist: remaining
        vertical press distance to the goal.
        """
        del actions
        objPos = obs[3:6]

        rightFinger, leftFinger = self._get_site_pos('rightEndEffector'), self._get_site_pos('leftEndEffector')
        fingerCOM = (rightFinger + leftFinger)/2

        pressGoal = self._target_pos[2]

        pressDist = np.abs(objPos[2] - pressGoal)
        reachDist = np.linalg.norm(objPos - fingerCOM)
        reachRew = -reachDist

        # shaping constants for the exponential press bonus
        c1 = 1000
        c2 = 0.01
        c3 = 0.001
        if reachDist < 0.05:
            pressRew = 1000*(self.maxDist - pressDist) + c1*(np.exp(-(pressDist**2)/c2) + np.exp(-(pressDist**2)/c3))
        else:
            pressRew = 0
        pressRew = max(pressRew, 0)
        reward = reachRew + pressRew

        return [reward, reachDist, pressDist]
| {
"content_hash": "43e2cfbdea1cfacd946aa885f0c5da94",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 117,
"avg_line_length": 32.13709677419355,
"alnum_prop": 0.5668757841907152,
"repo_name": "rlworkgroup/metaworld",
"id": "1a91815ec9b922d1e2850e966ea2394f40a35df1",
"size": "3985",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metaworld/envs/mujoco/sawyer_xyz/v1/sawyer_button_press_topdown.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "379"
},
{
"name": "Jupyter Notebook",
"bytes": "340927"
},
{
"name": "Makefile",
"bytes": "1853"
},
{
"name": "Python",
"bytes": "820606"
},
{
"name": "Shell",
"bytes": "810"
}
],
"symlink_target": ""
} |
from pprint import pprint
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
# Example: create a node in a CloudStack advanced zone (Ikoula provider).
# Replace the placeholder credentials with your own before running.
apikey = "your api key"
secretkey = "your secret key"
Driver = get_driver(Provider.IKOULA)
driver = Driver(key=apikey, secret=secretkey)
# This returns a list of CloudStackNetwork objects
nets = driver.ex_list_networks()
# List the images/templates available
# This returns a list of NodeImage objects
images = driver.list_images()
# List the instance types
# This returns a list of NodeSize objects
sizes = driver.list_sizes()
# Create the node
# This returns a Node object
node = driver.create_node(name="libcloud", image=images[0], size=sizes[0], networks=[nets[0]])
# The node has a private IP in the guest network used
# No public IPs and no rules
pprint(node.extra)
pprint(node.private_ips)
| {
"content_hash": "849da80f7de5de5d53d7bd67a9025b18",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 94,
"avg_line_length": 27.5,
"alnum_prop": 0.7648484848484849,
"repo_name": "apache/libcloud",
"id": "09d2c8d4d7a7c3c05bf3d727837ba67e4dd4312c",
"size": "825",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "docs/examples/compute/cloudstack/create_node_advanced_zone.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2155"
},
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "9105547"
},
{
"name": "Shell",
"bytes": "12994"
}
],
"symlink_target": ""
} |
"""Enumerators for string types"""
# -*- coding: utf-8 -*-
from dlkit.abstract_osid.osid.errors import NotFound
# Mapping of string-match type identifiers to their display labels.
STRING_MATCH_TYPES = {
    'EXACT': 'Exact',
    'IGNORECASE': 'Ignore Case',
    'WORD': 'Word',
    'WORDIGNORECASE': 'Word Ignore Case',
    'WILDCARD': 'Wildcard',
    'REGEX': 'Regular Expression',
    'SOUND': 'Sound',
    'SOUNDEX': 'Soundex',
    'METAPHONE': 'Metaphone',
    'DMETAPHONE': 'Dmetaphone',
    'LEVENSHTEIN': 'Levenshtein'
}

TYPE_SET = {
    'SMT': STRING_MATCH_TYPES
}


def get_type_data(name):
    """Return a dictionary representation of a string match type.

    Can be used to initialize primordium.type.primitives.Type
    """
    identifier = name.upper()
    try:
        label = STRING_MATCH_TYPES[identifier]
    except KeyError:
        raise NotFound('String Type: ' + identifier)
    return {
        'authority': 'okapia.net',
        'namespace': 'string match types',
        'identifier': identifier,
        'domain': 'String Match Types',
        'display_name': label + ' String Match Type',
        'display_label': label,
        'description': 'The string match type for the ' + label
    }
| {
"content_hash": "4c5b812f37b9727aaba4c403824ca330",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 76,
"avg_line_length": 27.15909090909091,
"alnum_prop": 0.5757322175732218,
"repo_name": "mitsei/dlkit",
"id": "248ee207f309d0498f47c4c48f4fb8d784acf9e0",
"size": "1195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dlkit/primordium/locale/types/string.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "25170465"
},
{
"name": "TeX",
"bytes": "1088"
}
],
"symlink_target": ""
} |
import operator
from ..sql import operators
class UnevaluatableError(Exception):
    """Raised when a SQL clause cannot be evaluated in Python."""
    pass
# Operators that can be applied directly to Python operand values.
_straight_ops = set(getattr(operators, op)
                    for op in ('add', 'mul', 'sub',
                               'div',
                               'mod', 'truediv',
                               'lt', 'le', 'ne', 'gt', 'ge', 'eq'))

# Operators with no in-Python evaluation support.
_notimplemented_ops = set(getattr(operators, op)
                          for op in ('like_op', 'notlike_op', 'ilike_op',
                                     'notilike_op', 'between_op', 'in_op',
                                     'notin_op', 'endswith_op', 'concat_op'))
class EvaluatorCompiler(object):
def __init__(self, target_cls=None):
self.target_cls = target_cls
def process(self, clause):
meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
if not meth:
raise UnevaluatableError(
"Cannot evaluate %s" % type(clause).__name__)
return meth(clause)
def visit_grouping(self, clause):
return self.process(clause.element)
def visit_null(self, clause):
return lambda obj: None
def visit_false(self, clause):
return lambda obj: False
def visit_true(self, clause):
return lambda obj: True
def visit_column(self, clause):
if 'parentmapper' in clause._annotations:
parentmapper = clause._annotations['parentmapper']
if self.target_cls and not issubclass(
self.target_cls, parentmapper.class_):
raise UnevaluatableError(
"Can't evaluate criteria against alternate class %s" %
parentmapper.class_
)
key = parentmapper._columntoproperty[clause].key
else:
key = clause.key
get_corresponding_attr = operator.attrgetter(key)
return lambda obj: get_corresponding_attr(obj)
def visit_clauselist(self, clause):
evaluators = list(map(self.process, clause.clauses))
if clause.operator is operators.or_:
def evaluate(obj):
has_null = False
for sub_evaluate in evaluators:
value = sub_evaluate(obj)
if value:
return True
has_null = has_null or value is None
if has_null:
return None
return False
elif clause.operator is operators.and_:
def evaluate(obj):
for sub_evaluate in evaluators:
value = sub_evaluate(obj)
if not value:
if value is None:
return None
return False
return True
else:
raise UnevaluatableError(
"Cannot evaluate clauselist with operator %s" %
clause.operator)
return evaluate
def visit_binary(self, clause):
eval_left, eval_right = list(map(self.process,
[clause.left, clause.right]))
operator = clause.operator
if operator is operators.is_:
def evaluate(obj):
return eval_left(obj) == eval_right(obj)
elif operator is operators.isnot:
def evaluate(obj):
return eval_left(obj) != eval_right(obj)
elif operator in _straight_ops:
def evaluate(obj):
left_val = eval_left(obj)
right_val = eval_right(obj)
if left_val is None or right_val is None:
return None
return operator(eval_left(obj), eval_right(obj))
else:
raise UnevaluatableError(
"Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
return evaluate
def visit_unary(self, clause):
eval_inner = self.process(clause.element)
if clause.operator is operators.inv:
def evaluate(obj):
value = eval_inner(obj)
if value is None:
return None
return not value
return evaluate
raise UnevaluatableError(
"Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
def visit_bindparam(self, clause):
    """Return an evaluator that yields the bind parameter's fixed value,
    ignoring the object being evaluated."""
    bound_value = clause.value

    def evaluate(obj):
        return bound_value

    return evaluate
| {
"content_hash": "4321e631420f9dba5a7bef1c795d97fc",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 77,
"avg_line_length": 35.38582677165354,
"alnum_prop": 0.5129060970182465,
"repo_name": "rolandmansilla/microblog",
"id": "534e7fa8f1e24d70c6368f9890c13dc5fc81e70e",
"size": "4731",
"binary": false,
"copies": "20",
"ref": "refs/heads/master",
"path": "flask/lib/python2.7/site-packages/sqlalchemy/orm/evaluator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "6349"
},
{
"name": "CSS",
"bytes": "12052"
},
{
"name": "HTML",
"bytes": "8699"
},
{
"name": "JavaScript",
"bytes": "27063"
},
{
"name": "Python",
"bytes": "10651309"
},
{
"name": "Shell",
"bytes": "3288"
}
],
"symlink_target": ""
} |
"""
Copyright 2010 Christopher MacGown
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class RequestError(Exception):
    """Common exception raised by request handlers on request-level failures."""
class ProtocolError(Exception):
    """Generic protocol-level error."""

    # TODO(chris): Handle logging here.
class ClassNotFound(IOError):
    """Raised when a named class cannot be imported.

    :param class_name: name of the class whose import failed
    """

    def __init__(self, class_name=None):
        message = "Could not import %(class_name)s" % locals()
        # Fix: initialize through this class's own MRO position. The
        # original called super(IOError, self).__init__, which skipped
        # IOError's initializer entirely.
        super(ClassNotFound, self).__init__(message)
class ExecutionError(IOError):
    """Raised when an external command fails.

    The command, its return code and captured output streams are stored
    as attributes for programmatic inspection; the exception message
    summarizes all four.

    :param stdout: captured standard output of the command
    :param stderr: captured standard error of the command
    :param return_code: process exit status
    :param cmd: the command that was executed
    """

    def __init__(self, stdout=None, stderr=None, return_code=None, cmd=None):
        self.stdout = stdout
        self.stderr = stderr
        self.return_code = return_code
        self.cmd = cmd
        message = ("Command failed: %(cmd)s\n"
                   "return_code: %(return_code)s\n"
                   "stdout: %(stdout)r\n"
                   "stderr: %(stderr)r\n" % locals())
        # Fix: initialize through this class's own MRO position. The
        # original called super(IOError, self).__init__, which skipped
        # IOError's initializer entirely.
        super(ExecutionError, self).__init__(message)
| {
"content_hash": "31d68fc7e2d9fba1e04f50c9c0c3f0b4",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 77,
"avg_line_length": 32.02173913043478,
"alnum_prop": 0.6503733876442634,
"repo_name": "ChristopherMacGown/pynpoint",
"id": "2118d46d585f385b2bb32fc02d158744646f88d2",
"size": "1473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pynpoint/exc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "28135"
}
],
"symlink_target": ""
} |
from .message_passing_layers import MP_LAYERS
#from .adjacency import construct_adjacency_matrix_layer
'''
This module implements the core message passing operations.
###adjacency.py <-- compute an adjacency matrix based on vertex data.
message_passing.py <-- run a single iteration of message passing.
message.py <-- compute a message, given a hidden state.
vertex_update.py <-- compute a vertex's new hidden state, given a message.
'''
| {
"content_hash": "05c0039eb3cae68021e73bd18eab0bd8",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 74,
"avg_line_length": 40,
"alnum_prop": 0.7659090909090909,
"repo_name": "isaachenrion/jets",
"id": "863ec91235f6b81d1545dd171eea4009ee2ed25a",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/architectures/nmp/message_passing/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "11751"
},
{
"name": "Python",
"bytes": "258548"
},
{
"name": "Shell",
"bytes": "6358"
}
],
"symlink_target": ""
} |
import copy
import mock
from oslo.config import cfg
from mistral.openstack.common import log as logging
from mistral.tests import base
from mistral.db import api as db_api
from mistral.engine.scalable import engine
from mistral.actions import std_actions
from mistral.engine import states
from mistral.engine import client
from mistral.utils.openstack import keystone
# TODO(rakhmerov): add more tests

LOG = logging.getLogger(__name__)

# Canned credentials returned by the mocked keystone client in
# test_add_token_to_context.
TOKEN = "123ab"
USER_ID = "321ba"

# Workflow context shared (via copy.copy) by the data-flow tests below.
CONTEXT = {
    'person': {
        'first_name': 'John',
        'last_name': 'Doe',
        'address': {
            'street': '124352 Broadway Street',
            'city': 'Gloomington',
            'country': 'USA'
        }
    }
}

# Use the set_default method to set value otherwise in certain test cases
# the change in value is not permanent.
cfg.CONF.set_default('auth_enable', False, group='pecan')
def create_workbook(definition_path):
    """Persist a workbook named 'my_workbook' whose definition is loaded
    from the given test-resource path, and return the created record."""
    definition = base.get_resource(definition_path)
    workbook_values = {
        'name': 'my_workbook',
        'definition': definition,
    }
    return db_api.workbook_create(workbook_values)
@mock.patch.object(
    client.EngineClient, 'start_workflow_execution',
    mock.MagicMock(side_effect=base.EngineTestCase.mock_start_workflow))
@mock.patch.object(
    client.EngineClient, 'convey_task_result',
    mock.MagicMock(side_effect=base.EngineTestCase.mock_task_result))
@mock.patch.object(
    engine.ScalableEngine, '_run_tasks',
    mock.MagicMock(side_effect=base.EngineTestCase.mock_run_tasks))
class DataFlowTest(base.EngineTestCase):
    """Data-flow tests for the scalable engine.

    The class decorators route engine/task interactions through the
    EngineTestCase mock helpers, so workflows execute synchronously
    against the test database.  Each test starts a workflow from a YAML
    workbook fixture and asserts on the per-task in_context, parameters
    and output recorded in the DB.
    """

    def _check_in_context_execution(self, task):
        # Every task's in_context must carry the '__execution' record
        # describing the owning workflow execution.
        self.assertIn('__execution', task['in_context'])

        exec_dict = task['in_context']['__execution']

        self.assertEqual('my_workbook', exec_dict['workbook_name'])
        self.assertEqual(task['execution_id'], exec_dict['id'])
        self.assertIn('task', exec_dict)

    def test_two_dependent_tasks(self):
        """Second task declared as dependent on the first: its in_context
        must contain the first task's published output."""
        CTX = copy.copy(CONTEXT)

        wb = create_workbook('data_flow/two_dependent_tasks.yaml')

        execution = self.engine.start_workflow_execution(wb['name'],
                                                         'build_greeting',
                                                         CTX)

        # We have to reread execution to get its latest version.
        execution = db_api.execution_get(execution['workbook_name'],
                                         execution['id'])

        self.assertEqual(states.SUCCESS, execution['state'])
        self.assertDictEqual(CTX, execution['context'])

        tasks = db_api.tasks_get(wb['name'], execution['id'])

        self.assertEqual(2, len(tasks))

        build_full_name_task = \
            self._assert_single_item(tasks, name='build_full_name')
        build_greeting_task = \
            self._assert_single_item(tasks, name='build_greeting')

        # Check the first task.
        self.assertEqual(states.SUCCESS, build_full_name_task['state'])

        self._check_in_context_execution(build_full_name_task)
        del build_full_name_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_full_name_task['in_context'])
        self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
                             build_full_name_task['parameters'])
        self.assertDictEqual(
            {
                'f_name': 'John Doe',
                'task': {
                    'build_full_name': {
                        'full_name': 'John Doe'
                    }
                }
            },
            build_full_name_task['output'])

        # Check the second task.
        in_context = CTX
        in_context['f_name'] = 'John Doe'

        self.assertEqual(states.SUCCESS, build_greeting_task['state'])
        self.assertEqual('John Doe',
                         build_greeting_task['in_context']['f_name'])
        self.assertDictEqual({'full_name': 'John Doe'},
                             build_greeting_task['parameters'])
        self.assertDictEqual(
            {
                'task': {
                    'build_greeting': {
                        'greeting': 'Hello, John Doe!',
                    }
                }
            },
            build_greeting_task['output'])

        del build_greeting_task['in_context']['task']
        self._check_in_context_execution(build_greeting_task)
        del build_greeting_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_greeting_task['in_context'])

    def test_task_with_two_dependencies(self):
        """A task depending on two upstream tasks sees both of their
        published values in its in_context and parameters."""
        CTX = copy.copy(CONTEXT)

        wb = create_workbook('data_flow/task_with_two_dependencies.yaml')

        execution = self.engine.start_workflow_execution(wb['name'],
                                                         'send_greeting',
                                                         CTX)

        # We have to reread execution to get its latest version.
        execution = db_api.execution_get(execution['workbook_name'],
                                         execution['id'])

        self.assertEqual(states.SUCCESS, execution['state'])
        self.assertDictEqual(CTX, execution['context'])

        tasks = db_api.tasks_get(wb['name'], execution['id'])

        self.assertEqual(3, len(tasks))

        build_full_name_task = \
            self._assert_single_item(tasks, name='build_full_name')
        build_greeting_task = \
            self._assert_single_item(tasks, name='build_greeting')
        send_greeting_task = \
            self._assert_single_item(tasks, name='send_greeting')

        # Check the first task.
        self.assertEqual(states.SUCCESS, build_full_name_task['state'])

        self._check_in_context_execution(build_full_name_task)
        del build_full_name_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_full_name_task['in_context'])
        self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
                             build_full_name_task['parameters'])
        self.assertDictEqual(
            {
                'f_name': 'John Doe',
                'task': {
                    'build_full_name': {
                        'full_name': 'John Doe',
                    }
                }
            },
            build_full_name_task['output'])

        # Check the second task.
        in_context = CTX
        in_context['f_name'] = 'John Doe'

        self.assertEqual(states.SUCCESS, build_greeting_task['state'])
        self.assertEqual('John Doe',
                         build_greeting_task['in_context']['f_name'])
        self.assertDictEqual({}, build_greeting_task['parameters'])
        self.assertDictEqual(
            {
                'greet_msg': 'Cheers!',
                'task': {
                    'build_greeting': {
                        'greeting': 'Cheers!'
                    }
                }
            },
            build_greeting_task['output'])

        del build_greeting_task['in_context']['task']
        self._check_in_context_execution(build_greeting_task)
        del build_greeting_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_greeting_task['in_context'])

        # Check the third task.
        in_context = CTX
        in_context['f_name'] = 'John Doe'
        in_context['greet_msg'] = 'Cheers!'
        in_context['task'] = {
            'build_greeting': {
                'greeting': 'Cheers!'
            }
        }

        self.assertEqual(states.SUCCESS, send_greeting_task['state'])
        self._check_in_context_execution(send_greeting_task)
        del send_greeting_task['in_context']['__execution']
        self.assertDictEqual(in_context, send_greeting_task['in_context'])
        self.assertDictEqual({'f_name': 'John Doe', 'greet_msg': 'Cheers!'},
                             send_greeting_task['parameters'])
        self.assertDictEqual(
            {
                'task': {
                    'send_greeting': {
                        'greeting_sent': True
                    }
                }
            },
            send_greeting_task['output'])

    def test_two_subsequent_tasks(self):
        """Chain of two tasks (on-success): output of the first feeds the
        parameters of the second."""
        CTX = copy.copy(CONTEXT)

        wb = create_workbook('data_flow/two_subsequent_tasks.yaml')

        execution = self.engine.start_workflow_execution(wb['name'],
                                                         'build_full_name',
                                                         CTX)

        # We have to reread execution to get its latest version.
        execution = db_api.execution_get(execution['workbook_name'],
                                         execution['id'])

        self.assertEqual(states.SUCCESS, execution['state'])
        self.assertDictEqual(CTX, execution['context'])

        tasks = db_api.tasks_get(wb['name'], execution['id'])

        self.assertEqual(2, len(tasks))

        build_full_name_task = \
            self._assert_single_item(tasks, name='build_full_name')
        build_greeting_task = \
            self._assert_single_item(tasks, name='build_greeting')

        # Check the first task.
        self.assertEqual(states.SUCCESS, build_full_name_task['state'])

        self._check_in_context_execution(build_full_name_task)
        del build_full_name_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_full_name_task['in_context'])
        self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
                             build_full_name_task['parameters'])
        self.assertDictEqual(
            {
                'f_name': 'John Doe',
                'task': {
                    'build_full_name': {
                        'full_name': 'John Doe'
                    }
                }
            },
            build_full_name_task['output'])

        # Check the second task.
        in_context = CTX
        in_context['f_name'] = 'John Doe'

        self.assertEqual(states.SUCCESS, build_greeting_task['state'])
        self.assertEqual('John Doe',
                         build_greeting_task['in_context']['f_name'])
        self.assertDictEqual({'full_name': 'John Doe'},
                             build_greeting_task['parameters'])
        self.assertDictEqual(
            {
                'greet_msg': 'Hello, John Doe!',
                'task': {
                    'build_greeting': {
                        'greeting': 'Hello, John Doe!',
                    }
                }
            },
            build_greeting_task['output'])

        del build_greeting_task['in_context']['task']
        self._check_in_context_execution(build_greeting_task)
        del build_greeting_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_greeting_task['in_context'])

    def test_three_subsequent_tasks(self):
        """Chain of three tasks: published values accumulate in the
        context as the workflow progresses."""
        CTX = copy.copy(CONTEXT)

        wb = create_workbook('data_flow/three_subsequent_tasks.yaml')

        execution = self.engine.start_workflow_execution(wb['name'],
                                                         'build_full_name',
                                                         CTX)

        # We have to reread execution to get its latest version.
        execution = db_api.execution_get(execution['workbook_name'],
                                         execution['id'])

        self.assertEqual(states.SUCCESS, execution['state'])
        self.assertDictEqual(CTX, execution['context'])

        tasks = db_api.tasks_get(wb['name'], execution['id'])

        self.assertEqual(3, len(tasks))

        build_full_name_task = \
            self._assert_single_item(tasks, name='build_full_name')
        build_greeting_task = \
            self._assert_single_item(tasks, name='build_greeting')
        send_greeting_task = \
            self._assert_single_item(tasks, name='send_greeting')

        # Check the first task.
        self.assertEqual(states.SUCCESS, build_full_name_task['state'])

        self._check_in_context_execution(build_full_name_task)
        del build_full_name_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_full_name_task['in_context'])
        self.assertDictEqual({'first_name': 'John', 'last_name': 'Doe'},
                             build_full_name_task['parameters'])
        self.assertDictEqual(
            {
                'f_name': 'John Doe',
                'task': {
                    'build_full_name': {
                        'full_name': 'John Doe'
                    }
                }
            },
            build_full_name_task['output'])

        # Check the second task.
        in_context = CTX
        in_context['f_name'] = 'John Doe'

        self.assertEqual(states.SUCCESS, build_greeting_task['state'])
        self.assertEqual('John Doe',
                         build_greeting_task['in_context']['f_name'])
        self.assertDictEqual({'full_name': 'John Doe'},
                             build_greeting_task['parameters'])
        self.assertDictEqual(
            {
                'greet_msg': 'Hello, John Doe!',
                'task': {
                    'build_greeting': {
                        'greeting': 'Hello, John Doe!',
                    }
                }
            },
            build_greeting_task['output'])

        del build_greeting_task['in_context']['task']
        self._check_in_context_execution(build_greeting_task)
        del build_greeting_task['in_context']['__execution']
        self.assertDictEqual(CTX, build_greeting_task['in_context'])

        # Check the third task.
        in_context = CTX
        in_context['f_name'] = 'John Doe'
        in_context['greet_msg'] = 'Hello, John Doe!'

        self.assertEqual(states.SUCCESS, send_greeting_task['state'])
        self.assertEqual('John Doe',
                         send_greeting_task['in_context']['f_name'])
        self.assertEqual('Hello, John Doe!',
                         send_greeting_task['in_context']['greet_msg'])
        self.assertDictEqual({'greeting': 'Hello, John Doe!'},
                             send_greeting_task['parameters'])
        self.assertDictEqual(
            {
                'sent': True,
                'task': {
                    'send_greeting': {
                        'greeting_sent': True,
                    }
                }
            },
            send_greeting_task['output'])

        del send_greeting_task['in_context']['task']
        self._check_in_context_execution(send_greeting_task)
        del send_greeting_task['in_context']['__execution']
        self.assertDictEqual(CTX, send_greeting_task['in_context'])

    @mock.patch.object(
        std_actions.HTTPAction, 'run',
        mock.MagicMock(return_value={'state': states.RUNNING}))
    @mock.patch.object(
        keystone, 'client_for_trusts',
        mock.Mock(return_value=mock.MagicMock(user_id=USER_ID,
                                              auth_token=TOKEN)))
    def test_add_token_to_context(self):
        """With pecan auth enabled and a trust on the workbook, the task
        context gains the mocked keystone token and user id."""
        task_name = "create-vms"

        cfg.CONF.pecan.auth_enable = True

        try:
            workbook = create_workbook("test_rest.yaml")
            db_api.workbook_update(workbook['name'], {'trust_id': '123'})

            execution = self.engine.start_workflow_execution(workbook['name'],
                                                             task_name, {})
            tasks = db_api.tasks_get(workbook['name'], execution['id'])
            task = self._assert_single_item(tasks, name=task_name)

            context = task['in_context']

            self.assertIn("auth_token", context)
            self.assertEqual(TOKEN, context['auth_token'])
            self.assertEqual(USER_ID, context["user_id"])

            self.engine.convey_task_result(workbook['name'], execution['id'],
                                           task['id'], states.SUCCESS, {})

            execution = db_api.execution_get(workbook['name'], execution['id'])

            self.assertEqual(states.SUCCESS, execution['state'])
        finally:
            cfg.CONF.pecan.auth_enable = False
| {
"content_hash": "c4d30df4476ab72e609035ff3e8c804b",
"timestamp": "",
"source": "github",
"line_count": 434,
"max_line_length": 79,
"avg_line_length": 37.0184331797235,
"alnum_prop": 0.5284451636997386,
"repo_name": "TimurNurlygayanov/mistral",
"id": "e83d0f4300adb3bc59d3016a22d5b80b80055dda",
"size": "16700",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mistral/tests/unit/engine/test_data_flow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "561348"
}
],
"symlink_target": ""
} |
from models import User
import common
class UserManager():
    """Thin service wrapper around the ``User`` model."""

    def __init__(self):
        # No state to initialise.
        pass

    @property
    def get_all(self):
        # Queryset of every User row.
        return User.objects.all()

    def add(self, user):
        # Hash the user's password before persisting.
        # NOTE(review): the helper name suggests plain (unsalted) MD5,
        # which is weak for password storage -- consider a dedicated
        # password hasher.
        user.password = common.md5(user.password)
        user.save()
| {
"content_hash": "8af7cf9c8d7cb4de3d68fd91a680c3b1",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 49,
"avg_line_length": 18.75,
"alnum_prop": 0.5533333333333333,
"repo_name": "GavinZhuLei/GavinsDjango",
"id": "49c355b5819eff1acacb7d0e2784a2940c8bb33c",
"size": "362",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysite/auth/userManager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1627563"
},
{
"name": "HTML",
"bytes": "116063"
},
{
"name": "JavaScript",
"bytes": "1646538"
},
{
"name": "Python",
"bytes": "37419"
}
],
"symlink_target": ""
} |
# Sphinx build configuration for the django-userena documentation.
# Largely the stock sphinx-quickstart template; values left commented out
# keep their Sphinx defaults.

import sys, os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('..'))
sys.path.append(os.path.abspath('../demo'))

# Import the documented packages so autodoc and the version fields below work.
userena = __import__('userena')
demo = __import__('demo')

# Django settings must be importable before userena models can be autodoc'd.
os.environ['DJANGO_SETTINGS_MODULE'] = 'demo.settings'

# -- General configuration -----------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'django-userena'
copyright = u'2010, 2011 Bread & Pepper'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = userena.get_version()
# The full version, including alpha/beta/rc tags.
release = userena.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []

# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'murphy'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['theme']

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_use_modindex = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'userenadoc'

# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'Userena.tex', u'Userena Documentation',
     u'Petar Radosevic', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_use_modindex = True
| {
"content_hash": "c6b43569a30148e61870d6f011e3d146",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 80,
"avg_line_length": 32.76881720430107,
"alnum_prop": 0.7099261689909762,
"repo_name": "beni55/django-userena",
"id": "b5d21558edb381c5f9a985ce3e9cee295f561dd1",
"size": "6513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "18379"
},
{
"name": "HTML",
"bytes": "41039"
},
{
"name": "Nginx",
"bytes": "101"
},
{
"name": "Python",
"bytes": "232845"
},
{
"name": "Shell",
"bytes": "800"
}
],
"symlink_target": ""
} |
import copy
import re
import sys
from datetime import datetime
from pprint import PrettyPrinter
from ..rest_client import AtlassianRestAPI
# Trailing "+HH:MM" timezone offset of an ISO-8601 timestamp.  Used by
# get_time() to strip the colon before strptime on interpreters older than
# 3.7, where %z could not parse offsets containing a colon.
RE_TIMEZONE = re.compile(r"(\d{2}):(\d{2})$")
class BitbucketBase(AtlassianRestAPI):
    """Common base class for Bitbucket resource wrappers.

    Keeps a cached ``data`` dict describing the wrapped resource and
    provides helpers for paged GET requests, timestamp parsing, and
    propagating session settings to newly created objects.
    """

    CONF_TIMEFORMAT = "%Y-%m-%dT%H:%M:%S.%f%z"
    bulk_headers = {"Content-Type": "application/vnd.atl.bitbucket.bulk+json"}

    def __init__(self, url, *args, **kwargs):
        """
        Init the rest api wrapper

        :param url: string: The base url used for the rest api.
        :param *args: list: The fixed arguments for the AtlassianRestApi.
        :param **kwargs: dict: The keyword arguments for the AtlassianRestApi.

        :return: nothing
        """
        self._update_data(kwargs.pop("data", {}))
        if url is None:
            url = self.get_link("self")
            if isinstance(url, list):  # Server has a list of links
                url = url[0]
        self.timeformat_lambda = kwargs.pop("timeformat_lambda", lambda x: self._default_timeformat_lambda(x))
        self._check_timeformat_lambda()
        super(BitbucketBase, self).__init__(url, *args, **kwargs)

    def __str__(self):
        return PrettyPrinter(indent=4).pformat(self.__data if self.__data else self)

    def _get_paged(self, url, params=None, data=None, flags=None, trailing=None, absolute=False):
        """
        Used to get the paged data

        :param url: string: The url to retrieve
        :param params: dict (default is None): The parameters
        :param data: dict (default is None): The data
        :param flags: string[] (default is None): The flags
        :param trailing: bool (default is None): If True, a trailing slash is added to the url
        :param absolute: bool (default is False): If True, the url is used absolute and not relative to the root

        :return: A generator object for the data elements
        """
        if params is None:
            params = {}

        while True:
            response = self.get(url, trailing=trailing, params=params, data=data, flags=flags, absolute=absolute)
            if "values" not in response:
                return

            for value in response.get("values", []):
                yield value

            if self.cloud:
                # Cloud pagination follows the absolute "next" link.
                url = response.get("next")
                if url is None:
                    break

                # From now on we have absolute URLs including the trailing slash if needed
                absolute = True
                trailing = False
            else:
                # Server pagination uses a numeric start offset.
                if response.get("nextPageStart") is None:
                    break
                params["start"] = response.get("nextPageStart")

        return

    @staticmethod
    def _default_timeformat_lambda(timestamp):
        """
        Default time format function.

        :param timestamp: datetime str: The datetime object of the parsed string or the raw value if parsing failed

        :return: timestamp if it was a datetime object, else None
        """
        return timestamp if isinstance(timestamp, datetime) else None

    def _check_timeformat_lambda(self):
        """
        Check the value of the time format lambda.

        Accepts None or any callable.  Fix: the original constructed a
        ``ValueError`` without raising it, so invalid values were
        silently accepted and only failed much later inside get_time();
        it also rejected named functions even though they work fine.

        :return: True if the configured value is acceptable
        :raises ValueError: for a non-None, non-callable value
        """
        if self.timeformat_lambda is None or callable(self.timeformat_lambda):
            return True
        raise ValueError("Expected [None] or [lambda function] for argument [timeformat_func]")

    def _sub_url(self, url):
        """
        Get the full url from a relative one.

        :param url: string: The sub url

        :return: The absolute url
        """
        return self.url_joiner(self.url, url)

    @property
    def data(self):
        """
        Get the internal cached data.  A shallow copy (``copy.copy``) is
        returned, so nested structures are still shared with the cache.

        :return: A copy of the data cache
        """
        return copy.copy(self.__data)

    def get_data(self, id, default=None):
        """
        Get a data element from the internal data cache.  A shallow copy is
        returned.  If the element isn't present, the default value is returned.

        :param id: string: The data element to return
        :param default: any (default is None): The value to return if id is not present

        :return: The requested data element
        """
        return copy.copy(self.__data[id]) if id in self.__data else default

    def get_time(self, id):
        """
        Return the time value with the expected format.

        :param id: string: The id for the time data

        :return: The time with the configured format, see timeformat_lambda.
        """
        value_str = self.get_data(id)
        if self.timeformat_lambda is None:
            return value_str

        if isinstance(value_str, str):
            # The format contains a : in the timezone which is supported from 3.7 on.
            if sys.version_info <= (3, 7):
                value_str = RE_TIMEZONE.sub(r"\1\2", value_str)
            value = datetime.strptime(value_str, self.CONF_TIMEFORMAT)
        else:
            value = value_str

        return self.timeformat_lambda(value)

    def _update_data(self, data):
        """
        Internal function to update the data.

        :param data: dict: The new data.

        :return: The updated object
        """
        self.__data = data

        return self

    @property
    def _new_session_args(self):
        """
        Get the kwargs for new objects (session, root, version,...).

        :return: A dict with the kwargs for new objects
        """
        return dict(
            session=self._session,
            cloud=self.cloud,
            api_root=self.api_root,
            api_version=self.api_version,
            timeformat_lambda=self.timeformat_lambda,
        )
| {
"content_hash": "d938e8a6994bb2c592e9284caabc7a05",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 115,
"avg_line_length": 33.42458100558659,
"alnum_prop": 0.5803108808290155,
"repo_name": "AstroTech/atlassian-python-api",
"id": "83daf05a7363e0f8735fa7df73977dd371c70582",
"size": "5999",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atlassian/bitbucket/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1544"
},
{
"name": "Python",
"bytes": "667966"
},
{
"name": "Shell",
"bytes": "1679"
}
],
"symlink_target": ""
} |
import numpy as np
import pybasis
import pyam
def skip_comments(fp):
    """Advance *fp* and return the next line that is not a comment.

    Comment lines begin with '#'.  Robustness fix: the original tested
    ``line[0] != '#'``, which raises IndexError on an empty string;
    ``startswith`` handles that case and returns the line as data.

    :param fp: an iterator of strings (e.g. an open file object)
    :return: the next non-comment line, unchanged
    :raises StopIteration: if the input is exhausted
    """
    while True:
        line = next(fp)
        # ignore comment lines
        if not line.startswith('#'):
            return line
def scale_factor(A):
    """Return the mass-dependent interaction scaling factor (18/A)**0.3.

    :param A: mass number the matrix elements are scaled to
    """
    ratio = 18.0 / A
    return ratio ** 0.3
def read_m_scheme_matel(filename, sp_basis, scale_factor=1.0):
    """Read one-body energies and two-body matrix elements from an
    m-scheme interaction file.

    Expected file layout (lines starting with '#' are comments):
      * a count of single-particle energies, then one line per orbital:
        ``index n l 2j 2m energy`` (j and m are stored doubled; they are
        rebuilt as HalfInt with denominator 2),
      * a count of two-body matrix elements, then one line per element:
        ``p q r s value`` using the file's own orbital indices.
    File indices are remapped to ``sp_basis`` state indices.

    :param filename: path of the interaction file
    :param sp_basis: single-particle basis providing LookUpStateIndex
    :param scale_factor: multiplier applied to every two-body element
    :return: tuple ``(ob_matel, tb_matel)`` of dicts keyed by basis
        index (pairs / pairs-of-pairs respectively)
    :raises ValueError: if the file's s.p. count differs from len(sp_basis)
    """
    ob_matel = {}
    tb_matel = {}
    index_mapping = {}
    num_sp_energies = -1
    with open(filename) as fp:
        line = skip_comments(fp)
        num_sp_energies = int(line)
        if num_sp_energies != len(sp_basis):
            raise ValueError("Invalid number of s.p. energies")
        for _ in range(num_sp_energies):
            line = skip_comments(fp)
            tokens = line.split()
            idx = int(tokens[0])
            n = int(tokens[1])
            l = int(tokens[2])
            # File stores 2j and 2m; HalfInt(numerator, 2) restores them.
            j = pyam.HalfInt(int(tokens[3]), 2)
            m = pyam.HalfInt(int(tokens[4]), 2)
            en = float(tokens[5])
            sp_index = sp_basis.LookUpStateIndex((n, l, j, m))
            index_mapping[idx] = sp_index
            # One-body energies are diagonal in the s.p. index.
            ob_matel[(sp_index, sp_index)] = en
        line = skip_comments(fp)
        num_tb_matels = int(line)
        for _ in range(num_tb_matels):
            line = skip_comments(fp)
            tokens = line.split()
            idx_p = index_mapping[int(tokens[0])]
            idx_q = index_mapping[int(tokens[1])]
            idx_r = index_mapping[int(tokens[2])]
            idx_s = index_mapping[int(tokens[3])]
            matel = float(tokens[4])
            tb_matel[((idx_p, idx_q), (idx_r, idx_s))] = matel*scale_factor
    return (ob_matel, tb_matel)
if __name__ == "__main__":
def set_up_sp_basis(filename):
with open(filename) as fp:
orbitals = pybasis.nlj_orbital.ParseOrbitalPNStream(fp, True)
sp_subspace = pybasis.m_scheme.SingleParticleSubspacePN(
pybasis.nlj_orbital.OrbitalSpeciesPN.kN, orbitals
)
return sp_subspace
sp_basis = set_up_sp_basis('sd_orbitals.dat')
ob_matel, tb_matel = read_m_scheme_matel('sdshellint.dat', sp_basis)
print(sorted(ob_matel.items()))
print(sorted(tb_matel.items()))
| {
"content_hash": "1e67394f1e221ba674f0b5487b0e07fe",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 75,
"avg_line_length": 30.309859154929576,
"alnum_prop": 0.5497211895910781,
"repo_name": "kc9jud/ShellCorretta",
"id": "39038ea76501eb7ccd92d3c2e58ec4eed7a50915",
"size": "2152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/read_matel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "46846"
},
{
"name": "C",
"bytes": "65"
},
{
"name": "C++",
"bytes": "10902"
},
{
"name": "LiveScript",
"bytes": "50538"
},
{
"name": "Makefile",
"bytes": "2623"
},
{
"name": "PostScript",
"bytes": "349881"
},
{
"name": "Python",
"bytes": "21417"
},
{
"name": "Scheme",
"bytes": "6233"
},
{
"name": "Shell",
"bytes": "3420"
},
{
"name": "SourcePawn",
"bytes": "3190"
}
],
"symlink_target": ""
} |
"""Copyright 2014 Cyrus Dasadia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from fabric.api import run, local, env, sudo, put, warn_only
from fabric.context_managers import cd, prefix
from fabric.colors import yellow, green
from fabric.contrib.files import append
from fabric.operations import prompt
from getpass import getpass
import inspect
import random
# Remote locations of the application checkout and its virtualenv.
env.app_dir = '/opt/cito_plugin_server'
env.venv_dir = '/opt/virtualenvs/citopluginvenv'
env.pip_file = 'requirements.txt'
# Setting defaults in case e() is not called; e() can override these with a
# different settings environment (e.g. staging).
env.django_settings_module = 'cito_plugin_server.settings.production'
env.django_settings_file = 'cito_plugin_server/settings/production.py'
def install_virtualenv():
    """Install virtualenv system-wide via pip (requires sudo)."""
    print(yellow('Starting >> %s()' % _fn()))
    sudo('pip -q install virtualenv')
def setup_virtualenv():
    """Create an isolated virtualenv at env.venv_dir."""
    print(yellow('Starting >> %s()' % _fn()))
    run('virtualenv --no-site-packages %(venv_dir)s' % env, pty=True)
def update_requirements():
    """Install/upgrade Python dependencies inside the virtualenv."""
    print(yellow('Starting >> %s()' % _fn()))
    with cd(env.app_dir):
        ve_run('pip install -q --upgrade setuptools')
        ve_run('pip install -r %s' % env.pip_file)
def mkdirs():
    """Create the app and virtualenv directories owned by the deploy user."""
    print(yellow('Starting >> %s()' % _fn()))
    sudo('mkdir -p %(app_dir)s && chown %(user)s %(app_dir)s' % env)
    sudo('mkdir -p %(venv_dir)s && chown %(user)s %(venv_dir)s' % env)
def upload_reqs():
    """Upload the local requirements file to the remote app directory."""
    print(yellow('Starting >> %s()' % _fn()))
    put('requirements.txt', env.app_dir)
def install_build_deps():
    """Install compiler toolchain and MySQL client headers.

    warn_only keeps the run going even if the package manager reports
    failures (e.g. a package name differs on this distro).
    """
    print(yellow('Starting >> %s()' % _fn()))
    _detect_pkg_manager()
    with warn_only():
        sudo('%(pkg_manager)s -y install build-essential python-dev python-mysqldb python-pip libmysqlclient-dev' % env)
def install_mysql():
    """Install the MySQL server and client packages (best effort)."""
    print(yellow('Starting >> %s()' % _fn()))
    _detect_pkg_manager()
    with warn_only():
        sudo('%(pkg_manager)s -y install mysql-server mysql-client' % env)
def bootstrap():
    """Provision a fresh host: system packages, dirs, virtualenv, deps."""
    install_build_deps()
    install_virtualenv()
    mkdirs()
    upload_reqs()
    setup_virtualenv()
    update_requirements()
    install_mysql()
def deploy():
    """Deploy the application: copy code, generate secrets, set up the DB."""
    dj_copy_vagrant()
    dj_create_secret()
    dj_dbconfig()
    dj_syncdb()
    dj_create_superuser()
#################
# Django specific methods
#################
# noinspection PyBroadException
def dj_dbconfig(createdb=True):
    """
    Prompt for MySQL credentials, optionally create the database, and append
    the connection settings to the Django settings file.

    :param createdb: when True, run ``mysqladmin create`` for the chosen DB.
    """
    env.mysql_user = prompt(green("Enter MySQL username: "))
    env.mysql_password = getpass(green("Enter MySQL password (will not display on screen): "))
    env.mysql_dbname = prompt(green("Enter MySQL DB Name (default=cito_plugin_server): "), default='cito_plugin_server')
    if createdb:
        # NOTE(review): the password is passed on the mysqladmin command line,
        # so it is briefly visible in the remote process list.
        run('mysqladmin -u %(mysql_user)s -p%(mysql_password)s create %(mysql_dbname)s' % env)
    with cd(env.app_dir):
        sudo('chown %(user)s %(app_dir)s/%(django_settings_file)s' % env)
        # Append DB settings so they override whatever the settings file defines.
        append('%(app_dir)s/%(django_settings_file)s' % env, "\nDATABASES['default']['USER'] = '%(mysql_user)s'" % env)
        append('%(app_dir)s/%(django_settings_file)s' % env, "DATABASES['default']['PASSWORD'] = '%(mysql_password)s'" % env)
        append('%(app_dir)s/%(django_settings_file)s' % env, "DATABASES['default']['NAME'] = '%(mysql_dbname)s'" % env)
def dj_create_superuser():
    """
    Interactively create a Django superuser via manage.py.
    """
    print(yellow('Starting >> %s()' % _fn()))
    with cd(env.app_dir):
        ve_run('python manage.py createsuperuser --settings=%s' % env.django_settings_module)
def dj_copy_vagrant(from_dir='/vagrant/'):
    """
    Copy the project files from *from_dir* onto env.app_dir.
    Can be used as an alternative to ``git clone <repo>`` on a Vagrant box.
    """
    print(yellow('Starting >> %s(from:%s to:%s)' % (_fn(), from_dir, env.app_dir)))
    with cd(env.app_dir):
        sudo('cp -Rpf %s/* ./ ' % from_dir)
        # The app writes logs at runtime, so the deploy user must own them.
        sudo('chown -R %(user)s %(app_dir)s/logs' % env)
def dj_syncdb():
    """
    Run the initial database sync (and South migrations) non-interactively.
    """
    print(yellow('Starting >> %s()' % _fn()))
    with cd(env.app_dir):
        ve_run('python manage.py syncdb --noinput --migrate --settings=%s' % env.django_settings_module)
def dj_create_secret():
    """
    Generate a fresh 50-character SECRET_KEY and append it to secret_key.py.
    Note: each invocation appends a new key; the last assignment wins.
    """
    print(yellow('Starting >> %s()' % _fn()))
    with cd(env.app_dir):
        sudo('chown %(user)s %(app_dir)s/cito_plugin_server/settings/secret_key.py' % env)
        # SystemRandom uses the OS entropy source, suitable for secrets.
        secret_sauce = "".join([random.SystemRandom().choice("abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)") for i in range(50)])
        append('%(app_dir)s/cito_plugin_server/settings/secret_key.py' % env, "SECRET_KEY = '%s'" % secret_sauce)
#################
# Helper methods
#################
def _fn():
    """Return the name of the calling function (used for log prefixes)."""
    return inspect.stack()[1][3]
def e(environment='production'):
    """Select the Django settings environment (e.g. ``fab e:staging ...``)."""
    env.environment = environment
    env.django_settings_module = 'cito_plugin_server.settings.%s' % environment
    env.django_settings_file = 'cito_plugin_server/settings/%s.py' % environment
    print(green('django settings set to %(django_settings_module)s' % env))
    return
def _get_vagrant_ssh_config():
    """Parse ``vagrant ssh-config`` output into a {key: value} dict."""
    result = local('vagrant ssh-config', capture=True)
    conf = {}
    for line in iter(result.splitlines()):
        parts = line.split()
        conf[parts[0]] = ' '.join(parts[1:])
    return conf
def ve_run(command, func=run, base_dir=env.app_dir, *args, **kwargs):
    """Run *command* inside the virtualenv, from *base_dir*.

    Note: the ``base_dir`` default is captured at import time, so later
    changes to ``env.app_dir`` do not affect it.
    """
    with cd(base_dir):
        with prefix('source %(venv_dir)s/bin/activate' % env):
            return func(command, *args, **kwargs)
def _detect_pkg_manager():
    """Detect the system package manager and store its path in env.pkg_manager.

    Probes apt-get, yum and zypper in order and stops at the first one found.
    Leaves env.pkg_manager as an empty string if none is available.

    Bug fix: the original loop kept overwriting env.pkg_manager with every
    probe's output, so it always ended up holding the *last* probe's result
    (typically empty on hosts without zypper), even when an earlier manager
    was found.
    """
    print(yellow('Starting >> %s()' % _fn()))
    managers = ['apt-get', 'yum', 'zypper']
    env.pkg_manager = ''
    with warn_only():
        for candidate in managers:
            result = run('which %s' % candidate).stdout
            if result:
                env.pkg_manager = result
                break
    print(green('Package manager is %(pkg_manager)s' % env))
    return
def hostname():
    """Print the remote host's kernel/hostname info (connectivity check)."""
    print(yellow('Starting >> %s()' % _fn()))
    run("uname -a")
def vagrant(user='vagrant'):
    """Target the local Vagrant box using its generated ssh-config."""
    print(yellow('Starting >> %s()' % _fn()))
    print(yellow('Setting user as %s' % user))
    # This assignment is superseded below by the User from vagrant ssh-config.
    env.user = user
    vagrant.config = _get_vagrant_ssh_config()
    env.key_config = vagrant.config['IdentityFile']
    env.hosts = ['%s:%s' % (vagrant.config['HostName'], vagrant.config['Port'])]
    env.user = vagrant.config['User']
| {
"content_hash": "7dc4b33a30c1127daa1e129181b17e16",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 135,
"avg_line_length": 32.11538461538461,
"alnum_prop": 0.6309880239520959,
"repo_name": "CitoEngine/cito_plugin_server",
"id": "b74e7b464308432f61038577a7ca6d2b3574321a",
"size": "6680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabfile.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "51274"
},
{
"name": "HTML",
"bytes": "22568"
},
{
"name": "JavaScript",
"bytes": "75416"
},
{
"name": "Python",
"bytes": "49561"
},
{
"name": "Shell",
"bytes": "1218"
}
],
"symlink_target": ""
} |
import unittest
from datetime import date
from datetime import datetime
from ggrc_workflows.models import task_group_task
class TestTaskGroupTask(unittest.TestCase):
    """Unit tests for TaskGroupTask validators (task type and dates)."""

    def test_validate_task_type(self):
        task = task_group_task.TaskGroupTask()
        with self.assertRaises(ValueError):
            task.validate_task_type("task_type", "helloh")
        self.assertEqual("menu", task.validate_task_type("task_type", "menu"))

    def test_validate_date(self):
        task = task_group_task.TaskGroupTask()
        # Two-digit years are promoted into the 2000s.
        self.assertEqual(date(2002, 4, 16), task.validate_date(date(2, 4, 16)))
        # Datetimes are truncated to their date component.
        self.assertEqual(date(2014, 7, 23),
                         task.validate_date(datetime(2014, 7, 23, 22, 5, 7)))
        self.assertEqual(date(2014, 7, 23),
                         task.validate_date(datetime(2014, 7, 23, 0, 0, 0)))

    def test_validate_end_date_decorator(self):
        task = task_group_task.TaskGroupTask()
        task.end_date = date(15, 4, 17)
        self.assertEqual(date(2015, 4, 17), task.end_date)
        task.start_date = date(2015, 4, 17)
        # An end date before the start date must be rejected.
        with self.assertRaises(ValueError):
            task.validate_end_date("end_date", date(2014, 2, 5))
        task.end_date = date(2015, 2, 17)
        self.assertEqual(date(2015, 2, 17), task.end_date)

    def test_validate_start_date_decorator(self):
        task = task_group_task.TaskGroupTask()
        task.start_date = date(16, 4, 21)
        self.assertEqual(date(2016, 4, 21), task.start_date)
        task.end_date = date(2016, 4, 21)
        task.start_date = date(2015, 2, 25)
        self.assertEqual(date(2015, 2, 25), task.start_date)
        task.start_date = date(2015, 6, 17)
        self.assertEqual(date(2015, 6, 17), task.start_date)
| {
"content_hash": "cd7c4cbf4aa04858b73800ca6f0e4a33",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 78,
"avg_line_length": 33.38297872340426,
"alnum_prop": 0.6443594646271511,
"repo_name": "VinnieJohns/ggrc-core",
"id": "6434835af7a99bce87cb3811e57025e431dbae88",
"size": "1682",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "test/unit/ggrc_workflows/models/test_task_group_task.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "226950"
},
{
"name": "HTML",
"bytes": "1060386"
},
{
"name": "JavaScript",
"bytes": "1927277"
},
{
"name": "Makefile",
"bytes": "7044"
},
{
"name": "Mako",
"bytes": "4320"
},
{
"name": "Python",
"bytes": "2762348"
},
{
"name": "Shell",
"bytes": "31100"
}
],
"symlink_target": ""
} |
import pytest
from f5.bigip.resource import MissingRequiredCreationParameter
from requests.exceptions import HTTPError
def delete_user(bigip, name):
    """Remove the named auth user; a missing user is not an error."""
    user = bigip.auth.users.user
    try:
        user.load(name=name)
    except HTTPError as err:
        # A 404 simply means there is nothing to clean up.
        if err.response.status_code == 404:
            return
        raise
    user.delete()
def setup_loadable_user_test(request, bigip, user):
    """Create 'user1' for a test and register its deletion as a finalizer."""
    def teardown():
        delete_user(bigip, 'user1')
    request.addfinalizer(teardown)
    user.create(name='user1')
    assert user.name == 'user1'
def setup_create_test(request, bigip):
    """Register cleanup of 'user1' for tests that create it themselves."""
    def teardown():
        delete_user(bigip, 'user1')
    request.addfinalizer(teardown)
def setup_create_two(request, bigip):
    """Register cleanup of 'user1' and 'user2' for two-user tests."""
    def teardown():
        for name in ['user1', 'user2']:
            delete_user(bigip, name)
    request.addfinalizer(teardown)
class TestCreate(object):
    def test_create_two(self, request, bigip):
        """Two creates yield distinct objects with distinct names."""
        setup_create_two(request, bigip)
        n1 = bigip.auth.users.user.create(name='user1')
        n2 = bigip.auth.users.user.create(name='user2')
        assert n1 is not n2
        assert n2.name != n1.name
    def test_create_no_args(self, bigip):
        '''user.create() without the required name raises
        MissingRequiredCreationParameter.'''
        user1 = bigip.auth.users.user
        with pytest.raises(MissingRequiredCreationParameter):
            user1.create()
    def test_create_min_args(self, request, bigip):
        '''Test that user.create() with only required arguments work.
        This will also test that the default values are set correctly and are
        part of the user object after creating the instance on the BigIP
        '''
        setup_create_test(request, bigip)
        user1 = bigip.auth.users.user.create(name='user1')
        assert user1.name == 'user1'
        assert user1.generation is not None \
            and isinstance(user1.generation, int)
        assert user1.fullPath == 'user1'
        assert user1.selfLink.startswith(
            'https://localhost/mgmt/tm/auth/user/user1')
        # Default Values
        assert user1.description == 'user1'
        assert user1.encryptedPassword == '!!'
        assert user1.partitionAccess == [dict(
            name='all-partitions',
            role='no-access'
        )]
    def test_create_description(self, request, bigip, USER):
        """An explicit description overrides the default."""
        setup_create_test(request, bigip)
        USER.create(name='user1', description='foo')
        assert USER.description == 'foo'
class TestLoad(object):
    def test_load_no_object(self, USER):
        """Loading a nonexistent user raises HTTPError with a 404 status."""
        with pytest.raises(HTTPError) as err:
            USER.load(name='user10')
        # Bug fix: pytest.raises yields an ExceptionInfo wrapper, so the
        # exception is on .value; requests exposes .status_code, not .status.
        assert err.value.response.status_code == 404
    def test_load(self, request, bigip, USER):
        """A created user can be loaded back with its default attributes."""
        setup_loadable_user_test(request, bigip, USER)
        n1 = bigip.auth.users.user.load(name='user1')
        assert n1.name == 'user1'
        assert n1.description == 'user1'
        assert isinstance(n1.generation, int)
class TestRefresh(object):
    def test_refresh(self, request, bigip, USER):
        """refresh() pulls server-side changes made through another handle."""
        setup_loadable_user_test(request, bigip, USER)
        n1 = bigip.auth.users.user.load(name='user1')
        n2 = bigip.auth.users.user.load(name='user1')
        assert n1.description == 'user1'
        assert n2.description == 'user1'
        # Update through n2; n1 is stale until refreshed.
        n2.update(description='foobaz')
        assert n2.description == 'foobaz'
        assert n1.description == 'user1'
        n1.refresh()
        assert n1.description == 'foobaz'
class TestDelete(object):
    def test_delete(self, request, bigip, USER):
        """Deleting a user makes a subsequent load fail with a 404."""
        setup_loadable_user_test(request, bigip, USER)
        n1 = bigip.auth.users.user.load(name='user1')
        n1.delete()
        del(n1)
        with pytest.raises(HTTPError) as err:
            bigip.auth.users.user.load(name='user1')
        # Bug fix: pytest.raises yields an ExceptionInfo wrapper; the actual
        # exception (and its .response) lives on .value.
        assert err.value.response.status_code == 404
class TestUpdate(object):
    def test_update_with_args(self, request, bigip, USER):
        """update(description=...) writes the new value immediately."""
        setup_loadable_user_test(request, bigip, USER)
        n1 = bigip.auth.users.user.load(name='user1')
        assert n1.description == 'user1'
        n1.update(description='foobar')
        assert n1.description == 'foobar'
    def test_update_parameters(self, request, bigip, USER):
        """Setting the attribute then calling update() also persists it."""
        setup_loadable_user_test(request, bigip, USER)
        n1 = bigip.auth.users.user.load(name='user1')
        assert n1.description == 'user1'
        n1.description = 'foobar'
        n1.update()
        assert n1.description == 'foobar'
| {
"content_hash": "3796b9f591953e9d40120db0bca78a00",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 77,
"avg_line_length": 31.125,
"alnum_prop": 0.6282909415439536,
"repo_name": "wojtek0806/f5-common-python",
"id": "17102b571d00bbb8719e550e8892b57230352a39",
"size": "5064",
"binary": false,
"copies": "1",
"ref": "refs/heads/0.1",
"path": "test/functional/auth/test_user.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "554081"
}
],
"symlink_target": ""
} |
import optparse
from webkitpy.common.system.executive_mock import MockExecutive2
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.port import browser_test
from webkitpy.layout_tests.port import browser_test_driver
from webkitpy.layout_tests.port import port_testcase
class _BrowserTestTestCaseMixin(object):
    """Shared tests for browser_test ports; subclasses supply make_port(),
    driver_name_endswith and timeout_ms."""
    def test_check_sys_deps(self):
        port = self.make_port()
        port._executive = MockExecutive2(exit_code=0)
        self.assertEqual(port.check_sys_deps(needs_http=False), test_run_results.OK_EXIT_STATUS)
    def test_driver_name_option(self):
        self.assertTrue(self.make_port()._path_to_driver().endswith(self.driver_name_endswith))
    def test_default_timeout_ms(self):
        # Debug builds get 3x the Release timeout.
        self.assertEqual(self.make_port(options=optparse.Values({'configuration': 'Release'})).default_timeout_ms(),
                         self.timeout_ms)
        self.assertEqual(self.make_port(options=optparse.Values({'configuration': 'Debug'})).default_timeout_ms(),
                         3 * self.timeout_ms)
    def test_driver_type(self):
        self.assertTrue(isinstance(self.make_port(options=optparse.Values({'driver_name': 'browser_tests'})
                                                  ).create_driver(1), browser_test_driver.BrowserTestDriver))
    def test_layout_tests_dir(self):
        self.assertTrue(self.make_port().layout_tests_dir().endswith('chrome/test/data/printing/layout_tests'))
    def test_virtual_test_suites(self):
        # The browser_tests ports do not use virtual test suites, so we are just testing the stub.
        port = self.make_port()
        self.assertEqual(port.virtual_test_suites(), [])
# Concrete per-platform test cases: each binds the mixin's shared tests to a
# platform-specific port class, driver name and Release timeout.
class BrowserTestLinuxTest(_BrowserTestTestCaseMixin, port_testcase.PortTestCase):
    port_name = 'linux'
    port_maker = browser_test.BrowserTestLinuxPort
    os_name = 'linux'
    os_version = 'trusty'
    driver_name_endswith = 'browser_tests'
    timeout_ms = 10 * 1000
class BrowserTestWinTest(_BrowserTestTestCaseMixin, port_testcase.PortTestCase):
    port_name = 'win'
    port_maker = browser_test.BrowserTestWinPort
    os_name = 'win'
    os_version = 'win7'
    driver_name_endswith = 'browser_tests.exe'
    timeout_ms = 20 * 1000
class BrowserTestMacTest(_BrowserTestTestCaseMixin, port_testcase.PortTestCase):
    os_name = 'mac'
    os_version = 'mac10.11'
    port_name = 'mac'
    port_maker = browser_test.BrowserTestMacPort
    driver_name_endswith = 'browser_tests'
    timeout_ms = 20 * 1000
    def test_driver_path(self):
        # Unlike content_shell, browser_tests is not packaged as a .app bundle.
        test_port = self.make_port(options=optparse.Values({'driver_name': 'browser_tests'}))
        self.assertNotIn('.app/Contents/MacOS', test_port._path_to_driver())
| {
"content_hash": "6e9eb2be7100e86a19bca71cf8fcb93b",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 116,
"avg_line_length": 40.55223880597015,
"alnum_prop": 0.6878910563121089,
"repo_name": "danakj/chromium",
"id": "59e8465f8bd9a5b85710642e704beccf10db266a",
"size": "4244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/browser_test_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import sys
from manilaclient import v2
class MovedModule(object):
    """Proxy for a relocated module: attribute access falls through to the
    replacement module, so imports of the old path keep working."""
    def __init__(self, new_module):
        self.new_module = new_module
    def __getattr__(self, attr):
        return getattr(self.new_module, attr)
# Register the backward-compatibility alias so ``import manilaclient.v1``
# resolves to the v2 module.
# Bug fix: the key was previously misspelled "maniliaclient.v1", so the
# alias was registered under a name nobody imports and never took effect.
sys.modules["manilaclient.v1"] = MovedModule(v2)
| {
"content_hash": "7ec1aa7958441734e305c061bb2a7c1b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 49,
"avg_line_length": 21,
"alnum_prop": 0.6703296703296703,
"repo_name": "sniperganso/python-manilaclient",
"id": "eb442d909cd20cea274c8b8349925d6f87e6db67",
"size": "911",
"binary": false,
"copies": "1",
"ref": "refs/heads/bp/data-service-migration-api",
"path": "manilaclient/v1/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "755723"
},
{
"name": "Shell",
"bytes": "11199"
}
],
"symlink_target": ""
} |
import collections
import os
import random
from artificialproject.random import weighted_choice
class GenerationFailedException(Exception):
    """Raised when a field cannot be generated (e.g. too few candidates)."""
    pass
# value: the generated attribute value; deps: build targets it depends on.
GeneratedField = collections.namedtuple("GeneratedField", ["value", "deps"])
class NullableGenerator:
    """Wraps a value generator and additionally models how often the sampled
    field is None, reproducing that null rate when generating."""

    def __init__(self, value_generator):
        self._value_generator = value_generator
        self._null_values = collections.Counter()

    def add_sample(self, base_path, sample):
        is_null = sample is None
        self._null_values.update([is_null])
        if not is_null:
            self._value_generator.add_sample(base_path, sample)

    def generate(self, base_path):
        if weighted_choice(self._null_values):
            return GeneratedField(None, [])
        return self._value_generator.generate(base_path)
class SingletonGenerator:
    """Adapts a set generator to single values by wrapping each sample in a
    one-element list and unwrapping the generated result."""

    def __init__(self, set_generator):
        self._set_generator = set_generator

    def add_sample(self, base_path, sample):
        self._set_generator.add_sample(base_path, [sample])

    def generate(self, base_path):
        result = self._set_generator.generate(base_path)
        # The underlying generator must honor the single-element contract.
        assert len(result.value) == 1, result
        return GeneratedField(result.value[0], result.deps)
class EnumSetGenerator:
    """Models sets of enum-like values: learns both the set sizes and the
    value frequencies, and samples without replacement when generating."""

    def __init__(self):
        self._lengths = collections.Counter()
        self._values = collections.Counter()

    def add_sample(self, base_path, sample):
        self._lengths.update([len(sample)])
        self._values.update(sample)

    def generate(self, base_path):
        target = weighted_choice(self._lengths)
        remaining = collections.Counter(self._values)
        chosen = []
        while len(chosen) < target:
            pick = weighted_choice(remaining)
            chosen.append(pick)
            # Remove the picked value so each value appears at most once.
            del remaining[pick]
        return GeneratedField(chosen, [])
class StringGenerator:
    """Character-level string model: learns length, first-character and
    remaining-character distributions (optionally modeling file extensions
    separately) and samples new strings from them."""

    def __init__(self, respect_file_extensions=False):
        self._respect_file_extensions = respect_file_extensions
        self._lengths = collections.Counter()
        self._first_chars = collections.Counter()
        self._other_chars = collections.Counter()
        if respect_file_extensions:
            self._extensions = collections.Counter()

    def add_sample(self, base_path, sample):
        # base_path is ignored; strings are modeled globally.
        self.add_string_sample(sample)

    def add_string_sample(self, sample):
        if self._respect_file_extensions:
            sample, ext = os.path.splitext(sample)
            self._extensions.update([ext])
        self._lengths.update([len(sample)])
        if sample:
            self._first_chars.update(sample[0])
            # Counter.update over a string counts each character.
            self._other_chars.update(sample[1:])

    def generate(self, base_path):
        return GeneratedField(self.generate_string(), [])

    def generate_string(self):
        target_len = weighted_choice(self._lengths)
        chars = []
        if target_len > 0:
            chars.append(weighted_choice(self._first_chars))
        while len(chars) < target_len:
            chars.append(weighted_choice(self._other_chars))
        result = "".join(chars)
        if self._respect_file_extensions:
            result += weighted_choice(self._extensions)
        return result
class VisibilityGenerator:
    """Degenerate generator for the visibility attribute: always PUBLIC."""
    def add_sample(self, base_path, sample):
        # Samples carry no signal; nothing to learn.
        pass
    def generate(self, base_path):
        return GeneratedField(["PUBLIC"], [])
class BuildTargetSetGenerator:
    """Generates dependency lists whose mix of target types (and, optionally,
    output-file extensions) mirrors the samples seen in the input project.

    Targets are drawn from the generated project's target pool; each candidate
    is accepted with a probability matching how selectively the input project
    used targets of that type/extension.
    """
    class DynamicFilteredList:
        # Lazily filters a (possibly still growing) input list, caching the
        # accepted items so every element is tested by the predicate at most
        # once across repeated get_values() calls.
        def __init__(self, input_list, predicate):
            self._input_list = input_list
            self._predicate = predicate
            self._output_list = []
            self._processed = 0
        def get_values(self):
            input_len = len(self._input_list)
            while self._processed < input_len:
                value = self._input_list[self._processed]
                if self._predicate(value):
                    self._output_list.append(value)
                self._processed += 1
            return self._output_list
    def __init__(self, context, process_output_extensions=False, override_types=None):
        # context supplies the input/generated target tables; see generate().
        self._context = context
        self._process_output_extensions = process_output_extensions
        self._lengths = collections.Counter()
        self._types = collections.Counter()
        self._unique_values_by_type_and_extension = collections.defaultdict(set)
        # Dirty flag: probabilities are recomputed lazily on generate().
        self._unique_values_dirty = False
        self._choice_probability_by_type_and_extension = dict()
        self._accepted_targets_by_type = dict()
        self._accepted_targets_with_output_by_type = dict()
        if self._process_output_extensions:
            self._output_extensions_by_type = collections.defaultdict(
                collections.Counter
            )
        if override_types is None:
            self._override_types = {}
        else:
            self._override_types = dict(override_types)
    def add_sample(self, base_path, sample):
        """Record one observed dependency list from package *base_path*."""
        self._lengths.update([len(sample)])
        for target in sample:
            # Strip flavors ("target#flavor") and absolutize ":local" names.
            target = target.split("#")[0]
            if target.startswith(":"):
                target = "//" + base_path + target
            target_data = self._context.input_target_data[target]
            target_type = target_data["buck.type"]
            target_type = self._override_types.get(target_type, target_type)
            self._types.update([target_type])
            extension = None
            if self._process_output_extensions:
                extension = self._get_output_extension(target_data)
                self._output_extensions_by_type[target_type].update([extension])
            self._unique_values_by_type_and_extension[(target_type, extension)].add(
                target
            )
            self._unique_values_dirty = True
    def _update_choice_probability(self):
        # Probability = (distinct targets referenced) / (targets of that
        # type/extension existing in the input project).
        self._choice_probability_by_type_and_extension = dict()
        for (
            (type, extension),
            used_values,
        ) in self._unique_values_by_type_and_extension.items():
            all_values = (
                x
                for x in self._context.input_target_data.values()
                if x["buck.type"] == type
            )
            if self._process_output_extensions:
                all_values = (
                    x for x in all_values if self._get_output_extension(x) == extension
                )
            num = len(used_values)
            denom = sum(1 for x in all_values)
            probability = float(num) / denom
            key = (type, extension)
            self._choice_probability_by_type_and_extension[key] = probability
    def _is_accepted(self, target_name):
        # Bernoulli trial with the learned per-(type, extension) probability.
        target_data = self._context.gen_target_data[target_name]
        target_type = target_data["buck.type"]
        extension = None
        if self._process_output_extensions:
            extension = self._get_output_extension(target_data)
        probability = self._choice_probability_by_type_and_extension.get(
            (target_type, extension), 0
        )
        return random.uniform(0, 1) < probability
    def generate(self, base_path, force_length=None):
        """Generate a dependency list; raises GenerationFailedException when
        not enough accepted candidates exist for some type/extension."""
        if self._unique_values_dirty:
            self._update_choice_probability()
            self._unique_values_dirty = False
        if force_length is not None:
            length = force_length
        else:
            length = weighted_choice(self._lengths)
        # First decide how many deps of each (type, extension) to emit.
        type_extension_counts = collections.Counter()
        for i in range(length):
            type = weighted_choice(self._types)
            if self._process_output_extensions:
                extension = weighted_choice(self._output_extensions_by_type[type])
            else:
                extension = None
            type_extension_counts.update([(type, extension)])
        output = []
        if self._process_output_extensions:
            all_targets_dict = self._context.gen_targets_with_output_by_type
            accepted_targets_dict = self._accepted_targets_with_output_by_type
        else:
            all_targets_dict = self._context.gen_targets_by_type
            accepted_targets_dict = self._accepted_targets_by_type
        for (type, extension), count in type_extension_counts.items():
            # Acceptance results are cached per type across generate() calls.
            options = accepted_targets_dict.get(type)
            if options is None:
                options = self.DynamicFilteredList(
                    all_targets_dict[type], lambda x: self._is_accepted(x)
                )
                accepted_targets_dict[type] = options
            options = options.get_values()
            if extension is not None:
                options = [
                    x
                    for x in options
                    if self._get_output_extension(self._context.gen_target_data[x])
                    == extension
                ]
            if count > len(options):
                raise GenerationFailedException()
            output.extend(random.sample(options, count))
        # deps mirror the value: each generated dep is a real graph edge.
        return GeneratedField(output, output)
    def _get_output_extension(self, target_data):
        # Returns the extension of the target's "out" file, or None when the
        # target has no output / no extension.
        if "out" not in target_data or target_data["out"] is None:
            return None
        extension = os.path.splitext(target_data["out"])[1]
        if extension == "":
            return None
        return extension
class PathSetGenerator:
    """Generates sets of in-package file paths, creating the corresponding
    (empty) files in the output repository as a side effect."""
    def __init__(self, context):
        self._context = context
        self._component_generator = StringGenerator()
        self._lengths = collections.Counter()
        self._component_counts = collections.Counter()
        self._extensions = collections.Counter()
    def add_sample(self, base_path, sample):
        """Learn set size, path depth, extension and component spellings."""
        self._lengths.update([len(sample)])
        for path in sample:
            self._context.file_path_generator.add_package_file_sample(base_path, path)
            # Split the path into its components, leaf first.
            components = []
            while path:
                path, component = os.path.split(path)
                components.append(component)
            self._component_counts.update([len(components)])
            if not components:
                self._extensions.update([""])
            else:
                # The extension is modeled separately from the leaf name.
                components[0], extension = os.path.splitext(components[0])
                self._extensions.update([extension])
            for component in components:
                self._component_generator.add_sample(base_path, component)
    def generate(self, base_path, force_length=None):
        if force_length is not None:
            length = force_length
        else:
            length = weighted_choice(self._lengths)
        extension = weighted_choice(self._extensions)
        output = [self._generate_path(base_path, extension) for i in range(length)]
        return GeneratedField(output, [])
    def _generate_path(self, base_path, extension):
        # Delegate name construction, then materialize an empty file so the
        # generated project actually contains the referenced source.
        component_count = weighted_choice(self._component_counts)
        path = self._context.file_path_generator.generate_path_in_package(
            base_path, component_count, self._component_generator, extension
        )
        full_path = os.path.join(self._context.output_repository, base_path, path)
        os.makedirs(os.path.dirname(full_path), exist_ok=True)
        with open(full_path, "w"):
            pass
        return path
class SourcePathSetGenerator:
    """Generates "source path" sets, which mix build-target references
    (//... or :...) with plain file paths, matching the observed ratio."""
    def __init__(self, context):
        self._build_target_set_generator = BuildTargetSetGenerator(
            context, process_output_extensions=True
        )
        self._path_set_generator = PathSetGenerator(context)
        self._lengths = collections.Counter()
        # Counts of True (build target) vs False (file path) per element.
        self._build_target_values = collections.Counter()
    def add_sample(self, base_path, sample):
        self._lengths.update([len(sample)])
        for source_path in sample:
            if source_path.startswith("//") or source_path.startswith(":"):
                self._build_target_values.update([True])
                self._build_target_set_generator.add_sample(base_path, [source_path])
            else:
                self._build_target_values.update([False])
                self._path_set_generator.add_sample(base_path, [source_path])
    def generate(self, base_path):
        # Decide the overall size, then how many of each flavor to emit.
        length = weighted_choice(self._lengths)
        build_target_count = 0
        path_count = 0
        for i in range(length):
            if weighted_choice(self._build_target_values):
                build_target_count += 1
            else:
                path_count += 1
        build_targets = self._build_target_set_generator.generate(
            base_path, force_length=build_target_count
        )
        paths = self._path_set_generator.generate(base_path, force_length=path_count)
        assert len(build_targets.value) == build_target_count, (
            build_targets,
            build_target_count,
        )
        assert len(paths.value) == path_count, (paths, path_count)
        # Only build-target sources contribute graph dependencies.
        return GeneratedField(
            build_targets.value + paths.value, build_targets.deps + paths.deps
        )
class SourcesWithFlagsGenerator:
    """Generates "sources with flags" entries: either a bare source path or a
    [source_path, [flag, ...]] pair, mirroring the observed flag frequency."""

    def __init__(self, context):
        self._source_path_set_generator = SourcePathSetGenerator(context)
        self._flag_generator = StringGenerator()
        self._flag_counts = collections.Counter()

    def add_sample(self, base_path, sample):
        # Split each entry into its path and its (possibly empty) flag list.
        paths = []
        flag_lists = []
        for entry in sample:
            if isinstance(entry, list):
                path, flags = entry[0], entry[1]
            else:
                path, flags = entry, []
            paths.append(path)
            flag_lists.append(flags)
        self._source_path_set_generator.add_sample(base_path, paths)
        for flags in flag_lists:
            self._flag_counts.update([len(flags)])
            for flag in flags:
                self._flag_generator.add_sample(base_path, flag)

    def generate(self, base_path):
        generated = self._source_path_set_generator.generate(base_path)
        entries = [
            self._generate_source_with_flags(base_path, path)
            for path in generated.value
        ]
        return GeneratedField(entries, generated.deps)

    def _generate_source_with_flags(self, base_path, source_path):
        how_many = weighted_choice(self._flag_counts)
        # Entries without flags stay bare strings, matching the input format.
        if how_many == 0:
            return source_path
        flags = []
        for _ in range(how_many):
            flags.append(self._flag_generator.generate(base_path).value)
        return [source_path, flags]
| {
"content_hash": "1ed58fdf9b848ce16b063736eb757940",
"timestamp": "",
"source": "github",
"line_count": 375,
"max_line_length": 88,
"avg_line_length": 38.394666666666666,
"alnum_prop": 0.5929990276427282,
"repo_name": "nguyentruongtho/buck",
"id": "6c151212bcf683752cc67d2c53518548b1729a6c",
"size": "14996",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "scripts/artificialproject/field_generators.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1652"
},
{
"name": "C",
"bytes": "250430"
},
{
"name": "CSS",
"bytes": "56106"
},
{
"name": "Dockerfile",
"bytes": "2094"
},
{
"name": "HTML",
"bytes": "11770"
},
{
"name": "Java",
"bytes": "32390158"
},
{
"name": "JavaScript",
"bytes": "931214"
},
{
"name": "Kotlin",
"bytes": "315107"
},
{
"name": "Lex",
"bytes": "14469"
},
{
"name": "Makefile",
"bytes": "508"
},
{
"name": "PowerShell",
"bytes": "2298"
},
{
"name": "Python",
"bytes": "2135599"
},
{
"name": "Shell",
"bytes": "43745"
},
{
"name": "Smalltalk",
"bytes": "194"
},
{
"name": "Thrift",
"bytes": "17630"
}
],
"symlink_target": ""
} |
from datasets.sts import STS
class STSLarge(STS):
    """STS dataset variant backed by the 'sts_large' subset."""
    def __init__(self, train_validation_split=None, test_split=None,
                 use_defaults=True, name='sts_large'):
        # NOTE(review): train_validation_split, test_split and use_defaults
        # are accepted but never forwarded to STS.__init__ — confirm whether
        # dropping them is intentional.
        super().__init__(subset=name)
| {
"content_hash": "7066a9c1551e284787b158cdb495a6f0",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 68,
"avg_line_length": 30.571428571428573,
"alnum_prop": 0.6261682242990654,
"repo_name": "mindgarage/Ovation",
"id": "7cd21399992216c6a2a5a18147d3604786d42342",
"size": "214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datasets/sts_large.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "5870"
},
{
"name": "Python",
"bytes": "264059"
},
{
"name": "Shell",
"bytes": "265"
}
],
"symlink_target": ""
} |
import logging
from functools import reduce
import asyncio
from .timeout import timeout
from .futures import AsyncObject, Bench
from .protocols import Producer
logger = logging.getLogger('pulsar.clients')
class Pool(AsyncObject):
'''An asynchronous pool of open connections.
Open connections are either :attr:`in_use` or :attr:`available`
to be used. Available connection are placed in an :class:`asyncio.Queue`.
This class is not thread safe.
'''
def __init__(self, creator, pool_size=10, loop=None, timeout=None, **kw):
'''
Construct an asynchronous Pool.
:param creator: a callable function that returns a connection object.
:param pool_size: The size of the pool to be maintained,
defaults to 10. This is the largest number of connections that
will be kept persistently in the pool. Note that the pool
begins with no connections; once this number of connections
is requested, that number of connections will remain.
:param timeout: The number of seconds to wait before giving up
on returning a connection. Defaults to 30.
'''
self._creator = creator
self._closed = False
self._timeout = timeout
self._queue = asyncio.Queue(maxsize=pool_size, loop=loop)
self._connecting = 0
self._loop = self._queue._loop
self._logger = logger
self._in_use_connections = set()
@property
def pool_size(self):
'''The maximum number of open connections allowed.
If more connections are requested, the request
is queued and a connection returned as soon as one becomes
available.
'''
return self._queue._maxsize
@property
def in_use(self):
'''The number of connections in use.
These connections are not available until they are released back
to the pool.
'''
return len(self._in_use_connections)
@property
def available(self):
'''Number of available connections in the pool.
'''
return reduce(self._count_connections, self._queue._queue, 0)
@property
def closed(self):
"""True when this pool is closed
"""
return bool(self._closed)
def __contains__(self, connection):
if connection not in self._in_use_connections:
return connection in self._queue._queue
return True
async def connect(self):
'''Get a connection from the pool.
The connection is either a new one or retrieved from the
:attr:`available` connections in the pool.
:return: a :class:`~asyncio.Future` resulting in the connection.
'''
assert not self.closed
connection = await self._get()
return PoolConnection(self, connection)
def close(self):
    '''Close every connection held by the pool.

    :return: a :class:`~asyncio.Future` completing once all
        connections have shut down.  Subsequent calls return the same
        future.
    '''
    if not self.closed:
        pending = []
        # Drain idle connections from the queue, skipping ``None``
        # placeholders.
        while self._queue.qsize():
            conn = self._queue.get_nowait()
            if conn:
                result = conn.close()
                if result:
                    pending.append(result)
        # Detach the checked-out set and close those connections too.
        checked_out, self._in_use_connections = (
            self._in_use_connections, set())
        for conn in checked_out:
            if conn:
                pending.append(conn.close())
        self._closed = asyncio.gather(*pending, loop=self._loop)
    return self._closed
async def _get(self):
    '''Return a raw connection, creating one when the pool has spare
    capacity.

    Recurses when a ``None`` marker (a discarded connection slot) or
    an already-closed connection is pulled from the queue.
    '''
    queue = self._queue
    # grab the connection without waiting, important!
    if queue.qsize():
        connection = queue.get_nowait()
    # wait for one to be available
    elif self.in_use + self._connecting >= queue._maxsize:
        # Pool is at capacity: block until a connection is released,
        # bounded by self._timeout via the ``timeout`` context manager.
        with timeout(self._loop, self._timeout):
            connection = await queue.get()
    else:  # must create a new connection
        self._connecting += 1
        try:
            connection = await self._creator()
        finally:
            # Always decrement, even if the creator raised.
            self._connecting -= 1
    # None signal that a connection was removed form the queue
    # Go again
    if connection is None:
        connection = await self._get()
    else:
        if connection.closed:
            # Stale connection: discard it and try again.
            connection = await self._get()
        else:
            self._in_use_connections.add(connection)
    return connection
def _put(self, conn, discard=False):
    '''Release ``conn`` back to the pool of available connections.

    When ``discard`` is true a ``None`` marker is queued instead,
    telling :meth:`_get` that a slot was freed without a reusable
    connection.
    '''
    if not self.closed:
        payload = None if discard else conn
        try:
            # None signal that a connection was removed form the queue
            self._queue.put_nowait(payload)
        except asyncio.QueueFull:
            # No room left for idle connections: drop this one.
            if conn:
                conn.close()
    self._in_use_connections.discard(conn)
def status(self, message=None, level=None):
    '''One-line summary of pool capacity and usage.

    ``message`` and ``level`` are accepted for interface compatibility
    but ignored.
    '''
    template = ('Pool size: %d Connections in pool: %d '
                'Current Checked out connections: %d')
    return template % (self._queue._maxsize, self.available, self.in_use)
def _count_connections(self, x, y):
    '''Reducer step: increment the tally ``x`` when ``y`` is an actual
    connection rather than a ``None`` placeholder.
    '''
    return x if y is None else x + 1
class PoolConnection:
    '''A wrapper for a :class:`Connection` in a connection :class:`Pool`.

    .. attribute:: pool

        The :class:`Pool` which created this :class:`PoolConnection`

    .. attribute:: connection

        The underlying socket connection.
    '''
    __slots__ = ('pool', 'connection')

    def __init__(self, pool, connection):
        self.pool = pool
        self.connection = connection

    def close(self, discard=False):
        '''Close this pool connection by releasing the underlying
        :attr:`connection` back to the :attr:`pool`.
        '''
        # Only the first call releases back to the pool; ``pool`` is
        # cleared so later calls (e.g. from __del__) are harmless.
        if self.pool is not None:
            self.pool._put(self.connection, discard)
            self.pool = None
        conn, self.connection = self.connection, None
        return conn

    async def detach(self, discard=True):
        '''Remove the underlying :attr:`connection` from the connection
        :attr:`pool`.
        '''
        if discard:
            return self.close(True)
        else:
            # Flag inspected by __aexit__ so the connection is NOT
            # released back to the pool on context exit.
            self.connection._exit_ = False
            return self

    async def __aenter__(self):
        return self

    async def __aexit__(self, type, value, traceback):
        if getattr(self.connection, '_exit_', True):
            self.close()
        else:
            del self.connection._exit_

    def __getattr__(self, name):
        # Delegate unknown attribute reads to the wrapped connection.
        return getattr(self.connection, name)

    def __setattr__(self, name, value):
        try:
            # ``pool`` and ``connection`` (the __slots__ names) are set
            # on the wrapper itself.
            super().__setattr__(name, value)
        except AttributeError:
            # Any other attribute is forwarded to the connection.
            setattr(self.connection, name, value)

    def __del__(self):
        self.close()
class ClientMixin:
    '''Mixin supplying the minimal client surface: a readable repr and
    ``close``/``abort`` hooks.
    '''

    def __repr__(self):
        return type(self).__name__
    __str__ = __repr__

    def close(self):
        '''Close all idle connections.
        '''
        raise NotImplementedError

    def abort(self):
        # Aborting defaults to a plain close.
        return self.close()
class AbstractClient(Producer, ClientMixin):
    """A :class:`.Producer` for client connections.
    """

    def connect(self):
        '''Abstract method for creating a connection.
        '''
        raise NotImplementedError

    async def create_connection(self, address=None, protocol_factory=None,
                                **kwargs):
        """Helper method for creating a connection to an ``address``.
        """
        factory = protocol_factory or self.create_protocol
        if isinstance(address, tuple):
            # A (host, port) pair is expanded into keyword arguments.
            kwargs['host'] = address[0]
            kwargs['port'] = address[1]
        _, protocol = await self._loop.create_connection(factory, **kwargs)
        # Wait until the protocol has actually seen connection_made.
        event = protocol.event('connection_made')
        if not event.fired():
            await event.waiter()
        return protocol

    async def create_datagram_endpoint(self, protocol_factory=None, **kw):
        '''Helper method for creating a connection to an ``address``.
        '''
        factory = protocol_factory or self.create_protocol
        _, protocol = await self._loop.create_datagram_endpoint(factory, **kw)
        event = protocol.event('connection_made')
        if not event.fired():
            await event.waiter()
        return protocol

    def timeit(self, method, times, *args, **kwargs):
        '''Useful utility for benchmarking an asynchronous ``method``.

        :param method: the name of the ``method`` to execute
        :param times: number of times to execute the ``method``
        :param args: positional arguments to pass to the ``method``
        :param kwargs: key-valued arguments to pass to the ``method``
        :return: a :class:`~asyncio.Future` which results in a
            :class:`Bench` object if successful

        The usage is simple::

            >>> b = self.timeit('asyncmethod', 100)
        '''
        target = getattr(self, method)
        return Bench(times, loop=self._loop)(target, *args, **kwargs)
| {
"content_hash": "d628e32c4cd2b6b7ff5b3fa748005591",
"timestamp": "",
"source": "github",
"line_count": 286,
"max_line_length": 78,
"avg_line_length": 32.33916083916084,
"alnum_prop": 0.5844956211482323,
"repo_name": "quantmind/pulsar",
"id": "9e2ed081a6aded592e45bf5a024f7adc260543bd",
"size": "9249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pulsar/async/clients.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "838"
},
{
"name": "C",
"bytes": "1366"
},
{
"name": "CSS",
"bytes": "1302"
},
{
"name": "HTML",
"bytes": "1085"
},
{
"name": "JavaScript",
"bytes": "116"
},
{
"name": "Makefile",
"bytes": "2272"
},
{
"name": "Python",
"bytes": "1140291"
},
{
"name": "Shell",
"bytes": "2164"
}
],
"symlink_target": ""
} |
from ..processing_node import ProcessingNode
class ResizeNode(ProcessingNode):
    '''Processing node that scales an image to a fixed target size.

    ``width`` and ``height`` are coerced to ``int``; ``quality``
    supplies the resampling filter through its ``to_pil`` method.
    '''

    def __init__(self, width, height, quality):
        self.width = int(width)
        self.height = int(height)
        self.quality = quality

    def execute(self, processor, img):
        '''Return a single-element list holding the resized image.'''
        target = (self.width, self.height)
        return [img.resize(target, self.quality.to_pil())]
| {
"content_hash": "9cfb7fe7d256a818c4c643b4df8e3159",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 77,
"avg_line_length": 31.272727272727273,
"alnum_prop": 0.6569767441860465,
"repo_name": "PearCoding/SpriteResourceCompiler",
"id": "4b9fd766c6c8d8ffe502d7725eba9b89dceda7d4",
"size": "344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/processor/nodes/resize_node.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "33"
},
{
"name": "Python",
"bytes": "47933"
},
{
"name": "Shell",
"bytes": "44"
}
],
"symlink_target": ""
} |
CORE_SOURCE_FILES = [
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/client_channel/backend_metric.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_channelz.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/global_subchannel_pool.cc',
'src/core/ext/filters/client_channel/health/health_check_client.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/cds.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/local_subchannel_pool.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/resolver_result_parsing.cc',
'src/core/ext/filters/client_channel/resolving_lb_policy.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/server_address.cc',
'src/core/ext/filters/client_channel/service_config.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
'src/core/ext/filters/client_channel/xds/xds_api.cc',
'src/core/ext/filters/client_channel/xds/xds_bootstrap.cc',
'src/core/ext/filters/client_channel/xds/xds_channel_secure.cc',
'src/core/ext/filters/client_channel/xds/xds_client.cc',
'src/core/ext/filters/client_channel/xds/xds_client_stats.cc',
'src/core/ext/filters/client_idle/client_idle_filter.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/client_authority_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/transport/chttp2/client/authority.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/context_list.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c',
'src/core/ext/upb-generated/envoy/annotations/resource.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/auth/cert.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster/circuit_breaker.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster/filter.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/cluster/outlier_detection.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/address.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/base.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/config_source.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/grpc_service.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/health_check.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/http_uri.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/core/protocol.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/discovery.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/eds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint/endpoint.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint/endpoint_components.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/endpoint/load_report.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/lds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener/listener.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener/listener_components.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/listener/udp_listener_config.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/rds.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/route.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/route/route.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/route/route_components.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/scoped_route.upb.c',
'src/core/ext/upb-generated/envoy/api/v2/srds.upb.c',
'src/core/ext/upb-generated/envoy/config/filter/accesslog/v2/accesslog.upb.c',
'src/core/ext/upb-generated/envoy/config/filter/network/http_connection_manager/v2/http_connection_manager.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v2/api_listener.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v2/ads.upb.c',
'src/core/ext/upb-generated/envoy/service/load_stats/v2/lrs.upb.c',
'src/core/ext/upb-generated/envoy/type/http.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/regex.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/string.upb.c',
'src/core/ext/upb-generated/envoy/type/metadata/v2/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/percent.upb.c',
'src/core/ext/upb-generated/envoy/type/range.upb.c',
'src/core/ext/upb-generated/envoy/type/semantic_version.upb.c',
'src/core/ext/upb-generated/envoy/type/tracing/v2/custom_tag.upb.c',
'src/core/ext/upb-generated/gogoproto/gogo.upb.c',
'src/core/ext/upb-generated/google/api/annotations.upb.c',
'src/core/ext/upb-generated/google/api/http.upb.c',
'src/core/ext/upb-generated/google/protobuf/any.upb.c',
'src/core/ext/upb-generated/google/protobuf/descriptor.upb.c',
'src/core/ext/upb-generated/google/protobuf/duration.upb.c',
'src/core/ext/upb-generated/google/protobuf/empty.upb.c',
'src/core/ext/upb-generated/google/protobuf/struct.upb.c',
'src/core/ext/upb-generated/google/protobuf/timestamp.upb.c',
'src/core/ext/upb-generated/google/protobuf/wrappers.upb.c',
'src/core/ext/upb-generated/google/rpc/status.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c',
'src/core/ext/upb-generated/udpa/annotations/migrate.upb.c',
'src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c',
'src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c',
'src/core/ext/upb-generated/validate/validate.upb.c',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channelz.cc',
'src/core/lib/channel/channelz_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_args.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/debug/trace.cc',
'src/core/lib/gpr/alloc.cc',
'src/core/lib/gpr/atm.cc',
'src/core/lib/gpr/cpu_iphone.cc',
'src/core/lib/gpr/cpu_linux.cc',
'src/core/lib/gpr/cpu_posix.cc',
'src/core/lib/gpr/cpu_windows.cc',
'src/core/lib/gpr/env_linux.cc',
'src/core/lib/gpr/env_posix.cc',
'src/core/lib/gpr/env_windows.cc',
'src/core/lib/gpr/log.cc',
'src/core/lib/gpr/log_android.cc',
'src/core/lib/gpr/log_linux.cc',
'src/core/lib/gpr/log_posix.cc',
'src/core/lib/gpr/log_windows.cc',
'src/core/lib/gpr/murmur_hash.cc',
'src/core/lib/gpr/string.cc',
'src/core/lib/gpr/string_posix.cc',
'src/core/lib/gpr/string_util_windows.cc',
'src/core/lib/gpr/string_windows.cc',
'src/core/lib/gpr/sync.cc',
'src/core/lib/gpr/sync_abseil.cc',
'src/core/lib/gpr/sync_posix.cc',
'src/core/lib/gpr/sync_windows.cc',
'src/core/lib/gpr/time.cc',
'src/core/lib/gpr/time_posix.cc',
'src/core/lib/gpr/time_precise.cc',
'src/core/lib/gpr/time_windows.cc',
'src/core/lib/gpr/tls_pthread.cc',
'src/core/lib/gpr/tmpfile_msys.cc',
'src/core/lib/gpr/tmpfile_posix.cc',
'src/core/lib/gpr/tmpfile_windows.cc',
'src/core/lib/gpr/wrap_memcpy.cc',
'src/core/lib/gprpp/arena.cc',
'src/core/lib/gprpp/fork.cc',
'src/core/lib/gprpp/global_config_env.cc',
'src/core/lib/gprpp/host_port.cc',
'src/core/lib/gprpp/mpscq.cc',
'src/core/lib/gprpp/thd_posix.cc',
'src/core/lib/gprpp/thd_windows.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/httpcli_security_connector.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/buffer_list.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/cfstream_handle.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_cfstream.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/error_cfstream.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/executor/mpmcqueue.cc',
'src/core/lib/iomgr/executor/threadpool.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_posix.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc',
'src/core/lib/iomgr/internal_errqueue.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_posix_cfstream.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/poller/eventmanager_libuv.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_cfstream.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/iomgr/work_serializer.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/profiling/basic_timers.cc',
'src/core/lib/profiling/stap_timers.cc',
'src/core/lib/security/context/security_context.cc',
'src/core/lib/security/credentials/alts/alts_credentials.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc',
'src/core/lib/security/credentials/composite/composite_credentials.cc',
'src/core/lib/security/credentials/credentials.cc',
'src/core/lib/security/credentials/credentials_metadata.cc',
'src/core/lib/security/credentials/fake/fake_credentials.cc',
'src/core/lib/security/credentials/google_default/credentials_generic.cc',
'src/core/lib/security/credentials/google_default/google_default_credentials.cc',
'src/core/lib/security/credentials/iam/iam_credentials.cc',
'src/core/lib/security/credentials/jwt/json_token.cc',
'src/core/lib/security/credentials/jwt/jwt_credentials.cc',
'src/core/lib/security/credentials/jwt/jwt_verifier.cc',
'src/core/lib/security/credentials/local/local_credentials.cc',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc',
'src/core/lib/security/credentials/plugin/plugin_credentials.cc',
'src/core/lib/security/credentials/ssl/ssl_credentials.cc',
'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc',
'src/core/lib/security/credentials/tls/tls_credentials.cc',
'src/core/lib/security/security_connector/alts/alts_security_connector.cc',
'src/core/lib/security/security_connector/fake/fake_security_connector.cc',
'src/core/lib/security/security_connector/load_system_roots_fallback.cc',
'src/core/lib/security/security_connector/load_system_roots_linux.cc',
'src/core/lib/security/security_connector/local/local_security_connector.cc',
'src/core/lib/security/security_connector/security_connector.cc',
'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc',
'src/core/lib/security/security_connector/ssl_utils.cc',
'src/core/lib/security/security_connector/ssl_utils_config.cc',
'src/core/lib/security/security_connector/tls/tls_security_connector.cc',
'src/core/lib/security/transport/client_auth_filter.cc',
'src/core/lib/security/transport/secure_endpoint.cc',
'src/core/lib/security/transport/security_handshaker.cc',
'src/core/lib/security/transport/server_auth_filter.cc',
'src/core/lib/security/transport/target_authority_table.cc',
'src/core/lib/security/transport/tsi_error.cc',
'src/core/lib/security/util/json_util.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/init.cc',
'src/core/lib/surface/init_secure.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/uri/uri_parser.cc',
'src/core/plugin_registry/grpc_plugin_registry.cc',
'src/core/tsi/alts/crypt/aes_gcm.cc',
'src/core/tsi/alts/crypt/gsec.cc',
'src/core/tsi/alts/frame_protector/alts_counter.cc',
'src/core/tsi/alts/frame_protector/alts_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_frame_protector.cc',
'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc',
'src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/frame_handler.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_client.cc',
'src/core/tsi/alts/handshaker/alts_shared_resource.cc',
'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc',
'src/core/tsi/alts/handshaker/alts_tsi_utils.cc',
'src/core/tsi/alts/handshaker/transport_security_common_api.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc',
'src/core/tsi/fake_transport_security.cc',
'src/core/tsi/local_transport_security.cc',
'src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc',
'src/core/tsi/ssl/session_cache/ssl_session_cache.cc',
'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc',
'src/core/tsi/ssl_transport_security.cc',
'src/core/tsi/transport_security.cc',
'src/core/tsi/transport_security_grpc.cc',
'third_party/abseil-cpp/absl/base/dynamic_annotations.cc',
'third_party/abseil-cpp/absl/base/internal/cycleclock.cc',
'third_party/abseil-cpp/absl/base/internal/raw_logging.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock.cc',
'third_party/abseil-cpp/absl/base/internal/spinlock_wait.cc',
'third_party/abseil-cpp/absl/base/internal/sysinfo.cc',
'third_party/abseil-cpp/absl/base/internal/thread_identity.cc',
'third_party/abseil-cpp/absl/base/internal/throw_delegate.cc',
'third_party/abseil-cpp/absl/base/internal/unscaledcycleclock.cc',
'third_party/abseil-cpp/absl/base/log_severity.cc',
'third_party/abseil-cpp/absl/numeric/int128.cc',
'third_party/abseil-cpp/absl/strings/ascii.cc',
'third_party/abseil-cpp/absl/strings/charconv.cc',
'third_party/abseil-cpp/absl/strings/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_bigint.cc',
'third_party/abseil-cpp/absl/strings/internal/charconv_parse.cc',
'third_party/abseil-cpp/absl/strings/internal/escaping.cc',
'third_party/abseil-cpp/absl/strings/internal/memutil.cc',
'third_party/abseil-cpp/absl/strings/internal/ostringstream.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/arg.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/bind.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/extension.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/float_conversion.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/output.cc',
'third_party/abseil-cpp/absl/strings/internal/str_format/parser.cc',
'third_party/abseil-cpp/absl/strings/internal/utf8.cc',
'third_party/abseil-cpp/absl/strings/match.cc',
'third_party/abseil-cpp/absl/strings/numbers.cc',
'third_party/abseil-cpp/absl/strings/str_cat.cc',
'third_party/abseil-cpp/absl/strings/str_replace.cc',
'third_party/abseil-cpp/absl/strings/str_split.cc',
'third_party/abseil-cpp/absl/strings/string_view.cc',
'third_party/abseil-cpp/absl/strings/substitute.cc',
'third_party/abseil-cpp/absl/types/bad_optional_access.cc',
'third_party/address_sorting/address_sorting.c',
'third_party/address_sorting/address_sorting_posix.c',
'third_party/address_sorting/address_sorting_windows.c',
'third_party/boringssl-with-bazel/err_data.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bitstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bool.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_dup.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_gentm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_mbstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_object.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_octet.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_print.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_strnid.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_time.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_type.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utctm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utf8.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_lib.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_par.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn_pack.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_string.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_dec.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_enc.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_fre.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_new.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_typ.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_utl.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/time_support.c',
'third_party/boringssl-with-bazel/src/crypto/base64/base64.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio_mem.c',
'third_party/boringssl-with-bazel/src/crypto/bio/connect.c',
'third_party/boringssl-with-bazel/src/crypto/bio/fd.c',
'third_party/boringssl-with-bazel/src/crypto/bio/file.c',
'third_party/boringssl-with-bazel/src/crypto/bio/hexdump.c',
'third_party/boringssl-with-bazel/src/crypto/bio/pair.c',
'third_party/boringssl-with-bazel/src/crypto/bio/printf.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket_helper.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/bn_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/convert.c',
'third_party/boringssl-with-bazel/src/crypto/buf/buf.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/asn1_compat.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/ber.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbb.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbs.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/unicode.c',
'third_party/boringssl-with-bazel/src/crypto/chacha/chacha.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/cipher_extra.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/derive_key.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesccm.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesctrhmac.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesgcmsiv.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_chacha20poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_null.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc2.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc4.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_tls.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/tls_cbc.c',
'third_party/boringssl-with-bazel/src/crypto/cmac/cmac.c',
'third_party/boringssl-with-bazel/src/crypto/conf/conf.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-intel.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-ppc64le.c',
'third_party/boringssl-with-bazel/src/crypto/crypto.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/spake25519.c',
'third_party/boringssl-with-bazel/src/crypto/dh/check.c',
'third_party/boringssl-with-bazel/src/crypto/dh/dh.c',
'third_party/boringssl-with-bazel/src/crypto/dh/dh_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/dh/params.c',
'third_party/boringssl-with-bazel/src/crypto/digest_extra/digest_extra.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_derive.c',
'third_party/boringssl-with-bazel/src/crypto/ecdh_extra/ecdh_extra.c',
'third_party/boringssl-with-bazel/src/crypto/ecdsa_extra/ecdsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/engine/engine.c',
'third_party/boringssl-with-bazel/src/crypto/err/err.c',
'third_party/boringssl-with-bazel/src/crypto/evp/digestsign.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_ctx.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/pbkdf.c',
'third_party/boringssl-with-bazel/src/crypto/evp/print.c',
'third_party/boringssl-with-bazel/src/crypto/evp/scrypt.c',
'third_party/boringssl-with-bazel/src/crypto/evp/sign.c',
'third_party/boringssl-with-bazel/src/crypto/ex_data.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/bcm.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/fips_shared_support.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/is_fips.c',
'third_party/boringssl-with-bazel/src/crypto/hkdf/hkdf.c',
'third_party/boringssl-with-bazel/src/crypto/hrss/hrss.c',
'third_party/boringssl-with-bazel/src/crypto/lhash/lhash.c',
'third_party/boringssl-with-bazel/src/crypto/mem.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj_xref.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_all.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_info.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_lib.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_oth.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pk8.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_xaux.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8_x509.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl-with-bazel/src/crypto/pool/pool.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/deterministic.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/forkunsafe.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/rand_extra.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/windows.c',
'third_party/boringssl-with-bazel/src/crypto/rc4/rc4.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_c11.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_lock.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_print.c',
'third_party/boringssl-with-bazel/src/crypto/siphash/siphash.c',
'third_party/boringssl-with-bazel/src/crypto/stack/stack.c',
'third_party/boringssl-with-bazel/src/crypto/thread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_none.c',
'third_party/boringssl-with-bazel/src/crypto/thread_pthread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_win.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_digest.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_sign.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_strex.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_verify.c',
'third_party/boringssl-with-bazel/src/crypto/x509/algorithm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/asn1_gen.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_dir.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_file.c',
'third_party/boringssl-with-bazel/src/crypto/x509/i2d_pr.c',
'third_party/boringssl-with-bazel/src/crypto/x509/rsa_pss.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_att.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_cmp.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_d2.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_def.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_ext.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_lu.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_obj.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_r2x.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_set.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_trs.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_txt.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_v3.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vfy.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vpm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509cset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509rset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_algor.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_all.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_attrib.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_exten.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pubkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_sig.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_val.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_cache.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_data.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_map.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_node.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_tree.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akeya.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_alt.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bitst.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_conf.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_cpols.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_crld.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_enum.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_extku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_genn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ia5.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_int.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ncons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ocsp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pci.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcia.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_prn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_purp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_skey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_sxnet.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_utl.c',
'third_party/boringssl-with-bazel/src/ssl/bio_ssl.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_both.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_srtp.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_record.cc',
'third_party/boringssl-with-bazel/src/ssl/handoff.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_client.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_server.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_both.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_aead_ctx.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_asn1.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_buffer.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cert.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cipher.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_file.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_key_share.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_privkey.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_session.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_stat.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_transcript.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_versions.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_x509.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_both.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_client.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_server.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_record.cc',
'third_party/boringssl-with-bazel/src/third_party/fiat/curve25519.c',
'third_party/cares/cares/ares__close_sockets.c',
'third_party/cares/cares/ares__get_hostent.c',
'third_party/cares/cares/ares__read_line.c',
'third_party/cares/cares/ares__timeval.c',
'third_party/cares/cares/ares_cancel.c',
'third_party/cares/cares/ares_create_query.c',
'third_party/cares/cares/ares_data.c',
'third_party/cares/cares/ares_destroy.c',
'third_party/cares/cares/ares_expand_name.c',
'third_party/cares/cares/ares_expand_string.c',
'third_party/cares/cares/ares_fds.c',
'third_party/cares/cares/ares_free_hostent.c',
'third_party/cares/cares/ares_free_string.c',
'third_party/cares/cares/ares_getenv.c',
'third_party/cares/cares/ares_gethostbyaddr.c',
'third_party/cares/cares/ares_gethostbyname.c',
'third_party/cares/cares/ares_getnameinfo.c',
'third_party/cares/cares/ares_getopt.c',
'third_party/cares/cares/ares_getsock.c',
'third_party/cares/cares/ares_init.c',
'third_party/cares/cares/ares_library_init.c',
'third_party/cares/cares/ares_llist.c',
'third_party/cares/cares/ares_mkquery.c',
'third_party/cares/cares/ares_nowarn.c',
'third_party/cares/cares/ares_options.c',
'third_party/cares/cares/ares_parse_a_reply.c',
'third_party/cares/cares/ares_parse_aaaa_reply.c',
'third_party/cares/cares/ares_parse_mx_reply.c',
'third_party/cares/cares/ares_parse_naptr_reply.c',
'third_party/cares/cares/ares_parse_ns_reply.c',
'third_party/cares/cares/ares_parse_ptr_reply.c',
'third_party/cares/cares/ares_parse_soa_reply.c',
'third_party/cares/cares/ares_parse_srv_reply.c',
'third_party/cares/cares/ares_parse_txt_reply.c',
'third_party/cares/cares/ares_platform.c',
'third_party/cares/cares/ares_process.c',
'third_party/cares/cares/ares_query.c',
'third_party/cares/cares/ares_search.c',
'third_party/cares/cares/ares_send.c',
'third_party/cares/cares/ares_strcasecmp.c',
'third_party/cares/cares/ares_strdup.c',
'third_party/cares/cares/ares_strerror.c',
'third_party/cares/cares/ares_strsplit.c',
'third_party/cares/cares/ares_timeout.c',
'third_party/cares/cares/ares_version.c',
'third_party/cares/cares/ares_writev.c',
'third_party/cares/cares/bitncmp.c',
'third_party/cares/cares/inet_net_pton.c',
'third_party/cares/cares/inet_ntop.c',
'third_party/cares/cares/windows_port.c',
'third_party/upb/upb/decode.c',
'third_party/upb/upb/encode.c',
'third_party/upb/upb/msg.c',
'third_party/upb/upb/port.c',
'third_party/upb/upb/table.c',
'third_party/upb/upb/upb.c',
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
]
| {
"content_hash": "7a8086db9ce013ba5c415723acf84c9d",
"timestamp": "",
"source": "github",
"line_count": 813,
"max_line_length": 118,
"avg_line_length": 58.9569495694957,
"alnum_prop": 0.7228573812901611,
"repo_name": "firebase/grpc-SwiftPM",
"id": "795c02680c57dd9f14795bbab7a91b9f72ffb8c0",
"size": "48612",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/python/grpcio/grpc_core_dependencies.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "35533"
},
{
"name": "C",
"bytes": "3406418"
},
{
"name": "C#",
"bytes": "2100730"
},
{
"name": "C++",
"bytes": "11752203"
},
{
"name": "CMake",
"bytes": "579797"
},
{
"name": "CSS",
"bytes": "1519"
},
{
"name": "Cython",
"bytes": "261666"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "Dockerfile",
"bytes": "166273"
},
{
"name": "Go",
"bytes": "34794"
},
{
"name": "HTML",
"bytes": "14"
},
{
"name": "Java",
"bytes": "6259"
},
{
"name": "JavaScript",
"bytes": "75178"
},
{
"name": "M4",
"bytes": "58013"
},
{
"name": "Makefile",
"bytes": "1065702"
},
{
"name": "Mako",
"bytes": "5629"
},
{
"name": "Objective-C",
"bytes": "685096"
},
{
"name": "Objective-C++",
"bytes": "77742"
},
{
"name": "PHP",
"bytes": "260811"
},
{
"name": "PowerShell",
"bytes": "3226"
},
{
"name": "Python",
"bytes": "2867167"
},
{
"name": "Ruby",
"bytes": "1098054"
},
{
"name": "Shell",
"bytes": "507226"
},
{
"name": "Starlark",
"bytes": "510236"
},
{
"name": "Swift",
"bytes": "6540"
},
{
"name": "XSLT",
"bytes": "9673"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.