from ..config import mmtwfs_config
from ..secondary import SecondaryFactory
from ..custom_exceptions import WFSConfigException, WFSCommandException
def test_secondaries():
for s in mmtwfs_config['secondary']:
sec = SecondaryFactory(secondary=s, test="foo")
assert(sec.test == "foo")
def test_bogus_secondary():
try:
sec = SecondaryFactory(secondary="bazz")
assert(sec is not None)
except WFSConfigException:
assert True
except Exception as e:
assert(e is not None)
assert False
else:
assert False
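# A more idiomatic form of the raise-check above, shown only as a hedged
# sketch (this module does not currently import pytest):
#
#     import pytest
#
#     def test_bogus_secondary_raises():
#         with pytest.raises(WFSConfigException):
#             SecondaryFactory(secondary="bazz")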
def test_connect():
s = SecondaryFactory(secondary='f5')
try:
s.connect()
except Exception as e:
assert(not s.connected)
assert(e is not None)
finally:
s.disconnect()
assert(not s.connected)
def test_focus():
s = SecondaryFactory(secondary='f5')
cmd = s.focus(200.3)
assert("200.3" in cmd)
def test_m1spherical():
s = SecondaryFactory(secondary='f5')
cmd = s.m1spherical(200.3)
assert("200.3" in cmd)
def test_cc():
s = SecondaryFactory(secondary='f5')
cmd = s.cc('x', 200.3)
assert("200.3" in cmd)
cmd = s.cc('y', 200.3)
assert("200.3" in cmd)
try:
cmd = s.cc('z', 200.3)
except WFSCommandException:
assert True
except Exception as e:
assert(e is not None)
assert False
else:
assert False
def test_zc():
s = SecondaryFactory(secondary='f5')
cmd = s.zc('x', 200.3)
assert("200.3" in cmd)
cmd = s.zc('y', 200.3)
assert("200.3" in cmd)
try:
cmd = s.zc('z', 200.3)
except WFSCommandException:
assert True
except Exception as e:
assert(e is not None)
assert False
else:
assert False
def test_clear():
s = SecondaryFactory(secondary='f5')
cmd = s.clear_m1spherical()
assert("0.0" in cmd)
cmds = s.clear_wfs()
for c in cmds:
assert("0.0" in c)
| {
"content_hash": "45e043abb0e49a98652265a7100e96ce",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 71,
"avg_line_length": 22.370786516853933,
"alnum_prop": 0.5951783023606229,
"repo_name": "MMTObservatory/mmtwfs",
"id": "39a08a5684202b93cad32cf7d80f02883235d7d1",
"size": "2071",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "mmtwfs/tests/test_secondaries.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "104"
},
{
"name": "Jupyter Notebook",
"bytes": "13457858"
},
{
"name": "Python",
"bytes": "230109"
}
],
"symlink_target": ""
} |
"""The internals for the unit of work system.
The session's flush() process passes objects to a contextual object
here, which assembles flush tasks based on mappers and their properties,
organizes them in order of dependency, and executes.
"""
from . import attributes
from . import exc as orm_exc
from . import util as orm_util
from .. import event
from .. import util
from ..util import topological
def _warn_for_cascade_backrefs(state, prop):
util.warn_deprecated_20(
'"%s" object is being merged into a Session along the backref '
'cascade path for relationship "%s"; in SQLAlchemy 2.0, this '
"reverse cascade will not take place. Set cascade_backrefs to "
"False in either the relationship() or backref() function for "
"the 2.0 behavior; or to set globally for the whole "
"Session, set the future=True flag" % (state.class_.__name__, prop),
code="s9r1",
)
def track_cascade_events(descriptor, prop):
"""Establish event listeners on object attributes which handle
cascade-on-set/append.
"""
key = prop.key
def append(state, item, initiator):
# process "save_update" cascade rules for when
# an instance is appended to the list of another instance
if item is None:
return
sess = state.session
if sess:
if sess._warn_on_events:
sess._flush_warning("collection append")
prop = state.manager.mapper._props[key]
item_state = attributes.instance_state(item)
if (
prop._cascade.save_update
and (
(prop.cascade_backrefs and not sess.future)
or key == initiator.key
)
and not sess._contains_state(item_state)
):
if key != initiator.key:
_warn_for_cascade_backrefs(item_state, prop)
sess._save_or_update_state(item_state)
return item
def remove(state, item, initiator):
if item is None:
return
sess = state.session
prop = state.manager.mapper._props[key]
if sess and sess._warn_on_events:
sess._flush_warning(
"collection remove"
if prop.uselist
else "related attribute delete"
)
if (
item is not None
and item is not attributes.NEVER_SET
and item is not attributes.PASSIVE_NO_RESULT
and prop._cascade.delete_orphan
):
# expunge pending orphans
item_state = attributes.instance_state(item)
if prop.mapper._is_orphan(item_state):
if sess and item_state in sess._new:
sess.expunge(item)
else:
# the related item may or may not itself be in a
# Session, however the parent for which we are catching
# the event is not in a session, so memoize this on the
# item
item_state._orphaned_outside_of_session = True
def set_(state, newvalue, oldvalue, initiator):
# process "save_update" cascade rules for when an instance
# is attached to another instance
if oldvalue is newvalue:
return newvalue
sess = state.session
if sess:
if sess._warn_on_events:
sess._flush_warning("related attribute set")
prop = state.manager.mapper._props[key]
if newvalue is not None:
newvalue_state = attributes.instance_state(newvalue)
if (
prop._cascade.save_update
and (
(prop.cascade_backrefs and not sess.future)
or key == initiator.key
)
and not sess._contains_state(newvalue_state)
):
if key != initiator.key:
_warn_for_cascade_backrefs(newvalue_state, prop)
sess._save_or_update_state(newvalue_state)
if (
oldvalue is not None
and oldvalue is not attributes.NEVER_SET
and oldvalue is not attributes.PASSIVE_NO_RESULT
and prop._cascade.delete_orphan
):
# possible to reach here with attributes.NEVER_SET ?
oldvalue_state = attributes.instance_state(oldvalue)
if oldvalue_state in sess._new and prop.mapper._is_orphan(
oldvalue_state
):
sess.expunge(oldvalue)
return newvalue
event.listen(descriptor, "append_wo_mutation", append, raw=True)
event.listen(descriptor, "append", append, raw=True, retval=True)
event.listen(descriptor, "remove", remove, raw=True, retval=True)
event.listen(descriptor, "set", set_, raw=True, retval=True)
class UOWTransaction(object):
def __init__(self, session):
self.session = session
# dictionary used by external actors to
# store arbitrary state information.
self.attributes = {}
# dictionary of mappers to sets of
# DependencyProcessors, which are also
# set to be part of the sorted flush actions,
# which have that mapper as a parent.
self.deps = util.defaultdict(set)
# dictionary of mappers to sets of InstanceState
# items pending for flush which have that mapper
# as a parent.
self.mappers = util.defaultdict(set)
# a dictionary of Preprocess objects, which gather
# additional states impacted by the flush
# and determine if a flush action is needed
self.presort_actions = {}
# dictionary of PostSortRec objects, each
# one issues work during the flush within
# a certain ordering.
self.postsort_actions = {}
# a set of 2-tuples, each containing two
# PostSortRec objects where the second
# is dependent on the first being executed
# first
self.dependencies = set()
# dictionary of InstanceState-> (isdelete, listonly)
# tuples, indicating if this state is to be deleted
# or insert/updated, or just refreshed
self.states = {}
# tracks InstanceStates which will be receiving
# a "post update" call. Keys are mappers,
# values are a set of states and a set of the
# columns which should be included in the update.
self.post_update_states = util.defaultdict(lambda: (set(), set()))
@property
def has_work(self):
return bool(self.states)
def was_already_deleted(self, state):
"""Return ``True`` if the given state is expired and was deleted
previously.
"""
if state.expired:
try:
state._load_expired(state, attributes.PASSIVE_OFF)
except orm_exc.ObjectDeletedError:
self.session._remove_newly_deleted([state])
return True
return False
def is_deleted(self, state):
"""Return ``True`` if the given state is marked as deleted
within this uowtransaction."""
return state in self.states and self.states[state][0]
def memo(self, key, callable_):
if key in self.attributes:
return self.attributes[key]
else:
self.attributes[key] = ret = callable_()
return ret
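    # Example usage (hypothetical key and callable): compute a value at most
    # once per flush and reuse it afterwards:
    #
    #     value = uow.memo(("my_cache_key",), lambda: expensive_lookup())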
def remove_state_actions(self, state):
"""Remove pending actions for a state from the uowtransaction."""
isdelete = self.states[state][0]
self.states[state] = (isdelete, True)
def get_attribute_history(
self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE
):
"""Facade to attributes.get_state_history(), including
caching of results."""
hashkey = ("history", state, key)
# cache the objects, not the states; the strong reference here
# prevents newly loaded objects from being dereferenced during the
# flush process
if hashkey in self.attributes:
history, state_history, cached_passive = self.attributes[hashkey]
# if the cached lookup was "passive" and now
# we want non-passive, do a non-passive lookup and re-cache
if (
not cached_passive & attributes.SQL_OK
and passive & attributes.SQL_OK
):
impl = state.manager[key].impl
history = impl.get_history(
state,
state.dict,
attributes.PASSIVE_OFF
| attributes.LOAD_AGAINST_COMMITTED
| attributes.NO_RAISE,
)
if history and impl.uses_objects:
state_history = history.as_state()
else:
state_history = history
self.attributes[hashkey] = (history, state_history, passive)
else:
impl = state.manager[key].impl
# TODO: store the history as (state, object) tuples
# so we don't have to keep converting here
history = impl.get_history(
state,
state.dict,
passive
| attributes.LOAD_AGAINST_COMMITTED
| attributes.NO_RAISE,
)
if history and impl.uses_objects:
state_history = history.as_state()
else:
state_history = history
self.attributes[hashkey] = (history, state_history, passive)
return state_history
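    # For example: a first call with PASSIVE_NO_INITIALIZE caches a history
    # computed without emitting SQL; a later call for the same (state, key)
    # with PASSIVE_OFF (which includes SQL_OK) re-fetches against the
    # committed value and replaces the cached entry, per the branch above.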
def has_dep(self, processor):
return (processor, True) in self.presort_actions
def register_preprocessor(self, processor, fromparent):
key = (processor, fromparent)
if key not in self.presort_actions:
self.presort_actions[key] = Preprocess(processor, fromparent)
def register_object(
self,
state,
isdelete=False,
listonly=False,
cancel_delete=False,
operation=None,
prop=None,
):
if not self.session._contains_state(state):
# this condition is normal when objects are registered
# as part of a relationship cascade operation. it should
# not occur for the top-level register from Session.flush().
if not state.deleted and operation is not None:
util.warn(
"Object of type %s not in session, %s operation "
"along '%s' will not proceed"
% (orm_util.state_class_str(state), operation, prop)
)
return False
if state not in self.states:
mapper = state.manager.mapper
if mapper not in self.mappers:
self._per_mapper_flush_actions(mapper)
self.mappers[mapper].add(state)
self.states[state] = (isdelete, listonly)
else:
if not listonly and (isdelete or cancel_delete):
self.states[state] = (isdelete, False)
return True
def register_post_update(self, state, post_update_cols):
mapper = state.manager.mapper.base_mapper
states, cols = self.post_update_states[mapper]
states.add(state)
cols.update(post_update_cols)
def _per_mapper_flush_actions(self, mapper):
saves = SaveUpdateAll(self, mapper.base_mapper)
deletes = DeleteAll(self, mapper.base_mapper)
self.dependencies.add((saves, deletes))
for dep in mapper._dependency_processors:
dep.per_property_preprocessors(self)
for prop in mapper.relationships:
if prop.viewonly:
continue
dep = prop._dependency_processor
dep.per_property_preprocessors(self)
@util.memoized_property
def _mapper_for_dep(self):
"""return a dynamic mapping of (Mapper, DependencyProcessor) to
True or False, indicating if the DependencyProcessor operates
on objects of that Mapper.
The result is stored in the dictionary persistently once
calculated.
"""
return util.PopulateDict(
lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
)
def filter_states_for_dep(self, dep, states):
"""Filter the given list of InstanceStates to those relevant to the
given DependencyProcessor.
"""
mapper_for_dep = self._mapper_for_dep
return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]
def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
checktup = (isdelete, listonly)
for mapper in mapper.base_mapper.self_and_descendants:
for state in self.mappers[mapper]:
if self.states[state] == checktup:
yield state
def _generate_actions(self):
"""Generate the full, unsorted collection of PostSortRecs as
well as dependency pairs for this UOWTransaction.
"""
# execute presort_actions, until all states
# have been processed. a presort_action might
# add new states to the uow.
while True:
ret = False
for action in list(self.presort_actions.values()):
if action.execute(self):
ret = True
if not ret:
break
# see if the graph of mapper dependencies has cycles.
self.cycles = cycles = topological.find_cycles(
self.dependencies, list(self.postsort_actions.values())
)
if cycles:
# if yes, break the per-mapper actions into
# per-state actions
convert = dict(
(rec, set(rec.per_state_flush_actions(self))) for rec in cycles
)
# rewrite the existing dependencies to point to
# the per-state actions for those per-mapper actions
# that were broken up.
for edge in list(self.dependencies):
if (
None in edge
or edge[0].disabled
or edge[1].disabled
or cycles.issuperset(edge)
):
self.dependencies.remove(edge)
elif edge[0] in cycles:
self.dependencies.remove(edge)
for dep in convert[edge[0]]:
self.dependencies.add((dep, edge[1]))
elif edge[1] in cycles:
self.dependencies.remove(edge)
for dep in convert[edge[1]]:
self.dependencies.add((edge[0], dep))
return set(
[a for a in self.postsort_actions.values() if not a.disabled]
).difference(cycles)
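    # Worked example of the edge rewriting above (hypothetical mappers A, B):
    # if SaveUpdateAll(A) is part of a cycle and gets broken into per-state
    # actions {SaveUpdateState(a1), SaveUpdateState(a2)}, an existing edge
    # (SaveUpdateAll(A), DeleteAll(B)) is replaced by the edges
    # (SaveUpdateState(a1), DeleteAll(B)) and (SaveUpdateState(a2), DeleteAll(B)).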
def execute(self):
postsort_actions = self._generate_actions()
postsort_actions = sorted(
postsort_actions,
key=lambda item: item.sort_key,
)
# sort = topological.sort(self.dependencies, postsort_actions)
# print "--------------"
# print "\ndependencies:", self.dependencies
# print "\ncycles:", self.cycles
# print "\nsort:", list(sort)
# print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)
# execute
if self.cycles:
for subset in topological.sort_as_subsets(
self.dependencies, postsort_actions
):
set_ = set(subset)
while set_:
n = set_.pop()
n.execute_aggregate(self, set_)
else:
for rec in topological.sort(self.dependencies, postsort_actions):
rec.execute(self)
def finalize_flush_changes(self):
"""Mark processed objects as clean / deleted after a successful
flush().
This method is called within the flush() method after the
execute() method has succeeded and the transaction has been committed.
"""
if not self.states:
return
states = set(self.states)
isdel = set(
s for (s, (isdelete, listonly)) in self.states.items() if isdelete
)
other = states.difference(isdel)
if isdel:
self.session._remove_newly_deleted(isdel)
if other:
self.session._register_persistent(other)
class IterateMappersMixin(object):
def _mappers(self, uow):
if self.fromparent:
return iter(
m
for m in self.dependency_processor.parent.self_and_descendants
if uow._mapper_for_dep[(m, self.dependency_processor)]
)
else:
return self.dependency_processor.mapper.self_and_descendants
class Preprocess(IterateMappersMixin):
__slots__ = (
"dependency_processor",
"fromparent",
"processed",
"setup_flush_actions",
)
def __init__(self, dependency_processor, fromparent):
self.dependency_processor = dependency_processor
self.fromparent = fromparent
self.processed = set()
self.setup_flush_actions = False
def execute(self, uow):
delete_states = set()
save_states = set()
for mapper in self._mappers(uow):
for state in uow.mappers[mapper].difference(self.processed):
(isdelete, listonly) = uow.states[state]
if not listonly:
if isdelete:
delete_states.add(state)
else:
save_states.add(state)
if delete_states:
self.dependency_processor.presort_deletes(uow, delete_states)
self.processed.update(delete_states)
if save_states:
self.dependency_processor.presort_saves(uow, save_states)
self.processed.update(save_states)
if delete_states or save_states:
if not self.setup_flush_actions and (
self.dependency_processor.prop_has_changes(
uow, delete_states, True
)
or self.dependency_processor.prop_has_changes(
uow, save_states, False
)
):
self.dependency_processor.per_property_flush_actions(uow)
self.setup_flush_actions = True
return True
else:
return False
class PostSortRec(object):
__slots__ = ("disabled",)
def __new__(cls, uow, *args):
key = (cls,) + args
if key in uow.postsort_actions:
return uow.postsort_actions[key]
else:
uow.postsort_actions[key] = ret = object.__new__(cls)
ret.disabled = False
return ret
def execute_aggregate(self, uow, recs):
self.execute(uow)
class ProcessAll(IterateMappersMixin, PostSortRec):
__slots__ = "dependency_processor", "isdelete", "fromparent", "sort_key"
def __init__(self, uow, dependency_processor, isdelete, fromparent):
self.dependency_processor = dependency_processor
self.sort_key = (
"ProcessAll",
self.dependency_processor.sort_key,
isdelete,
)
self.isdelete = isdelete
self.fromparent = fromparent
uow.deps[dependency_processor.parent.base_mapper].add(
dependency_processor
)
def execute(self, uow):
states = self._elements(uow)
if self.isdelete:
self.dependency_processor.process_deletes(uow, states)
else:
self.dependency_processor.process_saves(uow, states)
def per_state_flush_actions(self, uow):
# this is handled by SaveUpdateAll and DeleteAll,
# since a ProcessAll should unconditionally be pulled
# into per-state if either the parent/child mappers
# are part of a cycle
return iter([])
def __repr__(self):
return "%s(%s, isdelete=%s)" % (
self.__class__.__name__,
self.dependency_processor,
self.isdelete,
)
def _elements(self, uow):
for mapper in self._mappers(uow):
for state in uow.mappers[mapper]:
(isdelete, listonly) = uow.states[state]
if isdelete == self.isdelete and not listonly:
yield state
class PostUpdateAll(PostSortRec):
__slots__ = "mapper", "isdelete", "sort_key"
def __init__(self, uow, mapper, isdelete):
self.mapper = mapper
self.isdelete = isdelete
self.sort_key = ("PostUpdateAll", mapper._sort_key, isdelete)
@util.preload_module("sqlalchemy.orm.persistence")
def execute(self, uow):
persistence = util.preloaded.orm_persistence
states, cols = uow.post_update_states[self.mapper]
states = [s for s in states if uow.states[s][0] == self.isdelete]
persistence.post_update(self.mapper, states, uow, cols)
class SaveUpdateAll(PostSortRec):
__slots__ = ("mapper", "sort_key")
def __init__(self, uow, mapper):
self.mapper = mapper
self.sort_key = ("SaveUpdateAll", mapper._sort_key)
assert mapper is mapper.base_mapper
@util.preload_module("sqlalchemy.orm.persistence")
def execute(self, uow):
util.preloaded.orm_persistence.save_obj(
self.mapper,
uow.states_for_mapper_hierarchy(self.mapper, False, False),
uow,
)
def per_state_flush_actions(self, uow):
states = list(
uow.states_for_mapper_hierarchy(self.mapper, False, False)
)
base_mapper = self.mapper.base_mapper
delete_all = DeleteAll(uow, base_mapper)
for state in states:
# keep saves before deletes -
# this ensures 'row switch' operations work
action = SaveUpdateState(uow, state)
uow.dependencies.add((action, delete_all))
yield action
for dep in uow.deps[self.mapper]:
states_for_prop = uow.filter_states_for_dep(dep, states)
dep.per_state_flush_actions(uow, states_for_prop, False)
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.mapper)
class DeleteAll(PostSortRec):
__slots__ = ("mapper", "sort_key")
def __init__(self, uow, mapper):
self.mapper = mapper
self.sort_key = ("DeleteAll", mapper._sort_key)
assert mapper is mapper.base_mapper
@util.preload_module("sqlalchemy.orm.persistence")
def execute(self, uow):
util.preloaded.orm_persistence.delete_obj(
self.mapper,
uow.states_for_mapper_hierarchy(self.mapper, True, False),
uow,
)
def per_state_flush_actions(self, uow):
states = list(
uow.states_for_mapper_hierarchy(self.mapper, True, False)
)
base_mapper = self.mapper.base_mapper
save_all = SaveUpdateAll(uow, base_mapper)
for state in states:
# keep saves before deletes -
# this ensures 'row switch' operations work
action = DeleteState(uow, state)
uow.dependencies.add((save_all, action))
yield action
for dep in uow.deps[self.mapper]:
states_for_prop = uow.filter_states_for_dep(dep, states)
dep.per_state_flush_actions(uow, states_for_prop, True)
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.mapper)
class ProcessState(PostSortRec):
__slots__ = "dependency_processor", "isdelete", "state", "sort_key"
def __init__(self, uow, dependency_processor, isdelete, state):
self.dependency_processor = dependency_processor
self.sort_key = ("ProcessState", dependency_processor.sort_key)
self.isdelete = isdelete
self.state = state
def execute_aggregate(self, uow, recs):
cls_ = self.__class__
dependency_processor = self.dependency_processor
isdelete = self.isdelete
our_recs = [
r
for r in recs
if r.__class__ is cls_
and r.dependency_processor is dependency_processor
and r.isdelete is isdelete
]
recs.difference_update(our_recs)
states = [self.state] + [r.state for r in our_recs]
if isdelete:
dependency_processor.process_deletes(uow, states)
else:
dependency_processor.process_saves(uow, states)
def __repr__(self):
return "%s(%s, %s, delete=%s)" % (
self.__class__.__name__,
self.dependency_processor,
orm_util.state_str(self.state),
self.isdelete,
)
class SaveUpdateState(PostSortRec):
__slots__ = "state", "mapper", "sort_key"
def __init__(self, uow, state):
self.state = state
self.mapper = state.mapper.base_mapper
self.sort_key = ("ProcessState", self.mapper._sort_key)
@util.preload_module("sqlalchemy.orm.persistence")
def execute_aggregate(self, uow, recs):
persistence = util.preloaded.orm_persistence
cls_ = self.__class__
mapper = self.mapper
our_recs = [
r for r in recs if r.__class__ is cls_ and r.mapper is mapper
]
recs.difference_update(our_recs)
persistence.save_obj(
mapper, [self.state] + [r.state for r in our_recs], uow
)
def __repr__(self):
return "%s(%s)" % (
self.__class__.__name__,
orm_util.state_str(self.state),
)
class DeleteState(PostSortRec):
__slots__ = "state", "mapper", "sort_key"
def __init__(self, uow, state):
self.state = state
self.mapper = state.mapper.base_mapper
self.sort_key = ("DeleteState", self.mapper._sort_key)
@util.preload_module("sqlalchemy.orm.persistence")
def execute_aggregate(self, uow, recs):
persistence = util.preloaded.orm_persistence
cls_ = self.__class__
mapper = self.mapper
our_recs = [
r for r in recs if r.__class__ is cls_ and r.mapper is mapper
]
recs.difference_update(our_recs)
states = [self.state] + [r.state for r in our_recs]
persistence.delete_obj(
mapper, [s for s in states if uow.states[s][0]], uow
)
def __repr__(self):
return "%s(%s)" % (
self.__class__.__name__,
orm_util.state_str(self.state),
)
| {
"content_hash": "8952857fb96f937a37533901df1cf063",
"timestamp": "",
"source": "github",
"line_count": 777,
"max_line_length": 79,
"avg_line_length": 34.557271557271555,
"alnum_prop": 0.5681352649808201,
"repo_name": "monetate/sqlalchemy",
"id": "f29d11bcd59b9969a7de2c863d122cf6d6dc59ea",
"size": "27090",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/sqlalchemy/orm/unitofwork.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "49142"
},
{
"name": "Python",
"bytes": "11790244"
}
],
"symlink_target": ""
} |
__version__ = "1.6.1"
from .TerminalNotifier import Notifier
| {
"content_hash": "c5af612803ef248b718f2072a0e217b6",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 38,
"avg_line_length": 20.666666666666668,
"alnum_prop": 0.7096774193548387,
"repo_name": "dalimatt/Instastalk",
"id": "d90298e7e0178e2f2092fab14aff0ad7692622e4",
"size": "62",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dependencies/pync/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "107849"
}
],
"symlink_target": ""
} |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scheduling', '0017_update_ui_type'),
]
operations = [
migrations.RemoveField(
model_name='alertevent',
name='time_to_wait',
),
migrations.AddField(
model_name='alertevent',
name='minutes_to_wait',
field=models.IntegerField(default=0),
preserve_default=False,
),
]
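# Note: the RemoveField/AddField pair above discards any stored time_to_wait
# values. A hedged sketch of a data-preserving variant (the unit conversion is
# hypothetical, since the old field's type is not shown here):
#
#     def copy_wait_times(apps, schema_editor):
#         AlertEvent = apps.get_model('scheduling', 'AlertEvent')
#         for event in AlertEvent.objects.all():
#             event.minutes_to_wait = event.time_to_wait  # convert as needed
#             event.save(update_fields=['minutes_to_wait'])
#
# ordered as AddField first, then migrations.RunPython(copy_wait_times),
# then RemoveField.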
| {
"content_hash": "b951120a57803e8e1f6d34bdf51f50eb",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 49,
"avg_line_length": 23.571428571428573,
"alnum_prop": 0.5535353535353535,
"repo_name": "dimagi/commcare-hq",
"id": "9b7ce972fe134926d41a445e812ac5235e9dd713",
"size": "546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/messaging/scheduling/migrations/0018_minutes_to_wait.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
} |
from processing.actions.sendmessage import SendMessage
from processing.pipelines.basepipeline import Pipeline
class SendSparqlQueryResponse(Pipeline):
"""Pipeline that handles outgoing SPARQL-query responses"""
name = 'SendSparqlQuery'
description = 'Pipeline that handles outgoing SPARQL-query responses'
chain = [
SendMessage
]
def __init__(self, token):
self.token = token
def execute(self):
return Pipeline.handle(self.chain, self.token)
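# Usage sketch (the token object is hypothetical here; it's produced elsewhere
# in the service):
#
#     pipeline = SendSparqlQueryResponse(token)
#     result = pipeline.execute()  # delegates to Pipeline.handle(chain, token)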
| {
"content_hash": "582cb91059e3f08ed920f6e4416dfbbe",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 73,
"avg_line_length": 27.72222222222222,
"alnum_prop": 0.7134268537074149,
"repo_name": "onnovalkering/sparql-over-sms",
"id": "66f210836687198b911d7454c3827b43768e5b82",
"size": "499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sos-service/src/processing/pipelines/sendsparqlqueryresponse.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1016"
},
{
"name": "HTML",
"bytes": "155"
},
{
"name": "Java",
"bytes": "53896"
},
{
"name": "JavaScript",
"bytes": "16397"
},
{
"name": "Python",
"bytes": "61385"
},
{
"name": "Shell",
"bytes": "4394"
},
{
"name": "Vue",
"bytes": "30343"
}
],
"symlink_target": ""
} |
import optparse
import sys
from smimeX509validation import TrustAnchor
import logging, logging.config
import os.path
from smimeX509validation import TrustStore, LoadDirChainOfTrust, smimeX509validation
def main():
p = optparse.OptionParser()
    p.add_option('-m', '--message', action='append',
                 help='adds a message to be tested.')
    p.add_option('-c', '--certs-dir', action='store',
                 help='Path of certificates dir',
                 default='/etc/grid-security/certificates/')
options, arguments = p.parse_args()
if not os.path.isdir(options.certs_dir):
print ("Warning not a directory:%s" % (options.certs_dir))
sys.exit(1)
anchor = LoadDirChainOfTrust(options.certs_dir)
    if options.message is None:
sys.exit(1)
else:
for item in options.message:
#print anchor.validate_file(item)
smimeProcessor = smimeX509validation(anchor)
smimeProcessor.ProcessFile(item)
            print(smimeProcessor.InputCertMetaDataList)
            print(smimeProcessor.verified)
            print(smimeProcessor.InputDaraStringIO.getvalue())
if __name__ == "__main__":
logging.basicConfig(level=logging.WARNING)
main()
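# Example invocation (file and directory paths are illustrative):
#
#     python message_signed_validation.py \
#         -m signed-message.txt \
#         -c /etc/grid-security/certificates/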
| {
"content_hash": "43d4148d9799c64df933ac2305f388d4",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 83,
"avg_line_length": 35.31428571428572,
"alnum_prop": 0.6545307443365695,
"repo_name": "osynge/deb-python-smimex509validation",
"id": "d4b844fa8bf97cf02abc4a1ea9eccfe8da2ca0b0",
"size": "1258",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "message_signed_validation.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "42802"
}
],
"symlink_target": ""
} |
""" Defines standard features and rules.
"""
import b2.build.targets as targets
import sys
from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
from b2.util.utility import *
from b2.util import path, regex, bjam_signature, is_iterable_typed
import b2.tools.types
from b2.manager import get_manager
# Records explicit properties for a variant.
# The key is the variant name.
__variant_explicit_properties = {}
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __variant_explicit_properties
__variant_explicit_properties = {}
@bjam_signature((["name"], ["parents_or_properties", "*"], ["explicit_properties", "*"]))
def variant (name, parents_or_properties, explicit_properties = []):
""" Declares a new variant.
First determines explicit properties for this variant, by
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
Second, determines the full property set for this variant by
adding to the explicit properties default values for all properties
    which are neither present nor symmetric.
    Lastly, makes the appropriate value of the 'variant' property expand
    to the full property set.
name: Name of the variant
parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
"""
parents = []
if not explicit_properties:
explicit_properties = parents_or_properties
else:
parents = parents_or_properties
inherited = property_set.empty()
if parents:
        # If we allowed multiple parents, we'd have to check for conflicts
        # between base variants, and there has been no demand, so we don't bother.
if len (parents) > 1:
raise BaseException ("Multiple base variants are not yet supported")
p = parents[0]
# TODO: the check may be stricter
if not feature.is_implicit_value (p):
raise BaseException ("Invalid base variant '%s'" % p)
inherited = __variant_explicit_properties[p]
explicit_properties = property_set.create_with_validation(explicit_properties)
explicit_properties = inherited.refine(explicit_properties)
# Record explicitly specified properties for this variant
# We do this after inheriting parents' properties, so that
# they affect other variants, derived from this one.
__variant_explicit_properties[name] = explicit_properties
feature.extend('variant', [name])
feature.compose ("<variant>" + name, explicit_properties.all())
__os_names = """
amiga aix appletv bsd cygwin darwin dos emx freebsd hpux hurd iphone linux
netbsd openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix
unixware vms windows
""".split()
# Translates from bjam current OS to the os tags used in host-os and target-os,
# i.e. returns the running host-os.
#
def default_host_os():
host_os = os_name()
if host_os not in (x.upper() for x in __os_names):
if host_os == 'NT': host_os = 'windows'
elif host_os == 'AS400': host_os = 'unix'
elif host_os == 'MINGW': host_os = 'windows'
elif host_os == 'BSDI': host_os = 'bsd'
elif host_os == 'COHERENT': host_os = 'unix'
elif host_os == 'DRAGONFLYBSD': host_os = 'bsd'
elif host_os == 'IRIX': host_os = 'sgi'
elif host_os == 'MACOSX': host_os = 'darwin'
elif host_os == 'KFREEBSD': host_os = 'freebsd'
elif host_os == 'LINUX': host_os = 'linux'
elif host_os == 'HAIKU': host_os = 'haiku'
else: host_os = 'unix'
return host_os.lower()
def register_globals ():
""" Registers all features and variants declared by this module.
"""
# This feature is used to determine which OS we're on.
# In future, this may become <target-os> and <host-os>
# TODO: check this. Compatibility with bjam names? Subfeature for version?
os = sys.platform
feature.feature ('os', [os], ['propagated', 'link-incompatible'])
# The two OS features define a known set of abstract OS names. The host-os is
# the OS under which bjam is running. Even though this should really be a fixed
# property we need to list all the values to prevent unknown value errors. Both
# set the default value to the current OS to account for the default use case of
# building on the target OS.
feature.feature('host-os', __os_names)
feature.set_default('host-os', default_host_os())
feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
feature.set_default('target-os', default_host_os())
feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])
feature.feature ('stdlib', ['native'], ['propagated', 'composite'])
feature.feature ('link', ['shared', 'static'], ['propagated'])
feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])
feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated'])
feature.feature ('profiling', ['off', 'on'], ['propagated'])
feature.feature ('inlining', ['off', 'on', 'full'], ['propagated'])
feature.feature ('threading', ['single', 'multi'], ['propagated'])
feature.feature ('rtti', ['on', 'off'], ['propagated'])
feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
# Whether there is support for asynchronous EH (e.g. catching SEGVs).
feature.feature ('asynch-exceptions', ['off', 'on'], ['propagated'])
# Whether all extern "C" functions are considered nothrow by default.
feature.feature ('extern-c-nothrow', ['off', 'on'], ['propagated'])
feature.feature ('debug-symbols', ['on', 'off'], ['propagated'])
feature.feature ('define', [], ['free'])
feature.feature ('undef', [], ['free'])
feature.feature ('include', [], ['free', 'path']) #order-sensitive
feature.feature ('cflags', [], ['free'])
feature.feature ('cxxflags', [], ['free'])
feature.feature ('asmflags', [], ['free'])
feature.feature ('linkflags', [], ['free'])
feature.feature ('archiveflags', [], ['free'])
feature.feature ('version', [], ['free'])
feature.feature ('location-prefix', [], ['free'])
feature.feature ('action', [], ['free'])
# The following features are incidental, since
    # in themselves they have no effect on build products.
# Not making them incidental will result in problems in corner
# cases, for example:
#
# unit-test a : a.cpp : <use>b ;
# lib b : a.cpp b ;
#
# Here, if <use> is not incidental, we'll decide we have two
# targets for a.obj with different properties, and will complain.
#
    # Note that making a feature incidental does not mean it's ignored. It may
    # be ignored when creating the virtual target, but the rest of the build
    # process will still use it.
feature.feature ('use', [], ['free', 'dependency', 'incidental'])
feature.feature ('dependency', [], ['free', 'dependency', 'incidental'])
feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental'])
feature.feature('warnings', [
'on', # Enable default/"reasonable" warning level for the tool.
'all', # Enable all possible warnings issued by the tool.
'off'], # Disable all warnings issued by the tool.
['incidental', 'propagated'])
feature.feature('warnings-as-errors', [
'off', # Do not fail the compilation if there are warnings.
'on'], # Fail the compilation if there are warnings.
['incidental', 'propagated'])
feature.feature('coverage', [
'off', # Disable coverage generation for the tool.
'on'], # Enable coverage generation for the tool.
['incidental', 'propagated'])
feature.feature('c++-template-depth',
[str(i) for i in range(64,1024+1,64)] +
[str(i) for i in range(20,1000+1,10)] +
# Maximum template instantiation depth guaranteed for ANSI/ISO C++
# conforming programs.
['17'],
['incidental', 'optional', 'propagated'])
feature.feature ('source', [], ['free', 'dependency', 'incidental'])
feature.feature ('library', [], ['free', 'dependency', 'incidental'])
feature.feature ('file', [], ['free', 'dependency', 'incidental'])
feature.feature ('find-shared-library', [], ['free']) #order-sensitive ;
feature.feature ('find-static-library', [], ['free']) #order-sensitive ;
feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ;
# Internal feature.
feature.feature ('library-file', [], ['free', 'dependency'])
feature.feature ('name', [], ['free'])
feature.feature ('tag', [], ['free'])
feature.feature ('search', [], ['free', 'path']) #order-sensitive ;
feature.feature ('location', [], ['free', 'path'])
feature.feature ('dll-path', [], ['free', 'path'])
feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental'])
    # This is an internal feature which holds the paths of all dependency
    # dynamic libraries. On Windows, it's needed so that we can add all
    # those paths to PATH when running applications.
    # On Linux, it's needed to add proper -rpath-link command line options.
feature.feature ('xdll-path', [], ['free', 'path'])
    # Provides a means to specify a def-file for Windows DLLs.
feature.feature ('def-file', [], ['free', 'dependency'])
    # This feature is used to allow specific generators to run.
    # For example, QT tools can only be invoked when the QT library
    # is used. In that case, <allow>qt will be in the usage requirements
    # of the library.
feature.feature ('allow', [], ['free'])
# The addressing model to generate code for. Currently a limited set only
# specifying the bit size of pointers.
feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])
# Type of CPU architecture to compile for.
feature.feature('architecture', [
# x86 and x86-64
'x86',
# ia64
'ia64',
# Sparc
'sparc',
# RS/6000 & PowerPC
'power',
# MIPS/SGI
'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64',
# HP/PA-RISC
'parisc',
# Advanced RISC Machines
'arm',
# z Systems (aka s390x)
's390x',
# Combined architectures for platforms/toolsets that support building for
# multiple architectures at once. "combined" would be the default multi-arch
# for the toolset.
'combined',
'combined-x86-power'],
['propagated', 'optional'])
# The specific instruction set in an architecture to compile.
feature.feature('instruction-set', [
# x86 and x86-64
'native', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3',
'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'core2', 'corei7', 'corei7-avx', 'core-avx-i',
'conroe', 'conroe-xe', 'conroe-l', 'allendale', 'merom', 'merom-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
'yorksfield', 'nehalem', 'sandy-bridge', 'ivy-bridge', 'haswell', 'broadwell', 'skylake', 'skylake-avx512', 'cannonlake',
'icelake-client', 'icelake-server', 'cascadelake', 'cooperlake', 'tigerlake',
'atom',
'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp', 'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx',
'k8-sse3', 'opteron-sse3', 'athlon64-sse3', 'amdfam10', 'barcelona', 'bdver1', 'bdver2', 'bdver3', 'btver1',
'btver2', 'znver1', 'znver2',
'winchip-c6', 'winchip2',
'c3', 'c3-2', 'c7',
# ia64
'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley',
# Sparc
'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934',
'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3',
# RS/6000 & PowerPC
'401', '403', '405', '405fp', '440', '440fp', '505', '601', '602',
'603', '603e', '604', '604e', '620', '630', '740', '7400',
'7450', '750', '801', '821', '823', '860', '970', '8540',
'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2',
'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios',
'rios1', 'rsc', 'rios2', 'rs64a',
# MIPS
'4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000',
'r4100', 'r4300', 'r4400', 'r4600', 'r4650',
'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100',
'vr4111', 'vr4120', 'vr4130', 'vr4300',
'vr5000', 'vr5400', 'vr5500',
# HP/PA-RISC
'700', '7100', '7100lc', '7200', '7300', '8000',
# Advanced RISC Machines
'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312',
# z Systems (aka s390x)
'z196', 'zEC12', 'z13', 'z13', 'z14', 'z15'],
['propagated', 'optional'])
feature.feature('conditional', [], ['incidental', 'free'])
# The value of 'no' prevents building of a target.
feature.feature('build', ['yes', 'no'], ['optional'])
# Windows-specific features
feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])
feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])
variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on'])
variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
'<runtime-debugging>off', '<define>NDEBUG'])
variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])
reset ()
register_globals ()
class SearchedLibTarget (virtual_target.AbstractFileTarget):
def __init__ (self, name, project, shared, search, action):
virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action)
self.shared_ = shared
self.search_ = search
def shared (self):
return self.shared_
def search (self):
return self.search_
def actualize_location (self, target):
bjam.call("NOTFILE", target)
def path (self):
#FIXME: several functions rely on this not being None
return ""
class CScanner (scanner.Scanner):
def __init__ (self, includes):
scanner.Scanner.__init__ (self)
self.includes_ = []
for i in includes:
self.includes_.extend(i.split("&&"))
def pattern (self):
return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'
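    # Illustrative lines matched by the pattern above:
    #
    #     #include <boost/config.hpp>   -> group 2: 'boost/config.hpp' (angle)
    #     #include "local/header.h"     -> group 3: 'local/header.h' (quoted)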
def process (self, target, matches, binding):
        # Since it's possible for this function to be called thousands to
        # millions of times (depending on how many header files there are),
        # some optimizations have been applied here. Anything slightly out
        # of the ordinary for Python code has been commented.
angle = []
quoted = []
for match in matches:
if '<' in match:
angle.append(match.strip('<>'))
elif '"' in match:
quoted.append(match.strip('"'))
g = id(self)
b = os.path.normpath(os.path.dirname(binding[0]))
        # Attach the binding of the including file to included targets.
        # When a target is directly created from a virtual target this
        # extra information is unnecessary. But in other cases, it allows
        # us to distinguish between two headers of the same name included
        # from different places.
        # We don't need this extra information for angle includes, since
        # they should not depend on the including file (we can't get a
        # literal "." in the include path).
# Note: string interpolation is slightly faster
# than .format()
g2 = '<%s#%s>' % (g, b)
g = "<%s>" % g
angle = [g + x for x in angle]
quoted = [g2 + x for x in quoted]
all = angle + quoted
bjam.call("mark-included", target, all)
# each include in self.includes_ looks something like this:
# <include>path/to/somewhere
# calling get_value(include) is super slow,
# calling .replace('<include>', '') is much faster
# however, i[9:] is the fastest way of stripping off the "<include>"
# substring.
include_paths = [i[9:] for i in self.includes_]
engine = get_manager().engine()
engine.set_target_variable(angle, "SEARCH", include_paths)
engine.set_target_variable(quoted, "SEARCH", [b] + include_paths)
# Just propagate current scanner to includes, in a hope
# that includes do not change scanners.
get_manager().scanners().propagate(self, all)
scanner.register (CScanner, 'include')
type.set_scanner ('CPP', CScanner)
type.set_scanner ('C', CScanner)
type.set_scanner('H', CScanner)
type.set_scanner('HPP', CScanner)
# Ported to trunk@47077
class LibGenerator (generators.Generator):
""" The generator class for libraries (target type LIB). Depending on properties it will
request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or
SHARED_LIB.
"""
def __init__(self, id, composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)
def run(self, project, name, prop_set, sources):
assert isinstance(project, targets.ProjectTarget)
assert isinstance(name, basestring) or name is None
assert isinstance(prop_set, property_set.PropertySet)
assert is_iterable_typed(sources, virtual_target.VirtualTarget)
        # The lib generator is composing and can only be invoked with an
        # explicit name. This check is present in generator.run (and so in
        # builtin.LinkingGenerator), but we duplicate it here to avoid doing
        # extra work.
if name:
properties = prop_set.raw()
# Determine the needed target type
actual_type = None
properties_grist = get_grist(properties)
if '<source>' not in properties_grist and \
('<search>' in properties_grist or '<name>' in properties_grist):
actual_type = 'SEARCHED_LIB'
elif '<file>' in properties_grist:
# The generator for
actual_type = 'LIB'
elif '<link>shared' in properties:
actual_type = 'SHARED_LIB'
else:
actual_type = 'STATIC_LIB'
prop_set = prop_set.add_raw(['<main-target-type>LIB'])
# Construct the target.
return generators.construct(project, name, actual_type, prop_set, sources)
def viable_source_types(self):
return ['*']
generators.register(LibGenerator("builtin.lib-generator"))
generators.override("builtin.prebuilt", "builtin.lib-generator")
def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]):
"""The implementation of the 'lib' rule. Beyond standard syntax that rule allows
simplified: 'lib a b c ;'."""
assert is_iterable_typed(names, basestring)
assert is_iterable_typed(sources, basestring)
assert is_iterable_typed(requirements, basestring)
assert is_iterable_typed(default_build, basestring)
assert is_iterable_typed(usage_requirements, basestring)
if len(names) > 1:
if any(r.startswith('<name>') for r in requirements):
get_manager().errors()("When several names are given to the 'lib' rule\n" +
"it is not allowed to specify the <name> feature.")
if sources:
get_manager().errors()("When several names are given to the 'lib' rule\n" +
"it is not allowed to specify sources.")
project = get_manager().projects().current()
result = []
for name in names:
r = requirements[:]
# Support " lib a ; " and " lib a b c ; " syntax.
if not sources and not any(r.startswith("<name>") for r in requirements) \
and not any(r.startswith("<file") for r in requirements):
r.append("<name>" + name)
result.append(targets.create_typed_metatarget(name, "LIB", sources,
r,
default_build,
usage_requirements))
return result
get_manager().projects().add_rule("lib", lib)
# Updated to trunk@47077
class SearchedLibGenerator (generators.Generator):
def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
# TODO: the comment below looks strange. There are no requirements!
        # The requirements cause the generators to be tried *only* when we're
        # building a lib target and the 'search' feature is present. This seems
        # ugly --- all we want is to make sure SearchedLibGenerator is not
        # invoked deep in the transformation search.
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run(self, project, name, prop_set, sources):
assert isinstance(project, targets.ProjectTarget)
assert isinstance(name, basestring) or name is None
assert isinstance(prop_set, property_set.PropertySet)
assert is_iterable_typed(sources, virtual_target.VirtualTarget)
        # If name is empty, we're not being called from the top level. In that
        # case, fail immediately, because SearchedLibGenerator cannot be used
        # to produce intermediate targets.
        if not name:
            return None
properties = prop_set.raw ()
shared = '<link>shared' in properties
a = virtual_target.NullAction (project.manager(), prop_set)
real_name = feature.get_values ('<name>', properties)
if real_name:
real_name = real_name[0]
else:
real_name = name
search = feature.get_values('<search>', properties)
usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
t = SearchedLibTarget(real_name, project, shared, search, a)
# We return sources for a simple reason. If there's
# lib png : z : <name>png ;
# the 'z' target should be returned, so that apps linking to
# 'png' will link to 'z', too.
return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
generators.register (SearchedLibGenerator ())
class PrebuiltLibGenerator(generators.Generator):
def __init__(self, id, composing, source_types, target_types_and_names, requirements):
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run(self, project, name, properties, sources):
assert isinstance(project, targets.ProjectTarget)
assert isinstance(name, basestring)
assert isinstance(properties, property_set.PropertySet)
assert is_iterable_typed(sources, virtual_target.VirtualTarget)
f = properties.get("file")
return f + sources
generators.register(PrebuiltLibGenerator("builtin.prebuilt", False, [],
["LIB"], ["<file>"]))
generators.override("builtin.prebuilt", "builtin.lib-generator")
class CompileAction (virtual_target.Action):
def __init__ (self, manager, sources, action_name, prop_set):
virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)
def adjust_properties (self, prop_set):
""" For all virtual targets for the same dependency graph as self,
i.e. which belong to the same main target, add their directories
to include path.
"""
assert isinstance(prop_set, property_set.PropertySet)
s = self.targets () [0].creating_subvariant ()
return prop_set.add_raw (s.implicit_includes ('include', 'H'))
class CCompilingGenerator (generators.Generator):
""" Declare a special compiler generator.
        The only thing it does is change the type used to represent
        'action' in the constructed dependency graph to 'CompileAction'.
        That class in turn adds additional include paths to handle the case
        where a source file includes headers which are themselves generated.
"""
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
# TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def action_class (self):
return CompileAction
def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []):
g = CCompilingGenerator (id, False, source_types, target_types, requirements + optional_properties)
return generators.register (g)
class LinkingGenerator (generators.Generator):
""" The generator class for handling EXE and SHARED_LIB creation.
"""
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run (self, project, name, prop_set, sources):
assert isinstance(project, targets.ProjectTarget)
assert isinstance(name, basestring) or name is None
assert isinstance(prop_set, property_set.PropertySet)
assert is_iterable_typed(sources, virtual_target.VirtualTarget)
# create a copy since sources is being modified
sources = list(sources)
sources.extend(prop_set.get('<library>'))
# Add <library-path> properties for all searched libraries
extra = []
for s in sources:
if s.type () == 'SEARCHED_LIB':
search = s.search()
extra.extend(property.Property('<library-path>', sp) for sp in search)
        # It's possible that we have libraries in sources which did not come
        # from a 'lib' target. For example, libraries which are specified
        # just as filenames as sources. We don't have xdll-path properties
        # for such targets, but still need to add proper dll-path properties.
extra_xdll_path = []
for s in sources:
if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
# Unfortunately, we don't have a good way to find the path
# to a file, so use this nasty approach.
p = s.project()
location = path.root(s.name(), p.get('source-location')[0])
extra_xdll_path.append(os.path.dirname(location))
# Hardcode DLL paths only when linking executables.
# Pros: do not need to relink libraries when installing.
# Cons: "standalone" libraries (plugins, python extensions) can not
# hardcode paths to dependent libraries.
if prop_set.get('<hardcode-dll-paths>') == ['true'] \
and type.is_derived(self.target_types_ [0], 'EXE'):
xdll_path = prop_set.get('<xdll-path>')
extra.extend(property.Property('<dll-path>', sp) \
for sp in extra_xdll_path)
extra.extend(property.Property('<dll-path>', sp) \
for sp in xdll_path)
if extra:
prop_set = prop_set.add_raw (extra)
result = generators.Generator.run(self, project, name, prop_set, sources)
if result:
ur = self.extra_usage_requirements(result, prop_set)
ur = ur.add(property_set.create(['<xdll-path>' + p for p in extra_xdll_path]))
else:
return None
return (ur, result)
def extra_usage_requirements (self, created_targets, prop_set):
assert is_iterable_typed(created_targets, virtual_target.VirtualTarget)
assert isinstance(prop_set, property_set.PropertySet)
result = property_set.empty ()
extra = []
# Add appropriate <xdll-path> usage requirements.
raw = prop_set.raw ()
if '<link>shared' in raw:
paths = []
# TODO: is it safe to use the current directory? I think we should use
# another mechanism to allow this to be run from anywhere.
pwd = os.getcwd()
for t in created_targets:
if type.is_derived(t.type(), 'SHARED_LIB'):
paths.append(path.root(path.make(t.path()), pwd))
extra += replace_grist(paths, '<xdll-path>')
# We need to pass <xdll-path> features that we've got from sources,
# because if shared library is built, exe which uses it must know paths
# to other shared libraries this one depends on, to be able to find them
# all at runtime.
            # Just pass all features in property_set; it's theoretically
            # possible that we'll propagate <xdll-path> features explicitly
            # specified by the user, but then the user's to blame for using
            # an internal feature.
values = prop_set.get('<xdll-path>')
extra += replace_grist(values, '<xdll-path>')
if extra:
result = property_set.create(extra)
return result
def generated_targets (self, sources, prop_set, project, name):
assert is_iterable_typed(sources, virtual_target.VirtualTarget)
assert isinstance(prop_set, property_set.PropertySet)
assert isinstance(project, targets.ProjectTarget)
assert isinstance(name, basestring)
# sources to pass to inherited rule
sources2 = []
# sources which are libraries
libraries = []
        # Searched libraries are not passed as arguments to the linker
        # but via an option, so we pass them to the action via a
        # property.
fsa = []
fst = []
for s in sources:
if type.is_derived(s.type(), 'SEARCHED_LIB'):
n = s.name()
if s.shared():
fsa.append(n)
else:
fst.append(n)
else:
sources2.append(s)
add = []
if fsa:
add.append("<find-shared-library>" + '&&'.join(fsa))
if fst:
add.append("<find-static-library>" + '&&'.join(fst))
spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name)
return spawn
def register_linker(id, source_types, target_types, requirements):
g = LinkingGenerator(id, True, source_types, target_types, requirements)
generators.register(g)
class ArchiveGenerator (generators.Generator):
""" The generator class for handling STATIC_LIB creation.
"""
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)
def run (self, project, name, prop_set, sources):
assert isinstance(project, targets.ProjectTarget)
assert isinstance(name, basestring) or name is None
assert isinstance(prop_set, property_set.PropertySet)
assert is_iterable_typed(sources, virtual_target.VirtualTarget)
# create a copy since this modifies the sources list
sources = list(sources)
sources.extend(prop_set.get('<library>'))
result = generators.Generator.run (self, project, name, prop_set, sources)
usage_requirements = []
link = prop_set.get('<link>')
if 'static' in link:
for t in sources:
if type.is_derived(t.type(), 'LIB'):
usage_requirements.append(property.Property('<library>', t))
usage_requirements = property_set.create(usage_requirements)
return usage_requirements, result
def register_archiver(id, source_types, target_types, requirements):
g = ArchiveGenerator(id, True, source_types, target_types, requirements)
generators.register(g)
class DummyGenerator(generators.Generator):
"""Generator that accepts everything and produces nothing. Useful as a general
fallback for toolset-specific actions like PCH generation.
"""
def run (self, project, name, prop_set, sources):
return (property_set.empty(), [])
get_manager().projects().add_rule("variant", variant)
import stage
import symlink
import message
| {
"content_hash": "2f25686ab12c8f8a6d3da5e45c6d53b4",
"timestamp": "",
"source": "github",
"line_count": 803,
"max_line_length": 153,
"avg_line_length": 41.60024906600249,
"alnum_prop": 0.6153869181260291,
"repo_name": "davehorton/drachtio-server",
"id": "ecec2c9e3332fd3f1580762598262f46bceeeb84",
"size": "33774",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "deps/boost_1_77_0/tools/build/src/tools/builtin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "662596"
},
{
"name": "Dockerfile",
"bytes": "1330"
},
{
"name": "JavaScript",
"bytes": "60639"
},
{
"name": "M4",
"bytes": "35273"
},
{
"name": "Makefile",
"bytes": "5960"
},
{
"name": "Shell",
"bytes": "47298"
}
],
"symlink_target": ""
} |
"""Test largefile support on system where this makes sense.
"""
import os
import stat
import sys
import unittest
from test.support import run_unittest, TESTFN, verbose, requires, \
unlink
import io # C implementation of io
import _pyio as pyio # Python implementation of io
try:
import signal
# The default handler for SIGXFSZ is to abort the process.
# By ignoring it, system calls exceeding the file size resource
# limit will raise IOError instead of crashing the interpreter.
oldhandler = signal.signal(signal.SIGXFSZ, signal.SIG_IGN)
except (ImportError, AttributeError):
pass
# create >2GB file (2GB = 2147483648 bytes)
size = 2500000000
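# (deliberately larger than 2**31 == 2147483648, so 32-bit file offsets
# would overflow)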
class LargeFileTest(unittest.TestCase):
"""Test that each file function works as expected for a large
(i.e. > 2GB, do we have to check > 4GB) files.
NOTE: the order of execution of the test methods is important! test_seek
must run first to create the test file. File cleanup must also be handled
outside the test instances because of this.
"""
def test_seek(self):
if verbose:
print('create large file via seek (may be sparse file) ...')
with self.open(TESTFN, 'wb') as f:
f.write(b'z')
f.seek(0)
f.seek(size)
f.write(b'a')
f.flush()
if verbose:
print('check file size with os.fstat')
self.assertEqual(os.fstat(f.fileno())[stat.ST_SIZE], size+1)
def test_osstat(self):
if verbose:
print('check file size with os.stat')
self.assertEqual(os.stat(TESTFN)[stat.ST_SIZE], size+1)
def test_seek_read(self):
if verbose:
print('play around with seek() and read() with the built largefile')
with self.open(TESTFN, 'rb') as f:
self.assertEqual(f.tell(), 0)
self.assertEqual(f.read(1), b'z')
self.assertEqual(f.tell(), 1)
f.seek(0)
self.assertEqual(f.tell(), 0)
f.seek(0, 0)
self.assertEqual(f.tell(), 0)
f.seek(42)
self.assertEqual(f.tell(), 42)
f.seek(42, 0)
self.assertEqual(f.tell(), 42)
f.seek(42, 1)
self.assertEqual(f.tell(), 84)
f.seek(0, 1)
self.assertEqual(f.tell(), 84)
f.seek(0, 2) # seek from the end
self.assertEqual(f.tell(), size + 1 + 0)
f.seek(-10, 2)
self.assertEqual(f.tell(), size + 1 - 10)
f.seek(-size-1, 2)
self.assertEqual(f.tell(), 0)
f.seek(size)
self.assertEqual(f.tell(), size)
# the 'a' that was written at the end of file above
self.assertEqual(f.read(1), b'a')
f.seek(-size-1, 1)
self.assertEqual(f.read(1), b'z')
self.assertEqual(f.tell(), 1)
def test_lseek(self):
if verbose:
print('play around with os.lseek() with the built largefile')
with self.open(TESTFN, 'rb') as f:
self.assertEqual(os.lseek(f.fileno(), 0, 0), 0)
self.assertEqual(os.lseek(f.fileno(), 42, 0), 42)
self.assertEqual(os.lseek(f.fileno(), 42, 1), 84)
self.assertEqual(os.lseek(f.fileno(), 0, 1), 84)
self.assertEqual(os.lseek(f.fileno(), 0, 2), size+1+0)
self.assertEqual(os.lseek(f.fileno(), -10, 2), size+1-10)
self.assertEqual(os.lseek(f.fileno(), -size-1, 2), 0)
self.assertEqual(os.lseek(f.fileno(), size, 0), size)
# the 'a' that was written at the end of file above
self.assertEqual(f.read(1), b'a')
def test_truncate(self):
if verbose:
print('try truncate')
with self.open(TESTFN, 'r+b') as f:
# this is already decided before the test suite starts running,
# but we do it anyway for extra protection
if not hasattr(f, 'truncate'):
raise unittest.SkipTest("open().truncate() not available on this system")
f.seek(0, 2)
# else we've lost track of the true size
self.assertEqual(f.tell(), size+1)
# Cut it back via seek + truncate with no argument.
newsize = size - 10
f.seek(newsize)
f.truncate()
self.assertEqual(f.tell(), newsize) # else pointer moved
f.seek(0, 2)
self.assertEqual(f.tell(), newsize) # else wasn't truncated
# Ensure that truncate(smaller than true size) shrinks
# the file.
newsize -= 1
f.seek(42)
f.truncate(newsize)
self.assertEqual(f.tell(), newsize) # else wasn't truncated
f.seek(0, 2)
self.assertEqual(f.tell(), newsize)
# XXX truncate(larger than true size) is ill-defined
# across platform; cut it waaaaay back
f.seek(0)
f.truncate(1)
self.assertEqual(f.tell(), 1) # else pointer moved
f.seek(0)
self.assertEqual(len(f.read()), 1) # else wasn't truncated
def test_seekable(self):
# Issue #5016; seekable() can return False when the current position
# is negative when truncated to an int.
for pos in (2**31-1, 2**31, 2**31+1):
with self.open(TESTFN, 'rb') as f:
f.seek(pos)
self.assert_(f.seekable())
def test_main():
# On Windows and Mac OS X this test consumes large resources; it
# takes a long time to build the >2GB file and takes >2GB of disk
# space, therefore the resource must be enabled to run this test.
# If not, nothing after this requires() call will be executed.
if sys.platform[:3] == 'win' or sys.platform == 'darwin':
requires('largefile',
'test requires %s bytes and a long time to run' % str(size))
else:
# Only run if the current filesystem supports large files.
# (Skip this test on Windows, since we now always support
# large files.)
f = open(TESTFN, 'wb', buffering=0)
try:
# 2**31 == 2147483648
f.seek(2147483649)
# Seeking is not enough of a test: you must write and
# flush, too!
f.write(b'x')
f.flush()
except (IOError, OverflowError):
f.close()
unlink(TESTFN)
raise unittest.SkipTest("filesystem does not have largefile support")
else:
f.close()
suite = unittest.TestSuite()
for _open, prefix in [(io.open, 'C'), (pyio.open, 'Py')]:
class TestCase(LargeFileTest):
pass
TestCase.open = staticmethod(_open)
TestCase.__name__ = prefix + LargeFileTest.__name__
suite.addTest(TestCase('test_seek'))
suite.addTest(TestCase('test_osstat'))
suite.addTest(TestCase('test_seek_read'))
suite.addTest(TestCase('test_lseek'))
with _open(TESTFN, 'wb') as f:
if hasattr(f, 'truncate'):
suite.addTest(TestCase('test_truncate'))
suite.addTest(TestCase('test_seekable'))
unlink(TESTFN)
try:
run_unittest(suite)
finally:
unlink(TESTFN)
if __name__ == '__main__':
test_main()
| {
"content_hash": "d34edc587540480cf789d5d5177ea6ff",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 89,
"avg_line_length": 38.427083333333336,
"alnum_prop": 0.5612632149634047,
"repo_name": "MalloyPower/parsing-python",
"id": "1e868e059dc0fcdbf6a8a1634f0568f976a72b99",
"size": "7378",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-3.1/Lib/test/test_largefile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
import asyncio
import inspect
from collections import defaultdict, namedtuple
from typing import Awaitable, Callable, Iterable, Mapping
from aioworkers.core.base import LoggingEntity
from aioworkers.net.web.exceptions import HttpException
from .request import Request
Route = namedtuple('Route', 'handler kwargs')
class Application(LoggingEntity):
async def init(self):
self._routes = defaultdict(dict)
resources = self.config.get('resources')
for url, name, routes in Resources(resources):
for method, operation in routes.items():
if not isinstance(operation, Mapping):
raise TypeError(
'operation for {method} {url} '
'expected Mapping, not {t}'.format(
method=method.upper(),
url=url,
t=type(operation),
)
)
operation = dict(operation)
handler = operation.pop('handler')
self.add_route(method, url, handler, name=name)
def add_route(self, method, path, handler, name=None, **kwargs):
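# Record which optional parameters ('request', 'context') the handler
# accepts, so __call__ can later pass only the arguments it supports.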
handlers = self._routes[path]
method = method.upper()
assert method not in handlers
h = self.context.get_object(handler)
if callable(h):
try:
kwargs = inspect.signature(h).parameters
except ValueError:
kwargs = ()
else:
kwargs = ()
handlers[method] = Route(h, kwargs)
async def __call__(
self,
scope: Mapping,
receive: Callable[[], Awaitable],
send: Callable[[Mapping], Awaitable],
):
request = Request(
app=self,
context=self.context,
scope=scope,
receive=receive,
send=send,
)
path = scope['path']
method = scope['method']
if path not in self._routes:
await send(
{
'type': 'http.response.start',
'status': 404,
}
)
return await send(
{
'type': 'http.response.body',
}
)
handlers = self._routes[path]
if method not in handlers:
await send(
{
'type': 'http.response.start',
'status': 405,
}
)
return await send(
{
'type': 'http.response.body',
}
)
route = handlers[method]
handler = route.handler
kwargs = {}
if 'request' in route.kwargs:
kwargs['request'] = request
if 'context' in route.kwargs:
kwargs['context'] = self._context
if asyncio.iscoroutinefunction(handler):
handler = await handler(**kwargs)
elif callable(handler):
handler = handler(**kwargs)
if asyncio.isfuture(handler):
handler = await handler
try:
return request.response(handler, format='json')
except HttpException as e:
request.response(e, status=500)
self.logger.exception('Server error:')
class Resources(Iterable):
def __init__(self, resources: Mapping) -> None:
self._resources = resources
self._prepared = self._sort_resources(resources)
def __iter__(self):
return iter(self._prepared)
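# Yields (priority, url, name, routes) tuples from the nested resources
# mapping; keys that do not start with '/' act as namespaces whose names
# are joined with ':'.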
def _iter_resources(self, resources, prefix=''):
if not resources:
return
elif not isinstance(resources, Mapping):
raise TypeError(
'Resources should be described in dict %s' % resources
)
prefix += resources.get('prefix', '')
for name, sub in resources.items():
if name == 'prefix':
continue
elif not isinstance(sub, Mapping):
raise TypeError(
'Resource should be described in dict %s' % sub
)
routes = dict(sub)
priority = routes.pop('priority', 0)
if 'include' in routes:
url = name if name.startswith('/') else None
if url:
url = prefix + url
else:
url = prefix
if url:
routes['prefix'] = url + routes.get('prefix', '')
yield priority, url or None, name, routes
continue
elif not name.startswith('/'):
for p, u, n, rs in self._iter_resources(sub, prefix):
if n:
n = ':'.join((name, n))
yield p, u, n, rs
continue
url = name
name = routes.pop('name', None)
for k, v in routes.items():
if 'include' in routes:
continue
elif isinstance(v, str):
routes[k] = {'handler': v}
yield priority, prefix + url, name, routes
def _sort_resources(self, resources):
iterator = self._iter_resources(resources)
r = sorted(iterator, key=lambda x: x[0], reverse=True)
return map(lambda x: x[1:], r)
| {
"content_hash": "57138172a1a6a29ab828ae300569784a",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 70,
"avg_line_length": 33.608695652173914,
"alnum_prop": 0.4884494548142672,
"repo_name": "aamalev/aioworkers",
"id": "850fc95849dd1b7c604dc34ec4ed018d1c03f528",
"size": "5411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aioworkers/net/web/app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "61723"
}
],
"symlink_target": ""
} |
import re
import csv
import ipaddress
__version__ = 1.0
routes_key = {"C" : "Connected",
"S" : "Static",
"R" : "RIP",
"B" : "BGP",
"B*" : "BGP*",
"D" : "EIGRP",
"D*" : "EIGRP*",
"D EX" : "EIGRP External",
"O" : "OSPF",
"O*" : "OSPF*",
"O*E1" : "OSPF* Candidate Default",
"O*E2" : "OSPF* Candidate Default",
"O E1" : "OSPF External 1",
"O E2" : "OSPF External 2",
"O IA" : "OSPF Inter-Area",
"O N1" : "OSPF NSSA External Type 1",
"O N2" : "OSPF NSSA External Type 2",
"L" : "Local",
"i" : "IS-IS",
"i su" : "IS-IS Summary",
"i L1" : "IS-IS Level-1",
"i L2" : "IS-IS Level-2",
"i ia" : "IS-IS Inter-Area",
"*" : "Candidate Default"}
# Each route will have the following values
class Route(object):
def __init__(self):
self.route = {}
self.protocol = []
self.metric = []
self.next_hop = []
self.age = []
self.interface = []
def __repr__(self):
return str(self.route)
# The main code structure
class Code(object):
def __init__(self):
self.route_table = {}
self.read_file()
self.generate_output_to_file()
# Retrieve a route object if it exists
def get_route(self,route):
return self.route_table.get(route)
# If the regular expression picked up a valid route, extract the values into a temporary dictionary
def get_match_values(self,match):
values = {}
for keyword, value in vars(Route()).items():
if keyword in match.groupdict():
val = str(match.group(keyword).strip())
values[keyword] = val
else:
values[keyword] = ""
return values
# Create a new route object using the values from the temporary dictionary
def create_route(self,match):
match_values = self.get_match_values(match)
route_prefix = match_values["route"]
existing_route = self.get_route(route_prefix)
if not existing_route:
new_route = Route()
new_route.route = match_values["route"]
new_route.protocol.append(match_values["protocol"])
new_route.metric.append(match_values["metric"])
new_route.next_hop.append(match_values["next_hop"])
new_route.interface.append(match_values["interface"])
new_route.age.append(match_values["age"])
self.route_table[route_prefix] = new_route
# Search the route for an ECMP pattern and then update the route object if it is found
def add_ecmp_route(self,route,string_to_search):
parent_route = self.get_route(route)
ecmp_patterns = [
r'(?P<metric>\[.*\/.*\]) via (?P<next_hop>.*), (?P<age>.*), (?P<interface>.*)', \
r'(?P<metric>\[.*\/.*\]) via (?P<next_hop>.*), (?P<age>.*)'
]
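# A typical ECMP continuation line looks like (illustrative):
#   [110/20] via 10.1.1.2, 00:12:03, GigabitEthernet0/1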
for pattern in ecmp_patterns:
match = re.search(pattern,string_to_search)
if match:
parent_route.protocol.append(parent_route.protocol[0])
parent_route.metric.append(match.group('metric').strip())
parent_route.next_hop.append(match.group('next_hop').strip())
parent_route.age.append(match.group('age').strip())
try:
parent_route.interface.append(match.group('interface').strip())
except IndexError:
parent_route.interface.append("N/A")
break
def get_host_range(self,subnet):
try:
range = ipaddress.ip_network(subnet)
return range[1],range[-2]
except ValueError:
return "error", "error"
except IndexError: # Handles /32
return range[0], ""
def generate_output_to_file(self):
try:
with open('routes.csv', 'w', newline='') as csv_file:
spamwriter = csv.writer(
csv_file,
delimiter=',',
quotechar='|',
quoting=csv.QUOTE_MINIMAL
)
spamwriter.writerow([
'Route',
'Protocol',
'Metric',
'Next Hop',
'Age',
'Interface',
'From Range',
'To Range']
)
for entry in sorted(self.route_table):
route = self.get_route(entry)
first_ip, last_ip = self.get_host_range(route.route)
for no in range(len(route.protocol)):
spamwriter.writerow([
route.route,
route.protocol[no],
route.metric[no],
route.next_hop[no],
route.age[no],
route.interface[no],
first_ip,
last_ip
])
print (" -- Output saved to 'routes.csv'")
except:
print (" -- Unable to write to routes.csv, if the file is already open close it.")
def read_file(self):
start_processing = False
prefix_being_processed = None
invalid_phrases = ["variably","subnetted"]
with open("routes.txt","r") as route_file:
for line in route_file:
#-----------------------
# Ignore certain input
#-----------------------
if line.count(' ') < 2:
continue
if any(x in line for x in invalid_phrases):
continue
if "Gateway" in line:
start_processing = True
continue
if not start_processing:
continue
line = line.strip().replace("\n","")
#---------------------------------------
# Define all the possible regex patterns
#---------------------------------------
# Line 1. BGP
# Line 2. IGP (OSPF,EIGRP etc)
# Line 3. Static routes
# Line 4. Connected/local routes
#---------------------------------------
if start_processing:
patterns = [
r'(?P<protocol>[a-zA-Z] ..) (?P<route>.*) (?P<metric>.*) via (?P<next_hop>.*), ?(?P<age>.*), ?(?P<interface>.*)', \
r'(?P<protocol>[a-zA-Z]..) (?P<route>.*) (?P<metric>.*) via (?P<next_hop>.*), ?(?P<age>.*), ?(?P<interface>.*)', \
r'(?P<protocol>[a-zA-Z]) (?P<route>.*) is a summary, (?P<age>.*), (?P<interface>.*)', \
r'(?P<protocol>[a-zA-Z]..) (?P<route>.*) is a summary, (?P<age>.*), (?P<interface>.*)', \
r'(?P<protocol>B.*|B\*.*) (?P<route>.*) (?P<metric>.*) via (?P<next_hop>.*), (?P<age>.*)', \
r'(?P<protocol>S.*|S\*.*) (?P<route>.*) (?P<metric>.*) via (?P<next_hop>.*)', \
r'(?P<protocol>C.*|L.*) (?P<route>.*) is directly connected, (?P<interface>.*)'
]
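# Illustrative lines these patterns are meant to match:
#   "O    10.1.1.0/24 [110/20] via 10.0.0.2, 00:12:03, Gi0/1"
#   "C    192.168.1.0/24 is directly connected, Vlan10"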
#-----------------------------------------------------
# Cycle through all the patterns and grab the matches
#-----------------------------------------------------
ecmp_route_found = False
for regex in patterns:
match = re.search(regex,line)
if match:
self.create_route(match)
prefix_being_processed = match.group('route').strip()
ecmp_route_found = True
break
if not ecmp_route_found and prefix_being_processed:
self.add_ecmp_route(prefix_being_processed, line)
print ("Cisco IOS Route Parser version: '{}'".format(__version__))
c = Code()
| {
"content_hash": "86c297960f22284cc9471548b12e1555",
"timestamp": "",
"source": "github",
"line_count": 204,
"max_line_length": 139,
"avg_line_length": 40.94117647058823,
"alnum_prop": 0.43139367816091956,
"repo_name": "abbacode/ciscorouteparser",
"id": "821cfa0700fa86eeed6daaf70da6f99d8bdf4ef9",
"size": "8352",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "route_parse_ios.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13748"
}
],
"symlink_target": ""
} |
"""Export metadata to YAML for Datasette."""
import argparse
import logging
import sys
import coloredlogs
from pudl.metadata.classes import DatasetteMetadata
logger = logging.getLogger(__name__)
def parse_command_line(argv):
"""Parse command line arguments. See the -h option.
Args:
argv (list): Command line arguments, including the script name.
Returns:
argparse.Namespace: Parsed command line arguments and their values.
"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"-o",
"--output",
help="Path to the file where the YAML output should be written.",
default=False,
)
arguments = parser.parse_args(argv[1:])
return arguments
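# Illustrative invocation (hypothetical output path):
#   python metadata_to_yml.py -o /tmp/datasette-metadata.yml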
def main():
"""Convert metadata to YAML."""
pudl_logger = logging.getLogger("pudl")
log_format = "%(asctime)s [%(levelname)8s] %(name)s:%(lineno)s %(message)s"
coloredlogs.install(fmt=log_format, level="INFO", logger=pudl_logger)
args = parse_command_line(sys.argv)
logger.info(f"Exporting metadata to: {args.output}")
dm = DatasetteMetadata.from_data_source_ids()
dm.to_yaml(path=args.output)
if __name__ == "__main__":
sys.exit(main())
| {
"content_hash": "94c282b663703c4dddc1244211c162aa",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 87,
"avg_line_length": 25.510204081632654,
"alnum_prop": 0.6624,
"repo_name": "catalyst-cooperative/pudl",
"id": "cb8f0a89df66b17b8d505371d2b0911b55d5a3d4",
"size": "1250",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/pudl/convert/metadata_to_yml.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jinja",
"bytes": "7488"
},
{
"name": "Python",
"bytes": "1658629"
},
{
"name": "Shell",
"bytes": "5118"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from django.utils import timezone
from chords.models import Artist, Song
from .helper_functions import create_artist, create_song, create_user
class UserModelTests(TestCase):
def test_cannot_bookmark_same_song_twice(self):
"""
User must not be able to bookmark the same song more than once.
"""
user = create_user()
song = create_song()
user.bookmarks.add(song)
num_bookmarks = user.bookmarks.count()
user.bookmarks.add(song)
self.assertEqual(user.bookmarks.count(), num_bookmarks)
def test_song_deleted_bookmark_deleted(self):
"""
If any of a user's bookmarked songs gets deleted, it should also be
removed from the bookmarks.
"""
user = create_user()
song = create_song()
user.bookmarks.add(song)
num_bookmarks = user.bookmarks.count()
song.delete()
self.assertEqual(user.bookmarks.count(), num_bookmarks - 1)
def test_song_unpublished_bookmark_unchanged(self):
"""
If any of a user's bookmarked songs gets unpublished, it should remain
in the bookmarks.
"""
user = create_user()
song = create_song(published=True)
user.bookmarks.add(song)
num_bookmarks = user.bookmarks.count()
song.published = False
song.save()
self.assertEqual(user.bookmarks.count(), num_bookmarks)
class ArtistModelTests(TestCase):
def test_slug_line_creation(self):
"""
When we add an artist, an appropriate slug must be created.
"""
artist = create_artist(name='Some Artist')
self.assertEqual(artist.slug, 'some-artist')
def test_slug_line_creation_greek(self):
"""
When we add an artist with a greek name, an appropriate slug must be
created in english.
"""
artist = create_artist(name='Τυχαίο Όνομα Καλλιτέχνη')
self.assertEqual(artist.slug, 'tyxaio-onoma-kallitexnh')
def test_slugs_are_unique(self):
"""
Artist slugs must be always unique, even when there are artists with
the same name.
"""
artist1 = create_artist()
artist2 = Artist(name=artist1.name)
artist2.save()
self.assertNotEqual(artist1.slug, artist2.slug)
def test_slugs_are_of_appropriate_size(self):
"""
Artist slug must not exceed the specified length.
"""
slug_length = 20
artist = Artist(name='Some Artist')
artist.save(slug_max_length=slug_length)
self.assertLessEqual(len(artist.slug), slug_length)
def test_slug_when_name_changes(self):
"""
Once created, the slug must never change, even when we update the artist
name, in order to avoid broken links.
"""
artist = create_artist(name='Some Artist')
orig_slug = artist.slug
artist.name = 'Some Other Name'
artist.save()
self.assertEqual(artist.slug, orig_slug)
class SongModelTests(TestCase):
def test_slug_line_creation(self):
"""
When we add a song, an appropriate slug must be created.
"""
song = create_song(title='Random Song')
self.assertEqual(song.slug, 'random-song')
def test_slug_line_creation_greek(self):
"""
When we add a song with a greek title, an appropriate slug must be
created in english.
"""
song = create_song(title='Τυχαίο όνομα από τραγούδι')
self.assertEqual(song.slug, 'tyxaio-onoma-apo-tragoudi')
def test_slugs_are_unique(self):
"""
Song slugs must be always unique, even when they have the same title.
"""
song1 = create_song()
song2 = Song(title=song1.title, artist=song1.artist)
song2.save()
self.assertNotEqual(song1.slug, song2.slug)
def test_slugs_are_of_appropriate_size(self):
"""
Song slug must not exceed the specified length.
"""
slug_length = 5
song = Song(title='Random Song', artist=create_artist())
song.save(slug_max_length=slug_length)
self.assertLessEqual(len(song.slug), slug_length)
def test_slug_when_title_changes(self):
"""
Once created, the slug must never change, even when we update the song
title, in order to avoid broken links.
"""
song = create_song(title='Random Song')
orig_slug = song.slug
song.title = 'Some Other Name'
song.save()
self.assertEqual(song.slug, orig_slug)
def test_pub_date_with_a_published_song(self):
"""
Published songs must have a publish date in the past.
"""
song = create_song(published=True)
self.assertLessEqual(song.pub_date, timezone.now())
def test_pub_date_with_an_unpublished_song(self):
"""
Unpublished songs should have no publish date, even if they were
published once.
"""
song = create_song(published=False)
self.assertEqual(song.pub_date, None)
song.published = True
song.save()
song.published = False
song.save()
self.assertEqual(song.pub_date, None)
| {
"content_hash": "d81fe3e7c245edb098ee97280f3b6f8b",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 78,
"avg_line_length": 33.081761006289305,
"alnum_prop": 0.6131178707224335,
"repo_name": "Ilias95/guitarchords",
"id": "c4efb0f24f563a9ec9202a5ee5febfb8df9b61be",
"size": "5303",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chords/tests/test_models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2150"
},
{
"name": "HTML",
"bytes": "30403"
},
{
"name": "JavaScript",
"bytes": "14348"
},
{
"name": "Python",
"bytes": "86306"
},
{
"name": "Shell",
"bytes": "778"
}
],
"symlink_target": ""
} |
import base64
import json
import logging
from google.appengine.api import urlfetch
from google.appengine.ext import ndb
from consts.event_type import EventType
from controllers.api.api_status_controller import ApiStatusController
from datafeeds.datafeed_base import DatafeedBase
from models.event_team import EventTeam
from models.sitevar import Sitevar
from parsers.fms_api.fms_api_awards_parser import FMSAPIAwardsParser
from parsers.fms_api.fms_api_event_alliances_parser import FMSAPIEventAlliancesParser
from parsers.fms_api.fms_api_event_list_parser import FMSAPIEventListParser
from parsers.fms_api.fms_api_event_rankings_parser import FMSAPIEventRankingsParser
from parsers.fms_api.fms_api_match_parser import FMSAPIHybridScheduleParser, FMSAPIMatchDetailsParser
from parsers.fms_api.fms_api_team_details_parser import FMSAPITeamDetailsParser
class DatafeedFMSAPI(object):
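# Championship subdivision event codes differ between TBA and the FMS API;
# EVENT_SHORT_EXCEPTIONS maps them to the API's codes, and SUBDIV_TO_DIV
# maps each subdivision to its parent division (used when fetching awards).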
EVENT_SHORT_EXCEPTIONS = {
'arc': 'archimedes',
'cars': 'carson',
'carv': 'carver',
'cur': 'curie',
'gal': 'galileo',
'hop': 'hopper',
'new': 'newton',
'tes': 'tesla',
}
SUBDIV_TO_DIV = {
'arc': 'cmp-arte',
'cars': 'cmp-gaca',
'carv': 'cmp-cuca',
'cur': 'cmp-cuca',
'gal': 'cmp-gaca',
'hop': 'cmp-neho',
'new': 'cmp-neho',
'tes': 'cmp-arte',
}
def __init__(self, version):
fms_api_secrets = Sitevar.get_by_id('fmsapi.secrets')
if fms_api_secrets is None:
raise Exception("Missing sitevar: fmsapi.secrets. Can't access FMS API.")
fms_api_username = fms_api_secrets.contents['username']
fms_api_authkey = fms_api_secrets.contents['authkey']
self._fms_api_authtoken = base64.b64encode('{}:{}'.format(fms_api_username, fms_api_authkey))
self._is_down_sitevar = Sitevar.get_by_id('apistatus.fmsapi_down')
if not self._is_down_sitevar:
self._is_down_sitevar = Sitevar(id="apistatus.fmsapi_down", description="Is FMSAPI down?")
if version == 'v1.0':
FMS_API_URL_BASE = 'https://frc-api.firstinspires.org/api/v1.0'
self.FMS_API_AWARDS_URL_PATTERN = FMS_API_URL_BASE + '/awards/%s/%s' # (year, event_short)
self.FMS_API_HYBRID_SCHEDULE_QUAL_URL_PATTERN = FMS_API_URL_BASE + '/schedule/%s/%s/qual/hybrid' # (year, event_short)
self.FMS_API_HYBRID_SCHEDULE_PLAYOFF_URL_PATTERN = FMS_API_URL_BASE + '/schedule/%s/%s/playoff/hybrid' # (year, event_short)
self.FMS_API_EVENT_RANKINGS_URL_PATTERN = FMS_API_URL_BASE + '/rankings/%s/%s' # (year, event_short)
self.FMS_API_EVENT_ALLIANCES_URL_PATTERN = FMS_API_URL_BASE + '/alliances/%s/%s' # (year, event_short)
self.FMS_API_TEAM_DETAILS_URL_PATTERN = FMS_API_URL_BASE + '/teams/%s/?teamNumber=%s' # (year, teamNumber)
self.FMS_API_EVENT_LIST_URL_PATTERN = FMS_API_URL_BASE + '/events/season=%s'
self.FMS_API_EVENTTEAM_LIST_URL_PATTERN = FMS_API_URL_BASE + '/teams/?season=%s&eventCode=%s&page=%s' # (year, eventCode, page)
elif version == 'v2.0':
FMS_API_URL_BASE = 'https://frc-api.firstinspires.org/v2.0'
self.FMS_API_AWARDS_URL_PATTERN = FMS_API_URL_BASE + '/%s/awards/%s' # (year, event_short)
self.FMS_API_HYBRID_SCHEDULE_QUAL_URL_PATTERN = FMS_API_URL_BASE + '/%s/schedule/%s/qual/hybrid' # (year, event_short)
self.FMS_API_HYBRID_SCHEDULE_PLAYOFF_URL_PATTERN = FMS_API_URL_BASE + '/%s/schedule/%s/playoff/hybrid' # (year, event_short)
self.FMS_API_MATCH_DETAILS_QUAL_URL_PATTERN = FMS_API_URL_BASE + '/%s/scores/%s/qual' # (year, event_short)
self.FMS_API_MATCH_DETAILS_PLAYOFF_URL_PATTERN = FMS_API_URL_BASE + '/%s/scores/%s/playoff' # (year, event_short)
self.FMS_API_EVENT_RANKINGS_URL_PATTERN = FMS_API_URL_BASE + '/%s/rankings/%s' # (year, event_short)
self.FMS_API_EVENT_ALLIANCES_URL_PATTERN = FMS_API_URL_BASE + '/%s/alliances/%s' # (year, event_short)
self.FMS_API_TEAM_DETAILS_URL_PATTERN = FMS_API_URL_BASE + '/%s/teams/?teamNumber=%s' # (year, teamNumber)
self.FMS_API_EVENT_LIST_URL_PATTERN = FMS_API_URL_BASE + '/%s/events' # year
self.FMS_API_EVENTTEAM_LIST_URL_PATTERN = FMS_API_URL_BASE + '/%s/teams/?eventCode=%s&page=%s' # (year, eventCode, page)
else:
raise Exception("Unknown FMS API version: {}".format(version))
def _get_event_short(self, event_short):
return self.EVENT_SHORT_EXCEPTIONS.get(event_short, event_short)
@ndb.tasklet
def _parse_async(self, url, parser):
headers = {
'Authorization': 'Basic {}'.format(self._fms_api_authtoken),
'Cache-Control': 'no-cache, max-age=10',
'Pragma': 'no-cache',
}
try:
rpc = urlfetch.create_rpc(deadline=10)
result = yield urlfetch.make_fetch_call(rpc, url, headers=headers)
except Exception, e:
logging.error("URLFetch failed for: {}".format(url))
logging.info(e)
raise ndb.Return(None)
old_status = self._is_down_sitevar.contents
if result.status_code == 200:
self._is_down_sitevar.contents = False
self._is_down_sitevar.put()
ApiStatusController.clear_cache_if_needed(old_status, self._is_down_sitevar.contents)
raise ndb.Return(parser.parse(json.loads(result.content)))
elif result.status_code // 100 == 5:
# 5XX error - something is wrong with the server
logging.warning('URLFetch for %s failed; Error code %s' % (url, result.status_code))
self._is_down_sitevar.contents = True
self._is_down_sitevar.put()
ApiStatusController.clear_cache_if_needed(old_status, self._is_down_sitevar.contents)
raise ndb.Return(None)
else:
logging.warning('URLFetch for %s failed; Error code %s' % (url, result.status_code))
raise ndb.Return(None)
@ndb.toplevel
def _parse(self, url, parser):
result = yield self._parse_async(url, parser)
raise ndb.Return(result)
def getAwards(self, event):
awards = []
if event.event_type_enum == EventType.CMP_DIVISION and event.year >= 2015: # 8 subdivisions from 2015+ have awards listed under 4 divisions
event_team_keys = EventTeam.query(EventTeam.event == event.key).fetch(keys_only=True)
valid_team_nums = set([int(etk.id().split('_')[1][3:]) for etk in event_team_keys])
awards += self._parse(self.FMS_API_AWARDS_URL_PATTERN % (event.year, self._get_event_short(self.SUBDIV_TO_DIV[event.event_short])), FMSAPIAwardsParser(event, valid_team_nums))
awards += self._parse(self.FMS_API_AWARDS_URL_PATTERN % (event.year, self._get_event_short(event.event_short)), FMSAPIAwardsParser(event))
return awards
def getEventAlliances(self, event_key):
year = int(event_key[:4])
event_short = event_key[4:]
alliances = self._parse(self.FMS_API_EVENT_ALLIANCES_URL_PATTERN % (year, self._get_event_short(event_short)), FMSAPIEventAlliancesParser())
return alliances
def getMatches(self, event_key):
year = int(event_key[:4])
event_short = event_key[4:]
hs_parser = FMSAPIHybridScheduleParser(year, event_short)
detail_parser = FMSAPIMatchDetailsParser(year, event_short)
qual_matches_future = self._parse_async(self.FMS_API_HYBRID_SCHEDULE_QUAL_URL_PATTERN % (year, self._get_event_short(event_short)), hs_parser)
playoff_matches_future = self._parse_async(self.FMS_API_HYBRID_SCHEDULE_PLAYOFF_URL_PATTERN % (year, self._get_event_short(event_short)), hs_parser)
qual_details_future = self._parse_async(self.FMS_API_MATCH_DETAILS_QUAL_URL_PATTERN % (year, self._get_event_short(event_short)), detail_parser)
playoff_details_future = self._parse_async(self.FMS_API_MATCH_DETAILS_PLAYOFF_URL_PATTERN % (year, self._get_event_short(event_short)), detail_parser)
matches_by_key = {}
qual_matches = qual_matches_future.get_result()
if qual_matches is not None:
for match in qual_matches:
matches_by_key[match.key.id()] = match
playoff_matches = playoff_matches_future.get_result()
if playoff_matches is not None:
for match in playoff_matches:
matches_by_key[match.key.id()] = match
qual_details = qual_details_future.get_result()
qual_details_items = qual_details.items() if qual_details is not None else []
playoff_details = playoff_details_future.get_result()
playoff_details_items = playoff_details.items() if playoff_details is not None else []
for match_key, match_details in qual_details_items + playoff_details_items:
if match_key in matches_by_key:
matches_by_key[match_key].score_breakdown_json = json.dumps(match_details)
return matches_by_key.values()
def getEventRankings(self, event_key):
year = int(event_key[:4])
event_short = event_key[4:]
rankings = self._parse(self.FMS_API_EVENT_RANKINGS_URL_PATTERN % (year, self._get_event_short(event_short)), FMSAPIEventRankingsParser(year))
return rankings
def getTeamDetails(self, year, team_key):
team_number = team_key[3:] # everything after 'frc'
result = self._parse(self.FMS_API_TEAM_DETAILS_URL_PATTERN % (year, team_number), FMSAPITeamDetailsParser(year))
if result:
return result[0] # (team, districtteam, robot)
else:
return None
def getEventList(self, year):
events = self._parse(self.FMS_API_EVENT_LIST_URL_PATTERN % (year), FMSAPIEventListParser(year))
return events
# Returns list of tuples (team, districtteam, robot)
def getEventTeams(self, event_key):
year = int(event_key[:4])
event_code = self._get_event_short(event_key[4:])
parser = FMSAPITeamDetailsParser(year)
models = [] # will be list of tuples (team, districtteam, robot) model
for page in range(1, 9): # Ensure this won't loop forever. 8 pages should be more than enough
url = self.FMS_API_EVENTTEAM_LIST_URL_PATTERN % (year, event_code, page)
result = self._parse(url, parser)
if result is None:
break
partial_models, more_pages = result
models.extend(partial_models)
if not more_pages:
break
return models
| {
"content_hash": "d654a93402f400a99774c71e364a4f56",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 187,
"avg_line_length": 50.81428571428572,
"alnum_prop": 0.6363040014993909,
"repo_name": "synth3tk/the-blue-alliance",
"id": "483869f5a20bfd0e4f9996a7435bd84c6a923d72",
"size": "10671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datafeeds/datafeed_fms_api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "101"
},
{
"name": "CSS",
"bytes": "372169"
},
{
"name": "HTML",
"bytes": "5373642"
},
{
"name": "JavaScript",
"bytes": "264246"
},
{
"name": "PHP",
"bytes": "10727"
},
{
"name": "Python",
"bytes": "1545327"
},
{
"name": "Ruby",
"bytes": "4677"
},
{
"name": "Shell",
"bytes": "413"
}
],
"symlink_target": ""
} |
import csv
import datetime
import json
import logging
import urlparse
from io import BytesIO
from django.conf import settings
from django.core.exceptions import ValidationError as DjangoValidationError
from django.core.urlresolvers import reverse
from django.core.validators import validate_ipv4_address, validate_ipv46_address
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from mongoengine.base import ValidationError
from crits.campaigns.forms import CampaignForm
from crits.campaigns.campaign import Campaign
from crits.config.config import CRITsConfig
from crits.core import form_consts
from crits.core.class_mapper import class_from_id
from crits.core.crits_mongoengine import EmbeddedSource, EmbeddedCampaign
from crits.core.crits_mongoengine import json_handler, Action
from crits.core.forms import SourceForm, DownloadFileForm
from crits.core.handlers import build_jtable, csv_export, action_add
from crits.core.handlers import jtable_ajax_list, jtable_ajax_delete
from crits.core.handlers import datetime_parser
from crits.core.user_tools import is_admin, user_sources
from crits.core.user_tools import is_user_subscribed, is_user_favorite
from crits.domains.domain import Domain
from crits.domains.handlers import upsert_domain, get_valid_root_domain
from crits.events.event import Event
from crits.indicators.forms import IndicatorActivityForm
from crits.indicators.indicator import Indicator
from crits.indicators.indicator import EmbeddedConfidence, EmbeddedImpact
from crits.ips.handlers import ip_add_update, validate_and_normalize_ip
from crits.ips.ip import IP
from crits.notifications.handlers import remove_user_from_notification
from crits.services.handlers import run_triage, get_supported_services
from crits.vocabulary.indicators import (
IndicatorTypes,
IndicatorThreatTypes,
IndicatorAttackTypes
)
from crits.vocabulary.ips import IPTypes
from crits.vocabulary.relationships import RelationshipTypes
from crits.vocabulary.status import Status
logger = logging.getLogger(__name__)
def generate_indicator_csv(request):
"""
Generate a CSV file of the Indicator information
:param request: The request for this CSV.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
response = csv_export(request, Indicator)
return response
def generate_indicator_jtable(request, option):
"""
Generate the jtable data for rendering in the list template.
:param request: The request for this jtable.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
obj_type = Indicator
type_ = "indicator"
mapper = obj_type._meta['jtable_opts']
if option == "jtlist":
# Sets display url
details_url = mapper['details_url']
details_url_key = mapper['details_url_key']
fields = mapper['fields']
response = jtable_ajax_list(obj_type,
details_url,
details_url_key,
request,
includes=fields)
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
if option == "jtdelete":
response = {"Result": "ERROR"}
if jtable_ajax_delete(obj_type, request):
response = {"Result": "OK"}
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
jtopts = {
'title': "Indicators",
'default_sort': mapper['default_sort'],
'listurl': reverse('crits.%ss.views.%ss_listing' % (type_,
type_),
args=('jtlist',)),
'deleteurl': reverse('crits.%ss.views.%ss_listing' % (type_,
type_),
args=('jtdelete',)),
'searchurl': reverse(mapper['searchurl']),
'fields': list(mapper['jtopts_fields']),
'hidden_fields': mapper['hidden_fields'],
'linked_fields': mapper['linked_fields'],
'details_link': mapper['details_link'],
'no_sort': mapper['no_sort']
}
config = CRITsConfig.objects().first()
if not config.splunk_search_url:
del jtopts['fields'][1]
jtable = build_jtable(jtopts, request)
jtable['toolbar'] = [
{
'tooltip': "'All Indicators'",
'text': "'All'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'New Indicators'",
'text': "'New'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'New'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'In Progress Indicators'",
'text': "'In Progress'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'In Progress'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Analyzed Indicators'",
'text': "'Analyzed'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'Analyzed'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Deprecated Indicators'",
'text': "'Deprecated'",
'click': "function () {$('#indicator_listing').jtable('load', {'refresh': 'yes', 'status': 'Deprecated'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Add Indicator'",
'text': "'Add Indicator'",
'click': "function () {$('#new-indicator').click()}",
},
]
if config.splunk_search_url:
for field in jtable['fields']:
if field['fieldname'].startswith("'splunk"):
field['display'] = """ function (data) {
return '<a href="%s' + data.record.value + '"><img src="/new_images/splunk.png" /></a>';
}
""" % config.splunk_search_url
if option == "inline":
return render_to_response("jtable.html",
{'jtable': jtable,
'jtid': '%s_listing' % type_,
'button': '%ss_tab' % type_},
RequestContext(request))
else:
return render_to_response("%s_listing.html" % type_,
{'jtable': jtable,
'jtid': '%s_listing' % type_},
RequestContext(request))
def get_indicator_details(indicator_id, analyst):
"""
Generate the data to render the Indicator details template.
:param indicator_id: The ObjectId of the Indicator to get details for.
:type indicator_id: str
:param analyst: The user requesting this information.
:type analyst: str
:returns: template (str), arguments (dict)
"""
template = None
users_sources = user_sources(analyst)
indicator = Indicator.objects(id=indicator_id,
source__name__in=users_sources).first()
if not indicator:
error = ("Either this indicator does not exist or you do "
"not have permission to view it.")
template = "error.html"
args = {'error': error}
return template, args
forms = {}
forms['new_activity'] = IndicatorActivityForm(initial={'analyst': analyst,
'date': datetime.datetime.now()})
forms['new_campaign'] = CampaignForm()
forms['new_source'] = SourceForm(analyst, initial={'date': datetime.datetime.now()})
forms['download_form'] = DownloadFileForm(initial={"obj_type": 'Indicator',
"obj_id": indicator_id})
indicator.sanitize("%s" % analyst)
# remove pending notifications for user
remove_user_from_notification("%s" % analyst, indicator_id, 'Indicator')
# subscription
subscription = {
'type': 'Indicator',
'id': indicator_id,
'subscribed': is_user_subscribed("%s" % analyst,
'Indicator',
indicator_id),
}
# relationship
relationship = {
'type': 'Indicator',
'value': indicator_id,
}
#objects
objects = indicator.sort_objects()
#relationships
relationships = indicator.sort_relationships("%s" % analyst, meta=True)
#comments
comments = {'comments': indicator.get_comments(),
'url_key': indicator_id}
#screenshots
screenshots = indicator.get_screenshots(analyst)
# favorites
favorite = is_user_favorite("%s" % analyst, 'Indicator', indicator.id)
# services
service_list = get_supported_services('Indicator')
# analysis results
service_results = indicator.get_analysis_results()
args = {'objects': objects,
'relationships': relationships,
'comments': comments,
'relationship': relationship,
'subscription': subscription,
"indicator": indicator,
"forms": forms,
"indicator_id": indicator_id,
'screenshots': screenshots,
'service_list': service_list,
'service_results': service_results,
'favorite': favorite,
'rt_url': settings.RT_URL}
return template, args
def get_indicator_type_value_pair(field):
"""
Extracts the type/value pair from a generic field. This is generally used on
fields that can become indicators such as objects or email fields.
The type/value pairs are used in indicator relationships
since indicators are uniquely identified via their type/value pair.
This function can be used in conjunction with:
crits.indicators.handlers.does_indicator_relationship_exist
Args:
field: The input field containing a type/value pair. This field is
generally from custom dictionaries such as from Django templates.
Returns:
A (type, value) tuple extracted from the input field, or
(None, None) if the field's layout is not supported.
"""
# this is an object
if field.get("name") != None and field.get("type") != None and field.get("value") != None:
name = field.get("name")
type = field.get("type")
value = field.get("value").lower().strip()
full_type = type
if type != name:
full_type = type + " - " + name
return (full_type, value)
# this is an email field
if field.get("field_type") != None and field.get("field_value") != None:
return (field.get("field_type"), field.get("field_value").lower().strip())
# otherwise the logic to extract the type/value pair from this
# specific field type is not supported
return (None, None)
def get_verified_field(data, valid_values, field=None, default=None):
"""
Validate and correct string value(s) in a dictionary key or list,
or a string by itself.
:param data: The data to be verified and corrected.
:type data: dict, list of strings, or str
:param valid_values: Key with simplified string, value with actual string
:type valid_values: dict
:param field: The dictionary key containing the data.
:type field: str
:param default: A value to use if an invalid item cannot be corrected
:type default: str
:returns: the validated/corrected value(str), list of values(list) or ''
"""
if isinstance(data, dict):
data = data.get(field, '')
if isinstance(data, list):
value_list = data
else:
value_list = [data]
for i, item in enumerate(value_list):
if isinstance(item, basestring):
item = item.lower().strip().replace(' - ', '-')
if item in valid_values:
value_list[i] = valid_values[item]
continue
if default is not None:
value_list[i] = default
continue
return ''
if isinstance(data, list):
return value_list
else:
return value_list[0]
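# Illustrative: with valid_ratings = {'high': 'high'}, calling
# get_verified_field({'Confidence': 'High '}, valid_ratings, 'Confidence',
# default='unknown') returns 'high'.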
def handle_indicator_csv(csv_data, source, method, reference, ctype, username,
add_domain=False):
"""
Handle adding Indicators in CSV format (file or blob).
:param csv_data: The CSV data.
:type csv_data: str or file handle
:param source: The name of the source for these indicators.
:type source: str
:param method: The method of acquisition of this indicator.
:type method: str
:param reference: The reference to this data.
:type reference: str
:param ctype: The CSV type.
:type ctype: str ("file" or "blob")
:param username: The user adding these indicators.
:type username: str
:param add_domain: If the indicators being added are also other top-level
objects, add those too.
:type add_domain: boolean
:returns: dict with keys "success" (boolean) and "message" (str)
"""
if ctype == "file":
cdata = csv_data.read()
else:
cdata = csv_data.encode('ascii')
data = csv.DictReader(BytesIO(cdata), skipinitialspace=True)
result = {'success': True}
result_message = ""
# Compute permitted values in CSV
valid_ratings = {
'unknown': 'unknown',
'benign': 'benign',
'low': 'low',
'medium': 'medium',
'high': 'high'}
valid_campaign_confidence = {
'low': 'low',
'medium': 'medium',
'high': 'high'}
valid_campaigns = {}
for c in Campaign.objects(active='on'):
valid_campaigns[c['name'].lower().replace(' - ', '-')] = c['name']
valid_actions = {}
for a in Action.objects(active='on'):
valid_actions[a['name'].lower().replace(' - ', '-')] = a['name']
valid_ind_types = {}
for obj in IndicatorTypes.values(sort=True):
valid_ind_types[obj.lower().replace(' - ', '-')] = obj
# Start line-by-line import
added = 0
processed = 0  # stays 0 if the CSV yields no data rows
for processed, d in enumerate(data, 1):
ind = {}
ind['value'] = d.get('Indicator', '').strip()
ind['lower'] = d.get('Indicator', '').lower().strip()
ind['description'] = d.get('Description', '').strip()
ind['type'] = get_verified_field(d, valid_ind_types, 'Type')
ind['threat_type'] = d.get('Threat Type', IndicatorThreatTypes.UNKNOWN)
ind['attack_type'] = d.get('Attack Type', IndicatorAttackTypes.UNKNOWN)
if len(ind['threat_type']) < 1:
ind['threat_type'] = IndicatorThreatTypes.UNKNOWN
if ind['threat_type'] not in IndicatorThreatTypes.values():
result['success'] = False
result_message += "Cannot process row %s: Invalid Threat Type<br />" % processed
continue
if len(ind['attack_type']) < 1:
ind['attack_type'] = IndicatorAttackTypes.UNKNOWN
if ind['attack_type'] not in IndicatorAttackTypes.values():
result['success'] = False
result_message += "Cannot process row %s: Invalid Attack Type<br />" % processed
continue
ind['status'] = d.get('Status', Status.NEW)
if not ind['value'] or not ind['type']:
# Mandatory value missing or malformed, cannot process csv row
i = ""
result['success'] = False
if not ind['value']:
i += "No valid Indicator value "
if not ind['type']:
i += "No valid Indicator type "
result_message += "Cannot process row %s: %s<br />" % (processed, i)
continue
campaign = get_verified_field(d, valid_campaigns, 'Campaign')
if campaign:
ind['campaign'] = campaign
ind['campaign_confidence'] = get_verified_field(d, valid_campaign_confidence,
'Campaign Confidence',
default='low')
actions = d.get('Action', '')
if actions:
actions = get_verified_field(actions.split(','), valid_actions)
if not actions:
result['success'] = False
result_message += "Cannot process row %s: Invalid Action<br />" % processed
continue
ind['confidence'] = get_verified_field(d, valid_ratings, 'Confidence',
default='unknown')
ind['impact'] = get_verified_field(d, valid_ratings, 'Impact',
default='unknown')
ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME] = d.get(form_consts.Common.BUCKET_LIST, '')
ind[form_consts.Common.TICKET_VARIABLE_NAME] = d.get(form_consts.Common.TICKET, '')
try:
response = handle_indicator_insert(ind, source, reference,
analyst=username, method=method,
add_domain=add_domain)
except Exception, e:
result['success'] = False
result_message += "Failure processing row %s: %s<br />" % (processed, str(e))
continue
if response['success']:
if actions:
action = {'active': 'on',
'analyst': username,
'begin_date': '',
'end_date': '',
'performed_date': '',
'reason': '',
'date': datetime.datetime.now()}
for action_type in actions:
action['action_type'] = action_type
action_add('Indicator', response.get('objectid'), action,
user=username)
else:
result['success'] = False
result_message += "Failure processing row %s: %s<br />" % (processed, response['message'])
continue
added += 1
if processed < 1:
result['success'] = False
result_message = "Could not find any valid CSV rows to parse!"
result['message'] = "Successfully added %s Indicator(s).<br />%s" % (added, result_message)
return result
def handle_indicator_ind(value, source, ctype, threat_type, attack_type,
analyst, method='', reference='',
add_domain=False, add_relationship=False, campaign=None,
campaign_confidence=None, confidence=None, description=None, impact=None,
bucket_list=None, ticket=None, cache={}):
"""
Handle adding an individual indicator.
:param value: The indicator value.
:type value: str
:param source: The name of the source for this indicator.
:type source: str
:param ctype: The indicator type.
:type ctype: str
:param threat_type: The indicator threat type.
:type threat_type: str
:param attack_type: The indicator attack type.
:type attack_type: str
:param analyst: The user adding this indicator.
:type analyst: str
:param method: The method of acquisition of this indicator.
:type method: str
:param reference: The reference to this data.
:type reference: str
:param add_domain: If the indicators being added are also other top-level
objects, add those too.
:type add_domain: boolean
:param add_relationship: If a relationship can be made, create it.
:type add_relationship: boolean
:param campaign: Campaign to attribute to this indicator.
:type campaign: str
:param campaign_confidence: Confidence of this campaign.
:type campaign_confidence: str
:param confidence: Indicator confidence.
:type confidence: str
:param description: The description of this data.
:type description: str
:param impact: Indicator impact.
:type impact: str
:param bucket_list: The bucket(s) to assign to this indicator.
:type bucket_list: str
:param ticket: Ticket to associate with this indicator.
:type ticket: str
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: dict with keys "success" (boolean) and "message" (str)
"""
result = None
if not source:
return {"success" : False, "message" : "Missing source information."}
if threat_type is None:
threat_type = IndicatorThreatTypes.UNKNOWN
if attack_type is None:
attack_type = IndicatorAttackTypes.UNKNOWN
if value == None or value.strip() == "":
result = {'success': False,
'message': "Can't create indicator with an empty value field"}
elif ctype == None or ctype.strip() == "":
result = {'success': False,
'message': "Can't create indicator with an empty type field"}
else:
ind = {}
ind['type'] = ctype.strip()
ind['threat_type'] = threat_type.strip()
ind['attack_type'] = attack_type.strip()
ind['value'] = value.strip()
ind['lower'] = value.lower().strip()
ind['description'] = description.strip() if description else ''
if campaign:
ind['campaign'] = campaign
if campaign_confidence and campaign_confidence in ('low', 'medium', 'high'):
ind['campaign_confidence'] = campaign_confidence
if confidence and confidence in ('unknown', 'benign', 'low', 'medium',
'high'):
ind['confidence'] = confidence
if impact and impact in ('unknown', 'benign', 'low', 'medium', 'high'):
ind['impact'] = impact
if bucket_list:
ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME] = bucket_list
if ticket:
ind[form_consts.Common.TICKET_VARIABLE_NAME] = ticket
try:
return handle_indicator_insert(ind, source, reference, analyst,
method, add_domain, add_relationship, cache=cache)
except Exception, e:
return {'success': False, 'message': repr(e)}
return result
def handle_indicator_insert(ind, source, reference='', analyst='', method='',
add_domain=False, add_relationship=False, cache={}):
"""
Insert an individual indicator into the database.
NOTE: Setting add_domain to True will always create a relationship as well.
However, to create a relationship with an object that already exists before
this function was called, set add_relationship to True. This will assume
that the domain or IP object to create the relationship with already exists
and will avoid infinite mutual calls between, for example, ip_add_update
and this function.
:param ind: Information about the indicator.
:type ind: dict
:param source: The source for this indicator.
:type source: list, str, :class:`crits.core.crits_mongoengine.EmbeddedSource`
:param reference: The reference to the data.
:type reference: str
:param analyst: The user adding this indicator.
:type analyst: str
:param method: Method of acquiring this indicator.
:type method: str
:param add_domain: If this indicator is also a top-level object, try to add
it.
:type add_domain: boolean
:param add_relationship: Attempt to add relationships if applicable.
:type add_relationship: boolean
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"objectid" (str) if successful,
"is_new_indicator" (boolean) if successful.
"""
if ind['type'] not in IndicatorTypes.values():
return {'success': False,
'message': "Not a valid Indicator Type: %s" % ind['type']}
if ind['threat_type'] not in IndicatorThreatTypes.values():
return {'success': False,
'message': "Not a valid Indicator Threat Type: %s" % ind['threat_type']}
if ind['attack_type'] not in IndicatorAttackTypes.values():
return {'success': False,
'message': "Not a valid Indicator Attack Type: " % ind['attack_type']}
(ind['value'], error) = validate_indicator_value(ind['value'], ind['type'])
if error:
return {"success": False, "message": error}
is_new_indicator = False
dmain = None
ip = None
rank = {
'unknown': 0,
'benign': 1,
'low': 2,
'medium': 3,
'high': 4,
}
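# rank lets the comparisons below raise, but never lower, an existing
# confidence/impact rating.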
if ind.get('status', None) is None or len(ind.get('status', '')) < 1:
ind['status'] = Status.NEW
indicator = Indicator.objects(ind_type=ind['type'],
lower=ind['lower'],
threat_type=ind['threat_type'],
attack_type=ind['attack_type']).first()
if not indicator:
indicator = Indicator()
indicator.ind_type = ind['type']
indicator.threat_type = ind['threat_type']
indicator.attack_type = ind['attack_type']
indicator.value = ind['value']
indicator.lower = ind['lower']
indicator.description = ind['description']
indicator.created = datetime.datetime.now()
indicator.confidence = EmbeddedConfidence(analyst=analyst)
indicator.impact = EmbeddedImpact(analyst=analyst)
indicator.status = ind['status']
is_new_indicator = True
else:
if ind['status'] != Status.NEW:
indicator.status = ind['status']
add_desc = "\nSeen on %s as: %s" % (str(datetime.datetime.now()),
ind['value'])
if indicator.description is None:
indicator.description = add_desc
else:
indicator.description += add_desc
if 'campaign' in ind:
if isinstance(ind['campaign'], basestring) and len(ind['campaign']) > 0:
confidence = ind.get('campaign_confidence', 'low')
ind['campaign'] = EmbeddedCampaign(name=ind['campaign'],
confidence=confidence,
description="",
analyst=analyst,
date=datetime.datetime.now())
if isinstance(ind['campaign'], EmbeddedCampaign):
indicator.add_campaign(ind['campaign'])
elif isinstance(ind['campaign'], list):
for campaign in ind['campaign']:
if isinstance(campaign, EmbeddedCampaign):
indicator.add_campaign(campaign)
if 'confidence' in ind and rank.get(ind['confidence'], 0) > rank.get(indicator.confidence.rating, 0):
indicator.confidence.rating = ind['confidence']
indicator.confidence.analyst = analyst
if 'impact' in ind and rank.get(ind['impact'], 0) > rank.get(indicator.impact.rating, 0):
indicator.impact.rating = ind['impact']
indicator.impact.analyst = analyst
bucket_list = None
if form_consts.Common.BUCKET_LIST_VARIABLE_NAME in ind:
bucket_list = ind[form_consts.Common.BUCKET_LIST_VARIABLE_NAME]
if bucket_list:
indicator.add_bucket_list(bucket_list, analyst)
ticket = None
if form_consts.Common.TICKET_VARIABLE_NAME in ind:
ticket = ind[form_consts.Common.TICKET_VARIABLE_NAME]
if ticket:
indicator.add_ticket(ticket, analyst)
if isinstance(source, list):
for s in source:
indicator.add_source(source_item=s, method=method, reference=reference)
elif isinstance(source, EmbeddedSource):
indicator.add_source(source_item=source, method=method, reference=reference)
elif isinstance(source, basestring):
s = EmbeddedSource()
s.name = source
instance = EmbeddedSource.SourceInstance()
instance.reference = reference
instance.method = method
instance.analyst = analyst
instance.date = datetime.datetime.now()
s.instances = [instance]
indicator.add_source(s)
if add_domain or add_relationship:
ind_type = indicator.ind_type
ind_value = indicator.lower
url_contains_ip = False
if ind_type in (IndicatorTypes.DOMAIN,
IndicatorTypes.URI):
if ind_type == IndicatorTypes.URI:
domain_or_ip = urlparse.urlparse(ind_value).hostname
try:
validate_ipv46_address(domain_or_ip)
url_contains_ip = True
except DjangoValidationError:
pass
else:
domain_or_ip = ind_value
if not url_contains_ip:
success = None
if add_domain:
success = upsert_domain(domain_or_ip,
indicator.source,
username='%s' % analyst,
campaign=indicator.campaign,
bucket_list=bucket_list,
cache=cache)
if not success['success']:
return {'success': False, 'message': success['message']}
            if not success or 'object' not in success:
dmain = Domain.objects(domain=domain_or_ip).first()
else:
dmain = success['object']
if ind_type in IPTypes.values() or url_contains_ip:
if url_contains_ip:
ind_value = domain_or_ip
try:
validate_ipv4_address(domain_or_ip)
ind_type = IndicatorTypes.IPV4_ADDRESS
except DjangoValidationError:
ind_type = IndicatorTypes.IPV6_ADDRESS
success = None
if add_domain:
success = ip_add_update(ind_value,
ind_type,
source=indicator.source,
campaign=indicator.campaign,
analyst=analyst,
bucket_list=bucket_list,
ticket=ticket,
indicator_reference=reference,
cache=cache)
if not success['success']:
return {'success': False, 'message': success['message']}
            if not success or 'object' not in success:
                ip = IP.objects(ip=ind_value).first()
else:
ip = success['object']
indicator.save(username=analyst)
if dmain:
dmain.add_relationship(indicator,
RelationshipTypes.RELATED_TO,
analyst="%s" % analyst,
get_rels=False)
dmain.save(username=analyst)
if ip:
ip.add_relationship(indicator,
RelationshipTypes.RELATED_TO,
analyst="%s" % analyst,
get_rels=False)
ip.save(username=analyst)
# run indicator triage
if is_new_indicator:
indicator.reload()
run_triage(indicator, analyst)
return {'success': True, 'objectid': str(indicator.id),
'is_new_indicator': is_new_indicator, 'object': indicator}
def does_indicator_relationship_exist(field, indicator_relationships):
"""
    Checks whether the input field's values already have an associated
    indicator by cross-checking them against the list of indicator
    relationships. Since indicators are uniquely identified by their
    type/value pair, a corresponding indicator already exists if the input
    field's "type" and "value" pair appears in that list.
Args:
field: The generic input field containing a type/value pair. This is
checked against a list of indicators relationships to see if a
corresponding indicator already exists. This field is generally
from custom dictionaries such as from Django templates.
indicator_relationships: The list of indicator relationships
to cross reference the input field against.
Returns:
Returns true if the input field already has an indicator associated
with its values. Returns false otherwise.
"""
type, value = get_indicator_type_value_pair(field)
    if indicator_relationships is not None:
        if type is not None and value is not None:
            for indicator_relationship in indicator_relationships:
                if indicator_relationship is None:
logger.error('Indicator relationship is not valid: ' +
str(indicator_relationship))
continue
if type == indicator_relationship.get('ind_type') and value == indicator_relationship.get('ind_value'):
return True
else:
        logger.error('Could not extract type/value pair of input field; '
                     'type: ' + str(type) +
                     ', value: ' + (value.encode("utf-8") if value else str(value)) +
                     ', indicator_relationships: ' + str(indicator_relationships))
return False
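As a minimal illustration of the helper above (not part of the original module), the field dict and relationship list below are hypothetical, and it is assumed that get_indicator_type_value_pair() accepts a {'type': ..., 'value': ...} mapping:
field = {'type': 'Domain', 'value': 'example.com'}           # hypothetical input field
rels = [{'ind_type': 'Domain', 'ind_value': 'example.com'}]  # hypothetical relationships
if does_indicator_relationship_exist(field, rels):
    print('an indicator already exists for this field')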
def ci_search(itype, confidence, impact, actions):
"""
Find indicators based on type, confidence, impact, and/or actions.
:param itype: The indicator type to search for.
:type itype: str
:param confidence: The confidence level(s) to search for.
:type confidence: str
:param impact: The impact level(s) to search for.
:type impact: str
:param actions: The action(s) to search for.
:type actions: str
:returns: :class:`crits.core.crits_mongoengine.CritsQuerySet`
"""
query = {}
if confidence:
item_list = confidence.replace(' ', '').split(',')
query["confidence.rating"] = {"$in": item_list}
if impact:
item_list = impact.replace(' ', '').split(',')
query["impact.rating"] = {"$in": item_list}
if actions:
item_list = actions.split(',')
query["actions.action_type"] = {"$in": item_list}
query["type"] = "%s" % itype.strip()
result_filter = ('type', 'value', 'confidence', 'impact', 'actions')
results = Indicator.objects(__raw__=query).only(*result_filter)
return results
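For illustration, ci_search() could be driven as follows; the rating strings are examples and a live MongoDB connection with existing Indicator documents is assumed:
for hit in ci_search(IndicatorTypes.IPV4_ADDRESS, 'medium, high', 'high', None):
    print(hit.value, hit.confidence.rating, hit.impact.rating)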
def set_indicator_type(indicator_id, itype, username):
"""
Set the Indicator type.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param itype: The new indicator type.
:type itype: str
:param username: The user updating the indicator.
:type username: str
:returns: dict with key "success" (boolean)
"""
# check to ensure we're not duping an existing indicator
indicator = Indicator.objects(id=indicator_id).first()
value = indicator.value
ind_check = Indicator.objects(ind_type=itype, value=value).first()
if ind_check:
# we found a dupe
return {'success': False}
else:
try:
indicator.ind_type = itype
indicator.save(username=username)
return {'success': True}
except ValidationError:
return {'success': False}
def set_indicator_threat_type(id_, threat_type, user, **kwargs):
"""
Set the Indicator threat type.
    :param id_: The ObjectId of the indicator to update.
    :type id_: str
:param threat_type: The new indicator threat type.
:type threat_type: str
:param user: The user updating the indicator.
:type user: str
:returns: dict with key "success" (boolean)
"""
# check to ensure we're not duping an existing indicator
indicator = Indicator.objects(id=id_).first()
value = indicator.value
ind_check = Indicator.objects(threat_type=threat_type, value=value).first()
if ind_check:
# we found a dupe
return {'success': False,
'message': "Duplicate would exist making this change."}
elif threat_type not in IndicatorThreatTypes.values():
return {'success': False,
'message': "Not a valid Threat Type."}
else:
try:
indicator.threat_type = threat_type
indicator.save(username=user)
return {'success': True}
except ValidationError:
return {'success': False}
def set_indicator_attack_type(id_, attack_type, user, **kwargs):
"""
Set the Indicator attack type.
    :param id_: The ObjectId of the indicator to update.
    :type id_: str
:param attack_type: The new indicator attack type.
:type attack_type: str
:param user: The user updating the indicator.
:type user: str
:returns: dict with key "success" (boolean)
"""
# check to ensure we're not duping an existing indicator
indicator = Indicator.objects(id=id_).first()
value = indicator.value
ind_check = Indicator.objects(attack_type=attack_type, value=value).first()
if ind_check:
# we found a dupe
return {'success': False,
'message': "Duplicate would exist making this change."}
elif attack_type not in IndicatorAttackTypes.values():
return {'success': False,
'message': "Not a valid Attack Type."}
else:
try:
indicator.attack_type = attack_type
indicator.save(username=user)
return {'success': True}
except ValidationError:
return {'success': False}
def indicator_remove(_id, username):
"""
Remove an Indicator from CRITs.
:param _id: The ObjectId of the indicator to remove.
:type _id: str
:param username: The user removing the indicator.
:type username: str
:returns: dict with keys "success" (boolean) and "message" (list) if failed.
"""
if is_admin(username):
indicator = Indicator.objects(id=_id).first()
if indicator:
indicator.delete(username=username)
return {'success': True}
else:
return {'success': False, 'message': ['Cannot find Indicator']}
else:
return {'success': False, 'message': ['Must be an admin to delete']}
def activity_add(id_, activity, user, **kwargs):
"""
Add activity to an Indicator.
:param id_: The ObjectId of the indicator to update.
:type id_: str
:param activity: The activity information.
:type activity: dict
    :param user: The user adding the activity.
:type user: str
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"object" (dict) if successful.
"""
sources = user_sources(user)
indicator = Indicator.objects(id=id_,
source__name__in=sources).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
activity['analyst'] = user
indicator.add_activity(activity['analyst'],
activity['start_date'],
activity['end_date'],
activity['description'],
activity['date'])
indicator.save(username=user)
return {'success': True, 'object': activity,
'id': str(indicator.id)}
    except ValidationError as e:
return {'success': False, 'message': e,
'id': str(indicator.id)}
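A hedged sketch of the activity dict this function consumes, based on the keys read above; the id and field values are placeholders:
activity = {'start_date': datetime.datetime(2015, 1, 1),
            'end_date': datetime.datetime(2015, 1, 2),
            'description': 'observed in phishing campaign',  # hypothetical
            'date': datetime.datetime.now()}
result = activity_add(indicator_id, activity, 'analyst1')    # indicator_id assumed to exist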
def activity_update(id_, activity, user=None, **kwargs):
"""
Update activity for an Indicator.
:param id_: The ObjectId of the indicator to update.
:type id_: str
:param activity: The activity information.
:type activity: dict
:param user: The user updating the activity.
:type user: str
:returns: dict with keys:
"success" (boolean),
"message" (str) if failed,
"object" (dict) if successful.
"""
sources = user_sources(user)
indicator = Indicator.objects(id=id_,
source__name__in=sources).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
activity = datetime_parser(activity)
activity['analyst'] = user
indicator.edit_activity(activity['analyst'],
activity['start_date'],
activity['end_date'],
activity['description'],
activity['date'])
indicator.save(username=user)
return {'success': True, 'object': activity}
    except ValidationError as e:
return {'success': False, 'message': e}
def activity_remove(id_, date, user, **kwargs):
"""
Remove activity from an Indicator.
:param id_: The ObjectId of the indicator to update.
:type id_: str
:param date: The date of the activity to remove.
:type date: datetime.datetime
:param user: The user removing this activity.
:type user: str
:returns: dict with keys "success" (boolean) and "message" (str) if failed.
"""
indicator = Indicator.objects(id=id_).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
try:
date = datetime_parser(date)
indicator.delete_activity(date)
indicator.save(username=user)
return {'success': True}
    except ValidationError as e:
return {'success': False, 'message': e}
def ci_update(id_, ci_type, value, user, **kwargs):
"""
Update confidence or impact for an indicator.
:param id_: The ObjectId of the indicator to update.
:type id_: str
:param ci_type: What we are updating.
:type ci_type: str ("confidence" or "impact")
:param value: The value to set.
:type value: str ("unknown", "benign", "low", "medium", "high")
:param user: The user updating this indicator.
    :type user: str
:returns: dict with keys "success" (boolean) and "message" (str) if failed.
"""
indicator = Indicator.objects(id=id_).first()
if not indicator:
return {'success': False,
'message': 'Could not find Indicator'}
if ci_type == "confidence" or ci_type == "impact":
try:
if ci_type == "confidence":
indicator.set_confidence(user, value)
else:
indicator.set_impact(user, value)
indicator.save(username=user)
return {'success': True}
        except ValidationError as e:
return {'success': False, "message": e}
else:
return {'success': False, 'message': 'Invalid CI type'}
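Assuming an existing indicator, bumping its confidence is a one-liner (sketch only; indicator_id is hypothetical):
result = ci_update(indicator_id, 'confidence', 'high', 'analyst1')
if not result['success']:
    print(result['message'])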
def create_indicator_and_ip(type_, id_, ip, analyst):
"""
Add indicators for an IP address.
:param type_: The CRITs top-level object we are getting this IP from.
:type type_: class which inherits from
:class:`crits.core.crits_mongoengine.CritsBaseAttributes`
:param id_: The ObjectId of the top-level object to search for.
:type id_: str
:param ip: The IP address to generate an indicator out of.
:type ip: str
:param analyst: The user adding this indicator.
:type analyst: str
:returns: dict with keys:
"success" (boolean),
"message" (str),
"value" (str)
"""
obj_class = class_from_id(type_, id_)
if obj_class:
ip_class = IP.objects(ip=ip).first()
ind_type = IPTypes.IPV4_ADDRESS
ind_class = Indicator.objects(ind_type=ind_type, value=ip).first()
# setup IP
if ip_class:
ip_class.add_relationship(obj_class,
RelationshipTypes.RELATED_TO,
analyst=analyst)
else:
ip_class = IP()
ip_class.ip = ip
ip_class.source = obj_class.source
ip_class.save(username=analyst)
ip_class.add_relationship(obj_class,
RelationshipTypes.RELATED_TO,
analyst=analyst)
# setup Indicator
message = ""
if ind_class:
message = ind_class.add_relationship(obj_class,
RelationshipTypes.RELATED_TO,
analyst=analyst)
ind_class.add_relationship(ip_class,
RelationshipTypes.RELATED_TO,
analyst=analyst)
else:
ind_class = Indicator()
ind_class.source = obj_class.source
ind_class.ind_type = ind_type
ind_class.value = ip
ind_class.save(username=analyst)
message = ind_class.add_relationship(obj_class,
RelationshipTypes.RELATED_TO,
analyst=analyst)
ind_class.add_relationship(ip_class,
RelationshipTypes.RELATED_TO,
analyst=analyst)
# save
try:
obj_class.save(username=analyst)
ip_class.save(username=analyst)
ind_class.save(username=analyst)
if message['success']:
rels = obj_class.sort_relationships("%s" % analyst, meta=True)
return {'success': True, 'message': rels, 'value': obj_class.id}
else:
return {'success': False, 'message': message['message']}
        except Exception as e:
return {'success': False, 'message': e}
else:
return {'success': False,
'message': "Could not find %s to add relationships" % type_}
def create_indicator_from_tlo(tlo_type, tlo, analyst, source_name=None,
tlo_id=None, ind_type=None, value=None,
update_existing=True, add_domain=True):
"""
Create an indicator from a Top-Level Object (TLO).
:param tlo_type: The CRITs type of the parent TLO.
:type tlo_type: str
:param tlo: A CRITs parent TLO class object
:type tlo: class - some CRITs TLO
:param analyst: The user creating this indicator.
:type analyst: str
:param source_name: The source name for the new source instance that
records this indicator being added.
:type source_name: str
:param tlo_id: The ObjectId of the parent TLO.
:type tlo_id: str
:param ind_type: The indicator type, if TLO is not Domain or IP.
:type ind_type: str
:param value: The value of the indicator, if TLO is not Domain or IP.
:type value: str
:param update_existing: If Indicator already exists, update it
:type update_existing: boolean
:param add_domain: If new indicator contains a domain/ip, add a
matching Domain or IP TLO
:type add_domain: boolean
:returns: dict with keys:
"success" (boolean),
"message" (str),
"value" (str),
"indicator" :class:`crits.indicators.indicator.Indicator`
"""
if not tlo:
tlo = class_from_id(tlo_type, tlo_id)
if not tlo:
return {'success': False,
'message': "Could not find %s" % tlo_type}
source = tlo.source
campaign = tlo.campaign
bucket_list = tlo.bucket_list
tickets = tlo.tickets
# If value and ind_type provided, use them instead of defaults
if tlo_type == "Domain":
value = value or tlo.domain
ind_type = ind_type or IndicatorTypes.DOMAIN
elif tlo_type == "IP":
value = value or tlo.ip
ind_type = ind_type or tlo.ip_type
elif tlo_type == "Indicator":
value = value or tlo.value
ind_type = ind_type or tlo.ind_type
if not value or not ind_type: # if not provided & no default
return {'success': False,
'message': "Indicator value & type must be provided"
"for TLO of type %s" % tlo_type}
    # check if indicator already exists
if Indicator.objects(ind_type=ind_type,
value=value).first() and not update_existing:
return {'success': False, 'message': "Indicator already exists"}
result = handle_indicator_ind(value, source,
ctype=ind_type,
threat_type=IndicatorThreatTypes.UNKNOWN,
attack_type=IndicatorAttackTypes.UNKNOWN,
analyst=analyst,
add_domain=add_domain,
add_relationship=True,
campaign=campaign,
bucket_list=bucket_list,
ticket=tickets)
if result['success']:
ind = Indicator.objects(id=result['objectid']).first()
if ind:
if source_name:
# add source to show when indicator was created/updated
                ind.add_source(source=source_name,
                               method='Indicator created/updated '
                                      'from %s with ID %s' % (tlo_type, tlo.id),
                               date=datetime.datetime.now(),
                               analyst=analyst)
tlo.add_relationship(ind,
RelationshipTypes.RELATED_TO,
analyst=analyst)
tlo.save(username=analyst)
for rel in tlo.relationships:
if rel.rel_type == "Event":
# Get event object to pass in.
rel_item = Event.objects(id=rel.object_id).first()
if rel_item:
ind.add_relationship(rel_item,
RelationshipTypes.RELATED_TO,
analyst=analyst)
ind.save(username=analyst)
tlo.reload()
rels = tlo.sort_relationships("%s" % analyst, meta=True)
return {'success': True, 'message': rels,
'value': tlo.id, 'indicator': ind}
else:
return {'success': False, 'message': "Failed to create Indicator"}
else:
return result
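A hedged example of promoting an existing Domain TLO into an indicator by id; domain_id and the source name are hypothetical, and a configured database is assumed:
result = create_indicator_from_tlo('Domain', None, 'analyst1',
                                   source_name='example-feed',  # hypothetical
                                   tlo_id=domain_id)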
def validate_indicator_value(value, ind_type):
"""
Check that a given value is valid for a particular Indicator type.
:param value: The value to be validated
:type value: str
:param ind_type: The indicator type to validate against
:type ind_type: str
:returns: tuple: (Valid value, Error message)
"""
value = value.strip()
domain = ""
# URL
if ind_type == IndicatorTypes.URI:
if "://" not in value.split('.')[0]:
return ("", "URI must contain protocol "
"prefix (e.g. http://, https://, ftp://) ")
domain_or_ip = urlparse.urlparse(value).hostname
try:
validate_ipv46_address(domain_or_ip)
return (value, "")
except DjangoValidationError:
domain = domain_or_ip
# Email address
if ind_type in (IndicatorTypes.EMAIL_ADDRESS,
IndicatorTypes.EMAIL_FROM,
IndicatorTypes.EMAIL_REPLY_TO,
IndicatorTypes.EMAIL_SENDER):
if '@' not in value:
return ("", "Email address must contain an '@'")
domain_or_ip = value.split('@')[-1]
        if domain_or_ip.startswith('[') and domain_or_ip.endswith(']'):
try:
validate_ipv46_address(domain_or_ip[1:-1])
return (value, "")
except DjangoValidationError:
return ("", "Email address does not contain a valid IP")
else:
domain = domain_or_ip
# IPs
if ind_type in IPTypes.values():
(ip_address, error) = validate_and_normalize_ip(value, ind_type)
if error:
return ("", error)
else:
return (ip_address, "")
# Domains
if ind_type in (IndicatorTypes.DOMAIN,
IndicatorTypes.URI) or domain:
(root, domain, error) = get_valid_root_domain(domain or value)
if error:
return ("", error)
else:
return (value, "")
return (value, "")
| {
"content_hash": "f0ba339ae69995350f2352465aea9474",
"timestamp": "",
"source": "github",
"line_count": 1358,
"max_line_length": 122,
"avg_line_length": 39.57216494845361,
"alnum_prop": 0.5661065520385568,
"repo_name": "dreardon/crits",
"id": "2a0dba3321b579db03bcdc7c33e292487a39d093",
"size": "53739",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crits/indicators/handlers.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "8694"
},
{
"name": "CSS",
"bytes": "390510"
},
{
"name": "HTML",
"bytes": "477790"
},
{
"name": "JavaScript",
"bytes": "3518716"
},
{
"name": "Python",
"bytes": "1954489"
},
{
"name": "Shell",
"bytes": "19524"
}
],
"symlink_target": ""
} |
from cvars.public import PublicConVar
from plugins.info import PluginInfo
info = PluginInfo()
info.name = "KillerInfo"
info.basename = 'killerinfo'
info.author = 'Luka "craziest" Volovec'
info.version = '0.1'
info.variable = 'killerinfo_version'
info.convar = PublicConVar(
info.variable, info.version, "{} version".format(info.name))
info.url = "https://github.com/craziest/sp-killerinfo" | {
"content_hash": "f208516db61c849cce55bfd164a67805",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 64,
"avg_line_length": 28.285714285714285,
"alnum_prop": 0.7474747474747475,
"repo_name": "craziest/sp-killerinfo",
"id": "1df76e17282fabcd52ac0593fdfc4d2d786f796a",
"size": "396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "addons/source-python/plugins/killerinfo/info.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1938"
}
],
"symlink_target": ""
} |
from ansible.cache.base import BaseCacheModule
class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs):
self._cache = {}
def get(self, key):
return self._cache.get(key)
def set(self, key, value):
self._cache[key] = value
def keys(self):
return self._cache.keys()
def contains(self, key):
return key in self._cache
def delete(self, key):
del self._cache[key]
def flush(self):
self._cache = {}
def copy(self):
return self._cache.copy()
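A quick self-contained usage sketch of this in-memory fact cache:
cache = CacheModule()
cache.set('host1', {'ansible_os_family': 'Debian'})  # example host fact
assert cache.contains('host1')
print(cache.get('host1'))
cache.flush()
assert cache.keys() == []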
| {
"content_hash": "7de58577fba225273859c15a1ff09ccf",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 46,
"avg_line_length": 20.555555555555557,
"alnum_prop": 0.5783783783783784,
"repo_name": "mith1979/ansible_automation",
"id": "735ed32893e035647993eaa50762593e6c12a45d",
"size": "1252",
"binary": false,
"copies": "133",
"ref": "refs/heads/master",
"path": "applied_python/applied_python/lib/python2.7/site-packages/ansible/cache/memory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1005"
},
{
"name": "C",
"bytes": "84868"
},
{
"name": "CSS",
"bytes": "50289"
},
{
"name": "HTML",
"bytes": "70428"
},
{
"name": "JavaScript",
"bytes": "105262"
},
{
"name": "PowerShell",
"bytes": "51840"
},
{
"name": "Python",
"bytes": "19073705"
},
{
"name": "Shell",
"bytes": "3747"
},
{
"name": "XSLT",
"bytes": "152770"
}
],
"symlink_target": ""
} |
"""
This is the main module in vnc_cfg_api_server package. It manages interaction
between http/rest, address management, authentication and database interfaces.
"""
from gevent import monkey
monkey.patch_all()
from gevent import hub
# From the neutron plugin to the api server, request URLs can be very long,
# so raise gevent's request-line size limit accordingly.
import gevent.pywsgi
gevent.pywsgi.MAX_REQUEST_LINE = 65535
import sys
reload(sys)
sys.setdefaultencoding('UTF8')
import functools
import logging
import logging.config
import signal
import os
import re
import socket
from cfgm_common import jsonutils as json
from provision_defaults import *
import uuid
import copy
from pprint import pformat
from cStringIO import StringIO
from lxml import etree
# import GreenletProfiler
logger = logging.getLogger(__name__)
"""
Following is needed to silence warnings on every request when keystone
auth_token middleware + Sandesh is used. Keystone or Sandesh alone
do not produce these warnings.
Exception AttributeError: AttributeError(
"'_DummyThread' object has no attribute '_Thread__block'",)
in <module 'threading' from '/usr/lib64/python2.7/threading.pyc'> ignored
See http://stackoverflow.com/questions/13193278/understand-python-threading-bug
for more information.
"""
import threading
threading._DummyThread._Thread__stop = lambda x: 42
CONFIG_VERSION = '1.0'
import bottle
bottle.BaseRequest.MEMFILE_MAX = 1024000
import utils
import context
from context import get_request, get_context, set_context, use_context
from context import ApiContext
import vnc_cfg_types
from vnc_cfg_ifmap import VncDbClient
import cfgm_common
from cfgm_common import ignore_exceptions, imid
from cfgm_common.uve.vnc_api.ttypes import VncApiCommon, VncApiConfigLog,\
VncApiDebug, VncApiInfo, VncApiNotice, VncApiError
from cfgm_common import illegal_xml_chars_RE
from sandesh_common.vns.ttypes import Module
from sandesh_common.vns.constants import ModuleNames, Module2NodeType,\
NodeTypeNames, INSTANCE_ID_DEFAULT, API_SERVER_DISCOVERY_SERVICE_NAME,\
IFMAP_SERVER_DISCOVERY_SERVICE_NAME
from provision_defaults import Provision
from vnc_quota import *
from gen.resource_xsd import *
from gen.resource_common import *
from gen.vnc_api_client_gen import all_resource_type_tuples
import cfgm_common
from cfgm_common.utils import cgitb_hook
from cfgm_common.rest import LinkObject, hdr_server_tenant
from cfgm_common.exceptions import *
from cfgm_common.vnc_extensions import ExtensionManager
import gen.resource_xsd
import vnc_addr_mgmt
import vnc_auth
import vnc_auth_keystone
import vnc_perms
import vnc_rbac
from cfgm_common import vnc_cpu_info
from cfgm_common.vnc_api_stats import log_api_stats
from pysandesh.sandesh_base import *
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
import discoveryclient.client as client
# from gen_py.vnc_api.ttypes import *
import netifaces
from pysandesh.connection_info import ConnectionState
from cfgm_common.uve.nodeinfo.ttypes import NodeStatusUVE, \
NodeStatus
from sandesh.discovery_client_stats import ttypes as sandesh
from sandesh.traces.ttypes import RestApiTrace
from vnc_bottle import get_bottle_server
_ACTION_RESOURCES = [
{'uri': '/prop-collection-get', 'link_name': 'prop-collection-get',
'method': 'GET', 'method_name': 'prop_collection_http_get'},
{'uri': '/prop-collection-update', 'link_name': 'prop-collection-update',
'method': 'POST', 'method_name': 'prop_collection_update_http_post'},
{'uri': '/ref-update', 'link_name': 'ref-update',
'method': 'POST', 'method_name': 'ref_update_http_post'},
{'uri': '/ref-relax-for-delete', 'link_name': 'ref-relax-for-delete',
'method': 'POST', 'method_name': 'ref_relax_for_delete_http_post'},
{'uri': '/fqname-to-id', 'link_name': 'name-to-id',
'method': 'POST', 'method_name': 'fq_name_to_id_http_post'},
{'uri': '/id-to-fqname', 'link_name': 'id-to-name',
'method': 'POST', 'method_name': 'id_to_fq_name_http_post'},
# ifmap-to-id only for ifmap subcribers using rest for publish
{'uri': '/ifmap-to-id', 'link_name': 'ifmap-to-id',
'method': 'POST', 'method_name': 'ifmap_to_id_http_post'},
{'uri': '/useragent-kv', 'link_name': 'useragent-keyvalue',
'method': 'POST', 'method_name': 'useragent_kv_http_post'},
{'uri': '/db-check', 'link_name': 'database-check',
'method': 'POST', 'method_name': 'db_check'},
{'uri': '/fetch-records', 'link_name': 'fetch-records',
'method': 'POST', 'method_name': 'fetch_records'},
{'uri': '/start-profile', 'link_name': 'start-profile',
'method': 'POST', 'method_name': 'start_profile'},
{'uri': '/stop-profile', 'link_name': 'stop-profile',
'method': 'POST', 'method_name': 'stop_profile'},
{'uri': '/list-bulk-collection', 'link_name': 'list-bulk-collection',
'method': 'POST', 'method_name': 'list_bulk_collection_http_post'},
{'uri': '/obj-perms', 'link_name': 'obj-perms',
'method': 'GET', 'method_name': 'obj_perms_http_get'},
{'uri': '/chown', 'link_name': 'chown',
'method': 'POST', 'method_name': 'obj_chown_http_post'},
{'uri': '/chmod', 'link_name': 'chmod',
'method': 'POST', 'method_name': 'obj_chmod_http_post'},
{'uri': '/multi-tenancy', 'link_name': 'multi-tenancy',
'method': 'PUT', 'method_name': 'mt_http_put'},
{'uri': '/aaa-mode', 'link_name': 'aaa-mode',
'method': 'PUT', 'method_name': 'aaa_mode_http_put'},
]
def error_400(err):
return err.body
# end error_400
def error_403(err):
return err.body
# end error_403
def error_404(err):
return err.body
# end error_404
def error_409(err):
return err.body
# end error_409
@bottle.error(412)
def error_412(err):
return err.body
# end error_412
def error_500(err):
return err.body
# end error_500
def error_503(err):
return err.body
# end error_503
class VncApiServer(object):
"""
This is the manager class co-ordinating all classes present in the package
"""
_INVALID_NAME_CHARS = set(':')
_GENERATE_DEFAULT_INSTANCE = [
'namespace',
'project',
'virtual_network', 'virtual-network',
'network_ipam', 'network-ipam',
]
def __new__(cls, *args, **kwargs):
obj = super(VncApiServer, cls).__new__(cls, *args, **kwargs)
obj.api_bottle = bottle.Bottle()
obj.route('/', 'GET', obj.homepage_http_get)
obj.api_bottle.error_handler = {
400: error_400,
403: error_403,
404: error_404,
409: error_409,
500: error_500,
503: error_503,
}
cls._generate_resource_crud_methods(obj)
cls._generate_resource_crud_uri(obj)
for act_res in _ACTION_RESOURCES:
http_method = act_res.get('method', 'POST')
method_name = getattr(obj, act_res['method_name'])
obj.route(act_res['uri'], http_method, method_name)
return obj
# end __new__
@classmethod
def _validate_complex_type(cls, dict_cls, dict_body):
if dict_body is None:
return
for key, value in dict_body.items():
if key not in dict_cls.attr_fields:
raise ValueError('class %s does not have field %s' % (
str(dict_cls), key))
attr_type_vals = dict_cls.attr_field_type_vals[key]
attr_type = attr_type_vals['attr_type']
restrictions = attr_type_vals['restrictions']
is_array = attr_type_vals.get('is_array', False)
if value is None:
continue
if is_array:
if not isinstance(value, list):
raise ValueError('Field %s must be a list. Received value: %s'
% (key, str(value)))
values = value
else:
values = [value]
if attr_type_vals['is_complex']:
attr_cls = cfgm_common.utils.str_to_class(attr_type, __name__)
for item in values:
cls._validate_complex_type(attr_cls, item)
else:
simple_type = attr_type_vals['simple_type']
for item in values:
cls._validate_simple_type(key, attr_type,
simple_type, item,
restrictions)
# end _validate_complex_type
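    # Illustrative sketch (not executed here): the minimal contract this
    # validator expects from generated attribute classes. _FakeAttr is a
    # hypothetical stand-in, not a real schema type.
    #
    #   class _FakeAttr(object):
    #       attr_fields = set(['count'])
    #       attr_field_type_vals = {'count': {'attr_type': 'integer',
    #                                         'simple_type': None,
    #                                         'restrictions': ['0', '100'],
    #                                         'is_array': False,
    #                                         'is_complex': False}}
    #
    #   VncApiServer._validate_complex_type(_FakeAttr, {'count': 42})  # passes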
@classmethod
def _validate_communityattribute_type(cls, value):
poss_values = ["no-export",
"accept-own",
"no-advertise",
"no-export-subconfed",
"no-reoriginate"]
if value in poss_values:
return
        res = re.match(r'^[0-9]+:[0-9]+$', value)
if res is None:
raise ValueError('Invalid community format %s. '
'Change to \'number:number\''
% value)
asn = value.split(':')
if int(asn[0]) > 65535:
raise ValueError('Out of range ASN value %s. '
'ASN values cannot exceed 65535.'
% value)
@classmethod
def _validate_simple_type(cls, type_name, xsd_type, simple_type, value, restrictions=None):
if value is None:
return
elif xsd_type in ('unsignedLong', 'integer'):
if not isinstance(value, (int, long)):
# If value is not an integer, then try to convert it to integer
try:
value = int(value)
except (TypeError, ValueError):
raise ValueError('%s: integer value expected instead of %s' %(
type_name, value))
if restrictions:
if not (int(restrictions[0]) <= value <= int(restrictions[1])):
raise ValueError('%s: value must be between %s and %s' %(
type_name, restrictions[0], restrictions[1]))
elif xsd_type == 'boolean':
if not isinstance(value, bool):
raise ValueError('%s: true/false expected instead of %s' %(
type_name, value))
elif xsd_type == 'string' and simple_type == 'CommunityAttribute':
cls._validate_communityattribute_type(value)
else:
if not isinstance(value, basestring):
raise ValueError('%s: string value expected instead of %s' %(
type_name, value))
if restrictions and value not in restrictions:
raise ValueError('%s: value must be one of %s' % (
type_name, str(restrictions)))
return value
# end _validate_simple_type
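    # For example (sketch, not executed here): an 'integer' xsd_type is
    # coerced from a string and range-checked against its restrictions:
    #
    #   VncApiServer._validate_simple_type('asn', 'integer', None,
    #                                      '64512', ['1', '65535'])  # -> 64512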
def _validate_props_in_request(self, resource_class, obj_dict):
for prop_name in resource_class.prop_fields:
prop_field_types = resource_class.prop_field_types[prop_name]
is_simple = not prop_field_types['is_complex']
prop_type = prop_field_types['xsd_type']
restrictions = prop_field_types['restrictions']
simple_type = prop_field_types['simple_type']
is_list_prop = prop_name in resource_class.prop_list_fields
is_map_prop = prop_name in resource_class.prop_map_fields
prop_value = obj_dict.get(prop_name)
if not prop_value:
continue
if is_simple and (not is_list_prop) and (not is_map_prop):
try:
obj_dict[prop_name] = self._validate_simple_type(prop_name,
prop_type, simple_type,
prop_value, restrictions)
except Exception as e:
err_msg = 'Error validating property ' + str(e)
return False, err_msg
else:
continue
prop_cls = cfgm_common.utils.str_to_class(prop_type, __name__)
if isinstance(prop_value, dict):
try:
self._validate_complex_type(prop_cls, prop_value)
except Exception as e:
err_msg = 'Error validating property %s value %s ' %(
prop_name, prop_value)
err_msg += str(e)
return False, err_msg
elif isinstance(prop_value, list):
for elem in prop_value:
try:
if is_simple:
self._validate_simple_type(prop_name, prop_type,
simple_type, elem,
restrictions)
else:
self._validate_complex_type(prop_cls, elem)
except Exception as e:
err_msg = 'Error validating property %s elem %s ' %(
prop_name, elem)
err_msg += str(e)
return False, err_msg
else: # complex-type + value isn't dict or wrapped in list or map
err_msg = 'Error in property %s type %s value of %s ' %(
prop_name, prop_cls, prop_value)
return False, err_msg
# end for all properties
return True, ''
# end _validate_props_in_request
def _validate_refs_in_request(self, resource_class, obj_dict):
for ref_name in resource_class.ref_fields:
ref_fld_types_list = list(resource_class.ref_field_types[ref_name])
ref_link_type = ref_fld_types_list[1]
if ref_link_type == 'None':
continue
for ref_dict in obj_dict.get(ref_name) or []:
                buf = StringIO()
attr_cls = cfgm_common.utils.str_to_class(ref_link_type, __name__)
tmp_attr = attr_cls(**ref_dict['attr'])
tmp_attr.export(buf)
node = etree.fromstring(buf.getvalue())
try:
tmp_attr.build(node)
except Exception as e:
err_msg = 'Error validating reference %s value %s ' \
%(ref_name, ref_dict)
err_msg += str(e)
return False, err_msg
return True, ''
# end _validate_refs_in_request
def _validate_perms_in_request(self, resource_class, obj_type, obj_dict):
for ref_name in resource_class.ref_fields:
for ref in obj_dict.get(ref_name) or []:
ref_uuid = self._db_conn.fq_name_to_uuid(ref_name[:-5],
ref['to'])
(ok, status) = self._permissions.check_perms_link(
get_request(), ref_uuid)
if not ok:
(code, err_msg) = status
raise cfgm_common.exceptions.HttpError(code, err_msg)
# end _validate_perms_in_request
def _validate_resource_type(self, type):
try:
resource_class = self.get_resource_class(type)
except TypeError:
return False, (404, "Resouce type '%s' not found" % type)
return True, resource_class.resource_type
# http_resource_<oper> - handlers invoked from
# a. bottle route (on-the-wire) OR
# b. internal requests
# using normalized get_request() from ApiContext
@log_api_stats
def http_resource_create(self, obj_type):
r_class = self.get_resource_class(obj_type)
resource_type = r_class.resource_type
obj_dict = get_request().json[resource_type]
self._post_validate(obj_type, obj_dict=obj_dict)
fq_name = obj_dict['fq_name']
try:
self._extension_mgrs['resourceApi'].map_method(
'pre_%s_create' %(obj_type), obj_dict)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In pre_%s_create an extension had error for %s' \
%(obj_type, obj_dict)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
# properties validator
ok, result = self._validate_props_in_request(r_class, obj_dict)
if not ok:
result = 'Bad property in create: ' + result
raise cfgm_common.exceptions.HttpError(400, result)
# references validator
ok, result = self._validate_refs_in_request(r_class, obj_dict)
if not ok:
result = 'Bad reference in create: ' + result
raise cfgm_common.exceptions.HttpError(400, result)
# common handling for all resource create
(ok, result) = self._post_common(get_request(), obj_type,
obj_dict)
if not ok:
(code, msg) = result
fq_name_str = ':'.join(obj_dict.get('fq_name', []))
self.config_object_error(None, fq_name_str, obj_type, 'http_post', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
uuid_in_req = result
name = obj_dict['fq_name'][-1]
fq_name = obj_dict['fq_name']
db_conn = self._db_conn
# if client gave parent_type of config-root, ignore and remove
if 'parent_type' in obj_dict and obj_dict['parent_type'] == 'config-root':
del obj_dict['parent_type']
parent_class = None
if 'parent_type' in obj_dict:
# non config-root child, verify parent exists
parent_class = self.get_resource_class(obj_dict['parent_type'])
if parent_class is None:
raise cfgm_common.exceptions.HttpError(
400, 'Invalid parent type: %s' %(obj_dict['parent_type']))
parent_obj_type = parent_class.object_type
parent_res_type = parent_class.resource_type
parent_fq_name = obj_dict['fq_name'][:-1]
try:
parent_uuid = self._db_conn.fq_name_to_uuid(parent_obj_type,
parent_fq_name)
(ok, status) = self._permissions.check_perms_write(
get_request(), parent_uuid)
if not ok:
(code, err_msg) = status
raise cfgm_common.exceptions.HttpError(code, err_msg)
self._permissions.set_user_role(get_request(), obj_dict)
except NoIdError:
err_msg = 'Parent %s type %s does not exist' % (
pformat(parent_fq_name), parent_res_type)
fq_name_str = ':'.join(parent_fq_name)
self.config_object_error(None, fq_name_str, obj_type, 'http_post', err_msg)
raise cfgm_common.exceptions.HttpError(400, err_msg)
# Validate perms on references
try:
self._validate_perms_in_request(r_class, obj_type, obj_dict)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
400, 'Unknown reference in resource create %s.' %(obj_dict))
# State modification starts from here. Ensure that cleanup is done for all state changes
cleanup_on_failure = []
obj_ids = {}
def undo_create(result):
(code, msg) = result
get_context().invoke_undo(code, msg, self.config_log)
failed_stage = get_context().get_state()
fq_name_str = ':'.join(fq_name)
self.config_object_error(
None, fq_name_str, obj_type, failed_stage, msg)
# end undo_create
def stateful_create():
# Alloc and Store id-mappings before creating entry on pubsub store.
# Else a subscriber can ask for an id mapping before we have stored it
(ok, result) = db_conn.dbe_alloc(obj_type, obj_dict,
uuid_in_req)
if not ok:
return (ok, result)
get_context().push_undo(db_conn.dbe_release, obj_type, fq_name)
obj_ids.update(result)
env = get_request().headers.environ
tenant_name = env.get(hdr_server_tenant()) or 'default-project'
get_context().set_state('PRE_DBE_CREATE')
# type-specific hook
(ok, result) = r_class.pre_dbe_create(tenant_name, obj_dict,
db_conn)
if not ok:
return (ok, result)
callable = getattr(r_class, 'http_post_collection_fail', None)
if callable:
cleanup_on_failure.append((callable, [tenant_name, obj_dict, db_conn]))
get_context().set_state('DBE_CREATE')
(ok, result) = db_conn.dbe_create(obj_type, obj_ids,
obj_dict)
if not ok:
return (ok, result)
get_context().set_state('POST_DBE_CREATE')
# type-specific hook
try:
ok, err_msg = r_class.post_dbe_create(tenant_name, obj_dict, db_conn)
except Exception as e:
ok = False
err_msg = '%s:%s post_dbe_create had an exception: %s' %(
obj_type, obj_ids['uuid'], str(e))
err_msg += cfgm_common.utils.detailed_traceback()
if not ok:
# Create is done, log to system, no point in informing user
self.config_log(err_msg, level=SandeshLevel.SYS_ERR)
return True, ''
# end stateful_create
try:
ok, result = stateful_create()
except Exception as e:
ok = False
err_msg = cfgm_common.utils.detailed_traceback()
result = (500, err_msg)
if not ok:
undo_create(result)
code, msg = result
raise cfgm_common.exceptions.HttpError(code, msg)
rsp_body = {}
rsp_body['name'] = name
rsp_body['fq_name'] = fq_name
rsp_body['uuid'] = obj_ids['uuid']
rsp_body['href'] = self.generate_url(resource_type, obj_ids['uuid'])
if parent_class:
# non config-root child, send back parent uuid/href
rsp_body['parent_uuid'] = parent_uuid
rsp_body['parent_href'] = self.generate_url(parent_res_type,
parent_uuid)
try:
self._extension_mgrs['resourceApi'].map_method(
'post_%s_create' %(obj_type), obj_dict)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In post_%s_create an extension had error for %s' \
%(obj_type, obj_dict)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
return {resource_type: rsp_body}
# end http_resource_create
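    # The stateful_create/update/delete helpers all share one undo-stack
    # pattern: each stage registers its compensating action before making the
    # next state change, and on failure the stack unwinds in reverse order.
    # Simplified sketch (hypothetical, not the actual get_context() API):
    #
    #   undos = []
    #   undos.append(lambda: db_conn.dbe_release(obj_type, fq_name))
    #   ok, result = db_conn.dbe_create(obj_type, obj_ids, obj_dict)
    #   if not ok:
    #       while undos:
    #           undos.pop()()   # unwind newest-first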
@log_api_stats
def http_resource_read(self, obj_type, id):
r_class = self.get_resource_class(obj_type)
resource_type = r_class.resource_type
try:
self._extension_mgrs['resourceApi'].map_method(
'pre_%s_read' %(obj_type), id)
except Exception as e:
pass
etag = get_request().headers.get('If-None-Match')
db_conn = self._db_conn
try:
req_obj_type = db_conn.uuid_to_obj_type(id)
if req_obj_type != obj_type:
raise cfgm_common.exceptions.HttpError(
404, 'No %s object found for id %s' %(resource_type, id))
fq_name = db_conn.uuid_to_fq_name(id)
except NoIdError as e:
raise cfgm_common.exceptions.HttpError(404, str(e))
# common handling for all resource get
(ok, result) = self._get_common(get_request(), id)
if not ok:
(code, msg) = result
self.config_object_error(
id, None, obj_type, 'http_get', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
db_conn = self._db_conn
if etag:
obj_ids = {'uuid': id}
(ok, result) = db_conn.dbe_is_latest(obj_ids, etag.strip('"'))
if not ok:
# Not present in DB
self.config_object_error(
id, None, obj_type, 'http_get', result)
raise cfgm_common.exceptions.HttpError(404, result)
is_latest = result
if is_latest:
# send Not-Modified, caches use this for read optimization
bottle.response.status = 304
return
#end if etag
obj_ids = {'uuid': id}
# Generate field list for db layer
obj_fields = r_class.prop_fields | r_class.ref_fields
if 'fields' in get_request().query:
obj_fields |= set(get_request().query.fields.split(','))
else: # default props + children + refs + backrefs
if 'exclude_back_refs' not in get_request().query:
obj_fields |= r_class.backref_fields
if 'exclude_children' not in get_request().query:
obj_fields |= r_class.children_fields
try:
(ok, result) = db_conn.dbe_read(obj_type, obj_ids,
list(obj_fields))
if not ok:
self.config_object_error(id, None, obj_type, 'http_get', result)
except NoIdError as e:
# Not present in DB
raise cfgm_common.exceptions.HttpError(404, str(e))
if not ok:
raise cfgm_common.exceptions.HttpError(500, result)
# check visibility
if (not result['id_perms'].get('user_visible', True) and
not self.is_admin_request()):
result = 'This object is not visible by users: %s' % id
self.config_object_error(id, None, obj_type, 'http_get', result)
raise cfgm_common.exceptions.HttpError(404, result)
rsp_body = {}
rsp_body['uuid'] = id
rsp_body['href'] = self.generate_url(resource_type, id)
rsp_body['name'] = result['fq_name'][-1]
rsp_body.update(result)
id_perms = result['id_perms']
bottle.response.set_header('ETag', '"' + id_perms['last_modified'] + '"')
try:
self._extension_mgrs['resourceApi'].map_method(
'post_%s_read' %(obj_type), id, rsp_body)
except Exception as e:
pass
return {resource_type: rsp_body}
# end http_resource_read
@log_api_stats
def http_resource_update(self, obj_type, id):
r_class = self.get_resource_class(obj_type)
resource_type = r_class.resource_type
# Early return if there is no body or an empty body
request = get_request()
if (not hasattr(request, 'json') or
not request.json or
not request.json[resource_type]):
return
obj_dict = get_request().json[resource_type]
try:
self._extension_mgrs['resourceApi'].map_method(
'pre_%s_update' %(obj_type), id, obj_dict)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In pre_%s_update an extension had error for %s' \
%(obj_type, obj_dict)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
db_conn = self._db_conn
try:
req_obj_type = db_conn.uuid_to_obj_type(id)
if req_obj_type != obj_type:
raise cfgm_common.exceptions.HttpError(
404, 'No %s object found for id %s' %(resource_type, id))
obj_ids = {'uuid': id}
(read_ok, read_result) = db_conn.dbe_read(obj_type, obj_ids)
if not read_ok:
bottle.abort(
404, 'No %s object found for id %s' %(resource_type, id))
fq_name = read_result['fq_name']
except NoIdError as e:
raise cfgm_common.exceptions.HttpError(404, str(e))
# properties validator
ok, result = self._validate_props_in_request(r_class, obj_dict)
if not ok:
result = 'Bad property in update: ' + result
raise cfgm_common.exceptions.HttpError(400, result)
# references validator
ok, result = self._validate_refs_in_request(r_class, obj_dict)
if not ok:
result = 'Bad reference in update: ' + result
raise cfgm_common.exceptions.HttpError(400, result)
# common handling for all resource put
(ok, result) = self._put_common(
get_request(), obj_type, id, fq_name, obj_dict)
if not ok:
(code, msg) = result
self.config_object_error(id, None, obj_type, 'http_put', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
# Validate perms on references
try:
self._validate_perms_in_request(r_class, obj_type, obj_dict)
except NoIdError:
raise cfgm_common.exceptions.HttpError(400,
'Unknown reference in resource update %s %s.'
%(obj_type, obj_dict))
# State modification starts from here. Ensure that cleanup is done for all state changes
cleanup_on_failure = []
obj_ids = {'uuid': id}
def undo_update(result):
(code, msg) = result
get_context().invoke_undo(code, msg, self.config_log)
failed_stage = get_context().get_state()
self.config_object_error(
id, None, obj_type, failed_stage, msg)
# end undo_update
def stateful_update():
get_context().set_state('PRE_DBE_UPDATE')
# type-specific hook
(ok, result) = r_class.pre_dbe_update(
id, fq_name, obj_dict, self._db_conn)
if not ok:
return (ok, result)
get_context().set_state('DBE_UPDATE')
(ok, result) = db_conn.dbe_update(obj_type, obj_ids,
obj_dict)
if not ok:
return (ok, result)
get_context().set_state('POST_DBE_UPDATE')
# type-specific hook
(ok, result) = r_class.post_dbe_update(id, fq_name, obj_dict, self._db_conn)
if not ok:
return (ok, result)
return (ok, result)
# end stateful_update
try:
ok, result = stateful_update()
except Exception as e:
ok = False
err_msg = cfgm_common.utils.detailed_traceback()
result = (500, err_msg)
if not ok:
undo_update(result)
code, msg = result
raise cfgm_common.exceptions.HttpError(code, msg)
rsp_body = {}
rsp_body['uuid'] = id
rsp_body['href'] = self.generate_url(resource_type, id)
try:
self._extension_mgrs['resourceApi'].map_method(
'post_%s_update' %(obj_type), id, obj_dict, read_result)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In post_%s_update an extension had error for %s' \
%(obj_type, obj_dict)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
return {resource_type: rsp_body}
# end http_resource_update
@log_api_stats
def http_resource_delete(self, obj_type, id):
r_class = self.get_resource_class(obj_type)
resource_type = r_class.resource_type
db_conn = self._db_conn
# if obj doesn't exist return early
try:
req_obj_type = db_conn.uuid_to_obj_type(id)
if req_obj_type != obj_type:
raise cfgm_common.exceptions.HttpError(
404, 'No %s object found for id %s' %(resource_type, id))
_ = db_conn.uuid_to_fq_name(id)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'ID %s does not exist' %(id))
try:
self._extension_mgrs['resourceApi'].map_method(
'pre_%s_delete' %(obj_type), id)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In pre_%s_delete an extension had error for %s' \
%(obj_type, id)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
# read in obj from db (accepting error) to get details of it
obj_ids = {'uuid': id}
try:
(read_ok, read_result) = db_conn.dbe_read(obj_type, obj_ids)
except NoIdError as e:
raise cfgm_common.exceptions.HttpError(404, str(e))
if not read_ok:
self.config_object_error(
id, None, obj_type, 'http_delete', read_result)
# proceed down to delete the resource
# common handling for all resource delete
parent_obj_type = read_result.get('parent_type')
(ok, del_result) = self._delete_common(
get_request(), obj_type, id, parent_obj_type)
if not ok:
(code, msg) = del_result
self.config_object_error(id, None, obj_type, 'http_delete', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
fq_name = read_result['fq_name']
ifmap_id = imid.get_ifmap_id_from_fq_name(resource_type, fq_name)
obj_ids['imid'] = ifmap_id
if parent_obj_type:
parent_res_type = \
self.get_resource_class(parent_obj_type).resource_type
parent_imid = cfgm_common.imid.get_ifmap_id_from_fq_name(
parent_res_type, fq_name[:-1])
obj_ids['parent_imid'] = parent_imid
# type-specific hook
r_class = self.get_resource_class(obj_type)
# fail if non-default children or non-derived backrefs exist
default_names = {}
for child_field in r_class.children_fields:
child_type, is_derived = r_class.children_field_types[child_field]
if is_derived:
continue
child_cls = self.get_resource_class(child_type)
default_child_name = 'default-%s' %(
child_cls(parent_type=obj_type).get_type())
default_names[child_type] = default_child_name
exist_hrefs = []
for child in read_result.get(child_field, []):
if child['to'][-1] == default_child_name:
continue
exist_hrefs.append(child['href'])
if exist_hrefs:
err_msg = 'Delete when children still present: %s' %(
exist_hrefs)
self.config_object_error(
id, None, obj_type, 'http_delete', err_msg)
raise cfgm_common.exceptions.HttpError(409, err_msg)
relaxed_refs = set(db_conn.dbe_get_relaxed_refs(id))
for backref_field in r_class.backref_fields:
_, _, is_derived = r_class.backref_field_types[backref_field]
if is_derived:
continue
exist_hrefs = [backref['href']
for backref in read_result.get(backref_field, [])
if backref['uuid'] not in relaxed_refs]
if exist_hrefs:
err_msg = 'Delete when resource still referred: %s' %(
exist_hrefs)
self.config_object_error(
id, None, obj_type, 'http_delete', err_msg)
raise cfgm_common.exceptions.HttpError(409, err_msg)
# State modification starts from here. Ensure that cleanup is done for all state changes
cleanup_on_failure = []
def undo_delete(result):
(code, msg) = result
get_context().invoke_undo(code, msg, self.config_log)
failed_stage = get_context().get_state()
self.config_object_error(
id, None, obj_type, failed_stage, msg)
# end undo_delete
def stateful_delete():
get_context().set_state('PRE_DBE_DELETE')
(ok, del_result) = r_class.pre_dbe_delete(id, read_result, db_conn)
if not ok:
return (ok, del_result)
# Delete default children first
for child_field in r_class.children_fields:
child_type, is_derived = r_class.children_field_types[child_field]
if is_derived:
continue
if child_field in self._GENERATE_DEFAULT_INSTANCE:
self.delete_default_children(child_type, read_result)
callable = getattr(r_class, 'http_delete_fail', None)
if callable:
cleanup_on_failure.append((callable, [id, read_result, db_conn]))
get_context().set_state('DBE_DELETE')
(ok, del_result) = db_conn.dbe_delete(
obj_type, obj_ids, read_result)
if not ok:
return (ok, del_result)
# type-specific hook
get_context().set_state('POST_DBE_DELETE')
try:
ok, err_msg = r_class.post_dbe_delete(id, read_result, db_conn)
except Exception as e:
ok = False
err_msg = '%s:%s post_dbe_delete had an exception: ' \
%(obj_type, id)
err_msg += cfgm_common.utils.detailed_traceback()
if not ok:
# Delete is done, log to system, no point in informing user
self.config_log(err_msg, level=SandeshLevel.SYS_ERR)
return (True, '')
# end stateful_delete
try:
ok, result = stateful_delete()
except NoIdError as e:
raise cfgm_common.exceptions.HttpError(
404, 'No %s object found for id %s' %(resource_type, id))
except Exception as e:
ok = False
err_msg = cfgm_common.utils.detailed_traceback()
result = (500, err_msg)
if not ok:
undo_delete(result)
code, msg = result
raise cfgm_common.exceptions.HttpError(code, msg)
try:
self._extension_mgrs['resourceApi'].map_method(
'post_%s_delete' %(obj_type), id, read_result)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
            err_msg = 'In post_%s_delete an extension had error for %s' \
%(obj_type, id)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
# end http_resource_delete
@log_api_stats
def http_resource_list(self, obj_type):
r_class = self.get_resource_class(obj_type)
resource_type = r_class.resource_type
db_conn = self._db_conn
env = get_request().headers.environ
parent_uuids = None
back_ref_uuids = None
obj_uuids = None
if (('parent_fq_name_str' in get_request().query) and
('parent_type' in get_request().query)):
parent_fq_name = get_request().query.parent_fq_name_str.split(':')
parent_res_type = get_request().query.parent_type
parent_class = self.get_resource_class(parent_res_type)
parent_type = parent_class.object_type
parent_uuids = [self._db_conn.fq_name_to_uuid(parent_type, parent_fq_name)]
elif 'parent_id' in get_request().query:
parent_uuids = get_request().query.parent_id.split(',')
if 'back_ref_id' in get_request().query:
back_ref_uuids = get_request().query.back_ref_id.split(',')
if 'obj_uuids' in get_request().query:
obj_uuids = get_request().query.obj_uuids.split(',')
# common handling for all resource get
(ok, result) = self._get_common(get_request(), parent_uuids)
if not ok:
(code, msg) = result
self.config_object_error(
None, None, '%ss' %(resource_type), 'http_get_collection', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
if 'count' in get_request().query:
is_count = 'true' in get_request().query.count.lower()
else:
is_count = False
if 'detail' in get_request().query:
is_detail = 'true' in get_request().query.detail.lower()
else:
is_detail = False
if 'fields' in get_request().query:
req_fields = get_request().query.fields.split(',')
else:
req_fields = []
try:
filters = utils.get_filters(get_request().query.filters)
except Exception as e:
raise cfgm_common.exceptions.HttpError(
400, 'Invalid filter ' + get_request().query.filters)
return self._list_collection(obj_type, parent_uuids, back_ref_uuids,
obj_uuids, is_count, is_detail, filters,
req_fields)
# end http_resource_list
# internal_request_<oper> - handlers of internally generated requests
# that save-ctx, generate-ctx and restore-ctx
def internal_request_create(self, resource_type, obj_json):
object_type = self.get_resource_class(resource_type).object_type
try:
orig_context = get_context()
orig_request = get_request()
b_req = bottle.BaseRequest(
{'PATH_INFO': '/%ss' %(resource_type),
'bottle.app': orig_request.environ['bottle.app'],
'HTTP_X_USER': 'contrail-api',
'HTTP_X_ROLE': self.cloud_admin_role})
json_as_dict = {'%s' %(resource_type): obj_json}
i_req = context.ApiInternalRequest(
b_req.url, b_req.urlparts, b_req.environ, b_req.headers,
json_as_dict, None)
set_context(context.ApiContext(internal_req=i_req))
self.http_resource_create(object_type)
return True, ""
finally:
set_context(orig_context)
# end internal_request_create
def internal_request_update(self, resource_type, obj_uuid, obj_json):
object_type = self.get_resource_class(resource_type).object_type
try:
orig_context = get_context()
orig_request = get_request()
b_req = bottle.BaseRequest(
{'PATH_INFO': '/%ss' %(resource_type),
'bottle.app': orig_request.environ['bottle.app'],
'HTTP_X_USER': 'contrail-api',
'HTTP_X_ROLE': self.cloud_admin_role})
json_as_dict = {'%s' %(resource_type): obj_json}
i_req = context.ApiInternalRequest(
b_req.url, b_req.urlparts, b_req.environ, b_req.headers,
json_as_dict, None)
set_context(context.ApiContext(internal_req=i_req))
self.http_resource_update(object_type, obj_uuid)
return True, ""
finally:
set_context(orig_context)
# end internal_request_update
def internal_request_delete(self, resource_type, obj_uuid):
object_type = self.get_resource_class(resource_type).object_type
try:
orig_context = get_context()
orig_request = get_request()
b_req = bottle.BaseRequest(
{'PATH_INFO': '/%s/%s' %(resource_type, obj_uuid),
'bottle.app': orig_request.environ['bottle.app'],
'HTTP_X_USER': 'contrail-api',
'HTTP_X_ROLE': self.cloud_admin_role})
i_req = context.ApiInternalRequest(
b_req.url, b_req.urlparts, b_req.environ, b_req.headers,
None, None)
set_context(context.ApiContext(internal_req=i_req))
self.http_resource_delete(object_type, obj_uuid)
return True, ""
finally:
set_context(orig_context)
# end internal_request_delete
def internal_request_ref_update(self,
res_type, obj_uuid, operation, ref_res_type, ref_uuid, attr=None):
req_dict = {'type': res_type,
'uuid': obj_uuid,
'operation': operation,
'ref-type': ref_res_type,
'ref-uuid': ref_uuid,
'attr': attr}
try:
orig_context = get_context()
orig_request = get_request()
b_req = bottle.BaseRequest(
{'PATH_INFO': '/ref-update',
'bottle.app': orig_request.environ['bottle.app'],
'HTTP_X_USER': 'contrail-api',
'HTTP_X_ROLE': self.cloud_admin_role})
i_req = context.ApiInternalRequest(
b_req.url, b_req.urlparts, b_req.environ, b_req.headers,
req_dict, None)
set_context(context.ApiContext(internal_req=i_req))
self.ref_update_http_post()
return True, ""
finally:
set_context(orig_context)
# end internal_request_ref_update
def create_default_children(self, object_type, parent_obj):
r_class = self.get_resource_class(object_type)
for child_fields in r_class.children_fields:
# Create a default child only if provisioned for
child_res_type, is_derived =\
r_class.children_field_types[child_fields]
if is_derived:
continue
if child_res_type not in self._GENERATE_DEFAULT_INSTANCE:
continue
child_cls = self.get_resource_class(child_res_type)
child_obj_type = child_cls.object_type
child_obj = child_cls(parent_obj=parent_obj)
child_dict = child_obj.__dict__
child_dict['id_perms'] = self._get_default_id_perms()
child_dict['perms2'] = self._get_default_perms2()
(ok, result) = self._db_conn.dbe_alloc(child_obj_type, child_dict)
if not ok:
return (ok, result)
obj_ids = result
# For virtual networks, allocate an ID
if child_obj_type == 'virtual_network':
child_dict['virtual_network_network_id'] =\
self._db_conn._zk_db.alloc_vn_id(
child_obj.get_fq_name_str())
(ok, result) = self._db_conn.dbe_create(child_obj_type, obj_ids,
child_dict)
if not ok:
# DB Create failed, log and stop further child creation.
err_msg = "DB Create failed creating %s" % child_res_type
self.config_log(err_msg, level=SandeshLevel.SYS_ERR)
return (ok, result)
# recurse down type hierarchy
self.create_default_children(child_obj_type, child_obj)
# end create_default_children
def delete_default_children(self, resource_type, parent_dict):
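        """Delete the 'default-<type>' children of a parent being deleted.

        Mirrors create_default_children: only child types provisioned in
        self._GENERATE_DEFAULT_INSTANCE are considered.
        """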
r_class = self.get_resource_class(resource_type)
for child_field in r_class.children_fields:
# Delete a default child only if provisioned for
child_type, is_derived = r_class.children_field_types[child_field]
if child_type not in self._GENERATE_DEFAULT_INSTANCE:
continue
child_cls = self.get_resource_class(child_type)
            # first locate default child then delete it
default_child_name = 'default-%s' %(child_type)
child_infos = parent_dict.get(child_field, [])
for child_info in child_infos:
if child_info['to'][-1] == default_child_name:
default_child_id = child_info['href'].split('/')[-1]
self.http_resource_delete(child_type, default_child_id)
break
# end delete_default_children
@classmethod
def _generate_resource_crud_methods(cls, obj):
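        """Attach per-resource CRUD handlers to obj via functools.partial.

        For example (illustrative), object_type 'virtual_network' yields:
            obj.virtual_networks_http_post  -> http_resource_create
            obj.virtual_network_http_get    -> http_resource_read
            obj.virtual_network_http_put    -> http_resource_update
            obj.virtual_network_http_delete -> http_resource_delete
            obj.virtual_networks_http_get   -> http_resource_list
        """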
for object_type, _ in all_resource_type_tuples:
create_method = functools.partial(obj.http_resource_create,
object_type)
functools.update_wrapper(create_method, obj.http_resource_create)
setattr(obj, '%ss_http_post' %(object_type), create_method)
read_method = functools.partial(obj.http_resource_read,
object_type)
functools.update_wrapper(read_method, obj.http_resource_read)
setattr(obj, '%s_http_get' %(object_type), read_method)
update_method = functools.partial(obj.http_resource_update,
object_type)
functools.update_wrapper(update_method, obj.http_resource_update)
setattr(obj, '%s_http_put' %(object_type), update_method)
delete_method = functools.partial(obj.http_resource_delete,
object_type)
functools.update_wrapper(delete_method, obj.http_resource_delete)
setattr(obj, '%s_http_delete' %(object_type), delete_method)
list_method = functools.partial(obj.http_resource_list,
object_type)
functools.update_wrapper(list_method, obj.http_resource_list)
setattr(obj, '%ss_http_get' %(object_type), list_method)
# end _generate_resource_crud_methods
@classmethod
def _generate_resource_crud_uri(cls, obj):
for object_type, resource_type in all_resource_type_tuples:
# CRUD + list URIs of the form
# obj.route('/virtual-network/<id>', 'GET', obj.virtual_network_http_get)
# obj.route('/virtual-network/<id>', 'PUT', obj.virtual_network_http_put)
# obj.route('/virtual-network/<id>', 'DELETE', obj.virtual_network_http_delete)
# obj.route('/virtual-networks', 'POST', obj.virtual_networks_http_post)
# obj.route('/virtual-networks', 'GET', obj.virtual_networks_http_get)
# leaf resource
obj.route('/%s/<id>' %(resource_type),
'GET',
getattr(obj, '%s_http_get' %(object_type)))
obj.route('/%s/<id>' %(resource_type),
'PUT',
getattr(obj, '%s_http_put' %(object_type)))
obj.route('/%s/<id>' %(resource_type),
'DELETE',
getattr(obj, '%s_http_delete' %(object_type)))
# collection of leaf
obj.route('/%ss' %(resource_type),
'POST',
getattr(obj, '%ss_http_post' %(object_type)))
obj.route('/%ss' %(resource_type),
'GET',
getattr(obj, '%ss_http_get' %(object_type)))
# end _generate_resource_crud_uri
def __init__(self, args_str=None):
self._db_conn = None
self._get_common = None
self._post_common = None
self._resource_classes = {}
self._args = None
if not args_str:
args_str = ' '.join(sys.argv[1:])
self._parse_args(args_str)
# aaa-mode is ignored if multi_tenancy is configured by user
if self._args.multi_tenancy is None:
# MT unconfigured by user - determine from aaa-mode
if self.aaa_mode not in cfgm_common.AAA_MODE_VALID_VALUES:
self.aaa_mode = cfgm_common.AAA_MODE_DEFAULT_VALUE
self._args.multi_tenancy = self.aaa_mode != 'no-auth'
else:
# MT configured by user - ignore aaa-mode
self.aaa_mode = "cloud-admin" if self._args.multi_tenancy else "no-auth"
# set python logging level from logging_level cmdline arg
if not self._args.logging_conf:
            logging.basicConfig(level=getattr(logging, self._args.logging_level))
self._base_url = "http://%s:%s" % (self._args.listen_ip_addr,
self._args.listen_port)
# Generate LinkObjects for all entities
links = []
# Link for root
        links.append(LinkObject('root', self._base_url, '/config-root',
'config-root'))
for _, resource_type in all_resource_type_tuples:
link = LinkObject('collection',
                              self._base_url, '/%ss' %(resource_type),
'%s' %(resource_type))
links.append(link)
for _, resource_type in all_resource_type_tuples:
link = LinkObject('resource-base',
                              self._base_url, '/%s' %(resource_type),
'%s' %(resource_type))
links.append(link)
self._homepage_links = links
self._pipe_start_app = None
#GreenletProfiler.set_clock_type('wall')
self._profile_info = None
# REST interface initialization
self._get_common = self._http_get_common
self._put_common = self._http_put_common
self._delete_common = self._http_delete_common
self._post_validate = self._http_post_validate
self._post_common = self._http_post_common
for act_res in _ACTION_RESOURCES:
link = LinkObject('action', self._base_url, act_res['uri'],
act_res['link_name'], act_res['method'])
self._homepage_links.append(link)
# Register for VN delete request. Disallow delete of system default VN
self.route('/virtual-network/<id>', 'DELETE', self.virtual_network_http_delete)
self.route('/documentation/<filename:path>',
'GET', self.documentation_http_get)
self._homepage_links.insert(
0, LinkObject('documentation', self._base_url,
'/documentation/index.html',
'documentation', 'GET'))
# APIs to reserve/free block of IP address from a VN/Subnet
self.route('/virtual-network/<id>/ip-alloc',
'POST', self.vn_ip_alloc_http_post)
self._homepage_links.append(
LinkObject('action', self._base_url,
'/virtual-network/%s/ip-alloc',
'virtual-network-ip-alloc', 'POST'))
self.route('/virtual-network/<id>/ip-free',
'POST', self.vn_ip_free_http_post)
self._homepage_links.append(
LinkObject('action', self._base_url,
'/virtual-network/%s/ip-free',
'virtual-network-ip-free', 'POST'))
# APIs to find out number of ip instances from given VN subnet
self.route('/virtual-network/<id>/subnet-ip-count',
'POST', self.vn_subnet_ip_count_http_post)
self._homepage_links.append(
LinkObject('action', self._base_url,
'/virtual-network/%s/subnet-ip-count',
'virtual-network-subnet-ip-count', 'POST'))
# Enable/Disable multi tenancy
self.route('/multi-tenancy', 'GET', self.mt_http_get)
self.route('/multi-tenancy', 'PUT', self.mt_http_put)
self.route('/aaa-mode', 'GET', self.aaa_mode_http_get)
self.route('/aaa-mode', 'PUT', self.aaa_mode_http_put)
# Initialize discovery client
self._disc = None
if self._args.disc_server_ip and self._args.disc_server_port:
self._disc = client.DiscoveryClient(self._args.disc_server_ip,
self._args.disc_server_port,
ModuleNames[Module.API_SERVER])
# sandesh init
self._sandesh = Sandesh()
# Reset the sandesh send rate limit value
if self._args.sandesh_send_rate_limit is not None:
SandeshSystem.set_sandesh_send_rate_limit(
self._args.sandesh_send_rate_limit)
sandesh.DiscoveryClientStatsReq.handle_request = self.sandesh_disc_client_stats_handle_request
sandesh.DiscoveryClientSubscribeInfoReq.handle_request = self.sandesh_disc_client_subinfo_handle_request
sandesh.DiscoveryClientPublishInfoReq.handle_request = self.sandesh_disc_client_pubinfo_handle_request
module = Module.API_SERVER
module_name = ModuleNames[Module.API_SERVER]
node_type = Module2NodeType[module]
node_type_name = NodeTypeNames[node_type]
self.table = "ObjectConfigNode"
if self._args.worker_id:
instance_id = self._args.worker_id
else:
instance_id = INSTANCE_ID_DEFAULT
hostname = socket.gethostname()
self._sandesh.init_generator(module_name, hostname,
node_type_name, instance_id,
self._args.collectors,
'vnc_api_server_context',
int(self._args.http_server_port),
['cfgm_common', 'vnc_cfg_api_server.sandesh'], self._disc,
logger_class=self._args.logger_class,
logger_config_file=self._args.logging_conf)
self._sandesh.trace_buffer_create(name="VncCfgTraceBuf", size=1000)
self._sandesh.trace_buffer_create(name="RestApiTraceBuf", size=1000)
self._sandesh.trace_buffer_create(name="DBRequestTraceBuf", size=1000)
self._sandesh.trace_buffer_create(name="DBUVERequestTraceBuf", size=1000)
self._sandesh.trace_buffer_create(name="MessageBusNotifyTraceBuf",
size=1000)
self._sandesh.trace_buffer_create(name="IfmapTraceBuf", size=1000)
self._sandesh.set_logging_params(
enable_local_log=self._args.log_local,
category=self._args.log_category,
level=self._args.log_level,
file=self._args.log_file,
enable_syslog=self._args.use_syslog,
syslog_facility=self._args.syslog_facility)
ConnectionState.init(self._sandesh, hostname, module_name,
instance_id,
staticmethod(ConnectionState.get_process_state_cb),
NodeStatusUVE, NodeStatus, self.table)
# Address Management interface
addr_mgmt = vnc_addr_mgmt.AddrMgmt(self)
self._addr_mgmt = addr_mgmt
vnc_cfg_types.Resource.addr_mgmt = addr_mgmt
# DB interface initialization
if self._args.wipe_config:
self._db_connect(True)
else:
self._db_connect(self._args.reset_config)
self._db_init_entries()
# API/Permissions check
# after db init (uses db_conn)
self._rbac = vnc_rbac.VncRbac(self, self._db_conn)
self._permissions = vnc_perms.VncPermissions(self, self._args)
if self.is_rbac_enabled():
self._create_default_rbac_rule()
# Cpuinfo interface
sysinfo_req = True
config_node_ip = self.get_server_ip()
cpu_info = vnc_cpu_info.CpuInfo(
self._sandesh.module(), self._sandesh.instance_id(), sysinfo_req,
self._sandesh, 60, config_node_ip)
self._cpu_info = cpu_info
self.re_uuid = re.compile('^[0-9A-F]{8}-?[0-9A-F]{4}-?4[0-9A-F]{3}-?[89AB][0-9A-F]{3}-?[0-9A-F]{12}$',
re.IGNORECASE)
# VncZkClient client assignment
vnc_cfg_types.Resource.vnc_zk_client = self._db_conn._zk_db
# Load extensions
self._extension_mgrs = {}
self._load_extensions()
# Authn/z interface
if self._args.auth == 'keystone':
auth_svc = vnc_auth_keystone.AuthServiceKeystone(self, self._args)
else:
auth_svc = vnc_auth.AuthService(self, self._args)
self._pipe_start_app = auth_svc.get_middleware_app()
self._auth_svc = auth_svc
if int(self._args.worker_id) == 0:
try:
self._extension_mgrs['resync'].map(
self._resync_domains_projects)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_ERR)
# following allowed without authentication
self.white_list = [
'^/documentation', # allow all documentation
'^/$', # allow discovery
]
# end __init__
def sandesh_disc_client_subinfo_handle_request(self, req):
stats = self._disc.get_stats()
resp = sandesh.DiscoveryClientSubscribeInfoResp(Subscribe=[])
for sub in stats['subs']:
info = sandesh.SubscribeInfo(service_type=sub['service_type'])
info.instances = sub['instances']
info.ttl = sub['ttl']
info.blob = sub['blob']
resp.Subscribe.append(info)
resp.response(req.context())
# end
def sandesh_disc_client_pubinfo_handle_request(self, req):
stats = self._disc.get_stats()
resp = sandesh.DiscoveryClientPublishInfoResp(Publish=[])
for service_type, pub in stats['pubs'].items():
info = sandesh.PublishInfo(service_type=service_type)
info.blob = pub['blob']
resp.Publish.append(info)
resp.response(req.context())
# end
# Return discovery client stats
def sandesh_disc_client_stats_handle_request(self, req):
stats = self._disc.get_stats()
resp = sandesh.DiscoveryClientStatsResp(Subscribe=[], Publish=[])
# pub stats
for service_type, pub in stats['pubs'].items():
pub_stats = sandesh.PublisherStats(service_type=service_type)
pub_stats.Request = pub['request']
pub_stats.Response = pub['response']
pub_stats.ConnError = pub['conn_error']
pub_stats.Timeout = pub['timeout']
pub_stats.unknown_exceptions = pub['exc_unknown']
pub_stats.exception_info = pub['exc_info']
            http_errors = ['%s:%d' % (k[3:], v) for k, v in pub.items() if 'sc_' in k]
            pub_stats.HttpError = ", ".join(http_errors)
resp.Publish.append(pub_stats)
# sub stats
for sub in stats['subs']:
sub_stats = sandesh.SubscriberStats(service_type=sub['service_type'])
sub_stats.Request = sub['request']
sub_stats.Response = sub['response']
sub_stats.ConnError = sub['conn_error']
sub_stats.Timeout = sub['timeout']
sub_stats.unknown_exceptions = sub['exc_unknown']
sub_stats.exception_info = sub['exc_info']
            http_errors = ['%s:%d' % (k[3:], v) for k, v in sub.items() if 'sc_' in k]
            sub_stats.HttpError = ", ".join(http_errors)
resp.Subscribe.append(sub_stats)
resp.response(req.context())
# end sandesh_disc_client_stats_handle_request
def _extensions_transform_request(self, request):
extensions = self._extension_mgrs.get('resourceApi')
if not extensions or not extensions.names():
return None
return extensions.map_method(
'transform_request', request)
# end _extensions_transform_request
def _extensions_validate_request(self, request):
extensions = self._extension_mgrs.get('resourceApi')
if not extensions or not extensions.names():
return None
return extensions.map_method(
'validate_request', request)
# end _extensions_validate_request
def _extensions_transform_response(self, request, response):
extensions = self._extension_mgrs.get('resourceApi')
if not extensions or not extensions.names():
return None
return extensions.map_method(
'transform_response', request, response)
# end _extensions_transform_response
@ignore_exceptions
def _generate_rest_api_request_trace(self):
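        """Build a RestApiTrace for mutating requests; returns None for GET.

        The request id is taken from the X-Request-Id header when present,
        otherwise a fresh 'req-<uuid>' is generated.
        """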
method = get_request().method.upper()
if method == 'GET':
return None
req_id = get_request().headers.get('X-Request-Id',
'req-%s' %(str(uuid.uuid4())))
gevent.getcurrent().trace_request_id = req_id
url = get_request().url
if method == 'DELETE':
req_data = ''
else:
try:
req_data = json.dumps(get_request().json)
except Exception as e:
req_data = '%s: Invalid request body' %(e)
rest_trace = RestApiTrace(request_id=req_id)
rest_trace.url = url
rest_trace.method = method
rest_trace.request_data = req_data
return rest_trace
# end _generate_rest_api_request_trace
@ignore_exceptions
def _generate_rest_api_response_trace(self, rest_trace, response):
if not rest_trace:
return
rest_trace.status = bottle.response.status
rest_trace.response_body = json.dumps(response)
rest_trace.trace_msg(name='RestApiTraceBuf', sandesh=self._sandesh)
# end _generate_rest_api_response_trace
# Public Methods
def route(self, uri, method, handler):
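        """Register handler for (uri, method), wrapped with the common
        pipeline: extension request transform/validation, REST tracing and
        RBAC validation before the handler, response trace and extension
        response transform after it. Handled HttpErrors become
        bottle.abort; unexpected exceptions are logged with a traceback
        and re-raised.
        """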
@use_context
def handler_trap_exception(*args, **kwargs):
try:
trace = None
self._extensions_transform_request(get_request())
self._extensions_validate_request(get_request())
trace = self._generate_rest_api_request_trace()
(ok, status) = self._rbac.validate_request(get_request())
if not ok:
(code, err_msg) = status
raise cfgm_common.exceptions.HttpError(code, err_msg)
response = handler(*args, **kwargs)
self._generate_rest_api_response_trace(trace, response)
self._extensions_transform_response(get_request(), response)
return response
except Exception as e:
if trace:
trace.trace_msg(name='RestApiTraceBuf',
sandesh=self._sandesh)
# don't log details of cfgm_common.exceptions.HttpError i.e handled error cases
if isinstance(e, cfgm_common.exceptions.HttpError):
bottle.abort(e.status_code, e.content)
else:
string_buf = StringIO()
cgitb_hook(file=string_buf, format="text")
err_msg = string_buf.getvalue()
self.config_log(err_msg, level=SandeshLevel.SYS_ERR)
raise
self.api_bottle.route(uri, method, handler_trap_exception)
# end route
def get_args(self):
return self._args
# end get_args
def get_server_ip(self):
ip_list = []
for i in netifaces.interfaces():
try:
if netifaces.AF_INET in netifaces.ifaddresses(i):
addr = netifaces.ifaddresses(i)[netifaces.AF_INET][0][
'addr']
if addr != '127.0.0.1' and addr not in ip_list:
ip_list.append(addr)
            except ValueError:
self.config_log("Skipping interface %s" % i,
level=SandeshLevel.SYS_DEBUG)
return ip_list
# end get_server_ip
def get_listen_ip(self):
return self._args.listen_ip_addr
# end get_listen_ip
def get_server_port(self):
return self._args.listen_port
# end get_server_port
def get_worker_id(self):
return int(self._args.worker_id)
# end get_worker_id
def get_pipe_start_app(self):
return self._pipe_start_app
# end get_pipe_start_app
def get_ifmap_health_check_interval(self):
return float(self._args.ifmap_health_check_interval)
# end get_ifmap_health_check_interval
def get_rabbit_health_check_interval(self):
return float(self._args.rabbit_health_check_interval)
# end get_rabbit_health_check_interval
def is_auth_disabled(self):
return self._args.auth is None
def is_admin_request(self):
if not self.is_multi_tenancy_set():
return True
env = bottle.request.headers.environ
for field in ('HTTP_X_API_ROLE', 'HTTP_X_ROLE'):
if field in env:
roles = env[field].split(',')
return self.cloud_admin_role in [x.lower() for x in roles]
return False
def get_auth_headers_from_token(self, request, token):
if self.is_auth_disabled() or not self.is_multi_tenancy_set():
return {}
return self._auth_svc.get_auth_headers_from_token(request, token)
# end get_auth_headers_from_token
# Check for the system created VN. Disallow such VN delete
def virtual_network_http_delete(self, id):
db_conn = self._db_conn
# if obj doesn't exist return early
try:
obj_type = db_conn.uuid_to_obj_type(id)
if obj_type != 'virtual_network':
raise cfgm_common.exceptions.HttpError(
404, 'No virtual-network object found for id %s' %(id))
vn_name = db_conn.uuid_to_fq_name(id)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'ID %s does not exist' %(id))
if (vn_name == cfgm_common.IP_FABRIC_VN_FQ_NAME or
vn_name == cfgm_common.LINK_LOCAL_VN_FQ_NAME):
raise cfgm_common.exceptions.HttpError(
409,
                'Cannot delete system-created default virtual-network ' + id)
super(VncApiServer, self).virtual_network_http_delete(id)
# end
@use_context
def homepage_http_get(self):
json_body = {}
json_links = []
# strip trailing '/' in url
url = get_request().url[:-1]
for link in self._homepage_links:
# strip trailing '/' in url
json_links.append(
{'link': link.to_dict(with_url=url)}
)
json_body = {"href": url, "links": json_links}
return json_body
# end homepage_http_get
def documentation_http_get(self, filename):
# ubuntu packaged path
doc_root = '/usr/share/doc/contrail-config/doc/contrail-config/html/'
if not os.path.exists(doc_root):
# centos packaged path
            doc_root = '/usr/share/doc/python-vnc_cfg_api_server/contrail-config/html/'
return bottle.static_file(
filename,
root=doc_root)
# end documentation_http_get
def obj_perms_http_get(self):
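        """Validate the X-User-Token header and return its token info.

        If a 'uuid' query parameter is supplied, the caller's permissions
        on that object are included under 'permissions'.
        """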
if 'HTTP_X_USER_TOKEN' not in get_request().environ:
raise cfgm_common.exceptions.HttpError(
400, 'User token needed for validation')
user_token = get_request().environ['HTTP_X_USER_TOKEN'].encode("ascii")
# get permissions in internal context
try:
orig_context = get_context()
orig_request = get_request()
b_req = bottle.BaseRequest(
{
'HTTP_X_AUTH_TOKEN': user_token,
'REQUEST_METHOD' : 'GET',
'bottle.app': orig_request.environ['bottle.app'],
})
i_req = context.ApiInternalRequest(
b_req.url, b_req.urlparts, b_req.environ, b_req.headers, None, None)
set_context(context.ApiContext(internal_req=i_req))
token_info = self._auth_svc.validate_user_token(get_request())
finally:
set_context(orig_context)
# roles in result['token_info']['access']['user']['roles']
if token_info:
result = {'token_info' : token_info}
if 'uuid' in get_request().query:
obj_uuid = get_request().query.uuid
result['permissions'] = self._permissions.obj_perms(get_request(), obj_uuid)
else:
            raise cfgm_common.exceptions.HttpError(403, "Permission denied")
return result
    # end obj_perms_http_get
def invalid_uuid(self, uuid):
        return self.re_uuid.match(uuid) is None
def invalid_access(self, access):
return type(access) is not int or access not in range(0,8)
# change ownership of an object
def obj_chown_http_post(self):
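        """Change the owner of an object.

        Expected JSON body (illustrative values):
            {"uuid": "<object-uuid>", "owner": "<tenant-uuid>"}
        The caller must hold RW permission on the object.
        """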
self._post_common(get_request(), None, None)
try:
obj_uuid = get_request().json['uuid']
owner = get_request().json['owner']
except Exception as e:
raise cfgm_common.exceptions.HttpError(400, str(e))
if self.invalid_uuid(obj_uuid) or self.invalid_uuid(owner):
raise cfgm_common.exceptions.HttpError(
400, "Bad Request, invalid object or owner id")
try:
obj_type = self._db_conn.uuid_to_obj_type(obj_uuid)
except NoIdError:
raise cfgm_common.exceptions.HttpError(400, 'Invalid object id')
# ensure user has RW permissions to object
perms = self._permissions.obj_perms(get_request(), obj_uuid)
        if 'RW' not in perms:
            raise cfgm_common.exceptions.HttpError(403, "Permission denied")
(ok, obj_dict) = self._db_conn.dbe_read(obj_type, {'uuid':obj_uuid},
obj_fields=['perms2'])
obj_dict['perms2']['owner'] = owner
self._db_conn.dbe_update(obj_type, {'uuid': obj_uuid}, obj_dict)
msg = "chown: %s owner set to %s" % (obj_uuid, owner)
self.config_log(msg, level=SandeshLevel.SYS_NOTICE)
return {}
#end obj_chown_http_post
# chmod for an object
def obj_chmod_http_post(self):
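        """Update perms2 of an object (chmod).

        JSON body carries 'uuid' plus any of 'owner', 'owner_access'
        (0-7), 'global_access' (0-7) and 'share' (list of
        {'tenant': <uuid>, 'tenant_access': <0-7>}). The caller must hold
        RW permission on the object.
        """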
self._post_common(get_request(), None, None)
try:
obj_uuid = get_request().json['uuid']
except Exception as e:
raise cfgm_common.exceptions.HttpError(400, str(e))
if self.invalid_uuid(obj_uuid):
raise cfgm_common.exceptions.HttpError(
400, "Bad Request, invalid object id")
try:
obj_type = self._db_conn.uuid_to_obj_type(obj_uuid)
except NoIdError:
raise cfgm_common.exceptions.HttpError(400, 'Invalid object id')
# ensure user has RW permissions to object
perms = self._permissions.obj_perms(get_request(), obj_uuid)
        if 'RW' not in perms:
            raise cfgm_common.exceptions.HttpError(403, "Permission denied")
request_params = get_request().json
owner = request_params.get('owner')
share = request_params.get('share')
owner_access = request_params.get('owner_access')
global_access = request_params.get('global_access')
(ok, obj_dict) = self._db_conn.dbe_read(obj_type, {'uuid':obj_uuid},
obj_fields=['perms2'])
obj_perms = obj_dict['perms2']
old_perms = '%s/%d %d %s' % (obj_perms['owner'],
obj_perms['owner_access'], obj_perms['global_access'],
['%s:%d' % (item['tenant'], item['tenant_access']) for item in obj_perms['share']])
if owner:
if self.invalid_uuid(owner):
raise cfgm_common.exceptions.HttpError(
400, "Bad Request, invalid owner")
obj_perms['owner'] = owner.replace('-','')
if owner_access is not None:
if self.invalid_access(owner_access):
raise cfgm_common.exceptions.HttpError(
400, "Bad Request, invalid owner_access value")
obj_perms['owner_access'] = owner_access
if share is not None:
try:
for item in share:
if self.invalid_uuid(item['tenant']) or self.invalid_access(item['tenant_access']):
raise cfgm_common.exceptions.HttpError(
400, "Bad Request, invalid share list")
except Exception as e:
raise cfgm_common.exceptions.HttpError(400, str(e))
obj_perms['share'] = share
if global_access is not None:
if self.invalid_access(global_access):
raise cfgm_common.exceptions.HttpError(
400, "Bad Request, invalid global_access value")
obj_perms['global_access'] = global_access
new_perms = '%s/%d %d %s' % (obj_perms['owner'],
obj_perms['owner_access'], obj_perms['global_access'],
['%s:%d' % (item['tenant'], item['tenant_access']) for item in obj_perms['share']])
self._db_conn.dbe_update(obj_type, {'uuid': obj_uuid}, obj_dict)
msg = "chmod: %s perms old=%s, new=%s" % (obj_uuid, old_perms, new_perms)
self.config_log(msg, level=SandeshLevel.SYS_NOTICE)
return {}
#end obj_chmod_http_post
def prop_collection_http_get(self):
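        """Read ListProperty/MapProperty fields of an object.

        Query parameters: 'uuid' (required), 'fields' (required,
        comma-separated property names) and optional 'position'.
        """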
if 'uuid' not in get_request().query:
raise cfgm_common.exceptions.HttpError(
400, 'Object uuid needed for property collection get')
obj_uuid = get_request().query.uuid
if 'fields' not in get_request().query:
raise cfgm_common.exceptions.HttpError(
400, 'Object fields needed for property collection get')
obj_fields = get_request().query.fields.split(',')
if 'position' in get_request().query:
fields_position = get_request().query.position
else:
fields_position = None
try:
obj_type = self._db_conn.uuid_to_obj_type(obj_uuid)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Object Not Found: ' + obj_uuid)
resource_class = self.get_resource_class(obj_type)
for obj_field in obj_fields:
if ((obj_field not in resource_class.prop_list_fields) and
(obj_field not in resource_class.prop_map_fields)):
                err_msg = '%s is neither "ListProperty" nor "MapProperty"' %(
                    obj_field)
raise cfgm_common.exceptions.HttpError(400, err_msg)
# request validations over
# common handling for all resource get
(ok, result) = self._get_common(get_request(), obj_uuid)
if not ok:
(code, msg) = result
self.config_object_error(
obj_uuid, None, None, 'prop_collection_http_get', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
try:
ok, result = self._db_conn.prop_collection_get(
obj_type, obj_uuid, obj_fields, fields_position)
if not ok:
self.config_object_error(
obj_uuid, None, None, 'prop_collection_http_get', result)
except NoIdError as e:
# Not present in DB
raise cfgm_common.exceptions.HttpError(404, str(e))
if not ok:
raise cfgm_common.exceptions.HttpError(500, result)
# check visibility
if (not result['id_perms'].get('user_visible', True) and
not self.is_admin_request()):
            result = 'This object is not visible by users: %s' % obj_uuid
            self.config_object_error(
                obj_uuid, None, None, 'prop_collection_http_get', result)
raise cfgm_common.exceptions.HttpError(404, result)
# Prepare response
del result['id_perms']
return result
# end prop_collection_http_get
def prop_collection_update_http_post(self):
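        """Apply add/modify/delete (list) or set/delete (map) operations
        to property collections. Illustrative request body:
            {"uuid": "<object-uuid>",
             "updates": [{"field": "<prop>", "operation": "add",
                          "value": {...}, "position": null}]}
        """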
self._post_common(get_request(), None, None)
request_params = get_request().json
# validate each requested operation
obj_uuid = request_params.get('uuid')
if not obj_uuid:
err_msg = 'Error: prop_collection_update needs obj_uuid'
raise cfgm_common.exceptions.HttpError(400, err_msg)
try:
obj_type = self._db_conn.uuid_to_obj_type(obj_uuid)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Object Not Found: ' + obj_uuid)
resource_class = self.get_resource_class(obj_type)
for req_param in request_params.get('updates') or []:
obj_field = req_param.get('field')
if obj_field in resource_class.prop_list_fields:
prop_coll_type = 'list'
elif obj_field in resource_class.prop_map_fields:
prop_coll_type = 'map'
else:
                err_msg = '%s is neither "ListProperty" nor "MapProperty"' %(
                    obj_field)
raise cfgm_common.exceptions.HttpError(400, err_msg)
            req_oper = (req_param.get('operation') or '').lower()
field_val = req_param.get('value')
            field_pos = req_param.get('position')
            if field_pos is not None:
                field_pos = str(field_pos)
if prop_coll_type == 'list':
if req_oper not in ('add', 'modify', 'delete'):
err_msg = 'Unsupported operation %s in request %s' %(
req_oper, json.dumps(req_param))
raise cfgm_common.exceptions.HttpError(400, err_msg)
                if ((req_oper == 'add') and field_val is None):
                    err_msg = 'Add needs field value in request %s' %(
                        json.dumps(req_param))
                    raise cfgm_common.exceptions.HttpError(400, err_msg)
                elif ((req_oper == 'modify') and
                        None in (field_val, field_pos)):
                    err_msg = 'Modify needs field value and position in request %s' %(
                        json.dumps(req_param))
                    raise cfgm_common.exceptions.HttpError(400, err_msg)
                elif ((req_oper == 'delete') and field_pos is None):
                    err_msg = 'Delete needs field position in request %s' %(
                        json.dumps(req_param))
                    raise cfgm_common.exceptions.HttpError(400, err_msg)
elif prop_coll_type == 'map':
if req_oper not in ('set', 'delete'):
err_msg = 'Unsupported operation %s in request %s' %(
req_oper, json.dumps(req_param))
raise cfgm_common.exceptions.HttpError(400, err_msg)
                if ((req_oper == 'set') and field_val is None):
                    err_msg = 'Set needs field value in request %s' %(
                        json.dumps(req_param))
                    raise cfgm_common.exceptions.HttpError(400, err_msg)
                elif ((req_oper == 'delete') and field_pos is None):
                    err_msg = 'Delete needs field position in request %s' %(
                        json.dumps(req_param))
                    raise cfgm_common.exceptions.HttpError(400, err_msg)
# Validations over. Invoke type specific hook and extension manager
try:
fq_name = self._db_conn.uuid_to_fq_name(obj_uuid)
(read_ok, read_result) = self._db_conn.dbe_read(
obj_type, {'uuid':obj_uuid})
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Object Not Found: '+obj_uuid)
except Exception as e:
read_ok = False
read_result = cfgm_common.utils.detailed_traceback()
if not read_ok:
self.config_object_error(
obj_uuid, None, obj_type, 'prop_collection_update', read_result)
raise cfgm_common.exceptions.HttpError(500, read_result)
# invoke the extension
try:
pre_func = 'pre_'+obj_type+'_update'
self._extension_mgrs['resourceApi'].map_method(pre_func, obj_uuid, {},
prop_collection_updates=request_params.get('updates'))
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In pre_%s_update an extension had error for %s' \
%(obj_type, request_params)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
# type-specific hook
r_class = self.get_resource_class(obj_type)
get_context().set_state('PRE_DBE_UPDATE')
(ok, pre_update_result) = r_class.pre_dbe_update(
obj_uuid, fq_name, {}, self._db_conn,
prop_collection_updates=request_params.get('updates'))
if not ok:
(code, msg) = pre_update_result
self.config_object_error(
obj_uuid, None, obj_type, 'prop_collection_update', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
# the actual db update
try:
get_context().set_state('DBE_UPDATE')
ok, update_result = self._db_conn.prop_collection_update(
obj_type, obj_uuid, request_params.get('updates'))
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'uuid ' + obj_uuid + ' not found')
if not ok:
(code, msg) = update_result
self.config_object_error(
obj_uuid, None, obj_type, 'prop_collection_update', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
# type-specific hook
get_context().set_state('POST_DBE_UPDATE')
(ok, post_update_result) = r_class.post_dbe_update(
obj_uuid, fq_name, {}, self._db_conn,
prop_collection_updates=request_params.get('updates'))
if not ok:
            (code, msg) = post_update_result
self.config_object_error(
obj_uuid, None, obj_type, 'prop_collection_update', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
# invoke the extension
try:
post_func = 'post_'+obj_type+'_update'
self._extension_mgrs['resourceApi'].map_method(
post_func, obj_uuid, {}, read_result,
prop_collection_updates=request_params.get('updates'))
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In post_%s_update an extension had error for %s' \
%(obj_type, request_params)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
apiConfig = VncApiCommon()
apiConfig.object_type = obj_type
apiConfig.identifier_name=':'.join(fq_name)
apiConfig.identifier_uuid = obj_uuid
apiConfig.operation = 'prop-collection-update'
try:
body = json.dumps(get_request().json)
except:
body = str(get_request().json)
apiConfig.body = body
self._set_api_audit_info(apiConfig)
log = VncApiConfigLog(api_log=apiConfig, sandesh=self._sandesh)
log.send(sandesh=self._sandesh)
# end prop_collection_update_http_post
def ref_update_http_post(self):
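        """Add or delete a reference between two objects.

        Illustrative request body:
            {"type": "virtual-network", "uuid": "<obj-uuid>",
             "ref-type": "network-ipam", "ref-uuid": "<ref-uuid>",
             "operation": "ADD", "attr": null}
        'ref-fq-name' may be given instead of 'ref-uuid'.
        """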
self._post_common(get_request(), None, None)
# grab fields
type = get_request().json.get('type')
ok, result = self._validate_resource_type(type)
if not ok:
raise cfgm_common.exceptions.HttpError(result[0], result[1])
res_type = result
res_class = self.get_resource_class(res_type)
obj_uuid = get_request().json.get('uuid')
ref_type = get_request().json.get('ref-type')
ok, result = self._validate_resource_type(ref_type)
if not ok:
raise cfgm_common.exceptions.HttpError(result[0], result[1])
ref_res_type = result
ref_class = self.get_resource_class(ref_res_type)
operation = get_request().json.get('operation')
ref_uuid = get_request().json.get('ref-uuid')
ref_fq_name = get_request().json.get('ref-fq-name')
attr = get_request().json.get('attr')
# validate fields
if None in (res_type, obj_uuid, ref_res_type, operation):
err_msg = 'Bad Request: type/uuid/ref-type/operation is null: '
err_msg += '%s, %s, %s, %s.' \
%(res_type, obj_uuid, ref_res_type, operation)
raise cfgm_common.exceptions.HttpError(400, err_msg)
operation = operation.upper()
if operation not in ['ADD', 'DELETE']:
err_msg = 'Bad Request: operation should be add or delete: %s' \
%(operation)
raise cfgm_common.exceptions.HttpError(400, err_msg)
if not ref_uuid and not ref_fq_name:
err_msg = 'Bad Request: ref-uuid or ref-fq-name must be specified'
raise cfgm_common.exceptions.HttpError(400, err_msg)
obj_type = res_class.object_type
ref_obj_type = ref_class.object_type
if not ref_uuid:
try:
ref_uuid = self._db_conn.fq_name_to_uuid(ref_obj_type, ref_fq_name)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Name ' + pformat(ref_fq_name) + ' not found')
# To verify existence of the reference being added
if operation == 'ADD':
try:
(read_ok, read_result) = self._db_conn.dbe_read(
ref_obj_type, {'uuid': ref_uuid}, obj_fields=['fq_name'])
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Object Not Found: ' + ref_uuid)
except Exception as e:
read_ok = False
read_result = cfgm_common.utils.detailed_traceback()
# To invoke type specific hook and extension manager
try:
            (read_ok, read_result) = self._db_conn.dbe_read(
                obj_type, {'uuid': obj_uuid})
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Object Not Found: '+obj_uuid)
except Exception as e:
read_ok = False
read_result = cfgm_common.utils.detailed_traceback()
if not read_ok:
self.config_object_error(obj_uuid, None, obj_type, 'ref_update', read_result)
raise cfgm_common.exceptions.HttpError(500, read_result)
obj_dict = copy.deepcopy(read_result)
# invoke the extension
try:
pre_func = 'pre_' + obj_type + '_update'
self._extension_mgrs['resourceApi'].map_method(pre_func, obj_uuid, obj_dict)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In pre_%s_update an extension had error for %s' \
%(obj_type, obj_dict)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
# type-specific hook
if res_class:
try:
fq_name = self._db_conn.uuid_to_fq_name(obj_uuid)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'UUID ' + obj_uuid + ' not found')
if operation == 'ADD':
if ref_obj_type+'_refs' not in obj_dict:
obj_dict[ref_obj_type+'_refs'] = []
obj_dict[ref_obj_type+'_refs'].append(
{'to':ref_fq_name, 'uuid': ref_uuid, 'attr':attr})
elif operation == 'DELETE':
for old_ref in obj_dict.get(ref_obj_type+'_refs', []):
if old_ref['to'] == ref_fq_name or old_ref['uuid'] == ref_uuid:
obj_dict[ref_obj_type+'_refs'].remove(old_ref)
break
(ok, put_result) = res_class.pre_dbe_update(
obj_uuid, fq_name, obj_dict, self._db_conn)
if not ok:
(code, msg) = put_result
self.config_object_error(obj_uuid, None, obj_type, 'ref_update', msg)
raise cfgm_common.exceptions.HttpError(code, msg)
# end if res_class
try:
self._db_conn.ref_update(obj_type, obj_uuid, ref_obj_type,
ref_uuid, {'attr': attr}, operation)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'uuid ' + obj_uuid + ' not found')
# invoke the extension
try:
post_func = 'post_' + obj_type + '_update'
self._extension_mgrs['resourceApi'].map_method(post_func, obj_uuid, obj_dict, read_result)
except RuntimeError:
# lack of registered extension leads to RuntimeError
pass
except Exception as e:
err_msg = 'In post_%s_update an extension had error for %s' \
%(obj_type, obj_dict)
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
apiConfig = VncApiCommon()
apiConfig.object_type = obj_type
fq_name = self._db_conn.uuid_to_fq_name(obj_uuid)
apiConfig.identifier_name=':'.join(fq_name)
apiConfig.identifier_uuid = obj_uuid
apiConfig.operation = 'ref-update'
try:
body = json.dumps(get_request().json)
except:
body = str(get_request().json)
apiConfig.body = body
self._set_api_audit_info(apiConfig)
log = VncApiConfigLog(api_log=apiConfig, sandesh=self._sandesh)
log.send(sandesh=self._sandesh)
return {'uuid': obj_uuid}
# end ref_update_http_post
def ref_relax_for_delete_http_post(self):
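        """Relax the reference from 'uuid' to 'ref-uuid' so the referenced
        object may later be deleted; both fields are required in the JSON
        body.
        """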
self._post_common(get_request(), None, None)
# grab fields
obj_uuid = get_request().json.get('uuid')
ref_uuid = get_request().json.get('ref-uuid')
# validate fields
if None in (obj_uuid, ref_uuid):
err_msg = 'Bad Request: Both uuid and ref-uuid should be specified: '
err_msg += '%s, %s.' %(obj_uuid, ref_uuid)
raise cfgm_common.exceptions.HttpError(400, err_msg)
try:
obj_type = self._db_conn.uuid_to_obj_type(obj_uuid)
self._db_conn.ref_relax_for_delete(obj_uuid, ref_uuid)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'uuid ' + obj_uuid + ' not found')
apiConfig = VncApiCommon()
apiConfig.object_type = obj_type
fq_name = self._db_conn.uuid_to_fq_name(obj_uuid)
apiConfig.identifier_name=':'.join(fq_name)
apiConfig.identifier_uuid = obj_uuid
apiConfig.operation = 'ref-relax-for-delete'
try:
body = json.dumps(get_request().json)
except:
body = str(get_request().json)
apiConfig.body = body
self._set_api_audit_info(apiConfig)
log = VncApiConfigLog(api_log=apiConfig, sandesh=self._sandesh)
log.send(sandesh=self._sandesh)
return {'uuid': obj_uuid}
# end ref_relax_for_delete_http_post
def fq_name_to_id_http_post(self):
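        """Translate a fully-qualified name to a uuid.

        JSON body carries 'type' and 'fq_name' (a list of name
        components); read permission on the object is enforced before the
        uuid is returned.
        """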
self._post_common(get_request(), None, None)
type = get_request().json.get('type')
ok, result = self._validate_resource_type(type)
if not ok:
raise cfgm_common.exceptions.HttpError(result[0], result[1])
res_type = result
r_class = self.get_resource_class(res_type)
obj_type = r_class.object_type
fq_name = get_request().json['fq_name']
try:
id = self._db_conn.fq_name_to_uuid(obj_type, fq_name)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Name ' + pformat(fq_name) + ' not found')
# ensure user has access to this id
ok, result = self._permissions.check_perms_read(bottle.request, id)
if not ok:
err_code, err_msg = result
raise cfgm_common.exceptions.HttpError(err_code, err_msg)
return {'uuid': id}
# end fq_name_to_id_http_post
def id_to_fq_name_http_post(self):
self._post_common(get_request(), None, None)
obj_uuid = get_request().json['uuid']
# ensure user has access to this id
ok, result = self._permissions.check_perms_read(get_request(), obj_uuid)
if not ok:
err_code, err_msg = result
raise cfgm_common.exceptions.HttpError(err_code, err_msg)
try:
fq_name = self._db_conn.uuid_to_fq_name(obj_uuid)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'UUID ' + obj_uuid + ' not found')
obj_type = self._db_conn.uuid_to_obj_type(obj_uuid)
res_type = self.get_resource_class(obj_type).resource_type
return {'fq_name': fq_name, 'type': res_type}
# end id_to_fq_name_http_post
def ifmap_to_id_http_post(self):
self._post_common(get_request(), None, None)
uuid = self._db_conn.ifmap_id_to_uuid(get_request().json['ifmap_id'])
return {'uuid': uuid}
# end ifmap_to_id_http_post
# Enables a user-agent to store and retrieve key-val pair
# TODO this should be done only for special/quantum plugin
def useragent_kv_http_post(self):
self._post_common(get_request(), None, None)
oper = get_request().json['operation']
key = get_request().json['key']
val = get_request().json.get('value', '')
# TODO move values to common
if oper == 'STORE':
self._db_conn.useragent_kv_store(key, val)
elif oper == 'RETRIEVE':
try:
result = self._db_conn.useragent_kv_retrieve(key)
return {'value': result}
except NoUserAgentKey:
raise cfgm_common.exceptions.HttpError(
404, "Unknown User-Agent key " + key)
elif oper == 'DELETE':
result = self._db_conn.useragent_kv_delete(key)
else:
raise cfgm_common.exceptions.HttpError(
404, "Invalid Operation " + oper)
# end useragent_kv_http_post
def db_check(self):
""" Check database for inconsistencies. No update to database """
check_result = self._db_conn.db_check()
return {'results': check_result}
# end db_check
def fetch_records(self):
""" Retrieve and return all records """
result = self._db_conn.db_read()
return {'results': result}
# end fetch_records
def start_profile(self):
#GreenletProfiler.start()
pass
# end start_profile
def stop_profile(self):
pass
#GreenletProfiler.stop()
#stats = GreenletProfiler.get_func_stats()
#self._profile_info = stats.print_all()
#return self._profile_info
# end stop_profile
def get_profile_info(self):
return self._profile_info
# end get_profile_info
def get_resource_class(self, type_str):
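        """Return the server-side resource class for a type name.

        Looks up an explicit <Type>Server class in vnc_cfg_types; when
        none exists, synthesizes a placeholder class derived from Resource
        and the generated common class, and caches it under both its
        object_type and resource_type keys.
        """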
if type_str in self._resource_classes:
return self._resource_classes[type_str]
common_name = cfgm_common.utils.CamelCase(type_str)
server_name = '%sServer' % common_name
try:
resource_class = getattr(vnc_cfg_types, server_name)
except AttributeError:
common_class = cfgm_common.utils.str_to_class(common_name,
__name__)
if common_class is None:
return None
# Create Placeholder classes derived from Resource, <Type> so
# resource_class methods can be invoked in CRUD methods without
# checking for None
resource_class = type(
str(server_name),
(vnc_cfg_types.Resource, common_class, object),
{})
resource_class.server = self
self._resource_classes[resource_class.object_type] = resource_class
self._resource_classes[resource_class.resource_type] = resource_class
return resource_class
# end get_resource_class
def list_bulk_collection_http_post(self):
""" List collection when requested ids don't fit in query params."""
type = get_request().json.get('type') # e.g. virtual-network
ok, result = self._validate_resource_type(type)
if not ok:
raise cfgm_common.exceptions.HttpError(result[0], result[1])
resource_type = result
r_class = self.get_resource_class(resource_type)
if not r_class:
raise cfgm_common.exceptions.HttpError(400,
"Bad Request, Unknown type %s in POST body" % (resource_type))
try:
parent_uuids = get_request().json['parent_id'].split(',')
except KeyError:
parent_uuids = None
try:
back_ref_uuids = get_request().json['back_ref_id'].split(',')
except KeyError:
back_ref_uuids = None
try:
obj_uuids = get_request().json['obj_uuids'].split(',')
except KeyError:
obj_uuids = None
is_count = get_request().json.get('count', False)
is_detail = get_request().json.get('detail', False)
try:
filters = utils.get_filters(get_request().json.get('filters'))
except Exception as e:
raise cfgm_common.exceptions.HttpError(
                400, 'Invalid filter %s' % get_request().json.get('filters'))
req_fields = get_request().json.get('fields', [])
if req_fields:
req_fields = req_fields.split(',')
return self._list_collection(r_class.object_type, parent_uuids,
back_ref_uuids, obj_uuids, is_count,
is_detail, filters, req_fields)
# end list_bulk_collection_http_post
# Private Methods
def _parse_args(self, args_str):
'''
Eg. python vnc_cfg_api_server.py --ifmap_server_ip 192.168.1.17
--ifmap_server_port 8443
--ifmap_username test
--ifmap_password test
--cassandra_server_list
10.1.2.3:9160 10.1.2.4:9160
--redis_server_ip 127.0.0.1
--redis_server_port 6382
--collectors 127.0.0.1:8086
--http_server_port 8090
--listen_ip_addr 127.0.0.1
--listen_port 8082
--admin_port 8095
--region_name RegionOne
--log_local
--log_level SYS_DEBUG
--logging_level DEBUG
--logging_conf <logger-conf-file>
--log_category test
--log_file <stdout>
--trace_file /var/log/contrail/vnc_openstack.err
--use_syslog
--syslog_facility LOG_USER
--disc_server_ip 127.0.0.1
--disc_server_port 5998
--worker_id 1
--rabbit_max_pending_updates 4096
--rabbit_health_check_interval 120.0
--cluster_id <testbed-name>
[--auth keystone]
[--ifmap_server_loc
/home/contrail/source/ifmap-server/]
[--default_encoding ascii ]
--ifmap_health_check_interval 60
'''
self._args, _ = utils.parse_args(args_str)
# end _parse_args
# sigchld handler is currently not engaged. See comment @sigchld
def sigchld_handler(self):
# DB interface initialization
self._db_connect(reset_config=False)
self._db_init_entries()
# end sigchld_handler
    def sigterm_handler(self):
        sys.exit()
def _load_extensions(self):
try:
conf_sections = self._args.config_sections
self._extension_mgrs['resync'] = ExtensionManager(
'vnc_cfg_api.resync', api_server_ip=self._args.listen_ip_addr,
api_server_port=self._args.listen_port,
conf_sections=conf_sections, sandesh=self._sandesh)
self._extension_mgrs['resourceApi'] = ExtensionManager(
'vnc_cfg_api.resourceApi',
propagate_map_exceptions=True,
api_server_ip=self._args.listen_ip_addr,
api_server_port=self._args.listen_port,
conf_sections=conf_sections, sandesh=self._sandesh)
self._extension_mgrs['neutronApi'] = ExtensionManager(
'vnc_cfg_api.neutronApi',
api_server_ip=self._args.listen_ip_addr,
api_server_port=self._args.listen_port,
conf_sections=conf_sections, sandesh=self._sandesh,
api_server_obj=self)
except Exception as e:
err_msg = cfgm_common.utils.detailed_traceback()
self.config_log("Exception in extension load: %s" %(err_msg),
level=SandeshLevel.SYS_ERR)
# end _load_extensions
def _db_connect(self, reset_config):
ifmap_ip = self._args.ifmap_server_ip
ifmap_port = self._args.ifmap_server_port
user = self._args.ifmap_username
passwd = self._args.ifmap_password
cass_server_list = self._args.cassandra_server_list
redis_server_ip = self._args.redis_server_ip
redis_server_port = self._args.redis_server_port
zk_server = self._args.zk_server_ip
rabbit_servers = self._args.rabbit_server
rabbit_port = self._args.rabbit_port
rabbit_user = self._args.rabbit_user
rabbit_password = self._args.rabbit_password
rabbit_vhost = self._args.rabbit_vhost
rabbit_ha_mode = self._args.rabbit_ha_mode
cassandra_user = self._args.cassandra_user
cassandra_password = self._args.cassandra_password
cred = None
if cassandra_user is not None and cassandra_password is not None:
cred = {'username':cassandra_user,'password':cassandra_password}
self._db_conn = VncDbClient(
self, ifmap_ip, ifmap_port, user, passwd, cass_server_list,
rabbit_servers, rabbit_port, rabbit_user, rabbit_password,
rabbit_vhost, rabbit_ha_mode, reset_config, zk_server,
self._args.cluster_id, cassandra_credential=cred,
rabbit_use_ssl=self._args.rabbit_use_ssl,
kombu_ssl_version=self._args.kombu_ssl_version,
kombu_ssl_keyfile= self._args.kombu_ssl_keyfile,
kombu_ssl_certfile=self._args.kombu_ssl_certfile,
kombu_ssl_ca_certs=self._args.kombu_ssl_ca_certs)
# end _db_connect
def _ensure_id_perms_present(self, obj_uuid, obj_dict):
"""
Called at resource creation to ensure that id_perms is present in obj
"""
# retrieve object and permissions
id_perms = self._get_default_id_perms()
        if obj_dict.get('id_perms') is None:
            # on resource creation, install the defaults; on update,
            # leave the stored id_perms untouched
            if obj_uuid is None:
                obj_dict['id_perms'] = id_perms
            return
# retrieve the previous version of the id_perms
# from the database and update the id_perms with
# them.
if obj_uuid is not None:
try:
old_id_perms = self._db_conn.uuid_to_obj_perms(obj_uuid)
for field, value in old_id_perms.items():
if value is not None:
id_perms[field] = value
except NoIdError:
pass
# not all fields can be updated
if obj_uuid:
field_list = ['enable', 'description']
else:
field_list = ['enable', 'description', 'user_visible', 'creator']
# Start from default and update from obj_dict
req_id_perms = obj_dict['id_perms']
for key in field_list:
if key in req_id_perms:
id_perms[key] = req_id_perms[key]
# TODO handle perms present in req_id_perms
obj_dict['id_perms'] = id_perms
# end _ensure_id_perms_present
def _get_default_id_perms(self):
id_perms = copy.deepcopy(Provision.defaults.perms)
id_perms_json = json.dumps(id_perms, default=lambda o: dict((k, v)
for k, v in o.__dict__.iteritems()))
id_perms_dict = json.loads(id_perms_json)
return id_perms_dict
# end _get_default_id_perms
def _ensure_perms2_present(self, obj_type, obj_uuid, obj_dict,
project_id=None):
"""
        Called at resource creation to ensure that perms2 is present in obj
"""
# retrieve object and permissions
perms2 = self._get_default_perms2()
# set ownership of object to creator tenant
if obj_type == 'project' and 'uuid' in obj_dict:
perms2['owner'] = str(obj_dict['uuid']).replace('-','')
elif project_id:
perms2['owner'] = project_id
if (('perms2' not in obj_dict) or
(obj_dict['perms2'] is None)):
# Resource creation
if obj_uuid is None:
obj_dict['perms2'] = perms2
return (True, "")
# Resource already exist
try:
obj_dict['perms2'] = self._db_conn.uuid_to_obj_perms2(obj_uuid)
except NoIdError:
obj_dict['perms2'] = perms2
return (True, "")
# retrieve the previous version of the perms2
# from the database and update the perms2 with
# them.
if obj_uuid is not None:
try:
old_perms2 = self._db_conn.uuid_to_obj_perms2(obj_uuid)
for field, value in old_perms2.items():
if value is not None:
perms2[field] = value
except NoIdError:
pass
# Start from default and update from obj_dict
req_perms2 = obj_dict['perms2']
for key in req_perms2:
perms2[key] = req_perms2[key]
# TODO handle perms2 present in req_perms2
obj_dict['perms2'] = perms2
# ensure is_shared and global_access are consistent
shared = obj_dict.get('is_shared', None)
gaccess = obj_dict['perms2'].get('global_access', None)
if gaccess is not None and shared is not None and shared != (gaccess != 0):
error = "Inconsistent is_shared (%s a) and global_access (%s)" % (shared, gaccess)
return (False, (400, error))
return (True, "")
# end _ensure_perms2_present
def _get_default_perms2(self):
perms2 = copy.deepcopy(Provision.defaults.perms2)
perms2_json = json.dumps(perms2, default=lambda o: dict((k, v)
for k, v in o.__dict__.iteritems()))
perms2_dict = json.loads(perms2_json)
return perms2_dict
# end _get_default_perms2
def _db_init_entries(self):
# create singleton defaults if they don't exist already in db
glb_sys_cfg = self._create_singleton_entry(
GlobalSystemConfig(autonomous_system=64512,
config_version=CONFIG_VERSION))
def_domain = self._create_singleton_entry(Domain())
ip_fab_vn = self._create_singleton_entry(
VirtualNetwork(cfgm_common.IP_FABRIC_VN_FQ_NAME[-1]))
self._create_singleton_entry(
RoutingInstance('__default__', ip_fab_vn,
routing_instance_is_default=True))
link_local_vn = self._create_singleton_entry(
VirtualNetwork(cfgm_common.LINK_LOCAL_VN_FQ_NAME[-1]))
self._create_singleton_entry(
RoutingInstance('__link_local__', link_local_vn,
routing_instance_is_default=True))
try:
self._create_singleton_entry(
RoutingInstance('default-virtual-network',
routing_instance_is_default=True))
except Exception as e:
            self.config_log('error while creating primary routing instance '
                            'for default-virtual-network: ' + str(e),
level=SandeshLevel.SYS_NOTICE)
self._create_singleton_entry(DiscoveryServiceAssignment())
self._create_singleton_entry(GlobalQosConfig())
if int(self._args.worker_id) == 0:
self._db_conn.db_resync()
# make default ipam available across tenants for backward compatability
obj_type = 'network_ipam'
fq_name = ['default-domain', 'default-project', 'default-network-ipam']
obj_uuid = self._db_conn.fq_name_to_uuid(obj_type, fq_name)
(ok, obj_dict) = self._db_conn.dbe_read(obj_type, {'uuid':obj_uuid},
obj_fields=['perms2'])
obj_dict['perms2']['global_access'] = PERMS_X
self._db_conn.dbe_update(obj_type, {'uuid': obj_uuid}, obj_dict)
# end _db_init_entries
# generate default rbac group rule
def _create_default_rbac_rule(self):
obj_type = 'api_access_list'
fq_name = ['default-global-system-config', 'default-api-access-list']
try:
id = self._db_conn.fq_name_to_uuid(obj_type, fq_name)
return
except NoIdError:
pass
# allow full access to cloud admin
rbac_rules = [
{
'rule_object':'fqname-to-id',
'rule_field': '',
'rule_perms': [{'role_name':'*', 'role_crud':'CRUD'}]
},
{
'rule_object':'id-to-fqname',
'rule_field': '',
'rule_perms': [{'role_name':'*', 'role_crud':'CRUD'}]
},
{
'rule_object':'useragent-kv',
'rule_field': '',
'rule_perms': [{'role_name':'*', 'role_crud':'CRUD'}]
},
{
'rule_object':'documentation',
'rule_field': '',
'rule_perms': [{'role_name':'*', 'role_crud':'R'}]
},
{
'rule_object':'/',
'rule_field': '',
'rule_perms': [{'role_name':'*', 'role_crud':'R'}]
},
]
rge = RbacRuleEntriesType([])
for rule in rbac_rules:
rule_perms = [RbacPermType(role_name=p['role_name'], role_crud=p['role_crud']) for p in rule['rule_perms']]
rbac_rule = RbacRuleType(rule_object=rule['rule_object'],
rule_field=rule['rule_field'], rule_perms=rule_perms)
rge.add_rbac_rule(rbac_rule)
rge_dict = rge.exportDict('')
glb_rbac_cfg = ApiAccessList(parent_type='global-system-config',
fq_name=fq_name, api_access_list_entries = rge_dict)
try:
self._create_singleton_entry(glb_rbac_cfg)
except Exception as e:
err_msg = 'Error creating default api access list object'
err_msg += cfgm_common.utils.detailed_traceback()
self.config_log(err_msg, level=SandeshLevel.SYS_ERR)
# end _create_default_rbac_rule
def _resync_domains_projects(self, ext):
if hasattr(ext.obj, 'resync_domains_projects'):
ext.obj.resync_domains_projects()
# end _resync_domains_projects
def _create_singleton_entry(self, singleton_obj):
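        """Create a singleton config object if it is not already in the
        DB, migrating any cassandra-only fq-name mapping into zookeeper
        first, then create its default children.
        """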
s_obj = singleton_obj
obj_type = s_obj.object_type
fq_name = s_obj.get_fq_name()
# TODO remove backward compat create mapping in zk
# for singleton START
try:
cass_uuid = self._db_conn._cassandra_db.fq_name_to_uuid(obj_type, fq_name)
try:
zk_uuid = self._db_conn.fq_name_to_uuid(obj_type, fq_name)
except NoIdError:
# doesn't exist in zookeeper but does so in cassandra,
# migrate this info to zookeeper
self._db_conn._zk_db.create_fq_name_to_uuid_mapping(obj_type, fq_name, str(cass_uuid))
except NoIdError:
# doesn't exist in cassandra as well as zookeeper, proceed normal
pass
# TODO backward compat END
# create if it doesn't exist yet
try:
id = self._db_conn.fq_name_to_uuid(obj_type, fq_name)
except NoIdError:
obj_dict = s_obj.serialize_to_json()
obj_dict['id_perms'] = self._get_default_id_perms()
obj_dict['perms2'] = self._get_default_perms2()
(ok, result) = self._db_conn.dbe_alloc(obj_type, obj_dict)
obj_ids = result
# For virtual networks, allocate an ID
if obj_type == 'virtual_network':
vn_id = self._db_conn._zk_db.alloc_vn_id(
s_obj.get_fq_name_str())
obj_dict['virtual_network_network_id'] = vn_id
self._db_conn.dbe_create(obj_type, obj_ids, obj_dict)
self.create_default_children(obj_type, s_obj)
return s_obj
# end _create_singleton_entry
def _list_collection(self, obj_type, parent_uuids=None,
back_ref_uuids=None, obj_uuids=None,
is_count=False, is_detail=False, filters=None,
req_fields=None):
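        """Shared implementation for list GETs and bulk list POSTs.

        Merges objects shared with the caller's tenant into the owned
        results, honors is_count/is_detail, and filters out entries the
        caller is not permitted to read.
        """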
r_class = self.get_resource_class(obj_type)
resource_type = r_class.resource_type
(ok, result) = self._db_conn.dbe_list(obj_type,
parent_uuids, back_ref_uuids, obj_uuids, is_count,
filters)
if not ok:
self.config_object_error(None, None, '%ss' %(obj_type),
'dbe_list', result)
raise cfgm_common.exceptions.HttpError(404, result)
# If only counting, return early
if is_count:
return {'%ss' %(resource_type): {'count': result}}
# include objects shared with tenant
env = get_request().headers.environ
tenant_uuid = env.get('HTTP_X_PROJECT_ID', None)
shares = self._db_conn.get_shared_objects(obj_type, tenant_uuid) if tenant_uuid else []
owned_objs = set([obj_uuid for (fq_name, obj_uuid) in result])
for (obj_uuid, obj_perm) in shares:
# skip owned objects already included in results
if obj_uuid in owned_objs:
continue
try:
fq_name = self._db_conn.uuid_to_fq_name(obj_uuid)
result.append((fq_name, obj_uuid))
except NoIdError:
# uuid no longer valid. Delete?
pass
fq_names_uuids = result
obj_dicts = []
if not is_detail:
if not self.is_admin_request():
obj_ids_list = [{'uuid': obj_uuid}
for _, obj_uuid in fq_names_uuids]
obj_fields = [u'id_perms']
if req_fields:
obj_fields = obj_fields + req_fields
(ok, result) = self._db_conn.dbe_read_multi(
obj_type, obj_ids_list, obj_fields)
if not ok:
raise cfgm_common.exceptions.HttpError(404, result)
for obj_result in result:
if obj_result['id_perms'].get('user_visible', True):
# skip items not authorized
(ok, status) = self._permissions.check_perms_read(
get_request(), obj_result['uuid'],
obj_result['id_perms'])
if not ok and status[0] == 403:
continue
obj_dict = {}
obj_dict['uuid'] = obj_result['uuid']
obj_dict['href'] = self.generate_url(
resource_type, obj_result['uuid'])
obj_dict['fq_name'] = obj_result['fq_name']
                        for field in req_fields or []:
try:
obj_dict[field] = obj_result[field]
except KeyError:
pass
obj_dicts.append(obj_dict)
else: # admin
obj_results = {}
if req_fields:
obj_ids_list = [{'uuid': obj_uuid}
for _, obj_uuid in fq_names_uuids]
(ok, result) = self._db_conn.dbe_read_multi(
obj_type, obj_ids_list, req_fields)
if ok:
obj_results = dict((elem['uuid'], elem)
for elem in result)
for fq_name, obj_uuid in fq_names_uuids:
obj_dict = {}
obj_dict['uuid'] = obj_uuid
obj_dict['href'] = self.generate_url(resource_type,
obj_uuid)
obj_dict['fq_name'] = fq_name
for field in req_fields or []:
try:
obj_dict[field] = obj_results[obj_uuid][field]
except KeyError:
pass
obj_dicts.append(obj_dict)
else: #detail
obj_ids_list = [{'uuid': obj_uuid}
for _, obj_uuid in fq_names_uuids]
obj_class = self.get_resource_class(obj_type)
obj_fields = list(obj_class.prop_fields) + \
list(obj_class.ref_fields)
if req_fields:
obj_fields.extend(req_fields)
(ok, result) = self._db_conn.dbe_read_multi(
obj_type, obj_ids_list, obj_fields)
if not ok:
raise cfgm_common.exceptions.HttpError(404, result)
for obj_result in result:
obj_dict = {}
obj_dict['name'] = obj_result['fq_name'][-1]
obj_dict['href'] = self.generate_url(resource_type,
obj_result['uuid'])
obj_dict.update(obj_result)
if 'id_perms' not in obj_dict:
# It is possible that the object was deleted but then received
# an update. Ignore it for now; in the future such stale
# objects should be cleaned up.
continue
if (obj_dict['id_perms'].get('user_visible', True) or
self.is_admin_request()):
# skip items not authorized
(ok, status) = self._permissions.check_perms_read(
get_request(), obj_result['uuid'],
obj_result['id_perms'])
if not ok and status[0] == 403:
continue
obj_dicts.append({resource_type: obj_dict})
return {'%ss' %(resource_type): obj_dicts}
# end _list_collection
def get_db_connection(self):
return self._db_conn
# end get_db_connection
def generate_url(self, resource_type, obj_uuid):
try:
url_parts = get_request().urlparts
return '%s://%s/%s/%s'\
% (url_parts.scheme, url_parts.netloc, resource_type, obj_uuid)
except Exception:
return '%s/%s/%s' % (self._base_url, resource_type, obj_uuid)
# end generate_url
def config_object_error(self, id, fq_name_str, obj_type,
operation, err_str):
apiConfig = VncApiCommon()
if obj_type is not None:
apiConfig.object_type = obj_type
apiConfig.identifier_name = fq_name_str
apiConfig.identifier_uuid = id
apiConfig.operation = operation
if err_str:
apiConfig.error = "%s:%s" % (obj_type, err_str)
self._set_api_audit_info(apiConfig)
log = VncApiConfigLog(api_log=apiConfig, sandesh=self._sandesh)
log.send(sandesh=self._sandesh)
# end config_object_error
def config_log(self, msg_str, level=SandeshLevel.SYS_INFO):
errcls = {
SandeshLevel.SYS_DEBUG: VncApiDebug,
SandeshLevel.SYS_INFO: VncApiInfo,
SandeshLevel.SYS_NOTICE: VncApiNotice,
SandeshLevel.SYS_ERR: VncApiError,
}
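# pick the sandesh message class matching the severity (default: error)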
errcls.get(level, VncApiError)(
api_msg=msg_str, level=level, sandesh=self._sandesh).send(
sandesh=self._sandesh)
# end config_log
def _set_api_audit_info(self, apiConfig):
apiConfig.url = get_request().url
apiConfig.remote_ip = get_request().headers.get('Host')
useragent = get_request().headers.get('X-Contrail-Useragent')
if not useragent:
useragent = get_request().headers.get('User-Agent')
apiConfig.useragent = useragent
apiConfig.user = get_request().headers.get('X-User-Name')
apiConfig.project = get_request().headers.get('X-Project-Name')
apiConfig.domain = get_request().headers.get('X-Domain-Name', 'None')
if apiConfig.domain.lower() == 'none':
apiConfig.domain = 'default-domain'
if int(get_request().headers.get('Content-Length', 0)) > 0:
try:
body = json.dumps(get_request().json)
except Exception:
body = str(get_request().json)
apiConfig.body = body
# end _set_api_audit_info
# uuid is parent's for collections
def _http_get_common(self, request, uuid=None):
# TODO check api + resource perms etc.
if self.is_multi_tenancy_set() and uuid:
if isinstance(uuid, list):
for u_id in uuid:
ok, result = self._permissions.check_perms_read(request,
u_id)
if not ok:
return ok, result
else:
return self._permissions.check_perms_read(request, uuid)
return (True, '')
# end _http_get_common
def _http_put_common(self, request, obj_type, obj_uuid, obj_fq_name,
obj_dict):
# If not connected to zookeeper do not allow operations that
# causes the state change
if not self._db_conn._zk_db.is_connected():
return (False,
(503, "Not connected to zookeeper. Not able to perform requested action"))
# If there are too many pending updates to rabbit, do not allow
# operations that cause state change
npending = self._db_conn.dbe_oper_publish_pending()
if (npending >= int(self._args.rabbit_max_pending_updates)):
err_str = str(MaxRabbitPendingError(npending))
return (False, (500, err_str))
if obj_dict:
fq_name_str = ":".join(obj_fq_name)
# TODO keep _id_perms.uuid_xxlong immutable in future
# dsetia - check with ajay regarding comment above
# if 'id_perms' in obj_dict:
# del obj_dict['id_perms']
if 'id_perms' in obj_dict and obj_dict['id_perms']['uuid']:
if not self._db_conn.match_uuid(obj_dict, obj_uuid):
log_msg = 'UUID mismatch from %s:%s' \
% (request.environ['REMOTE_ADDR'],
request.environ['HTTP_USER_AGENT'])
self.config_object_error(
obj_uuid, fq_name_str, obj_type, 'put', log_msg)
self._db_conn.set_uuid(obj_type, obj_dict,
uuid.UUID(obj_uuid),
do_lock=False)
# TODO remove this when the generator will be adapted to
# be consistent with the post method
# Ensure object has at least default permissions set
self._ensure_id_perms_present(obj_uuid, obj_dict)
apiConfig = VncApiCommon()
apiConfig.object_type = obj_type
apiConfig.identifier_name = fq_name_str
apiConfig.identifier_uuid = obj_uuid
apiConfig.operation = 'put'
self._set_api_audit_info(apiConfig)
log = VncApiConfigLog(api_log=apiConfig,
sandesh=self._sandesh)
log.send(sandesh=self._sandesh)
# TODO check api + resource perms etc.
if self.is_multi_tenancy_set():
return self._permissions.check_perms_write(request, obj_uuid)
return (True, '')
# end _http_put_common
# parent_type needed for perms check. None for derived objects (eg.
# routing-instance)
def _http_delete_common(self, request, obj_type, uuid, parent_type):
# If not connected to zookeeper do not allow operations that
# causes the state change
if not self._db_conn._zk_db.is_connected():
return (False,
(503, "Not connected to zookeeper. Not able to perform requested action"))
# If there are too many pending updates to rabbit, do not allow
# operations that cause state change
npending = self._db_conn.dbe_oper_publish_pending()
if (npending >= int(self._args.rabbit_max_pending_updates)):
err_str = str(MaxRabbitPendingError(npending))
return (False, (500, err_str))
fq_name = self._db_conn.uuid_to_fq_name(uuid)
apiConfig = VncApiCommon()
apiConfig.object_type = obj_type
apiConfig.identifier_name = ':'.join(fq_name)
apiConfig.identifier_uuid = uuid
apiConfig.operation = 'delete'
self._set_api_audit_info(apiConfig)
log = VncApiConfigLog(api_log=apiConfig, sandesh=self._sandesh)
log.send(sandesh=self._sandesh)
# TODO check api + resource perms etc.
if not self.is_multi_tenancy_set() or not parent_type:
return (True, '')
"""
Validate parent allows write access. Implicitly trust
parent info in the object since coming from our DB.
"""
parent_fq_name = fq_name[:-1]
try:
parent_uuid = self._db_conn.fq_name_to_uuid(
parent_type, parent_fq_name)
except NoIdError:
# parent uuid could be null for derived resources such as
# routing-instance
return (True, '')
return self._permissions.check_perms_write(request, parent_uuid)
# end _http_delete_common
def _http_post_validate(self, obj_type=None, obj_dict=None):
if not obj_dict:
return
def _check_field_present(fname):
fval = obj_dict.get(fname)
if not fval:
raise cfgm_common.exceptions.HttpError(
400, "Bad Request, no %s in POST body" %(fname))
return fval
fq_name = _check_field_present('fq_name')
# well-formed name checks
if illegal_xml_chars_RE.search(fq_name[-1]):
raise cfgm_common.exceptions.HttpError(400,
"Bad Request, name has illegal xml characters")
if obj_type == 'route_target':
invalid_chars = self._INVALID_NAME_CHARS - set(':')
else:
invalid_chars = self._INVALID_NAME_CHARS
if any((c in invalid_chars) for c in fq_name[-1]):
raise cfgm_common.exceptions.HttpError(400,
"Bad Request, name has one of invalid chars %s"
%(invalid_chars))
# end _http_post_validate
def _http_post_common(self, request, obj_type, obj_dict):
# If not connected to zookeeper do not allow operations that
# causes the state change
if not self._db_conn._zk_db.is_connected():
return (False,
(503, "Not connected to zookeeper. Not able to perform requested action"))
if not obj_dict:
# TODO check api + resource perms etc.
return (True, None)
# If there are too many pending updates to rabbit, do not allow
# operations that cause state change
npending = self._db_conn.dbe_oper_publish_pending()
if (npending >= int(self._args.rabbit_max_pending_updates)):
err_str = str(MaxRabbitPendingError(npending))
return (False, (500, err_str))
# Fail if object exists already
try:
obj_uuid = self._db_conn.fq_name_to_uuid(
obj_type, obj_dict['fq_name'])
raise cfgm_common.exceptions.HttpError(
409, '' + pformat(obj_dict['fq_name']) +
' already exists with uuid: ' + obj_uuid)
except NoIdError:
pass
# Ensure object has at least default permissions set
self._ensure_id_perms_present(None, obj_dict)
self._ensure_perms2_present(obj_type, None, obj_dict,
request.headers.environ.get('HTTP_X_PROJECT_ID', None))
# TODO check api + resource perms etc.
uuid_in_req = obj_dict.get('uuid', None)
# Set the display name
if (('display_name' not in obj_dict) or
(obj_dict['display_name'] is None)):
obj_dict['display_name'] = obj_dict['fq_name'][-1]
fq_name_str = ":".join(obj_dict['fq_name'])
apiConfig = VncApiCommon()
apiConfig.object_type = obj_type
apiConfig.identifier_name = fq_name_str
apiConfig.identifier_uuid = uuid_in_req
apiConfig.operation = 'post'
try:
body = json.dumps(request.json)
except Exception:
body = str(request.json)
apiConfig.body = body
if uuid_in_req:
try:
if uuid_in_req != str(uuid.UUID(uuid_in_req)):
raise ValueError()
except (ValueError, AttributeError, TypeError):
bottle.abort(400, 'Invalid UUID format: ' + uuid_in_req)
try:
fq_name = self._db_conn.uuid_to_fq_name(uuid_in_req)
raise cfgm_common.exceptions.HttpError(
409, uuid_in_req + ' already exists with fq_name: ' +
pformat(fq_name))
except NoIdError:
pass
apiConfig.identifier_uuid = uuid_in_req
self._set_api_audit_info(apiConfig)
log = VncApiConfigLog(api_log=apiConfig, sandesh=self._sandesh)
log.send(sandesh=self._sandesh)
return (True, uuid_in_req)
# end _http_post_common
def reset(self):
# cleanup internal state/in-flight operations
if self._db_conn:
self._db_conn.reset()
# end reset
# allocate block of IP addresses from VN. Subnet info expected in request
# body
def vn_ip_alloc_http_post(self, id):
try:
vn_fq_name = self._db_conn.uuid_to_fq_name(id)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Virtual Network ' + id + ' not found!')
# expected format {"subnet_list" : "2.1.1.0/24", "count" : 4}
req_dict = get_request().json
count = req_dict.get('count', 1)
subnet = req_dict.get('subnet')
family = req_dict.get('family')
try:
result = vnc_cfg_types.VirtualNetworkServer.ip_alloc(
vn_fq_name, subnet, count, family)
except vnc_addr_mgmt.AddrMgmtSubnetUndefined as e:
raise cfgm_common.exceptions.HttpError(404, str(e))
except vnc_addr_mgmt.AddrMgmtSubnetExhausted as e:
raise cfgm_common.exceptions.HttpError(409, str(e))
return result
# end vn_ip_alloc_http_post
# free block of ip addresses to subnet
def vn_ip_free_http_post(self, id):
try:
vn_fq_name = self._db_conn.uuid_to_fq_name(id)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Virtual Network ' + id + ' not found!')
"""
{
"subnet" : "2.1.1.0/24",
"ip_addr": [ "2.1.1.239", "2.1.1.238", "2.1.1.237", "2.1.1.236" ]
}
"""
req_dict = get_request().json
ip_list = req_dict.get('ip_addr', [])
subnet = req_dict.get('subnet')
result = vnc_cfg_types.VirtualNetworkServer.ip_free(
vn_fq_name, subnet, ip_list)
return result
# end vn_ip_free_http_post
# return no. of IP addresses from VN/Subnet
def vn_subnet_ip_count_http_post(self, id):
try:
vn_fq_name = self._db_conn.uuid_to_fq_name(id)
except NoIdError:
raise cfgm_common.exceptions.HttpError(
404, 'Virtual Network ' + id + ' not found!')
# expected format {"subnet_list" : ["2.1.1.0/24", "1.1.1.0/24"]
req_dict = get_request().json
try:
(ok, result) = self._db_conn.dbe_read('virtual_network', {'uuid': id})
except NoIdError as e:
raise cfgm_common.exceptions.HttpError(404, str(e))
except Exception as e:
ok = False
result = cfgm_common.utils.detailed_traceback()
if not ok:
raise cfgm_common.exceptions.HttpError(500, result)
obj_dict = result
subnet_list = req_dict.get('subnet_list', [])
result = vnc_cfg_types.VirtualNetworkServer.subnet_ip_count(
vn_fq_name, subnet_list)
return result
# end vn_subnet_ip_count_http_post
def set_mt(self, multi_tenancy):
pipe_start_app = self.get_pipe_start_app()
try:
pipe_start_app.set_mt(multi_tenancy)
except AttributeError:
pass
self._args.multi_tenancy = multi_tenancy
# end
def is_multi_tenancy_set(self):
return self._args.multi_tenancy or self.aaa_mode != 'no-auth'
def is_rbac_enabled(self):
return self.aaa_mode == 'rbac'
def mt_http_get(self):
pipe_start_app = self.get_pipe_start_app()
mt = self.is_multi_tenancy_set()
try:
mt = pipe_start_app.get_mt()
except AttributeError:
pass
return {'enabled': mt}
# end
def mt_http_put(self):
multi_tenancy = get_request().json['enabled']
user_token = get_request().get_header('X-Auth-Token')
if user_token is None:
raise cfgm_common.exceptions.HttpError(403, "Permission denied")
data = self._auth_svc.verify_signed_token(user_token)
if data is None:
raise cfgm_common.exceptions.HttpError(403, "Permission denied")
self.set_mt(multi_tenancy)
return {'enabled': self.is_multi_tenancy_set()}
# end
@property
def aaa_mode(self):
return self._args.aaa_mode
@aaa_mode.setter
def aaa_mode(self, mode):
self._args.aaa_mode = mode
# indicates whether multi-tenancy with RBAC is enabled or disabled
def aaa_mode_http_get(self):
return {'aaa-mode': self.aaa_mode}
def aaa_mode_http_put(self):
aaa_mode = get_request().json['aaa-mode']
if aaa_mode not in cfgm_common.AAA_MODE_VALID_VALUES:
raise ValueError('Invalid aaa-mode %s' % aaa_mode)
if not self._auth_svc.validate_user_token(get_request()):
raise cfgm_common.exceptions.HttpError(403, "Permission denied")
if not self.is_admin_request():
raise cfgm_common.exceptions.HttpError(403, "Permission denied")
self.aaa_mode = aaa_mode
if self.is_rbac_enabled():
self._create_default_rbac_rule()
return {'aaa-mode': self.aaa_mode}
# end
@property
def cloud_admin_role(self):
return self._args.cloud_admin_role
def publish_self_to_discovery(self):
# publish API server
data = {
'ip-address': self._args.ifmap_server_ip,
'port': self._args.listen_port,
}
if self._disc:
self.api_server_task = self._disc.publish(
API_SERVER_DISCOVERY_SERVICE_NAME, data)
def publish_ifmap_to_discovery(self, state='up', msg=''):
# publish ifmap server
data = {
'ip-address': self._args.ifmap_server_ip,
'port': self._args.ifmap_server_port,
}
if self._disc:
self.ifmap_task = self._disc.publish(
IFMAP_SERVER_DISCOVERY_SERVICE_NAME,
data, state, msg)
# end publish_ifmap_to_discovery
def un_publish_self_to_discovery(self):
# unpublish the API server
data = {
'ip-address': self._args.ifmap_server_ip,
'port': self._args.listen_port,
}
if self._disc:
self._disc.un_publish(API_SERVER_DISCOVERY_SERVICE_NAME, data)
def un_publish_ifmap_to_discovery(self):
# unpublish the ifmap server
data = {
'ip-address': self._args.ifmap_server_ip,
'port': self._args.ifmap_server_port,
}
if self._disc:
self._disc.un_publish(IFMAP_SERVER_DISCOVERY_SERVICE_NAME, data)
# end un_publish_ifmap_to_discovery
# end class VncApiServer
def main(args_str=None, server=None):
vnc_api_server = server
pipe_start_app = vnc_api_server.get_pipe_start_app()
server_ip = vnc_api_server.get_listen_ip()
server_port = vnc_api_server.get_server_port()
# Advertise services
if (vnc_api_server._args.disc_server_ip and
vnc_api_server._args.disc_server_port and
vnc_api_server.get_worker_id() == 0):
vnc_api_server.publish_self_to_discovery()
""" @sigchld
Disable handling of SIG_CHLD for now as every keystone request to validate
token sends SIG_CHLD signal to API server.
"""
#hub.signal(signal.SIGCHLD, vnc_api_server.sigchld_handler)
hub.signal(signal.SIGTERM, vnc_api_server.sigterm_handler)
if pipe_start_app is None:
pipe_start_app = vnc_api_server.api_bottle
try:
bottle.run(app=pipe_start_app, host=server_ip, port=server_port,
server=get_bottle_server(server._args.max_requests))
except KeyboardInterrupt:
# quietly handle Ctrl-C
pass
except:
# dump stack on all other exceptions
raise
finally:
# always cleanup gracefully
vnc_api_server.reset()
# end main
def server_main(args_str=None):
import cgitb
cgitb.enable(format='text')
main(args_str, VncApiServer(args_str))
#server_main
if __name__ == "__main__":
server_main()
| {
"content_hash": "d5caf1c5e8b612f7021d928ddefb8ad4",
"timestamp": "",
"source": "github",
"line_count": 3500,
"max_line_length": 119,
"avg_line_length": 41.46,
"alnum_prop": 0.5487009854593067,
"repo_name": "tcpcloud/contrail-controller",
"id": "98d32e81b0148a265dab33bb81d847e8f3cf15cb",
"size": "145179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/config/api-server/vnc_cfg_api_server.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "88309"
},
{
"name": "C++",
"bytes": "20774234"
},
{
"name": "CSS",
"bytes": "531"
},
{
"name": "GDB",
"bytes": "44610"
},
{
"name": "Groff",
"bytes": "41295"
},
{
"name": "HTML",
"bytes": "519766"
},
{
"name": "Java",
"bytes": "171966"
},
{
"name": "LLVM",
"bytes": "2937"
},
{
"name": "Lua",
"bytes": "19459"
},
{
"name": "Makefile",
"bytes": "12449"
},
{
"name": "Protocol Buffer",
"bytes": "6129"
},
{
"name": "Python",
"bytes": "5701059"
},
{
"name": "Shell",
"bytes": "52859"
},
{
"name": "Thrift",
"bytes": "8382"
},
{
"name": "Yacc",
"bytes": "7737"
}
],
"symlink_target": ""
} |
from pymongo import MongoClient
import pymongo.errors
import config
class Writer(object):
"""Storage Reader Object"""
def __init__(self, doc_type=None):
super(Writer, self).__init__()
self.db_name = config.db_name
self.doc_type = doc_type
# self.conn = motor.motor_asyncio.AsyncIOMotorClient(config.MONGODB_URI)
self.conn = MongoClient(config.mongodb_uri)
self.db = self.conn[self.db_name]
self.coll = self.db[self.doc_type]
def insert_many(self, docs):
self.coll.insert_many(docs)
return True
def insert_one(self, doc):
self.coll.insert_one(doc)
return True
def create_update_index(self):
machines_index = [('ssh_key_name', 'text'),
('type', 'text'),
('public_dns', 'text'),
('private_dns', 'text'),
('tags', 'text'),
('security_group', 'text')]
try:
self.coll.create_index(machines_index,
name='machines_index',
background=True)
return True
except pymongo.errors.OperationFailure as error:
# error code 85 (IndexOptionsConflict): an index with this name
# already exists with different options; treat that as success
return error.code == 85
# def update_one(self, doc):
# old_doc = self.get_doc(doc_id=doc[self.id_identifier])
# print(old_doc)
# print(doc)
# # status = self.conn.update_one(index=self.db_name,
# # doc_type=self.doc_type,
# # id=doc[self.id_identifier],
# # body=doc)
# return True
class Reader(object):
"""Storage Reader Object"""
def __init__(self, doc_type=None):
super(Reader, self).__init__()
self.db_name = config.db_name
self.doc_type = doc_type
self.conn = MongoClient(config.mongodb_uri)
self.db = self.conn[self.db_name]
self.coll = self.db[self.doc_type]
def if_doc_exists(self, doc_id):
return bool(self.coll.find_one({'_id': doc_id}))
def find_docs(self, query):
return list(self.coll.find(query))
def get_all_match(self, query_str):
query = {'$text': {'$search': query_str}}
return self.find_docs(query)
def get_instance_by_id(self, doc_id):
query = {'_id': doc_id}
res = self.coll.find_one(query)
return res
def get_instance_by_fqdn(self, fqdn):
query = {'public_dns': fqdn}
return self.find_docs(query)
def get_elbs_by_instanceid(self, instance_id):
query = {'backends': instance_id}
return self.find_docs(query)
def get_dns_by_fqdn(self, fqdn):
query = {'records': fqdn}
return self.find_docs(query)
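# --- Illustrative usage (a minimal sketch; the 'machines' collection name and
# the sample document below are assumptions, not part of this module) ---
#
#     writer = Writer(doc_type='machines')
#     writer.insert_one({'_id': 'i-0abc', 'public_dns': 'ec2-1-2-3-4.example.com'})
#     writer.create_update_index()
#
#     reader = Reader(doc_type='machines')
#     if reader.if_doc_exists('i-0abc'):
#         print(reader.get_instance_by_id('i-0abc'))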
| {
"content_hash": "89179d3151e2c429bcd3eb1874504653",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 80,
"avg_line_length": 29.10576923076923,
"alnum_prop": 0.5189957053187975,
"repo_name": "cloudconsole/cloudconsole",
"id": "76982b05964995e156a30938ac96578195dd5430",
"size": "3073",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "storage/driver.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1557"
},
{
"name": "Dockerfile",
"bytes": "325"
},
{
"name": "HTML",
"bytes": "11934"
},
{
"name": "Python",
"bytes": "14003"
}
],
"symlink_target": ""
} |
"""Emacs and Flymake compatible Pylint.
This script is for integration with emacs and is compatible with flymake mode.
epylint walks out of python packages before invoking pylint. This avoids
reporting import errors that occur when a module within a package uses the
absolute import path to get another module within this package.
For example:
- Suppose a package is structured as
a/__init__.py
a/b/x.py
a/c/y.py
- Then if y.py imports x as "from a.b import x" the following produces pylint errors
cd a/c; pylint y.py
- The following obviously doesn't
pylint a/c/y.py
- As this script will be invoked by emacs within the directory of the file
we are checking, we need to move out of it to avoid these false positives.
You may also use py_run to run pylint with desired options and get back (or not) its output.
"""
import sys, os, re
from subprocess import Popen, PIPE
def lint(filename):
"""Pylint the given file.
When run from emacs we will be in the directory of a file, and passed its filename.
If this file is part of a package and is trying to import other modules from within
its own package or another package rooted in a directory below it, pylint will classify
it as a failed import.
To get around this, we traverse up the directory tree to find the root of the
package this module is in. We then invoke pylint from that directory.
Finally, we must correct the filenames in the output generated by pylint so Emacs doesn't
become confused (it will expect just the original filename, while pylint may extend it with
extra directories if we've traversed up the tree).
"""
# traverse upwards until we are out of a python package
fullPath = os.path.abspath(filename)
parentPath, childPath = os.path.dirname(fullPath), os.path.basename(fullPath)
while parentPath != "/" and os.path.exists(os.path.join(parentPath, '__init__.py')):
childPath = os.path.join(os.path.basename(parentPath), childPath)
parentPath = os.path.dirname(parentPath)
# Start pylint
# Ensure we use the python and pylint associated with the running epylint
lintPath = os.path.join(os.path.dirname(__file__), 'lint.py')
cmd = [sys.executable, lintPath, '-f', 'parseable', '-r', 'n',
'--disable=C,R,I', childPath]
process = Popen(cmd, stdout=PIPE, stderr=PIPE, cwd=parentPath)
# The parseable line format is '%(path)s:%(line)s: [%(sigle)s%(obj)s] %(msg)s'
# NOTE: This would be cleaner if we added an Emacs reporter to pylint.reporters.text ..
regex = re.compile(r"\[(?P<type>[WE])(?P<remainder>.*?)\]")
def _replacement(mObj):
"Alter to include 'Error' or 'Warning'"
if mObj.group("type") == "W":
replacement = "Warning"
else:
replacement = "Error"
# replace as "Warning (W0511, funcName): Warning Text"
return "%s (%s%s):" % (replacement, mObj.group("type"), mObj.group("remainder"))
for line in process.stdout:
# remove pylintrc warning
if line.startswith("No config file found"):
continue
line = regex.sub(_replacement, line, 1)
# modify the file name that's output to reverse the path traversal we made
parts = line.split(":")
if parts and parts[0] == childPath:
line = ":".join([filename] + parts[1:])
print line,
process.wait()
return process.returncode
def py_run(command_options='', return_std=False, stdout=None, stderr=None,
script='epylint'):
"""Run pylint from python (needs Python >= 2.4).
``command_options`` is a string containing ``pylint`` command line options;
``return_std`` (boolean) indicates return of created standard output
and error (see below);
``stdout`` and ``stderr`` are 'file-like' objects in which standard output
can be written.
The calling agent is responsible for stdout/stderr management (creation, close).
By default, standard output and error are those from ``sys``,
or standalone ones (``subprocess.PIPE``) are used
if they are not set and ``return_std`` is requested.
If ``return_std`` is set to ``True``, this function returns a 2-tuple
containing the standard output and error of the created process,
as follows: ``(stdout, stderr)``.
A trivial usage could be as follows:
>>> py_run( '--version')
No config file found, using default configuration
pylint 0.18.1,
...
To silently run Pylint on a module, and get its standard output and error:
>>> (pylint_stdout, pylint_stderr) = py_run( 'module_name.py', True)
"""
# Create command line to call pylint
if os.name == 'nt':
script += '.bat'
command_line = script + ' ' + command_options
# Provide standard output and/or error if not set
if stdout is None:
if return_std:
stdout = PIPE
else:
stdout = sys.stdout
if stderr is None:
if return_std:
stderr = PIPE
else:
stderr = sys.stderr
# Call pylint in a subprocess
p = Popen(command_line, shell=True, stdout=stdout, stderr=stderr)
p.wait()
# Return standard output and error
if return_std:
return (p.stdout, p.stderr)
def Run():
sys.exit(lint(sys.argv[1]))
if __name__ == '__main__':
Run()
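# --- Illustrative invocation (a sketch; the file name and the message shown
# are made up) ---
#
#     $ epylint mymodule.py
#     mymodule.py:12: Warning (W0613, handler): Unused argument 'event'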
| {
"content_hash": "682930364052aa99d562783b54b73324",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 96,
"avg_line_length": 36.34228187919463,
"alnum_prop": 0.6513388734995383,
"repo_name": "yongshengwang/hue",
"id": "db2ed82f19b027dd2a9175406e6eea85770f883c",
"size": "6363",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build/env/lib/python2.7/site-packages/pylint-0.28.0-py2.7.egg/pylint/epylint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "13685"
},
{
"name": "C",
"bytes": "2479183"
},
{
"name": "C++",
"bytes": "177090"
},
{
"name": "CSS",
"bytes": "1133541"
},
{
"name": "Emacs Lisp",
"bytes": "12145"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Groff",
"bytes": "28547"
},
{
"name": "HTML",
"bytes": "26230478"
},
{
"name": "Java",
"bytes": "133906"
},
{
"name": "JavaScript",
"bytes": "9757355"
},
{
"name": "Makefile",
"bytes": "94066"
},
{
"name": "Mako",
"bytes": "2185828"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "PLSQL",
"bytes": "13774"
},
{
"name": "Perl",
"bytes": "138710"
},
{
"name": "PigLatin",
"bytes": "328"
},
{
"name": "Python",
"bytes": "88056623"
},
{
"name": "Scala",
"bytes": "191428"
},
{
"name": "Shell",
"bytes": "59514"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TeX",
"bytes": "126420"
},
{
"name": "Thrift",
"bytes": "101931"
},
{
"name": "VimL",
"bytes": "1530"
},
{
"name": "XSLT",
"bytes": "357625"
}
],
"symlink_target": ""
} |
from collections.abc import Callable
from enum import Enum
from .freezing import freeze_data, unfreeze_data
from .protocols import DEFAULT_PROTOCOL
__REGISTRY__ = {}
class OperationType(Enum):
INSERTION = 'insert'
DELETION = 'delete'
REPLACEMENT = 'replace'
class OperationMetaclass(type(Callable)):
def __new__(cls, name, bases, attrs):
super_new = super().__new__
op_type = attrs.get('TYPE')
new_cls = super_new(cls, name, bases, attrs)
if op_type:
__REGISTRY__[op_type.value] = new_cls
return new_cls
class Operation(Callable, metaclass=OperationMetaclass):
TYPE = None
FIELDS = []
@classmethod
def from_serializable_data(cls, data):
op_type_value = data['type']
op_cls = __REGISTRY__[op_type_value]
kwargs = {
'context': data['context'],
}
for field in op_cls.FIELDS:
kwargs[field] = data[field]
return op_cls(**kwargs)
def __init__(self, context):
assert isinstance(context, (tuple, list))
self.__context = tuple(context)
@property
def context(self):
return self.__context
@property
def type(self):
return self.TYPE
def __eq__(self, obj):
if self.__class__ != obj.__class__:
return False
if self.context != obj.context:
return False
for field in self.FIELDS:
if getattr(self, field) != getattr(obj, field):
return False
return True
def __repr__(self):
return '{0}(context={1!r}, {2})'.format(
self.__class__.__name__,
self.context,
', '.join('{0}={1!r}'.format(f, getattr(self, f))
for f in self.FIELDS),
)
def __call__(self, value, protocol=DEFAULT_PROTOCOL):
return self._apply_on(value, self.context, protocol=protocol)
def to_serializable_data(self):
if not self.TYPE:
raise NotImplementedError()
data = {
'type': self.type.value,
'context': self.context,
}
for field in self.FIELDS:
data[field] = getattr(self, field)
return data
def inverted(self):
raise NotImplementedError()
def with_pushed_context_prefix(self, name):
kwargs = {
'context': (name,) + self.context,
}
for field in self.FIELDS:
kwargs[field] = getattr(self, field)
return self.__class__(**kwargs)
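# Recursively walk the context path: protocol.navigate() descends one level
# at a time, the operation is applied at the leaf, and protocol.update()
# rebuilds each enclosing value on the way back up.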
def _apply_on(self, value, ctx, protocol):
ctx_len = len(ctx)
if ctx_len == 0:
return self._apply_on_empty_context(value, protocol)
if ctx_len == 1:
return self._apply_on_singleton_context(value, ctx[0], protocol)
else:
ctx_head = ctx[0]
ctx_tail = ctx[1:]
sub_value = protocol.navigate(value, ctx_head)
new_sub_value = self._apply_on(sub_value, ctx_tail, protocol)
return protocol.update(value, ctx_head, new_sub_value)
def _apply_on_empty_context(self, value, protocol):
raise NotImplementedError()
def _apply_on_singleton_context(self, value, name, protocol):
raise NotImplementedError()
class Insertion(Operation):
TYPE = OperationType.INSERTION
FIELDS = ['new_value']
def __init__(self, context, new_value):
super().__init__(context)
self.__new_value = freeze_data(new_value)
@property
def new_value(self):
return self.__new_value
def inverted(self):
return Deletion(context=self.context, old_value=self.new_value)
def _apply_on_empty_context(self, value, protocol):
return unfreeze_data(self.new_value)
def _apply_on_singleton_context(self, value, name, protocol):
return protocol.insert(value, name, unfreeze_data(self.new_value),
obj_context=self.context[:-1])
class Deletion(Operation):
TYPE = OperationType.DELETION
FIELDS = ['old_value']
def __init__(self, context, old_value):
super().__init__(context)
self.__old_value = freeze_data(old_value)
@property
def old_value(self):
return self.__old_value
def inverted(self):
return Insertion(context=self.context, new_value=self.old_value)
def _apply_on_empty_context(self, value, protocol):
return
def _apply_on_singleton_context(self, value, name, protocol):
return protocol.remove(value, name, obj_context=self.context[:-1])
class Replacement(Operation):
TYPE = OperationType.REPLACEMENT
FIELDS = ['old_value', 'new_value']
def __init__(self, context, old_value, new_value):
super().__init__(context)
self.__old_value = freeze_data(old_value)
self.__new_value = freeze_data(new_value)
@property
def old_value(self):
return self.__old_value
@property
def new_value(self):
return self.__new_value
def inverted(self):
return Replacement(
context=self.context,
old_value=self.new_value,
new_value=self.old_value,
)
def _apply_on_empty_context(self, value, protocol):
return unfreeze_data(self.new_value)
def _apply_on_singleton_context(self, value, name, protocol):
return protocol.update(value, name, unfreeze_data(self.new_value),
obj_context=self.context[:-1])
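# --- Illustrative round-trip (a minimal sketch; the ('user', 'name') context
# and the string values are made up for illustration) ---
#
#     op = Replacement(context=('user', 'name'),
#                      old_value='ann', new_value='bob')
#     data = op.to_serializable_data()            # {'type': 'replace', ...}
#     assert Operation.from_serializable_data(data) == op
#     undo = op.inverted()                        # swaps old and new values
#     assert undo.new_value == op.old_value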
| {
"content_hash": "7427079de9dbf4228b59056e778a9c6e",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 76,
"avg_line_length": 28.36082474226804,
"alnum_prop": 0.5826972010178118,
"repo_name": "szopu/datadiffs",
"id": "f24939de203090f2db2a7c6baf5b270ae62c118d",
"size": "5502",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datadiffs/operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23995"
}
],
"symlink_target": ""
} |
import os
import sys
from telemetry import benchmark
from telemetry.core import util
from telemetry.core import wpr_modes
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
from telemetry.page import page_test
from telemetry.page import record_wpr
from telemetry.unittest import tab_test_case
class MockPage(page_module.Page):
def __init__(self, page_set, url):
super(MockPage, self).__init__(url=url,
page_set=page_set,
base_dir=util.GetUnittestDataDir())
self.func_calls = []
def RunNavigateSteps(self, action_runner):
self.func_calls.append('RunNavigateSteps')
super(MockPage, self).RunNavigateSteps(action_runner)
def RunFoo(self, _):
self.func_calls.append('RunFoo')
def RunBar(self, _):
self.func_calls.append('RunBar')
def RunBaz(self, _):
self.func_calls.append('RunBaz')
class MockPageSet(page_set_module.PageSet):
def __init__(self, url=''):
super(MockPageSet, self).__init__(archive_data_file='data/test.json')
self.AddPage(MockPage(self, url))
class MockPageTest(page_test.PageTest):
def __init__(self):
super(MockPageTest, self).__init__()
self._action_name_to_run = "RunBaz"
self.func_calls = []
@classmethod
def AddCommandLineArgs(cls, parser):
parser.add_option('--mock-page-test-option', action="store_true")
def WillNavigateToPage(self, page, tab):
self.func_calls.append('WillNavigateToPage')
def DidNavigateToPage(self, page, tab):
self.func_calls.append('DidNavigateToPage')
def WillRunActions(self, page, tab):
self.func_calls.append('WillRunActions')
def DidRunActions(self, page, tab):
self.func_calls.append('DidRunActions')
def ValidatePage(self, page, tab, results):
self.func_calls.append('ValidatePage')
class MockBenchmark(benchmark.Benchmark):
test = MockPageTest
mock_page_set = None
@classmethod
def AddTestCommandLineArgs(cls, group):
group.add_option('', '--mock-benchmark-url', action='store', type='string')
def CreatePageSet(self, options):
kwargs = {}
if options.mock_benchmark_url:
kwargs['url'] = options.mock_benchmark_url
self.mock_page_set = MockPageSet(**kwargs)
return self.mock_page_set
class RecordWprUnitTests(tab_test_case.TabTestCase):
_base_dir = util.GetUnittestDataDir()
_test_data_dir = os.path.join(util.GetUnittestDataDir(), 'page_tests')
@classmethod
def setUpClass(cls):
sys.path.extend([cls._base_dir, cls._test_data_dir])
super(RecordWprUnitTests, cls).setUpClass()
cls._url = cls.UrlOfUnittestFile('blank.html')
# When the RecorderPageTest is created from a PageSet, we do not have a
# PageTest to use. In this case, we will record every available action.
def testRunPage_AllActions(self):
record_page_test = record_wpr.RecorderPageTest(["RunFoo", "RunBar"])
page = MockPage(page_set=MockPageSet(url=self._url), url=self._url)
record_page_test.RunPage(page, self._tab, results=None)
self.assertTrue('RunFoo' in page.func_calls)
self.assertTrue('RunBar' in page.func_calls)
self.assertFalse('RunBaz' in page.func_calls)
def testRunPage_DontReloadSingleActions(self):
record_page_test = record_wpr.RecorderPageTest(["RunFoo"])
page = MockPage(page_set=MockPageSet(url=self._url), url=self._url)
record_page_test.RunPage(page, self._tab, results=None)
self.assertFalse('RunNavigateSteps' in page.func_calls)
def testRunPage_ReloadPageBetweenActions(self):
record_page_test = record_wpr.RecorderPageTest(["RunFoo", "RunBar"])
page = MockPage(page_set=MockPageSet(url=self._url), url=self._url)
record_page_test.RunPage(page, self._tab, results=None)
self.assertTrue('RunNavigateSteps' in page.func_calls)
# When the RecorderPageTest is created from a Benchmark, the benchmark will
# have a PageTest, specified by its test attribute.
def testRunPage_OnlyRunBenchmarkAction(self):
record_page_test = record_wpr.RecorderPageTest(["RunFoo"])
record_page_test.page_test = MockBenchmark().test()
page = MockPage(page_set=MockPageSet(url=self._url), url=self._url)
record_page_test.RunPage(page, self._tab, results=None)
self.assertFalse('RunFoo' in page.func_calls)
self.assertTrue('RunBaz' in page.func_calls)
def testRunPage_CallBenchmarksPageTestsFunctions(self):
record_page_test = record_wpr.RecorderPageTest([])
record_page_test.page_test = MockBenchmark().test()
page = MockPage(page_set=MockPageSet(url=self._url), url=self._url)
record_page_test.RunPage(page, self._tab, results=None)
self.assertEqual(3, len(record_page_test.page_test.func_calls))
self.assertEqual('WillRunActions', record_page_test.page_test.func_calls[0])
self.assertEqual('DidRunActions', record_page_test.page_test.func_calls[1])
self.assertEqual('ValidatePage', record_page_test.page_test.func_calls[2])
def testWprRecorderWithPageSet(self):
flags = []
mock_page_set = MockPageSet(url=self._url)
wpr_recorder = record_wpr.WprRecorder(self._test_data_dir,
mock_page_set, flags)
results = wpr_recorder.CreateResults()
wpr_recorder.Record(results)
self.assertEqual(set(mock_page_set.pages), results.pages_that_succeeded)
def testWprRecorderWithBenchmark(self):
flags = ['--mock-benchmark-url', self._url]
mock_benchmark = MockBenchmark()
wpr_recorder = record_wpr.WprRecorder(self._test_data_dir, mock_benchmark,
flags)
results = wpr_recorder.CreateResults()
wpr_recorder.Record(results)
self.assertEqual(set(mock_benchmark.mock_page_set.pages),
results.pages_that_succeeded)
def testCommandLineFlags(self):
flags = [
'--page-repeat', '2',
'--mock-benchmark-url', self._url,
'--mock-page-test-option',
]
wpr_recorder = record_wpr.WprRecorder(self._test_data_dir, MockBenchmark(),
flags)
# page_runner command-line args
self.assertEquals(2, wpr_recorder.options.page_repeat)
# benchmark command-line args
self.assertEquals(self._url, wpr_recorder.options.mock_benchmark_url)
# benchmark's page_test command-line args
self.assertTrue(wpr_recorder.options.mock_page_test_option)
# invalid command-line args
self.assertFalse(hasattr(wpr_recorder.options, 'not_a_real_option'))
def testRecordingEnabled(self):
flags = ['--mock-benchmark-url', self._url]
wpr_recorder = record_wpr.WprRecorder(self._test_data_dir, MockBenchmark(),
flags)
self.assertEqual(wpr_modes.WPR_RECORD,
wpr_recorder.options.browser_options.wpr_mode)
def testFindAllActionNames(self):
# The src/tools/telemetry/unittest_data/page_tests/ directory has been
# populated with three simple Page Measurement classes, the first two of
# which have action_name_to_run defined.
action_names_to_run = record_wpr.FindAllActionNames(self._test_data_dir)
self.assertTrue('RunFoo' in action_names_to_run)
self.assertTrue('RunBar' in action_names_to_run)
self.assertFalse('RunBaz' in action_names_to_run)
| {
"content_hash": "eb900b91749add74acb1ecb70af816ea",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 80,
"avg_line_length": 39.24193548387097,
"alnum_prop": 0.6928346348814907,
"repo_name": "ondra-novak/chromium.src",
"id": "85d60da4d4924bd2f0deb7a4ab4285f12979b5f7",
"size": "7462",
"binary": false,
"copies": "4",
"ref": "refs/heads/nw",
"path": "tools/telemetry/telemetry/page/record_wpr_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "35318"
},
{
"name": "Batchfile",
"bytes": "7621"
},
{
"name": "C",
"bytes": "8692951"
},
{
"name": "C++",
"bytes": "206833388"
},
{
"name": "CSS",
"bytes": "871479"
},
{
"name": "HTML",
"bytes": "24541148"
},
{
"name": "Java",
"bytes": "5457985"
},
{
"name": "JavaScript",
"bytes": "17791684"
},
{
"name": "Makefile",
"bytes": "92563"
},
{
"name": "Objective-C",
"bytes": "1312233"
},
{
"name": "Objective-C++",
"bytes": "7105758"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "218379"
},
{
"name": "Perl",
"bytes": "69392"
},
{
"name": "Protocol Buffer",
"bytes": "387183"
},
{
"name": "Python",
"bytes": "6929739"
},
{
"name": "Shell",
"bytes": "473664"
},
{
"name": "Standard ML",
"bytes": "4131"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "15206"
}
],
"symlink_target": ""
} |
from functools import singledispatch
from llvmlite.llvmpy.core import Type, Constant
from numba.core import types, typing, cgutils
from numba.core.imputils import Registry
from numba.cuda import nvvmutils
registry = Registry()
lower = registry.lower
voidptr = Type.pointer(Type.int(8))
# NOTE: we don't use @lower here since print_item() doesn't return a LLVM value
@singledispatch
def print_item(ty, context, builder, val):
"""
Handle printing of a single value of the given Numba type.
A (format string, [list of arguments]) is returned that will allow
forming the final printf()-like call.
"""
raise NotImplementedError("printing unimplemented for values of type %s"
% (ty,))
@print_item.register(types.Integer)
@print_item.register(types.IntegerLiteral)
def int_print_impl(ty, context, builder, val):
if ty in types.unsigned_domain:
rawfmt = "%llu"
dsttype = types.uint64
else:
rawfmt = "%lld"
dsttype = types.int64
lld = context.cast(builder, val, ty, dsttype)
return rawfmt, [lld]
@print_item.register(types.Float)
def real_print_impl(ty, context, builder, val):
lld = context.cast(builder, val, ty, types.float64)
return "%f", [lld]
@print_item.register(types.StringLiteral)
def const_print_impl(ty, context, builder, sigval):
pyval = ty.literal_value
assert isinstance(pyval, str) # Ensured by lowering
rawfmt = "%s"
val = context.insert_string_const_addrspace(builder, pyval)
return rawfmt, [val]
@lower(print, types.VarArg(types.Any))
def print_varargs(context, builder, sig, args):
"""This function is a generic 'print' wrapper for arbitrary types.
It dispatches to the appropriate 'print' implementations above
depending on the detected real types in the signature."""
formats = []
values = []
for i, (argtype, argval) in enumerate(zip(sig.args, args)):
argfmt, argvals = print_item(argtype, context, builder, argval)
formats.append(argfmt)
values.extend(argvals)
rawfmt = " ".join(formats) + "\n"
fmt = context.insert_string_const_addrspace(builder, rawfmt)
array = cgutils.make_anonymous_struct(builder, values)
arrayptr = cgutils.alloca_once_value(builder, array)
vprint = nvvmutils.declare_vprint(builder.module)
builder.call(vprint, (fmt, builder.bitcast(arrayptr, voidptr)))
return context.get_dummy_value()
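# --- Illustrative extension (a sketch only; boolean printing is NOT part of
# this module, and the cast-to-int64 strategy below is an assumption) ---
#
#     @print_item.register(types.Boolean)
#     def bool_print_impl(ty, context, builder, val):
#         # reuse the signed-integer path: booleans print as 0 or 1
#         lld = context.cast(builder, val, ty, types.int64)
#         return "%lld", [lld]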
| {
"content_hash": "404c51820160dd1b3710afc0f4d70e80",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 79,
"avg_line_length": 32.54430379746835,
"alnum_prop": 0.6938934266822249,
"repo_name": "sklam/numba",
"id": "03cc9144e5d2bb2f53ce8053c45ed828347704ea",
"size": "2571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numba/cuda/printimpl.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6783"
},
{
"name": "C",
"bytes": "638283"
},
{
"name": "C++",
"bytes": "52741"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "GDB",
"bytes": "101"
},
{
"name": "HTML",
"bytes": "3464"
},
{
"name": "Python",
"bytes": "7918676"
},
{
"name": "Shell",
"bytes": "7823"
}
],
"symlink_target": ""
} |
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
import warnings
from google.api_core import gapic_v1, grpc_helpers
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2 # type: ignore
import grpc # type: ignore
from google.cloud.contentwarehouse_v1.types import (
synonymset,
synonymset_service_request,
)
from .base import DEFAULT_CLIENT_INFO, SynonymSetServiceTransport
class SynonymSetServiceGrpcTransport(SynonymSetServiceTransport):
"""gRPC backend transport for SynonymSetService.
A service that manages customer-specified SynonymSets.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "contentwarehouse.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: Optional[grpc.Channel] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
api_audience=api_audience,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "contentwarehouse.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service."""
return self._grpc_channel
@property
def create_synonym_set(
self,
) -> Callable[
[synonymset_service_request.CreateSynonymSetRequest], synonymset.SynonymSet
]:
r"""Return a callable for the create synonym set method over gRPC.
Creates a SynonymSet for a single context. Throws an
ALREADY_EXISTS exception if a synonymset already exists for the
context.
Returns:
Callable[[~.CreateSynonymSetRequest],
~.SynonymSet]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_synonym_set" not in self._stubs:
self._stubs["create_synonym_set"] = self.grpc_channel.unary_unary(
"/google.cloud.contentwarehouse.v1.SynonymSetService/CreateSynonymSet",
request_serializer=synonymset_service_request.CreateSynonymSetRequest.serialize,
response_deserializer=synonymset.SynonymSet.deserialize,
)
return self._stubs["create_synonym_set"]
@property
def get_synonym_set(
self,
) -> Callable[
[synonymset_service_request.GetSynonymSetRequest], synonymset.SynonymSet
]:
r"""Return a callable for the get synonym set method over gRPC.
Gets a SynonymSet for a particular context. Throws a NOT_FOUND
exception if the SynonymSet does not exist.
Returns:
Callable[[~.GetSynonymSetRequest],
~.SynonymSet]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_synonym_set" not in self._stubs:
self._stubs["get_synonym_set"] = self.grpc_channel.unary_unary(
"/google.cloud.contentwarehouse.v1.SynonymSetService/GetSynonymSet",
request_serializer=synonymset_service_request.GetSynonymSetRequest.serialize,
response_deserializer=synonymset.SynonymSet.deserialize,
)
return self._stubs["get_synonym_set"]
@property
def update_synonym_set(
self,
) -> Callable[
[synonymset_service_request.UpdateSynonymSetRequest], synonymset.SynonymSet
]:
r"""Return a callable for the update synonym set method over gRPC.
Remove the existing SynonymSet for the context and replaces it
with a new one. Throws a NOT_FOUND exception if the SynonymSet
is not found.
Returns:
Callable[[~.UpdateSynonymSetRequest],
~.SynonymSet]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_synonym_set" not in self._stubs:
self._stubs["update_synonym_set"] = self.grpc_channel.unary_unary(
"/google.cloud.contentwarehouse.v1.SynonymSetService/UpdateSynonymSet",
request_serializer=synonymset_service_request.UpdateSynonymSetRequest.serialize,
response_deserializer=synonymset.SynonymSet.deserialize,
)
return self._stubs["update_synonym_set"]
@property
def delete_synonym_set(
self,
) -> Callable[
[synonymset_service_request.DeleteSynonymSetRequest], empty_pb2.Empty
]:
r"""Return a callable for the delete synonym set method over gRPC.
Deletes a SynonymSet for a given context. Throws a NOT_FOUND
exception if the SynonymSet is not found.
Returns:
Callable[[~.DeleteSynonymSetRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_synonym_set" not in self._stubs:
self._stubs["delete_synonym_set"] = self.grpc_channel.unary_unary(
"/google.cloud.contentwarehouse.v1.SynonymSetService/DeleteSynonymSet",
request_serializer=synonymset_service_request.DeleteSynonymSetRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_synonym_set"]
@property
def list_synonym_sets(
self,
) -> Callable[
[synonymset_service_request.ListSynonymSetsRequest],
synonymset_service_request.ListSynonymSetsResponse,
]:
r"""Return a callable for the list synonym sets method over gRPC.
Returns all SynonymSets (for all contexts) for the
specified location.
Returns:
Callable[[~.ListSynonymSetsRequest],
~.ListSynonymSetsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_synonym_sets" not in self._stubs:
self._stubs["list_synonym_sets"] = self.grpc_channel.unary_unary(
"/google.cloud.contentwarehouse.v1.SynonymSetService/ListSynonymSets",
request_serializer=synonymset_service_request.ListSynonymSetsRequest.serialize,
response_deserializer=synonymset_service_request.ListSynonymSetsResponse.deserialize,
)
return self._stubs["list_synonym_sets"]
def close(self):
self.grpc_channel.close()
@property
def kind(self) -> str:
return "grpc"
__all__ = ("SynonymSetServiceGrpcTransport",)
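# --- Illustrative usage (a minimal sketch; assumes Application Default
# Credentials are available and that the parent path below is valid) ---
#
#     transport = SynonymSetServiceGrpcTransport()
#     list_stub = transport.list_synonym_sets  # property builds and caches the stub
#     request = synonymset_service_request.ListSynonymSetsRequest(
#         parent="projects/my-project/locations/us")
#     response = list_stub(request)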
| {
"content_hash": "279a92fd813733ee70a782d15d04854e",
"timestamp": "",
"source": "github",
"line_count": 377,
"max_line_length": 101,
"avg_line_length": 43.824933687002655,
"alnum_prop": 0.6171165718436025,
"repo_name": "googleapis/google-cloud-python",
"id": "af4734dc9c87a2acec1fafc89fe74a0f68a4e88a",
"size": "17122",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/grpc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2895"
},
{
"name": "Python",
"bytes": "5620713"
},
{
"name": "Shell",
"bytes": "51704"
}
],
"symlink_target": ""
} |
import logging
import os
import pytest
from funcy import first
from voluptuous import MultipleInvalid, Schema
from dvc.ignore import _no_match
from dvc.output import CHECKSUM_SCHEMA, Output
from dvc.stage import Stage
def test_save_missing(dvc, mocker):
stage = Stage(dvc)
out = Output(stage, "path", cache=False)
# pytest-mock applies the patch immediately and undoes it at teardown,
# so no `with` block is needed (newer pytest-mock versions forbid it)
mocker.patch.object(out.fs, "exists", return_value=False)
with pytest.raises(out.DoesNotExistError):
out.save()
@pytest.mark.parametrize(
"value,expected",
[
("", None),
(None, None),
(11111, "11111"),
("11111", "11111"),
("aAaBa", "aaaba"),
(
"3cc286c534a71504476da009ed174423",
"3cc286c534a71504476da009ed174423",
), # md5
(
"d41d8cd98f00b204e9800998ecf8427e-38",
"d41d8cd98f00b204e9800998ecf8427e-38",
), # etag
(
"000002000000000000000000c16859d1d071c6b1ffc9c8557d4909f1",
"000002000000000000000000c16859d1d071c6b1ffc9c8557d4909f1",
), # hdfs checksum
# Not much we can do about hex and oct values without writing our own
# parser. So listing these test cases just to acknowledge this.
# See https://github.com/iterative/dvc/issues/3331.
(0x3451, "13393"),
(0o1244, "676"),
],
)
def test_checksum_schema(value, expected):
assert Schema(CHECKSUM_SCHEMA)(value) == expected
@pytest.mark.parametrize("value", ["1", "11", {}, {"a": "b"}, [], [1, 2]])
def test_checksum_schema_fail(value):
with pytest.raises(MultipleInvalid):
assert Schema(CHECKSUM_SCHEMA)(value)
@pytest.mark.parametrize(
"exists, expected_message",
[
(
False,
(
"Output 'path'(stage: 'stage.dvc') is missing version info. "
"Cache for it will not be collected. "
"Use `dvc repro` to get your pipeline up to date."
),
),
(
True,
(
"Output 'path'(stage: 'stage.dvc') is missing version info. "
"Cache for it will not be collected. "
"Use `dvc repro` to get your pipeline up to date.\n"
"You can also use `dvc commit stage.dvc` to associate "
"existing 'path' with stage: 'stage.dvc'."
),
),
],
)
def test_get_used_objs(exists, expected_message, mocker, caplog):
stage = mocker.MagicMock()
mocker.patch.object(stage, "__str__", return_value="stage: 'stage.dvc'")
mocker.patch.object(stage, "addressing", "stage.dvc")
mocker.patch.object(stage, "wdir", os.getcwd())
mocker.patch.object(stage.repo, "root_dir", os.getcwd())
mocker.patch.object(stage.repo.dvcignore, "is_ignored", return_value=False)
mocker.patch.object(
stage.repo.dvcignore, "check_ignore", return_value=_no_match("path")
)
output = Output(stage, "path")
mocker.patch.object(output, "use_cache", True)
mocker.patch.object(stage, "is_repo_import", False)
mocker.patch.object(
Output, "exists", new_callable=mocker.PropertyMock
).return_value = exists
with caplog.at_level(logging.WARNING, logger="dvc"):
assert {} == output.get_used_objs()
assert first(caplog.messages) == expected_message
| {
"content_hash": "e43c54fb75f4a06e56f0bd45554a8b19",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 79,
"avg_line_length": 32.794117647058826,
"alnum_prop": 0.6017937219730942,
"repo_name": "dmpetrov/dataversioncontrol",
"id": "46e892b18997654cf885843f485a90cb5443488f",
"size": "3345",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/unit/output/test_output.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "127601"
},
{
"name": "Shell",
"bytes": "1677"
}
],
"symlink_target": ""
} |
from xml.sax.handler import ContentHandler
from xml.sax.xmlreader import Locator
import sys
import xml.sax
import xml.sax.handler
class AimlParserError(Exception): pass
class AimlHandler(ContentHandler):
# The legal states of the AIML parser
_STATE_OutsideAiml = 0
_STATE_InsideAiml = 1
_STATE_InsideCategory = 2
_STATE_InsidePattern = 3
_STATE_AfterPattern = 4
_STATE_InsideThat = 5
_STATE_AfterThat = 6
_STATE_InsideTemplate = 7
_STATE_AfterTemplate = 8
def __init__(self, encoding = "UTF-8"):
self.categories = {}
self._encoding = encoding
self._state = self._STATE_OutsideAiml
self._version = ""
self._namespace = ""
self._forwardCompatibleMode = False
self._currentPattern = ""
self._currentThat = ""
self._currentTopic = ""
self._insideTopic = False
self._currentUnknown = "" # the name of the current unknown element
# This is set to true when a parse error occurs in a category.
self._skipCurrentCategory = False
# Counts the number of parse errors in a particular AIML document.
# query with getNumErrors(). If 0, the document is AIML-compliant.
self._numParseErrors = 0
# TODO: select the proper validInfo table based on the version number.
self._validInfo = self._validationInfo101
# This stack of bools is used when parsing <li> elements inside
# <condition> elements, to keep track of whether or not an
# attribute-less "default" <li> element has been found yet. Only
# one default <li> is allowed in each <condition> element. We need
# a stack in order to correctly handle nested <condition> tags.
self._foundDefaultLiStack = []
# This stack of strings indicates what the current whitespace-handling
# behavior should be. Each string in the stack is either "default" or
# "preserve". When a new AIML element is encountered, a new string is
# pushed onto the stack, based on the value of the element's "xml:space"
# attribute (if absent, the top of the stack is pushed again). When
# ending an element, pop an object off the stack.
self._whitespaceBehaviorStack = ["default"]
self._elemStack = []
self._locator = Locator()
self.setDocumentLocator(self._locator)
def getNumErrors(self):
"Return the number of errors found while parsing the current document."
return self._numParseErrors
def setEncoding(self, encoding):
"""Set the text encoding to use when encoding strings read from XML.
Defaults to 'UTF-8'.
"""
self._encoding = encoding
def _location(self):
"Return a string describing the current location in the source file."
line = self._locator.getLineNumber()
column = self._locator.getColumnNumber()
return "(line %d, column %d)" % (line, column)
def _pushWhitespaceBehavior(self, attr):
"""Push a new string onto the whitespaceBehaviorStack.
The string's value is taken from the "xml:space" attribute, if it exists
and has a legal value ("default" or "preserve"). Otherwise, the previous
stack element is duplicated.
"""
assert len(self._whitespaceBehaviorStack) > 0, "Whitespace behavior stack should never be empty!"
try:
if attr["xml:space"] == "default" or attr["xml:space"] == "preserve":
self._whitespaceBehaviorStack.append(attr["xml:space"])
else:
raise AimlParserError, "Invalid value for xml:space attribute "+self._location()
except KeyError:
self._whitespaceBehaviorStack.append(self._whitespaceBehaviorStack[-1])
def startElementNS(self, name, qname, attr):
print "QNAME:", qname
print "NAME:", name
uri,elem = name
if (elem == "bot"): print "name:", attr.getValueByQName("name"), "a'ite?"
self.startElement(elem, attr)
pass
def startElement(self, name, attr):
# Wrapper around _startElement, which catches errors in _startElement()
# and keeps going.
# If we're inside an unknown element, ignore everything until we're
# out again.
if self._currentUnknown != "":
return
# If we're skipping the current category, ignore everything until
# it's finished.
if self._skipCurrentCategory:
return
# process this start-element.
try: self._startElement(name, attr)
except AimlParserError, msg:
# Print the error message
sys.stderr.write("PARSE ERROR: %s\n" % msg)
self._numParseErrors += 1 # increment error count
# In case of a parse error, if we're inside a category, skip it.
if self._state >= self._STATE_InsideCategory:
self._skipCurrentCategory = True
def _startElement(self, name, attr):
if name == "aiml":
# <aiml> tags are only legal in the OutsideAiml state
if self._state != self._STATE_OutsideAiml:
raise AimlParserError, "Unexpected <aiml> tag "+self._location()
self._state = self._STATE_InsideAiml
self._insideTopic = False
self._currentTopic = u""
try: self._version = attr["version"]
except KeyError:
# This SHOULD be a syntax error, but so many AIML sets out there are missing
# "version" attributes that it just seems nicer to let it slide.
#raise AimlParserError, "Missing 'version' attribute in <aiml> tag "+self._location()
#print "WARNING: Missing 'version' attribute in <aiml> tag "+self._location()
#print " Defaulting to version 1.0"
self._version = "1.0"
self._forwardCompatibleMode = (self._version != "1.0.1")
self._pushWhitespaceBehavior(attr)
# Not sure about this namespace business yet...
#try:
# self._namespace = attr["xmlns"]
# if self._version == "1.0.1" and self._namespace != "http://alicebot.org/2001/AIML-1.0.1":
# raise AimlParserError, "Incorrect namespace for AIML v1.0.1 "+self._location()
#except KeyError:
# if self._version != "1.0":
# raise AimlParserError, "Missing 'version' attribute(s) in <aiml> tag "+self._location()
elif self._state == self._STATE_OutsideAiml:
# If we're outside of an AIML element, we ignore all tags.
return
elif name == "topic":
# <topic> tags are only legal in the InsideAiml state, and only
# if we're not already inside a topic.
if (self._state != self._STATE_InsideAiml) or self._insideTopic:
raise AimlParserError, "Unexpected <topic> tag", self._location()
try: self._currentTopic = unicode(attr['name'])
except KeyError:
raise AimlParserError, "Required \"name\" attribute missing in <topic> element "+self._location()
self._insideTopic = True
elif name == "category":
# <category> tags are only legal in the InsideAiml state
if self._state != self._STATE_InsideAiml:
raise AimlParserError, "Unexpected <category> tag "+self._location()
self._state = self._STATE_InsideCategory
self._currentPattern = u""
self._currentThat = u""
# If we're not inside a topic, the topic is implicitly set to *
if not self._insideTopic: self._currentTopic = u"*"
self._elemStack = []
self._pushWhitespaceBehavior(attr)
elif name == "pattern":
# <pattern> tags are only legal in the InsideCategory state
if self._state != self._STATE_InsideCategory:
raise AimlParserError, "Unexpected <pattern> tag "+self._location()
self._state = self._STATE_InsidePattern
elif name == "that" and self._state == self._STATE_AfterPattern:
            # <that> elements are legal either inside a <template> element, or
# inside a <category> element, between the <pattern> and the
# <template> elements. This clause handles the latter case.
self._state = self._STATE_InsideThat
elif name == "template":
# <template> tags are only legal in the AfterPattern and AfterThat
# states
if self._state not in [self._STATE_AfterPattern, self._STATE_AfterThat]:
raise AimlParserError, "Unexpected <template> tag "+self._location()
# if no <that> element was specified, it is implicitly set to *
if self._state == self._STATE_AfterPattern:
self._currentThat = u"*"
self._state = self._STATE_InsideTemplate
self._elemStack.append(['template',{}])
self._pushWhitespaceBehavior(attr)
elif self._state == self._STATE_InsidePattern:
# Certain tags are allowed inside <pattern> elements.
if name == "bot" and attr.has_key("name") and attr["name"] == u"name":
# Insert a special character string that the PatternMgr will
# replace with the bot's name.
self._currentPattern += u" BOT_NAME "
else:
raise AimlParserError, ("Unexpected <%s> tag " % name)+self._location()
elif self._state == self._STATE_InsideThat:
# Certain tags are allowed inside <that> elements.
if name == "bot" and attr.has_key("name") and attr["name"] == u"name":
# Insert a special character string that the PatternMgr will
# replace with the bot's name.
self._currentThat += u" BOT_NAME "
else:
raise AimlParserError, ("Unexpected <%s> tag " % name)+self._location()
elif self._state == self._STATE_InsideTemplate and self._validInfo.has_key(name):
# Starting a new element inside the current pattern. First
# we need to convert 'attr' into a native Python dictionary,
# so it can later be marshaled.
attrDict = {}
for k,v in attr.items():
#attrDict[k[1].encode(self._encoding)] = v.encode(self._encoding)
attrDict[k.encode(self._encoding)] = unicode(v)
self._validateElemStart(name, attrDict, self._version)
# Push the current element onto the element stack.
self._elemStack.append([name.encode(self._encoding),attrDict])
self._pushWhitespaceBehavior(attr)
# If this is a condition element, push a new entry onto the
# foundDefaultLiStack
if name == "condition":
self._foundDefaultLiStack.append(False)
else:
# we're now inside an unknown element.
if self._forwardCompatibleMode:
# In Forward Compatibility Mode, we ignore the element and its
# contents.
self._currentUnknown = name
else:
# Otherwise, unknown elements are grounds for error!
raise AimlParserError, ("Unexpected <%s> tag " % name)+self._location()
def characters(self, ch):
# Wrapper around _characters which catches errors in _characters()
# and keeps going.
if self._state == self._STATE_OutsideAiml:
# If we're outside of an AIML element, we ignore all text
return
if self._currentUnknown != "":
# If we're inside an unknown element, ignore all text
return
if self._skipCurrentCategory:
# If we're skipping the current category, ignore all text.
return
try: self._characters(ch)
except AimlParserError, msg:
# Print the message
sys.stderr.write("PARSE ERROR: %s\n" % msg)
self._numParseErrors += 1 # increment error count
# In case of a parse error, if we're inside a category, skip it.
if self._state >= self._STATE_InsideCategory:
self._skipCurrentCategory = True
def _characters(self, ch):
text = unicode(ch)
if self._state == self._STATE_InsidePattern:
# TODO: text inside patterns must be upper-case!
self._currentPattern += text
elif self._state == self._STATE_InsideThat:
self._currentThat += text
elif self._state == self._STATE_InsideTemplate:
# First, see whether the element at the top of the element stack
# is permitted to contain text.
try:
parent = self._elemStack[-1][0]
parentAttr = self._elemStack[-1][1]
required, optional, canBeParent = self._validInfo[parent]
nonBlockStyleCondition = (parent == "condition" and not (parentAttr.has_key("name") and parentAttr.has_key("value")))
if not canBeParent:
raise AimlParserError, ("Unexpected text inside <%s> element "%parent)+self._location()
elif parent == "random" or nonBlockStyleCondition:
# <random> elements can only contain <li> subelements. However,
# there's invariably some whitespace around the <li> that we need
# to ignore. Same for non-block-style <condition> elements (i.e.
# those which don't have both a "name" and a "value" attribute).
if len(text.strip()) == 0:
# ignore whitespace inside these elements.
return
else:
# non-whitespace text inside these elements is a syntax error.
raise AimlParserError, ("Unexpected text inside <%s> element "%parent)+self._location()
except IndexError:
# the element stack is empty. This should never happen.
raise AimlParserError, "Element stack is empty while validating text "+self._location()
# Add a new text element to the element at the top of the element
# stack. If there's already a text element there, simply append the
# new characters to its contents.
try: textElemOnStack = (self._elemStack[-1][-1][0] == "text")
except IndexError: textElemOnStack = False
except KeyError: textElemOnStack = False
if textElemOnStack:
self._elemStack[-1][-1][2] += text
else:
self._elemStack[-1].append(["text", {"xml:space": self._whitespaceBehaviorStack[-1]}, text])
else:
# all other text is ignored
pass
def endElementNS(self, name, qname):
uri, elem = name
self.endElement(elem)
def endElement(self, name):
"""Wrapper around _endElement which catches errors in _characters()
and keeps going.
"""
if self._state == self._STATE_OutsideAiml:
# If we're outside of an AIML element, ignore all tags
return
if self._currentUnknown != "":
# see if we're at the end of an unknown element. If so, we can
# stop ignoring everything.
if name == self._currentUnknown:
self._currentUnknown = ""
return
if self._skipCurrentCategory:
# If we're skipping the current category, see if it's ending. We
# stop on ANY </category> tag, since we're not keeping track of
# state in ignore-mode.
if name == "category":
self._skipCurrentCategory = False
self._state = self._STATE_InsideAiml
return
try: self._endElement(name)
except AimlParserError, msg:
# Print the message
sys.stderr.write("PARSE ERROR: %s\n" % msg)
self._numParseErrors += 1 # increment error count
# In case of a parse error, if we're inside a category, skip it.
if self._state >= self._STATE_InsideCategory:
self._skipCurrentCategory = True
def _endElement(self, name):
"""Verify that an AIML end element is valid in the current
context.
Raises an AimlParserError if an illegal end element is encountered.
"""
if name == "aiml":
# </aiml> tags are only legal in the InsideAiml state
if self._state != self._STATE_InsideAiml:
raise AimlParserError, "Unexpected </aiml> tag "+self._location()
self._state = self._STATE_OutsideAiml
self._whitespaceBehaviorStack.pop()
elif name == "topic":
# </topic> tags are only legal in the InsideAiml state, and
# only if _insideTopic is true.
if self._state != self._STATE_InsideAiml or not self._insideTopic:
raise AimlParserError, "Unexpected </topic> tag "+self._location()
self._insideTopic = False
self._currentTopic = u""
elif name == "category":
# </category> tags are only legal in the AfterTemplate state
if self._state != self._STATE_AfterTemplate:
raise AimlParserError, "Unexpected </category> tag "+self._location()
self._state = self._STATE_InsideAiml
# End the current category. Store the current pattern/that/topic and
# element in the categories dictionary.
            key = (self._currentPattern.strip(), self._currentThat.strip(), self._currentTopic.strip())
self.categories[key] = self._elemStack[-1]
self._whitespaceBehaviorStack.pop()
elif name == "pattern":
# </pattern> tags are only legal in the InsidePattern state
if self._state != self._STATE_InsidePattern:
raise AimlParserError, "Unexpected </pattern> tag "+self._location()
self._state = self._STATE_AfterPattern
elif name == "that" and self._state == self._STATE_InsideThat:
# </that> tags are only allowed inside <template> elements or in
# the InsideThat state. This clause handles the latter case.
self._state = self._STATE_AfterThat
elif name == "template":
# </template> tags are only allowed in the InsideTemplate state.
if self._state != self._STATE_InsideTemplate:
raise AimlParserError, "Unexpected </template> tag "+self._location()
self._state = self._STATE_AfterTemplate
self._whitespaceBehaviorStack.pop()
elif self._state == self._STATE_InsidePattern:
# Certain tags are allowed inside <pattern> elements.
if name not in ["bot"]:
raise AimlParserError, ("Unexpected </%s> tag " % name)+self._location()
elif self._state == self._STATE_InsideThat:
# Certain tags are allowed inside <that> elements.
if name not in ["bot"]:
raise AimlParserError, ("Unexpected </%s> tag " % name)+self._location()
elif self._state == self._STATE_InsideTemplate:
# End of an element inside the current template. Append the
# element at the top of the stack onto the one beneath it.
elem = self._elemStack.pop()
self._elemStack[-1].append(elem)
self._whitespaceBehaviorStack.pop()
# If the element was a condition, pop an item off the
# foundDefaultLiStack as well.
if elem[0] == "condition": self._foundDefaultLiStack.pop()
else:
# Unexpected closing tag
raise AimlParserError, ("Unexpected </%s> tag " % name)+self._location()
# A dictionary containing a validation information for each AIML
# element. The keys are the names of the elements. The values are a
# tuple of three items. The first is a list containing the names of
# REQUIRED attributes, the second is a list of OPTIONAL attributes,
# and the third is a boolean value indicating whether or not the
# element can contain other elements and/or text (if False, the
# element can only appear in an atomic context, such as <date/>).
_validationInfo101 = {
"bot": ( ["name"], [], False ),
"condition": ( [], ["name", "value"], True ), # can only contain <li> elements
"date": ( [], [], False ),
"formal": ( [], [], True ),
"gender": ( [], [], True ),
"get": ( ["name"], [], False ),
"gossip": ( [], [], True ),
"id": ( [], [], False ),
"input": ( [], ["index"], False ),
"javascript": ( [], [], True ),
"learn": ( [], [], True ),
"li": ( [], ["name", "value"], True ),
"lowercase": ( [], [], True ),
"person": ( [], [], True ),
"person2": ( [], [], True ),
"random": ( [], [], True ), # can only contain <li> elements
"sentence": ( [], [], True ),
"set": ( ["name"], [], True),
"size": ( [], [], False ),
"sr": ( [], [], False ),
"srai": ( [], [], True ),
"star": ( [], ["index"], False ),
"system": ( [], [], True ),
"template": ( [], [], True ), # needs to be in the list because it can be a parent.
"that": ( [], ["index"], False ),
"thatstar": ( [], ["index"], False ),
"think": ( [], [], True ),
"topicstar": ( [], ["index"], False ),
"uppercase": ( [], [], True ),
"version": ( [], [], False ),
}
def _validateElemStart(self, name, attr, version):
"""Test the validity of an element starting inside a <template>
element.
        This function raises an AimlParserError exception if the tag is
        invalid. Otherwise, no news is good news.
"""
# Check the element's attributes. Make sure that all required
# attributes are present, and that any remaining attributes are
# valid options.
required, optional, canBeParent = self._validInfo[name]
for a in required:
if a not in attr and not self._forwardCompatibleMode:
raise AimlParserError, ("Required \"%s\" attribute missing in <%s> element " % (a,name))+self._location()
for a in attr:
if a in required: continue
if a[0:4] == "xml:": continue # attributes in the "xml" namespace can appear anywhere
if a not in optional and not self._forwardCompatibleMode:
raise AimlParserError, ("Unexpected \"%s\" attribute in <%s> element " % (a,name))+self._location()
# special-case: several tags contain an optional "index" attribute.
# This attribute's value must be a positive integer.
if name in ["star", "thatstar", "topicstar"]:
for k,v in attr.items():
if k == "index":
temp = 0
try: temp = int(v)
except:
raise AimlParserError, ("Bad type for \"%s\" attribute (expected integer, found \"%s\") " % (k,v))+self._location()
if temp < 1:
raise AimlParserError, ("\"%s\" attribute must have non-negative value " % (k))+self._location()
# See whether the containing element is permitted to contain
# subelements. If not, this element is invalid no matter what it is.
try:
parent = self._elemStack[-1][0]
parentAttr = self._elemStack[-1][1]
except IndexError:
# If the stack is empty, no parent is present. This should never
# happen.
raise AimlParserError, ("Element stack is empty while validating <%s> " % name)+self._location()
required, optional, canBeParent = self._validInfo[parent]
nonBlockStyleCondition = (parent == "condition" and not (parentAttr.has_key("name") and parentAttr.has_key("value")))
if not canBeParent:
raise AimlParserError, ("<%s> elements cannot have any contents "%parent)+self._location()
# Special-case test if the parent element is <condition> (the
# non-block-style variant) or <random>: these elements can only
# contain <li> subelements.
elif (parent == "random" or nonBlockStyleCondition) and name!="li":
raise AimlParserError, ("<%s> elements can only contain <li> subelements "%parent)+self._location()
# Special-case test for <li> elements, which can only be contained
# by non-block-style <condition> and <random> elements, and whose
# required attributes are dependent upon which attributes are
# present in the <condition> parent.
elif name=="li":
if not (parent=="random" or nonBlockStyleCondition):
raise AimlParserError, ("Unexpected <li> element contained by <%s> element "%parent)+self._location()
if nonBlockStyleCondition:
if parentAttr.has_key("name"):
# Single-predicate condition. Each <li> element except the
# last must have a "value" attribute.
if len(attr) == 0:
# This could be the default <li> element for this <condition>,
# unless we've already found one.
if self._foundDefaultLiStack[-1]:
raise AimlParserError, "Unexpected default <li> element inside <condition> "+self._location()
else:
self._foundDefaultLiStack[-1] = True
elif len(attr) == 1 and attr.has_key("value"):
pass # this is the valid case
else:
raise AimlParserError, "Invalid <li> inside single-predicate <condition> "+self._location()
elif len(parentAttr) == 0:
# Multi-predicate condition. Each <li> element except the
# last must have a "name" and a "value" attribute.
if len(attr) == 0:
# This could be the default <li> element for this <condition>,
# unless we've already found one.
if self._foundDefaultLiStack[-1]:
raise AimlParserError, "Unexpected default <li> element inside <condition> "+self._location()
else:
self._foundDefaultLiStack[-1] = True
elif len(attr) == 2 and attr.has_key("value") and attr.has_key("name"):
pass # this is the valid case
else:
raise AimlParserError, "Invalid <li> inside multi-predicate <condition> "+self._location()
# All is well!
return True
def create_parser():
"""Create and return an AIML parser object."""
parser = xml.sax.make_parser()
handler = AimlHandler("UTF-8")
parser.setContentHandler(handler)
#parser.setFeature(xml.sax.handler.feature_namespaces, True)
return parser
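# A minimal usage sketch for the parser above (the .aiml file name is a
# placeholder; handler.categories maps (pattern, that, topic) tuples to
# template element trees):
#
# parser = create_parser()
# parser.parse("greetings.aiml")
# handler = parser.getContentHandler()
# if handler.getNumErrors() == 0:
#     for (pattern, that, topic), template in handler.categories.items():
#         print pattern, "->", template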
| {
"content_hash": "5462d22310d88ccd107bdbbea3a0d30a",
"timestamp": "",
"source": "github",
"line_count": 546,
"max_line_length": 121,
"avg_line_length": 43.85164835164835,
"alnum_prop": 0.658898216597753,
"repo_name": "jstnhuang/chatbot",
"id": "d54c6932f494e9cb3e422b1a5b8d906559fc83f8",
"size": "23943",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "scripts/aiml/AimlParser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "D",
"bytes": "5487330"
},
{
"name": "Python",
"bytes": "95187"
}
],
"symlink_target": ""
} |
from enum import Enum
class PageErrorCode(Enum):
GRAPHQL_ERROR = "graphql_error"
INVALID = "invalid"
NOT_FOUND = "not_found"
REQUIRED = "required"
UNIQUE = "unique"
DUPLICATED_INPUT_ITEM = "duplicated_input_item"
ATTRIBUTE_ALREADY_ASSIGNED = "attribute_already_assigned"
| {
"content_hash": "04dc1cb76a6894ccdf3d14315fe8c673",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 61,
"avg_line_length": 27.363636363636363,
"alnum_prop": 0.6910299003322259,
"repo_name": "mociepka/saleor",
"id": "06eac6a48589df1d9439fb787bebadf9ee6df7fa",
"size": "301",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "saleor/page/error_codes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "2228"
},
{
"name": "HTML",
"bytes": "249248"
},
{
"name": "Procfile",
"bytes": "290"
},
{
"name": "Python",
"bytes": "12686831"
},
{
"name": "Shell",
"bytes": "439"
}
],
"symlink_target": ""
} |
import atexit
import sys
import logging
import os
import pickle
import socket
import string
from cybld import cybld_helpers
from cybld.cybld_command_handler import CyBldCommandHandler
from cybld.cybld_config_command_group import CyBldConfigCommandGroup
from cybld.cybld_config_settings import CyBldConfigSettings
from cybld.cybld_notifier import CyBldNotifier
# --------------------------------------------------------------------------
class CyBldIpcServer():
"""
Provides the IPC service.
Initializes the IPC socket and starts the main loop.
Uses CyBldCommandHandler and CyBldNotifier internally.
:param command_group: command group (cmd0, cmd1, cmd2) with which
this instance has been started with (read from
config)
:type command_group: cybld_config.CyBldConfigCommandGroup
:param do_notify_success: whether to use "notify-send" when a given command
was executed successfully (read from config)
:type do_notify_success: bool
:param do_notify_fail: whether to use "notify-send" when a given command
was executed unsuccessfully (read from config)
:type do_notify_fail: bool
:param timeout: the timeout for notifications
:type timeout: int
:param do_bell_success: whether to ring the bell when a given command
was executed successfully (read from config)
:type do_bell_success: bool
:param do_bell_fail: whether to ring the bell when a given command
was executed unsuccessfully (read from config)
:type do_bell_fail: bool
:param allow_multiple_instances: we quit if this is false and there is
already an instance running
:type allow_multiple_instances: bool
:param print_stats: whether to print command statistics
:type print_stats: bool
:param talk: whether we should print conversational messages or
not
:type talk: bool
"""
def __init__(self, command_group: CyBldConfigCommandGroup, runner_configs,
settings: CyBldConfigSettings):
# This handler should be executed at the end, since it also removes
# the base directory in case no cybld session is left
atexit.register(self._close_socket)
self.settings = settings
self.notifier = CyBldNotifier(settings)
self.command_handler = CyBldCommandHandler(command_group, runner_configs, settings,
self.notifier.notify_success,
self.notifier.notify_fail)
self.server = None
self.socket_name = self._generate_new_random_socket_name()
if not settings.allow_multiple:
self._quit_if_instance_exists()
logging.info("Starting with " + cybld_helpers.BOLD_TEXT +
command_group.name + cybld_helpers.COLOR_END +
" config (" + self.socket_name + ")")
logging.info("The codeword has to match " + cybld_helpers.BOLD_TEXT +
command_group.regex_codeword.pattern + cybld_helpers.COLOR_END)
logging.info("Commands are: " +
"\n" + "- CMD0: " + command_group.cmd0 +
"\n" + "- CMD1: " + command_group.cmd1 +
"\n" + "- CMD2: " + command_group.cmd2)
cybld_helpers.print_seperator_lines()
self._open_socket()
self._start_main_loop()
def _generate_new_random_socket_name(self):
""" Use the next available cybld socket name (letters a-z) """
current_socket_names = cybld_helpers.get_current_socket_names()
for letter in string.ascii_lowercase:
socket_name = cybld_helpers.SOCKET_BASE_NAME + "-" + letter
if socket_name not in current_socket_names:
return socket_name
logging.critical("Too many instances already running...")
sys.exit(1)
def _quit_if_instance_exists(self):
""" Exit if a cybld instance is already running """
current_socket_names = cybld_helpers.get_current_socket_names()
if len(current_socket_names) > 0:
logging.critical("IPC socket is already started!")
sys.exit(1)
def _open_socket(self):
""" Open a new socket (with the previously generated socket name) """
assert(self.server is None)
self.server = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
self.server.bind(os.path.join(cybld_helpers.get_base_path(), self.socket_name))
def _start_main_loop(self):
"""
Start the main loop.
Wait for incoming messages on the socket, "parse" them via pickle and
then forward the command to the command handler.
"""
assert(self.server is not None)
try:
while True:
data = self.server.recv(1024)
if not data:
break
else:
cmd = pickle.loads(data)
self.command_handler.handle_incoming_ipc_message(cmd)
        except Exception:
            # Socket errors (e.g. the socket closing at shutdown) end the loop
            pass
    def _close_socket(self):
        """ Close the IPC socket (at shutdown) """
        if self.server is None:
            # We may exit before the socket is ever opened; nothing to clean up
            return
        logging.info("Shutdown initiated for " + self.socket_name)
        self.server.close()
os.remove(os.path.join(cybld_helpers.get_base_path(), self.socket_name))
files_in_base_path = os.listdir(cybld_helpers.get_base_path())
for f in files_in_base_path:
if f.startswith(cybld_helpers.NVIM_LOG_PREFIX):
os.remove(os.path.join(cybld_helpers.get_base_path(), f))
if len(cybld_helpers.get_current_socket_names()) == 0:
logging.info("This was the last cybld session!")
os.rmdir(cybld_helpers.get_base_path())
# --------------------------------------------------------------------------
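# For context: CyBldIpcServer._start_main_loop expects pickled payloads on a
# unix datagram socket. A minimal client-side sketch (assuming a socket
# created by a running server; the payload string is a placeholder, since the
# real message format is defined by CyBldCommandHandler):
#
# import os
# import pickle
# import socket
#
# from cybld import cybld_helpers
#
# sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
# sock.sendto(pickle.dumps("cmd0"),
#             os.path.join(cybld_helpers.get_base_path(),
#                          cybld_helpers.SOCKET_BASE_NAME + "-a"))
# sock.close()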
| {
"content_hash": "05ecd1f47389d4ff92c000c8d14ef412",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 91,
"avg_line_length": 40.38157894736842,
"alnum_prop": 0.5804822417725644,
"repo_name": "dcvetko/cybld",
"id": "89c8b6b11299b84ab8882762ab55dd3fdb7daf52",
"size": "6330",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cybld/cybld_ipc_server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2311"
},
{
"name": "Python",
"bytes": "97452"
}
],
"symlink_target": ""
} |
from flask import Flask
from coh2stats.dao import DAO
from coh2stats.config import Config
from coh2stats.config import schedule
from coh2stats.weeklystats.tasks import *
from coh2stats.personalstats.tasks import *
dao = DAO()
def create_app():
app = Flask(__name__, static_folder='assets')
from coh2stats.weeklystats.routes import stats
app.register_blueprint(stats)
return app
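# A minimal sketch of using the factory above from a launcher script
# (host/port values are arbitrary examples):
#
# app = create_app()
# app.run(host="127.0.0.1", port=5000)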
| {
"content_hash": "02b4819cfd06faadd990cb774d724f46",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 50,
"avg_line_length": 23.470588235294116,
"alnum_prop": 0.7619047619047619,
"repo_name": "ZEDGR/coh2stats",
"id": "d9f06c5aca79f36afd028fa6253d3a0e474aa03d",
"size": "399",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coh2stats/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5643"
},
{
"name": "HTML",
"bytes": "39066"
},
{
"name": "Python",
"bytes": "20435"
}
],
"symlink_target": ""
} |
"""
Single interface for code that varies across Python environments.
"""
from builtins import zip
from builtins import next
import inspect
import sys
# backport of inspect.getcallargs from 2.7
def getcallargs(func, *positional, **named):
"""Get the mapping of arguments to values.
A dict is returned, with keys the function argument names (including the
names of the * and ** arguments, if any), and values the respective bound
values from 'positional' and 'named'."""
args, varargs, varkw, defaults = inspect.getargspec(func)
f_name = func.__name__
arg2value = {}
# The following closures are basically because of tuple parameter unpacking.
assigned_tuple_params = []
def assign(arg, value):
if isinstance(arg, str):
arg2value[arg] = value
else:
assigned_tuple_params.append(arg)
value = iter(value)
for i, subarg in enumerate(arg):
try:
subvalue = next(value)
except StopIteration:
raise ValueError('need more than %d %s to unpack' %
(i, 'values' if i > 1 else 'value'))
assign(subarg, subvalue)
try:
next(value)
except StopIteration:
pass
else:
raise ValueError('too many values to unpack')
def is_assigned(arg):
if isinstance(arg, str):
return arg in arg2value
return arg in assigned_tuple_params
if inspect.ismethod(func) and func.__self__ is not None:
# implicit 'self' (or 'cls' for classmethods) argument
positional = (func.__self__,) + positional
num_pos = len(positional)
num_total = num_pos + len(named)
num_args = len(args)
num_defaults = len(defaults) if defaults else 0
for arg, value in zip(args, positional):
assign(arg, value)
if varargs:
if num_pos > num_args:
assign(varargs, positional[-(num_pos - num_args):])
else:
assign(varargs, ())
elif 0 < num_args < num_pos:
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at most' if defaults else 'exactly', num_args,
'arguments' if num_args > 1 else 'argument', num_total))
elif num_args == 0 and num_total:
raise TypeError('%s() takes no arguments (%d given)' %
(f_name, num_total))
for arg in args:
if isinstance(arg, str) and arg in named:
if is_assigned(arg):
raise TypeError("%s() got multiple values for keyword "
"argument '%s'" % (f_name, arg))
else:
assign(arg, named.pop(arg))
if defaults: # fill in any missing values with the defaults
for arg, value in zip(args[-num_defaults:], defaults):
if not is_assigned(arg):
assign(arg, value)
if varkw:
assign(varkw, named)
elif named:
unexpected = next(iter(named))
if isinstance(unexpected, str):
unexpected = unexpected.encode(sys.getdefaultencoding(), 'replace')
raise TypeError("%s() got an unexpected keyword argument '%s'" %
(f_name, unexpected))
unassigned = num_args - len([arg for arg in args if is_assigned(arg)])
if unassigned:
num_required = num_args - num_defaults
raise TypeError('%s() takes %s %d %s (%d given)' % (
f_name, 'at least' if defaults else 'exactly', num_required,
'arguments' if num_required > 1 else 'argument', num_total))
return arg2value
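# A quick illustration of the backport above (hypothetical sample function):
#
# def f(a, b=2, **kw):
#     pass
#
# getcallargs(f, 1, c=3)  # -> {'a': 1, 'b': 2, 'kw': {'c': 3}}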
| {
"content_hash": "23d1caa31c36dc2cedd579fa8f053c97",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 80,
"avg_line_length": 38.8421052631579,
"alnum_prop": 0.570189701897019,
"repo_name": "venmo/btnamespace",
"id": "0e81810f8d78c05d47706a468a4be7e45086d5b8",
"size": "3690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "btnamespace/compat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46279"
}
],
"symlink_target": ""
} |
"""Utility functions for providing data directories."""
import os
import sys
import zipfile
import rarfile
from urllib.request import FancyURLopener
import shutil
import numpy as np
import matplotlib.image as mpimg
class Data():
# Should be a list containing all subdirectories of the main data dir which
# belong to this dataset
dirs = None
def __init__(self, data_dir, stat_log_dir,
development=True, fast_dir=None):
self.development = development
self.data_dir = data_dir
self.stat_log_dir = stat_log_dir
if not os.path.isdir(data_dir):
os.makedirs(data_dir)
self._fetch_if_missing()
self.fast_dir = fast_dir
if fast_dir:
print(">> Copying files to {}".format(fast_dir))
for d in self.dirs:
src = os.path.join(data_dir, d)
dst = os.path.join(fast_dir, d)
if not os.path.isdir(dst):
shutil.copytree(src, dst)
print(">> Copied {}".format(d))
self.current_dir = fast_dir
else:
self.current_dir = data_dir
if stat_log_dir:
self.stat_log_file = os.path.join(stat_log_dir,
self.__class__.__name__ + ".txt")
self._ensure_statistics()
def __del__(self):
pass
#if self.fast_dir:
# print(">> Removing files from {}".format(self.fast_dir))
# for d in self.dirs:
# shutil.rmtree(os.path.join(self.fast_dir, d))
def clear_statistics(self):
"""Delete saved statistics file if present."""
if self.stat_log_dir and os.path.isfile(self.stat_log_file):
os.remove(self.stat_log_file)
def _ensure_statistics(self):
"""Make sure we know the dataset statistics."""
if os.path.isfile(self.stat_log_file):
vals = np.loadtxt(self.stat_log_file)
self.mean = vals[0]
self.stddev = vals[1]
else:
print(">> Computing statistics (mean, variance) for {}"
.format(self.__class__.__name__))
mean, stddev = self.compute_statistics(self.get_raw_files())
self.mean = mean
self.stddev = stddev
os.makedirs(self.stat_log_dir, exist_ok=True)
np.savetxt(self.stat_log_file, [mean, stddev])
print(">> Statistics complete")
def get_raw_dirs(self):
"""Should return a list of all dirs containing training images.
Note: self.current_dir should be used for loading input data.
"""
raise NotImplementedError()
def get_raw_files(self):
files = []
for d in self.get_raw_dirs():
for path in os.listdir(d):
files.append(os.path.join(d, path))
return files
def _fetch_if_missing(self):
"""A call to this must make subsequent calls to get_raw_files succeed.
All subdirs of data_dir listed in self.dirs must exist after this call.
"""
raise NotImplementedError()
def _download_and_extract(self, url, extract_to, ext='zip'):
def _progress(count, block_size, total_size):
if total_size > 0:
print('\r>> Downloading %s %.1f%%' % (url,
float(count * block_size) / float(total_size) * 100.0), end=' ')
else:
print('\r>> Downloading %s' % (url), end=' ')
sys.stdout.flush()
urlretrieve = FancyURLopener().retrieve
local_zip_path = os.path.join(self.data_dir, 'tmp.' + ext)
urlretrieve(url, local_zip_path, _progress)
sys.stdout.write("\n>> Finished downloading. Unzipping...\n")
if ext == 'zip':
with zipfile.ZipFile(local_zip_path, "r") as zip_ref:
zip_ref.extractall(extract_to)
else:
with rarfile.RarFile(local_zip_path, "r") as zip_ref:
zip_ref.extractall(extract_to)
sys.stdout.write(">> Finished unzipping.\n")
os.remove(local_zip_path)
self.clear_statistics()
def compute_statistics(self, files):
"""Use welford's method to compute mean and variance of the given
dataset.
See https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm."""
assert len(files) > 1
n = 0
mean = np.zeros(3)
M2 = np.zeros(3)
for j, filename in enumerate(files):
#TODO ensure the pixel values are 0..255
im = np.reshape(mpimg.imread(filename) * 255, [-1, 3])
            # Iterate over every pixel row of the [num_pixels, 3] array
            # (indexing axis 1 would only visit the three colour channels).
            for i in range(np.shape(im)[0]):
n = n + 1
delta = im[i] - mean
mean += delta / n
M2 += delta * (im[i] - mean)
sys.stdout.write('\r>> Processed %.1f%%' % (
float(j) / float(len(files)) * 100.0))
sys.stdout.flush()
var = M2 / (n - 1)
stddev = np.sqrt(var)
return np.float32(mean), np.float32(stddev)
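# A self-contained 1-D sketch of the same Welford update, handy for
# sanity-checking compute_statistics against numpy (the helper name and the
# sample values below are illustrative only):
def _welford_1d(values):
    """Return (mean, sample stddev) of an iterable via Welford's update."""
    n = 0
    mean = 0.0
    m2 = 0.0
    for x in values:
        n += 1
        delta = x - mean
        mean += delta / n
        m2 += delta * (x - mean)
    return mean, (m2 / (n - 1)) ** 0.5
# e.g. _welford_1d([1.0, 2.0, 3.0]) == (2.0, 1.0)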
| {
"content_hash": "ec16e38353aa6299e44160f51e12d1db",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 98,
"avg_line_length": 35.90845070422535,
"alnum_prop": 0.5465777603451657,
"repo_name": "simonmeister/UnFlow",
"id": "f4715b2327d6b14f26b307cbde250f3a11afa9fc",
"size": "5099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/e2eflow/core/data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "51688"
},
{
"name": "Python",
"bytes": "173979"
}
],
"symlink_target": ""
} |
from potatoes.vector import Vector
import math
class Circle:
@staticmethod
def collision(circle1, circle2):
diff = circle1._pos - circle2._pos
return diff.magnitude <= circle1.rad + circle2.rad
def __init__(self, pos, radius):
self._pos = pos
self.rad = radius
def update(self, gx, pos=None):
if pos is not None:
self._pos = pos
class Ellipse(Circle):
PRETTY_MUCH_CIRCULAR_CUTOFF = 0.28
ECCENTRICITY_CUTOFF = 0.45
"""
The maths was just too hard, so now all ellipses are
    approximated using up to four circles, depending on eccentricity,
as I assumed that we were not going to use ellipses that are
too stretched out.
"""
@staticmethod
def collision(e1, e2):
# Perform bounding box checking first to optimise
dy = abs(e1.pos.y - e2.pos.y)
dx = abs(e1.pos.x - e2.pos.x)
if (dy > e1.half_height + e2.half_height or
dx > e1.half_width + e2.half_width):
return False
# Check each ellipses' circle with each other, 9 checks total
for circle1 in e1.circles:
for circle2 in e2.circles:
if Circle.collision(circle1, circle2):
return True
return False
def __init__(self, pos, half_width, half_height, gx):
"""
This whole thing assumes that the ellipse is taller than it
is wide, as with faces or whatnot.
        :param pos: position of the ellipse centre, as a Vector
        :param half_width: half the horizontal width of
                           the ellipse, related to semi-axes
        :param half_height: half the vertical length of
                            the ellipse, related to semi-axes
        :param gx: the canvas on which the bounding oval is drawn
        """
Circle.__init__(self, pos, half_width)
self.half_width = half_width
self.half_height = half_height
self.width = half_width * 2
self.height = half_height * 2
focus = math.sqrt(self.half_height**2 - self.half_width**2)
self.eccentricity = (focus / math.sqrt(half_height**2 + half_width**2))
self.circles = list()
# Create default three circles
self.circles.append(Circle(self._pos, half_width))
if self.eccentricity > Ellipse.PRETTY_MUCH_CIRCULAR_CUTOFF:
self.circles.append(Circle(Vector(self._pos.x, self._pos.y+focus),
half_height-focus))
if self.eccentricity > Ellipse.ECCENTRICITY_CUTOFF:
# Use two additional circles to compensate for additional
# eccentricity
half_focus = focus // 2
rad = min(half_height-half_focus,
self.get_x(self._pos.y-half_focus))
self.circles.append(Circle(
Vector(self._pos.x, self._pos.y-half_focus), rad))
self.circles.append(Circle(
Vector(self._pos.x, self._pos.y+half_focus), rad))
# Create draw ellipse:
tag = str(self)
self.ellipse = gx.create_oval(
self._pos.x-self.width // 2,
self._pos.y-self.height // 2,
self._pos.x+self.width // 2,
self._pos.y+self.height // 2,
outline='yellow',
tag=tag
)
def get_x(self, y):
return self.half_width * math.sqrt(
1 - (((y-self._pos.y)/self.half_height)**2))
def update(self, gx, pos=None):
if pos is not None:
self._pos = pos
gx.coords(self.ellipse, (
self._pos.x-self.half_width, self._pos.y-self.half_height,
self._pos.x+self.half_width, self._pos.y+self.half_height)
)
for circle in self.circles:
circle.update(gx, pos)
# # For debugging:
# for circle in self.circles:
# circle.update(gx)
@property
def pos(self):
return self._pos
@pos.setter
def pos(self, value):
diff = value - self.pos
self._pos += diff
for circle in self.circles:
circle._pos += diff
# # Testing code
# from tkinter import *
# root = Tk()
# c = Canvas(root, width=500, height=500, bg='black')
# e1 = Ellipse(Vector(200, 200), 93, 100, c)
# e2 = Ellipse(Vector(360, 290), 65, 110, c)
# e1.update(c)
# e2.update(c)
# print(e1.eccentricity)
# print(Ellipse.collision(e1, e2))
#
# c.pack()
# root.mainloop()
| {
"content_hash": "893fe9fb476f2d68cb0f787fe1fc74b8",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 79,
"avg_line_length": 33.63076923076923,
"alnum_prop": 0.5699908508691675,
"repo_name": "RSteyn/Potatoes",
"id": "006087f2465d3613a4b3f09c41caf9696e4b8f87",
"size": "4372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "potatoes/ellipse.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "39117"
}
],
"symlink_target": ""
} |
import serial
from platformio.commands.device import DeviceMonitorFilter
class Hexlify(DeviceMonitorFilter):
NAME = "hexlify"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._counter = 0
def rx(self, text):
result = ""
for b in serial.iterbytes(text):
if (self._counter % 16) == 0:
result += "\n{:04X} | ".format(self._counter)
asciicode = ord(b)
if asciicode <= 255:
result += "{:02X} ".format(asciicode)
else:
result += "?? "
self._counter += 1
return result
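# For illustration, feeding b"ABC" through rx() on a fresh filter yields
# output shaped like (the exact bytes depend on the monitored device):
#
#   0000 | 41 42 43
#
# with sixteen bytes per row and the running offset printed in hex.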
| {
"content_hash": "0fcd9a9fe47795760eb3faad6aa59bd7",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 61,
"avg_line_length": 27.25,
"alnum_prop": 0.5061162079510704,
"repo_name": "platformio/platformio",
"id": "7b7538b5f68c5b4c6998f3c9b24214781e6cff20",
"size": "1265",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "platformio/commands/device/filters/hexlify.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "1826"
},
{
"name": "Makefile",
"bytes": "356"
},
{
"name": "Processing",
"bytes": "101"
},
{
"name": "Python",
"bytes": "333618"
},
{
"name": "Smarty",
"bytes": "45408"
}
],
"symlink_target": ""
} |
""" command to speak to a socket """
import os
import sys
import getopt
import yaml
import json
import socketalk
import getpass
CONFIG_FILE="speak.yaml"
def speak(message):
if not os.path.exists(CONFIG_FILE):
sys.exit("Error: Can not exists %s" % CONFIG_FILE)
with open(CONFIG_FILE, 'r') as f:
        data = yaml.safe_load(f)
    if "host" in data and "port" in data:
        host = data['host']
        port = data['port']
        print socketalk.Speak(message, host, port)
    else:
        sys.exit("Error: %s must define both 'host' and 'port'" % CONFIG_FILE)
def usage():
""" print usage """
sys.exit("Usage: %s {--enqueue} [exec] \"message\"\n" % sys.argv[0])
def main():
""" check argv """
option = 'h'
long_option = ["help", "enqueue"]
message = {}
queue_message = {}
try:
opts, args = getopt.getopt(sys.argv[1:], option, long_option)
except getopt.GetoptError, detail:
raise Exception('GetoptError: %s' % detail)
# check opts
for opt, arg in opts:
if opt in ["-h", "--help"]:
usage()
elif opt == "--enqueue":
queue_message = {'type': "enqueue"}
# check args
if args[0] == "exec":
message = {'type': "execute",
'command': " ".join(args[1:]),
'uid': os.getuid(),
'gid': os.getgid(),
'username': getpass.getuser()}
if queue_message and message:
queue_message['item'] = message
message = queue_message
speak(json.dumps(message))
if __name__ == '__main__':
main()
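# For illustration, "python speak.py --enqueue exec ls -l" sends a JSON
# payload of roughly this shape (uid/gid/username depend on the caller):
#
# {"type": "enqueue",
#  "item": {"type": "execute", "command": "ls -l",
#           "uid": 1000, "gid": 1000, "username": "alice"}}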
| {
"content_hash": "92d08b4809a9db443886c0b0ad2f1e47",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 72,
"avg_line_length": 23.476923076923075,
"alnum_prop": 0.5373525557011796,
"repo_name": "suitai/work-py",
"id": "d13dd87ce3da19bd5c5bcbf67b76d4606bacd65e",
"size": "1548",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "speak.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13617"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function
import errno
import json
import logging
import os
import shutil
import sys
import warnings
from shutil import rmtree as _rmtree
import pytest
import requests
from pipenv._compat import Path
from pipenv.exceptions import VirtualenvActivationException
from pipenv.vendor import delegator, toml, tomlkit
from pipenv.vendor.vistir.compat import (
FileNotFoundError, PermissionError, ResourceWarning, TemporaryDirectory,
fs_encode, fs_str
)
from pipenv.vendor.vistir.contextmanagers import temp_environ
from pipenv.vendor.vistir.misc import run
from pipenv.vendor.vistir.path import (
create_tracked_tempdir, handle_remove_readonly, mkdir_p
)
from pytest_pypi.app import prepare_fixtures
from pytest_pypi.app import prepare_packages as prepare_pypi_packages
log = logging.getLogger(__name__)
warnings.simplefilter("default", category=ResourceWarning)
HAS_WARNED_GITHUB = False
def try_internet(url="http://httpbin.org/ip", timeout=1.5):
resp = requests.get(url, timeout=timeout)
resp.raise_for_status()
def check_internet():
has_internet = False
for url in ("http://httpbin.org/ip", "http://clients3.google.com/generate_204"):
try:
try_internet(url)
except KeyboardInterrupt:
warnings.warn(
"Skipped connecting to internet: {0}".format(url), RuntimeWarning
)
except Exception:
warnings.warn(
"Failed connecting to internet: {0}".format(url), RuntimeWarning
)
else:
has_internet = True
break
return has_internet
def check_github_ssh():
res = False
try:
# `ssh -T git@github.com` will return successfully with return_code==1
# and message 'Hi <username>! You've successfully authenticated, but
# GitHub does not provide shell access.' if ssh keys are available and
# registered with GitHub. Otherwise, the command will fail with
# return_code=255 and say 'Permission denied (publickey).'
c = delegator.run('ssh -o StrictHostKeyChecking=no -o CheckHostIP=no -T git@github.com', timeout=30)
res = True if c.return_code == 1 else False
except KeyboardInterrupt:
warnings.warn(
"KeyboardInterrupt while checking GitHub ssh access", RuntimeWarning
)
except Exception:
pass
global HAS_WARNED_GITHUB
if not res and not HAS_WARNED_GITHUB:
warnings.warn(
'Cannot connect to GitHub via SSH', RuntimeWarning
)
warnings.warn(
'Will skip tests requiring SSH access to GitHub', RuntimeWarning
)
HAS_WARNED_GITHUB = True
return res
def check_for_mercurial():
c = delegator.run("hg --help")
if c.return_code != 0:
return False
else:
return True
TESTS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PYPI_VENDOR_DIR = os.path.join(TESTS_ROOT, 'pypi')
WE_HAVE_HG = check_for_mercurial()
prepare_fixtures(os.path.join(PYPI_VENDOR_DIR, "fixtures"))
prepare_pypi_packages(PYPI_VENDOR_DIR)
def pytest_runtest_setup(item):
if item.get_closest_marker('needs_internet') is not None and not WE_HAVE_INTERNET:
pytest.skip('requires internet')
if item.get_closest_marker('needs_github_ssh') is not None and not WE_HAVE_GITHUB_SSH_KEYS:
pytest.skip('requires github ssh')
if item.get_closest_marker('needs_hg') is not None and not WE_HAVE_HG:
pytest.skip('requires mercurial')
if item.get_closest_marker('skip_py27_win') is not None and (
sys.version_info[:2] <= (2, 7) and os.name == "nt"
):
pytest.skip('must use python > 2.7 on windows')
if item.get_closest_marker('skip_py38') is not None and (
sys.version_info[:2] == (3, 8)
):
pytest.skip('test not applicable on python 3.8')
if item.get_closest_marker('py3_only') is not None and (
sys.version_info < (3, 0)
):
pytest.skip('test only runs on python 3')
if item.get_closest_marker('skip_osx') is not None and sys.platform == 'darwin':
pytest.skip('test does not apply on OSX')
if item.get_closest_marker('lte_py36') is not None and (
sys.version_info >= (3, 7)
):
pytest.skip('test only runs on python < 3.7')
if item.get_closest_marker('skip_py36') is not None and (
sys.version_info[:2] == (3, 6)
):
pytest.skip('test is skipped on python 3.6')
if item.get_closest_marker('skip_windows') is not None and (os.name == 'nt'):
pytest.skip('test does not run on windows')
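# For reference, a test opts into the guards above by tagging itself with the
# corresponding marker, e.g. (a minimal sketch; the test body is a placeholder):
#
# @pytest.mark.needs_internet
# def test_requires_network(PipenvInstance):
#     ...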
@pytest.fixture
def pathlib_tmpdir(request, tmpdir):
yield Path(str(tmpdir))
try:
tmpdir.remove(ignore_errors=True)
except Exception:
pass
def _create_tracked_dir():
tmp_location = os.environ.get("TEMP", os.environ.get("TMP"))
temp_args = {"prefix": "pipenv-", "suffix": "-test"}
if tmp_location is not None:
temp_args["dir"] = tmp_location
temp_path = create_tracked_tempdir(**temp_args)
return temp_path
@pytest.fixture
def vistir_tmpdir():
temp_path = _create_tracked_dir()
yield Path(temp_path)
@pytest.fixture()
def local_tempdir(request):
old_temp = os.environ.get("TEMP", "")
new_temp = Path(os.getcwd()).absolute() / "temp"
new_temp.mkdir(parents=True, exist_ok=True)
os.environ["TEMP"] = new_temp.as_posix()
def finalize():
os.environ['TEMP'] = fs_str(old_temp)
_rmtree_func(new_temp.as_posix())
request.addfinalizer(finalize)
with TemporaryDirectory(dir=new_temp.as_posix()) as temp_dir:
yield Path(temp_dir.name)
@pytest.fixture(name='create_tmpdir')
def vistir_tmpdir_factory():
def create_tmpdir():
return Path(_create_tracked_dir())
yield create_tmpdir
# Borrowed from pip's test runner filesystem isolation
@pytest.fixture(autouse=True)
def isolate(create_tmpdir):
"""
Isolate our tests so that things like global configuration files and the
like do not affect our test results.
We use an autouse function scoped fixture because we want to ensure that
    every test has its own isolated home directory.
"""
# Create a directory to use as our home location.
home_dir = os.path.join(str(create_tmpdir()), "home")
os.makedirs(home_dir)
mkdir_p(os.path.join(home_dir, ".config", "git"))
git_config_file = os.path.join(home_dir, ".config", "git", "config")
with open(git_config_file, "wb") as fp:
fp.write(
b"[user]\n\tname = pipenv\n\temail = pipenv@pipenv.org\n"
)
# os.environ["GIT_CONFIG"] = fs_str(git_config_file)
os.environ["GIT_CONFIG_NOSYSTEM"] = fs_str("1")
os.environ["GIT_AUTHOR_NAME"] = fs_str("pipenv")
os.environ["GIT_AUTHOR_EMAIL"] = fs_str("pipenv@pipenv.org")
os.environ["GIT_ASK_YESNO"] = fs_str("false")
workon_home = create_tmpdir()
os.environ["WORKON_HOME"] = fs_str(str(workon_home))
os.environ["HOME"] = os.path.abspath(home_dir)
mkdir_p(os.path.join(home_dir, "projects"))
# Ignore PIPENV_ACTIVE so that it works as under a bare environment.
os.environ.pop("PIPENV_ACTIVE", None)
os.environ.pop("VIRTUAL_ENV", None)
WE_HAVE_INTERNET = check_internet()
WE_HAVE_GITHUB_SSH_KEYS = False
class _Pipfile(object):
def __init__(self, path):
self.path = path
if self.path.exists():
self.loads()
else:
self.document = tomlkit.document()
self.document["source"] = self.document.get("source", tomlkit.aot())
self.document["requires"] = self.document.get("requires", tomlkit.table())
self.document["packages"] = self.document.get("packages", tomlkit.table())
self.document["dev_packages"] = self.document.get("dev_packages", tomlkit.table())
super(_Pipfile, self).__init__()
def install(self, package, value, dev=False):
section = "packages" if not dev else "dev_packages"
if isinstance(value, dict):
table = tomlkit.inline_table()
table.update(value)
self.document[section][package] = table
else:
self.document[section][package] = value
self.write()
def remove(self, package, dev=False):
section = "packages" if not dev else "dev_packages"
if not dev and package not in self.document[section]:
if package in self.document["dev_packages"]:
section = "dev_packages"
del self.document[section][package]
self.write()
def add(self, package, value, dev=False):
self.install(package, value, dev=dev)
def update(self, package, value, dev=False):
self.install(package, value, dev=dev)
def loads(self):
self.document = tomlkit.loads(self.path.read_text())
def dumps(self):
source_table = tomlkit.table()
pypi_url = os.environ.get("PIPENV_PYPI_URL", "https://pypi.org/simple")
source_table["url"] = os.environ.get("PIPENV_TEST_INDEX", pypi_url)
source_table["verify_ssl"] = False
source_table["name"] = "pipenv_test_index"
self.document["source"].append(source_table)
return tomlkit.dumps(self.document)
def write(self):
self.path.write_text(self.dumps())
@classmethod
def get_fixture_path(cls, path):
return Path(__file__).absolute().parent.parent / "test_artifacts" / path
@classmethod
def get_url(cls, pkg=None, filename=None):
pypi = os.environ.get("PIPENV_PYPI_URL")
if not pkg and not filename:
return pypi if pypi else "https://pypi.org/"
file_path = filename
if pkg and filename:
file_path = os.path.join(pkg, filename)
if filename and not pkg:
pkg = os.path.basename(filename)
fixture_pypi = os.getenv("ARTIFACT_PYPI_URL")
if fixture_pypi:
if pkg and not filename:
url = "{0}/artifacts/{1}".format(fixture_pypi, pkg)
else:
url = "{0}/artifacts/{1}/{2}".format(fixture_pypi, pkg, filename)
return url
if pkg and not filename:
return cls.get_fixture_path(file_path).as_uri()
class _PipenvInstance(object):
"""An instance of a Pipenv Project..."""
def __init__(
self, pypi=None, pipfile=True, chdir=False, path=None, home_dir=None,
venv_root=None, ignore_virtualenvs=True, venv_in_project=True, name=None
):
self.index_url = os.getenv("PIPENV_TEST_INDEX")
self.pypi = None
if pypi:
self.pypi = pypi.url
elif self.index_url is not None:
self.pypi, _, _ = self.index_url.rpartition("/") if self.index_url else ""
self.index = os.getenv("PIPENV_PYPI_INDEX")
os.environ["PYTHONWARNINGS"] = "ignore:DEPRECATION"
if ignore_virtualenvs:
os.environ["PIPENV_IGNORE_VIRTUALENVS"] = fs_str("1")
if venv_root:
os.environ["VIRTUAL_ENV"] = venv_root
if venv_in_project:
os.environ["PIPENV_VENV_IN_PROJECT"] = fs_str("1")
else:
os.environ.pop("PIPENV_VENV_IN_PROJECT", None)
self.original_dir = os.path.abspath(os.curdir)
path = path if path else os.environ.get("PIPENV_PROJECT_DIR", None)
if name is not None:
path = Path(os.environ["HOME"]) / "projects" / name
path.mkdir(exist_ok=True)
if not path:
path = TemporaryDirectory(suffix='-project', prefix='pipenv-')
if isinstance(path, TemporaryDirectory):
self._path = path
path = Path(self._path.name)
try:
self.path = str(path.resolve())
except OSError:
self.path = str(path.absolute())
elif isinstance(path, Path):
self._path = path
try:
self.path = str(path.resolve())
except OSError:
self.path = str(path.absolute())
else:
self._path = path
self.path = path
# set file creation perms
self.pipfile_path = None
self.chdir = chdir
if self.pypi and "PIPENV_PYPI_URL" not in os.environ:
os.environ['PIPENV_PYPI_URL'] = fs_str('{0}'.format(self.pypi))
# os.environ['PIPENV_PYPI_URL'] = fs_str('{0}'.format(self.pypi.url))
# os.environ['PIPENV_TEST_INDEX'] = fs_str('{0}/simple'.format(self.pypi.url))
if pipfile:
p_path = os.sep.join([self.path, 'Pipfile'])
with open(p_path, 'a'):
os.utime(p_path, None)
            self.chdir = chdir
self.pipfile_path = p_path
self._pipfile = _Pipfile(Path(p_path))
def __enter__(self):
if self.chdir:
os.chdir(self.path)
return self
def __exit__(self, *args):
warn_msg = 'Failed to remove resource: {!r}'
if self.chdir:
os.chdir(self.original_dir)
self.path = None
if self._path and getattr(self._path, "cleanup", None):
try:
self._path.cleanup()
except OSError as e:
_warn_msg = warn_msg.format(e)
warnings.warn(_warn_msg, ResourceWarning)
def pipenv(self, cmd, block=True):
if self.pipfile_path and os.path.isfile(self.pipfile_path):
os.environ['PIPENV_PIPFILE'] = fs_str(self.pipfile_path)
# a bit of a hack to make sure the virtualenv is created
with TemporaryDirectory(prefix='pipenv-', suffix='-cache') as tempdir:
os.environ['PIPENV_CACHE_DIR'] = fs_str(tempdir.name)
c = delegator.run(
'pipenv {0}'.format(cmd), block=block,
cwd=os.path.abspath(self.path), env=os.environ.copy()
)
if 'PIPENV_CACHE_DIR' in os.environ:
del os.environ['PIPENV_CACHE_DIR']
if 'PIPENV_PIPFILE' in os.environ:
del os.environ['PIPENV_PIPFILE']
# Pretty output for failing tests.
if block:
print('$ pipenv {0}'.format(cmd))
print(c.out)
print(c.err, file=sys.stderr)
if c.return_code != 0:
print("Command failed...")
# Where the action happens.
return c
@property
def pipfile(self):
p_path = os.sep.join([self.path, 'Pipfile'])
with open(p_path, 'r') as f:
return toml.loads(f.read())
@property
def lockfile(self):
p_path = self.lockfile_path
with open(p_path, 'r') as f:
return json.loads(f.read())
@property
def lockfile_path(self):
return os.sep.join([self.path, 'Pipfile.lock'])
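# Illustrative sketch (not from the original file): the intended shape of a
# test built on _PipenvInstance via the PipenvInstance fixture defined below.
# The package name and lockfile key are assumptions.
#
#     def test_basic_install(PipenvInstance):
#         with PipenvInstance(chdir=True) as p:
#             c = p.pipenv("install six")
#             assert c.return_code == 0
#             assert "six" in p.lockfile["default"]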
def _rmtree_func(path, ignore_errors=True, onerror=None):
directory = fs_encode(path)
shutil_rmtree = _rmtree
if onerror is None:
onerror = handle_remove_readonly
try:
shutil_rmtree(directory, ignore_errors=ignore_errors, onerror=onerror)
except (IOError, OSError, FileNotFoundError, PermissionError) as exc:
# Ignore removal failures where the file doesn't exist
if exc.errno != errno.ENOENT:
raise
@pytest.fixture()
def pip_src_dir(request, vistir_tmpdir):
old_src_dir = os.environ.get('PIP_SRC', '')
os.environ['PIP_SRC'] = vistir_tmpdir.as_posix()
def finalize():
os.environ['PIP_SRC'] = fs_str(old_src_dir)
request.addfinalizer(finalize)
return request
@pytest.fixture()
def PipenvInstance(pip_src_dir, monkeypatch, pypi):
with temp_environ(), monkeypatch.context() as m:
m.setattr(shutil, "rmtree", _rmtree_func)
original_umask = os.umask(0o007)
m.setenv("PIPENV_NOSPIN", fs_str("1"))
m.setenv("CI", fs_str("1"))
m.setenv('PIPENV_DONT_USE_PYENV', fs_str('1'))
m.setenv("PIPENV_TEST_INDEX", "{0}/simple".format(pypi.url))
m.setenv("PIPENV_PYPI_INDEX", "simple")
m.setenv("ARTIFACT_PYPI_URL", pypi.url)
m.setenv("PIPENV_PYPI_URL", pypi.url)
warnings.simplefilter("ignore", category=ResourceWarning)
warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*<ssl.SSLSocket.*>")
try:
yield _PipenvInstance
finally:
os.umask(original_umask)
@pytest.fixture()
def PipenvInstance_NoPyPI(monkeypatch, pip_src_dir, pypi):
with temp_environ(), monkeypatch.context() as m:
m.setattr(shutil, "rmtree", _rmtree_func)
original_umask = os.umask(0o007)
m.setenv("PIPENV_NOSPIN", fs_str("1"))
m.setenv("CI", fs_str("1"))
m.setenv('PIPENV_DONT_USE_PYENV', fs_str('1'))
m.setenv("PIPENV_TEST_INDEX", "{0}/simple".format(pypi.url))
m.setenv("ARTIFACT_PYPI_URL", pypi.url)
warnings.simplefilter("ignore", category=ResourceWarning)
warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*<ssl.SSLSocket.*>")
try:
yield _PipenvInstance
finally:
os.umask(original_umask)
@pytest.fixture()
def testsroot():
return TESTS_ROOT
class VirtualEnv(object):
def __init__(self, name="venv", base_dir=None):
if base_dir is None:
base_dir = Path(_create_tracked_dir())
self.base_dir = base_dir
self.name = name
self.path = base_dir / name
def __enter__(self):
self._old_environ = os.environ.copy()
self.create()
return self.activate()
def __exit__(self, *args, **kwargs):
os.environ = self._old_environ
def create(self):
python = Path(sys.executable).absolute().as_posix()
cmd = [
python, "-m", "virtualenv", self.path.absolute().as_posix()
]
c = run(
cmd, verbose=False, return_object=True, write_to_stdout=False,
combine_stderr=False, block=True, nospin=True,
)
# cmd = "{0} -m virtualenv {1}".format(python, self.path.as_posix())
# c = delegator.run(cmd, block=True)
assert c.returncode == 0
def activate(self):
script_path = "Scripts" if os.name == "nt" else "bin"
activate_this = self.path / script_path / "activate_this.py"
if activate_this.exists():
with open(str(activate_this)) as f:
code = compile(f.read(), str(activate_this), "exec")
exec(code, dict(__file__=str(activate_this)))
os.environ["VIRTUAL_ENV"] = str(self.path)
try:
return self.path.absolute().resolve()
except OSError:
return self.path.absolute()
else:
raise VirtualenvActivationException("Can't find the activate_this.py script.")
@pytest.fixture()
def virtualenv(vistir_tmpdir):
with temp_environ(), VirtualEnv(base_dir=vistir_tmpdir) as venv:
yield venv
@pytest.fixture()
def raw_venv():
yield VirtualEnv
| {
"content_hash": "84cb9ffa3154a23e09c4bef70c6f29a9",
"timestamp": "",
"source": "github",
"line_count": 549,
"max_line_length": 108,
"avg_line_length": 34.75591985428051,
"alnum_prop": 0.6074105130758346,
"repo_name": "kennethreitz/pipenv",
"id": "20374e013cbdbcf3e04fa90c51a577becf114739",
"size": "19104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "202"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Python",
"bytes": "2588085"
},
{
"name": "Roff",
"bytes": "40754"
}
],
"symlink_target": ""
} |
"""Define row constructs including :class:`.Row`."""
import operator
from .. import util
from ..sql import util as sql_util
from ..util.compat import collections_abc
MD_INDEX = 0 # integer index in cursor.description
# This reconstructor is necessary so that pickles with the C extension or
# without use the same Binary format.
try:
# We need a different reconstructor on the C extension so that we can
# add extra checks that fields have correctly been initialized by
# __setstate__.
from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
# The extra function embedding is needed so that the
# reconstructor function has the same signature whether or not
# the extension is present.
def rowproxy_reconstructor(cls, state):
return safe_rowproxy_reconstructor(cls, state)
except ImportError:
def rowproxy_reconstructor(cls, state):
obj = cls.__new__(cls)
obj.__setstate__(state)
return obj
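# Illustrative round-trip (an assumption, not part of the original module):
# both reconstructor variants give pickle a single entry point, so the same
# pickles load with or without the C extension:
#
#     import pickle
#     row2 = pickle.loads(pickle.dumps(row))  # dispatches to rowproxy_reconstructor
#     assert tuple(row2) == tuple(row)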
try:
from sqlalchemy.cresultproxy import BaseRow
_baserow_usecext = True
except ImportError:
_baserow_usecext = False
class BaseRow(object):
__slots__ = ("_parent", "_data", "_keymap")
def __init__(self, parent, processors, keymap, data):
"""Row objects are constructed by ResultProxy objects."""
self._parent = parent
self._data = tuple(
[
proc(value) if proc else value
for proc, value in zip(processors, data)
]
)
self._keymap = keymap
def __reduce__(self):
return (
rowproxy_reconstructor,
(self.__class__, self.__getstate__()),
)
def _values_impl(self):
return list(self)
def __iter__(self):
return iter(self._data)
def __len__(self):
return len(self._data)
def __hash__(self):
return hash(self._data)
def _subscript_impl(self, key, ismapping):
try:
rec = self._keymap[key]
except KeyError as ke:
rec = self._parent._key_fallback(key, ke)
except TypeError:
# the non-C version detects a slice using TypeError.
# this is pretty inefficient for the slice use case
# but is more efficient for the integer use case since we
# don't have to check it up front.
if isinstance(key, slice):
return tuple(self._data[key])
else:
raise
mdindex = rec[MD_INDEX]
if mdindex is None:
self._parent._raise_for_ambiguous_column_name(rec)
elif not ismapping and mdindex != key and not isinstance(key, int):
self._parent._warn_for_nonint(key)
# TODO: warn for non-int here, RemovedIn20Warning when available
return self._data[mdindex]
def _get_by_key_impl(self, key):
return self._subscript_impl(key, False)
def _get_by_key_impl_mapping(self, key):
# the C code has two different methods so that we can distinguish
# between tuple-like keys (integers, slices) and mapping-like keys
# (strings, objects)
return self._subscript_impl(key, True)
def __getattr__(self, name):
try:
return self._get_by_key_impl_mapping(name)
except KeyError as e:
util.raise_(AttributeError(e.args[0]), replace_context=e)
class Row(BaseRow, collections_abc.Sequence):
"""Represent a single result row.
The :class:`.Row` object represents a row of a database result. It is
typically associated in the 1.x series of SQLAlchemy with the
:class:`.ResultProxy` object, however is also used by the ORM for
tuple-like results as of SQLAlchemy 1.4.
The :class:`.Row` object seeks to act as much like a Python named
tuple as possible. For mapping (i.e. dictionary) behavior on a row,
such as testing for containment of keys, refer to the :attr:`.Row._mapping`
attribute.
.. seealso::
:ref:`coretutorial_selecting` - includes examples of selecting
rows from SELECT statements.
:class:`.LegacyRow` - Compatibility interface introduced in SQLAlchemy
1.4.
.. versionchanged:: 1.4
Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a
"proxy" object in that it contains the final form of data within it,
and now acts mostly like a named tuple. Mapping-like functionality is
moved to the :attr:`.Row._mapping` attribute, but will remain available
in SQLAlchemy 1.x series via the :class:`.LegacyRow` class that is used
by :class:`.ResultProxy`. See :ref:`change_4710_core` for background
on this change.
"""
__slots__ = ()
@property
def _mapping(self):
"""Return a :class:`.RowMapping` for this :class:`.Row`.
This object provides a consistent Python mapping (i.e. dictionary)
interface for the data contained within the row. The :class:`.Row`
by itself behaves like a named tuple, however in the 1.4 series of
SQLAlchemy, the :class:`.LegacyRow` class is still used by Core which
continues to have mapping-like behaviors against the row object
itself.
.. seealso::
:attr:`.Row._fields`
.. versionadded:: 1.4
"""
return RowMapping(self)
def __contains__(self, key):
return key in self._data
def __getitem__(self, key):
return self._data[key]
def __getstate__(self):
return {"_parent": self._parent, "_data": self._data}
def __setstate__(self, state):
self._parent = parent = state["_parent"]
self._data = state["_data"]
self._keymap = parent._keymap
def _op(self, other, op):
return (
op(tuple(self), tuple(other))
if isinstance(other, Row)
else op(tuple(self), other)
)
__hash__ = BaseRow.__hash__
def __lt__(self, other):
return self._op(other, operator.lt)
def __le__(self, other):
return self._op(other, operator.le)
def __ge__(self, other):
return self._op(other, operator.ge)
def __gt__(self, other):
return self._op(other, operator.gt)
def __eq__(self, other):
return self._op(other, operator.eq)
def __ne__(self, other):
return self._op(other, operator.ne)
def __repr__(self):
return repr(sql_util._repr_row(self))
@util.deprecated_20(
":meth:`.Row.keys`",
alternative="Use the namedtuple standard accessor "
":attr:`.Row._fields`, or for full mapping behavior use "
"row._mapping.keys() ",
)
def keys(self):
"""Return the list of keys as strings represented by this
:class:`.Row`.
This method is analogous to the Python dictionary ``.keys()`` method,
except that it returns a list, not an iterator.
.. seealso::
:attr:`.Row._fields`
:attr:`.Row._mapping`
"""
return [k for k in self._parent.keys if k is not None]
@property
def _fields(self):
"""Return a tuple of string keys as represented by this
:class:`.Row`.
This attribute is analogous to the Python named tuple ``._fields``
attribute.
.. versionadded:: 1.4
.. seealso::
:attr:`.Row._mapping`
"""
return tuple([k for k in self._parent.keys if k is not None])
def _asdict(self):
"""Return a new dict which maps field names to their corresponding
values.
This method is analogous to the Python named tuple ``._asdict()``
method, and works by applying the ``dict()`` constructor to the
:attr:`.Row._mapping` attribute.
.. versionadded:: 1.4
.. seealso::
:attr:`.Row._mapping`
"""
return dict(self._mapping)
def _replace(self):
raise NotImplementedError()
@property
def _field_defaults(self):
raise NotImplementedError()
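# Illustrative usage (a sketch, assuming ``row`` was fetched from a 1.4-style
# result and the column names are hypothetical):
#
#     row[0]                  # positional, named-tuple style
#     row.name                # attribute access via __getattr__
#     row._fields             # ('id', 'name')
#     row._mapping["name"]    # mapping-style access
#     dict(row._mapping)      # equivalent to row._asdict()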
class LegacyRow(Row):
"""A subclass of :class:`.Row` that delivers 1.x SQLAlchemy behaviors
for Core.
The :class:`.LegacyRow` class is where most of the Python mapping
(i.e. dictionary-like)
behaviors are implemented for the row object. The mapping behavior
of :class:`.Row` going forward is accessible via the :class:`.Row._mapping`
attribute.
.. versionadded:: 1.4 - added :class:`.LegacyRow` which encapsulates most
of the deprecated behaviors of :class:`.Row`.
"""
def __contains__(self, key):
return self._parent._contains(key, self)
def __getitem__(self, key):
return self._get_by_key_impl(key)
@util.deprecated(
"1.4",
"The :meth:`.LegacyRow.has_key` method is deprecated and will be "
"removed in a future release. To test for key membership, use "
"the :attr:`Row._mapping` attribute, i.e. 'key in row._mapping`.",
)
def has_key(self, key):
"""Return True if this :class:`.LegacyRow` contains the given key.
Through the SQLAlchemy 1.x series, the ``__contains__()`` method of
:class:`.Row` (or :class:`.LegacyRow` as of SQLAlchemy 1.4) also links
to :meth:`.Row.has_key`, in that an expression such as ::
"some_col" in row
Will return True if the row contains a column named ``"some_col"``,
in the way that a Python mapping works.
However, it is planned that the 2.0 series of SQLAlchemy will reverse
this behavior so that ``__contains__()`` will refer to a value being
present in the row, in the way that a Python tuple works.
.. seealso::
:ref:`change_4710_core`
"""
return self._parent._has_key(key)
@util.deprecated(
"1.4",
"The :meth:`.LegacyRow.items` method is deprecated and will be "
"removed in a future release. Use the :attr:`Row._mapping` "
"attribute, i.e., 'row._mapping.items()'.",
)
def items(self):
"""Return a list of tuples, each tuple containing a key/value pair.
This method is analogous to the Python dictionary ``.items()`` method,
except that it returns a list, not an iterator.
"""
return [(key, self[key]) for key in self.keys()]
@util.deprecated(
"1.4",
"The :meth:`.LegacyRow.iterkeys` method is deprecated and will be "
"removed in a future release. Use the :attr:`Row._mapping` "
"attribute, i.e., 'row._mapping.keys()'.",
)
def iterkeys(self):
"""Return a an iterator against the :meth:`.Row.keys` method.
This method is analogous to the Python-2-only dictionary
``.iterkeys()`` method.
"""
return iter(self._parent.keys)
@util.deprecated(
"1.4",
"The :meth:`.LegacyRow.itervalues` method is deprecated and will be "
"removed in a future release. Use the :attr:`Row._mapping` "
"attribute, i.e., 'row._mapping.values()'.",
)
def itervalues(self):
"""Return a an iterator against the :meth:`.Row.values` method.
This method is analogous to the Python-2-only dictionary
``.itervalues()`` method.
"""
return iter(self)
@util.deprecated(
"1.4",
"The :meth:`.LegacyRow.values` method is deprecated and will be "
"removed in a future release. Use the :attr:`Row._mapping` "
"attribute, i.e., 'row._mapping.values()'.",
)
def values(self):
"""Return the values represented by this :class:`.Row` as a list.
This method is analogous to the Python dictionary ``.values()`` method,
except that it returns a list, not an iterator.
"""
return self._values_impl()
BaseRowProxy = BaseRow
RowProxy = Row
class ROMappingView(
collections_abc.KeysView,
collections_abc.ValuesView,
collections_abc.ItemsView,
):
__slots__ = (
"_mapping",
"_items",
)
def __init__(self, mapping, items):
self._mapping = mapping
self._items = items
def __len__(self):
return len(self._items)
def __repr__(self):
return "{0.__class__.__name__}({0._mapping!r})".format(self)
def __iter__(self):
return iter(self._items)
def __contains__(self, item):
return item in self._items
def __eq__(self, other):
return list(other) == list(self)
def __ne__(self, other):
return list(other) != list(self)
class RowMapping(collections_abc.Mapping):
"""A ``Mapping`` that maps column names and objects to :class:`.Row` values.
The :class:`.RowMapping` is available from a :class:`.Row` via the
:attr:`.Row._mapping` attribute and supplies Python mapping (i.e.
dictionary) access to the contents of the row. This includes support
for testing of containment of specific keys (string column names or
objects), as well as iteration of keys, values, and items::
for row in result:
if 'a' in row._mapping:
print("Column 'a': %s" % row._mapping['a'])
print("Column b: %s" % row._mapping[table.c.b])
.. versionadded:: 1.4 The :class:`.RowMapping` object replaces the
mapping-like access previously provided by a database result row,
which now seeks to behave mostly like a named tuple.
"""
__slots__ = ("row",)
def __init__(self, row):
self.row = row
def __getitem__(self, key):
return self.row._get_by_key_impl_mapping(key)
def __iter__(self):
return (k for k in self.row._parent.keys if k is not None)
def __len__(self):
return len(self.row)
def __contains__(self, key):
return self.row._parent._has_key(key)
def items(self):
"""Return a view of key/value tuples for the elements in the
underlying :class:`.Row`.
"""
return ROMappingView(self, [(key, self[key]) for key in self.keys()])
def keys(self):
"""Return a view of 'keys' for string column names represented
by the underlying :class:`.Row`.
"""
return ROMappingView(
self, [k for k in self.row._parent.keys if k is not None]
)
def values(self):
"""Return a view of values for the values represented in the
underlying :class:`.Row`.
"""
return ROMappingView(self, self.row._values_impl())
| {
"content_hash": "3428201aeb834289ae529381a42bcacf",
"timestamp": "",
"source": "github",
"line_count": 493,
"max_line_length": 80,
"avg_line_length": 30.04259634888438,
"alnum_prop": 0.5866585645803795,
"repo_name": "jam-py/jam-py",
"id": "b58b350e25a7f4ff2f4bbbd4ba6b5c507c0afb31",
"size": "15045",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "jam/third_party/sqlalchemy/engine/row.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "37683"
},
{
"name": "HTML",
"bytes": "67579"
},
{
"name": "JavaScript",
"bytes": "2789171"
},
{
"name": "Python",
"bytes": "432048"
}
],
"symlink_target": ""
} |
"""
.. module:: lesscpy.plib
:synopsis: Parse Nodes for Lesscpy
Copyright (c)
See LICENSE for details.
.. moduleauthor:: Johann T. Mariusson <jtm@robot.is>
"""
__all__ = [
'Block',
'Call',
'Deferred',
'Expression',
'Identifier',
'Mixin',
'Node',
'Property',
'Statement',
'String',
'Variable'
]
from .block import Block
from .call import Call
from .deferred import Deferred
from .expression import Expression
from .identifier import Identifier
from .mixin import Mixin
from .node import Node
from .property import Property
from .statement import Statement
from .string import String
from .variable import Variable
| {
"content_hash": "1564b49b6a5682d8d5468a73f551e381",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 52,
"avg_line_length": 20.9375,
"alnum_prop": 0.682089552238806,
"repo_name": "neumerance/cloudloon2",
"id": "fe6d5fe4a868da5b228e75e97da517ccd46db268",
"size": "693",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": ".venv/lib/python2.7/site-packages/lesscpy/plib/__init__.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "178040"
},
{
"name": "JavaScript",
"bytes": "460971"
},
{
"name": "Perl",
"bytes": "1954"
},
{
"name": "Python",
"bytes": "3227734"
},
{
"name": "Ruby",
"bytes": "76"
},
{
"name": "Shell",
"bytes": "14108"
}
],
"symlink_target": ""
} |
from docutils.parsers.rst import Directive, directives
from docutils import nodes
class configurationblock(nodes.General, nodes.Element):
pass
class ConfigurationBlock(Directive):
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
option_spec = {}
formats = {
'html': 'HTML',
'xml': 'XML',
'php': 'PHP',
'yaml': 'YAML',
'jinja': 'Twig',
'html+jinja': 'Twig',
'jinja+html': 'Twig',
'php+html': 'PHP',
'html+php': 'PHP',
'ini': 'INI',
'php-annotations': 'Annotations',
}
def run(self):
env = self.state.document.settings.env
node = nodes.Element()
node.document = self.state.document
self.state.nested_parse(self.content, self.content_offset, node)
entries = []
for i, child in enumerate(node):
if isinstance(child, nodes.literal_block):
# add a title (the language name) before each block
#targetid = "configuration-block-%d" % env.new_serialno('configuration-block')
#targetnode = nodes.target('', '', ids=[targetid])
#targetnode.append(child)
innernode = nodes.emphasis(self.formats[child['language']], self.formats[child['language']])
para = nodes.paragraph()
para += [innernode, child]
entry = nodes.list_item('')
entry.append(para)
entries.append(entry)
resultnode = configurationblock()
resultnode.append(nodes.bullet_list('', *entries))
return [resultnode]
def visit_configurationblock_html(self, node):
self.body.append(self.starttag(node, 'div', CLASS='configuration-block'))
def depart_configurationblock_html(self, node):
self.body.append('</div>\n')
def visit_configurationblock_latex(self, node):
pass
def depart_configurationblock_latex(self, node):
pass
def setup(app):
app.add_node(configurationblock,
html=(visit_configurationblock_html, depart_configurationblock_html),
latex=(visit_configurationblock_latex, depart_configurationblock_latex))
app.add_directive('configuration-block', ConfigurationBlock)
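# Illustrative reST usage (an assumption based on the directive above, not
# taken from its documentation): each nested literal block is prefixed with
# the display name looked up in ``formats``.
#
#     .. configuration-block::
#
#         .. code-block:: yaml
#
#             doctrine:
#                 dbal: ~
#
#         .. code-block:: xml
#
#             <doctrine:config />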
| {
"content_hash": "0e650ec4b661d491b31c9513044ffffc",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 108,
"avg_line_length": 33.23287671232877,
"alnum_prop": 0.5869744435284419,
"repo_name": "DionPieters/Symfony",
"id": "1f99687c37558729571fcd634cf0befdcde664ed",
"size": "3506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/doctrine/orm/docs/en/_exts/configurationblock.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3073"
},
{
"name": "CSS",
"bytes": "16180"
},
{
"name": "JavaScript",
"bytes": "17492"
},
{
"name": "PHP",
"bytes": "66218"
}
],
"symlink_target": ""
} |
import jpype
class ParserError(Exception):
def __init__(self, *args, **margs):
Exception.__init__(self, *args,**margs)
stanford_parser_classpath = None
def startJvm():
import os
os.environ.setdefault("STANFORD_PARSER_CLASSPATH", "/usr/stanford-parser/stanford-parser.jar:/usr/stanford-parser/stanford-parser-3.5.2-models.jar")
global stanford_parser_classpath
stanford_parser_classpath = os.environ["STANFORD_PARSER_CLASSPATH"]
JVMPath_8 = os.environ["JAVA8_JVM"]
jpype.startJVM(JVMPath_8,
"-ea"
,("-Djava.class.path=%s" % stanford_parser_classpath))
startJvm() # one jvm per python instance.
class Parser:
def __init__(self, pcfg_model_fname=None):
        if pcfg_model_fname is None:
self.pcfg_model_fname = "edu/stanford/nlp/models/lexparser/chinesePCFG.ser.gz"
else:
self.pcfg_model_fname = pcfg_model_fname
self.package_lexparser = jpype.JPackage("edu.stanford.nlp.parser.lexparser")
LexicalizedParser = jpype.JClass("edu.stanford.nlp.parser.lexparser.LexicalizedParser")
self.parser = LexicalizedParser.loadModel(self.pcfg_model_fname, ["-maxLength", "80"])
self.package = jpype.JPackage("edu.stanford.nlp")
tokenizerFactoryClass = self.package.process.__getattribute__("PTBTokenizer$PTBTokenizerFactory")
self.tokenizerFactory = tokenizerFactoryClass.newPTBTokenizerFactory(True, True)
self.tlp = self.parser.getOp().langpack();
self.gsf = self.tlp.grammaticalStructureFactory()
    def printInfo(self):
        Numberer = self.package.util.Numberer
        print("Grammar\t" +
              repr(Numberer.getGlobalNumberer("states").total()) + '\t' +
              repr(Numberer.getGlobalNumberer("tags").total()) + '\t' +
              repr(Numberer.getGlobalNumberer("words").total()) + '\t' +
              repr(self.parser.pparser.ug.numRules()) + '\t' +
              repr(self.parser.pparser.bg.numRules()) + '\t' +
              repr(self.parser.pparser.lex.numRules()))
        print("ParserPack is", self.parser.op.tlpParams.getClass())
        print("Lexicon is", self.parser.pd.lex.getClass())
        print("Tags are:", Numberer.getGlobalNumberer("tags"))
        self.parser.op.display()
        print("Test parameters")
        self.parser.op.tlpParams.display()
        self.package_lexparser.Test.display()
def parseToStanfordDependencies(self, sentence):
tree = self.parser.parse(sentence)
dep = self.gsf.newGrammaticalStructure(tree)
dep = dep.typedDependenciesCCprocessed()
return (tree, dep)
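# Illustrative usage (a sketch, not part of the original file): parse one
# pre-segmented Chinese sentence and inspect the typed dependencies. The
# sentence is hypothetical and must already be tokenized.
#
#     p = Parser()
#     tree, deps = p.parseToStanfordDependencies(u"我 喜欢 音乐")
#     print(tree)
#     for dep in deps:
#         print(dep)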
| {
"content_hash": "b59e3d3af9e0859efcc3a0c7a61e5139",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 152,
"avg_line_length": 39.214285714285715,
"alnum_prop": 0.6342440801457195,
"repo_name": "kahitomi/Stanford-parser-Python-Chniese",
"id": "99794463de6b7c97267fc75190a2ead17b97867b",
"size": "2895",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2895"
}
],
"symlink_target": ""
} |
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RegistrationProfile'
db.create_table('registration_registrationprofile', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], unique=True)),
('activation_key', self.gf('django.db.models.fields.CharField')(max_length=40)),
))
db.send_create_signal('registration', ['RegistrationProfile'])
def backwards(self, orm):
# Deleting model 'RegistrationProfile'
db.delete_table('registration_registrationprofile')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'registration.registrationprofile': {
'Meta': {'object_name': 'RegistrationProfile'},
'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['registration']
| {
"content_hash": "9698e077acf7ddddba92e782e91c822a",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 182,
"avg_line_length": 62.42857142857143,
"alnum_prop": 0.5665903890160183,
"repo_name": "bruth/django-registration2",
"id": "39f32cb245dee5e615820ab2298088f8f73e2d37",
"size": "4388",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "registration/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "50004"
}
],
"symlink_target": ""
} |
from distutils.version import LooseVersion
import warnings
import tensorflow as tf
from tensorflow_recommenders_addons.version import MIN_TF_VERSION
from tensorflow_recommenders_addons.version import MAX_TF_VERSION
def _check_tf_version():
"""Warn the user if the version of TensorFlow used is not supported.
  This is not a check for custom ops compatibility. This check only ensures that
we support this TensorFlow version if the user uses only Addons' Python code.
"""
if "dev" in tf.__version__:
warnings.warn(
"You are currently using a nightly version of TensorFlow ({}). \n"
"TensorFlow Recommenders Addons offers no support for the nightly versions of "
"TensorFlow. Some things might work, some other might not. \n"
"If you encounter a bug, do not file an issue on GitHub."
"".format(tf.__version__),
UserWarning,
)
return
min_version = LooseVersion(MIN_TF_VERSION)
max_version = LooseVersion(MAX_TF_VERSION)
if min_version <= LooseVersion(tf.__version__) <= max_version:
return
warnings.warn(
"Tensorflow Recommenders Addons supports using Python ops for all Tensorflow versions "
"above or equal to {} and strictly below {} (nightly versions are not "
"supported). \n "
"The versions of TensorFlow you are currently using is {} and is not "
"supported. \n"
"Some things might work, some things might not.\n"
"If you were to encounter a bug, do not file an issue.\n"
"If you want to make sure you're using a tested and supported configuration, "
"either change the TensorFlow version or the Recommenders Addons's version. \n"
"You can find the compatibility matrix in Recommenders Addon's readme:\n"
"https://github.com/tensorflow/recommenders-addons".format(
MIN_TF_VERSION, MAX_TF_VERSION, tf.__version__),
UserWarning,
)
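# Illustrative comparison (not part of the original module), assuming
# MIN_TF_VERSION = "2.5.1" and MAX_TF_VERSION = "2.8.3":
#
#     from distutils.version import LooseVersion
#     LooseVersion("2.6.0") >= LooseVersion("2.5.1")  # True  -> in range
#     LooseVersion("2.9.0") <= LooseVersion("2.8.3")  # False -> warning fires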
| {
"content_hash": "51d02c633d5995e831dee630ce9e3af4",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 93,
"avg_line_length": 39.833333333333336,
"alnum_prop": 0.6940376569037657,
"repo_name": "tensorflow/recommenders-addons",
"id": "74c9c1fe4a6d1f2f5a09d61bc64b74ab8674a1b5",
"size": "2765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow_recommenders_addons/utils/ensure_tf_install.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "168930"
},
{
"name": "C++",
"bytes": "650354"
},
{
"name": "Cuda",
"bytes": "2042749"
},
{
"name": "Dockerfile",
"bytes": "21751"
},
{
"name": "Python",
"bytes": "824960"
},
{
"name": "Shell",
"bytes": "45743"
},
{
"name": "Smarty",
"bytes": "99765"
},
{
"name": "Starlark",
"bytes": "80175"
}
],
"symlink_target": ""
} |
import logging
from os import environ as env
from . import logship
from . import logstamp
from . import logtag
from . import logtext
__all__ = ['logship', 'logstamp', 'logtag', 'logtext']
_log_level_default = logging.INFO
_log_level = getattr(logging, env.get('TAGALOG_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
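# Illustrative invocation (an assumption, not from the source): the level is
# read once at import time, so it is configured through the environment, e.g.
#
#     TAGALOG_LOGLEVEL=DEBUG logship < app.log
#
# An unset or unrecognised value falls back to logging.INFO.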
| {
"content_hash": "889c79a6ab1f4c61a225099efeb625cc",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 90,
"avg_line_length": 28,
"alnum_prop": 0.7142857142857143,
"repo_name": "alphagov/tagalog",
"id": "44a6e81e1f824c7b8a6aebf3e0815d86be006985",
"size": "392",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tagalog/command/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73495"
},
{
"name": "Shell",
"bytes": "6878"
}
],
"symlink_target": ""
} |
"""Test bitcoind with different proxy configuration.
Test plan:
- Start bitcoinds with different proxy configurations
- Use addnode to initiate connections
- Verify that proxies are connected to, and the right connection command is given
- Proxy configurations to test on bitcoind side:
- `-proxy` (proxy everything)
- `-onion` (proxy just onions)
- `-proxyrandomize` Circuit randomization
- Proxy configurations to test on proxy side,
- support no authentication (other proxy)
- support no authentication + user/pass authentication (Tor)
- proxy on IPv6
- Create various proxies (as threads)
- Create bitcoinds that connect to them
- Manipulate the bitcoinds using addnode (onetry) and observe effects
addnode connect to IPv4
addnode connect to IPv6
addnode connect to onion
addnode connect to generic DNS name
"""
import socket
import os
from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
PORT_MIN,
PORT_RANGE,
assert_equal,
)
from test_framework.netutil import test_ipv6_local
RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE # Start after p2p and rpc ports
class ProxyTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
def setup_nodes(self):
self.have_ipv6 = test_ipv6_local()
# Create two proxies on different ports
# ... one unauthenticated
self.conf1 = Socks5Configuration()
self.conf1.addr = ('127.0.0.1', RANGE_BEGIN + (os.getpid() % 1000))
self.conf1.unauth = True
self.conf1.auth = False
# ... one supporting authenticated and unauthenticated (Tor)
self.conf2 = Socks5Configuration()
self.conf2.addr = ('127.0.0.1', RANGE_BEGIN + 1000 + (os.getpid() % 1000))
self.conf2.unauth = True
self.conf2.auth = True
if self.have_ipv6:
# ... one on IPv6 with similar configuration
self.conf3 = Socks5Configuration()
self.conf3.af = socket.AF_INET6
self.conf3.addr = ('::1', RANGE_BEGIN + 2000 + (os.getpid() % 1000))
self.conf3.unauth = True
self.conf3.auth = True
else:
self.log.warning("Testing without local IPv6 support")
self.serv1 = Socks5Server(self.conf1)
self.serv1.start()
self.serv2 = Socks5Server(self.conf2)
self.serv2.start()
if self.have_ipv6:
self.serv3 = Socks5Server(self.conf3)
self.serv3.start()
# Note: proxies are not used to connect to local nodes
        # this is because the proxy to use is based on CService.GetNetwork(), which returns NET_UNROUTABLE for localhost
args = [
['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),'-proxyrandomize=0'],
['-listen', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
[]
]
if self.have_ipv6:
args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0', '-noonion']
self.add_nodes(self.num_nodes, extra_args=args)
self.start_nodes()
def node_test(self, node, proxies, auth, test_onion=True):
rv = []
# Test: outgoing IPv4 connection through node
node.addnode("15.61.23.23:1234", "onetry")
cmd = proxies[0].queue.get()
assert(isinstance(cmd, Socks5Command))
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"15.61.23.23")
assert_equal(cmd.port, 1234)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
if self.have_ipv6:
# Test: outgoing IPv6 connection through node
node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
cmd = proxies[1].queue.get()
assert(isinstance(cmd, Socks5Command))
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
assert_equal(cmd.port, 5443)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
if test_onion:
# Test: outgoing onion connection through node
node.addnode("bitcoinostk4e4re.onion:8333", "onetry")
cmd = proxies[2].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"bitcoinostk4e4re.onion")
assert_equal(cmd.port, 8333)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
# Test: outgoing DNS name connection through node
node.addnode("node.noumenon:8333", "onetry")
cmd = proxies[3].queue.get()
assert(isinstance(cmd, Socks5Command))
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"node.noumenon")
assert_equal(cmd.port, 8333)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
return rv
def run_test(self):
# basic -proxy
self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)
# -proxy plus -onion
self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)
# -proxy plus -onion, -proxyrandomize
rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
# Check that credentials as used for -proxyrandomize connections are unique
credentials = set((x.username,x.password) for x in rv)
assert_equal(len(credentials), len(rv))
if self.have_ipv6:
# proxy on IPv6 localhost
self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False)
def networks_dict(d):
r = {}
for x in d['networks']:
r[x['name']] = x
return r
# test RPC getnetworkinfo
n0 = networks_dict(self.nodes[0].getnetworkinfo())
for net in ['ipv4','ipv6','onion']:
assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr))
assert_equal(n0[net]['proxy_randomize_credentials'], True)
assert_equal(n0['onion']['reachable'], True)
n1 = networks_dict(self.nodes[1].getnetworkinfo())
for net in ['ipv4','ipv6']:
assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr))
assert_equal(n1[net]['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
assert_equal(n1['onion']['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['reachable'], True)
n2 = networks_dict(self.nodes[2].getnetworkinfo())
for net in ['ipv4','ipv6','onion']:
assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
assert_equal(n2[net]['proxy_randomize_credentials'], True)
assert_equal(n2['onion']['reachable'], True)
if self.have_ipv6:
n3 = networks_dict(self.nodes[3].getnetworkinfo())
for net in ['ipv4','ipv6']:
assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr))
assert_equal(n3[net]['proxy_randomize_credentials'], False)
assert_equal(n3['onion']['reachable'], False)
if __name__ == '__main__':
ProxyTest().main()
| {
"content_hash": "a4ccc74f97697d58b3d0806ccdc30a59",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 121,
"avg_line_length": 41.27918781725889,
"alnum_prop": 0.6083374323659616,
"repo_name": "maaku/bitcoin",
"id": "2eb1be47a53a5d1f153ef77ff4ec914f6104ff2e",
"size": "8346",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "test/functional/proxy_test.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "688621"
},
{
"name": "C++",
"bytes": "5414109"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "194024"
},
{
"name": "Makefile",
"bytes": "113993"
},
{
"name": "Objective-C",
"bytes": "141028"
},
{
"name": "Objective-C++",
"bytes": "6763"
},
{
"name": "Python",
"bytes": "1287194"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Shell",
"bytes": "66746"
}
],
"symlink_target": ""
} |
import json
import unittest
from django.test import SimpleTestCase, TestCase
from django.urls import reverse
from django.utils import translation
from wagtail.blocks.field_block import FieldBlockAdapter
from wagtail.contrib.table_block.blocks import DEFAULT_TABLE_OPTIONS, TableBlock
from wagtail.models import Page
from wagtail.test.testapp.models import TableBlockStreamPage
from wagtail.test.utils import WagtailTestUtils
from .blocks import TableInput
class TestTableBlock(TestCase):
def setUp(self):
self.default_table_options = {
"minSpareRows": 0,
"startRows": 3,
"startCols": 3,
"colHeaders": False,
"rowHeaders": False,
"contextMenu": True,
"editor": "text",
"stretchH": "all",
"height": 108,
"language": "en",
"renderer": "text",
"autoColumnSize": False,
}
def test_table_block_render(self):
"""
Test a generic render.
"""
value = {
"first_row_is_table_header": False,
"first_col_is_header": False,
"data": [
["Test 1", "Test 2", "Test 3"],
[None, None, None],
[None, None, None],
],
}
block = TableBlock()
result = block.render(value)
expected = """
<table>
<tbody>
<tr><td>Test 1</td><td>Test 2</td><td>Test 3</td></tr>
<tr><td></td><td></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
</tbody>
</table>
"""
self.assertHTMLEqual(result, expected)
self.assertIn("Test 2", result)
def test_table_block_alignment_render(self):
"""
Test a generic render with some cells aligned.
"""
value = {
"first_row_is_table_header": True,
"first_col_is_header": False,
"cell": [
{"row": 0, "col": 1, "className": "htLeft"},
{"row": 1, "col": 1, "className": "htRight"},
],
"data": [
["Test 1", "Test 2", "Test 3"],
[None, None, None],
[None, None, None],
],
}
block = TableBlock()
result = block.render(value)
expected = """
<table>
<thead>
<tr><th scope="col">Test 1</th><th scope="col" class="htLeft">Test 2</th><th scope="col">Test 3</th></tr>
</thead>
<tbody>
<tr><td></td><td class="htRight"></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
</tbody>
</table>
"""
self.assertHTMLEqual(result, expected)
self.assertIn("Test 2", result)
def test_render_empty_table(self):
"""
An empty table should render okay.
"""
block = TableBlock()
result = block.render(
{
"first_row_is_table_header": False,
"first_col_is_header": False,
"data": [[None, None, None], [None, None, None], [None, None, None]],
}
)
expected = """
<table>
<tbody>
<tr><td></td><td></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
</tbody>
</table>
"""
self.assertHTMLEqual(result, expected)
def test_do_not_render_html(self):
"""
Ensure that raw html doesn't render
by default.
"""
value = {
"first_row_is_table_header": False,
"first_col_is_header": False,
"data": [
["<p><strong>Test</strong></p>", None, None],
[None, None, None],
[None, None, None],
],
}
expected = """
<table>
<tbody>
<tr><td><p><strong>Test</strong></p></td><td></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
</tbody>
</table>
"""
block = TableBlock()
result = block.render(value)
self.assertHTMLEqual(result, expected)
def test_row_headers(self):
"""
Ensure that row headers are properly rendered.
"""
value = {
"first_row_is_table_header": True,
"first_col_is_header": False,
"data": [["Foo", "Bar", "Baz"], [None, None, None], [None, None, None]],
}
expected = """
<table>
<thead>
<tr><th scope="col">Foo</th><th scope="col">Bar</th><th scope="col">Baz</th></tr>
</thead>
<tbody>
<tr><td></td><td></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
</tbody>
</table>
"""
block = TableBlock()
result = block.render(value)
self.assertHTMLEqual(result, expected)
def test_column_headers(self):
"""
Ensure that column headers are properly rendered.
"""
value = {
"first_row_is_table_header": False,
"first_col_is_header": True,
"data": [
["Foo", "Bar", "Baz"],
["one", "two", "three"],
["four", "five", "six"],
],
}
expected = """
<table>
<tbody>
<tr><th scope="row">Foo</th><td>Bar</td><td>Baz</td></tr>
<tr><th scope="row">one</th><td>two</td><td>three</td></tr>
<tr><th scope="row">four</th><td>five</td><td>six</td></tr>
</tbody>
</table>
"""
block = TableBlock()
result = block.render(value)
self.assertHTMLEqual(result, expected)
def test_row_and_column_headers(self):
"""
Test row and column headers at the same time.
"""
value = {
"first_row_is_table_header": True,
"first_col_is_header": True,
"data": [
["Foo", "Bar", "Baz"],
["one", "two", "three"],
["four", "five", "six"],
],
}
expected = """
<table>
<thead>
<tr><th scope="col">Foo</th><th scope="col">Bar</th><th scope="col">Baz</th></tr>
</thead>
<tbody>
<tr><th scope="row">one</th><td>two</td><td>three</td></tr>
<tr><th scope="row">four</th><td>five</td><td>six</td></tr>
</tbody>
</table>
"""
block = TableBlock()
result = block.render(value)
self.assertHTMLEqual(result, expected)
def test_value_for_and_from_form(self):
"""
        Ensure value_for_form returns valid JSON and that
        value_from_form converts it back to Python.
"""
value = {
"first_row_is_table_header": False,
"first_col_is_header": False,
"data": [["Foo", 1, None], [3.5, "Bar", "Baz"]],
}
block = TableBlock()
expected_json = '{"first_row_is_table_header": false, "first_col_is_header": false, "data": [["Foo", 1, null], [3.5, "Bar", "Baz"]]}'
returned_json = block.value_for_form(value)
self.assertJSONEqual(expected_json, returned_json)
self.assertEqual(block.value_from_form(returned_json), value)
def test_is_html_renderer(self):
"""
Test that settings flow through correctly to
the is_html_renderer method.
"""
# TableBlock with default table_options
block1 = TableBlock()
self.assertIs(block1.is_html_renderer(), False)
# TableBlock with altered table_options
new_options = self.default_table_options.copy()
new_options["renderer"] = "html"
block2 = TableBlock(table_options=new_options)
self.assertIs(block2.is_html_renderer(), True)
def test_searchable_content(self):
value = {
"first_row_is_table_header": False,
"first_col_is_header": False,
"data": [
["Test 1", "Test 2", "Test 3"],
[None, "Bar", None],
[None, "Foo", None],
],
}
block = TableBlock()
content = block.get_searchable_content(value)
self.assertEqual(
content,
[
"Test 1",
"Test 2",
"Test 3",
"Bar",
"Foo",
],
)
def test_searchable_content_for_null_block(self):
value = None
block = TableBlock()
content = block.get_searchable_content(value)
self.assertEqual(content, [])
def test_render_with_extra_context(self):
"""
Test that extra context variables passed in block.render are passed through
to the template.
"""
block = TableBlock(template="tests/blocks/table_block_with_caption.html")
value = {
"first_row_is_table_header": False,
"first_col_is_header": False,
"data": [
["Test 1", "Test 2", "Test 3"],
[None, None, None],
[None, None, None],
],
}
result = block.render(value, context={"caption": "A fascinating table."})
self.assertIn("Test 1", result)
self.assertIn("<div>A fascinating table.</div>", result)
def test_table_block_caption_render(self):
"""
Test a generic render with caption.
"""
value = {
"table_caption": "caption",
"first_row_is_table_header": False,
"first_col_is_header": False,
"data": [
["Test 1", "Test 2", "Test 3"],
[None, None, None],
[None, None, None],
],
}
block = TableBlock()
result = block.render(value)
expected = """
<table>
<caption>caption</caption>
<tbody>
<tr><td>Test 1</td><td>Test 2</td><td>Test 3</td></tr>
<tr><td></td><td></td><td></td></tr>
<tr><td></td><td></td><td></td></tr>
</tbody>
</table>
"""
self.assertHTMLEqual(result, expected)
self.assertIn("Test 2", result)
def test_empty_table_block_is_not_rendered(self):
"""
Test an empty table is not rendered.
"""
value = None
block = TableBlock()
result = block.render(value)
expected = ""
self.assertHTMLEqual(result, expected)
self.assertNotIn("None", result)
class TestTableBlockForm(WagtailTestUtils, SimpleTestCase):
def setUp(self):
# test value for table data
self.value = {
"first_row_is_table_header": True,
"first_col_is_header": True,
"data": [
["Ship", "Type", "Status"],
["Galactica", "Battlestar", "Active"],
["Valkyrie", "Battlestar", "Destroyed"],
["Cylon Basestar", "Basestar", "Active"],
["Brenik", "Small Military Vessel", "Destroyed"],
],
}
# set language from testing environment
language = translation.get_language()
self.default_table_options = DEFAULT_TABLE_OPTIONS.copy()
self.default_table_options["language"] = language
def test_default_table_options(self):
"""
Test options without any custom table_options provided.
"""
block = TableBlock()
# check that default_table_options created correctly
self.assertEqual(block.table_options, block.get_table_options())
# check that default_table_options used on self
self.assertEqual(self.default_table_options, block.table_options)
# check a few individual keys from DEFAULT_TABLE_OPTIONS
self.assertEqual(
DEFAULT_TABLE_OPTIONS["startRows"], block.table_options["startRows"]
)
self.assertEqual(
DEFAULT_TABLE_OPTIONS["colHeaders"], block.table_options["colHeaders"]
)
self.assertEqual(
DEFAULT_TABLE_OPTIONS["contextMenu"], block.table_options["contextMenu"]
)
self.assertEqual(DEFAULT_TABLE_OPTIONS["editor"], block.table_options["editor"])
self.assertEqual(
DEFAULT_TABLE_OPTIONS["stretchH"], block.table_options["stretchH"]
)
def test_table_options_language(self):
"""
Test that the environment's language is used if no language provided.
"""
# default must always contain a language value
block = TableBlock()
self.assertIn("language", block.table_options)
# French
translation.activate("fr-fr")
block_fr = TableBlock()
self.assertEqual("fr-fr", block_fr.table_options["language"])
translation.activate("it")
# Italian
block_it = TableBlock()
self.assertEqual("it", block_it.table_options["language"])
# table_options with language provided, different to environment
block_with_lang = TableBlock(table_options={"language": "ja"})
self.assertNotEqual("it", block_with_lang.table_options["language"])
self.assertEqual("ja", block_with_lang.table_options["language"])
translation.activate("en")
def test_table_options_context_menu(self):
"""
        Test how contextMenu falls back to the default.
"""
default_context_menu = list(DEFAULT_TABLE_OPTIONS["contextMenu"]) # create copy
# confirm the default is correct
table_options = TableBlock().table_options
self.assertEqual(table_options["contextMenu"], default_context_menu)
# confirm that when custom option is True, default is still used
table_options_menu_true = TableBlock(
table_options={"contextMenu": True}
).table_options
self.assertEqual(table_options_menu_true["contextMenu"], default_context_menu)
# confirm menu is removed if False is passed in
table_options_menu_false = TableBlock(
table_options={"contextMenu": False}
).table_options
self.assertIs(table_options_menu_false["contextMenu"], False)
# confirm if list passed in, it is used
table_options_menu_list = TableBlock(
table_options={"contextMenu": ["undo", "redo"]}
).table_options
self.assertEqual(table_options_menu_list["contextMenu"], ["undo", "redo"])
# test if empty array passed in
table_options_menu_list = TableBlock(
table_options={"contextMenu": []}
).table_options
self.assertEqual(table_options_menu_list["contextMenu"], [])
def test_table_options_others(self):
"""
Test simple options overrides get passed correctly.
"""
block_1_opts = TableBlock(
table_options={"startRows": 5, "startCols": 2}
).table_options
self.assertEqual(block_1_opts["startRows"], 5)
self.assertEqual(block_1_opts["startCols"], 2)
block_2_opts = TableBlock(table_options={"stretchH": "none"}).table_options
self.assertEqual(block_2_opts["stretchH"], "none")
# check value that is not part of the defaults
block_3_opts = TableBlock(table_options={"allowEmpty": False}).table_options
self.assertIs(block_3_opts["allowEmpty"], False)
def test_adapt(self):
block = TableBlock()
block.set_name("test_tableblock")
js_args = FieldBlockAdapter().js_args(block)
self.assertEqual(js_args[0], "test_tableblock")
self.assertIsInstance(js_args[1], TableInput)
self.assertEqual(
js_args[2],
{
"label": "Test tableblock",
"required": True,
"icon": "table",
"classname": "w-field w-field--char_field w-field--table_input",
"showAddCommentButton": True,
"strings": {"ADD_COMMENT": "Add Comment"},
},
)
def test_searchable_content(self):
"""
Test searchable content is created correctly.
"""
block = TableBlock()
search_content = block.get_searchable_content(value=self.value)
self.assertIn("Galactica", search_content)
self.assertIn("Brenik", search_content)
# TODO(telepath) replace this with a functional test
class TestTableBlockPageEdit(WagtailTestUtils, TestCase):
def setUp(self):
self.value = {
"first_row_is_table_header": True,
"first_col_is_header": True,
"data": [
["Ship", "Type", "Status"],
["Galactica", "Battlestar", "Active"],
["Valkyrie", "Battlestar", "Destroyed"],
["Cylon Basestar", "Basestar", "Active"],
["Brenik", "Small Military Vessel", "Destroyed"],
],
}
self.root_page = Page.objects.get(id=2)
table_block_page_instance = TableBlockStreamPage(
title="Ships", table=json.dumps([{"type": "table", "value": self.value}])
)
self.table_block_page = self.root_page.add_child(
instance=table_block_page_instance
)
self.user = self.login()
@unittest.expectedFailure
def test_page_edit_page_view(self):
"""
Test that edit page loads with saved table data and correct init function.
"""
response = self.client.get(
reverse("wagtailadmin_pages:edit", args=(self.table_block_page.id,))
)
# check page + field renders
self.assertContains(
response,
'<div data-contentpath="table" class="w-field w-field--char_field w-field--table_input">',
)
# check data
self.assertContains(response, "Battlestar")
self.assertContains(response, "Galactica")
# check init
self.assertContains(response, 'initTable("table\\u002D0\\u002Dvalue"')
self.assertContains(response, "minSpareRows")
self.assertContains(response, "startRows")
| {
"content_hash": "6a5c0f75ed6ac60b0add770f93d6d54b",
"timestamp": "",
"source": "github",
"line_count": 535,
"max_line_length": 141,
"avg_line_length": 35.05607476635514,
"alnum_prop": 0.5118635030658492,
"repo_name": "rsalmaso/wagtail",
"id": "1c302fdaa0df851920f3f402b5367aac7252ff5e",
"size": "18755",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "wagtail/contrib/table_block/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2522"
},
{
"name": "Dockerfile",
"bytes": "2041"
},
{
"name": "HTML",
"bytes": "593672"
},
{
"name": "JavaScript",
"bytes": "624463"
},
{
"name": "Makefile",
"bytes": "1413"
},
{
"name": "Python",
"bytes": "6598232"
},
{
"name": "SCSS",
"bytes": "221911"
},
{
"name": "Shell",
"bytes": "6845"
},
{
"name": "TypeScript",
"bytes": "296087"
}
],
"symlink_target": ""
} |
from django.contrib.auth.models import User
from django.test import TestCase
from volunteers.models import Volunteer, Task, Shift, Preference, VolunteerShift, VolunteerUnavailability, \
VolunteerPresence
class VolunteerTestCase(TestCase):
def setUp(self):
self.user = User.objects.create_user(username='refood', email='test@refood-non-profit.org',
password='top_secret')
Volunteer.objects.create(user=self.user, car_availability=True)
self.task = Task.objects.create(name='Distribuição', slug='distribuicao', place='BO', time_beginning='19:30:00',
time_ending='21:30:00', min_volunteers='4', max_volunteers='6')
self.shift = Shift.objects.create(task=self.task, day_of_the_week='1')
self.preference = Preference.objects.create(volunteer=self.user.volunteer, shift=self.shift, priority=1)
self.volunteer_shift = VolunteerShift.objects.create(volunteer=self.user.volunteer, shift=self.shift,
frequency='W')
self.volunteer_unavailability = VolunteerUnavailability.objects.create(volunteer_shift=self.volunteer_shift,
date='2016-12-26')
self.volunteer_presence = VolunteerPresence.objects.create(volunteer_shift=self.volunteer_shift,
date='2016-12-27', presence=True)
def test_Volunteer(self):
self.assertEqual(str(self.user.volunteer), self.user.get_username())
self.user.first_name = 'ReFood'
self.user.last_name = 'Cascais CPR'
self.user.save()
self.assertEqual(str(self.user.volunteer), self.user.get_full_name())
def test_Task(self):
self.assertEqual(str(self.task), 'Distribuição')
def test_Shift(self):
self.assertEqual(str(self.shift), 'Distribuição (Segunda-feira)')
def test_Preference(self):
self.assertEqual(str(self.preference), '[1] refood - Distribuição (Segunda-feira)')
def test_VolunteerShift(self):
self.assertEqual(str(self.volunteer_shift), 'refood - Distribuição (Segunda-feira)')
def test_VolunteerUnavailability(self):
self.assertEqual(str(self.volunteer_unavailability),
'2016-12-26 - refood - Distribuição (Segunda-feira)')
def test_VolunteerPresence(self):
self.assertEqual(str(self.volunteer_presence), '2016-12-27 - refood - Distribuição (Segunda-feira)')
| {
"content_hash": "7666648c2757bee8953f5d12d439ec85",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 120,
"avg_line_length": 50,
"alnum_prop": 0.6288461538461538,
"repo_name": "NBajanca/django-non-profit",
"id": "78b97a14ed071f5c34da4e325e80c5ff4e7ad19d",
"size": "2614",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "volunteers/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "32615"
},
{
"name": "JavaScript",
"bytes": "8365"
},
{
"name": "Python",
"bytes": "49337"
}
],
"symlink_target": ""
} |
"""The blocking connection adapter module implements blocking semantics on top
of Pika's core AMQP driver. While most of the asynchronous expectations are
removed when using the blocking connection adapter, it attempts to remain true
to the asynchronous RPC nature of the AMQP protocol, supporting server sent
RPC commands.
The user-facing classes in the module consist of the
:py:class:`~pika.adapters.blocking_connection.BlockingConnection`
and the :class:`~pika.adapters.blocking_connection.BlockingChannel`
classes.
"""
# Disable "access to protected member warnings: this wrapper implementation is
# a friend of those instances
# pylint: disable=W0212
from collections import namedtuple, deque
import contextlib
import functools
import logging
import time
import pika.channel
from pika import compat
from pika import exceptions
import pika.spec
# NOTE: import SelectConnection after others to avoid circular dependency
from pika.adapters.select_connection import SelectConnection
LOGGER = logging.getLogger(__name__)
class _CallbackResult(object):
""" CallbackResult is a non-thread-safe implementation for receiving
callback results; INTERNAL USE ONLY!
"""
__slots__ = ('_value_class', '_ready', '_values')
def __init__(self, value_class=None):
"""
:param callable value_class: only needed if the CallbackResult
instance will be used with
`set_value_once` and `append_element`.
*args and **kwargs of the value setter
methods will be passed to this class.
"""
self._value_class = value_class
self._ready = None
self._values = None
self.reset()
def reset(self):
"""Reset value, but not _value_class"""
self._ready = False
self._values = None
def __bool__(self):
""" Called by python runtime to implement truth value testing and the
built-in operation bool(); NOTE: python 3.x
"""
return self.is_ready()
# python 2.x version of __bool__
__nonzero__ = __bool__
def __enter__(self):
""" Entry into context manager that automatically resets the object
on exit; this usage pattern helps garbage-collection by eliminating
potential circular references.
"""
return self
def __exit__(self, *args, **kwargs):
"""Reset value"""
self.reset()
def is_ready(self):
"""
:returns: True if the object is in a signaled state
"""
return self._ready
@property
def ready(self):
"""True if the object is in a signaled state"""
return self._ready
def signal_once(self, *_args, **_kwargs): # pylint: disable=W0613
""" Set as ready
:raises AssertionError: if result was already signalled
"""
assert not self._ready, '_CallbackResult was already set'
self._ready = True
def set_value_once(self, *args, **kwargs):
""" Set as ready with value; the value may be retrived via the `value`
property getter
:raises AssertionError: if result was already set
"""
self.signal_once()
try:
self._values = (self._value_class(*args, **kwargs),)
except Exception:
LOGGER.error(
"set_value_once failed: value_class=%r; args=%r; kwargs=%r",
self._value_class, args, kwargs)
raise
def append_element(self, *args, **kwargs):
"""Append an element to values"""
assert not self._ready or isinstance(self._values, list), (
'_CallbackResult state is incompatible with append_element: '
'ready=%r; values=%r' % (self._ready, self._values))
try:
value = self._value_class(*args, **kwargs)
except Exception:
LOGGER.error(
"append_element failed: value_class=%r; args=%r; kwargs=%r",
self._value_class, args, kwargs)
raise
if self._values is None:
self._values = [value]
else:
self._values.append(value)
self._ready = True
@property
def value(self):
"""
:returns: a reference to the value that was set via `set_value_once`
:raises AssertionError: if result was not set or value is incompatible
with `set_value_once`
"""
assert self._ready, '_CallbackResult was not set'
assert isinstance(self._values, tuple) and len(self._values) == 1, (
'_CallbackResult value is incompatible with set_value_once: %r'
% (self._values,))
return self._values[0]
@property
def elements(self):
"""
:returns: a reference to the list containing one or more elements that
were added via `append_element`
:raises AssertionError: if result was not set or value is incompatible
with `append_element`
"""
assert self._ready, '_CallbackResult was not set'
assert isinstance(self._values, list) and len(self._values) > 0, (
'_CallbackResult value is incompatible with append_element: %r'
% (self._values,))
return self._values
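# Editor's note: a minimal illustrative sketch (not part of pika) of the
# _CallbackResult pattern used throughout this adapter; `Pair` is a
# hypothetical value_class.
def _example_callback_result_usage():
    Pair = namedtuple('Pair', 'x y')
    result = _CallbackResult(value_class=Pair)
    assert not result.ready
    result.set_value_once(1, 2)            # signals and stores Pair(1, 2)
    assert result.ready and result.value == Pair(1, 2)
    with result:                           # context manager resets on exit
        pass
    assert not result.ready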
class _IoloopTimerContext(object): # pylint: disable=R0903
"""Context manager for registering and safely unregistering a
SelectConnection ioloop-based timer
"""
def __init__(self, duration, connection):
"""
:param float duration: non-negative timer duration in seconds
:param SelectConnection connection:
"""
assert hasattr(connection, 'add_timeout'), connection
self._duration = duration
self._connection = connection
self._callback_result = _CallbackResult()
self._timer_id = None
def __enter__(self):
"""Register a timer"""
self._timer_id = self._connection.add_timeout(
self._duration,
self._callback_result.signal_once)
return self
def __exit__(self, *_args, **_kwargs):
"""Unregister timer if it hasn't fired yet"""
if not self._callback_result:
self._connection.remove_timeout(self._timer_id)
def is_ready(self):
"""
:returns: True if timer has fired, False otherwise
"""
return self._callback_result.is_ready()
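# Editor's note: an illustrative sketch (not part of pika) of how
# process_data_events() bounds its wait with this context manager; the
# `connection` argument is assumed to be a BlockingConnection, and the
# private members touched here mirror the adapter's own internal usage.
def _example_ioloop_timer_context_usage(connection):
    with _IoloopTimerContext(0.25, connection._impl) as timer:
        connection._flush_output(timer.is_ready)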
class _TimerEvt(object): # pylint: disable=R0903
"""Represents a timer created via `BlockingConnection.add_timeout`"""
__slots__ = ('timer_id', '_callback')
def __init__(self, callback):
"""
:param callback: see callback_method in `BlockingConnection.add_timeout`
"""
self._callback = callback
# Will be set to timer id returned from the underlying implementation's
# `add_timeout` method
self.timer_id = None
def __repr__(self):
return '%s(timer_id=%s, callback=%s)' % (self.__class__.__name__,
self.timer_id, self._callback)
def dispatch(self):
"""Dispatch the user's callback method"""
self._callback()
class _ConnectionBlockedUnblockedEvtBase(object): # pylint: disable=R0903
"""Base class for `_ConnectionBlockedEvt` and `_ConnectionUnblockedEvt`"""
__slots__ = ('_callback', '_method_frame')
def __init__(self, callback, method_frame):
"""
:param callback: see callback_method parameter in
`BlockingConnection.add_on_connection_blocked_callback` and
`BlockingConnection.add_on_connection_unblocked_callback`
:param pika.frame.Method method_frame: with method_frame.method of type
`pika.spec.Connection.Blocked` or `pika.spec.Connection.Unblocked`
"""
self._callback = callback
self._method_frame = method_frame
def __repr__(self):
return '%s(callback=%s, frame=%s)' % (self.__class__.__name__,
self._callback,
self._method_frame)
def dispatch(self):
"""Dispatch the user's callback method"""
self._callback(self._method_frame)
class _ConnectionBlockedEvt( # pylint: disable=R0903
_ConnectionBlockedUnblockedEvtBase):
"""Represents a Connection.Blocked notification from RabbitMQ broker`"""
pass
class _ConnectionUnblockedEvt( # pylint: disable=R0903
_ConnectionBlockedUnblockedEvtBase):
"""Represents a Connection.Unblocked notification from RabbitMQ broker`"""
pass
class BlockingConnection(object): # pylint: disable=R0902
"""The BlockingConnection creates a layer on top of Pika's asynchronous core
providing methods that will block until their expected response has
returned. Due to the asynchronous nature of the `Basic.Deliver` and
`Basic.Return` calls from RabbitMQ to your application, you can still
implement continuation-passing style asynchronous methods if you'd like to
receive messages from RabbitMQ using
:meth:`basic_consume <BlockingChannel.basic_consume>` or if you want to be
notified of a delivery failure when using
:meth:`basic_publish <BlockingChannel.basic_publish>` .
For more information about communicating with the blocking_connection
adapter, be sure to check out the
:class:`BlockingChannel <BlockingChannel>` class which implements the
:class:`Channel <pika.channel.Channel>` based communication for the
blocking_connection adapter.
"""
# Connection-opened callback args
_OnOpenedArgs = namedtuple('BlockingConnection__OnOpenedArgs',
'connection')
# Connection-establishment error callback args
_OnOpenErrorArgs = namedtuple('BlockingConnection__OnOpenErrorArgs',
'connection error_text')
# Connection-closing callback args
_OnClosedArgs = namedtuple('BlockingConnection__OnClosedArgs',
'connection reason_code reason_text')
# Channel-opened callback args
_OnChannelOpenedArgs = namedtuple(
'BlockingConnection__OnChannelOpenedArgs',
'channel')
def __init__(self, parameters=None, _impl_class=None):
"""Create a new instance of the Connection object.
:param pika.connection.Parameters parameters: Connection parameters
:param _impl_class: for tests/debugging only; implementation class;
None=default
:raises RuntimeError:
"""
        # Used by the _acquire_event_dispatch context manager; when already
        # greater than 0, event dispatch is already acquired higher up the
        # call stack
self._event_dispatch_suspend_depth = 0
# Connection-specific events that are ready for dispatch: _TimerEvt,
# _ConnectionBlockedEvt, _ConnectionUnblockedEvt
self._ready_events = deque()
# Channel numbers of channels that are requesting a call to their
# BlockingChannel._dispatch_events method; See
# `_request_channel_dispatch`
self._channels_pending_dispatch = set()
# Receives on_open_callback args from Connection
self._opened_result = _CallbackResult(self._OnOpenedArgs)
# Receives on_open_error_callback args from Connection
self._open_error_result = _CallbackResult(self._OnOpenErrorArgs)
# Receives on_close_callback args from Connection
self._closed_result = _CallbackResult(self._OnClosedArgs)
        # Set to True when the user calls close() on the connection
# NOTE: this is a workaround to detect socket error because
# on_close_callback passes reason_code=0 when called due to socket error
self._user_initiated_close = False
impl_class = _impl_class or SelectConnection
self._impl = impl_class(
parameters=parameters,
on_open_callback=self._opened_result.set_value_once,
on_open_error_callback=self._open_error_result.set_value_once,
on_close_callback=self._closed_result.set_value_once,
stop_ioloop_on_close=False)
self._process_io_for_connection_setup()
def _cleanup(self):
"""Clean up members that might inhibit garbage collection"""
self._ready_events.clear()
self._opened_result.reset()
self._open_error_result.reset()
self._closed_result.reset()
@contextlib.contextmanager
def _acquire_event_dispatch(self):
""" Context manager that controls access to event dispatcher for
preventing reentrancy.
The "as" value is True if the managed code block owns the event
dispatcher and False if caller higher up in the call stack already owns
it. Only managed code that gets ownership (got True) is permitted to
dispatch
"""
try:
# __enter__ part
self._event_dispatch_suspend_depth += 1
yield self._event_dispatch_suspend_depth == 1
finally:
# __exit__ part
self._event_dispatch_suspend_depth -= 1
def _process_io_for_connection_setup(self): # pylint: disable=C0103
""" Perform follow-up processing for connection setup request: flush
connection output and process input while waiting for connection-open
or connection-error.
:raises AMQPConnectionError: on connection open error
"""
self._flush_output(self._opened_result.is_ready,
self._open_error_result.is_ready)
if self._open_error_result.ready:
raise exceptions.AMQPConnectionError(
self._open_error_result.value.error_text)
assert self._opened_result.ready
assert self._opened_result.value.connection is self._impl
def _flush_output(self, *waiters):
""" Flush output and process input while waiting for any of the given
callbacks to return true. The wait is aborted upon connection-close.
Otherwise, processing continues until the output is flushed AND at least
one of the callbacks returns true. If there are no callbacks, then
processing ends when all output is flushed.
:param waiters: sequence of zero or more callables taking no args and
returning true when it's time to stop processing.
Their results are OR'ed together.
"""
if self._impl.is_closed:
raise exceptions.ConnectionClosed()
# Conditions for terminating the processing loop:
# connection closed
# OR
# empty outbound buffer and no waiters
# OR
# empty outbound buffer and any waiter is ready
is_done = (lambda:
self._closed_result.ready or
(not self._impl.outbound_buffer and
(not waiters or any(ready() for ready in waiters))))
        # Process I/O until our completion condition is satisfied
while not is_done():
self._impl.ioloop.poll()
self._impl.ioloop.process_timeouts()
if self._closed_result.ready:
try:
result = self._closed_result.value
if result.reason_code not in [0, 200]:
LOGGER.critical('Connection close detected; result=%r',
result)
raise exceptions.ConnectionClosed(result.reason_code,
result.reason_text)
elif not self._user_initiated_close:
# NOTE: unfortunately, upon socket error, on_close_callback
# presently passes reason_code=0, so we don't detect that as
# an error
LOGGER.critical('Connection close detected')
raise exceptions.ConnectionClosed()
else:
LOGGER.info('Connection closed; result=%r', result)
finally:
self._cleanup()
def _request_channel_dispatch(self, channel_number):
"""Called by BlockingChannel instances to request a call to their
_dispatch_events method or to terminate `process_data_events`;
BlockingConnection will honor these requests from a safe context.
:param int channel_number: positive channel number to request a call
to the channel's `_dispatch_events`; a negative channel number to
request termination of `process_data_events`
"""
self._channels_pending_dispatch.add(channel_number)
def _dispatch_channel_events(self):
"""Invoke the `_dispatch_events` method on open channels that requested
it
"""
if not self._channels_pending_dispatch:
return
with self._acquire_event_dispatch() as dispatch_acquired:
if not dispatch_acquired:
# Nested dispatch or dispatch blocked higher in call stack
return
candidates = list(self._channels_pending_dispatch)
self._channels_pending_dispatch.clear()
for channel_number in candidates:
if channel_number < 0:
# This was meant to terminate process_data_events
continue
try:
impl_channel = self._impl._channels[channel_number]
except KeyError:
continue
if impl_channel.is_open:
impl_channel._get_cookie()._dispatch_events()
def _on_timer_ready(self, evt):
"""Handle expiry of a timer that was registered via `add_timeout`
:param _TimerEvt evt:
"""
self._ready_events.append(evt)
def _on_connection_blocked(self, user_callback, method_frame):
"""Handle Connection.Blocked notification from RabbitMQ broker
:param callable user_callback: callback_method passed to
`add_on_connection_blocked_callback`
:param pika.frame.Method method_frame: method frame having `method`
member of type `pika.spec.Connection.Blocked`
"""
self._ready_events.append(
_ConnectionBlockedEvt(user_callback, method_frame))
def _on_connection_unblocked(self, user_callback, method_frame):
"""Handle Connection.Unblocked notification from RabbitMQ broker
:param callable user_callback: callback_method passed to
`add_on_connection_unblocked_callback`
:param pika.frame.Method method_frame: method frame having `method`
member of type `pika.spec.Connection.Blocked`
"""
self._ready_events.append(
_ConnectionUnblockedEvt(user_callback, method_frame))
def _dispatch_connection_events(self):
"""Dispatch ready connection events"""
if not self._ready_events:
return
with self._acquire_event_dispatch() as dispatch_acquired:
if not dispatch_acquired:
# Nested dispatch or dispatch blocked higher in call stack
return
# Limit dispatch to the number of currently ready events to avoid
# getting stuck in this loop
for _ in compat.xrange(len(self._ready_events)):
try:
evt = self._ready_events.popleft()
except IndexError:
# Some events (e.g., timers) must have been cancelled
break
evt.dispatch()
def add_on_connection_blocked_callback(self, # pylint: disable=C0103
callback_method):
"""Add a callback to be notified when RabbitMQ has sent a
`Connection.Blocked` frame indicating that RabbitMQ is low on
resources. Publishers can use this to voluntarily suspend publishing,
instead of relying on back pressure throttling. The callback
will be passed the `Connection.Blocked` method frame.
:param method callback_method: Callback to call on `Connection.Blocked`,
having the signature callback_method(pika.frame.Method), where the
method frame's `method` member is of type
`pika.spec.Connection.Blocked`
"""
self._impl.add_on_connection_blocked_callback(
functools.partial(self._on_connection_blocked, callback_method))
def add_on_connection_unblocked_callback(self, # pylint: disable=C0103
callback_method):
"""Add a callback to be notified when RabbitMQ has sent a
`Connection.Unblocked` frame letting publishers know it's ok
to start publishing again. The callback will be passed the
`Connection.Unblocked` method frame.
:param method callback_method: Callback to call on
`Connection.Unblocked`, having the signature
callback_method(pika.frame.Method), where the method frame's
`method` member is of type `pika.spec.Connection.Unblocked`
"""
self._impl.add_on_connection_unblocked_callback(
functools.partial(self._on_connection_unblocked, callback_method))
def add_timeout(self, deadline, callback_method):
"""Create a single-shot timer to fire after deadline seconds. Do not
confuse with Tornado's timeout where you pass in the time you want to
have your callback called. Only pass in the seconds until it's to be
called.
NOTE: the timer callbacks are dispatched only in the scope of
specially-designated methods: see
`BlockingConnection.process_data_events` and
`BlockingChannel.start_consuming`.
:param float deadline: The number of seconds to wait to call callback
:param callable callback_method: The callback method with the signature
callback_method()
:returns: opaque timer id
"""
if not callable(callback_method):
raise ValueError(
'callback_method parameter must be callable, but got %r'
% (callback_method,))
evt = _TimerEvt(callback=callback_method)
timer_id = self._impl.add_timeout(
deadline,
functools.partial(self._on_timer_ready, evt))
evt.timer_id = timer_id
return timer_id
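    # Editor's note: a minimal illustrative sketch (not part of pika's API)
    # of timer management; a timer's callback would be dispatched from
    # process_data_events(). Assumes a broker reachable with default
    # parameters.
    @staticmethod
    def _example_add_timeout_usage():
        connection = BlockingConnection()
        timer_id = connection.add_timeout(
            5.0, lambda: LOGGER.info('timer fired'))
        connection.remove_timeout(timer_id)  # cancel it before it fires
        connection.process_data_events(time_limit=0.1)
        connection.close()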
def remove_timeout(self, timeout_id):
"""Remove a timer if it's still in the timeout stack
:param timeout_id: The opaque timer id to remove
"""
# Remove from the impl's timeout stack
self._impl.remove_timeout(timeout_id)
# Remove from ready events, if the timer fired already
for i, evt in enumerate(self._ready_events):
if isinstance(evt, _TimerEvt) and evt.timer_id == timeout_id:
index_to_remove = i
break
else:
# Not found
return
del self._ready_events[index_to_remove]
def close(self, reply_code=200, reply_text='Normal shutdown'):
"""Disconnect from RabbitMQ. If there are any open channels, it will
attempt to close them prior to fully disconnecting. Channels which
have active consumers will attempt to send a Basic.Cancel to RabbitMQ
to cleanly stop the delivery of messages prior to closing the channel.
:param int reply_code: The code number for the close
:param str reply_text: The text reason for the close
"""
LOGGER.info('Closing connection (%s): %s', reply_code, reply_text)
self._user_initiated_close = True
# Close channels that remain opened
for impl_channel in pika.compat.dictvalues(self._impl._channels):
channel = impl_channel._get_cookie()
if channel.is_open:
channel.close(reply_code, reply_text)
# Close the connection
self._impl.close(reply_code, reply_text)
self._flush_output(self._closed_result.is_ready)
def process_data_events(self, time_limit=0):
"""Will make sure that data events are processed. Dispatches timer and
channel callbacks if not called from the scope of BlockingConnection or
BlockingChannel callback. Your app can block on this method.
:param float time_limit: suggested upper bound on processing time in
seconds. The actual blocking time depends on the granularity of the
underlying ioloop. Zero means return as soon as possible. None means
there is no limit on processing time and the function will block
            until I/O produces actionable events. Defaults to 0 for backward
compatibility. This parameter is NEW in pika 0.10.0.
"""
common_terminator = lambda: bool(
self._channels_pending_dispatch or self._ready_events)
if time_limit is None:
self._flush_output(common_terminator)
else:
with _IoloopTimerContext(time_limit, self._impl) as timer:
self._flush_output(timer.is_ready, common_terminator)
if self._ready_events:
self._dispatch_connection_events()
if self._channels_pending_dispatch:
self._dispatch_channel_events()
def sleep(self, duration):
"""A safer way to sleep than calling time.sleep() directly that would
keep the adapter from ignoring frames sent from the broker. The
connection will "sleep" or block the number of seconds specified in
duration in small intervals.
:param float duration: The time to sleep in seconds
"""
assert duration >= 0, duration
deadline = time.time() + duration
time_limit = duration
# Process events at least once
while True:
self.process_data_events(time_limit)
time_limit = deadline - time.time()
if time_limit <= 0:
break
def channel(self, channel_number=None):
"""Create a new channel with the next available channel number or pass
        in a channel number to use. Must be non-zero if you would like to
        specify one, but it is recommended that you let Pika manage the
        channel numbers.
        :rtype: pika.adapters.blocking_connection.BlockingChannel
"""
with _CallbackResult(self._OnChannelOpenedArgs) as opened_args:
impl_channel = self._impl.channel(
on_open_callback=opened_args.set_value_once,
channel_number=channel_number)
# Create our proxy channel
channel = BlockingChannel(impl_channel, self)
# Link implementation channel with our proxy channel
impl_channel._set_cookie(channel)
# Drive I/O until Channel.Open-ok
channel._flush_output(opened_args.is_ready)
return channel
def __enter__(self):
# Prepare `with` context
return self
def __exit__(self, tp, value, traceback):
# Close connection after `with` context
self.close()
#
# Connections state properties
#
@property
def is_closed(self):
"""
Returns a boolean reporting the current connection state.
"""
return self._impl.is_closed
@property
def is_closing(self):
"""
Returns a boolean reporting the current connection state.
"""
return self._impl.is_closing
@property
def is_open(self):
"""
Returns a boolean reporting the current connection state.
"""
return self._impl.is_open
#
# Properties that reflect server capabilities for the current connection
#
@property
def basic_nack_supported(self):
"""Specifies if the server supports basic.nack on the active connection.
:rtype: bool
"""
return self._impl.basic_nack
@property
def consumer_cancel_notify_supported(self): # pylint: disable=C0103
"""Specifies if the server supports consumer cancel notification on the
active connection.
:rtype: bool
"""
return self._impl.consumer_cancel_notify
@property
def exchange_exchange_bindings_supported(self): # pylint: disable=C0103
"""Specifies if the active connection supports exchange to exchange
bindings.
:rtype: bool
"""
return self._impl.exchange_exchange_bindings
@property
def publisher_confirms_supported(self):
"""Specifies if the active connection can use publisher confirmations.
:rtype: bool
"""
return self._impl.publisher_confirms
# Legacy property names for backward compatibility
basic_nack = basic_nack_supported
consumer_cancel_notify = consumer_cancel_notify_supported
exchange_exchange_bindings = exchange_exchange_bindings_supported
publisher_confirms = publisher_confirms_supported
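# Editor's note: an end-to-end illustrative sketch (not part of pika; assumes
# a broker reachable via default ConnectionParameters). Connection-level
# callbacks are dispatched only from process_data_events() and sleep().
def _example_blocking_connection_usage():
    with BlockingConnection() as connection:
        connection.add_on_connection_blocked_callback(
            lambda frame: LOGGER.warning('broker is low on resources: %r',
                                         frame))
        connection.add_on_connection_unblocked_callback(
            lambda frame: LOGGER.info('broker resources recovered: %r',
                                      frame))
        connection.sleep(0.5)  # processes I/O while "sleeping"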
class _ChannelPendingEvt(object): # pylint: disable=R0903
"""Base class for BlockingChannel pending events"""
pass
class _ConsumerDeliveryEvt(_ChannelPendingEvt): # pylint: disable=R0903
"""This event represents consumer message delivery `Basic.Deliver`; it
contains method, properties, and body of the delivered message.
"""
__slots__ = ('method', 'properties', 'body')
def __init__(self, method, properties, body):
"""
:param spec.Basic.Deliver method: NOTE: consumer_tag and delivery_tag
are valid only within source channel
:param spec.BasicProperties properties: message properties
:param body: message body; empty string if no body
:type body: str or unicode
"""
self.method = method
self.properties = properties
self.body = body
class _ConsumerCancellationEvt(_ChannelPendingEvt): # pylint: disable=R0903
"""This event represents server-initiated consumer cancellation delivered to
client via Basic.Cancel. After receiving Basic.Cancel, there will be no
further deliveries for the consumer identified by `consumer_tag` in
`Basic.Cancel`
"""
    __slots__ = ('method_frame',)
def __init__(self, method_frame):
"""
:param pika.frame.Method method_frame: method frame with method of type
`spec.Basic.Cancel`
"""
self.method_frame = method_frame
def __repr__(self):
return '%s(method_frame=%r)' % (self.__class__.__name__,
self.method_frame)
@property
def method(self):
"""method of type spec.Basic.Cancel"""
return self.method_frame.method
class _ReturnedMessageEvt(_ChannelPendingEvt): # pylint: disable=R0903
"""This event represents a message returned by broker via `Basic.Return`"""
__slots__ = ('callback', 'channel', 'method', 'properties', 'body')
def __init__(self, callback, channel, method, properties, body): # pylint: disable=R0913
"""
:param callable callback: user's callback, having the signature
callback(channel, method, properties, body), where
channel: pika.Channel
method: pika.spec.Basic.Return
properties: pika.spec.BasicProperties
body: str, unicode, or bytes (python 3.x)
:param pika.Channel channel:
:param pika.spec.Basic.Return method:
:param pika.spec.BasicProperties properties:
:param body: str, unicode, or bytes (python 3.x)
"""
self.callback = callback
self.channel = channel
self.method = method
self.properties = properties
self.body = body
def __repr__(self):
return ('%s(callback=%r, channel=%r, method=%r, properties=%r, '
                'body=%.300r)') % (self.__class__.__name__, self.callback,
self.channel, self.method, self.properties,
self.body)
def dispatch(self):
"""Dispatch user's callback"""
self.callback(self.channel, self.method, self.properties, self.body)
class ReturnedMessage(object): # pylint: disable=R0903
"""Represents a message returned via Basic.Return in publish-acknowledgments
mode
"""
__slots__ = ('method', 'properties', 'body')
def __init__(self, method, properties, body):
"""
:param spec.Basic.Return method:
:param spec.BasicProperties properties: message properties
:param body: message body; empty string if no body
:type body: str or unicode
"""
self.method = method
self.properties = properties
self.body = body
class _ConsumerInfo(object):
"""Information about an active consumer"""
__slots__ = ('consumer_tag', 'no_ack', 'consumer_cb',
'alternate_event_sink', 'state')
# Consumer states
SETTING_UP = 1
ACTIVE = 2
TEARING_DOWN = 3
CANCELLED_BY_BROKER = 4
def __init__(self, consumer_tag, no_ack, consumer_cb=None,
alternate_event_sink=None):
"""
        NOTE: exactly one of consumer_cb/alternate_event_sink must be non-None.
:param str consumer_tag:
:param bool no_ack: the no-ack value for the consumer
:param callable consumer_cb: The function for dispatching messages to
user, having the signature:
consumer_callback(channel, method, properties, body)
channel: BlockingChannel
method: spec.Basic.Deliver
properties: spec.BasicProperties
body: str or unicode
:param callable alternate_event_sink: if specified, _ConsumerDeliveryEvt
and _ConsumerCancellationEvt objects will be diverted to this
callback instead of being deposited in the channel's
`_pending_events` container. Signature:
alternate_event_sink(evt)
"""
assert (consumer_cb is None) != (alternate_event_sink is None), (
'exactly one of consumer_cb/alternate_event_sink must be non-None',
consumer_cb, alternate_event_sink)
self.consumer_tag = consumer_tag
self.no_ack = no_ack
self.consumer_cb = consumer_cb
self.alternate_event_sink = alternate_event_sink
self.state = self.SETTING_UP
@property
def setting_up(self):
"""True if in SETTING_UP state"""
return self.state == self.SETTING_UP
@property
def active(self):
"""True if in ACTIVE state"""
return self.state == self.ACTIVE
@property
def tearing_down(self):
"""True if in TEARING_DOWN state"""
return self.state == self.TEARING_DOWN
@property
def cancelled_by_broker(self):
"""True if in CANCELLED_BY_BROKER state"""
return self.state == self.CANCELLED_BY_BROKER
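# Editor's note: a small illustrative sketch (not part of pika) of the
# _ConsumerInfo state transitions driven by basic_consume/basic_cancel.
def _example_consumer_info_lifecycle():
    info = _ConsumerInfo('ctag-0', no_ack=True, consumer_cb=lambda *args: None)
    assert info.setting_up                    # after sending Basic.Consume
    info.state = _ConsumerInfo.ACTIVE         # after Basic.Consume-ok arrives
    assert info.active
    info.state = _ConsumerInfo.TEARING_DOWN   # client-initiated basic_cancel
    assert info.tearing_down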
class _QueueConsumerGeneratorInfo(object): # pylint: disable=R0903
"""Container for information about the active queue consumer generator """
__slots__ = ('params', 'consumer_tag', 'pending_events')
def __init__(self, params, consumer_tag):
"""
        :param tuple params: the three-tuple (queue, no_ack, exclusive) that
            was used to create the queue consumer
:param str consumer_tag: consumer tag
"""
self.params = params
self.consumer_tag = consumer_tag
# Holds pending events of types _ConsumerDeliveryEvt and
# _ConsumerCancellationEvt
self.pending_events = deque()
def __repr__(self):
return '%s(params=%r, consumer_tag=%r)' % (
self.__class__.__name__, self.params, self.consumer_tag)
class BlockingChannel(object): # pylint: disable=R0904,R0902
"""The BlockingChannel implements blocking semantics for most things that
one would use callback-passing-style for with the
:py:class:`~pika.channel.Channel` class. In addition,
the `BlockingChannel` class implements a :term:`generator` that allows
you to :doc:`consume messages </examples/blocking_consumer_generator>`
without using callbacks.
Example of creating a BlockingChannel::
import pika
# Create our connection object
connection = pika.BlockingConnection()
# The returned object will be a synchronous channel
channel = connection.channel()
"""
# Used as value_class with _CallbackResult for receiving Basic.GetOk args
_RxMessageArgs = namedtuple(
'BlockingChannel__RxMessageArgs',
[
'channel', # implementation pika.Channel instance
'method', # Basic.GetOk
'properties', # pika.spec.BasicProperties
'body' # str, unicode, or bytes (python 3.x)
])
# For use as value_class with any _CallbackResult that expects method_frame
# as the only arg
_MethodFrameCallbackResultArgs = namedtuple(
'BlockingChannel__MethodFrameCallbackResultArgs',
'method_frame')
# Broker's basic-ack/basic-nack args when delivery confirmation is enabled;
# may concern a single or multiple messages
_OnMessageConfirmationReportArgs = namedtuple( # pylint: disable=C0103
'BlockingChannel__OnMessageConfirmationReportArgs',
'method_frame')
    # Parameters for broker-initiated Channel.Close request: reply_code
# holds the broker's non-zero error code and reply_text holds the
# corresponding error message text.
_OnChannelClosedByBrokerArgs = namedtuple(
'BlockingChannel__OnChannelClosedByBrokerArgs',
'method_frame')
# For use as value_class with _CallbackResult expecting Channel.Flow
# confirmation.
_FlowOkCallbackResultArgs = namedtuple(
'BlockingChannel__FlowOkCallbackResultArgs',
'active' # True if broker will start or continue sending; False if not
)
_CONSUMER_CANCELLED_CB_KEY = 'blocking_channel_consumer_cancelled'
def __init__(self, channel_impl, connection):
"""Create a new instance of the Channel
:param channel_impl: Channel implementation object as returned from
SelectConnection.channel()
:param BlockingConnection connection: The connection object
"""
self._impl = channel_impl
self._connection = connection
# A mapping of consumer tags to _ConsumerInfo for active consumers
self._consumer_infos = dict()
        # Queue consumer generator info of type
# _QueueConsumerGeneratorInfo created by BlockingChannel.consume
self._queue_consumer_generator = None
# Whether RabbitMQ delivery confirmation has been enabled
self._delivery_confirmation = False
# Receives message delivery confirmation report (Basic.ack or
# Basic.nack) from broker when delivery confirmations are enabled
self._message_confirmation_result = _CallbackResult(
self._OnMessageConfirmationReportArgs)
# deque of pending events: _ConsumerDeliveryEvt and
# _ConsumerCancellationEvt objects that will be returned by
# `BlockingChannel.get_event()`
self._pending_events = deque()
# Holds a ReturnedMessage object representing a message received via
# Basic.Return in publisher-acknowledgments mode.
self._puback_return = None
# Receives Basic.ConsumeOk reply from server
self._basic_consume_ok_result = _CallbackResult()
        # Receives the broker-initiated Channel.Close parameters
self._channel_closed_by_broker_result = _CallbackResult( # pylint: disable=C0103
self._OnChannelClosedByBrokerArgs)
# Receives args from Basic.GetEmpty response
# http://www.rabbitmq.com/amqp-0-9-1-reference.html#basic.get
self._basic_getempty_result = _CallbackResult(
self._MethodFrameCallbackResultArgs)
self._impl.add_on_cancel_callback(self._on_consumer_cancelled_by_broker)
self._impl.add_callback(
self._basic_consume_ok_result.signal_once,
replies=[pika.spec.Basic.ConsumeOk],
one_shot=False)
self._impl.add_callback(
self._channel_closed_by_broker_result.set_value_once,
replies=[pika.spec.Channel.Close],
one_shot=True)
self._impl.add_callback(
self._basic_getempty_result.set_value_once,
replies=[pika.spec.Basic.GetEmpty],
one_shot=False)
LOGGER.info("Created channel=%s", self.channel_number)
def _cleanup(self):
"""Clean up members that might inhibit garbage collection"""
self._message_confirmation_result.reset()
self._pending_events = deque()
self._consumer_infos = dict()
def __int__(self):
"""Return the channel object as its channel number
:rtype: int
"""
return self.channel_number
@property
def channel_number(self):
"""Channel number"""
return self._impl.channel_number
@property
def connection(self):
"""The channel's BlockingConnection instance"""
return self._connection
@property
def is_closed(self):
"""Returns True if the channel is closed.
:rtype: bool
"""
return self._impl.is_closed
@property
def is_closing(self):
"""Returns True if the channel is closing.
:rtype: bool
"""
return self._impl.is_closing
@property
def is_open(self):
"""Returns True if the channel is open.
:rtype: bool
"""
return self._impl.is_open
_ALWAYS_READY_WAITERS = ((lambda: True), )
def _flush_output(self, *waiters):
""" Flush output and process input while waiting for any of the given
callbacks to return true. The wait is aborted upon channel-close or
connection-close.
Otherwise, processing continues until the output is flushed AND at least
one of the callbacks returns true. If there are no callbacks, then
processing ends when all output is flushed.
:param waiters: sequence of zero or more callables taking no args and
returning true when it's time to stop processing.
Their results are OR'ed together.
"""
if self._impl.is_closed:
raise exceptions.ChannelClosed()
if not waiters:
waiters = self._ALWAYS_READY_WAITERS
self._connection._flush_output(
self._channel_closed_by_broker_result.is_ready,
*waiters)
if self._channel_closed_by_broker_result:
# Channel was force-closed by broker
self._cleanup()
method = (
self._channel_closed_by_broker_result.value.method_frame.method)
raise exceptions.ChannelClosed(method.reply_code, method.reply_text)
def _on_puback_message_returned(self, channel, method, properties, body):
"""Called as the result of Basic.Return from broker in
publisher-acknowledgements mode. Saves the info as a ReturnedMessage
instance in self._puback_return.
:param pika.Channel channel: our self._impl channel
:param pika.spec.Basic.Return method:
:param pika.spec.BasicProperties properties: message properties
:param body: returned message body; empty string if no body
:type body: str, unicode
"""
assert channel is self._impl, (
channel.channel_number, self.channel_number)
assert isinstance(method, pika.spec.Basic.Return), method
assert isinstance(properties, pika.spec.BasicProperties), (
properties)
        LOGGER.warning(
            "Published message was returned: _delivery_confirmation=%s; "
            "channel=%s; method=%r; properties=%r; body_size=%s; "
            "body_prefix=%.255r", self._delivery_confirmation,
            channel.channel_number, method, properties,
            len(body) if body is not None else None, body)
self._puback_return = ReturnedMessage(method, properties, body)
def _add_pending_event(self, evt):
"""Append an event to the channel's list of events that are ready for
dispatch to user and signal our connection that this channel is ready
for event dispatch
:param _ChannelPendingEvt evt: an event derived from _ChannelPendingEvt
"""
self._pending_events.append(evt)
self.connection._request_channel_dispatch(self.channel_number)
def _on_consumer_cancelled_by_broker(self, # pylint: disable=C0103
method_frame):
"""Called by impl when broker cancels consumer via Basic.Cancel.
        This is a RabbitMQ-specific feature. The circumstances include deletion
        of the queue being consumed as well as failure of an HA node
        responsible for the queue being consumed.
:param pika.frame.Method method_frame: method frame with the
`spec.Basic.Cancel` method
"""
evt = _ConsumerCancellationEvt(method_frame)
consumer = self._consumer_infos[method_frame.method.consumer_tag]
# Don't interfere with client-initiated cancellation flow
if not consumer.tearing_down:
consumer.state = _ConsumerInfo.CANCELLED_BY_BROKER
if consumer.alternate_event_sink is not None:
consumer.alternate_event_sink(evt)
else:
self._add_pending_event(evt)
def _on_consumer_message_delivery(self, channel, # pylint: disable=W0613
method, properties, body):
"""Called by impl when a message is delivered for a consumer
:param Channel channel: The implementation channel object
:param spec.Basic.Deliver method:
:param pika.spec.BasicProperties properties: message properties
:param body: delivered message body; empty string if no body
:type body: str, unicode, or bytes (python 3.x)
"""
evt = _ConsumerDeliveryEvt(method, properties, body)
consumer = self._consumer_infos[method.consumer_tag]
if consumer.alternate_event_sink is not None:
consumer.alternate_event_sink(evt)
else:
self._add_pending_event(evt)
def _on_consumer_generator_event(self, evt):
"""Sink for the queue consumer generator's consumer events; append the
event to queue consumer generator's pending events buffer.
:param evt: an object of type _ConsumerDeliveryEvt or
_ConsumerCancellationEvt
"""
self._queue_consumer_generator.pending_events.append(evt)
# Schedule termination of connection.process_data_events using a
# negative channel number
self.connection._request_channel_dispatch(-self.channel_number)
def _cancel_all_consumers(self):
"""Cancel all consumers.
NOTE: pending non-ackable messages will be lost; pending ackable
messages will be rejected.
"""
if self._consumer_infos:
LOGGER.debug('Cancelling %i consumers', len(self._consumer_infos))
if self._queue_consumer_generator is not None:
# Cancel queue consumer generator
self.cancel()
# Cancel consumers created via basic_consume
for consumer_tag in pika.compat.dictkeys(self._consumer_infos):
self.basic_cancel(consumer_tag)
def _dispatch_events(self):
"""Called by BlockingConnection to dispatch pending events.
`BlockingChannel` schedules this callback via
`BlockingConnection._request_channel_dispatch`
"""
while self._pending_events:
evt = self._pending_events.popleft()
if type(evt) is _ConsumerDeliveryEvt:
consumer_info = self._consumer_infos[evt.method.consumer_tag]
consumer_info.consumer_cb(self, evt.method, evt.properties,
evt.body)
elif type(evt) is _ConsumerCancellationEvt:
del self._consumer_infos[evt.method_frame.method.consumer_tag]
self._impl.callbacks.process(self.channel_number,
self._CONSUMER_CANCELLED_CB_KEY,
self,
evt.method_frame)
else:
evt.dispatch()
def close(self, reply_code=0, reply_text="Normal Shutdown"):
"""Will invoke a clean shutdown of the channel with the AMQP Broker.
:param int reply_code: The reply code to close the channel with
:param str reply_text: The reply text to close the channel with
"""
LOGGER.info('Channel.close(%s, %s)', reply_code, reply_text)
# Cancel remaining consumers
self._cancel_all_consumers()
# Close the channel
try:
with _CallbackResult() as close_ok_result:
self._impl.add_callback(callback=close_ok_result.signal_once,
replies=[pika.spec.Channel.CloseOk],
one_shot=True)
self._impl.close(reply_code=reply_code, reply_text=reply_text)
self._flush_output(close_ok_result.is_ready)
finally:
self._cleanup()
def flow(self, active):
"""Turn Channel flow control off and on.
NOTE: RabbitMQ doesn't support active=False; per
https://www.rabbitmq.com/specification.html: "active=false is not
supported by the server. Limiting prefetch with basic.qos provides much
better control"
For more information, please reference:
http://www.rabbitmq.com/amqp-0-9-1-reference.html#channel.flow
:param bool active: Turn flow on (True) or off (False)
:returns: True if broker will start or continue sending; False if not
:rtype: bool
"""
with _CallbackResult(self._FlowOkCallbackResultArgs) as flow_ok_result:
self._impl.flow(callback=flow_ok_result.set_value_once,
active=active)
self._flush_output(flow_ok_result.is_ready)
return flow_ok_result.value.active
def add_on_cancel_callback(self, callback):
"""Pass a callback function that will be called when Basic.Cancel
is sent by the broker. The callback function should receive a method
frame parameter.
:param callable callback: a callable for handling broker's Basic.Cancel
notification with the call signature: callback(method_frame)
where method_frame is of type `pika.frame.Method` with method of
type `spec.Basic.Cancel`
"""
self._impl.callbacks.add(self.channel_number,
self._CONSUMER_CANCELLED_CB_KEY,
callback,
one_shot=False)
def add_on_return_callback(self, callback):
"""Pass a callback function that will be called when a published
message is rejected and returned by the server via `Basic.Return`.
:param callable callback: The method to call on callback with the
signature callback(channel, method, properties, body), where
channel: pika.Channel
method: pika.spec.Basic.Return
properties: pika.spec.BasicProperties
body: str, unicode, or bytes (python 3.x)
"""
self._impl.add_on_return_callback(
lambda _channel, method, properties, body: (
self._add_pending_event(
_ReturnedMessageEvt(
callback, self, method, properties, body))))
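    # Editor's note: an illustrative sketch (hypothetical names; not part of
    # pika's API). Assumes BlockingChannel.basic_publish with a `mandatory`
    # flag, as in pika 0.10; the callback then fires from
    # process_data_events() after the broker returns an unroutable message.
    @staticmethod
    def _example_return_callback_usage():
        connection = BlockingConnection()
        channel = connection.channel()
        def on_returned(ch, method, properties, body):
            LOGGER.warning('returned: %s %r', method.reply_text, body)
        channel.add_on_return_callback(on_returned)
        channel.basic_publish(exchange='', routing_key='no-such-queue',
                              body='hello', mandatory=True)
        connection.process_data_events(time_limit=1)
        connection.close()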
def basic_consume(self, # pylint: disable=R0913
consumer_callback,
queue,
no_ack=False,
exclusive=False,
consumer_tag=None,
arguments=None):
"""Sends the AMQP command Basic.Consume to the broker and binds messages
for the consumer_tag to the consumer callback. If you do not pass in
a consumer_tag, one will be automatically generated for you. Returns
the consumer tag.
NOTE: the consumer callbacks are dispatched only in the scope of
specially-designated methods: see
`BlockingConnection.process_data_events` and
`BlockingChannel.start_consuming`.
For more information about Basic.Consume, see:
http://www.rabbitmq.com/amqp-0-9-1-reference.html#basic.consume
:param callable consumer_callback: The function for dispatching messages
to user, having the signature:
consumer_callback(channel, method, properties, body)
channel: BlockingChannel
method: spec.Basic.Deliver
properties: spec.BasicProperties
body: str or unicode
:param queue: The queue to consume from
:type queue: str or unicode
:param bool no_ack: Tell the broker to not expect a response (i.e.,
no ack/nack)
:param bool exclusive: Don't allow other consumers on the queue
:param consumer_tag: You may specify your own consumer tag; if left
empty, a consumer tag will be generated automatically
:type consumer_tag: str or unicode
:param dict arguments: Custom key/value pair arguments for the consumer
:returns: consumer tag
:rtype: str
:raises pika.exceptions.DuplicateConsumerTag: if consumer with given
consumer_tag is already present.
"""
if not callable(consumer_callback):
raise ValueError('consumer callback must be callable; got %r'
% consumer_callback)
return self._basic_consume_impl(
queue=queue,
no_ack=no_ack,
exclusive=exclusive,
consumer_tag=consumer_tag,
arguments=arguments,
consumer_callback=consumer_callback)
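    # Editor's note: a minimal illustrative sketch (hypothetical queue name;
    # not part of pika's API) of the basic_consume/start_consuming flow.
    @staticmethod
    def _example_basic_consume_usage():
        connection = BlockingConnection()
        channel = connection.channel()
        def on_message(ch, method, properties, body):
            LOGGER.info('received %r', body)
            ch.basic_ack(method.delivery_tag)
        channel.basic_consume(on_message, queue='work-queue')
        channel.start_consuming()  # blocks until all consumers are cancelled
        connection.close()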
def _basic_consume_impl(self, # pylint: disable=R0913
queue,
no_ack,
exclusive,
consumer_tag,
arguments=None,
consumer_callback=None,
alternate_event_sink=None):
"""The low-level implementation used by `basic_consume` and `consume`.
See `basic_consume` docstring for more info.
        NOTE: exactly one of consumer_callback/alternate_event_sink must be
non-None.
This method has one additional parameter alternate_event_sink over the
args described in `basic_consume`.
:param callable alternate_event_sink: if specified, _ConsumerDeliveryEvt
and _ConsumerCancellationEvt objects will be diverted to this
callback instead of being deposited in the channel's
`_pending_events` container. Signature:
alternate_event_sink(evt)
:raises pika.exceptions.DuplicateConsumerTag: if consumer with given
consumer_tag is already present.
"""
if (consumer_callback is None) == (alternate_event_sink is None):
raise ValueError(
('exactly one of consumer_callback/alternate_event_sink must '
'be non-None', consumer_callback, alternate_event_sink))
if not consumer_tag:
# Need a consumer tag to register consumer info before sending
# request to broker, because I/O might dispatch incoming messages
# immediately following Basic.Consume-ok before _flush_output
# returns
consumer_tag = self._impl._generate_consumer_tag()
if consumer_tag in self._consumer_infos:
raise exceptions.DuplicateConsumerTag(consumer_tag)
# Create new consumer
self._consumer_infos[consumer_tag] = _ConsumerInfo(
consumer_tag,
no_ack=no_ack,
consumer_cb=consumer_callback,
alternate_event_sink=alternate_event_sink)
try:
with self._basic_consume_ok_result as ok_result:
tag = self._impl.basic_consume(
consumer_callback=self._on_consumer_message_delivery,
queue=queue,
no_ack=no_ack,
exclusive=exclusive,
consumer_tag=consumer_tag,
arguments=arguments)
assert tag == consumer_tag, (tag, consumer_tag)
self._flush_output(ok_result.is_ready)
except Exception:
# If channel was closed, self._consumer_infos will be empty
if consumer_tag in self._consumer_infos:
del self._consumer_infos[consumer_tag]
raise
# NOTE: Consumer could get cancelled by broker immediately after opening
# (e.g., queue getting deleted externally)
if self._consumer_infos[consumer_tag].setting_up:
self._consumer_infos[consumer_tag].state = _ConsumerInfo.ACTIVE
return consumer_tag
def basic_cancel(self, consumer_tag):
"""This method cancels a consumer. This does not affect already
delivered messages, but it does mean the server will not send any more
messages for that consumer. The client may receive an arbitrary number
of messages in between sending the cancel method and receiving the
cancel-ok reply.
NOTE: When cancelling a no_ack=False consumer, this implementation
automatically Nacks and suppresses any incoming messages that have not
yet been dispatched to the consumer's callback. However, when cancelling
a no_ack=True consumer, this method will return any pending messages
that arrived before broker confirmed the cancellation.
:param str consumer_tag: Identifier for the consumer; the result of
passing a consumer_tag that was created on another channel is
undefined (bad things will happen)
        :returns: (NEW IN pika 0.10.0) empty sequence for a no_ack=False
            consumer; for a no_ack=True consumer, returns a (possibly empty)
            sequence of pending messages that arrived before broker confirmed
            the cancellation (this is done instead of via the consumer's
            callback in order to prevent reentrancy/recursion). Each message
            is a three-tuple:
                (method, properties, body)
                method: spec.Basic.Deliver
                properties: spec.BasicProperties
                body: str or unicode
"""
try:
consumer_info = self._consumer_infos[consumer_tag]
except KeyError:
LOGGER.warn("User is attempting to cancel an unknown consumer=%s; "
"already cancelled by user or broker?", consumer_tag)
return []
try:
# Assertion failure here is most likely due to reentrance
assert consumer_info.active or consumer_info.cancelled_by_broker, (
consumer_info.state)
# Assertion failure here signals disconnect between consumer state
# in BlockingConnection and Connection
assert (consumer_info.cancelled_by_broker or
consumer_tag in self._impl._consumers), consumer_tag
no_ack = consumer_info.no_ack
consumer_info.state = _ConsumerInfo.TEARING_DOWN
with _CallbackResult() as cancel_ok_result:
# Nack pending messages for no_ack=False consumer
if not no_ack:
pending_messages = self._remove_pending_deliveries(
consumer_tag)
if pending_messages:
# NOTE: we use impl's basic_reject to avoid the
# possibility of redelivery before basic_cancel takes
# control of nacking.
# NOTE: we can't use basic_nack with the multiple option
# to avoid nacking messages already held by our client.
for message in pending_messages:
self._impl.basic_reject(message.method.delivery_tag,
requeue=True)
# Cancel the consumer; impl takes care of rejecting any
# additional deliveries that arrive for a no_ack=False
# consumer
self._impl.basic_cancel(
callback=cancel_ok_result.signal_once,
consumer_tag=consumer_tag,
nowait=False)
# Flush output and wait for Basic.Cancel-ok or
# broker-initiated Basic.Cancel
self._flush_output(
cancel_ok_result.is_ready,
lambda: consumer_tag not in self._impl._consumers)
if no_ack:
# Return pending messages for no_ack=True consumer
return [
(evt.method, evt.properties, evt.body)
for evt in self._remove_pending_deliveries(consumer_tag)]
else:
# impl takes care of rejecting any incoming deliveries during
# cancellation
messages = self._remove_pending_deliveries(consumer_tag)
assert not messages, messages
return []
finally:
# NOTE: The entry could be purged if channel or connection closes
if consumer_tag in self._consumer_infos:
del self._consumer_infos[consumer_tag]
def _remove_pending_deliveries(self, consumer_tag):
"""Extract _ConsumerDeliveryEvt objects destined for the given consumer
from pending events, discarding the _ConsumerCancellationEvt, if any
:param str consumer_tag:
:returns: a (possibly empty) sequence of _ConsumerDeliveryEvt destined
for the given consumer tag
"""
remaining_events = deque()
unprocessed_messages = []
while self._pending_events:
evt = self._pending_events.popleft()
if type(evt) is _ConsumerDeliveryEvt:
if evt.method.consumer_tag == consumer_tag:
unprocessed_messages.append(evt)
continue
if type(evt) is _ConsumerCancellationEvt:
if evt.method_frame.method.consumer_tag == consumer_tag:
# A broker-initiated Basic.Cancel must have arrived
# before our cancel request completed
continue
remaining_events.append(evt)
self._pending_events = remaining_events
return unprocessed_messages
def start_consuming(self):
"""Processes I/O events and dispatches timers and `basic_consume`
callbacks until all consumers are cancelled.
NOTE: this blocking function may not be called from the scope of a
pika callback, because dispatching `basic_consume` callbacks from this
context would constitute recursion.
:raises pika.exceptions.RecursionError: if called from the scope of a
`BlockingConnection` or `BlockingChannel` callback
"""
# Check if called from the scope of an event dispatch callback
with self.connection._acquire_event_dispatch() as dispatch_allowed:
if not dispatch_allowed:
raise exceptions.RecursionError(
'start_consuming may not be called from the scope of '
'another BlockingConnection or BlockingChannel callback')
# Process events as long as consumers exist on this channel
while self._consumer_infos:
self.connection.process_data_events(time_limit=None)
def stop_consuming(self, consumer_tag=None):
""" Cancels all consumers, signalling the `start_consuming` loop to
exit.
NOTE: pending non-ackable messages will be lost; pending ackable
messages will be rejected.
"""
if consumer_tag:
self.basic_cancel(consumer_tag)
else:
self._cancel_all_consumers()
def consume(self, queue, no_ack=False, # pylint: disable=R0913
exclusive=False, arguments=None,
inactivity_timeout=None):
"""Blocking consumption of a queue instead of via a callback. This
method is a generator that yields each message as a tuple of method,
properties, and body. The active generator iterator terminates when the
consumer is cancelled by client or broker.
Example:
for method, properties, body in channel.consume('queue'):
print body
channel.basic_ack(method.delivery_tag)
You should call `BlockingChannel.cancel()` when you escape out of the
generator loop.
        If you don't cancel this consumer, then the next call on the same
        channel
to `consume()` with the exact same (queue, no_ack, exclusive) parameters
will resume the existing consumer generator; however, calling with
different parameters will result in an exception.
:param queue: The queue name to consume
:type queue: str or unicode
        :param bool no_ack: Tell the broker to not expect an ack/nack response
:param bool exclusive: Don't allow other consumers on the queue
:param dict arguments: Custom key/value pair arguments for the consumer
:param float inactivity_timeout: if a number is given (in
seconds), will cause the method to yield None after the given period
            of inactivity; this permits pseudo-regular maintenance
activities to be carried out by the user while waiting for messages
to arrive. If None is given (default), then the method blocks until
the next event arrives. NOTE that timing granularity is limited by
the timer resolution of the underlying implementation.
NEW in pika 0.10.0.
:yields: tuple(spec.Basic.Deliver, spec.BasicProperties, str or unicode)
:raises ValueError: if consumer-creation parameters don't match those
of the existing queue consumer generator, if any.
NEW in pika 0.10.0
"""
params = (queue, no_ack, exclusive)
if self._queue_consumer_generator is not None:
if params != self._queue_consumer_generator.params:
raise ValueError(
'Consume with different params not allowed on existing '
'queue consumer generator; previous params: %r; '
'new params: %r'
% (self._queue_consumer_generator.params,
(queue, no_ack, exclusive)))
else:
LOGGER.debug('Creating new queue consumer generator; params: %r',
params)
# Need a consumer tag to register consumer info before sending
# request to broker, because I/O might pick up incoming messages
# in addition to Basic.Consume-ok
consumer_tag = self._impl._generate_consumer_tag()
self._queue_consumer_generator = _QueueConsumerGeneratorInfo(
params,
consumer_tag)
try:
self._basic_consume_impl(
queue=queue,
no_ack=no_ack,
exclusive=exclusive,
consumer_tag=consumer_tag,
arguments=arguments,
alternate_event_sink=self._on_consumer_generator_event)
except Exception:
self._queue_consumer_generator = None
raise
LOGGER.info('Created new queue consumer generator %r',
self._queue_consumer_generator)
while self._queue_consumer_generator is not None:
if self._queue_consumer_generator.pending_events:
evt = self._queue_consumer_generator.pending_events.popleft()
if type(evt) is _ConsumerCancellationEvt:
# Consumer was cancelled by broker
self._queue_consumer_generator = None
break
else:
yield (evt.method, evt.properties, evt.body)
continue
# Wait for a message to arrive
if inactivity_timeout is None:
self.connection.process_data_events(time_limit=None)
continue
# Wait with inactivity timeout
wait_start_time = time.time()
wait_deadline = wait_start_time + inactivity_timeout
delta = inactivity_timeout
while (self._queue_consumer_generator is not None and
not self._queue_consumer_generator.pending_events):
self.connection.process_data_events(time_limit=delta)
if not self._queue_consumer_generator:
# Consumer was cancelled by client
break
if self._queue_consumer_generator.pending_events:
# Got message(s)
break
delta = wait_deadline - time.time()
if delta <= 0.0:
# Signal inactivity timeout
yield None
break
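    # Editor's note: an illustrative sketch (hypothetical queue name; not
    # part of pika's API) of the consume() generator with an inactivity
    # timeout.
    @staticmethod
    def _example_consume_generator_usage():
        connection = BlockingConnection()
        channel = connection.channel()
        for evt in channel.consume('work-queue', inactivity_timeout=1.0):
            if evt is None:
                continue  # idle for 1 second; do periodic upkeep here
            method, properties, body = evt
            channel.basic_ack(method.delivery_tag)
            break  # stop after the first message for this sketch
        channel.cancel()  # cancel the generator's consumer
        connection.close()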
def get_waiting_message_count(self):
"""Returns the number of messages that may be retrieved from the current
        queue consumer generator via `BlockingChannel.consume` without blocking.
NEW in pika 0.10.0
:rtype: int
"""
if self._queue_consumer_generator is not None:
pending_events = self._queue_consumer_generator.pending_events
count = len(pending_events)
if count and type(pending_events[-1]) is _ConsumerCancellationEvt:
count -= 1
else:
count = 0
return count
def cancel(self):
"""Cancel the queue consumer created by `BlockingChannel.consume`,
rejecting all pending ackable messages.
NOTE: If you're looking to cancel a consumer issued with
BlockingChannel.basic_consume then you should call
BlockingChannel.basic_cancel.
:returns: The number of messages requeued by Basic.Nack.
:rtype: int
NEW in 0.10.0: returns 0
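Example (illustrative sketch; assumes a consumer generator created by
`BlockingChannel.consume` is active on `channel`):
    for method, properties, body in channel.consume('test'):
        channel.basic_ack(method.delivery_tag)
        break  # stop after the first message
    requeued = channel.cancel()  # always 0 in this version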
"""
if self._queue_consumer_generator is None:
LOGGER.warning('cancel: queue consumer generator is inactive '
'(already cancelled by client or broker?)')
return 0
try:
_, no_ack, _ = self._queue_consumer_generator.params
if not no_ack:
# Reject messages held by queue consumer generator; NOTE: we
# can't use basic_nack with the multiple option to avoid nacking
# messages already held by our client.
pending_events = self._queue_consumer_generator.pending_events
for _ in compat.xrange(self.get_waiting_message_count()):
evt = pending_events.popleft()
self._impl.basic_reject(evt.method.delivery_tag,
requeue=True)
self.basic_cancel(self._queue_consumer_generator.consumer_tag)
finally:
self._queue_consumer_generator = None
# Return 0 for compatibility with legacy implementation; the number of
# nacked messages is not meaningful since only messages consumed with
# no_ack=False may be nacked, and those arriving after calling
# basic_cancel will be rejected automatically by impl channel, so we'll
# never know how many of those were nacked.
return 0
def basic_ack(self, delivery_tag=0, multiple=False):
"""Acknowledge one or more messages. When sent by the client, this
method acknowledges one or more messages delivered via the Deliver or
Get-Ok methods. When sent by server, this method acknowledges one or
more messages published with the Publish method on a channel in
confirm mode. The acknowledgement can be for a single message or a
set of messages up to and including a specific message.
:param int delivery_tag: The server-assigned delivery tag
:param bool multiple: If set to True, the delivery tag is treated as
"up to and including", so that multiple messages
can be acknowledged with a single method. If set
to False, the delivery tag refers to a single
message. If the multiple field is 1, and the
delivery tag is zero, this indicates
acknowledgement of all outstanding messages.
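Example (a minimal sketch; `method` is the spec.Basic.Deliver frame
yielded for a previously received message):
    channel.basic_ack(delivery_tag=method.delivery_tag)
    # or acknowledge this and all earlier unacked deliveries at once:
    channel.basic_ack(delivery_tag=method.delivery_tag, multiple=True)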
"""
self._impl.basic_ack(delivery_tag=delivery_tag, multiple=multiple)
self._flush_output()
def basic_nack(self, delivery_tag=None, multiple=False, requeue=True):
"""This method allows a client to reject one or more incoming messages.
It can be used to interrupt and cancel large incoming messages, or
return untreatable messages to their original queue.
:param int delivery_tag: The server-assigned delivery tag
:param bool multiple: If set to True, the delivery tag is treated as
"up to and including", so that multiple messages
can be acknowledged with a single method. If set
to False, the delivery tag refers to a single
message. If the multiple field is 1, and the
delivery tag is zero, this indicates
acknowledgement of all outstanding messages.
:param bool requeue: If requeue is true, the server will attempt to
requeue the message. If requeue is false or the
requeue attempt fails the messages are discarded or
dead-lettered.
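Example (illustrative; returns one message to the queue so another
consumer may receive it):
    channel.basic_nack(delivery_tag=method.delivery_tag, requeue=True)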
"""
self._impl.basic_nack(delivery_tag=delivery_tag, multiple=multiple,
requeue=requeue)
self._flush_output()
def basic_get(self, queue=None, no_ack=False):
"""Get a single message from the AMQP broker. Returns a sequence with
the method frame, message properties, and body.
:param queue: Name of queue to get a message from
:type queue: str or unicode
:param bool no_ack: Tell the broker not to expect a reply
:returns: a three-tuple; (None, None, None) if the queue was empty;
otherwise (method, properties, body); NOTE: body may be None
:rtype: (None, None, None)|(spec.Basic.GetOk,
spec.BasicProperties,
str or unicode or None)
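Example (a minimal polling sketch; assumes a queue named 'test'
exists):
    method, properties, body = channel.basic_get('test')
    if method is None:
        print('queue was empty')
    else:
        channel.basic_ack(method.delivery_tag)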
"""
assert not self._basic_getempty_result
# NOTE: nested with for python 2.6 compatibility
with _CallbackResult(self._RxMessageArgs) as get_ok_result:
with self._basic_getempty_result:
self._impl.basic_get(callback=get_ok_result.set_value_once,
queue=queue,
no_ack=no_ack)
self._flush_output(get_ok_result.is_ready,
self._basic_getempty_result.is_ready)
if get_ok_result:
evt = get_ok_result.value
return (evt.method, evt.properties, evt.body)
else:
assert self._basic_getempty_result, (
"wait completed without GetOk and GetEmpty")
return None, None, None
def basic_publish(self, exchange, routing_key, body, # pylint: disable=R0913
properties=None, mandatory=False, immediate=False):
"""Publish to the channel with the given exchange, routing key and body.
Returns a boolean value indicating the success of the operation.
This is the legacy BlockingChannel method for publishing. See also
`BlockingChannel.publish`, which provides more information about failures.
For more information on basic_publish and what the parameters do, see:
http://www.rabbitmq.com/amqp-0-9-1-reference.html#basic.publish
NOTE: mandatory and immediate may be enabled even without delivery
confirmation, but in the absence of delivery confirmation the
synchronous implementation has no way to know how long to wait for
the Basic.Return or lack thereof.
:param exchange: The exchange to publish to
:type exchange: str or unicode
:param routing_key: The routing key to bind on
:type routing_key: str or unicode
:param body: The message body; empty string if no body
:type body: str or unicode
:param pika.spec.BasicProperties properties: message properties
:param bool mandatory: The mandatory flag
:param bool immediate: The immediate flag
:returns: True if delivery confirmation is not enabled (NEW in pika
0.10.0); otherwise returns False if the message could not be
delivered (Basic.nack and/or Basic.Return) and True if the message
was delivered (Basic.ack and no Basic.Return)
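Example (illustrative sketch; the boolean result is only meaningful
after `BlockingChannel.confirm_delivery` has been called):
    ok = channel.basic_publish(exchange='',
                               routing_key='test',
                               body='hello',
                               mandatory=True)
    if not ok:
        print('message was returned or nacked')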
"""
try:
self.publish(exchange, routing_key, body, properties,
mandatory, immediate)
except (exceptions.NackError, exceptions.UnroutableError):
return False
else:
return True
def publish(self, exchange, routing_key, body, # pylint: disable=R0913
properties=None, mandatory=False, immediate=False):
"""Publish to the channel with the given exchange, routing key, and
body. Unlike the legacy `BlockingChannel.basic_publish`, this method
provides more information about failures via exceptions.
For more information on basic_publish and what the parameters do, see:
http://www.rabbitmq.com/amqp-0-9-1-reference.html#basic.publish
NOTE: mandatory and immediate may be enabled even without delivery
confirmation, but in the absence of delivery confirmation the
synchronous implementation has no way to know how long to wait for
the Basic.Return.
:param exchange: The exchange to publish to
:type exchange: str or unicode
:param routing_key: The routing key to bind on
:type routing_key: str or unicode
:param body: The message body; empty string if no body
:type body: str or unicode
:param pika.spec.BasicProperties properties: message properties
:param bool mandatory: The mandatory flag
:param bool immediate: The immediate flag
:raises UnroutableError: raised when a message published in
publisher-acknowledgments mode (see
`BlockingChannel.confirm_delivery`) is returned via `Basic.Return`
followed by `Basic.Ack`.
:raises NackError: raised when a message published in
publisher-acknowledgements mode is Nack'ed by the broker. See
`BlockingChannel.confirm_delivery`.
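Example (a minimal sketch; assumes delivery confirmation was enabled
via `BlockingChannel.confirm_delivery`):
    try:
        channel.publish(exchange='', routing_key='test', body='hello',
                        mandatory=True)
    except exceptions.UnroutableError:
        print('message could not be routed to any queue')
    except exceptions.NackError:
        print('message was nacked by the broker')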
"""
if self._delivery_confirmation:
# In publisher-acknowledgments mode
with self._message_confirmation_result:
self._impl.basic_publish(exchange=exchange,
routing_key=routing_key,
body=body,
properties=properties,
mandatory=mandatory,
immediate=immediate)
self._flush_output(self._message_confirmation_result.is_ready)
conf_method = (self._message_confirmation_result.value
.method_frame
.method)
if isinstance(conf_method, pika.spec.Basic.Nack):
# Broker was unable to process the message due to internal
# error
LOGGER.warning(
"Message was Nack'ed by broker: nack=%r; channel=%s; "
"exchange=%s; routing_key=%s; mandatory=%r; "
"immediate=%r", conf_method, self.channel_number,
exchange, routing_key, mandatory, immediate)
if self._puback_return is not None:
returned_messages = [self._puback_return]
self._puback_return = None
else:
returned_messages = []
raise exceptions.NackError(returned_messages)
else:
assert isinstance(conf_method, pika.spec.Basic.Ack), (
conf_method)
if self._puback_return is not None:
# Unroutable message was returned
messages = [self._puback_return]
self._puback_return = None
raise exceptions.UnroutableError(messages)
else:
# In non-publisher-acknowledgments mode
self._impl.basic_publish(exchange=exchange,
routing_key=routing_key,
body=body,
properties=properties,
mandatory=mandatory,
immediate=immediate)
self._flush_output()
def basic_qos(self, prefetch_size=0, prefetch_count=0, all_channels=False):
"""Specify quality of service. This method requests a specific quality
of service. The QoS can be specified for the current channel or for all
channels on the connection. The client can request that messages be sent
in advance so that when the client finishes processing a message, the
following message is already held locally, rather than needing to be
sent down the channel. Prefetching gives a performance improvement.
:param int prefetch_size: This field specifies the prefetch window
size. The server will send a message in
advance if it is equal to or smaller in size
than the available prefetch size (and also
falls into other prefetch limits). May be set
to zero, meaning "no specific limit",
although other prefetch limits may still
apply. The prefetch-size is ignored if the
no-ack option is set in the consumer.
:param int prefetch_count: Specifies a prefetch window in terms of whole
messages. This field may be used in
combination with the prefetch-size field; a
message will only be sent in advance if both
prefetch windows (and those at the channel
and connection level) allow it. The
prefetch-count is ignored if the no-ack
option is set in the consumer.
:param bool all_channels: Should the QoS apply to all channels
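Example (illustrative; limit this channel to one unacknowledged
delivery at a time):
    channel.basic_qos(prefetch_count=1)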
"""
with _CallbackResult() as qos_ok_result:
self._impl.basic_qos(callback=qos_ok_result.signal_once,
prefetch_size=prefetch_size,
prefetch_count=prefetch_count,
all_channels=all_channels)
self._flush_output(qos_ok_result.is_ready)
def basic_recover(self, requeue=False):
"""This method asks the server to redeliver all unacknowledged messages
on a specified channel. Zero or more messages may be redelivered. This
method replaces the asynchronous Recover.
:param bool requeue: If False, the message will be redelivered to the
original recipient. If True, the server will
attempt to requeue the message, potentially then
delivering it to an alternative subscriber.
"""
with _CallbackResult() as recover_ok_result:
self._impl.basic_recover(callback=recover_ok_result.signal_once,
requeue=requeue)
self._flush_output(recover_ok_result.is_ready)
def basic_reject(self, delivery_tag=None, requeue=True):
"""Reject an incoming message. This method allows a client to reject a
message. It can be used to interrupt and cancel large incoming messages,
or return untreatable messages to their original queue.
:param int delivery_tag: The server-assigned delivery tag
:param bool requeue: If requeue is true, the server will attempt to
requeue the message. If requeue is false or the
requeue attempt fails the messages are discarded or
dead-lettered.
"""
self._impl.basic_reject(delivery_tag=delivery_tag, requeue=requeue)
self._flush_output()
def confirm_delivery(self):
"""Turn on RabbitMQ-proprietary Confirm mode in the channel.
For more information see:
http://www.rabbitmq.com/extensions.html#confirms
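Example (a minimal sketch):
    channel.confirm_delivery()
    # subsequent publishes now block until acked, nacked or returned
    channel.publish(exchange='', routing_key='test', body='hello')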
"""
if self._delivery_confirmation:
LOGGER.error('confirm_delivery: confirmation was already enabled '
'on channel=%s', self.channel_number)
return
with _CallbackResult() as select_ok_result:
self._impl.add_callback(callback=select_ok_result.signal_once,
replies=[pika.spec.Confirm.SelectOk],
one_shot=True)
self._impl.confirm_delivery(
callback=self._message_confirmation_result.set_value_once,
nowait=False)
self._flush_output(select_ok_result.is_ready)
self._delivery_confirmation = True
# Unroutable messages returned after this point will be in the context
# of publisher acknowledgments
self._impl.add_on_return_callback(self._on_puback_message_returned)
def exchange_declare(self, exchange=None, # pylint: disable=R0913
exchange_type='direct', passive=False, durable=False,
auto_delete=False, internal=False,
arguments=None, **kwargs):
"""This method creates an exchange if it does not already exist, and if
the exchange exists, verifies that it is of the correct and expected
class.
If passive is set, the server will reply with Declare-Ok if the exchange
already exists with the same name; if the exchange does not already
exist, the server MUST raise a channel exception with reply code 404
(not found).
:param exchange: The exchange name consists of a non-empty sequence of
these characters: letters, digits, hyphen, underscore,
period, or colon.
:type exchange: str or unicode
:param str exchange_type: The exchange type to use
:param bool passive: Perform a declare or just check to see if it exists
:param bool durable: Survive a reboot of RabbitMQ
:param bool auto_delete: Remove when no more queues are bound to it
:param bool internal: Can only be published to by other exchanges
:param dict arguments: Custom key/value pair arguments for the exchange
:param str type: via kwargs: the deprecated exchange type parameter
:returns: Method frame from the Exchange.Declare-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Exchange.DeclareOk`
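Example (illustrative sketch; 'events' is a hypothetical exchange
name):
    frame = channel.exchange_declare(exchange='events',
                                     exchange_type='topic',
                                     durable=True)
    assert isinstance(frame.method, pika.spec.Exchange.DeclareOk)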
"""
assert len(kwargs) <= 1, kwargs
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as declare_ok_result:
self._impl.exchange_declare(
callback=declare_ok_result.set_value_once,
exchange=exchange,
exchange_type=exchange_type,
passive=passive,
durable=durable,
auto_delete=auto_delete,
internal=internal,
nowait=False,
arguments=arguments,
type=kwargs["type"] if kwargs else None)
self._flush_output(declare_ok_result.is_ready)
return declare_ok_result.value.method_frame
def exchange_delete(self, exchange=None, if_unused=False):
"""Delete the exchange.
:param exchange: The exchange name
:type exchange: str or unicode
:param bool if_unused: only delete if the exchange is unused
:returns: Method frame from the Exchange.Delete-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Exchange.DeleteOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as delete_ok_result:
self._impl.exchange_delete(
callback=delete_ok_result.set_value_once,
exchange=exchange,
if_unused=if_unused,
nowait=False)
self._flush_output(delete_ok_result.is_ready)
return delete_ok_result.value.method_frame
def exchange_bind(self, destination=None, source=None, routing_key='',
arguments=None):
"""Bind an exchange to another exchange.
:param destination: The destination exchange to bind
:type destination: str or unicode
:param source: The source exchange to bind to
:type source: str or unicode
:param routing_key: The routing key to bind on
:type routing_key: str or unicode
:param dict arguments: Custom key/value pair arguments for the binding
:returns: Method frame from the Exchange.Bind-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Exchange.BindOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as bind_ok_result:
self._impl.exchange_bind(
callback=bind_ok_result.set_value_once,
destination=destination,
source=source,
routing_key=routing_key,
nowait=False,
arguments=arguments)
self._flush_output(bind_ok_result.is_ready)
return bind_ok_result.value.method_frame
def exchange_unbind(self, destination=None, source=None, routing_key='',
arguments=None):
"""Unbind an exchange from another exchange.
:param destination: The destination exchange to unbind
:type destination: str or unicode
:param source: The source exchange to unbind from
:type source: str or unicode
:param routing_key: The routing key to unbind
:type routing_key: str or unicode
:param dict arguments: Custom key/value pair arguments for the binding
:returns: Method frame from the Exchange.Unbind-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Exchange.UnbindOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as unbind_ok_result:
self._impl.exchange_unbind(
callback=unbind_ok_result.set_value_once,
destination=destination,
source=source,
routing_key=routing_key,
nowait=False,
arguments=arguments)
self._flush_output(unbind_ok_result.is_ready)
return unbind_ok_result.value.method_frame
def queue_declare(self, queue='', passive=False, durable=False, # pylint: disable=R0913
exclusive=False, auto_delete=False,
arguments=None):
"""Declare queue, create if needed. This method creates or checks a
queue. When creating a new queue the client can specify various
properties that control the durability of the queue and its contents,
and the level of sharing for the queue.
Leave the queue name empty for an auto-named queue in RabbitMQ
:param queue: The queue name
:type queue: str or unicode; if empty string, the broker will create a
unique queue name
:param bool passive: Only check to see if the queue exists
:param bool durable: Survive reboots of the broker
:param bool exclusive: Only allow access by the current connection
:param bool auto_delete: Delete after consumer cancels or disconnects
:param dict arguments: Custom key/value arguments for the queue
:returns: Method frame from the Queue.Declare-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Queue.DeclareOk`
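Example (a minimal sketch; lets the broker pick a name, then binds the
queue to a hypothetical 'events' exchange):
    result = channel.queue_declare(exclusive=True)
    queue_name = result.method.queue  # broker-assigned name
    channel.queue_bind(queue=queue_name, exchange='events',
                       routing_key='user.#')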
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as declare_ok_result:
self._impl.queue_declare(
callback=declare_ok_result.set_value_once,
queue=queue,
passive=passive,
durable=durable,
exclusive=exclusive,
auto_delete=auto_delete,
nowait=False,
arguments=arguments)
self._flush_output(declare_ok_result.is_ready)
return declare_ok_result.value.method_frame
def queue_delete(self, queue='', if_unused=False, if_empty=False):
"""Delete a queue from the broker.
:param queue: The queue to delete
:type queue: str or unicode
:param bool if_unused: only delete if it's unused
:param bool if_empty: only delete if the queue is empty
:returns: Method frame from the Queue.Delete-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Queue.DeleteOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as delete_ok_result:
self._impl.queue_delete(callback=delete_ok_result.set_value_once,
queue=queue,
if_unused=if_unused,
if_empty=if_empty,
nowait=False)
self._flush_output(delete_ok_result.is_ready)
return delete_ok_result.value.method_frame
def queue_purge(self, queue=''):
"""Purge all of the messages from the specified queue
:param queue: The queue to purge
:type queue: str or unicode
:returns: Method frame from the Queue.Purge-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Queue.PurgeOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as purge_ok_result:
self._impl.queue_purge(callback=purge_ok_result.set_value_once,
queue=queue,
nowait=False)
self._flush_output(purge_ok_result.is_ready)
return purge_ok_result.value.method_frame
def queue_bind(self, queue, exchange, routing_key=None,
arguments=None):
"""Bind the queue to the specified exchange
:param queue: The queue to bind to the exchange
:type queue: str or unicode
:param exchange: The source exchange to bind to
:type exchange: str or unicode
:param routing_key: The routing key to bind on
:type routing_key: str or unicode
:param dict arguments: Custom key/value pair arguments for the binding
:returns: Method frame from the Queue.Bind-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Queue.BindOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as bind_ok_result:
self._impl.queue_bind(callback=bind_ok_result.set_value_once,
queue=queue,
exchange=exchange,
routing_key=routing_key,
nowait=False,
arguments=arguments)
self._flush_output(bind_ok_result.is_ready)
return bind_ok_result.value.method_frame
def queue_unbind(self, queue='', exchange=None, routing_key=None,
arguments=None):
"""Unbind a queue from an exchange.
:param queue: The queue to unbind from the exchange
:type queue: str or unicode
:param exchange: The source exchange to unbind from
:type exchange: str or unicode
:param routing_key: The routing key to unbind
:type routing_key: str or unicode
:param dict arguments: Custom key/value pair arguments for the binding
:returns: Method frame from the Queue.Unbind-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Queue.UnbindOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as unbind_ok_result:
self._impl.queue_unbind(callback=unbind_ok_result.set_value_once,
queue=queue,
exchange=exchange,
routing_key=routing_key,
arguments=arguments)
self._flush_output(unbind_ok_result.is_ready)
return unbind_ok_result.value.method_frame
def tx_select(self):
"""Select standard transaction mode. This method sets the channel to use
standard transactions. The client must use this method at least once on
a channel before using the Commit or Rollback methods.
:returns: Method frame from the Tx.Select-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Tx.SelectOk`
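Example (illustrative transactional publish):
    channel.tx_select()
    channel.basic_publish(exchange='', routing_key='test', body='hello')
    channel.tx_commit()  # or channel.tx_rollback() to discard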
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as select_ok_result:
self._impl.tx_select(select_ok_result.set_value_once)
self._flush_output(select_ok_result.is_ready)
return select_ok_result.value.method_frame
def tx_commit(self):
"""Commit a transaction.
:returns: Method frame from the Tx.Commit-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Tx.CommitOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as commit_ok_result:
self._impl.tx_commit(commit_ok_result.set_value_once)
self._flush_output(commit_ok_result.is_ready)
return commit_ok_result.value.method_frame
def tx_rollback(self):
"""Rollback a transaction.
:returns: Method frame from the Tx.Rollback-ok response
:rtype: `pika.frame.Method` having `method` attribute of type
`spec.Tx.RollbackOk`
"""
with _CallbackResult(
self._MethodFrameCallbackResultArgs) as rollback_ok_result:
self._impl.tx_rollback(rollback_ok_result.set_value_once)
self._flush_output(rollback_ok_result.is_ready)
return rollback_ok_result.value.method_frame
| {
"content_hash": "17d41fceacd27b176212b03fdcfc20e2",
"timestamp": "",
"source": "github",
"line_count": 2477,
"max_line_length": 93,
"avg_line_length": 40.828017763423496,
"alnum_prop": 0.605284235298771,
"repo_name": "jstnlef/pika",
"id": "09051dae61312b7501faf2ebd1776a4fee0b762d",
"size": "101131",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "pika/adapters/blocking_connection.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "731858"
}
],
"symlink_target": ""
} |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'DatabaseUpgrade.source_plan_name'
db.add_column(u'maintenance_databaseupgrade', 'source_plan_name',
self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
keep_default=False)
# Adding field 'DatabaseUpgrade.target_plan_name'
db.add_column(u'maintenance_databaseupgrade', 'target_plan_name',
self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
keep_default=False)
# Changing field 'DatabaseUpgrade.target_plan'
db.alter_column(u'maintenance_databaseupgrade', 'target_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['physical.Plan']))
# Changing field 'DatabaseUpgrade.source_plan'
db.alter_column(u'maintenance_databaseupgrade', 'source_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['physical.Plan']))
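# For reference, the forwards() calls above correspond roughly to the
# following SQL (a sketch only; the exact statements depend on the
# database backend South targets, MySQL-flavored here):
#
#   ALTER TABLE maintenance_databaseupgrade
#       ADD COLUMN source_plan_name varchar(100) NULL;
#   ALTER TABLE maintenance_databaseupgrade
#       ADD COLUMN target_plan_name varchar(100) NULL;
#   ALTER TABLE maintenance_databaseupgrade
#       MODIFY target_plan_id integer NULL;
#   ALTER TABLE maintenance_databaseupgrade
#       MODIFY source_plan_id integer NULL;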
def backwards(self, orm):
# Deleting field 'DatabaseUpgrade.source_plan_name'
db.delete_column(u'maintenance_databaseupgrade', 'source_plan_name')
# Deleting field 'DatabaseUpgrade.target_plan_name'
db.delete_column(u'maintenance_databaseupgrade', 'target_plan_name')
# User chose to not deal with backwards NULL issues for 'DatabaseUpgrade.target_plan'
raise RuntimeError("Cannot reverse this migration. 'DatabaseUpgrade.target_plan' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'DatabaseUpgrade.target_plan'
db.alter_column(u'maintenance_databaseupgrade', 'target_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['physical.Plan']))
# User chose to not deal with backwards NULL issues for 'DatabaseUpgrade.source_plan'
raise RuntimeError("Cannot reverse this migration. 'DatabaseUpgrade.source_plan' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'DatabaseUpgrade.source_plan'
db.alter_column(u'maintenance_databaseupgrade', 'source_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['physical.Plan']))
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'dbaas_cloudstack.cloudstackoffering': {
'Meta': {'object_name': 'CloudStackOffering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_region'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackRegion']"}),
'serviceofferingid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'dbaas_cloudstack.cloudstackpack': {
'Meta': {'object_name': 'CloudStackPack'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_packs'", 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_packs'", 'to': u"orm['dbaas_cloudstack.CloudStackOffering']"}),
'script_file': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'dbaas_cloudstack.cloudstackregion': {
'Meta': {'object_name': 'CloudStackRegion'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_environment_region'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Plan']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_source'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_target'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | {
"content_hash": "d9adddeb0e20c30a9c7b8f28b22f4f61",
"timestamp": "",
"source": "github",
"line_count": 481,
"max_line_length": 227,
"avg_line_length": 94.05405405405405,
"alnum_prop": 0.5709106984969053,
"repo_name": "globocom/database-as-a-service",
"id": "67a4f610c5271bb5aaa8fc19f079634e59a09f78",
"size": "45264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dbaas/maintenance/migrations/0024_auto__add_field_databaseupgrade_source_plan_name__add_field_databaseup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "243568"
},
{
"name": "Dockerfile",
"bytes": "1372"
},
{
"name": "HTML",
"bytes": "310401"
},
{
"name": "JavaScript",
"bytes": "988830"
},
{
"name": "Makefile",
"bytes": "5199"
},
{
"name": "Python",
"bytes": "9674426"
},
{
"name": "Shell",
"bytes": "215115"
}
],
"symlink_target": ""
} |
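The frozen `models` dict above is only half of a South schema migration; the executable half lives in `forwards`/`backwards` methods that this excerpt omits. Below is a minimal sketch of how the two halves pair up. The field name comes from the migration's own filename (`source_plan_name` on `databaseupgrade`), but the table name and field options here are assumptions for illustration, not the real ones from migration 0024.

from south.db import db
from south.v2 import SchemaMigration

class Migration(SchemaMigration):

    def forwards(self, orm):
        # self.gf resolves a dotted field path, mirroring the tuples
        # stored in the frozen ``models`` dict above.
        db.add_column(u'maintenance_databaseupgrade', 'source_plan_name',
                      self.gf('django.db.models.fields.CharField')(
                          max_length=100, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        db.delete_column(u'maintenance_databaseupgrade', 'source_plan_name')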
import _plotly_utils.basevalidators
class PlacementValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self,
plotly_name="placement",
parent_name="layout.mapbox.layer.symbol",
**kwargs
):
super(PlacementValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "info"),
values=kwargs.pop("values", ["point", "line", "line-center"]),
**kwargs
)
| {
"content_hash": "100d61d30d483bd4559c292ccb16de7c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 75,
"avg_line_length": 32.388888888888886,
"alnum_prop": 0.5728987993138936,
"repo_name": "plotly/python-api",
"id": "06cc0216f4ef6ac9dd4f422d02d544d7e70632ad",
"size": "583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/layout/mapbox/layer/symbol/_placement.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
} |
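For context, a short sketch of what the validator above enforces. `validate_coerce` is inherited from the `EnumeratedValidator` base class; the behavior assumed below is the stock plotly one of returning valid values unchanged and raising `ValueError` otherwise.

validator = PlacementValidator()
validator.validate_coerce("line-center")  # accepted: one of the declared values
validator.validate_coerce("corner")       # raises ValueError: not in ["point", "line", "line-center"]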
"""Tests that HTML generation is awesome."""
import os.path, re, sys
import coverage
import coverage.html
from coverage.misc import CoverageException, NotPython, NoSource
from tests.coveragetest import CoverageTest
class HtmlTestHelpers(CoverageTest):
"""Methods that help with HTML tests."""
def create_initial_files(self):
"""Create the source files we need to run these tests."""
self.make_file("main_file.py", """\
import helper1, helper2
helper1.func1(12)
helper2.func2(12)
""")
self.make_file("helper1.py", """\
def func1(x):
if x % 2:
print("odd")
""")
self.make_file("helper2.py", """\
def func2(x):
print("x is %d" % x)
""")
def run_coverage(self, covargs=None, htmlargs=None):
"""Run coverage on main_file.py, and create an HTML report."""
self.clean_local_file_imports()
cov = coverage.coverage(**(covargs or {}))
self.start_import_stop(cov, "main_file")
cov.html_report(**(htmlargs or {}))
def remove_html_files(self):
"""Remove the HTML files created as part of the HTML report."""
os.remove("htmlcov/index.html")
os.remove("htmlcov/main_file.html")
os.remove("htmlcov/helper1.html")
os.remove("htmlcov/helper2.html")
class HtmlDeltaTest(HtmlTestHelpers, CoverageTest):
"""Tests of the HTML delta speed-ups."""
def setUp(self):
super(HtmlDeltaTest, self).setUp()
# At least one of our tests monkey-patches the version of coverage,
# so grab it here to restore it later.
self.real_coverage_version = coverage.__version__
def tearDown(self):
coverage.__version__ = self.real_coverage_version
super(HtmlDeltaTest, self).tearDown()
def test_html_created(self):
# Test basic HTML generation: files should be created.
self.create_initial_files()
self.run_coverage()
self.assert_exists("htmlcov/index.html")
self.assert_exists("htmlcov/main_file.html")
self.assert_exists("htmlcov/helper1.html")
self.assert_exists("htmlcov/helper2.html")
self.assert_exists("htmlcov/style.css")
self.assert_exists("htmlcov/coverage_html.js")
def test_html_delta_from_source_change(self):
# HTML generation can create only the files that have changed.
# In this case, helper1 changes because its source is different.
self.create_initial_files()
self.run_coverage()
index1 = open("htmlcov/index.html").read()
self.remove_html_files()
# Now change a file and do it again
self.make_file("helper1.py", """\
def func1(x): # A nice function
if x % 2:
print("odd")
""")
self.run_coverage()
# Only the changed files should have been created.
self.assert_exists("htmlcov/index.html")
self.assert_exists("htmlcov/helper1.html")
self.assert_doesnt_exist("htmlcov/main_file.html")
self.assert_doesnt_exist("htmlcov/helper2.html")
index2 = open("htmlcov/index.html").read()
self.assertMultiLineEqual(index1, index2)
def test_html_delta_from_coverage_change(self):
# HTML generation can create only the files that have changed.
# In this case, helper1 changes because its coverage is different.
self.create_initial_files()
self.run_coverage()
self.remove_html_files()
# Now change a file and do it again
self.make_file("main_file.py", """\
import helper1, helper2
helper1.func1(23)
helper2.func2(23)
""")
self.run_coverage()
# Only the changed files should have been created.
self.assert_exists("htmlcov/index.html")
self.assert_exists("htmlcov/helper1.html")
self.assert_exists("htmlcov/main_file.html")
self.assert_doesnt_exist("htmlcov/helper2.html")
def test_html_delta_from_settings_change(self):
# HTML generation can create only the files that have changed.
# In this case, everything changes because the coverage settings have
# changed.
self.create_initial_files()
self.run_coverage(covargs=dict(omit=[]))
index1 = open("htmlcov/index.html").read()
self.remove_html_files()
self.run_coverage(covargs=dict(omit=['xyzzy*']))
# All the files have been reported again.
self.assert_exists("htmlcov/index.html")
self.assert_exists("htmlcov/helper1.html")
self.assert_exists("htmlcov/main_file.html")
self.assert_exists("htmlcov/helper2.html")
index2 = open("htmlcov/index.html").read()
self.assertMultiLineEqual(index1, index2)
def test_html_delta_from_coverage_version_change(self):
# HTML generation can create only the files that have changed.
# In this case, everything changes because the coverage version has
# changed.
self.create_initial_files()
self.run_coverage()
index1 = open("htmlcov/index.html").read()
self.remove_html_files()
# "Upgrade" coverage.py!
coverage.__version__ = "XYZZY"
self.run_coverage()
# All the files have been reported again.
self.assert_exists("htmlcov/index.html")
self.assert_exists("htmlcov/helper1.html")
self.assert_exists("htmlcov/main_file.html")
self.assert_exists("htmlcov/helper2.html")
index2 = open("htmlcov/index.html").read()
fixed_index2 = index2.replace("XYZZY", self.real_coverage_version)
self.assertMultiLineEqual(index1, fixed_index2)
class HtmlTitleTest(HtmlTestHelpers, CoverageTest):
"""Tests of the HTML title support."""
def test_default_title(self):
self.create_initial_files()
self.run_coverage()
index = open("htmlcov/index.html").read()
self.assertIn("<title>Coverage report</title>", index)
self.assertIn("<h1>Coverage report:", index)
def test_title_set_in_config_file(self):
self.create_initial_files()
self.make_file(".coveragerc", "[html]\ntitle = Metrics & stuff!\n")
self.run_coverage()
index = open("htmlcov/index.html").read()
self.assertIn("<title>Metrics & stuff!</title>", index)
self.assertIn("<h1>Metrics & stuff!:", index)
if sys.version_info[:2] != (3,1):
def test_non_ascii_title_set_in_config_file(self):
self.create_initial_files()
self.make_file(".coveragerc",
"[html]\ntitle = «ταБЬℓσ» numbers"
)
self.run_coverage()
index = open("htmlcov/index.html").read()
self.assertIn(
"<title>«ταБЬℓσ»"
" numbers", index
)
self.assertIn(
"<h1>«ταБЬℓσ»"
" numbers", index
)
def test_title_set_in_args(self):
self.create_initial_files()
self.make_file(".coveragerc", "[html]\ntitle = Good title\n")
self.run_coverage(htmlargs=dict(title="«ταБЬℓσ» & stüff!"))
index = open("htmlcov/index.html").read()
self.assertIn(
"<title>«ταБЬℓσ»"
" & stüff!</title>", index
)
self.assertIn(
"<h1>«ταБЬℓσ»"
" & stüff!:", index
)
class HtmlWithUnparsableFilesTest(CoverageTest):
"""Test the behavior when measuring unparsable files."""
def test_dotpy_not_python(self):
self.make_file("innocuous.py", "a = 1")
cov = coverage.coverage()
self.start_import_stop(cov, "innocuous")
self.make_file("innocuous.py", "<h1>This isn't python!</h1>")
self.assertRaisesRegexp(
NotPython,
"Couldn't parse '.*innocuous.py' as Python source: '.*' at line 1",
cov.html_report
)
def test_dotpy_not_python_ignored(self):
self.make_file("innocuous.py", "a = 2")
cov = coverage.coverage()
self.start_import_stop(cov, "innocuous")
self.make_file("innocuous.py", "<h1>This isn't python!</h1>")
cov.html_report(ignore_errors=True)
self.assert_exists("htmlcov/index.html")
# this would be better as a glob, if the html layout changes:
self.assert_doesnt_exist("htmlcov/innocuous.html")
def test_dothtml_not_python(self):
# We run a .html file, and when reporting, we can't parse it as
# Python. Since it wasn't .py, no error is reported.
# Run an "html" file
self.make_file("innocuous.html", "a = 3")
self.run_command("coverage run innocuous.html")
# Before reporting, change it to be an HTML file.
self.make_file("innocuous.html", "<h1>This isn't python at all!</h1>")
output = self.run_command("coverage html")
self.assertEqual(output.strip(), "No data to report.")
def test_execed_liar_ignored(self):
# Jinja2 sets __file__ to be a non-Python file, and then execs code.
# If that file contains non-Python code, a TokenError shouldn't
# have been raised when writing the HTML report.
if sys.version_info < (3, 0):
source = "exec compile('','','exec') in {'__file__': 'liar.html'}"
else:
source = "exec(compile('','','exec'), {'__file__': 'liar.html'})"
self.make_file("liar.py", source)
self.make_file("liar.html", "{# Whoops, not python code #}")
cov = coverage.coverage()
self.start_import_stop(cov, "liar")
cov.html_report()
self.assert_exists("htmlcov/index.html")
def test_execed_liar_ignored_indentation_error(self):
# Jinja2 sets __file__ to be a non-Python file, and then execs code.
# If that file contains untokenizable code, we shouldn't get an
# exception.
if sys.version_info < (3, 0):
source = "exec compile('','','exec') in {'__file__': 'liar.html'}"
else:
source = "exec(compile('','','exec'), {'__file__': 'liar.html'})"
self.make_file("liar.py", source)
# Tokenize will raise an IndentationError if it can't dedent.
self.make_file("liar.html", "0\n 2\n 1\n")
cov = coverage.coverage()
self.start_import_stop(cov, "liar")
cov.html_report()
self.assert_exists("htmlcov/index.html")
class HtmlTest(CoverageTest):
"""Moar HTML tests."""
def test_missing_source_file_incorrect_message(self):
# https://bitbucket.org/ned/coveragepy/issue/60
self.make_file("thefile.py", "import sub.another\n")
self.make_file("sub/__init__.py", "")
self.make_file("sub/another.py", "print('another')\n")
cov = coverage.coverage()
self.start_import_stop(cov, 'thefile')
os.remove("sub/another.py")
missing_file = os.path.join(self.temp_dir, "sub", "another.py")
missing_file = os.path.realpath(missing_file)
self.assertRaisesRegexp(
NoSource,
"(?i)No source for code: '%s'" % re.escape(missing_file),
cov.html_report
)
class HtmlStaticFileTest(CoverageTest):
"""Tests of the static file copying for the HTML report."""
def setUp(self):
super(HtmlStaticFileTest, self).setUp()
self.original_path = list(coverage.html.STATIC_PATH)
def tearDown(self):
coverage.html.STATIC_PATH = self.original_path
super(HtmlStaticFileTest, self).tearDown()
def test_copying_static_files_from_system(self):
# Make a new place for static files.
self.make_file("static_here/jquery.min.js", "Not Really JQuery!")
coverage.html.STATIC_PATH.insert(0, "static_here")
self.make_file("main.py", "print(17)")
cov = coverage.coverage()
self.start_import_stop(cov, "main")
cov.html_report()
jquery = open("htmlcov/jquery.min.js").read()
self.assertEqual(jquery, "Not Really JQuery!")
def test_copying_static_files_from_system_in_dir(self):
# Make a new place for static files.
INSTALLED = [
"jquery/jquery.min.js",
"jquery-hotkeys/jquery.hotkeys.js",
"jquery-isonscreen/jquery.isonscreen.js",
"jquery-tablesorter/jquery.tablesorter.min.js",
]
for fpath in INSTALLED:
self.make_file(os.path.join("static_here", fpath), "Not real.")
coverage.html.STATIC_PATH.insert(0, "static_here")
self.make_file("main.py", "print(17)")
cov = coverage.coverage()
self.start_import_stop(cov, "main")
cov.html_report()
for fpath in INSTALLED:
the_file = os.path.basename(fpath)
contents = open(os.path.join("htmlcov", the_file)).read()
self.assertEqual(contents, "Not real.")
def test_cant_find_static_files(self):
# Make the path point to useless places.
coverage.html.STATIC_PATH = ["/xyzzy"]
self.make_file("main.py", "print(17)")
cov = coverage.coverage()
self.start_import_stop(cov, "main")
self.assertRaisesRegexp(
CoverageException, "Couldn't find static file '.*'",
cov.html_report
)
| {
"content_hash": "5516bd3e4d617fc0f4777bc5e6a2935a",
"timestamp": "",
"source": "github",
"line_count": 355,
"max_line_length": 79,
"avg_line_length": 38.53521126760563,
"alnum_prop": 0.5942251461988304,
"repo_name": "I-Valchev/UrPas",
"id": "06132fb46514e85a162ea9eea93066ad387dc795",
"size": "13723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "coverage-3.7.1/tests/test_html.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "23118"
},
{
"name": "CSS",
"bytes": "19984"
},
{
"name": "HTML",
"bytes": "201505"
},
{
"name": "JavaScript",
"bytes": "75039"
},
{
"name": "Makefile",
"bytes": "122"
},
{
"name": "Python",
"bytes": "556532"
}
],
"symlink_target": ""
} |
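Outside the test harness, the pattern `run_coverage` wraps is just the plain programmatic API. A standalone sketch follows; the module name is a placeholder, and `directory`/`title` are the same keyword arguments the tests pass via `htmlargs`.

import coverage

cov = coverage.coverage()
cov.start()
import main_file  # the module under measurement (hypothetical)
cov.stop()
cov.save()
cov.html_report(directory="htmlcov", title="Coverage report")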
import discord
from discord.ext import commands
import asyncio
import html
import logging
import sys
import traceback
from more_itertools import chunked
import tweepy
import tweepy.asynchronous
from utilities import checks
errors_logger = logging.getLogger("errors")
async def setup(bot):
await bot.add_cog(Twitter(bot))
class Twitter(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.blacklisted_handles = []
self.stream = TwitterStream(bot)
async def cog_load(self):
# Initialize database
await self.bot.connect_to_database()
await self.bot.db.execute("CREATE SCHEMA IF NOT EXISTS twitter")
await self.bot.db.execute(
"""
CREATE TABLE IF NOT EXISTS twitter.handles (
channel_id BIGINT,
handle TEXT,
replies BOOL,
retweets BOOL,
PRIMARY KEY (channel_id, handle)
)
"""
)
# Initialize blacklist
try:
response = await self.bot.twitter_client.get_me(
user_fields = ["protected"]
)
account = response.data
if account.protected:
self.blacklisted_handles.append(
account.username.lower()
)
# TODO: Handle more than 1000 friends/following
response = await self.bot.twitter_client.get_users_following(
account.id, max_results = 1000, user_fields = ["protected"],
user_auth = True
)
following = response.data
for friend in following:
if friend.protected:
self.blacklisted_handles.append(friend.username.lower())
except (AttributeError, tweepy.TweepyException) as e:
self.bot.print(f"Failed to initialize Twitter cog blacklist: {e}")
# Start stream
self.task = self.bot.loop.create_task(
self.start_stream(), name = "Start Twitter Stream"
)
def cog_unload(self):
if self.stream:
self.stream.disconnect()
self.task.cancel()
@commands.group(invoke_without_command = True, case_insensitive = True)
@checks.not_forbidden()
async def twitter(self, ctx):
'''Twitter'''
await ctx.send_help(ctx.command)
@twitter.command(name = "status")
@checks.not_forbidden()
async def twitter_status(
self, ctx, handle: str, replies: bool = False, retweets: bool = False
):
'''
Show a Twitter user's most recent Tweet
Excludes replies and retweets by default
Limited to 3200 most recent Tweets
'''
        if handle.lower().lstrip('@') in self.blacklisted_handles:
await ctx.embed_reply(f"{ctx.bot.error_emoji} Error: Unauthorized")
return
tweet = None
try:
for status in tweepy.Cursor(
self.bot.twitter_api.user_timeline,
screen_name = handle,
count = 200,
exclude_replies = not replies,
include_rts = retweets,
tweet_mode = "extended"
).items():
tweet = status
break
except tweepy.NotFound:
await ctx.embed_reply(
f"{ctx.bot.error_emoji} Error: @{handle} not found"
)
return
except tweepy.TweepyException as e:
await ctx.embed_reply(f"{ctx.bot.error_emoji} Error: {e}")
return
if not tweet:
await ctx.embed_reply(
f"{ctx.bot.error_emoji} Error: Status not found"
)
return
image_url = None
text = process_tweet_text(tweet.full_text, tweet.entities)
if (
hasattr(tweet, "extended_entities") and
tweet.extended_entities["media"][0]["type"] == "photo"
):
image_url = tweet.extended_entities["media"][0]["media_url_https"]
text = text.replace(tweet.extended_entities["media"][0]["url"], "")
await ctx.embed_reply(
color = self.bot.twitter_color,
title = '@' + tweet.user.screen_name,
title_url = f"https://twitter.com/{tweet.user.screen_name}/status/{tweet.id}",
description = text,
image_url = image_url,
footer_icon_url = tweet.user.profile_image_url,
footer_text = tweet.user.name,
timestamp = tweet.created_at
)
@twitter.command(name = "add", aliases = ["addhandle", "handleadd"])
@commands.check_any(checks.is_permitted(), checks.is_guild_owner())
async def twitter_add(self, ctx, handle: str):
'''
Add a Twitter handle to a text channel
A delay of up to 2 min. is possible due to Twitter rate limits
'''
handle = handle.lstrip('@')
following = await ctx.bot.db.fetchval(
"""
SELECT EXISTS (
SELECT FROM twitter.handles
WHERE channel_id = $1 AND handle = $2
)
""",
ctx.channel.id, handle
)
if following:
return await ctx.embed_reply(":no_entry: This text channel is already following that Twitter handle")
message = await ctx.embed_reply(":hourglass: Please wait")
embed = message.embeds[0]
try:
await self.stream.add_feed(ctx.channel, handle)
except tweepy.TweepyException as e:
embed.description = f":no_entry: Error: {e}"
return await message.edit(embed = embed)
await ctx.bot.db.execute(
"""
INSERT INTO twitter.handles (channel_id, handle)
VALUES ($1, $2)
""",
ctx.channel.id, handle
)
embed.description = f"Added the Twitter handle, [`{handle}`](https://twitter.com/{handle}), to this text channel"
await message.edit(embed = embed)
@twitter.command(name = "remove", aliases = ["delete", "removehandle", "handleremove", "deletehandle", "handledelete"])
@commands.check_any(checks.is_permitted(), checks.is_guild_owner())
async def twitter_remove(self, ctx, handle: str):
'''
Remove a Twitter handle from a text channel
A delay of up to 2 min. is possible due to Twitter rate limits
'''
handle = handle.lstrip('@')
deleted = await ctx.bot.db.fetchval(
"""
DELETE FROM twitter.handles
WHERE channel_id = $1 AND handle = $2
RETURNING *
""",
ctx.channel.id, handle
)
if not deleted:
return await ctx.embed_reply(":no_entry: This text channel isn't following that Twitter handle")
message = await ctx.embed_reply(":hourglass: Please wait")
await self.stream.remove_feed(ctx.channel, handle)
embed = message.embeds[0]
embed.description = f"Removed the Twitter handle, [`{handle}`](https://twitter.com/{handle}), from this text channel."
await message.edit(embed = embed)
@twitter.command(aliases = ["handle", "feeds", "feed", "list"])
@checks.not_forbidden()
async def handles(self, ctx):
'''Show Twitter handles being followed in a text channel'''
records = await ctx.bot.db.fetch(
"""
SELECT handle FROM twitter.handles
WHERE channel_id = $1
""",
ctx.channel.id
)
await ctx.embed_reply(
'\n'.join(sorted(
[record["handle"] for record in records],
key = str.casefold
))
)
# TODO: Add message if none
async def start_stream(self):
await self.bot.wait_until_ready()
try:
records = await self.bot.db.fetch("SELECT * FROM twitter.handles")
usernames = {}
for record in records:
usernames[record["handle"].lower()] = (
usernames.get(record["handle"].lower(), []) +
[record["channel_id"]]
)
user_ids = {}
for usernames_chunk in chunked(usernames, 100):
response = await self.bot.twitter_client.get_users(
usernames = usernames_chunk
)
for user in response.data:
user_ids[user.id] = usernames[user.username.lower()]
await self.stream.start_feeds(user_ids = user_ids)
except Exception as e:
print("Exception in Twitter Task", file = sys.stderr)
traceback.print_exception(
type(e), e, e.__traceback__, file = sys.stderr
)
errors_logger.error(
"Uncaught Twitter Task exception\n",
exc_info = (type(e), e, e.__traceback__)
)
return
def process_tweet_text(text, entities):
mentions = {}
for mention in entities["user_mentions"]:
mentions[text[mention["indices"][0]:mention["indices"][1]]] = (
mention["screen_name"]
)
for mention, screen_name in mentions.items():
text = text.replace(
mention,
f"[{mention}](https://twitter.com/{screen_name})"
)
for hashtag in entities["hashtags"]:
text = text.replace(
'#' + hashtag["text"],
f"[#{hashtag['text']}](https://twitter.com/hashtag/{hashtag['text']})"
)
for symbol in entities["symbols"]:
text = text.replace(
'$' + symbol["text"],
f"[${symbol['text']}](https://twitter.com/search?q=${symbol['text']})"
)
for url in entities["urls"]:
text = text.replace(url["url"], url["expanded_url"])
# Remove Variation Selector-16 characters
# Unescape HTML entities (>, <, &, etc.)
return html.unescape(text.replace('\uFE0F', ""))
class TwitterStream(tweepy.asynchronous.AsyncStream):
def __init__(self, bot):
super().__init__(
bot.TWITTER_CONSUMER_KEY, bot.TWITTER_CONSUMER_SECRET,
bot.TWITTER_ACCESS_TOKEN, bot.TWITTER_ACCESS_TOKEN_SECRET
)
self.bot = bot
self.user_ids = {}
self.reconnect_ready = asyncio.Event()
self.reconnect_ready.set()
self.reconnecting = False
async def start_feeds(self, *, user_ids = None):
if self.reconnecting:
return await self.reconnect_ready.wait()
self.reconnecting = True
await self.reconnect_ready.wait()
self.reconnect_ready.clear()
if user_ids:
self.user_ids = user_ids
if self.task:
self.disconnect()
await self.task
if self.user_ids:
self.filter(follow = self.user_ids)
self.bot.loop.call_later(120, self.reconnect_ready.set)
self.reconnecting = False
async def add_feed(self, channel, handle):
response = await self.bot.twitter_client.get_user(username = handle)
user_id = response.data.id
if channels := self.user_ids.get(user_id):
channels.append(channel.id)
else:
self.user_ids[user_id] = [channel.id]
await self.start_feeds()
async def remove_feed(self, channel, handle):
response = await self.bot.twitter_client.get_user(username = handle)
user_id = response.data.id
channel_ids = self.user_ids[user_id]
channel_ids.remove(channel.id)
if not channel_ids:
del self.user_ids[user_id]
await self.start_feeds() # Necessary?
async def on_status(self, status):
# Ignore replies
if status.in_reply_to_status_id:
return
# TODO: Settings for including replies, retweets, etc.
for channel_id in self.user_ids.get(status.user.id, ()):
channel = self.bot.get_channel(channel_id)
if not channel:
# TODO: Handle channel no longer accessible
continue
if hasattr(status, "extended_tweet"):
text = status.extended_tweet["full_text"]
entities = status.extended_tweet["entities"]
extended_entities = status.extended_tweet.get(
"extended_entities"
)
else:
text = status.text
entities = status.entities
extended_entities = getattr(status, "extended_entities", None)
embed = discord.Embed(
color = self.bot.twitter_color,
title = '@' + status.user.screen_name,
url = f"https://twitter.com/{status.user.screen_name}/status/{status.id}",
description = process_tweet_text(text, entities),
timestamp = status.created_at,
)
embed.set_author(
name = status.user.name,
icon_url = status.user.profile_image_url
)
if (
extended_entities and
extended_entities["media"][0]["type"] == "photo"
):
embed.set_image(
url = extended_entities["media"][0]["media_url_https"]
)
embed.description = embed.description.replace(
extended_entities["media"][0]["url"], ""
)
embed.set_footer(
icon_url = self.bot.twitter_icon_url,
text = "Twitter"
)
try:
await channel.send(embed = embed)
except discord.Forbidden:
# TODO: Handle unable to send embeds/messages in text channel
self.bot.print(
"Twitter Stream: Missing permissions to send embed in "
f"#{channel.name} in {channel.guild.name}"
)
except discord.DiscordServerError as e:
self.bot.print(f"Twitter Stream Discord Server Error: {e}")
async def on_request_error(self, status_code):
self.bot.print(f"Twitter Error: {status_code}")
| {
"content_hash": "277ec56d93e0beb46f4cda25ba21914d",
"timestamp": "",
"source": "github",
"line_count": 385,
"max_line_length": 120,
"avg_line_length": 30.05974025974026,
"alnum_prop": 0.6732048734122527,
"repo_name": "Harmon758/Harmonbot",
"id": "55a7faff23eacfb724f7f11548bf70656f7c0be7",
"size": "11574",
"binary": false,
"copies": "1",
"ref": "refs/heads/rewrite",
"path": "Discord/cogs/twitter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "483"
},
{
"name": "Go",
"bytes": "4664"
},
{
"name": "Python",
"bytes": "974320"
},
{
"name": "mIRC Script",
"bytes": "127551"
}
],
"symlink_target": ""
} |
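To make `process_tweet_text` concrete, here is a self-contained check using a hand-built entities dict shaped like a Twitter API v1.1 payload; all handles, hashtags, and URLs are invented for illustration.

text = "Hello @example #news https://t.co/abc123"
entities = {
    "user_mentions": [{"screen_name": "example", "indices": [6, 14]}],
    "hashtags": [{"text": "news"}],
    "symbols": [],
    "urls": [{"url": "https://t.co/abc123",
              "expanded_url": "https://example.com/article"}],
}
print(process_tweet_text(text, entities))
# Hello [@example](https://twitter.com/example) [#news](https://twitter.com/hashtag/news) https://example.com/article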
"""
======================
GeoIP2 Database Reader
======================
"""
import inspect
import geoip2
import geoip2.models
import geoip2.errors
import maxminddb
# pylint: disable=unused-import
from maxminddb import (MODE_AUTO, MODE_MMAP, MODE_MMAP_EXT, MODE_FILE,
MODE_MEMORY)
class Reader(object):
"""GeoIP2 database Reader object.
Instances of this class provide a reader for the GeoIP2 database format.
IP addresses can be looked up using the ``country`` and ``city`` methods.
The basic API for this class is the same for every database. First, you
create a reader object, specifying a file name. You then call the method
corresponding to the specific database, passing it the IP address you want
to look up.
If the request succeeds, the method call will return a model class for the
method you called. This model in turn contains multiple record classes,
each of which represents part of the data returned by the database. If the
database does not contain the requested information, the attributes on the
record class will have a ``None`` value.
If the address is not in the database, an
``geoip2.errors.AddressNotFoundError`` exception will be thrown. If the
database is corrupt or invalid, a ``maxminddb.InvalidDatabaseError`` will
be thrown.
"""
def __init__(self, filename, locales=None, mode=MODE_AUTO):
"""Create GeoIP2 Reader
:param filename: The path to the GeoIP2 database.
:param locales: This is list of locale codes. This argument will be
passed on to record classes to use when their name properties are
called. The default value is ['en'].
The order of the locales is significant. When a record class has
multiple names (country, city, etc.), its name property will return
the name in the first locale that has one.
Note that the only locale which is always present in the GeoIP2
data is "en". If you do not include this locale, the name property
may end up returning None even when the record has an English name.
Currently, the valid locale codes are:
* de -- German
* en -- English names may still include accented characters if that
is the accepted spelling in English. In other words, English does
not mean ASCII.
* es -- Spanish
* fr -- French
* ja -- Japanese
* pt-BR -- Brazilian Portuguese
* ru -- Russian
* zh-CN -- Simplified Chinese.
        :param mode: The mode to open the database with. Valid modes are:
* MODE_MMAP_EXT - use the C extension with memory map.
* MODE_MMAP - read from memory map. Pure Python.
* MODE_FILE - read database as standard file. Pure Python.
* MODE_MEMORY - load database into memory. Pure Python.
* MODE_AUTO - try MODE_MMAP_EXT, MODE_MMAP, MODE_FILE in that order.
Default.
"""
if locales is None:
locales = ['en']
self._db_reader = maxminddb.open_database(filename, mode)
self._locales = locales
def country(self, ip_address):
"""Get the Country object for the IP address
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.Country` object
"""
return self._model_for(geoip2.models.Country, 'Country', ip_address)
def city(self, ip_address):
"""Get the City object for the IP address
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.City` object
"""
return self._model_for(geoip2.models.City, 'City', ip_address)
def anonymous_ip(self, ip_address):
"""Get the AnonymousIP object for the IP address
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.AnonymousIP` object
"""
return self._flat_model_for(geoip2.models.AnonymousIP,
'GeoIP2-Anonymous-IP',
ip_address)
def connection_type(self, ip_address):
"""Get the ConnectionType object for the IP address
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.ConnectionType` object
"""
return self._flat_model_for(geoip2.models.ConnectionType,
'GeoIP2-Connection-Type',
ip_address)
def domain(self, ip_address):
"""Get the Domain object for the IP address
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.Domain` object
"""
return self._flat_model_for(geoip2.models.Domain,
'GeoIP2-Domain',
ip_address)
def isp(self, ip_address):
"""Get the ISP object for the IP address
:param ip_address: IPv4 or IPv6 address as a string.
:returns: :py:class:`geoip2.models.ISP` object
"""
return self._flat_model_for(geoip2.models.ISP,
'GeoIP2-ISP',
ip_address)
def _get(self, database_type, ip_address):
if database_type not in self.metadata().database_type:
caller = inspect.stack()[2][3]
raise TypeError("The %s method cannot be used with the "
"%s database" %
(caller, self.metadata().database_type))
record = self._db_reader.get(ip_address)
if record is None:
raise geoip2.errors.AddressNotFoundError(
"The address %s is not in the database." % ip_address)
return record
def _model_for(self, model_class, types, ip_address):
record = self._get(types, ip_address)
record.setdefault('traits', {})['ip_address'] = ip_address
return model_class(record, locales=self._locales)
def _flat_model_for(self, model_class, types, ip_address):
record = self._get(types, ip_address)
record['ip_address'] = ip_address
return model_class(record)
def metadata(self):
"""The metadata for the open database
:returns: :py:class:`maxminddb.reader.Metadata` object
"""
return self._db_reader.metadata()
def close(self):
"""Closes the GeoIP2 database"""
self._db_reader.close()
| {
"content_hash": "95d77febc2c7d65d222766dfcbaa5921",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 78,
"avg_line_length": 35.3903743315508,
"alnum_prop": 0.6030522816560895,
"repo_name": "simudream/GeoIP2-python",
"id": "18fe1e58365224d20314be46ebeeae9f73f8b274",
"size": "6618",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geoip2/database.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "75581"
}
],
"symlink_target": ""
} |
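A usage sketch for the Reader above, following the flow its class docstring describes; the database path and IP address are placeholders.

reader = Reader('/path/to/GeoLite2-City.mmdb', locales=['en'])
try:
    response = reader.city('203.0.113.7')
    print(response.country.iso_code, response.city.name)
except geoip2.errors.AddressNotFoundError:
    print('address not in the database')
finally:
    reader.close()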
from functools import partial
from json import dumps, loads
from collections import defaultdict
from tornado.web import authenticated
from qiita_core.qiita_settings import qiita_config
from qiita_core.util import execute_as_transaction
from qiita_core.qiita_settings import r_client
from qiita_pet.handlers.base_handlers import BaseHandler
from qiita_pet.handlers.util import download_link_or_path
from qiita_pet.handlers.analysis_handlers import check_analysis_access
from qiita_pet.util import is_localhost
from qiita_db.util import generate_analysis_list
from qiita_db.analysis import Analysis
from qiita_db.processing_job import ProcessingJob
from qiita_db.software import Parameters
from qiita_db.artifact import Artifact
from qiita_db.software import Software
class ListAnalysesHandler(BaseHandler):
@authenticated
@execute_as_transaction
def get(self):
user = self.current_user
is_local_request = is_localhost(self.request.headers['host'])
uanalyses = user.shared_analyses | user.private_analyses
user_analysis_ids = set([a.id for a in uanalyses])
panalyses = Analysis.get_by_status('public')
public_analysis_ids = set([a.id for a in panalyses])
public_analysis_ids = public_analysis_ids - user_analysis_ids
user_analyses = generate_analysis_list(user_analysis_ids)
public_analyses = generate_analysis_list(public_analysis_ids, True)
dlop = partial(download_link_or_path, is_local_request)
messages = {'info': '', 'danger': ''}
for analysis_id in user_analysis_ids:
job_info = r_client.get('analysis_delete_%d' % analysis_id)
if job_info:
job_info = defaultdict(lambda: '', loads(job_info))
job_id = job_info['job_id']
job = ProcessingJob(job_id)
job_status = job.status
processing = job_status not in ('success', 'error')
if processing:
messages['info'] += (
'Analysis %s is being deleted<br/>' % analysis_id)
elif job_status == 'error':
messages['danger'] += (
job.log.msg.replace('\n', '<br/>') + '<br/>')
else:
if job_info['alert_type'] not in messages:
messages[job_info['alert_type']] = []
messages[job_info['alert_type']] += (
job.log.msg.replace('\n', '<br/>') + '<br/>')
self.render("list_analyses.html", user_analyses=user_analyses,
public_analyses=public_analyses, messages=messages,
dlop=dlop)
@authenticated
@execute_as_transaction
def post(self):
analysis_id = int(self.get_argument('analysis_id'))
user = self.current_user
check_analysis_access(user, Analysis(analysis_id))
qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
cmd = qiita_plugin.get_command('delete_analysis')
params = Parameters.load(cmd, values_dict={'analysis_id': analysis_id})
job = ProcessingJob.create(user, params, True)
        # Store the job id, attaching it to the analysis id
r_client.set('analysis_delete_%d' % analysis_id,
dumps({'job_id': job.id}))
job.submit()
self.redirect("%s/analysis/list/" % (qiita_config.portal_dir))
class AnalysisSummaryAJAX(BaseHandler):
@authenticated
@execute_as_transaction
def get(self):
info = self.current_user.default_analysis.summary_data()
self.write(dumps(info))
class SelectedSamplesHandler(BaseHandler):
@authenticated
@execute_as_transaction
def get(self):
# Format sel_data to get study IDs for the processed data
sel_data = defaultdict(dict)
proc_data_info = {}
sel_samps = self.current_user.default_analysis.samples
for aid, samples in sel_samps.items():
artifact = Artifact(aid)
sel_data[artifact.study][aid] = samples
proc_data_info[aid] = {
'processed_date': str(artifact.timestamp),
'merging_scheme': artifact.merging_scheme,
'data_type': artifact.data_type
}
self.render("analysis_selected.html", sel_data=sel_data,
proc_info=proc_data_info)
| {
"content_hash": "1a414e70f72a835abceff2004cecb73f",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 79,
"avg_line_length": 39.57142857142857,
"alnum_prop": 0.6191335740072202,
"repo_name": "ElDeveloper/qiita",
"id": "c92b3af0ce0593a75923521ef1550a44bcd0e4a4",
"size": "4783",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "qiita_pet/handlers/analysis_handlers/listing_handlers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2353"
},
{
"name": "HTML",
"bytes": "548553"
},
{
"name": "JavaScript",
"bytes": "83566"
},
{
"name": "Makefile",
"bytes": "6838"
},
{
"name": "PLpgSQL",
"bytes": "84815"
},
{
"name": "Python",
"bytes": "2293282"
},
{
"name": "SQLPL",
"bytes": "7501"
},
{
"name": "Shell",
"bytes": "3180"
}
],
"symlink_target": ""
} |
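One idiom in the handler above is worth isolating: wrapping the decoded JSON in `defaultdict(lambda: '', ...)` lets optional keys such as `alert_type` read as empty strings instead of raising `KeyError`. A minimal illustration with invented values:

from collections import defaultdict

job_info = defaultdict(lambda: '', {'job_id': '42'})
job_info['job_id']      # '42'
job_info['alert_type']  # '' -- missing keys fall back to the empty string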
__author__ = 'ksen'
import os
from mutagen.easyid3 import EasyID3
def update():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('arguments', metavar='args', type=str, nargs=2,
                        help='1. Path to music directory, 2. Album name to set on the files')
args = parser.parse_args().arguments
path = args[0]
fileList = os.listdir(path)
for item in fileList:
        audio = EasyID3(os.path.join(path, item))
audio["album"] = args[1]
audio.save()
print 'done!'
update() | {
"content_hash": "e93f95370bb1dabad019e933801cde77",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 89,
"avg_line_length": 25.818181818181817,
"alnum_prop": 0.5845070422535211,
"repo_name": "HrundelB/radio-jam",
"id": "5f8c9bebb8dcae52bce348162d105fdd49a146a4",
"size": "613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/Updater.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "84714"
},
{
"name": "Perl",
"bytes": "7880"
},
{
"name": "Perl 6",
"bytes": "5276"
},
{
"name": "Python",
"bytes": "613"
},
{
"name": "Shell",
"bytes": "6001"
}
],
"symlink_target": ""
} |
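The core mutagen pattern the script relies on, shown in isolation; the filename and album name are hypothetical. EasyID3 exposes friendly keys ('album', 'artist', ...) that map onto raw ID3 frames such as TALB.

from mutagen.easyid3 import EasyID3

audio = EasyID3('song.mp3')
audio['album'] = 'Live at the Venue'
audio.save()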
from .base import AppTemplate
class BalancedDocs(AppTemplate):
"""Balanced docs"""
ENV = 'misc'
CHEF_RECIPE = 'balanced-docs'
STACK_TAG = 'docs'
PUBLIC = True
def elb(self):
"""Load balancer for BalancedDocs."""
elb = super(BalancedDocs, self).elb()
elb['HealthUrl'] = '/__health__'
elb['SSLCertificateId'] = 'balancedpayments-ev-2014'
return elb
if __name__ == '__main__':
print(BalancedDocs().to_json())
| {
"content_hash": "b5646a5c789be25fc33c2663b568c60e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 60,
"avg_line_length": 22.857142857142858,
"alnum_prop": 0.5854166666666667,
"repo_name": "coderanger/brix",
"id": "d76be34a3f6597a596ee189792a068b158ab5eee",
"size": "1111",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "templates/balanced_docs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "50757"
},
{
"name": "Ruby",
"bytes": "218"
},
{
"name": "Shell",
"bytes": "3788"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division, unicode_literals, absolute_import
import os.path as op
def get_flirt_schedule(name):
if name == 'ecc':
return op.abspath(op.join(op.dirname(__file__),
'ecc.sch'))
elif name == 'hmc':
return op.abspath(op.join(op.dirname(__file__),
'hmc.sch'))
else:
raise RuntimeError('Requested file does not exist.')
| {
"content_hash": "2c32860cf443a4a99457c654489f46c8",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 82,
"avg_line_length": 31.571428571428573,
"alnum_prop": 0.5678733031674208,
"repo_name": "carolFrohlich/nipype",
"id": "390dbe81f393ec3c0a313d29d644c0dca39204c6",
"size": "596",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "nipype/workflows/data/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "2320"
},
{
"name": "Matlab",
"bytes": "1717"
},
{
"name": "Python",
"bytes": "5451077"
},
{
"name": "Shell",
"bytes": "3302"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
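Usage is a one-liner: the helper resolves a packaged FSL FLIRT schedule to an absolute path, assuming the `.sch` files ship next to the module as the metadata path suggests.

from nipype.workflows.data import get_flirt_schedule

ecc_schedule = get_flirt_schedule('ecc')  # absolute path to ecc.sch
get_flirt_schedule('other')               # raises RuntimeError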
from tools.load import LoadMatrix
from sg import sg
lm=LoadMatrix()
traindna=lm.load_dna('../data/fm_train_dna.dat')
testdna=lm.load_dna('../data/fm_test_dna.dat')
trainlabel=lm.load_labels('../data/label_train_dna.dat')
parameter_list=[[traindna,testdna,trainlabel,10,3,0,'n',False,'FULL'],
[traindna,testdna,trainlabel,11,4,0,'n',False,'FULL']]
def kernel_weightedcommwordstring (fm_train_dna=traindna,fm_test_dna=testdna,
label_train_dna=trainlabel,size_cache=10,
order=3,gap=0,reverse='n',use_sign=False,
normalization='FULL'):
sg('add_preproc', 'SORTWORDSTRING')
sg('set_features', 'TRAIN', fm_train_dna, 'DNA')
sg('convert', 'TRAIN', 'STRING', 'CHAR', 'STRING', 'WORD', order, order-1, gap, reverse)
sg('attach_preproc', 'TRAIN')
sg('set_features', 'TEST', fm_test_dna, 'DNA')
sg('convert', 'TEST', 'STRING', 'CHAR', 'STRING', 'WORD', order, order-1, gap, reverse)
sg('attach_preproc', 'TEST')
sg('set_kernel', 'WEIGHTEDCOMMSTRING', 'WORD', size_cache, use_sign, normalization)
km=sg('get_kernel_matrix', 'TRAIN')
km=sg('get_kernel_matrix', 'TEST')
return km
if __name__=='__main__':
print('WeightedCommWordString')
kernel_weightedcommwordstring(*parameter_list[0])
| {
"content_hash": "bfb7f7e9f6c7453583650cd770982fbe",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 89,
"avg_line_length": 37.9375,
"alnum_prop": 0.6836902800658978,
"repo_name": "Saurabh7/shogun",
"id": "d8c4cd45b3d983715edb2a5f13d3de6ef69a4b25",
"size": "1214",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "examples/undocumented/python_static/kernel_weightedcommwordstring.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "104870"
},
{
"name": "C++",
"bytes": "11435353"
},
{
"name": "CMake",
"bytes": "213091"
},
{
"name": "Lua",
"bytes": "1204"
},
{
"name": "M",
"bytes": "10020"
},
{
"name": "Makefile",
"bytes": "452"
},
{
"name": "Matlab",
"bytes": "66047"
},
{
"name": "Perl",
"bytes": "31939"
},
{
"name": "Perl6",
"bytes": "15714"
},
{
"name": "Protocol Buffer",
"bytes": "1476"
},
{
"name": "Python",
"bytes": "431160"
},
{
"name": "R",
"bytes": "53362"
},
{
"name": "Ruby",
"bytes": "59"
},
{
"name": "Shell",
"bytes": "17074"
}
],
"symlink_target": ""
} |
from flask import Flask, render_template, request, jsonify
from flask_restful import Resource, Api
from random import choice
import itertools
import random
import numpy
import scipy.stats  # used by kl_divergence below
import json
import pandas
import pickle
import re
# SERVER CONFIGURATION ##############################################
class BodyMapServer(Flask):
def __init__(self, *args, **kwargs):
super(BodyMapServer, self).__init__(*args, **kwargs)
# Date range to start with (loaded data)
self.start_date = "3/20/2010"
self.end_date = "5/20/2012"
# load data on start of application
self.bodymap = "".join(open("data/bodymap.svg","r").readlines())
self.labels = json.load(open("data/simpleFMA.json","r"))
self.deaths = json.load(open("data/injuries_index.json","r"))
self.fatalities = pandas.read_csv("data/fatalities_all.tsv",sep="\t",index_col=1)
self.fatalities = self.fatalities.drop(["LATITUDE","LONGITUDE","LOCATION_RAW",
"LATITUDE_EPSG3857","LONGITUDE_EPSG3857",
"LOCATION_IMPORTANCE","ALTITUDE"],axis=1)
self.fatalities = self.fatalities.rename(index=str, columns={"Unnamed: 0":"id"})
# This takes a bit of computation and should be run at startup
self.fatalities['INCIDENT_DATETIME'] = pandas.to_datetime(self.fatalities["INCIDENT_DATE"])
self.wordcounts = pandas.read_csv("data/wordcounts.tsv",sep="\t",index_col=0)
# generate list of descriptions associated with ids
self.descriptions = self.fatalities.DESCRIPTION.copy()
self.descriptions.index = self.fatalities.id.tolist()
self.descriptions = self.descriptions.to_dict()
app = BodyMapServer(__name__)
api = Api(app)
# API VIEWS ##########################################################################################
class apiIndex(Resource):
"""apiIndex
Main view for REST API to display all available fatalities data
"""
def get(self):
fatalities_json = app.fatalities.to_dict(orient="records")
fatalities_json = parse_unicode(fatalities_json)
return fatalities_json
class apiQueryDates(Resource):
"""apiQueryDates
return a list of ids for points that are within a range of dates
/api/dates/0/6/2010/0/6/2010
"""
def get(self, sm, sd, sy, em, ed, ey):
start_date = pandas.to_datetime("%s/%s/%s" %(sm,sd,sy))
end_date = pandas.to_datetime("%s/%s/%s" %(em,ed,ey))
subset = app.fatalities[app.fatalities.INCIDENT_DATETIME >= start_date]
subset = subset[subset.INCIDENT_DATETIME <= end_date]
# Also return wordcounts, sorted list of words with rates most different
# from population baseline. Ideal here would be KL divergence, but too slow
sample = app.wordcounts.loc[subset.index]
# Only assess those terms that have changed
sample_rates = sample.sum() / sample.shape[1]
population_rates = app.wordcounts.sum() / app.wordcounts.shape[1]
difference = sample_rates - population_rates
difference = difference.abs()
difference.sort_values(ascending=False,inplace=True)
# Return those 1 std over mean (of those that have changed)
difference=difference[difference!=0]
thresh = difference.mean() + difference.std()
difference = difference[difference>thresh]
return {"ids": subset.id.tolist(),
"words":difference.index.tolist()}
# Add all resources
api.add_resource(apiIndex,'/api/deaths') # start month, day, year / end month, date, year
api.add_resource(apiQueryDates,'/api/dates/<int:sm>/<int:sd>/<int:sy>/<int:em>/<int:ed>/<int:ey>')
# Global variables and functions #####################################################################
def parse_unicode(meta):
    '''parse_unicode: decodes from ISO-8859-1 and encodes to ascii, dropping characters that cannot be mapped.
:param meta: a dictionary or list of dictionaries to parse
'''
if not isinstance(meta,list):
meta = [meta]
parsed = []
for entry in meta:
new_entry = dict()
for key,value in entry.iteritems():
if isinstance(value,int) or isinstance(value,float):
new_entry[key] = value
else:
new_entry[key] = unicode(value,"ISO-8859-1").encode('ascii',errors='replace').replace('?','')
parsed.append(new_entry)
return parsed
def random_colors(concepts):
'''Generate N random colors (not used yet)'''
colors = {}
for x in range(len(concepts)):
concept = concepts[x]
r = lambda: random.randint(0,255)
colors[concept] = '#%02X%02X%02X' % (r(),r(),r())
return colors
def kl_divergence(s):
    '''Kullback-Leibler divergence - too slow to use here, but ideally
    what we would want to assess differences in rates of words between
    sample and population. s is a particular column of the data frame
    (a single word). Run e.g.: res=sample.apply(kl_divergence,axis=0)
'''
sample_count = scipy.stats.itemfreq(s.tolist())
col = pandas.DataFrame(s).columns[0]
population_count = scipy.stats.itemfreq(app.wordcounts[col].tolist())
if len(population_count[:,0]) >= len(sample_count[:,0]):
difference = len(population_count[:,0]) - len(sample_count[:,0])
pk1 = population_count[:,1].tolist()
pk2 = sample_count[:,1].tolist() + [0]*difference
else:
difference = len(sample_count[:,0]) - len(population_count[:,0])
pk1 = population_count[:,1].tolist() + [0]*difference
pk2 = sample_count[:,1].tolist()
return scipy.stats.entropy(pk2, pk1, base=None)
# Views ##############################################################################################
@app.route("/")
def index():
'''index view displays the bodymap'''
return render_template("index.html",bodymap=app.bodymap,
labels=app.labels)
@app.route("/detail")
def detail():
'''view details for a set of ids'''
# We will render dates across the bottom along with general counts
# need lookup with date --> ids
dates = app.fatalities["INCIDENT_DATE"].copy()
dates.index = app.fatalities["id"]
dates = pandas.DataFrame(dates).groupby(by="INCIDENT_DATE").groups
df = pandas.DataFrame(columns=["date","count"])
df["date"] = dates.keys()
df["count"] = [len(dates[x]) for x in dates.keys()]
return render_template("brush.html",bodymap=app.bodymap,
labels=app.labels,
descriptions=app.descriptions,
dates=df.to_dict(orient="records"),
lookup=dates)
@app.route("/map")
def bodymap():
'''view deaths via bodymap'''
deaths = dict()
for part,idx in app.deaths.iteritems():
deaths[str(part)] = idx
return render_template("map.html",bodymap=app.bodymap,
labels=app.labels,
deaths=deaths,
descriptions=app.descriptions)
if __name__ == "__main__":
app.debug = True
app.run()
| {
"content_hash": "a7efe82c6f4f57d376fbaac8096e7518",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 109,
"avg_line_length": 40.47540983606557,
"alnum_prop": 0.5805319292561091,
"repo_name": "vsoch/bodymap",
"id": "98c7abebd873e71f2f887d764ccd91b71daf52e3",
"size": "7407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "index.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2408"
},
{
"name": "HTML",
"bytes": "1986472"
},
{
"name": "JavaScript",
"bytes": "74070"
},
{
"name": "Python",
"bytes": "38000"
}
],
"symlink_target": ""
} |
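For reference, the `scipy.stats.entropy(pk2, pk1)` call at the heart of `kl_divergence` computes KL(sample || population) after normalizing the count vectors. A tiny worked example with invented counts:

import scipy.stats

pk = [9, 1]  # sample: a word absent 9 times, present once
qk = [7, 3]  # population baseline
scipy.stats.entropy(pk, qk)  # 0.9*ln(0.9/0.7) + 0.1*ln(0.1/0.3) ~= 0.116 nats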
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('analytics', '0006_add_subgroup_to_unique_constraints'),
]
operations = [
migrations.AlterUniqueTogether(
name='installationcount',
unique_together=set([('property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='installationcount',
name='interval',
),
migrations.AlterUniqueTogether(
name='realmcount',
unique_together=set([('realm', 'property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='realmcount',
name='interval',
),
migrations.AlterUniqueTogether(
name='streamcount',
unique_together=set([('stream', 'property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='streamcount',
name='interval',
),
migrations.AlterUniqueTogether(
name='usercount',
unique_together=set([('user', 'property', 'subgroup', 'end_time')]),
),
migrations.RemoveField(
model_name='usercount',
name='interval',
),
]
| {
"content_hash": "493db9c0cad7c8cc1a69da12d863bdc0",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 82,
"avg_line_length": 30.767441860465116,
"alnum_prop": 0.546485260770975,
"repo_name": "dhcrzf/zulip",
"id": "6b114e1527cdd11f60d9748d0cf34c14d7213ada",
"size": "1396",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "analytics/migrations/0007_remove_interval.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "436713"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "HTML",
"bytes": "673974"
},
{
"name": "JavaScript",
"bytes": "2951950"
},
{
"name": "Perl",
"bytes": "398747"
},
{
"name": "Puppet",
"bytes": "72908"
},
{
"name": "Python",
"bytes": "6188005"
},
{
"name": "Ruby",
"bytes": "6110"
},
{
"name": "Shell",
"bytes": "118284"
},
{
"name": "TypeScript",
"bytes": "9543"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
__version__ = 'undefined'
exec(open('ipylayoutwidgets/version.py').read())
with open('README.md') as f:
readme = f.read()
setup(
name='ipylayoutwidgets',
version=__version__,
    description='A collection of IPython widgets to position/size other widgets',
long_description=readme,
author='Jack Zentner',
author_email='jack.zentner@gtri.gatech.edu',
url='https://github.com/openseat/ipylayoutwidgets',
install_requires=['ipywidgets'],
license='BSD 3-Clause',
packages=find_packages(exclude=('tests', 'docs')),
include_package_data=True
) | {
"content_hash": "0aba99a66aad826bda1b65e189224716",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 81,
"avg_line_length": 28.59090909090909,
"alnum_prop": 0.6947535771065183,
"repo_name": "openseat/ipylayoutwidgets",
"id": "2f619370536b7374bc0111c4733a3b783aebd752",
"size": "654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "12887"
},
{
"name": "Jupyter Notebook",
"bytes": "8558"
},
{
"name": "Python",
"bytes": "4244"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
class RecentJob(Model):
"""Information about the most recent job to run under the job schedule.
:param id: The ID of the job.
:type id: str
:param url: The URL of the job.
:type url: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
}
def __init__(self, id=None, url=None):
super(RecentJob, self).__init__()
self.id = id
self.url = url
| {
"content_hash": "0e10d1efcd30f270d79de0f38108d7f4",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 75,
"avg_line_length": 24.142857142857142,
"alnum_prop": 0.5443786982248521,
"repo_name": "lmazuel/azure-sdk-for-python",
"id": "f4aa8661a8a41fdfa1d68577c3574adb7ab464ca",
"size": "981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-batch/azure/batch/models/recent_job.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42572767"
}
],
"symlink_target": ""
} |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N=1024, FREQ='D', seed=0, trendtype="PolyTrend", cycle_length=7, transform="RelativeDifference", sigma=0.0, exog_count=0, ar_order=12) | {
"content_hash": "770782375ad34dac0a4ed3e4d1c1fd2b",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 174,
"avg_line_length": 39.142857142857146,
"alnum_prop": 0.7153284671532847,
"repo_name": "antoinecarme/pyaf",
"id": "d0017aaca5a2584d0610e73f33b7a1a755cf4398",
"size": "274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_RelativeDifference/trend_PolyTrend/cycle_7/ar_12/test_artificial_1024_RelativeDifference_PolyTrend_7_12_0.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
} |
"""
Module for working with Load Balancers
"""
__all__ = ["base", "providers", "types", "drivers"]
| {
"content_hash": "efde47c897e1de63bf729c4b9dd0dcba",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 51,
"avg_line_length": 20,
"alnum_prop": 0.61,
"repo_name": "apache/libcloud",
"id": "ec6c80c61271cb0c56c01f85d4bb7b8327f06e25",
"size": "882",
"binary": false,
"copies": "3",
"ref": "refs/heads/trunk",
"path": "libcloud/loadbalancer/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2155"
},
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "9105547"
},
{
"name": "Shell",
"bytes": "12994"
}
],
"symlink_target": ""
} |
"""End-to-End tests for CIFuzz."""
import os
import unittest
import run_cifuzz
import test_helpers
CIFUZZ_DIR = os.path.dirname(os.path.abspath(__file__))
EXTERNAL_PROJECT_PATH = os.path.join(CIFUZZ_DIR, 'test_data',
'external-project')
# This test will fail if not run as root because the fuzzer build process
# creates binaries that only root can write to.
# Use a separate env var to keep this separate from integration tests, which
# don't have this annoying property.
@unittest.skipIf(not os.getenv('END_TO_END_TESTS'),
'END_TO_END_TESTS=1 not set')
class EndToEndTest(unittest.TestCase):
"""End-to-End tests for CIFuzz."""
def setUp(self):
test_helpers.patch_environ(self, runner=True)
def test_simple(self):
"""Simple end-to-end test using run_cifuzz.main()."""
os.environ['REPOSITORY'] = 'external-project'
os.environ['PROJECT_SRC_PATH'] = EXTERNAL_PROJECT_PATH
os.environ['FILESTORE'] = 'no_filestore'
os.environ['NO_CLUSTERFUZZ_DEPLOYMENT'] = 'True'
with test_helpers.docker_temp_dir() as temp_dir:
os.environ['WORKSPACE'] = temp_dir
# TODO(metzman): Verify the crash, affected fuzzers, and other things.
self.assertEqual(run_cifuzz.main(), 1)
| {
"content_hash": "c5e31e4473f9ac4ccbdae42df1fe604e",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 76,
"avg_line_length": 36.17142857142857,
"alnum_prop": 0.6800947867298578,
"repo_name": "skia-dev/oss-fuzz",
"id": "30e28beda330c621ac3a0f5b9d1ee2dc11a5e463",
"size": "1841",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "infra/cifuzz/cifuzz_end_to_end_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "553498"
},
{
"name": "C++",
"bytes": "412656"
},
{
"name": "CMake",
"bytes": "1635"
},
{
"name": "Dockerfile",
"bytes": "874184"
},
{
"name": "Go",
"bytes": "67217"
},
{
"name": "HTML",
"bytes": "13787"
},
{
"name": "Java",
"bytes": "551460"
},
{
"name": "JavaScript",
"bytes": "2508"
},
{
"name": "Makefile",
"bytes": "13162"
},
{
"name": "Python",
"bytes": "969565"
},
{
"name": "Ruby",
"bytes": "1827"
},
{
"name": "Rust",
"bytes": "8384"
},
{
"name": "Shell",
"bytes": "1356613"
},
{
"name": "Starlark",
"bytes": "5471"
},
{
"name": "Swift",
"bytes": "1363"
}
],
"symlink_target": ""
} |
import json
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from . import views
# Constants
TEST_USER_EMAIL = "ledo@gah.com"
TEST_USER_USERNAME = "ledo"
TEST_USER_PASSWORD = "ContinentalUnion"
class LoginTestCase(TestCase):
"""
python manage.py test login
"""
def tearDown(self):
User.objects.get(email=TEST_USER_EMAIL).delete()
def setUp(self):
# Create our user.
user = User.objects.create_user(
email=TEST_USER_EMAIL,
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
)
user.is_active = True
user.save()
def test_root_url_resolves_to_login_modal_view(self):
found = resolve('/login_modal')
self.assertEqual(found.func,views.login_modal)
def test_login_modal_returns_correct_html(self):
client = Client()
response = client.post('/login_modal')
self.assertEqual(response.status_code, 200)
self.assertTrue(response.content.startswith(b'<div'))
self.assertIn(b'login_modal',response.content)
self.assertIn(b'loginForm',response.content)
    def test_login_authentication_with_successful_login(self):
        # Extra parameters to make this an Ajax-style request.
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
# Test
client = Client()
response = client.post(
'/login',
{'username': TEST_USER_USERNAME, 'password': TEST_USER_PASSWORD},
**kwargs
)
# Verify: Check that the response is 200 OK.
self.assertEqual(response.status_code, 200)
# Verify: Successful response.
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'success')
self.assertEqual(array['message'], 'logged on')
def test_login_authentication_with_failed_login(self):
        # Extra parameters to make this an Ajax-style request.
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
# Test
client = Client()
response = client.post(
'/login',
{'username': TEST_USER_USERNAME, 'password': 'wrong_password'},
**kwargs
)
# Verify: Check that the response is 200 OK.
self.assertEqual(response.status_code, 200)
# Verify: Successful response.
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'failure')
self.assertEqual(array['message'], 'wrong username or password')
def test_login_authentication_with_suspension(self):
        # Extra parameters to make this an Ajax-style request.
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
# Suspend User
user = User.objects.get(username=TEST_USER_USERNAME)
user.is_active = False
user.save()
# Test
client = Client()
response = client.post(
'/login',
{'username': TEST_USER_USERNAME, 'password': TEST_USER_PASSWORD},
**kwargs
)
# Verify: Check that the response is 200 OK.
self.assertEqual(response.status_code, 200)
# Verify: Successful response.
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'failure')
self.assertEqual(array['message'], 'you are suspended')
def test_logout_authentication_with_success(self):
        # Extra parameters to make this an Ajax-style request.
kwargs = {'HTTP_X_REQUESTED_WITH':'XMLHttpRequest'}
# Test
client = Client()
client.login(
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
)
response = client.post('/logout', {}, **kwargs )
# Verify: Check that the response is 200 OK.
self.assertEqual(response.status_code, 200)
# Verify: Successful response.
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'success')
self.assertEqual(array['message'], 'you are logged off')
def test_login_authentication_with_non_ajax_call(self):
# Test
client = Client()
response = client.post(
'/login',
{'username': TEST_USER_USERNAME, 'password': TEST_USER_PASSWORD}
)
# Verify: Check that the response is 200 OK.
self.assertEqual(response.status_code, 200)
# Verify: Successful response.
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'failure')
self.assertEqual(array['message'], 'an unknown error occured')
def test_logout_authentication_with_non_ajax_call(self):
# Test
client = Client()
client.login(
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD
)
response = client.post('/logout')
# Verify: Check that the response is 200 OK.
self.assertEqual(response.status_code, 200)
# Verify: Successful response.
json_string = response.content.decode(encoding='UTF-8')
array = json.loads(json_string)
self.assertEqual(array['status'], 'success')
self.assertEqual(array['message'], 'you are logged off')
| {
"content_hash": "268fb8354e7006b255307b82542dcd8b",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 77,
"avg_line_length": 35.82941176470588,
"alnum_prop": 0.5956329010014776,
"repo_name": "AcademicsToday/academicstoday-django",
"id": "d35c95fb58041a35e01026ea9e3c733d395413bf",
"size": "6091",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "academicstoday_project/login/tests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "39067"
},
{
"name": "HTML",
"bytes": "518518"
},
{
"name": "JavaScript",
"bytes": "437699"
},
{
"name": "Python",
"bytes": "613743"
},
{
"name": "Shell",
"bytes": "3904"
}
],
"symlink_target": ""
} |
from test_framework.test_framework import DeuscoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class MempoolCoinbaseTest(DeuscoinTestFramework):
alert_filename = None # Set by setup_network
def setup_network(self):
args = ["-checkmempool", "-debug=mempool"]
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, args))
self.nodes.append(start_node(1, self.options.tmpdir, args))
connect_nodes(self.nodes[1], 0)
self.is_network_split = False
self.sync_all()
def create_tx(self, from_txid, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
signresult = self.nodes[0].signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
def run_test(self):
start_count = self.nodes[0].getblockcount()
        # Mine four blocks. After this, nodes[0]'s blocks
        # 101, 102, and 103 are spend-able.
new_blocks = self.nodes[1].generate(4)
self.sync_all()
node0_address = self.nodes[0].getnewaddress()
node1_address = self.nodes[1].getnewaddress()
# Three scenarios for re-orging coinbase spends in the memory pool:
# 1. Direct coinbase spend : spend_101
# 2. Indirect (coinbase spend in chain, child in mempool) : spend_102 and spend_102_1
# 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
        # Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase),
# and make sure the mempool code behaves correctly.
b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
spend_101_raw = self.create_tx(coinbase_txids[1], node1_address, 50)
spend_102_raw = self.create_tx(coinbase_txids[2], node0_address, 50)
spend_103_raw = self.create_tx(coinbase_txids[3], node0_address, 50)
# Create a block-height-locked transaction which will be invalid after reorg
timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 50})
# Set the time lock
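        # (an nSequence of ffffffff disables nLockTime, so it is lowered first;
        # the last 4 bytes of a raw transaction are nLockTime in little-endian,
        # so one byte for height+2 followed by "000000" encodes that height)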
timelock_tx = timelock_tx.replace("ffffffff", "11111111", 1)
timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"]
assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
# Broadcast and mine spend_102 and 103:
spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
self.nodes[0].generate(1)
assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
# Create 102_1 and 103_1:
spend_102_1_raw = self.create_tx(spend_102_id, node1_address, 50)
spend_103_1_raw = self.create_tx(spend_103_id, node1_address, 50)
# Broadcast and mine 103_1:
spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
last_block = self.nodes[0].generate(1)
timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
# ... now put spend_101 and spend_102_1 in memory pools:
spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw)
self.sync_all()
assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, timelock_tx_id})
for node in self.nodes:
node.invalidateblock(last_block[0])
assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, spend_103_1_id})
# Use invalidateblock to re-org back and make all those coinbase spends
# immature/invalid:
for node in self.nodes:
node.invalidateblock(new_blocks[0])
self.sync_all()
# mempool should be empty.
assert_equal(set(self.nodes[0].getrawmempool()), set())
if __name__ == '__main__':
MempoolCoinbaseTest().main()
| {
"content_hash": "400bc5d8dafa857e4f51a41b3acb74ef",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 119,
"avg_line_length": 45.77894736842105,
"alnum_prop": 0.6449758565187399,
"repo_name": "deuscoin-org/deuscoin-core",
"id": "0a852b78d4ff03af3cdc624fcf908b7ff35a2e75",
"size": "4697",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/mempool_reorg.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "619655"
},
{
"name": "C++",
"bytes": "4298620"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2101"
},
{
"name": "M4",
"bytes": "147636"
},
{
"name": "Makefile",
"bytes": "97785"
},
{
"name": "Objective-C",
"bytes": "92297"
},
{
"name": "Objective-C++",
"bytes": "7244"
},
{
"name": "Python",
"bytes": "703811"
},
{
"name": "QMake",
"bytes": "2021"
},
{
"name": "Roff",
"bytes": "3831"
},
{
"name": "Shell",
"bytes": "417069"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="pie.hoverlabel.font", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "none"),
**kwargs,
)
| {
"content_hash": "a1a18b0de59b8c641c4d123a63258a4d",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 33.357142857142854,
"alnum_prop": 0.5931477516059958,
"repo_name": "plotly/plotly.py",
"id": "cee4bff0cd94c90f39f9342d774af53fd77c46ce",
"size": "467",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/pie/hoverlabel/font/_color.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import sys
import textwrap
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, lit, coalesce, concat, concat_ws, regexp_replace, udf
if __name__ == "__main__":
if len(sys.argv) != 4:
print("Usage: trove-load.py <input json> <api json> <output parquet>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Trove Load').getOrCreate()
wrapper = textwrap.TextWrapper(width=40, break_long_words=False)
text_fill = udf(lambda s: wrapper.fill(s))
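# reverse the XML entity escaping left in the stored article text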
xunesc = udf(lambda s: s \
.replace('<', '<') \
.replace('&', '&') \
.replace('>', '>') \
.replace('"', '"') \
.replace(''', "'"))
raw = spark.read.json(sys.argv[1])
# api = spark.read.json(sys.argv[2])
df = raw.na.drop(subset=['id', 'fulltext']).dropDuplicates(['id']) \
.select(concat(lit('trove/'), col('id')).alias('id'),
concat_ws('/', lit('trove'), col('titleId'), col('date')).alias('issue'),
concat(lit('trove/'), col('titleId')).alias('series'),
col('date'), col('firstPageId'),
col('firstPageSeq').cast('int').alias('seq'),
col('heading').alias('title'), col('category'),
text_fill(col('fulltext')).alias('text'))
# apitext = api.select(concat(lit('trove/'), col('article.id')).alias('id'),
# xunesc(regexp_replace(regexp_replace(regexp_replace(
# col('article.articleText'), r'<(span|p)>[ ]*', ''),
# r'</(span|p)>[ ]*', '\n'),
# r' [ ]*', ' ')).alias('articleText'))
# df.join(apitext, ['id'], 'left_outer') \
# .withColumn('text', coalesce('articleText', 'text')) \
# .drop('articleText') \
# .write.save(sys.argv[3])
df.write.save(sys.argv[3], mode='overwrite')
spark.stop()
| {
"content_hash": "dcc45ef885bce54873ce3c38fcd1f3ac",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 95,
"avg_line_length": 41.509803921568626,
"alnum_prop": 0.49031648559282004,
"repo_name": "ViralTexts/vt-passim",
"id": "8b6ae5906d1e2d1703f416d05eceb4257727136a",
"size": "2117",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/trove-load.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "9433"
},
{
"name": "Makefile",
"bytes": "18200"
},
{
"name": "Python",
"bytes": "96293"
},
{
"name": "Scala",
"bytes": "94268"
},
{
"name": "Shell",
"bytes": "4216"
}
],
"symlink_target": ""
} |
import numpy as np
import pandas as pd
from skbio import OrdinationResults
from scipy.spatial import procrustes
from numpy.random import default_rng
def procrustes_analysis(reference: OrdinationResults, other: OrdinationResults,
dimensions: int = 5,
permutations: int = 999) -> (OrdinationResults,
OrdinationResults,
pd.DataFrame):
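    """Fit `other` onto `reference` using Procrustes superimposition.
    Both ordinations must describe the same samples and have equal shapes;
    only the first `dimensions` axes are compared. Returns the two
    transformed ordinations plus a DataFrame holding the true M^2 disparity
    and its Monte Carlo p-value (see `_procrustes_monte_carlo`).
    """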
if reference.samples.shape != other.samples.shape:
raise ValueError('The matrices cannot be fitted unless they have the '
'same dimensions')
if reference.samples.shape[1] < dimensions:
raise ValueError('Cannot fit fewer dimensions than available')
# fail if there are any elements in the symmetric difference
diff = reference.samples.index.symmetric_difference(other.samples.index)
if not diff.empty:
raise ValueError('The ordinations represent two different sets of '
'samples')
    # make the matrices comparable
other.samples = other.samples.reindex(index=reference.samples.index)
mtx1, mtx2, m2 = procrustes(reference.samples.values[:, :dimensions],
other.samples.values[:, :dimensions])
axes = reference.samples.columns[:dimensions]
samples1 = pd.DataFrame(data=mtx1,
index=reference.samples.index.copy(),
columns=axes.copy())
samples2 = pd.DataFrame(data=mtx2,
index=reference.samples.index.copy(),
columns=axes.copy())
info = _procrustes_monte_carlo(reference.samples.values[:, :dimensions],
other.samples.values[:, :dimensions],
m2, permutations)
out1 = OrdinationResults(
short_method_name=reference.short_method_name,
long_method_name=reference.long_method_name,
eigvals=reference.eigvals[:dimensions].copy(),
samples=samples1,
features=reference.features,
biplot_scores=reference.biplot_scores,
sample_constraints=reference.sample_constraints,
proportion_explained=reference.proportion_explained[:dimensions]
.copy())
out2 = OrdinationResults(
short_method_name=other.short_method_name,
long_method_name=other.long_method_name,
eigvals=other.eigvals[:dimensions].copy(),
samples=samples2,
features=other.features,
biplot_scores=other.biplot_scores,
sample_constraints=other.sample_constraints,
proportion_explained=other.proportion_explained[:dimensions]
.copy())
return out1, out2, info
def _procrustes_monte_carlo(reference: np.ndarray, other: np.ndarray,
true_m2, permutations) -> (pd.DataFrame):
'''
Outputs a dataframe containing:
0: True M^2 value
1: p-value for true M^2 value
2: number of Monte Carlo permutations done in simulation
'''
rng = default_rng()
trials_below_m2 = 0
if permutations == 'disable':
permutations = 0
for i in range(permutations):
# shuffle rows in np array
rng.shuffle(other)
# run procrustes analysis
_, _, m2 = procrustes(reference, other)
# check m2 value
if m2 < true_m2:
trials_below_m2 += 1
if permutations == 0:
p_val = np.nan
else:
# mimic the behaviour in scikit-bio's permutation-based tests and avoid
# returning p-values equal to zero
p_val = (trials_below_m2 + 1) / (permutations + 1)
df = pd.DataFrame({'true M^2 value': [true_m2],
'p-value for true M^2 value': [p_val],
'number of Monte Carlo permutations': [permutations]},
index=pd.Index(['results'], name='id'))
return df
| {
"content_hash": "9a48a8819a6b804dd5b31feb982a0df6",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 79,
"avg_line_length": 37.55555555555556,
"alnum_prop": 0.5860453648915187,
"repo_name": "jakereps/q2-diversity",
"id": "16720f436662640684a72b9ceacdcca9cab2c10f",
"size": "4406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "q2_diversity/_procrustes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "15437"
},
{
"name": "JavaScript",
"bytes": "36122"
},
{
"name": "Makefile",
"bytes": "1523"
},
{
"name": "Python",
"bytes": "314381"
},
{
"name": "TeX",
"bytes": "5073"
}
],
"symlink_target": ""
} |
"""Kernel Principal Components Analysis."""
# Author: Mathieu Blondel <mathieu@mblondel.org>
# Sylvain Marie <sylvain.marie@schneider-electric.com>
# License: BSD 3 clause
import numpy as np
from scipy import linalg
from scipy.sparse.linalg import eigsh
from ..utils._arpack import _init_arpack_v0
from ..utils.extmath import svd_flip, _randomized_eigsh
from ..utils.validation import check_is_fitted, _check_psd_eigenvalues
from ..utils.deprecation import deprecated
from ..exceptions import NotFittedError
from ..base import BaseEstimator, TransformerMixin
from ..preprocessing import KernelCenterer
from ..metrics.pairwise import pairwise_kernels
class KernelPCA(TransformerMixin, BaseEstimator):
"""Kernel Principal component analysis (KPCA).
Non-linear dimensionality reduction through the use of kernels (see
:ref:`metrics`).
It uses the `scipy.linalg.eigh` LAPACK implementation of the full SVD or
the `scipy.sparse.linalg.eigsh` ARPACK implementation of the truncated SVD,
depending on the shape of the input data and the number of components to
extract. It can also use a randomized truncated SVD by the method of
Halko et al. 2009, see `eigen_solver`.
Read more in the :ref:`User Guide <kernel_PCA>`.
Parameters
----------
n_components : int, default=None
Number of components. If None, all non-zero components are kept.
kernel : {'linear', 'poly', \
'rbf', 'sigmoid', 'cosine', 'precomputed'}, default='linear'
Kernel used for PCA.
gamma : float, default=None
Kernel coefficient for rbf, poly and sigmoid kernels. Ignored by other
kernels. If ``gamma`` is ``None``, then it is set to ``1/n_features``.
degree : int, default=3
Degree for poly kernels. Ignored by other kernels.
coef0 : float, default=1
Independent term in poly and sigmoid kernels.
Ignored by other kernels.
kernel_params : dict, default=None
Parameters (keyword arguments) and
values for kernel passed as callable object.
Ignored by other kernels.
alpha : float, default=1.0
Hyperparameter of the ridge regression that learns the
inverse transform (when fit_inverse_transform=True).
fit_inverse_transform : bool, default=False
Learn the inverse transform for non-precomputed kernels.
(i.e. learn to find the pre-image of a point)
eigen_solver : {'auto', 'dense', 'arpack', 'randomized'}, \
default='auto'
Select eigensolver to use. If `n_components` is much
less than the number of training samples, randomized (or arpack to a
        smaller extent) may be more efficient than the dense eigensolver.
Randomized SVD is performed according to the method of Halko et al.
auto :
the solver is selected by a default policy based on n_samples
(the number of training samples) and `n_components`:
if the number of components to extract is less than 10 (strict) and
the number of samples is more than 200 (strict), the 'arpack'
method is enabled. Otherwise the exact full eigenvalue
decomposition is computed and optionally truncated afterwards
('dense' method).
dense :
run exact full eigenvalue decomposition calling the standard
LAPACK solver via `scipy.linalg.eigh`, and select the components
by postprocessing
arpack :
run SVD truncated to n_components calling ARPACK solver using
`scipy.sparse.linalg.eigsh`. It requires strictly
0 < n_components < n_samples
randomized :
run randomized SVD by the method of Halko et al. The current
            implementation selects eigenvalues based on their modulus; therefore
using this method can lead to unexpected results if the kernel is
not positive semi-definite.
.. versionchanged:: 1.0
`'randomized'` was added.
tol : float, default=0
Convergence tolerance for arpack.
If 0, optimal value will be chosen by arpack.
max_iter : int, default=None
Maximum number of iterations for arpack.
If None, optimal value will be chosen by arpack.
iterated_power : int >= 0, or 'auto', default='auto'
Number of iterations for the power method computed by
svd_solver == 'randomized'. When 'auto', it is set to 7 when
        `n_components < 0.1 * min(X.shape)`, otherwise it is set to 4.
.. versionadded:: 1.0
remove_zero_eig : bool, default=False
If True, then all components with zero eigenvalues are removed, so
that the number of components in the output may be < n_components
(and sometimes even zero due to numerical instability).
When n_components is None, this parameter is ignored and components
with zero eigenvalues are removed regardless.
random_state : int, RandomState instance or None, default=None
Used when ``eigen_solver`` == 'arpack' or 'randomized'. Pass an int
for reproducible results across multiple function calls.
See :term:`Glossary <random_state>`.
.. versionadded:: 0.18
copy_X : bool, default=True
If True, input X is copied and stored by the model in the `X_fit_`
attribute. If no further changes will be done to X, setting
`copy_X=False` saves memory by storing a reference.
.. versionadded:: 0.18
n_jobs : int, default=None
The number of parallel jobs to run.
``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
``-1`` means using all processors. See :term:`Glossary <n_jobs>`
for more details.
.. versionadded:: 0.18
Attributes
----------
eigenvalues_ : ndarray of shape (n_components,)
Eigenvalues of the centered kernel matrix in decreasing order.
If `n_components` and `remove_zero_eig` are not set,
then all values are stored.
lambdas_ : ndarray of shape (n_components,)
Same as `eigenvalues_` but this attribute is deprecated.
.. deprecated:: 1.0
`lambdas_` was renamed to `eigenvalues_` in version 1.0 and will be
removed in 1.2.
eigenvectors_ : ndarray of shape (n_samples, n_components)
Eigenvectors of the centered kernel matrix. If `n_components` and
`remove_zero_eig` are not set, then all components are stored.
alphas_ : ndarray of shape (n_samples, n_components)
Same as `eigenvectors_` but this attribute is deprecated.
.. deprecated:: 1.0
`alphas_` was renamed to `eigenvectors_` in version 1.0 and will be
removed in 1.2.
dual_coef_ : ndarray of shape (n_samples, n_features)
Inverse transform matrix. Only available when
``fit_inverse_transform`` is True.
X_transformed_fit_ : ndarray of shape (n_samples, n_components)
Projection of the fitted data on the kernel principal components.
Only available when ``fit_inverse_transform`` is True.
X_fit_ : ndarray of shape (n_samples, n_features)
The data used to fit the model. If `copy_X=False`, then `X_fit_` is
a reference. This attribute is used for the calls to transform.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
Examples
--------
>>> from sklearn.datasets import load_digits
>>> from sklearn.decomposition import KernelPCA
>>> X, _ = load_digits(return_X_y=True)
>>> transformer = KernelPCA(n_components=7, kernel='linear')
>>> X_transformed = transformer.fit_transform(X)
>>> X_transformed.shape
(1797, 7)
References
----------
Kernel PCA was introduced in:
Bernhard Schoelkopf, Alexander J. Smola,
and Klaus-Robert Mueller. 1999. Kernel principal
component analysis. In Advances in kernel methods,
MIT Press, Cambridge, MA, USA 327-352.
For eigen_solver == 'arpack', refer to `scipy.sparse.linalg.eigsh`.
For eigen_solver == 'randomized', see:
Finding structure with randomness: Stochastic algorithms
for constructing approximate matrix decompositions Halko, et al., 2009
    (arXiv:0909.4061)
A randomized algorithm for the decomposition of matrices
Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert
"""
def __init__(
self,
n_components=None,
*,
kernel="linear",
gamma=None,
degree=3,
coef0=1,
kernel_params=None,
alpha=1.0,
fit_inverse_transform=False,
eigen_solver="auto",
tol=0,
max_iter=None,
iterated_power="auto",
remove_zero_eig=False,
random_state=None,
copy_X=True,
n_jobs=None,
):
if fit_inverse_transform and kernel == "precomputed":
raise ValueError("Cannot fit_inverse_transform with a precomputed kernel.")
self.n_components = n_components
self.kernel = kernel
self.kernel_params = kernel_params
self.gamma = gamma
self.degree = degree
self.coef0 = coef0
self.alpha = alpha
self.fit_inverse_transform = fit_inverse_transform
self.eigen_solver = eigen_solver
self.tol = tol
self.max_iter = max_iter
self.iterated_power = iterated_power
self.remove_zero_eig = remove_zero_eig
self.random_state = random_state
self.n_jobs = n_jobs
self.copy_X = copy_X
# TODO: Remove in 1.1
# mypy error: Decorated property not supported
@deprecated( # type: ignore
"Attribute `_pairwise` was deprecated in "
"version 0.24 and will be removed in 1.1 (renaming of 0.26)."
)
@property
def _pairwise(self):
return self.kernel == "precomputed"
# TODO: Remove in 1.2
# mypy error: Decorated property not supported
@deprecated( # type: ignore
"Attribute `lambdas_` was deprecated in version 1.0 and will be "
"removed in 1.2. Use `eigenvalues_` instead."
)
@property
def lambdas_(self):
return self.eigenvalues_
# mypy error: Decorated property not supported
@deprecated( # type: ignore
"Attribute `alphas_` was deprecated in version 1.0 and will be "
"removed in 1.2. Use `eigenvectors_` instead."
)
@property
def alphas_(self):
return self.eigenvectors_
def _get_kernel(self, X, Y=None):
if callable(self.kernel):
params = self.kernel_params or {}
else:
params = {"gamma": self.gamma, "degree": self.degree, "coef0": self.coef0}
return pairwise_kernels(
X, Y, metric=self.kernel, filter_params=True, n_jobs=self.n_jobs, **params
)
def _fit_transform(self, K):
"""Fit's using kernel K"""
# center kernel
K = self._centerer.fit_transform(K)
# adjust n_components according to user inputs
if self.n_components is None:
n_components = K.shape[0] # use all dimensions
else:
if self.n_components < 1:
raise ValueError(
f"`n_components` should be >= 1, got: {self.n_component}"
)
n_components = min(K.shape[0], self.n_components)
# compute eigenvectors
if self.eigen_solver == "auto":
if K.shape[0] > 200 and n_components < 10:
eigen_solver = "arpack"
else:
eigen_solver = "dense"
else:
eigen_solver = self.eigen_solver
if eigen_solver == "dense":
# Note: eigvals specifies the indices of smallest/largest to return
self.eigenvalues_, self.eigenvectors_ = linalg.eigh(
K, eigvals=(K.shape[0] - n_components, K.shape[0] - 1)
)
elif eigen_solver == "arpack":
v0 = _init_arpack_v0(K.shape[0], self.random_state)
self.eigenvalues_, self.eigenvectors_ = eigsh(
K, n_components, which="LA", tol=self.tol, maxiter=self.max_iter, v0=v0
)
elif eigen_solver == "randomized":
self.eigenvalues_, self.eigenvectors_ = _randomized_eigsh(
K,
n_components=n_components,
n_iter=self.iterated_power,
random_state=self.random_state,
selection="module",
)
else:
raise ValueError("Unsupported value for `eigen_solver`: %r" % eigen_solver)
# make sure that the eigenvalues are ok and fix numerical issues
self.eigenvalues_ = _check_psd_eigenvalues(
self.eigenvalues_, enable_warnings=False
)
# flip eigenvectors' sign to enforce deterministic output
self.eigenvectors_, _ = svd_flip(
self.eigenvectors_, np.zeros_like(self.eigenvectors_).T
)
# sort eigenvectors in descending order
indices = self.eigenvalues_.argsort()[::-1]
self.eigenvalues_ = self.eigenvalues_[indices]
self.eigenvectors_ = self.eigenvectors_[:, indices]
# remove eigenvectors with a zero eigenvalue (null space) if required
if self.remove_zero_eig or self.n_components is None:
self.eigenvectors_ = self.eigenvectors_[:, self.eigenvalues_ > 0]
self.eigenvalues_ = self.eigenvalues_[self.eigenvalues_ > 0]
# Maintenance note on Eigenvectors normalization
# ----------------------------------------------
# there is a link between
# the eigenvectors of K=Phi(X)'Phi(X) and the ones of Phi(X)Phi(X)'
# if v is an eigenvector of K
# then Phi(X)v is an eigenvector of Phi(X)Phi(X)'
# if u is an eigenvector of Phi(X)Phi(X)'
# then Phi(X)'u is an eigenvector of Phi(X)'Phi(X)
#
# At this stage our self.eigenvectors_ (the v) have norm 1, we need to scale
# them so that eigenvectors in kernel feature space (the u) have norm=1
# instead
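        # (if K v = lambda v with ||v|| = 1, then ||Phi(X) v||^2 = v' K v =
        # lambda, so dividing v by sqrt(lambda) yields a unit-norm u)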
#
# We COULD scale them here:
# self.eigenvectors_ = self.eigenvectors_ / np.sqrt(self.eigenvalues_)
#
# But choose to perform that LATER when needed, in `fit()` and in
# `transform()`.
return K
def _fit_inverse_transform(self, X_transformed, X):
if hasattr(X, "tocsr"):
raise NotImplementedError(
"Inverse transform not implemented for sparse matrices!"
)
n_samples = X_transformed.shape[0]
K = self._get_kernel(X_transformed)
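        # kernel ridge step: add the regularization term alpha to K's diagonal
        # (flat indexing with stride n_samples + 1 walks the diagonal)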
K.flat[:: n_samples + 1] += self.alpha
self.dual_coef_ = linalg.solve(K, X, sym_pos=True, overwrite_a=True)
self.X_transformed_fit_ = X_transformed
def fit(self, X, y=None):
"""Fit the model from data in X.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
self : object
Returns the instance itself.
"""
X = self._validate_data(X, accept_sparse="csr", copy=self.copy_X)
self._centerer = KernelCenterer()
K = self._get_kernel(X)
self._fit_transform(K)
if self.fit_inverse_transform:
# no need to use the kernel to transform X, use shortcut expression
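            # (for training points, K v = lambda v, so projecting with
            # v / sqrt(lambda) gives sqrt(lambda) v)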
X_transformed = self.eigenvectors_ * np.sqrt(self.eigenvalues_)
self._fit_inverse_transform(X_transformed, X)
self.X_fit_ = X
return self
def fit_transform(self, X, y=None, **params):
"""Fit the model from data in X and transform X.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Training vector, where n_samples is the number of samples
and n_features is the number of features.
Returns
-------
X_new : ndarray of shape (n_samples, n_components)
"""
self.fit(X, **params)
# no need to use the kernel to transform X, use shortcut expression
X_transformed = self.eigenvectors_ * np.sqrt(self.eigenvalues_)
if self.fit_inverse_transform:
self._fit_inverse_transform(X_transformed, X)
return X_transformed
def transform(self, X):
"""Transform X.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
Returns
-------
X_new : ndarray of shape (n_samples, n_components)
"""
check_is_fitted(self)
X = self._validate_data(X, accept_sparse="csr", reset=False)
# Compute centered gram matrix between X and training data X_fit_
K = self._centerer.transform(self._get_kernel(X, self.X_fit_))
# scale eigenvectors (properly account for null-space for dot product)
non_zeros = np.flatnonzero(self.eigenvalues_)
scaled_alphas = np.zeros_like(self.eigenvectors_)
scaled_alphas[:, non_zeros] = self.eigenvectors_[:, non_zeros] / np.sqrt(
self.eigenvalues_[non_zeros]
)
# Project with a scalar product between K and the scaled eigenvectors
return np.dot(K, scaled_alphas)
def inverse_transform(self, X):
"""Transform X back to original space.
``inverse_transform`` approximates the inverse transformation using
a learned pre-image. The pre-image is learned by kernel ridge
regression of the original data on their low-dimensional representation
vectors.
        .. note::
            :meth:`~sklearn.decomposition.KernelPCA.fit` internally uses a centered
kernel. As the centered kernel no longer contains the information
of the mean of kernel features, such information is not taken into
account in reconstruction.
.. note::
When users want to compute inverse transformation for 'linear'
kernel, it is recommended that they use
:class:`~sklearn.decomposition.PCA` instead. Unlike
:class:`~sklearn.decomposition.PCA`,
:class:`~sklearn.decomposition.KernelPCA`'s ``inverse_transform``
does not reconstruct the mean of data when 'linear' kernel is used
due to the use of centered kernel.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_components)
Returns
-------
X_new : ndarray of shape (n_samples, n_features)
References
----------
"Learning to Find Pre-Images", G BakIr et al, 2004.
"""
if not self.fit_inverse_transform:
raise NotFittedError(
"The fit_inverse_transform parameter was not"
" set to True when instantiating and hence "
"the inverse transform is not available."
)
K = self._get_kernel(X, self.X_transformed_fit_)
return np.dot(K, self.dual_coef_)
def _more_tags(self):
return {
"preserves_dtype": [np.float64, np.float32],
"pairwise": self.kernel == "precomputed",
}
| {
"content_hash": "a8da417a0509d25ba312ddd6455cf8f7",
"timestamp": "",
"source": "github",
"line_count": 516,
"max_line_length": 87,
"avg_line_length": 37.75,
"alnum_prop": 0.6127624621387134,
"repo_name": "huzq/scikit-learn",
"id": "938c2b6902b15e5517194535e5d8f3aab1251275",
"size": "19479",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sklearn/decomposition/_kernel_pca.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "394787"
},
{
"name": "C++",
"bytes": "140225"
},
{
"name": "Makefile",
"bytes": "1579"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "6394128"
},
{
"name": "Shell",
"bytes": "9250"
}
],
"symlink_target": ""
} |
from io import BytesIO
from typing import TYPE_CHECKING, Any, IO, Optional, overload, Union
from azure.core.exceptions import (
ClientAuthenticationError,
ResourceNotFoundError,
ResourceExistsError,
HttpResponseError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.tracing.decorator import distributed_trace
from ._base_client import ContainerRegistryBaseClient
from ._generated.models import AcrErrors, OCIManifest
from ._helpers import (
_compute_digest,
_is_tag,
_parse_next_link,
_serialize_manifest,
_validate_digest,
OCI_MANIFEST_MEDIA_TYPE,
SUPPORTED_API_VERSIONS,
)
from ._models import (
RepositoryProperties,
ArtifactTagProperties,
ArtifactManifestProperties,
DownloadBlobResult,
DownloadManifestResult,
)
if TYPE_CHECKING:
from azure.core.credentials import TokenCredential
from typing import Dict
def _return_response(pipeline_response, deserialized, response_headers):
return pipeline_response, deserialized, response_headers
class ContainerRegistryClient(ContainerRegistryBaseClient):
def __init__(self, endpoint, credential=None, **kwargs):
# type: (str, Optional[TokenCredential], **Any) -> None
"""Create a ContainerRegistryClient from an ACR endpoint and a credential.
:param str endpoint: An ACR endpoint.
:param credential: The credential with which to authenticate.
:type credential: ~azure.core.credentials.TokenCredential
:keyword api_version: API Version. The default value is "2021-07-01". Note that overriding this default value
may result in unsupported behavior.
:paramtype api_version: str
:keyword audience: URL to use for credential authentication with AAD. Its value could be
"https://management.azure.com", "https://management.chinacloudapi.cn", "https://management.microsoftazure.de"
or "https://management.usgovcloudapi.net".
:paramtype audience: str
:returns: None
:rtype: None
:raises ValueError: If the provided api_version keyword-only argument isn't supported or
            the audience keyword-only argument isn't provided.
.. admonition:: Example:
.. literalinclude:: ../samples/sample_hello_world.py
:start-after: [START create_registry_client]
:end-before: [END create_registry_client]
:language: python
:dedent: 8
:caption: Instantiate an instance of `ContainerRegistryClient`
"""
api_version = kwargs.get("api_version", None)
if api_version and api_version not in SUPPORTED_API_VERSIONS:
supported_versions = "\n".join(SUPPORTED_API_VERSIONS)
raise ValueError(
"Unsupported API version '{}'. Please select from:\n{}".format(
api_version, supported_versions
)
)
audience = kwargs.pop("audience", None)
if not audience:
raise ValueError("The argument audience must be set to initialize ContainerRegistryClient.")
defaultScope = [audience + "/.default"]
if not endpoint.startswith("https://") and not endpoint.startswith("http://"):
endpoint = "https://" + endpoint
self._endpoint = endpoint
self._credential = credential
super(ContainerRegistryClient, self).__init__(
endpoint=endpoint, credential=credential, credential_scopes=defaultScope, **kwargs)
def _get_digest_from_tag(self, repository, tag):
# type: (str, str) -> str
tag_props = self.get_tag_properties(repository, tag)
return tag_props.digest
@distributed_trace
def delete_repository(self, repository, **kwargs):
# type: (str, **Any) -> None
"""Delete a repository. If the repository cannot be found or a response status code of
        404 is returned, an error will not be raised.
:param str repository: The repository to delete
:returns: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
.. admonition:: Example:
.. literalinclude:: ../samples/sample_hello_world.py
:start-after: [START delete_repository]
:end-before: [END delete_repository]
:language: python
:dedent: 8
:caption: Delete a repository from the `ContainerRegistryClient`
"""
self._client.container_registry.delete_repository(repository, **kwargs)
@distributed_trace
def list_repository_names(self, **kwargs):
# type: (**Any) -> ItemPaged[str]
"""List all repositories
:keyword results_per_page: Number of repositories to return per page
:paramtype results_per_page: int
:returns: An iterable of strings
:rtype: ~azure.core.paging.ItemPaged[str]
:raises: ~azure.core.exceptions.HttpResponseError
.. admonition:: Example:
.. literalinclude:: ../samples/sample_delete_tags.py
:start-after: [START list_repository_names]
:end-before: [END list_repository_names]
:language: python
:dedent: 8
:caption: List repositories in a container registry account
"""
n = kwargs.pop("results_per_page", None)
last = kwargs.pop("last", None)
cls = kwargs.pop("cls", None) # type: ClsType["_models.Repositories"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._client._serialize.header( # pylint: disable=protected-access
"accept", accept, "str"
)
if not next_link:
# Construct URL
url = "/acr/v1/_catalog"
path_format_arguments = {
"url": self._client._serialize.url( # pylint: disable=protected-access
"self._config.url",
self._client._config.url, # pylint: disable=protected-access
"str",
skip_quote=True,
),
}
url = self._client._client.format_url(url, **path_format_arguments) # pylint: disable=protected-access
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if last is not None:
query_parameters["last"] = self._client._serialize.query( # pylint: disable=protected-access
"last", last, "str"
)
if n is not None:
query_parameters["n"] = self._client._serialize.query( # pylint: disable=protected-access
"n", n, "int"
)
request = self._client._client.get( # pylint: disable=protected-access
url, query_parameters, header_parameters
)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
"url": self._client._serialize.url( # pylint: disable=protected-access
"self._config.url",
self._client._config.url, # pylint: disable=protected-access
"str",
skip_quote=True,
),
}
url = self._client._client.format_url(url, **path_format_arguments) # pylint: disable=protected-access
request = self._client._client.get( # pylint: disable=protected-access
url, query_parameters, header_parameters
)
return request
def extract_data(pipeline_response):
deserialized = self._client._deserialize( # pylint: disable=protected-access
"Repositories", pipeline_response
)
list_of_elem = deserialized.repositories or []
if cls:
list_of_elem = cls(list_of_elem)
link = None
if "Link" in pipeline_response.http_response.headers.keys():
link = _parse_next_link(pipeline_response.http_response.headers["Link"])
return link, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._client._deserialize.failsafe_deserialize( # pylint: disable=protected-access
AcrErrors, response
)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
@distributed_trace
def get_repository_properties(self, repository, **kwargs):
# type: (str, **Any) -> RepositoryProperties
"""Get the properties of a repository
:param str repository: Name of the repository
:rtype: ~azure.containerregistry.RepositoryProperties
:raises: ~azure.core.exceptions.ResourceNotFoundError
"""
return RepositoryProperties._from_generated( # pylint: disable=protected-access
self._client.container_registry.get_properties(repository, **kwargs)
)
@distributed_trace
def list_manifest_properties(self, repository, **kwargs):
# type: (str, **Any) -> ItemPaged[ArtifactManifestProperties]
"""List the artifacts for a repository
:param str repository: Name of the repository
:keyword order_by: Query parameter for ordering by time ascending or descending
:paramtype order_by: ~azure.containerregistry.ArtifactManifestOrder or str
:keyword results_per_page: Number of repositories to return per page
:paramtype results_per_page: int
:returns: An iterable of :class:`~azure.containerregistry.ArtifactManifestProperties`
:rtype: ~azure.core.paging.ItemPaged[~azure.containerregistry.ArtifactManifestProperties]
:raises: ~azure.core.exceptions.ResourceNotFoundError
"""
name = repository
last = kwargs.pop("last", None)
n = kwargs.pop("results_per_page", None)
orderby = kwargs.pop("order_by", None)
cls = kwargs.pop(
"cls",
lambda objs: [
ArtifactManifestProperties._from_generated( # pylint: disable=protected-access
x, repository_name=repository, registry=self._endpoint
)
for x in objs
],
)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._client._serialize.header( # pylint: disable=protected-access
"accept", accept, "str"
)
if not next_link:
# Construct URL
url = "/acr/v1/{name}/_manifests"
path_format_arguments = {
"url": self._client._serialize.url( # pylint: disable=protected-access
"self._client._config.url",
self._client._config.url, # pylint: disable=protected-access
"str",
skip_quote=True,
),
"name": self._client._serialize.url("name", name, "str"), # pylint: disable=protected-access
}
url = self._client._client.format_url(url, **path_format_arguments) # pylint: disable=protected-access
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if last is not None:
query_parameters["last"] = self._client._serialize.query( # pylint: disable=protected-access
"last", last, "str"
)
if n is not None:
query_parameters["n"] = self._client._serialize.query( # pylint: disable=protected-access
"n", n, "int"
)
if orderby is not None:
query_parameters["orderby"] = self._client._serialize.query( # pylint: disable=protected-access
"orderby", orderby, "str"
)
request = self._client._client.get( # pylint: disable=protected-access
url, query_parameters, header_parameters
)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
"url": self._client._serialize.url( # pylint: disable=protected-access
"self._client._config.url",
self._client._config.url, # pylint: disable=protected-access
"str",
skip_quote=True,
),
"name": self._client._serialize.url("name", name, "str"), # pylint: disable=protected-access
}
url = self._client._client.format_url(url, **path_format_arguments) # pylint: disable=protected-access
request = self._client._client.get( # pylint: disable=protected-access
url, query_parameters, header_parameters
)
return request
def extract_data(pipeline_response):
deserialized = self._client._deserialize( # pylint: disable=protected-access
"AcrManifests", pipeline_response
)
list_of_elem = deserialized.manifests
if cls:
list_of_elem = cls(list_of_elem)
link = None
if "Link" in pipeline_response.http_response.headers.keys():
link = _parse_next_link(pipeline_response.http_response.headers["Link"])
return link, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._client._deserialize.failsafe_deserialize( # pylint: disable=protected-access
AcrErrors, response
)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
@distributed_trace
def delete_tag(self, repository, tag, **kwargs):
# type: (str, str, **Any) -> None
"""Delete a tag from a repository. If the tag cannot be found or a response status code of
        404 is returned, an error will not be raised.
:param str repository: Name of the repository the tag belongs to
:param str tag: The tag to be deleted
:returns: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
for tag in client.list_tag_properties("my_repository"):
client.delete_tag("my_repository", tag.name)
"""
self._client.container_registry.delete_tag(repository, tag, **kwargs)
@distributed_trace
def get_manifest_properties(self, repository, tag_or_digest, **kwargs):
# type: (str, str, **Any) -> ArtifactManifestProperties
"""Get the properties of a registry artifact
:param str repository: Name of the repository
:param str tag_or_digest: Tag or digest of the manifest
:rtype: ~azure.containerregistry.ArtifactManifestProperties
:raises: ~azure.core.exceptions.ResourceNotFoundError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
for artifact in client.list_manifest_properties("my_repository"):
properties = client.get_manifest_properties("my_repository", artifact.digest)
"""
if _is_tag(tag_or_digest):
tag_or_digest = self._get_digest_from_tag(repository, tag_or_digest)
return ArtifactManifestProperties._from_generated( # pylint: disable=protected-access
self._client.container_registry.get_manifest_properties(repository, tag_or_digest, **kwargs),
repository_name=repository,
registry=self._endpoint,
)
@distributed_trace
def get_tag_properties(self, repository, tag, **kwargs):
# type: (str, str, **Any) -> ArtifactTagProperties
"""Get the properties for a tag
:param str repository: Name of the repository
:param str tag: The tag to get tag properties for
:rtype: ~azure.containerregistry.ArtifactTagProperties
:raises: ~azure.core.exceptions.ResourceNotFoundError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
for tag in client.list_tag_properties("my_repository"):
tag_properties = client.get_tag_properties("my_repository", tag.name)
"""
return ArtifactTagProperties._from_generated( # pylint: disable=protected-access
self._client.container_registry.get_tag_properties(repository, tag, **kwargs),
repository=repository,
)
@distributed_trace
def list_tag_properties(self, repository, **kwargs):
# type: (str, **Any) -> ItemPaged[ArtifactTagProperties]
"""List the tags for a repository
:param str repository: Name of the repository
:keyword order_by: Query parameter for ordering by time ascending or descending
:paramtype order_by: ~azure.containerregistry.ArtifactTagOrder or str
:keyword results_per_page: Number of repositories to return per page
:paramtype results_per_page: int
:returns: An iterable of :class:`~azure.containerregistry.ArtifactTagProperties`
:rtype: ~azure.core.paging.ItemPaged[~azure.containerregistry.ArtifactTagProperties]
:raises: ~azure.core.exceptions.ResourceNotFoundError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
for tag in client.list_tag_properties("my_repository"):
tag_properties = client.get_tag_properties("my_repository", tag.name)
"""
name = repository
last = kwargs.pop("last", None)
n = kwargs.pop("results_per_page", None)
orderby = kwargs.pop("order_by", None)
digest = kwargs.pop("digest", None)
cls = kwargs.pop(
"cls",
lambda objs: [
ArtifactTagProperties._from_generated(o, repository=repository) # pylint: disable=protected-access
for o in objs
],
)
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters["Accept"] = self._client._serialize.header( # pylint: disable=protected-access
"accept", accept, "str"
)
if not next_link:
# Construct URL
url = "/acr/v1/{name}/_tags"
path_format_arguments = {
"url": self._client._serialize.url( # pylint: disable=protected-access
"self._config.url",
self._client._config.url, # pylint: disable=protected-access
"str",
skip_quote=True,
),
"name": self._client._serialize.url("name", name, "str"), # pylint: disable=protected-access
}
url = self._client._client.format_url(url, **path_format_arguments) # pylint: disable=protected-access
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if last is not None:
query_parameters["last"] = self._client._serialize.query( # pylint: disable=protected-access
"last", last, "str"
)
if n is not None:
query_parameters["n"] = self._client._serialize.query( # pylint: disable=protected-access
"n", n, "int"
)
if orderby is not None:
query_parameters["orderby"] = self._client._serialize.query( # pylint: disable=protected-access
"orderby", orderby, "str"
)
if digest is not None:
query_parameters["digest"] = self._client._serialize.query( # pylint: disable=protected-access
"digest", digest, "str"
)
request = self._client._client.get( # pylint: disable=protected-access
url, query_parameters, header_parameters
)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
path_format_arguments = {
"url": self._client._serialize.url( # pylint: disable=protected-access
"self._client._config.url",
self._client._config.url, # pylint: disable=protected-access
"str",
skip_quote=True,
),
"name": self._client._serialize.url("name", name, "str"), # pylint: disable=protected-access
}
url = self._client._client.format_url(url, **path_format_arguments) # pylint: disable=protected-access
request = self._client._client.get( # pylint: disable=protected-access
url, query_parameters, header_parameters
)
return request
def extract_data(pipeline_response):
deserialized = self._client._deserialize("TagList", pipeline_response) # pylint: disable=protected-access
list_of_elem = deserialized.tag_attribute_bases
if cls:
list_of_elem = cls(list_of_elem)
link = None
if "Link" in pipeline_response.http_response.headers.keys():
link = _parse_next_link(pipeline_response.http_response.headers["Link"])
return link, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._client._deserialize.failsafe_deserialize( # pylint: disable=protected-access
AcrErrors, response
)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(get_next, extract_data)
@overload
def update_manifest_properties(self, repository, tag_or_digest, properties, **kwargs):
# type: (str, str, ArtifactManifestProperties, **Any) -> ArtifactManifestProperties
pass
@overload
def update_manifest_properties(self, repository, tag_or_digest, **kwargs):
# type: (str, str, **Any) -> ArtifactManifestProperties
pass
@distributed_trace
def update_manifest_properties(self, *args, **kwargs):
# type: (Union[str, ArtifactManifestProperties], **Any) -> ArtifactManifestProperties
"""Set the permission properties for a manifest.
The updatable properties include: `can_delete`, `can_list`, `can_read`, and `can_write`.
:param str repository: Repository the manifest belongs to.
:param str tag_or_digest: Tag or digest of the manifest.
        :param properties: The property values to be set. This is a positional-only
            parameter: provide either this object or the individual keyword parameters.
:type properties: ~azure.containerregistry.ArtifactManifestProperties
:keyword bool can_delete: Delete permissions for a manifest.
:keyword bool can_list: List permissions for a manifest.
:keyword bool can_read: Read permissions for a manifest.
:keyword bool can_write: Write permissions for a manifest.
:rtype: ~azure.containerregistry.ArtifactManifestProperties
:raises: ~azure.core.exceptions.ResourceNotFoundError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
for artifact in client.list_manifest_properties("my_repository"):
received_properties = client.update_manifest_properties(
"my_repository",
artifact.digest,
can_delete=False,
can_list=False,
can_read=False,
can_write=False,
)
"""
repository = args[0]
tag_or_digest = args[1]
properties = None
if len(args) == 3:
properties = args[2]
else:
properties = ArtifactManifestProperties()
properties.can_delete = kwargs.pop("can_delete", properties.can_delete)
properties.can_list = kwargs.pop("can_list", properties.can_list)
properties.can_read = kwargs.pop("can_read", properties.can_read)
properties.can_write = kwargs.pop("can_write", properties.can_write)
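        # Resolve a tag reference to its digest before issuing the update.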
if _is_tag(tag_or_digest):
tag_or_digest = self._get_digest_from_tag(repository, tag_or_digest)
return ArtifactManifestProperties._from_generated( # pylint: disable=protected-access
self._client.container_registry.update_manifest_properties(
repository,
tag_or_digest,
value=properties._to_generated(), # pylint: disable=protected-access
**kwargs
),
repository_name=repository,
registry=self._endpoint
)
@overload
def update_tag_properties(self, repository, tag, properties, **kwargs):
# type: (str, str, ArtifactTagProperties, **Any) -> ArtifactTagProperties
pass
@overload
def update_tag_properties(self, repository, tag, **kwargs):
# type: (str, str, **Any) -> ArtifactTagProperties
pass
@distributed_trace
def update_tag_properties(self, *args, **kwargs):
# type: (Union[str, ArtifactTagProperties], **Any) -> ArtifactTagProperties
"""Set the permission properties for a tag.
The updatable properties include: `can_delete`, `can_list`, `can_read`, and `can_write`.
:param str repository: Repository the tag belongs to.
:param str tag: Tag to set properties for.
        :param properties: The property values to be set. This is a positional-only
            parameter: provide either this object or the individual keyword parameters.
:type properties: ~azure.containerregistry.ArtifactTagProperties
:keyword bool can_delete: Delete permissions for a tag.
:keyword bool can_list: List permissions for a tag.
:keyword bool can_read: Read permissions for a tag.
:keyword bool can_write: Write permissions for a tag.
:rtype: ~azure.containerregistry.ArtifactTagProperties
:raises: ~azure.core.exceptions.ResourceNotFoundError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
tag_identifier = "latest"
received = client.update_tag_properties(
"my_repository",
tag_identifier,
can_delete=False,
can_list=False,
can_read=False,
can_write=False,
)
"""
repository = args[0]
tag = args[1]
properties = None
if len(args) == 3:
properties = args[2]
else:
properties = ArtifactTagProperties()
properties.can_delete = kwargs.pop("can_delete", properties.can_delete)
properties.can_list = kwargs.pop("can_list", properties.can_list)
properties.can_read = kwargs.pop("can_read", properties.can_read)
properties.can_write = kwargs.pop("can_write", properties.can_write)
return ArtifactTagProperties._from_generated( # pylint: disable=protected-access
self._client.container_registry.update_tag_attributes(
repository, tag, value=properties._to_generated(), **kwargs # pylint: disable=protected-access
),
repository=repository,
)
@overload
def update_repository_properties(self, repository, properties, **kwargs):
# type: (str, RepositoryProperties, **Any) -> RepositoryProperties
pass
@overload
def update_repository_properties(self, repository, **kwargs):
# type: (str, **Any) -> RepositoryProperties
pass
@distributed_trace
def update_repository_properties(self, *args, **kwargs):
# type: (Union[str, RepositoryProperties], **Any) -> RepositoryProperties
"""Set the permission properties of a repository.
The updatable properties include: `can_delete`, `can_list`, `can_read`, and `can_write`.
:param str repository: Name of the repository.
        :param properties: Properties to set for the repository. This is a positional-only
            parameter: provide either this object or the individual keyword parameters.
:type properties: ~azure.containerregistry.RepositoryProperties
:keyword bool can_delete: Delete permissions for a repository.
:keyword bool can_list: List permissions for a repository.
:keyword bool can_read: Read permissions for a repository.
:keyword bool can_write: Write permissions for a repository.
:rtype: ~azure.containerregistry.RepositoryProperties
:raises: ~azure.core.exceptions.ResourceNotFoundError
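        Example
        .. code-block:: python
            # Illustrative sketch mirroring the other examples in this file;
            # the endpoint and audience values are placeholders.
            from azure.containerregistry import ContainerRegistryClient
            from azure.identity import DefaultAzureCredential
            endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
            client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
            received = client.update_repository_properties(
                "my_repository",
                can_delete=False,
                can_list=False,
                can_read=False,
                can_write=False,
            )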
"""
repository, properties = None, None
if len(args) == 2:
repository = args[0]
properties = args[1]
else:
repository = args[0]
properties = RepositoryProperties()
properties.can_delete = kwargs.pop("can_delete", properties.can_delete)
properties.can_list = kwargs.pop("can_list", properties.can_list)
properties.can_read = kwargs.pop("can_read", properties.can_read)
properties.can_write = kwargs.pop("can_write", properties.can_write)
return RepositoryProperties._from_generated( # pylint: disable=protected-access
self._client.container_registry.update_properties(
repository, value=properties._to_generated(), **kwargs # pylint: disable=protected-access
)
)
@distributed_trace
def upload_manifest(
self, repository: str, manifest: "Union['OCIManifest', 'IO']", *, tag: "Optional[str]" = None, **kwargs: "Any"
) -> str:
"""Upload a manifest for an OCI artifact.
:param str repository: Name of the repository
:param manifest: The manifest to upload. Note: This must be a seekable stream.
:type manifest: ~azure.containerregistry.models.OCIManifest or IO
:keyword tag: Tag of the manifest.
:paramtype tag: str or None
:returns: The digest of the uploaded manifest, calculated by the registry.
:rtype: str
        :raises ValueError: If the parameter repository or manifest is None, or if
            the digest in the response does not match the digest of the uploaded manifest.
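        Example
        .. code-block:: python
            # Illustrative sketch (not from the original docs); assumes the
            # manifest JSON is available as a local, seekable file.
            from azure.containerregistry import ContainerRegistryClient
            from azure.identity import DefaultAzureCredential
            endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
            client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
            with open("manifest.json", "rb") as manifest:
                digest = client.upload_manifest("my_repository", manifest, tag="latest")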
"""
try:
data = manifest
if isinstance(manifest, OCIManifest):
data = _serialize_manifest(manifest)
tag_or_digest = tag
if tag is None:
tag_or_digest = _compute_digest(data)
_, _, response_headers = self._client.container_registry.create_manifest(
name=repository,
reference=tag_or_digest,
payload=data,
content_type=OCI_MANIFEST_MEDIA_TYPE,
headers={"Accept": OCI_MANIFEST_MEDIA_TYPE},
cls=_return_response,
**kwargs
)
digest = response_headers['Docker-Content-Digest']
except ValueError:
if repository is None or manifest is None:
raise ValueError("The parameter repository and manifest cannot be None.")
if not _validate_digest(data, digest):
raise ValueError("The digest in the response does not match the digest of the uploaded manifest.")
raise
return digest
@distributed_trace
def upload_blob(self, repository, data, **kwargs):
# type: (str, IO, **Any) -> str
"""Upload an artifact blob.
:param str repository: Name of the repository
:param data: The blob to upload. Note: This must be a seekable stream.
:type data: IO
:returns: The digest of the uploaded blob, calculated by the registry.
:rtype: str
:raises ValueError: If the parameter repository or data is None.
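        Example
        .. code-block:: python
            # Illustrative sketch; the layer file name is a placeholder.
            from azure.containerregistry import ContainerRegistryClient
            from azure.identity import DefaultAzureCredential
            endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
            client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
            with open("layer.tar.gz", "rb") as data:
                digest = client.upload_blob("my_repository", data)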
"""
try:
_, _, start_upload_response_headers = self._client.container_registry_blob.start_upload(
repository, cls=_return_response, **kwargs
)
_, _, upload_chunk_response_headers = self._client.container_registry_blob.upload_chunk(
start_upload_response_headers['Location'], data, cls=_return_response, **kwargs
)
digest = _compute_digest(data)
_, _, complete_upload_response_headers = self._client.container_registry_blob.complete_upload(
digest=digest, next_link=upload_chunk_response_headers['Location'], cls=_return_response, **kwargs
)
except ValueError:
if repository is None or data is None:
raise ValueError("The parameter repository and data cannot be None.")
raise
return complete_upload_response_headers['Docker-Content-Digest']
@distributed_trace
def download_manifest(self, repository, tag_or_digest, **kwargs):
# type: (str, str, **Any) -> DownloadManifestResult
"""Download the manifest for an OCI artifact.
:param str repository: Name of the repository
:param str tag_or_digest: The tag or digest of the manifest to download.
:returns: DownloadManifestResult
:rtype: ~azure.containerregistry.models.DownloadManifestResult
        :raises ValueError: If the parameter repository or tag_or_digest is None, or if
            the requested digest does not match the digest of the received manifest.
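        Example
        .. code-block:: python
            # Illustrative sketch; assumes the returned result exposes the
            # digest it was constructed with.
            from azure.containerregistry import ContainerRegistryClient
            from azure.identity import DefaultAzureCredential
            endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
            client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
            result = client.download_manifest("my_repository", "latest")
            print(result.digest)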
"""
try:
response, manifest_wrapper, _ = self._client.container_registry.get_manifest(
name=repository,
reference=tag_or_digest,
headers={"Accept": OCI_MANIFEST_MEDIA_TYPE},
cls=_return_response,
**kwargs
)
digest = response.http_response.headers['Docker-Content-Digest']
manifest = OCIManifest.deserialize(manifest_wrapper.serialize())
manifest_stream = _serialize_manifest(manifest)
except ValueError:
if repository is None or tag_or_digest is None:
raise ValueError("The parameter repository and tag_or_digest cannot be None.")
if not _validate_digest(manifest_stream, digest):
raise ValueError("The requested digest does not match the digest of the received manifest.")
raise
return DownloadManifestResult(digest=digest, data=manifest_stream, manifest=manifest)
@distributed_trace
def download_blob(self, repository, digest, **kwargs):
        # type: (str, str, **Any) -> Optional[DownloadBlobResult]
"""Download a blob that is part of an artifact.
:param str repository: Name of the repository
:param str digest: The digest of the blob to download.
:returns: DownloadBlobResult or None
:rtype: ~azure.containerregistry.DownloadBlobResult or None
:raises ValueError: If the parameter repository or digest is None.
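        Example
        .. code-block:: python
            # Illustrative sketch; the digest value is a placeholder and the
            # result's data attribute is assumed to be a readable stream.
            from azure.containerregistry import ContainerRegistryClient
            from azure.identity import DefaultAzureCredential
            endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
            client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
            result = client.download_blob("my_repository", "sha256:...")
            if result is not None:
                content = result.data.read()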
"""
try:
_, deserialized, _ = self._client.container_registry_blob.get_blob(
repository, digest, cls=_return_response, **kwargs
)
except ValueError:
if repository is None or digest is None:
raise ValueError("The parameter repository and digest cannot be None.")
raise
if deserialized:
blob_content = b''
for chunk in deserialized:
if chunk:
blob_content += chunk
return DownloadBlobResult(data=BytesIO(blob_content), digest=digest)
return None
@distributed_trace
def delete_manifest(self, repository, tag_or_digest, **kwargs):
# type: (str, str, **Any) -> None
"""Delete a manifest. If the manifest cannot be found or a response status code of
404 is returned an error will not be raised.
:param str repository: Name of the repository the manifest belongs to
:param str tag_or_digest: Tag or digest of the manifest to be deleted
:returns: None
:raises: ~azure.core.exceptions.HttpResponseError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
client.delete_manifest("my_repository", "my_tag_or_digest")
"""
if _is_tag(tag_or_digest):
tag_or_digest = self._get_digest_from_tag(repository, tag_or_digest)
self._client.container_registry.delete_manifest(repository, tag_or_digest, **kwargs)
@distributed_trace
def delete_blob(self, repository, tag_or_digest, **kwargs):
# type: (str, str, **Any) -> None
"""Delete a blob. If the blob cannot be found or a response status code of
404 is returned an error will not be raised.
:param str repository: Name of the repository the manifest belongs to
:param str tag_or_digest: Tag or digest of the blob to be deleted
:returns: None
:raises: ~azure.core.exceptions.HttpResponseError
Example
.. code-block:: python
from azure.containerregistry import ContainerRegistryClient
from azure.identity import DefaultAzureCredential
endpoint = os.environ["CONTAINERREGISTRY_ENDPOINT"]
client = ContainerRegistryClient(endpoint, DefaultAzureCredential(), audience="my_audience")
client.delete_blob("my_repository", "my_tag_or_digest")
"""
if _is_tag(tag_or_digest):
tag_or_digest = self._get_digest_from_tag(repository, tag_or_digest)
self._client.container_registry_blob.delete_blob(repository, tag_or_digest, **kwargs)
| {
"content_hash": "39754124242a03049c8914589563d7c8",
"timestamp": "",
"source": "github",
"line_count": 940,
"max_line_length": 119,
"avg_line_length": 45.32659574468085,
"alnum_prop": 0.6027648039054615,
"repo_name": "Azure/azure-sdk-for-python",
"id": "27beca99775716c644bdcf54c65202cce4211392",
"size": "42773",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/containerregistry/azure-containerregistry/azure/containerregistry/_container_registry_client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import mock
import unittest
import urllib
from allura.lib.spam.akismetfilter import AKISMET_AVAILABLE, AkismetSpamFilter
@unittest.skipIf(not AKISMET_AVAILABLE, "Akismet not available")
class TestAkismet(unittest.TestCase):
@mock.patch('allura.lib.spam.akismetfilter.akismet')
def setUp(self, akismet_lib):
self.akismet = AkismetSpamFilter({})
def side_effect(*args, **kw):
# side effect to test that data being sent to
# akismet can be successfully urlencoded
urllib.urlencode(kw.get('data', {}))
self.akismet.service.comment_check = mock.Mock(side_effect=side_effect)
self.fake_artifact = mock.Mock(**{'url.return_value': 'artifact url'})
self.fake_user = mock.Mock(display_name=u'Søme User',
email_addresses=['user@domain'])
self.fake_headers = dict(
USER_AGENT='some browser',
REFERER='some url')
self.content = u'spåm text'
self.expected_data = dict(
comment_content=self.content.encode('utf8'),
comment_type='comment',
user_ip='some ip',
user_agent='some browser',
referrer='some url')
@mock.patch('allura.lib.spam.akismetfilter.c')
@mock.patch('allura.lib.spam.akismetfilter.request')
def test_check(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.akismet.service.comment_check.side_effect({'side_effect': ''})
self.akismet.check(self.content)
self.akismet.service.comment_check.assert_called_once_with(
self.content,
data=self.expected_data, build_data=False)
@mock.patch('allura.lib.spam.akismetfilter.c')
@mock.patch('allura.lib.spam.akismetfilter.request')
def test_check_with_explicit_content_type(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.akismet.check(self.content, content_type='some content type')
self.expected_data['comment_type'] = 'some content type'
self.akismet.service.comment_check.assert_called_once_with(
self.content,
data=self.expected_data, build_data=False)
@mock.patch('allura.lib.spam.akismetfilter.c')
@mock.patch('allura.lib.spam.akismetfilter.request')
def test_check_with_artifact(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.akismet.check(self.content, artifact=self.fake_artifact)
expected_data = self.expected_data
expected_data['permalink'] = 'artifact url'
self.akismet.service.comment_check.assert_called_once_with(
self.content,
data=expected_data, build_data=False)
@mock.patch('allura.lib.spam.akismetfilter.c')
@mock.patch('allura.lib.spam.akismetfilter.request')
def test_check_with_user(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.akismet.check(self.content, user=self.fake_user)
expected_data = self.expected_data
expected_data.update(comment_author=u'Søme User'.encode('utf8'),
comment_author_email='user@domain')
self.akismet.service.comment_check.assert_called_once_with(
self.content,
data=expected_data, build_data=False)
@mock.patch('allura.lib.spam.akismetfilter.c')
@mock.patch('allura.lib.spam.akismetfilter.request')
def test_check_with_implicit_user(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = self.fake_user
self.akismet.check(self.content)
expected_data = self.expected_data
expected_data.update(comment_author=u'Søme User'.encode('utf8'),
comment_author_email='user@domain')
self.akismet.service.comment_check.assert_called_once_with(
self.content,
data=expected_data, build_data=False)
@mock.patch('allura.lib.spam.akismetfilter.c')
@mock.patch('allura.lib.spam.akismetfilter.request')
def test_submit_spam(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.akismet.submit_spam(self.content)
self.akismet.service.submit_spam.assert_called_once_with(
self.content, data=self.expected_data, build_data=False)
@mock.patch('allura.lib.spam.akismetfilter.c')
@mock.patch('allura.lib.spam.akismetfilter.request')
def test_submit_ham(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.akismet.submit_ham(self.content)
self.akismet.service.submit_ham.assert_called_once_with(
self.content, data=self.expected_data, build_data=False)
| {
"content_hash": "ecc2a19d09647d14419f76c6414136aa",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 79,
"avg_line_length": 43.29059829059829,
"alnum_prop": 0.6416584402764067,
"repo_name": "lym/allura-git",
"id": "79802c3d7f768f0c350b6610089e72cb886045b3",
"size": "5965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Allura/allura/tests/unit/spam/test_akismet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7850"
},
{
"name": "CSS",
"bytes": "167419"
},
{
"name": "HTML",
"bytes": "787868"
},
{
"name": "JavaScript",
"bytes": "808388"
},
{
"name": "Makefile",
"bytes": "9792"
},
{
"name": "Puppet",
"bytes": "6872"
},
{
"name": "Python",
"bytes": "4115536"
},
{
"name": "RAML",
"bytes": "23257"
},
{
"name": "Ruby",
"bytes": "5726"
},
{
"name": "Shell",
"bytes": "115283"
},
{
"name": "XSLT",
"bytes": "3357"
}
],
"symlink_target": ""
} |
import logging
import sys
from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop
from app.main import create_app
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
logger = logging.getLogger(__name__)
class AmillerImTestCases(AioHTTPTestCase):
    def get_app(self, loop):
        return create_app(loop)
def setUp(self):
super(AmillerImTestCases, self).setUp()
def tearDown(self):
super(AmillerImTestCases, self).tearDown()
@unittest_run_loop
async def test_root(self):
# Sanity Smoke Test
        req = await self.client.request('GET', '/')
        assert req.status == 200  # the root route should respond successfully
| {
"content_hash": "1268493db6e94ae248cb36a15e817f2a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 65,
"avg_line_length": 22.48148148148148,
"alnum_prop": 0.6952224052718287,
"repo_name": "atm08e/amiller.im-py3",
"id": "39ee2d7e4825b301ab15f0873ba5e1cd2909d831",
"size": "607",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_amiller_im.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "58995"
},
{
"name": "Dockerfile",
"bytes": "701"
},
{
"name": "HTML",
"bytes": "17011"
},
{
"name": "JavaScript",
"bytes": "663251"
},
{
"name": "Makefile",
"bytes": "1442"
},
{
"name": "Python",
"bytes": "13256"
}
],
"symlink_target": ""
} |
"""Create the mesa driver. This defers to a Docker container
with the build_mesa.sh script."""
import argparse
import subprocess
# TODO(dogben): In the future, it might be simpler to build the docker image as
# part of this script so that we don't need to push it to the container repo.
# Doing so would make this script more repeatable, since someone could
# accidentally change the Docker image that "v2" points to.
DOCKER_IMAGE = 'gcr.io/skia-public/mesa-driver-builder:v2'
BUILD_SCRIPT = '/opt/build_mesa.sh'
MESA_VERSION = '18.3.3'
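# The assembled command looks like this (illustrative; the OUT path varies):
#   docker run --volume /path/to/target:/OUT --env MESA_VERSION=18.3.3 \
#       gcr.io/skia-public/mesa-driver-builder:v2 /opt/build_mesa.sh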
def create_asset(target_dir):
"""Create the asset."""
cmd = [
'docker', 'run', '--volume', '%s:/OUT' % target_dir,
'--env', 'MESA_VERSION=%s' % MESA_VERSION,
DOCKER_IMAGE, BUILD_SCRIPT
]
print('Running docker cmd', cmd)
subprocess.check_output(cmd)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--target_dir', '-t', required=True)
args = parser.parse_args()
create_asset(args.target_dir)
if __name__ == '__main__':
main()
| {
"content_hash": "7907e716921656d1954e62558b66886f",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 79,
"avg_line_length": 28.47222222222222,
"alnum_prop": 0.6848780487804879,
"repo_name": "HalCanary/skia-hc",
"id": "279360f50a8db24e528090d5e22db840feb345a8",
"size": "1183",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "infra/bots/assets/mesa_intel_driver_linux/create.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "1277297"
},
{
"name": "Batchfile",
"bytes": "865"
},
{
"name": "C",
"bytes": "505166"
},
{
"name": "C#",
"bytes": "4683"
},
{
"name": "C++",
"bytes": "32234337"
},
{
"name": "CMake",
"bytes": "2850"
},
{
"name": "CSS",
"bytes": "3078"
},
{
"name": "Dockerfile",
"bytes": "14764"
},
{
"name": "GLSL",
"bytes": "109164"
},
{
"name": "Go",
"bytes": "135327"
},
{
"name": "HTML",
"bytes": "1321397"
},
{
"name": "Java",
"bytes": "167849"
},
{
"name": "JavaScript",
"bytes": "463920"
},
{
"name": "Lex",
"bytes": "2521"
},
{
"name": "Lua",
"bytes": "70982"
},
{
"name": "Makefile",
"bytes": "13502"
},
{
"name": "Objective-C",
"bytes": "83351"
},
{
"name": "Objective-C++",
"bytes": "366996"
},
{
"name": "PHP",
"bytes": "139510"
},
{
"name": "PowerShell",
"bytes": "1432"
},
{
"name": "Python",
"bytes": "1055437"
},
{
"name": "Shell",
"bytes": "95010"
}
],
"symlink_target": ""
} |
from setuptools import find_packages, setup
setup(name='Fun Views',
version='0.0.5',
description='Functional generic views and view utils.',
url='https://bitbucket.org/keithasaurus/fun_views',
packages=find_packages(exclude=['tests']))
| {
"content_hash": "0706e1ce82d953ef16243d5446d49e35",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 61,
"avg_line_length": 37.285714285714285,
"alnum_prop": 0.6934865900383141,
"repo_name": "keithasaurus/django_fun_views",
"id": "5d0ad6d45a4cb5f1ad3e0d20455d832836433756",
"size": "284",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "80355"
}
],
"symlink_target": ""
} |
from fab_deploy import utils
from fab_deploy.system import aptitude_install
__all__ = ['java_install']
@utils.run_as_sudo
def java_install():
''' Installs JRE '''
    aptitude_install('default-jre')
| {
"content_hash": "5a49c9d157595cbe8e7f867f10667378",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 46,
"avg_line_length": 22.666666666666668,
"alnum_prop": 0.696078431372549,
"repo_name": "futurecolors/django-fabdeploy-extensions",
"id": "f796422ae2afe19dd0b0435a37a2c599d201b579",
"size": "228",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fabdeploy_extensions/extensions/java.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "18736"
},
{
"name": "Shell",
"bytes": "11039"
}
],
"symlink_target": ""
} |
"""Fichier contenant la volonté HisserVoiles"""
import re
from secondaires.navigation.equipage.ordres.hisser_voile import HisserVoile
from secondaires.navigation.equipage.ordres.long_deplacer import LongDeplacer
from secondaires.navigation.equipage.volonte import Volonte
class HisserVoiles(Volonte):
"""Classe représentant une volonté.
Cette volonté choisit un ou plusieurs matelots pour hisser une
ou plusieurs voiles.
"""
cle = "hisser_voiles"
ordre_court = re.compile(r"^h([0-9\*]*)v$", re.I)
ordre_long = re.compile(r"^hisser\s+([0-9\*]*)\s*voiles?$", re.I)
def __init__(self, navire, nombre=1):
"""Construit une volonté."""
Volonte.__init__(self, navire)
self.nombre = nombre
@property
def arguments(self):
"""Propriété à redéfinir si la volonté comprend des arguments."""
return (self.nombre, )
def choisir_matelots(self, exception=None):
"""Retourne le matelot le plus apte à accomplir la volonté."""
proches = []
matelots = self.navire.equipage.get_matelots_libres(exception)
graph = self.navire.graph
voiles = self.navire.voiles
voiles = [v for v in voiles if not v.hissee]
for voile in voiles:
proche = []
for matelot in matelots:
origine = matelot.salle.mnemonic
destination = voile.parent.mnemonic
if origine == destination:
proche.append((matelot, [], voile))
else:
chemin = graph.get((origine, destination))
if chemin:
proche.append((matelot, chemin, voile))
proches.append(min(proche, key=lambda c: len(c[1])))
proches = proches[:self.nombre]
return proches
def executer(self, proches):
"""Exécute la volonté."""
navire = self.navire
for matelot, sorties, voile in proches:
ordres = []
if sorties:
aller = LongDeplacer(matelot, navire, *sorties)
ordres.append(aller)
hisser = HisserVoile(matelot, navire)
ordres.append(hisser)
ordres.append(self.revenir_affectation(matelot))
self.ajouter_ordres(matelot, ordres)
def crier_ordres(self, personnage):
"""On fait crier l'ordre au personnage."""
nombre = self.nombre
if nombre is None:
msg = "{} s'écrie : toutes voiles dehors".format(
personnage.distinction_audible)
else:
msg = "{} s'écrie : hissez-moi ".format(personnage.distinction_audible)
if nombre < 0:
msg += "ces voiles"
elif nombre == 1:
msg += "une voile"
else:
msg += "{} voiles".format(nombre)
msg += ", et qu'ça saute !"
self.navire.envoyer(msg)
@classmethod
def extraire_arguments(cls, navire, nombre):
"""Extrait les arguments de la volonté."""
if nombre == "":
nombre = 1
elif nombre == "*":
nombre = None
else:
nombre = int(nombre)
return (nombre, )
| {
"content_hash": "4f747a52d592b82831d71247db5302b0",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 83,
"avg_line_length": 33.677083333333336,
"alnum_prop": 0.5663470460872255,
"repo_name": "stormi/tsunami",
"id": "e6508db190414fd648d447ace4ab93ab26283d92",
"size": "4813",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/secondaires/navigation/equipage/volontes/hisser_voiles.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7188300"
},
{
"name": "Ruby",
"bytes": "373"
}
],
"symlink_target": ""
} |
import logging
from ailment import Expr
from .engine_base import SimplifierAILEngine, SimplifierAILState
from .optimization_pass import OptimizationPass, OptimizationPassStage
_l = logging.getLogger(name=__name__)
class ModSimplifierAILEngine(SimplifierAILEngine):
def _ail_handle_Sub(self, expr):
operand_0 = self._expr(expr.operands[0])
operand_1 = self._expr(expr.operands[1])
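        # Look for the strength-reduced modulo pattern x - (x / c) * c,
        # possibly with a Convert wrapped around the inner division.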
x_0, c_0, x_1, c_1 = None, None, None, None
if isinstance(operand_1, Expr.BinaryOp) and isinstance(operand_1.operands[1], Expr.Const):
if operand_1.op == 'Mul':
if isinstance(operand_1.operands[0], Expr.BinaryOp) \
and isinstance(operand_1.operands[0].operands[1], Expr.Const) \
and operand_1.operands[0].op in ['Div', 'DivMod']:
x_0 = operand_1.operands[0].operands[0]
x_1 = operand_0
c_0 = operand_1.operands[1]
c_1 = operand_1.operands[0].operands[1]
elif isinstance(operand_1.operands[0], Expr.Convert) \
and isinstance(operand_1.operands[0].operand, Expr.BinaryOp) \
and operand_1.operands[0].operand.op in ['Div', 'DivMod']:
x_0 = operand_1.operands[0].operand.operands[0]
x_1 = operand_0
c_0 = operand_1.operands[1]
c_1 = operand_1.operands[0].operand.operands[1]
if x_0 is not None and x_1 is not None and x_0.likes(x_1) and c_0.value == c_1.value:
return Expr.BinaryOp(expr.idx, 'DivMod', [x_0, c_0], expr.signed, **expr.tags)
if (operand_0, operand_1) != (expr.operands[0], expr.operands[1]):
return Expr.BinaryOp(expr.idx, 'Sub', [operand_0, operand_1], expr.signed, **expr.tags)
return expr
class ModSimplifier(OptimizationPass):
"""
Simplifies optimized forms of modulo computation back to "mod".
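    For example (illustrative), a compiler may lower ``x % c`` into
    ``x - (x / c) * c``; this pass pattern-matches that shape and folds it
    back into a single ``DivMod`` operation.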
"""
ARCHES = ["X86", "AMD64", "ARMCortexM", "ARMHF", "ARMEL", ]
PLATFORMS = ["linux", "windows"]
STAGE = OptimizationPassStage.AFTER_GLOBAL_SIMPLIFICATION
NAME = "Simplify optimized mod forms"
DESCRIPTION = __doc__.strip()
def __init__(self, func, **kwargs):
super().__init__(func, **kwargs)
self.state = SimplifierAILState(self.project.arch)
self.engine = ModSimplifierAILEngine()
self.analyze()
def _check(self):
return True, None
def _analyze(self, cache=None):
for block in list(self._graph.nodes()):
new_block = block
old_block = None
while new_block != old_block:
old_block = new_block
new_block = self.engine.process(state=self.state.copy(), block=old_block.copy())
_l.debug("new block: %s", new_block.statements)
self._update_block(block, new_block)
| {
"content_hash": "9cec95cc5661de0333a55d50567e7eca",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 101,
"avg_line_length": 37.82051282051282,
"alnum_prop": 0.5772881355932203,
"repo_name": "angr/angr",
"id": "6575b389a9f49552df4f7b61d52078e5f6f7fe7e",
"size": "2950",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "angr/analyses/decompiler/optimization_passes/mod_simplifier.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6694"
},
{
"name": "C++",
"bytes": "146292"
},
{
"name": "Makefile",
"bytes": "946"
},
{
"name": "Python",
"bytes": "27717304"
}
],
"symlink_target": ""
} |
"""Tests for the Heos Media Player platform."""
import asyncio
from pyheos import CommandFailedError, const
from homeassistant.components.heos import media_player
from homeassistant.components.heos.const import (
DATA_SOURCE_MANAGER,
DOMAIN,
SIGNAL_HEOS_UPDATED,
)
from homeassistant.components.media_player.const import (
ATTR_INPUT_SOURCE,
ATTR_INPUT_SOURCE_LIST,
ATTR_MEDIA_ALBUM_NAME,
ATTR_MEDIA_ARTIST,
ATTR_MEDIA_CONTENT_ID,
ATTR_MEDIA_CONTENT_TYPE,
ATTR_MEDIA_DURATION,
ATTR_MEDIA_ENQUEUE,
ATTR_MEDIA_POSITION,
ATTR_MEDIA_POSITION_UPDATED_AT,
ATTR_MEDIA_SHUFFLE,
ATTR_MEDIA_TITLE,
ATTR_MEDIA_VOLUME_LEVEL,
ATTR_MEDIA_VOLUME_MUTED,
DOMAIN as MEDIA_PLAYER_DOMAIN,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_PLAYLIST,
MEDIA_TYPE_URL,
SERVICE_CLEAR_PLAYLIST,
SERVICE_PLAY_MEDIA,
SERVICE_SELECT_SOURCE,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_STOP,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
SERVICE_MEDIA_NEXT_TRACK,
SERVICE_MEDIA_PAUSE,
SERVICE_MEDIA_PLAY,
SERVICE_MEDIA_PREVIOUS_TRACK,
SERVICE_MEDIA_STOP,
SERVICE_SHUFFLE_SET,
SERVICE_VOLUME_MUTE,
SERVICE_VOLUME_SET,
STATE_IDLE,
STATE_PLAYING,
STATE_UNAVAILABLE,
)
from homeassistant.setup import async_setup_component
async def setup_platform(hass, config_entry, config):
"""Set up the media player platform for testing."""
config_entry.add_to_hass(hass)
assert await async_setup_component(hass, DOMAIN, config)
await hass.async_block_till_done()
async def test_state_attributes(hass, config_entry, config, controller):
"""Tests the state attributes."""
await setup_platform(hass, config_entry, config)
state = hass.states.get("media_player.test_player")
assert state.state == STATE_IDLE
assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.25
assert not state.attributes[ATTR_MEDIA_VOLUME_MUTED]
assert state.attributes[ATTR_MEDIA_CONTENT_ID] == "1"
assert state.attributes[ATTR_MEDIA_CONTENT_TYPE] == MEDIA_TYPE_MUSIC
assert ATTR_MEDIA_DURATION not in state.attributes
assert ATTR_MEDIA_POSITION not in state.attributes
assert state.attributes[ATTR_MEDIA_TITLE] == "Song"
assert state.attributes[ATTR_MEDIA_ARTIST] == "Artist"
assert state.attributes[ATTR_MEDIA_ALBUM_NAME] == "Album"
assert not state.attributes[ATTR_MEDIA_SHUFFLE]
assert state.attributes["media_album_id"] == 1
assert state.attributes["media_queue_id"] == 1
assert state.attributes["media_source_id"] == 1
assert state.attributes["media_station"] == "Station Name"
assert state.attributes["media_type"] == "Station"
assert state.attributes[ATTR_FRIENDLY_NAME] == "Test Player"
assert (
state.attributes[ATTR_SUPPORTED_FEATURES]
== SUPPORT_PLAY
| SUPPORT_PAUSE
| SUPPORT_STOP
| SUPPORT_NEXT_TRACK
| SUPPORT_PREVIOUS_TRACK
| media_player.BASE_SUPPORTED_FEATURES
)
assert ATTR_INPUT_SOURCE not in state.attributes
assert (
state.attributes[ATTR_INPUT_SOURCE_LIST]
== hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list
)
async def test_updates_from_signals(hass, config_entry, config, controller, favorites):
"""Tests dispatched signals update player."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# Test player does not update for other players
player.state = const.PLAY_STATE_PLAY
player.heos.dispatcher.send(
const.SIGNAL_PLAYER_EVENT, 2, const.EVENT_PLAYER_STATE_CHANGED
)
await hass.async_block_till_done()
state = hass.states.get("media_player.test_player")
assert state.state == STATE_IDLE
# Test player_update standard events
player.state = const.PLAY_STATE_PLAY
player.heos.dispatcher.send(
const.SIGNAL_PLAYER_EVENT, player.player_id, const.EVENT_PLAYER_STATE_CHANGED
)
await hass.async_block_till_done()
state = hass.states.get("media_player.test_player")
assert state.state == STATE_PLAYING
# Test player_update progress events
player.now_playing_media.duration = 360000
player.now_playing_media.current_position = 1000
player.heos.dispatcher.send(
const.SIGNAL_PLAYER_EVENT,
player.player_id,
const.EVENT_PLAYER_NOW_PLAYING_PROGRESS,
)
await hass.async_block_till_done()
state = hass.states.get("media_player.test_player")
assert state.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] is not None
assert state.attributes[ATTR_MEDIA_DURATION] == 360
assert state.attributes[ATTR_MEDIA_POSITION] == 1
async def test_updates_from_connection_event(
hass, config_entry, config, controller, caplog
):
"""Tests player updates from connection event after connection failure."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
event = asyncio.Event()
async def set_signal():
event.set()
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_HEOS_UPDATED, set_signal)
# Connected
player.available = True
player.heos.dispatcher.send(const.SIGNAL_HEOS_EVENT, const.EVENT_CONNECTED)
await event.wait()
state = hass.states.get("media_player.test_player")
assert state.state == STATE_IDLE
assert controller.load_players.call_count == 1
# Disconnected
event.clear()
player.reset_mock()
controller.load_players.reset_mock()
player.available = False
player.heos.dispatcher.send(const.SIGNAL_HEOS_EVENT, const.EVENT_DISCONNECTED)
await event.wait()
state = hass.states.get("media_player.test_player")
assert state.state == STATE_UNAVAILABLE
assert controller.load_players.call_count == 0
# Connected handles refresh failure
event.clear()
player.reset_mock()
controller.load_players.reset_mock()
controller.load_players.side_effect = CommandFailedError(None, "Failure", 1)
player.available = True
player.heos.dispatcher.send(const.SIGNAL_HEOS_EVENT, const.EVENT_CONNECTED)
await event.wait()
state = hass.states.get("media_player.test_player")
assert state.state == STATE_IDLE
assert controller.load_players.call_count == 1
assert "Unable to refresh players" in caplog.text
async def test_updates_from_sources_updated(
hass, config_entry, config, controller, input_sources
):
"""Tests player updates from changes in sources list."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
event = asyncio.Event()
async def set_signal():
event.set()
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_HEOS_UPDATED, set_signal)
input_sources.clear()
player.heos.dispatcher.send(
const.SIGNAL_CONTROLLER_EVENT, const.EVENT_SOURCES_CHANGED, {}
)
await event.wait()
source_list = hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list
assert len(source_list) == 2
state = hass.states.get("media_player.test_player")
assert state.attributes[ATTR_INPUT_SOURCE_LIST] == source_list
async def test_updates_from_players_changed(
hass, config_entry, config, controller, change_data, caplog
):
"""Test player updates from changes to available players."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
event = asyncio.Event()
async def set_signal():
event.set()
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_HEOS_UPDATED, set_signal)
assert hass.states.get("media_player.test_player").state == STATE_IDLE
player.state = const.PLAY_STATE_PLAY
player.heos.dispatcher.send(
const.SIGNAL_CONTROLLER_EVENT, const.EVENT_PLAYERS_CHANGED, change_data
)
await event.wait()
await hass.async_block_till_done()
assert hass.states.get("media_player.test_player").state == STATE_PLAYING
async def test_updates_from_players_changed_new_ids(
hass, config_entry, config, controller, change_data_mapped_ids, caplog
):
"""Test player updates from changes to available players."""
await setup_platform(hass, config_entry, config)
device_registry = await hass.helpers.device_registry.async_get_registry()
entity_registry = await hass.helpers.entity_registry.async_get_registry()
player = controller.players[1]
event = asyncio.Event()
# Assert device registry matches current id
assert device_registry.async_get_device({(DOMAIN, 1)})
# Assert entity registry matches current id
assert (
entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "1")
== "media_player.test_player"
)
# Trigger update
async def set_signal():
event.set()
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_HEOS_UPDATED, set_signal)
player.heos.dispatcher.send(
const.SIGNAL_CONTROLLER_EVENT,
const.EVENT_PLAYERS_CHANGED,
change_data_mapped_ids,
)
await event.wait()
# Assert device registry identifiers were updated
assert len(device_registry.devices) == 1
assert device_registry.async_get_device({(DOMAIN, 101)})
# Assert entity registry unique id was updated
assert len(entity_registry.entities) == 1
assert (
entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "101")
== "media_player.test_player"
)
async def test_updates_from_user_changed(hass, config_entry, config, controller):
"""Tests player updates from changes in user."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
event = asyncio.Event()
async def set_signal():
event.set()
hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_HEOS_UPDATED, set_signal)
controller.is_signed_in = False
controller.signed_in_username = None
player.heos.dispatcher.send(
const.SIGNAL_CONTROLLER_EVENT, const.EVENT_USER_CHANGED, None
)
await event.wait()
source_list = hass.data[DOMAIN][DATA_SOURCE_MANAGER].source_list
assert len(source_list) == 1
state = hass.states.get("media_player.test_player")
assert state.attributes[ATTR_INPUT_SOURCE_LIST] == source_list
async def test_clear_playlist(hass, config_entry, config, controller, caplog):
"""Test the clear playlist service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_CLEAR_PLAYLIST,
{ATTR_ENTITY_ID: "media_player.test_player"},
blocking=True,
)
assert player.clear_queue.call_count == 1
player.clear_queue.reset_mock()
player.clear_queue.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to clear playlist: Failure (1)" in caplog.text
async def test_pause(hass, config_entry, config, controller, caplog):
"""Test the pause service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_MEDIA_PAUSE,
{ATTR_ENTITY_ID: "media_player.test_player"},
blocking=True,
)
assert player.pause.call_count == 1
player.pause.reset_mock()
player.pause.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to pause: Failure (1)" in caplog.text
async def test_play(hass, config_entry, config, controller, caplog):
"""Test the play service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_MEDIA_PLAY,
{ATTR_ENTITY_ID: "media_player.test_player"},
blocking=True,
)
assert player.play.call_count == 1
player.play.reset_mock()
player.play.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to play: Failure (1)" in caplog.text
async def test_previous_track(hass, config_entry, config, controller, caplog):
"""Test the previous track service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_MEDIA_PREVIOUS_TRACK,
{ATTR_ENTITY_ID: "media_player.test_player"},
blocking=True,
)
assert player.play_previous.call_count == 1
player.play_previous.reset_mock()
player.play_previous.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to move to previous track: Failure (1)" in caplog.text
async def test_next_track(hass, config_entry, config, controller, caplog):
"""Test the next track service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_MEDIA_NEXT_TRACK,
{ATTR_ENTITY_ID: "media_player.test_player"},
blocking=True,
)
assert player.play_next.call_count == 1
player.play_next.reset_mock()
player.play_next.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to move to next track: Failure (1)" in caplog.text
async def test_stop(hass, config_entry, config, controller, caplog):
"""Test the stop service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_MEDIA_STOP,
{ATTR_ENTITY_ID: "media_player.test_player"},
blocking=True,
)
assert player.stop.call_count == 1
player.stop.reset_mock()
player.stop.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to stop: Failure (1)" in caplog.text
async def test_volume_mute(hass, config_entry, config, controller, caplog):
"""Test the volume mute service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_VOLUME_MUTE,
{ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_MUTED: True},
blocking=True,
)
assert player.set_mute.call_count == 1
player.set_mute.reset_mock()
player.set_mute.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to set mute: Failure (1)" in caplog.text
async def test_shuffle_set(hass, config_entry, config, controller, caplog):
"""Test the shuffle set service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_SHUFFLE_SET,
{ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_SHUFFLE: True},
blocking=True,
)
player.set_play_mode.assert_called_once_with(player.repeat, True)
player.set_play_mode.reset_mock()
player.set_play_mode.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to set shuffle: Failure (1)" in caplog.text
async def test_volume_set(hass, config_entry, config, controller, caplog):
"""Test the volume set service."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_VOLUME_SET,
{ATTR_ENTITY_ID: "media_player.test_player", ATTR_MEDIA_VOLUME_LEVEL: 1},
blocking=True,
)
player.set_volume.assert_called_once_with(100)
player.set_volume.reset_mock()
player.set_volume.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to set volume level: Failure (1)" in caplog.text
async def test_select_favorite(hass, config_entry, config, controller, favorites):
"""Tests selecting a music service favorite and state."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# Test set music service preset
favorite = favorites[1]
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: "media_player.test_player", ATTR_INPUT_SOURCE: favorite.name},
blocking=True,
)
player.play_favorite.assert_called_once_with(1)
# Test state is matched by station name
player.now_playing_media.station = favorite.name
player.heos.dispatcher.send(
const.SIGNAL_PLAYER_EVENT, player.player_id, const.EVENT_PLAYER_STATE_CHANGED
)
await hass.async_block_till_done()
state = hass.states.get("media_player.test_player")
assert state.attributes[ATTR_INPUT_SOURCE] == favorite.name
async def test_select_radio_favorite(hass, config_entry, config, controller, favorites):
"""Tests selecting a radio favorite and state."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# Test set radio preset
favorite = favorites[2]
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: "media_player.test_player", ATTR_INPUT_SOURCE: favorite.name},
blocking=True,
)
player.play_favorite.assert_called_once_with(2)
# Test state is matched by album id
player.now_playing_media.station = "Classical"
player.now_playing_media.album_id = favorite.media_id
player.heos.dispatcher.send(
const.SIGNAL_PLAYER_EVENT, player.player_id, const.EVENT_PLAYER_STATE_CHANGED
)
await hass.async_block_till_done()
state = hass.states.get("media_player.test_player")
assert state.attributes[ATTR_INPUT_SOURCE] == favorite.name
async def test_select_radio_favorite_command_error(
hass, config_entry, config, controller, favorites, caplog
):
"""Tests command error logged when playing favorite."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# Test set radio preset
favorite = favorites[2]
player.play_favorite.side_effect = CommandFailedError(None, "Failure", 1)
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: "media_player.test_player", ATTR_INPUT_SOURCE: favorite.name},
blocking=True,
)
player.play_favorite.assert_called_once_with(2)
assert "Unable to select source: Failure (1)" in caplog.text
async def test_select_input_source(
hass, config_entry, config, controller, input_sources
):
"""Tests selecting input source and state."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
# Test proper service called
input_source = input_sources[0]
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_SELECT_SOURCE,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_INPUT_SOURCE: input_source.name,
},
blocking=True,
)
player.play_input_source.assert_called_once_with(input_source)
# Test state is matched by media id
player.now_playing_media.source_id = const.MUSIC_SOURCE_AUX_INPUT
player.now_playing_media.media_id = const.INPUT_AUX_IN_1
player.heos.dispatcher.send(
const.SIGNAL_PLAYER_EVENT, player.player_id, const.EVENT_PLAYER_STATE_CHANGED
)
await hass.async_block_till_done()
state = hass.states.get("media_player.test_player")
assert state.attributes[ATTR_INPUT_SOURCE] == input_source.name
async def test_select_input_unknown(hass, config_entry, config, controller, caplog):
"""Tests selecting an unknown input."""
await setup_platform(hass, config_entry, config)
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: "media_player.test_player", ATTR_INPUT_SOURCE: "Unknown"},
blocking=True,
)
assert "Unknown source: Unknown" in caplog.text
async def test_select_input_command_error(
hass, config_entry, config, controller, caplog, input_sources
):
"""Tests selecting an unknown input."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
input_source = input_sources[0]
player.play_input_source.side_effect = CommandFailedError(None, "Failure", 1)
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_SELECT_SOURCE,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_INPUT_SOURCE: input_source.name,
},
blocking=True,
)
player.play_input_source.assert_called_once_with(input_source)
assert "Unable to select source: Failure (1)" in caplog.text
async def test_unload_config_entry(hass, config_entry, config, controller):
"""Test the player is set unavailable when the config entry is unloaded."""
await setup_platform(hass, config_entry, config)
await config_entry.async_unload(hass)
assert hass.states.get("media_player.test_player").state == STATE_UNAVAILABLE
async def test_play_media_url(hass, config_entry, config, controller, caplog):
"""Test the play media service with type url."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
url = "http://news/podcast.mp3"
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_URL,
ATTR_MEDIA_CONTENT_ID: url,
},
blocking=True,
)
player.play_url.assert_called_once_with(url)
player.play_url.reset_mock()
player.play_url.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to play media: Failure (1)" in caplog.text
async def test_play_media_music(hass, config_entry, config, controller, caplog):
"""Test the play media service with type music."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
url = "http://news/podcast.mp3"
# First pass completes successfully, second pass raises command error
for _ in range(2):
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC,
ATTR_MEDIA_CONTENT_ID: url,
},
blocking=True,
)
player.play_url.assert_called_once_with(url)
player.play_url.reset_mock()
player.play_url.side_effect = CommandFailedError(None, "Failure", 1)
assert "Unable to play media: Failure (1)" in caplog.text
async def test_play_media_quick_select(
hass, config_entry, config, controller, caplog, quick_selects
):
"""Test the play media service with type quick_select."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
quick_select = list(quick_selects.items())[0]
index = quick_select[0]
name = quick_select[1]
# Play by index
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: "quick_select",
ATTR_MEDIA_CONTENT_ID: str(index),
},
blocking=True,
)
player.play_quick_select.assert_called_once_with(index)
# Play by name
player.play_quick_select.reset_mock()
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: "quick_select",
ATTR_MEDIA_CONTENT_ID: name,
},
blocking=True,
)
player.play_quick_select.assert_called_once_with(index)
# Invalid name
player.play_quick_select.reset_mock()
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: "quick_select",
ATTR_MEDIA_CONTENT_ID: "Invalid",
},
blocking=True,
)
assert player.play_quick_select.call_count == 0
assert "Unable to play media: Invalid quick select 'Invalid'" in caplog.text
async def test_play_media_playlist(
hass, config_entry, config, controller, caplog, playlists
):
"""Test the play media service with type playlist."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
playlist = playlists[0]
# Play without enqueuing
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_PLAYLIST,
ATTR_MEDIA_CONTENT_ID: playlist.name,
},
blocking=True,
)
player.add_to_queue.assert_called_once_with(
playlist, const.ADD_QUEUE_REPLACE_AND_PLAY
)
# Play with enqueuing
player.add_to_queue.reset_mock()
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_PLAYLIST,
ATTR_MEDIA_CONTENT_ID: playlist.name,
ATTR_MEDIA_ENQUEUE: True,
},
blocking=True,
)
player.add_to_queue.assert_called_once_with(playlist, const.ADD_QUEUE_ADD_TO_END)
# Invalid name
player.add_to_queue.reset_mock()
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_PLAYLIST,
ATTR_MEDIA_CONTENT_ID: "Invalid",
},
blocking=True,
)
assert player.add_to_queue.call_count == 0
assert "Unable to play media: Invalid playlist 'Invalid'" in caplog.text
async def test_play_media_favorite(
hass, config_entry, config, controller, caplog, favorites
):
"""Test the play media service with type favorite."""
await setup_platform(hass, config_entry, config)
player = controller.players[1]
quick_select = list(favorites.items())[0]
index = quick_select[0]
name = quick_select[1].name
# Play by index
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: "favorite",
ATTR_MEDIA_CONTENT_ID: str(index),
},
blocking=True,
)
player.play_favorite.assert_called_once_with(index)
# Play by name
player.play_favorite.reset_mock()
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: "favorite",
ATTR_MEDIA_CONTENT_ID: name,
},
blocking=True,
)
player.play_favorite.assert_called_once_with(index)
# Invalid name
player.play_favorite.reset_mock()
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: "favorite",
ATTR_MEDIA_CONTENT_ID: "Invalid",
},
blocking=True,
)
assert player.play_favorite.call_count == 0
assert "Unable to play media: Invalid favorite 'Invalid'" in caplog.text
async def test_play_media_invalid_type(hass, config_entry, config, controller, caplog):
"""Test the play media service with an invalid type."""
await setup_platform(hass, config_entry, config)
await hass.services.async_call(
MEDIA_PLAYER_DOMAIN,
SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: "media_player.test_player",
ATTR_MEDIA_CONTENT_TYPE: "Other",
ATTR_MEDIA_CONTENT_ID: "",
},
blocking=True,
)
assert "Unable to play media: Unsupported media type 'Other'" in caplog.text
| {
"content_hash": "fbda3069f53daa99ce911de164815f77",
"timestamp": "",
"source": "github",
"line_count": 806,
"max_line_length": 88,
"avg_line_length": 36.76674937965261,
"alnum_prop": 0.6650131605588175,
"repo_name": "partofthething/home-assistant",
"id": "4d979f8e5560e1c0cd5c5e5c6ff575bb25d373c2",
"size": "29634",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/heos/test_media_player.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import scadasim
| {
"content_hash": "0aed8aec83748e4231e7e2a31dafa8f7",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 82,
"avg_line_length": 24.2,
"alnum_prop": 0.7024793388429752,
"repo_name": "sintax1/scadasim",
"id": "2518a514a6247e93fa7d660c6d1f4049484e3860",
"size": "121",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/context.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "189"
},
{
"name": "Python",
"bytes": "23147"
}
],
"symlink_target": ""
} |
import collections
import os
import sys
from optparse import OptionParser, NO_DEFAULT
import imp
import warnings
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError, handle_default_options
from django.core.management.color import color_style
from django.utils.importlib import import_module
from django.utils import six
# For backwards compatibility: get_version() used to be in this module.
from django import get_version
# A cache of loaded commands, so that call_command
# doesn't have to reload every time it's called.
_commands = None
def find_commands(management_dir):
"""
Given a path to a management directory, returns a list of all the command
names that are available.
Returns an empty list if no commands are defined.
"""
command_dir = os.path.join(management_dir, 'commands')
try:
return [f[:-3] for f in os.listdir(command_dir)
if not f.startswith('_') and f.endswith('.py')]
except OSError:
return []
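# Illustration (hypothetical layout): for an app whose management/commands/
# directory contains greet.py, _private.py and notes.txt, find_commands()
# would return ['greet'] -- '_'-prefixed files and non-.py files are skipped.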
def find_management_module(app_name):
"""
Determines the path to the management module for the given app_name,
without actually importing the application or the management module.
Raises ImportError if the management module cannot be found for any reason.
"""
parts = app_name.split('.')
parts.append('management')
parts.reverse()
part = parts.pop()
path = None
# When using manage.py, the project module is added to the path,
# loaded, then removed from the path. This means that
# testproject.testapp.models can be loaded in future, even if
# testproject isn't in the path. When looking for the management
# module, we need to look for the case where the project name is part
# of the app_name but the project directory itself isn't on the path.
try:
f, path, descr = imp.find_module(part, path)
except ImportError as e:
if os.path.basename(os.getcwd()) != part:
raise e
else:
if f:
f.close()
while parts:
part = parts.pop()
f, path, descr = imp.find_module(part, path and [path] or None)
if f:
f.close()
return path
def load_command_class(app_name, name):
"""
Given a command name and an application name, returns the Command
class instance. All errors raised by the import process
(ImportError, AttributeError) are allowed to propagate.
"""
module = import_module('%s.management.commands.%s' % (app_name, name))
return module.Command()
def get_commands():
"""
Returns a dictionary mapping command names to their callback applications.
This works by looking for a management.commands package in django.core, and
in each installed application -- if a commands package exists, all commands
in that package are registered.
Core commands are always included. If a settings module has been
specified, user-defined commands will also be included.
The dictionary is in the format {command_name: app_name}. Key-value
pairs from this dictionary can then be used in calls to
load_command_class(app_name, command_name)
If a specific version of a command must be loaded (e.g., with the
startapp command), the instantiated module can be placed in the
dictionary in place of the application name.
The dictionary is cached on the first call and reused on subsequent
calls.
"""
global _commands
if _commands is None:
_commands = dict([(name, 'django.core') for name in find_commands(__path__[0])])
# Find the installed apps
from django.conf import settings
try:
apps = settings.INSTALLED_APPS
except ImproperlyConfigured:
# Still useful for commands that do not require functional settings,
# like startproject or help
apps = []
# Find and load the management module for each installed app.
for app_name in apps:
try:
path = find_management_module(app_name)
_commands.update(dict([(name, app_name)
for name in find_commands(path)]))
except ImportError:
pass # No management module - ignore this app
return _commands
def call_command(name, *args, **options):
"""
Calls the given command, with the given options and args/kwargs.
This is the primary API you should use for calling specific commands.
Some examples:
call_command('syncdb')
call_command('shell', plain=True)
call_command('sqlall', 'myapp')
"""
# Load the command object.
try:
app_name = get_commands()[name]
except KeyError:
raise CommandError("Unknown command: %r" % name)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
klass = app_name
else:
klass = load_command_class(app_name, name)
# Grab out a list of defaults from the options. optparse does this for us
# when the script runs from the command line, but since call_command can
# be called programmatically, we need to simulate the loading and handling
# of defaults (see #10080 for details).
defaults = {}
for opt in klass.option_list:
if opt.default is NO_DEFAULT:
defaults[opt.dest] = None
else:
defaults[opt.dest] = opt.default
defaults.update(options)
return klass.execute(*args, **defaults)
class LaxOptionParser(OptionParser):
"""
An option parser that doesn't raise any errors on unknown options.
This is needed because the --settings and --pythonpath options affect
the commands (and thus the options) that are available to the user.
"""
def error(self, msg):
pass
def print_help(self):
"""Output nothing.
The lax options are included in the normal option parser, so under
normal usage, we don't need to print the lax options.
"""
pass
def print_lax_help(self):
"""Output the basic options available to every command.
This just redirects to the default print_help() behavior.
"""
OptionParser.print_help(self)
def _process_args(self, largs, rargs, values):
"""
Overrides OptionParser._process_args to exclusively handle default
options and ignore args and other options.
        This overrides the behavior of the super class, which stops parsing
at the first unrecognized option.
"""
while rargs:
arg = rargs[0]
try:
if arg[0:2] == "--" and len(arg) > 2:
# process a single long option (possibly with value(s))
# the superclass code pops the arg off rargs
self._process_long_opt(rargs, values)
elif arg[:1] == "-" and len(arg) > 1:
# process a cluster of short options (possibly with
# value(s) for the last one only)
# the superclass code pops the arg off rargs
self._process_short_opts(rargs, values)
else:
# it's either a non-default option or an arg
# either way, add it to the args list so we can keep
# dealing with options
del rargs[0]
raise Exception
except:
largs.append(arg)
class ManagementUtility(object):
"""
Encapsulates the logic of the django-admin.py and manage.py utilities.
A ManagementUtility has a number of commands, which can be manipulated
by editing the self.commands dictionary.
"""
def __init__(self, argv=None):
self.argv = argv or sys.argv[:]
self.prog_name = os.path.basename(self.argv[0])
def main_help_text(self, commands_only=False):
"""
Returns the script's main help text, as a string.
"""
if commands_only:
usage = sorted(get_commands().keys())
else:
usage = [
"",
"Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name,
"",
"Available subcommands:",
]
commands_dict = collections.defaultdict(lambda: [])
for name, app in six.iteritems(get_commands()):
if app == 'django.core':
app = 'django'
else:
app = app.rpartition('.')[-1]
commands_dict[app].append(name)
style = color_style()
for app in sorted(commands_dict.keys()):
usage.append("")
usage.append(style.NOTICE("[%s]" % app))
for name in sorted(commands_dict[app]):
usage.append(" %s" % name)
return '\n'.join(usage)
def fetch_command(self, subcommand):
"""
Tries to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"django-admin.py" or "manage.py") if it can't be found.
"""
try:
app_name = get_commands()[subcommand]
except KeyError:
sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\n" % \
(subcommand, self.prog_name))
sys.exit(1)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
klass = app_name
else:
klass = load_command_class(app_name, subcommand)
return klass
def autocomplete(self):
"""
Output completion suggestions for BASH.
        The output of this function is passed to BASH's `COMPREPLY` variable and
        treated as completion suggestions. `COMPREPLY` expects a space
separated string as the result.
The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
        to get information about the CLI input. Please refer to the BASH
        man-page for more information about these variables.
Subcommand options are saved as pairs. A pair consists of
the long option string (e.g. '--exclude') and a boolean
value indicating if the option requires arguments. When printing to
        stdout, an equal sign is appended to options which require arguments.
Note: If debugging this function, it is recommended to write the debug
output in a separate file. Otherwise the debug output will be treated
and formatted as potential completion suggestions.
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'DJANGO_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
curr = cwords[cword-1]
except IndexError:
curr = ''
subcommands = list(get_commands()) + ['help']
options = [('--help', None)]
# subcommand
if cword == 1:
print(' '.join(sorted(filter(lambda x: x.startswith(curr), subcommands))))
# subcommand options
# special case: the 'help' subcommand has no options
elif cwords[0] in subcommands and cwords[0] != 'help':
subcommand_cls = self.fetch_command(cwords[0])
# special case: 'runfcgi' stores additional options as
# 'key=value' pairs
if cwords[0] == 'runfcgi':
from django.core.servers.fastcgi import FASTCGI_OPTIONS
options += [(k, 1) for k in FASTCGI_OPTIONS]
# special case: add the names of installed apps to options
elif cwords[0] in ('dumpdata', 'sql', 'sqlall', 'sqlclear',
'sqlcustom', 'sqlindexes', 'sqlsequencereset', 'test'):
try:
from django.conf import settings
# Get the last part of the dotted path as the app name.
options += [(a.split('.')[-1], 0) for a in settings.INSTALLED_APPS]
except ImportError:
# Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
# user will find out once they execute the command.
pass
options += [(s_opt.get_opt_string(), s_opt.nargs) for s_opt in
subcommand_cls.option_list]
# filter out previously specified options from available options
prev_opts = [x.split('=')[0] for x in cwords[1:cword-1]]
options = [opt for opt in options if opt[0] not in prev_opts]
# filter options by current input
options = sorted([(k, v) for k, v in options if k.startswith(curr)])
for option in options:
opt_label = option[0]
# append '=' to options which require args
if option[1]:
opt_label += '='
print(opt_label)
sys.exit(1)
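        # Protocol sketch (hypothetical shell state): with COMP_WORDS set to
        # "manage.py ru" and COMP_CWORD=1, cwords == ['ru'] and curr == 'ru',
        # so every subcommand starting with 'ru' (e.g. 'runserver') is printed
        # space-separated for BASH to offer as completions.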
def execute(self):
"""
Given the command-line arguments, this figures out which subcommand is
being run, creates a parser appropriate to that command, and runs it.
"""
# Preprocess options to extract --settings and --pythonpath.
# These options could affect the commands that are available, so they
# must be processed early.
parser = LaxOptionParser(usage="%prog subcommand [options] [args]",
version=get_version(),
option_list=BaseCommand.option_list)
self.autocomplete()
try:
options, args = parser.parse_args(self.argv)
handle_default_options(options)
except:
pass # Ignore any option errors at this point.
try:
subcommand = self.argv[1]
except IndexError:
subcommand = 'help' # Display help if no arguments were given.
if subcommand == 'help':
if len(args) <= 2:
parser.print_lax_help()
sys.stdout.write(self.main_help_text() + '\n')
elif args[2] == '--commands':
sys.stdout.write(self.main_help_text(commands_only=True) + '\n')
else:
self.fetch_command(args[2]).print_help(self.prog_name, args[2])
elif subcommand == 'version':
sys.stdout.write(parser.get_version() + '\n')
# Special-cases: We want 'django-admin.py --version' and
# 'django-admin.py --help' to work, for backwards compatibility.
elif self.argv[1:] == ['--version']:
# LaxOptionParser already takes care of printing the version.
pass
elif self.argv[1:] in (['--help'], ['-h']):
parser.print_lax_help()
sys.stdout.write(self.main_help_text() + '\n')
else:
self.fetch_command(subcommand).run_from_argv(self.argv)
def setup_environ(settings_mod, original_settings_path=None):
"""
Configures the runtime environment. This can also be used by external
scripts wanting to set up a similar environment to manage.py.
Returns the project directory (assuming the passed settings module is
directly in the project directory).
The "original_settings_path" parameter is optional, but recommended, since
trying to work out the original path from the module can be problematic.
"""
warnings.warn(
"The 'setup_environ' function is deprecated, "
"you likely need to update your 'manage.py'; "
"please see the Django 1.4 release notes "
"(https://docs.djangoproject.com/en/dev/releases/1.4/).",
DeprecationWarning)
# Add this project to sys.path so that it's importable in the conventional
# way. For example, if this file (manage.py) lives in a directory
# "myproject", this code would add "/path/to/myproject" to sys.path.
if '__init__.py' in settings_mod.__file__:
p = os.path.dirname(settings_mod.__file__)
else:
p = settings_mod.__file__
project_directory, settings_filename = os.path.split(p)
if project_directory == os.curdir or not project_directory:
project_directory = os.getcwd()
project_name = os.path.basename(project_directory)
# Strip filename suffix to get the module name.
settings_name = os.path.splitext(settings_filename)[0]
# Strip $py for Jython compiled files (like settings$py.class)
if settings_name.endswith("$py"):
settings_name = settings_name[:-3]
# Set DJANGO_SETTINGS_MODULE appropriately.
if original_settings_path:
os.environ['DJANGO_SETTINGS_MODULE'] = original_settings_path
else:
# If DJANGO_SETTINGS_MODULE is already set, use it.
os.environ['DJANGO_SETTINGS_MODULE'] = os.environ.get(
'DJANGO_SETTINGS_MODULE',
'%s.%s' % (project_name, settings_name)
)
# Import the project module. We add the parent directory to PYTHONPATH to
# avoid some of the path errors new users can have.
sys.path.append(os.path.join(project_directory, os.pardir))
import_module(project_name)
sys.path.pop()
return project_directory
def execute_from_command_line(argv=None):
"""
A simple method that runs a ManagementUtility.
"""
utility = ManagementUtility(argv)
utility.execute()
def execute_manager(settings_mod, argv=None):
"""
Like execute_from_command_line(), but for use by manage.py, a
project-specific django-admin.py utility.
"""
warnings.warn(
"The 'execute_manager' function is deprecated, "
"you likely need to update your 'manage.py'; "
"please see the Django 1.4 release notes "
"(https://docs.djangoproject.com/en/dev/releases/1.4/).",
DeprecationWarning)
setup_environ(settings_mod)
utility = ManagementUtility(argv)
utility.execute()
| {
"content_hash": "2c060af6f6956b5b0c0a7e927f617eca",
"timestamp": "",
"source": "github",
"line_count": 468,
"max_line_length": 98,
"avg_line_length": 38.82051282051282,
"alnum_prop": 0.6087626596213122,
"repo_name": "chrisfranzen/django",
"id": "bb26c206660c7b934996bc3c910c1c9a152b86e2",
"size": "18168",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/core/management/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "42663"
},
{
"name": "HTML",
"bytes": "95024"
},
{
"name": "JavaScript",
"bytes": "94313"
},
{
"name": "Python",
"bytes": "8216479"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
} |
from collections import Counter
from corehq.apps import es
from corehq.apps.data_pipeline_audit.dbacessors import (
get_es_counts_by_doc_type,
get_es_user_counts_by_doc_type,
get_primary_db_case_counts,
get_primary_db_form_counts,
)
from corehq.apps.data_pipeline_audit.utils import map_counter_doc_types
from corehq.apps.domain.dbaccessors import get_doc_count_in_domain_by_class
from corehq.apps.users.dbaccessors import (
get_mobile_user_count,
get_web_user_count,
)
from corehq.apps.users.models import CommCareUser
def get_doc_counts_for_domain(domain):
"""
:param domain:
:return: List of tuples: ``(doc_type, primary_db_count, es_count)``
"""
primary_db_counts = map_counter_doc_types(_get_primary_db_counts(domain))
es_counts = map_counter_doc_types(
get_es_counts_by_doc_type(domain, (es.CaseES, es.FormES))
)
es_counts.update(get_es_user_counts_by_doc_type(domain))
all_doc_types = set(primary_db_counts) | set(es_counts)
output_rows = []
for doc_type in sorted(all_doc_types, key=lambda d: d.lower()):
output_rows.append((
doc_type,
primary_db_counts[doc_type],
es_counts[doc_type]
))
return output_rows
def _get_primary_db_counts(domain):
db_counts = Counter()
db_counts.update(get_primary_db_form_counts(domain))
db_counts.update(get_primary_db_case_counts(domain))
mobile_user_count = get_mobile_user_count(domain)
db_counts.update({
'WebUser': get_web_user_count(domain),
'CommCareUser': mobile_user_count,
'CommCareUser-Deleted': get_doc_count_in_domain_by_class(domain, CommCareUser) - mobile_user_count
})
return db_counts
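# Shape sketch (hypothetical numbers): get_doc_counts_for_domain('demo') yields
# rows such as ('CommCareUser', 42, 41) -- one (doc_type, primary_db_count,
# es_count) tuple per doc type, sorted case-insensitively, making drift between
# the primary DB and Elasticsearch easy to spot.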
| {
"content_hash": "018cb7ce591b83f99e7eaa01b609caee",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 106,
"avg_line_length": 32.111111111111114,
"alnum_prop": 0.6764705882352942,
"repo_name": "dimagi/commcare-hq",
"id": "8606686305c70d696ff23546b36922edc33fa186",
"size": "1734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/data_pipeline_audit/tools.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "82928"
},
{
"name": "Dockerfile",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "2589268"
},
{
"name": "JavaScript",
"bytes": "5889543"
},
{
"name": "Jinja",
"bytes": "3693"
},
{
"name": "Less",
"bytes": "176180"
},
{
"name": "Makefile",
"bytes": "1622"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "66704"
},
{
"name": "Python",
"bytes": "21779773"
},
{
"name": "Roff",
"bytes": "150"
},
{
"name": "Shell",
"bytes": "67473"
}
],
"symlink_target": ""
} |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
from resource_management.libraries.script.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
from resource_management.core.source import Template
from resource_management.core.logger import Logger
from install_jars import install_tez_jars
from yarn import yarn
from service import service
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
class HistoryServer(Script):
def install(self, env):
self.install_packages(env)
def stop(self, env, upgrade_type=None):
import params
env.set_params(params)
service('historyserver', action='stop', serviceName='mapreduce')
def configure(self, env):
import params
env.set_params(params)
yarn(name="historyserver")
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class HistoryserverWindows(HistoryServer):
def start(self, env):
import params
env.set_params(params)
self.configure(env)
service('historyserver', action='start', serviceName='mapreduce')
def status(self, env):
service('historyserver', action='status')
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HistoryServerDefault(HistoryServer):
def get_component_name(self):
return "hadoop-mapreduce-historyserver"
def pre_upgrade_restart(self, env, upgrade_type=None):
Logger.info("Executing Stack Upgrade pre-restart")
import params
env.set_params(params)
if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
stack_select.select("hadoop-mapreduce-historyserver", params.version)
# MC Hammer said, "Can't touch this"
copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
copy_to_hdfs("slider", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
params.HdfsResource(None, action="execute")
def start(self, env, upgrade_type=None):
import params
env.set_params(params)
self.configure(env) # FOR SECURITY
if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
# MC Hammer said, "Can't touch this"
resource_created = copy_to_hdfs(
"mapreduce",
params.user_group,
params.hdfs_user,
host_sys_prepped=params.host_sys_prepped)
resource_created = copy_to_hdfs(
"tez",
params.user_group,
params.hdfs_user,
host_sys_prepped=params.host_sys_prepped) or resource_created
resource_created = copy_to_hdfs(
"slider",
params.user_group,
params.hdfs_user,
host_sys_prepped=params.host_sys_prepped) or resource_created
if resource_created:
params.HdfsResource(None, action="execute")
else:
      # In stack versions before copy_tarball_to_hdfs support, tez.tar.gz was copied to a different folder in HDFS.
install_tez_jars()
service('historyserver', action='start', serviceName='mapreduce')
def status(self, env):
import status_params
env.set_params(status_params)
check_process_status(status_params.mapred_historyserver_pid_file)
def security_status(self, env):
import status_params
env.set_params(status_params)
if status_params.security_enabled:
expectations = {}
expectations.update(build_expectations('mapred-site',
None,
[
'mapreduce.jobhistory.keytab',
'mapreduce.jobhistory.principal',
'mapreduce.jobhistory.webapp.spnego-keytab-file',
'mapreduce.jobhistory.webapp.spnego-principal'
],
None))
security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
{'mapred-site.xml': FILE_TYPE_XML})
result_issues = validate_security_config_properties(security_params, expectations)
if not result_issues: # If all validations passed successfully
try:
# Double check the dict before calling execute
if ( 'mapred-site' not in security_params or
'mapreduce.jobhistory.keytab' not in security_params['mapred-site'] or
'mapreduce.jobhistory.principal' not in security_params['mapred-site'] or
'mapreduce.jobhistory.webapp.spnego-keytab-file' not in security_params['mapred-site'] or
'mapreduce.jobhistory.webapp.spnego-principal' not in security_params['mapred-site']):
self.put_structured_out({"securityState": "UNSECURED"})
self.put_structured_out(
{"securityIssuesFound": "Keytab file or principal not set."})
return
cached_kinit_executor(status_params.kinit_path_local,
status_params.mapred_user,
security_params['mapred-site']['mapreduce.jobhistory.keytab'],
security_params['mapred-site']['mapreduce.jobhistory.principal'],
status_params.hostname,
status_params.tmp_dir)
cached_kinit_executor(status_params.kinit_path_local,
status_params.mapred_user,
security_params['mapred-site']['mapreduce.jobhistory.webapp.spnego-keytab-file'],
security_params['mapred-site']['mapreduce.jobhistory.webapp.spnego-principal'],
status_params.hostname,
status_params.tmp_dir)
self.put_structured_out({"securityState": "SECURED_KERBEROS"})
except Exception as e:
self.put_structured_out({"securityState": "ERROR"})
self.put_structured_out({"securityStateErrorInfo": str(e)})
else:
issues = []
for cf in result_issues:
issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
self.put_structured_out({"securityState": "UNSECURED"})
else:
self.put_structured_out({"securityState": "UNSECURED"})
def get_log_folder(self):
import params
return params.mapred_log_dir
def get_user(self):
import params
return params.mapred_user
if __name__ == "__main__":
HistoryServer().execute()
| {
"content_hash": "b1a7b51ea581bab1043c549adf27f3b2",
"timestamp": "",
"source": "github",
"line_count": 189,
"max_line_length": 141,
"avg_line_length": 44.06349206349206,
"alnum_prop": 0.6643852065321806,
"repo_name": "JunHe77/bigtop",
"id": "f933e9172579f43ff40e4a54cf5150f009e30a13",
"size": "8328",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/historyserver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4822"
},
{
"name": "Dockerfile",
"bytes": "2684"
},
{
"name": "Groovy",
"bytes": "632676"
},
{
"name": "HiveQL",
"bytes": "1658"
},
{
"name": "Java",
"bytes": "676559"
},
{
"name": "Makefile",
"bytes": "57346"
},
{
"name": "PigLatin",
"bytes": "3196"
},
{
"name": "Puppet",
"bytes": "180420"
},
{
"name": "Python",
"bytes": "240919"
},
{
"name": "Roff",
"bytes": "45904"
},
{
"name": "Ruby",
"bytes": "19903"
},
{
"name": "Scala",
"bytes": "85334"
},
{
"name": "Shell",
"bytes": "615830"
},
{
"name": "TSQL",
"bytes": "13064"
},
{
"name": "XSLT",
"bytes": "1323"
}
],
"symlink_target": ""
} |
"""
Defines components for holding properties of rocks or samples or whatevers.
:copyright: 2015 Agile Geoscience
:license: Apache 2.0
"""
import re
class ComponentError(Exception):
"""
Generic error class.
"""
pass
class Component(object):
"""
Initialize with a dictionary of properties. You can use any
properties you want e.g.:
- lithology: a simple one-word rock type
- colour, e.g. 'grey'
- grainsize or range, e.g. 'vf-f'
- modifier, e.g. 'rippled'
- quantity, e.g. '35%', or 'stringers'
- description, e.g. from cuttings
You can include as many other things as you want, e.g.
- porosity
- cementation
- lithology code
"""
def __init__(self, properties):
for k, v in properties.items():
if k and v:
setattr(self, k.lower(), v.lower())
def __repr__(self):
s = str(self)
return "Component({0})".format(s)
def __str__(self):
s = []
for key in self.__dict__:
t = '"{key}":"{value}"'
s.append(t.format(key=key, value=self.__dict__[key]))
return ', '.join(s)
def __getitem__(self, key):
"""
So we can get at attributes with variables.
"""
return self.__dict__.get(key)
def __bool__(self):
if not self.__dict__.keys():
return False
else:
return True
# For Python 2
__nonzero__ = __bool__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
# Weed out empty elements
s = {k: v for k, v in self.__dict__.items() if v}
o = {k: v for k, v in other.__dict__.items() if v}
# Compare
if s == o:
return True
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
# If we define __eq__ we also need __hash__ otherwise the object
# becomes unhashable. All this does is hash the frozenset of the
# keys. (You can only hash immutables.)
def __hash__(self):
return hash(frozenset(self.__dict__.keys()))
def _repr_html_(self):
"""
IPython Notebook magic repr function.
"""
rows = ''
s = '<tr><td><strong>{k}</strong></td><td>{v}</td></tr>'
for k, v in self.__dict__.items():
rows += s.format(k=k, v=v)
html = '<table>{}</table>'.format(rows)
return html
@classmethod
def from_text(cls, text, lexicon, required=None, first_only=True):
"""
Generate a Component from a text string, using a Lexicon.
Args:
text (str): The text string to parse.
lexicon (Lexicon): The dictionary to use for the
categories and lexemes.
first_only (bool): Whether to only take the first
match of a lexeme against the text string.
Returns:
Component: A Component object, or None if there was no
must-have field.
"""
component = lexicon.get_component(text, first_only=first_only)
if required and (required not in component):
return None
else:
return cls(component)
def summary(self, fmt=None, initial=True, default=''):
"""
Given a format string, return a summary description of a component.
Args:
component (dict): A component dictionary.
fmt (str): Describes the format with a string. Use '%'
to signal a field in the component, which is analogous
to a dictionary. If no format is given, you will
just get a list of attributes.
initial (bool): Whether to capitialize the first letter.
default (str): What to give if there's no component defined.
Returns:
str: A summary string.
Example:
r = Component({'colour': 'Red',
'grainsize': 'VF-F',
'lithology': 'Sandstone'})
r.summary() --> 'Red, vf-f, sandstone'
"""
if default and not self.__dict__:
return default
if not fmt:
string, flist = '', []
for item in self.__dict__:
string += '{}, '
flist.append(item)
string = string.strip(', ')
else:
            fmt = re.sub(r'  ', '_dblspc_', fmt)  # protect intentional double spaces
string = re.sub(r'\{(\w+)\}', '{}', fmt)
flist = re.findall(r'\{(\w+)\}', fmt)
words = []
for key in flist:
word = self.__dict__.get(key.lower())
if word and key[0].isupper():
word = word.capitalize()
if word and key.isupper():
word = word.upper()
if not word:
word = ''
words.append(word)
try:
summary = string.format(*words)
except KeyError as e:
raise ComponentError("No such attribute, "+str(e))
if initial and summary:
summary = summary[0].upper() + summary[1:]
# Tidy up double spaces
        summary = re.sub(r'  ', ' ', summary)
        summary = re.sub(r'_dblspc_', '  ', summary)
return summary
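# Quick check of summary()'s format-string path (values from the docstring
# example above; __init__ lower-cases stored values, and a capitalized key
# such as '{Lithology}' re-capitalizes the word on output):
#     r.summary(fmt='{Lithology}, {colour}')  ->  'Sandstone, red'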
| {
"content_hash": "27d280b69b31206ae8bc527995810462",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 75,
"avg_line_length": 29.17391304347826,
"alnum_prop": 0.5095007451564829,
"repo_name": "EvanBianco/striplog",
"id": "94d652c9e89b92da3717e49eaf93ae3fc33eda67",
"size": "5414",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "striplog/component.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "141932"
}
],
"symlink_target": ""
} |
from falcon import errors
from falcon import testing
from oslo_policy import policy as os_policy
from monasca_api.api.core import request
from monasca_api.common.policy import policy_engine as policy
from monasca_api.tests import base
import monasca_api.v2.reference.helpers as helpers
from monasca_api.common.rest import utils as rest_utils
class TestHelpersFunction(base.BaseTestCase):
def test_from_json(self):
body_json = {'test_body': 'test'}
req = request.Request(
testing.create_environ(
body=rest_utils.as_json(body_json),
)
)
response = helpers.from_json(req)
self.assertEqual(body_json, response)
def test_from_json_incorrect_message(self):
req = request.Request(
testing.create_environ(
body='incorrect message',
)
)
self.assertRaises(errors.HTTPBadRequest, helpers.from_json, req)
def test_to_json(self):
test_dict = {'test_body': 'test'}
expected_json = '{"test_body": "test"}'
response = helpers.to_json(test_dict)
self.assertEqual(expected_json, response)
def test_validate_json_content_type(self):
req = request.Request(
testing.create_environ(
headers={'Content-Type': 'application/json'}
)
)
helpers.validate_json_content_type(req)
def test_validate_json_content_type_incorrect_content_type(self):
req = request.Request(
testing.create_environ(
headers={'Content-Type': 'multipart/form-data'}
)
)
self.assertRaises(errors.HTTPBadRequest, helpers.validate_json_content_type, req)
def test_validate_json_content_type_missing_content_type(self):
req = request.Request(testing.create_environ())
self.assertRaises(errors.HTTPBadRequest, helpers.validate_json_content_type, req)
class TestGetXTenantOrTenantId(base.BaseApiTestCase):
def setUp(self):
super(TestGetXTenantOrTenantId, self).setUp()
rules = [
os_policy.RuleDefault("example:allowed", "@"),
os_policy.RuleDefault("example:denied", "!"),
os_policy.RuleDefault("example:authorized",
"role:role_1 or role:role_2")
]
policy.reset()
policy.init()
policy._ENFORCER.register_defaults(rules)
def test_return_tenant_id_on_authorized_roles(self):
for role in ['role_1', 'role_2']:
req_context = self._get_request_context(role)
self.assertEqual(
'fake_tenant_id',
helpers.get_x_tenant_or_tenant_id(
req_context, ['example:authorized']
)
)
def test_return_tenant_id_on_allowed_rules(self):
req_context = self._get_request_context()
self.assertEqual(
'fake_tenant_id',
helpers.get_x_tenant_or_tenant_id(
req_context,
['example:allowed']
)
)
def test_return_project_id_on_unauthorized_role(self):
req_context = self._get_request_context()
self.assertEqual('fake_project_id',
helpers.get_x_tenant_or_tenant_id(
req_context,
['example:authorized']))
def test_return_project_id_on_denied_rules(self):
req_context = self._get_request_context()
self.assertEqual(
'fake_project_id',
helpers.get_x_tenant_or_tenant_id(
req_context,
['example:denied']
)
)
def test_return_project_id_on_unavailable_tenant_id(self):
req_context = self._get_request_context()
req_context.query_string = ''
self.assertEqual(
'fake_project_id',
helpers.get_x_tenant_or_tenant_id(
req_context,
['example:allowed']
)
)
@staticmethod
def _get_request_context(role='fake_role'):
return request.Request(
testing.create_environ(
path="/",
query_string="tenant_id=fake_tenant_id",
headers={
"X_PROJECT_ID": "fake_project_id",
"X_ROLES": role
}
)
)
| {
"content_hash": "f2f312c9e518953b5f4870de9208ccf2",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 89,
"avg_line_length": 33.02238805970149,
"alnum_prop": 0.5633898305084746,
"repo_name": "stackforge/monasca-api",
"id": "231477eca891080ca807d2165d518090ba64553a",
"size": "4997",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "monasca_api/tests/test_helpers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "764767"
},
{
"name": "PLpgSQL",
"bytes": "4289"
},
{
"name": "Python",
"bytes": "710072"
},
{
"name": "Ruby",
"bytes": "3774"
},
{
"name": "Shell",
"bytes": "53573"
}
],
"symlink_target": ""
} |
"""
Created on Feb 13, 2016
File: tuplet.py
Purpose: Defines Tuplet note construct
@author: donald p pazel
"""
from structure.abstract_note_collective import AbstractNoteCollective
from structure.note import Note
from timemodel.duration import Duration
class Tuplet(AbstractNoteCollective):
"""
    Tuplet is a grouping operation having a bounded duration but a variable scale factor based on its full content duration.
    The bounded duration is determined by two attributes:
1) unit_duration: a Duration representing a base note value
2) unit_duration_factor: a numeric representing how many of the above the full duration should be.
"""
def __init__(self, unit_duration, unit_duration_factor, abstract_note_list=None):
"""
unit_duration x unit_duration_factor gives the full intended duration for the construct.
        tuplets have bounded duration but a variable scale factor based on their contents
Args:
unit_duration: a Duration representing a base note value, e.g. quarter note
unit_duration_factor: a numeric representing how many of the above the full duration should be.
abstract_note_list: a list of abstract notes to append to the tuplet
Note that these factors aggregate multiplicatively through self.contextual_reduction_factor (see rescale())
"""
AbstractNoteCollective.__init__(self)
self.__unit_duration = unit_duration
self.__unit_duration_factor = unit_duration_factor
if abstract_note_list is None:
abstract_note_list = list()
self.append(abstract_note_list)
@property
def unit_duration(self):
return self.__unit_duration
@property
def unit_duration_factor(self):
return self.__unit_duration_factor
@property
def duration(self):
"""
This is an override of AbstractNoteCollective.duration.
        Tuplet and Beam override this to do a simple summation of linearly laid out notes and subnotes.
        The reason is that the layout algorithm of these subclasses cannot use the relative_position
        attribute, as the algorithm determines that.
"""
d = Duration(0)
for note in self.sub_notes:
d += note.duration
return d
def append(self, notes):
"""
        Append one note or a list of notes to the tuplet.
:param notes: List or individual note
:return:
"""
if isinstance(notes, list):
for n in notes:
self.append(n)
return
elif isinstance(notes, Note) or isinstance(notes, AbstractNoteCollective):
self.add(notes, len(self.sub_notes))
def add(self, note, index):
"""
        Add a note at the given index. A Note must be shorter than twice the
        unit duration; beams and tuplets of any size may be added. The
        contextual reduction factor is applied to everything that is added.
"""
from structure.beam import Beam
if note.parent is not None:
raise Exception('Cannot add note already assigned a parent')
if index < 0 or index > len(self.sub_notes):
raise Exception('add note, index {0} not in range[0, {1}]'.format(index, len(self.sub_notes)))
if isinstance(note, Note):
if note.base_duration >= 2 * self.unit_duration:
raise Exception(
"Attempt to add note with duration {0} greater than or equal to {1}".format(note.duration,
2 * self.unit_duration))
elif not isinstance(note, Beam) and not isinstance(note, Tuplet):
raise Exception('illegal type {0}'.format(type(note)))
self.sub_notes.insert(index, note)
note.parent = self
note.apply_factor(self.contextual_reduction_factor)
self.rescale()
# see if prior note is tied, and if so, break the tie.
first_note = note
if not isinstance(note, Note):
first_note = note.get_first_note()
        # If an empty tuplet or beam was added, there is no note to tie.
if first_note is None:
return
prior = first_note.prior_note()
if prior is not None and prior.is_tied_to:
prior.untie()
self.notes_added([note])
def rescale(self):
"""
        Rebuild the factors so that the duration comes out right.
        Instead of setting self.contextual_reduction_factor directly, we create an incremental factor that, when applied to
        the contextual_reduction_factor, gives the correct new factor.
        This is preferred since the incremental factor can be applied downward in the tree
        in a straightforward way, as a contextual adjustment multiplicative factor.
"""
original_full_duration = self.duration.duration / self.contextual_reduction_factor
new_factor = self.unit_duration.duration * self.unit_duration_factor / original_full_duration
# get the contextual reduction factor contribution the parent give to self.
contrib = self.parent.contextual_reduction_factor if self.parent else 1
orig_f = self.contextual_reduction_factor / contrib
incremental_contextual_factor = new_factor / orig_f # self.contextual_reduction_factor
self.downward_refactor_layout(incremental_contextual_factor)
def __str__(self):
base = 'Tuplet({0}x{1}Dur({2})Off({3})f={4})'.format(self.unit_duration, self.unit_duration_factor,
self.duration, self.relative_position,
self.contextual_reduction_factor)
s = base + '[' + (']' if len(self.sub_notes) == 0 else '\n')
for n in self.sub_notes:
s += ' ' + str(n) + '\n'
s += ']' if len(self.sub_notes) != 0 else ''
return s
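# Construction sketch (assumes Duration(1, 8) builds an eighth-note duration
# and n1..n3 are hypothetical Note instances):
#     Tuplet(Duration(1, 8), 2, [n1, n2, n3])
# fits three notes into the time of two eighth notes -- a standard triplet --
# rescaling each added note through the contextual_reduction_factor.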
| {
"content_hash": "e250b0987c3629592a1a196b053c7327",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 120,
"avg_line_length": 42.61805555555556,
"alnum_prop": 0.6048557927326055,
"repo_name": "dpazel/music_rep",
"id": "e36688651b89756422d2421d241d8294d969a1b1",
"size": "6137",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "structure/tuplet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "6964"
},
{
"name": "Python",
"bytes": "1584408"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, division, unicode_literals
import argparse
import gzip
import os
import os.path
import re
# Makes the default extension list look a bit nicer
class PrettyTuple(tuple):
def __repr__(self):
return ', '.join(self)
CHUNK_SIZE = 64 * 1024
# Extensions that it's not worth trying to gzip
GZIP_EXCLUDE_EXTENSIONS = PrettyTuple((
# Images
'jpg', 'jpeg', 'png', 'gif', 'webp',
# Compressed files
'zip', 'gz', 'tgz', 'bz2', 'tbz',
# Flash
'swf', 'flv',
# Fonts
'woff',
))
null_log = lambda x: x
def main(root, extensions=None, quiet=False, log=print):
excluded_re = extension_regex(extensions)
if quiet:
log = null_log
for dirpath, dirs, files in os.walk(root):
for filename in files:
if not excluded_re.search(filename):
path = os.path.join(dirpath, filename)
compress(path, log)
def extension_regex(extensions):
if not extensions:
return re.compile('^$')
else:
return re.compile(
r'\.({})$'.format('|'.join(map(re.escape, extensions))),
re.IGNORECASE)
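# e.g. extension_regex(['jpg', 'png']) compiles to r'\.(jpg|png)$' (case-
# insensitive), while an empty extension list compiles to '^$', which never
# matches a filename, so nothing is excluded from gzipping.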
def compress(path, log=null_log):
gzip_path = path + '.gz'
with open(path, 'rb') as in_file:
# Explicitly set mtime to 0 so gzip content is fully determined
# by file content (0 = "no timestamp" according to gzip spec)
with gzip.GzipFile(gzip_path, 'wb', compresslevel=9, mtime=0) as out_file:
for chunk in iter(lambda: in_file.read(CHUNK_SIZE), b''):
out_file.write(chunk)
# If gzipped file isn't actually any smaller then get rid of it
orig_size = os.path.getsize(path)
gzip_size = os.path.getsize(gzip_path)
if not is_worth_gzipping(orig_size, gzip_size):
log('Skipping {} (Gzip not effective)'.format(path))
os.unlink(gzip_path)
else:
log('Gzipping {} ({}K -> {}K)'.format(
path, orig_size // 1024, gzip_size // 1024))
def is_worth_gzipping(orig_size, gzip_size):
if orig_size == 0:
return False
ratio = gzip_size / orig_size
return ratio <= 0.95
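# Worked example (hypothetical sizes): a 100 KB stylesheet gzipped to 30 KB has
# ratio 0.3 <= 0.95, so its .gz sibling is kept; a 10 KB file that only shrinks
# to 9.9 KB (ratio 0.99) fails the test and compress() deletes the .gz again.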
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Search for all files inside <root> *not* matching <extensions> "
"and produce gzipped versions with a '.gz' suffix (as long "
"this results in a smaller file)",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-q', '--quiet', help="Don't produce log output", action='store_true')
parser.add_argument('root', help='Path root from which to search for files')
parser.add_argument('extensions', nargs='*', help='File extensions to exclude from gzipping',
default=GZIP_EXCLUDE_EXTENSIONS)
args = parser.parse_args()
main(**vars(args))
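# CLI sketch (hypothetical path): `python gzip.py /srv/static` walks /srv/static
# and writes app.css.gz next to app.css for every file whose extension is not
# in GZIP_EXCLUDE_EXTENSIONS and whose gzipped form passes is_worth_gzipping().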
| {
"content_hash": "ad4be6728e44564f5edf87758c685d08",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 97,
"avg_line_length": 32.31111111111111,
"alnum_prop": 0.6124484181568088,
"repo_name": "andela-bojengwa/talk",
"id": "93be4e929172a8dd6fa9def2c3cc9b133a2fbdee",
"size": "2930",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/whitenoise/gzip.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56869"
},
{
"name": "JavaScript",
"bytes": "131120"
},
{
"name": "Python",
"bytes": "6685111"
},
{
"name": "Shell",
"bytes": "3767"
}
],
"symlink_target": ""
} |
from django.shortcuts import render
from django.http import HttpResponse
from lists.models import Item
# Create your views here.
def home_page(request):
if request.method == 'POST':
first_item = Item()
first_item.text = request.POST['item_text']
first_item.save()
return HttpResponse(request.POST['item_text'])
return render(request, 'home.html')
| {
"content_hash": "e61222920816957ba3eb09241fb160e6",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 54,
"avg_line_length": 24.470588235294116,
"alnum_prop": 0.6394230769230769,
"repo_name": "Hongqing-Hu/TDD-Django",
"id": "e49ca2ef1406c0a37c36aaef086a1a00680a8455",
"size": "431",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "superlists/lists/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "299"
},
{
"name": "Python",
"bytes": "10625"
}
],
"symlink_target": ""
} |
""" Testing with statements that use generators. This should not crash. """
class Base(object):
""" Base class. """
val = 0
def gen(self):
""" A generator. """
yield self.val
def fun(self):
""" With statement using a generator. """
with self.gen(): # [not-context-manager]
pass
| {
"content_hash": "9626167ce50abe8d081a2e15e90756da",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 75,
"avg_line_length": 24.428571428571427,
"alnum_prop": 0.543859649122807,
"repo_name": "mcking49/apache-flask",
"id": "25c6b3776702546dbbfc8576ff089f1de4acf3da",
"size": "342",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "Python/Lib/site-packages/pylint/test/functional/with_using_generator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2501"
},
{
"name": "C",
"bytes": "479174"
},
{
"name": "C++",
"bytes": "21416"
},
{
"name": "CSS",
"bytes": "170391"
},
{
"name": "Groff",
"bytes": "21"
},
{
"name": "HTML",
"bytes": "1003190"
},
{
"name": "JavaScript",
"bytes": "1559701"
},
{
"name": "PHP",
"bytes": "3338"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "30714489"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
"""Documentation menu."""
import bits.pyfs
import os
import os.path
created_documentation_cfg = False
def grub_escape(s):
return "'{}'".format("'\\''".join(s.split("'")))
def create_documentation_cfg():
global created_documentation_cfg
if created_documentation_cfg:
return
cfg = ""
docpath = "/boot/Documentation"
for basename in sorted(os.listdir(docpath)):
assert '"' not in basename
assert "'" not in basename
filename = os.path.join(docpath, basename)
title = file(filename).readline().strip()
cfg += 'menuentry {} {{\n'.format(grub_escape("{}: {}".format(basename, title)))
cfg += """ py 'import ttypager; ttypager.ttypager(file(r"{}").read())'\n""".format(filename)
cfg += '}\n'
bits.pyfs.add_static("documentation.cfg", cfg)
    created_documentation_cfg = True
| {
"content_hash": "e26ccf402541b840a1a6252b52ec785b",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 103,
"avg_line_length": 32.25925925925926,
"alnum_prop": 0.6199770378874856,
"repo_name": "mfleming/bits",
"id": "596cced2faf0b22de90421fb9fef17f36f1be0af",
"size": "2440",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/documentation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "11674"
},
{
"name": "C",
"bytes": "537800"
},
{
"name": "C++",
"bytes": "46588"
},
{
"name": "Objective-C",
"bytes": "7077"
},
{
"name": "Python",
"bytes": "447375"
},
{
"name": "Shell",
"bytes": "15988"
}
],
"symlink_target": ""
} |
from session import session
sess1 = session('184.105.247.71')
sess1.connect('pyclass','88newclass')
for line in sess1.sendCmd('config t'):
print line.rstrip()
for line in sess1.sendCmd('logging buffer 10000'):
print line
for line in sess1.sendCmd('end'):
print line
for line in sess1.sendCmd('sho run | i logging'):
print line
sess1.disco()
| {
"content_hash": "6947c9c7b4da201bab28a327a63636eb",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 50,
"avg_line_length": 25.642857142857142,
"alnum_prop": 0.7075208913649025,
"repo_name": "patrebert/pynet_cert",
"id": "33d8bb79321600b96a56492510feea7306526f07",
"size": "377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "class4/ex4.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "144"
},
{
"name": "Python",
"bytes": "69997"
},
{
"name": "Shell",
"bytes": "597"
}
],
"symlink_target": ""
} |
""" Module images2gif
Provides functionality for reading and writing animated GIF images.
Use writeGif to write a series of numpy arrays or PIL images as an
animated GIF. Use readGif to read an animated gif as a series of numpy
arrays.
Note that since July 2004, all patents on the LZW compression algorithm have
expired. Therefore the GIF format may now be used freely.
Acknowledgements
----------------
Many thanks to Ant1 for:
* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly
improves the results.
* the modifications to save each image with its own palette, or optionally
the global palette (if its the same).
Many thanks to Marius van Voorden for porting the NeuQuant quantization
algorithm of Anthony Dekker to Python (See the NeuQuant class for its
license).
Many thanks to Alex Robinson for implementing the concept of subrectangles,
which (depending on image content) can give a very significant reduction in
file size.
This code is based on gifmaker (in the scripts folder of the source
distribution of PIL)
Useful links
-------------
* http://tronche.com/computer-graphics/gif/
* http://en.wikipedia.org/wiki/Graphics_Interchange_Format
* http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
# todo: This module should be part of imageio (or at least based on)
import os, time
try:
import PIL
from PIL import Image
from PIL.GifImagePlugin import getheader, getdata
except ImportError:
PIL = None
try:
import numpy as np
except ImportError:
np = None
def get_cKDTree():
try:
from scipy.spatial import cKDTree
except ImportError:
cKDTree = None
return cKDTree
# getheader gives a 87a header and a color palette (two elements in a list).
# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size".
# getdata()[1:] is the image data itself in chunks of 256 bytes (well
# technically the first byte says how many bytes follow, after which that
# amount (max 255) follows).
def checkImages(images):
""" checkImages(images)
Check numpy images and correct intensity range etc.
The same for all movie formats.
"""
# Init results
images2 = []
for im in images:
if PIL and isinstance(im, PIL.Image.Image):
            # We assume PIL images are all right
images2.append(im)
elif np and isinstance(im, np.ndarray):
# Check and convert dtype
if im.dtype == np.uint8:
images2.append(im) # Ok
elif im.dtype in [np.float32, np.float64]:
im = im.copy()
im[im<0] = 0
im[im>1] = 1
im *= 255
images2.append( im.astype(np.uint8) )
else:
im = im.astype(np.uint8)
images2.append(im)
# Check size
if im.ndim == 2:
pass # ok
elif im.ndim == 3:
if im.shape[2] not in [3,4]:
raise ValueError('This array can not represent an image.')
else:
raise ValueError('This array can not represent an image.')
else:
raise ValueError('Invalid image type: ' + str(type(im)))
# Done
return images2
def intToBin(i):
""" Integer to two bytes """
    # divide into two parts (bytes)
i1 = i % 256
i2 = int( i/256)
# make string (little endian)
return chr(i1) + chr(i2)
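# e.g. intToBin(300) -> chr(44) + chr(1), i.e. '\x2c\x01': low byte first,
# matching the little-endian 16-bit fields used throughout the GIF format.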
class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
        Otherwise the global color table applies to all frames, irrespective of
        whether additional colors come into play that would require a redefined
        palette. Still a maximum of 256 colors per frame, obviously.
Written by Ant1 on 2010-08-22
        Modified by Alex Robinson in January 2011 to implement subrectangles.
"""
        # Default: use the full image, placed at the upper left
if xy is None:
xy = (0,0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin( xy[0] ) # Left position
bb += intToBin( xy[1] ) # Top position
bb += intToBin( im.size[0] ) # image width
bb += intToBin( im.size[1] ) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
        # LZW minimum size code now comes later, at the beginning of the [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
        Application extension. This part specifies the number of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops==0 or loops==float('inf'):
loops = 2**16-1
#bb = "" # application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2,transparent_flag=0,transparency_index=0):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
        each image. Specifies duration and transparency.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr(((dispose & 3) << 2)|(transparent_flag & 1)) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin( int(duration*100) ) # in 100th of seconds
bb += chr(transparency_index) # transparency index
bb += '\x00' # end
return bb
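    # Worked example: getGraphicsControlExt(0.1, 2) yields
    # '\x21\xF9\x04' + chr(8) + '\x0a\x00' + '\x00' + '\x00' -- a packed field
    # of 0b00001000 (dispose=2, no transparency) and 10 hundredths of a second.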
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
image_info = [im.info for im in images ]
if isinstance(subRectangles, (tuple,list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0,0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0,0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape)==0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy, image_info
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
        Calculating the subrectangles takes extra time, obviously. However,
        because the image sizes are reduced, the actual writing of the GIF
        goes faster. In some cases this method even produces the final GIF
        faster overall.
"""
# Check image count
if len(ims) < 2:
return ims, [(0,0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0,0)]
t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im-prev)
if diff.ndim==3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1]+1
y0, y1 = Y[0], Y[-1]+1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1,x0:x1]
prev = im
ims2.append(im2)
xy.append((x0,y0))
# Done
#print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
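    # A minimal sketch of the bounding-box logic above, assuming two
    # equal-size frames `a` and `b` (illustration only; casting to int
    # avoids uint8 wrap-around in the subtraction):
    #
    #   diff = np.abs(b.astype(int) - a.astype(int))
    #   if diff.ndim == 3:
    #       diff = diff.sum(2)            # collapse the color axis
    #   X = np.argwhere(diff.sum(0))      # columns that changed
    #   Y = np.argwhere(diff.sum(1))      # rows that changed
    #   # X[0]..X[-1]+1 and Y[0]..Y[-1]+1 bound the changed region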
def convertImagesToPIL(self, images, dither, nq=0,images_info=None):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
        written to a single animated GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim==3 and im.shape[2]==3:
im = Image.fromarray(im,'RGB')
elif im.ndim==3 and im.shape[2]==4:
# im = Image.fromarray(im[:,:,:3],'RGB')
self.transparency = True
im = Image.fromarray(im[:,:,:4],'RGBA')
elif im.ndim==2:
im = Image.fromarray(im,'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
if nq >= 1:
# NeuQuant algorithm
for im in images:
im = im.convert("RGBA") # NQ assumes RGBA
nqInstance = NeuQuant(im, int(nq)) # Learn colors from image
if dither:
im = im.convert("RGB").quantize(palette=nqInstance.paletteImage(),colors=255)
else:
im = nqInstance.quantize(im,colors=255) # Use to quantize the image itself
self.transparency = True # since NQ assumes transparency
if self.transparency:
alpha = im.split()[3]
mask = Image.eval(alpha, lambda a: 255 if a <=128 else 0)
im.paste(255,mask=mask)
images2.append(im)
else:
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
# for index,im in enumerate(images):
for i in range(len(images)):
im = images[i].convert('RGB').convert('P', palette=AD, dither=dither,colors=255)
if self.transparency:
alpha = images[i].split()[3]
mask = Image.eval(alpha, lambda a: 255 if a <=128 else 0)
im.paste(255,mask=mask)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
        # Obtain palette for all images and count each occurrence
palettes, occur = [], []
for im in images:
# palettes.append( getheader(im)[1] )
palettes.append( im.palette.getdata()[1] ) # Fixed!
for palette in palettes:
occur.append( palettes.count( palette ) )
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[ occur.index(max(occur)) ]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = data[0], data[1:]
transparent_flag = 0
if self.transparency: transparent_flag = 1
graphext = self.getGraphicsControlExt(durations[frames],
disposes[frames],transparent_flag=transparent_flag,transparency_index=255)
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
# Write image data
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
## Exposed functions
def writeGif(filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None):
""" writeGif(filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None)
Write an animated gif from the specified images.
Parameters
----------
filename : string
The name of the file to write the image to.
images : list
Should be a list consisting of PIL images or numpy arrays.
The latter should be between 0 and 255 for integer types, and
between 0 and 1 for float types.
duration : scalar or list of scalars
The duration for all frames, or (if a list) for each frame.
repeat : bool or integer
        The number of loops. If True, loops indefinitely.
dither : bool
Whether to apply dithering
nq : integer
If nonzero, applies the NeuQuant quantization algorithm to create
the color palette. This algorithm is superior, but slower than
the standard PIL algorithm. The value of nq is the quality
parameter. 1 represents the best quality. 10 is in general a
good tradeoff between quality and speed. When using this option,
better results are usually obtained when subRectangles is False.
subRectangles : False, True, or a list of 2-element tuples
Whether to use sub-rectangles. If True, the minimal rectangle that
is required to update each frame is automatically detected. This
can give significant reductions in file size, particularly if only
        a part of the image changes. You can also give a list of x-y
coordinates if you want to do the cropping yourself. The default
is True.
dispose : int
How to dispose each frame. 1 means that each frame is to be left
in place. 2 means the background color should be restored after
each frame. 3 means the decoder should restore the previous frame.
If subRectangles==False, the default is 2, otherwise it is 1.
"""
# Check PIL
if PIL is None:
raise RuntimeError("Need PIL to write animated gif files.")
# Check images
images = checkImages(images)
# Instantiate writer object
gifWriter = GifWriter()
gifWriter.transparency = False # init transparency flag used in GifWriter functions
# Check loops
if repeat is False:
loops = 1
elif repeat is True:
loops = 0 # zero means infinite
else:
loops = int(repeat)
# Check duration
if hasattr(duration, '__len__'):
if len(duration) == len(images):
duration = [d for d in duration]
else:
raise ValueError("len(duration) doesn't match amount of images.")
else:
duration = [duration for im in images]
# Check subrectangles
if subRectangles:
images, xy, images_info = gifWriter.handleSubRectangles(images, subRectangles)
defaultDispose = 1 # Leave image in place
else:
# Normal mode
xy = [(0,0) for im in images]
defaultDispose = 2 # Restore to background color.
# Check dispose
if dispose is None:
dispose = defaultDispose
if hasattr(dispose, '__len__'):
if len(dispose) != len(images):
raise ValueError("len(xy) doesn't match amount of images.")
else:
dispose = [dispose for im in images]
# Make images in a format that we can write easy
images = gifWriter.convertImagesToPIL(images, dither, nq)
# Write
fp = open(filename, 'wb')
try:
gifWriter.writeGifToFile(fp, images, duration, loops, xy, dispose)
finally:
fp.close()
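# Hedged usage sketch for writeGif (the file name and frame data below are
# made up for illustration):
#
#   import numpy as np
#   frames = [np.random.rand(64, 64) for _ in range(10)]  # floats in [0, 1]
#   writeGif('example.gif', frames, duration=0.2, repeat=True)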
def readGif(filename, asNumpy=True):
""" readGif(filename, asNumpy=True)
Read images from an animated GIF file. Returns a list of numpy
    arrays, or, if asNumpy is false, a list of PIL images.
"""
# Check PIL
if PIL is None:
raise RuntimeError("Need PIL to read animated gif files.")
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to read animated gif files.")
# Check whether it exists
if not os.path.isfile(filename):
raise IOError('File not found: '+str(filename))
# Load file using PIL
pilIm = PIL.Image.open(filename)
pilIm.seek(0)
# Read all images inside
images = []
try:
while True:
# Get image as numpy array
tmp = pilIm.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape)==0:
raise MemoryError("Too little memory to convert PIL image to array")
# Store, and next
images.append(a)
pilIm.seek(pilIm.tell()+1)
except EOFError:
pass
# Convert to normal PIL images if needed
if not asNumpy:
images2 = images
images = []
for index,im in enumerate(images2):
tmp = PIL.Image.fromarray(im)
images.append(tmp)
# Done
return images
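# Round-trip sketch (illustration only; assumes 'example.gif' exists):
#
#   arrays = readGif('example.gif', asNumpy=True)       # list of numpy arrays
#   pil_frames = readGif('example.gif', asNumpy=False)  # list of PIL images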
class NeuQuant:
""" NeuQuant(image, samplefac=10, colors=256)
    samplefac should be an integer of 1 or higher, with 1
    giving the highest quality but the slowest performance.
    With a value of 10, one tenth of all pixels are used during
    training. This value seems a nice tradeoff between speed
    and quality.
    colors is the number of colors to reduce the image to. This
    should preferably be a power of two.
See also:
http://members.ozemail.com.au/~dekker/NEUQUANT.HTML
License of the NeuQuant Neural-Net Quantization Algorithm
---------------------------------------------------------
Copyright (c) 1994 Anthony Dekker
Ported to python by Marius van Voorden in 2010
NEUQUANT Neural-Net quantization algorithm by Anthony Dekker, 1994.
See "Kohonen neural networks for optimal colour quantization"
in "network: Computation in Neural Systems" Vol. 5 (1994) pp 351-367.
for a discussion of the algorithm.
See also http://members.ozemail.com.au/~dekker/NEUQUANT.HTML
Any party obtaining a copy of these files from the author, directly or
indirectly, is granted, free of charge, a full and unrestricted irrevocable,
world-wide, paid up, royalty-free, nonexclusive right and license to deal
in this software and documentation files (the "Software"), including without
limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons who receive
copies from any such party to do so, with the only requirement being
that this copyright notice remain intact.
"""
NCYCLES = None # Number of learning cycles
NETSIZE = None # Number of colours used
SPECIALS = None # Number of reserved colours used
BGCOLOR = None # Reserved background colour
CUTNETSIZE = None
MAXNETPOS = None
INITRAD = None # For 256 colours, radius starts at 32
RADIUSBIASSHIFT = None
RADIUSBIAS = None
INITBIASRADIUS = None
RADIUSDEC = None # Factor of 1/30 each cycle
ALPHABIASSHIFT = None
INITALPHA = None # biased by 10 bits
GAMMA = None
BETA = None
BETAGAMMA = None
network = None # The network itself
    colormap = None # Quantized colour map derived from the network
netindex = None # For network lookup - really 256
bias = None # Bias and freq arrays for learning
freq = None
pimage = None
# Four primes near 500 - assume no image has a length so large
# that it is divisible by all four primes
PRIME1 = 499
PRIME2 = 491
PRIME3 = 487
PRIME4 = 503
MAXPRIME = PRIME4
pixels = None
samplefac = None
a_s = None
def setconstants(self, samplefac, colors):
self.NCYCLES = 100 # Number of learning cycles
self.NETSIZE = colors # Number of colours used
self.SPECIALS = 3 # Number of reserved colours used
self.BGCOLOR = self.SPECIALS-1 # Reserved background colour
self.CUTNETSIZE = self.NETSIZE - self.SPECIALS
self.MAXNETPOS = self.NETSIZE - 1
self.INITRAD = self.NETSIZE/8 # For 256 colours, radius starts at 32
self.RADIUSBIASSHIFT = 6
self.RADIUSBIAS = 1 << self.RADIUSBIASSHIFT
self.INITBIASRADIUS = self.INITRAD * self.RADIUSBIAS
self.RADIUSDEC = 30 # Factor of 1/30 each cycle
self.ALPHABIASSHIFT = 10 # Alpha starts at 1
self.INITALPHA = 1 << self.ALPHABIASSHIFT # biased by 10 bits
self.GAMMA = 1024.0
self.BETA = 1.0/1024.0
self.BETAGAMMA = self.BETA * self.GAMMA
self.network = np.empty((self.NETSIZE, 3), dtype='float64') # The network itself
        self.colormap = np.empty((self.NETSIZE, 4), dtype='int32') # Quantized colour map derived from the network
self.netindex = np.empty(256, dtype='int32') # For network lookup - really 256
self.bias = np.empty(self.NETSIZE, dtype='float64') # Bias and freq arrays for learning
self.freq = np.empty(self.NETSIZE, dtype='float64')
self.pixels = None
self.samplefac = samplefac
self.a_s = {}
def __init__(self, image, samplefac=10, colors=256):
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy for the NeuQuant algorithm.")
# Check image
if image.size[0] * image.size[1] < NeuQuant.MAXPRIME:
raise IOError("Image is too small")
if image.mode != "RGBA":
raise IOError("Image mode should be RGBA.")
# Initialize
self.setconstants(samplefac, colors)
self.pixels = np.fromstring(image.tostring(), np.uint32)
self.setUpArrays()
self.learn()
self.fix()
self.inxbuild()
def writeColourMap(self, rgb, outstream):
for i in range(self.NETSIZE):
            bb = self.colormap[i,0]
            gg = self.colormap[i,1]
            rr = self.colormap[i,2]
outstream.write(rr if rgb else bb)
outstream.write(gg)
outstream.write(bb if rgb else rr)
return self.NETSIZE
def setUpArrays(self):
self.network[0,0] = 0.0 # Black
self.network[0,1] = 0.0
self.network[0,2] = 0.0
self.network[1,0] = 255.0 # White
self.network[1,1] = 255.0
self.network[1,2] = 255.0
# RESERVED self.BGCOLOR # Background
for i in range(self.SPECIALS):
self.freq[i] = 1.0 / self.NETSIZE
self.bias[i] = 0.0
for i in range(self.SPECIALS, self.NETSIZE):
p = self.network[i]
p[:] = (255.0 * (i-self.SPECIALS)) / self.CUTNETSIZE
self.freq[i] = 1.0 / self.NETSIZE
self.bias[i] = 0.0
# Omitted: setPixels
def altersingle(self, alpha, i, b, g, r):
"""Move neuron i towards biased (b,g,r) by factor alpha"""
n = self.network[i] # Alter hit neuron
n[0] -= (alpha*(n[0] - b))
n[1] -= (alpha*(n[1] - g))
n[2] -= (alpha*(n[2] - r))
def geta(self, alpha, rad):
try:
return self.a_s[(alpha, rad)]
except KeyError:
length = rad*2-1
mid = length/2
q = np.array(list(range(mid-1,-1,-1))+list(range(-1,mid)))
a = alpha*(rad*rad - q*q)/(rad*rad)
a[mid] = 0
self.a_s[(alpha, rad)] = a
return a
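    # Worked example (illustrative, not in the original): with alpha=1.0 and
    # rad=3, length=5, mid=2 and q = [1, 0, -1, 0, 1], so
    # a = (9 - q*q)/9 = [8/9, 1, 8/9, 1, 8/9] before a[mid] is zeroed,
    # giving [8/9, 1, 0, 1, 8/9]; the winning neuron itself (the centre)
    # is updated separately in altersingle().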
def alterneigh(self, alpha, rad, i, b, g, r):
if i-rad >= self.SPECIALS-1:
lo = i-rad
start = 0
else:
lo = self.SPECIALS-1
start = (self.SPECIALS-1 - (i-rad))
if i+rad <= self.NETSIZE:
hi = i+rad
end = rad*2-1
else:
hi = self.NETSIZE
end = (self.NETSIZE - (i+rad))
a = self.geta(alpha, rad)[start:end]
p = self.network[lo+1:hi]
p -= np.transpose(np.transpose(p - np.array([b, g, r])) * a)
#def contest(self, b, g, r):
# """ Search for biased BGR values
# Finds closest neuron (min dist) and updates self.freq
# finds best neuron (min dist-self.bias) and returns position
# for frequently chosen neurons, self.freq[i] is high and self.bias[i] is negative
# self.bias[i] = self.GAMMA*((1/self.NETSIZE)-self.freq[i])"""
#
# i, j = self.SPECIALS, self.NETSIZE
# dists = abs(self.network[i:j] - np.array([b,g,r])).sum(1)
# bestpos = i + np.argmin(dists)
# biasdists = dists - self.bias[i:j]
# bestbiaspos = i + np.argmin(biasdists)
# self.freq[i:j] -= self.BETA * self.freq[i:j]
# self.bias[i:j] += self.BETAGAMMA * self.freq[i:j]
# self.freq[bestpos] += self.BETA
# self.bias[bestpos] -= self.BETAGAMMA
# return bestbiaspos
def contest(self, b, g, r):
""" Search for biased BGR values
Finds closest neuron (min dist) and updates self.freq
finds best neuron (min dist-self.bias) and returns position
for frequently chosen neurons, self.freq[i] is high and self.bias[i] is negative
self.bias[i] = self.GAMMA*((1/self.NETSIZE)-self.freq[i])"""
i, j = self.SPECIALS, self.NETSIZE
dists = abs(self.network[i:j] - np.array([b,g,r])).sum(1)
bestpos = i + np.argmin(dists)
biasdists = dists - self.bias[i:j]
bestbiaspos = i + np.argmin(biasdists)
self.freq[i:j] *= (1-self.BETA)
self.bias[i:j] += self.BETAGAMMA * self.freq[i:j]
self.freq[bestpos] += self.BETA
self.bias[bestpos] -= self.BETAGAMMA
return bestbiaspos
def specialFind(self, b, g, r):
for i in range(self.SPECIALS):
n = self.network[i]
if n[0] == b and n[1] == g and n[2] == r:
return i
return -1
def learn(self):
biasRadius = self.INITBIASRADIUS
alphadec = 30 + ((self.samplefac-1)/3)
lengthcount = self.pixels.size
samplepixels = lengthcount / self.samplefac
delta = samplepixels / self.NCYCLES
alpha = self.INITALPHA
        i = 0
rad = biasRadius >> self.RADIUSBIASSHIFT
if rad <= 1:
rad = 0
print("Beginning 1D learning: samplepixels = %1.2f rad = %i" %
(samplepixels, rad) )
step = 0
pos = 0
if lengthcount%NeuQuant.PRIME1 != 0:
step = NeuQuant.PRIME1
elif lengthcount%NeuQuant.PRIME2 != 0:
step = NeuQuant.PRIME2
elif lengthcount%NeuQuant.PRIME3 != 0:
step = NeuQuant.PRIME3
else:
step = NeuQuant.PRIME4
i = 0
printed_string = ''
while i < samplepixels:
if i%100 == 99:
tmp = '\b'*len(printed_string)
printed_string = str((i+1)*100/samplepixels)+"%\n"
print(tmp + printed_string)
p = self.pixels[pos]
r = (p >> 16) & 0xff
g = (p >> 8) & 0xff
b = (p ) & 0xff
if i == 0: # Remember background colour
self.network[self.BGCOLOR] = [b, g, r]
j = self.specialFind(b, g, r)
if j < 0:
j = self.contest(b, g, r)
if j >= self.SPECIALS: # Don't learn for specials
a = (1.0 * alpha) / self.INITALPHA
self.altersingle(a, j, b, g, r)
if rad > 0:
self.alterneigh(a, rad, j, b, g, r)
pos = (pos+step)%lengthcount
i += 1
            if delta and i%delta == 0: # guard against delta == 0 for small images
alpha -= alpha / alphadec
biasRadius -= biasRadius / self.RADIUSDEC
rad = biasRadius >> self.RADIUSBIASSHIFT
if rad <= 1:
rad = 0
finalAlpha = (1.0*alpha)/self.INITALPHA
print("Finished 1D learning: final alpha = %1.2f!" % finalAlpha)
def fix(self):
for i in range(self.NETSIZE):
for j in range(3):
x = int(0.5 + self.network[i,j])
x = max(0, x)
x = min(255, x)
self.colormap[i,j] = x
self.colormap[i,3] = i
def inxbuild(self):
previouscol = 0
startpos = 0
for i in range(self.NETSIZE):
p = self.colormap[i]
q = None
smallpos = i
smallval = p[1] # Index on g
# Find smallest in i..self.NETSIZE-1
for j in range(i+1, self.NETSIZE):
q = self.colormap[j]
if q[1] < smallval: # Index on g
smallpos = j
smallval = q[1] # Index on g
q = self.colormap[smallpos]
# Swap p (i) and q (smallpos) entries
if i != smallpos:
p[:],q[:] = q, p.copy()
# smallval entry is now in position i
if smallval != previouscol:
self.netindex[previouscol] = (startpos+i) >> 1
for j in range(previouscol+1, smallval):
self.netindex[j] = i
previouscol = smallval
startpos = i
self.netindex[previouscol] = (startpos+self.MAXNETPOS) >> 1
for j in range(previouscol+1, 256): # Really 256
self.netindex[j] = self.MAXNETPOS
def paletteImage(self):
""" PIL weird interface for making a paletted image: create an image which
already has the palette, and use that in Image.quantize. This function
returns this palette image. """
if self.pimage is None:
palette = []
for i in range(self.NETSIZE):
palette.extend(self.colormap[i][:3])
palette.extend([0]*(256-self.NETSIZE)*3)
# a palette image to use for quant
self.pimage = Image.new("P", (1, 1), 0)
self.pimage.putpalette(palette)
return self.pimage
def quantize(self, image):
""" Use a kdtree to quickly find the closest palette colors for the pixels """
if get_cKDTree():
return self.quantize_with_scipy(image)
else:
print('Scipy not available, falling back to slower version.')
return self.quantize_without_scipy(image)
def quantize_with_scipy(self, image):
w,h = image.size
px = np.asarray(image).copy()
px2 = px[:,:,:3].reshape((w*h,3))
cKDTree = get_cKDTree()
kdtree = cKDTree(self.colormap[:,:3],leafsize=10)
result = kdtree.query(px2)
colorindex = result[1]
print("Distance: %1.2f" % (result[0].sum()/(w*h)) )
px2[:] = self.colormap[colorindex,:3]
return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage())
def quantize_without_scipy(self, image):
"""" This function can be used if no scipy is availabe.
It's 7 times slower though.
"""
w,h = image.size
px = np.asarray(image).copy()
memo = {}
for j in range(w):
for i in range(h):
key = (px[i,j,0],px[i,j,1],px[i,j,2])
try:
val = memo[key]
except KeyError:
val = self.convert(*key)
memo[key] = val
px[i,j,0],px[i,j,1],px[i,j,2] = val
return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage())
def convert(self, *color):
i = self.inxsearch(*color)
return self.colormap[i,:3]
def inxsearch(self, r, g, b):
"""Search for BGR values 0..255 and return colour index"""
dists = (self.colormap[:,:3] - np.array([r,g,b]))
        a = np.argmin((dists*dists).sum(1))
return a
if __name__ == '__main__':
im = np.zeros((200,200), dtype=np.uint8)
im[10:30,:] = 100
im[:,80:120] = 255
im[-50:-40,:] = 50
images = [im*1.0, im*0.8, im*0.6, im*0.4, im*0]
writeGif('lala3.gif',images, duration=0.5, dither=0)
| {
"content_hash": "031e6a9935a33c6b8ccf93e220c91203",
"timestamp": "",
"source": "github",
"line_count": 1061,
"max_line_length": 130,
"avg_line_length": 33.3449575871819,
"alnum_prop": 0.5621696486616354,
"repo_name": "HallaZzang/gersang-spritelib",
"id": "264ffb9e6c599cefe72a45e4bf3c872023505573",
"size": "37010",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spritelib/images2gif/images2gif.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6402"
},
{
"name": "Python",
"bytes": "52931"
}
],
"symlink_target": ""
} |
import math
from functools import cmp_to_key
from django.db import models
import json
from accounts.models import PlayerUser
from pmtour.models import BaseModel, Tournament
class Player(BaseModel):
user = models.ForeignKey(PlayerUser, on_delete=models.CASCADE)
    name = models.CharField("name", max_length=100, default="")  # name used in the tournament; may differ from self.user.name
tournament = models.ForeignKey(Tournament, on_delete=models.CASCADE)
playerid = models.SmallIntegerField()
wins = models.SmallIntegerField(default=0)
loses = models.SmallIntegerField(default=0)
ties = models.SmallIntegerField(default=0)
byes = models.SmallIntegerField(default=0)
foes = models.ManyToManyField("self")
standing = models.SmallIntegerField(default=0)
late = models.BooleanField(default=False)
eliminated = models.BooleanField(default=False)
exited = models.BooleanField(default=False)
score = models.IntegerField(default=0)
@classmethod
def create(cls, **kwargs):
player = cls.objects.create(**kwargs)
if player.late and player.tournament.status > 0:
player.loses += player.tournament.status
player.save()
return player
@classmethod
def create_from_data(cls, tour, data):
if data["player_id"].find("test") == -1:
q = PlayerUser.objects.filter(player_id=data["player_id"])
if q:
if data.get("username", data["player_id"]) == q[0].name:
playeruser = q[0]
else:
                    raise cls.LoaddataError("the username doesn't match the player id.")
else:
playeruser = PlayerUser.create_existed_player(
player_id=data["player_id"],
name=data.get("username", data["player_id"])
)
else:
playeruser = PlayerUser.create_test_player(tour, data["name"], data["playerid"])
if not data["standing"]:
data["standing"] = ""
player = cls.create(
user=playeruser,
name=data.get("name", data["playerid"]),
tournament=tour,
playerid=data["playerid"],
wins=data["wins"],
loses=data["loses"],
ties=data["ties"],
byes=data["byes"],
standing=data["standing"],
late=data["late"],
eliminated=data["eliminated"],
exited=data["exited"],
score=data["score"]
)
return player
@classmethod
def dumpdata(cls, tour):
players = []
for player in tour.player_set.all():
aplayer = {
"username": player.user.name,
"player_id": player.user.player_id,
"name": player.name,
"playerid": player.playerid,
"wins": player.wins,
"loses": player.loses,
"ties": player.ties,
"byes": player.byes,
"foes": json.dumps([x.playerid for x in player.foes.all()]),
"standing": player.standing,
"late": player.late,
"eliminated": player.eliminated,
"exited": player.exited,
"score": player.score
}
players.append(aplayer)
return players
@classmethod
def loaddata(cls, tour, players_data):
def cancel_load(data):
for p in data:
p[0].user.delete()
p[0].delete()
players = []
for player_data in players_data:
try:
player = cls.create_from_data(tour, player_data)
except cls.LoaddataError:
cancel_load(players)
return "Error creating players."
players.append((player, player_data["foes"]))
for player, foes_data in players:
foes = json.loads(foes_data)
for foe_id in foes:
try:
foe = tour.player_set.get(playerid=foe_id)
except cls.DoesNotExist:
cancel_load(players)
return "Error loading foes."
player.foes.add(foe)
player.save()
return False
def __init__(self, *arg, **kwargs):
super(Player, self).__init__(*arg, **kwargs)
self._winning_percentage = {}
self._opponents_wp = {}
self._opps_opps_wp = {}
def __str__(self):
return "%s(%s) (%s) %s" % (self.name, self.user.name, self.get_printable(), self.score)
def get_printable(self):
return "%s/%s/%s" % (self.wins + self.byes, self.loses, self.ties)
def _get_winning_percentage(self):
wins, loses, byes = self.wins, self.loses, self.byes
s_turns = self.tournament.get_option("turns")
if self.tournament.status > s_turns:
turns = self.tournament.turn_set.filter(turn_number__gt=s_turns)
for turn in turns:
log = turn.log_set.filter(player_a=self)
if log.count() == 0:
log = turn.log_set.filter(player_b=self)
if log.count() == 0:
continue
log = log[0]
if log.status == 1:
if log.player_a == self:
wins -= 1
else:
loses -= 1
elif log.status == 2:
if log.player_a == self:
loses -= 1
else:
wins -= 1
elif log.status == 4:
byes -= 1
re = wins + loses + byes
if re == 0:
return 0.0
re = float(wins) / re
if re < 0.25:
re = 0.25
if self.exited and re > 0.75:
re = 0.75
return re
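    # Illustrative values for the clamping above: a 1-3 player has a raw
    # winning percentage of 0.25, a 0-4 player is lifted from 0.0 to the
    # 0.25 floor, and a player who exited is capped at 0.75 regardless of
    # an actual record above that.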
def _get_opponents_wp(self):
if len(self.foes.all()) == 0:
return 0.0
else:
return sum([x.winning_percentage for x in self.foes.all()]) / self.foes.count()
def _get_opps_opps_wp(self):
if len(self.foes.all()) == 0:
return 0.0
else:
return sum([x.opponents_wp for x in self.foes.all()]) / self.foes.count()
@property
def winning_percentage(self):
        if self.tournament.status not in self._winning_percentage:
self._winning_percentage[self.tournament.status] = self._get_winning_percentage()
return self._winning_percentage[self.tournament.status]
@property
def opponents_wp(self):
        if self.tournament.status not in self._opponents_wp:
self._opponents_wp[self.tournament.status] = self._get_opponents_wp()
return round(self._opponents_wp[self.tournament.status], 5)
@property
def opps_opps_wp(self):
        if self.tournament.status not in self._opps_opps_wp:
self._opps_opps_wp[self.tournament.status] = self._get_opps_opps_wp()
return round(self._opps_opps_wp[self.tournament.status], 5)
def gen_standing_dict(self):
return {
"standing": self.standing,
"pid": self.playerid,
"match": self.get_printable(),
"score": self.score,
"opswin": "{0:.2%}".format(self.opponents_wp),
"opsopswin": "{0:.2%}".format(self.opps_opps_wp),
}
def set_log(self, status, foe=None, scored=True):
if status == 4:
if scored:
self.score += 3
self.byes += 1
elif status == 3:
if scored:
self.score += 1
self.ties += 1
elif status == 2:
self.loses += 1
elif status == 1:
if scored:
self.score += 3
self.wins += 1
if scored and foe is not None:
self.foes.add(foe)
def delete_log(self, status, foe=None, scored=True):
if status == 4:
if scored:
self.score -= 3
self.byes -= 1
elif status == 3:
if scored:
self.score -= 1
self.ties -= 1
elif status == 2:
self.loses -= 1
elif status == 1:
if scored:
self.score -= 3
self.wins -= 1
if scored and foe is not None:
self.foes.remove(foe)
def exit(self):
self.exited = True
def eliminate(self):
self.eliminated = True
def has_meeted(self, another):
return another in self.foes.all()
@staticmethod
def _by_playerid(a, b):
if a.playerid > b.playerid:
return 1
return -1
@staticmethod
def _by_standing(a, b):
if a.standing > b.standing:
return 1
else:
return -1
@classmethod
def _get_sorted(cls, tour, compare, **kwargs):
players = tour.player_set.filter(**kwargs)
players = [x for x in players]
players.sort(key=cmp_to_key(compare))
return players
@classmethod
def get_sorted_by_playerid(cls, tour):
return cls._get_sorted(tour, cls._by_playerid, exited=False, eliminated=False)
@classmethod
def get_sorted_by_standing(cls, tour):
return cls._get_sorted(tour, cls._by_standing, exited=False, eliminated=False)
@classmethod
def get_sorted_for_elims(cls, tour):
players = cls._get_sorted(tour, cls._by_standing)
elims = tour.get_option("elims")
if 2 ** int(math.log(elims, 2)) != elims:
raise Tournament.NoTypeError("the number of players is wrong")
if elims == 16:
q = [1, 16, 8, 9, 5, 12, 4, 13, 6, 11, 3, 14, 7, 10, 2, 15]
elif elims == 8:
q = [1, 8, 4, 5, 3, 6, 2, 7]
elif elims == 4:
q = [1, 4, 2, 3]
elif elims == 2:
q = [1, 2]
else:
raise Tournament.NoTypeError("the number of players is wrong")
players = [players[i - 1] for i in q if not players[i - 1].eliminated]
return players
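    # Seeding sketch (added for clarity): for elims == 8 the order
    # [1, 8, 4, 5, 3, 6, 2, 7] pairs adjacent entries into matches
    # (1 vs 8), (4 vs 5), (3 vs 6) and (2 vs 7), so the top seeds can
    # only meet in the latest possible round of the elimination bracket.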
| {
"content_hash": "f2aa6e8f1aceb089b00c00be3c8da58a",
"timestamp": "",
"source": "github",
"line_count": 295,
"max_line_length": 95,
"avg_line_length": 34.3593220338983,
"alnum_prop": 0.5226913970007893,
"repo_name": "sunoru/pokemon_tournament",
"id": "d0d7a396ad9e6c9263b622e650757607a86dc599",
"size": "10179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pmtour/models/player.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2341"
},
{
"name": "HTML",
"bytes": "39898"
},
{
"name": "JavaScript",
"bytes": "9376"
},
{
"name": "Python",
"bytes": "73558"
},
{
"name": "Shell",
"bytes": "577"
}
],
"symlink_target": ""
} |
'''
Created on Feb 15, 2017
@author: julien
'''
from copy import deepcopy
import unittest
from keras.layers.core import Dense
from minos.experiment.experiment import Experiment, ExperimentParameters,\
check_experiment_parameters
from minos.experiment.training import Training, EpochStoppingCondition
from minos.model.design import create_random_blueprint, mutate_blueprint
from minos.model.model import Layout, Objective, Metric
from minos.model.parameter import int_param, float_param
from minos.model.parameters import register_custom_activation,\
register_custom_layer, reference_parameters
class MutationTest(unittest.TestCase):
def test_mutate_layout(self):
layout = Layout(
input_size=100,
output_size=10,
output_activation='softmax')
training = Training(
objective=None,
optimizer=None,
metric=None,
stopping=None,
batch_size=None)
experiment = Experiment(
'test',
layout,
training,
batch_iterator=None,
test_batch_iterator=None,
environment=None,
parameters=ExperimentParameters(use_default_values=False))
check_experiment_parameters(experiment)
for _ in range(10):
blueprint = create_random_blueprint(experiment)
mutant = mutate_blueprint(
blueprint,
parameters=experiment.parameters,
p_mutate_layout=1,
layout_mutation_count=1,
layout_mutables=['rows'],
mutate_in_place=False)
self.assertTrue(
len(mutant.layout.rows) != len(blueprint.layout.rows),
'Should have mutated rows')
mutant = mutate_blueprint(
blueprint,
parameters=experiment.parameters,
p_mutate_layout=1,
layout_mutation_count=1,
layout_mutables=['blocks'],
mutate_in_place=False)
self.assertTrue(
len(mutant.layout.get_blocks()) != len(blueprint.layout.get_blocks()),
'Should have mutated blocks')
mutant = mutate_blueprint(
blueprint,
parameters=experiment.parameters,
p_mutate_layout=1,
layout_mutation_count=1,
layout_mutables=['layers'],
mutate_in_place=False)
self.assertTrue(
len(mutant.layout.get_layers()) != len(blueprint.layout.get_layers()),
'Should have mutated layers')
def test_mutate_parameters(self):
layout = Layout(
input_size=100,
output_size=10,
output_activation='softmax')
training = Training(
objective=None,
optimizer=None,
metric=None,
stopping=None,
batch_size=None)
experiment = Experiment(
'test',
layout,
training,
batch_iterator=None,
test_batch_iterator=None,
environment=None,
parameters=ExperimentParameters(use_default_values=False))
for _ in range(10):
blueprint = create_random_blueprint(experiment)
mutant = mutate_blueprint(
blueprint,
parameters=experiment.parameters,
p_mutate_layout=0,
p_mutate_param=1,
mutate_in_place=False)
for row_idx, row in enumerate(mutant.layout.rows):
for block_idx, block in enumerate(row.blocks):
for layer_idx, layer in enumerate(block.layers):
original_row = blueprint.layout.rows[row_idx]
original_block = original_row.blocks[block_idx]
original_layer = original_block.layers[layer_idx]
for name, value in layer.parameters.items():
self.assertTrue(
value != original_layer.parameters[name],
'Should have mutated parameter %s' % name)
def test_mutate_w_custom_definitions(self):
def custom_activation(x):
return x
register_custom_activation('custom_activation', custom_activation)
register_custom_layer('Dense2', Dense, deepcopy(reference_parameters['layers']['Dense']))
layout = Layout(
input_size=100,
output_size=10,
output_activation='softmax',
block=['Dense', 'Dense2'])
training = Training(
objective=Objective('categorical_crossentropy'),
optimizer=None,
metric=Metric('categorical_accuracy'),
stopping=EpochStoppingCondition(5),
batch_size=250)
experiment_parameters = ExperimentParameters(use_default_values=False)
experiment_parameters.layout_parameter('blocks', int_param(1, 5))
experiment_parameters.layout_parameter('layers', int_param(1, 5))
experiment_parameters.layer_parameter('Dense2.output_dim', int_param(10, 500))
experiment_parameters.layer_parameter('Dropout.p', float_param(0.1, 0.9))
experiment = Experiment(
'test',
layout,
training,
batch_iterator=None,
test_batch_iterator=None,
environment=None,
parameters=experiment_parameters)
check_experiment_parameters(experiment)
for _ in range(10):
blueprint = create_random_blueprint(experiment)
mutant = mutate_blueprint(
blueprint,
parameters=experiment.parameters,
p_mutate_layout=0,
p_mutate_param=1,
mutate_in_place=False)
for row_idx, row in enumerate(mutant.layout.rows):
for block_idx, block in enumerate(row.blocks):
for layer_idx, layer in enumerate(block.layers):
original_row = blueprint.layout.rows[row_idx]
original_block = original_row.blocks[block_idx]
original_layer = original_block.layers[layer_idx]
for name, value in layer.parameters.items():
self.assertTrue(
value != original_layer.parameters[name],
'Should have mutated parameter')
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| {
"content_hash": "65099393c099d11715b6556622299ee3",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 97,
"avg_line_length": 39.12865497076023,
"alnum_prop": 0.5597070691974294,
"repo_name": "guybedo/minos",
"id": "9ac872ea265d05a8f4dd63a37086c510b257b13b",
"size": "6691",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "tests/model/mutation_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "131533"
}
],
"symlink_target": ""
} |
from six import text_type
from typing import Any, Dict
from zerver.lib.test_classes import WebhookTestCase
class TransifexHookTests(WebhookTestCase):
STREAM_NAME = 'transifex'
URL_TEMPLATE = u"/api/v1/external/transifex?stream={stream}&api_key={api_key}&{data_template}"
URL_DATA_TEMPLATE = "project={project}&language={language}&resource={resource}&{method}"
URL_REVIEWED_METHOD_TEMPLATE = "reviewed=100"
URL_TRANSLATED_METHOD_TEMPLATE = "translated=100"
FIXTURE_DIR_NAME = 'transifex'
PROJECT = 'project-title'
LANGUAGE = 'en'
RESOURCE = 'file'
REVIEWED = True
def test_transifex_reviewed_message(self):
# type: () -> None
self.REVIEWED = True
expected_subject = "{} in {}".format(self.PROJECT, self.LANGUAGE)
expected_message = "Resource {} fully reviewed.".format(self.RESOURCE)
self.url = self.build_webhook_url()
self.send_and_test_stream_message(None, expected_subject, expected_message)
def test_transifex_translated_message(self):
# type: () -> None
self.REVIEWED = False
expected_subject = "{} in {}".format(self.PROJECT, self.LANGUAGE)
expected_message = "Resource {} fully translated.".format(self.RESOURCE)
self.url = self.build_webhook_url()
self.send_and_test_stream_message(None, expected_subject, expected_message)
self.REVIEWED = True
def build_webhook_url(self):
# type: () -> text_type
url_data = self.URL_DATA_TEMPLATE.format(
project=self.PROJECT,
language=self.LANGUAGE,
resource=self.RESOURCE,
method=self.URL_REVIEWED_METHOD_TEMPLATE if self.REVIEWED else self.URL_TRANSLATED_METHOD_TEMPLATE
)
api_key = self.get_api_key(self.TEST_USER_EMAIL)
return self.URL_TEMPLATE.format(api_key=api_key, stream=self.STREAM_NAME, data_template=url_data)
def get_body(self, fixture_name):
# type: (text_type) -> Dict[str, Any]
return {}
| {
"content_hash": "6a5407464a8c291dd21e9c8762c947d1",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 110,
"avg_line_length": 42.041666666666664,
"alnum_prop": 0.6565906838453914,
"repo_name": "vikas-parashar/zulip",
"id": "7d8c4aff43559f77dd0eb9460c6ebbc2854fd0f3",
"size": "2042",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "zerver/tests/webhooks/test_transifex.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "241321"
},
{
"name": "Groovy",
"bytes": "5516"
},
{
"name": "HTML",
"bytes": "459360"
},
{
"name": "JavaScript",
"bytes": "1466602"
},
{
"name": "Nginx",
"bytes": "1280"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "401825"
},
{
"name": "Puppet",
"bytes": "82241"
},
{
"name": "Python",
"bytes": "2930542"
},
{
"name": "Ruby",
"bytes": "249748"
},
{
"name": "Shell",
"bytes": "35313"
}
],
"symlink_target": ""
} |
import random
from tempest import config
from tempest import test
from oslo_log import log as logging
from tempest_lib.common.utils import data_utils
import test_subnets as SNET
CONF = config.CONF
LOG = logging.getLogger(__name__)
VLAN_PHYSICAL_NETWORK = CONF.nsxv.vlan_physical_network or None
VLAN_ID_PROVIDER = CONF.nsxv.provider_vlan_id
class VlanNetworksTestJSON(SNET.SubnetTestJSON):
_interface = 'json'
_vlanid = int(VLAN_ID_PROVIDER)
_provider_network_body = {
'name': data_utils.rand_name('VLAN-%04d-network' % _vlanid),
'provider:network_type': 'vlan',
'provider:physical_network': VLAN_PHYSICAL_NETWORK,
'provider:segmentation_id': _vlanid}
@classmethod
def resource_setup(cls):
cls.vlan_range = (2001, 2999)
cls.vlan_assigned = []
super(VlanNetworksTestJSON, cls).resource_setup()
def get_next_vlan(self):
next_vlan = self.next_vlan
self.next_vlan += 1
if self.next_vlan > self.vlan_range[1]:
self.next_vlan = self.vlan_range[0]
return next_vlan
def get_vlan(self):
for x in range(0, 10):
next_vlan = random.randint(*self.vlan_range)
if next_vlan in self.vlan_assigned:
continue
else:
self.vlan_assigned.append(next_vlan)
return next_vlan
return 3000
def _create_network(self, _auto_clean_up=True, network_name=None,
**kwargs):
segmentation_id = kwargs.pop('provider:segmentation_id', None)
if not segmentation_id:
segmentation_id = self.get_vlan()
network_name = (network_name or
data_utils.rand_name(
'vlan-' + str(segmentation_id) + '-netwk'))
post_body = {'name': network_name,
'provider:network_type': 'vlan',
'provider:physical_network': VLAN_PHYSICAL_NETWORK,
'provider:segmentation_id': segmentation_id}
post_body.update(kwargs)
for k, v in post_body.items():
if not v:
post_body.pop(k)
LOG.debug("create VLAN network: %s", str(post_body))
body = self.create_network(**post_body)
network = body['network']
if _auto_clean_up:
self.addCleanup(self._try_delete_network, network['id'])
return network
@test.attr(type='smoke')
@test.idempotent_id('c5f98016-dee3-42f1-8c23-b9cd1e625561')
def test_create_network(self):
# Create a network as an admin user specifying the
# vlan network type attribute
provider_attrs = {
'provider:network_type': 'vlan',
'provider:physical_network': VLAN_PHYSICAL_NETWORK,
'provider:segmentation_id': 1002}
network = self._create_network(_auto_clean_up=False, **provider_attrs)
# Verifies parameters
self.assertIsNotNone(network['id'])
self.assertEqual(network.get('provider:network_type'), 'vlan')
if VLAN_PHYSICAL_NETWORK:
self.assertEqual(network.get('provider:physical_network'),
VLAN_PHYSICAL_NETWORK)
self.assertEqual(network.get('provider:segmentation_id'), 1002)
self._delete_network(network['id'])
@test.attr(type='smoke')
@test.idempotent_id('714e69eb-bb31-4cfc-9804-8e988f04ca65')
def test_update_network(self):
        # Update a vlan network as an admin user, changing the
        # shared and name attributes
net_profile = {'shared': True, '_auto_clean_up': False,
'provider:segmentation_id': 1003}
network = self._create_network(**net_profile)
self.assertEqual(network.get('shared'), True)
new_name = network['name'] + "-updated"
update_body = {'shared': False, 'name': new_name}
body = self.update_network(network['id'], **update_body)
updated_network = body['network']
# Verify that name and shared parameters were updated
self.assertEqual(updated_network['shared'], False)
self.assertEqual(updated_network['name'], new_name)
        # get the vlan network attributes and verify them
body = self.show_network(network['id'])
updated_network = body['network']
# Verify that name and shared parameters were updated
self.assertEqual(updated_network['shared'], False)
self.assertEqual(updated_network['name'], new_name)
self.assertEqual(updated_network['status'], network['status'])
self.assertEqual(updated_network['subnets'], network['subnets'])
self._delete_network(network['id'])
@test.attr(type='smoke')
@test.idempotent_id('8a8b9f2c-37f8-4c53-b8e3-0c9c0910380f')
def test_list_networks(self):
        # Create a vlan network
net_profile = {'shared': True, '_auto_clean_up': False,
'provider:segmentation_id': 1004}
network = self._create_network(**net_profile)
# List networks as a normal user and confirm it is available
body = self.list_networks(client=self.networks_client)
networks_list = [net['id'] for net in body['networks']]
self.assertIn(network['id'], networks_list)
update_body = {'shared': False}
body = self.update_network(network['id'], **update_body)
# List networks as a normal user and confirm it is not available
body = self.list_networks(client=self.networks_client)
networks_list = [net['id'] for net in body['networks']]
self.assertNotIn(network['id'], networks_list)
self._delete_network(network['id'])
@test.attr(type='smoke')
@test.idempotent_id('5807958d-9ee2-48a5-937e-ddde092956a6')
def test_show_network_attributes(self):
        # Create a vlan network
net_profile = {'shared': True, '_auto_clean_up': False,
'provider:segmentation_id': 1005}
network = self._create_network(**net_profile)
        # Show a vlan network as a normal user and confirm that its
        # attributes are returned.
body = self.show_network(network['id'], client=self.networks_client)
show_net = body['network']
self.assertEqual(network['name'], show_net['name'])
self.assertEqual(network['id'], show_net['id'])
# provider attributes are for admin only
body = self.show_network(network['id'])
show_net = body['network']
net_attr_list = show_net.keys()
for attr in ('admin_state_up', 'port_security_enabled', 'shared',
'status', 'subnets', 'tenant_id', 'router:external',
'provider:network_type', 'provider:physical_network',
'provider:segmentation_id'):
self.assertIn(attr, net_attr_list)
self._delete_network(network['id'])
| {
"content_hash": "4b8d1a01a45c3903896a9fd69361b343",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 78,
"avg_line_length": 43.69620253164557,
"alnum_prop": 0.6100811123986095,
"repo_name": "gravity-tak/vmware-nsx-tempest",
"id": "020f5a380579d8c7da0e6a68cf45b19169e6e6df",
"size": "7540",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vmware_nsx_tempest/tests/nsxv/api/test_vlan_network.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "232235"
}
],
"symlink_target": ""
} |
import pytest
from vcx.error import ErrorCode, VcxError
from vcx.api.credential_def import CredentialDef
source_id = '123'
schema_id = 'schema_id1'
name = 'schema name'
schema_no = 44
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_create_credential_def():
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
assert credential_def.source_id == source_id
assert credential_def.handle > 0
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_serialize():
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
data = await credential_def.serialize()
assert data['data']['source_id'] == source_id
assert data['data']['name'] == name
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_serialize_with_bad_handle():
with pytest.raises(VcxError) as e:
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
credential_def.handle = 0
await credential_def.serialize()
assert ErrorCode.InvalidCredentialDefHandle == e.value.error_code
assert 'Invalid Credential Definition handle' == e.value.error_msg
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_deserialize():
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
data = await credential_def.serialize()
credential_def2 = await CredentialDef.deserialize(data)
assert credential_def2.source_id == data.get('data').get('source_id')
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_deserialize_with_invalid_data():
with pytest.raises(VcxError) as e:
data = {'invalid': -99}
await CredentialDef.deserialize(data)
assert ErrorCode.InvalidCredentialDef == e.value.error_code
assert 'Credential Def not in valid json' == e.value.error_msg
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_serialize_deserialize_and_then_serialize():
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
data1 = await credential_def.serialize()
credential_def2 = await CredentialDef.deserialize(data1)
data2 = await credential_def2.serialize()
assert data1 == data2
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_release():
with pytest.raises(VcxError) as e:
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
assert credential_def.handle > 0
credential_def.release()
await credential_def.serialize()
assert ErrorCode.InvalidCredentialDefHandle == e.value.error_code
assert 'Invalid Credential Definition handle' == e.value.error_msg
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_get_cred_def_id_and_payment_txn():
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
assert await credential_def.get_cred_def_id() == '2hoqvcwupRTUNkXn6ArYzs:3:CL:2471'
txn = await credential_def.get_payment_txn()
assert txn
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_credential_def_prepare_for_endorser():
credential_def = await CredentialDef.prepare_for_endorser(source_id, name, schema_id, 'V4SGRU86Z58d6TV7PBUe6f')
assert credential_def.source_id == source_id
assert credential_def.handle > 0
assert credential_def.transaction
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_credential_def_update_state():
credential_def = await CredentialDef.prepare_for_endorser(source_id, name, schema_id, 'V4SGRU86Z58d6TV7PBUe6f')
assert 0 == await credential_def.get_state()
assert 1 == await credential_def.update_state()
assert 1 == await credential_def.get_state()
@pytest.mark.asyncio
@pytest.mark.usefixtures('vcx_init_test_mode')
async def test_credential_def_get_state():
credential_def = await CredentialDef.create(source_id, name, schema_id, 0)
assert 1 == await credential_def.get_state()
| {
"content_hash": "ce068fae2ad39eee54a9db4fa077352d",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 115,
"avg_line_length": 37.518181818181816,
"alnum_prop": 0.7307971892415799,
"repo_name": "Artemkaaas/indy-sdk",
"id": "2349fd289d1b94bc6318976c4f54f7770c13aba4",
"size": "4127",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vcx/wrappers/python3/tests/test_credential_def.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "304593"
},
{
"name": "C#",
"bytes": "851667"
},
{
"name": "C++",
"bytes": "113048"
},
{
"name": "CSS",
"bytes": "137079"
},
{
"name": "Dockerfile",
"bytes": "23662"
},
{
"name": "Groovy",
"bytes": "91076"
},
{
"name": "HTML",
"bytes": "897750"
},
{
"name": "Java",
"bytes": "857349"
},
{
"name": "JavaScript",
"bytes": "187603"
},
{
"name": "Makefile",
"bytes": "328"
},
{
"name": "Objective-C",
"bytes": "567071"
},
{
"name": "Objective-C++",
"bytes": "687924"
},
{
"name": "Perl",
"bytes": "8271"
},
{
"name": "Python",
"bytes": "693656"
},
{
"name": "Ruby",
"bytes": "80312"
},
{
"name": "Rust",
"bytes": "5242907"
},
{
"name": "Shell",
"bytes": "248295"
},
{
"name": "Swift",
"bytes": "1114"
},
{
"name": "TypeScript",
"bytes": "201111"
}
],
"symlink_target": ""
} |
"""
The Predictor module estimates future values using the monitoring data.
@author: fernandez
"""
# pylint: disable=E0611,E1101
import numpy as np
import statsmodels.api as sm
from pandas import Series, DataFrame
from statsmodels.datasets.utils import Dataset
from scipy import stats
import math
"""
These prediction models need a minimum of 15-20 input values to be able to make future predictions.
"""
class Prediction_Models:
def __init__(self, logger):
self.logger = logger
def average(self, x):
assert len(x) > 0
return float(sum(x)) / len(x)
def correlation(self, x, y):
assert len(x) == len(y)
n = len(x)
assert n > 0
avg_x = self.average(x)
avg_y = self.average(y)
diffprod = 0
xdiff2 = 0
ydiff2 = 0
for idx in range(n):
xdiff = x[idx] - avg_x
ydiff = y[idx] - avg_y
diffprod += xdiff * ydiff
xdiff2 += xdiff * xdiff
ydiff2 += ydiff * ydiff
return diffprod / math.sqrt(xdiff2 * ydiff2)
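    # Quick sanity checks (illustrative, not in the original):
    #   correlation([1, 2, 3], [2, 4, 6]) == 1.0   # perfectly linear
    #   correlation([1, 2, 3], [3, 2, 1]) == -1.0  # perfectly anti-correlated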
def linear_regression(self, php_resp_time, num_predictions):
forecast = []
try:
time = [t for t in range(len(php_resp_time))]
slope, intercept, r_value, p_value, std_err = stats.linregress(time, php_resp_time)
for pred in range(num_predictions):
forecast.append(intercept + slope * (pred + time[len(php_resp_time) - 1]))
self.logger.info("LR: Forecast values obtained for php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast))
        except Exception as e:
            forecast = [0]
self.logger.error("ERROR calculating the LR for the php_resp_time values with php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast) + " " + str(e))
return forecast
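    # Hedged example of the extrapolation above: with
    # php_resp_time = [1.0, 2.0, 3.0, 4.0] (slope 1.0, intercept 1.0) and
    # num_predictions = 2, the loop evaluates t = 3 and t = 4, so the
    # forecast is [4.0, 5.0] -- note the first point re-fits the last
    # observed index before extrapolating.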
def vector_auto_regression(self, php_resp_time, cpu_load, num_predictions):
# year = 1921 # year to be mapped with the values
forecast = [] # predict array
try:
# dates = sm.tsa.datetools.dates_from_range(str(year), length=len(php_resp_time)) #
mdata = self.load_data(php_resp_time, cpu_load).data
mdata = mdata[['value', 'value2']]
data = np.array(mdata) # ndarray
model = sm.tsa.VAR(data)
# maxlag: Maximum number of lags to check for order selection, defaults to
# 12 * (nobs/100.)**(1./4), see select_order function
results = model.fit(maxlags=5) # 3
lag_order = results.k_ar
pred = results.forecast(data[-lag_order:], num_predictions)
# start_time = year + len(php_resp_time) #start time year of the prediction
#dates_pred = sm.tsa.datetools.dates_from_range(str(start_time), length=num_predictions)
for i in pred:
                forecast.append(i[0]) # first column
self.logger.info("Vector_auto_regression: Forecast values obtained for php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast))
except Exception as e:
forecast = []
forecast.append(0)
self.logger.error("ERROR calculating the VAR for the php_resp_time values with php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast) + " " + str(e))
return forecast
def load_data(self, php_resp_time, cpu_load):
if (len(php_resp_time) != len(cpu_load)):
self.logger.info("load_data: data arrays must have the same number of elements.")
data = []
year = 1921
for i in range(len(php_resp_time)):
data.append({'year': year + i, 'value': php_resp_time[i], 'value2': cpu_load[i]})
names = ('year', 'value', 'value2')
dataset = Dataset(data=data, names=names)
dataset.data = DataFrame(dataset.data)
return dataset
def linear_regression_ols(self, php_resp_time, num_predictions):
forecast = []
try:
time = [t for t in range(len(php_resp_time))]
res = sm.OLS(php_resp_time, time)
model = res.fit(disp=False)
for i in range(num_predictions):
forecast.append(model.predict())
self.logger.info("OLS Regression: Forecast values obtained for php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast))
        except Exception as e:
            forecast = [0]
self.logger.error("ERROR calculating the LR_OLS for the php_resp_time values with php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast) + " " + str(e))
return forecast
def multiple_regression(self, php_resp_time, req_rate, cpu_usage, num_predictions):
forecast = []
try:
x = np.column_stack((php_resp_time, req_rate, cpu_usage))
x = sm.add_constant(x, prepend=True) # stack explanatory variables into an array
#y = sm.add_constant(y, prepend=True) #add a constant
res = sm.OLS(php_resp_time, x)
model = res.fit(disp=False)
#res = sm.OLS(y,x).fit() #create a model and fit it
for i in range(num_predictions):
forecast.append(model.predict())
self.logger.info("Multiple Regression: Forecast values obtained for php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast))
        except Exception as e:
            forecast = [0]
self.logger.error("ERROR calculating the Multiple Regression for the php_resp_time values with php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast) + " " + str(e))
return forecast
def auto_regression(self, php_resp_time, num_predictions):
forecast = []
try:
dates = sm.tsa.datetools.dates_from_range('1921', length=len(php_resp_time))
endog = Series(php_resp_time, index=dates)
ar_model = sm.tsa.AR(endog).fit(maxlag=9, method='mle', disp=False)
start_time = 1920 + len(php_resp_time)
end_time = start_time + num_predictions
pred = ar_model.predict(start=str(start_time), end=str(end_time))
# print pred
for i in pred.index:
                if str(pred.ix[i]).find('nan') >= 0:
forecast.append(0)
else:
forecast.append(pred.ix[i])
if math.isnan(forecast[0]):
raise Exception("The forecast values contain 'nan' values")
self.logger.info("AR: Forecast values obtained for php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast))
except Exception as e:
forecast = []
forecast.append(0)
self.logger.error("ERROR calculating the AR for the php_resp_time values with php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast) + " " + str(e))
return forecast
def arma(self, php_resp_time, num_predictions):
forecast = []
try:
#data = sm.datasets.sunspots.load()
dates = sm.tsa.datetools.dates_from_range('1921', length=len(php_resp_time))
endog = Series(php_resp_time, index=dates)
#arma_model = sm.tsa.ARMA(endog,(15,0)).fit()
p = sm.tsa.AR(endog).fit(disp=False).params
arma_model = sm.tsa.ARMA(endog, (len(p) - 1, 0)).fit(start_params=p.values, disp=False)
start_time = 1920 + len(php_resp_time)
end_time = start_time + num_predictions
pred = arma_model.predict(start=str(start_time), end=str(end_time), dynamic=True)
for i in pred.index:
                if str(pred.ix[i]).find('nan') >= 0:
forecast.append(0)
else:
forecast.append(pred.ix[i])
if math.isnan(forecast[0]):
raise Exception("The forecast values contain 'nan' values")
self.logger.info("ARMA: Forecast values obtained for php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast))
except Exception as e:
forecast = []
forecast.append(0)
self.logger.error("ERROR calculating the ARMA for the php_resp_time values with php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast) + " " + str(e))
return forecast
def holtwinters(self, y, alpha, beta, gamma, c, debug=False):
"""
        y - time series data.
        alpha, beta, gamma - exponential smoothing coefficients
            for the level, trend and seasonal components.
        c - number of extrapolated future data points:
            4 for quarterly, 7 for weekly, 12 for monthly data.
        The length of y must be an integer multiple (> 2) of c.
"""
# Compute initial b and intercept using the first two complete c periods.
ylen = len(y)
# I cannot pass 12 as a parameter of C
#if ylen % c !=0:
# return None
fc = float(c)
ybar2 = sum([y[i] for i in range(c, 2 * c)]) / fc
ybar1 = sum([y[i] for i in range(c)]) / fc
b0 = (ybar2 - ybar1) / fc
if debug:
print "b0 = ", b0
# Compute for the level estimate a0 using b0 above.
tbar = sum(i for i in range(1, c + 1)) / fc
# print tbar
a0 = ybar1 - b0 * tbar
        if debug:
            print "a0 = ", a0
        # Compute the initial indices
I = [y[i] / (a0 + (i + 1) * b0) for i in range(0, ylen)]
if debug:
print "Initial indices = ", I
S = [0] * (ylen + c)
for i in range(c):
S[i] = (I[i] + I[i + c]) / 2.0
# Normalize so S[i] for i in [0, c) will add to c.
tS = c / sum([S[i] for i in range(c)])
for i in range(c):
S[i] *= tS
if debug:
print "S[", i, "]=", S[i]
# Holt - winters proper ...
if debug:
print "Use Holt Winters formulae"
F = [0] * (ylen + c)
At = a0
Bt = b0
for i in range(ylen):
Atm1 = At
Btm1 = Bt
At = alpha * y[i] / S[i] + (1.0 - alpha) * (Atm1 + Btm1)
Bt = beta * (At - Atm1) + (1 - beta) * Btm1
S[i + c] = gamma * y[i] / At + (1.0 - gamma) * S[i]
F[i] = (a0 + b0 * (i + 1)) * S[i]
# print "i=", i+1, "y=", y[i], "S=", S[i], "Atm1=", Atm1, "Btm1=",Btm1, "At=", At, "Bt=", Bt, "S[i+c]=", S[i+c], "F=", F[i]
# print i,y[i], F[i]
# Forecast for next c periods:
forecast = []
for m in range(c):
forecast.append((At + Bt * (m + 1)) * S[ylen + m])
return forecast
def exponential_smoothing(self, php_resp_time, num_predictions):
forecast = []
try:
forecast = self.holtwinters(php_resp_time, 0.2, 0.1, 0.05, num_predictions)
self.logger.info("Exponential Smoothing: Forecast values obtained for php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast))
except Exception as e:
forecast.append(0)
self.logger.error("ERROR calculating the EXP. SMOOTHING for the php_resp_time values with php_resp_time[0]: " + str(php_resp_time[0]) + " --> " + str(forecast) + " " + str(e))
return forecast
| {
"content_hash": "defa77c3e439e8cbb9942bdd65d96be5",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 192,
"avg_line_length": 38.348993288590606,
"alnum_prop": 0.5426146307315366,
"repo_name": "ConPaaS-team/conpaas",
"id": "026c1bf08ca4226e810ea18105db0d052cd18b93",
"size": "11428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conpaas-services/src/conpaas/services/webservers/manager/autoscaling/prediction/prediction_models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "79"
},
{
"name": "Batchfile",
"bytes": "2136"
},
{
"name": "C",
"bytes": "12346"
},
{
"name": "CSS",
"bytes": "47680"
},
{
"name": "HTML",
"bytes": "5494"
},
{
"name": "Java",
"bytes": "404303"
},
{
"name": "JavaScript",
"bytes": "164519"
},
{
"name": "M4",
"bytes": "553"
},
{
"name": "Makefile",
"bytes": "78772"
},
{
"name": "Nginx",
"bytes": "1980"
},
{
"name": "PHP",
"bytes": "1900634"
},
{
"name": "Python",
"bytes": "2842443"
},
{
"name": "Shell",
"bytes": "232043"
},
{
"name": "Smarty",
"bytes": "15450"
}
],
"symlink_target": ""
} |
from threading import Thread, Event, Lock
from utils import get_username, get_next_id, find_work_dir, mkdir_if_not_exists
import RPi.GPIO as GPIO
import subprocess
import logging
import json
import time
import os
import re
import sys
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
pedal = 18
led_red = 16
led_blue = 20
led_green = 21
led_rgb = [led_red, led_blue, led_green]
rf_pin = 9
led_peak = 10
hold_time = 1.2 #seconds
peak_meter_re = re.compile(r'\| (\d\d)\%')
prev_buffer = ['']
def get_peak_vu_meter(pipe):
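    # arecord's VU-meter output arrives in arbitrary-sized chunks, so the
    # previous chunk is kept and the regex search spans the boundary in case
    # the "| NN%" pattern was split between two reads.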
d = pipe.read(50)
r = peak_meter_re.search(prev_buffer[0] + d)
prev_buffer[0] = d
if r:
return r.group(1)
class Recorder(object):
def __init__(self):
self.recording = False
self.setup_hardware()
self.last_pedal_press = None
self.mark_lock = Lock()
def setup_hardware(self):
self.setup_io_pins()
self.wait_for_sane_state()
self.setup_interrupts()
self.make_rgb_green()
def setup_io_pins(self):
GPIO.setmode(GPIO.BCM)
GPIO.setup(pedal, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
GPIO.setup(led_rgb, GPIO.OUT)
GPIO.setup(led_peak, GPIO.OUT)
GPIO.setup(rf_pin, GPIO.IN)
GPIO.output(led_peak, 1)
def setup_interrupts(self):
GPIO.add_event_detect(pedal, GPIO.RISING, callback=self.on_pedal_change, bouncetime=50)
def wait_for_sane_state(self):
while True:
work_dir = find_work_dir()
if work_dir is None:
self.make_rgb_white()
else:
self.upload_dir = os.path.join(work_dir, 'to_upload')
mkdir_if_not_exists(self.upload_dir)
return
def on_pedal_change(self, channel):
time.sleep(0.02)
if not GPIO.input(channel):
return
if self.last_pedal_press is not None and time.time() - self.last_pedal_press < 0.2:
return
logging.info('pedal change: %d', channel)
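        # Distinguish a tap from a hold: poll the pin until it goes low or
        # hold_time elapses. A hold toggles recording; a tap while recording
        # drops a marker.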
t0 = time.time()
while GPIO.input(channel):
self.last_pedal_press = time.time()
if self.last_pedal_press - t0 > hold_time:
                break
time.sleep(0.01)
if time.time() - t0 > hold_time:
self.toggle_rec()
elif self.recording:
self.set_mark(pedal_id=0)
self.last_pedal_press = time.time()
def make_rgb_green(self):
self.turn_rgb_off()
GPIO.output(led_green, 0)
def make_rgb_red(self):
self.turn_rgb_off()
GPIO.output(led_red, 0)
def make_rgb_purple(self):
self.turn_rgb_off()
GPIO.output(led_red, 0)
GPIO.output(led_blue, 0)
def make_rgb_white(self):
GPIO.output(led_red, 0)
GPIO.output(led_green, 0)
GPIO.output(led_blue, 0)
def turn_rgb_off(self):
for led in led_rgb:
GPIO.output(led, 1)
def toggle_rec(self):
if self.recording:
self.stop_recording()
else:
self.start_recording()
self.recording = not self.recording
def create_session(self):
self.session_start_time = int(time.time())
self.session_id = get_next_id()
self.session_file = os.path.join(self.upload_dir, '{}.mp3'.format(self.session_id))
self.metadata_file = os.path.join(self.upload_dir, '{}.json'.format(self.session_id))
logging.info('created session, session id: %d', self.session_id)
@property
def time_since_session_started(self):
return time.time() - self.session_start_time
def record_from_mic(self):
self.create_session()
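        # Pipe raw CD-quality audio from arecord straight into lame, which
        # encodes a 128 kbps constant-bitrate MP3 on the fly.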
arecord_args = 'arecord -vv -D plughw:1,0 -f cd -t raw'
lame_args = 'lame -r -h --cbr -b 128 - {}'.format(self.session_file)
self.arecord_process = subprocess.Popen(
arecord_args.split(),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
self.lame_process = subprocess.Popen(lame_args.split(), stdin=self.arecord_process.stdout)
self.start_rec_monitor()
def start_rec_monitor(self):
self._monitor_stop = Event()
self._monitor_thread = Thread(target=self.rec_monitor, args=())
self._monitor_thread.start()
def rec_monitor(self):
while True:
if self._monitor_stop.isSet():
GPIO.output(led_peak, 0)
return
v = get_peak_vu_meter(self.arecord_process.stderr)
if v is not None and int(v) > 95:
GPIO.output(led_peak, 0)
else:
GPIO.output(led_peak, 1)
def stop_rec_monitor(self):
self._monitor_stop.set()
self._monitor_thread.join()
GPIO.output(led_peak, 1)
@property
def metadata(self):
return {
'date': self.session_start_time,
'markers': [{'offset': m[0], 'pedal_id': m[1]} for m in self.markers],
}
def write_metadata_file(self):
filename = self.metadata_file + '.tmp'
with open(filename, 'w') as f:
f.write(json.dumps(self.metadata, indent=4))
os.rename(filename, self.metadata_file)
def monitor_rf(self):
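        # The RF input is noisy, so require ~30 consecutive low reads (about
        # 30 ms at the 1 ms poll interval) before treating the signal as a
        # real trigger rather than noise.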
probability = 0
while True:
if self._rf_stop.isSet():
break
if GPIO.input(rf_pin) == 0:
probability += 1
else:
probability = 0
if probability > 30:
probability = 0
self.set_mark(pedal_id=1)
time.sleep(0.1)
time.sleep(0.001)
def start_rf_thread(self):
self._rf_stop = Event()
self._rf_thread = Thread(target=self.monitor_rf, args=())
self._rf_thread.start()
def stop_rf_thread(self):
self._rf_stop.set()
self._rf_thread.join()
def start_recording(self):
logging.info('starting recording')
self.markers = []
self.record_from_mic()
self.make_rgb_red()
self.write_metadata_file()
self.start_rf_thread()
def stop_recording(self):
logging.info('stopping recording')
self.stop_rec_monitor()
self.arecord_process.terminate()
self.lame_process.terminate()
self.stop_rf_thread()
self.write_metadata_file()
self.make_rgb_green()
def set_mark(self, pedal_id):
with self.mark_lock:
logging.info('setting mark')
self.markers.append((self.time_since_session_started, pedal_id))
self.make_rgb_purple()
time.sleep(0.2)
self.make_rgb_red()
self.write_metadata_file()
def read_pedal(self):
return GPIO.input(pedal)
def serve(self):
while True:
time.sleep(999)
def main():
try:
Recorder().serve()
finally:
GPIO.cleanup()
if __name__ == '__main__':
main()
| {
"content_hash": "edc20a952c229f39753ec77f9589b91c",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 98,
"avg_line_length": 25.915441176470587,
"alnum_prop": 0.556390977443609,
"repo_name": "quatrix/rekt",
"id": "b33e6caa08f1347186c85a978035d56a238bbad1",
"size": "7071",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "loop.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "31991"
},
{
"name": "HTML",
"bytes": "318314"
},
{
"name": "JavaScript",
"bytes": "64567"
},
{
"name": "Python",
"bytes": "77116"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from . import simple
import sys
if __name__ == "__main__":
verbose = False
if len(sys.argv) > 1:
verbose = int(sys.argv[1])
test_list = None
if len(sys.argv) > 2:
test_list = sys.argv[2:]
simple.tests.run(test_list, verbose)
| {
"content_hash": "0d97724a334f284d5076bf349aa641bf",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 40,
"avg_line_length": 19.0625,
"alnum_prop": 0.5836065573770491,
"repo_name": "pavanky/arrayfire-python",
"id": "603ae9f374d37d075cc7aa784ed9732e888253bb",
"size": "648",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "arrayfire/tests/simple_tests.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "347232"
}
],
"symlink_target": ""
} |
"""
This script looks in the symbol cache directory (defaulting to c:\symbols, but
you can specify a directory) and for each sub-directory deletes all but the
most recent symbols or PE files. This can free up large amounts of space.
The idea is that the old symbols are unlikely to be needed again, and if they
are needed then they will be downloaded from their original symbol server.
It would also be safe to delete everything from the symbol cache directory,
but that would be inefficient because it would require some still-useful symbols
to be downloaded again.
Because this script is doing a partial rmdir /s it can be dangerous - it can
blast away most of the files in a directory if you pass it the wrong path.
However it contains some basic sanity checks. In particular, it will only delete
files if the file names match the grandparent directory, and if there is just
one file in the directory.
"""
from __future__ import print_function
import os
import sys
def main():
symbol_cache_dir = r'c:\symbols'
if len(sys.argv) > 1:
symbol_cache_dir = sys.argv[1]
if not os.path.isdir(symbol_cache_dir):
print('"%s" is not a directory.' % symbol_cache_dir)
return 1
deleted_count = 0
deleted_size = 0
failed_count = 0
for symbol in os.listdir(symbol_cache_dir):
ext = os.path.splitext(symbol)[1]
if ext.lower() in ['.pdb', '.exe', '.dll', '.drv']:
# eg.: c:\symbols\chrome.dll.pdb
outer_symbol_path = os.path.join(symbol_cache_dir, symbol)
# eg.: c:\symbols\chrome.dll.pdb\A982846B8C61458C9C4C3E33C6FA8F511
inner_symbol_dirs = []
for inner_symbol_dir in os.listdir(outer_symbol_path):
mtime = os.path.getmtime(os.path.join(outer_symbol_path, inner_symbol_dir))
inner_symbol_dirs.append((mtime, inner_symbol_dir))
# Sort by date
inner_symbol_dirs.sort()
# Iterate over all but the most recent entries
# Retain the last two because there may be 32-bit/64-bit binaries with the
# same name but different symbols, or development/stable versions.
for guid_path in inner_symbol_dirs[:-2]:
inner_symbol_path = os.path.join(outer_symbol_path, guid_path[1])
files = os.listdir(inner_symbol_path)
deleted_error_files = False
for file in files:
# Files that end with .error are sometimes present due to symbol-server
# download errors. Delete them. Files that end with '_' are compressed
# files that can't be used directly and are not supposed to be
# retained. Delete them.
if file.endswith('.error') or file.endswith('_'):
file_path = os.path.join(inner_symbol_path, file)
print('removing %s' % file_path)
try:
file_size = os.path.getsize(file_path)
os.remove(file_path)
deleted_size += file_size
deleted_count += 1
except WindowsError as e:
print('Failure deleting %s - %s' % (file_path, e))
failed_count += 1
deleted_error_files = True
# If we deleted some .error files then rescan.
if deleted_error_files:
files = os.listdir(inner_symbol_path)
# If there are extra files or if the file name doesn't match the parent
# directory then maybe this isn't a symbol cache.
if len(files) == 1 and files[0].lower() == symbol.lower():
file_path = os.path.join(inner_symbol_path, files[0])
print('removing %s' % file_path)
try:
file_size = os.path.getsize(file_path)
os.remove(file_path)
print('removing %s' % inner_symbol_path)
os.rmdir(inner_symbol_path)
deleted_size += file_size
deleted_count += 1
except WindowsError as e:
print('Failure deleting %s - %s' % (file_path, e))
failed_count += 1
elif len(files) == 0:
try:
print('removing %s' % inner_symbol_path)
os.rmdir(inner_symbol_path)
except WindowsError as e:
print('Failure deleting %s - %s' % (inner_symbol_path, e))
failed_count += 1
else:
print('File/directory mismatch. Leaving %s, just in case.' % inner_symbol_path)
# GB = 1e9. GiB = 2^30 and is dumb in this context.
print('Deleted %d files totaling %1.3f GB' % (deleted_count, deleted_size / 1e9))
  if failed_count > 0:
print('Failed to delete %d file(s)' % failed_count)
if __name__ == '__main__':
sys.exit(main())
| {
"content_hash": "ba6d44cf40a06fff7d033a7c2a6c3f39",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 89,
"avg_line_length": 43.25925925925926,
"alnum_prop": 0.6175085616438356,
"repo_name": "randomascii/tools",
"id": "7f1174f8c5443a40aedc048fb6e95eb67cd14f00",
"size": "5283",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "bin/trim_symbols.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "269"
},
{
"name": "C",
"bytes": "3688"
},
{
"name": "C++",
"bytes": "13904"
},
{
"name": "Python",
"bytes": "19276"
}
],
"symlink_target": ""
} |
import sublime, sublime_plugin
import os
class MochaCoffeeScriptDetectFileTypeCommand(sublime_plugin.EventListener):
""" Detects current file type if the file's extension isn't conclusive """
def on_load(self, view):
filename = view.file_name()
if not filename: # buffer has never been saved
return
name = os.path.basename(filename.lower())
if name.endswith("spec.js.coffee") or name.endswith("spec.coffee"):
set_syntax(view, "Mocha Chai CoffeeScript", "Mocha Chai CoffeeScript/Syntaxes")
def set_syntax(view, syntax, path=None):
if path is None:
path = syntax
view.settings().set('syntax', 'Packages/'+ path + '/' + syntax + '.tmLanguage')
print("Switched syntax to: " + syntax)
| {
"content_hash": "9f1ac2f6547aa7326c5e7888a1917726",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 85,
"avg_line_length": 36.3,
"alnum_prop": 0.7011019283746557,
"repo_name": "octoblu/sublime-text-mocha-coffeescript",
"id": "a682497e75521944b917694a69fd5bc208a0d860",
"size": "726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MochaCoffeeScriptDetectFileType.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "73"
},
{
"name": "Python",
"bytes": "726"
}
],
"symlink_target": ""
} |