| signature (string, lengths 8–3.44k) | body (string, lengths 0–1.41M) | docstring (string, lengths 1–122k) | id (string, lengths 5–17) |
|---|---|---|---|
def __len__(self):
|
n = <NUM_LIT:0><EOL>for sense in iter(self):<EOL><INDENT>n += <NUM_LIT:1><EOL><DEDENT>return n<EOL>
|
Count active senses.
|
f9037:c6:m2
|
def __getitem__(self, k):
|
if not self.engine.query.sense_is_active(<EOL>self.character.name,<EOL>k,<EOL>*self.engine._btt()<EOL>):<EOL><INDENT>raise KeyError("<STR_LIT>")<EOL><DEDENT>return CharacterSense(self.character, k)<EOL>
|
Get a :class:`CharacterSense` named ``k`` if it exists.
|
f9037:c6:m3
|
def __setitem__(self, k, v):
|
if isinstance(v, str):<EOL><INDENT>funn = v<EOL><DEDENT>else:<EOL><INDENT>funn = v.__name__<EOL><DEDENT>if funn not in self.engine.sense:<EOL><INDENT>if not isinstance(v, Callable):<EOL><INDENT>raise TypeError("<STR_LIT>")<EOL><DEDENT>self.engine.sense[funn] = v<EOL><DEDENT>branch, turn, tick = self.engine._btt()<EOL>self.engine.query.sense_fun_set(<EOL>self.character.name,<EOL>k,<EOL>branch,<EOL>turn,<EOL>tick,<EOL>funn,<EOL>True<EOL>)<EOL>self.send(self, key=k, val=v)<EOL>
|
Use the function for the sense from here on out.
|
f9037:c6:m4
|
def __delitem__(self, k):
|
branch, turn, tick = self.engine._btt()<EOL>self.engine.query.sense_set(<EOL>self.character.name,<EOL>k,<EOL>branch,<EOL>turn,<EOL>tick,<EOL>False<EOL>)<EOL>self.send(self, key=k, val=None)<EOL>
|
Stop having the given sense.
|
f9037:c6:m5
|
def __call__(self, fun, name=None):
|
if not isinstance(fun, Callable):<EOL><INDENT>raise TypeError(<EOL>"<STR_LIT>"<EOL>)<EOL><DEDENT>if name is None:<EOL><INDENT>name = fun.__name__<EOL><DEDENT>self[name] = fun<EOL>
|
Decorate the function so it's mine now.
|
f9037:c6:m6
|
def __init__(self, facade):
|
super().__init__()<EOL>self.facade = facade<EOL>self._patch = {}<EOL>
|
Store the facade.
|
f9037:c12:m1
|
def __init__(self, character=None):
|
self.character = character<EOL>self.graph = self.StatMapping(self)<EOL>
|
Store the character.
|
f9037:c16:m14
|
def __init__(self, engine, name, data=None, *, init_rulebooks=True, **attr):
|
from allegedb.cache import FuturistWindowDict, PickyDefaultDict<EOL>super().__init__(engine, name, data, **attr)<EOL>self._avatars_cache = PickyDefaultDict(FuturistWindowDict)<EOL>if not init_rulebooks:<EOL><INDENT>return<EOL><DEDENT>cachemap = {<EOL>'<STR_LIT>': engine._characters_rulebooks_cache,<EOL>'<STR_LIT>': engine._avatars_rulebooks_cache,<EOL>'<STR_LIT>': engine._characters_things_rulebooks_cache,<EOL>'<STR_LIT>': engine._characters_places_rulebooks_cache,<EOL>'<STR_LIT>': engine._characters_portals_rulebooks_cache<EOL>}<EOL>for rulebook, cache in cachemap.items():<EOL><INDENT>branch, turn, tick = engine._nbtt()<EOL>rulebook_or_name = attr.get(rulebook, (name, rulebook))<EOL>rulebook_name = getattr(rulebook_or_name, '<STR_LIT:name>', rulebook_or_name)<EOL>engine.query._set_rulebook_on_character(rulebook, name, branch, turn, tick, rulebook_name)<EOL>cache.store((name, rulebook), branch, turn, tick, rulebook_name)<EOL><DEDENT>
|
Store engine and name, and set up mappings for Thing, Place, and
Portal
|
f9037:c17:m3
|
def add_places_from(self, seq, **attrs):
|
super().add_nodes_from(seq, **attrs)<EOL>
|
Take a series of place names and add the lot.
|
f9037:c17:m6
|
def add_thing(self, name, location, **kwargs):
|
if name in self.thing:<EOL><INDENT>raise WorldIntegrityError(<EOL>"<STR_LIT>".format(name)<EOL>)<EOL><DEDENT>self.add_node(name, **kwargs)<EOL>if isinstance(location, Node):<EOL><INDENT>location = location.name<EOL><DEDENT>self.place2thing(name, location,)<EOL>
|
Create a Thing, set its location,
and set its initial attributes from the keyword arguments (if
any).
|
f9037:c17:m7
|
def place2thing(self, name, location):
|
self.engine._set_thing_loc(<EOL>self.name, name, location<EOL>)<EOL>if (self.name, name) in self.engine._node_objs:<EOL><INDENT>obj = self.engine._node_objs[self.name, name]<EOL>thing = Thing(self, name)<EOL>for port in obj.portals():<EOL><INDENT>port.origin = thing<EOL><DEDENT>for port in obj.preportals():<EOL><INDENT>port.destination = thing<EOL><DEDENT>self.engine._node_objs[self.name, name] = thing<EOL><DEDENT>
|
Turn a Place into a Thing with the given location.
It will keep all its attached Portals.
|
f9037:c17:m9
|
def thing2place(self, name):
|
self.engine._set_thing_loc(<EOL>self.name, name, None<EOL>)<EOL>if (self.name, name) in self.engine._node_objs:<EOL><INDENT>thing = self.engine._node_objs[self.name, name]<EOL>place = Place(self, name)<EOL>for port in thing.portals():<EOL><INDENT>port.origin = place<EOL><DEDENT>for port in thing.preportals():<EOL><INDENT>port.destination = place<EOL><DEDENT>self.engine._node_objs[self.name, name] = place<EOL><DEDENT>
|
Unset a Thing's location, and thus turn it into a Place.
|
f9037:c17:m10
|
def add_portal(self, origin, destination, symmetrical=False, **kwargs):
|
if isinstance(origin, Node):<EOL><INDENT>origin = origin.name<EOL><DEDENT>if isinstance(destination, Node):<EOL><INDENT>destination = destination.name<EOL><DEDENT>super().add_edge(origin, destination, **kwargs)<EOL>if symmetrical:<EOL><INDENT>self.add_portal(destination, origin, is_mirror=True)<EOL><DEDENT>
|
Connect the origin to the destination with a :class:`Portal`.
Keyword arguments are the :class:`Portal`'s
attributes. Exception: if keyword ``symmetrical`` == ``True``,
a mirror-:class:`Portal` will be placed in the opposite
direction between the same nodes. It will always appear to
have the placed :class:`Portal`'s stats, and any change to the
mirror :class:`Portal`'s stats will affect the placed
:class:`Portal`.
|
f9037:c17:m11
|
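The ``add_portal`` row above documents the ``symmetrical`` keyword. Below is a minimal, hypothetical usage sketch; the character and place names are illustrative, and it assumes an already-open LiSE engine such as the one passed to ``install`` further down this table.

```python
# Hypothetical usage sketch; only methods visible in this table are used.
def build_map(engine):
    phys = engine.new_character("physical")        # new_character as in install()
    phys.add_places_from(["kitchen", "hall"])      # add_places_from row above
    # Keyword arguments become the portal's stats; with symmetrical=True a
    # mirror portal hall -> kitchen is also created and always reflects the
    # kitchen -> hall portal's stats (per the docstring).
    phys.add_portal("kitchen", "hall", symmetrical=True, weight=2)
    return phys
```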
def add_portals_from(self, seq, symmetrical=False):
|
for tup in seq:<EOL><INDENT>orig = tup[<NUM_LIT:0>]<EOL>dest = tup[<NUM_LIT:1>]<EOL>kwargs = tup[<NUM_LIT:2>] if len(tup) > <NUM_LIT:2> else {}<EOL>if symmetrical:<EOL><INDENT>kwargs['<STR_LIT>'] = True<EOL><DEDENT>self.add_portal(orig, dest, **kwargs)<EOL><DEDENT>
|
Take a sequence of (origin, destination) pairs and make a
:class:`Portal` for each.
Actually, triples are acceptable too, in which case the third
item is a dictionary of stats for the new :class:`Portal`.
If optional argument ``symmetrical`` is set to ``True``, all
the :class:`Portal` instances will have a mirror portal going
in the opposite direction, which will always have the same
stats.
|
f9037:c17:m13
|
def add_avatar(self, a, b=None):
|
if self.engine._planning:<EOL><INDENT>raise NotImplementedError("<STR_LIT>")<EOL><DEDENT>if b is None:<EOL><INDENT>if not (<EOL>isinstance(a, Place) or<EOL>isinstance(a, Thing)<EOL>):<EOL><INDENT>raise TypeError(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>)<EOL><DEDENT>g = a.character.name<EOL>n = a.name<EOL><DEDENT>else:<EOL><INDENT>if isinstance(a, Character):<EOL><INDENT>g = a.name<EOL><DEDENT>elif not isinstance(a, str):<EOL><INDENT>raise TypeError(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>)<EOL><DEDENT>else:<EOL><INDENT>g = a<EOL><DEDENT>if isinstance(b, Place) or isinstance(b, Thing):<EOL><INDENT>n = b.name<EOL><DEDENT>elif not isinstance(b, str):<EOL><INDENT>raise TypeError(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>)<EOL><DEDENT>else:<EOL><INDENT>n = b<EOL><DEDENT><DEDENT>self.engine._exist_node(g, n)<EOL>branch, turn, tick = self.engine._nbtt()<EOL>self.engine._remember_avatarness(self.name, g, n, branch=branch, turn=turn, tick=tick)<EOL>
|
Start keeping track of a :class:`Thing` or :class:`Place` in a
different :class:`Character`.
|
f9037:c17:m14
|
def del_avatar(self, a, b=None):
|
if self.engine._planning:<EOL><INDENT>raise NotImplementedError("<STR_LIT>")<EOL><DEDENT>if b is None:<EOL><INDENT>if not isinstance(a, Node):<EOL><INDENT>raise TypeError(<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>)<EOL><DEDENT>g = a.character.name<EOL>n = a.name<EOL><DEDENT>else:<EOL><INDENT>g = a.name if isinstance(a, Character) else a<EOL>n = b.name if isinstance(b, Node) else b<EOL><DEDENT>self.engine._remember_avatarness(<EOL>self.character.name, g, n, False<EOL>)<EOL>
|
This is no longer my avatar, though it still exists on its own.
|
f9037:c17:m15
|
def portals(self):
|
char = self.character<EOL>make_edge = self.engine._get_edge<EOL>for (o, d) in self.engine._edges_cache.iter_keys(<EOL>self.character.name, *self.engine._btt()<EOL>):<EOL><INDENT>yield make_edge(char, o, d)<EOL><DEDENT>
|
Iterate over all portals.
|
f9037:c17:m16
|
def avatars(self):
|
charname = self.character.name<EOL>branch, turn, tick = self.engine._btt()<EOL>charmap = self.engine.character<EOL>avit = self.engine._avatarness_cache.iter_entities<EOL>makenode = self.engine._get_node<EOL>for graph in avit(<EOL>charname, branch, turn, tick<EOL>):<EOL><INDENT>for node in avit(<EOL>charname, graph, branch, turn, tick<EOL>):<EOL><INDENT>try:<EOL><INDENT>yield makenode(charmap[graph], node)<EOL><DEDENT>except KeyError:<EOL><INDENT>continue<EOL><DEDENT><DEDENT><DEDENT>
|
Iterate over all my avatars, regardless of what character they are
in.
|
f9037:c17:m17
|
def __init__(self, portal):
|
super().__init__(portal.engine, portal.rulebook)<EOL>self.portal = portal<EOL>
|
Store portal, engine, and rulebook.
|
f9040:c0:m0
|
def __getitem__(self, key):
|
if key == '<STR_LIT>':<EOL><INDENT>return self.orig<EOL><DEDENT>elif key == '<STR_LIT>':<EOL><INDENT>return self.dest<EOL><DEDENT>elif key == '<STR_LIT>':<EOL><INDENT>return self.character.name<EOL><DEDENT>elif key == '<STR_LIT>':<EOL><INDENT>try:<EOL><INDENT>return super().__getitem__(key)<EOL><DEDENT>except KeyError:<EOL><INDENT>return False<EOL><DEDENT><DEDENT>elif '<STR_LIT>' in self and self['<STR_LIT>']:<EOL><INDENT>return self.character.preportal[<EOL>self.orig<EOL>][<EOL>self.dest<EOL>][<EOL>key<EOL>]<EOL><DEDENT>else:<EOL><INDENT>return super().__getitem__(key)<EOL><DEDENT>
|
Get the present value of the key.
If I am a mirror of another Portal, return the value from that
Portal instead.
|
f9040:c1:m6
|
def __setitem__(self, key, value):
|
if key in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>raise KeyError("<STR_LIT>" + key)<EOL><DEDENT>elif '<STR_LIT>' in self and self['<STR_LIT>']:<EOL><INDENT>self.reciprocal[key] = value<EOL>return<EOL><DEDENT>elif key == '<STR_LIT>' and value:<EOL><INDENT>if (<EOL>self.dest not in self.character.portal or<EOL>self.orig not in<EOL>self.character.portal[self.dest]<EOL>):<EOL><INDENT>self.character.add_portal(self.dest, self.orig)<EOL><DEDENT>self.character.portal[<EOL>self.dest<EOL>][<EOL>self.orig<EOL>][<EOL>"<STR_LIT>"<EOL>] = True<EOL>self.send(self, key='<STR_LIT>', val=False)<EOL>return<EOL><DEDENT>elif key == '<STR_LIT>' and not value:<EOL><INDENT>try:<EOL><INDENT>self.character.portal[<EOL>self.dest<EOL>][<EOL>self.orig<EOL>][<EOL>"<STR_LIT>"<EOL>] = False<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT>self.send(self, key='<STR_LIT>', val=False)<EOL>return<EOL><DEDENT>super().__setitem__(key, value)<EOL>
|
Set ``key``=``value`` at the present game-time.
If I am a mirror of another Portal, set ``key``=``value`` on
that Portal instead.
|
f9040:c1:m7
|
def __repr__(self):
|
return "<STR_LIT>".format(<EOL>repr(self.engine),<EOL>repr(self['<STR_LIT>']),<EOL>repr(self['<STR_LIT>']),<EOL>repr(self['<STR_LIT>'])<EOL>)<EOL>
|
Describe character, origin, and destination
|
f9040:c1:m8
|
def __bool__(self):
|
return self.orig in self.character.portal and self.dest in self.character.portal[self.orig]<EOL>
|
It means something that I exist, even if I have no data.
|
f9040:c1:m9
|
@property<EOL><INDENT>def reciprocal(self):<DEDENT>
|
try:<EOL><INDENT>return self.character.portal[self.dest][self.orig]<EOL><DEDENT>except KeyError:<EOL><INDENT>raise KeyError("<STR_LIT>")<EOL><DEDENT>
|
If there's another Portal connecting the same origin and
destination as I do, but going the opposite way, return
it. Else raise KeyError.
|
f9040:c1:m10
|
def historical(self, stat):
|
return StatusAlias(<EOL>entity=self,<EOL>stat=stat<EOL>)<EOL>
|
Return a reference to the values that a stat has had in the past.
You can use the reference in comparisons to make a history
query, and execute the query by calling it, or passing it to
``self.engine.ticks_when``.
|
f9040:c1:m11
|
def update(self, d):
|
for (k, v) in d.items():<EOL><INDENT>if k not in self or self[k] != v:<EOL><INDENT>self[k] = v<EOL><DEDENT><DEDENT>
|
Works like regular update, but only actually updates when the new
value and the old value differ. This is necessary to prevent
certain infinite loops.
:arg d: a dictionary
|
f9040:c1:m12
|
def delete(self):
|
branch, turn, tick = self.engine._nbtt()<EOL>self.engine._edges_cache.store(<EOL>self.character.name,<EOL>self.origin.name,<EOL>self.destination.name,<EOL><NUM_LIT:0>,<EOL>branch,<EOL>turn,<EOL>tick,<EOL>None<EOL>)<EOL>self.engine.query.exist_edge(<EOL>self.character.name,<EOL>self.origin.name,<EOL>self.destination.name,<EOL>branch, turn, tick, False<EOL>)<EOL>try:<EOL><INDENT>del self.engine._edge_objs[<EOL>(self.graph.name, self.orig, self.dest)<EOL>]<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT>self.character.portal[self.origin.name].send(<EOL>self.character.portal[self.origin.name],<EOL>key='<STR_LIT>', val=None<EOL>)<EOL>
|
Remove myself from my :class:`Character`.
For symmetry with :class:`Thing` and :class:`Place`.
|
f9040:c1:m13
|
def windows_union(windows):
|
def fix_overlap(left, right):<EOL><INDENT>if left == right:<EOL><INDENT>return [left]<EOL><DEDENT>assert left[<NUM_LIT:0>] < right[<NUM_LIT:0>]<EOL>if left[<NUM_LIT:1>] >= right[<NUM_LIT:0>]:<EOL><INDENT>if right[<NUM_LIT:1>] > left[<NUM_LIT:1>]:<EOL><INDENT>return [(left[<NUM_LIT:0>], right[<NUM_LIT:1>])]<EOL><DEDENT>else:<EOL><INDENT>return [left]<EOL><DEDENT><DEDENT>return [left, right]<EOL><DEDENT>if len(windows) == <NUM_LIT:1>:<EOL><INDENT>return windows<EOL><DEDENT>none_left = []<EOL>none_right = []<EOL>otherwise = []<EOL>for window in windows:<EOL><INDENT>if window[<NUM_LIT:0>] is None:<EOL><INDENT>none_left.append(window)<EOL><DEDENT>elif window[<NUM_LIT:1>] is None:<EOL><INDENT>none_right.append(window)<EOL><DEDENT>else:<EOL><INDENT>otherwise.append(window)<EOL><DEDENT><DEDENT>res = []<EOL>otherwise.sort()<EOL>for window in none_left:<EOL><INDENT>if not res:<EOL><INDENT>res.append(window)<EOL>continue<EOL><DEDENT>res.extend(fix_overlap(res.pop(), window))<EOL><DEDENT>while otherwise:<EOL><INDENT>window = otherwise.pop(<NUM_LIT:0>)<EOL>if not res:<EOL><INDENT>res.append(window)<EOL>continue<EOL><DEDENT>res.extend(fix_overlap(res.pop(), window))<EOL><DEDENT>for window in none_right:<EOL><INDENT>if not res:<EOL><INDENT>res.append(window)<EOL>continue<EOL><DEDENT>res.extend(fix_overlap(res.pop(), window))<EOL><DEDENT>return res<EOL>
|
Given a list of (beginning, ending), return a minimal version that contains the same ranges.
:rtype: list
|
f9041:m0
|
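The ``windows_union`` row above merges (beginning, ending) pairs into a minimal equivalent list. The sketch below shows only the core merging idea on finite bounds (the dataset's function also handles windows open on either side, i.e. with ``None`` bounds); names are illustrative.

```python
# Minimal sketch of interval merging, assuming finite bounds only.
def merge_windows(windows):
    merged = []
    for lo, hi in sorted(windows):
        if merged and lo <= merged[-1][1]:                  # overlaps the previous window
            merged[-1] = (merged[-1][0], max(merged[-1][1], hi))
        else:
            merged.append((lo, hi))
    return merged

assert merge_windows([(5, 9), (1, 3), (2, 6)]) == [(1, 9)]
```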
def windows_intersection(windows):
|
def intersect2(left, right):<EOL><INDENT>if left == right:<EOL><INDENT>return left<EOL><DEDENT>elif left is (None, None):<EOL><INDENT>return right<EOL><DEDENT>elif right is (None, None):<EOL><INDENT>return left<EOL><DEDENT>elif left[<NUM_LIT:0>] is None:<EOL><INDENT>if right[<NUM_LIT:0>] is None:<EOL><INDENT>return None, min((left[<NUM_LIT:1>], right[<NUM_LIT:1>]))<EOL><DEDENT>elif right[<NUM_LIT:1>] is None:<EOL><INDENT>if left[<NUM_LIT:1>] <= right[<NUM_LIT:0>]:<EOL><INDENT>return left[<NUM_LIT:1>], right[<NUM_LIT:0>]<EOL><DEDENT>else:<EOL><INDENT>return None<EOL><DEDENT><DEDENT>elif right[<NUM_LIT:0>] <= left[<NUM_LIT:1>]:<EOL><INDENT>return right[<NUM_LIT:0>], left[<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>return None<EOL><DEDENT><DEDENT>elif left[<NUM_LIT:1>] is None:<EOL><INDENT>if right[<NUM_LIT:0>] is None:<EOL><INDENT>return left[<NUM_LIT:0>], right[<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>return right <EOL><DEDENT><DEDENT>elif right[<NUM_LIT:0>] is None:<EOL><INDENT>return left[<NUM_LIT:0>], min((left[<NUM_LIT:1>], right[<NUM_LIT:1>]))<EOL><DEDENT>elif right[<NUM_LIT:1>] is None:<EOL><INDENT>if left[<NUM_LIT:1>] >= right[<NUM_LIT:0>]:<EOL><INDENT>return right[<NUM_LIT:0>], left[<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>return None<EOL><DEDENT><DEDENT>assert None not in left and None not in right and left[<NUM_LIT:0>] < right[<NUM_LIT:1>]<EOL>if left[<NUM_LIT:1>] >= right[<NUM_LIT:0>]:<EOL><INDENT>if right[<NUM_LIT:1>] > left[<NUM_LIT:1>]:<EOL><INDENT>return right[<NUM_LIT:0>], left[<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>return right<EOL><DEDENT><DEDENT>return None<EOL><DEDENT>if len(windows) == <NUM_LIT:1>:<EOL><INDENT>return windows<EOL><DEDENT>left_none = []<EOL>right_none = []<EOL>otherwise = []<EOL>for window in windows:<EOL><INDENT>assert window is not None, None<EOL>if window[<NUM_LIT:0>] is None:<EOL><INDENT>left_none.append(window)<EOL><DEDENT>elif window[<NUM_LIT:1>] is None:<EOL><INDENT>right_none.append(window)<EOL><DEDENT>else:<EOL><INDENT>otherwise.append(window)<EOL><DEDENT><DEDENT>done = []<EOL>todo = left_none + sorted(otherwise)<EOL>for window in todo:<EOL><INDENT>if not done:<EOL><INDENT>done.append(window)<EOL>continue<EOL><DEDENT>res = intersect2(done.pop(), window)<EOL>if res:<EOL><INDENT>done.append(res)<EOL><DEDENT><DEDENT>return done<EOL>
|
Given a list of (beginning, ending), return another describing where they overlap.
:rtype: list
|
f9041:m1
|
def slow_iter_turns_eval_cmp(qry, oper, start_branch=None, engine=None):
|
def mungeside(side):<EOL><INDENT>if isinstance(side, Query):<EOL><INDENT>return side.iter_turns<EOL><DEDENT>elif isinstance(side, StatusAlias):<EOL><INDENT>return EntityStatAccessor(<EOL>side.entity, side.stat, side.engine,<EOL>side.branch, side.turn, side.tick, side.current, side.mungers<EOL>)<EOL><DEDENT>elif isinstance(side, EntityStatAccessor):<EOL><INDENT>return side<EOL><DEDENT>else:<EOL><INDENT>return lambda: side<EOL><DEDENT><DEDENT>leftside = mungeside(qry.leftside)<EOL>rightside = mungeside(qry.rightside)<EOL>engine = engine or leftside.engine or rightside.engine<EOL>for (branch, _, _) in engine._iter_parent_btt(start_branch or engine.branch):<EOL><INDENT>if branch is None:<EOL><INDENT>return<EOL><DEDENT>parent, turn_start, tick_start, turn_end, tick_end = engine._branches[branch]<EOL>for turn in range(turn_start, engine.turn + <NUM_LIT:1>):<EOL><INDENT>if oper(leftside(branch, turn), rightside(branch, turn)):<EOL><INDENT>yield branch, turn<EOL><DEDENT><DEDENT><DEDENT>
|
Iterate over all turns on which a comparison holds.
This is expensive. It evaluates the query for every turn in history.
|
f9041:m2
|
def initdb(self):
|
super().initdb()<EOL>for table in (<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>'<EOL>):<EOL><INDENT>self.init_table(table)<EOL><DEDENT>
|
Set up the database schema, both for allegedb and the special
extensions for LiSE
|
f9041:c10:m59
|
def install(<EOL>engine,<EOL>n_creatures=<NUM_LIT:5>,<EOL>n_sickles=<NUM_LIT:3>,<EOL>malaria_chance=<NUM_LIT>,<EOL>mate_chance=<NUM_LIT>,<EOL>mapsize=(<NUM_LIT:1>, <NUM_LIT:1>),<EOL>startpos=(<NUM_LIT:0>, <NUM_LIT:0>)<EOL>):
|
initmap = nx.grid_2d_graph(*mapsize)<EOL>phys = engine.new_character("<STR_LIT>", data=initmap)<EOL>species = engine.new_character(<EOL>"<STR_LIT>",<EOL>mate_chance=mate_chance,<EOL>malaria_chance=malaria_chance,<EOL>n_creatures=n_creatures,<EOL>)<EOL>for n in range(<NUM_LIT:0>, n_creatures):<EOL><INDENT>name = "<STR_LIT>" + str(n)<EOL>phys.add_thing(<EOL>name=name,<EOL>location=startpos,<EOL>sickle_a=(n < n_sickles),<EOL>sickle_b=False,<EOL>male=engine.coinflip(),<EOL>last_mate_turn=-<NUM_LIT:1><EOL>)<EOL>assert name in phys.thing<EOL>assert name not in phys.place<EOL>assert name in phys.node, "<STR_LIT>".format(name)<EOL>assert hasattr(phys.node[name], '<STR_LIT:location>')<EOL>species.add_avatar("<STR_LIT>", name)<EOL>assert hasattr(species.avatar['<STR_LIT>'][name], '<STR_LIT:location>')<EOL><DEDENT>@species.avatar.rule<EOL>def dieoff(critter):<EOL><INDENT>critter.delete()<EOL>assert (critter.name not in critter.character.node)<EOL>if critter['<STR_LIT>']:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT><DEDENT>@species.avatar.rule<EOL>def mate(critter):<EOL><INDENT>"""<STR_LIT>"""<EOL>suitors = list(<EOL>oc for oc in critter.location.contents()<EOL>if oc['<STR_LIT>'] != critter['<STR_LIT>']<EOL>)<EOL>assert (len(suitors) > <NUM_LIT:0>)<EOL>other_critter = critter.engine.choice(suitors)<EOL>sickles = [<EOL>critter['<STR_LIT>'],<EOL>critter['<STR_LIT>'],<EOL>other_critter['<STR_LIT>'],<EOL>other_critter['<STR_LIT>']<EOL>]<EOL>engine.shuffle(sickles)<EOL>name = "<STR_LIT>" + str(species.stat["<STR_LIT>"])<EOL>species.stat["<STR_LIT>"] += <NUM_LIT:1><EOL>engine.character["<STR_LIT>"].add_thing(<EOL>name,<EOL>critter["<STR_LIT:location>"],<EOL>sickle_a=sickles.pop(),<EOL>sickle_b=sickles.pop(),<EOL>male=engine.coinflip(),<EOL>last_mate_turn=engine.turn<EOL>)<EOL>species.add_avatar("<STR_LIT>", name)<EOL>critter['<STR_LIT>'] = other_critter['<STR_LIT>'] =engine.turn<EOL>return '<STR_LIT>'<EOL><DEDENT>@mate.prereq<EOL>def once_per_turn(critter):<EOL><INDENT>return critter['<STR_LIT>'] < critter.engine.turn<EOL><DEDENT>@mate.prereq<EOL>def mate_present(critter):<EOL><INDENT>for oc in critter.location.contents():<EOL><INDENT>if oc['<STR_LIT>'] != critter['<STR_LIT>']:<EOL><INDENT>return True<EOL><DEDENT><DEDENT>return False<EOL><DEDENT>@mate.trigger<EOL>def in_the_mood(critter):<EOL><INDENT>return critter.engine.random() < critter.user.stat['<STR_LIT>']<EOL><DEDENT>@dieoff.trigger<EOL>def sickle2(critter):<EOL><INDENT>r = critter['<STR_LIT>'] and critter['<STR_LIT>']<EOL>if r:<EOL><INDENT>critter['<STR_LIT>'] = False<EOL><DEDENT>return r<EOL><DEDENT>@dieoff.trigger<EOL>def malaria(critter):<EOL><INDENT>r = (<EOL>critter.engine.random() < critter.user.stat['<STR_LIT>'] and not<EOL>(critter['<STR_LIT>'] or critter['<STR_LIT>'])<EOL>)<EOL>if r:<EOL><INDENT>critter['<STR_LIT>'] = True<EOL><DEDENT>return r<EOL><DEDENT>@phys.thing.rule<EOL>def wander(critter):<EOL><INDENT>dests = list(critter.character.place.keys())<EOL>dests.remove(critter['<STR_LIT:location>'])<EOL>dest = critter.engine.choice(dests)<EOL>critter.travel_to(dest)<EOL><DEDENT>@wander.trigger<EOL>def not_travelling(critter):<EOL><INDENT>return critter.next_location is None<EOL><DEDENT>@wander.prereq<EOL>def big_map(critter):<EOL><INDENT>return len(critter.character.place) > <NUM_LIT:1><EOL><DEDENT>
|
Natural Selection on Sickle Cell Anemia
If anyone carries a pair of sickle betaglobin genes, they die of
sickle cell anemia.
Individuals with 1x betaglobin, 1x sickle betaglobin are immune to
malaria.
|
f9043:m0
|
def __getitem__(self, key):
|
try:<EOL><INDENT>return self._getitem_dispatch[key](self)<EOL><DEDENT>except KeyError:<EOL><INDENT>return super().__getitem__(key)<EOL><DEDENT>
|
Return one of my stats stored in the database, or a few
special cases:
``name``: return the name that uniquely identifies me within
my Character
``character``: return the name of my character
``location``: return the name of my location
|
f9049:c0:m6
|
def __setitem__(self, key, value):
|
try:<EOL><INDENT>self._setitem_dispatch[key](self, value)<EOL><DEDENT>except HistoryError as ex:<EOL><INDENT>raise ex<EOL><DEDENT>except KeyError:<EOL><INDENT>super().__setitem__(key, value)<EOL><DEDENT>
|
Set ``key``=``value`` for the present game-time.
|
f9049:c0:m7
|
def __delitem__(self, key):
|
if key in self.extrakeys:<EOL><INDENT>raise ValueError("<STR_LIT>".format(key))<EOL><DEDENT>super().__delitem__(key)<EOL>
|
As of now, this key isn't mine.
|
f9049:c0:m8
|
def clear(self):
|
for k in list(self.keys()):<EOL><INDENT>if k not in self.extrakeys:<EOL><INDENT>del self[k]<EOL><DEDENT><DEDENT>
|
Unset everything.
|
f9049:c0:m11
|
@property<EOL><INDENT>def location(self):<DEDENT>
|
return self.engine._get_node(self.character, self['<STR_LIT:location>'])<EOL>
|
The ``Thing`` or ``Place`` I'm in.
|
f9049:c0:m12
|
def go_to_place(self, place, weight='<STR_LIT>'):
|
if hasattr(place, '<STR_LIT:name>'):<EOL><INDENT>placen = place.name<EOL><DEDENT>else:<EOL><INDENT>placen = place<EOL><DEDENT>curloc = self["<STR_LIT:location>"]<EOL>orm = self.character.engine<EOL>turns = self.engine._portal_objs[<EOL>(self.character.name, curloc, place)].get(weight, <NUM_LIT:1>)<EOL>with self.engine.plan():<EOL><INDENT>orm.turn += turns<EOL>self['<STR_LIT:location>'] = placen<EOL><DEDENT>return turns<EOL>
|
Assuming I'm in a :class:`Place` that has a :class:`Portal` direct
to the given :class:`Place`, schedule myself to travel to the
given :class:`Place`, taking an amount of time indicated by
the ``weight`` stat on the :class:`Portal`, if given; else 1
turn.
Return the number of turns the travel will take.
|
f9049:c0:m15
|
def follow_path(self, path, weight=None):
|
if len(path) < <NUM_LIT:2>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>eng = self.character.engine<EOL>turn_now, tick_now = eng.time<EOL>with eng.plan():<EOL><INDENT>prevplace = path.pop(<NUM_LIT:0>)<EOL>if prevplace != self['<STR_LIT:location>']:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>subpath = [prevplace]<EOL>for place in path:<EOL><INDENT>if (<EOL>prevplace not in self.character.portal or<EOL>place not in self.character.portal[prevplace]<EOL>):<EOL><INDENT>raise TravelException(<EOL>"<STR_LIT>".format(<EOL>prevplace,<EOL>place<EOL>),<EOL>path=subpath,<EOL>traveller=self<EOL>)<EOL><DEDENT>subpath.append(place)<EOL>prevplace = place<EOL><DEDENT>turns_total = <NUM_LIT:0><EOL>prevsubplace = subpath.pop(<NUM_LIT:0>)<EOL>subsubpath = [prevsubplace]<EOL>for subplace in subpath:<EOL><INDENT>portal = self.character.portal[prevsubplace][subplace]<EOL>turn_inc = portal.get(weight, <NUM_LIT:1>)<EOL>eng.turn += turn_inc<EOL>self.location = subplace<EOL>turns_total += turn_inc<EOL>subsubpath.append(subplace)<EOL>prevsubplace = subplace<EOL><DEDENT>self.location = subplace<EOL>eng.time = turn_now, tick_now<EOL><DEDENT>return turns_total<EOL>
|
Go to several :class:`Place`s in succession, deciding how long to
spend in each by consulting the ``weight`` stat of the
:class:`Portal` connecting the one :class:`Place` to the next.
Return the total number of turns the travel will take. Raise
:class:`TravelException` if I can't follow the whole path,
either because some of its nodes don't exist, or because I'm
scheduled to be somewhere else.
|
f9049:c0:m16
|
def travel_to(self, dest, weight=None, graph=None):
|
destn = dest.name if hasattr(dest, '<STR_LIT:name>') else dest<EOL>if destn == self.location.name:<EOL><INDENT>raise ValueError("<STR_LIT>".format(destn))<EOL><DEDENT>graph = self.character if graph is None else graph<EOL>path = nx.shortest_path(graph, self["<STR_LIT:location>"], destn, weight)<EOL>return self.follow_path(path, weight)<EOL>
|
Find the shortest path to the given :class:`Place` from where I am
now, and follow it.
If supplied, the ``weight`` stat of the :class:`Portal`s along
the path will be used in pathfinding, and for deciding how
long to stay in each Place along the way.
The ``graph`` argument may be any NetworkX-style graph. It
will be used for pathfinding if supplied, otherwise I'll use
my :class:`Character`. In either case, however, I will attempt
to actually follow the path using my :class:`Character`, which
might not be possible if the supplied ``graph`` and my
:class:`Character` are too different. If it's not possible,
I'll raise a :class:`TravelException`, whose ``subpath``
attribute holds the part of the path that I *can* follow. To
make me follow it, pass it to my ``follow_path`` method.
Return value is the number of turns the travel will take.
|
f9049:c0:m17
|
def delete(self):
|
super().delete()<EOL>self.character.place.send(self.character.place, key=self.name, val=None)<EOL>
|
Remove myself from the world model immediately.
|
f9050:c0:m2
|
def __init__(self, node):
|
super().__init__(node.engine, node.rulebook)<EOL>self.node = node<EOL>
|
Initialize with node's engine, character, and rulebook.
|
f9051:c0:m0
|
def __init__(self, node):
|
self.node = node<EOL>
|
Store the node
|
f9051:c1:m0
|
@reify<EOL><INDENT>def portal(self):<DEDENT>
|
return Dests(self)<EOL>
|
Return a mapping of portals connecting this node to its neighbors.
|
f9051:c9:m4
|
def __init__(self, character, name):
|
super().__init__(character, name)<EOL>self.db = character.engine<EOL>
|
Store character and name, and initialize caches
|
f9051:c9:m7
|
def __contains__(self, k):
|
if k in self.extrakeys:<EOL><INDENT>return True<EOL><DEDENT>return super().__contains__(k)<EOL>
|
Handle extra keys, then delegate.
|
f9051:c9:m10
|
def portals(self):
|
yield from self.portal.values()<EOL>
|
Iterate over :class:`Portal` objects that lead away from me
|
f9051:c9:m13
|
def successors(self):
|
for port in self.portal.values():<EOL><INDENT>yield port.destination<EOL><DEDENT>
|
Iterate over nodes with edges leading from here to there.
|
f9051:c9:m14
|
def preportals(self):
|
yield from self.preportal.values()<EOL>
|
Iterate over :class:`Portal` objects that lead to me
|
f9051:c9:m15
|
def predecessors(self):
|
for port in self.preportal.values():<EOL><INDENT>yield port.origin<EOL><DEDENT>
|
Iterate over nodes with edges leading here from there.
|
f9051:c9:m16
|
def path_exists(self, dest, weight=None):
|
try:<EOL><INDENT>return bool(self.shortest_path_length(dest, weight))<EOL><DEDENT>except KeyError:<EOL><INDENT>return False<EOL><DEDENT>
|
Return whether there is a path leading from me to ``dest``.
With ``weight``, only consider edges that have a stat by the
given name.
Raise ``ValueError`` if ``dest`` is not a node in my character
or the name of one.
|
f9051:c9:m20
|
def delete(self):
|
if self.name in self.character.portal:<EOL><INDENT>del self.character.portal[self.name]<EOL><DEDENT>if self.name in self.character.preportal:<EOL><INDENT>del self.character.preportal[self.name]<EOL><DEDENT>for contained in list(self.contents()):<EOL><INDENT>contained.delete()<EOL><DEDENT>for user in list(self.users.values()):<EOL><INDENT>user.del_avatar(self.character.name, self.name)<EOL><DEDENT>branch, turn, tick = self.engine._nbtt()<EOL>self.engine._nodes_cache.store(<EOL>self.character.name, self.name,<EOL>branch, turn, tick, False<EOL>)<EOL>self.engine.query.exist_node(<EOL>self.character.name, self.name,<EOL>branch, turn, tick, False<EOL>)<EOL>self.character.node.send(self.character.node, key=self.name, val=None)<EOL>
|
Get rid of this, starting now.
Apart from deleting the node, this also informs all its users
that it doesn't exist and therefore can't be their avatar
anymore.
|
f9051:c9:m23
|
def one_way_portal(self, other, **stats):
|
return self.character.new_portal(<EOL>self, other, symmetrical=False, **stats<EOL>)<EOL>
|
Connect a portal from here to another node, and return it.
|
f9051:c9:m24
|
def one_way(self, other, **stats):
|
return self.one_way_portal(other, **stats)<EOL>
|
Connect a portal from here to another node, and return it.
|
f9051:c9:m25
|
def two_way_portal(self, other, **stats):
|
return self.character.new_portal(<EOL>self, other, symmetrical=True, **stats<EOL>)<EOL>
|
Connect these nodes with a two-way portal and return it.
|
f9051:c9:m26
|
def two_way(self, other, **stats):
|
return self.two_way_portal(other, **stats)<EOL>
|
Connect these nodes with a two-way portal and return it.
|
f9051:c9:m27
|
def new_thing(self, name, **stats):
|
return self.character.new_thing(<EOL>name, self.name, **stats<EOL>)<EOL>
|
Create a new thing, located here, and return it.
|
f9051:c9:m28
|
def historical(self, stat):
|
return StatusAlias(<EOL>entity=self,<EOL>stat=stat<EOL>)<EOL>
|
Return a reference to the values that a stat has had in the past.
You can use the reference in comparisons to make a history
query, and execute the query by calling it, or passing it to
``self.engine.ticks_when``.
|
f9051:c9:m29
|
def __init__(self, query, filename, lang='<STR_LIT>'):
|
super().__init__()<EOL>self.query = query<EOL>self._filename = filename<EOL>self._language = lang<EOL>try:<EOL><INDENT>with open(filename, '<STR_LIT:r>') as inf:<EOL><INDENT>self.cache = json.load(inf)<EOL><DEDENT><DEDENT>except FileNotFoundError:<EOL><INDENT>self.cache = {lang: {}}<EOL><DEDENT>
|
Store the engine, the name of the database table to use, and the
language code.
|
f9052:c3:m0
|
def __setitem__(self, k, v):
|
self.cache[self.language][k] = v<EOL>self.send(self, key=k, val=v)<EOL>
|
Set the value of a string for the current language.
|
f9052:c3:m4
|
def __delitem__(self, k):
|
del self.cache[self.language][k]<EOL>self.send(self, key=k, val=None)<EOL>
|
Delete the string from the current language, and remove it from the
cache.
|
f9052:c3:m5
|
def lang_items(self, lang=None):
|
if lang is None:<EOL><INDENT>lang = self.language<EOL><DEDENT>yield from self.cache.setdefault(lang, {}).items()<EOL>
|
Yield pairs of (id, string) for the given language.
|
f9052:c3:m6
|
def __init__(self, engine):
|
super().__init__()<EOL>self.engine = engine<EOL>
|
Store the engine and initialize my private dictionary of
listeners.
|
f9052:c6:m0
|
def __getitem__(self, k):
|
return self.engine._universal_cache.retrieve(k, *self.engine._btt())<EOL>
|
Get the current value of this key
|
f9052:c6:m3
|
def __setitem__(self, k, v):
|
branch, turn, tick = self.engine._nbtt()<EOL>self.engine._universal_cache.store(k, branch, turn, tick, v)<EOL>self.engine.query.universal_set(k, branch, turn, tick, v)<EOL>self.engine.tick = tick<EOL>self.send(self, key=k, val=v)<EOL>
|
Set k=v at the current branch and tick
|
f9052:c6:m4
|
def __delitem__(self, k):
|
branch, turn, tick = self.engine._nbtt()<EOL>self.engine._universal_cache.store(k, branch, turn, tick, None)<EOL>self.engine.query.universal_del(k, branch, turn, tick)<EOL>self.send(self, key=k, val=None)<EOL>
|
Unset this key for the present (branch, tick)
|
f9052:c6:m5
|
def __init__(self, engine):
|
super().__init__()<EOL>self.engine = engine<EOL>self._cache = None<EOL>
|
Store the engine, initialize caches
|
f9052:c7:m0
|
def __iter__(self):
|
return iter(self.engine._graph_objs)<EOL>
|
Iterate over every character name.
|
f9052:c7:m1
|
def __contains__(self, name):
|
if self.engine._graph_objs:<EOL><INDENT>self._cache = None<EOL>return name in self.engine._graph_objs<EOL><DEDENT>if self._cache is None:<EOL><INDENT>self._cache = [ch for ch, typ in self.engine.query.graphs_types() if typ == '<STR_LIT>']<EOL><DEDENT>return name in self._cache<EOL>
|
Has this character been created?
|
f9052:c7:m2
|
def __len__(self):
|
return len(self.engine._graph_objs)<EOL>
|
How many characters have been created?
|
f9052:c7:m3
|
def __getitem__(self, name):
|
from .character import Character<EOL>if name not in self:<EOL><INDENT>raise KeyError("<STR_LIT>")<EOL><DEDENT>cache = self.engine._graph_objs<EOL>if name not in cache:<EOL><INDENT>cache[name] = Character(self.engine, name)<EOL><DEDENT>ret = cache[name]<EOL>if not isinstance(ret, Character):<EOL><INDENT>raise TypeError(<EOL>"""<STR_LIT>"""<EOL>)<EOL><DEDENT>return ret<EOL>
|
Return the named character, if it's been created.
Try to use the cache if possible.
|
f9052:c7:m4
|
def __setitem__(self, name, value):
|
from .character import Character<EOL>if isinstance(value, Character):<EOL><INDENT>self.engine._graph_objs[name] = value<EOL>return<EOL><DEDENT>if name in self.engine._graph_objs:<EOL><INDENT>ch = self.engine._graph_objs[name]<EOL>ch.stat.clear()<EOL>ch.stat.update(value)<EOL><DEDENT>else:<EOL><INDENT>ch = self.engine._graph_objs[name] = Character(<EOL>self.engine, name, data=value<EOL>)<EOL><DEDENT>self.send(self, key=name, val=ch)<EOL>
|
Make a new character by the given name, and initialize its data to
the given value.
|
f9052:c7:m5
|
def __delitem__(self, name):
|
if name in self.engine._graph_objs:<EOL><INDENT>del self.engine._graph_objs[name]<EOL><DEDENT>self.engine.query.del_graph(name)<EOL>self.send(self, key=name, val=None)<EOL>
|
Delete the named character from both the cache and the database.
|
f9052:c7:m6
|
def __init__(self, d1, d2):
|
self.d1 = d1<EOL>self.d2 = d2<EOL>
|
Store dictionaries
|
f9052:c8:m0
|
def __iter__(self):
|
for k in self.d1:<EOL><INDENT>yield k<EOL><DEDENT>for k in self.d2:<EOL><INDENT>yield k<EOL><DEDENT>
|
Iterate over both dictionaries' keys
|
f9052:c8:m1
|
def __len__(self):
|
return len(self.d1) + len(self.d2)<EOL>
|
Sum the lengths of both dictionaries
|
f9052:c8:m2
|
def __getitem__(self, k):
|
try:<EOL><INDENT>return self.d1[k]<EOL><DEDENT>except KeyError:<EOL><INDENT>return self.d2[k]<EOL><DEDENT>
|
Get an item from ``d1`` if possible, then ``d2``
|
f9052:c8:m3
|
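The three rows above describe a composite mapping that reads from ``d1`` first and falls back to ``d2``. For comparison, the standard library's ``collections.ChainMap`` gives the same lookup order, though its length counts unique keys rather than summing both dictionaries as the class above does.

```python
from collections import ChainMap

d1 = {"a": 1}
d2 = {"a": 10, "b": 2}
combined = ChainMap(d1, d2)
assert combined["a"] == 1      # found in d1 first
assert combined["b"] == 2      # falls back to d2
assert len(combined) == 2      # unique keys, unlike len(d1) + len(d2)
```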
def ensure_session_key(request):
|
key = request.session.session_key<EOL>if key is None:<EOL><INDENT>request.session.save()<EOL>request.session.modified = True<EOL>key = request.session.session_key<EOL><DEDENT>return key<EOL>
|
Given a request, return the session key that will be used. There may already
be a session key associated with it, but if there is not, we force the session
to create itself and persist between requests from the client behind the given
request.
|
f9071:m0
|
@hookimpl<EOL>def tox_configure(config):
|
return _ensure_envs_recreated_on_requirements_update(config)<EOL>
|
:param tox.config.Config config: Configuration object to observe.
:rtype: tox.config.Config
|
f9077:m0
|
def are_requirements_changed(config):
|
deps = (dep.name for dep in config.deps)<EOL>def build_fpath_for_previous_version(fname):<EOL><INDENT>tox_dir = config.config.toxworkdir.strpath<EOL>envdirkey = _str_to_sha1hex(str(config.envdir))<EOL>fname = '<STR_LIT>'.format(fname.replace('<STR_LIT:/>', '<STR_LIT:->'), envdirkey)<EOL>return os.path.join(tox_dir, fname)<EOL><DEDENT>requirement_files = map(parse_requirements_fname, deps)<EOL>return any([<EOL>is_changed(reqfile, build_fpath_for_previous_version(reqfile))<EOL>for reqfile in requirement_files<EOL>if reqfile and os.path.isfile(reqfile)])<EOL>
|
Check whether any of the requirement files used by the testenv have been updated.
:param tox.config.TestenvConfig config: Configuration object to observe.
:rtype: bool
|
f9077:m2
|
def parse_pip_requirements(requirement_file_path):
|
return sorted(<EOL>str(r.req)<EOL>for r in parse_requirements(requirement_file_path,<EOL>session=PipSession())<EOL>if r.req<EOL>)<EOL>
|
Parses requirements using the pip API.
:param str requirement_file_path: path of the requirement file to parse.
:returns list: list of requirements
|
f9077:m3
|
def is_changed(fpath, prev_version_fpath):
|
if not (fpath and os.path.isfile(fpath)):<EOL><INDENT>raise ValueError("<STR_LIT>".format(fpath))<EOL><DEDENT>new_requirements = parse_pip_requirements(fpath)<EOL>new_requirements_hash = _str_to_sha1hex(str(new_requirements))<EOL>previous_requirements_hash = <NUM_LIT:0><EOL>if os.path.exists(prev_version_fpath):<EOL><INDENT>with open(prev_version_fpath) as fd:<EOL><INDENT>previous_requirements_hash = fd.read()<EOL><DEDENT><DEDENT>dirname = os.path.dirname(prev_version_fpath)<EOL>if not os.path.isdir(dirname):<EOL><INDENT>os.makedirs(dirname)<EOL><DEDENT>with open(prev_version_fpath, '<STR_LIT>') as fd:<EOL><INDENT>fd.write(new_requirements_hash)<EOL><DEDENT>return previous_requirements_hash != new_requirements_hash<EOL>
|
Check whether the requirements file has changed relative to the previous version of the file.
:param str fpath: Path to the requirements file.
:param str prev_version_fpath: Path to the prev. version requirements file.
:rtype: bool
:raise ValueError: Requirements file doesn't exist.
|
f9077:m4
|
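The ``is_changed`` row above compares a SHA1 of the parsed requirements with a stored hash from the previous run, then stores the new hash. A standalone sketch of that change-detection pattern, with illustrative function and path names:

```python
import hashlib
import os

def content_changed(content, state_path):
    # Hash the current content, compare with the stored hash, store the new one.
    new_hash = hashlib.sha1(content.encode("utf-8")).hexdigest()
    old_hash = None
    if os.path.exists(state_path):
        with open(state_path) as fd:
            old_hash = fd.read()
    with open(state_path, "w") as fd:
        fd.write(new_hash)
    return old_hash != new_hash
```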
def parse_requirements_fname(dep_name):
|
req_option = '<STR_LIT>'<EOL>if dep_name.startswith(req_option):<EOL><INDENT>return dep_name[len(req_option):]<EOL><DEDENT>
|
Parse requirements file path from dependency declaration (-r<filepath>).
>>> parse_requirements_fname('pep8')
>>> parse_requirements_fname('-rrequirements.txt')
'requirements.txt'
:param dep_name: Name of the dependency
:return: Requirements file path specified in the dependency declaration
if specified otherwise None
:rtype: str or None
|
f9077:m5
|
def _str_to_sha1hex(v):
|
return hashlib.sha1(v.encode('<STR_LIT:utf-8>')).hexdigest()<EOL>
|
Turn string into a SHA1 hex-digest.
>>> _str_to_sha1hex('abc')
'a9993e364706816aba3e25717850c26c9cd0d89d'
|
f9077:m6
|
def map_value(self, value, gid):
|
base_gid = self.base_gid_pattern.search(gid).group(<NUM_LIT:1>)<EOL>if self.anonymyze:<EOL><INDENT>try:<EOL><INDENT>if value in self._maps[base_gid]:<EOL><INDENT>return self._maps[base_gid][value]<EOL><DEDENT>else:<EOL><INDENT>k = (len(self._maps[base_gid]) + <NUM_LIT:1>) % self.mapmax<EOL>new_item = u'<STR_LIT>'.format(base_gid.upper(), k, self.mapexp)<EOL>self._maps[base_gid][value] = new_item<EOL>return new_item<EOL><DEDENT><DEDENT>except KeyError:<EOL><INDENT>return value<EOL><DEDENT><DEDENT>elif base_gid in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:user>'] and self.ip_lookup:<EOL><INDENT>ip_match = self.ip_pattern.search(value)<EOL>if ip_match is None:<EOL><INDENT>return value<EOL><DEDENT>host = self.gethost(ip_match.group(<NUM_LIT:1>))<EOL>if host == ip_match.group(<NUM_LIT:1>) or value.startswith(host):<EOL><INDENT>return value<EOL><DEDENT>return u'<STR_LIT>'.join([<EOL>value[:ip_match.start(<NUM_LIT:1>)],<EOL>self.gethost(ip_match.group(<NUM_LIT:1>)),<EOL>value[ip_match.end(<NUM_LIT:1>):]])<EOL><DEDENT>elif (base_gid == '<STR_LIT:user>' or base_gid == '<STR_LIT>') and self.uid_lookup:<EOL><INDENT>return self.getuname(value)<EOL><DEDENT>else:<EOL><INDENT>return value<EOL><DEDENT>
|
Return the value for a group id, applying requested mapping.
Map only groups related to a filter, i.e. when the basename of
the group is identical to the name of a filter.
|
f9079:c0:m4
|
def match_to_dict(self, match, gids):
|
values = {}<EOL>for gid in gids:<EOL><INDENT>try:<EOL><INDENT>values[gid] = self.map_value(match.group(gid), gid)<EOL><DEDENT>except IndexError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>return values<EOL>
|
Map values from match into a dictionary.
|
f9079:c0:m5
|
def match_to_string(self, match, gids, values=None):
|
s = match.string<EOL>parts = []<EOL>k = <NUM_LIT:0><EOL>for gid in sorted(gids, key=lambda x: gids[x]):<EOL><INDENT>if values is None:<EOL><INDENT>try:<EOL><INDENT>value = self.map_value(match.group(gid), gid)<EOL>parts.append(s[k:match.start(gid)])<EOL>parts.append(value)<EOL>k = match.end(gid)<EOL><DEDENT>except IndexError:<EOL><INDENT>continue<EOL><DEDENT><DEDENT>elif gid in values:<EOL><INDENT>parts.append(s[k:match.start(gid)])<EOL>parts.append(values[gid])<EOL>k = match.end(gid)<EOL><DEDENT><DEDENT>parts.append(s[k:])<EOL>return u"<STR_LIT>".join(parts)<EOL>
|
Return the mapped string from the match object. If a dictionary of
values is provided then use it to build the string.
|
f9079:c0:m6
|
def gethost(self, ip_addr):
|
<EOL>try:<EOL><INDENT>if ip_addr[:<NUM_LIT:7>] == '<STR_LIT>':<EOL><INDENT>ip_addr = ip_addr[<NUM_LIT:7>:]<EOL><DEDENT><DEDENT>except TypeError:<EOL><INDENT>pass<EOL><DEDENT>if ip_addr[<NUM_LIT:0>] in string.letters:<EOL><INDENT>return ip_addr<EOL><DEDENT>try:<EOL><INDENT>return self.hostsmap[ip_addr]<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT>try:<EOL><INDENT>name = socket.gethostbyaddr(ip_addr)[<NUM_LIT:0>]<EOL><DEDENT>except socket.error:<EOL><INDENT>name = ip_addr<EOL><DEDENT>self.hostsmap[ip_addr] = name<EOL>return name<EOL>
|
Do a reverse lookup on an IP address.
|
f9079:c0:m7
|
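A standalone sketch of the cached reverse-DNS lookup that ``gethost`` performs, using only the stdlib calls visible in the row above; names are illustrative.

```python
import socket

_hosts_cache = {}

def reverse_lookup(ip_addr):
    # Return the cached hostname, or resolve it and fall back to the address
    # itself when resolution fails.
    if ip_addr in _hosts_cache:
        return _hosts_cache[ip_addr]
    try:
        name = socket.gethostbyaddr(ip_addr)[0]
    except socket.error:
        name = ip_addr
    _hosts_cache[ip_addr] = name
    return name
```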
def getuname(self, uid):
|
uid = int(uid)<EOL>try:<EOL><INDENT>return self.uidsmap[uid]<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT>try:<EOL><INDENT>name = pwd.getpwuid(uid)[<NUM_LIT:0>]<EOL><DEDENT>except (KeyError, AttributeError):<EOL><INDENT>name = "<STR_LIT>" % uid<EOL><DEDENT>self.uidsmap[uid] = name<EOL>return name<EOL>
|
Get the username of a given uid.
|
f9079:c0:m8
|
def iter_paths(self, pathnames=None, mapfunc=None):
|
pathnames = pathnames or self._pathnames<EOL>if self.recursive and not pathnames:<EOL><INDENT>pathnames = ['<STR_LIT:.>']<EOL><DEDENT>elif not pathnames:<EOL><INDENT>yield []<EOL><DEDENT>if mapfunc is not None:<EOL><INDENT>for mapped_paths in map(mapfunc, pathnames):<EOL><INDENT>for path in mapped_paths:<EOL><INDENT>if self.recursive and (os.path.isdir(path) or os.path.islink(path)):<EOL><INDENT>for t in os.walk(path, followlinks=self.follow_symlinks):<EOL><INDENT>for filename, values in self.iglob(os.path.join(t[<NUM_LIT:0>], '<STR_LIT:*>')):<EOL><INDENT>yield filename, values<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>empty_glob = True<EOL>for filename, values in self.iglob(path):<EOL><INDENT>yield filename, values<EOL>empty_glob = False<EOL><DEDENT>if empty_glob:<EOL><INDENT>yield path, None<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>for path in pathnames:<EOL><INDENT>if self.recursive and (os.path.isdir(path) or os.path.islink(path)):<EOL><INDENT>for t in os.walk(path, followlinks=self.follow_symlinks):<EOL><INDENT>for filename, values in self.iglob(os.path.join(t[<NUM_LIT:0>], '<STR_LIT:*>')):<EOL><INDENT>yield filename, values<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>empty_glob = True<EOL>for filename, values in self.iglob(path):<EOL><INDENT>yield filename, values<EOL>empty_glob = False<EOL><DEDENT>if empty_glob:<EOL><INDENT>yield path, None<EOL><DEDENT><DEDENT><DEDENT><DEDENT>
|
Special iteration over paths. Yields pairs of (path, items). If an expanded path
doesn't match any files, a pair of (path, `None`) is yielded.
:param pathnames: Iterable with a set of pathnames. If it is `None`, uses all \
the stored pathnames.
:param mapfunc: A mapping function for building the effective path from various \
wildcards (eg. time spec wildcards).
:return: Yields 2-tuples.
|
f9082:c0:m9
|
def __init__(self, time_period=None, recursive=False, follow_symlinks=False, include=None,<EOL>exclude=None, exclude_dir=None):
|
start_dt, end_dt = time_period or (None, None)<EOL>if start_dt is not None and end_dt is not None and start_dt > end_dt:<EOL><INDENT>ValueError("<STR_LIT>")<EOL><DEDENT>self._filemap = GlobDict(recursive=recursive, follow_symlinks=follow_symlinks, include=include,<EOL>exclude=exclude, exclude_dir=exclude_dir, dict_class=OrderedDict)<EOL>self.start_dt = start_dt<EOL>self.end_dt = end_dt<EOL>
|
:param time_period: Time period for filtering the iteration over files. \
When it is `(None, None)`, no filter is applied to the selected files.
|
f9082:c1:m0
|
def __iter__(self):
|
if self.start_dt is None:<EOL><INDENT>for path, items in self._filemap.iter_paths():<EOL><INDENT>yield path, items<EOL><DEDENT><DEDENT>else:<EOL><INDENT>for path, items in self._filemap.iter_paths(mapfunc=strftimegen(self.start_dt, self.end_dt)):<EOL><INDENT>if items is None:<EOL><INDENT>yield path, items<EOL><DEDENT>elif self.check_stat(path):<EOL><INDENT>yield path, items<EOL><DEDENT><DEDENT><DEDENT>
|
Iterate into the file map, with filename glob expansion.
|
f9082:c1:m1
|
def check_stat(self, path):
|
statinfo = os.stat(path)<EOL>st_mtime = datetime.fromtimestamp(statinfo.st_mtime)<EOL>if platform.system() == '<STR_LIT>':<EOL><INDENT>check = st_mtime >= self.start_dt<EOL><DEDENT>else:<EOL><INDENT>st_ctime = datetime.fromtimestamp(statinfo.st_ctime)<EOL>check = st_mtime >= self.start_dt and st_ctime <= self.end_dt<EOL><DEDENT>if not check:<EOL><INDENT>logger.info("<STR_LIT>", path)<EOL><DEDENT>return check<EOL>
|
Check logfile stat information to exclude files outside the datetime period.
On Linux it's possible to check only the modification time, because file creation info
is not available, so only older files can be excluded.
On Unix BSD systems and Windows, information about the file creation date and time is available,
so it's possible to exclude newer files too.
|
f9082:c1:m3
|
def add(self, files, items):
|
if isinstance(files, (str, bytes)):<EOL><INDENT>files = iter([files])<EOL><DEDENT>for pathname in files:<EOL><INDENT>try:<EOL><INDENT>values = self._filemap[pathname]<EOL><DEDENT>except KeyError:<EOL><INDENT>self._filemap[pathname] = items<EOL><DEDENT>else:<EOL><INDENT>values.extend(items)<EOL><DEDENT><DEDENT>
|
Add a list of files with a reference to a list of objects.
|
f9082:c1:m4
|
def __init__(self, pattern, app=None):
|
self.parser = re.compile(pattern)<EOL>self.app = app<EOL>self.LogData = namedtuple('<STR_LIT>', self.parser.groupindex.keys())<EOL>self.fields = tuple(self.parser.groupindex.keys())<EOL>for field in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:message>'):<EOL><INDENT>if field not in self.parser.groupindex:<EOL><INDENT>msg = '<STR_LIT>' % (self.__class__.__name__, field)<EOL>raise LogRaptorConfigError(msg)<EOL><DEDENT><DEDENT>
|
Compile the pattern and record the group fields. Check that the pattern
includes the mandatory named groups.
|
f9084:c0:m0
|
def parse_last_period(last):
|
wordmap = {<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>'<EOL>}<EOL>multmap = {<EOL>'<STR_LIT:h>': <NUM_LIT>,<EOL>'<STR_LIT:d>': <NUM_LIT>,<EOL>'<STR_LIT:w>': <NUM_LIT>,<EOL>'<STR_LIT:m>': <NUM_LIT><EOL>}<EOL>if last in wordmap:<EOL><INDENT>last = wordmap[last]<EOL><DEDENT>cat = last[-<NUM_LIT:1>:].lower()<EOL>if cat not in multmap:<EOL><INDENT>raise TypeError<EOL><DEDENT>try: <EOL><INDENT>num = int(last[:-<NUM_LIT:1>])<EOL>if num <= <NUM_LIT:0>:<EOL><INDENT>raise TypeError<EOL><DEDENT><DEDENT>except ValueError: <EOL><INDENT>raise TypeError<EOL><DEDENT>diff = num * multmap[cat] <EOL>return diff<EOL>
|
Parse the --last value and return the time difference in seconds.
|
f9085:m0
|
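The ``parse_last_period`` row above maps a unit suffix to a multiplier, but the concrete numbers are masked by ``<NUM_LIT>`` tokens. A minimal sketch assuming the conventional seconds-per-unit values (an assumption, not read from the dataset):

```python
# Assumed multipliers: hour, day, week, ~month (30 days).
UNIT_SECONDS = {"h": 3600, "d": 86400, "w": 604800, "m": 2592000}

def last_to_seconds(last):
    unit = last[-1].lower()
    if unit not in UNIT_SECONDS:
        raise TypeError("unknown time unit: %r" % unit)
    num = int(last[:-1])
    if num <= 0:
        raise TypeError("the amount must be positive")
    return num * UNIT_SECONDS[unit]

assert last_to_seconds("2d") == 172800
```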
def get_datetime_interval(timestamp, diff, offset=<NUM_LIT:0>):
|
fin_datetime = datetime.datetime.fromtimestamp(timestamp + offset)<EOL>ini_datetime = datetime.datetime.fromtimestamp(timestamp - diff)<EOL>return ini_datetime, fin_datetime<EOL>
|
Returns datetime interval from timestamp backward in the past,
computed using the milliseconds difference passed as argument.
The final datetime is corrected with an optional offset.
|
f9085:m1
|
def parse_date_period(date):
|
import datetime<EOL>now = datetime.datetime.today()<EOL>date_len = len(date)<EOL>if date_len == <NUM_LIT:4>:<EOL><INDENT>date1 = str(now.year) + date<EOL>date2 = str(now.year) + date + "<STR_LIT>"<EOL><DEDENT>elif date_len == <NUM_LIT:8>:<EOL><INDENT>date1 = date<EOL>date2 = date + "<STR_LIT>"<EOL><DEDENT>elif date_len == <NUM_LIT:9>:<EOL><INDENT>if date[<NUM_LIT:4>] != '<STR_LIT:U+002C>':<EOL><INDENT>raise TypeError<EOL><DEDENT>date1 = str(now.year) + date[<NUM_LIT:0>:<NUM_LIT:4>]<EOL>date2 = str(now.year) + date[<NUM_LIT:5>:<NUM_LIT:9>] + "<STR_LIT>"<EOL><DEDENT>elif date_len == <NUM_LIT>:<EOL><INDENT>if date[<NUM_LIT:8>] != '<STR_LIT:U+002C>':<EOL><INDENT>raise TypeError<EOL><DEDENT>date1 = date[<NUM_LIT:0>:<NUM_LIT:8>]<EOL>date2 = date[<NUM_LIT:9>:<NUM_LIT>] + "<STR_LIT>"<EOL><DEDENT>else:<EOL><INDENT>raise TypeError<EOL><DEDENT>try:<EOL><INDENT>date1 = datetime.datetime.strptime(date1, "<STR_LIT>")<EOL><DEDENT>except ValueError:<EOL><INDENT>if date_len < <NUM_LIT:9>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>else:<EOL><INDENT>raise ValueError("<STR_LIT>"<EOL>"<STR_LIT>")<EOL><DEDENT><DEDENT>try:<EOL><INDENT>date2 = datetime.datetime.strptime(date2, "<STR_LIT>") <EOL><DEDENT>except ValueError:<EOL><INDENT>raise ValueError("<STR_LIT>"<EOL>"<STR_LIT>")<EOL><DEDENT>if date1 > date2:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>return date1, date2<EOL>
|
Parse the --date value and return a couple of datetime object.
The format is [YYYY]MMDD[,[YYYY]MMDD].
|
f9085:m2
|
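A simplified, hypothetical re-implementation of the ``[YYYY]MMDD[,[YYYY]MMDD]`` parsing described above, to make the accepted format concrete; it skips the stricter length checks of the dataset's version.

```python
import datetime

def simple_date_period(date):
    year = str(datetime.date.today().year)
    first, _, second = date.partition(",")
    if len(first) == 4:              # MMDD -> assume the current year
        first = year + first
    if not second:                   # a single date covers that whole day
        second = first
    elif len(second) == 4:
        second = year + second
    start = datetime.datetime.strptime(first, "%Y%m%d")
    end = datetime.datetime.strptime(second + "235959", "%Y%m%d%H%M%S")
    if start > end:
        raise ValueError("the first date is after the second one")
    return start, end

# e.g. simple_date_period("0501,0531") -> (May 1 00:00:00, May 31 23:59:59)
```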
def strftimegen(start_dt, end_dt):
|
if start_dt > end_dt:<EOL><INDENT>raise ValueError("<STR_LIT>" % (start_dt, end_dt))<EOL><DEDENT>def iterftime(string):<EOL><INDENT>date_subs = [i for i in DATE_FORMATS if i[<NUM_LIT:1>].search(string) is not None]<EOL>if not date_subs:<EOL><INDENT>yield string<EOL><DEDENT>else:<EOL><INDENT>dt = start_dt<EOL>date_path = string<EOL>while end_dt >= dt:<EOL><INDENT>for item in date_subs:<EOL><INDENT>date_path = item[<NUM_LIT:1>].sub(dt.strftime(item[<NUM_LIT:0>]), date_path)<EOL><DEDENT>yield date_path<EOL>dt = dt + datetime.timedelta(days=<NUM_LIT:1>)<EOL><DEDENT><DEDENT><DEDENT>return iterftime<EOL>
|
Return a generator function for datetime format strings.
The generator produces a day-by-day sequence starting from the first datetime
argument up to the second.
|
f9085:m3
|
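A minimal sketch of the day-by-day expansion ``strftimegen`` performs: each strftime wildcard in a path is substituted once per day between the two datetimes. The real function precompiles a table of wildcard patterns; this version just calls ``strftime`` directly, and the path template is illustrative.

```python
import datetime

def expand_dates(path_template, start_dt, end_dt):
    if start_dt > end_dt:
        raise ValueError("start is after end")
    dt = start_dt
    while dt <= end_dt:
        yield dt.strftime(path_template)
        dt += datetime.timedelta(days=1)

paths = list(expand_dates("/var/log/app/%Y/%m/%d.log",
                          datetime.datetime(2020, 1, 30),
                          datetime.datetime(2020, 2, 1)))
assert paths == ["/var/log/app/2020/01/30.log",
                 "/var/log/app/2020/01/31.log",
                 "/var/log/app/2020/02/01.log"]
```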
def __init__(self, time_range):
|
try:<EOL><INDENT>start_time, end_time = time_range.split('<STR_LIT:U+002C>')<EOL><DEDENT>except ValueError:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>self.start_time = datetime.datetime.strptime(start_time.strip(), '<STR_LIT>').time()<EOL>self.end_time = datetime.datetime.strptime(end_time.strip(), '<STR_LIT>').time()<EOL>if self.start_time == self.end_time:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>self.h1 = self.start_time.hour<EOL>self.m1 = self.start_time.minute<EOL>self.h2 = self.end_time.hour<EOL>self.m2 = self.end_time.minute<EOL>
|
Constructor from timerange string.
The time range format is HH:MM,HH:MM.
|
f9085:c0:m0
|