id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
list | docstring
stringlengths 3
17.3k
| docstring_tokens
list | sha
stringlengths 40
40
| url
stringlengths 87
242
|
|---|---|---|---|---|---|---|---|---|---|---|---|
241,100
|
olsoneric/pedemath
|
pedemath/vec2.py
|
dot_v2
|
def dot_v2(vec1, vec2):
    """Compute the dot (scalar) product of two 2D vectors.

    Both arguments only need ``x`` and ``y`` attributes.
    """
    x_term = vec1.x * vec2.x
    y_term = vec1.y * vec2.y
    return x_term + y_term
|
python
|
def dot_v2(vec1, vec2):
"""Return the dot product of two vectors"""
return vec1.x * vec2.x + vec1.y * vec2.y
|
[
"def",
"dot_v2",
"(",
"vec1",
",",
"vec2",
")",
":",
"return",
"vec1",
".",
"x",
"*",
"vec2",
".",
"x",
"+",
"vec1",
".",
"y",
"*",
"vec2",
".",
"y"
] |
Return the dot product of two vectors
|
[
"Return",
"the",
"dot",
"product",
"of",
"two",
"vectors"
] |
4bffcfe7089e421d603eb0a9708b84789c2d16be
|
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec2.py#L74-L77
|
241,101
|
olsoneric/pedemath
|
pedemath/vec2.py
|
cross_v2
|
def cross_v2(vec1, vec2):
    """Return the 2D cross product of the two vectors as a scalar.

    A cross product is only truly defined in 3D; this returns the Z
    component of the 3D cross product of (vec1.x, vec1.y, 0) and
    (vec2.x, vec2.y, 0).

    NOTE(review): the previous implementation returned
    ``vec1.y * vec2.x - vec1.x * vec2.y`` — the NEGATIVE of the Z
    component promised by the docstring, and inconsistent with
    ``Vec2.cross`` elsewhere in this module.  The sign is corrected
    here; audit callers that may have relied on the inverted sign.
    """
    return vec1.x * vec2.y - vec1.y * vec2.x
|
python
|
def cross_v2(vec1, vec2):
"""Return the crossproduct of the two vectors as a Vec2.
Cross product doesn't really make sense in 2D, but return the Z component
of the 3d result.
"""
return vec1.y * vec2.x - vec1.x * vec2.y
|
[
"def",
"cross_v2",
"(",
"vec1",
",",
"vec2",
")",
":",
"return",
"vec1",
".",
"y",
"*",
"vec2",
".",
"x",
"-",
"vec1",
".",
"x",
"*",
"vec2",
".",
"y"
] |
Return the crossproduct of the two vectors as a Vec2.
Cross product doesn't really make sense in 2D, but return the Z component
of the 3d result.
|
[
"Return",
"the",
"crossproduct",
"of",
"the",
"two",
"vectors",
"as",
"a",
"Vec2",
".",
"Cross",
"product",
"doesn",
"t",
"really",
"make",
"sense",
"in",
"2D",
"but",
"return",
"the",
"Z",
"component",
"of",
"the",
"3d",
"result",
"."
] |
4bffcfe7089e421d603eb0a9708b84789c2d16be
|
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec2.py#L80-L86
|
241,102
|
olsoneric/pedemath
|
pedemath/vec2.py
|
Vec2.truncate
|
def truncate(self, max_length):
    """Truncate this vector in place so its length does not exceed ``max_length``.

    Vectors already at or below ``max_length`` are left unchanged.

    :param max_length: Maximum allowed length (magnitude) of the vector.
    """
    # Hoist the length computation: the original called self.length()
    # twice (once in the test, once in the scale factor).
    length = self.length()
    if length > max_length:
        # Scale down proportionally so the new length equals max_length.
        self.scale(max_length / length)
|
python
|
def truncate(self, max_length):
"""Truncate this vector so it's length does not exceed max."""
if self.length() > max_length:
# If it's longer than the max_length, scale to the max_length.
self.scale(max_length / self.length())
|
[
"def",
"truncate",
"(",
"self",
",",
"max_length",
")",
":",
"if",
"self",
".",
"length",
"(",
")",
">",
"max_length",
":",
"# If it's longer than the max_length, scale to the max_length.",
"self",
".",
"scale",
"(",
"max_length",
"/",
"self",
".",
"length",
"(",
")",
")"
] |
Truncate this vector so it's length does not exceed max.
|
[
"Truncate",
"this",
"vector",
"so",
"it",
"s",
"length",
"does",
"not",
"exceed",
"max",
"."
] |
4bffcfe7089e421d603eb0a9708b84789c2d16be
|
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec2.py#L208-L214
|
241,103
|
olsoneric/pedemath
|
pedemath/vec2.py
|
Vec2.get_scaled_v2
|
def get_scaled_v2(self, amount):
    """Return a new Vec2 equal to this vector scaled by ``amount``.

    This vector is not modified.
    """
    scaled_x = self.x * amount
    scaled_y = self.y * amount
    return Vec2(scaled_x, scaled_y)
|
python
|
def get_scaled_v2(self, amount):
"""Return a new Vec2 with x and y multiplied by amount."""
return Vec2(self.x * amount, self.y * amount)
|
[
"def",
"get_scaled_v2",
"(",
"self",
",",
"amount",
")",
":",
"return",
"Vec2",
"(",
"self",
".",
"x",
"*",
"amount",
",",
"self",
".",
"y",
"*",
"amount",
")"
] |
Return a new Vec2 with x and y multiplied by amount.
|
[
"Return",
"a",
"new",
"Vec2",
"with",
"x",
"and",
"y",
"multiplied",
"by",
"amount",
"."
] |
4bffcfe7089e421d603eb0a9708b84789c2d16be
|
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec2.py#L236-L239
|
241,104
|
olsoneric/pedemath
|
pedemath/vec2.py
|
Vec2.dot
|
def dot(self, vec):
    """Return the scalar (dot) product of this vector and ``vec``."""
    return sum(a * b for a, b in ((self.x, vec.x), (self.y, vec.y)))
|
python
|
def dot(self, vec):
"""Return the dot product of self and another Vec2."""
return self.x * vec.x + self.y * vec.y
|
[
"def",
"dot",
"(",
"self",
",",
"vec",
")",
":",
"return",
"self",
".",
"x",
"*",
"vec",
".",
"x",
"+",
"self",
".",
"y",
"*",
"vec",
".",
"y"
] |
Return the dot product of self and another Vec2.
|
[
"Return",
"the",
"dot",
"product",
"of",
"self",
"and",
"another",
"Vec2",
"."
] |
4bffcfe7089e421d603eb0a9708b84789c2d16be
|
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec2.py#L273-L276
|
241,105
|
olsoneric/pedemath
|
pedemath/vec2.py
|
Vec2.cross
|
def cross(self, vec):
    """Return the scalar 2D cross product of this vector with ``vec``.

    A cross product is only defined in 3D; this is the Z component of
    the 3D cross product of (self.x, self.y, 0) and (vec.x, vec.y, 0).
    """
    lhs = self.x * vec.y
    rhs = vec.x * self.y
    return lhs - rhs
|
python
|
def cross(self, vec):
"""Return the 2d cross product of self with another vector.
Cross product doesn't make sense in 2D, but return the Z component
of the 3d result.
"""
return self.x * vec.y - vec.x * self.y
|
[
"def",
"cross",
"(",
"self",
",",
"vec",
")",
":",
"return",
"self",
".",
"x",
"*",
"vec",
".",
"y",
"-",
"vec",
".",
"x",
"*",
"self",
".",
"y"
] |
Return the 2d cross product of self with another vector.
Cross product doesn't make sense in 2D, but return the Z component
of the 3d result.
|
[
"Return",
"the",
"2d",
"cross",
"product",
"of",
"self",
"with",
"another",
"vector",
".",
"Cross",
"product",
"doesn",
"t",
"make",
"sense",
"in",
"2D",
"but",
"return",
"the",
"Z",
"component",
"of",
"the",
"3d",
"result",
"."
] |
4bffcfe7089e421d603eb0a9708b84789c2d16be
|
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec2.py#L278-L284
|
241,106
|
klmitch/appathy
|
appathy/controller.py
|
Controller._get_action
|
def _get_action(self, action):
"""
Retrieve a descriptor for the named action. Caches
descriptors for efficiency.
"""
# If we don't have an action named that, bail out
if action not in self.wsgi_actions:
return None
# Generate an ActionDescriptor if necessary
if action not in self.wsgi_descriptors:
self.wsgi_descriptors[action] = actions.ActionDescriptor(
self.wsgi_actions[action],
self.wsgi_extensions.get(action, []),
self.wsgi_resp_type)
# OK, return the method descriptor
return self.wsgi_descriptors[action]
|
python
|
def _get_action(self, action):
"""
Retrieve a descriptor for the named action. Caches
descriptors for efficiency.
"""
# If we don't have an action named that, bail out
if action not in self.wsgi_actions:
return None
# Generate an ActionDescriptor if necessary
if action not in self.wsgi_descriptors:
self.wsgi_descriptors[action] = actions.ActionDescriptor(
self.wsgi_actions[action],
self.wsgi_extensions.get(action, []),
self.wsgi_resp_type)
# OK, return the method descriptor
return self.wsgi_descriptors[action]
|
[
"def",
"_get_action",
"(",
"self",
",",
"action",
")",
":",
"# If we don't have an action named that, bail out",
"if",
"action",
"not",
"in",
"self",
".",
"wsgi_actions",
":",
"return",
"None",
"# Generate an ActionDescriptor if necessary",
"if",
"action",
"not",
"in",
"self",
".",
"wsgi_descriptors",
":",
"self",
".",
"wsgi_descriptors",
"[",
"action",
"]",
"=",
"actions",
".",
"ActionDescriptor",
"(",
"self",
".",
"wsgi_actions",
"[",
"action",
"]",
",",
"self",
".",
"wsgi_extensions",
".",
"get",
"(",
"action",
",",
"[",
"]",
")",
",",
"self",
".",
"wsgi_resp_type",
")",
"# OK, return the method descriptor",
"return",
"self",
".",
"wsgi_descriptors",
"[",
"action",
"]"
] |
Retrieve a descriptor for the named action. Caches
descriptors for efficiency.
|
[
"Retrieve",
"a",
"descriptor",
"for",
"the",
"named",
"action",
".",
"Caches",
"descriptors",
"for",
"efficiency",
"."
] |
a10aa7d21d38622e984a8fe106ab37114af90dc2
|
https://github.com/klmitch/appathy/blob/a10aa7d21d38622e984a8fe106ab37114af90dc2/appathy/controller.py#L221-L239
|
241,107
|
klmitch/appathy
|
appathy/controller.py
|
Controller._route
|
def _route(self, action, method):
    """
    Given an action method, generates a route for it.

    Registers a route named ``<resource>_<action>`` on
    ``self.wsgi_mapper``.  The path comes from the method's
    ``_wsgi_path`` attribute; when that is None it is looked up in
    ``self.wsgi_method_map`` by method name, and if no mapping exists
    a warning is logged and nothing is registered.

    :param action: Name of the action being routed.
    :param method: The action method; its ``_wsgi_*`` attributes
        control path, HTTP methods, match conditions and extra
        routing keywords.
    """
    # First thing, determine the path for the method
    path = method._wsgi_path
    methods = None
    if path is None:
        map_rule = self.wsgi_method_map.get(method.__name__)
        if map_rule is None:
            # Can't connect this method
            LOG.warning("No path specified for action method %s() of "
                        "resource %s" % (method.__name__, self.wsgi_name))
            return
        # Compute the path and the method list
        # (map_rule is a (path_template, methods) pair; the template is
        # interpolated with the resource name.)
        path = utils.norm_path(map_rule[0] % self.wsgi_name)
        methods = map_rule[1]
    # Compute route name
    name = '%s_%s' % (self.wsgi_name, action)
    # Set up path
    path = getattr(self, 'wsgi_path_prefix', '') + path
    # Build up the conditions
    conditions = {}
    if hasattr(method, '_wsgi_methods'):
        # A method list from map_rule takes precedence over the
        # decorator-supplied _wsgi_methods when both are present.
        conditions['method'] = methods if methods else method._wsgi_methods
    if hasattr(method, '_wsgi_condition'):
        conditions['function'] = method._wsgi_condition
    # Create the route
    self.wsgi_mapper.connect(name, path,
                             controller=self,
                             action=action,
                             conditions=conditions,
                             **getattr(method, '_wsgi_keywords', {}))
|
python
|
def _route(self, action, method):
"""
Given an action method, generates a route for it.
"""
# First thing, determine the path for the method
path = method._wsgi_path
methods = None
if path is None:
map_rule = self.wsgi_method_map.get(method.__name__)
if map_rule is None:
# Can't connect this method
LOG.warning("No path specified for action method %s() of "
"resource %s" % (method.__name__, self.wsgi_name))
return
# Compute the path and the method list
path = utils.norm_path(map_rule[0] % self.wsgi_name)
methods = map_rule[1]
# Compute route name
name = '%s_%s' % (self.wsgi_name, action)
# Set up path
path = getattr(self, 'wsgi_path_prefix', '') + path
# Build up the conditions
conditions = {}
if hasattr(method, '_wsgi_methods'):
conditions['method'] = methods if methods else method._wsgi_methods
if hasattr(method, '_wsgi_condition'):
conditions['function'] = method._wsgi_condition
# Create the route
self.wsgi_mapper.connect(name, path,
controller=self,
action=action,
conditions=conditions,
**getattr(method, '_wsgi_keywords', {}))
|
[
"def",
"_route",
"(",
"self",
",",
"action",
",",
"method",
")",
":",
"# First thing, determine the path for the method",
"path",
"=",
"method",
".",
"_wsgi_path",
"methods",
"=",
"None",
"if",
"path",
"is",
"None",
":",
"map_rule",
"=",
"self",
".",
"wsgi_method_map",
".",
"get",
"(",
"method",
".",
"__name__",
")",
"if",
"map_rule",
"is",
"None",
":",
"# Can't connect this method",
"LOG",
".",
"warning",
"(",
"\"No path specified for action method %s() of \"",
"\"resource %s\"",
"%",
"(",
"method",
".",
"__name__",
",",
"self",
".",
"wsgi_name",
")",
")",
"return",
"# Compute the path and the method list",
"path",
"=",
"utils",
".",
"norm_path",
"(",
"map_rule",
"[",
"0",
"]",
"%",
"self",
".",
"wsgi_name",
")",
"methods",
"=",
"map_rule",
"[",
"1",
"]",
"# Compute route name",
"name",
"=",
"'%s_%s'",
"%",
"(",
"self",
".",
"wsgi_name",
",",
"action",
")",
"# Set up path",
"path",
"=",
"getattr",
"(",
"self",
",",
"'wsgi_path_prefix'",
",",
"''",
")",
"+",
"path",
"# Build up the conditions",
"conditions",
"=",
"{",
"}",
"if",
"hasattr",
"(",
"method",
",",
"'_wsgi_methods'",
")",
":",
"conditions",
"[",
"'method'",
"]",
"=",
"methods",
"if",
"methods",
"else",
"method",
".",
"_wsgi_methods",
"if",
"hasattr",
"(",
"method",
",",
"'_wsgi_condition'",
")",
":",
"conditions",
"[",
"'function'",
"]",
"=",
"method",
".",
"_wsgi_condition",
"# Create the route",
"self",
".",
"wsgi_mapper",
".",
"connect",
"(",
"name",
",",
"path",
",",
"controller",
"=",
"self",
",",
"action",
"=",
"action",
",",
"conditions",
"=",
"conditions",
",",
"*",
"*",
"getattr",
"(",
"method",
",",
"'_wsgi_keywords'",
",",
"{",
"}",
")",
")"
] |
Given an action method, generates a route for it.
|
[
"Given",
"an",
"action",
"method",
"generates",
"a",
"route",
"for",
"it",
"."
] |
a10aa7d21d38622e984a8fe106ab37114af90dc2
|
https://github.com/klmitch/appathy/blob/a10aa7d21d38622e984a8fe106ab37114af90dc2/appathy/controller.py#L241-L279
|
241,108
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSDefaultHandler.on_message
|
def on_message(self, data):
    """Parse an incoming JSON message and dispatch it to an ``on_<name>`` handler.

    The JSON payload must carry ``name`` and ``data`` keys.  The handler
    is first called with the payload wrapped in a ``Struct``; if that
    fails for any reason, it is retried with the raw payload.  A
    non-None handler result is written back to the client.
    """
    # Trying to parse response
    data = json.loads(data)
    # Idiom fix: was ``if not data["name"] is None``.
    if data["name"] is not None:
        logging.debug("%s: receiving message %s" % (data["name"], data["data"]))
        fct = getattr(self, "on_" + data["name"])
        try:
            res = fct(Struct(data["data"]))
        except Exception:
            # We try without Struct item (on transaction request this can happend)
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            res = fct(data["data"])
        if res is not None:
            self.write_message(res)
    else:
        logging.error("SockJSDefaultHandler: data.name was null")
|
python
|
def on_message(self, data):
""" Parsing data, and try to call responding message """
# Trying to parse response
data = json.loads(data)
if not data["name"] is None:
logging.debug("%s: receiving message %s" % (data["name"], data["data"]))
fct = getattr(self, "on_" + data["name"])
try:
res = fct(Struct(data["data"]))
except:
# We try without Struct item (on transaction request this can happend)
res = fct(data["data"])
if res is not None:
self.write_message(res)
else:
logging.error("SockJSDefaultHandler: data.name was null")
|
[
"def",
"on_message",
"(",
"self",
",",
"data",
")",
":",
"# Trying to parse response",
"data",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"if",
"not",
"data",
"[",
"\"name\"",
"]",
"is",
"None",
":",
"logging",
".",
"debug",
"(",
"\"%s: receiving message %s\"",
"%",
"(",
"data",
"[",
"\"name\"",
"]",
",",
"data",
"[",
"\"data\"",
"]",
")",
")",
"fct",
"=",
"getattr",
"(",
"self",
",",
"\"on_\"",
"+",
"data",
"[",
"\"name\"",
"]",
")",
"try",
":",
"res",
"=",
"fct",
"(",
"Struct",
"(",
"data",
"[",
"\"data\"",
"]",
")",
")",
"except",
":",
"# We try without Struct item (on transaction request this can happend)",
"res",
"=",
"fct",
"(",
"data",
"[",
"\"data\"",
"]",
")",
"if",
"res",
"is",
"not",
"None",
":",
"self",
".",
"write_message",
"(",
"res",
")",
"else",
":",
"logging",
".",
"error",
"(",
"\"SockJSDefaultHandler: data.name was null\"",
")"
] |
Parsing data, and try to call responding message
|
[
"Parsing",
"data",
"and",
"try",
"to",
"call",
"responding",
"message"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L26-L41
|
241,109
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSRoomHandler.join
|
def join(self, _id):
    """Register this handler as a member of room ``_id``.

    The room key is namespaced with ``self._gcls()``; the room (a set
    of handlers) is created on first join.
    """
    key = self._gcls() + _id
    # dict.has_key() was removed in Python 3 -- setdefault covers both
    # the create-on-first-join and the add cases.
    SockJSRoomHandler._room.setdefault(key, set()).add(self)
|
python
|
def join(self, _id):
""" Join a room """
if not SockJSRoomHandler._room.has_key(self._gcls() + _id):
SockJSRoomHandler._room[self._gcls() + _id] = set()
SockJSRoomHandler._room[self._gcls() + _id].add(self)
|
[
"def",
"join",
"(",
"self",
",",
"_id",
")",
":",
"if",
"not",
"SockJSRoomHandler",
".",
"_room",
".",
"has_key",
"(",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
")",
":",
"SockJSRoomHandler",
".",
"_room",
"[",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
"]",
"=",
"set",
"(",
")",
"SockJSRoomHandler",
".",
"_room",
"[",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
"]",
".",
"add",
"(",
"self",
")"
] |
Join a room
|
[
"Join",
"a",
"room"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L61-L65
|
241,110
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSRoomHandler.leave
|
def leave(self, _id):
    """Remove this handler from room ``_id``, deleting the room when empty."""
    key = self._gcls() + _id
    # dict.has_key() was removed in Python 3; use the ``in`` operator.
    if key in SockJSRoomHandler._room:
        SockJSRoomHandler._room[key].remove(self)
        # Garbage-collect the room once its last member leaves.
        if not SockJSRoomHandler._room[key]:
            del SockJSRoomHandler._room[key]
|
python
|
def leave(self, _id):
""" Leave a room """
if SockJSRoomHandler._room.has_key(self._gcls() + _id):
SockJSRoomHandler._room[self._gcls() + _id].remove(self)
if len(SockJSRoomHandler._room[self._gcls() + _id]) == 0:
del SockJSRoomHandler._room[self._gcls() + _id]
|
[
"def",
"leave",
"(",
"self",
",",
"_id",
")",
":",
"if",
"SockJSRoomHandler",
".",
"_room",
".",
"has_key",
"(",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
")",
":",
"SockJSRoomHandler",
".",
"_room",
"[",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
"]",
".",
"remove",
"(",
"self",
")",
"if",
"len",
"(",
"SockJSRoomHandler",
".",
"_room",
"[",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
"]",
")",
"==",
"0",
":",
"del",
"SockJSRoomHandler",
".",
"_room",
"[",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
"]"
] |
Leave a room
|
[
"Leave",
"a",
"room"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L67-L72
|
241,111
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSRoomHandler.getRoom
|
def getRoom(self, _id):
    """Return the set of handlers in room ``_id``, or None if it does not exist."""
    # dict.get() replaces the Python-2-only has_key() check; it returns
    # None for a missing room, matching the original behavior.
    return SockJSRoomHandler._room.get(self._gcls() + _id)
|
python
|
def getRoom(self, _id):
""" Retrieve a room from it's id """
if SockJSRoomHandler._room.has_key(self._gcls() + _id):
return SockJSRoomHandler._room[self._gcls() + _id]
return None
|
[
"def",
"getRoom",
"(",
"self",
",",
"_id",
")",
":",
"if",
"SockJSRoomHandler",
".",
"_room",
".",
"has_key",
"(",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
")",
":",
"return",
"SockJSRoomHandler",
".",
"_room",
"[",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
"]",
"return",
"None"
] |
Retrieve a room from it's id
|
[
"Retrieve",
"a",
"room",
"from",
"it",
"s",
"id"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L74-L78
|
241,112
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSRoomHandler.publishToRoom
|
def publishToRoom(self, roomId, name, data, userList=None):
    """Broadcast ``data`` as a message called ``name`` to users of a room.

    When ``userList`` is None, the full membership of ``roomId`` is used.
    """
    recipients = self.getRoom(roomId) if userList is None else userList
    # Publish data to all room users
    logging.debug("%s: broadcasting (name: %s, data: %s, number of users: %s)" % (self._gcls(), name, data, len(recipients)))
    payload = {
        "name": name,
        "data": SockJSRoomHandler._parser.encode(data)
    }
    self.broadcast(recipients, payload)
|
python
|
def publishToRoom(self, roomId, name, data, userList=None):
""" Publish to given room data submitted """
if userList is None:
userList = self.getRoom(roomId)
# Publish data to all room users
logging.debug("%s: broadcasting (name: %s, data: %s, number of users: %s)" % (self._gcls(), name, data, len(userList)))
self.broadcast(userList, {
"name": name,
"data": SockJSRoomHandler._parser.encode(data)
})
|
[
"def",
"publishToRoom",
"(",
"self",
",",
"roomId",
",",
"name",
",",
"data",
",",
"userList",
"=",
"None",
")",
":",
"if",
"userList",
"is",
"None",
":",
"userList",
"=",
"self",
".",
"getRoom",
"(",
"roomId",
")",
"# Publish data to all room users",
"logging",
".",
"debug",
"(",
"\"%s: broadcasting (name: %s, data: %s, number of users: %s)\"",
"%",
"(",
"self",
".",
"_gcls",
"(",
")",
",",
"name",
",",
"data",
",",
"len",
"(",
"userList",
")",
")",
")",
"self",
".",
"broadcast",
"(",
"userList",
",",
"{",
"\"name\"",
":",
"name",
",",
"\"data\"",
":",
"SockJSRoomHandler",
".",
"_parser",
".",
"encode",
"(",
"data",
")",
"}",
")"
] |
Publish to given room data submitted
|
[
"Publish",
"to",
"given",
"room",
"data",
"submitted"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L80-L90
|
241,113
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSRoomHandler.publishToOther
|
def publishToOther(self, roomId, name, data):
    """Broadcast to every user in the room except this handler itself."""
    members = self.getRoom(roomId)
    # Keep everyone but me (identity comparison, as membership is by object).
    audience = list(filter(lambda member: member is not self, members))
    self.publishToRoom(roomId, name, data, audience)
|
python
|
def publishToOther(self, roomId, name, data):
""" Publish to only other people than myself """
tmpList = self.getRoom(roomId)
# Select everybody except me
userList = [x for x in tmpList if x is not self]
self.publishToRoom(roomId, name, data, userList)
|
[
"def",
"publishToOther",
"(",
"self",
",",
"roomId",
",",
"name",
",",
"data",
")",
":",
"tmpList",
"=",
"self",
".",
"getRoom",
"(",
"roomId",
")",
"# Select everybody except me",
"userList",
"=",
"[",
"x",
"for",
"x",
"in",
"tmpList",
"if",
"x",
"is",
"not",
"self",
"]",
"self",
".",
"publishToRoom",
"(",
"roomId",
",",
"name",
",",
"data",
",",
"userList",
")"
] |
Publish to only other people than myself
|
[
"Publish",
"to",
"only",
"other",
"people",
"than",
"myself"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L92-L97
|
241,114
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSRoomHandler.publishToMyself
|
def publishToMyself(self, roomId, name, data):
    """Send the message only to this handler, ignoring other room members."""
    solo_audience = [self]
    self.publishToRoom(roomId, name, data, solo_audience)
|
python
|
def publishToMyself(self, roomId, name, data):
""" Publish to only myself """
self.publishToRoom(roomId, name, data, [self])
|
[
"def",
"publishToMyself",
"(",
"self",
",",
"roomId",
",",
"name",
",",
"data",
")",
":",
"self",
".",
"publishToRoom",
"(",
"roomId",
",",
"name",
",",
"data",
",",
"[",
"self",
"]",
")"
] |
Publish to only myself
|
[
"Publish",
"to",
"only",
"myself"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L99-L101
|
241,115
|
Deisss/python-sockjsroom
|
sockjsroom/socketHandler.py
|
SockJSRoomHandler.isInRoom
|
def isInRoom(self, _id):
    """Return True if this handler is a member of room ``_id``, else False."""
    key = self._gcls() + _id
    # dict.has_key() was removed in Python 3; use the ``in`` operator.
    room = SockJSRoomHandler._room
    return key in room and self in room[key]
|
python
|
def isInRoom(self, _id):
""" Check a given user is in given room """
if SockJSRoomHandler._room.has_key(self._gcls() + _id):
if self in SockJSRoomHandler._room[self._gcls() + _id]:
return True
return False
|
[
"def",
"isInRoom",
"(",
"self",
",",
"_id",
")",
":",
"if",
"SockJSRoomHandler",
".",
"_room",
".",
"has_key",
"(",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
")",
":",
"if",
"self",
"in",
"SockJSRoomHandler",
".",
"_room",
"[",
"self",
".",
"_gcls",
"(",
")",
"+",
"_id",
"]",
":",
"return",
"True",
"return",
"False"
] |
Check a given user is in given room
|
[
"Check",
"a",
"given",
"user",
"is",
"in",
"given",
"room"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/sockjsroom/socketHandler.py#L103-L108
|
241,116
|
dcrosta/sendlib
|
sendlib.py
|
Data.readline
|
def readline(self, size=None):
    """Read one line from the stream, trailing newline included.

    At most ``size`` bytes are returned when ``size`` is given (and
    truthy), even if that cuts the line short, and never more than the
    bytes remaining in this segment.  Returns '' once the segment is
    exhausted; the final line may lack a trailing newline if the
    underlying stream ends without one.
    """
    remaining = self.length - self._pos
    if remaining <= 0:
        return ''
    limit = min(size, remaining) if size else remaining
    line = self.stream.readline(limit)
    self._pos += len(line)
    return line
|
python
|
def readline(self, size=None):
"""
Read a line from the stream, including the trailing
new line character. If `size` is set, don't read more
than `size` bytes, even if the result does not represent
a complete line.
The last line read may not include a trailing new line
character if one was not present in the underlying stream.
"""
if self._pos >= self.length:
return ''
if size:
amount = min(size, (self.length - self._pos))
else:
amount = self.length - self._pos
out = self.stream.readline(amount)
self._pos += len(out)
return out
|
[
"def",
"readline",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"if",
"self",
".",
"_pos",
">=",
"self",
".",
"length",
":",
"return",
"''",
"if",
"size",
":",
"amount",
"=",
"min",
"(",
"size",
",",
"(",
"self",
".",
"length",
"-",
"self",
".",
"_pos",
")",
")",
"else",
":",
"amount",
"=",
"self",
".",
"length",
"-",
"self",
".",
"_pos",
"out",
"=",
"self",
".",
"stream",
".",
"readline",
"(",
"amount",
")",
"self",
".",
"_pos",
"+=",
"len",
"(",
"out",
")",
"return",
"out"
] |
Read a line from the stream, including the trailing
new line character. If `size` is set, don't read more
than `size` bytes, even if the result does not represent
a complete line.
The last line read may not include a trailing new line
character if one was not present in the underlying stream.
|
[
"Read",
"a",
"line",
"from",
"the",
"stream",
"including",
"the",
"trailing",
"new",
"line",
"character",
".",
"If",
"size",
"is",
"set",
"don",
"t",
"read",
"more",
"than",
"size",
"bytes",
"even",
"if",
"the",
"result",
"does",
"not",
"represent",
"a",
"complete",
"line",
"."
] |
51ea5412a70cf83a62d51d5c515c0eeac725aea0
|
https://github.com/dcrosta/sendlib/blob/51ea5412a70cf83a62d51d5c515c0eeac725aea0/sendlib.py#L338-L356
|
241,117
|
JNRowe/jnrbase
|
jnrbase/git.py
|
find_tag
|
def find_tag(__matcher: str = 'v[0-9]*', *, strict: bool = True,
             git_dir: str = '.') -> str:
    """Find the closest tag of a git repository.

    Note:
        Matches `Semantic Version`_-style tags by default.

    Args:
        __matcher: Glob-style tag pattern to match
        strict: When True, propagate the failure instead of falling
            back to a bare commit-ish
        git_dir: Repository to search

    Returns:
        Matching tag name

    .. _Semantic Version: http://semver.org/
    """
    describe = 'git describe --abbrev=12 --dirty'.split()
    with chdir(git_dir):
        try:
            raw = check_output(describe + ['--match={}'.format(__matcher), ])
        except CalledProcessError:
            if strict:
                raise
            # Fall back to the abbreviated commit hash.
            raw = check_output(describe + ['--always', ])
    return raw.decode('ascii', 'replace').strip()
|
python
|
def find_tag(__matcher: str = 'v[0-9]*', *, strict: bool = True,
git_dir: str = '.') -> str:
"""Find closest tag for a git repository.
Note:
This defaults to `Semantic Version`_ tag matching.
Args:
__matcher: Glob-style tag pattern to match
strict: Allow commit-ish, if no tag found
git_dir: Repository to search
Returns:
Matching tag name
.. _Semantic Version: http://semver.org/
"""
command = 'git describe --abbrev=12 --dirty'.split()
with chdir(git_dir):
try:
stdout = check_output(command + ['--match={}'.format(__matcher), ])
except CalledProcessError:
if strict:
raise
stdout = check_output(command + ['--always', ])
stdout = stdout.decode('ascii', 'replace')
return stdout.strip()
|
[
"def",
"find_tag",
"(",
"__matcher",
":",
"str",
"=",
"'v[0-9]*'",
",",
"*",
",",
"strict",
":",
"bool",
"=",
"True",
",",
"git_dir",
":",
"str",
"=",
"'.'",
")",
"->",
"str",
":",
"command",
"=",
"'git describe --abbrev=12 --dirty'",
".",
"split",
"(",
")",
"with",
"chdir",
"(",
"git_dir",
")",
":",
"try",
":",
"stdout",
"=",
"check_output",
"(",
"command",
"+",
"[",
"'--match={}'",
".",
"format",
"(",
"__matcher",
")",
",",
"]",
")",
"except",
"CalledProcessError",
":",
"if",
"strict",
":",
"raise",
"stdout",
"=",
"check_output",
"(",
"command",
"+",
"[",
"'--always'",
",",
"]",
")",
"stdout",
"=",
"stdout",
".",
"decode",
"(",
"'ascii'",
",",
"'replace'",
")",
"return",
"stdout",
".",
"strip",
"(",
")"
] |
Find closest tag for a git repository.
Note:
This defaults to `Semantic Version`_ tag matching.
Args:
__matcher: Glob-style tag pattern to match
strict: Allow commit-ish, if no tag found
git_dir: Repository to search
Returns:
Matching tag name
.. _Semantic Version: http://semver.org/
|
[
"Find",
"closest",
"tag",
"for",
"a",
"git",
"repository",
"."
] |
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
|
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/git.py#L26-L52
|
241,118
|
minus7/asif
|
asif/bot.py
|
Channel.on_message
|
def on_message(self, *args, accept_query=False, matcher=None, **kwargs):
    """
    Convenience wrapper of `Client.on_message` pre-bound with `channel=self.name`.
    """
    if not accept_query:
        # Restrict to this channel (unless the caller set one) and keep
        # the caller's matcher untouched.
        kwargs.setdefault("channel", self.name)
        return self.client.on_message(*args, matcher=matcher, **kwargs)

    def query_matcher(msg: Message):
        verdict = True
        if matcher:
            verdict = matcher(msg)
            if verdict is None or verdict is False:
                return verdict
        if msg.recipient is not self and not isinstance(msg.sender, User):
            return False
        return verdict

    return self.client.on_message(*args, matcher=query_matcher, **kwargs)
|
python
|
def on_message(self, *args, accept_query=False, matcher=None, **kwargs):
"""
Convenience wrapper of `Client.on_message` pre-bound with `channel=self.name`.
"""
if accept_query:
def new_matcher(msg: Message):
ret = True
if matcher:
ret = matcher(msg)
if ret is None or ret is False:
return ret
if msg.recipient is not self and not isinstance(msg.sender, User):
return False
return ret
else:
kwargs.setdefault("channel", self.name)
new_matcher = matcher
return self.client.on_message(*args, matcher=new_matcher, **kwargs)
|
[
"def",
"on_message",
"(",
"self",
",",
"*",
"args",
",",
"accept_query",
"=",
"False",
",",
"matcher",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"accept_query",
":",
"def",
"new_matcher",
"(",
"msg",
":",
"Message",
")",
":",
"ret",
"=",
"True",
"if",
"matcher",
":",
"ret",
"=",
"matcher",
"(",
"msg",
")",
"if",
"ret",
"is",
"None",
"or",
"ret",
"is",
"False",
":",
"return",
"ret",
"if",
"msg",
".",
"recipient",
"is",
"not",
"self",
"and",
"not",
"isinstance",
"(",
"msg",
".",
"sender",
",",
"User",
")",
":",
"return",
"False",
"return",
"ret",
"else",
":",
"kwargs",
".",
"setdefault",
"(",
"\"channel\"",
",",
"self",
".",
"name",
")",
"new_matcher",
"=",
"matcher",
"return",
"self",
".",
"client",
".",
"on_message",
"(",
"*",
"args",
",",
"matcher",
"=",
"new_matcher",
",",
"*",
"*",
"kwargs",
")"
] |
Convenience wrapper of `Client.on_message` pre-bound with `channel=self.name`.
|
[
"Convenience",
"wrapper",
"of",
"Client",
".",
"on_message",
"pre",
"-",
"bound",
"with",
"channel",
"=",
"self",
".",
"name",
"."
] |
0d8acc5306ba93386ec679f69d466b56f099b877
|
https://github.com/minus7/asif/blob/0d8acc5306ba93386ec679f69d466b56f099b877/asif/bot.py#L56-L74
|
241,119
|
minus7/asif
|
asif/bot.py
|
Client.await_message
|
def await_message(self, *args, **kwargs) -> 'asyncio.Future[Message]':
    """
    Return a future that resolves with the next matching message.
    See `on_message` for the accepted arguments.
    """
    future = asyncio.Future()

    @self.on_message(*args, **kwargs)
    async def resolver(message):
        future.set_result(message)

    # Unregister the handler once the future resolves or is cancelled.
    future.add_done_callback(lambda _: self.remove_message_handler(resolver))
    return future
|
python
|
def await_message(self, *args, **kwargs) -> 'asyncio.Future[Message]':
"""
Block until a message matches. See `on_message`
"""
fut = asyncio.Future()
@self.on_message(*args, **kwargs)
async def handler(message):
fut.set_result(message)
# remove handler when done or cancelled
fut.add_done_callback(lambda _: self.remove_message_handler(handler))
return fut
|
[
"def",
"await_message",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"->",
"'asyncio.Future[Message]'",
":",
"fut",
"=",
"asyncio",
".",
"Future",
"(",
")",
"@",
"self",
".",
"on_message",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"async",
"def",
"handler",
"(",
"message",
")",
":",
"fut",
".",
"set_result",
"(",
"message",
")",
"# remove handler when done or cancelled",
"fut",
".",
"add_done_callback",
"(",
"lambda",
"_",
":",
"self",
".",
"remove_message_handler",
"(",
"handler",
")",
")",
"return",
"fut"
] |
Block until a message matches. See `on_message`
|
[
"Block",
"until",
"a",
"message",
"matches",
".",
"See",
"on_message"
] |
0d8acc5306ba93386ec679f69d466b56f099b877
|
https://github.com/minus7/asif/blob/0d8acc5306ba93386ec679f69d466b56f099b877/asif/bot.py#L287-L297
|
241,120
|
minus7/asif
|
asif/bot.py
|
Client.await_command
|
def await_command(self, *args, **kwargs) -> 'asyncio.Future[IrcMessage]':
"""
Block until a command matches. See `on_command`
"""
fut = asyncio.Future()
@self.on_command(*args, **kwargs)
async def handler(msg):
fut.set_result(msg)
# remove handler when done or cancelled
fut.add_done_callback(lambda _: self.remove_command_handler(handler))
return fut
|
python
|
def await_command(self, *args, **kwargs) -> 'asyncio.Future[IrcMessage]':
"""
Block until a command matches. See `on_command`
"""
fut = asyncio.Future()
@self.on_command(*args, **kwargs)
async def handler(msg):
fut.set_result(msg)
# remove handler when done or cancelled
fut.add_done_callback(lambda _: self.remove_command_handler(handler))
return fut
|
[
"def",
"await_command",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"->",
"'asyncio.Future[IrcMessage]'",
":",
"fut",
"=",
"asyncio",
".",
"Future",
"(",
")",
"@",
"self",
".",
"on_command",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"async",
"def",
"handler",
"(",
"msg",
")",
":",
"fut",
".",
"set_result",
"(",
"msg",
")",
"# remove handler when done or cancelled",
"fut",
".",
"add_done_callback",
"(",
"lambda",
"_",
":",
"self",
".",
"remove_command_handler",
"(",
"handler",
")",
")",
"return",
"fut"
] |
Block until a command matches. See `on_command`
|
[
"Block",
"until",
"a",
"command",
"matches",
".",
"See",
"on_command"
] |
0d8acc5306ba93386ec679f69d466b56f099b877
|
https://github.com/minus7/asif/blob/0d8acc5306ba93386ec679f69d466b56f099b877/asif/bot.py#L348-L358
|
241,121
|
minus7/asif
|
asif/bot.py
|
Client.message
|
async def message(self, recipient: str, text: str, notice: bool=False) -> None:
"""
Lower level messaging function used by User and Channel
"""
await self._send(cc.PRIVMSG if not notice else cc.NOTICE, recipient, rest=text)
|
python
|
async def message(self, recipient: str, text: str, notice: bool=False) -> None:
"""
Lower level messaging function used by User and Channel
"""
await self._send(cc.PRIVMSG if not notice else cc.NOTICE, recipient, rest=text)
|
[
"async",
"def",
"message",
"(",
"self",
",",
"recipient",
":",
"str",
",",
"text",
":",
"str",
",",
"notice",
":",
"bool",
"=",
"False",
")",
"->",
"None",
":",
"await",
"self",
".",
"_send",
"(",
"cc",
".",
"PRIVMSG",
"if",
"not",
"notice",
"else",
"cc",
".",
"NOTICE",
",",
"recipient",
",",
"rest",
"=",
"text",
")"
] |
Lower level messaging function used by User and Channel
|
[
"Lower",
"level",
"messaging",
"function",
"used",
"by",
"User",
"and",
"Channel"
] |
0d8acc5306ba93386ec679f69d466b56f099b877
|
https://github.com/minus7/asif/blob/0d8acc5306ba93386ec679f69d466b56f099b877/asif/bot.py#L389-L393
|
241,122
|
minus7/asif
|
asif/bot.py
|
Client._bg
|
def _bg(self, coro: coroutine) -> asyncio.Task:
"""Run coro in background, log errors"""
async def runner():
try:
await coro
except:
self._log.exception("async: Coroutine raised exception")
return asyncio.ensure_future(runner())
|
python
|
def _bg(self, coro: coroutine) -> asyncio.Task:
"""Run coro in background, log errors"""
async def runner():
try:
await coro
except:
self._log.exception("async: Coroutine raised exception")
return asyncio.ensure_future(runner())
|
[
"def",
"_bg",
"(",
"self",
",",
"coro",
":",
"coroutine",
")",
"->",
"asyncio",
".",
"Task",
":",
"async",
"def",
"runner",
"(",
")",
":",
"try",
":",
"await",
"coro",
"except",
":",
"self",
".",
"_log",
".",
"exception",
"(",
"\"async: Coroutine raised exception\"",
")",
"return",
"asyncio",
".",
"ensure_future",
"(",
"runner",
"(",
")",
")"
] |
Run coro in background, log errors
|
[
"Run",
"coro",
"in",
"background",
"log",
"errors"
] |
0d8acc5306ba93386ec679f69d466b56f099b877
|
https://github.com/minus7/asif/blob/0d8acc5306ba93386ec679f69d466b56f099b877/asif/bot.py#L450-L457
|
241,123
|
minus7/asif
|
asif/bot.py
|
Module._populate
|
def _populate(self, client):
"""
Populate module with the client when available
"""
self.client = client
for fn in self._buffered_calls:
self._log.debug("Executing buffered call {}".format(fn))
fn()
|
python
|
def _populate(self, client):
"""
Populate module with the client when available
"""
self.client = client
for fn in self._buffered_calls:
self._log.debug("Executing buffered call {}".format(fn))
fn()
|
[
"def",
"_populate",
"(",
"self",
",",
"client",
")",
":",
"self",
".",
"client",
"=",
"client",
"for",
"fn",
"in",
"self",
".",
"_buffered_calls",
":",
"self",
".",
"_log",
".",
"debug",
"(",
"\"Executing buffered call {}\"",
".",
"format",
"(",
"fn",
")",
")",
"fn",
"(",
")"
] |
Populate module with the client when available
|
[
"Populate",
"module",
"with",
"the",
"client",
"when",
"available"
] |
0d8acc5306ba93386ec679f69d466b56f099b877
|
https://github.com/minus7/asif/blob/0d8acc5306ba93386ec679f69d466b56f099b877/asif/bot.py#L680-L687
|
241,124
|
b3j0f/schema
|
b3j0f/schema/utils.py
|
datatype2schemacls
|
def datatype2schemacls(
_datatype, _registry=None, _factory=None, _force=True,
_besteffort=True, **kwargs
):
"""Get a schema class which has been associated to input data type by the
registry or the factory in this order.
:param type datatype: data type from where get associated schema.
:param SchemaRegisgry _registry: registry from where call the getbydatatype
. Default is the global registry.
:param SchemaFactory _factory: factory from where call the getschemacls if
getbydatatype returns None. Default is the global factory.
:param bool _force: if true (default), force the building of schema class
if no schema is associated to input data type.
:param bool _besteffort: if True (default), try to resolve schema by
inheritance.
:param dict kwargs: factory builder kwargs.
:rtype: type
:return: Schema associated to input registry or factory. None if no
association found.
"""
result = None
gdbt = getbydatatype if _registry is None else _registry.getbydatatype
result = gdbt(_datatype, besteffort=_besteffort)
if result is None:
gscls = getschemacls if _factory is None else _factory.getschemacls
result = gscls(_datatype, besteffort=_besteffort)
if result is None and _force:
_build = build if _factory is None else _factory.build
result = _build(_resource=_datatype, **kwargs)
return result
|
python
|
def datatype2schemacls(
_datatype, _registry=None, _factory=None, _force=True,
_besteffort=True, **kwargs
):
"""Get a schema class which has been associated to input data type by the
registry or the factory in this order.
:param type datatype: data type from where get associated schema.
:param SchemaRegisgry _registry: registry from where call the getbydatatype
. Default is the global registry.
:param SchemaFactory _factory: factory from where call the getschemacls if
getbydatatype returns None. Default is the global factory.
:param bool _force: if true (default), force the building of schema class
if no schema is associated to input data type.
:param bool _besteffort: if True (default), try to resolve schema by
inheritance.
:param dict kwargs: factory builder kwargs.
:rtype: type
:return: Schema associated to input registry or factory. None if no
association found.
"""
result = None
gdbt = getbydatatype if _registry is None else _registry.getbydatatype
result = gdbt(_datatype, besteffort=_besteffort)
if result is None:
gscls = getschemacls if _factory is None else _factory.getschemacls
result = gscls(_datatype, besteffort=_besteffort)
if result is None and _force:
_build = build if _factory is None else _factory.build
result = _build(_resource=_datatype, **kwargs)
return result
|
[
"def",
"datatype2schemacls",
"(",
"_datatype",
",",
"_registry",
"=",
"None",
",",
"_factory",
"=",
"None",
",",
"_force",
"=",
"True",
",",
"_besteffort",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"result",
"=",
"None",
"gdbt",
"=",
"getbydatatype",
"if",
"_registry",
"is",
"None",
"else",
"_registry",
".",
"getbydatatype",
"result",
"=",
"gdbt",
"(",
"_datatype",
",",
"besteffort",
"=",
"_besteffort",
")",
"if",
"result",
"is",
"None",
":",
"gscls",
"=",
"getschemacls",
"if",
"_factory",
"is",
"None",
"else",
"_factory",
".",
"getschemacls",
"result",
"=",
"gscls",
"(",
"_datatype",
",",
"besteffort",
"=",
"_besteffort",
")",
"if",
"result",
"is",
"None",
"and",
"_force",
":",
"_build",
"=",
"build",
"if",
"_factory",
"is",
"None",
"else",
"_factory",
".",
"build",
"result",
"=",
"_build",
"(",
"_resource",
"=",
"_datatype",
",",
"*",
"*",
"kwargs",
")",
"return",
"result"
] |
Get a schema class which has been associated to input data type by the
registry or the factory in this order.
:param type datatype: data type from where get associated schema.
:param SchemaRegisgry _registry: registry from where call the getbydatatype
. Default is the global registry.
:param SchemaFactory _factory: factory from where call the getschemacls if
getbydatatype returns None. Default is the global factory.
:param bool _force: if true (default), force the building of schema class
if no schema is associated to input data type.
:param bool _besteffort: if True (default), try to resolve schema by
inheritance.
:param dict kwargs: factory builder kwargs.
:rtype: type
:return: Schema associated to input registry or factory. None if no
association found.
|
[
"Get",
"a",
"schema",
"class",
"which",
"has",
"been",
"associated",
"to",
"input",
"data",
"type",
"by",
"the",
"registry",
"or",
"the",
"factory",
"in",
"this",
"order",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/utils.py#L51-L87
|
241,125
|
b3j0f/schema
|
b3j0f/schema/utils.py
|
data2schema
|
def data2schema(
_data=None, _force=False, _besteffort=True, _registry=None,
_factory=None, _buildkwargs=None, **kwargs
):
"""Get the schema able to instanciate input data.
The default value of schema will be data.
Can be used such as a decorator:
..code-block:: python
@data2schema
def example(): pass # return a function schema
@data2schema(_registry=myregistry)
def example(): pass # return a function schema with specific registry
..warning::
return this function id _data is None.
:param _data: data possibly generated by a schema. Required but in case of
decorator.
:param bool _force: if True (False by default), create the data schema
on the fly if it does not exist.
:param bool _besteffort: if True (default), find a schema class able to
validate data class by inheritance.
:param SchemaRegistry _registry: default registry to use. Global by
default.
:param SchemaFactory factory: default factory to use. Global by default.
:param dict _buildkwargs: factory builder kwargs.
:param kwargs: schema class kwargs.
:return: Schema.
:rtype: Schema.
"""
if _data is None:
return lambda _data: data2schema(
_data, _force=False, _besteffort=True, _registry=None,
_factory=None, _buildkwargs=None, **kwargs
)
result = None
fdata = _data() if isinstance(_data, DynamicValue) else _data
datatype = type(fdata)
content = getattr(fdata, '__dict__', {})
if _buildkwargs:
content.udpate(_buildkwargs)
schemacls = datatype2schemacls(
_datatype=datatype, _registry=_registry, _factory=_factory,
_force=_force, _besteffort=_besteffort, **content
)
if schemacls is not None:
result = schemacls(default=_data, **kwargs)
for attrname in dir(_data):
if not hasattr(schemacls, attrname):
attr = getattr(_data, attrname)
if attr is not None:
setattr(result, attrname, attr)
if result is None and _data is None:
result = AnySchema()
return result
|
python
|
def data2schema(
_data=None, _force=False, _besteffort=True, _registry=None,
_factory=None, _buildkwargs=None, **kwargs
):
"""Get the schema able to instanciate input data.
The default value of schema will be data.
Can be used such as a decorator:
..code-block:: python
@data2schema
def example(): pass # return a function schema
@data2schema(_registry=myregistry)
def example(): pass # return a function schema with specific registry
..warning::
return this function id _data is None.
:param _data: data possibly generated by a schema. Required but in case of
decorator.
:param bool _force: if True (False by default), create the data schema
on the fly if it does not exist.
:param bool _besteffort: if True (default), find a schema class able to
validate data class by inheritance.
:param SchemaRegistry _registry: default registry to use. Global by
default.
:param SchemaFactory factory: default factory to use. Global by default.
:param dict _buildkwargs: factory builder kwargs.
:param kwargs: schema class kwargs.
:return: Schema.
:rtype: Schema.
"""
if _data is None:
return lambda _data: data2schema(
_data, _force=False, _besteffort=True, _registry=None,
_factory=None, _buildkwargs=None, **kwargs
)
result = None
fdata = _data() if isinstance(_data, DynamicValue) else _data
datatype = type(fdata)
content = getattr(fdata, '__dict__', {})
if _buildkwargs:
content.udpate(_buildkwargs)
schemacls = datatype2schemacls(
_datatype=datatype, _registry=_registry, _factory=_factory,
_force=_force, _besteffort=_besteffort, **content
)
if schemacls is not None:
result = schemacls(default=_data, **kwargs)
for attrname in dir(_data):
if not hasattr(schemacls, attrname):
attr = getattr(_data, attrname)
if attr is not None:
setattr(result, attrname, attr)
if result is None and _data is None:
result = AnySchema()
return result
|
[
"def",
"data2schema",
"(",
"_data",
"=",
"None",
",",
"_force",
"=",
"False",
",",
"_besteffort",
"=",
"True",
",",
"_registry",
"=",
"None",
",",
"_factory",
"=",
"None",
",",
"_buildkwargs",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"_data",
"is",
"None",
":",
"return",
"lambda",
"_data",
":",
"data2schema",
"(",
"_data",
",",
"_force",
"=",
"False",
",",
"_besteffort",
"=",
"True",
",",
"_registry",
"=",
"None",
",",
"_factory",
"=",
"None",
",",
"_buildkwargs",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
"result",
"=",
"None",
"fdata",
"=",
"_data",
"(",
")",
"if",
"isinstance",
"(",
"_data",
",",
"DynamicValue",
")",
"else",
"_data",
"datatype",
"=",
"type",
"(",
"fdata",
")",
"content",
"=",
"getattr",
"(",
"fdata",
",",
"'__dict__'",
",",
"{",
"}",
")",
"if",
"_buildkwargs",
":",
"content",
".",
"udpate",
"(",
"_buildkwargs",
")",
"schemacls",
"=",
"datatype2schemacls",
"(",
"_datatype",
"=",
"datatype",
",",
"_registry",
"=",
"_registry",
",",
"_factory",
"=",
"_factory",
",",
"_force",
"=",
"_force",
",",
"_besteffort",
"=",
"_besteffort",
",",
"*",
"*",
"content",
")",
"if",
"schemacls",
"is",
"not",
"None",
":",
"result",
"=",
"schemacls",
"(",
"default",
"=",
"_data",
",",
"*",
"*",
"kwargs",
")",
"for",
"attrname",
"in",
"dir",
"(",
"_data",
")",
":",
"if",
"not",
"hasattr",
"(",
"schemacls",
",",
"attrname",
")",
":",
"attr",
"=",
"getattr",
"(",
"_data",
",",
"attrname",
")",
"if",
"attr",
"is",
"not",
"None",
":",
"setattr",
"(",
"result",
",",
"attrname",
",",
"attr",
")",
"if",
"result",
"is",
"None",
"and",
"_data",
"is",
"None",
":",
"result",
"=",
"AnySchema",
"(",
")",
"return",
"result"
] |
Get the schema able to instanciate input data.
The default value of schema will be data.
Can be used such as a decorator:
..code-block:: python
@data2schema
def example(): pass # return a function schema
@data2schema(_registry=myregistry)
def example(): pass # return a function schema with specific registry
..warning::
return this function id _data is None.
:param _data: data possibly generated by a schema. Required but in case of
decorator.
:param bool _force: if True (False by default), create the data schema
on the fly if it does not exist.
:param bool _besteffort: if True (default), find a schema class able to
validate data class by inheritance.
:param SchemaRegistry _registry: default registry to use. Global by
default.
:param SchemaFactory factory: default factory to use. Global by default.
:param dict _buildkwargs: factory builder kwargs.
:param kwargs: schema class kwargs.
:return: Schema.
:rtype: Schema.
|
[
"Get",
"the",
"schema",
"able",
"to",
"instanciate",
"input",
"data",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/utils.py#L90-L160
|
241,126
|
b3j0f/schema
|
b3j0f/schema/utils.py
|
data2schemacls
|
def data2schemacls(_data, **kwargs):
"""Convert a data to a schema cls.
:param data: object or dictionary from where get a schema cls.
:return: schema class.
:rtype: type
"""
content = {}
for key in list(kwargs): # fill kwargs
kwargs[key] = data2schema(kwargs[key])
if isinstance(_data, dict):
datacontent = iteritems(_data)
else:
datacontent = getmembers(_data)
for name, value in datacontent:
if name[0] == '_':
continue
if isinstance(value, dict):
schema = data2schemacls(value)()
else:
schema = data2schema(value)
content[name] = schema
content.update(kwargs) # update content
result = type('GeneratedSchema', (Schema,), content)
return result
|
python
|
def data2schemacls(_data, **kwargs):
"""Convert a data to a schema cls.
:param data: object or dictionary from where get a schema cls.
:return: schema class.
:rtype: type
"""
content = {}
for key in list(kwargs): # fill kwargs
kwargs[key] = data2schema(kwargs[key])
if isinstance(_data, dict):
datacontent = iteritems(_data)
else:
datacontent = getmembers(_data)
for name, value in datacontent:
if name[0] == '_':
continue
if isinstance(value, dict):
schema = data2schemacls(value)()
else:
schema = data2schema(value)
content[name] = schema
content.update(kwargs) # update content
result = type('GeneratedSchema', (Schema,), content)
return result
|
[
"def",
"data2schemacls",
"(",
"_data",
",",
"*",
"*",
"kwargs",
")",
":",
"content",
"=",
"{",
"}",
"for",
"key",
"in",
"list",
"(",
"kwargs",
")",
":",
"# fill kwargs",
"kwargs",
"[",
"key",
"]",
"=",
"data2schema",
"(",
"kwargs",
"[",
"key",
"]",
")",
"if",
"isinstance",
"(",
"_data",
",",
"dict",
")",
":",
"datacontent",
"=",
"iteritems",
"(",
"_data",
")",
"else",
":",
"datacontent",
"=",
"getmembers",
"(",
"_data",
")",
"for",
"name",
",",
"value",
"in",
"datacontent",
":",
"if",
"name",
"[",
"0",
"]",
"==",
"'_'",
":",
"continue",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"schema",
"=",
"data2schemacls",
"(",
"value",
")",
"(",
")",
"else",
":",
"schema",
"=",
"data2schema",
"(",
"value",
")",
"content",
"[",
"name",
"]",
"=",
"schema",
"content",
".",
"update",
"(",
"kwargs",
")",
"# update content",
"result",
"=",
"type",
"(",
"'GeneratedSchema'",
",",
"(",
"Schema",
",",
")",
",",
"content",
")",
"return",
"result"
] |
Convert a data to a schema cls.
:param data: object or dictionary from where get a schema cls.
:return: schema class.
:rtype: type
|
[
"Convert",
"a",
"data",
"to",
"a",
"schema",
"cls",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/utils.py#L163-L198
|
241,127
|
b3j0f/schema
|
b3j0f/schema/utils.py
|
validate
|
def validate(schema, data, owner=None):
"""Validate input data with input schema.
:param Schema schema: schema able to validate input data.
:param data: data to validate.
:param Schema owner: input schema parent schema.
:raises: Exception if the data is not validated.
"""
schema._validate(data=data, owner=owner)
|
python
|
def validate(schema, data, owner=None):
"""Validate input data with input schema.
:param Schema schema: schema able to validate input data.
:param data: data to validate.
:param Schema owner: input schema parent schema.
:raises: Exception if the data is not validated.
"""
schema._validate(data=data, owner=owner)
|
[
"def",
"validate",
"(",
"schema",
",",
"data",
",",
"owner",
"=",
"None",
")",
":",
"schema",
".",
"_validate",
"(",
"data",
"=",
"data",
",",
"owner",
"=",
"owner",
")"
] |
Validate input data with input schema.
:param Schema schema: schema able to validate input data.
:param data: data to validate.
:param Schema owner: input schema parent schema.
:raises: Exception if the data is not validated.
|
[
"Validate",
"input",
"data",
"with",
"input",
"schema",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/utils.py#L235-L243
|
241,128
|
b3j0f/schema
|
b3j0f/schema/utils.py
|
dump
|
def dump(schema):
"""Get a serialized value of input schema.
:param Schema schema: schema to serialize.
:rtype: dict
"""
result = {}
for name, _ in iteritems(schema.getschemas()):
if hasattr(schema, name):
val = getattr(schema, name)
if isinstance(val, DynamicValue):
val = val()
if isinstance(val, Schema):
val = dump(val)
result[name] = val
return result
|
python
|
def dump(schema):
"""Get a serialized value of input schema.
:param Schema schema: schema to serialize.
:rtype: dict
"""
result = {}
for name, _ in iteritems(schema.getschemas()):
if hasattr(schema, name):
val = getattr(schema, name)
if isinstance(val, DynamicValue):
val = val()
if isinstance(val, Schema):
val = dump(val)
result[name] = val
return result
|
[
"def",
"dump",
"(",
"schema",
")",
":",
"result",
"=",
"{",
"}",
"for",
"name",
",",
"_",
"in",
"iteritems",
"(",
"schema",
".",
"getschemas",
"(",
")",
")",
":",
"if",
"hasattr",
"(",
"schema",
",",
"name",
")",
":",
"val",
"=",
"getattr",
"(",
"schema",
",",
"name",
")",
"if",
"isinstance",
"(",
"val",
",",
"DynamicValue",
")",
":",
"val",
"=",
"val",
"(",
")",
"if",
"isinstance",
"(",
"val",
",",
"Schema",
")",
":",
"val",
"=",
"dump",
"(",
"val",
")",
"result",
"[",
"name",
"]",
"=",
"val",
"return",
"result"
] |
Get a serialized value of input schema.
:param Schema schema: schema to serialize.
:rtype: dict
|
[
"Get",
"a",
"serialized",
"value",
"of",
"input",
"schema",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/utils.py#L246-L267
|
241,129
|
b3j0f/schema
|
b3j0f/schema/utils.py
|
updatecontent
|
def updatecontent(schemacls=None, updateparents=True, exclude=None):
"""Transform all schema class attributes to schemas.
It can be used such as a decorator in order to ensure to update attributes
with the decorated schema but take care to the limitation to use old style
method call for overidden methods.
.. example:
@updatecontent # update content at the end of its definition.
class Test(Schema):
this = ThisSchema() # instance of Test.
def __init__(self, *args, **kwargs):
Test.__init__(self, *args, **kwargs) # old style method call.
:param type schemacls: sub class of Schema.
:param bool updateparents: if True (default), update parent content.
:param list exclude: attribute names to exclude from updating.
:return: schemacls.
"""
if schemacls is None:
return lambda schemacls: updatecontent(
schemacls=schemacls, updateparents=updateparents, exclude=exclude
)
if updateparents and hasattr(schemacls, 'mro'):
schemaclasses = reversed(list(schemacls.mro()))
else:
schemaclasses = [schemacls]
for schemaclass in schemaclasses:
for name, member in iteritems(getattr(schemaclass, '__dict__', {})):
# transform only public members
if name[0] != '_' and (exclude is None or name not in exclude):
toset = False # flag for setting schemas
fmember = member
if isinstance(fmember, DynamicValue):
fmember = fmember()
toset = True
if isinstance(fmember, Schema):
schema = fmember
if not schema.name:
schema.name = name
else:
toset = True
data = member
if name == 'default':
if isinstance(fmember, ThisSchema):
data = schemaclass(*fmember.args, **fmember.kwargs)
schema = RefSchema(default=data, name=name)
elif isinstance(fmember, ThisSchema):
schema = schemaclass(
name=name, *fmember.args, **fmember.kwargs
)
elif member is None:
schema = AnySchema(name=name)
else:
schema = data2schema(_data=data, name=name)
if isinstance(schema, Schema) and toset:
try:
setattr(schemaclass, name, schema)
except (AttributeError, TypeError):
break
return schemacls
|
python
|
def updatecontent(schemacls=None, updateparents=True, exclude=None):
"""Transform all schema class attributes to schemas.
It can be used such as a decorator in order to ensure to update attributes
with the decorated schema but take care to the limitation to use old style
method call for overidden methods.
.. example:
@updatecontent # update content at the end of its definition.
class Test(Schema):
this = ThisSchema() # instance of Test.
def __init__(self, *args, **kwargs):
Test.__init__(self, *args, **kwargs) # old style method call.
:param type schemacls: sub class of Schema.
:param bool updateparents: if True (default), update parent content.
:param list exclude: attribute names to exclude from updating.
:return: schemacls.
"""
if schemacls is None:
return lambda schemacls: updatecontent(
schemacls=schemacls, updateparents=updateparents, exclude=exclude
)
if updateparents and hasattr(schemacls, 'mro'):
schemaclasses = reversed(list(schemacls.mro()))
else:
schemaclasses = [schemacls]
for schemaclass in schemaclasses:
for name, member in iteritems(getattr(schemaclass, '__dict__', {})):
# transform only public members
if name[0] != '_' and (exclude is None or name not in exclude):
toset = False # flag for setting schemas
fmember = member
if isinstance(fmember, DynamicValue):
fmember = fmember()
toset = True
if isinstance(fmember, Schema):
schema = fmember
if not schema.name:
schema.name = name
else:
toset = True
data = member
if name == 'default':
if isinstance(fmember, ThisSchema):
data = schemaclass(*fmember.args, **fmember.kwargs)
schema = RefSchema(default=data, name=name)
elif isinstance(fmember, ThisSchema):
schema = schemaclass(
name=name, *fmember.args, **fmember.kwargs
)
elif member is None:
schema = AnySchema(name=name)
else:
schema = data2schema(_data=data, name=name)
if isinstance(schema, Schema) and toset:
try:
setattr(schemaclass, name, schema)
except (AttributeError, TypeError):
break
return schemacls
|
[
"def",
"updatecontent",
"(",
"schemacls",
"=",
"None",
",",
"updateparents",
"=",
"True",
",",
"exclude",
"=",
"None",
")",
":",
"if",
"schemacls",
"is",
"None",
":",
"return",
"lambda",
"schemacls",
":",
"updatecontent",
"(",
"schemacls",
"=",
"schemacls",
",",
"updateparents",
"=",
"updateparents",
",",
"exclude",
"=",
"exclude",
")",
"if",
"updateparents",
"and",
"hasattr",
"(",
"schemacls",
",",
"'mro'",
")",
":",
"schemaclasses",
"=",
"reversed",
"(",
"list",
"(",
"schemacls",
".",
"mro",
"(",
")",
")",
")",
"else",
":",
"schemaclasses",
"=",
"[",
"schemacls",
"]",
"for",
"schemaclass",
"in",
"schemaclasses",
":",
"for",
"name",
",",
"member",
"in",
"iteritems",
"(",
"getattr",
"(",
"schemaclass",
",",
"'__dict__'",
",",
"{",
"}",
")",
")",
":",
"# transform only public members",
"if",
"name",
"[",
"0",
"]",
"!=",
"'_'",
"and",
"(",
"exclude",
"is",
"None",
"or",
"name",
"not",
"in",
"exclude",
")",
":",
"toset",
"=",
"False",
"# flag for setting schemas",
"fmember",
"=",
"member",
"if",
"isinstance",
"(",
"fmember",
",",
"DynamicValue",
")",
":",
"fmember",
"=",
"fmember",
"(",
")",
"toset",
"=",
"True",
"if",
"isinstance",
"(",
"fmember",
",",
"Schema",
")",
":",
"schema",
"=",
"fmember",
"if",
"not",
"schema",
".",
"name",
":",
"schema",
".",
"name",
"=",
"name",
"else",
":",
"toset",
"=",
"True",
"data",
"=",
"member",
"if",
"name",
"==",
"'default'",
":",
"if",
"isinstance",
"(",
"fmember",
",",
"ThisSchema",
")",
":",
"data",
"=",
"schemaclass",
"(",
"*",
"fmember",
".",
"args",
",",
"*",
"*",
"fmember",
".",
"kwargs",
")",
"schema",
"=",
"RefSchema",
"(",
"default",
"=",
"data",
",",
"name",
"=",
"name",
")",
"elif",
"isinstance",
"(",
"fmember",
",",
"ThisSchema",
")",
":",
"schema",
"=",
"schemaclass",
"(",
"name",
"=",
"name",
",",
"*",
"fmember",
".",
"args",
",",
"*",
"*",
"fmember",
".",
"kwargs",
")",
"elif",
"member",
"is",
"None",
":",
"schema",
"=",
"AnySchema",
"(",
"name",
"=",
"name",
")",
"else",
":",
"schema",
"=",
"data2schema",
"(",
"_data",
"=",
"data",
",",
"name",
"=",
"name",
")",
"if",
"isinstance",
"(",
"schema",
",",
"Schema",
")",
"and",
"toset",
":",
"try",
":",
"setattr",
"(",
"schemaclass",
",",
"name",
",",
"schema",
")",
"except",
"(",
"AttributeError",
",",
"TypeError",
")",
":",
"break",
"return",
"schemacls"
] |
Transform all schema class attributes to schemas.
It can be used such as a decorator in order to ensure to update attributes
with the decorated schema but take care to the limitation to use old style
method call for overidden methods.
.. example:
@updatecontent # update content at the end of its definition.
class Test(Schema):
this = ThisSchema() # instance of Test.
def __init__(self, *args, **kwargs):
Test.__init__(self, *args, **kwargs) # old style method call.
:param type schemacls: sub class of Schema.
:param bool updateparents: if True (default), update parent content.
:param list exclude: attribute names to exclude from updating.
:return: schemacls.
|
[
"Transform",
"all",
"schema",
"class",
"attributes",
"to",
"schemas",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/utils.py#L309-L390
|
241,130
|
ryanleland/Akispy
|
akispy/__init__.py
|
Connection.verify_key
|
def verify_key(self, url):
"""For verifying your API key.
Provide the URL of your site or blog you will be checking spam from.
"""
response = self._request('verify-key', {
'blog': url,
'key': self._key
})
if response.status is 200:
# Read response (trimmed of whitespace)
return response.read().strip() == "valid"
return False
|
python
|
def verify_key(self, url):
"""For verifying your API key.
Provide the URL of your site or blog you will be checking spam from.
"""
response = self._request('verify-key', {
'blog': url,
'key': self._key
})
if response.status is 200:
# Read response (trimmed of whitespace)
return response.read().strip() == "valid"
return False
|
[
"def",
"verify_key",
"(",
"self",
",",
"url",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"'verify-key'",
",",
"{",
"'blog'",
":",
"url",
",",
"'key'",
":",
"self",
".",
"_key",
"}",
")",
"if",
"response",
".",
"status",
"is",
"200",
":",
"# Read response (trimmed of whitespace)",
"return",
"response",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
"==",
"\"valid\"",
"return",
"False"
] |
For verifying your API key.
Provide the URL of your site or blog you will be checking spam from.
|
[
"For",
"verifying",
"your",
"API",
"key",
".",
"Provide",
"the",
"URL",
"of",
"your",
"site",
"or",
"blog",
"you",
"will",
"be",
"checking",
"spam",
"from",
"."
] |
dbbb85a1d1b027051e11179289cc9067cb90baf6
|
https://github.com/ryanleland/Akispy/blob/dbbb85a1d1b027051e11179289cc9067cb90baf6/akispy/__init__.py#L29-L44
|
241,131
|
ryanleland/Akispy
|
akispy/__init__.py
|
Connection.comment_check
|
def comment_check(self, params):
"""For checking comments."""
# Check required params for comment-check
for required in ['blog', 'user_ip', 'user_agent']:
if required not in params:
raise MissingParams(required)
response = self._request('comment-check', params)
if response.status is 200:
# Read response (trimmed of whitespace)
return response.read().strip() == "true"
return False
|
python
|
def comment_check(self, params):
"""For checking comments."""
# Check required params for comment-check
for required in ['blog', 'user_ip', 'user_agent']:
if required not in params:
raise MissingParams(required)
response = self._request('comment-check', params)
if response.status is 200:
# Read response (trimmed of whitespace)
return response.read().strip() == "true"
return False
|
[
"def",
"comment_check",
"(",
"self",
",",
"params",
")",
":",
"# Check required params for comment-check",
"for",
"required",
"in",
"[",
"'blog'",
",",
"'user_ip'",
",",
"'user_agent'",
"]",
":",
"if",
"required",
"not",
"in",
"params",
":",
"raise",
"MissingParams",
"(",
"required",
")",
"response",
"=",
"self",
".",
"_request",
"(",
"'comment-check'",
",",
"params",
")",
"if",
"response",
".",
"status",
"is",
"200",
":",
"# Read response (trimmed of whitespace)",
"return",
"response",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
"==",
"\"true\"",
"return",
"False"
] |
For checking comments.
|
[
"For",
"checking",
"comments",
"."
] |
dbbb85a1d1b027051e11179289cc9067cb90baf6
|
https://github.com/ryanleland/Akispy/blob/dbbb85a1d1b027051e11179289cc9067cb90baf6/akispy/__init__.py#L46-L60
|
241,132
|
ryanleland/Akispy
|
akispy/__init__.py
|
Connection.submit_ham
|
def submit_ham(self, params):
"""For submitting a ham comment to Akismet."""
# Check required params for submit-ham
for required in ['blog', 'user_ip', 'user_agent']:
if required not in params:
raise MissingParams(required)
response = self._request('submit-ham', params)
if response.status is 200:
return response.read() == "true"
return False
|
python
|
def submit_ham(self, params):
"""For submitting a ham comment to Akismet."""
# Check required params for submit-ham
for required in ['blog', 'user_ip', 'user_agent']:
if required not in params:
raise MissingParams(required)
response = self._request('submit-ham', params)
if response.status is 200:
return response.read() == "true"
return False
|
[
"def",
"submit_ham",
"(",
"self",
",",
"params",
")",
":",
"# Check required params for submit-ham",
"for",
"required",
"in",
"[",
"'blog'",
",",
"'user_ip'",
",",
"'user_agent'",
"]",
":",
"if",
"required",
"not",
"in",
"params",
":",
"raise",
"MissingParams",
"(",
"required",
")",
"response",
"=",
"self",
".",
"_request",
"(",
"'submit-ham'",
",",
"params",
")",
"if",
"response",
".",
"status",
"is",
"200",
":",
"return",
"response",
".",
"read",
"(",
")",
"==",
"\"true\"",
"return",
"False"
] |
For submitting a ham comment to Akismet.
|
[
"For",
"submitting",
"a",
"ham",
"comment",
"to",
"Akismet",
"."
] |
dbbb85a1d1b027051e11179289cc9067cb90baf6
|
https://github.com/ryanleland/Akispy/blob/dbbb85a1d1b027051e11179289cc9067cb90baf6/akispy/__init__.py#L77-L90
|
241,133
|
ryanleland/Akispy
|
akispy/__init__.py
|
Connection._request
|
def _request(self, function, params, method='POST', headers={}):
"""Builds a request object."""
if method is 'POST':
params = urllib.parse.urlencode(params)
headers = { "Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain" }
path = '/%s/%s' % (self._version, function)
self._conn.request(method, path, params, headers)
return self._conn.getresponse()
|
python
|
def _request(self, function, params, method='POST', headers={}):
"""Builds a request object."""
if method is 'POST':
params = urllib.parse.urlencode(params)
headers = { "Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain" }
path = '/%s/%s' % (self._version, function)
self._conn.request(method, path, params, headers)
return self._conn.getresponse()
|
[
"def",
"_request",
"(",
"self",
",",
"function",
",",
"params",
",",
"method",
"=",
"'POST'",
",",
"headers",
"=",
"{",
"}",
")",
":",
"if",
"method",
"is",
"'POST'",
":",
"params",
"=",
"urllib",
".",
"parse",
".",
"urlencode",
"(",
"params",
")",
"headers",
"=",
"{",
"\"Content-type\"",
":",
"\"application/x-www-form-urlencoded\"",
",",
"\"Accept\"",
":",
"\"text/plain\"",
"}",
"path",
"=",
"'/%s/%s'",
"%",
"(",
"self",
".",
"_version",
",",
"function",
")",
"self",
".",
"_conn",
".",
"request",
"(",
"method",
",",
"path",
",",
"params",
",",
"headers",
")",
"return",
"self",
".",
"_conn",
".",
"getresponse",
"(",
")"
] |
Builds a request object.
|
[
"Builds",
"a",
"request",
"object",
"."
] |
dbbb85a1d1b027051e11179289cc9067cb90baf6
|
https://github.com/ryanleland/Akispy/blob/dbbb85a1d1b027051e11179289cc9067cb90baf6/akispy/__init__.py#L92-L102
|
241,134
|
Apitax/Apitax
|
apitax/api/controllers/migrations/apitax_controller.py
|
refresh_token
|
def refresh_token(): # noqa: E501
"""Refreshes login token using refresh token
Refreshes login token using refresh token # noqa: E501
:rtype: UserAuth
"""
current_user = get_jwt_identity()
if not current_user:
return ErrorResponse(status=401, message="Not logged in")
access_token = create_access_token(identity=current_user)
return AuthResponse(status=201, message='Refreshed Access Token', access_token=access_token, auth=UserAuth())
|
python
|
def refresh_token(): # noqa: E501
"""Refreshes login token using refresh token
Refreshes login token using refresh token # noqa: E501
:rtype: UserAuth
"""
current_user = get_jwt_identity()
if not current_user:
return ErrorResponse(status=401, message="Not logged in")
access_token = create_access_token(identity=current_user)
return AuthResponse(status=201, message='Refreshed Access Token', access_token=access_token, auth=UserAuth())
|
[
"def",
"refresh_token",
"(",
")",
":",
"# noqa: E501",
"current_user",
"=",
"get_jwt_identity",
"(",
")",
"if",
"not",
"current_user",
":",
"return",
"ErrorResponse",
"(",
"status",
"=",
"401",
",",
"message",
"=",
"\"Not logged in\"",
")",
"access_token",
"=",
"create_access_token",
"(",
"identity",
"=",
"current_user",
")",
"return",
"AuthResponse",
"(",
"status",
"=",
"201",
",",
"message",
"=",
"'Refreshed Access Token'",
",",
"access_token",
"=",
"access_token",
",",
"auth",
"=",
"UserAuth",
"(",
")",
")"
] |
Refreshes login token using refresh token
Refreshes login token using refresh token # noqa: E501
:rtype: UserAuth
|
[
"Refreshes",
"login",
"token",
"using",
"refresh",
"token"
] |
3883e45f17e01eba4edac9d1bba42f0e7a748682
|
https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/migrations/apitax_controller.py#L182-L194
|
241,135
|
b3j0f/schema
|
b3j0f/schema/lang/factory.py
|
build
|
def build(_resource, _cache=True, **kwargs):
"""Build a schema from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use cache system.
:rtype: Schema.
"""
return _SCHEMAFACTORY.build(_resource=_resource, _cache=True, **kwargs)
|
python
|
def build(_resource, _cache=True, **kwargs):
"""Build a schema from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use cache system.
:rtype: Schema.
"""
return _SCHEMAFACTORY.build(_resource=_resource, _cache=True, **kwargs)
|
[
"def",
"build",
"(",
"_resource",
",",
"_cache",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_SCHEMAFACTORY",
".",
"build",
"(",
"_resource",
"=",
"_resource",
",",
"_cache",
"=",
"True",
",",
"*",
"*",
"kwargs",
")"
] |
Build a schema from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use cache system.
:rtype: Schema.
|
[
"Build",
"a",
"schema",
"from",
"input",
"_resource",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/lang/factory.py#L185-L192
|
241,136
|
b3j0f/schema
|
b3j0f/schema/lang/factory.py
|
SchemaFactory.registerbuilder
|
def registerbuilder(self, builder, name=None):
"""Register a schema builder with a key name.
Can be used such as a decorator where the builder can be the name for a
short use.
:param SchemaBuilder builder: schema builder.
:param str name: builder name. Default is builder name or generated.
"""
if name is None:
name = uuid()
self._builders[name] = builder
return builder
|
python
|
def registerbuilder(self, builder, name=None):
"""Register a schema builder with a key name.
Can be used such as a decorator where the builder can be the name for a
short use.
:param SchemaBuilder builder: schema builder.
:param str name: builder name. Default is builder name or generated.
"""
if name is None:
name = uuid()
self._builders[name] = builder
return builder
|
[
"def",
"registerbuilder",
"(",
"self",
",",
"builder",
",",
"name",
"=",
"None",
")",
":",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"uuid",
"(",
")",
"self",
".",
"_builders",
"[",
"name",
"]",
"=",
"builder",
"return",
"builder"
] |
Register a schema builder with a key name.
Can be used such as a decorator where the builder can be the name for a
short use.
:param SchemaBuilder builder: schema builder.
:param str name: builder name. Default is builder name or generated.
|
[
"Register",
"a",
"schema",
"builder",
"with",
"a",
"key",
"name",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/lang/factory.py#L54-L68
|
241,137
|
b3j0f/schema
|
b3j0f/schema/lang/factory.py
|
SchemaFactory.build
|
def build(self, _resource, _cache=True, updatecontent=True, **kwargs):
"""Build a schema class from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use _cache system.
:param bool updatecontent: if True (default) update result.
:rtype: Schema.
"""
result = None
if _cache and _resource in self._schemasbyresource:
result = self._schemasbyresource[_resource]
else:
for builder in self._builders.values():
try:
result = builder.build(_resource=_resource, **kwargs)
except Exception:
pass
else:
break
if result is None:
raise ValueError('No builder found for {0}'.format(_resource))
if _cache:
self._schemasbyresource[_resource] = result
if updatecontent:
from ..utils import updatecontent
updatecontent(result, updateparents=False)
return result
|
python
|
def build(self, _resource, _cache=True, updatecontent=True, **kwargs):
"""Build a schema class from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use _cache system.
:param bool updatecontent: if True (default) update result.
:rtype: Schema.
"""
result = None
if _cache and _resource in self._schemasbyresource:
result = self._schemasbyresource[_resource]
else:
for builder in self._builders.values():
try:
result = builder.build(_resource=_resource, **kwargs)
except Exception:
pass
else:
break
if result is None:
raise ValueError('No builder found for {0}'.format(_resource))
if _cache:
self._schemasbyresource[_resource] = result
if updatecontent:
from ..utils import updatecontent
updatecontent(result, updateparents=False)
return result
|
[
"def",
"build",
"(",
"self",
",",
"_resource",
",",
"_cache",
"=",
"True",
",",
"updatecontent",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"result",
"=",
"None",
"if",
"_cache",
"and",
"_resource",
"in",
"self",
".",
"_schemasbyresource",
":",
"result",
"=",
"self",
".",
"_schemasbyresource",
"[",
"_resource",
"]",
"else",
":",
"for",
"builder",
"in",
"self",
".",
"_builders",
".",
"values",
"(",
")",
":",
"try",
":",
"result",
"=",
"builder",
".",
"build",
"(",
"_resource",
"=",
"_resource",
",",
"*",
"*",
"kwargs",
")",
"except",
"Exception",
":",
"pass",
"else",
":",
"break",
"if",
"result",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'No builder found for {0}'",
".",
"format",
"(",
"_resource",
")",
")",
"if",
"_cache",
":",
"self",
".",
"_schemasbyresource",
"[",
"_resource",
"]",
"=",
"result",
"if",
"updatecontent",
":",
"from",
".",
".",
"utils",
"import",
"updatecontent",
"updatecontent",
"(",
"result",
",",
"updateparents",
"=",
"False",
")",
"return",
"result"
] |
Build a schema class from input _resource.
:param _resource: object from where get the right schema.
:param bool _cache: use _cache system.
:param bool updatecontent: if True (default) update result.
:rtype: Schema.
|
[
"Build",
"a",
"schema",
"class",
"from",
"input",
"_resource",
"."
] |
1c88c23337f5fef50254e65bd407112c43396dd9
|
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/lang/factory.py#L93-L127
|
241,138
|
formwork-io/lazarus
|
lazarus/__init__.py
|
stop
|
def stop():
'''Stops lazarus, regardless of which mode it was started in.
For example:
>>> import lazarus
>>> lazarus.default()
>>> lazarus.stop()
'''
global _active
if not _active:
msg = 'lazarus is not active'
raise RuntimeWarning(msg)
_observer.stop()
_observer.join()
_deactivate()
|
python
|
def stop():
'''Stops lazarus, regardless of which mode it was started in.
For example:
>>> import lazarus
>>> lazarus.default()
>>> lazarus.stop()
'''
global _active
if not _active:
msg = 'lazarus is not active'
raise RuntimeWarning(msg)
_observer.stop()
_observer.join()
_deactivate()
|
[
"def",
"stop",
"(",
")",
":",
"global",
"_active",
"if",
"not",
"_active",
":",
"msg",
"=",
"'lazarus is not active'",
"raise",
"RuntimeWarning",
"(",
"msg",
")",
"_observer",
".",
"stop",
"(",
")",
"_observer",
".",
"join",
"(",
")",
"_deactivate",
"(",
")"
] |
Stops lazarus, regardless of which mode it was started in.
For example:
>>> import lazarus
>>> lazarus.default()
>>> lazarus.stop()
|
[
"Stops",
"lazarus",
"regardless",
"of",
"which",
"mode",
"it",
"was",
"started",
"in",
"."
] |
b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa
|
https://github.com/formwork-io/lazarus/blob/b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa/lazarus/__init__.py#L32-L47
|
241,139
|
formwork-io/lazarus
|
lazarus/__init__.py
|
_restart
|
def _restart():
'''Schedule the restart; returning True if cancelled, False otherwise.'''
if _restart_cb:
# https://github.com/formwork-io/lazarus/issues/2
if _restart_cb() is not None:
# restart cancelled
return True
def down_watchdog():
_observer.stop()
_observer.join()
if _close_fds:
# close all fds...
_util.close_fds()
# declare a mulligan ;)
if _restart_func:
_restart_func()
_deactivate()
else:
_util.do_over()
_util.defer(down_watchdog)
return False
|
python
|
def _restart():
'''Schedule the restart; returning True if cancelled, False otherwise.'''
if _restart_cb:
# https://github.com/formwork-io/lazarus/issues/2
if _restart_cb() is not None:
# restart cancelled
return True
def down_watchdog():
_observer.stop()
_observer.join()
if _close_fds:
# close all fds...
_util.close_fds()
# declare a mulligan ;)
if _restart_func:
_restart_func()
_deactivate()
else:
_util.do_over()
_util.defer(down_watchdog)
return False
|
[
"def",
"_restart",
"(",
")",
":",
"if",
"_restart_cb",
":",
"# https://github.com/formwork-io/lazarus/issues/2",
"if",
"_restart_cb",
"(",
")",
"is",
"not",
"None",
":",
"# restart cancelled",
"return",
"True",
"def",
"down_watchdog",
"(",
")",
":",
"_observer",
".",
"stop",
"(",
")",
"_observer",
".",
"join",
"(",
")",
"if",
"_close_fds",
":",
"# close all fds...",
"_util",
".",
"close_fds",
"(",
")",
"# declare a mulligan ;)",
"if",
"_restart_func",
":",
"_restart_func",
"(",
")",
"_deactivate",
"(",
")",
"else",
":",
"_util",
".",
"do_over",
"(",
")",
"_util",
".",
"defer",
"(",
"down_watchdog",
")",
"return",
"False"
] |
Schedule the restart; returning True if cancelled, False otherwise.
|
[
"Schedule",
"the",
"restart",
";",
"returning",
"True",
"if",
"cancelled",
"False",
"otherwise",
"."
] |
b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa
|
https://github.com/formwork-io/lazarus/blob/b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa/lazarus/__init__.py#L66-L90
|
241,140
|
formwork-io/lazarus
|
lazarus/__init__.py
|
default
|
def default(restart_cb=None, restart_func=None, close_fds=True):
'''Sets up lazarus in default mode.
See the :py:func:`custom` function for a more powerful mode of use.
The default mode of lazarus is to watch all modules rooted at
``PYTHONPATH`` for changes and restart when they take place.
Keyword arguments:
restart_cb -- Callback invoked prior to restarting the process; allows
for any cleanup to occur prior to restarting. Returning anything other
than *None* in the callback will cancel the restart.
restart_func -- Function invoked to restart the process. This supplants
the default behavior of using *sys.executable* and *sys.argv*.
close_fds -- Whether all file descriptors other than *stdin*, *stdout*,
and *stderr* should be closed
A simple example:
>>> import lazarus
>>> lazarus.default()
>>> lazarus.stop()
'''
if _active:
msg = 'lazarus is already active'
raise RuntimeWarning(msg)
_python_path = os.getenv('PYTHONPATH')
if not _python_path:
msg = 'PYTHONPATH is not set'
raise RuntimeError(msg)
if restart_cb and not callable(restart_cb):
msg = 'restart_cb keyword argument is not callable'
raise TypeError(msg)
if restart_func and not callable(restart_func):
msg = 'restart_func keyword argument is not callable'
raise TypeError(msg)
global _close_fds
_close_fds = close_fds
try:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
except ImportError as ie:
msg = 'no watchdog support (%s)' % str(ie)
raise RuntimeError(msg)
class _Handler(FileSystemEventHandler):
def __init__(self):
self.active = True
def dispatch(self, event):
if not self.active:
return
super(_Handler, self).dispatch(event)
def all_events(self, event):
if is_restart_event(event):
cancelled = _restart()
if not cancelled:
self.active = False
def on_created(self, event):
self.all_events(event)
def on_deleted(self, event):
self.all_events(event)
def on_modified(self, event):
self.all_events(event)
def on_moved(self, event):
self.all_events(event)
global _observer
_observer = Observer()
handler = _Handler()
_observer.schedule(handler, _python_path, recursive=True)
global _restart_cb
_restart_cb = restart_cb
global _restart_func
_restart_func = restart_func
_activate()
_observer.start()
|
python
|
def default(restart_cb=None, restart_func=None, close_fds=True):
'''Sets up lazarus in default mode.
See the :py:func:`custom` function for a more powerful mode of use.
The default mode of lazarus is to watch all modules rooted at
``PYTHONPATH`` for changes and restart when they take place.
Keyword arguments:
restart_cb -- Callback invoked prior to restarting the process; allows
for any cleanup to occur prior to restarting. Returning anything other
than *None* in the callback will cancel the restart.
restart_func -- Function invoked to restart the process. This supplants
the default behavior of using *sys.executable* and *sys.argv*.
close_fds -- Whether all file descriptors other than *stdin*, *stdout*,
and *stderr* should be closed
A simple example:
>>> import lazarus
>>> lazarus.default()
>>> lazarus.stop()
'''
if _active:
msg = 'lazarus is already active'
raise RuntimeWarning(msg)
_python_path = os.getenv('PYTHONPATH')
if not _python_path:
msg = 'PYTHONPATH is not set'
raise RuntimeError(msg)
if restart_cb and not callable(restart_cb):
msg = 'restart_cb keyword argument is not callable'
raise TypeError(msg)
if restart_func and not callable(restart_func):
msg = 'restart_func keyword argument is not callable'
raise TypeError(msg)
global _close_fds
_close_fds = close_fds
try:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
except ImportError as ie:
msg = 'no watchdog support (%s)' % str(ie)
raise RuntimeError(msg)
class _Handler(FileSystemEventHandler):
def __init__(self):
self.active = True
def dispatch(self, event):
if not self.active:
return
super(_Handler, self).dispatch(event)
def all_events(self, event):
if is_restart_event(event):
cancelled = _restart()
if not cancelled:
self.active = False
def on_created(self, event):
self.all_events(event)
def on_deleted(self, event):
self.all_events(event)
def on_modified(self, event):
self.all_events(event)
def on_moved(self, event):
self.all_events(event)
global _observer
_observer = Observer()
handler = _Handler()
_observer.schedule(handler, _python_path, recursive=True)
global _restart_cb
_restart_cb = restart_cb
global _restart_func
_restart_func = restart_func
_activate()
_observer.start()
|
[
"def",
"default",
"(",
"restart_cb",
"=",
"None",
",",
"restart_func",
"=",
"None",
",",
"close_fds",
"=",
"True",
")",
":",
"if",
"_active",
":",
"msg",
"=",
"'lazarus is already active'",
"raise",
"RuntimeWarning",
"(",
"msg",
")",
"_python_path",
"=",
"os",
".",
"getenv",
"(",
"'PYTHONPATH'",
")",
"if",
"not",
"_python_path",
":",
"msg",
"=",
"'PYTHONPATH is not set'",
"raise",
"RuntimeError",
"(",
"msg",
")",
"if",
"restart_cb",
"and",
"not",
"callable",
"(",
"restart_cb",
")",
":",
"msg",
"=",
"'restart_cb keyword argument is not callable'",
"raise",
"TypeError",
"(",
"msg",
")",
"if",
"restart_func",
"and",
"not",
"callable",
"(",
"restart_func",
")",
":",
"msg",
"=",
"'restart_func keyword argument is not callable'",
"raise",
"TypeError",
"(",
"msg",
")",
"global",
"_close_fds",
"_close_fds",
"=",
"close_fds",
"try",
":",
"from",
"watchdog",
".",
"observers",
"import",
"Observer",
"from",
"watchdog",
".",
"events",
"import",
"FileSystemEventHandler",
"except",
"ImportError",
"as",
"ie",
":",
"msg",
"=",
"'no watchdog support (%s)'",
"%",
"str",
"(",
"ie",
")",
"raise",
"RuntimeError",
"(",
"msg",
")",
"class",
"_Handler",
"(",
"FileSystemEventHandler",
")",
":",
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"active",
"=",
"True",
"def",
"dispatch",
"(",
"self",
",",
"event",
")",
":",
"if",
"not",
"self",
".",
"active",
":",
"return",
"super",
"(",
"_Handler",
",",
"self",
")",
".",
"dispatch",
"(",
"event",
")",
"def",
"all_events",
"(",
"self",
",",
"event",
")",
":",
"if",
"is_restart_event",
"(",
"event",
")",
":",
"cancelled",
"=",
"_restart",
"(",
")",
"if",
"not",
"cancelled",
":",
"self",
".",
"active",
"=",
"False",
"def",
"on_created",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"def",
"on_deleted",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"def",
"on_modified",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"def",
"on_moved",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"global",
"_observer",
"_observer",
"=",
"Observer",
"(",
")",
"handler",
"=",
"_Handler",
"(",
")",
"_observer",
".",
"schedule",
"(",
"handler",
",",
"_python_path",
",",
"recursive",
"=",
"True",
")",
"global",
"_restart_cb",
"_restart_cb",
"=",
"restart_cb",
"global",
"_restart_func",
"_restart_func",
"=",
"restart_func",
"_activate",
"(",
")",
"_observer",
".",
"start",
"(",
")"
] |
Sets up lazarus in default mode.
See the :py:func:`custom` function for a more powerful mode of use.
The default mode of lazarus is to watch all modules rooted at
``PYTHONPATH`` for changes and restart when they take place.
Keyword arguments:
restart_cb -- Callback invoked prior to restarting the process; allows
for any cleanup to occur prior to restarting. Returning anything other
than *None* in the callback will cancel the restart.
restart_func -- Function invoked to restart the process. This supplants
the default behavior of using *sys.executable* and *sys.argv*.
close_fds -- Whether all file descriptors other than *stdin*, *stdout*,
and *stderr* should be closed
A simple example:
>>> import lazarus
>>> lazarus.default()
>>> lazarus.stop()
|
[
"Sets",
"up",
"lazarus",
"in",
"default",
"mode",
"."
] |
b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa
|
https://github.com/formwork-io/lazarus/blob/b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa/lazarus/__init__.py#L119-L210
|
241,141
|
formwork-io/lazarus
|
lazarus/__init__.py
|
custom
|
def custom(srcpaths, event_cb=None, poll_interval=1, recurse=True,
restart_cb=None, restart_func=None, close_fds=True):
'''Sets up lazarus in custom mode.
See the :py:func:`default` function for a simpler mode of use.
The custom mode of lazarus is to watch all modules rooted at any of the
source paths provided for changes and restart when they take place.
Keyword arguments:
event_cb -- Callback invoked when a file rooted at a source path
changes. Without specifying an event callback, changes to any module
rooted at a source path will trigger a restart.
poll_interval -- Rate at which changes will be detected. The default
value of ``1`` means it may take up to one second to detect changes.
Decreasing this value may lead to unnecessary overhead.
recurse -- Whether to watch all subdirectories of every source path for
changes or only the paths provided.
restart_cb -- Callback invoked prior to restarting the process; allows
for any cleanup to occur prior to restarting. Returning anything other
than *None* in the callback will cancel the restart.
restart_func -- Function invoked to restart the process. This supplants
the default behavior of using *sys.executable* and *sys.argv*.
close_fds -- Whether all file descriptors other than *stdin*, *stdout*,
and *stderr* should be closed
An example of using a cleanup function prior to restarting:
>>> def cleanup():
... pass
>>> import lazarus
>>> lazarus.custom(os.curdir, restart_cb=cleanup)
>>> lazarus.stop()
An example of avoiding restarts when any ``__main__.py`` changes:
>>> def skip_main(event):
... if event.src_path == '__main__.py':
... return False
... return True
>>> import lazarus
>>> lazarus.custom(os.curdir, event_cb=skip_main)
>>> lazarus.stop()
'''
if _active:
msg = 'lazarus is already active'
raise RuntimeWarning(msg)
if restart_cb and not callable(restart_cb):
msg = 'restart_cb keyword argument is not callable'
raise TypeError(msg)
if restart_func and not callable(restart_func):
msg = 'restart_func keyword argument is not callable'
raise TypeError(msg)
global _close_fds
_close_fds = close_fds
try:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
except ImportError as ie:
msg = 'no watchdog support (%s)' % str(ie)
raise RuntimeError(msg)
class _Handler(FileSystemEventHandler):
def __init__(self):
self.active = True
def dispatch(self, event):
if not self.active:
return
super(_Handler, self).dispatch(event)
def all_events(self, event):
# if caller wants event_cb control, defer _restart logic to them
# (caller decides whether this is a restart event)
if event_cb:
if event_cb(event):
cancelled = _restart()
if not cancelled:
self.active = False
# use default is_restart_event logic
elif is_restart_event(event):
cancelled = _restart()
if not cancelled:
self.active = False
self.active = False
def on_created(self, event):
self.all_events(event)
def on_deleted(self, event):
self.all_events(event)
def on_modified(self, event):
self.all_events(event)
def on_moved(self, event):
self.all_events(event)
global _observer
kwargs = {'timeout': poll_interval}
_observer = Observer(**kwargs)
global _restart_cb
_restart_cb = restart_cb
handler = _Handler()
srcpaths = _as_list(srcpaths)
kwargs = {}
if recurse:
kwargs['recursive'] = True
for srcpath in srcpaths:
_observer.schedule(handler, srcpath, **kwargs)
_activate()
_observer.start()
|
python
|
def custom(srcpaths, event_cb=None, poll_interval=1, recurse=True,
restart_cb=None, restart_func=None, close_fds=True):
'''Sets up lazarus in custom mode.
See the :py:func:`default` function for a simpler mode of use.
The custom mode of lazarus is to watch all modules rooted at any of the
source paths provided for changes and restart when they take place.
Keyword arguments:
event_cb -- Callback invoked when a file rooted at a source path
changes. Without specifying an event callback, changes to any module
rooted at a source path will trigger a restart.
poll_interval -- Rate at which changes will be detected. The default
value of ``1`` means it may take up to one second to detect changes.
Decreasing this value may lead to unnecessary overhead.
recurse -- Whether to watch all subdirectories of every source path for
changes or only the paths provided.
restart_cb -- Callback invoked prior to restarting the process; allows
for any cleanup to occur prior to restarting. Returning anything other
than *None* in the callback will cancel the restart.
restart_func -- Function invoked to restart the process. This supplants
the default behavior of using *sys.executable* and *sys.argv*.
close_fds -- Whether all file descriptors other than *stdin*, *stdout*,
and *stderr* should be closed
An example of using a cleanup function prior to restarting:
>>> def cleanup():
... pass
>>> import lazarus
>>> lazarus.custom(os.curdir, restart_cb=cleanup)
>>> lazarus.stop()
An example of avoiding restarts when any ``__main__.py`` changes:
>>> def skip_main(event):
... if event.src_path == '__main__.py':
... return False
... return True
>>> import lazarus
>>> lazarus.custom(os.curdir, event_cb=skip_main)
>>> lazarus.stop()
'''
if _active:
msg = 'lazarus is already active'
raise RuntimeWarning(msg)
if restart_cb and not callable(restart_cb):
msg = 'restart_cb keyword argument is not callable'
raise TypeError(msg)
if restart_func and not callable(restart_func):
msg = 'restart_func keyword argument is not callable'
raise TypeError(msg)
global _close_fds
_close_fds = close_fds
try:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
except ImportError as ie:
msg = 'no watchdog support (%s)' % str(ie)
raise RuntimeError(msg)
class _Handler(FileSystemEventHandler):
def __init__(self):
self.active = True
def dispatch(self, event):
if not self.active:
return
super(_Handler, self).dispatch(event)
def all_events(self, event):
# if caller wants event_cb control, defer _restart logic to them
# (caller decides whether this is a restart event)
if event_cb:
if event_cb(event):
cancelled = _restart()
if not cancelled:
self.active = False
# use default is_restart_event logic
elif is_restart_event(event):
cancelled = _restart()
if not cancelled:
self.active = False
self.active = False
def on_created(self, event):
self.all_events(event)
def on_deleted(self, event):
self.all_events(event)
def on_modified(self, event):
self.all_events(event)
def on_moved(self, event):
self.all_events(event)
global _observer
kwargs = {'timeout': poll_interval}
_observer = Observer(**kwargs)
global _restart_cb
_restart_cb = restart_cb
handler = _Handler()
srcpaths = _as_list(srcpaths)
kwargs = {}
if recurse:
kwargs['recursive'] = True
for srcpath in srcpaths:
_observer.schedule(handler, srcpath, **kwargs)
_activate()
_observer.start()
|
[
"def",
"custom",
"(",
"srcpaths",
",",
"event_cb",
"=",
"None",
",",
"poll_interval",
"=",
"1",
",",
"recurse",
"=",
"True",
",",
"restart_cb",
"=",
"None",
",",
"restart_func",
"=",
"None",
",",
"close_fds",
"=",
"True",
")",
":",
"if",
"_active",
":",
"msg",
"=",
"'lazarus is already active'",
"raise",
"RuntimeWarning",
"(",
"msg",
")",
"if",
"restart_cb",
"and",
"not",
"callable",
"(",
"restart_cb",
")",
":",
"msg",
"=",
"'restart_cb keyword argument is not callable'",
"raise",
"TypeError",
"(",
"msg",
")",
"if",
"restart_func",
"and",
"not",
"callable",
"(",
"restart_func",
")",
":",
"msg",
"=",
"'restart_func keyword argument is not callable'",
"raise",
"TypeError",
"(",
"msg",
")",
"global",
"_close_fds",
"_close_fds",
"=",
"close_fds",
"try",
":",
"from",
"watchdog",
".",
"observers",
"import",
"Observer",
"from",
"watchdog",
".",
"events",
"import",
"FileSystemEventHandler",
"except",
"ImportError",
"as",
"ie",
":",
"msg",
"=",
"'no watchdog support (%s)'",
"%",
"str",
"(",
"ie",
")",
"raise",
"RuntimeError",
"(",
"msg",
")",
"class",
"_Handler",
"(",
"FileSystemEventHandler",
")",
":",
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"active",
"=",
"True",
"def",
"dispatch",
"(",
"self",
",",
"event",
")",
":",
"if",
"not",
"self",
".",
"active",
":",
"return",
"super",
"(",
"_Handler",
",",
"self",
")",
".",
"dispatch",
"(",
"event",
")",
"def",
"all_events",
"(",
"self",
",",
"event",
")",
":",
"# if caller wants event_cb control, defer _restart logic to them",
"# (caller decides whether this is a restart event)",
"if",
"event_cb",
":",
"if",
"event_cb",
"(",
"event",
")",
":",
"cancelled",
"=",
"_restart",
"(",
")",
"if",
"not",
"cancelled",
":",
"self",
".",
"active",
"=",
"False",
"# use default is_restart_event logic",
"elif",
"is_restart_event",
"(",
"event",
")",
":",
"cancelled",
"=",
"_restart",
"(",
")",
"if",
"not",
"cancelled",
":",
"self",
".",
"active",
"=",
"False",
"self",
".",
"active",
"=",
"False",
"def",
"on_created",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"def",
"on_deleted",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"def",
"on_modified",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"def",
"on_moved",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"all_events",
"(",
"event",
")",
"global",
"_observer",
"kwargs",
"=",
"{",
"'timeout'",
":",
"poll_interval",
"}",
"_observer",
"=",
"Observer",
"(",
"*",
"*",
"kwargs",
")",
"global",
"_restart_cb",
"_restart_cb",
"=",
"restart_cb",
"handler",
"=",
"_Handler",
"(",
")",
"srcpaths",
"=",
"_as_list",
"(",
"srcpaths",
")",
"kwargs",
"=",
"{",
"}",
"if",
"recurse",
":",
"kwargs",
"[",
"'recursive'",
"]",
"=",
"True",
"for",
"srcpath",
"in",
"srcpaths",
":",
"_observer",
".",
"schedule",
"(",
"handler",
",",
"srcpath",
",",
"*",
"*",
"kwargs",
")",
"_activate",
"(",
")",
"_observer",
".",
"start",
"(",
")"
] |
Sets up lazarus in custom mode.
See the :py:func:`default` function for a simpler mode of use.
The custom mode of lazarus is to watch all modules rooted at any of the
source paths provided for changes and restart when they take place.
Keyword arguments:
event_cb -- Callback invoked when a file rooted at a source path
changes. Without specifying an event callback, changes to any module
rooted at a source path will trigger a restart.
poll_interval -- Rate at which changes will be detected. The default
value of ``1`` means it may take up to one second to detect changes.
Decreasing this value may lead to unnecessary overhead.
recurse -- Whether to watch all subdirectories of every source path for
changes or only the paths provided.
restart_cb -- Callback invoked prior to restarting the process; allows
for any cleanup to occur prior to restarting. Returning anything other
than *None* in the callback will cancel the restart.
restart_func -- Function invoked to restart the process. This supplants
the default behavior of using *sys.executable* and *sys.argv*.
close_fds -- Whether all file descriptors other than *stdin*, *stdout*,
and *stderr* should be closed
An example of using a cleanup function prior to restarting:
>>> def cleanup():
... pass
>>> import lazarus
>>> lazarus.custom(os.curdir, restart_cb=cleanup)
>>> lazarus.stop()
An example of avoiding restarts when any ``__main__.py`` changes:
>>> def skip_main(event):
... if event.src_path == '__main__.py':
... return False
... return True
>>> import lazarus
>>> lazarus.custom(os.curdir, event_cb=skip_main)
>>> lazarus.stop()
|
[
"Sets",
"up",
"lazarus",
"in",
"custom",
"mode",
"."
] |
b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa
|
https://github.com/formwork-io/lazarus/blob/b2b6120fe06d69c23b4f41d55b6d71860a9fdeaa/lazarus/__init__.py#L213-L337
|
241,142
|
0compute/xtraceback
|
xtraceback/xtraceback.py
|
XTraceback.tty_stream
|
def tty_stream(self):
"""
Whether or not our stream is a tty
"""
return hasattr(self.options.stream, "isatty") \
and self.options.stream.isatty()
|
python
|
def tty_stream(self):
"""
Whether or not our stream is a tty
"""
return hasattr(self.options.stream, "isatty") \
and self.options.stream.isatty()
|
[
"def",
"tty_stream",
"(",
"self",
")",
":",
"return",
"hasattr",
"(",
"self",
".",
"options",
".",
"stream",
",",
"\"isatty\"",
")",
"and",
"self",
".",
"options",
".",
"stream",
".",
"isatty",
"(",
")"
] |
Whether or not our stream is a tty
|
[
"Whether",
"or",
"not",
"our",
"stream",
"is",
"a",
"tty"
] |
5f4ae11cf21e6eea830d79aed66d3cd91bd013cd
|
https://github.com/0compute/xtraceback/blob/5f4ae11cf21e6eea830d79aed66d3cd91bd013cd/xtraceback/xtraceback.py#L107-L112
|
241,143
|
0compute/xtraceback
|
xtraceback/xtraceback.py
|
XTraceback.color
|
def color(self):
"""
Whether or not color should be output
"""
return self.tty_stream if self.options.color is None \
else self.options.color
|
python
|
def color(self):
"""
Whether or not color should be output
"""
return self.tty_stream if self.options.color is None \
else self.options.color
|
[
"def",
"color",
"(",
"self",
")",
":",
"return",
"self",
".",
"tty_stream",
"if",
"self",
".",
"options",
".",
"color",
"is",
"None",
"else",
"self",
".",
"options",
".",
"color"
] |
Whether or not color should be output
|
[
"Whether",
"or",
"not",
"color",
"should",
"be",
"output"
] |
5f4ae11cf21e6eea830d79aed66d3cd91bd013cd
|
https://github.com/0compute/xtraceback/blob/5f4ae11cf21e6eea830d79aed66d3cd91bd013cd/xtraceback/xtraceback.py#L115-L120
|
241,144
|
bitlabstudio/django-unshorten
|
unshorten/decorators.py
|
api_auth
|
def api_auth(func):
"""
If the user is not logged in, this decorator looks for basic HTTP auth
data in the request header.
"""
@wraps(func)
def _decorator(request, *args, **kwargs):
authentication = APIAuthentication(request)
if authentication.authenticate():
return func(request, *args, **kwargs)
raise Http404
return _decorator
|
python
|
def api_auth(func):
"""
If the user is not logged in, this decorator looks for basic HTTP auth
data in the request header.
"""
@wraps(func)
def _decorator(request, *args, **kwargs):
authentication = APIAuthentication(request)
if authentication.authenticate():
return func(request, *args, **kwargs)
raise Http404
return _decorator
|
[
"def",
"api_auth",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"_decorator",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"authentication",
"=",
"APIAuthentication",
"(",
"request",
")",
"if",
"authentication",
".",
"authenticate",
"(",
")",
":",
"return",
"func",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"raise",
"Http404",
"return",
"_decorator"
] |
If the user is not logged in, this decorator looks for basic HTTP auth
data in the request header.
|
[
"If",
"the",
"user",
"is",
"not",
"logged",
"in",
"this",
"decorator",
"looks",
"for",
"basic",
"HTTP",
"auth",
"data",
"in",
"the",
"request",
"header",
"."
] |
6d184de908bb9df3aad5ac3fd9732d976afb6953
|
https://github.com/bitlabstudio/django-unshorten/blob/6d184de908bb9df3aad5ac3fd9732d976afb6953/unshorten/decorators.py#L9-L21
|
241,145
|
biocore/mustached-octo-ironman
|
moi/job.py
|
_status_change
|
def _status_change(id, new_status):
"""Update the status of a job
The status associated with the id is updated, an update command is
issued to the job's pubsub, and and the old status is returned.
Parameters
----------
id : str
The job ID
new_status : str
The status change
Returns
-------
str
The old status
"""
job_info = json.loads(r_client.get(id))
old_status = job_info['status']
job_info['status'] = new_status
_deposit_payload(job_info)
return old_status
|
python
|
def _status_change(id, new_status):
"""Update the status of a job
The status associated with the id is updated, an update command is
issued to the job's pubsub, and and the old status is returned.
Parameters
----------
id : str
The job ID
new_status : str
The status change
Returns
-------
str
The old status
"""
job_info = json.loads(r_client.get(id))
old_status = job_info['status']
job_info['status'] = new_status
_deposit_payload(job_info)
return old_status
|
[
"def",
"_status_change",
"(",
"id",
",",
"new_status",
")",
":",
"job_info",
"=",
"json",
".",
"loads",
"(",
"r_client",
".",
"get",
"(",
"id",
")",
")",
"old_status",
"=",
"job_info",
"[",
"'status'",
"]",
"job_info",
"[",
"'status'",
"]",
"=",
"new_status",
"_deposit_payload",
"(",
"job_info",
")",
"return",
"old_status"
] |
Update the status of a job
The status associated with the id is updated, an update command is
issued to the job's pubsub, and and the old status is returned.
Parameters
----------
id : str
The job ID
new_status : str
The status change
Returns
-------
str
The old status
|
[
"Update",
"the",
"status",
"of",
"a",
"job"
] |
54128d8fdff327e1b7ffd9bb77bf38c3df9526d7
|
https://github.com/biocore/mustached-octo-ironman/blob/54128d8fdff327e1b7ffd9bb77bf38c3df9526d7/moi/job.py#L57-L80
|
241,146
|
biocore/mustached-octo-ironman
|
moi/job.py
|
_deposit_payload
|
def _deposit_payload(to_deposit):
"""Store job info, and publish an update
Parameters
----------
to_deposit : dict
The job info
"""
pubsub = to_deposit['pubsub']
id = to_deposit['id']
with r_client.pipeline() as pipe:
pipe.set(id, json.dumps(to_deposit), ex=REDIS_KEY_TIMEOUT)
pipe.publish(pubsub, json.dumps({"update": [id]}))
pipe.execute()
|
python
|
def _deposit_payload(to_deposit):
"""Store job info, and publish an update
Parameters
----------
to_deposit : dict
The job info
"""
pubsub = to_deposit['pubsub']
id = to_deposit['id']
with r_client.pipeline() as pipe:
pipe.set(id, json.dumps(to_deposit), ex=REDIS_KEY_TIMEOUT)
pipe.publish(pubsub, json.dumps({"update": [id]}))
pipe.execute()
|
[
"def",
"_deposit_payload",
"(",
"to_deposit",
")",
":",
"pubsub",
"=",
"to_deposit",
"[",
"'pubsub'",
"]",
"id",
"=",
"to_deposit",
"[",
"'id'",
"]",
"with",
"r_client",
".",
"pipeline",
"(",
")",
"as",
"pipe",
":",
"pipe",
".",
"set",
"(",
"id",
",",
"json",
".",
"dumps",
"(",
"to_deposit",
")",
",",
"ex",
"=",
"REDIS_KEY_TIMEOUT",
")",
"pipe",
".",
"publish",
"(",
"pubsub",
",",
"json",
".",
"dumps",
"(",
"{",
"\"update\"",
":",
"[",
"id",
"]",
"}",
")",
")",
"pipe",
".",
"execute",
"(",
")"
] |
Store job info, and publish an update
Parameters
----------
to_deposit : dict
The job info
|
[
"Store",
"job",
"info",
"and",
"publish",
"an",
"update"
] |
54128d8fdff327e1b7ffd9bb77bf38c3df9526d7
|
https://github.com/biocore/mustached-octo-ironman/blob/54128d8fdff327e1b7ffd9bb77bf38c3df9526d7/moi/job.py#L83-L98
|
241,147
|
biocore/mustached-octo-ironman
|
moi/job.py
|
_redis_wrap
|
def _redis_wrap(job_info, func, *args, **kwargs):
"""Wrap something to compute
The function that will have available, via kwargs['moi_update_status'], a
method to modify the job status. This method can be used within the
executing function by:
old_status = kwargs['moi_update_status']('my new status')
Parameters
----------
job_info : dict
Redis job details
func : function
A function to execute. This function must accept ``**kwargs``, and will
have ``moi_update_status``, ``moi_context`` and ``moi_parent_id``
available.
Raises
------
Exception
If the function called raises, that exception is propagated.
Returns
-------
Anything the function executed returns.
"""
status_changer = partial(_status_change, job_info['id'])
kwargs['moi_update_status'] = status_changer
kwargs['moi_context'] = job_info['context']
kwargs['moi_parent_id'] = job_info['parent']
job_info['status'] = 'Running'
job_info['date_start'] = str(datetime.now())
_deposit_payload(job_info)
caught = None
try:
result = func(*args, **kwargs)
job_info['status'] = 'Success'
except Exception as e:
result = traceback.format_exception(*sys.exc_info())
job_info['status'] = 'Failed'
caught = e
finally:
job_info['result'] = result
job_info['date_end'] = str(datetime.now())
_deposit_payload(job_info)
if caught is None:
return result
else:
raise caught
|
python
|
def _redis_wrap(job_info, func, *args, **kwargs):
"""Wrap something to compute
The function that will have available, via kwargs['moi_update_status'], a
method to modify the job status. This method can be used within the
executing function by:
old_status = kwargs['moi_update_status']('my new status')
Parameters
----------
job_info : dict
Redis job details
func : function
A function to execute. This function must accept ``**kwargs``, and will
have ``moi_update_status``, ``moi_context`` and ``moi_parent_id``
available.
Raises
------
Exception
If the function called raises, that exception is propagated.
Returns
-------
Anything the function executed returns.
"""
status_changer = partial(_status_change, job_info['id'])
kwargs['moi_update_status'] = status_changer
kwargs['moi_context'] = job_info['context']
kwargs['moi_parent_id'] = job_info['parent']
job_info['status'] = 'Running'
job_info['date_start'] = str(datetime.now())
_deposit_payload(job_info)
caught = None
try:
result = func(*args, **kwargs)
job_info['status'] = 'Success'
except Exception as e:
result = traceback.format_exception(*sys.exc_info())
job_info['status'] = 'Failed'
caught = e
finally:
job_info['result'] = result
job_info['date_end'] = str(datetime.now())
_deposit_payload(job_info)
if caught is None:
return result
else:
raise caught
|
[
"def",
"_redis_wrap",
"(",
"job_info",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"status_changer",
"=",
"partial",
"(",
"_status_change",
",",
"job_info",
"[",
"'id'",
"]",
")",
"kwargs",
"[",
"'moi_update_status'",
"]",
"=",
"status_changer",
"kwargs",
"[",
"'moi_context'",
"]",
"=",
"job_info",
"[",
"'context'",
"]",
"kwargs",
"[",
"'moi_parent_id'",
"]",
"=",
"job_info",
"[",
"'parent'",
"]",
"job_info",
"[",
"'status'",
"]",
"=",
"'Running'",
"job_info",
"[",
"'date_start'",
"]",
"=",
"str",
"(",
"datetime",
".",
"now",
"(",
")",
")",
"_deposit_payload",
"(",
"job_info",
")",
"caught",
"=",
"None",
"try",
":",
"result",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"job_info",
"[",
"'status'",
"]",
"=",
"'Success'",
"except",
"Exception",
"as",
"e",
":",
"result",
"=",
"traceback",
".",
"format_exception",
"(",
"*",
"sys",
".",
"exc_info",
"(",
")",
")",
"job_info",
"[",
"'status'",
"]",
"=",
"'Failed'",
"caught",
"=",
"e",
"finally",
":",
"job_info",
"[",
"'result'",
"]",
"=",
"result",
"job_info",
"[",
"'date_end'",
"]",
"=",
"str",
"(",
"datetime",
".",
"now",
"(",
")",
")",
"_deposit_payload",
"(",
"job_info",
")",
"if",
"caught",
"is",
"None",
":",
"return",
"result",
"else",
":",
"raise",
"caught"
] |
Wrap something to compute
The function that will have available, via kwargs['moi_update_status'], a
method to modify the job status. This method can be used within the
executing function by:
old_status = kwargs['moi_update_status']('my new status')
Parameters
----------
job_info : dict
Redis job details
func : function
A function to execute. This function must accept ``**kwargs``, and will
have ``moi_update_status``, ``moi_context`` and ``moi_parent_id``
available.
Raises
------
Exception
If the function called raises, that exception is propagated.
Returns
-------
Anything the function executed returns.
|
[
"Wrap",
"something",
"to",
"compute"
] |
54128d8fdff327e1b7ffd9bb77bf38c3df9526d7
|
https://github.com/biocore/mustached-octo-ironman/blob/54128d8fdff327e1b7ffd9bb77bf38c3df9526d7/moi/job.py#L101-L154
|
241,148
|
biocore/mustached-octo-ironman
|
moi/job.py
|
submit
|
def submit(ctx_name, parent_id, name, url, func, *args, **kwargs):
"""Submit through a context
Parameters
----------
ctx_name : str
The name of the context to submit through
parent_id : str
The ID of the group that the job is a part of.
name : str
The name of the job
url : str
The handler that can take the results (e.g., /beta_diversity/)
func : function
The function to execute. Any returns from this function will be
serialized and deposited into Redis using the uuid for a key. This
function should raise if the method fails.
args : tuple or None
Any args for ``func``
kwargs : dict or None
Any kwargs for ``func``
Returns
-------
tuple, (str, str, AsyncResult)
The job ID, parent ID and the IPython's AsyncResult object of the job
"""
if isinstance(ctx_name, Context):
ctx = ctx_name
else:
ctx = ctxs.get(ctx_name, ctxs[ctx_default])
return _submit(ctx, parent_id, name, url, func, *args, **kwargs)
|
python
|
def submit(ctx_name, parent_id, name, url, func, *args, **kwargs):
"""Submit through a context
Parameters
----------
ctx_name : str
The name of the context to submit through
parent_id : str
The ID of the group that the job is a part of.
name : str
The name of the job
url : str
The handler that can take the results (e.g., /beta_diversity/)
func : function
The function to execute. Any returns from this function will be
serialized and deposited into Redis using the uuid for a key. This
function should raise if the method fails.
args : tuple or None
Any args for ``func``
kwargs : dict or None
Any kwargs for ``func``
Returns
-------
tuple, (str, str, AsyncResult)
The job ID, parent ID and the IPython's AsyncResult object of the job
"""
if isinstance(ctx_name, Context):
ctx = ctx_name
else:
ctx = ctxs.get(ctx_name, ctxs[ctx_default])
return _submit(ctx, parent_id, name, url, func, *args, **kwargs)
|
[
"def",
"submit",
"(",
"ctx_name",
",",
"parent_id",
",",
"name",
",",
"url",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"ctx_name",
",",
"Context",
")",
":",
"ctx",
"=",
"ctx_name",
"else",
":",
"ctx",
"=",
"ctxs",
".",
"get",
"(",
"ctx_name",
",",
"ctxs",
"[",
"ctx_default",
"]",
")",
"return",
"_submit",
"(",
"ctx",
",",
"parent_id",
",",
"name",
",",
"url",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Submit through a context
Parameters
----------
ctx_name : str
The name of the context to submit through
parent_id : str
The ID of the group that the job is a part of.
name : str
The name of the job
url : str
The handler that can take the results (e.g., /beta_diversity/)
func : function
The function to execute. Any returns from this function will be
serialized and deposited into Redis using the uuid for a key. This
function should raise if the method fails.
args : tuple or None
Any args for ``func``
kwargs : dict or None
Any kwargs for ``func``
Returns
-------
tuple, (str, str, AsyncResult)
The job ID, parent ID and the IPython's AsyncResult object of the job
|
[
"Submit",
"through",
"a",
"context"
] |
54128d8fdff327e1b7ffd9bb77bf38c3df9526d7
|
https://github.com/biocore/mustached-octo-ironman/blob/54128d8fdff327e1b7ffd9bb77bf38c3df9526d7/moi/job.py#L157-L188
|
241,149
|
biocore/mustached-octo-ironman
|
moi/job.py
|
_submit
|
def _submit(ctx, parent_id, name, url, func, *args, **kwargs):
"""Submit a function to a cluster
Parameters
----------
parent_id : str
The ID of the group that the job is a part of.
name : str
The name of the job
url : str
The handler that can take the results (e.g., /beta_diversity/)
func : function
The function to execute. Any returns from this function will be
serialized and deposited into Redis using the uuid for a key. This
function should raise if the method fails.
args : tuple or None
Any args for ``func``
kwargs : dict or None
Any kwargs for ``func``
Returns
-------
tuple, (str, str, AsyncResult)
The job ID, parent ID and the IPython's AsyncResult object of the job
"""
parent_info = r_client.get(parent_id)
if parent_info is None:
parent_info = create_info('unnamed', 'group', id=parent_id)
parent_id = parent_info['id']
r_client.set(parent_id, json.dumps(parent_info))
parent_pubsub_key = parent_id + ':pubsub'
job_info = create_info(name, 'job', url=url, parent=parent_id,
context=ctx.name, store=True)
job_info['status'] = 'Queued'
job_id = job_info['id']
with r_client.pipeline() as pipe:
pipe.set(job_id, json.dumps(job_info))
pipe.publish(parent_pubsub_key, json.dumps({'add': [job_id]}))
pipe.execute()
ar = ctx.bv.apply_async(_redis_wrap, job_info, func, *args, **kwargs)
return job_id, parent_id, ar
|
python
|
def _submit(ctx, parent_id, name, url, func, *args, **kwargs):
"""Submit a function to a cluster
Parameters
----------
parent_id : str
The ID of the group that the job is a part of.
name : str
The name of the job
url : str
The handler that can take the results (e.g., /beta_diversity/)
func : function
The function to execute. Any returns from this function will be
serialized and deposited into Redis using the uuid for a key. This
function should raise if the method fails.
args : tuple or None
Any args for ``func``
kwargs : dict or None
Any kwargs for ``func``
Returns
-------
tuple, (str, str, AsyncResult)
The job ID, parent ID and the IPython's AsyncResult object of the job
"""
parent_info = r_client.get(parent_id)
if parent_info is None:
parent_info = create_info('unnamed', 'group', id=parent_id)
parent_id = parent_info['id']
r_client.set(parent_id, json.dumps(parent_info))
parent_pubsub_key = parent_id + ':pubsub'
job_info = create_info(name, 'job', url=url, parent=parent_id,
context=ctx.name, store=True)
job_info['status'] = 'Queued'
job_id = job_info['id']
with r_client.pipeline() as pipe:
pipe.set(job_id, json.dumps(job_info))
pipe.publish(parent_pubsub_key, json.dumps({'add': [job_id]}))
pipe.execute()
ar = ctx.bv.apply_async(_redis_wrap, job_info, func, *args, **kwargs)
return job_id, parent_id, ar
|
[
"def",
"_submit",
"(",
"ctx",
",",
"parent_id",
",",
"name",
",",
"url",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"parent_info",
"=",
"r_client",
".",
"get",
"(",
"parent_id",
")",
"if",
"parent_info",
"is",
"None",
":",
"parent_info",
"=",
"create_info",
"(",
"'unnamed'",
",",
"'group'",
",",
"id",
"=",
"parent_id",
")",
"parent_id",
"=",
"parent_info",
"[",
"'id'",
"]",
"r_client",
".",
"set",
"(",
"parent_id",
",",
"json",
".",
"dumps",
"(",
"parent_info",
")",
")",
"parent_pubsub_key",
"=",
"parent_id",
"+",
"':pubsub'",
"job_info",
"=",
"create_info",
"(",
"name",
",",
"'job'",
",",
"url",
"=",
"url",
",",
"parent",
"=",
"parent_id",
",",
"context",
"=",
"ctx",
".",
"name",
",",
"store",
"=",
"True",
")",
"job_info",
"[",
"'status'",
"]",
"=",
"'Queued'",
"job_id",
"=",
"job_info",
"[",
"'id'",
"]",
"with",
"r_client",
".",
"pipeline",
"(",
")",
"as",
"pipe",
":",
"pipe",
".",
"set",
"(",
"job_id",
",",
"json",
".",
"dumps",
"(",
"job_info",
")",
")",
"pipe",
".",
"publish",
"(",
"parent_pubsub_key",
",",
"json",
".",
"dumps",
"(",
"{",
"'add'",
":",
"[",
"job_id",
"]",
"}",
")",
")",
"pipe",
".",
"execute",
"(",
")",
"ar",
"=",
"ctx",
".",
"bv",
".",
"apply_async",
"(",
"_redis_wrap",
",",
"job_info",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"job_id",
",",
"parent_id",
",",
"ar"
] |
Submit a function to a cluster
Parameters
----------
parent_id : str
The ID of the group that the job is a part of.
name : str
The name of the job
url : str
The handler that can take the results (e.g., /beta_diversity/)
func : function
The function to execute. Any returns from this function will be
serialized and deposited into Redis using the uuid for a key. This
function should raise if the method fails.
args : tuple or None
Any args for ``func``
kwargs : dict or None
Any kwargs for ``func``
Returns
-------
tuple, (str, str, AsyncResult)
The job ID, parent ID and the IPython's AsyncResult object of the job
|
[
"Submit",
"a",
"function",
"to",
"a",
"cluster"
] |
54128d8fdff327e1b7ffd9bb77bf38c3df9526d7
|
https://github.com/biocore/mustached-octo-ironman/blob/54128d8fdff327e1b7ffd9bb77bf38c3df9526d7/moi/job.py#L191-L235
|
241,150
|
andreycizov/python-xrpc
|
xrpc/trace.py
|
trc
|
def trc(postfix: Optional[str] = None, *, depth=1) -> logging.Logger:
"""
Automatically generate a logger from the calling function
:param postfix: append another logger name on top this
:param depth: depth of the call stack at which to capture the caller name
:return: instance of a logger with a correct path to a current caller
"""
x = inspect.stack()[depth]
code = x[0].f_code
func = [obj for obj in gc.get_referrers(code) if inspect.isfunction(obj)][0]
mod = inspect.getmodule(x.frame)
parts = (mod.__name__, func.__qualname__)
if postfix:
parts += (postfix,)
logger_name = '.'.join(parts)
return logging.getLogger(logger_name)
|
python
|
def trc(postfix: Optional[str] = None, *, depth=1) -> logging.Logger:
"""
Automatically generate a logger from the calling function
:param postfix: append another logger name on top this
:param depth: depth of the call stack at which to capture the caller name
:return: instance of a logger with a correct path to a current caller
"""
x = inspect.stack()[depth]
code = x[0].f_code
func = [obj for obj in gc.get_referrers(code) if inspect.isfunction(obj)][0]
mod = inspect.getmodule(x.frame)
parts = (mod.__name__, func.__qualname__)
if postfix:
parts += (postfix,)
logger_name = '.'.join(parts)
return logging.getLogger(logger_name)
|
[
"def",
"trc",
"(",
"postfix",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"*",
",",
"depth",
"=",
"1",
")",
"->",
"logging",
".",
"Logger",
":",
"x",
"=",
"inspect",
".",
"stack",
"(",
")",
"[",
"depth",
"]",
"code",
"=",
"x",
"[",
"0",
"]",
".",
"f_code",
"func",
"=",
"[",
"obj",
"for",
"obj",
"in",
"gc",
".",
"get_referrers",
"(",
"code",
")",
"if",
"inspect",
".",
"isfunction",
"(",
"obj",
")",
"]",
"[",
"0",
"]",
"mod",
"=",
"inspect",
".",
"getmodule",
"(",
"x",
".",
"frame",
")",
"parts",
"=",
"(",
"mod",
".",
"__name__",
",",
"func",
".",
"__qualname__",
")",
"if",
"postfix",
":",
"parts",
"+=",
"(",
"postfix",
",",
")",
"logger_name",
"=",
"'.'",
".",
"join",
"(",
"parts",
")",
"return",
"logging",
".",
"getLogger",
"(",
"logger_name",
")"
] |
Automatically generate a logger from the calling function
:param postfix: append another logger name on top this
:param depth: depth of the call stack at which to capture the caller name
:return: instance of a logger with a correct path to a current caller
|
[
"Automatically",
"generate",
"a",
"logger",
"from",
"the",
"calling",
"function"
] |
4f916383cda7de3272962f3ba07a64f7ec451098
|
https://github.com/andreycizov/python-xrpc/blob/4f916383cda7de3272962f3ba07a64f7ec451098/xrpc/trace.py#L32-L55
|
241,151
|
mdeous/fatbotslim
|
fatbotslim/irc/tcp.py
|
TCP._recv_loop
|
def _recv_loop(self):
"""
Waits for data forever and feeds the input queue.
"""
while True:
try:
data = self._socket.recv(4096)
self._ibuffer += data
while '\r\n' in self._ibuffer:
line, self._ibuffer = self._ibuffer.split('\r\n', 1)
self.iqueue.put(line)
except Exception:
break
|
python
|
def _recv_loop(self):
"""
Waits for data forever and feeds the input queue.
"""
while True:
try:
data = self._socket.recv(4096)
self._ibuffer += data
while '\r\n' in self._ibuffer:
line, self._ibuffer = self._ibuffer.split('\r\n', 1)
self.iqueue.put(line)
except Exception:
break
|
[
"def",
"_recv_loop",
"(",
"self",
")",
":",
"while",
"True",
":",
"try",
":",
"data",
"=",
"self",
".",
"_socket",
".",
"recv",
"(",
"4096",
")",
"self",
".",
"_ibuffer",
"+=",
"data",
"while",
"'\\r\\n'",
"in",
"self",
".",
"_ibuffer",
":",
"line",
",",
"self",
".",
"_ibuffer",
"=",
"self",
".",
"_ibuffer",
".",
"split",
"(",
"'\\r\\n'",
",",
"1",
")",
"self",
".",
"iqueue",
".",
"put",
"(",
"line",
")",
"except",
"Exception",
":",
"break"
] |
Waits for data forever and feeds the input queue.
|
[
"Waits",
"for",
"data",
"forever",
"and",
"feeds",
"the",
"input",
"queue",
"."
] |
341595d24454a79caee23750eac271f9d0626c88
|
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/tcp.py#L73-L85
|
241,152
|
mdeous/fatbotslim
|
fatbotslim/irc/tcp.py
|
TCP._send_loop
|
def _send_loop(self):
"""
Waits for data in the output queue to send.
"""
while True:
try:
line = self.oqueue.get().splitlines()[0][:500]
self._obuffer += line + '\r\n'
while self._obuffer:
sent = self._socket.send(self._obuffer)
self._obuffer = self._obuffer[sent:]
except Exception:
break
|
python
|
def _send_loop(self):
"""
Waits for data in the output queue to send.
"""
while True:
try:
line = self.oqueue.get().splitlines()[0][:500]
self._obuffer += line + '\r\n'
while self._obuffer:
sent = self._socket.send(self._obuffer)
self._obuffer = self._obuffer[sent:]
except Exception:
break
|
[
"def",
"_send_loop",
"(",
"self",
")",
":",
"while",
"True",
":",
"try",
":",
"line",
"=",
"self",
".",
"oqueue",
".",
"get",
"(",
")",
".",
"splitlines",
"(",
")",
"[",
"0",
"]",
"[",
":",
"500",
"]",
"self",
".",
"_obuffer",
"+=",
"line",
"+",
"'\\r\\n'",
"while",
"self",
".",
"_obuffer",
":",
"sent",
"=",
"self",
".",
"_socket",
".",
"send",
"(",
"self",
".",
"_obuffer",
")",
"self",
".",
"_obuffer",
"=",
"self",
".",
"_obuffer",
"[",
"sent",
":",
"]",
"except",
"Exception",
":",
"break"
] |
Waits for data in the output queue to send.
|
[
"Waits",
"for",
"data",
"in",
"the",
"output",
"queue",
"to",
"send",
"."
] |
341595d24454a79caee23750eac271f9d0626c88
|
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/tcp.py#L87-L99
|
241,153
|
mdeous/fatbotslim
|
fatbotslim/irc/tcp.py
|
SSL._create_socket
|
def _create_socket(self):
"""
Creates a new SSL enabled socket and sets its timeout.
"""
log.warning('No certificate check is performed for SSL connections')
s = super(SSL, self)._create_socket()
return wrap_socket(s)
|
python
|
def _create_socket(self):
"""
Creates a new SSL enabled socket and sets its timeout.
"""
log.warning('No certificate check is performed for SSL connections')
s = super(SSL, self)._create_socket()
return wrap_socket(s)
|
[
"def",
"_create_socket",
"(",
"self",
")",
":",
"log",
".",
"warning",
"(",
"'No certificate check is performed for SSL connections'",
")",
"s",
"=",
"super",
"(",
"SSL",
",",
"self",
")",
".",
"_create_socket",
"(",
")",
"return",
"wrap_socket",
"(",
"s",
")"
] |
Creates a new SSL enabled socket and sets its timeout.
|
[
"Creates",
"a",
"new",
"SSL",
"enabled",
"socket",
"and",
"sets",
"its",
"timeout",
"."
] |
341595d24454a79caee23750eac271f9d0626c88
|
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/irc/tcp.py#L125-L131
|
241,154
|
openstack/stacktach-stackdistiller
|
stackdistiller/distiller.py
|
load_config
|
def load_config(filename):
"""Load the event definitions from yaml config file."""
logger.debug("Event Definitions configuration file: %s", filename)
with open(filename, 'r') as cf:
config = cf.read()
try:
events_config = yaml.safe_load(config)
except yaml.YAMLError as err:
if hasattr(err, 'problem_mark'):
mark = err.problem_mark
errmsg = ("Invalid YAML syntax in Event Definitions file "
"%(file)s at line: %(line)s, column: %(column)s."
% dict(file=filename,
line=mark.line + 1,
column=mark.column + 1))
else:
errmsg = ("YAML error reading Event Definitions file "
"%(file)s"
% dict(file=filename))
logger.error(errmsg)
raise
logger.info("Event Definitions: %s", events_config)
return events_config
|
python
|
def load_config(filename):
"""Load the event definitions from yaml config file."""
logger.debug("Event Definitions configuration file: %s", filename)
with open(filename, 'r') as cf:
config = cf.read()
try:
events_config = yaml.safe_load(config)
except yaml.YAMLError as err:
if hasattr(err, 'problem_mark'):
mark = err.problem_mark
errmsg = ("Invalid YAML syntax in Event Definitions file "
"%(file)s at line: %(line)s, column: %(column)s."
% dict(file=filename,
line=mark.line + 1,
column=mark.column + 1))
else:
errmsg = ("YAML error reading Event Definitions file "
"%(file)s"
% dict(file=filename))
logger.error(errmsg)
raise
logger.info("Event Definitions: %s", events_config)
return events_config
|
[
"def",
"load_config",
"(",
"filename",
")",
":",
"logger",
".",
"debug",
"(",
"\"Event Definitions configuration file: %s\"",
",",
"filename",
")",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"cf",
":",
"config",
"=",
"cf",
".",
"read",
"(",
")",
"try",
":",
"events_config",
"=",
"yaml",
".",
"safe_load",
"(",
"config",
")",
"except",
"yaml",
".",
"YAMLError",
"as",
"err",
":",
"if",
"hasattr",
"(",
"err",
",",
"'problem_mark'",
")",
":",
"mark",
"=",
"err",
".",
"problem_mark",
"errmsg",
"=",
"(",
"\"Invalid YAML syntax in Event Definitions file \"",
"\"%(file)s at line: %(line)s, column: %(column)s.\"",
"%",
"dict",
"(",
"file",
"=",
"filename",
",",
"line",
"=",
"mark",
".",
"line",
"+",
"1",
",",
"column",
"=",
"mark",
".",
"column",
"+",
"1",
")",
")",
"else",
":",
"errmsg",
"=",
"(",
"\"YAML error reading Event Definitions file \"",
"\"%(file)s\"",
"%",
"dict",
"(",
"file",
"=",
"filename",
")",
")",
"logger",
".",
"error",
"(",
"errmsg",
")",
"raise",
"logger",
".",
"info",
"(",
"\"Event Definitions: %s\"",
",",
"events_config",
")",
"return",
"events_config"
] |
Load the event definitions from yaml config file.
|
[
"Load",
"the",
"event",
"definitions",
"from",
"yaml",
"config",
"file",
"."
] |
38cc32994cc5411c3f7c76f31ef3ea8b3245e871
|
https://github.com/openstack/stacktach-stackdistiller/blob/38cc32994cc5411c3f7c76f31ef3ea8b3245e871/stackdistiller/distiller.py#L47-L72
|
241,155
|
openstack/stacktach-stackdistiller
|
stackdistiller/distiller.py
|
EventDefinition._extract_when
|
def _extract_when(body):
"""Extract the generated datetime from the notification."""
# NOTE: I am keeping the logic the same as it was in openstack
# code, However, *ALL* notifications should have a 'timestamp'
# field, it's part of the notification envelope spec. If this was
# put here because some openstack project is generating notifications
# without a timestamp, then that needs to be filed as a bug with the
# offending project (mdragon)
when = body.get('timestamp', body.get('_context_timestamp'))
if when:
return Datatype.datetime.convert(when)
return utcnow()
|
python
|
def _extract_when(body):
"""Extract the generated datetime from the notification."""
# NOTE: I am keeping the logic the same as it was in openstack
# code, However, *ALL* notifications should have a 'timestamp'
# field, it's part of the notification envelope spec. If this was
# put here because some openstack project is generating notifications
# without a timestamp, then that needs to be filed as a bug with the
# offending project (mdragon)
when = body.get('timestamp', body.get('_context_timestamp'))
if when:
return Datatype.datetime.convert(when)
return utcnow()
|
[
"def",
"_extract_when",
"(",
"body",
")",
":",
"# NOTE: I am keeping the logic the same as it was in openstack",
"# code, However, *ALL* notifications should have a 'timestamp'",
"# field, it's part of the notification envelope spec. If this was",
"# put here because some openstack project is generating notifications",
"# without a timestamp, then that needs to be filed as a bug with the",
"# offending project (mdragon)",
"when",
"=",
"body",
".",
"get",
"(",
"'timestamp'",
",",
"body",
".",
"get",
"(",
"'_context_timestamp'",
")",
")",
"if",
"when",
":",
"return",
"Datatype",
".",
"datetime",
".",
"convert",
"(",
"when",
")",
"return",
"utcnow",
"(",
")"
] |
Extract the generated datetime from the notification.
|
[
"Extract",
"the",
"generated",
"datetime",
"from",
"the",
"notification",
"."
] |
38cc32994cc5411c3f7c76f31ef3ea8b3245e871
|
https://github.com/openstack/stacktach-stackdistiller/blob/38cc32994cc5411c3f7c76f31ef3ea8b3245e871/stackdistiller/distiller.py#L259-L271
|
241,156
|
ErikBjare/pyzenobase
|
pyzenobase/zenobase_api.py
|
ZenobaseAPI.list_buckets
|
def list_buckets(self, offset=0, limit=100):
"""Limit breaks above 100"""
# TODO: If limit > 100, do multiple fetches
if limit > 100:
raise Exception("Zenobase can't handle limits over 100")
return self._get("/users/{}/buckets/?order=label&offset={}&limit={}".format(self.client_id, offset, limit))
|
python
|
def list_buckets(self, offset=0, limit=100):
"""Limit breaks above 100"""
# TODO: If limit > 100, do multiple fetches
if limit > 100:
raise Exception("Zenobase can't handle limits over 100")
return self._get("/users/{}/buckets/?order=label&offset={}&limit={}".format(self.client_id, offset, limit))
|
[
"def",
"list_buckets",
"(",
"self",
",",
"offset",
"=",
"0",
",",
"limit",
"=",
"100",
")",
":",
"# TODO: If limit > 100, do multiple fetches",
"if",
"limit",
">",
"100",
":",
"raise",
"Exception",
"(",
"\"Zenobase can't handle limits over 100\"",
")",
"return",
"self",
".",
"_get",
"(",
"\"/users/{}/buckets/?order=label&offset={}&limit={}\"",
".",
"format",
"(",
"self",
".",
"client_id",
",",
"offset",
",",
"limit",
")",
")"
] |
Limit breaks above 100
|
[
"Limit",
"breaks",
"above",
"100"
] |
eb0572c7441a350bf5578bc5287f3be53d32ea19
|
https://github.com/ErikBjare/pyzenobase/blob/eb0572c7441a350bf5578bc5287f3be53d32ea19/pyzenobase/zenobase_api.py#L54-L59
|
241,157
|
TkTech/pytextql
|
pytextql/core.py
|
_create_table
|
def _create_table(db, table_name, columns, overwrite=False):
"""
Create's `table_name` in `db` if it does not already exist,
and adds any missing columns.
:param db: An active SQLite3 Connection.
:param table_name: The (unicode) name of the table to setup.
:param columns: An iterable of column names to ensure exist.
:param overwrite: If ``True`` and the table already exists,
overwrite it.
"""
with contextlib.closing(db.cursor()) as c:
table_exists = c.execute((
u'SELECT EXISTS(SELECT 1 FROM sqlite_master'
u' WHERE type="table" and name=?) as "exists"'
), (table_name,)).fetchone()
if table_exists['exists']:
if not overwrite:
raise TableExists()
c.execute(u'DROP TABLE IF EXISTS "{table_name}"'.format(
table_name=table_name
))
# Create the table if it doesn't already exist.
c.execute((
u'CREATE TABLE IF NOT EXISTS "{table_name}"'
u'(id INTEGER PRIMARY KEY AUTOINCREMENT);'
).format(table_name=table_name))
# Cache the columns that are already there so we create only
# those that are missing.
c.execute(u'PRAGMA table_info("{table_name}");'.format(
table_name=table_name
))
results = c.fetchall()
existing_columns = set(r['name'] for r in results)
for header in columns:
if header in existing_columns:
continue
# In SQLite3, new columns can only be appended.
c.execute((
u'ALTER TABLE "{table_name}"'
u' ADD COLUMN "{col}" TEXT;'
).format(
table_name=table_name,
col=header
))
# Typically, table modifications occur outside of a
# transaction so this is just a precaution.
db.commit()
|
python
|
def _create_table(db, table_name, columns, overwrite=False):
"""
Create's `table_name` in `db` if it does not already exist,
and adds any missing columns.
:param db: An active SQLite3 Connection.
:param table_name: The (unicode) name of the table to setup.
:param columns: An iterable of column names to ensure exist.
:param overwrite: If ``True`` and the table already exists,
overwrite it.
"""
with contextlib.closing(db.cursor()) as c:
table_exists = c.execute((
u'SELECT EXISTS(SELECT 1 FROM sqlite_master'
u' WHERE type="table" and name=?) as "exists"'
), (table_name,)).fetchone()
if table_exists['exists']:
if not overwrite:
raise TableExists()
c.execute(u'DROP TABLE IF EXISTS "{table_name}"'.format(
table_name=table_name
))
# Create the table if it doesn't already exist.
c.execute((
u'CREATE TABLE IF NOT EXISTS "{table_name}"'
u'(id INTEGER PRIMARY KEY AUTOINCREMENT);'
).format(table_name=table_name))
# Cache the columns that are already there so we create only
# those that are missing.
c.execute(u'PRAGMA table_info("{table_name}");'.format(
table_name=table_name
))
results = c.fetchall()
existing_columns = set(r['name'] for r in results)
for header in columns:
if header in existing_columns:
continue
# In SQLite3, new columns can only be appended.
c.execute((
u'ALTER TABLE "{table_name}"'
u' ADD COLUMN "{col}" TEXT;'
).format(
table_name=table_name,
col=header
))
# Typically, table modifications occur outside of a
# transaction so this is just a precaution.
db.commit()
|
[
"def",
"_create_table",
"(",
"db",
",",
"table_name",
",",
"columns",
",",
"overwrite",
"=",
"False",
")",
":",
"with",
"contextlib",
".",
"closing",
"(",
"db",
".",
"cursor",
"(",
")",
")",
"as",
"c",
":",
"table_exists",
"=",
"c",
".",
"execute",
"(",
"(",
"u'SELECT EXISTS(SELECT 1 FROM sqlite_master'",
"u' WHERE type=\"table\" and name=?) as \"exists\"'",
")",
",",
"(",
"table_name",
",",
")",
")",
".",
"fetchone",
"(",
")",
"if",
"table_exists",
"[",
"'exists'",
"]",
":",
"if",
"not",
"overwrite",
":",
"raise",
"TableExists",
"(",
")",
"c",
".",
"execute",
"(",
"u'DROP TABLE IF EXISTS \"{table_name}\"'",
".",
"format",
"(",
"table_name",
"=",
"table_name",
")",
")",
"# Create the table if it doesn't already exist.",
"c",
".",
"execute",
"(",
"(",
"u'CREATE TABLE IF NOT EXISTS \"{table_name}\"'",
"u'(id INTEGER PRIMARY KEY AUTOINCREMENT);'",
")",
".",
"format",
"(",
"table_name",
"=",
"table_name",
")",
")",
"# Cache the columns that are already there so we create only",
"# those that are missing.",
"c",
".",
"execute",
"(",
"u'PRAGMA table_info(\"{table_name}\");'",
".",
"format",
"(",
"table_name",
"=",
"table_name",
")",
")",
"results",
"=",
"c",
".",
"fetchall",
"(",
")",
"existing_columns",
"=",
"set",
"(",
"r",
"[",
"'name'",
"]",
"for",
"r",
"in",
"results",
")",
"for",
"header",
"in",
"columns",
":",
"if",
"header",
"in",
"existing_columns",
":",
"continue",
"# In SQLite3, new columns can only be appended.",
"c",
".",
"execute",
"(",
"(",
"u'ALTER TABLE \"{table_name}\"'",
"u' ADD COLUMN \"{col}\" TEXT;'",
")",
".",
"format",
"(",
"table_name",
"=",
"table_name",
",",
"col",
"=",
"header",
")",
")",
"# Typically, table modifications occur outside of a",
"# transaction so this is just a precaution.",
"db",
".",
"commit",
"(",
")"
] |
Create's `table_name` in `db` if it does not already exist,
and adds any missing columns.
:param db: An active SQLite3 Connection.
:param table_name: The (unicode) name of the table to setup.
:param columns: An iterable of column names to ensure exist.
:param overwrite: If ``True`` and the table already exists,
overwrite it.
|
[
"Create",
"s",
"table_name",
"in",
"db",
"if",
"it",
"does",
"not",
"already",
"exist",
"and",
"adds",
"any",
"missing",
"columns",
"."
] |
e054a7a4df7262deaca49bdbf748c00acf011b51
|
https://github.com/TkTech/pytextql/blob/e054a7a4df7262deaca49bdbf748c00acf011b51/pytextql/core.py#L104-L159
|
241,158
|
armstrong/armstrong.apps.images
|
fabfile.py
|
update_colorbox
|
def update_colorbox():
"""Update Colorbox code from vendor tree"""
base_name = os.path.dirname(__file__)
destination = os.path.join(base_name, "armstrong", "apps", "images", "static", "colorbox")
colorbox_source = os.path.join(base_name, "vendor", "colorbox")
colorbox_files = [
os.path.join(colorbox_source, "example1", "colorbox.css"),
os.path.join(colorbox_source, "example1", "images"),
os.path.join(colorbox_source, "colorbox", "jquery.colorbox-min.js"),
]
local("cp -R %s %s" % (" ".join(colorbox_files), destination))
# We're not supporting IE6, so we can drop the backfill
local("rm -rf %s" % (os.path.join(destination, "images", "ie6")))
|
python
|
def update_colorbox():
"""Update Colorbox code from vendor tree"""
base_name = os.path.dirname(__file__)
destination = os.path.join(base_name, "armstrong", "apps", "images", "static", "colorbox")
colorbox_source = os.path.join(base_name, "vendor", "colorbox")
colorbox_files = [
os.path.join(colorbox_source, "example1", "colorbox.css"),
os.path.join(colorbox_source, "example1", "images"),
os.path.join(colorbox_source, "colorbox", "jquery.colorbox-min.js"),
]
local("cp -R %s %s" % (" ".join(colorbox_files), destination))
# We're not supporting IE6, so we can drop the backfill
local("rm -rf %s" % (os.path.join(destination, "images", "ie6")))
|
[
"def",
"update_colorbox",
"(",
")",
":",
"base_name",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
"destination",
"=",
"os",
".",
"path",
".",
"join",
"(",
"base_name",
",",
"\"armstrong\"",
",",
"\"apps\"",
",",
"\"images\"",
",",
"\"static\"",
",",
"\"colorbox\"",
")",
"colorbox_source",
"=",
"os",
".",
"path",
".",
"join",
"(",
"base_name",
",",
"\"vendor\"",
",",
"\"colorbox\"",
")",
"colorbox_files",
"=",
"[",
"os",
".",
"path",
".",
"join",
"(",
"colorbox_source",
",",
"\"example1\"",
",",
"\"colorbox.css\"",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"colorbox_source",
",",
"\"example1\"",
",",
"\"images\"",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"colorbox_source",
",",
"\"colorbox\"",
",",
"\"jquery.colorbox-min.js\"",
")",
",",
"]",
"local",
"(",
"\"cp -R %s %s\"",
"%",
"(",
"\" \"",
".",
"join",
"(",
"colorbox_files",
")",
",",
"destination",
")",
")",
"# We're not supporting IE6, so we can drop the backfill",
"local",
"(",
"\"rm -rf %s\"",
"%",
"(",
"os",
".",
"path",
".",
"join",
"(",
"destination",
",",
"\"images\"",
",",
"\"ie6\"",
")",
")",
")"
] |
Update Colorbox code from vendor tree
|
[
"Update",
"Colorbox",
"code",
"from",
"vendor",
"tree"
] |
f334697ee6e2273deac12092069d02119d913e67
|
https://github.com/armstrong/armstrong.apps.images/blob/f334697ee6e2273deac12092069d02119d913e67/fabfile.py#L40-L53
|
241,159
|
frigg/frigg-worker
|
frigg_worker/environment_variables.py
|
environment_variables_for_task
|
def environment_variables_for_task(task):
"""
This will build a dict with all the environment variables that
should be present when running a build or deployment.
:param task: A dict of the json payload with information about
the build task.
:return: A dict of environment variables.
"""
env = {
'CI': 'frigg',
'FRIGG': 'true',
'FRIGG_CI': 'true',
'GH_TOKEN': task['gh_token'],
'FRIGG_BUILD_BRANCH': task['branch'],
'FRIGG_BUILD_COMMIT_HASH': task['sha'],
'FRIGG_BUILD_DIR': '~/builds/{0}'.format(task['id']),
'FRIGG_BUILD_ID': task['id'],
'FRIGG_DOCKER_IMAGE': task['image'],
'FRIGG_WORKER': socket.getfqdn(),
}
if 'pull_request_id' in task:
env['FRIGG_PULL_REQUEST_ID'] = task['pull_request_id']
if 'build_number' in task:
env['FRIGG_BUILD_NUMBER'] = task['build_number']
if 'secrets' in task:
env.update(task['secrets'])
if 'environment_variables' in task:
env.update(task['environment_variables'])
return env
|
python
|
def environment_variables_for_task(task):
"""
This will build a dict with all the environment variables that
should be present when running a build or deployment.
:param task: A dict of the json payload with information about
the build task.
:return: A dict of environment variables.
"""
env = {
'CI': 'frigg',
'FRIGG': 'true',
'FRIGG_CI': 'true',
'GH_TOKEN': task['gh_token'],
'FRIGG_BUILD_BRANCH': task['branch'],
'FRIGG_BUILD_COMMIT_HASH': task['sha'],
'FRIGG_BUILD_DIR': '~/builds/{0}'.format(task['id']),
'FRIGG_BUILD_ID': task['id'],
'FRIGG_DOCKER_IMAGE': task['image'],
'FRIGG_WORKER': socket.getfqdn(),
}
if 'pull_request_id' in task:
env['FRIGG_PULL_REQUEST_ID'] = task['pull_request_id']
if 'build_number' in task:
env['FRIGG_BUILD_NUMBER'] = task['build_number']
if 'secrets' in task:
env.update(task['secrets'])
if 'environment_variables' in task:
env.update(task['environment_variables'])
return env
|
[
"def",
"environment_variables_for_task",
"(",
"task",
")",
":",
"env",
"=",
"{",
"'CI'",
":",
"'frigg'",
",",
"'FRIGG'",
":",
"'true'",
",",
"'FRIGG_CI'",
":",
"'true'",
",",
"'GH_TOKEN'",
":",
"task",
"[",
"'gh_token'",
"]",
",",
"'FRIGG_BUILD_BRANCH'",
":",
"task",
"[",
"'branch'",
"]",
",",
"'FRIGG_BUILD_COMMIT_HASH'",
":",
"task",
"[",
"'sha'",
"]",
",",
"'FRIGG_BUILD_DIR'",
":",
"'~/builds/{0}'",
".",
"format",
"(",
"task",
"[",
"'id'",
"]",
")",
",",
"'FRIGG_BUILD_ID'",
":",
"task",
"[",
"'id'",
"]",
",",
"'FRIGG_DOCKER_IMAGE'",
":",
"task",
"[",
"'image'",
"]",
",",
"'FRIGG_WORKER'",
":",
"socket",
".",
"getfqdn",
"(",
")",
",",
"}",
"if",
"'pull_request_id'",
"in",
"task",
":",
"env",
"[",
"'FRIGG_PULL_REQUEST_ID'",
"]",
"=",
"task",
"[",
"'pull_request_id'",
"]",
"if",
"'build_number'",
"in",
"task",
":",
"env",
"[",
"'FRIGG_BUILD_NUMBER'",
"]",
"=",
"task",
"[",
"'build_number'",
"]",
"if",
"'secrets'",
"in",
"task",
":",
"env",
".",
"update",
"(",
"task",
"[",
"'secrets'",
"]",
")",
"if",
"'environment_variables'",
"in",
"task",
":",
"env",
".",
"update",
"(",
"task",
"[",
"'environment_variables'",
"]",
")",
"return",
"env"
] |
This will build a dict with all the environment variables that
should be present when running a build or deployment.
:param task: A dict of the json payload with information about
the build task.
:return: A dict of environment variables.
|
[
"This",
"will",
"build",
"a",
"dict",
"with",
"all",
"the",
"environment",
"variables",
"that",
"should",
"be",
"present",
"when",
"running",
"a",
"build",
"or",
"deployment",
"."
] |
8c215cd8f5a27ff9f5a4fedafe93d2ef0fbca86c
|
https://github.com/frigg/frigg-worker/blob/8c215cd8f5a27ff9f5a4fedafe93d2ef0fbca86c/frigg_worker/environment_variables.py#L4-L38
|
241,160
|
WTRMQDev/lnoise
|
lnoise/noisetypes.py
|
Hash.hkdf
|
def hkdf(self, chaining_key, input_key_material, dhlen=64):
"""Hash-based key derivation function
Takes a ``chaining_key'' byte sequence of len HASHLEN, and an
``input_key_material'' byte sequence with length either zero
bytes, 32 bytes or dhlen bytes.
Returns two byte sequences of length HASHLEN"""
if len(chaining_key) != self.HASHLEN:
raise HashError("Incorrect chaining key length")
if len(input_key_material) not in (0, 32, dhlen):
raise HashError("Incorrect input key material length")
temp_key = self.hmac_hash(chaining_key, input_key_material)
output1 = self.hmac_hash(temp_key, b'\x01')
output2 = self.hmac_hash(temp_key, output1 + b'\x02')
return output1, output2
|
python
|
def hkdf(self, chaining_key, input_key_material, dhlen=64):
"""Hash-based key derivation function
Takes a ``chaining_key'' byte sequence of len HASHLEN, and an
``input_key_material'' byte sequence with length either zero
bytes, 32 bytes or dhlen bytes.
Returns two byte sequences of length HASHLEN"""
if len(chaining_key) != self.HASHLEN:
raise HashError("Incorrect chaining key length")
if len(input_key_material) not in (0, 32, dhlen):
raise HashError("Incorrect input key material length")
temp_key = self.hmac_hash(chaining_key, input_key_material)
output1 = self.hmac_hash(temp_key, b'\x01')
output2 = self.hmac_hash(temp_key, output1 + b'\x02')
return output1, output2
|
[
"def",
"hkdf",
"(",
"self",
",",
"chaining_key",
",",
"input_key_material",
",",
"dhlen",
"=",
"64",
")",
":",
"if",
"len",
"(",
"chaining_key",
")",
"!=",
"self",
".",
"HASHLEN",
":",
"raise",
"HashError",
"(",
"\"Incorrect chaining key length\"",
")",
"if",
"len",
"(",
"input_key_material",
")",
"not",
"in",
"(",
"0",
",",
"32",
",",
"dhlen",
")",
":",
"raise",
"HashError",
"(",
"\"Incorrect input key material length\"",
")",
"temp_key",
"=",
"self",
".",
"hmac_hash",
"(",
"chaining_key",
",",
"input_key_material",
")",
"output1",
"=",
"self",
".",
"hmac_hash",
"(",
"temp_key",
",",
"b'\\x01'",
")",
"output2",
"=",
"self",
".",
"hmac_hash",
"(",
"temp_key",
",",
"output1",
"+",
"b'\\x02'",
")",
"return",
"output1",
",",
"output2"
] |
Hash-based key derivation function
Takes a ``chaining_key'' byte sequence of len HASHLEN, and an
``input_key_material'' byte sequence with length either zero
bytes, 32 bytes or dhlen bytes.
Returns two byte sequences of length HASHLEN
|
[
"Hash",
"-",
"based",
"key",
"derivation",
"function"
] |
7f8d9faf135025a6aac50131d14a34d1009e8cdd
|
https://github.com/WTRMQDev/lnoise/blob/7f8d9faf135025a6aac50131d14a34d1009e8cdd/lnoise/noisetypes.py#L69-L85
|
241,161
|
WTRMQDev/lnoise
|
lnoise/noisetypes.py
|
NoiseBuffer.append
|
def append(self, val):
"""Append byte string val to buffer
If the result exceeds the length of the buffer, behavior
depends on whether instance was initialized as strict.
In strict mode, a ValueError is raised.
In non-strict mode, the buffer is extended as necessary.
"""
new_len = self.length + len(val)
to_add = new_len - len(self.bfr)
if self.strict and to_add > 0:
raise ValueError("Cannot resize buffer")
self.bfr[self.length:new_len] = val
self.length = new_len
|
python
|
def append(self, val):
"""Append byte string val to buffer
If the result exceeds the length of the buffer, behavior
depends on whether instance was initialized as strict.
In strict mode, a ValueError is raised.
In non-strict mode, the buffer is extended as necessary.
"""
new_len = self.length + len(val)
to_add = new_len - len(self.bfr)
if self.strict and to_add > 0:
raise ValueError("Cannot resize buffer")
self.bfr[self.length:new_len] = val
self.length = new_len
|
[
"def",
"append",
"(",
"self",
",",
"val",
")",
":",
"new_len",
"=",
"self",
".",
"length",
"+",
"len",
"(",
"val",
")",
"to_add",
"=",
"new_len",
"-",
"len",
"(",
"self",
".",
"bfr",
")",
"if",
"self",
".",
"strict",
"and",
"to_add",
">",
"0",
":",
"raise",
"ValueError",
"(",
"\"Cannot resize buffer\"",
")",
"self",
".",
"bfr",
"[",
"self",
".",
"length",
":",
"new_len",
"]",
"=",
"val",
"self",
".",
"length",
"=",
"new_len"
] |
Append byte string val to buffer
If the result exceeds the length of the buffer, behavior
depends on whether instance was initialized as strict.
In strict mode, a ValueError is raised.
In non-strict mode, the buffer is extended as necessary.
|
[
"Append",
"byte",
"string",
"val",
"to",
"buffer",
"If",
"the",
"result",
"exceeds",
"the",
"length",
"of",
"the",
"buffer",
"behavior",
"depends",
"on",
"whether",
"instance",
"was",
"initialized",
"as",
"strict",
".",
"In",
"strict",
"mode",
"a",
"ValueError",
"is",
"raised",
".",
"In",
"non",
"-",
"strict",
"mode",
"the",
"buffer",
"is",
"extended",
"as",
"necessary",
"."
] |
7f8d9faf135025a6aac50131d14a34d1009e8cdd
|
https://github.com/WTRMQDev/lnoise/blob/7f8d9faf135025a6aac50131d14a34d1009e8cdd/lnoise/noisetypes.py#L113-L125
|
241,162
|
klmitch/tendril
|
tendril/utils.py
|
addr_info
|
def addr_info(addr):
"""
Interprets an address in standard tuple format to determine if it
is valid, and, if so, which socket family it is. Returns the
socket family.
"""
# If it's a string, it's in the UNIX family
if isinstance(addr, basestring):
return socket.AF_UNIX
# Verify that addr is a tuple
if not isinstance(addr, collections.Sequence):
raise ValueError("address is not a tuple")
# Make sure it has at least 2 fields
if len(addr) < 2:
raise ValueError("cannot understand address")
# Sanity-check the port number
if not (0 <= addr[1] < 65536):
raise ValueError("cannot understand port number")
# OK, first field should be an IP address; suck it out...
ipaddr = addr[0]
# Empty string means IPv4
if not ipaddr:
if len(addr) != 2:
raise ValueError("cannot understand address")
return socket.AF_INET
# See if it's valid...
if netaddr.valid_ipv6(ipaddr):
if len(addr) > 4:
raise ValueError("cannot understand address")
return socket.AF_INET6
elif netaddr.valid_ipv4(ipaddr):
if len(addr) != 2:
raise ValueError("cannot understand address")
return socket.AF_INET
raise ValueError("cannot understand address")
|
python
|
def addr_info(addr):
"""
Interprets an address in standard tuple format to determine if it
is valid, and, if so, which socket family it is. Returns the
socket family.
"""
# If it's a string, it's in the UNIX family
if isinstance(addr, basestring):
return socket.AF_UNIX
# Verify that addr is a tuple
if not isinstance(addr, collections.Sequence):
raise ValueError("address is not a tuple")
# Make sure it has at least 2 fields
if len(addr) < 2:
raise ValueError("cannot understand address")
# Sanity-check the port number
if not (0 <= addr[1] < 65536):
raise ValueError("cannot understand port number")
# OK, first field should be an IP address; suck it out...
ipaddr = addr[0]
# Empty string means IPv4
if not ipaddr:
if len(addr) != 2:
raise ValueError("cannot understand address")
return socket.AF_INET
# See if it's valid...
if netaddr.valid_ipv6(ipaddr):
if len(addr) > 4:
raise ValueError("cannot understand address")
return socket.AF_INET6
elif netaddr.valid_ipv4(ipaddr):
if len(addr) != 2:
raise ValueError("cannot understand address")
return socket.AF_INET
raise ValueError("cannot understand address")
|
[
"def",
"addr_info",
"(",
"addr",
")",
":",
"# If it's a string, it's in the UNIX family",
"if",
"isinstance",
"(",
"addr",
",",
"basestring",
")",
":",
"return",
"socket",
".",
"AF_UNIX",
"# Verify that addr is a tuple",
"if",
"not",
"isinstance",
"(",
"addr",
",",
"collections",
".",
"Sequence",
")",
":",
"raise",
"ValueError",
"(",
"\"address is not a tuple\"",
")",
"# Make sure it has at least 2 fields",
"if",
"len",
"(",
"addr",
")",
"<",
"2",
":",
"raise",
"ValueError",
"(",
"\"cannot understand address\"",
")",
"# Sanity-check the port number",
"if",
"not",
"(",
"0",
"<=",
"addr",
"[",
"1",
"]",
"<",
"65536",
")",
":",
"raise",
"ValueError",
"(",
"\"cannot understand port number\"",
")",
"# OK, first field should be an IP address; suck it out...",
"ipaddr",
"=",
"addr",
"[",
"0",
"]",
"# Empty string means IPv4",
"if",
"not",
"ipaddr",
":",
"if",
"len",
"(",
"addr",
")",
"!=",
"2",
":",
"raise",
"ValueError",
"(",
"\"cannot understand address\"",
")",
"return",
"socket",
".",
"AF_INET",
"# See if it's valid...",
"if",
"netaddr",
".",
"valid_ipv6",
"(",
"ipaddr",
")",
":",
"if",
"len",
"(",
"addr",
")",
">",
"4",
":",
"raise",
"ValueError",
"(",
"\"cannot understand address\"",
")",
"return",
"socket",
".",
"AF_INET6",
"elif",
"netaddr",
".",
"valid_ipv4",
"(",
"ipaddr",
")",
":",
"if",
"len",
"(",
"addr",
")",
"!=",
"2",
":",
"raise",
"ValueError",
"(",
"\"cannot understand address\"",
")",
"return",
"socket",
".",
"AF_INET",
"raise",
"ValueError",
"(",
"\"cannot understand address\"",
")"
] |
Interprets an address in standard tuple format to determine if it
is valid, and, if so, which socket family it is. Returns the
socket family.
|
[
"Interprets",
"an",
"address",
"in",
"standard",
"tuple",
"format",
"to",
"determine",
"if",
"it",
"is",
"valid",
"and",
"if",
"so",
"which",
"socket",
"family",
"it",
"is",
".",
"Returns",
"the",
"socket",
"family",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/utils.py#L113-L158
|
241,163
|
anjos/rrbob
|
rr/algorithm.py
|
make_labels
|
def make_labels(X):
"""Helper function that generates a single 1D numpy.ndarray with labels which
are good targets for stock logistic regression.
Parameters:
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 3 dimensions or an iterable containing 2 numpy.ndarrays with 2
dimensions each. Each correspond to the data for one of the two classes,
every row corresponds to one example of the data set, every column, one
different feature.
Returns:
numpy.ndarray: With a single dimension, containing suitable labels for all
rows and for all classes defined in X (depth).
"""
return numpy.hstack([k*numpy.ones(len(X[k]), dtype=int) for k in range(len(X))])
|
python
|
def make_labels(X):
"""Helper function that generates a single 1D numpy.ndarray with labels which
are good targets for stock logistic regression.
Parameters:
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 3 dimensions or an iterable containing 2 numpy.ndarrays with 2
dimensions each. Each correspond to the data for one of the two classes,
every row corresponds to one example of the data set, every column, one
different feature.
Returns:
numpy.ndarray: With a single dimension, containing suitable labels for all
rows and for all classes defined in X (depth).
"""
return numpy.hstack([k*numpy.ones(len(X[k]), dtype=int) for k in range(len(X))])
|
[
"def",
"make_labels",
"(",
"X",
")",
":",
"return",
"numpy",
".",
"hstack",
"(",
"[",
"k",
"*",
"numpy",
".",
"ones",
"(",
"len",
"(",
"X",
"[",
"k",
"]",
")",
",",
"dtype",
"=",
"int",
")",
"for",
"k",
"in",
"range",
"(",
"len",
"(",
"X",
")",
")",
"]",
")"
] |
Helper function that generates a single 1D numpy.ndarray with labels which
are good targets for stock logistic regression.
Parameters:
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 3 dimensions or an iterable containing 2 numpy.ndarrays with 2
dimensions each. Each correspond to the data for one of the two classes,
every row corresponds to one example of the data set, every column, one
different feature.
Returns:
numpy.ndarray: With a single dimension, containing suitable labels for all
rows and for all classes defined in X (depth).
|
[
"Helper",
"function",
"that",
"generates",
"a",
"single",
"1D",
"numpy",
".",
"ndarray",
"with",
"labels",
"which",
"are",
"good",
"targets",
"for",
"stock",
"logistic",
"regression",
"."
] |
d32d35bab2aa2698d3caa923fd02afb6d67f3235
|
https://github.com/anjos/rrbob/blob/d32d35bab2aa2698d3caa923fd02afb6d67f3235/rr/algorithm.py#L13-L34
|
241,164
|
anjos/rrbob
|
rr/algorithm.py
|
add_bias
|
def add_bias(X):
"""Helper function to add a bias column to the input array X
Parameters:
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 2 dimension wheres every row corresponds to one example of the data
set, every column, one different feature.
Returns:
numpy.ndarray: The same input matrix X with an added (prefix) column of
ones.
"""
return numpy.hstack((numpy.ones((len(X),1), dtype=X.dtype), X))
|
python
|
def add_bias(X):
"""Helper function to add a bias column to the input array X
Parameters:
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 2 dimension wheres every row corresponds to one example of the data
set, every column, one different feature.
Returns:
numpy.ndarray: The same input matrix X with an added (prefix) column of
ones.
"""
return numpy.hstack((numpy.ones((len(X),1), dtype=X.dtype), X))
|
[
"def",
"add_bias",
"(",
"X",
")",
":",
"return",
"numpy",
".",
"hstack",
"(",
"(",
"numpy",
".",
"ones",
"(",
"(",
"len",
"(",
"X",
")",
",",
"1",
")",
",",
"dtype",
"=",
"X",
".",
"dtype",
")",
",",
"X",
")",
")"
] |
Helper function to add a bias column to the input array X
Parameters:
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 2 dimension wheres every row corresponds to one example of the data
set, every column, one different feature.
Returns:
numpy.ndarray: The same input matrix X with an added (prefix) column of
ones.
|
[
"Helper",
"function",
"to",
"add",
"a",
"bias",
"column",
"to",
"the",
"input",
"array",
"X"
] |
d32d35bab2aa2698d3caa923fd02afb6d67f3235
|
https://github.com/anjos/rrbob/blob/d32d35bab2aa2698d3caa923fd02afb6d67f3235/rr/algorithm.py#L37-L55
|
241,165
|
anjos/rrbob
|
rr/algorithm.py
|
MultiClassTrainer.train
|
def train(self, X):
"""
Trains multiple logistic regression classifiers to handle the multiclass
problem posed by ``X``
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 3 dimensions or an iterable containing 2 numpy.ndarrays with 2
dimensions each. Each correspond to the data for one of the input
classes, every row corresponds to one example of the data set, every
column, one different feature.
Returns:
Machine: A trained multiclass machine.
"""
_trainer = bob.learn.linear.CGLogRegTrainer(**{'lambda':self.regularizer})
if len(X) == 2: #trains and returns a single logistic regression classifer
return _trainer.train(add_bias(X[0]), add_bias(X[1]))
else: #trains and returns a multi-class logistic regression classifier
# use one-versus-all strategy
machines = []
for k in range(len(X)):
NC_range = list(range(0,k)) + list(range(k+1,len(X)))
machines.append(_trainer.train(add_bias(numpy.vstack(X[NC_range])),
add_bias(X[k])))
return MultiClassMachine(machines)
|
python
|
def train(self, X):
"""
Trains multiple logistic regression classifiers to handle the multiclass
problem posed by ``X``
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 3 dimensions or an iterable containing 2 numpy.ndarrays with 2
dimensions each. Each correspond to the data for one of the input
classes, every row corresponds to one example of the data set, every
column, one different feature.
Returns:
Machine: A trained multiclass machine.
"""
_trainer = bob.learn.linear.CGLogRegTrainer(**{'lambda':self.regularizer})
if len(X) == 2: #trains and returns a single logistic regression classifer
return _trainer.train(add_bias(X[0]), add_bias(X[1]))
else: #trains and returns a multi-class logistic regression classifier
# use one-versus-all strategy
machines = []
for k in range(len(X)):
NC_range = list(range(0,k)) + list(range(k+1,len(X)))
machines.append(_trainer.train(add_bias(numpy.vstack(X[NC_range])),
add_bias(X[k])))
return MultiClassMachine(machines)
|
[
"def",
"train",
"(",
"self",
",",
"X",
")",
":",
"_trainer",
"=",
"bob",
".",
"learn",
".",
"linear",
".",
"CGLogRegTrainer",
"(",
"*",
"*",
"{",
"'lambda'",
":",
"self",
".",
"regularizer",
"}",
")",
"if",
"len",
"(",
"X",
")",
"==",
"2",
":",
"#trains and returns a single logistic regression classifer",
"return",
"_trainer",
".",
"train",
"(",
"add_bias",
"(",
"X",
"[",
"0",
"]",
")",
",",
"add_bias",
"(",
"X",
"[",
"1",
"]",
")",
")",
"else",
":",
"#trains and returns a multi-class logistic regression classifier",
"# use one-versus-all strategy",
"machines",
"=",
"[",
"]",
"for",
"k",
"in",
"range",
"(",
"len",
"(",
"X",
")",
")",
":",
"NC_range",
"=",
"list",
"(",
"range",
"(",
"0",
",",
"k",
")",
")",
"+",
"list",
"(",
"range",
"(",
"k",
"+",
"1",
",",
"len",
"(",
"X",
")",
")",
")",
"machines",
".",
"append",
"(",
"_trainer",
".",
"train",
"(",
"add_bias",
"(",
"numpy",
".",
"vstack",
"(",
"X",
"[",
"NC_range",
"]",
")",
")",
",",
"add_bias",
"(",
"X",
"[",
"k",
"]",
")",
")",
")",
"return",
"MultiClassMachine",
"(",
"machines",
")"
] |
Trains multiple logistic regression classifiers to handle the multiclass
problem posed by ``X``
X (numpy.ndarray): The input data matrix. This must be a numpy.ndarray
with 3 dimensions or an iterable containing 2 numpy.ndarrays with 2
dimensions each. Each correspond to the data for one of the input
classes, every row corresponds to one example of the data set, every
column, one different feature.
Returns:
Machine: A trained multiclass machine.
|
[
"Trains",
"multiple",
"logistic",
"regression",
"classifiers",
"to",
"handle",
"the",
"multiclass",
"problem",
"posed",
"by",
"X"
] |
d32d35bab2aa2698d3caa923fd02afb6d67f3235
|
https://github.com/anjos/rrbob/blob/d32d35bab2aa2698d3caa923fd02afb6d67f3235/rr/algorithm.py#L136-L169
|
241,166
|
robertchase/ergaleia
|
ergaleia/nested_get.py
|
nested_get
|
def nested_get(d, keys, default=None, required=False, as_list=False):
""" Multi-level dict get helper
Parameters:
d - dict instance
keys - iterable of keys or dot-delimited str of keys (see
Note 1)
default - value if index fails
required - require every index to work (see Note 2)
as_list - return result as list (see Note 3)
Notes:
1. Each key is used to index a dict, replacing the dict with
the matching value. This process is repeated until an index
fails, or the keys are exhausted.
If keys is a string, it is split by '.' and treated as a
list of keys.
2. If the required flag is False, a failure to match a key
causes the default value to be used. If the required flag is
True, every key must match, otherwise a TypeError or KeyError is
raised.
3. If as_list is True, a non-list match will be wrapped in a list,
unless match is None, which will be replaced with an empty list.
"""
if isinstance(keys, str):
keys = keys.split('.')
for key in keys:
try:
d = d[key]
except KeyError:
if required:
raise
d = default
break
except TypeError:
if required:
raise
d = default
break
if as_list:
return [] if d is None else [d] if not isinstance(d, list) else d
return d
|
python
|
def nested_get(d, keys, default=None, required=False, as_list=False):
""" Multi-level dict get helper
Parameters:
d - dict instance
keys - iterable of keys or dot-delimited str of keys (see
Note 1)
default - value if index fails
required - require every index to work (see Note 2)
as_list - return result as list (see Note 3)
Notes:
1. Each key is used to index a dict, replacing the dict with
the matching value. This process is repeated until an index
fails, or the keys are exhausted.
If keys is a string, it is split by '.' and treated as a
list of keys.
2. If the required flag is False, a failure to match a key
causes the default value to be used. If the required flag is
True, every key must match, otherwise a TypeError or KeyError is
raised.
3. If as_list is True, a non-list match will be wrapped in a list,
unless match is None, which will be replaced with an empty list.
"""
if isinstance(keys, str):
keys = keys.split('.')
for key in keys:
try:
d = d[key]
except KeyError:
if required:
raise
d = default
break
except TypeError:
if required:
raise
d = default
break
if as_list:
return [] if d is None else [d] if not isinstance(d, list) else d
return d
|
[
"def",
"nested_get",
"(",
"d",
",",
"keys",
",",
"default",
"=",
"None",
",",
"required",
"=",
"False",
",",
"as_list",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"keys",
",",
"str",
")",
":",
"keys",
"=",
"keys",
".",
"split",
"(",
"'.'",
")",
"for",
"key",
"in",
"keys",
":",
"try",
":",
"d",
"=",
"d",
"[",
"key",
"]",
"except",
"KeyError",
":",
"if",
"required",
":",
"raise",
"d",
"=",
"default",
"break",
"except",
"TypeError",
":",
"if",
"required",
":",
"raise",
"d",
"=",
"default",
"break",
"if",
"as_list",
":",
"return",
"[",
"]",
"if",
"d",
"is",
"None",
"else",
"[",
"d",
"]",
"if",
"not",
"isinstance",
"(",
"d",
",",
"list",
")",
"else",
"d",
"return",
"d"
] |
Multi-level dict get helper
Parameters:
d - dict instance
keys - iterable of keys or dot-delimited str of keys (see
Note 1)
default - value if index fails
required - require every index to work (see Note 2)
as_list - return result as list (see Note 3)
Notes:
1. Each key is used to index a dict, replacing the dict with
the matching value. This process is repeated until an index
fails, or the keys are exhausted.
If keys is a string, it is split by '.' and treated as a
list of keys.
2. If the required flag is False, a failure to match a key
causes the default value to be used. If the required flag is
True, every key must match, otherwise a TypeError or KeyError is
raised.
3. If as_list is True, a non-list match will be wrapped in a list,
unless match is None, which will be replaced with an empty list.
|
[
"Multi",
"-",
"level",
"dict",
"get",
"helper"
] |
df8e9a4b18c563022a503faa27e822c9a5755490
|
https://github.com/robertchase/ergaleia/blob/df8e9a4b18c563022a503faa27e822c9a5755490/ergaleia/nested_get.py#L8-L51
|
241,167
|
zagfai/webtul
|
webtul/utils.py
|
recur
|
def recur(obj, type_func_tuple_list=()):
'''recuring dealing an object'''
for obj_type, func in type_func_tuple_list:
if type(obj) == type(obj_type):
return func(obj)
# by default, we wolud recurring list, tuple and dict
if isinstance(obj, list) or isinstance(obj, tuple):
n_obj = []
for i in obj:
n_obj.append(recur(i))
return n_obj if isinstance(obj, list) else tuple(obj)
elif isinstance(obj, dict):
n_obj = {}
for k,v in obj.items():
n_obj[k] = recur(v)
return n_obj
return obj
|
python
|
def recur(obj, type_func_tuple_list=()):
'''recuring dealing an object'''
for obj_type, func in type_func_tuple_list:
if type(obj) == type(obj_type):
return func(obj)
# by default, we wolud recurring list, tuple and dict
if isinstance(obj, list) or isinstance(obj, tuple):
n_obj = []
for i in obj:
n_obj.append(recur(i))
return n_obj if isinstance(obj, list) else tuple(obj)
elif isinstance(obj, dict):
n_obj = {}
for k,v in obj.items():
n_obj[k] = recur(v)
return n_obj
return obj
|
[
"def",
"recur",
"(",
"obj",
",",
"type_func_tuple_list",
"=",
"(",
")",
")",
":",
"for",
"obj_type",
",",
"func",
"in",
"type_func_tuple_list",
":",
"if",
"type",
"(",
"obj",
")",
"==",
"type",
"(",
"obj_type",
")",
":",
"return",
"func",
"(",
"obj",
")",
"# by default, we wolud recurring list, tuple and dict",
"if",
"isinstance",
"(",
"obj",
",",
"list",
")",
"or",
"isinstance",
"(",
"obj",
",",
"tuple",
")",
":",
"n_obj",
"=",
"[",
"]",
"for",
"i",
"in",
"obj",
":",
"n_obj",
".",
"append",
"(",
"recur",
"(",
"i",
")",
")",
"return",
"n_obj",
"if",
"isinstance",
"(",
"obj",
",",
"list",
")",
"else",
"tuple",
"(",
"obj",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"n_obj",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"obj",
".",
"items",
"(",
")",
":",
"n_obj",
"[",
"k",
"]",
"=",
"recur",
"(",
"v",
")",
"return",
"n_obj",
"return",
"obj"
] |
recuring dealing an object
|
[
"recuring",
"dealing",
"an",
"object"
] |
58c49928070b56ef54a45b4af20d800b269ad8ce
|
https://github.com/zagfai/webtul/blob/58c49928070b56ef54a45b4af20d800b269ad8ce/webtul/utils.py#L32-L48
|
241,168
|
zagfai/webtul
|
webtul/utils.py
|
browser_cache
|
def browser_cache(seconds):
"""Decorator for browser cache. Only for webpy
@browser_cache( seconds ) before GET/POST function.
"""
import web
def wrap(f):
def wrapped_f(*args):
last_time_str = web.ctx.env.get('HTTP_IF_MODIFIED_SINCE', '')
last_time = web.net.parsehttpdate(last_time_str)
now = datetime.datetime.now()
if last_time and\
last_time + datetime.timedelta(seconds = seconds) > now:
web.notmodified()
else:
web.lastmodified(now)
web.header('Cache-Control', 'max-age='+str(seconds))
yield f(*args)
return wrapped_f
return wrap
|
python
|
def browser_cache(seconds):
"""Decorator for browser cache. Only for webpy
@browser_cache( seconds ) before GET/POST function.
"""
import web
def wrap(f):
def wrapped_f(*args):
last_time_str = web.ctx.env.get('HTTP_IF_MODIFIED_SINCE', '')
last_time = web.net.parsehttpdate(last_time_str)
now = datetime.datetime.now()
if last_time and\
last_time + datetime.timedelta(seconds = seconds) > now:
web.notmodified()
else:
web.lastmodified(now)
web.header('Cache-Control', 'max-age='+str(seconds))
yield f(*args)
return wrapped_f
return wrap
|
[
"def",
"browser_cache",
"(",
"seconds",
")",
":",
"import",
"web",
"def",
"wrap",
"(",
"f",
")",
":",
"def",
"wrapped_f",
"(",
"*",
"args",
")",
":",
"last_time_str",
"=",
"web",
".",
"ctx",
".",
"env",
".",
"get",
"(",
"'HTTP_IF_MODIFIED_SINCE'",
",",
"''",
")",
"last_time",
"=",
"web",
".",
"net",
".",
"parsehttpdate",
"(",
"last_time_str",
")",
"now",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"if",
"last_time",
"and",
"last_time",
"+",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"seconds",
")",
">",
"now",
":",
"web",
".",
"notmodified",
"(",
")",
"else",
":",
"web",
".",
"lastmodified",
"(",
"now",
")",
"web",
".",
"header",
"(",
"'Cache-Control'",
",",
"'max-age='",
"+",
"str",
"(",
"seconds",
")",
")",
"yield",
"f",
"(",
"*",
"args",
")",
"return",
"wrapped_f",
"return",
"wrap"
] |
Decorator for browser cache. Only for webpy
@browser_cache( seconds ) before GET/POST function.
|
[
"Decorator",
"for",
"browser",
"cache",
".",
"Only",
"for",
"webpy"
] |
58c49928070b56ef54a45b4af20d800b269ad8ce
|
https://github.com/zagfai/webtul/blob/58c49928070b56ef54a45b4af20d800b269ad8ce/webtul/utils.py#L50-L68
|
241,169
|
jalanb/pysyte
|
pysyte/splits.py
|
join
|
def join(items, separator=None):
"""Join the items into a string using the separator
Converts items to strings if needed
>>> join([1, 2, 3])
'1,2,3'
"""
if not items:
return ''
if separator is None:
separator = _default_separator()
return separator.join([str(item) for item in items])
|
python
|
def join(items, separator=None):
"""Join the items into a string using the separator
Converts items to strings if needed
>>> join([1, 2, 3])
'1,2,3'
"""
if not items:
return ''
if separator is None:
separator = _default_separator()
return separator.join([str(item) for item in items])
|
[
"def",
"join",
"(",
"items",
",",
"separator",
"=",
"None",
")",
":",
"if",
"not",
"items",
":",
"return",
"''",
"if",
"separator",
"is",
"None",
":",
"separator",
"=",
"_default_separator",
"(",
")",
"return",
"separator",
".",
"join",
"(",
"[",
"str",
"(",
"item",
")",
"for",
"item",
"in",
"items",
"]",
")"
] |
Join the items into a string using the separator
Converts items to strings if needed
>>> join([1, 2, 3])
'1,2,3'
|
[
"Join",
"the",
"items",
"into",
"a",
"string",
"using",
"the",
"separator"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/splits.py#L17-L29
|
241,170
|
jalanb/pysyte
|
pysyte/splits.py
|
split
|
def split(string, separator_regexp=None, maxsplit=0):
"""Split a string to a list
>>> split('fred, was, here')
['fred', ' was', ' here']
"""
if not string:
return []
if separator_regexp is None:
separator_regexp = _default_separator()
if not separator_regexp:
return string.split()
return re.split(separator_regexp, string, maxsplit)
|
python
|
def split(string, separator_regexp=None, maxsplit=0):
"""Split a string to a list
>>> split('fred, was, here')
['fred', ' was', ' here']
"""
if not string:
return []
if separator_regexp is None:
separator_regexp = _default_separator()
if not separator_regexp:
return string.split()
return re.split(separator_regexp, string, maxsplit)
|
[
"def",
"split",
"(",
"string",
",",
"separator_regexp",
"=",
"None",
",",
"maxsplit",
"=",
"0",
")",
":",
"if",
"not",
"string",
":",
"return",
"[",
"]",
"if",
"separator_regexp",
"is",
"None",
":",
"separator_regexp",
"=",
"_default_separator",
"(",
")",
"if",
"not",
"separator_regexp",
":",
"return",
"string",
".",
"split",
"(",
")",
"return",
"re",
".",
"split",
"(",
"separator_regexp",
",",
"string",
",",
"maxsplit",
")"
] |
Split a string to a list
>>> split('fred, was, here')
['fred', ' was', ' here']
|
[
"Split",
"a",
"string",
"to",
"a",
"list"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/splits.py#L41-L53
|
241,171
|
jalanb/pysyte
|
pysyte/splits.py
|
split_and_strip
|
def split_and_strip(string, separator_regexp=None, maxsplit=0):
"""Split a string into items and trim any excess spaces from the items
>>> split_and_strip('fred, was, here ')
['fred', 'was', 'here']
"""
if not string:
return ['']
if separator_regexp is None:
separator_regexp = _default_separator()
if not separator_regexp:
return string.split()
return [item.strip()
for item in re.split(separator_regexp, string, maxsplit)]
|
python
|
def split_and_strip(string, separator_regexp=None, maxsplit=0):
"""Split a string into items and trim any excess spaces from the items
>>> split_and_strip('fred, was, here ')
['fred', 'was', 'here']
"""
if not string:
return ['']
if separator_regexp is None:
separator_regexp = _default_separator()
if not separator_regexp:
return string.split()
return [item.strip()
for item in re.split(separator_regexp, string, maxsplit)]
|
[
"def",
"split_and_strip",
"(",
"string",
",",
"separator_regexp",
"=",
"None",
",",
"maxsplit",
"=",
"0",
")",
":",
"if",
"not",
"string",
":",
"return",
"[",
"''",
"]",
"if",
"separator_regexp",
"is",
"None",
":",
"separator_regexp",
"=",
"_default_separator",
"(",
")",
"if",
"not",
"separator_regexp",
":",
"return",
"string",
".",
"split",
"(",
")",
"return",
"[",
"item",
".",
"strip",
"(",
")",
"for",
"item",
"in",
"re",
".",
"split",
"(",
"separator_regexp",
",",
"string",
",",
"maxsplit",
")",
"]"
] |
Split a string into items and trim any excess spaces from the items
>>> split_and_strip('fred, was, here ')
['fred', 'was', 'here']
|
[
"Split",
"a",
"string",
"into",
"items",
"and",
"trim",
"any",
"excess",
"spaces",
"from",
"the",
"items"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/splits.py#L56-L69
|
241,172
|
jalanb/pysyte
|
pysyte/splits.py
|
split_and_strip_without
|
def split_and_strip_without(string, exclude, separator_regexp=None):
"""Split a string into items, and trim any excess spaces
Any items in exclude are not in the returned list
>>> split_and_strip_without('fred, was, here ', ['was'])
['fred', 'here']
"""
result = split_and_strip(string, separator_regexp)
if not exclude:
return result
return [x for x in result if x not in exclude]
|
python
|
def split_and_strip_without(string, exclude, separator_regexp=None):
"""Split a string into items, and trim any excess spaces
Any items in exclude are not in the returned list
>>> split_and_strip_without('fred, was, here ', ['was'])
['fred', 'here']
"""
result = split_and_strip(string, separator_regexp)
if not exclude:
return result
return [x for x in result if x not in exclude]
|
[
"def",
"split_and_strip_without",
"(",
"string",
",",
"exclude",
",",
"separator_regexp",
"=",
"None",
")",
":",
"result",
"=",
"split_and_strip",
"(",
"string",
",",
"separator_regexp",
")",
"if",
"not",
"exclude",
":",
"return",
"result",
"return",
"[",
"x",
"for",
"x",
"in",
"result",
"if",
"x",
"not",
"in",
"exclude",
"]"
] |
Split a string into items, and trim any excess spaces
Any items in exclude are not in the returned list
>>> split_and_strip_without('fred, was, here ', ['was'])
['fred', 'here']
|
[
"Split",
"a",
"string",
"into",
"items",
"and",
"trim",
"any",
"excess",
"spaces"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/splits.py#L72-L83
|
241,173
|
jalanb/pysyte
|
pysyte/splits.py
|
split_by_count
|
def split_by_count(items, count, filler=None):
"""Split the items into tuples of count items each
>>> split_by_count([0,1,2,3], 2)
[(0, 1), (2, 3)]
If there are a mutiple of count items then filler makes no difference
>>> split_by_count([0,1,2,7,8,9], 3, 0) == split_by_count([0,1,2,7,8,9], 3)
True
If there are not a multiple of count items, then any extras are discarded
>>> split_by_count([0,1,2,7,8,9,6], 3)
[(0, 1, 2), (7, 8, 9)]
Specifying a filler expands the "lost" group
>>> split_by_count([0,1,2,7,8,9,6], 3, 0)
[(0, 1, 2), (7, 8, 9), (6, 0, 0)]
"""
if filler is not None:
items = items[:]
while len(items) % count:
items.append(filler)
iterator = iter(items)
iterators = [iterator] * count
return list(zip(*iterators))
|
python
|
def split_by_count(items, count, filler=None):
"""Split the items into tuples of count items each
>>> split_by_count([0,1,2,3], 2)
[(0, 1), (2, 3)]
If there are a mutiple of count items then filler makes no difference
>>> split_by_count([0,1,2,7,8,9], 3, 0) == split_by_count([0,1,2,7,8,9], 3)
True
If there are not a multiple of count items, then any extras are discarded
>>> split_by_count([0,1,2,7,8,9,6], 3)
[(0, 1, 2), (7, 8, 9)]
Specifying a filler expands the "lost" group
>>> split_by_count([0,1,2,7,8,9,6], 3, 0)
[(0, 1, 2), (7, 8, 9), (6, 0, 0)]
"""
if filler is not None:
items = items[:]
while len(items) % count:
items.append(filler)
iterator = iter(items)
iterators = [iterator] * count
return list(zip(*iterators))
|
[
"def",
"split_by_count",
"(",
"items",
",",
"count",
",",
"filler",
"=",
"None",
")",
":",
"if",
"filler",
"is",
"not",
"None",
":",
"items",
"=",
"items",
"[",
":",
"]",
"while",
"len",
"(",
"items",
")",
"%",
"count",
":",
"items",
".",
"append",
"(",
"filler",
")",
"iterator",
"=",
"iter",
"(",
"items",
")",
"iterators",
"=",
"[",
"iterator",
"]",
"*",
"count",
"return",
"list",
"(",
"zip",
"(",
"*",
"iterators",
")",
")"
] |
Split the items into tuples of count items each
>>> split_by_count([0,1,2,3], 2)
[(0, 1), (2, 3)]
If there are a mutiple of count items then filler makes no difference
>>> split_by_count([0,1,2,7,8,9], 3, 0) == split_by_count([0,1,2,7,8,9], 3)
True
If there are not a multiple of count items, then any extras are discarded
>>> split_by_count([0,1,2,7,8,9,6], 3)
[(0, 1, 2), (7, 8, 9)]
Specifying a filler expands the "lost" group
>>> split_by_count([0,1,2,7,8,9,6], 3, 0)
[(0, 1, 2), (7, 8, 9), (6, 0, 0)]
|
[
"Split",
"the",
"items",
"into",
"tuples",
"of",
"count",
"items",
"each"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/splits.py#L97-L121
|
241,174
|
jalanb/pysyte
|
pysyte/splits.py
|
rejoin
|
def rejoin(string, separator_regexp=None, spaced=False):
"""Split a string and then rejoin it
Spaces are interspersed between items only if spaced is True
>>> rejoin('fred, was, here ')
'fred,was,here'
"""
strings = split_and_strip(string)
if separator_regexp is None:
separator_regexp = _default_separator()
joiner = spaced and '%s ' % separator_regexp or separator_regexp
return joiner.join(strings)
|
python
|
def rejoin(string, separator_regexp=None, spaced=False):
"""Split a string and then rejoin it
Spaces are interspersed between items only if spaced is True
>>> rejoin('fred, was, here ')
'fred,was,here'
"""
strings = split_and_strip(string)
if separator_regexp is None:
separator_regexp = _default_separator()
joiner = spaced and '%s ' % separator_regexp or separator_regexp
return joiner.join(strings)
|
[
"def",
"rejoin",
"(",
"string",
",",
"separator_regexp",
"=",
"None",
",",
"spaced",
"=",
"False",
")",
":",
"strings",
"=",
"split_and_strip",
"(",
"string",
")",
"if",
"separator_regexp",
"is",
"None",
":",
"separator_regexp",
"=",
"_default_separator",
"(",
")",
"joiner",
"=",
"spaced",
"and",
"'%s '",
"%",
"separator_regexp",
"or",
"separator_regexp",
"return",
"joiner",
".",
"join",
"(",
"strings",
")"
] |
Split a string and then rejoin it
Spaces are interspersed between items only if spaced is True
>>> rejoin('fred, was, here ')
'fred,was,here'
|
[
"Split",
"a",
"string",
"and",
"then",
"rejoin",
"it"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/splits.py#L172-L184
|
241,175
|
Bystroushaak/BalancedDiscStorage
|
src/BalancedDiscStorage/balanced_disc_storage_z.py
|
BalancedDiscStorageZ.add_archive_as_dir
|
def add_archive_as_dir(self, zip_file_obj):
"""
Add archive to the storage and unpack it.
Args:
zip_file_obj (file): Opened file-like object.
Returns:
obj: Path where the `zip_file_obj` was unpacked wrapped in \
:class:`.PathAndHash` structure.
Raises:
ValueError: If there is too many files in .zip archive. \
See :attr:`._max_zipfiles` for details.
AssertionError: If the `zip_file_obj` is not file-like object.
"""
BalancedDiscStorage._check_interface(zip_file_obj)
file_hash = self._get_hash(zip_file_obj)
dir_path = self._create_dir_path(file_hash)
full_path = os.path.join(dir_path, file_hash)
if os.path.exists(full_path):
shutil.rmtree(full_path)
os.mkdir(full_path)
try:
self._unpack_zip(zip_file_obj, full_path)
except Exception:
shutil.rmtree(full_path)
raise
return PathAndHash(path=full_path, hash=file_hash)
|
python
|
def add_archive_as_dir(self, zip_file_obj):
"""
Add archive to the storage and unpack it.
Args:
zip_file_obj (file): Opened file-like object.
Returns:
obj: Path where the `zip_file_obj` was unpacked wrapped in \
:class:`.PathAndHash` structure.
Raises:
ValueError: If there is too many files in .zip archive. \
See :attr:`._max_zipfiles` for details.
AssertionError: If the `zip_file_obj` is not file-like object.
"""
BalancedDiscStorage._check_interface(zip_file_obj)
file_hash = self._get_hash(zip_file_obj)
dir_path = self._create_dir_path(file_hash)
full_path = os.path.join(dir_path, file_hash)
if os.path.exists(full_path):
shutil.rmtree(full_path)
os.mkdir(full_path)
try:
self._unpack_zip(zip_file_obj, full_path)
except Exception:
shutil.rmtree(full_path)
raise
return PathAndHash(path=full_path, hash=file_hash)
|
[
"def",
"add_archive_as_dir",
"(",
"self",
",",
"zip_file_obj",
")",
":",
"BalancedDiscStorage",
".",
"_check_interface",
"(",
"zip_file_obj",
")",
"file_hash",
"=",
"self",
".",
"_get_hash",
"(",
"zip_file_obj",
")",
"dir_path",
"=",
"self",
".",
"_create_dir_path",
"(",
"file_hash",
")",
"full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dir_path",
",",
"file_hash",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"full_path",
")",
":",
"shutil",
".",
"rmtree",
"(",
"full_path",
")",
"os",
".",
"mkdir",
"(",
"full_path",
")",
"try",
":",
"self",
".",
"_unpack_zip",
"(",
"zip_file_obj",
",",
"full_path",
")",
"except",
"Exception",
":",
"shutil",
".",
"rmtree",
"(",
"full_path",
")",
"raise",
"return",
"PathAndHash",
"(",
"path",
"=",
"full_path",
",",
"hash",
"=",
"file_hash",
")"
] |
Add archive to the storage and unpack it.
Args:
zip_file_obj (file): Opened file-like object.
Returns:
obj: Path where the `zip_file_obj` was unpacked wrapped in \
:class:`.PathAndHash` structure.
Raises:
ValueError: If there is too many files in .zip archive. \
See :attr:`._max_zipfiles` for details.
AssertionError: If the `zip_file_obj` is not file-like object.
|
[
"Add",
"archive",
"to",
"the",
"storage",
"and",
"unpack",
"it",
"."
] |
d96854e2afdd70c814b16d177ff6308841b34b24
|
https://github.com/Bystroushaak/BalancedDiscStorage/blob/d96854e2afdd70c814b16d177ff6308841b34b24/src/BalancedDiscStorage/balanced_disc_storage_z.py#L57-L90
|
241,176
|
aquatix/python-utilkit
|
utilkit/printutil.py
|
to_even_columns
|
def to_even_columns(data, headers=None):
"""
Nicely format the 2-dimensional list into evenly spaced columns
"""
result = ''
col_width = max(len(word) for row in data for word in row) + 2 # padding
if headers:
header_width = max(len(word) for row in headers for word in row) + 2
if header_width > col_width:
col_width = header_width
result += "".join(word.ljust(col_width) for word in headers) + "\n"
result += '-' * col_width * len(headers) + "\n"
for row in data:
result += "".join(word.ljust(col_width) for word in row) + "\n"
return result
|
python
|
def to_even_columns(data, headers=None):
"""
Nicely format the 2-dimensional list into evenly spaced columns
"""
result = ''
col_width = max(len(word) for row in data for word in row) + 2 # padding
if headers:
header_width = max(len(word) for row in headers for word in row) + 2
if header_width > col_width:
col_width = header_width
result += "".join(word.ljust(col_width) for word in headers) + "\n"
result += '-' * col_width * len(headers) + "\n"
for row in data:
result += "".join(word.ljust(col_width) for word in row) + "\n"
return result
|
[
"def",
"to_even_columns",
"(",
"data",
",",
"headers",
"=",
"None",
")",
":",
"result",
"=",
"''",
"col_width",
"=",
"max",
"(",
"len",
"(",
"word",
")",
"for",
"row",
"in",
"data",
"for",
"word",
"in",
"row",
")",
"+",
"2",
"# padding",
"if",
"headers",
":",
"header_width",
"=",
"max",
"(",
"len",
"(",
"word",
")",
"for",
"row",
"in",
"headers",
"for",
"word",
"in",
"row",
")",
"+",
"2",
"if",
"header_width",
">",
"col_width",
":",
"col_width",
"=",
"header_width",
"result",
"+=",
"\"\"",
".",
"join",
"(",
"word",
".",
"ljust",
"(",
"col_width",
")",
"for",
"word",
"in",
"headers",
")",
"+",
"\"\\n\"",
"result",
"+=",
"'-'",
"*",
"col_width",
"*",
"len",
"(",
"headers",
")",
"+",
"\"\\n\"",
"for",
"row",
"in",
"data",
":",
"result",
"+=",
"\"\"",
".",
"join",
"(",
"word",
".",
"ljust",
"(",
"col_width",
")",
"for",
"word",
"in",
"row",
")",
"+",
"\"\\n\"",
"return",
"result"
] |
Nicely format the 2-dimensional list into evenly spaced columns
|
[
"Nicely",
"format",
"the",
"2",
"-",
"dimensional",
"list",
"into",
"evenly",
"spaced",
"columns"
] |
1b4a4175381d2175592208619315f399610f915c
|
https://github.com/aquatix/python-utilkit/blob/1b4a4175381d2175592208619315f399610f915c/utilkit/printutil.py#L6-L22
|
241,177
|
aquatix/python-utilkit
|
utilkit/printutil.py
|
to_smart_columns
|
def to_smart_columns(data, headers=None, padding=2):
"""
Nicely format the 2-dimensional list into columns
"""
result = ''
col_widths = []
for row in data:
col_counter = 0
for word in row:
try:
col_widths[col_counter] = max(len(word), col_widths[col_counter])
except IndexError:
col_widths.append(len(word))
col_counter += 1
if headers:
col_counter = 0
for word in headers:
try:
col_widths[col_counter] = max(len(word), col_widths[col_counter])
except IndexError:
col_widths.append(len(word))
col_counter += 1
# Add padding
col_widths = [width + padding for width in col_widths]
total_width = sum(col_widths)
if headers:
col_counter = 0
for word in headers:
result += "".join(word.ljust(col_widths[col_counter]))
col_counter += 1
result += "\n"
result += '-' * total_width + "\n"
for row in data:
col_counter = 0
for word in row:
result += "".join(word.ljust(col_widths[col_counter]))
col_counter += 1
result += "\n"
return result
|
python
|
def to_smart_columns(data, headers=None, padding=2):
"""
Nicely format the 2-dimensional list into columns
"""
result = ''
col_widths = []
for row in data:
col_counter = 0
for word in row:
try:
col_widths[col_counter] = max(len(word), col_widths[col_counter])
except IndexError:
col_widths.append(len(word))
col_counter += 1
if headers:
col_counter = 0
for word in headers:
try:
col_widths[col_counter] = max(len(word), col_widths[col_counter])
except IndexError:
col_widths.append(len(word))
col_counter += 1
# Add padding
col_widths = [width + padding for width in col_widths]
total_width = sum(col_widths)
if headers:
col_counter = 0
for word in headers:
result += "".join(word.ljust(col_widths[col_counter]))
col_counter += 1
result += "\n"
result += '-' * total_width + "\n"
for row in data:
col_counter = 0
for word in row:
result += "".join(word.ljust(col_widths[col_counter]))
col_counter += 1
result += "\n"
return result
|
[
"def",
"to_smart_columns",
"(",
"data",
",",
"headers",
"=",
"None",
",",
"padding",
"=",
"2",
")",
":",
"result",
"=",
"''",
"col_widths",
"=",
"[",
"]",
"for",
"row",
"in",
"data",
":",
"col_counter",
"=",
"0",
"for",
"word",
"in",
"row",
":",
"try",
":",
"col_widths",
"[",
"col_counter",
"]",
"=",
"max",
"(",
"len",
"(",
"word",
")",
",",
"col_widths",
"[",
"col_counter",
"]",
")",
"except",
"IndexError",
":",
"col_widths",
".",
"append",
"(",
"len",
"(",
"word",
")",
")",
"col_counter",
"+=",
"1",
"if",
"headers",
":",
"col_counter",
"=",
"0",
"for",
"word",
"in",
"headers",
":",
"try",
":",
"col_widths",
"[",
"col_counter",
"]",
"=",
"max",
"(",
"len",
"(",
"word",
")",
",",
"col_widths",
"[",
"col_counter",
"]",
")",
"except",
"IndexError",
":",
"col_widths",
".",
"append",
"(",
"len",
"(",
"word",
")",
")",
"col_counter",
"+=",
"1",
"# Add padding",
"col_widths",
"=",
"[",
"width",
"+",
"padding",
"for",
"width",
"in",
"col_widths",
"]",
"total_width",
"=",
"sum",
"(",
"col_widths",
")",
"if",
"headers",
":",
"col_counter",
"=",
"0",
"for",
"word",
"in",
"headers",
":",
"result",
"+=",
"\"\"",
".",
"join",
"(",
"word",
".",
"ljust",
"(",
"col_widths",
"[",
"col_counter",
"]",
")",
")",
"col_counter",
"+=",
"1",
"result",
"+=",
"\"\\n\"",
"result",
"+=",
"'-'",
"*",
"total_width",
"+",
"\"\\n\"",
"for",
"row",
"in",
"data",
":",
"col_counter",
"=",
"0",
"for",
"word",
"in",
"row",
":",
"result",
"+=",
"\"\"",
".",
"join",
"(",
"word",
".",
"ljust",
"(",
"col_widths",
"[",
"col_counter",
"]",
")",
")",
"col_counter",
"+=",
"1",
"result",
"+=",
"\"\\n\"",
"return",
"result"
] |
Nicely format the 2-dimensional list into columns
|
[
"Nicely",
"format",
"the",
"2",
"-",
"dimensional",
"list",
"into",
"columns"
] |
1b4a4175381d2175592208619315f399610f915c
|
https://github.com/aquatix/python-utilkit/blob/1b4a4175381d2175592208619315f399610f915c/utilkit/printutil.py#L25-L67
|
241,178
|
aquatix/python-utilkit
|
utilkit/printutil.py
|
progress_bar
|
def progress_bar(items_total, items_progress, columns=40, base_char='.', progress_char='#', percentage=False, prefix='', postfix=''):
"""
Print a progress bar of width `columns`
"""
bins_total = int(float(items_total) / columns) + 1
bins_progress = int((float(items_progress) / float(items_total)) * bins_total) + 1
progress = prefix
progress += progress_char * bins_progress
progress += base_char * (bins_total - bins_progress)
if percentage:
progress_percentage = float(items_progress) / float(items_total) * 100
# Round the percentage to two decimals
postfix = ' ' + str(round(progress_percentage, 2)) + '% ' + postfix
progress += postfix
return progress
|
python
|
def progress_bar(items_total, items_progress, columns=40, base_char='.', progress_char='#', percentage=False, prefix='', postfix=''):
"""
Print a progress bar of width `columns`
"""
bins_total = int(float(items_total) / columns) + 1
bins_progress = int((float(items_progress) / float(items_total)) * bins_total) + 1
progress = prefix
progress += progress_char * bins_progress
progress += base_char * (bins_total - bins_progress)
if percentage:
progress_percentage = float(items_progress) / float(items_total) * 100
# Round the percentage to two decimals
postfix = ' ' + str(round(progress_percentage, 2)) + '% ' + postfix
progress += postfix
return progress
|
[
"def",
"progress_bar",
"(",
"items_total",
",",
"items_progress",
",",
"columns",
"=",
"40",
",",
"base_char",
"=",
"'.'",
",",
"progress_char",
"=",
"'#'",
",",
"percentage",
"=",
"False",
",",
"prefix",
"=",
"''",
",",
"postfix",
"=",
"''",
")",
":",
"bins_total",
"=",
"int",
"(",
"float",
"(",
"items_total",
")",
"/",
"columns",
")",
"+",
"1",
"bins_progress",
"=",
"int",
"(",
"(",
"float",
"(",
"items_progress",
")",
"/",
"float",
"(",
"items_total",
")",
")",
"*",
"bins_total",
")",
"+",
"1",
"progress",
"=",
"prefix",
"progress",
"+=",
"progress_char",
"*",
"bins_progress",
"progress",
"+=",
"base_char",
"*",
"(",
"bins_total",
"-",
"bins_progress",
")",
"if",
"percentage",
":",
"progress_percentage",
"=",
"float",
"(",
"items_progress",
")",
"/",
"float",
"(",
"items_total",
")",
"*",
"100",
"# Round the percentage to two decimals",
"postfix",
"=",
"' '",
"+",
"str",
"(",
"round",
"(",
"progress_percentage",
",",
"2",
")",
")",
"+",
"'% '",
"+",
"postfix",
"progress",
"+=",
"postfix",
"return",
"progress"
] |
Print a progress bar of width `columns`
|
[
"Print",
"a",
"progress",
"bar",
"of",
"width",
"columns"
] |
1b4a4175381d2175592208619315f399610f915c
|
https://github.com/aquatix/python-utilkit/blob/1b4a4175381d2175592208619315f399610f915c/utilkit/printutil.py#L70-L84
|
241,179
|
aquatix/python-utilkit
|
utilkit/printutil.py
|
merge_x_y
|
def merge_x_y(collection_x, collection_y, filter_none=False):
"""
Merge two lists, creating a dictionary with key `label` and a set x and y
"""
data = {}
for item in collection_x:
#print item[0:-1]
#print item[-1]
label = datetimeutil.tuple_to_string(item[0:-1])
if filter_none and label == 'None-None':
continue
data[label] = {'label': label, 'x': item[-1], 'y': 0}
for item in collection_y:
#print item
label = datetimeutil.tuple_to_string(item[0:-1])
if filter_none and label == 'None-None':
continue
try:
data[label]['y'] = item[-1]
except KeyError:
data[label] = {'label': label, 'x': 0, 'y': item[-1]}
# Keys are not sorted
return data
|
python
|
def merge_x_y(collection_x, collection_y, filter_none=False):
"""
Merge two lists, creating a dictionary with key `label` and a set x and y
"""
data = {}
for item in collection_x:
#print item[0:-1]
#print item[-1]
label = datetimeutil.tuple_to_string(item[0:-1])
if filter_none and label == 'None-None':
continue
data[label] = {'label': label, 'x': item[-1], 'y': 0}
for item in collection_y:
#print item
label = datetimeutil.tuple_to_string(item[0:-1])
if filter_none and label == 'None-None':
continue
try:
data[label]['y'] = item[-1]
except KeyError:
data[label] = {'label': label, 'x': 0, 'y': item[-1]}
# Keys are not sorted
return data
|
[
"def",
"merge_x_y",
"(",
"collection_x",
",",
"collection_y",
",",
"filter_none",
"=",
"False",
")",
":",
"data",
"=",
"{",
"}",
"for",
"item",
"in",
"collection_x",
":",
"#print item[0:-1]",
"#print item[-1]",
"label",
"=",
"datetimeutil",
".",
"tuple_to_string",
"(",
"item",
"[",
"0",
":",
"-",
"1",
"]",
")",
"if",
"filter_none",
"and",
"label",
"==",
"'None-None'",
":",
"continue",
"data",
"[",
"label",
"]",
"=",
"{",
"'label'",
":",
"label",
",",
"'x'",
":",
"item",
"[",
"-",
"1",
"]",
",",
"'y'",
":",
"0",
"}",
"for",
"item",
"in",
"collection_y",
":",
"#print item",
"label",
"=",
"datetimeutil",
".",
"tuple_to_string",
"(",
"item",
"[",
"0",
":",
"-",
"1",
"]",
")",
"if",
"filter_none",
"and",
"label",
"==",
"'None-None'",
":",
"continue",
"try",
":",
"data",
"[",
"label",
"]",
"[",
"'y'",
"]",
"=",
"item",
"[",
"-",
"1",
"]",
"except",
"KeyError",
":",
"data",
"[",
"label",
"]",
"=",
"{",
"'label'",
":",
"label",
",",
"'x'",
":",
"0",
",",
"'y'",
":",
"item",
"[",
"-",
"1",
"]",
"}",
"# Keys are not sorted",
"return",
"data"
] |
Merge two lists, creating a dictionary with key `label` and a set x and y
|
[
"Merge",
"two",
"lists",
"creating",
"a",
"dictionary",
"with",
"key",
"label",
"and",
"a",
"set",
"x",
"and",
"y"
] |
1b4a4175381d2175592208619315f399610f915c
|
https://github.com/aquatix/python-utilkit/blob/1b4a4175381d2175592208619315f399610f915c/utilkit/printutil.py#L87-L110
|
241,180
|
aquatix/python-utilkit
|
utilkit/printutil.py
|
x_vs_y
|
def x_vs_y(collection_x, collection_y, title_x=None, title_y=None, width=43, filter_none=False):
"""
Print a histogram with bins for x to the left and bins of y to the right
"""
data = merge_x_y(collection_x, collection_y, filter_none)
max_value = get_max_x_y(data)
bins_total = int(float(max_value) / width) + 1
if title_x is not None and title_y is not None:
headers = [title_x, title_y]
else:
headers = None
result = []
# Sort keys
for item in sorted(data):
#result.append([item, str(data[item]['x']) + '|' + str(data[item]['y'])])
bins_x = int((float(data[item]['x']) / float(max_value)) * bins_total) + 1
bins_y = int((float(data[item]['y']) / float(max_value)) * bins_total) + 1
print(bins_x)
print(bins_y)
#result.append([item, str(data[item]['x']), str(data[item]['y'])])
result.append([item, '*' * bins_x, '*' * bins_y])
result = to_smart_columns(result, headers=headers)
return result
|
python
|
def x_vs_y(collection_x, collection_y, title_x=None, title_y=None, width=43, filter_none=False):
"""
Print a histogram with bins for x to the left and bins of y to the right
"""
data = merge_x_y(collection_x, collection_y, filter_none)
max_value = get_max_x_y(data)
bins_total = int(float(max_value) / width) + 1
if title_x is not None and title_y is not None:
headers = [title_x, title_y]
else:
headers = None
result = []
# Sort keys
for item in sorted(data):
#result.append([item, str(data[item]['x']) + '|' + str(data[item]['y'])])
bins_x = int((float(data[item]['x']) / float(max_value)) * bins_total) + 1
bins_y = int((float(data[item]['y']) / float(max_value)) * bins_total) + 1
print(bins_x)
print(bins_y)
#result.append([item, str(data[item]['x']), str(data[item]['y'])])
result.append([item, '*' * bins_x, '*' * bins_y])
result = to_smart_columns(result, headers=headers)
return result
|
[
"def",
"x_vs_y",
"(",
"collection_x",
",",
"collection_y",
",",
"title_x",
"=",
"None",
",",
"title_y",
"=",
"None",
",",
"width",
"=",
"43",
",",
"filter_none",
"=",
"False",
")",
":",
"data",
"=",
"merge_x_y",
"(",
"collection_x",
",",
"collection_y",
",",
"filter_none",
")",
"max_value",
"=",
"get_max_x_y",
"(",
"data",
")",
"bins_total",
"=",
"int",
"(",
"float",
"(",
"max_value",
")",
"/",
"width",
")",
"+",
"1",
"if",
"title_x",
"is",
"not",
"None",
"and",
"title_y",
"is",
"not",
"None",
":",
"headers",
"=",
"[",
"title_x",
",",
"title_y",
"]",
"else",
":",
"headers",
"=",
"None",
"result",
"=",
"[",
"]",
"# Sort keys",
"for",
"item",
"in",
"sorted",
"(",
"data",
")",
":",
"#result.append([item, str(data[item]['x']) + '|' + str(data[item]['y'])])",
"bins_x",
"=",
"int",
"(",
"(",
"float",
"(",
"data",
"[",
"item",
"]",
"[",
"'x'",
"]",
")",
"/",
"float",
"(",
"max_value",
")",
")",
"*",
"bins_total",
")",
"+",
"1",
"bins_y",
"=",
"int",
"(",
"(",
"float",
"(",
"data",
"[",
"item",
"]",
"[",
"'y'",
"]",
")",
"/",
"float",
"(",
"max_value",
")",
")",
"*",
"bins_total",
")",
"+",
"1",
"print",
"(",
"bins_x",
")",
"print",
"(",
"bins_y",
")",
"#result.append([item, str(data[item]['x']), str(data[item]['y'])])",
"result",
".",
"append",
"(",
"[",
"item",
",",
"'*'",
"*",
"bins_x",
",",
"'*'",
"*",
"bins_y",
"]",
")",
"result",
"=",
"to_smart_columns",
"(",
"result",
",",
"headers",
"=",
"headers",
")",
"return",
"result"
] |
Print a histogram with bins for x to the left and bins of y to the right
|
[
"Print",
"a",
"histogram",
"with",
"bins",
"for",
"x",
"to",
"the",
"left",
"and",
"bins",
"of",
"y",
"to",
"the",
"right"
] |
1b4a4175381d2175592208619315f399610f915c
|
https://github.com/aquatix/python-utilkit/blob/1b4a4175381d2175592208619315f399610f915c/utilkit/printutil.py#L123-L147
|
241,181
|
Amsterdam/authorization_django
|
authorization_django/config.py
|
settings
|
def settings():
""" Fetch the middleware settings.
:return dict: settings
"""
# Get the user-provided settings
user_settings = dict(getattr(django_settings, _settings_key, {}))
user_settings_keys = set(user_settings.keys())
# Check for required but missing settings
missing = _required_settings_keys - user_settings_keys
if missing:
raise AuthzConfigurationError(
'Missing required {} config: {}'.format(_settings_key, missing))
# Check for unknown settings
unknown = user_settings_keys - _available_settings_keys
if unknown:
raise AuthzConfigurationError(
'Unknown {} config params: {}'.format(_settings_key, unknown))
# Merge defaults with provided settings
defaults = _available_settings_keys - user_settings_keys
user_settings.update({key: _available_settings[key] for key in defaults})
_rectify(user_settings)
return types.MappingProxyType(user_settings)
|
python
|
def settings():
""" Fetch the middleware settings.
:return dict: settings
"""
# Get the user-provided settings
user_settings = dict(getattr(django_settings, _settings_key, {}))
user_settings_keys = set(user_settings.keys())
# Check for required but missing settings
missing = _required_settings_keys - user_settings_keys
if missing:
raise AuthzConfigurationError(
'Missing required {} config: {}'.format(_settings_key, missing))
# Check for unknown settings
unknown = user_settings_keys - _available_settings_keys
if unknown:
raise AuthzConfigurationError(
'Unknown {} config params: {}'.format(_settings_key, unknown))
# Merge defaults with provided settings
defaults = _available_settings_keys - user_settings_keys
user_settings.update({key: _available_settings[key] for key in defaults})
_rectify(user_settings)
return types.MappingProxyType(user_settings)
|
[
"def",
"settings",
"(",
")",
":",
"# Get the user-provided settings",
"user_settings",
"=",
"dict",
"(",
"getattr",
"(",
"django_settings",
",",
"_settings_key",
",",
"{",
"}",
")",
")",
"user_settings_keys",
"=",
"set",
"(",
"user_settings",
".",
"keys",
"(",
")",
")",
"# Check for required but missing settings",
"missing",
"=",
"_required_settings_keys",
"-",
"user_settings_keys",
"if",
"missing",
":",
"raise",
"AuthzConfigurationError",
"(",
"'Missing required {} config: {}'",
".",
"format",
"(",
"_settings_key",
",",
"missing",
")",
")",
"# Check for unknown settings",
"unknown",
"=",
"user_settings_keys",
"-",
"_available_settings_keys",
"if",
"unknown",
":",
"raise",
"AuthzConfigurationError",
"(",
"'Unknown {} config params: {}'",
".",
"format",
"(",
"_settings_key",
",",
"unknown",
")",
")",
"# Merge defaults with provided settings",
"defaults",
"=",
"_available_settings_keys",
"-",
"user_settings_keys",
"user_settings",
".",
"update",
"(",
"{",
"key",
":",
"_available_settings",
"[",
"key",
"]",
"for",
"key",
"in",
"defaults",
"}",
")",
"_rectify",
"(",
"user_settings",
")",
"return",
"types",
".",
"MappingProxyType",
"(",
"user_settings",
")"
] |
Fetch the middleware settings.
:return dict: settings
|
[
"Fetch",
"the",
"middleware",
"settings",
"."
] |
71da52b38a7f5a16a2bde8f8ea97b3c11ccb1be1
|
https://github.com/Amsterdam/authorization_django/blob/71da52b38a7f5a16a2bde8f8ea97b3c11ccb1be1/authorization_django/config.py#L79-L102
|
241,182
|
selenol/selenol-python
|
selenol_python/services.py
|
SelenolClient.run
|
def run(self):
"""Run the service in infinitive loop processing requests."""
try:
while True:
message = self.connection.recv()
result = self.on_message(message)
if result:
self.connection.send(result)
except SelenolWebSocketClosedException as ex:
self.on_closed(0, '')
raise SelenolWebSocketClosedException() from ex
|
python
|
def run(self):
"""Run the service in infinitive loop processing requests."""
try:
while True:
message = self.connection.recv()
result = self.on_message(message)
if result:
self.connection.send(result)
except SelenolWebSocketClosedException as ex:
self.on_closed(0, '')
raise SelenolWebSocketClosedException() from ex
|
[
"def",
"run",
"(",
"self",
")",
":",
"try",
":",
"while",
"True",
":",
"message",
"=",
"self",
".",
"connection",
".",
"recv",
"(",
")",
"result",
"=",
"self",
".",
"on_message",
"(",
"message",
")",
"if",
"result",
":",
"self",
".",
"connection",
".",
"send",
"(",
"result",
")",
"except",
"SelenolWebSocketClosedException",
"as",
"ex",
":",
"self",
".",
"on_closed",
"(",
"0",
",",
"''",
")",
"raise",
"SelenolWebSocketClosedException",
"(",
")",
"from",
"ex"
] |
Run the service in infinitive loop processing requests.
|
[
"Run",
"the",
"service",
"in",
"infinitive",
"loop",
"processing",
"requests",
"."
] |
53775fdfc95161f4aca350305cb3459e6f2f808d
|
https://github.com/selenol/selenol-python/blob/53775fdfc95161f4aca350305cb3459e6f2f808d/selenol_python/services.py#L39-L49
|
241,183
|
selenol/selenol-python
|
selenol_python/services.py
|
SelenolService.on_message
|
def on_message(self, message):
"""Message from the backend has been received.
:param message: Message string received.
"""
work_unit = SelenolMessage(message)
request_id = work_unit.request_id
if message['reason'] == ['selenol', 'request']:
try:
result = self.on_request(work_unit)
if result is not None:
return {
'reason': ['request', 'result'],
'request_id': request_id,
'content': {
'content': result,
},
}
except SelenolException as e:
logging.exception(e)
return {
'reason': ['request', 'exception'],
'request_id': request_id,
'content': {
'message': str(e),
},
}
except Exception as e:
logging.exception(e)
return {
'reason': ['request', 'exception'],
'request_id': request_id,
'content': {
'message': 'Not a Selenol exception',
},
}
|
python
|
def on_message(self, message):
"""Message from the backend has been received.
:param message: Message string received.
"""
work_unit = SelenolMessage(message)
request_id = work_unit.request_id
if message['reason'] == ['selenol', 'request']:
try:
result = self.on_request(work_unit)
if result is not None:
return {
'reason': ['request', 'result'],
'request_id': request_id,
'content': {
'content': result,
},
}
except SelenolException as e:
logging.exception(e)
return {
'reason': ['request', 'exception'],
'request_id': request_id,
'content': {
'message': str(e),
},
}
except Exception as e:
logging.exception(e)
return {
'reason': ['request', 'exception'],
'request_id': request_id,
'content': {
'message': 'Not a Selenol exception',
},
}
|
[
"def",
"on_message",
"(",
"self",
",",
"message",
")",
":",
"work_unit",
"=",
"SelenolMessage",
"(",
"message",
")",
"request_id",
"=",
"work_unit",
".",
"request_id",
"if",
"message",
"[",
"'reason'",
"]",
"==",
"[",
"'selenol'",
",",
"'request'",
"]",
":",
"try",
":",
"result",
"=",
"self",
".",
"on_request",
"(",
"work_unit",
")",
"if",
"result",
"is",
"not",
"None",
":",
"return",
"{",
"'reason'",
":",
"[",
"'request'",
",",
"'result'",
"]",
",",
"'request_id'",
":",
"request_id",
",",
"'content'",
":",
"{",
"'content'",
":",
"result",
",",
"}",
",",
"}",
"except",
"SelenolException",
"as",
"e",
":",
"logging",
".",
"exception",
"(",
"e",
")",
"return",
"{",
"'reason'",
":",
"[",
"'request'",
",",
"'exception'",
"]",
",",
"'request_id'",
":",
"request_id",
",",
"'content'",
":",
"{",
"'message'",
":",
"str",
"(",
"e",
")",
",",
"}",
",",
"}",
"except",
"Exception",
"as",
"e",
":",
"logging",
".",
"exception",
"(",
"e",
")",
"return",
"{",
"'reason'",
":",
"[",
"'request'",
",",
"'exception'",
"]",
",",
"'request_id'",
":",
"request_id",
",",
"'content'",
":",
"{",
"'message'",
":",
"'Not a Selenol exception'",
",",
"}",
",",
"}"
] |
Message from the backend has been received.
:param message: Message string received.
|
[
"Message",
"from",
"the",
"backend",
"has",
"been",
"received",
"."
] |
53775fdfc95161f4aca350305cb3459e6f2f808d
|
https://github.com/selenol/selenol-python/blob/53775fdfc95161f4aca350305cb3459e6f2f808d/selenol_python/services.py#L115-L151
|
241,184
|
selenol/selenol-python
|
selenol_python/services.py
|
SelenolService.event
|
def event(self, request_id, trigger, event, message):
"""Create an event in the backend to be triggered given a circumstance.
:param request_id: Request ID of a involved session.
:param trigger: Circumstance that will trigger the event.
:param event: Reason of the message that will be created.
:param message: Content of the message that will be created.
"""
self.connection.send({
'reason': ['request', 'event'],
'request_id': request_id,
'content': {
'trigger': trigger,
'message': {
'reason': event,
'content': message,
},
},
})
|
python
|
def event(self, request_id, trigger, event, message):
"""Create an event in the backend to be triggered given a circumstance.
:param request_id: Request ID of a involved session.
:param trigger: Circumstance that will trigger the event.
:param event: Reason of the message that will be created.
:param message: Content of the message that will be created.
"""
self.connection.send({
'reason': ['request', 'event'],
'request_id': request_id,
'content': {
'trigger': trigger,
'message': {
'reason': event,
'content': message,
},
},
})
|
[
"def",
"event",
"(",
"self",
",",
"request_id",
",",
"trigger",
",",
"event",
",",
"message",
")",
":",
"self",
".",
"connection",
".",
"send",
"(",
"{",
"'reason'",
":",
"[",
"'request'",
",",
"'event'",
"]",
",",
"'request_id'",
":",
"request_id",
",",
"'content'",
":",
"{",
"'trigger'",
":",
"trigger",
",",
"'message'",
":",
"{",
"'reason'",
":",
"event",
",",
"'content'",
":",
"message",
",",
"}",
",",
"}",
",",
"}",
")"
] |
Create an event in the backend to be triggered given a circumstance.
:param request_id: Request ID of a involved session.
:param trigger: Circumstance that will trigger the event.
:param event: Reason of the message that will be created.
:param message: Content of the message that will be created.
|
[
"Create",
"an",
"event",
"in",
"the",
"backend",
"to",
"be",
"triggered",
"given",
"a",
"circumstance",
"."
] |
53775fdfc95161f4aca350305cb3459e6f2f808d
|
https://github.com/selenol/selenol-python/blob/53775fdfc95161f4aca350305cb3459e6f2f808d/selenol_python/services.py#L172-L190
|
241,185
|
selenol/selenol-python
|
selenol_python/services.py
|
SelenolService.send
|
def send(self, event, message):
"""Send a message to the backend.
:param reason: Reason of the message.
:param message: Message content.
"""
self.connection.send({
'reason': ['request', 'send'],
'content': {
'reason': event,
'request_id': self.request_counter,
'content': message,
},
})
self.request_counter = self.request_counter + 1
|
python
|
def send(self, event, message):
"""Send a message to the backend.
:param reason: Reason of the message.
:param message: Message content.
"""
self.connection.send({
'reason': ['request', 'send'],
'content': {
'reason': event,
'request_id': self.request_counter,
'content': message,
},
})
self.request_counter = self.request_counter + 1
|
[
"def",
"send",
"(",
"self",
",",
"event",
",",
"message",
")",
":",
"self",
".",
"connection",
".",
"send",
"(",
"{",
"'reason'",
":",
"[",
"'request'",
",",
"'send'",
"]",
",",
"'content'",
":",
"{",
"'reason'",
":",
"event",
",",
"'request_id'",
":",
"self",
".",
"request_counter",
",",
"'content'",
":",
"message",
",",
"}",
",",
"}",
")",
"self",
".",
"request_counter",
"=",
"self",
".",
"request_counter",
"+",
"1"
] |
Send a message to the backend.
:param reason: Reason of the message.
:param message: Message content.
|
[
"Send",
"a",
"message",
"to",
"the",
"backend",
"."
] |
53775fdfc95161f4aca350305cb3459e6f2f808d
|
https://github.com/selenol/selenol-python/blob/53775fdfc95161f4aca350305cb3459e6f2f808d/selenol_python/services.py#L192-L206
|
241,186
|
Infinidat/infi.recipe.console_scripts
|
src/infi/recipe/console_scripts/__init__.py
|
_get_matching_dist_in_location
|
def _get_matching_dist_in_location(dist, location):
"""
Check if `locations` contain only the one intended dist.
Return the dist with metadata in the new location.
"""
# Getting the dist from the environment causes the
# distribution meta data to be read. Cloning isn't
# good enough.
import pkg_resources
env = pkg_resources.Environment([location])
dists = [ d for project_name in env for d in env[project_name] ]
dist_infos = [ (d.project_name, d.version) for d in dists ]
if dist_infos == [(dist.project_name, dist.version)]:
return dists.pop()
if dist_infos == [(dist.project_name.lower(), dist.version)]:
return dists.pop()
|
python
|
def _get_matching_dist_in_location(dist, location):
"""
Check if `locations` contain only the one intended dist.
Return the dist with metadata in the new location.
"""
# Getting the dist from the environment causes the
# distribution meta data to be read. Cloning isn't
# good enough.
import pkg_resources
env = pkg_resources.Environment([location])
dists = [ d for project_name in env for d in env[project_name] ]
dist_infos = [ (d.project_name, d.version) for d in dists ]
if dist_infos == [(dist.project_name, dist.version)]:
return dists.pop()
if dist_infos == [(dist.project_name.lower(), dist.version)]:
return dists.pop()
|
[
"def",
"_get_matching_dist_in_location",
"(",
"dist",
",",
"location",
")",
":",
"# Getting the dist from the environment causes the",
"# distribution meta data to be read. Cloning isn't",
"# good enough.",
"import",
"pkg_resources",
"env",
"=",
"pkg_resources",
".",
"Environment",
"(",
"[",
"location",
"]",
")",
"dists",
"=",
"[",
"d",
"for",
"project_name",
"in",
"env",
"for",
"d",
"in",
"env",
"[",
"project_name",
"]",
"]",
"dist_infos",
"=",
"[",
"(",
"d",
".",
"project_name",
",",
"d",
".",
"version",
")",
"for",
"d",
"in",
"dists",
"]",
"if",
"dist_infos",
"==",
"[",
"(",
"dist",
".",
"project_name",
",",
"dist",
".",
"version",
")",
"]",
":",
"return",
"dists",
".",
"pop",
"(",
")",
"if",
"dist_infos",
"==",
"[",
"(",
"dist",
".",
"project_name",
".",
"lower",
"(",
")",
",",
"dist",
".",
"version",
")",
"]",
":",
"return",
"dists",
".",
"pop",
"(",
")"
] |
Check if `locations` contain only the one intended dist.
Return the dist with metadata in the new location.
|
[
"Check",
"if",
"locations",
"contain",
"only",
"the",
"one",
"intended",
"dist",
".",
"Return",
"the",
"dist",
"with",
"metadata",
"in",
"the",
"new",
"location",
"."
] |
7beab59537654ee475527dbbd59b0aa49348ebd3
|
https://github.com/Infinidat/infi.recipe.console_scripts/blob/7beab59537654ee475527dbbd59b0aa49348ebd3/src/infi/recipe/console_scripts/__init__.py#L98-L113
|
241,187
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelClassMixin.get_model_class
|
def get_model_class(self):
"""Get model class"""
if getattr(self, 'model', None):
return self.model
elif getattr(self, 'object', None):
return self.object.__class__
elif 'app' in self.kwargs and 'model' in self.kwargs:
return apps.get_model(self.kwargs.get('app'), self.kwargs.get('model'))
elif hasattr(self, 'get_queryset'):
return self.get_queryset().model
else:
return None
|
python
|
def get_model_class(self):
"""Get model class"""
if getattr(self, 'model', None):
return self.model
elif getattr(self, 'object', None):
return self.object.__class__
elif 'app' in self.kwargs and 'model' in self.kwargs:
return apps.get_model(self.kwargs.get('app'), self.kwargs.get('model'))
elif hasattr(self, 'get_queryset'):
return self.get_queryset().model
else:
return None
|
[
"def",
"get_model_class",
"(",
"self",
")",
":",
"if",
"getattr",
"(",
"self",
",",
"'model'",
",",
"None",
")",
":",
"return",
"self",
".",
"model",
"elif",
"getattr",
"(",
"self",
",",
"'object'",
",",
"None",
")",
":",
"return",
"self",
".",
"object",
".",
"__class__",
"elif",
"'app'",
"in",
"self",
".",
"kwargs",
"and",
"'model'",
"in",
"self",
".",
"kwargs",
":",
"return",
"apps",
".",
"get_model",
"(",
"self",
".",
"kwargs",
".",
"get",
"(",
"'app'",
")",
",",
"self",
".",
"kwargs",
".",
"get",
"(",
"'model'",
")",
")",
"elif",
"hasattr",
"(",
"self",
",",
"'get_queryset'",
")",
":",
"return",
"self",
".",
"get_queryset",
"(",
")",
".",
"model",
"else",
":",
"return",
"None"
] |
Get model class
|
[
"Get",
"model",
"class"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L55-L66
|
241,188
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelClassMixin.get_model_config
|
def get_model_config(self):
"""Get Trionyx model config"""
if not hasattr(self, '__config'):
setattr(self, '__config', models_config.get_config(self.get_model_class()))
return getattr(self, '__config', None)
|
python
|
def get_model_config(self):
"""Get Trionyx model config"""
if not hasattr(self, '__config'):
setattr(self, '__config', models_config.get_config(self.get_model_class()))
return getattr(self, '__config', None)
|
[
"def",
"get_model_config",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'__config'",
")",
":",
"setattr",
"(",
"self",
",",
"'__config'",
",",
"models_config",
".",
"get_config",
"(",
"self",
".",
"get_model_class",
"(",
")",
")",
")",
"return",
"getattr",
"(",
"self",
",",
"'__config'",
",",
"None",
")"
] |
Get Trionyx model config
|
[
"Get",
"Trionyx",
"model",
"config"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L68-L72
|
241,189
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelPermission.dispatch
|
def dispatch(self, request, *args, **kwargs):
"""Validate if user can use view"""
if False: # TODO do permission check based on Model
raise PermissionDenied
return super().dispatch(request, *args, **kwargs)
|
python
|
def dispatch(self, request, *args, **kwargs):
"""Validate if user can use view"""
if False: # TODO do permission check based on Model
raise PermissionDenied
return super().dispatch(request, *args, **kwargs)
|
[
"def",
"dispatch",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"False",
":",
"# TODO do permission check based on Model",
"raise",
"PermissionDenied",
"return",
"super",
"(",
")",
".",
"dispatch",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Validate if user can use view
|
[
"Validate",
"if",
"user",
"can",
"use",
"view"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L78-L82
|
241,190
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
SessionValueMixin.get_session_value
|
def get_session_value(self, name, default=None):
"""Get value from session"""
session_name = 'list_{}_{}_{}'.format(self.kwargs.get('app'), self.kwargs.get('model'), name)
return self.request.session.get(session_name, default)
|
python
|
def get_session_value(self, name, default=None):
"""Get value from session"""
session_name = 'list_{}_{}_{}'.format(self.kwargs.get('app'), self.kwargs.get('model'), name)
return self.request.session.get(session_name, default)
|
[
"def",
"get_session_value",
"(",
"self",
",",
"name",
",",
"default",
"=",
"None",
")",
":",
"session_name",
"=",
"'list_{}_{}_{}'",
".",
"format",
"(",
"self",
".",
"kwargs",
".",
"get",
"(",
"'app'",
")",
",",
"self",
".",
"kwargs",
".",
"get",
"(",
"'model'",
")",
",",
"name",
")",
"return",
"self",
".",
"request",
".",
"session",
".",
"get",
"(",
"session_name",
",",
"default",
")"
] |
Get value from session
|
[
"Get",
"value",
"from",
"session"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L98-L101
|
241,191
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
SessionValueMixin.save_value
|
def save_value(self, name, value):
"""Save value to session"""
session_name = 'list_{}_{}_{}'.format(self.kwargs.get('app'), self.kwargs.get('model'), name)
self.request.session[session_name] = value
setattr(self, name, value)
return value
|
python
|
def save_value(self, name, value):
"""Save value to session"""
session_name = 'list_{}_{}_{}'.format(self.kwargs.get('app'), self.kwargs.get('model'), name)
self.request.session[session_name] = value
setattr(self, name, value)
return value
|
[
"def",
"save_value",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"session_name",
"=",
"'list_{}_{}_{}'",
".",
"format",
"(",
"self",
".",
"kwargs",
".",
"get",
"(",
"'app'",
")",
",",
"self",
".",
"kwargs",
".",
"get",
"(",
"'model'",
")",
",",
"name",
")",
"self",
".",
"request",
".",
"session",
"[",
"session_name",
"]",
"=",
"value",
"setattr",
"(",
"self",
",",
"name",
",",
"value",
")",
"return",
"value"
] |
Save value to session
|
[
"Save",
"value",
"to",
"session"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L103-L108
|
241,192
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ListView.get_title
|
def get_title(self):
"""Get page title"""
if self.title:
return self.title
return self.get_model_class()._meta.verbose_name_plural
|
python
|
def get_title(self):
"""Get page title"""
if self.title:
return self.title
return self.get_model_class()._meta.verbose_name_plural
|
[
"def",
"get_title",
"(",
"self",
")",
":",
"if",
"self",
".",
"title",
":",
"return",
"self",
".",
"title",
"return",
"self",
".",
"get_model_class",
"(",
")",
".",
"_meta",
".",
"verbose_name_plural"
] |
Get page title
|
[
"Get",
"page",
"title"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L131-L135
|
241,193
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelListMixin.get_page
|
def get_page(self, paginator):
"""Get current page or page in session"""
page = int(self.get_and_save_value('page', 1))
if page < 1:
return self.save_value('page', 1)
if page > paginator.num_pages:
return self.save_value('page', paginator.num_pages)
return page
|
python
|
def get_page(self, paginator):
"""Get current page or page in session"""
page = int(self.get_and_save_value('page', 1))
if page < 1:
return self.save_value('page', 1)
if page > paginator.num_pages:
return self.save_value('page', paginator.num_pages)
return page
|
[
"def",
"get_page",
"(",
"self",
",",
"paginator",
")",
":",
"page",
"=",
"int",
"(",
"self",
".",
"get_and_save_value",
"(",
"'page'",
",",
"1",
")",
")",
"if",
"page",
"<",
"1",
":",
"return",
"self",
".",
"save_value",
"(",
"'page'",
",",
"1",
")",
"if",
"page",
">",
"paginator",
".",
"num_pages",
":",
"return",
"self",
".",
"save_value",
"(",
"'page'",
",",
"paginator",
".",
"num_pages",
")",
"return",
"page"
] |
Get current page or page in session
|
[
"Get",
"current",
"page",
"or",
"page",
"in",
"session"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L168-L176
|
241,194
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelListMixin.get_search
|
def get_search(self):
"""Get current search or search from session, reset page if search is changed"""
old_search = self.get_session_value('search', '')
search = self.get_and_save_value('search', '')
if old_search != search:
self.page = 1
self.get_session_value('page', self.page)
return search
|
python
|
def get_search(self):
"""Get current search or search from session, reset page if search is changed"""
old_search = self.get_session_value('search', '')
search = self.get_and_save_value('search', '')
if old_search != search:
self.page = 1
self.get_session_value('page', self.page)
return search
|
[
"def",
"get_search",
"(",
"self",
")",
":",
"old_search",
"=",
"self",
".",
"get_session_value",
"(",
"'search'",
",",
"''",
")",
"search",
"=",
"self",
".",
"get_and_save_value",
"(",
"'search'",
",",
"''",
")",
"if",
"old_search",
"!=",
"search",
":",
"self",
".",
"page",
"=",
"1",
"self",
".",
"get_session_value",
"(",
"'page'",
",",
"self",
".",
"page",
")",
"return",
"search"
] |
Get current search or search from session, reset page if search is changed
|
[
"Get",
"current",
"search",
"or",
"search",
"from",
"session",
"reset",
"page",
"if",
"search",
"is",
"changed"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L186-L193
|
241,195
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelListMixin.get_all_fields
|
def get_all_fields(self):
"""Get all aviable fields"""
return {
name: {
'name': name,
'label': field['label'],
}
for name, field in self.get_model_config().get_list_fields().items()
}
|
python
|
def get_all_fields(self):
"""Get all aviable fields"""
return {
name: {
'name': name,
'label': field['label'],
}
for name, field in self.get_model_config().get_list_fields().items()
}
|
[
"def",
"get_all_fields",
"(",
"self",
")",
":",
"return",
"{",
"name",
":",
"{",
"'name'",
":",
"name",
",",
"'label'",
":",
"field",
"[",
"'label'",
"]",
",",
"}",
"for",
"name",
",",
"field",
"in",
"self",
".",
"get_model_config",
"(",
")",
".",
"get_list_fields",
"(",
")",
".",
"items",
"(",
")",
"}"
] |
Get all aviable fields
|
[
"Get",
"all",
"aviable",
"fields"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L195-L203
|
241,196
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelListMixin.get_current_fields
|
def get_current_fields(self):
"""Get current list to be used"""
if hasattr(self, 'current_fields') and self.current_fields:
return self.current_fields
field_attribute = 'list_{}_{}_fields'.format(self.kwargs.get('app'), self.kwargs.get('model'))
current_fields = self.request.user.attributes.get_attribute(field_attribute, [])
request_fields = self.request.POST.get('selected_fields', None)
if request_fields and ','.join(current_fields) != request_fields:
# TODO validate fields
current_fields = request_fields.split(',')
self.request.user.attributes.set_attribute(field_attribute, current_fields)
elif request_fields:
current_fields = request_fields.split(',')
if not current_fields:
config = self.get_model_config()
current_fields = config.list_default_fields if config.list_default_fields else ['created_at', 'id']
self.current_fields = current_fields
return current_fields
|
python
|
def get_current_fields(self):
"""Get current list to be used"""
if hasattr(self, 'current_fields') and self.current_fields:
return self.current_fields
field_attribute = 'list_{}_{}_fields'.format(self.kwargs.get('app'), self.kwargs.get('model'))
current_fields = self.request.user.attributes.get_attribute(field_attribute, [])
request_fields = self.request.POST.get('selected_fields', None)
if request_fields and ','.join(current_fields) != request_fields:
# TODO validate fields
current_fields = request_fields.split(',')
self.request.user.attributes.set_attribute(field_attribute, current_fields)
elif request_fields:
current_fields = request_fields.split(',')
if not current_fields:
config = self.get_model_config()
current_fields = config.list_default_fields if config.list_default_fields else ['created_at', 'id']
self.current_fields = current_fields
return current_fields
|
[
"def",
"get_current_fields",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'current_fields'",
")",
"and",
"self",
".",
"current_fields",
":",
"return",
"self",
".",
"current_fields",
"field_attribute",
"=",
"'list_{}_{}_fields'",
".",
"format",
"(",
"self",
".",
"kwargs",
".",
"get",
"(",
"'app'",
")",
",",
"self",
".",
"kwargs",
".",
"get",
"(",
"'model'",
")",
")",
"current_fields",
"=",
"self",
".",
"request",
".",
"user",
".",
"attributes",
".",
"get_attribute",
"(",
"field_attribute",
",",
"[",
"]",
")",
"request_fields",
"=",
"self",
".",
"request",
".",
"POST",
".",
"get",
"(",
"'selected_fields'",
",",
"None",
")",
"if",
"request_fields",
"and",
"','",
".",
"join",
"(",
"current_fields",
")",
"!=",
"request_fields",
":",
"# TODO validate fields",
"current_fields",
"=",
"request_fields",
".",
"split",
"(",
"','",
")",
"self",
".",
"request",
".",
"user",
".",
"attributes",
".",
"set_attribute",
"(",
"field_attribute",
",",
"current_fields",
")",
"elif",
"request_fields",
":",
"current_fields",
"=",
"request_fields",
".",
"split",
"(",
"','",
")",
"if",
"not",
"current_fields",
":",
"config",
"=",
"self",
".",
"get_model_config",
"(",
")",
"current_fields",
"=",
"config",
".",
"list_default_fields",
"if",
"config",
".",
"list_default_fields",
"else",
"[",
"'created_at'",
",",
"'id'",
"]",
"self",
".",
"current_fields",
"=",
"current_fields",
"return",
"current_fields"
] |
Get current list to be used
|
[
"Get",
"current",
"list",
"to",
"be",
"used"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L205-L226
|
241,197
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ModelListMixin.search_queryset
|
def search_queryset(self):
"""Get search query set"""
queryset = self.get_model_class().objects.get_queryset()
if self.get_model_config().list_select_related:
queryset = queryset.select_related(*self.get_model_config().list_select_related)
return watson.filter(queryset, self.get_search(), ranking=False)
|
python
|
def search_queryset(self):
"""Get search query set"""
queryset = self.get_model_class().objects.get_queryset()
if self.get_model_config().list_select_related:
queryset = queryset.select_related(*self.get_model_config().list_select_related)
return watson.filter(queryset, self.get_search(), ranking=False)
|
[
"def",
"search_queryset",
"(",
"self",
")",
":",
"queryset",
"=",
"self",
".",
"get_model_class",
"(",
")",
".",
"objects",
".",
"get_queryset",
"(",
")",
"if",
"self",
".",
"get_model_config",
"(",
")",
".",
"list_select_related",
":",
"queryset",
"=",
"queryset",
".",
"select_related",
"(",
"*",
"self",
".",
"get_model_config",
"(",
")",
".",
"list_select_related",
")",
"return",
"watson",
".",
"filter",
"(",
"queryset",
",",
"self",
".",
"get_search",
"(",
")",
",",
"ranking",
"=",
"False",
")"
] |
Get search query set
|
[
"Get",
"search",
"query",
"set"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L238-L245
|
241,198
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ListJsendView.handle_request
|
def handle_request(self, request, *args, **kwargs):
"""Give back list items + config"""
paginator = self.get_paginator()
# Call search first, it will reset page if search is changed
search = self.get_search()
page = self.get_page(paginator)
items = self.get_items(paginator, page)
return {
'search': search,
'page': page,
'page_size': self.get_page_size(),
'num_pages': paginator.num_pages,
'sort': self.get_sort(),
'current_fields': self.get_current_fields(),
'fields': self.get_all_fields(),
'items': items,
}
|
python
|
def handle_request(self, request, *args, **kwargs):
"""Give back list items + config"""
paginator = self.get_paginator()
# Call search first, it will reset page if search is changed
search = self.get_search()
page = self.get_page(paginator)
items = self.get_items(paginator, page)
return {
'search': search,
'page': page,
'page_size': self.get_page_size(),
'num_pages': paginator.num_pages,
'sort': self.get_sort(),
'current_fields': self.get_current_fields(),
'fields': self.get_all_fields(),
'items': items,
}
|
[
"def",
"handle_request",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"paginator",
"=",
"self",
".",
"get_paginator",
"(",
")",
"# Call search first, it will reset page if search is changed",
"search",
"=",
"self",
".",
"get_search",
"(",
")",
"page",
"=",
"self",
".",
"get_page",
"(",
"paginator",
")",
"items",
"=",
"self",
".",
"get_items",
"(",
"paginator",
",",
"page",
")",
"return",
"{",
"'search'",
":",
"search",
",",
"'page'",
":",
"page",
",",
"'page_size'",
":",
"self",
".",
"get_page_size",
"(",
")",
",",
"'num_pages'",
":",
"paginator",
".",
"num_pages",
",",
"'sort'",
":",
"self",
".",
"get_sort",
"(",
")",
",",
"'current_fields'",
":",
"self",
".",
"get_current_fields",
"(",
")",
",",
"'fields'",
":",
"self",
".",
"get_all_fields",
"(",
")",
",",
"'items'",
":",
"items",
",",
"}"
] |
Give back list items + config
|
[
"Give",
"back",
"list",
"items",
"+",
"config"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L259-L276
|
241,199
|
krukas/Trionyx
|
trionyx/trionyx/views/core.py
|
ListJsendView.get_items
|
def get_items(self, paginator, current_page):
"""Get list items for current page"""
fields = self.get_model_config().get_list_fields()
page = paginator.page(current_page)
items = []
for item in page:
items.append({
'id': item.id,
'url': item.get_absolute_url(),
'row_data': [
fields[field]['renderer'](item, field)
for field in self.get_current_fields()
]
})
return items
|
python
|
def get_items(self, paginator, current_page):
"""Get list items for current page"""
fields = self.get_model_config().get_list_fields()
page = paginator.page(current_page)
items = []
for item in page:
items.append({
'id': item.id,
'url': item.get_absolute_url(),
'row_data': [
fields[field]['renderer'](item, field)
for field in self.get_current_fields()
]
})
return items
|
[
"def",
"get_items",
"(",
"self",
",",
"paginator",
",",
"current_page",
")",
":",
"fields",
"=",
"self",
".",
"get_model_config",
"(",
")",
".",
"get_list_fields",
"(",
")",
"page",
"=",
"paginator",
".",
"page",
"(",
"current_page",
")",
"items",
"=",
"[",
"]",
"for",
"item",
"in",
"page",
":",
"items",
".",
"append",
"(",
"{",
"'id'",
":",
"item",
".",
"id",
",",
"'url'",
":",
"item",
".",
"get_absolute_url",
"(",
")",
",",
"'row_data'",
":",
"[",
"fields",
"[",
"field",
"]",
"[",
"'renderer'",
"]",
"(",
"item",
",",
"field",
")",
"for",
"field",
"in",
"self",
".",
"get_current_fields",
"(",
")",
"]",
"}",
")",
"return",
"items"
] |
Get list items for current page
|
[
"Get",
"list",
"items",
"for",
"current",
"page"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/views/core.py#L278-L294
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.