| id (int32, 0-252k) | repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class: python) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|
240,300
|
ironfroggy/django-better-cache
|
bettercache/objects.py
|
CacheModel.get
|
def get(cls, **kwargs):
"""Get a copy of the type from the cache and reconstruct it."""
data = cls._get(**kwargs)
if data is None:
new = cls()
new.from_miss(**kwargs)
return new
return cls.deserialize(data)
|
python
|
def get(cls, **kwargs):
"""Get a copy of the type from the cache and reconstruct it."""
data = cls._get(**kwargs)
if data is None:
new = cls()
new.from_miss(**kwargs)
return new
return cls.deserialize(data)
|
[
"def",
"get",
"(",
"cls",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"cls",
".",
"_get",
"(",
"*",
"*",
"kwargs",
")",
"if",
"data",
"is",
"None",
":",
"new",
"=",
"cls",
"(",
")",
"new",
".",
"from_miss",
"(",
"*",
"*",
"kwargs",
")",
"return",
"new",
"return",
"cls",
".",
"deserialize",
"(",
"data",
")"
] |
Get a copy of the type from the cache and reconstruct it.
|
[
"Get",
"a",
"copy",
"of",
"the",
"type",
"from",
"the",
"cache",
"and",
"reconstruct",
"it",
"."
] |
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
|
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L144-L152
|
240,301
|
ironfroggy/django-better-cache
|
bettercache/objects.py
|
CacheModel.get_or_create
|
def get_or_create(cls, **kwargs):
"""Get a copy of the type from the cache, or create a new one."""
data = cls._get(**kwargs)
if data is None:
return cls(**kwargs), True
return cls.deserialize(data), False
|
python
|
def get_or_create(cls, **kwargs):
"""Get a copy of the type from the cache, or create a new one."""
data = cls._get(**kwargs)
if data is None:
return cls(**kwargs), True
return cls.deserialize(data), False
|
[
"def",
"get_or_create",
"(",
"cls",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"cls",
".",
"_get",
"(",
"*",
"*",
"kwargs",
")",
"if",
"data",
"is",
"None",
":",
"return",
"cls",
"(",
"*",
"*",
"kwargs",
")",
",",
"True",
"return",
"cls",
".",
"deserialize",
"(",
"data",
")",
",",
"False"
] |
Get a copy of the type from the cache, or create a new one.
|
[
"Get",
"a",
"copy",
"of",
"the",
"type",
"from",
"the",
"cache",
"or",
"create",
"a",
"new",
"one",
"."
] |
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
|
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L155-L161
|
240,302
|
ironfroggy/django-better-cache
|
bettercache/objects.py
|
CacheModel.from_miss
|
def from_miss(self, **kwargs):
"""Called to initialize an instance when it is not found in the cache.
For example, if your CacheModel should pull data from the database to
populate the cache,
...
def from_miss(self, username):
user = User.objects.get(username=username)
self.email = user.email
self.full_name = user.get_full_name()
"""
raise type(self).Missing(type(self)(**kwargs).key())
|
python
|
def from_miss(self, **kwargs):
"""Called to initialize an instance when it is not found in the cache.
For example, if your CacheModel should pull data from the database to
populate the cache,
...
def from_miss(self, username):
user = User.objects.get(username=username)
self.email = user.email
self.full_name = user.get_full_name()
"""
raise type(self).Missing(type(self)(**kwargs).key())
|
[
"def",
"from_miss",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"raise",
"type",
"(",
"self",
")",
".",
"Missing",
"(",
"type",
"(",
"self",
")",
"(",
"*",
"*",
"kwargs",
")",
".",
"key",
"(",
")",
")"
] |
Called to initialize an instance when it is not found in the cache.
For example, if your CacheModel should pull data from the database to
populate the cache,
...
def from_miss(self, username):
user = User.objects.get(username=username)
self.email = user.email
self.full_name = user.get_full_name()
|
[
"Called",
"to",
"initialize",
"an",
"instance",
"when",
"it",
"is",
"not",
"found",
"in",
"the",
"cache",
"."
] |
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
|
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L163-L177
|
240,303
|
ironfroggy/django-better-cache
|
bettercache/objects.py
|
CacheModel.delete
|
def delete(self):
"""Deleting any existing copy of this object from the cache."""
key = self._key(self._all_keys())
_cache.delete(key)
|
python
|
def delete(self):
"""Deleting any existing copy of this object from the cache."""
key = self._key(self._all_keys())
_cache.delete(key)
|
[
"def",
"delete",
"(",
"self",
")",
":",
"key",
"=",
"self",
".",
"_key",
"(",
"self",
".",
"_all_keys",
"(",
")",
")",
"_cache",
".",
"delete",
"(",
"key",
")"
] |
Deleting any existing copy of this object from the cache.
|
[
"Deleting",
"any",
"existing",
"copy",
"of",
"this",
"object",
"from",
"the",
"cache",
"."
] |
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
|
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L179-L183
|
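The four CacheModel rows above (get, get_or_create, from_miss, delete) together describe a small cache-backed model API. Below is a minimal usage sketch; the Key/Field declarations, the import names, and the Django User lookup follow the from_miss docstring's example but are assumptions, not quoted from the dataset.

```python
# Hedged sketch: the Key/Field descriptors and the Django User lookup are assumptions
# modeled on the from_miss docstring above; only get(), get_or_create(), from_miss()
# and delete() come from the quoted CacheModel rows.
from bettercache.objects import CacheModel, Key, Field
from django.contrib.auth.models import User

class CachedUser(CacheModel):
    username = Key()       # assumed key descriptor
    email = Field()        # assumed value descriptor
    full_name = Field()

    def from_miss(self, username):
        # Populate the instance when the cache has no entry for this key.
        user = User.objects.get(username=username)
        self.email = user.email
        self.full_name = user.get_full_name()

cached = CachedUser.get(username="alice")                 # hit -> deserialized copy; miss -> from_miss()
obj, created = CachedUser.get_or_create(username="bob")   # second element flags a cache miss
cached.delete()                                            # drop any cached copy of this key
```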
240,304
|
diffeo/yakonfig
|
yakonfig/merge.py
|
overlay_config
|
def overlay_config(base, overlay):
'''Overlay one configuration over another.
This overlays `overlay` on top of `base` as follows:
* If either isn't a dictionary, returns `overlay`.
* Any key in `base` not present in `overlay` is present in the
result with its original value.
* Any key in `overlay` with value :const:`None` is not present in
the result, unless it also is :const:`None` in `base`.
* Any key in `overlay` not present in `base` and not :const:`None`
is present in the result with its new value.
* Any key in both `overlay` and `base` with a non-:const:`None` value
is recursively overlaid.
>>> overlay_config({'a': 'b'}, {'a': 'c'})
{'a': 'c'}
>>> overlay_config({'a': 'b'}, {'c': 'd'})
{'a': 'b', 'c': 'd'}
>>> overlay_config({'a': {'b': 'c'}},
... {'a': {'b': 'd', 'e': 'f'}})
{'a': {'b': 'd', 'e': 'f'}}
>>> overlay_config({'a': 'b', 'c': 'd'}, {'a': None})
{'c': 'd'}
:param dict base: original configuration
:param dict overlay: overlay configuration
:return: new overlaid configuration
:returntype dict:
'''
if not isinstance(base, collections.Mapping):
return overlay
if not isinstance(overlay, collections.Mapping):
return overlay
result = dict()
for k in iterkeys(base):
if k not in overlay:
result[k] = base[k]
for k, v in iteritems(overlay):
if v is not None or (k in base and base[k] is None):
if k in base:
v = overlay_config(base[k], v)
result[k] = v
return result
|
python
|
def overlay_config(base, overlay):
'''Overlay one configuration over another.
This overlays `overlay` on top of `base` as follows:
* If either isn't a dictionary, returns `overlay`.
* Any key in `base` not present in `overlay` is present in the
result with its original value.
* Any key in `overlay` with value :const:`None` is not present in
the result, unless it also is :const:`None` in `base`.
* Any key in `overlay` not present in `base` and not :const:`None`
is present in the result with its new value.
* Any key in both `overlay` and `base` with a non-:const:`None` value
is recursively overlaid.
>>> overlay_config({'a': 'b'}, {'a': 'c'})
{'a': 'c'}
>>> overlay_config({'a': 'b'}, {'c': 'd'})
{'a': 'b', 'c': 'd'}
>>> overlay_config({'a': {'b': 'c'}},
... {'a': {'b': 'd', 'e': 'f'}})
{'a': {'b': 'd', 'e': 'f'}}
>>> overlay_config({'a': 'b', 'c': 'd'}, {'a': None})
{'c': 'd'}
:param dict base: original configuration
:param dict overlay: overlay configuration
:return: new overlaid configuration
:returntype dict:
'''
if not isinstance(base, collections.Mapping):
return overlay
if not isinstance(overlay, collections.Mapping):
return overlay
result = dict()
for k in iterkeys(base):
if k not in overlay:
result[k] = base[k]
for k, v in iteritems(overlay):
if v is not None or (k in base and base[k] is None):
if k in base:
v = overlay_config(base[k], v)
result[k] = v
return result
|
[
"def",
"overlay_config",
"(",
"base",
",",
"overlay",
")",
":",
"if",
"not",
"isinstance",
"(",
"base",
",",
"collections",
".",
"Mapping",
")",
":",
"return",
"overlay",
"if",
"not",
"isinstance",
"(",
"overlay",
",",
"collections",
".",
"Mapping",
")",
":",
"return",
"overlay",
"result",
"=",
"dict",
"(",
")",
"for",
"k",
"in",
"iterkeys",
"(",
"base",
")",
":",
"if",
"k",
"not",
"in",
"overlay",
":",
"result",
"[",
"k",
"]",
"=",
"base",
"[",
"k",
"]",
"for",
"k",
",",
"v",
"in",
"iteritems",
"(",
"overlay",
")",
":",
"if",
"v",
"is",
"not",
"None",
"or",
"(",
"k",
"in",
"base",
"and",
"base",
"[",
"k",
"]",
"is",
"None",
")",
":",
"if",
"k",
"in",
"base",
":",
"v",
"=",
"overlay_config",
"(",
"base",
"[",
"k",
"]",
",",
"v",
")",
"result",
"[",
"k",
"]",
"=",
"v",
"return",
"result"
] |
Overlay one configuration over another.
This overlays `overlay` on top of `base` as follows:
* If either isn't a dictionary, returns `overlay`.
* Any key in `base` not present in `overlay` is present in the
result with its original value.
* Any key in `overlay` with value :const:`None` is not present in
the result, unless it also is :const:`None` in `base`.
* Any key in `overlay` not present in `base` and not :const:`None`
is present in the result with its new value.
* Any key in both `overlay` and `base` with a non-:const:`None` value
is recursively overlaid.
>>> overlay_config({'a': 'b'}, {'a': 'c'})
{'a': 'c'}
>>> overlay_config({'a': 'b'}, {'c': 'd'})
{'a': 'b', 'c': 'd'}
>>> overlay_config({'a': {'b': 'c'}},
... {'a': {'b': 'd', 'e': 'f'}})
{'a': {'b': 'd', 'e': 'f'}}
>>> overlay_config({'a': 'b', 'c': 'd'}, {'a': None})
{'c': 'd'}
:param dict base: original configuration
:param dict overlay: overlay configuration
:return: new overlaid configuration
:returntype dict:
|
[
"Overlay",
"one",
"configuration",
"over",
"another",
"."
] |
412e195da29b4f4fc7b72967c192714a6f5eaeb5
|
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/merge.py#L20-L64
|
240,305
|
diffeo/yakonfig
|
yakonfig/merge.py
|
diff_config
|
def diff_config(base, target):
'''Find the differences between two configurations.
This finds a delta configuration from `base` to `target`, such that
calling :func:`overlay_config` with `base` and the result of this
function yields `target`. This works as follows:
* If both are identical (of any type), returns an empty dictionary.
* If either isn't a dictionary, returns `target`.
* Any key in `target` not present in `base` is included in the output
with its value from `target`.
* Any key in `base` not present in `target` is included in the output
with value :const:`None`.
* Any keys present in both dictionaries are recursively merged.
>>> diff_config({'a': 'b'}, {})
{'a': None}
>>> diff_config({'a': 'b'}, {'a': 'b', 'c': 'd'})
{'c': 'd'}
:param dict base: original configuration
:param dict target: new configuration
:return: overlay configuration
:returntype dict:
'''
if not isinstance(base, collections.Mapping):
if base == target:
return {}
return target
if not isinstance(target, collections.Mapping):
return target
result = dict()
for k in iterkeys(base):
if k not in target:
result[k] = None
for k, v in iteritems(target):
if k in base:
merged = diff_config(base[k], v)
if merged != {}:
result[k] = merged
else:
result[k] = v
return result
|
python
|
def diff_config(base, target):
'''Find the differences between two configurations.
This finds a delta configuration from `base` to `target`, such that
calling :func:`overlay_config` with `base` and the result of this
function yields `target`. This works as follows:
* If both are identical (of any type), returns an empty dictionary.
* If either isn't a dictionary, returns `target`.
* Any key in `target` not present in `base` is included in the output
with its value from `target`.
* Any key in `base` not present in `target` is included in the output
with value :const:`None`.
* Any keys present in both dictionaries are recursively merged.
>>> diff_config({'a': 'b'}, {})
{'a': None}
>>> diff_config({'a': 'b'}, {'a': 'b', 'c': 'd'})
{'c': 'd'}
:param dict base: original configuration
:param dict target: new configuration
:return: overlay configuration
:returntype dict:
'''
if not isinstance(base, collections.Mapping):
if base == target:
return {}
return target
if not isinstance(target, collections.Mapping):
return target
result = dict()
for k in iterkeys(base):
if k not in target:
result[k] = None
for k, v in iteritems(target):
if k in base:
merged = diff_config(base[k], v)
if merged != {}:
result[k] = merged
else:
result[k] = v
return result
|
[
"def",
"diff_config",
"(",
"base",
",",
"target",
")",
":",
"if",
"not",
"isinstance",
"(",
"base",
",",
"collections",
".",
"Mapping",
")",
":",
"if",
"base",
"==",
"target",
":",
"return",
"{",
"}",
"return",
"target",
"if",
"not",
"isinstance",
"(",
"target",
",",
"collections",
".",
"Mapping",
")",
":",
"return",
"target",
"result",
"=",
"dict",
"(",
")",
"for",
"k",
"in",
"iterkeys",
"(",
"base",
")",
":",
"if",
"k",
"not",
"in",
"target",
":",
"result",
"[",
"k",
"]",
"=",
"None",
"for",
"k",
",",
"v",
"in",
"iteritems",
"(",
"target",
")",
":",
"if",
"k",
"in",
"base",
":",
"merged",
"=",
"diff_config",
"(",
"base",
"[",
"k",
"]",
",",
"v",
")",
"if",
"merged",
"!=",
"{",
"}",
":",
"result",
"[",
"k",
"]",
"=",
"merged",
"else",
":",
"result",
"[",
"k",
"]",
"=",
"v",
"return",
"result"
] |
Find the differences between two configurations.
This finds a delta configuration from `base` to `target`, such that
calling :func:`overlay_config` with `base` and the result of this
function yields `target`. This works as follows:
* If both are identical (of any type), returns an empty dictionary.
* If either isn't a dictionary, returns `target`.
* Any key in `target` not present in `base` is included in the output
with its value from `target`.
* Any key in `base` not present in `target` is included in the output
with value :const:`None`.
* Any keys present in both dictionaries are recursively merged.
>>> diff_config({'a': 'b'}, {})
{'a': None}
>>> diff_config({'a': 'b'}, {'a': 'b', 'c': 'd'})
{'c': 'd'}
:param dict base: original configuration
:param dict target: new configuration
:return: overlay configuration
:returntype dict:
|
[
"Find",
"the",
"differences",
"between",
"two",
"configurations",
"."
] |
412e195da29b4f4fc7b72967c192714a6f5eaeb5
|
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/merge.py#L67-L110
|
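The two yakonfig rows above are complementary: diff_config's docstring states that overlaying its result back onto base reproduces target. A short round-trip sketch with illustrative values:

```python
from yakonfig.merge import overlay_config, diff_config

base = {'logging': {'level': 'info', 'file': 'app.log'}, 'workers': 4}
target = {'logging': {'level': 'debug'}, 'workers': 4}

delta = diff_config(base, target)
# delta == {'logging': {'file': None, 'level': 'debug'}}  (None marks a removed key)
assert overlay_config(base, delta) == target
```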
240,306
|
harlowja/constructs
|
constructs/tree.py
|
pformat
|
def pformat(tree):
"""Recursively formats a tree into a nice string representation.
Example Input:
yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
Example Output:
CEO
|__Infra
| |__Boss
| |__Me
|__Mobile
|__Mail
"""
if tree.empty():
return ''
buf = six.StringIO()
for line in _pformat(tree.root, 0):
buf.write(line + "\n")
return buf.getvalue().strip()
|
python
|
def pformat(tree):
"""Recursively formats a tree into a nice string representation.
Example Input:
yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
Example Output:
CEO
|__Infra
| |__Boss
| |__Me
|__Mobile
|__Mail
"""
if tree.empty():
return ''
buf = six.StringIO()
for line in _pformat(tree.root, 0):
buf.write(line + "\n")
return buf.getvalue().strip()
|
[
"def",
"pformat",
"(",
"tree",
")",
":",
"if",
"tree",
".",
"empty",
"(",
")",
":",
"return",
"''",
"buf",
"=",
"six",
".",
"StringIO",
"(",
")",
"for",
"line",
"in",
"_pformat",
"(",
"tree",
".",
"root",
",",
"0",
")",
":",
"buf",
".",
"write",
"(",
"line",
"+",
"\"\\n\"",
")",
"return",
"buf",
".",
"getvalue",
"(",
")",
".",
"strip",
"(",
")"
] |
Recursively formats a tree into a nice string representation.
Example Input:
yahoo = tt.Tree(tt.Node("CEO"))
yahoo.root.add(tt.Node("Infra"))
yahoo.root[0].add(tt.Node("Boss"))
yahoo.root[0][0].add(tt.Node("Me"))
yahoo.root.add(tt.Node("Mobile"))
yahoo.root.add(tt.Node("Mail"))
Example Output:
CEO
|__Infra
| |__Boss
| |__Me
|__Mobile
|__Mail
|
[
"Recursively",
"formats",
"a",
"tree",
"into",
"a",
"nice",
"string",
"representation",
"."
] |
53f20a8422bbd56294d5c0161081cb5875511fab
|
https://github.com/harlowja/constructs/blob/53f20a8422bbd56294d5c0161081cb5875511fab/constructs/tree.py#L172-L196
|
240,307
|
harlowja/constructs
|
constructs/tree.py
|
Node.path_iter
|
def path_iter(self, include_self=True):
"""Yields back the path from this node to the root node."""
if include_self:
node = self
else:
node = self.parent
while node is not None:
yield node
node = node.parent
|
python
|
def path_iter(self, include_self=True):
"""Yields back the path from this node to the root node."""
if include_self:
node = self
else:
node = self.parent
while node is not None:
yield node
node = node.parent
|
[
"def",
"path_iter",
"(",
"self",
",",
"include_self",
"=",
"True",
")",
":",
"if",
"include_self",
":",
"node",
"=",
"self",
"else",
":",
"node",
"=",
"self",
".",
"parent",
"while",
"node",
"is",
"not",
"None",
":",
"yield",
"node",
"node",
"=",
"node",
".",
"parent"
] |
Yields back the path from this node to the root node.
|
[
"Yields",
"back",
"the",
"path",
"from",
"this",
"node",
"to",
"the",
"root",
"node",
"."
] |
53f20a8422bbd56294d5c0161081cb5875511fab
|
https://github.com/harlowja/constructs/blob/53f20a8422bbd56294d5c0161081cb5875511fab/constructs/tree.py#L101-L109
|
240,308
|
harlowja/constructs
|
constructs/tree.py
|
Node.child_count
|
def child_count(self, only_direct=True):
"""Returns how many children this node has, either only the direct
children of this node or inclusive of all children nodes of this node.
"""
if not only_direct:
count = 0
for _node in self.dfs_iter():
count += 1
return count
return len(self._children)
|
python
|
def child_count(self, only_direct=True):
"""Returns how many children this node has, either only the direct
children of this node or inclusive of all children nodes of this node.
"""
if not only_direct:
count = 0
for _node in self.dfs_iter():
count += 1
return count
return len(self._children)
|
[
"def",
"child_count",
"(",
"self",
",",
"only_direct",
"=",
"True",
")",
":",
"if",
"not",
"only_direct",
":",
"count",
"=",
"0",
"for",
"_node",
"in",
"self",
".",
"dfs_iter",
"(",
")",
":",
"count",
"+=",
"1",
"return",
"count",
"return",
"len",
"(",
"self",
".",
"_children",
")"
] |
Returns how many children this node has, either only the direct
children of this node or inclusive of all children nodes of this node.
|
[
"Returns",
"how",
"many",
"children",
"this",
"node",
"has",
"either",
"only",
"the",
"direct",
"children",
"of",
"this",
"node",
"or",
"inclusive",
"of",
"all",
"children",
"nodes",
"of",
"this",
"node",
"."
] |
53f20a8422bbd56294d5c0161081cb5875511fab
|
https://github.com/harlowja/constructs/blob/53f20a8422bbd56294d5c0161081cb5875511fab/constructs/tree.py#L119-L128
|
240,309
|
harlowja/constructs
|
constructs/tree.py
|
Node.index
|
def index(self, item):
"""Finds the child index of a given item, searchs in added order."""
index_at = None
for (i, child) in enumerate(self._children):
if child.item == item:
index_at = i
break
if index_at is None:
raise ValueError("%s is not contained in any child" % (item))
return index_at
|
python
|
def index(self, item):
"""Finds the child index of a given item, searchs in added order."""
index_at = None
for (i, child) in enumerate(self._children):
if child.item == item:
index_at = i
break
if index_at is None:
raise ValueError("%s is not contained in any child" % (item))
return index_at
|
[
"def",
"index",
"(",
"self",
",",
"item",
")",
":",
"index_at",
"=",
"None",
"for",
"(",
"i",
",",
"child",
")",
"in",
"enumerate",
"(",
"self",
".",
"_children",
")",
":",
"if",
"child",
".",
"item",
"==",
"item",
":",
"index_at",
"=",
"i",
"break",
"if",
"index_at",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"%s is not contained in any child\"",
"%",
"(",
"item",
")",
")",
"return",
"index_at"
] |
Finds the child index of a given item, searchs in added order.
|
[
"Finds",
"the",
"child",
"index",
"of",
"a",
"given",
"item",
"searchs",
"in",
"added",
"order",
"."
] |
53f20a8422bbd56294d5c0161081cb5875511fab
|
https://github.com/harlowja/constructs/blob/53f20a8422bbd56294d5c0161081cb5875511fab/constructs/tree.py#L135-L144
|
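The pformat row earlier and the three Node rows above all operate on the same tree structures from constructs/tree.py. A small sketch exercising them; the import alias mirrors the tt convention in the pformat docstring, and the parent linking done by add() is assumed from path_iter's traversal:

```python
from constructs import tree as tt   # assumed alias for the quoted constructs/tree.py module

root = tt.Node("CEO")
infra = tt.Node("Infra")
root.add(infra)                      # assumed to link infra.parent back to root
infra.add(tt.Node("Boss"))

org = tt.Tree(root)
print(tt.pformat(org))               # renders the CEO/Infra/Boss tree in the docstring's style

boss = infra[0]
print([n.item for n in boss.path_iter()])     # ['Boss', 'Infra', 'CEO']
print(root.child_count())                      # 1 direct child
print(root.child_count(only_direct=False))     # every node reachable via dfs_iter()
print(root.index("Infra"))                     # 0, matching Node.index above
```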
240,310
|
mgk/thingamon
|
thingamon/thing.py
|
Thing.publish_state
|
def publish_state(self, state):
"""Publish thing state to AWS IoT.
Args:
state (dict): object state. Must be JSON serializable (i.e., not
have circular references).
"""
message = json.dumps({'state': {'reported': state}})
self.client.publish(self.topic, message)
self._state = state
|
python
|
def publish_state(self, state):
"""Publish thing state to AWS IoT.
Args:
state (dict): object state. Must be JSON serializable (i.e., not
have circular references).
"""
message = json.dumps({'state': {'reported': state}})
self.client.publish(self.topic, message)
self._state = state
|
[
"def",
"publish_state",
"(",
"self",
",",
"state",
")",
":",
"message",
"=",
"json",
".",
"dumps",
"(",
"{",
"'state'",
":",
"{",
"'reported'",
":",
"state",
"}",
"}",
")",
"self",
".",
"client",
".",
"publish",
"(",
"self",
".",
"topic",
",",
"message",
")",
"self",
".",
"_state",
"=",
"state"
] |
Publish thing state to AWS IoT.
Args:
state (dict): object state. Must be JSON serializable (i.e., not
have circular references).
|
[
"Publish",
"thing",
"state",
"to",
"AWS",
"IoT",
"."
] |
3f7d68dc2131c347473af15cd5f7d4b669407c6b
|
https://github.com/mgk/thingamon/blob/3f7d68dc2131c347473af15cd5f7d4b669407c6b/thingamon/thing.py#L28-L37
|
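For the publish_state row above, the wire format is fully determined by the quoted code: a JSON document wrapping the state under state.reported, the AWS IoT thing-shadow convention. A tiny sketch of that envelope (the state values are illustrative):

```python
import json

state = {"temperature_c": 21.5, "door": "closed"}        # illustrative thing state
message = json.dumps({"state": {"reported": state}})     # same envelope publish_state builds
# message == '{"state": {"reported": {"temperature_c": 21.5, "door": "closed"}}}'
# Thing.publish_state() then hands this string to client.publish(topic, message).
```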
240,311
|
CitrineInformatics/dftparse
|
dftparse/vasp/eigenval_parser.py
|
_is_kpoint
|
def _is_kpoint(line):
"""Is this line the start of a new k-point block"""
# Try to parse the k-point; false otherwise
toks = line.split()
# k-point header lines have 4 tokens
if len(toks) != 4:
return False
try:
# K-points are centered at the origin
xs = [float(x) for x in toks[:3]]
# Weights are in [0,1]
w = float(toks[3])
return all(abs(x) <= 0.5 for x in xs) and w >= 0.0 and w <= 1.0
except ValueError:
return False
|
python
|
def _is_kpoint(line):
"""Is this line the start of a new k-point block"""
# Try to parse the k-point; false otherwise
toks = line.split()
# k-point header lines have 4 tokens
if len(toks) != 4:
return False
try:
# K-points are centered at the origin
xs = [float(x) for x in toks[:3]]
# Weights are in [0,1]
w = float(toks[3])
return all(abs(x) <= 0.5 for x in xs) and w >= 0.0 and w <= 1.0
except ValueError:
return False
|
[
"def",
"_is_kpoint",
"(",
"line",
")",
":",
"# Try to parse the k-point; false otherwise",
"toks",
"=",
"line",
".",
"split",
"(",
")",
"# k-point header lines have 4 tokens",
"if",
"len",
"(",
"toks",
")",
"!=",
"4",
":",
"return",
"False",
"try",
":",
"# K-points are centered at the origin",
"xs",
"=",
"[",
"float",
"(",
"x",
")",
"for",
"x",
"in",
"toks",
"[",
":",
"3",
"]",
"]",
"# Weights are in [0,1]",
"w",
"=",
"float",
"(",
"toks",
"[",
"3",
"]",
")",
"return",
"all",
"(",
"abs",
"(",
"x",
")",
"<=",
"0.5",
"for",
"x",
"in",
"xs",
")",
"and",
"w",
">=",
"0.0",
"and",
"w",
"<=",
"1.0",
"except",
"ValueError",
":",
"return",
"False"
] |
Is this line the start of a new k-point block
|
[
"Is",
"this",
"line",
"the",
"start",
"of",
"a",
"new",
"k",
"-",
"point",
"block"
] |
53a1bf19945cf1c195d6af9beccb3d1b7f4a4c1d
|
https://github.com/CitrineInformatics/dftparse/blob/53a1bf19945cf1c195d6af9beccb3d1b7f4a4c1d/dftparse/vasp/eigenval_parser.py#L4-L19
|
240,312
|
CitrineInformatics/dftparse
|
dftparse/vasp/eigenval_parser.py
|
_parse_kpoint
|
def _parse_kpoint(line, lines):
"""Parse the k-point and then continue to iterate over the band energies and occupations"""
toks = line.split()
kpoint = [float(x) for x in toks[:3]]
weight = float(toks[-1])
newline = next(lines)
bands_up = []
occ_up = []
bands_down = []
occ_down = []
ispin = None
while len(newline.split()) > 0:
toks = newline.split()
if ispin is None:
# there are two spins if there are 5 columns (two spin energies and occupancies) or
# very probably if there are 3 columns and the last column's first value isn't 1.0
# (which would be the occupancy of the lowest energy level)
ispin = (len(toks) == 5) or (len(toks) == 3 and abs(float(toks[2]) - 1.0) > 1.0e-4)
if len(toks) == 2:
bands_up.append(float(toks[1]))
elif len(toks) == 3 and not ispin:
bands_up.append(float(toks[1]))
occ_up.append(float(toks[2]))
elif len(toks) == 3 and ispin:
bands_up.append(float(toks[1]))
bands_down.append(float(toks[2]))
elif len(toks) == 5 and ispin:
bands_up.append(float(toks[1]))
bands_down.append(float(toks[2]))
occ_up.append(float(toks[3]))
occ_down.append(float(toks[4]))
else:
raise ValueError("Encountered {} when parsing k-point".format(newline))
newline = next(lines)
res = {"kpoint": kpoint, "weight": weight}
if len(bands_down) > 0:
res["energies"] = list(zip(bands_up, bands_down))
else:
res["energies"] = list(zip(bands_up))
if len(occ_down) > 0:
res["occupancies"] = list(zip(occ_up, occ_down))
elif len(occ_up) > 0:
res["occupancies"] = list(zip(occ_up))
return res
|
python
|
def _parse_kpoint(line, lines):
"""Parse the k-point and then continue to iterate over the band energies and occupations"""
toks = line.split()
kpoint = [float(x) for x in toks[:3]]
weight = float(toks[-1])
newline = next(lines)
bands_up = []
occ_up = []
bands_down = []
occ_down = []
ispin = None
while len(newline.split()) > 0:
toks = newline.split()
if ispin is None:
# there are two spins if there are 5 columns (two spin energies and occupancies) or
# very probably if there are 3 columns and the last column's first value isn't 1.0
# (which would be the occupancy of the lowest energy level)
ispin = (len(toks) == 5) or (len(toks) == 3 and abs(float(toks[2]) - 1.0) > 1.0e-4)
if len(toks) == 2:
bands_up.append(float(toks[1]))
elif len(toks) == 3 and not ispin:
bands_up.append(float(toks[1]))
occ_up.append(float(toks[2]))
elif len(toks) == 3 and ispin:
bands_up.append(float(toks[1]))
bands_down.append(float(toks[2]))
elif len(toks) == 5 and ispin:
bands_up.append(float(toks[1]))
bands_down.append(float(toks[2]))
occ_up.append(float(toks[3]))
occ_down.append(float(toks[4]))
else:
raise ValueError("Encountered {} when parsing k-point".format(newline))
newline = next(lines)
res = {"kpoint": kpoint, "weight": weight}
if len(bands_down) > 0:
res["energies"] = list(zip(bands_up, bands_down))
else:
res["energies"] = list(zip(bands_up))
if len(occ_down) > 0:
res["occupancies"] = list(zip(occ_up, occ_down))
elif len(occ_up) > 0:
res["occupancies"] = list(zip(occ_up))
return res
|
[
"def",
"_parse_kpoint",
"(",
"line",
",",
"lines",
")",
":",
"toks",
"=",
"line",
".",
"split",
"(",
")",
"kpoint",
"=",
"[",
"float",
"(",
"x",
")",
"for",
"x",
"in",
"toks",
"[",
":",
"3",
"]",
"]",
"weight",
"=",
"float",
"(",
"toks",
"[",
"-",
"1",
"]",
")",
"newline",
"=",
"next",
"(",
"lines",
")",
"bands_up",
"=",
"[",
"]",
"occ_up",
"=",
"[",
"]",
"bands_down",
"=",
"[",
"]",
"occ_down",
"=",
"[",
"]",
"ispin",
"=",
"None",
"while",
"len",
"(",
"newline",
".",
"split",
"(",
")",
")",
">",
"0",
":",
"toks",
"=",
"newline",
".",
"split",
"(",
")",
"if",
"ispin",
"is",
"None",
":",
"# there are two spins if there are 5 columns (two spin energies and occupancies) or",
"# very probably if there are 3 columns and the last column's first value isn't 1.0",
"# (which would be the occupancy of the lowest energy level)",
"ispin",
"=",
"(",
"len",
"(",
"toks",
")",
"==",
"5",
")",
"or",
"(",
"len",
"(",
"toks",
")",
"==",
"3",
"and",
"abs",
"(",
"float",
"(",
"toks",
"[",
"2",
"]",
")",
"-",
"1.0",
")",
">",
"1.0e-4",
")",
"if",
"len",
"(",
"toks",
")",
"==",
"2",
":",
"bands_up",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"1",
"]",
")",
")",
"elif",
"len",
"(",
"toks",
")",
"==",
"3",
"and",
"not",
"ispin",
":",
"bands_up",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"1",
"]",
")",
")",
"occ_up",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"2",
"]",
")",
")",
"elif",
"len",
"(",
"toks",
")",
"==",
"3",
"and",
"ispin",
":",
"bands_up",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"1",
"]",
")",
")",
"bands_down",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"2",
"]",
")",
")",
"elif",
"len",
"(",
"toks",
")",
"==",
"5",
"and",
"ispin",
":",
"bands_up",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"1",
"]",
")",
")",
"bands_down",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"2",
"]",
")",
")",
"occ_up",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"3",
"]",
")",
")",
"occ_down",
".",
"append",
"(",
"float",
"(",
"toks",
"[",
"4",
"]",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Encountered {} when parsing k-point\"",
".",
"format",
"(",
"newline",
")",
")",
"newline",
"=",
"next",
"(",
"lines",
")",
"res",
"=",
"{",
"\"kpoint\"",
":",
"kpoint",
",",
"\"weight\"",
":",
"weight",
"}",
"if",
"len",
"(",
"bands_down",
")",
">",
"0",
":",
"res",
"[",
"\"energies\"",
"]",
"=",
"list",
"(",
"zip",
"(",
"bands_up",
",",
"bands_down",
")",
")",
"else",
":",
"res",
"[",
"\"energies\"",
"]",
"=",
"list",
"(",
"zip",
"(",
"bands_up",
")",
")",
"if",
"len",
"(",
"occ_down",
")",
">",
"0",
":",
"res",
"[",
"\"occupancies\"",
"]",
"=",
"list",
"(",
"zip",
"(",
"occ_up",
",",
"occ_down",
")",
")",
"elif",
"len",
"(",
"occ_up",
")",
">",
"0",
":",
"res",
"[",
"\"occupancies\"",
"]",
"=",
"list",
"(",
"zip",
"(",
"occ_up",
")",
")",
"return",
"res"
] |
Parse the k-point and then continue to iterate over the band energies and occupations
|
[
"Parse",
"the",
"k",
"-",
"point",
"and",
"then",
"continue",
"to",
"iterate",
"over",
"the",
"band",
"energies",
"and",
"occupations"
] |
53a1bf19945cf1c195d6af9beccb3d1b7f4a4c1d
|
https://github.com/CitrineInformatics/dftparse/blob/53a1bf19945cf1c195d6af9beccb3d1b7f4a4c1d/dftparse/vasp/eigenval_parser.py#L22-L71
|
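The two dftparse rows above parse k-point blocks from a VASP EIGENVAL file. A hedged sketch feeding them a fabricated, non-spin-polarized block (the numbers are illustrative, not real VASP output):

```python
from dftparse.vasp.eigenval_parser import _is_kpoint, _parse_kpoint

header = "  0.2500000  0.2500000  0.2500000  0.5000000"
band_lines = iter([
    "   1     -5.123    1.000",
    "   2      2.456    0.000",
    "",                          # blank line terminates the k-point block
])

assert _is_kpoint(header)        # 4 tokens, |x| <= 0.5, weight in [0, 1]
block = _parse_kpoint(header, band_lines)
# block == {'kpoint': [0.25, 0.25, 0.25], 'weight': 0.5,
#           'energies': [(-5.123,), (2.456,)],
#           'occupancies': [(1.0,), (0.0,)]}
```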
240,313
|
Celeo/Pycord
|
pycord/__init__.py
|
WebSocketEvent.parse
|
def parse(cls, op):
"""Gets the enum for the op code
Args:
op: value of the op code (will be casted to int)
Returns:
The enum that matches the op code
"""
for event in cls:
if event.value == int(op):
return event
return None
|
python
|
def parse(cls, op):
"""Gets the enum for the op code
Args:
op: value of the op code (will be casted to int)
Returns:
The enum that matches the op code
"""
for event in cls:
if event.value == int(op):
return event
return None
|
[
"def",
"parse",
"(",
"cls",
",",
"op",
")",
":",
"for",
"event",
"in",
"cls",
":",
"if",
"event",
".",
"value",
"==",
"int",
"(",
"op",
")",
":",
"return",
"event",
"return",
"None"
] |
Gets the enum for the op code
Args:
op: value of the op code (will be casted to int)
Returns:
The enum that matches the op code
|
[
"Gets",
"the",
"enum",
"for",
"the",
"op",
"code"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L45-L57
|
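WebSocketEvent.parse above is a plain value-to-member lookup over an Enum. A standalone sketch of the same pattern; the member names and values here are illustrative, not the library's full op-code table:

```python
from enum import Enum

class OpCode(Enum):
    DISPATCH = 0
    HEARTBEAT = 1
    IDENTIFY = 2

    @classmethod
    def parse(cls, op):
        # Same lookup as WebSocketEvent.parse above: match members on int(op).
        for event in cls:
            if event.value == int(op):
                return event
        return None

assert OpCode.parse("1") is OpCode.HEARTBEAT
assert OpCode.parse(9) is None
```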
240,314
|
Celeo/Pycord
|
pycord/__init__.py
|
WebSocketKeepAlive.run
|
def run(self):
"""Runs the thread
This method handles sending the heartbeat to the Discord websocket server, so the connection
can remain open and the bot remain online for those commands that require it to be.
Args:
None
"""
while self.should_run:
try:
self.logger.debug('Sending heartbeat, seq ' + last_sequence)
self.ws.send(json.dumps({
'op': 1,
'd': last_sequence
}))
except Exception as e:
self.logger.error(f'Got error in heartbeat: {str(e)}')
finally:
elapsed = 0.0
while elapsed < self.interval and self.should_run:
time.sleep(self.TICK_INTERVAL)
elapsed += self.TICK_INTERVAL
|
python
|
def run(self):
"""Runs the thread
This method handles sending the heartbeat to the Discord websocket server, so the connection
can remain open and the bot remain online for those commands that require it to be.
Args:
None
"""
while self.should_run:
try:
self.logger.debug('Sending heartbeat, seq ' + last_sequence)
self.ws.send(json.dumps({
'op': 1,
'd': last_sequence
}))
except Exception as e:
self.logger.error(f'Got error in heartbeat: {str(e)}')
finally:
elapsed = 0.0
while elapsed < self.interval and self.should_run:
time.sleep(self.TICK_INTERVAL)
elapsed += self.TICK_INTERVAL
|
[
"def",
"run",
"(",
"self",
")",
":",
"while",
"self",
".",
"should_run",
":",
"try",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Sending heartbeat, seq '",
"+",
"last_sequence",
")",
"self",
".",
"ws",
".",
"send",
"(",
"json",
".",
"dumps",
"(",
"{",
"'op'",
":",
"1",
",",
"'d'",
":",
"last_sequence",
"}",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"logger",
".",
"error",
"(",
"f'Got error in heartbeat: {str(e)}'",
")",
"finally",
":",
"elapsed",
"=",
"0.0",
"while",
"elapsed",
"<",
"self",
".",
"interval",
"and",
"self",
".",
"should_run",
":",
"time",
".",
"sleep",
"(",
"self",
".",
"TICK_INTERVAL",
")",
"elapsed",
"+=",
"self",
".",
"TICK_INTERVAL"
] |
Runs the thread
This method handles sending the heartbeat to the Discord websocket server, so the connection
can remain open and the bot remain online for those commands that require it to be.
Args:
None
|
[
"Runs",
"the",
"thread"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L87-L109
|
240,315
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord._setup_logger
|
def _setup_logger(self, logging_level: int, log_to_console: bool):
"""Sets up the internal logger
Args:
logging_level: what logging level to use
log_to_console: whether or not to log to the console
"""
self.logger = logging.getLogger('discord')
self.logger.handlers = []
self.logger.setLevel(logging_level)
formatter = logging.Formatter(style='{', fmt='{asctime} [{levelname}] {message}', datefmt='%Y-%m-%d %H:%M:%S')
file_handler = logging.FileHandler('pycord.log')
file_handler.setFormatter(formatter)
file_handler.setLevel(logging_level)
self.logger.addHandler(file_handler)
if log_to_console:
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(formatter)
stream_handler.setLevel(logging_level)
self.logger.addHandler(stream_handler)
|
python
|
def _setup_logger(self, logging_level: int, log_to_console: bool):
"""Sets up the internal logger
Args:
logging_level: what logging level to use
log_to_console: whether or not to log to the console
"""
self.logger = logging.getLogger('discord')
self.logger.handlers = []
self.logger.setLevel(logging_level)
formatter = logging.Formatter(style='{', fmt='{asctime} [{levelname}] {message}', datefmt='%Y-%m-%d %H:%M:%S')
file_handler = logging.FileHandler('pycord.log')
file_handler.setFormatter(formatter)
file_handler.setLevel(logging_level)
self.logger.addHandler(file_handler)
if log_to_console:
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(formatter)
stream_handler.setLevel(logging_level)
self.logger.addHandler(stream_handler)
|
[
"def",
"_setup_logger",
"(",
"self",
",",
"logging_level",
":",
"int",
",",
"log_to_console",
":",
"bool",
")",
":",
"self",
".",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"'discord'",
")",
"self",
".",
"logger",
".",
"handlers",
"=",
"[",
"]",
"self",
".",
"logger",
".",
"setLevel",
"(",
"logging_level",
")",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"style",
"=",
"'{'",
",",
"fmt",
"=",
"'{asctime} [{levelname}] {message}'",
",",
"datefmt",
"=",
"'%Y-%m-%d %H:%M:%S'",
")",
"file_handler",
"=",
"logging",
".",
"FileHandler",
"(",
"'pycord.log'",
")",
"file_handler",
".",
"setFormatter",
"(",
"formatter",
")",
"file_handler",
".",
"setLevel",
"(",
"logging_level",
")",
"self",
".",
"logger",
".",
"addHandler",
"(",
"file_handler",
")",
"if",
"log_to_console",
":",
"stream_handler",
"=",
"logging",
".",
"StreamHandler",
"(",
"sys",
".",
"stdout",
")",
"stream_handler",
".",
"setFormatter",
"(",
"formatter",
")",
"stream_handler",
".",
"setLevel",
"(",
"logging_level",
")",
"self",
".",
"logger",
".",
"addHandler",
"(",
"stream_handler",
")"
] |
Sets up the internal logger
Args:
logging_level: what logging level to use
log_to_console: whether or not to log to the console
|
[
"Sets",
"up",
"the",
"internal",
"logger"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L181-L200
|
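_setup_logger above wires a file handler plus an optional stdout handler onto the 'discord' logger using a {}-style Formatter. The same configuration as a standalone stdlib sketch, outside the Pycord class (the function name and defaults are illustrative):

```python
import logging
import sys

def setup_logger(level=logging.DEBUG, log_to_console=True):
    """Mirror of the handler wiring in Pycord._setup_logger above."""
    logger = logging.getLogger('discord')
    logger.handlers = []              # drop handlers attached by earlier calls
    logger.setLevel(level)
    formatter = logging.Formatter(style='{',
                                  fmt='{asctime} [{levelname}] {message}',
                                  datefmt='%Y-%m-%d %H:%M:%S')
    file_handler = logging.FileHandler('pycord.log')
    file_handler.setFormatter(formatter)
    file_handler.setLevel(level)
    logger.addHandler(file_handler)
    if log_to_console:
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setFormatter(formatter)
        stream_handler.setLevel(level)
        logger.addHandler(stream_handler)
    return logger

log = setup_logger()
log.info('logger configured')         # written to pycord.log and echoed to stdout
```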
240,316
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord._query
|
def _query(self, path: str, method: str, data: Dict[str, Any]=None, expected_status: int = 200) \
-> Union[List[Dict[str, Any]], Dict[str, Any], None]:
"""Make an HTTP request
Args:
path: the URI path (not including the base url, start with
the first uri segment, like 'users/...')
method: the HTTP method to use (GET, POST, PATCH, ...)
data: the data to send as JSON data
expected_status: expected HTTP status; other statuses
received will raise an Exception
Returns:
Data from the endpoint's response
"""
url = Pycord.url_base + path
self.logger.debug(f'Making {method} request to "{url}"')
if method == 'GET':
r = requests.get(url, headers=self._build_headers())
elif method == 'POST':
r = requests.post(url, headers=self._build_headers(), json=data)
r = requests.get(url, headers=self._build_headers())
elif method == 'PATCH':
r = requests.patch(url, headers=self._build_headers(), json=data)
else:
raise ValueError(f'Unknown HTTP method {method}')
self.logger.debug(f'{method} response from "{url}" was "{r.status_code}"')
if r.status_code != expected_status:
raise ValueError(f'Non-{expected_status} {method} response from Discord API ({r.status_code}): {r.text}')
if expected_status == 200:
return r.json()
return None
|
python
|
def _query(self, path: str, method: str, data: Dict[str, Any]=None, expected_status: int = 200) \
-> Union[List[Dict[str, Any]], Dict[str, Any], None]:
"""Make an HTTP request
Args:
path: the URI path (not including the base url, start with
the first uri segment, like 'users/...')
method: the HTTP method to use (GET, POST, PATCH, ...)
data: the data to send as JSON data
expected_status: expected HTTP status; other statuses
received will raise an Exception
Returns:
Data from the endpoint's response
"""
url = Pycord.url_base + path
self.logger.debug(f'Making {method} request to "{url}"')
if method == 'GET':
r = requests.get(url, headers=self._build_headers())
elif method == 'POST':
r = requests.post(url, headers=self._build_headers(), json=data)
r = requests.get(url, headers=self._build_headers())
elif method == 'PATCH':
r = requests.patch(url, headers=self._build_headers(), json=data)
else:
raise ValueError(f'Unknown HTTP method {method}')
self.logger.debug(f'{method} response from "{url}" was "{r.status_code}"')
if r.status_code != expected_status:
raise ValueError(f'Non-{expected_status} {method} response from Discord API ({r.status_code}): {r.text}')
if expected_status == 200:
return r.json()
return None
|
[
"def",
"_query",
"(",
"self",
",",
"path",
":",
"str",
",",
"method",
":",
"str",
",",
"data",
":",
"Dict",
"[",
"str",
",",
"Any",
"]",
"=",
"None",
",",
"expected_status",
":",
"int",
"=",
"200",
")",
"->",
"Union",
"[",
"List",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
",",
"Dict",
"[",
"str",
",",
"Any",
"]",
",",
"None",
"]",
":",
"url",
"=",
"Pycord",
".",
"url_base",
"+",
"path",
"self",
".",
"logger",
".",
"debug",
"(",
"f'Making {method} request to \"{url}\"'",
")",
"if",
"method",
"==",
"'GET'",
":",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"_build_headers",
"(",
")",
")",
"elif",
"method",
"==",
"'POST'",
":",
"r",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"_build_headers",
"(",
")",
",",
"json",
"=",
"data",
")",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"_build_headers",
"(",
")",
")",
"elif",
"method",
"==",
"'PATCH'",
":",
"r",
"=",
"requests",
".",
"patch",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"_build_headers",
"(",
")",
",",
"json",
"=",
"data",
")",
"else",
":",
"raise",
"ValueError",
"(",
"f'Unknown HTTP method {method}'",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"f'{method} response from \"{url}\" was \"{r.status_code}\"'",
")",
"if",
"r",
".",
"status_code",
"!=",
"expected_status",
":",
"raise",
"ValueError",
"(",
"f'Non-{expected_status} {method} response from Discord API ({r.status_code}): {r.text}'",
")",
"if",
"expected_status",
"==",
"200",
":",
"return",
"r",
".",
"json",
"(",
")",
"return",
"None"
] |
Make an HTTP request
Args:
path: the URI path (not including the base url, start with
the first uri segment, like 'users/...')
method: the HTTP method to use (GET, POST, PATCH, ...)
data: the data to send as JSON data
expected_status: expected HTTP status; other statuses
received will raise an Exception
Returns:
Data from the endpoint's response
|
[
"Make",
"an",
"HTTP",
"request"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L221-L252
|
240,317
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord._ws_on_message
|
def _ws_on_message(self, ws: websocket.WebSocketApp, raw: Union[str, bytes]):
"""Callback for receiving messages from the websocket connection
This method receives ALL events from the websocket connection, some of which
are used for the initial authentication flow, some of which are used for maintaining
the connection, some of which are for notifying this client of user states, etc.
Only a few of the events are really worth listening to by "downstream" clients,
mostly chat events (``WebSocketEvent.DISPATCH`` with element ``t`` == 'MESSAGE_CREATE'),
and those can be accessed by clients using this library via the command registration,
which is handled by this method.
Args:
ws: websocket connection
raw: message received from the connection; either string or bytes, the latter
is a zlip-compressed string. Either way, the end result of formatting is JSON
"""
if isinstance(raw, bytes):
decoded = zlib.decompress(raw, 15, 10490000).decode('utf-8')
else:
decoded = raw
data = json.loads(decoded)
if data.get('s') is not None:
global last_sequence
last_sequence = str(data['s'])
self.logger.debug('Set last_sequence to ' + last_sequence)
event = WebSocketEvent.parse(data['op'])
self.logger.debug('Received event {} (op #{})'.format(
event.name,
data['op']
))
if event == WebSocketEvent.HELLO:
interval = float(data['d']['heartbeat_interval']) / 1000
self.logger.debug(f'Starting heartbeat thread at {interval} seconds')
self._ws_keep_alive = WebSocketKeepAlive(self.logger, ws, interval)
self._ws_keep_alive.start()
elif event == WebSocketEvent.DISPATCH:
self.logger.debug('Got dispatch ' + data['t'])
if data['t'] == PycordCallback.MESSAGE.value:
message_content = data['d']['content']
if message_content.startswith(self.command_prefix) and self._commands:
cmd_str = message_content[1:].split(' ')[0].lower()
self.logger.debug(f'Got new message, checking for callback for command "{cmd_str}"')
for command_obj in self._commands:
if command_obj[0].lower() == cmd_str:
self.logger.debug(f'Found matching command "{command_obj[0]}", invoking callback')
command_obj[1](data)
for key in self.callbacks:
if key.value == data['t']:
self.callbacks[key](data)
|
python
|
def _ws_on_message(self, ws: websocket.WebSocketApp, raw: Union[str, bytes]):
"""Callback for receiving messages from the websocket connection
This method receives ALL events from the websocket connection, some of which
are used for the initial authentication flow, some of which are used for maintaining
the connection, some of which are for notifying this client of user states, etc.
Only a few of the events are really worth listening to by "downstream" clients,
mostly chat events (``WebSocketEvent.DISPATCH`` with element ``t`` == 'MESSAGE_CREATE'),
and those can be accessed by clients using this library via the command registration,
which is handled by this method.
Args:
ws: websocket connection
raw: message received from the connection; either string or bytes, the latter
is a zlip-compressed string. Either way, the end result of formatting is JSON
"""
if isinstance(raw, bytes):
decoded = zlib.decompress(raw, 15, 10490000).decode('utf-8')
else:
decoded = raw
data = json.loads(decoded)
if data.get('s') is not None:
global last_sequence
last_sequence = str(data['s'])
self.logger.debug('Set last_sequence to ' + last_sequence)
event = WebSocketEvent.parse(data['op'])
self.logger.debug('Received event {} (op #{})'.format(
event.name,
data['op']
))
if event == WebSocketEvent.HELLO:
interval = float(data['d']['heartbeat_interval']) / 1000
self.logger.debug(f'Starting heartbeat thread at {interval} seconds')
self._ws_keep_alive = WebSocketKeepAlive(self.logger, ws, interval)
self._ws_keep_alive.start()
elif event == WebSocketEvent.DISPATCH:
self.logger.debug('Got dispatch ' + data['t'])
if data['t'] == PycordCallback.MESSAGE.value:
message_content = data['d']['content']
if message_content.startswith(self.command_prefix) and self._commands:
cmd_str = message_content[1:].split(' ')[0].lower()
self.logger.debug(f'Got new message, checking for callback for command "{cmd_str}"')
for command_obj in self._commands:
if command_obj[0].lower() == cmd_str:
self.logger.debug(f'Found matching command "{command_obj[0]}", invoking callback')
command_obj[1](data)
for key in self.callbacks:
if key.value == data['t']:
self.callbacks[key](data)
|
[
"def",
"_ws_on_message",
"(",
"self",
",",
"ws",
":",
"websocket",
".",
"WebSocketApp",
",",
"raw",
":",
"Union",
"[",
"str",
",",
"bytes",
"]",
")",
":",
"if",
"isinstance",
"(",
"raw",
",",
"bytes",
")",
":",
"decoded",
"=",
"zlib",
".",
"decompress",
"(",
"raw",
",",
"15",
",",
"10490000",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"else",
":",
"decoded",
"=",
"raw",
"data",
"=",
"json",
".",
"loads",
"(",
"decoded",
")",
"if",
"data",
".",
"get",
"(",
"'s'",
")",
"is",
"not",
"None",
":",
"global",
"last_sequence",
"last_sequence",
"=",
"str",
"(",
"data",
"[",
"'s'",
"]",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Set last_sequence to '",
"+",
"last_sequence",
")",
"event",
"=",
"WebSocketEvent",
".",
"parse",
"(",
"data",
"[",
"'op'",
"]",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'Received event {} (op #{})'",
".",
"format",
"(",
"event",
".",
"name",
",",
"data",
"[",
"'op'",
"]",
")",
")",
"if",
"event",
"==",
"WebSocketEvent",
".",
"HELLO",
":",
"interval",
"=",
"float",
"(",
"data",
"[",
"'d'",
"]",
"[",
"'heartbeat_interval'",
"]",
")",
"/",
"1000",
"self",
".",
"logger",
".",
"debug",
"(",
"f'Starting heartbeat thread at {interval} seconds'",
")",
"self",
".",
"_ws_keep_alive",
"=",
"WebSocketKeepAlive",
"(",
"self",
".",
"logger",
",",
"ws",
",",
"interval",
")",
"self",
".",
"_ws_keep_alive",
".",
"start",
"(",
")",
"elif",
"event",
"==",
"WebSocketEvent",
".",
"DISPATCH",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Got dispatch '",
"+",
"data",
"[",
"'t'",
"]",
")",
"if",
"data",
"[",
"'t'",
"]",
"==",
"PycordCallback",
".",
"MESSAGE",
".",
"value",
":",
"message_content",
"=",
"data",
"[",
"'d'",
"]",
"[",
"'content'",
"]",
"if",
"message_content",
".",
"startswith",
"(",
"self",
".",
"command_prefix",
")",
"and",
"self",
".",
"_commands",
":",
"cmd_str",
"=",
"message_content",
"[",
"1",
":",
"]",
".",
"split",
"(",
"' '",
")",
"[",
"0",
"]",
".",
"lower",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"f'Got new message, checking for callback for command \"{cmd_str}\"'",
")",
"for",
"command_obj",
"in",
"self",
".",
"_commands",
":",
"if",
"command_obj",
"[",
"0",
"]",
".",
"lower",
"(",
")",
"==",
"cmd_str",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"f'Found matching command \"{command_obj[0]}\", invoking callback'",
")",
"command_obj",
"[",
"1",
"]",
"(",
"data",
")",
"for",
"key",
"in",
"self",
".",
"callbacks",
":",
"if",
"key",
".",
"value",
"==",
"data",
"[",
"'t'",
"]",
":",
"self",
".",
"callbacks",
"[",
"key",
"]",
"(",
"data",
")"
] |
Callback for receiving messages from the websocket connection
This method receives ALL events from the websocket connection, some of which
are used for the initial authentication flow, some of which are used for maintaining
the connection, some of which are for notifying this client of user states, etc.
Only a few of the events are really worth listening to by "downstream" clients,
mostly chat events (``WebSocketEvent.DISPATCH`` with element ``t`` == 'MESSAGE_CREATE'),
and those can be accessed by clients using this library via the command registration,
which is handled by this method.
Args:
ws: websocket connection
raw: message received from the connection; either string or bytes, the latter
is a zlip-compressed string. Either way, the end result of formatting is JSON
|
[
"Callback",
"for",
"receiving",
"messages",
"from",
"the",
"websocket",
"connection"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L269-L317
|
240,318
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord._ws_on_error
|
def _ws_on_error(self, ws: websocket.WebSocketApp, error: Exception):
"""Callback for receiving errors from the websocket connection
Args:
ws: websocket connection
error: exception raised
"""
self.logger.error(f'Got error from websocket connection: {str(error)}')
|
python
|
def _ws_on_error(self, ws: websocket.WebSocketApp, error: Exception):
"""Callback for receiving errors from the websocket connection
Args:
ws: websocket connection
error: exception raised
"""
self.logger.error(f'Got error from websocket connection: {str(error)}')
|
[
"def",
"_ws_on_error",
"(",
"self",
",",
"ws",
":",
"websocket",
".",
"WebSocketApp",
",",
"error",
":",
"Exception",
")",
":",
"self",
".",
"logger",
".",
"error",
"(",
"f'Got error from websocket connection: {str(error)}'",
")"
] |
Callback for receiving errors from the websocket connection
Args:
ws: websocket connection
error: exception raised
|
[
"Callback",
"for",
"receiving",
"errors",
"from",
"the",
"websocket",
"connection"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L319-L326
|
240,319
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord._ws_on_close
|
def _ws_on_close(self, ws: websocket.WebSocketApp):
"""Callback for closing the websocket connection
Args:
ws: websocket connection (now closed)
"""
self.connected = False
self.logger.error('Websocket closed')
self._reconnect_websocket()
|
python
|
def _ws_on_close(self, ws: websocket.WebSocketApp):
"""Callback for closing the websocket connection
Args:
ws: websocket connection (now closed)
"""
self.connected = False
self.logger.error('Websocket closed')
self._reconnect_websocket()
|
[
"def",
"_ws_on_close",
"(",
"self",
",",
"ws",
":",
"websocket",
".",
"WebSocketApp",
")",
":",
"self",
".",
"connected",
"=",
"False",
"self",
".",
"logger",
".",
"error",
"(",
"'Websocket closed'",
")",
"self",
".",
"_reconnect_websocket",
"(",
")"
] |
Callback for closing the websocket connection
Args:
ws: websocket connection (now closed)
|
[
"Callback",
"for",
"closing",
"the",
"websocket",
"connection"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L328-L336
|
240,320
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord._ws_on_open
|
def _ws_on_open(self, ws: websocket.WebSocketApp):
"""Callback for sending the initial authentication data
This "payload" contains the required data to authenticate this websocket
client as a suitable bot connection to the Discord websocket.
Args:
ws: websocket connection
"""
payload = {
'op': WebSocketEvent.IDENTIFY.value,
'd': {
'token': self.token,
'properties': {
'$os': sys.platform,
'$browser': 'Pycord',
'$device': 'Pycord',
'$referrer': '',
'$referring_domain': ''
},
'compress': True,
'large_threshold': 250
}
}
self.logger.debug('Sending identify payload')
ws.send(json.dumps(payload))
self.connected = True
|
python
|
def _ws_on_open(self, ws: websocket.WebSocketApp):
"""Callback for sending the initial authentication data
This "payload" contains the required data to authenticate this websocket
client as a suitable bot connection to the Discord websocket.
Args:
ws: websocket connection
"""
payload = {
'op': WebSocketEvent.IDENTIFY.value,
'd': {
'token': self.token,
'properties': {
'$os': sys.platform,
'$browser': 'Pycord',
'$device': 'Pycord',
'$referrer': '',
'$referring_domain': ''
},
'compress': True,
'large_threshold': 250
}
}
self.logger.debug('Sending identify payload')
ws.send(json.dumps(payload))
self.connected = True
|
[
"def",
"_ws_on_open",
"(",
"self",
",",
"ws",
":",
"websocket",
".",
"WebSocketApp",
")",
":",
"payload",
"=",
"{",
"'op'",
":",
"WebSocketEvent",
".",
"IDENTIFY",
".",
"value",
",",
"'d'",
":",
"{",
"'token'",
":",
"self",
".",
"token",
",",
"'properties'",
":",
"{",
"'$os'",
":",
"sys",
".",
"platform",
",",
"'$browser'",
":",
"'Pycord'",
",",
"'$device'",
":",
"'Pycord'",
",",
"'$referrer'",
":",
"''",
",",
"'$referring_domain'",
":",
"''",
"}",
",",
"'compress'",
":",
"True",
",",
"'large_threshold'",
":",
"250",
"}",
"}",
"self",
".",
"logger",
".",
"debug",
"(",
"'Sending identify payload'",
")",
"ws",
".",
"send",
"(",
"json",
".",
"dumps",
"(",
"payload",
")",
")",
"self",
".",
"connected",
"=",
"True"
] |
Callback for sending the initial authentication data
This "payload" contains the required data to authenticate this websocket
client as a suitable bot connection to the Discord websocket.
Args:
ws: websocket connection
|
[
"Callback",
"for",
"sending",
"the",
"initial",
"authentication",
"data"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L338-L364
|
240,321
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.connect_to_websocket
|
def connect_to_websocket(self):
"""Call this method to make the connection to the Discord websocket
This method is not blocking, so you'll probably want to call it after
initializating your Pycord object, and then move on with your code. When
you want to block on just maintaining the websocket connection, then call
``keep_running``, and it'll block until your application is interrupted.
Args:
None
"""
self.logger.info('Making websocket connection')
try:
if hasattr(self, '_ws'):
self._ws.close()
except:
self.logger.debug('Couldn\'t terminate previous websocket connection')
self._ws = websocket.WebSocketApp(
self._get_websocket_address() + '?v=6&encoding=json',
on_message=self._ws_on_message,
on_error=self._ws_on_error,
on_close=self._ws_on_close
)
self._ws.on_open = self._ws_on_open
self._ws_run_forever_wrapper = WebSocketRunForeverWrapper(self.logger, self._ws)
self._ws_run_forever_wrapper.start()
|
python
|
def connect_to_websocket(self):
"""Call this method to make the connection to the Discord websocket
This method is not blocking, so you'll probably want to call it after
initializating your Pycord object, and then move on with your code. When
you want to block on just maintaining the websocket connection, then call
``keep_running``, and it'll block until your application is interrupted.
Args:
None
"""
self.logger.info('Making websocket connection')
try:
if hasattr(self, '_ws'):
self._ws.close()
except:
self.logger.debug('Couldn\'t terminate previous websocket connection')
self._ws = websocket.WebSocketApp(
self._get_websocket_address() + '?v=6&encoding=json',
on_message=self._ws_on_message,
on_error=self._ws_on_error,
on_close=self._ws_on_close
)
self._ws.on_open = self._ws_on_open
self._ws_run_forever_wrapper = WebSocketRunForeverWrapper(self.logger, self._ws)
self._ws_run_forever_wrapper.start()
|
[
"def",
"connect_to_websocket",
"(",
"self",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"'Making websocket connection'",
")",
"try",
":",
"if",
"hasattr",
"(",
"self",
",",
"'_ws'",
")",
":",
"self",
".",
"_ws",
".",
"close",
"(",
")",
"except",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Couldn\\'t terminate previous websocket connection'",
")",
"self",
".",
"_ws",
"=",
"websocket",
".",
"WebSocketApp",
"(",
"self",
".",
"_get_websocket_address",
"(",
")",
"+",
"'?v=6&encoding=json'",
",",
"on_message",
"=",
"self",
".",
"_ws_on_message",
",",
"on_error",
"=",
"self",
".",
"_ws_on_error",
",",
"on_close",
"=",
"self",
".",
"_ws_on_close",
")",
"self",
".",
"_ws",
".",
"on_open",
"=",
"self",
".",
"_ws_on_open",
"self",
".",
"_ws_run_forever_wrapper",
"=",
"WebSocketRunForeverWrapper",
"(",
"self",
".",
"logger",
",",
"self",
".",
"_ws",
")",
"self",
".",
"_ws_run_forever_wrapper",
".",
"start",
"(",
")"
] |
Call this method to make the connection to the Discord websocket
This method is not blocking, so you'll probably want to call it after
initializing your Pycord object, and then move on with your code. When
you want to block on just maintaining the websocket connection, then call
``keep_running``, and it'll block until your application is interrupted.
Args:
None
|
[
"Call",
"this",
"method",
"to",
"make",
"the",
"connection",
"to",
"the",
"Discord",
"websocket"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L374-L399
|
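For context, a minimal usage sketch of ``connect_to_websocket`` as described above. It assumes the ``Pycord`` class is importable from the ``pycord`` package and that its constructor accepts a bot token; the token here is a placeholder, not a real credential.

from pycord import Pycord

bot = Pycord('HYPOTHETICAL_BOT_TOKEN')  # placeholder token; constructor signature is an assumption
bot.connect_to_websocket()              # non-blocking: opens the gateway connection
# ... register commands or do other setup here ...
bot.keep_running()                      # blocks until the application is interrupted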
240,322
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.disconnect_from_websocket
|
def disconnect_from_websocket(self):
"""Disconnects from the websocket
Args:
None
"""
self.logger.warning('Disconnecting from websocket')
self.logger.info('Stopping keep alive thread')
self._ws_keep_alive.stop()
self._ws_keep_alive.join()
self.logger.info('Stopped keep alive thread')
try:
self.logger.warning('Disconnecting from websocket')
self._ws.close()
self.logger.info('Closed websocket connection')
except:
self.logger.debug('Couldn\'t terminate previous websocket connection')
|
python
|
def disconnect_from_websocket(self):
"""Disconnects from the websocket
Args:
None
"""
self.logger.warning('Disconnecting from websocket')
self.logger.info('Stopping keep alive thread')
self._ws_keep_alive.stop()
self._ws_keep_alive.join()
self.logger.info('Stopped keep alive thread')
try:
self.logger.warning('Disconnecting from websocket')
self._ws.close()
self.logger.info('Closed websocket connection')
except:
self.logger.debug('Couldn\'t terminate previous websocket connection')
|
[
"def",
"disconnect_from_websocket",
"(",
"self",
")",
":",
"self",
".",
"logger",
".",
"warning",
"(",
"'Disconnecting from websocket'",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'Stopping keep alive thread'",
")",
"self",
".",
"_ws_keep_alive",
".",
"stop",
"(",
")",
"self",
".",
"_ws_keep_alive",
".",
"join",
"(",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'Stopped keep alive thread'",
")",
"try",
":",
"self",
".",
"logger",
".",
"warning",
"(",
"'Disconnecting from websocket'",
")",
"self",
".",
"_ws",
".",
"close",
"(",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'Closed websocket connection'",
")",
"except",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'Couldn\\'t terminate previous websocket connection'",
")"
] |
Disconnects from the websocket
Args:
None
|
[
"Disconnects",
"from",
"the",
"websocket"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L424-L440
|
240,323
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.set_status
|
def set_status(self, name: str = None):
"""Updates the bot's status
This is used to set the game that the bot is "playing" or to clear it.
If you want to set a game, pass a name; if you want to clear it, either
call this method without the optional ``name`` parameter or explicitly
pass ``None``.
Args:
name: the game's name, or None
"""
game = None
if name:
game = {
'name': name
}
payload = {
'op': WebSocketEvent.STATUS_UPDATE.value,
'd': {
'game': game,
'status': 'online',
'afk': False,
'since': 0.0
}
}
data = json.dumps(payload, indent=2)
self.logger.debug(f'Sending status update payload: {data}')
self._ws.send(data)
|
python
|
def set_status(self, name: str = None):
"""Updates the bot's status
This is used to set the game that the bot is "playing" or to clear it.
If you want to set a game, pass a name; if you want to clear it, either
call this method without the optional ``name`` parameter or explicitly
pass ``None``.
Args:
name: the game's name, or None
"""
game = None
if name:
game = {
'name': name
}
payload = {
'op': WebSocketEvent.STATUS_UPDATE.value,
'd': {
'game': game,
'status': 'online',
'afk': False,
'since': 0.0
}
}
data = json.dumps(payload, indent=2)
self.logger.debug(f'Sending status update payload: {data}')
self._ws.send(data)
|
[
"def",
"set_status",
"(",
"self",
",",
"name",
":",
"str",
"=",
"None",
")",
":",
"game",
"=",
"None",
"if",
"name",
":",
"game",
"=",
"{",
"'name'",
":",
"name",
"}",
"payload",
"=",
"{",
"'op'",
":",
"WebSocketEvent",
".",
"STATUS_UPDATE",
".",
"value",
",",
"'d'",
":",
"{",
"'game'",
":",
"game",
",",
"'status'",
":",
"'online'",
",",
"'afk'",
":",
"False",
",",
"'since'",
":",
"0.0",
"}",
"}",
"data",
"=",
"json",
".",
"dumps",
"(",
"payload",
",",
"indent",
"=",
"2",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"f'Sending status update payload: {data}'",
")",
"self",
".",
"_ws",
".",
"send",
"(",
"data",
")"
] |
Updates the bot's status
This is used to set the game that the bot is "playing" or to clear it.
If you want to set a game, pass a name; if you want to clear it, either
call this method without the optional ``name`` parameter or explicitly
pass ``None``.
Args:
name: the game's name, or None
|
[
"Updates",
"the",
"bot",
"s",
"status"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L442-L469
|
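A short usage sketch for ``set_status``; ``bot`` is assumed to be an already-connected ``Pycord`` instance.

bot.set_status('with fire')  # the bot now shows as playing "with fire"
bot.set_status()             # clears the game by sending game=None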
240,324
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.get_guild_info
|
def get_guild_info(self, id: str) -> Dict[str, Any]:
"""Get a guild's information by its id
Args:
id: snowflake id of the guild
Returns:
Dictionary data for the guild API object
Example:
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
"""
return self._query(f'guilds/{id}', 'GET')
|
python
|
def get_guild_info(self, id: str) -> Dict[str, Any]:
"""Get a guild's information by its id
Args:
id: snowflake id of the guild
Returns:
Dictionary data for the guild API object
Example:
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
"""
return self._query(f'guilds/{id}', 'GET')
|
[
"def",
"get_guild_info",
"(",
"self",
",",
"id",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"return",
"self",
".",
"_query",
"(",
"f'guilds/{id}'",
",",
"'GET'",
")"
] |
Get a guild's information by its id
Args:
id: snowflake id of the guild
Returns:
Dictionary data for the guild API object
Example:
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
|
[
"Get",
"a",
"guild",
"s",
"information",
"by",
"its",
"id"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L549-L577
|
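A usage sketch for ``get_guild_info``; ``bot`` is assumed to be a connected ``Pycord`` instance and the guild id is a placeholder snowflake taken from the example payload.

info = bot.get_guild_info('41771983423143937')  # placeholder snowflake id
print(info['name'], info['region'], info['owner_id'])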
240,325
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.get_channels_in
|
def get_channels_in(self, guild_id: str) -> List[Dict[str, Any]]:
"""Get a list of channels in the guild
Args:
guild_id: id of the guild to fetch channels from
Returns:
List of dictionary objects of channels in the guild. Note the different
types of channels: text, voice, DM, group DM.
https://discordapp.com/developers/docs/resources/channel#channel-object
Example:
[
{
"id": "41771983423143937",
"guild_id": "41771983423143937",
"name": "general",
"type": 0,
"position": 6,
"permission_overwrites": [],
"topic": "24/7 chat about how to gank Mike #2",
"last_message_id": "155117677105512449"
},
{
"id": "155101607195836416",
"guild_id": "41771983423143937",
"name": "ROCKET CHEESE",
"type": 2,
"position": 5,
"permission_overwrites": [],
"bitrate": 64000,
"user_limit": 0
},
{
"last_message_id": "3343820033257021450",
"type": 1,
"id": "319674150115610528",
"recipients": [
{
"username": "test",
"discriminator": "9999",
"id": "82198898841029460",
"avatar": "33ecab261d4681afa4d85a04691c4a01"
}
]
}
]
"""
return self._query(f'guilds/{guild_id}/channels', 'GET')
|
python
|
def get_channels_in(self, guild_id: str) -> List[Dict[str, Any]]:
"""Get a list of channels in the guild
Args:
guild_id: id of the guild to fetch channels from
Returns:
List of dictionary objects of channels in the guild. Note the different
types of channels: text, voice, DM, group DM.
https://discordapp.com/developers/docs/resources/channel#channel-object
Example:
[
{
"id": "41771983423143937",
"guild_id": "41771983423143937",
"name": "general",
"type": 0,
"position": 6,
"permission_overwrites": [],
"topic": "24/7 chat about how to gank Mike #2",
"last_message_id": "155117677105512449"
},
{
"id": "155101607195836416",
"guild_id": "41771983423143937",
"name": "ROCKET CHEESE",
"type": 2,
"position": 5,
"permission_overwrites": [],
"bitrate": 64000,
"user_limit": 0
},
{
"last_message_id": "3343820033257021450",
"type": 1,
"id": "319674150115610528",
"recipients": [
{
"username": "test",
"discriminator": "9999",
"id": "82198898841029460",
"avatar": "33ecab261d4681afa4d85a04691c4a01"
}
]
}
]
"""
return self._query(f'guilds/{guild_id}/channels', 'GET')
|
[
"def",
"get_channels_in",
"(",
"self",
",",
"guild_id",
":",
"str",
")",
"->",
"List",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
":",
"return",
"self",
".",
"_query",
"(",
"f'guilds/{guild_id}/channels'",
",",
"'GET'",
")"
] |
Get a list of channels in the guild
Args:
guild_id: id of the guild to fetch channels from
Returns:
List of dictionary objects of channels in the guild. Note the different
types of channels: text, voice, DM, group DM.
https://discordapp.com/developers/docs/resources/channel#channel-object
Example:
[
{
"id": "41771983423143937",
"guild_id": "41771983423143937",
"name": "general",
"type": 0,
"position": 6,
"permission_overwrites": [],
"topic": "24/7 chat about how to gank Mike #2",
"last_message_id": "155117677105512449"
},
{
"id": "155101607195836416",
"guild_id": "41771983423143937",
"name": "ROCKET CHEESE",
"type": 2,
"position": 5,
"permission_overwrites": [],
"bitrate": 64000,
"user_limit": 0
},
{
"last_message_id": "3343820033257021450",
"type": 1,
"id": "319674150115610528",
"recipients": [
{
"username": "test",
"discriminator": "9999",
"id": "82198898841029460",
"avatar": "33ecab261d4681afa4d85a04691c4a01"
}
]
}
]
|
[
"Get",
"a",
"list",
"of",
"channels",
"in",
"the",
"guild"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L579-L628
|
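Building on the example payload above, a sketch that filters the returned channels down to text channels (type 0 in the sample data); ``bot`` and the guild id are placeholders.

channels = bot.get_channels_in('41771983423143937')          # placeholder guild id
text_channels = [c for c in channels if c.get('type') == 0]  # 0 = text channel in the sample
for channel in text_channels:
    print(channel['id'], channel['name'])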
240,326
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.get_channel_info
|
def get_channel_info(self, id: str) -> Dict[str, Any]:
"""Get a chanel's information by its id
Args:
id: snowflake id of the channel
Returns:
Dictionary data for the channel API object
Example:
{
"id": "41771983423143937",
"guild_id": "41771983423143937",
"name": "general",
"type": 0,
"position": 6,
"permission_overwrites": [],
"topic": "24/7 chat about how to gank Mike #2",
"last_message_id": "155117677105512449"
}
"""
return self._query(f'channels/{id}', 'GET')
|
python
|
def get_channel_info(self, id: str) -> Dict[str, Any]:
"""Get a chanel's information by its id
Args:
id: snowflake id of the channel
Returns:
Dictionary data for the channel API object
Example:
{
"id": "41771983423143937",
"guild_id": "41771983423143937",
"name": "general",
"type": 0,
"position": 6,
"permission_overwrites": [],
"topic": "24/7 chat about how to gank Mike #2",
"last_message_id": "155117677105512449"
}
"""
return self._query(f'channels/{id}', 'GET')
|
[
"def",
"get_channel_info",
"(",
"self",
",",
"id",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"return",
"self",
".",
"_query",
"(",
"f'channels/{id}'",
",",
"'GET'",
")"
] |
Get a channel's information by its id
Args:
id: snowflake id of the channel
Returns:
Dictionary data for the channel API object
Example:
{
"id": "41771983423143937",
"guild_id": "41771983423143937",
"name": "general",
"type": 0,
"position": 6,
"permission_overwrites": [],
"topic": "24/7 chat about how to gank Mike #2",
"last_message_id": "155117677105512449"
}
|
[
"Get",
"a",
"chanel",
"s",
"information",
"by",
"its",
"id"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L630-L651
|
240,327
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.get_guild_members
|
def get_guild_members(self, guild_id: int) -> List[Dict[str, Any]]:
"""Get a list of members in the guild
Args:
guild_id: snowflake id of the guild
Returns:
List of dictionary objects of users in the guild.
Example:
[
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
},
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
]
"""
return self._query(f'guilds/{guild_id}/members', 'GET')
|
python
|
def get_guild_members(self, guild_id: int) -> List[Dict[str, Any]]:
"""Get a list of members in the guild
Args:
guild_id: snowflake id of the guild
Returns:
List of dictionary objects of users in the guild.
Example:
[
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
},
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
]
"""
return self._query(f'guilds/{guild_id}/members', 'GET')
|
[
"def",
"get_guild_members",
"(",
"self",
",",
"guild_id",
":",
"int",
")",
"->",
"List",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
":",
"return",
"self",
".",
"_query",
"(",
"f'guilds/{guild_id}/members'",
",",
"'GET'",
")"
] |
Get a list of members in the guild
Args:
guild_id: snowflake id of the guild
Returns:
List of dictionary objects of users in the guild.
Example:
[
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
},
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
]
|
[
"Get",
"a",
"list",
"of",
"members",
"in",
"the",
"guild"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L653-L700
|
240,328
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.get_guild_member_by_id
|
def get_guild_member_by_id(self, guild_id: int, member_id: int) -> Dict[str, Any]:
"""Get a guild member by their id
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
Returns:
Dictionary data for the guild member.
Example:
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [
"41771983423143936",
"41771983423143937",
"41771983423143938"
],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
"""
return self._query(f'guilds/{guild_id}/members/{member_id}', 'GET')
|
python
|
def get_guild_member_by_id(self, guild_id: int, member_id: int) -> Dict[str, Any]:
"""Get a guild member by their id
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
Returns:
Dictionary data for the guild member.
Example:
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [
"41771983423143936",
"41771983423143937",
"41771983423143938"
],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
"""
return self._query(f'guilds/{guild_id}/members/{member_id}', 'GET')
|
[
"def",
"get_guild_member_by_id",
"(",
"self",
",",
"guild_id",
":",
"int",
",",
"member_id",
":",
"int",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"return",
"self",
".",
"_query",
"(",
"f'guilds/{guild_id}/members/{member_id}'",
",",
"'GET'",
")"
] |
Get a guild member by their id
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
Returns:
Dictionary data for the guild member.
Example:
{
"id": "41771983423143937",
"name": "Discord Developers",
"icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh",
"splash": null,
"owner_id": "80351110224678912",
"region": "us-east",
"afk_channel_id": "42072017402331136",
"afk_timeout": 300,
"embed_enabled": true,
"embed_channel_id": "41771983444115456",
"verification_level": 1,
"roles": [
"41771983423143936",
"41771983423143937",
"41771983423143938"
],
"emojis": [],
"features": ["INVITE_SPLASH"],
"unavailable": false
}
|
[
"Get",
"a",
"guild",
"member",
"by",
"their",
"id"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L702-L735
|
240,329
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.get_all_guild_roles
|
def get_all_guild_roles(self, guild_id: int) -> List[Dict[str, Any]]:
"""Gets all the roles for the specified guild
Args:
guild_id: snowflake id of the guild
Returns:
List of dictionary objects of roles in the guild.
Example:
[
{
"id": "41771983423143936",
"name": "WE DEM BOYZZ!!!!!!",
"color": 3447003,
"hoist": true,
"position": 1,
"permissions": 66321471,
"managed": false,
"mentionable": false
},
{
"hoist": false,
"name": "Admin",
"mentionable": false,
"color": 15158332,
"position": 2,
"id": "151107620239966208",
"managed": false,
"permissions": 66583679
},
{
"hoist": false,
"name": "@everyone",
"mentionable": false,
"color": 0,
"position": 0,
"id": "151106790233210882",
"managed": false,
"permissions": 37215297
}
]
"""
return self._query(f'guilds/{guild_id}/roles', 'GET')
|
python
|
def get_all_guild_roles(self, guild_id: int) -> List[Dict[str, Any]]:
"""Gets all the roles for the specified guild
Args:
guild_id: snowflake id of the guild
Returns:
List of dictionary objects of roles in the guild.
Example:
[
{
"id": "41771983423143936",
"name": "WE DEM BOYZZ!!!!!!",
"color": 3447003,
"hoist": true,
"position": 1,
"permissions": 66321471,
"managed": false,
"mentionable": false
},
{
"hoist": false,
"name": "Admin",
"mentionable": false,
"color": 15158332,
"position": 2,
"id": "151107620239966208",
"managed": false,
"permissions": 66583679
},
{
"hoist": false,
"name": "@everyone",
"mentionable": false,
"color": 0,
"position": 0,
"id": "151106790233210882",
"managed": false,
"permissions": 37215297
}
]
"""
return self._query(f'guilds/{guild_id}/roles', 'GET')
|
[
"def",
"get_all_guild_roles",
"(",
"self",
",",
"guild_id",
":",
"int",
")",
"->",
"List",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
":",
"return",
"self",
".",
"_query",
"(",
"f'guilds/{guild_id}/roles'",
",",
"'GET'",
")"
] |
Gets all the roles for the specified guild
Args:
guild_id: snowflake id of the guild
Returns:
List of dictionary objects of roles in the guild.
Example:
[
{
"id": "41771983423143936",
"name": "WE DEM BOYZZ!!!!!!",
"color": 3447003,
"hoist": true,
"position": 1,
"permissions": 66321471,
"managed": false,
"mentionable": false
},
{
"hoist": false,
"name": "Admin",
"mentionable": false,
"color": 15158332,
"position": 2,
"id": "151107620239966208",
"managed": false,
"permissions": 66583679
},
{
"hoist": false,
"name": "@everyone",
"mentionable": false,
"color": 0,
"position": 0,
"id": "151106790233210882",
"managed": false,
"permissions": 37215297
}
]
|
[
"Gets",
"all",
"the",
"roles",
"for",
"the",
"specified",
"guild"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L737-L780
|
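A sketch that turns the role list above into a name-to-id lookup, which pairs naturally with ``set_member_roles`` below; ``bot`` and the guild id are placeholders.

roles = bot.get_all_guild_roles('41771983423143937')             # placeholder guild id
role_ids_by_name = {role['name']: role['id'] for role in roles}
admin_role_id = role_ids_by_name.get('Admin')                    # None if no such role exists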
240,330
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.set_member_roles
|
def set_member_roles(self, guild_id: int, member_id: int, roles: List[int]):
"""Set the member's roles
This method takes a list of **role ids** that you want the user to have. This
method will **overwrite** all of the user's current roles with the roles in
the passed list of roles.
When calling this method, be sure that the list of roles that you're setting
for this user is complete, not just the roles that you want to add or remove.
For assistance in just adding or just removing roles, see the ``add_member_roles``
and ``remove_member_roles`` methods.
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
roles: list of snowflake ids of roles to set
"""
self._query(f'guilds/{guild_id}/members/{member_id}', 'PATCH', {'roles': roles}, expected_status=204)
|
python
|
def set_member_roles(self, guild_id: int, member_id: int, roles: List[int]):
"""Set the member's roles
This method takes a list of **role ids** that you want the user to have. This
method will **overwrite** all of the user's current roles with the roles in
the passed list of roles.
When calling this method, be sure that the list of roles that you're setting
for this user is complete, not just the roles that you want to add or remove.
For assistance in just adding or just removing roles, see the ``add_member_roles``
and ``remove_member_roles`` methods.
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
roles: list of snowflake ids of roles to set
"""
self._query(f'guilds/{guild_id}/members/{member_id}', 'PATCH', {'roles': roles}, expected_status=204)
|
[
"def",
"set_member_roles",
"(",
"self",
",",
"guild_id",
":",
"int",
",",
"member_id",
":",
"int",
",",
"roles",
":",
"List",
"[",
"int",
"]",
")",
":",
"self",
".",
"_query",
"(",
"f'guilds/{guild_id}/members/{member_id}'",
",",
"'PATCH'",
",",
"{",
"'roles'",
":",
"roles",
"}",
",",
"expected_status",
"=",
"204",
")"
] |
Set the member's roles
This method takes a list of **role ids** that you want the user to have. This
method will **overwrite** all of the user's current roles with the roles in
the passed list of roles.
When calling this method, be sure that the list of roles that you're setting
for this user is complete, not just the roles that you want to add or remove.
For assistance in just adding or just removing roles, see the ``add_member_roles``
and ``remove_member_roles`` methods.
Args:
guild_id: snowflake id of the guild
member_id: snowflake id of the member
roles: list of snowflake ids of roles to set
|
[
"Set",
"the",
"member",
"s",
"roles"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L782-L799
|
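Because ``set_member_roles`` overwrites the member's entire role list, a common pattern is to read the current roles first and append to them. A hedged sketch using the ``get_guild_member_by_id`` call documented earlier; all ids are placeholder snowflakes and ``bot`` is a connected instance.

guild_id = '41771983423143937'        # placeholder guild snowflake
member_id = '110245175636312064'      # placeholder member snowflake
new_role_id = '151107620239966208'    # placeholder role snowflake

member = bot.get_guild_member_by_id(guild_id, member_id)
current_roles = member.get('roles', [])
if new_role_id not in current_roles:
    bot.set_member_roles(guild_id, member_id, current_roles + [new_role_id])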
240,331
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.command
|
def command(self, name: str) -> Callable:
"""Decorator to wrap methods to register them as commands
The argument to this method is the command that you want to trigger your
callback. If you want users to send "!hello bob" and your method "command_hello"
to get called when someone does, then your setup will look like:
@pycord.command('hello')
def command_hello(data):
# do stuff here
The ``data`` argument that your method will receive is the message object.
Example:
{
"t": "MESSAGE_CREATE",
"s": 4,
"op": 0,
"d": {
"type": 0,
"tts": false,
"timestamp": "2017-07-22T04:46:41.366000+00:00",
"pinned": false,
"nonce": "338180052904574976",
"mentions": [],
"mention_roles": [],
"mention_everyone": false,
"id": "338180026363150336",
"embeds": [],
"edited_timestamp": null,
"content": "!source",
"channel_id": "151106790233210882",
"author": {
"username": "Celeo",
"id": "110245175636312064",
"discriminator": "1453",
"avatar": "3118c26ea7e40350212196e1d9d7f5c9"
},
"attachments": []
}
}
Args:
name: command name
Returns:
Method decorator
"""
def inner(f: Callable):
self._commands.append((name, f))
return inner
|
python
|
def command(self, name: str) -> Callable:
"""Decorator to wrap methods to register them as commands
The argument to this method is the command that you want to trigger your
callback. If you want users to send "!hello bob" and your method "command_hello"
to get called when someone does, then your setup will look like:
@pycord.command('hello')
def command_hello(data):
# do stuff here
The ``data`` argument that your method will receive is the message object.
Example:
{
"t": "MESSAGE_CREATE",
"s": 4,
"op": 0,
"d": {
"type": 0,
"tts": false,
"timestamp": "2017-07-22T04:46:41.366000+00:00",
"pinned": false,
"nonce": "338180052904574976",
"mentions": [],
"mention_roles": [],
"mention_everyone": false,
"id": "338180026363150336",
"embeds": [],
"edited_timestamp": null,
"content": "!source",
"channel_id": "151106790233210882",
"author": {
"username": "Celeo",
"id": "110245175636312064",
"discriminator": "1453",
"avatar": "3118c26ea7e40350212196e1d9d7f5c9"
},
"attachments": []
}
}
Args:
name: command name
Returns:
Method decorator
"""
def inner(f: Callable):
self._commands.append((name, f))
return inner
|
[
"def",
"command",
"(",
"self",
",",
"name",
":",
"str",
")",
"->",
"Callable",
":",
"def",
"inner",
"(",
"f",
":",
"Callable",
")",
":",
"self",
".",
"_commands",
".",
"append",
"(",
"(",
"name",
",",
"f",
")",
")",
"return",
"inner"
] |
Decorator to wrap methods to register them as commands
The argument to this method is the command that you want to trigger your
callback. If you want users to send "!hello bob" and your method "command_hello"
to get called when someone does, then your setup will look like:
@pycord.command('hello')
def command_hello(data):
# do stuff here
The ``data`` argument that your method will receive is the message object.
Example:
{
"t": "MESSAGE_CREATE",
"s": 4,
"op": 0,
"d": {
"type": 0,
"tts": false,
"timestamp": "2017-07-22T04:46:41.366000+00:00",
"pinned": false,
"nonce": "338180052904574976",
"mentions": [],
"mention_roles": [],
"mention_everyone": false,
"id": "338180026363150336",
"embeds": [],
"edited_timestamp": null,
"content": "!source",
"channel_id": "151106790233210882",
"author": {
"username": "Celeo",
"id": "110245175636312064",
"discriminator": "1453",
"avatar": "3118c26ea7e40350212196e1d9d7f5c9"
},
"attachments": []
}
}
Args:
name: command name
Returns:
Method decorator
|
[
"Decorator",
"to",
"wrap",
"methods",
"to",
"register",
"them",
"as",
"commands"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L865-L915
|
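Expanding slightly on the docstring's example, a sketch of a command callback that pulls the author and text out of the message payload shown above; the field access follows the sample payload and is an assumption, not verified behaviour.

@bot.command('hello')
def command_hello(data):
    author = data['d']['author']['username']   # fields follow the sample payload above
    content = data['d']['content']
    print(f'{author} said: {content}')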
240,332
|
Celeo/Pycord
|
pycord/__init__.py
|
Pycord.register_command
|
def register_command(self, name: str, f: Callable):
"""Registers an existing callable object as a command callback
This method can be used instead of the ``@command`` decorator. Both
do the same thing, but this method is useful for registering callbacks
for methods defined before or outside the scope of your bot object,
allowing you to define methods in another file or wherever, import them,
and register them.
See the documentation for the ``@command`` decorator for more information
on what your method will receive.
Example:
def process_hello(data):
# do stuff
# later, somewhere else, etc.
pycord.register_command('hello', process_hello)
Args:
name: the command to trigger the callback (see ``@command`` documentation)
f: callable that will be triggered on command processing
"""
self._commands.append((name, f))
|
python
|
def register_command(self, name: str, f: Callable):
"""Registers an existing callable object as a command callback
This method can be used instead of the ``@command`` decorator. Both
do the same thing, but this method is useful for registering callbacks
for methods defined before or outside the scope of your bot object,
allowing you to define methods in another file or wherever, import them,
and register them.
See the documentation for the ``@command`` decorator for more information
on what your method will receive.
Example:
def process_hello(data):
# do stuff
# later, somewhere else, etc.
pycord.register_command('hello', process_hello)
Args:
name: the command to trigger the callback (see ``@command`` documentation)
f: callable that will be triggered on command processing
"""
self._commands.append((name, f))
|
[
"def",
"register_command",
"(",
"self",
",",
"name",
":",
"str",
",",
"f",
":",
"Callable",
")",
":",
"self",
".",
"_commands",
".",
"append",
"(",
"(",
"name",
",",
"f",
")",
")"
] |
Registers an existing callable object as a command callback
This method can be used instead of the ``@command`` decorator. Both
do the same thing, but this method is useful for registering callbacks
for methods defined before or outside the scope of your bot object,
allowing you to define methods in another file or wherever, import them,
and register them.
See the documentation for the ``@command`` decorator for more information
on what your method will receive.
Example:
def process_hello(data):
# do stuff
# later, somewhere else, etc.
pycord.register_command('hello', process_hello)
Args:
name: the command to trigger the callback (see ``@command`` documentation)
f: callable that will be triggered on command processing
|
[
"Registers",
"an",
"existing",
"callable",
"object",
"as",
"a",
"command",
"callback"
] |
15c38e39b508c89c35f7f6d7009fe8e9f161a94e
|
https://github.com/Celeo/Pycord/blob/15c38e39b508c89c35f7f6d7009fe8e9f161a94e/pycord/__init__.py#L917-L942
|
240,333
|
GMadorell/abris
|
abris_transform/parsing/csv_parsing.py
|
prepare_csv_to_dataframe
|
def prepare_csv_to_dataframe(data_file, config, use_target=True):
"""
Parses the given data file following the data model of the given configuration.
@return: pandas DataFrame
"""
names, dtypes = [], []
model = config.get_data_model()
for feature in model:
assert feature.get_name() not in names, "Two features can't have the same name."
if not use_target and feature.is_target():
continue
names.append(feature.get_name())
data = pd.read_csv(data_file, names=names)
transform_categorical_features(config, data, use_target)
return data
|
python
|
def prepare_csv_to_dataframe(data_file, config, use_target=True):
"""
Parses the given data file following the data model of the given configuration.
@return: pandas DataFrame
"""
names, dtypes = [], []
model = config.get_data_model()
for feature in model:
assert feature.get_name() not in names, "Two features can't have the same name."
if not use_target and feature.is_target():
continue
names.append(feature.get_name())
data = pd.read_csv(data_file, names=names)
transform_categorical_features(config, data, use_target)
return data
|
[
"def",
"prepare_csv_to_dataframe",
"(",
"data_file",
",",
"config",
",",
"use_target",
"=",
"True",
")",
":",
"names",
",",
"dtypes",
"=",
"[",
"]",
",",
"[",
"]",
"model",
"=",
"config",
".",
"get_data_model",
"(",
")",
"for",
"feature",
"in",
"model",
":",
"assert",
"feature",
".",
"get_name",
"(",
")",
"not",
"in",
"names",
",",
"\"Two features can't have the same name.\"",
"if",
"not",
"use_target",
"and",
"feature",
".",
"is_target",
"(",
")",
":",
"continue",
"names",
".",
"append",
"(",
"feature",
".",
"get_name",
"(",
")",
")",
"data",
"=",
"pd",
".",
"read_csv",
"(",
"data_file",
",",
"names",
"=",
"names",
")",
"transform_categorical_features",
"(",
"config",
",",
"data",
",",
"use_target",
")",
"return",
"data"
] |
Parses the given data file following the data model of the given configuration.
@return: pandas DataFrame
|
[
"Parses",
"the",
"given",
"data",
"file",
"following",
"the",
"data",
"model",
"of",
"the",
"given",
"configuration",
"."
] |
0d8ab7ec506835a45fae6935d129f5d7e6937bb2
|
https://github.com/GMadorell/abris/blob/0d8ab7ec506835a45fae6935d129f5d7e6937bb2/abris_transform/parsing/csv_parsing.py#L4-L20
|
240,334
|
sunlightlabs/django-locksmith
|
locksmith/hub/models.py
|
Key.mark_for_update
|
def mark_for_update(self):
'''
Note that a change has been made so all Statuses need update
'''
self.pub_statuses.exclude(status=UNPUBLISHED).update(status=NEEDS_UPDATE)
push_key.delay(self)
|
python
|
def mark_for_update(self):
'''
Note that a change has been made so all Statuses need update
'''
self.pub_statuses.exclude(status=UNPUBLISHED).update(status=NEEDS_UPDATE)
push_key.delay(self)
|
[
"def",
"mark_for_update",
"(",
"self",
")",
":",
"self",
".",
"pub_statuses",
".",
"exclude",
"(",
"status",
"=",
"UNPUBLISHED",
")",
".",
"update",
"(",
"status",
"=",
"NEEDS_UPDATE",
")",
"push_key",
".",
"delay",
"(",
"self",
")"
] |
Note that a change has been made so all Statuses need update
|
[
"Note",
"that",
"a",
"change",
"has",
"been",
"made",
"so",
"all",
"Statuses",
"need",
"update"
] |
eef5b7c25404560aaad50b6e622594f89239b74b
|
https://github.com/sunlightlabs/django-locksmith/blob/eef5b7c25404560aaad50b6e622594f89239b74b/locksmith/hub/models.py#L82-L87
|
240,335
|
drongo-framework/drongo
|
drongo/response.py
|
Response.set_cookie
|
def set_cookie(self, key, value, domain=None, path='/', secure=False,
httponly=True):
"""Set a cookie.
Args:
key (:obj:`str`): Cookie name
value (:obj:`str`): Cookie value
domain (:obj:`str`): Cookie domain
path (:obj:`str`): Cookie path
secure (:obj:`bool`): True if secure, False otherwise
httponly (:obj:`bool`): True if it's an HTTP only cookie, False
otherwise
"""
self._cookies[key] = value
if domain:
self._cookies[key]['domain'] = domain
if path:
self._cookies[key]['path'] = path
if secure:
self._cookies[key]['secure'] = secure
if httponly:
self._cookies[key]['httponly'] = httponly
|
python
|
def set_cookie(self, key, value, domain=None, path='/', secure=False,
httponly=True):
"""Set a cookie.
Args:
key (:obj:`str`): Cookie name
value (:obj:`str`): Cookie value
domain (:obj:`str`): Cookie domain
path (:obj:`str`): Cookie path
secure (:obj:`bool`): True if secure, False otherwise
httponly (:obj:`bool`): True if it's an HTTP only cookie, False
otherwise
"""
self._cookies[key] = value
if domain:
self._cookies[key]['domain'] = domain
if path:
self._cookies[key]['path'] = path
if secure:
self._cookies[key]['secure'] = secure
if httponly:
self._cookies[key]['httponly'] = httponly
|
[
"def",
"set_cookie",
"(",
"self",
",",
"key",
",",
"value",
",",
"domain",
"=",
"None",
",",
"path",
"=",
"'/'",
",",
"secure",
"=",
"False",
",",
"httponly",
"=",
"True",
")",
":",
"self",
".",
"_cookies",
"[",
"key",
"]",
"=",
"value",
"if",
"domain",
":",
"self",
".",
"_cookies",
"[",
"key",
"]",
"[",
"'domain'",
"]",
"=",
"domain",
"if",
"path",
":",
"self",
".",
"_cookies",
"[",
"key",
"]",
"[",
"'path'",
"]",
"=",
"path",
"if",
"secure",
":",
"self",
".",
"_cookies",
"[",
"key",
"]",
"[",
"'secure'",
"]",
"=",
"secure",
"if",
"httponly",
":",
"self",
".",
"_cookies",
"[",
"key",
"]",
"[",
"'httponly'",
"]",
"=",
"httponly"
] |
Set a cookie.
Args:
key (:obj:`str`): Cookie name
value (:obj:`str`): Cookie value
domain (:obj:`str`): Cookie domain
path (:obj:`str`): Cookie path
secure (:obj:`bool`): True if secure, False otherwise
httponly (:obj:`bool`): True if it's an HTTP only cookie, False
otherwise
|
[
"Set",
"a",
"cookie",
"."
] |
487edb370ae329f370bcf3b433ed3f28ba4c1d8c
|
https://github.com/drongo-framework/drongo/blob/487edb370ae329f370bcf3b433ed3f28ba4c1d8c/drongo/response.py#L51-L72
|
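A usage sketch for ``set_cookie``; ``response`` is assumed to be a drongo ``Response`` instance and the session value is a placeholder.

response.set_cookie('session_id', 'PLACEHOLDER_VALUE',
                    domain='example.com', path='/', secure=True, httponly=True)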
240,336
|
drongo-framework/drongo
|
drongo/response.py
|
Response.set_content
|
def set_content(self, content, content_length=None):
"""Set content for the response.
Args:
content (:obj:`str` or :obj:`iterable`): Response content. Can be
either unicode or raw bytes. When returning large content,
an iterable (or a generator) can be used to avoid loading
entire content into the memory.
content_length (:obj:`int`, optional): Content length. Length will
be determined if not set. If content is an iterable, it's a
good practise to set the content length.
"""
if content_length is not None:
self._content_length = content_length
self._content = content
|
python
|
def set_content(self, content, content_length=None):
"""Set content for the response.
Args:
content (:obj:`str` or :obj:`iterable`): Response content. Can be
either unicode or raw bytes. When returning large content,
an iterable (or a generator) can be used to avoid loading
entire content into the memory.
content_length (:obj:`int`, optional): Content length. Length will
be determined if not set. If content is an iterable, it's a
good practise to set the content length.
"""
if content_length is not None:
self._content_length = content_length
self._content = content
|
[
"def",
"set_content",
"(",
"self",
",",
"content",
",",
"content_length",
"=",
"None",
")",
":",
"if",
"content_length",
"is",
"not",
"None",
":",
"self",
".",
"_content_length",
"=",
"content_length",
"self",
".",
"_content",
"=",
"content"
] |
Set content for the response.
Args:
content (:obj:`str` or :obj:`iterable`): Response content. Can be
either unicode or raw bytes. When returning large content,
an iterable (or a generator) can be used to avoid loading
entire content into the memory.
content_length (:obj:`int`, optional): Content length. Length will
be determined if not set. If content is an iterable, it's a
good practise to set the content length.
|
[
"Set",
"content",
"for",
"the",
"response",
"."
] |
487edb370ae329f370bcf3b433ed3f28ba4c1d8c
|
https://github.com/drongo-framework/drongo/blob/487edb370ae329f370bcf3b433ed3f28ba4c1d8c/drongo/response.py#L74-L88
|
240,337
|
drongo-framework/drongo
|
drongo/response.py
|
Response.bake
|
def bake(self, start_response):
"""Bakes the response and returns the content.
Args:
start_response (:obj:`callable`): Callback method that accepts
status code and a list of tuples (pairs) containing headers'
key and value respectively.
"""
if isinstance(self._content, six.text_type):
self._content = self._content.encode('utf8')
if self._content_length is None:
self._content_length = len(self._content)
self._headers[HttpResponseHeaders.CONTENT_LENGTH] = \
str(self._content_length)
headers = list(self._headers.items())
cookies = [(HttpResponseHeaders.SET_COOKIE, v.OutputString())
for _, v in self._cookies.items()]
if len(cookies):
headers = list(headers) + cookies
start_response(self._status_code, headers)
if isinstance(self._content, six.binary_type):
return [self._content]
return self._content
|
python
|
def bake(self, start_response):
"""Bakes the response and returns the content.
Args:
start_response (:obj:`callable`): Callback method that accepts
status code and a list of tuples (pairs) containing headers'
key and value respectively.
"""
if isinstance(self._content, six.text_type):
self._content = self._content.encode('utf8')
if self._content_length is None:
self._content_length = len(self._content)
self._headers[HttpResponseHeaders.CONTENT_LENGTH] = \
str(self._content_length)
headers = list(self._headers.items())
cookies = [(HttpResponseHeaders.SET_COOKIE, v.OutputString())
for _, v in self._cookies.items()]
if len(cookies):
headers = list(headers) + cookies
start_response(self._status_code, headers)
if isinstance(self._content, six.binary_type):
return [self._content]
return self._content
|
[
"def",
"bake",
"(",
"self",
",",
"start_response",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"_content",
",",
"six",
".",
"text_type",
")",
":",
"self",
".",
"_content",
"=",
"self",
".",
"_content",
".",
"encode",
"(",
"'utf8'",
")",
"if",
"self",
".",
"_content_length",
"is",
"None",
":",
"self",
".",
"_content_length",
"=",
"len",
"(",
"self",
".",
"_content",
")",
"self",
".",
"_headers",
"[",
"HttpResponseHeaders",
".",
"CONTENT_LENGTH",
"]",
"=",
"str",
"(",
"self",
".",
"_content_length",
")",
"headers",
"=",
"list",
"(",
"self",
".",
"_headers",
".",
"items",
"(",
")",
")",
"cookies",
"=",
"[",
"(",
"HttpResponseHeaders",
".",
"SET_COOKIE",
",",
"v",
".",
"OutputString",
"(",
")",
")",
"for",
"_",
",",
"v",
"in",
"self",
".",
"_cookies",
".",
"items",
"(",
")",
"]",
"if",
"len",
"(",
"cookies",
")",
":",
"headers",
"=",
"list",
"(",
"headers",
")",
"+",
"cookies",
"start_response",
"(",
"self",
".",
"_status_code",
",",
"headers",
")",
"if",
"isinstance",
"(",
"self",
".",
"_content",
",",
"six",
".",
"binary_type",
")",
":",
"return",
"[",
"self",
".",
"_content",
"]",
"return",
"self",
".",
"_content"
] |
Bakes the response and returns the content.
Args:
start_response (:obj:`callable`): Callback method that accepts
status code and a list of tuples (pairs) containing headers'
key and value respectively.
|
[
"Bakes",
"the",
"response",
"and",
"returns",
"the",
"content",
"."
] |
487edb370ae329f370bcf3b433ed3f28ba4c1d8c
|
https://github.com/drongo-framework/drongo/blob/487edb370ae329f370bcf3b433ed3f28ba4c1d8c/drongo/response.py#L90-L117
|
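A minimal sketch of how ``bake`` plugs into a WSGI callable; the import path and the no-argument ``Response()`` constructor are assumptions based on the identifiers used in this file.

from drongo.response import Response  # assumed import path

def application(environ, start_response):
    response = Response()                    # assumed no-argument constructor
    response.set_content('hello world')
    return response.bake(start_response)     # sends status + headers, returns the body iterable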
240,338
|
drongo-framework/drongo
|
drongo/response.py
|
Response.set_redirect
|
def set_redirect(self, url, status=HttpStatusCodes.HTTP_303):
"""Helper method to set a redirect response.
Args:
url (:obj:`str`): URL to redirect to
status (:obj:`str`, optional): Status code of the response
"""
self.set_status(status)
self.set_content('')
self.set_header(HttpResponseHeaders.LOCATION, url)
|
python
|
def set_redirect(self, url, status=HttpStatusCodes.HTTP_303):
"""Helper method to set a redirect response.
Args:
url (:obj:`str`): URL to redirect to
status (:obj:`str`, optional): Status code of the response
"""
self.set_status(status)
self.set_content('')
self.set_header(HttpResponseHeaders.LOCATION, url)
|
[
"def",
"set_redirect",
"(",
"self",
",",
"url",
",",
"status",
"=",
"HttpStatusCodes",
".",
"HTTP_303",
")",
":",
"self",
".",
"set_status",
"(",
"status",
")",
"self",
".",
"set_content",
"(",
"''",
")",
"self",
".",
"set_header",
"(",
"HttpResponseHeaders",
".",
"LOCATION",
",",
"url",
")"
] |
Helper method to set a redirect response.
Args:
url (:obj:`str`): URL to redirect to
status (:obj:`str`, optional): Status code of the response
|
[
"Helper",
"method",
"to",
"set",
"a",
"redirect",
"response",
"."
] |
487edb370ae329f370bcf3b433ed3f28ba4c1d8c
|
https://github.com/drongo-framework/drongo/blob/487edb370ae329f370bcf3b433ed3f28ba4c1d8c/drongo/response.py#L120-L129
|
240,339
|
drongo-framework/drongo
|
drongo/response.py
|
Response.set_json
|
def set_json(self, obj, status=HttpStatusCodes.HTTP_200):
"""Helper method to set a JSON response.
Args:
obj (:obj:`object`): JSON serializable object
status (:obj:`str`, optional): Status code of the response
"""
obj = json.dumps(obj, sort_keys=True, default=lambda x: str(x))
self.set_status(status)
self.set_header(HttpResponseHeaders.CONTENT_TYPE, 'application/json')
self.set_content(obj)
|
python
|
def set_json(self, obj, status=HttpStatusCodes.HTTP_200):
"""Helper method to set a JSON response.
Args:
obj (:obj:`object`): JSON serializable object
status (:obj:`str`, optional): Status code of the response
"""
obj = json.dumps(obj, sort_keys=True, default=lambda x: str(x))
self.set_status(status)
self.set_header(HttpResponseHeaders.CONTENT_TYPE, 'application/json')
self.set_content(obj)
|
[
"def",
"set_json",
"(",
"self",
",",
"obj",
",",
"status",
"=",
"HttpStatusCodes",
".",
"HTTP_200",
")",
":",
"obj",
"=",
"json",
".",
"dumps",
"(",
"obj",
",",
"sort_keys",
"=",
"True",
",",
"default",
"=",
"lambda",
"x",
":",
"str",
"(",
"x",
")",
")",
"self",
".",
"set_status",
"(",
"status",
")",
"self",
".",
"set_header",
"(",
"HttpResponseHeaders",
".",
"CONTENT_TYPE",
",",
"'application/json'",
")",
"self",
".",
"set_content",
"(",
"obj",
")"
] |
Helper method to set a JSON response.
Args:
obj (:obj:`object`): JSON serializable object
status (:obj:`str`, optional): Status code of the response
|
[
"Helper",
"method",
"to",
"set",
"a",
"JSON",
"response",
"."
] |
487edb370ae329f370bcf3b433ed3f28ba4c1d8c
|
https://github.com/drongo-framework/drongo/blob/487edb370ae329f370bcf3b433ed3f28ba4c1d8c/drongo/response.py#L131-L141
|
240,340
|
Huong-nt/flask-rak
|
flask_rak/core.py
|
find_rak
|
def find_rak():
"""
Find our instance of Rak, navigating Local's and possible blueprints.
"""
if hasattr(current_app, 'rak'):
return getattr(current_app, 'rak')
else:
if hasattr(current_app, 'blueprints'):
blueprints = getattr(current_app, 'blueprints')
for blueprint_name in blueprints:
if hasattr(blueprints[blueprint_name], 'rak'):
return getattr(blueprints[blueprint_name], 'rak')
|
python
|
def find_rak():
"""
Find our instance of Rak, navigating Local's and possible blueprints.
"""
if hasattr(current_app, 'rak'):
return getattr(current_app, 'rak')
else:
if hasattr(current_app, 'blueprints'):
blueprints = getattr(current_app, 'blueprints')
for blueprint_name in blueprints:
if hasattr(blueprints[blueprint_name], 'rak'):
return getattr(blueprints[blueprint_name], 'rak')
|
[
"def",
"find_rak",
"(",
")",
":",
"if",
"hasattr",
"(",
"current_app",
",",
"'rak'",
")",
":",
"return",
"getattr",
"(",
"current_app",
",",
"'rak'",
")",
"else",
":",
"if",
"hasattr",
"(",
"current_app",
",",
"'blueprints'",
")",
":",
"blueprints",
"=",
"getattr",
"(",
"current_app",
",",
"'blueprints'",
")",
"for",
"blueprint_name",
"in",
"blueprints",
":",
"if",
"hasattr",
"(",
"blueprints",
"[",
"blueprint_name",
"]",
",",
"'rak'",
")",
":",
"return",
"getattr",
"(",
"blueprints",
"[",
"blueprint_name",
"]",
",",
"'rak'",
")"
] |
Find our instance of Rak, navigating Local's and possible blueprints.
|
[
"Find",
"our",
"instance",
"of",
"Rak",
"navigating",
"Local",
"s",
"and",
"possible",
"blueprints",
"."
] |
ffe16b0fc3d49e83c1d220c445ce14632219f69d
|
https://github.com/Huong-nt/flask-rak/blob/ffe16b0fc3d49e83c1d220c445ce14632219f69d/flask_rak/core.py#L29-L40
|
240,341
|
Huong-nt/flask-rak
|
flask_rak/core.py
|
RAK.intent
|
def intent(self, intent_name):
"""Decorator routes an Rogo IntentRequest.
Functions decorated as an intent are registered as the view function for the Intent's URL,
and provide the backend responses to give your Skill its functionality.
@ask.intent('WeatherIntent')
def weather(city):
return statement('I predict great weather for {}'.format(city))
Arguments:
intent_name {str} -- Name of the intent request to be mapped to the decorated function
"""
def decorator(f):
self._intent_view_funcs[intent_name] = f
@wraps(f)
def wrapper(*args, **kw):
self._flask_view_func(*args, **kw)
return f
return decorator
|
python
|
def intent(self, intent_name):
"""Decorator routes an Rogo IntentRequest.
Functions decorated as an intent are registered as the view function for the Intent's URL,
and provide the backend responses to give your Skill its functionality.
@ask.intent('WeatherIntent')
def weather(city):
return statement('I predict great weather for {}'.format(city))
Arguments:
intent_name {str} -- Name of the intent request to be mapped to the decorated function
"""
def decorator(f):
self._intent_view_funcs[intent_name] = f
@wraps(f)
def wrapper(*args, **kw):
self._flask_view_func(*args, **kw)
return f
return decorator
|
[
"def",
"intent",
"(",
"self",
",",
"intent_name",
")",
":",
"def",
"decorator",
"(",
"f",
")",
":",
"self",
".",
"_intent_view_funcs",
"[",
"intent_name",
"]",
"=",
"f",
"@",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"self",
".",
"_flask_view_func",
"(",
"*",
"args",
",",
"*",
"*",
"kw",
")",
"return",
"f",
"return",
"decorator"
] |
Decorator routes a Rogo IntentRequest.
Functions decorated as an intent are registered as the view function for the Intent's URL,
and provide the backend responses to give your Skill its functionality.
@ask.intent('WeatherIntent')
def weather(city):
return statement('I predict great weather for {}'.format(city))
Arguments:
intent_name {str} -- Name of the intent request to be mapped to the decorated function
|
[
"Decorator",
"routes",
"an",
"Rogo",
"IntentRequest",
".",
"Functions",
"decorated",
"as",
"an",
"intent",
"are",
"registered",
"as",
"the",
"view",
"function",
"for",
"the",
"Intent",
"s",
"URL",
"and",
"provide",
"the",
"backend",
"responses",
"to",
"give",
"your",
"Skill",
"its",
"functionality",
"."
] |
ffe16b0fc3d49e83c1d220c445ce14632219f69d
|
https://github.com/Huong-nt/flask-rak/blob/ffe16b0fc3d49e83c1d220c445ce14632219f69d/flask_rak/core.py#L155-L172
|
240,342
|
emin63/eyap
|
eyap/core/comments.py
|
SingleComment.make_anchor_id
|
def make_anchor_id(self):
"""Return string to use as URL anchor for this comment.
"""
result = re.sub(
'[^a-zA-Z0-9_]', '_', self.user + '_' + self.timestamp)
return result
|
python
|
def make_anchor_id(self):
"""Return string to use as URL anchor for this comment.
"""
result = re.sub(
'[^a-zA-Z0-9_]', '_', self.user + '_' + self.timestamp)
return result
|
[
"def",
"make_anchor_id",
"(",
"self",
")",
":",
"result",
"=",
"re",
".",
"sub",
"(",
"'[^a-zA-Z0-9_]'",
",",
"'_'",
",",
"self",
".",
"user",
"+",
"'_'",
"+",
"self",
".",
"timestamp",
")",
"return",
"result"
] |
Return string to use as URL anchor for this comment.
|
[
"Return",
"string",
"to",
"use",
"as",
"URL",
"anchor",
"for",
"this",
"comment",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L60-L65
|
240,343
|
emin63/eyap
|
eyap/core/comments.py
|
SingleComment.make_url
|
def make_url(self, my_request, anchor_id=None):
"""Make URL to this comment.
:arg my_request: The request object where this comment is seen from.
:arg anchor_id=None: Optional anchor id. If None, we use
self.make_anchor_id()
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
:returns: String URL to this comment.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Be able to create links to this comment.
"""
if anchor_id is None:
anchor_id = self.make_anchor_id()
result = '{}?{}#{}'.format(
my_request.path, urllib.parse.urlencode(my_request.args),
anchor_id)
return result
|
python
|
def make_url(self, my_request, anchor_id=None):
"""Make URL to this comment.
:arg my_request: The request object where this comment is seen from.
:arg anchor_id=None: Optional anchor id. If None, we use
self.make_anchor_id()
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
:returns: String URL to this comment.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Be able to create links to this comment.
"""
if anchor_id is None:
anchor_id = self.make_anchor_id()
result = '{}?{}#{}'.format(
my_request.path, urllib.parse.urlencode(my_request.args),
anchor_id)
return result
|
[
"def",
"make_url",
"(",
"self",
",",
"my_request",
",",
"anchor_id",
"=",
"None",
")",
":",
"if",
"anchor_id",
"is",
"None",
":",
"anchor_id",
"=",
"self",
".",
"make_anchor_id",
"(",
")",
"result",
"=",
"'{}?{}#{}'",
".",
"format",
"(",
"my_request",
".",
"path",
",",
"urllib",
".",
"parse",
".",
"urlencode",
"(",
"my_request",
".",
"args",
")",
",",
"anchor_id",
")",
"return",
"result"
] |
Make URL to this comment.
:arg my_request: The request object where this comment is seen from.
:arg anchor_id=None: Optional anchor id. If None, we use
self.make_anchor_id()
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
:returns: String URL to this comment.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Be able to create links to this comment.
|
[
"Make",
"URL",
"to",
"this",
"comment",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L67-L90
|
240,344
|
emin63/eyap
|
eyap/core/comments.py
|
SingleComment.to_dict
|
def to_dict(self):
"""Return description of self in dict format.
This is useful for serializing to something like json later.
"""
jdict = {
'user': self.user,
'summary': self.summary,
'body': self.body,
'markup': self.markup,
'url': self.url,
'timestamp': self.timestamp
}
return jdict
|
python
|
def to_dict(self):
"""Return description of self in dict format.
This is useful for serializing to something like json later.
"""
jdict = {
'user': self.user,
'summary': self.summary,
'body': self.body,
'markup': self.markup,
'url': self.url,
'timestamp': self.timestamp
}
return jdict
|
[
"def",
"to_dict",
"(",
"self",
")",
":",
"jdict",
"=",
"{",
"'user'",
":",
"self",
".",
"user",
",",
"'summary'",
":",
"self",
".",
"summary",
",",
"'body'",
":",
"self",
".",
"body",
",",
"'markup'",
":",
"self",
".",
"markup",
",",
"'url'",
":",
"self",
".",
"url",
",",
"'timestamp'",
":",
"self",
".",
"timestamp",
"}",
"return",
"jdict"
] |
Return description of self in dict format.
This is useful for serializing to something like json later.
|
[
"Return",
"description",
"of",
"self",
"in",
"dict",
"format",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L92-L105
|
240,345
|
emin63/eyap
|
eyap/core/comments.py
|
SingleComment.set_display_mode
|
def set_display_mode(self, mytz, fmt):
"""Set the display mode for self.
:arg mytz: A pytz.timezone object.
:arg fmt: A format string for strftime.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Modifies self.display_timestamp to first parse
self.timestamp and then format according to given
timezone and format string.
"""
my_stamp = dateutil.parser.parse(self.timestamp)
tz_stamp = my_stamp.astimezone(
mytz) if my_stamp.tzinfo is not None else my_stamp
self.display_timestamp = tz_stamp.strftime(fmt)
|
python
|
def set_display_mode(self, mytz, fmt):
"""Set the display mode for self.
:arg mytz: A pytz.timezone object.
:arg fmt: A format string for strftime.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Modifies self.display_timestamp to first parse
self.timestamp and then format according to given
timezone and format string.
"""
my_stamp = dateutil.parser.parse(self.timestamp)
tz_stamp = my_stamp.astimezone(
mytz) if my_stamp.tzinfo is not None else my_stamp
self.display_timestamp = tz_stamp.strftime(fmt)
|
[
"def",
"set_display_mode",
"(",
"self",
",",
"mytz",
",",
"fmt",
")",
":",
"my_stamp",
"=",
"dateutil",
".",
"parser",
".",
"parse",
"(",
"self",
".",
"timestamp",
")",
"tz_stamp",
"=",
"my_stamp",
".",
"astimezone",
"(",
"mytz",
")",
"if",
"my_stamp",
".",
"tzinfo",
"is",
"not",
"None",
"else",
"my_stamp",
"self",
".",
"display_timestamp",
"=",
"tz_stamp",
".",
"strftime",
"(",
"fmt",
")"
] |
Set the display mode for self.
:arg mytz: A pytz.timezone object.
:arg fmt: A format string for strftime.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Modifies self.display_timestamp to first parse
self.timestamp and then format according to given
timezone and format string.
|
[
"Set",
"the",
"display",
"mode",
"for",
"self",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L107-L124
|
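A usage sketch for ``set_display_mode``, assuming ``comment`` is a ``SingleComment`` whose ``timestamp`` parses with ``dateutil``; the timezone and format string are only examples.

import pytz

comment.set_display_mode(pytz.timezone('US/Eastern'), '%Y-%m-%d %H:%M')
print(comment.display_timestamp)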
240,346
|
emin63/eyap
|
eyap/core/comments.py
|
CommentThread.get_comment_section
|
def get_comment_section(self, force_reload=False, reverse=False):
"""Get CommentSection instance representing all comments for thread.
:arg force_reload=False: Whether to force reloading comments
directly or allow using what is cached
in self.content if possible.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
:returns: CommentSection representing all comments for thread.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: High-level function called by user to get comments.
"""
if self.content is not None and not force_reload:
return self.content
if self.thread_id is None:
self.thread_id = self.lookup_thread_id()
self.content = self.lookup_comments(reverse=reverse)
return self.content
|
python
|
def get_comment_section(self, force_reload=False, reverse=False):
"""Get CommentSection instance representing all comments for thread.
:arg force_reload=False: Whether to force reloading comments
directly or allow using what is cached
in self.content if possible.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
:returns: CommentSection representing all comments for thread.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: High-level function called by user to get comments.
"""
if self.content is not None and not force_reload:
return self.content
if self.thread_id is None:
self.thread_id = self.lookup_thread_id()
self.content = self.lookup_comments(reverse=reverse)
return self.content
|
[
"def",
"get_comment_section",
"(",
"self",
",",
"force_reload",
"=",
"False",
",",
"reverse",
"=",
"False",
")",
":",
"if",
"self",
".",
"content",
"is",
"not",
"None",
"and",
"not",
"force_reload",
":",
"return",
"self",
".",
"content",
"if",
"self",
".",
"thread_id",
"is",
"None",
":",
"self",
".",
"thread_id",
"=",
"self",
".",
"lookup_thread_id",
"(",
")",
"self",
".",
"content",
"=",
"self",
".",
"lookup_comments",
"(",
"reverse",
"=",
"reverse",
")",
"return",
"self",
".",
"content"
] |
Get CommentSection instance representing all comments for thread.
:arg force_reload=False: Whether to force reloading comments
directly or allow using what is cached
in self.content if possible.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
:returns: CommentSection representing all comments for thread.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: High-level function called by user to get comments.
|
[
"Get",
"CommentSection",
"instance",
"representing",
"all",
"comments",
"for",
"thread",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L318-L340
|
240,347
|
emin63/eyap
|
eyap/core/comments.py
|
CommentThread.validate_attachment_location
|
def validate_attachment_location(self, location):
"""Validate a proposed attachment location.
:arg location: String representing location to put attachment.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Raises an exception if attachment location is bad.
By default, this just forces reasonable characters.
Sub-classes can override as desired.
"""
if not re.compile(self.valid_attachment_loc_re).match(location):
raise ValueError(
'Bad chars in attachment location. Must match %s' % (
self.valid_attachment_loc_re))
|
python
|
def validate_attachment_location(self, location):
"""Validate a proposed attachment location.
:arg location: String representing location to put attachment.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Raises an exception if attachment location is bad.
By default, this just forces reasonable characters.
Sub-classes can override as desired.
"""
if not re.compile(self.valid_attachment_loc_re).match(location):
raise ValueError(
'Bad chars in attachment location. Must match %s' % (
self.valid_attachment_loc_re))
|
[
"def",
"validate_attachment_location",
"(",
"self",
",",
"location",
")",
":",
"if",
"not",
"re",
".",
"compile",
"(",
"self",
".",
"valid_attachment_loc_re",
")",
".",
"match",
"(",
"location",
")",
":",
"raise",
"ValueError",
"(",
"'Bad chars in attachment location. Must match %s'",
"%",
"(",
"self",
".",
"valid_attachment_loc_re",
")",
")"
] |
Validate a proposed attachment location.
:arg location: String representing location to put attachment.
~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-
PURPOSE: Raises an exception if attachment location is bad.
By default, this just forces reasonable characters.
Sub-classes can override as desired.
|
[
"Validate",
"a",
"proposed",
"attachment",
"location",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L342-L357
|
240,348
|
emin63/eyap
|
eyap/core/comments.py
|
FileCommentThread.lookup_thread_id
|
def lookup_thread_id(self):
"Lookup the thread id as path to comment file."
path = os.path.join(self.realm, self.topic + '.csv')
return path
|
python
|
def lookup_thread_id(self):
"Lookup the thread id as path to comment file."
path = os.path.join(self.realm, self.topic + '.csv')
return path
|
[
"def",
"lookup_thread_id",
"(",
"self",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"realm",
",",
"self",
".",
"topic",
"+",
"'.csv'",
")",
"return",
"path"
] |
Lookup the thread id as path to comment file.
|
[
"Lookup",
"the",
"thread",
"id",
"as",
"path",
"to",
"comment",
"file",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L398-L402
|
240,349
|
emin63/eyap
|
eyap/core/comments.py
|
FileCommentThread.lookup_comments
|
def lookup_comments(self, reverse=False):
"Implement as required by parent to lookup comments in file system."
comments = []
if self.thread_id is None:
self.thread_id = self.lookup_thread_id()
path = self.thread_id
with open(self.thread_id, 'r', newline='') as fdesc:
reader = csv.reader(fdesc)
header = reader.__next__()
assert header == self.header
for num, line in enumerate(reader):
if not line:
continue
assert len(line) == len(header), (
'Line %i in path %s misformatted' % (num+1, path))
line_kw = dict(zip(header, line))
comments.append(SingleComment(**line_kw))
if reverse:
comments = list(reversed(comments))
return CommentSection(comments)
|
python
|
def lookup_comments(self, reverse=False):
"Implement as required by parent to lookup comments in file system."
comments = []
if self.thread_id is None:
self.thread_id = self.lookup_thread_id()
path = self.thread_id
with open(self.thread_id, 'r', newline='') as fdesc:
reader = csv.reader(fdesc)
header = reader.__next__()
assert header == self.header
for num, line in enumerate(reader):
if not line:
continue
assert len(line) == len(header), (
'Line %i in path %s misformatted' % (num+1, path))
line_kw = dict(zip(header, line))
comments.append(SingleComment(**line_kw))
if reverse:
comments = list(reversed(comments))
return CommentSection(comments)
|
[
"def",
"lookup_comments",
"(",
"self",
",",
"reverse",
"=",
"False",
")",
":",
"comments",
"=",
"[",
"]",
"if",
"self",
".",
"thread_id",
"is",
"None",
":",
"self",
".",
"thread_id",
"=",
"self",
".",
"lookup_thread_id",
"(",
")",
"path",
"=",
"self",
".",
"thread_id",
"with",
"open",
"(",
"self",
".",
"thread_id",
",",
"'r'",
",",
"newline",
"=",
"''",
")",
"as",
"fdesc",
":",
"reader",
"=",
"csv",
".",
"reader",
"(",
"fdesc",
")",
"header",
"=",
"reader",
".",
"__next__",
"(",
")",
"assert",
"header",
"==",
"self",
".",
"header",
"for",
"num",
",",
"line",
"in",
"enumerate",
"(",
"reader",
")",
":",
"if",
"not",
"line",
":",
"continue",
"assert",
"len",
"(",
"line",
")",
"==",
"len",
"(",
"header",
")",
",",
"(",
"'Line %i in path %s misformatted'",
"%",
"(",
"num",
"+",
"1",
",",
"path",
")",
")",
"line_kw",
"=",
"dict",
"(",
"zip",
"(",
"header",
",",
"line",
")",
")",
"comments",
".",
"append",
"(",
"SingleComment",
"(",
"*",
"*",
"line_kw",
")",
")",
"if",
"reverse",
":",
"comments",
"=",
"list",
"(",
"reversed",
"(",
"comments",
")",
")",
"return",
"CommentSection",
"(",
"comments",
")"
] |
Implement as required by parent to lookup comments in file system.
|
[
"Implement",
"as",
"required",
"by",
"parent",
"to",
"lookup",
"comments",
"in",
"file",
"system",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L404-L425
|
240,350
|
emin63/eyap
|
eyap/core/comments.py
|
FileCommentThread.create_thread
|
def create_thread(self, body):
"""Implement create_thread as required by parent.
This basically just calls add_comment with allow_create=True
and then builds a response object to indicate everything is fine.
"""
self.add_comment(body, allow_create=True)
the_response = Response()
the_response.code = "OK"
the_response.status_code = 200
|
python
|
def create_thread(self, body):
"""Implement create_thread as required by parent.
This basically just calls add_comment with allow_create=True
and then builds a response object to indicate everything is fine.
"""
self.add_comment(body, allow_create=True)
the_response = Response()
the_response.code = "OK"
the_response.status_code = 200
|
[
"def",
"create_thread",
"(",
"self",
",",
"body",
")",
":",
"self",
".",
"add_comment",
"(",
"body",
",",
"allow_create",
"=",
"True",
")",
"the_response",
"=",
"Response",
"(",
")",
"the_response",
".",
"code",
"=",
"\"OK\"",
"the_response",
".",
"status_code",
"=",
"200"
] |
Implement create_thread as required by parent.
This basically just calls add_comment with allow_create=True
and then builds a response object to indicate everything is fine.
|
[
"Implement",
"create_thread",
"as",
"required",
"by",
"parent",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L427-L436
|
240,351
|
emin63/eyap
|
eyap/core/comments.py
|
FileCommentThread.add_comment
|
def add_comment(self, body, allow_create=False, allow_hashes=False,
summary=None):
"Implement as required by parent to store comment in CSV file."
if allow_hashes:
raise ValueError('allow_hashes not implemented for %s yet' % (
self.__class__.__name__))
if self.thread_id is None:
self.thread_id = self.lookup_thread_id()
if not os.path.exists(self.thread_id):
if not allow_create:
raise KeyError(self.topic)
with open(self.thread_id, 'a', newline='') as fdesc:
csv.writer(fdesc).writerow(self.header)
with open(self.thread_id, 'a', newline='') as fdesc:
writer = csv.writer(fdesc)
writer.writerow([self.user, datetime.datetime.utcnow(), summary,
body, ''])
|
python
|
def add_comment(self, body, allow_create=False, allow_hashes=False,
summary=None):
"Implement as required by parent to store comment in CSV file."
if allow_hashes:
raise ValueError('allow_hashes not implemented for %s yet' % (
self.__class__.__name__))
if self.thread_id is None:
self.thread_id = self.lookup_thread_id()
if not os.path.exists(self.thread_id):
if not allow_create:
raise KeyError(self.topic)
with open(self.thread_id, 'a', newline='') as fdesc:
csv.writer(fdesc).writerow(self.header)
with open(self.thread_id, 'a', newline='') as fdesc:
writer = csv.writer(fdesc)
writer.writerow([self.user, datetime.datetime.utcnow(), summary,
body, ''])
|
[
"def",
"add_comment",
"(",
"self",
",",
"body",
",",
"allow_create",
"=",
"False",
",",
"allow_hashes",
"=",
"False",
",",
"summary",
"=",
"None",
")",
":",
"if",
"allow_hashes",
":",
"raise",
"ValueError",
"(",
"'allow_hashes not implemented for %s yet'",
"%",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"self",
".",
"thread_id",
"is",
"None",
":",
"self",
".",
"thread_id",
"=",
"self",
".",
"lookup_thread_id",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"thread_id",
")",
":",
"if",
"not",
"allow_create",
":",
"raise",
"KeyError",
"(",
"self",
".",
"topic",
")",
"with",
"open",
"(",
"self",
".",
"thread_id",
",",
"'a'",
",",
"newline",
"=",
"''",
")",
"as",
"fdesc",
":",
"csv",
".",
"writer",
"(",
"fdesc",
")",
".",
"writerow",
"(",
"self",
".",
"header",
")",
"with",
"open",
"(",
"self",
".",
"thread_id",
",",
"'a'",
",",
"newline",
"=",
"''",
")",
"as",
"fdesc",
":",
"writer",
"=",
"csv",
".",
"writer",
"(",
"fdesc",
")",
"writer",
".",
"writerow",
"(",
"[",
"self",
".",
"user",
",",
"datetime",
".",
"datetime",
".",
"utcnow",
"(",
")",
",",
"summary",
",",
"body",
",",
"''",
"]",
")"
] |
Implement as required by parent to store comment in CSV file.
|
[
"Implement",
"as",
"required",
"by",
"parent",
"to",
"store",
"comment",
"in",
"CSV",
"file",
"."
] |
a610761973b478ca0e864e970be05ce29d5994a5
|
https://github.com/emin63/eyap/blob/a610761973b478ca0e864e970be05ce29d5994a5/eyap/core/comments.py#L438-L456
|
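A hedged end-to-end sketch tying together the FileCommentThread methods above. The class constructor is not shown in these records, so the snippet only assumes an existing instance `thread` whose realm, topic and user attributes are set; the comment text is hypothetical.

# given a FileCommentThread instance `thread` whose realm directory exists
thread.create_thread('first comment')      # add_comment(..., allow_create=True): writes the CSV header, then the row
thread.add_comment('second comment')       # appends: user, UTC timestamp, summary (None here), body, ''
section = thread.get_comment_section()     # lookup_comments() re-reads the CSV into SingleComment objects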
240,352
|
wushuyi/wsy_captcha
|
wsy_captcha/bezier.py
|
pascal_row
|
def pascal_row(n):
""" Returns n-th row of Pascal's triangle
"""
result = [1]
x, numerator = 1, n
for denominator in range(1, n // 2 + 1):
x *= numerator
x /= denominator
result.append(x)
numerator -= 1
if n & 1 == 0:
result.extend(reversed(result[:-1]))
else:
result.extend(reversed(result))
return result
|
python
|
def pascal_row(n):
""" Returns n-th row of Pascal's triangle
"""
result = [1]
x, numerator = 1, n
for denominator in range(1, n // 2 + 1):
x *= numerator
x /= denominator
result.append(x)
numerator -= 1
if n & 1 == 0:
result.extend(reversed(result[:-1]))
else:
result.extend(reversed(result))
return result
|
[
"def",
"pascal_row",
"(",
"n",
")",
":",
"result",
"=",
"[",
"1",
"]",
"x",
",",
"numerator",
"=",
"1",
",",
"n",
"for",
"denominator",
"in",
"range",
"(",
"1",
",",
"n",
"//",
"2",
"+",
"1",
")",
":",
"x",
"*=",
"numerator",
"x",
"/=",
"denominator",
"result",
".",
"append",
"(",
"x",
")",
"numerator",
"-=",
"1",
"if",
"n",
"&",
"1",
"==",
"0",
":",
"result",
".",
"extend",
"(",
"reversed",
"(",
"result",
"[",
":",
"-",
"1",
"]",
")",
")",
"else",
":",
"result",
".",
"extend",
"(",
"reversed",
"(",
"result",
")",
")",
"return",
"result"
] |
Returns n-th row of Pascal's triangle
|
[
"Returns",
"n",
"-",
"th",
"row",
"of",
"Pascal",
"s",
"triangle"
] |
eeb8fd8b8d37c3550903a83339109e064da45d89
|
https://github.com/wushuyi/wsy_captcha/blob/eeb8fd8b8d37c3550903a83339109e064da45d89/wsy_captcha/bezier.py#L11-L25
|
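A quick usage check for pascal_row above; note that under Python 3 the in-place division (x /= denominator) makes the interior entries floats.

print(pascal_row(0))   # [1]
print(pascal_row(4))   # [1, 4.0, 6.0, 4.0, 1] under Python 3 true division; [1, 4, 6, 4, 1] under Python 2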
240,353
|
edelbluth/blackred
|
src/blackred/blackred.py
|
create_salt
|
def create_salt(length: int=128) -> bytes:
"""
Create a new salt
:param int length: How many bytes should the salt be long?
:return: The salt
:rtype: bytes
"""
return b''.join(bytes([SystemRandom().randint(0, 255)]) for _ in range(length))
|
python
|
def create_salt(length: int=128) -> bytes:
"""
Create a new salt
:param int length: How many bytes should the salt be long?
:return: The salt
:rtype: bytes
"""
return b''.join(bytes([SystemRandom().randint(0, 255)]) for _ in range(length))
|
[
"def",
"create_salt",
"(",
"length",
":",
"int",
"=",
"128",
")",
"->",
"bytes",
":",
"return",
"b''",
".",
"join",
"(",
"bytes",
"(",
"[",
"SystemRandom",
"(",
")",
".",
"randint",
"(",
"0",
",",
"255",
")",
"]",
")",
"for",
"_",
"in",
"range",
"(",
"length",
")",
")"
] |
Create a new salt
:param int length: How many bytes should the salt be long?
:return: The salt
:rtype: bytes
|
[
"Create",
"a",
"new",
"salt"
] |
57a655e4d4eca60ce16e7b338079355049a87b49
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L28-L36
|
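A small usage sketch for create_salt above; the SystemRandom import is assumed to come from the surrounding module, since only the function body is shown here.

salt = create_salt(32)
assert isinstance(salt, bytes) and len(salt) == 32   # one SystemRandom byte per position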
240,354
|
edelbluth/blackred
|
src/blackred/blackred.py
|
BlackRed.__get_connection
|
def __get_connection(self) -> redis.Redis:
"""
Get a Redis connection
:return: Redis connection instance
:rtype: redis.Redis
"""
if self.__redis_use_socket:
r = redis.from_url(
'unix://{:s}?db={:d}'.format(
self.__redis_host,
self.__redis_db
)
)
else:
r = redis.from_url(
'redis://{:s}:{:d}/{:d}'.format(
self.__redis_host,
self.__redis_port,
self.__redis_db
)
)
if BlackRed.Settings.REDIS_AUTH is not None:
r.execute_command('AUTH {:s}'.format(BlackRed.Settings.REDIS_AUTH))
return r
|
python
|
def __get_connection(self) -> redis.Redis:
"""
Get a Redis connection
:return: Redis connection instance
:rtype: redis.Redis
"""
if self.__redis_use_socket:
r = redis.from_url(
'unix://{:s}?db={:d}'.format(
self.__redis_host,
self.__redis_db
)
)
else:
r = redis.from_url(
'redis://{:s}:{:d}/{:d}'.format(
self.__redis_host,
self.__redis_port,
self.__redis_db
)
)
if BlackRed.Settings.REDIS_AUTH is not None:
r.execute_command('AUTH {:s}'.format(BlackRed.Settings.REDIS_AUTH))
return r
|
[
"def",
"__get_connection",
"(",
"self",
")",
"->",
"redis",
".",
"Redis",
":",
"if",
"self",
".",
"__redis_use_socket",
":",
"r",
"=",
"redis",
".",
"from_url",
"(",
"'unix://{:s}?db={:d}'",
".",
"format",
"(",
"self",
".",
"__redis_host",
",",
"self",
".",
"__redis_db",
")",
")",
"else",
":",
"r",
"=",
"redis",
".",
"from_url",
"(",
"'redis://{:s}:{:d}/{:d}'",
".",
"format",
"(",
"self",
".",
"__redis_host",
",",
"self",
".",
"__redis_port",
",",
"self",
".",
"__redis_db",
")",
")",
"if",
"BlackRed",
".",
"Settings",
".",
"REDIS_AUTH",
"is",
"not",
"None",
":",
"r",
".",
"execute_command",
"(",
"'AUTH {:s}'",
".",
"format",
"(",
"BlackRed",
".",
"Settings",
".",
"REDIS_AUTH",
")",
")",
"return",
"r"
] |
Get a Redis connection
:return: Redis connection instance
:rtype: redis.Redis
|
[
"Get",
"a",
"Redis",
"connection"
] |
57a655e4d4eca60ce16e7b338079355049a87b49
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L208-L234
|
240,355
|
edelbluth/blackred
|
src/blackred/blackred.py
|
BlackRed._encode_item
|
def _encode_item(self, item: str) -> str:
"""
If anonymization is on, an item gets salted and hashed here.
:param str item:
:return: Hashed item, if anonymization is on; the unmodified item otherwise
:rtype: str
"""
assert item is not None
if not self.__redis_conf['anonymization']:
return item
connection = self.__get_connection()
salt = connection.get(self.__redis_conf['salt_key'])
if salt is None:
salt = create_salt()
connection.set(self.__redis_conf['salt_key'], salt)
BlackRed.__release_connection(connection)
return sha512(salt + item.encode()).hexdigest()
|
python
|
def _encode_item(self, item: str) -> str:
"""
If anonymization is on, an item gets salted and hashed here.
:param str item:
:return: Hashed item, if anonymization is on; the unmodified item otherwise
:rtype: str
"""
assert item is not None
if not self.__redis_conf['anonymization']:
return item
connection = self.__get_connection()
salt = connection.get(self.__redis_conf['salt_key'])
if salt is None:
salt = create_salt()
connection.set(self.__redis_conf['salt_key'], salt)
BlackRed.__release_connection(connection)
return sha512(salt + item.encode()).hexdigest()
|
[
"def",
"_encode_item",
"(",
"self",
",",
"item",
":",
"str",
")",
"->",
"str",
":",
"assert",
"item",
"is",
"not",
"None",
"if",
"not",
"self",
".",
"__redis_conf",
"[",
"'anonymization'",
"]",
":",
"return",
"item",
"connection",
"=",
"self",
".",
"__get_connection",
"(",
")",
"salt",
"=",
"connection",
".",
"get",
"(",
"self",
".",
"__redis_conf",
"[",
"'salt_key'",
"]",
")",
"if",
"salt",
"is",
"None",
":",
"salt",
"=",
"create_salt",
"(",
")",
"connection",
".",
"set",
"(",
"self",
".",
"__redis_conf",
"[",
"'salt_key'",
"]",
",",
"salt",
")",
"BlackRed",
".",
"__release_connection",
"(",
"connection",
")",
"return",
"sha512",
"(",
"salt",
"+",
"item",
".",
"encode",
"(",
")",
")",
".",
"hexdigest",
"(",
")"
] |
If anonymization is on, an item gets salted and hashed here.
:param str item:
:return: Hashed item, if anonymization is on; the unmodified item otherwise
:rtype: str
|
[
"If",
"anonymization",
"is",
"on",
"an",
"item",
"gets",
"salted",
"and",
"hashed",
"here",
"."
] |
57a655e4d4eca60ce16e7b338079355049a87b49
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L245-L262
|
240,356
|
edelbluth/blackred
|
src/blackred/blackred.py
|
BlackRed.__get_ttl
|
def __get_ttl(self, item: str) -> int:
"""
Get the amount of time a specific item will remain in the database.
:param str item: The item to get the TTL for
:return: Time in seconds. Returns None for a non-existing element.
:rtype: int
"""
connection = self.__get_connection()
ttl = connection.ttl(item)
BlackRed.__release_connection(connection)
return ttl
|
python
|
def __get_ttl(self, item: str) -> int:
"""
Get the amount of time a specific item will remain in the database.
:param str item: The item to get the TTL for
:return: Time in seconds. Returns None for a non-existing element.
:rtype: int
"""
connection = self.__get_connection()
ttl = connection.ttl(item)
BlackRed.__release_connection(connection)
return ttl
|
[
"def",
"__get_ttl",
"(",
"self",
",",
"item",
":",
"str",
")",
"->",
"int",
":",
"connection",
"=",
"self",
".",
"__get_connection",
"(",
")",
"ttl",
"=",
"connection",
".",
"ttl",
"(",
"item",
")",
"BlackRed",
".",
"__release_connection",
"(",
"connection",
")",
"return",
"ttl"
] |
Get the amount of time a specific item will remain in the database.
:param str item: The item to get the TTL for
:return: Time in seconds. Returns None for a non-existing element.
:rtype: int
|
[
"Get",
"the",
"amount",
"of",
"time",
"a",
"specific",
"item",
"will",
"remain",
"in",
"the",
"database",
"."
] |
57a655e4d4eca60ce16e7b338079355049a87b49
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L264-L275
|
240,357
|
edelbluth/blackred
|
src/blackred/blackred.py
|
BlackRed.get_watchlist_ttl
|
def get_watchlist_ttl(self, item: str) -> int:
"""
Get the amount of time a specific item will remain on the watchlist.
:param str item: The item to get the TTL for on the watchlist
:return: Time in seconds. Returns None for a non-existing element
:rtype: int
"""
assert item is not None
item = self._encode_item(item)
return self.__get_ttl(self.__redis_conf['watchlist_template'].format(item))
|
python
|
def get_watchlist_ttl(self, item: str) -> int:
"""
Get the amount of time a specific item will remain on the watchlist.
:param str item: The item to get the TTL for on the watchlist
:return: Time in seconds. Returns None for a non-existing element
:rtype: int
"""
assert item is not None
item = self._encode_item(item)
return self.__get_ttl(self.__redis_conf['watchlist_template'].format(item))
|
[
"def",
"get_watchlist_ttl",
"(",
"self",
",",
"item",
":",
"str",
")",
"->",
"int",
":",
"assert",
"item",
"is",
"not",
"None",
"item",
"=",
"self",
".",
"_encode_item",
"(",
"item",
")",
"return",
"self",
".",
"__get_ttl",
"(",
"self",
".",
"__redis_conf",
"[",
"'watchlist_template'",
"]",
".",
"format",
"(",
"item",
")",
")"
] |
Get the amount of time a specific item will remain on the watchlist.
:param str item: The item to get the TTL for on the watchlist
:return: Time in seconds. Returns None for a non-existing element
:rtype: int
|
[
"Get",
"the",
"amount",
"of",
"time",
"a",
"specific",
"item",
"will",
"remain",
"on",
"the",
"watchlist",
"."
] |
57a655e4d4eca60ce16e7b338079355049a87b49
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L289-L299
|
240,358
|
edelbluth/blackred
|
src/blackred/blackred.py
|
BlackRed.is_not_blocked
|
def is_not_blocked(self, item: str) -> bool:
"""
Check if an item is _not_ already on the blacklist
:param str item: The item to check
:return: True, when the item is _not_ on the blacklist
:rtype: bool
"""
assert item is not None
item = self._encode_item(item)
connection = self.__get_connection()
key = self.__redis_conf['blacklist_template'].format(item)
value = connection.get(key)
if value is None:
BlackRed.__release_connection(connection)
return True
if self.__redis_conf['blacklist_refresh_ttl']:
connection.expire(key, self.__redis_conf['blacklist_ttl'])
BlackRed.__release_connection(connection)
return False
|
python
|
def is_not_blocked(self, item: str) -> bool:
"""
Check if an item is _not_ already on the blacklist
:param str item: The item to check
:return: True, when the item is _not_ on the blacklist
:rtype: bool
"""
assert item is not None
item = self._encode_item(item)
connection = self.__get_connection()
key = self.__redis_conf['blacklist_template'].format(item)
value = connection.get(key)
if value is None:
BlackRed.__release_connection(connection)
return True
if self.__redis_conf['blacklist_refresh_ttl']:
connection.expire(key, self.__redis_conf['blacklist_ttl'])
BlackRed.__release_connection(connection)
return False
|
[
"def",
"is_not_blocked",
"(",
"self",
",",
"item",
":",
"str",
")",
"->",
"bool",
":",
"assert",
"item",
"is",
"not",
"None",
"item",
"=",
"self",
".",
"_encode_item",
"(",
"item",
")",
"connection",
"=",
"self",
".",
"__get_connection",
"(",
")",
"key",
"=",
"self",
".",
"__redis_conf",
"[",
"'blacklist_template'",
"]",
".",
"format",
"(",
"item",
")",
"value",
"=",
"connection",
".",
"get",
"(",
"key",
")",
"if",
"value",
"is",
"None",
":",
"BlackRed",
".",
"__release_connection",
"(",
"connection",
")",
"return",
"True",
"if",
"self",
".",
"__redis_conf",
"[",
"'blacklist_refresh_ttl'",
"]",
":",
"connection",
".",
"expire",
"(",
"key",
",",
"self",
".",
"__redis_conf",
"[",
"'blacklist_ttl'",
"]",
")",
"BlackRed",
".",
"__release_connection",
"(",
"connection",
")",
"return",
"False"
] |
Check if an item is _not_ already on the blacklist
:param str item: The item to check
:return: True, when the item is _not_ on the blacklist
:rtype: bool
|
[
"Check",
"if",
"an",
"item",
"is",
"_not_",
"already",
"on",
"the",
"blacklist"
] |
57a655e4d4eca60ce16e7b338079355049a87b49
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L301-L320
|
240,359
|
edelbluth/blackred
|
src/blackred/blackred.py
|
BlackRed.log_fail
|
def log_fail(self, item: str) -> None:
"""
Log a failed action for an item. If the fail count for this item reaches the threshold, the item is moved to the
blacklist.
:param str item: The item to log
"""
assert item is not None
item = self._encode_item(item)
if self.is_blocked(item):
return
connection = self.__get_connection()
key = self.__redis_conf['watchlist_template'].format(item)
value = connection.get(key)
if value is None:
connection.set(key, 1, ex=self.__redis_conf['watchlist_ttl'])
BlackRed.__release_connection(connection)
return
value = int(value) + 1
if value < self.__redis_conf['watchlist_to_blacklist']:
connection.set(key, value, ex=self.__redis_conf['watchlist_ttl'])
BlackRed.__release_connection(connection)
return
blacklist_key = self.__redis_conf['blacklist_template'].format(item)
connection.set(blacklist_key, time.time(), ex=self.__redis_conf['blacklist_ttl'])
connection.delete(key)
BlackRed.__release_connection(connection)
|
python
|
def log_fail(self, item: str) -> None:
"""
Log a failed action for an item. If the fail count for this item reaches the threshold, the item is moved to the
blacklist.
:param str item: The item to log
"""
assert item is not None
item = self._encode_item(item)
if self.is_blocked(item):
return
connection = self.__get_connection()
key = self.__redis_conf['watchlist_template'].format(item)
value = connection.get(key)
if value is None:
connection.set(key, 1, ex=self.__redis_conf['watchlist_ttl'])
BlackRed.__release_connection(connection)
return
value = int(value) + 1
if value < self.__redis_conf['watchlist_to_blacklist']:
connection.set(key, value, ex=self.__redis_conf['watchlist_ttl'])
BlackRed.__release_connection(connection)
return
blacklist_key = self.__redis_conf['blacklist_template'].format(item)
connection.set(blacklist_key, time.time(), ex=self.__redis_conf['blacklist_ttl'])
connection.delete(key)
BlackRed.__release_connection(connection)
|
[
"def",
"log_fail",
"(",
"self",
",",
"item",
":",
"str",
")",
"->",
"None",
":",
"assert",
"item",
"is",
"not",
"None",
"item",
"=",
"self",
".",
"_encode_item",
"(",
"item",
")",
"if",
"self",
".",
"is_blocked",
"(",
"item",
")",
":",
"return",
"connection",
"=",
"self",
".",
"__get_connection",
"(",
")",
"key",
"=",
"self",
".",
"__redis_conf",
"[",
"'watchlist_template'",
"]",
".",
"format",
"(",
"item",
")",
"value",
"=",
"connection",
".",
"get",
"(",
"key",
")",
"if",
"value",
"is",
"None",
":",
"connection",
".",
"set",
"(",
"key",
",",
"1",
",",
"ex",
"=",
"self",
".",
"__redis_conf",
"[",
"'watchlist_ttl'",
"]",
")",
"BlackRed",
".",
"__release_connection",
"(",
"connection",
")",
"return",
"value",
"=",
"int",
"(",
"value",
")",
"+",
"1",
"if",
"value",
"<",
"self",
".",
"__redis_conf",
"[",
"'watchlist_to_blacklist'",
"]",
":",
"connection",
".",
"set",
"(",
"key",
",",
"value",
",",
"ex",
"=",
"self",
".",
"__redis_conf",
"[",
"'watchlist_ttl'",
"]",
")",
"BlackRed",
".",
"__release_connection",
"(",
"connection",
")",
"return",
"blacklist_key",
"=",
"self",
".",
"__redis_conf",
"[",
"'blacklist_template'",
"]",
".",
"format",
"(",
"item",
")",
"connection",
".",
"set",
"(",
"blacklist_key",
",",
"time",
".",
"time",
"(",
")",
",",
"ex",
"=",
"self",
".",
"__redis_conf",
"[",
"'blacklist_ttl'",
"]",
")",
"connection",
".",
"delete",
"(",
"key",
")",
"BlackRed",
".",
"__release_connection",
"(",
"connection",
")"
] |
Log a failed action for an item. If the fail count for this item reaches the threshold, the item is moved to the
blacklist.
:param str item: The item to log
|
[
"Log",
"a",
"failed",
"action",
"for",
"an",
"item",
".",
"If",
"the",
"fail",
"count",
"for",
"this",
"item",
"reaches",
"the",
"threshold",
"the",
"item",
"is",
"moved",
"to",
"the",
"blacklist",
"."
] |
57a655e4d4eca60ce16e7b338079355049a87b49
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L332-L358
|
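A hedged sketch of how the BlackRed watch/block cycle above fits together. The BlackRed constructor and its settings are not shown in these records, so the default construction and the client address are assumptions.

br = BlackRed()                        # assumed default construction; configuration is not shown here
item = '203.0.113.7'                   # hypothetical client address
if br.is_not_blocked(item):
    # ... an authentication attempt fails ...
    br.log_fail(item)                  # counts the failure; at the threshold the item moves to the blacklist
    print(br.get_watchlist_ttl(item))  # per the docstring: seconds remaining, None for a non-existing element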
240,360
|
kblin/aio-standalone
|
aiostandalone/app.py
|
StandaloneApplication.start_task
|
def start_task(self, func):
"""Start up a task"""
task = self.loop.create_task(func(self))
self._started_tasks.append(task)
def done_callback(done_task):
self._started_tasks.remove(done_task)
task.add_done_callback(done_callback)
return task
|
python
|
def start_task(self, func):
"""Start up a task"""
task = self.loop.create_task(func(self))
self._started_tasks.append(task)
def done_callback(done_task):
self._started_tasks.remove(done_task)
task.add_done_callback(done_callback)
return task
|
[
"def",
"start_task",
"(",
"self",
",",
"func",
")",
":",
"task",
"=",
"self",
".",
"loop",
".",
"create_task",
"(",
"func",
"(",
"self",
")",
")",
"self",
".",
"_started_tasks",
".",
"append",
"(",
"task",
")",
"def",
"done_callback",
"(",
"done_task",
")",
":",
"self",
".",
"_started_tasks",
".",
"remove",
"(",
"done_task",
")",
"task",
".",
"add_done_callback",
"(",
"done_callback",
")",
"return",
"task"
] |
Start up a task
|
[
"Start",
"up",
"a",
"task"
] |
21f7212ee23e7c2dff679fbf3e9c8d9acf77b568
|
https://github.com/kblin/aio-standalone/blob/21f7212ee23e7c2dff679fbf3e9c8d9acf77b568/aiostandalone/app.py#L77-L86
|
240,361
|
kblin/aio-standalone
|
aiostandalone/app.py
|
StandaloneApplication.run
|
def run(self, loop=None):
"""Actually run the application
:param loop: Custom event loop or None for default
"""
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
loop.run_until_complete(self.startup())
for func in self.tasks:
self.start_task(func)
try:
task = self.start_task(self.main_task)
loop.run_until_complete(task)
except (KeyboardInterrupt, SystemError):
print("Attempting graceful shutdown, press Ctrl-C again to exit", flush=True)
def shutdown_exception_handler(_loop, context):
if "exception" not in context or not isinstance(context["exception"], asyncio.CancelledError):
_loop.default_exception_handler(context)
loop.set_exception_handler(shutdown_exception_handler)
tasks = asyncio.gather(*self._started_tasks, loop=loop, return_exceptions=True)
tasks.add_done_callback(lambda _: loop.stop())
tasks.cancel()
while not tasks.done() and not loop.is_closed():
loop.run_forever()
finally:
loop.run_until_complete(self.shutdown())
loop.run_until_complete(self.cleanup())
loop.close()
|
python
|
def run(self, loop=None):
"""Actually run the application
:param loop: Custom event loop or None for default
"""
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
loop.run_until_complete(self.startup())
for func in self.tasks:
self.start_task(func)
try:
task = self.start_task(self.main_task)
loop.run_until_complete(task)
except (KeyboardInterrupt, SystemError):
print("Attempting graceful shutdown, press Ctrl-C again to exit", flush=True)
def shutdown_exception_handler(_loop, context):
if "exception" not in context or not isinstance(context["exception"], asyncio.CancelledError):
_loop.default_exception_handler(context)
loop.set_exception_handler(shutdown_exception_handler)
tasks = asyncio.gather(*self._started_tasks, loop=loop, return_exceptions=True)
tasks.add_done_callback(lambda _: loop.stop())
tasks.cancel()
while not tasks.done() and not loop.is_closed():
loop.run_forever()
finally:
loop.run_until_complete(self.shutdown())
loop.run_until_complete(self.cleanup())
loop.close()
|
[
"def",
"run",
"(",
"self",
",",
"loop",
"=",
"None",
")",
":",
"if",
"loop",
"is",
"None",
":",
"loop",
"=",
"asyncio",
".",
"get_event_loop",
"(",
")",
"self",
".",
"loop",
"=",
"loop",
"loop",
".",
"run_until_complete",
"(",
"self",
".",
"startup",
"(",
")",
")",
"for",
"func",
"in",
"self",
".",
"tasks",
":",
"self",
".",
"start_task",
"(",
"func",
")",
"try",
":",
"task",
"=",
"self",
".",
"start_task",
"(",
"self",
".",
"main_task",
")",
"loop",
".",
"run_until_complete",
"(",
"task",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemError",
")",
":",
"print",
"(",
"\"Attempting graceful shutdown, press Ctrl-C again to exit\"",
",",
"flush",
"=",
"True",
")",
"def",
"shutdown_exception_handler",
"(",
"_loop",
",",
"context",
")",
":",
"if",
"\"exception\"",
"not",
"in",
"context",
"or",
"not",
"isinstance",
"(",
"context",
"[",
"\"exception\"",
"]",
",",
"asyncio",
".",
"CancelledError",
")",
":",
"_loop",
".",
"default_exception_handler",
"(",
"context",
")",
"loop",
".",
"set_exception_handler",
"(",
"shutdown_exception_handler",
")",
"tasks",
"=",
"asyncio",
".",
"gather",
"(",
"*",
"self",
".",
"_started_tasks",
",",
"loop",
"=",
"loop",
",",
"return_exceptions",
"=",
"True",
")",
"tasks",
".",
"add_done_callback",
"(",
"lambda",
"_",
":",
"loop",
".",
"stop",
"(",
")",
")",
"tasks",
".",
"cancel",
"(",
")",
"while",
"not",
"tasks",
".",
"done",
"(",
")",
"and",
"not",
"loop",
".",
"is_closed",
"(",
")",
":",
"loop",
".",
"run_forever",
"(",
")",
"finally",
":",
"loop",
".",
"run_until_complete",
"(",
"self",
".",
"shutdown",
"(",
")",
")",
"loop",
".",
"run_until_complete",
"(",
"self",
".",
"cleanup",
"(",
")",
")",
"loop",
".",
"close",
"(",
")"
] |
Actually run the application
:param loop: Custom event loop or None for default
|
[
"Actually",
"run",
"the",
"application"
] |
21f7212ee23e7c2dff679fbf3e9c8d9acf77b568
|
https://github.com/kblin/aio-standalone/blob/21f7212ee23e7c2dff679fbf3e9c8d9acf77b568/aiostandalone/app.py#L88-L123
|
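A minimal sketch of driving run() above. The StandaloneApplication constructor and the main_task attribute are only inferred from the method bodies shown here, so their exact setup is an assumption.

import asyncio

async def main(app):
    # start_task() passes the application instance into each coroutine function
    await asyncio.sleep(0.1)

app = StandaloneApplication()   # assumed construction; not shown in these records
app.main_task = main            # run() wraps self.main_task with start_task() and waits for it
app.run()                       # falls back to asyncio.get_event_loop() when no loop is passed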
240,362
|
jonathansick/paperweight
|
paperweight/gitio.py
|
read_git_blob
|
def read_git_blob(commit_ref, path, repo_dir='.'):
"""Get text from a git blob.
Parameters
----------
commit_ref : str
Any SHA or git tag that can resolve into a commit in the
git repository.
path : str
Path to the document in the git repository, relative to the root
of the repository.
repo_dir : str
Path from current working directory to the root of the git repository.
Returns
-------
text : unicode
The document text.
"""
repo = git.Repo(repo_dir)
tree = repo.tree(commit_ref)
dirname, fname = os.path.split(path)
text = None
if dirname == '':
text = _read_blob(tree, fname)
else:
components = path.split(os.sep)
text = _read_blob_in_tree(tree, components)
return text
|
python
|
def read_git_blob(commit_ref, path, repo_dir='.'):
"""Get text from a git blob.
Parameters
----------
commit_ref : str
Any SHA or git tag that can resolve into a commit in the
git repository.
path : str
Path to the document in the git repository, relative to the root
of the repository.
repo_dir : str
Path from current working directory to the root of the git repository.
Returns
-------
text : unicode
The document text.
"""
repo = git.Repo(repo_dir)
tree = repo.tree(commit_ref)
dirname, fname = os.path.split(path)
text = None
if dirname == '':
text = _read_blob(tree, fname)
else:
components = path.split(os.sep)
text = _read_blob_in_tree(tree, components)
return text
|
[
"def",
"read_git_blob",
"(",
"commit_ref",
",",
"path",
",",
"repo_dir",
"=",
"'.'",
")",
":",
"repo",
"=",
"git",
".",
"Repo",
"(",
"repo_dir",
")",
"tree",
"=",
"repo",
".",
"tree",
"(",
"commit_ref",
")",
"dirname",
",",
"fname",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"text",
"=",
"None",
"if",
"dirname",
"==",
"''",
":",
"text",
"=",
"_read_blob",
"(",
"tree",
",",
"fname",
")",
"else",
":",
"components",
"=",
"path",
".",
"split",
"(",
"os",
".",
"sep",
")",
"text",
"=",
"_read_blob_in_tree",
"(",
"tree",
",",
"components",
")",
"return",
"text"
] |
Get text from a git blob.
Parameters
----------
commit_ref : str
Any SHA or git tag that can resolve into a commit in the
git repository.
path : str
Path to the document in the git repository, relative to the root
of the repository.
repo_dir : str
Path from current working directory to the root of the git repository.
Returns
-------
text : unicode
The document text.
|
[
"Get",
"text",
"from",
"a",
"git",
"blob",
"."
] |
803535b939a56d375967cefecd5fdca81323041e
|
https://github.com/jonathansick/paperweight/blob/803535b939a56d375967cefecd5fdca81323041e/paperweight/gitio.py#L14-L42
|
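A usage sketch for read_git_blob above; the commit ref and file path are hypothetical, and the working directory is assumed to be inside a git repository (the function uses GitPython's git.Repo).

text = read_git_blob('HEAD', 'paper/ms.tex', repo_dir='.')   # hypothetical path inside the repository
print(text[:80])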
240,363
|
jonathansick/paperweight
|
paperweight/gitio.py
|
_read_blob_in_tree
|
def _read_blob_in_tree(tree, components):
"""Recursively open trees to ultimately read a blob"""
if len(components) == 1:
# Tree is direct parent of blob
return _read_blob(tree, components[0])
else:
# Still trees to open
dirname = components.pop(0)
for t in tree.traverse():
if t.name == dirname:
return _read_blob_in_tree(t, components)
|
python
|
def _read_blob_in_tree(tree, components):
"""Recursively open trees to ultimately read a blob"""
if len(components) == 1:
# Tree is direct parent of blob
return _read_blob(tree, components[0])
else:
# Still trees to open
dirname = components.pop(0)
for t in tree.traverse():
if t.name == dirname:
return _read_blob_in_tree(t, components)
|
[
"def",
"_read_blob_in_tree",
"(",
"tree",
",",
"components",
")",
":",
"if",
"len",
"(",
"components",
")",
"==",
"1",
":",
"# Tree is direct parent of blob",
"return",
"_read_blob",
"(",
"tree",
",",
"components",
"[",
"0",
"]",
")",
"else",
":",
"# Still trees to open",
"dirname",
"=",
"components",
".",
"pop",
"(",
"0",
")",
"for",
"t",
"in",
"tree",
".",
"traverse",
"(",
")",
":",
"if",
"t",
".",
"name",
"==",
"dirname",
":",
"return",
"_read_blob_in_tree",
"(",
"t",
",",
"components",
")"
] |
Recursively open trees to ultimately read a blob
|
[
"Recursively",
"open",
"trees",
"to",
"ultimately",
"read",
"a",
"blob"
] |
803535b939a56d375967cefecd5fdca81323041e
|
https://github.com/jonathansick/paperweight/blob/803535b939a56d375967cefecd5fdca81323041e/paperweight/gitio.py#L45-L55
|
240,364
|
jonathansick/paperweight
|
paperweight/gitio.py
|
absolute_git_root_dir
|
def absolute_git_root_dir(fpath=""):
"""Absolute path to the git root directory containing a given file or
directory.
"""
if len(fpath) == 0:
dirname_str = os.getcwd()
else:
dirname_str = os.path.dirname(fpath)
dirname_str = os.path.abspath(dirname_str)
dirnames = dirname_str.split(os.sep)
n = len(dirnames)
for i in xrange(n):
# is there a .git directory at this level?
# FIXME hack
basedir = "/" + os.path.join(*dirnames[0:n - i])
gitdir = os.path.join(basedir, ".git")
if os.path.exists(gitdir):
return basedir
|
python
|
def absolute_git_root_dir(fpath=""):
"""Absolute path to the git root directory containing a given file or
directory.
"""
if len(fpath) == 0:
dirname_str = os.getcwd()
else:
dirname_str = os.path.dirname(fpath)
dirname_str = os.path.abspath(dirname_str)
dirnames = dirname_str.split(os.sep)
n = len(dirnames)
for i in xrange(n):
# is there a .git directory at this level?
# FIXME hack
basedir = "/" + os.path.join(*dirnames[0:n - i])
gitdir = os.path.join(basedir, ".git")
if os.path.exists(gitdir):
return basedir
|
[
"def",
"absolute_git_root_dir",
"(",
"fpath",
"=",
"\"\"",
")",
":",
"if",
"len",
"(",
"fpath",
")",
"==",
"0",
":",
"dirname_str",
"=",
"os",
".",
"getcwd",
"(",
")",
"else",
":",
"dirname_str",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"fpath",
")",
"dirname_str",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"dirname_str",
")",
"dirnames",
"=",
"dirname_str",
".",
"split",
"(",
"os",
".",
"sep",
")",
"n",
"=",
"len",
"(",
"dirnames",
")",
"for",
"i",
"in",
"xrange",
"(",
"n",
")",
":",
"# is there a .git directory at this level?",
"# FIXME hack",
"basedir",
"=",
"\"/\"",
"+",
"os",
".",
"path",
".",
"join",
"(",
"*",
"dirnames",
"[",
"0",
":",
"n",
"-",
"i",
"]",
")",
"gitdir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"basedir",
",",
"\".git\"",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"gitdir",
")",
":",
"return",
"basedir"
] |
Absolute path to the git root directory containing a given file or
directory.
|
[
"Absolute",
"path",
"to",
"the",
"git",
"root",
"directory",
"containing",
"a",
"given",
"file",
"or",
"directory",
"."
] |
803535b939a56d375967cefecd5fdca81323041e
|
https://github.com/jonathansick/paperweight/blob/803535b939a56d375967cefecd5fdca81323041e/paperweight/gitio.py#L67-L84
|
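A usage sketch for absolute_git_root_dir above. Note the loop uses xrange, so as written the function targets Python 2; the call below only illustrates the intent.

root = absolute_git_root_dir(__file__)   # walks parent directories until one contains .git
print(root)                              # implicitly None if no .git directory is found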
240,365
|
DallasMorningNews/django-datafreezer
|
datafreezer/forms.py
|
DatasetUploadForm.clean
|
def clean(self):
"""Verifies that beginning date is before ending date."""
cleaned_data = super(DatasetUploadForm, self).clean()
date_begin = self.cleaned_data.get('date_begin')
date_end = self.cleaned_data.get('date_end')
if date_end < date_begin:
msg = u'End date should be after start date.'
self.add_error('date_begin', msg)
self.add_error('date_end', msg)
return cleaned_data
|
python
|
def clean(self):
"""Verifies that beginning date is before ending date."""
cleaned_data = super(DatasetUploadForm, self).clean()
date_begin = self.cleaned_data.get('date_begin')
date_end = self.cleaned_data.get('date_end')
if date_end < date_begin:
msg = u'End date should be after start date.'
self.add_error('date_begin', msg)
self.add_error('date_end', msg)
return cleaned_data
|
[
"def",
"clean",
"(",
"self",
")",
":",
"cleaned_data",
"=",
"super",
"(",
"DatasetUploadForm",
",",
"self",
")",
".",
"clean",
"(",
")",
"date_begin",
"=",
"self",
".",
"cleaned_data",
".",
"get",
"(",
"'date_begin'",
")",
"date_end",
"=",
"self",
".",
"cleaned_data",
".",
"get",
"(",
"'date_end'",
")",
"if",
"date_end",
"<",
"date_begin",
":",
"msg",
"=",
"u'End date should be after start date.'",
"self",
".",
"add_error",
"(",
"'date_begin'",
",",
"msg",
")",
"self",
".",
"add_error",
"(",
"'date_end'",
",",
"msg",
")",
"return",
"cleaned_data"
] |
Verifies that beginning date is before ending date.
|
[
"Verifies",
"that",
"beginning",
"date",
"is",
"before",
"ending",
"date",
"."
] |
982dcf2015c80a280f1a093e32977cb71d4ea7aa
|
https://github.com/DallasMorningNews/django-datafreezer/blob/982dcf2015c80a280f1a093e32977cb71d4ea7aa/datafreezer/forms.py#L131-L140
|
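A hedged sketch of how the clean() override above surfaces its error. The form's other fields are not shown in these records, so the example only inspects the two date fields; unshown required fields may add errors of their own.

form = DatasetUploadForm(data={'date_begin': '2020-02-01', 'date_end': '2020-01-01'})
form.is_valid()                        # triggers clean() with end date before begin date
print(form.errors.get('date_begin'))   # ['End date should be after start date.']
print(form.errors.get('date_end'))     # the same message is attached to both fields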
240,366
|
Bystroushaak/kwargs_obj
|
src/kwargs_obj/kwargs_obj.py
|
KwargsObj._kwargs_to_attributes
|
def _kwargs_to_attributes(self, kwargs):
"""
Put keys from `kwargs` to `self`, if the keys are already there.
"""
for key, val in kwargs.iteritems():
if key not in self.__dict__:
raise ValueError(
"Can't set %s parameter - it is not defined here!" % key
)
self.__dict__[key] = val
|
python
|
def _kwargs_to_attributes(self, kwargs):
"""
Put keys from `kwargs` to `self`, if the keys are already there.
"""
for key, val in kwargs.iteritems():
if key not in self.__dict__:
raise ValueError(
"Can't set %s parameter - it is not defined here!" % key
)
self.__dict__[key] = val
|
[
"def",
"_kwargs_to_attributes",
"(",
"self",
",",
"kwargs",
")",
":",
"for",
"key",
",",
"val",
"in",
"kwargs",
".",
"iteritems",
"(",
")",
":",
"if",
"key",
"not",
"in",
"self",
".",
"__dict__",
":",
"raise",
"ValueError",
"(",
"\"Can't set %s parameter - it is not defined here!\"",
"%",
"key",
")",
"self",
".",
"__dict__",
"[",
"key",
"]",
"=",
"val"
] |
Put keys from `kwargs` to `self`, if the keys are already there.
|
[
"Put",
"keys",
"from",
"kwargs",
"to",
"self",
"if",
"the",
"keys",
"are",
"already",
"there",
"."
] |
67b571706f1dcbdacf465a34cd6a42b58cbd6449
|
https://github.com/Bystroushaak/kwargs_obj/blob/67b571706f1dcbdacf465a34cd6a42b58cbd6449/src/kwargs_obj/kwargs_obj.py#L27-L37
|
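A small sketch of the mixin pattern _kwargs_to_attributes above supports; the Person subclass is hypothetical. The method calls dict.iteritems(), so it is Python 2 code (items() would be the Python 3 spelling).

class Person(KwargsObj):
    def __init__(self, **kwargs):
        self.name = None
        self.age = None
        self._kwargs_to_attributes(kwargs)   # only attributes already defined above may be set

Person(name='Ada', age=36)   # fine
Person(nick='Ada')           # raises ValueError: "Can't set nick parameter - it is not defined here!"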
240,367
|
baverman/supplement
|
supplement/remote.py
|
Environment.assist
|
def assist(self, project_path, source, position, filename):
"""Return completion match and list of completion proposals
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (completion match, sorted list of proposals)
"""
return self._call('assist', project_path, source, position, filename)
|
python
|
def assist(self, project_path, source, position, filename):
"""Return completion match and list of completion proposals
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (completion match, sorted list of proposals)
"""
return self._call('assist', project_path, source, position, filename)
|
[
"def",
"assist",
"(",
"self",
",",
"project_path",
",",
"source",
",",
"position",
",",
"filename",
")",
":",
"return",
"self",
".",
"_call",
"(",
"'assist'",
",",
"project_path",
",",
"source",
",",
"position",
",",
"filename",
")"
] |
Return completion match and list of completion proposals
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (completion match, sorted list of proposals)
|
[
"Return",
"completion",
"match",
"and",
"list",
"of",
"completion",
"proposals"
] |
955002fe5a5749c9f0d89002f0006ec4fcd35bc9
|
https://github.com/baverman/supplement/blob/955002fe5a5749c9f0d89002f0006ec4fcd35bc9/supplement/remote.py#L114-L123
|
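A usage sketch for assist() above; the Environment construction, project path and source snippet are assumptions, since only the remote-call wrapper is shown in these records.

env = Environment()                              # construction is not shown in these records
source = u"import os\nos.pa"
match, proposals = env.assist('/home/user/proj', source, len(source), '/home/user/proj/mod.py')
# e.g. match could be 'pa' and proposals a sorted list containing 'path'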
240,368
|
baverman/supplement
|
supplement/remote.py
|
Environment.get_location
|
def get_location(self, project_path, source, position, filename):
"""Return line number and file path where name under cursor is defined
If line is None the location wasn't found. If file path is None, the definition is located in
the same source.
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (lineno, file path)
"""
return self._call('get_location', project_path, source, position, filename)
|
python
|
def get_location(self, project_path, source, position, filename):
"""Return line number and file path where name under cursor is defined
If line is None the location wasn't found. If file path is None, the definition is located in
the same source.
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (lineno, file path)
"""
return self._call('get_location', project_path, source, position, filename)
|
[
"def",
"get_location",
"(",
"self",
",",
"project_path",
",",
"source",
",",
"position",
",",
"filename",
")",
":",
"return",
"self",
".",
"_call",
"(",
"'get_location'",
",",
"project_path",
",",
"source",
",",
"position",
",",
"filename",
")"
] |
Return line number and file path where name under cursor is defined
If line is None the location wasn't found. If file path is None, the definition is located in
the same source.
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (lineno, file path)
|
[
"Return",
"line",
"number",
"and",
"file",
"path",
"where",
"name",
"under",
"cursor",
"is",
"defined"
] |
955002fe5a5749c9f0d89002f0006ec4fcd35bc9
|
https://github.com/baverman/supplement/blob/955002fe5a5749c9f0d89002f0006ec4fcd35bc9/supplement/remote.py#L125-L137
|
240,369
|
baverman/supplement
|
supplement/remote.py
|
Environment.get_docstring
|
def get_docstring(self, project_path, source, position, filename):
"""Return signature and docstring for current cursor call context
Some examples of call context::
func(|
func(arg|
func(arg,|
func(arg, func2(| # call context is func2
Signature and docstring can be None
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (signature, docstring)
"""
return self._call('get_docstring', project_path, source, position, filename)
|
python
|
def get_docstring(self, project_path, source, position, filename):
"""Return signature and docstring for current cursor call context
Some examples of call context::
func(|
func(arg|
func(arg,|
func(arg, func2(| # call context is func2
Signature and docstring can be None
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (signature, docstring)
"""
return self._call('get_docstring', project_path, source, position, filename)
|
[
"def",
"get_docstring",
"(",
"self",
",",
"project_path",
",",
"source",
",",
"position",
",",
"filename",
")",
":",
"return",
"self",
".",
"_call",
"(",
"'get_docstring'",
",",
"project_path",
",",
"source",
",",
"position",
",",
"filename",
")"
] |
Return signature and docstring for current cursor call context
Some examples of call context::
func(|
func(arg|
func(arg,|
func(arg, func2(| # call context is func2
Signature and docstring can be None
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:returns: tuple (signature, docstring)
|
[
"Return",
"signature",
"and",
"docstring",
"for",
"current",
"cursor",
"call",
"context"
] |
955002fe5a5749c9f0d89002f0006ec4fcd35bc9
|
https://github.com/baverman/supplement/blob/955002fe5a5749c9f0d89002f0006ec4fcd35bc9/supplement/remote.py#L139-L158
|
240,370
|
baverman/supplement
|
supplement/remote.py
|
Environment.get_scope
|
def get_scope(self, project_path, source, lineno, filename, continous=True):
"""
Return scope name at cursor position
For example::
class Foo:
def foo(self):
pass
|
def bar(self):
pass
get_scope return Foo.foo if continuous is True and Foo otherwise.
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:param continous: allow parent scope between children if False
"""
return self._call('get_scope', project_path, source, lineno, filename, continous=continous)
|
python
|
def get_scope(self, project_path, source, lineno, filename, continous=True):
"""
Return scope name at cursor position
For example::
class Foo:
def foo(self):
pass
|
def bar(self):
pass
get_scope return Foo.foo if continuous is True and Foo otherwise.
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:param continous: allow parent scope between children if False
"""
return self._call('get_scope', project_path, source, lineno, filename, continous=continous)
|
[
"def",
"get_scope",
"(",
"self",
",",
"project_path",
",",
"source",
",",
"lineno",
",",
"filename",
",",
"continous",
"=",
"True",
")",
":",
"return",
"self",
".",
"_call",
"(",
"'get_scope'",
",",
"project_path",
",",
"source",
",",
"lineno",
",",
"filename",
",",
"continous",
"=",
"continous",
")"
] |
Return scope name at cursor position
For example::
class Foo:
def foo(self):
pass
|
def bar(self):
pass
get_scope return Foo.foo if continuous is True and Foo otherwise.
:param project_path: absolute project path
:param source: unicode or byte string code source
:param position: character or byte cursor position
:param filename: absolute path of file with source code
:param continous: allow parent scope between children if False
|
[
"Return",
"scope",
"name",
"at",
"cursor",
"position"
] |
955002fe5a5749c9f0d89002f0006ec4fcd35bc9
|
https://github.com/baverman/supplement/blob/955002fe5a5749c9f0d89002f0006ec4fcd35bc9/supplement/remote.py#L168-L189
|
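Continuing the hypothetical Environment instance from the assist() sketch above, a call mirroring the docstring's own example; the line number and paths are assumptions.

source = ("class Foo:\n"
          "    def foo(self):\n"
          "        pass\n"
          "\n"
          "    def bar(self):\n"
          "        pass\n")
scope = env.get_scope('/home/user/proj', source, 4, '/home/user/proj/mod.py')
# per the docstring: 'Foo.foo' when continous=True (the default), 'Foo' otherwise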
240,371
|
andreycizov/python-xrpc
|
xrpc/runtime.py
|
_masquerade
|
def _masquerade(origin: str, orig: ServiceDefn, new: ServiceDefn, **map: str) -> str:
"""build an origin URL such that the orig has all of the mappings to new defined by map"""
origin: ParseResult = urlparse(origin)
prev_maps = {}
if origin.query:
prev_maps = {k: v for k, v in parse_qsl(origin.query)}
r_args = {}
for new_k, orig_k in map.items():
assert new_k in new.rpcs, [new_k, new.rpcs]
assert orig_k in orig.rpcs, [orig_k, orig.rpcs]
# todo: check if the definitions are the same
new_v = new.rpcs[new_k]
orig_v = orig.rpcs[orig_k]
if orig_k in prev_maps:
orig_k = prev_maps[orig_k]
assert new_v.res == orig_v.res, [new_v.res, orig_v.res]
assert new_v.req == orig_v.req, [new_v.req, orig_v.req]
r_args[new_k] = orig_k
return urlunparse(origin._replace(query=urlencode(r_args)))
|
python
|
def _masquerade(origin: str, orig: ServiceDefn, new: ServiceDefn, **map: str) -> str:
"""build an origin URL such that the orig has all of the mappings to new defined by map"""
origin: ParseResult = urlparse(origin)
prev_maps = {}
if origin.query:
prev_maps = {k: v for k, v in parse_qsl(origin.query)}
r_args = {}
for new_k, orig_k in map.items():
assert new_k in new.rpcs, [new_k, new.rpcs]
assert orig_k in orig.rpcs, [orig_k, orig.rpcs]
# todo: check if the definitions are the same
new_v = new.rpcs[new_k]
orig_v = orig.rpcs[orig_k]
if orig_k in prev_maps:
orig_k = prev_maps[orig_k]
assert new_v.res == orig_v.res, [new_v.res, orig_v.res]
assert new_v.req == orig_v.req, [new_v.req, orig_v.req]
r_args[new_k] = orig_k
return urlunparse(origin._replace(query=urlencode(r_args)))
|
[
"def",
"_masquerade",
"(",
"origin",
":",
"str",
",",
"orig",
":",
"ServiceDefn",
",",
"new",
":",
"ServiceDefn",
",",
"*",
"*",
"map",
":",
"str",
")",
"->",
"str",
":",
"origin",
":",
"ParseResult",
"=",
"urlparse",
"(",
"origin",
")",
"prev_maps",
"=",
"{",
"}",
"if",
"origin",
".",
"query",
":",
"prev_maps",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"parse_qsl",
"(",
"origin",
".",
"query",
")",
"}",
"r_args",
"=",
"{",
"}",
"for",
"new_k",
",",
"orig_k",
"in",
"map",
".",
"items",
"(",
")",
":",
"assert",
"new_k",
"in",
"new",
".",
"rpcs",
",",
"[",
"new_k",
",",
"new",
".",
"rpcs",
"]",
"assert",
"orig_k",
"in",
"orig",
".",
"rpcs",
",",
"[",
"orig_k",
",",
"orig",
".",
"rpcs",
"]",
"# todo: check if the definitions are the same",
"new_v",
"=",
"new",
".",
"rpcs",
"[",
"new_k",
"]",
"orig_v",
"=",
"orig",
".",
"rpcs",
"[",
"orig_k",
"]",
"if",
"orig_k",
"in",
"prev_maps",
":",
"orig_k",
"=",
"prev_maps",
"[",
"orig_k",
"]",
"assert",
"new_v",
".",
"res",
"==",
"orig_v",
".",
"res",
",",
"[",
"new_v",
".",
"res",
",",
"orig_v",
".",
"res",
"]",
"assert",
"new_v",
".",
"req",
"==",
"orig_v",
".",
"req",
",",
"[",
"new_v",
".",
"req",
",",
"orig_v",
".",
"req",
"]",
"r_args",
"[",
"new_k",
"]",
"=",
"orig_k",
"return",
"urlunparse",
"(",
"origin",
".",
"_replace",
"(",
"query",
"=",
"urlencode",
"(",
"r_args",
")",
")",
")"
] |
build an origin URL such that the orig has all of the mappings to new defined by map
|
[
"build",
"an",
"origin",
"URL",
"such",
"that",
"the",
"orig",
"has",
"all",
"of",
"the",
"mappings",
"to",
"new",
"defined",
"by",
"map"
] |
4f916383cda7de3272962f3ba07a64f7ec451098
|
https://github.com/andreycizov/python-xrpc/blob/4f916383cda7de3272962f3ba07a64f7ec451098/xrpc/runtime.py#L134-L164
|
240,372
|
andreycizov/python-xrpc
|
xrpc/runtime.py
|
masquerade
|
def masquerade(origin: str, orig: Type[TA], new: Type[TB], **map: str) -> str:
"""Make ``orig`` appear as new"""
return _masquerade(origin, cache_get(orig), cache_get(new), **map)
|
python
|
def masquerade(origin: str, orig: Type[TA], new: Type[TB], **map: str) -> str:
"""Make ``orig`` appear as new"""
return _masquerade(origin, cache_get(orig), cache_get(new), **map)
|
[
"def",
"masquerade",
"(",
"origin",
":",
"str",
",",
"orig",
":",
"Type",
"[",
"TA",
"]",
",",
"new",
":",
"Type",
"[",
"TB",
"]",
",",
"*",
"*",
"map",
":",
"str",
")",
"->",
"str",
":",
"return",
"_masquerade",
"(",
"origin",
",",
"cache_get",
"(",
"orig",
")",
",",
"cache_get",
"(",
"new",
")",
",",
"*",
"*",
"map",
")"
] |
Make ``orig`` appear as new
|
[
"Make",
"orig",
"appear",
"as",
"new"
] |
4f916383cda7de3272962f3ba07a64f7ec451098
|
https://github.com/andreycizov/python-xrpc/blob/4f916383cda7de3272962f3ba07a64f7ec451098/xrpc/runtime.py#L167-L170
|
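The mapping that masquerade and _masquerade build is carried entirely in the origin URL's query string. A minimal, self-contained sketch of that round trip (the names "ping"/"status" and the localhost URL are invented for illustration; this is not the xrpc API itself):

# Illustrative only: show how a name mapping survives a trip through the query string.
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse

origin = "http://127.0.0.1:8000/"                      # placeholder origin
parts = urlparse(origin)
mapped = urlunparse(parts._replace(query=urlencode({"ping": "status"})))
print(mapped)                                          # http://127.0.0.1:8000/?ping=status
print(dict(parse_qsl(urlparse(mapped).query)))         # {'ping': 'status'}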
240,373
|
SOBotics/pyRedunda
|
pyRedunda/Redunda.py
|
Redunda.sendStatusPing
|
def sendStatusPing(self):
"""
Sends a status ping to Redunda with the instance key specified while constructing the object.
"""
data = parse.urlencode({"key": self.key, "version": self.version}).encode()
req = request.Request("https://redunda.sobotics.org/status.json", data)
response = request.urlopen(req)
jsonReturned = json.loads(response.read().decode("utf-8"))
self.location = jsonReturned["location"]
self.shouldStandby = jsonReturned["should_standby"]
self.eventCount = jsonReturned["event_count"]
|
python
|
def sendStatusPing(self):
"""
Sends a status ping to Redunda with the instance key specified while constructing the object.
"""
data = parse.urlencode({"key": self.key, "version": self.version}).encode()
req = request.Request("https://redunda.sobotics.org/status.json", data)
response = request.urlopen(req)
jsonReturned = json.loads(response.read().decode("utf-8"))
self.location = jsonReturned["location"]
self.shouldStandby = jsonReturned["should_standby"]
self.eventCount = jsonReturned["event_count"]
|
[
"def",
"sendStatusPing",
"(",
"self",
")",
":",
"data",
"=",
"parse",
".",
"urlencode",
"(",
"{",
"\"key\"",
":",
"self",
".",
"key",
",",
"\"version\"",
":",
"self",
".",
"version",
"}",
")",
".",
"encode",
"(",
")",
"req",
"=",
"request",
".",
"Request",
"(",
"\"https://redunda.sobotics.org/status.json\"",
",",
"data",
")",
"response",
"=",
"request",
".",
"urlopen",
"(",
"req",
")",
"jsonReturned",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"read",
"(",
")",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
"self",
".",
"location",
"=",
"jsonReturned",
"[",
"\"location\"",
"]",
"self",
".",
"shouldStandby",
"=",
"jsonReturned",
"[",
"\"should_standby\"",
"]",
"self",
".",
"eventCount",
"=",
"jsonReturned",
"[",
"\"event_count\"",
"]"
] |
Sends a status ping to Redunda with the instance key specified while constructing the object.
|
[
"Sends",
"a",
"status",
"ping",
"to",
"Redunda",
"with",
"the",
"instance",
"key",
"specified",
"while",
"constructing",
"the",
"object",
"."
] |
4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab
|
https://github.com/SOBotics/pyRedunda/blob/4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab/pyRedunda/Redunda.py#L36-L49
|
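A standalone sketch of the same status ping outside the class, mirroring the request made by sendStatusPing; the key and version values below are placeholders, not real credentials:

import json
from urllib import parse, request

# Same endpoint and payload as sendStatusPing above.
data = parse.urlencode({"key": "YOUR_INSTANCE_KEY", "version": "0.0.1"}).encode()
reply = request.urlopen(request.Request("https://redunda.sobotics.org/status.json", data))
print(json.loads(reply.read().decode("utf-8"))["should_standby"])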
240,374
|
SOBotics/pyRedunda
|
pyRedunda/Redunda.py
|
Redunda.uploadFile
|
def uploadFile(self, filename, ispickle=False, athome=False):
"""
Uploads a single file to Redunda.
:param str filename: The name of the file to upload
    :param bool ispickle: Optional variable to be set to True if the file is a pickle; default is False.
:returns: returns nothing
"""
print("Uploading file {} to Redunda.".format(filename))
_, tail = os.path.split(filename)
url = "https://redunda.sobotics.org/bots/data/{}?key={}".format(tail, self.key)
#Set the content type to 'application/octet-stream'
header = {"Content-type": "application/octet-stream"}
filedata = ""
if athome:
filename = str(os.path.expanduser("~")) + filename
#Read the data from a file to a string.
if filename.endswith(".pickle") or ispickle:
try:
with open(filename, "rb") as fileToRead:
data = pickle.load(fileToRead)
except pickle.PickleError as perr:
print("Pickling error occurred: {}".format(perr))
return
filedata = json.dumps(data)
else:
try:
with open(filename, "r") as fileToRead:
filedata = fileToRead.read()
except IOError as ioerr:
print("IOError occurred: {}".format(ioerr))
return
requestToMake = request.Request(url, data=filedata.encode("utf-8"), headers=header)
#Make the request.
response = request.urlopen(requestToMake)
if response.code >= 400:
print("Error occurred while uploading file '{}' with error code {}.".format(filename,response.code))
|
python
|
def uploadFile(self, filename, ispickle=False, athome=False):
"""
Uploads a single file to Redunda.
:param str filename: The name of the file to upload
    :param bool ispickle: Optional variable to be set to True if the file is a pickle; default is False.
:returns: returns nothing
"""
print("Uploading file {} to Redunda.".format(filename))
_, tail = os.path.split(filename)
url = "https://redunda.sobotics.org/bots/data/{}?key={}".format(tail, self.key)
#Set the content type to 'application/octet-stream'
header = {"Content-type": "application/octet-stream"}
filedata = ""
if athome:
filename = str(os.path.expanduser("~")) + filename
#Read the data from a file to a string.
if filename.endswith(".pickle") or ispickle:
try:
with open(filename, "rb") as fileToRead:
data = pickle.load(fileToRead)
except pickle.PickleError as perr:
print("Pickling error occurred: {}".format(perr))
return
filedata = json.dumps(data)
else:
try:
with open(filename, "r") as fileToRead:
filedata = fileToRead.read()
except IOError as ioerr:
print("IOError occurred: {}".format(ioerr))
return
requestToMake = request.Request(url, data=filedata.encode("utf-8"), headers=header)
#Make the request.
response = request.urlopen(requestToMake)
if response.code >= 400:
print("Error occurred while uploading file '{}' with error code {}.".format(filename,response.code))
|
[
"def",
"uploadFile",
"(",
"self",
",",
"filename",
",",
"ispickle",
"=",
"False",
",",
"athome",
"=",
"False",
")",
":",
"print",
"(",
"\"Uploading file {} to Redunda.\"",
".",
"format",
"(",
"filename",
")",
")",
"_",
",",
"tail",
"=",
"os",
".",
"path",
".",
"split",
"(",
"filename",
")",
"url",
"=",
"\"https://redunda.sobotics.org/bots/data/{}?key={}\"",
".",
"format",
"(",
"tail",
",",
"self",
".",
"key",
")",
"#Set the content type to 'application/octet-stream'",
"header",
"=",
"{",
"\"Content-type\"",
":",
"\"application/octet-stream\"",
"}",
"filedata",
"=",
"\"\"",
"if",
"athome",
":",
"filename",
"=",
"str",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~\"",
")",
")",
"+",
"filename",
"#Read the data from a file to a string.",
"if",
"filename",
".",
"endswith",
"(",
"\".pickle\"",
")",
"or",
"ispickle",
":",
"try",
":",
"with",
"open",
"(",
"filename",
",",
"\"rb\"",
")",
"as",
"fileToRead",
":",
"data",
"=",
"pickle",
".",
"load",
"(",
"fileToRead",
")",
"except",
"pickle",
".",
"PickleError",
"as",
"perr",
":",
"print",
"(",
"\"Pickling error occurred: {}\"",
".",
"format",
"(",
"perr",
")",
")",
"return",
"filedata",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
"else",
":",
"try",
":",
"with",
"open",
"(",
"filename",
",",
"\"r\"",
")",
"as",
"fileToRead",
":",
"filedata",
"=",
"fileToRead",
".",
"read",
"(",
")",
"except",
"IOError",
"as",
"ioerr",
":",
"print",
"(",
"\"IOError occurred: {}\"",
".",
"format",
"(",
"ioerr",
")",
")",
"return",
"requestToMake",
"=",
"request",
".",
"Request",
"(",
"url",
",",
"data",
"=",
"filedata",
".",
"encode",
"(",
"\"utf-8\"",
")",
",",
"headers",
"=",
"header",
")",
"#Make the request.",
"response",
"=",
"request",
".",
"urlopen",
"(",
"requestToMake",
")",
"if",
"response",
".",
"code",
">=",
"400",
":",
"print",
"(",
"\"Error occurred while uploading file '{}' with error code {}.\"",
".",
"format",
"(",
"filename",
",",
"response",
".",
"code",
")",
")"
] |
Uploads a single file to Redunda.
:param str filename: The name of the file to upload
:param bool ispickle: Optional variable to be set to True if the file is a pickle; default is False.
:returns: returns nothing
|
[
"Uploads",
"a",
"single",
"file",
"to",
"Redunda",
"."
] |
4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab
|
https://github.com/SOBotics/pyRedunda/blob/4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab/pyRedunda/Redunda.py#L51-L96
|
240,375
|
SOBotics/pyRedunda
|
pyRedunda/Redunda.py
|
Redunda.downloadFile
|
def downloadFile(self, filename, ispickle=False, athome=False):
"""
Downloads a single file from Redunda.
:param str filename: The name of the file you want to download
:param bool ispickle: Optional variable which tells if the file to be downloaded is a pickle; default is False.
:returns: returns nothing
"""
print("Downloading file {} from Redunda.".format(filename))
_, tail = os.path.split(filename)
url = "https://redunda.sobotics.org/bots/data/{}?key={}".format(tail, self.key)
requestToMake = request.Request(url)
#Make the request.
response = request.urlopen(requestToMake)
if response.code != 200:
print("Error occured while downloading file '{}' with error code {}.".format(filename,response.code))
if athome:
filename = str(os.path.expanduser("~")) + filename
filedata = response.read().decode("utf-8")
try:
if filename.endswith (".pickle") or ispickle:
data = json.loads(filedata)
try:
with open(filename, "wb") as fileToWrite:
pickle.dump (data, fileToWrite)
except pickle.PickleError as perr:
print("Pickling error occurred: {}".format(perr))
return
else:
with open (filename, "w") as fileToWrite:
fileToWrite.write(filedata)
except IOError as ioerr:
print("IOError occurred: {}".format(ioerr))
return
|
python
|
def downloadFile(self, filename, ispickle=False, athome=False):
"""
Downloads a single file from Redunda.
:param str filename: The name of the file you want to download
:param bool ispickle: Optional variable which tells if the file to be downloaded is a pickle; default is False.
:returns: returns nothing
"""
print("Downloading file {} from Redunda.".format(filename))
_, tail = os.path.split(filename)
url = "https://redunda.sobotics.org/bots/data/{}?key={}".format(tail, self.key)
requestToMake = request.Request(url)
#Make the request.
response = request.urlopen(requestToMake)
if response.code != 200:
print("Error occured while downloading file '{}' with error code {}.".format(filename,response.code))
if athome:
filename = str(os.path.expanduser("~")) + filename
filedata = response.read().decode("utf-8")
try:
if filename.endswith (".pickle") or ispickle:
data = json.loads(filedata)
try:
with open(filename, "wb") as fileToWrite:
pickle.dump (data, fileToWrite)
except pickle.PickleError as perr:
print("Pickling error occurred: {}".format(perr))
return
else:
with open (filename, "w") as fileToWrite:
fileToWrite.write(filedata)
except IOError as ioerr:
print("IOError occurred: {}".format(ioerr))
return
|
[
"def",
"downloadFile",
"(",
"self",
",",
"filename",
",",
"ispickle",
"=",
"False",
",",
"athome",
"=",
"False",
")",
":",
"print",
"(",
"\"Downloading file {} from Redunda.\"",
".",
"format",
"(",
"filename",
")",
")",
"_",
",",
"tail",
"=",
"os",
".",
"path",
".",
"split",
"(",
"filename",
")",
"url",
"=",
"\"https://redunda.sobotics.org/bots/data/{}?key={}\"",
".",
"format",
"(",
"tail",
",",
"self",
".",
"key",
")",
"requestToMake",
"=",
"request",
".",
"Request",
"(",
"url",
")",
"#Make the request.",
"response",
"=",
"request",
".",
"urlopen",
"(",
"requestToMake",
")",
"if",
"response",
".",
"code",
"!=",
"200",
":",
"print",
"(",
"\"Error occured while downloading file '{}' with error code {}.\"",
".",
"format",
"(",
"filename",
",",
"response",
".",
"code",
")",
")",
"if",
"athome",
":",
"filename",
"=",
"str",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~\"",
")",
")",
"+",
"filename",
"filedata",
"=",
"response",
".",
"read",
"(",
")",
".",
"decode",
"(",
"\"utf-8\"",
")",
"try",
":",
"if",
"filename",
".",
"endswith",
"(",
"\".pickle\"",
")",
"or",
"ispickle",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"filedata",
")",
"try",
":",
"with",
"open",
"(",
"filename",
",",
"\"wb\"",
")",
"as",
"fileToWrite",
":",
"pickle",
".",
"dump",
"(",
"data",
",",
"fileToWrite",
")",
"except",
"pickle",
".",
"PickleError",
"as",
"perr",
":",
"print",
"(",
"\"Pickling error occurred: {}\"",
".",
"format",
"(",
"perr",
")",
")",
"return",
"else",
":",
"with",
"open",
"(",
"filename",
",",
"\"w\"",
")",
"as",
"fileToWrite",
":",
"fileToWrite",
".",
"write",
"(",
"filedata",
")",
"except",
"IOError",
"as",
"ioerr",
":",
"print",
"(",
"\"IOError occurred: {}\"",
".",
"format",
"(",
"ioerr",
")",
")",
"return"
] |
Downloads a single file from Redunda.
:param str filename: The name of the file you want to download
:param bool ispickle: Optional variable which tells if the file to be downloaded is a pickle; default is False.
:returns: returns nothing
|
[
"Downloads",
"a",
"single",
"file",
"from",
"Redunda",
"."
] |
4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab
|
https://github.com/SOBotics/pyRedunda/blob/4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab/pyRedunda/Redunda.py#L98-L139
|
240,376
|
SOBotics/pyRedunda
|
pyRedunda/Redunda.py
|
Redunda.uploadFiles
|
def uploadFiles(self):
"""
Uploads all the files in 'filesToSync'
"""
for each_file in self.filesToSync:
self.uploadFile(each_file["name"], each_file["ispickle"], each_file["at_home"])
|
python
|
def uploadFiles(self):
"""
Uploads all the files in 'filesToSync'
"""
for each_file in self.filesToSync:
self.uploadFile(each_file["name"], each_file["ispickle"], each_file["at_home"])
|
[
"def",
"uploadFiles",
"(",
"self",
")",
":",
"for",
"each_file",
"in",
"self",
".",
"filesToSync",
":",
"self",
".",
"uploadFile",
"(",
"each_file",
"[",
"\"name\"",
"]",
",",
"each_file",
"[",
"\"ispickle\"",
"]",
",",
"each_file",
"[",
"\"at_home\"",
"]",
")"
] |
Uploads all the files in 'filesToSync'
|
[
"Uploads",
"all",
"the",
"files",
"in",
"filesToSync"
] |
4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab
|
https://github.com/SOBotics/pyRedunda/blob/4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab/pyRedunda/Redunda.py#L141-L146
|
240,377
|
SOBotics/pyRedunda
|
pyRedunda/Redunda.py
|
Redunda.downloadFiles
|
def downloadFiles(self):
"""
Downloads all the files in 'filesToSync'
"""
for each_file in self.filesToSync:
self.downloadFile(each_file["name"], each_file["ispickle"], each_file["at_home"])
|
python
|
def downloadFiles(self):
"""
Downloads all the files in 'filesToSync'
"""
for each_file in self.filesToSync:
self.downloadFile(each_file["name"], each_file["ispickle"], each_file["at_home"])
|
[
"def",
"downloadFiles",
"(",
"self",
")",
":",
"for",
"each_file",
"in",
"self",
".",
"filesToSync",
":",
"self",
".",
"downloadFile",
"(",
"each_file",
"[",
"\"name\"",
"]",
",",
"each_file",
"[",
"\"ispickle\"",
"]",
",",
"each_file",
"[",
"\"at_home\"",
"]",
")"
] |
Downloads all the files in 'filesToSync'
|
[
"Downloads",
"all",
"the",
"files",
"in",
"filesToSync"
] |
4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab
|
https://github.com/SOBotics/pyRedunda/blob/4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab/pyRedunda/Redunda.py#L148-L153
|
240,378
|
SOBotics/pyRedunda
|
pyRedunda/Redunda.py
|
Redunda.getEvents
|
def getEvents(self):
"""
Gets all events from Redunda and returns them.
:returns: Returns a dictionary of the events which were fetched.
"""
url = "https://redunda.sobotics.org/events.json"
data = parse.urlencode({"key": self.key}).encode()
req = request.Request(url, data)
response = request.urlopen(req)
return json.loads(response.read().decode("utf-8"))
|
python
|
def getEvents(self):
"""
Gets all events from Redunda and returns them.
:returns: Returns a dictionary of the events which were fetched.
"""
url = "https://redunda.sobotics.org/events.json"
data = parse.urlencode({"key": self.key}).encode()
req = request.Request(url, data)
response = request.urlopen(req)
return json.loads(response.read().decode("utf-8"))
|
[
"def",
"getEvents",
"(",
"self",
")",
":",
"url",
"=",
"\"https://redunda.sobotics.org/events.json\"",
"data",
"=",
"parse",
".",
"urlencode",
"(",
"{",
"\"key\"",
":",
"self",
".",
"key",
"}",
")",
".",
"encode",
"(",
")",
"req",
"=",
"request",
".",
"Request",
"(",
"url",
",",
"data",
")",
"response",
"=",
"request",
".",
"urlopen",
"(",
"req",
")",
"return",
"json",
".",
"loads",
"(",
"response",
".",
"read",
"(",
")",
".",
"decode",
"(",
"\"utf-8\"",
")",
")"
] |
Gets all events from Redunda and returns them.
:returns: Returns a dictionary of the events which were fetched.
|
[
"Gets",
"all",
"events",
"from",
"Redunda",
"and",
"returns",
"them",
"."
] |
4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab
|
https://github.com/SOBotics/pyRedunda/blob/4bd190dc908861c5fac4c9b60cf79eeb0e5c76ab/pyRedunda/Redunda.py#L155-L169
|
240,379
|
asphalt-framework/asphalt-memcached
|
asphalt/memcached/component.py
|
MemcachedComponent.configure_client
|
def configure_client(cls, host: str = 'localhost', port: int = 11211, **client_args):
"""
Configure a Memcached client.
:param host: host name or ip address to connect to
:param port: port number to connect to
:param client_args: extra keyword arguments passed to :class:`aiomcache.Client`
"""
assert check_argument_types()
client = Client(host, port, **client_args)
return client
|
python
|
def configure_client(cls, host: str = 'localhost', port: int = 11211, **client_args):
"""
Configure a Memcached client.
:param host: host name or ip address to connect to
:param port: port number to connect to
:param client_args: extra keyword arguments passed to :class:`aiomcache.Client`
"""
assert check_argument_types()
client = Client(host, port, **client_args)
return client
|
[
"def",
"configure_client",
"(",
"cls",
",",
"host",
":",
"str",
"=",
"'localhost'",
",",
"port",
":",
"int",
"=",
"11211",
",",
"*",
"*",
"client_args",
")",
":",
"assert",
"check_argument_types",
"(",
")",
"client",
"=",
"Client",
"(",
"host",
",",
"port",
",",
"*",
"*",
"client_args",
")",
"return",
"client"
] |
Configure a Memcached client.
:param host: host name or ip address to connect to
:param port: port number to connect to
:param client_args: extra keyword arguments passed to :class:`aiomcache.Client`
|
[
"Configure",
"a",
"Memcached",
"client",
"."
] |
48739fbc1f492c2ccfbe9ed0d8c8d8eda740bc0b
|
https://github.com/asphalt-framework/asphalt-memcached/blob/48739fbc1f492c2ccfbe9ed0d8c8d8eda740bc0b/asphalt/memcached/component.py#L44-L55
|
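A hedged usage sketch of configure_client, assuming it is exposed as a classmethod (as the cls parameter suggests); asphalt normally invokes it from component configuration, and the import path follows the file path listed for this record:

from asphalt.memcached.component import MemcachedComponent

# Returns an aiomcache.Client bound to the given host and port.
client = MemcachedComponent.configure_client(host='127.0.0.1', port=11211)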
240,380
|
ThreshingFloor/libtf
|
libtf/logparsers/tf_auth_log.py
|
TFAuthLog._extract_features
|
def _extract_features(self):
"""
Extracts and sets the feature data from the log file necessary for a reduction
"""
for parsed_line in self.parsed_lines:
# If it's ssh, we can handle it
if parsed_line.get('program') == 'sshd':
result = self._parse_auth_message(parsed_line['message'])
# Add the ip if we have it
if 'ip' in result:
self.features['ips'].append(result['ip'])
# If we haven't seen the ip, add it
if result['ip'] not in self.ips_to_pids:
# Make the value a list of pids
self.ips_to_pids[result['ip']] = [parsed_line['processid']]
else:
# If we have seen the ip before, add the pid if it's a new one
if parsed_line['processid'] not in self.ips_to_pids[result['ip']]:
self.ips_to_pids[result['ip']].append(parsed_line['processid'])
|
python
|
def _extract_features(self):
"""
Extracts and sets the feature data from the log file necessary for a reduction
"""
for parsed_line in self.parsed_lines:
# If it's ssh, we can handle it
if parsed_line.get('program') == 'sshd':
result = self._parse_auth_message(parsed_line['message'])
# Add the ip if we have it
if 'ip' in result:
self.features['ips'].append(result['ip'])
# If we haven't seen the ip, add it
if result['ip'] not in self.ips_to_pids:
# Make the value a list of pids
self.ips_to_pids[result['ip']] = [parsed_line['processid']]
else:
# If we have seen the ip before, add the pid if it's a new one
if parsed_line['processid'] not in self.ips_to_pids[result['ip']]:
self.ips_to_pids[result['ip']].append(parsed_line['processid'])
|
[
"def",
"_extract_features",
"(",
"self",
")",
":",
"for",
"parsed_line",
"in",
"self",
".",
"parsed_lines",
":",
"# If it's ssh, we can handle it",
"if",
"parsed_line",
".",
"get",
"(",
"'program'",
")",
"==",
"'sshd'",
":",
"result",
"=",
"self",
".",
"_parse_auth_message",
"(",
"parsed_line",
"[",
"'message'",
"]",
")",
"# Add the ip if we have it",
"if",
"'ip'",
"in",
"result",
":",
"self",
".",
"features",
"[",
"'ips'",
"]",
".",
"append",
"(",
"result",
"[",
"'ip'",
"]",
")",
"# If we haven't seen the ip, add it",
"if",
"result",
"[",
"'ip'",
"]",
"not",
"in",
"self",
".",
"ips_to_pids",
":",
"# Make the value a list of pids",
"self",
".",
"ips_to_pids",
"[",
"result",
"[",
"'ip'",
"]",
"]",
"=",
"[",
"parsed_line",
"[",
"'processid'",
"]",
"]",
"else",
":",
"# If we have seen the ip before, add the pid if it's a new one",
"if",
"parsed_line",
"[",
"'processid'",
"]",
"not",
"in",
"self",
".",
"ips_to_pids",
"[",
"result",
"[",
"'ip'",
"]",
"]",
":",
"self",
".",
"ips_to_pids",
"[",
"result",
"[",
"'ip'",
"]",
"]",
".",
"append",
"(",
"parsed_line",
"[",
"'processid'",
"]",
")"
] |
Extracts and sets the feature data from the log file necessary for a reduction
|
[
"Extracts",
"and",
"sets",
"the",
"feature",
"data",
"from",
"the",
"log",
"file",
"necessary",
"for",
"a",
"reduction"
] |
f1a8710f750639c9b9e2a468ece0d2923bf8c3df
|
https://github.com/ThreshingFloor/libtf/blob/f1a8710f750639c9b9e2a468ece0d2923bf8c3df/libtf/logparsers/tf_auth_log.py#L64-L85
|
240,381
|
ThreshingFloor/libtf
|
libtf/logparsers/tf_auth_log.py
|
TFAuthLog._analyze
|
def _analyze(self):
"""
Decide which lines should be filtered out
"""
pids = []
for ip in self.filter['ips']:
if ip in self.ips_to_pids:
for pid in self.ips_to_pids[ip]:
pids.append(pid)
for line in self.parsed_lines:
if 'processid' in line and line['processid'] in pids:
self.noisy_logs.append(line)
else:
self.quiet_logs.append(line)
|
python
|
def _analyze(self):
"""
Decide which lines should be filtered out
"""
pids = []
for ip in self.filter['ips']:
if ip in self.ips_to_pids:
for pid in self.ips_to_pids[ip]:
pids.append(pid)
for line in self.parsed_lines:
if 'processid' in line and line['processid'] in pids:
self.noisy_logs.append(line)
else:
self.quiet_logs.append(line)
|
[
"def",
"_analyze",
"(",
"self",
")",
":",
"pids",
"=",
"[",
"]",
"for",
"ip",
"in",
"self",
".",
"filter",
"[",
"'ips'",
"]",
":",
"if",
"ip",
"in",
"self",
".",
"ips_to_pids",
":",
"for",
"pid",
"in",
"self",
".",
"ips_to_pids",
"[",
"ip",
"]",
":",
"pids",
".",
"append",
"(",
"pid",
")",
"for",
"line",
"in",
"self",
".",
"parsed_lines",
":",
"if",
"'processid'",
"in",
"line",
"and",
"line",
"[",
"'processid'",
"]",
"in",
"pids",
":",
"self",
".",
"noisy_logs",
".",
"append",
"(",
"line",
")",
"else",
":",
"self",
".",
"quiet_logs",
".",
"append",
"(",
"line",
")"
] |
Decide which lines should be filtered out
|
[
"Decide",
"which",
"lines",
"should",
"be",
"filtered",
"out"
] |
f1a8710f750639c9b9e2a468ece0d2923bf8c3df
|
https://github.com/ThreshingFloor/libtf/blob/f1a8710f750639c9b9e2a468ece0d2923bf8c3df/libtf/logparsers/tf_auth_log.py#L109-L124
|
240,382
|
ThreshingFloor/libtf
|
libtf/logparsers/tf_auth_log.py
|
TFAuthLog._to_epoch
|
def _to_epoch(self, ts):
"""
Adds a year to the syslog timestamp because syslog doesn't use years
:param ts: The timestamp to add a year to
:return: Date/time string that includes a year
"""
year = self.year
tmpts = "%s %s" % (ts, str(self.year))
new_time = int(calendar.timegm(time.strptime(tmpts, "%b %d %H:%M:%S %Y")))
# If adding the year puts it in the future, this log must be from last year
if new_time > int(time.time()):
year -= 1
tmpts = "%s %s" % (ts, str(year))
new_time = int(calendar.timegm(time.strptime(tmpts, "%b %d %H:%M:%S %Y")))
return new_time
|
python
|
def _to_epoch(self, ts):
"""
Adds a year to the syslog timestamp because syslog doesn't use years
:param ts: The timestamp to add a year to
:return: Date/time string that includes a year
"""
year = self.year
tmpts = "%s %s" % (ts, str(self.year))
new_time = int(calendar.timegm(time.strptime(tmpts, "%b %d %H:%M:%S %Y")))
# If adding the year puts it in the future, this log must be from last year
if new_time > int(time.time()):
year -= 1
tmpts = "%s %s" % (ts, str(year))
new_time = int(calendar.timegm(time.strptime(tmpts, "%b %d %H:%M:%S %Y")))
return new_time
|
[
"def",
"_to_epoch",
"(",
"self",
",",
"ts",
")",
":",
"year",
"=",
"self",
".",
"year",
"tmpts",
"=",
"\"%s %s\"",
"%",
"(",
"ts",
",",
"str",
"(",
"self",
".",
"year",
")",
")",
"new_time",
"=",
"int",
"(",
"calendar",
".",
"timegm",
"(",
"time",
".",
"strptime",
"(",
"tmpts",
",",
"\"%b %d %H:%M:%S %Y\"",
")",
")",
")",
"# If adding the year puts it in the future, this log must be from last year",
"if",
"new_time",
">",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
":",
"year",
"-=",
"1",
"tmpts",
"=",
"\"%s %s\"",
"%",
"(",
"ts",
",",
"str",
"(",
"year",
")",
")",
"new_time",
"=",
"int",
"(",
"calendar",
".",
"timegm",
"(",
"time",
".",
"strptime",
"(",
"tmpts",
",",
"\"%b %d %H:%M:%S %Y\"",
")",
")",
")",
"return",
"new_time"
] |
Adds a year to the syslog timestamp because syslog doesn't use years
:param ts: The timestamp to add a year to
:return: Date/time string that includes a year
|
[
"Adds",
"a",
"year",
"to",
"the",
"syslog",
"timestamp",
"because",
"syslog",
"doesn",
"t",
"use",
"years"
] |
f1a8710f750639c9b9e2a468ece0d2923bf8c3df
|
https://github.com/ThreshingFloor/libtf/blob/f1a8710f750639c9b9e2a468ece0d2923bf8c3df/libtf/logparsers/tf_auth_log.py#L126-L144
|
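The year-rollover rule in _to_epoch can be shown standalone; the timestamp below is an example value, not taken from any log:

import calendar
import time

ts, year = "Dec 31 23:59:59", time.gmtime().tm_year    # syslog timestamps carry no year
epoch = calendar.timegm(time.strptime("%s %s" % (ts, year), "%b %d %H:%M:%S %Y"))
if epoch > int(time.time()):                           # a future date must be from last year
    epoch = calendar.timegm(time.strptime("%s %s" % (ts, year - 1), "%b %d %H:%M:%S %Y"))
print(epoch)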
240,383
|
ThreshingFloor/libtf
|
libtf/logparsers/tf_auth_log.py
|
TFAuthLog._parse_auth_message
|
def _parse_auth_message(self, auth_message):
"""
Parse a message to see if we have ip addresses or users that we care about
:param auth_message: The auth message to parse
:return: Result
"""
result = {}
has_matched = False
for regex in REGEXES_INVALID_USER:
# Check for the invalid user/ip messages
m = re.search(regex, auth_message)
if m and not has_matched:
has_matched = True
# Save the username and IP
result['username'] = m.group('user')
result['ip'] = m.group('ip')
for regex in REGEXES_INVALID_IP:
# Check for the invalid ip messages
m = re.search(regex, auth_message)
if m and not has_matched:
has_matched = True
# Save the IP
result['ip'] = m.group('ip')
for regex in REGEXES_IGNORE:
# Check for messages we want to ignore
m = re.search(regex, auth_message)
if m and not has_matched:
has_matched = True
# If it's an ssh log and we don't know what it is, handle that
if not has_matched:
sys.stderr.write("Unhandled auth message: %s\n" % auth_message)
return result
|
python
|
def _parse_auth_message(self, auth_message):
"""
Parse a message to see if we have ip addresses or users that we care about
:param auth_message: The auth message to parse
:return: Result
"""
result = {}
has_matched = False
for regex in REGEXES_INVALID_USER:
# Check for the invalid user/ip messages
m = re.search(regex, auth_message)
if m and not has_matched:
has_matched = True
# Save the username and IP
result['username'] = m.group('user')
result['ip'] = m.group('ip')
for regex in REGEXES_INVALID_IP:
# Check for the invalid ip messages
m = re.search(regex, auth_message)
if m and not has_matched:
has_matched = True
# Save the IP
result['ip'] = m.group('ip')
for regex in REGEXES_IGNORE:
# Check for messages we want to ignore
m = re.search(regex, auth_message)
if m and not has_matched:
has_matched = True
# If it's an ssh log and we don't know what it is, handle that
if not has_matched:
sys.stderr.write("Unhandled auth message: %s\n" % auth_message)
return result
|
[
"def",
"_parse_auth_message",
"(",
"self",
",",
"auth_message",
")",
":",
"result",
"=",
"{",
"}",
"has_matched",
"=",
"False",
"for",
"regex",
"in",
"REGEXES_INVALID_USER",
":",
"# Check for the invalid user/ip messages",
"m",
"=",
"re",
".",
"search",
"(",
"regex",
",",
"auth_message",
")",
"if",
"m",
"and",
"not",
"has_matched",
":",
"has_matched",
"=",
"True",
"# Save the username and IP",
"result",
"[",
"'username'",
"]",
"=",
"m",
".",
"group",
"(",
"'user'",
")",
"result",
"[",
"'ip'",
"]",
"=",
"m",
".",
"group",
"(",
"'ip'",
")",
"for",
"regex",
"in",
"REGEXES_INVALID_IP",
":",
"# Check for the invalid ip messages",
"m",
"=",
"re",
".",
"search",
"(",
"regex",
",",
"auth_message",
")",
"if",
"m",
"and",
"not",
"has_matched",
":",
"has_matched",
"=",
"True",
"# Save the IP",
"result",
"[",
"'ip'",
"]",
"=",
"m",
".",
"group",
"(",
"'ip'",
")",
"for",
"regex",
"in",
"REGEXES_IGNORE",
":",
"# Check for messages we want to ignore",
"m",
"=",
"re",
".",
"search",
"(",
"regex",
",",
"auth_message",
")",
"if",
"m",
"and",
"not",
"has_matched",
":",
"has_matched",
"=",
"True",
"# If it's an ssh log and we don't know what it is, handle that",
"if",
"not",
"has_matched",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"Unhandled auth message: %s\\n\"",
"%",
"auth_message",
")",
"return",
"result"
] |
Parse a message to see if we have ip addresses or users that we care about
:param auth_message: The auth message to parse
:return: Result
|
[
"Parse",
"a",
"message",
"to",
"see",
"if",
"we",
"have",
"ip",
"addresses",
"or",
"users",
"that",
"we",
"care",
"about"
] |
f1a8710f750639c9b9e2a468ece0d2923bf8c3df
|
https://github.com/ThreshingFloor/libtf/blob/f1a8710f750639c9b9e2a468ece0d2923bf8c3df/libtf/logparsers/tf_auth_log.py#L146-L189
|
240,384
|
guyingbo/iofree
|
iofree/__init__.py
|
read_until
|
def read_until(data: bytes, *, return_tail: bool = True, from_=None) -> bytes:
"""
read until some bytes appear
"""
return (yield (Traps._read_until, data, return_tail, from_))
|
python
|
def read_until(data: bytes, *, return_tail: bool = True, from_=None) -> bytes:
"""
read until some bytes appear
"""
return (yield (Traps._read_until, data, return_tail, from_))
|
[
"def",
"read_until",
"(",
"data",
":",
"bytes",
",",
"*",
",",
"return_tail",
":",
"bool",
"=",
"True",
",",
"from_",
"=",
"None",
")",
"->",
"bytes",
":",
"return",
"(",
"yield",
"(",
"Traps",
".",
"_read_until",
",",
"data",
",",
"return_tail",
",",
"from_",
")",
")"
] |
read until some bytes appear
|
[
"read",
"until",
"some",
"bytes",
"appear"
] |
9a14250c276f88c784d164f60fb22fbc1e7a3243
|
https://github.com/guyingbo/iofree/blob/9a14250c276f88c784d164f60fb22fbc1e7a3243/iofree/__init__.py#L214-L218
|
240,385
|
guyingbo/iofree
|
iofree/__init__.py
|
read_int
|
def read_int(nbytes: int, *, byteorder: str = "big", from_=None) -> int:
"""
read some bytes as integer
"""
return (yield (Traps._read_int, nbytes, byteorder, from_))
|
python
|
def read_int(nbytes: int, *, byteorder: str = "big", from_=None) -> int:
"""
read some bytes as integer
"""
return (yield (Traps._read_int, nbytes, byteorder, from_))
|
[
"def",
"read_int",
"(",
"nbytes",
":",
"int",
",",
"*",
",",
"byteorder",
":",
"str",
"=",
"\"big\"",
",",
"from_",
"=",
"None",
")",
"->",
"int",
":",
"return",
"(",
"yield",
"(",
"Traps",
".",
"_read_int",
",",
"nbytes",
",",
"byteorder",
",",
"from_",
")",
")"
] |
read some bytes as integer
|
[
"read",
"some",
"bytes",
"as",
"integer"
] |
9a14250c276f88c784d164f60fb22fbc1e7a3243
|
https://github.com/guyingbo/iofree/blob/9a14250c276f88c784d164f60fb22fbc1e7a3243/iofree/__init__.py#L228-L232
|
240,386
|
guyingbo/iofree
|
iofree/__init__.py
|
Parser.send
|
def send(self, data: bytes = b""):
"""
send data for parsing
"""
self.input.extend(data)
self._process()
|
python
|
def send(self, data: bytes = b""):
"""
send data for parsing
"""
self.input.extend(data)
self._process()
|
[
"def",
"send",
"(",
"self",
",",
"data",
":",
"bytes",
"=",
"b\"\"",
")",
":",
"self",
".",
"input",
".",
"extend",
"(",
"data",
")",
"self",
".",
"_process",
"(",
")"
] |
send data for parsing
|
[
"send",
"data",
"for",
"parsing"
] |
9a14250c276f88c784d164f60fb22fbc1e7a3243
|
https://github.com/guyingbo/iofree/blob/9a14250c276f88c784d164f60fb22fbc1e7a3243/iofree/__init__.py#L44-L49
|
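read_until, read_int and Parser.send cooperate through yielded "trap" tuples. The snippet below is not the iofree API, only a self-contained illustration of that generator pattern; the trap name and payload are invented:

def parse_line():
    line = yield ("read_until", b"\n", True, None)   # shaped like the traps above
    return line

gen = parse_line()
print(next(gen))            # the trap request: ('read_until', b'\n', True, None)
try:
    gen.send(b"hello")      # the driver feeds the requested bytes back in
except StopIteration as stop:
    print(stop.value)       # b'hello'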
240,387
|
ryanjdillon/pylleo
|
pylleo/utils_bokeh.py
|
create_bokeh_server
|
def create_bokeh_server(io_loop, files, argvs, host, port):
'''Start bokeh server with applications paths'''
from bokeh.server.server import Server
from bokeh.command.util import build_single_handler_applications
# Turn file paths into bokeh apps
apps = build_single_handler_applications(files, argvs)
# kwargs lifted from bokeh serve call to Server, with created io_loop
kwargs = {
'io_loop':io_loop,
'generate_session_ids':True,
'redirect_root':True,
'use_x_headers':False,
'secret_key':None,
'num_procs':1,
'host': host,
'sign_sessions':False,
'develop':False,
'port':port,
'use_index':True
}
server = Server(apps,**kwargs)
return server
|
python
|
def create_bokeh_server(io_loop, files, argvs, host, port):
'''Start bokeh server with applications paths'''
from bokeh.server.server import Server
from bokeh.command.util import build_single_handler_applications
# Turn file paths into bokeh apps
apps = build_single_handler_applications(files, argvs)
# kwargs lifted from bokeh serve call to Server, with created io_loop
kwargs = {
'io_loop':io_loop,
'generate_session_ids':True,
'redirect_root':True,
'use_x_headers':False,
'secret_key':None,
'num_procs':1,
'host': host,
'sign_sessions':False,
'develop':False,
'port':port,
'use_index':True
}
server = Server(apps,**kwargs)
return server
|
[
"def",
"create_bokeh_server",
"(",
"io_loop",
",",
"files",
",",
"argvs",
",",
"host",
",",
"port",
")",
":",
"from",
"bokeh",
".",
"server",
".",
"server",
"import",
"Server",
"from",
"bokeh",
".",
"command",
".",
"util",
"import",
"build_single_handler_applications",
"# Turn file paths into bokeh apps",
"apps",
"=",
"build_single_handler_applications",
"(",
"files",
",",
"argvs",
")",
"# kwargs lifted from bokeh serve call to Server, with created io_loop",
"kwargs",
"=",
"{",
"'io_loop'",
":",
"io_loop",
",",
"'generate_session_ids'",
":",
"True",
",",
"'redirect_root'",
":",
"True",
",",
"'use_x_headers'",
":",
"False",
",",
"'secret_key'",
":",
"None",
",",
"'num_procs'",
":",
"1",
",",
"'host'",
":",
"host",
",",
"'sign_sessions'",
":",
"False",
",",
"'develop'",
":",
"False",
",",
"'port'",
":",
"port",
",",
"'use_index'",
":",
"True",
"}",
"server",
"=",
"Server",
"(",
"apps",
",",
"*",
"*",
"kwargs",
")",
"return",
"server"
] |
Start bokeh server with applications paths
|
[
"Start",
"bokeh",
"server",
"with",
"applications",
"paths"
] |
b9b999fef19eaeccce4f207ab1b6198287c1bfec
|
https://github.com/ryanjdillon/pylleo/blob/b9b999fef19eaeccce4f207ab1b6198287c1bfec/pylleo/utils_bokeh.py#L2-L26
|
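A hedged usage sketch; the application path, host string and port are placeholders, and whether host should be a plain string or a list of allowed hosts depends on the bokeh version in use:

from tornado.ioloop import IOLoop
from pylleo.utils_bokeh import create_bokeh_server

io_loop = IOLoop.current()
server = create_bokeh_server(io_loop, ['app.py'], [], 'localhost:5006', 5006)
server.start()      # create_bokeh_server only builds the Server; it does not start it
io_loop.start()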
240,388
|
FlorianLudwig/rueckenwind
|
rw/routing.py
|
_generate_request_handler_proxy
|
def _generate_request_handler_proxy(handler_class, handler_args, name):
"""When a tornado.web.RequestHandler gets mounted we create a launcher function"""
@scope.inject
def request_handler_wrapper(app, handler, **kwargs):
handler = handler_class(app, handler.request, **handler_args)
handler._execute([], **kwargs)
request_handler_wrapper.__name__ = name
request_handler_wrapper.handler_class = handler_class
request_handler_wrapper.handler_args = handler_args
return request_handler_wrapper
|
python
|
def _generate_request_handler_proxy(handler_class, handler_args, name):
"""When a tornado.web.RequestHandler gets mounted we create a launcher function"""
@scope.inject
def request_handler_wrapper(app, handler, **kwargs):
handler = handler_class(app, handler.request, **handler_args)
handler._execute([], **kwargs)
request_handler_wrapper.__name__ = name
request_handler_wrapper.handler_class = handler_class
request_handler_wrapper.handler_args = handler_args
return request_handler_wrapper
|
[
"def",
"_generate_request_handler_proxy",
"(",
"handler_class",
",",
"handler_args",
",",
"name",
")",
":",
"@",
"scope",
".",
"inject",
"def",
"request_handler_wrapper",
"(",
"app",
",",
"handler",
",",
"*",
"*",
"kwargs",
")",
":",
"handler",
"=",
"handler_class",
"(",
"app",
",",
"handler",
".",
"request",
",",
"*",
"*",
"handler_args",
")",
"handler",
".",
"_execute",
"(",
"[",
"]",
",",
"*",
"*",
"kwargs",
")",
"request_handler_wrapper",
".",
"__name__",
"=",
"name",
"request_handler_wrapper",
".",
"handler_class",
"=",
"handler_class",
"request_handler_wrapper",
".",
"handler_args",
"=",
"handler_args",
"return",
"request_handler_wrapper"
] |
When a tornado.web.RequestHandler gets mounted we create a launcher function
|
[
"When",
"a",
"tornado",
".",
"web",
".",
"RequestHandler",
"gets",
"mounted",
"we",
"create",
"a",
"launcher",
"function"
] |
47fec7af05ea10b3cf6d59b9f7bf4d12c02dddea
|
https://github.com/FlorianLudwig/rueckenwind/blob/47fec7af05ea10b3cf6d59b9f7bf4d12c02dddea/rw/routing.py#L178-L189
|
240,389
|
FlorianLudwig/rueckenwind
|
rw/routing.py
|
RoutingTable.setup
|
def setup(self):
"""setup routing table"""
# get all routes from submodules
for prefix, routes in self.sub_rt:
routes.prefix = self.prefix + prefix
routes.setup()
fn_name_prefixes = {}
for fn_key, fn in routes.fn_namespace.items():
self.fn_namespace[routes.name + '.' + fn_key] = fn
fn_prefix = routes.name
if '.' in fn_key:
fn_prefix += '.' + fn_key.rsplit('.', 1)[0]
fn_name_prefixes[fn] = fn_prefix
for key in self:
funcs = set(rule[1] for rule in self[key])
for route, route_module, module, fn in routes.get(key, []):
if fn not in funcs:
new_route = Route(prefix + route.path)
fn.rw_route = new_route
fn_name_prefix = fn_name_prefixes[fn]
data = (new_route, fn_name_prefix, module, fn)
self[key].append(data)
# sort all rules
for key in self:
self[key].sort(key=lambda rule: rule[0])
|
python
|
def setup(self):
"""setup routing table"""
# get all routes from submodules
for prefix, routes in self.sub_rt:
routes.prefix = self.prefix + prefix
routes.setup()
fn_name_prefixes = {}
for fn_key, fn in routes.fn_namespace.items():
self.fn_namespace[routes.name + '.' + fn_key] = fn
fn_prefix = routes.name
if '.' in fn_key:
fn_prefix += '.' + fn_key.rsplit('.', 1)[0]
fn_name_prefixes[fn] = fn_prefix
for key in self:
funcs = set(rule[1] for rule in self[key])
for route, route_module, module, fn in routes.get(key, []):
if fn not in funcs:
new_route = Route(prefix + route.path)
fn.rw_route = new_route
fn_name_prefix = fn_name_prefixes[fn]
data = (new_route, fn_name_prefix, module, fn)
self[key].append(data)
# sort all rules
for key in self:
self[key].sort(key=lambda rule: rule[0])
|
[
"def",
"setup",
"(",
"self",
")",
":",
"# get all routes from submodules",
"for",
"prefix",
",",
"routes",
"in",
"self",
".",
"sub_rt",
":",
"routes",
".",
"prefix",
"=",
"self",
".",
"prefix",
"+",
"prefix",
"routes",
".",
"setup",
"(",
")",
"fn_name_prefixes",
"=",
"{",
"}",
"for",
"fn_key",
",",
"fn",
"in",
"routes",
".",
"fn_namespace",
".",
"items",
"(",
")",
":",
"self",
".",
"fn_namespace",
"[",
"routes",
".",
"name",
"+",
"'.'",
"+",
"fn_key",
"]",
"=",
"fn",
"fn_prefix",
"=",
"routes",
".",
"name",
"if",
"'.'",
"in",
"fn_key",
":",
"fn_prefix",
"+=",
"'.'",
"+",
"fn_key",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"[",
"0",
"]",
"fn_name_prefixes",
"[",
"fn",
"]",
"=",
"fn_prefix",
"for",
"key",
"in",
"self",
":",
"funcs",
"=",
"set",
"(",
"rule",
"[",
"1",
"]",
"for",
"rule",
"in",
"self",
"[",
"key",
"]",
")",
"for",
"route",
",",
"route_module",
",",
"module",
",",
"fn",
"in",
"routes",
".",
"get",
"(",
"key",
",",
"[",
"]",
")",
":",
"if",
"fn",
"not",
"in",
"funcs",
":",
"new_route",
"=",
"Route",
"(",
"prefix",
"+",
"route",
".",
"path",
")",
"fn",
".",
"rw_route",
"=",
"new_route",
"fn_name_prefix",
"=",
"fn_name_prefixes",
"[",
"fn",
"]",
"data",
"=",
"(",
"new_route",
",",
"fn_name_prefix",
",",
"module",
",",
"fn",
")",
"self",
"[",
"key",
"]",
".",
"append",
"(",
"data",
")",
"# sort all rules",
"for",
"key",
"in",
"self",
":",
"self",
"[",
"key",
"]",
".",
"sort",
"(",
"key",
"=",
"lambda",
"rule",
":",
"rule",
"[",
"0",
"]",
")"
] |
setup routing table
|
[
"setup",
"routing",
"table"
] |
47fec7af05ea10b3cf6d59b9f7bf4d12c02dddea
|
https://github.com/FlorianLudwig/rueckenwind/blob/47fec7af05ea10b3cf6d59b9f7bf4d12c02dddea/rw/routing.py#L202-L229
|
240,390
|
TkTech/pytextql
|
setup.py
|
get_version
|
def get_version():
"""
Loads the current module version from version.py and returns
it.
:returns: module version identifier.
:rtype: str
"""
local_results = {}
version_file_path = os.path.join('pytextql', 'version.py')
# This is compatible with py3k which removed execfile.
with open(version_file_path, 'rb') as fin:
# Compiling instead of passing the text straight to exec
# associates any errors with the correct file name.
code = compile(fin.read(), version_file_path, 'exec')
exec(code, {}, local_results)
return local_results['__version__']
|
python
|
def get_version():
"""
Loads the current module version from version.py and returns
it.
:returns: module version identifier.
:rtype: str
"""
local_results = {}
version_file_path = os.path.join('pytextql', 'version.py')
# This is compatible with py3k which removed execfile.
with open(version_file_path, 'rb') as fin:
# Compiling instead of passing the text straight to exec
# associates any errors with the correct file name.
code = compile(fin.read(), version_file_path, 'exec')
exec(code, {}, local_results)
return local_results['__version__']
|
[
"def",
"get_version",
"(",
")",
":",
"local_results",
"=",
"{",
"}",
"version_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"'pytextql'",
",",
"'version.py'",
")",
"# This is compatible with py3k which removed execfile.",
"with",
"open",
"(",
"version_file_path",
",",
"'rb'",
")",
"as",
"fin",
":",
"# Compiling instead of passing the text straight to exec",
"# associates any errors with the correct file name.",
"code",
"=",
"compile",
"(",
"fin",
".",
"read",
"(",
")",
",",
"version_file_path",
",",
"'exec'",
")",
"exec",
"(",
"code",
",",
"{",
"}",
",",
"local_results",
")",
"return",
"local_results",
"[",
"'__version__'",
"]"
] |
Loads the current module version from version.py and returns
it.
:returns: module version identifier.
:rtype: str
|
[
"Loads",
"the",
"current",
"module",
"version",
"from",
"version",
".",
"py",
"and",
"returns",
"it",
"."
] |
e054a7a4df7262deaca49bdbf748c00acf011b51
|
https://github.com/TkTech/pytextql/blob/e054a7a4df7262deaca49bdbf748c00acf011b51/setup.py#L7-L25
|
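Such a helper is typically consumed further down the same setup.py; only the package name below comes from this record, the call is otherwise a placeholder:

from setuptools import setup

setup(
    name='pytextql',
    version=get_version(),   # defined earlier in the same setup.py
)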
240,391
|
thomasbiddle/Kippt-for-Python
|
kippt/clips.py
|
Clip.update
|
def update(self, **args):
""" Updates a Clip.
Parameters:
- args Dictionary of other fields
Accepted fields can be found here:
https://github.com/kippt/api-documentation/blob/master/objects/clip.md
"""
# JSONify our data.
data = json.dumps(args)
r = requests.put(
"https://kippt.com/api/clips/%s" % (self.id),
headers=self.kippt.header,
data=data)
return (r.json())
|
python
|
def update(self, **args):
""" Updates a Clip.
Parameters:
- args Dictionary of other fields
Accepted fields can be found here:
https://github.com/kippt/api-documentation/blob/master/objects/clip.md
"""
# JSONify our data.
data = json.dumps(args)
r = requests.put(
"https://kippt.com/api/clips/%s" % (self.id),
headers=self.kippt.header,
data=data)
return (r.json())
|
[
"def",
"update",
"(",
"self",
",",
"*",
"*",
"args",
")",
":",
"# JSONify our data.",
"data",
"=",
"json",
".",
"dumps",
"(",
"args",
")",
"r",
"=",
"requests",
".",
"put",
"(",
"\"https://kippt.com/api/clips/%s\"",
"%",
"(",
"self",
".",
"id",
")",
",",
"headers",
"=",
"self",
".",
"kippt",
".",
"header",
",",
"data",
"=",
"data",
")",
"return",
"(",
"r",
".",
"json",
"(",
")",
")"
] |
Updates a Clip.
Parameters:
- args Dictionary of other fields
Accepted fields can be found here:
https://github.com/kippt/api-documentation/blob/master/objects/clip.md
|
[
"Updates",
"a",
"Clip",
"."
] |
dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267
|
https://github.com/thomasbiddle/Kippt-for-Python/blob/dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267/kippt/clips.py#L150-L165
|
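A hedged usage sketch for Clip.update; `clip` is assumed to be a Clip instance obtained through the library's client (not shown in this excerpt), and the field names follow the Kippt API documentation linked in the docstring:

# Sends a PUT to /api/clips/<id> with the given fields as the JSON body.
updated = clip.update(title='Renamed clip', notes='Some notes')
print(updated)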
240,392
|
thomasbiddle/Kippt-for-Python
|
kippt/clips.py
|
Clip.like
|
def like(self):
""" Like a clip.
"""
r = requests.post(
"https://kippt.com/api/clips/%s/likes" % (self.id),
headers=self.kippt.header
)
return (r.json())
|
python
|
def like(self):
""" Like a clip.
"""
r = requests.post(
"https://kippt.com/api/clips/%s/likes" % (self.id),
headers=self.kippt.header
)
return (r.json())
|
[
"def",
"like",
"(",
"self",
")",
":",
"r",
"=",
"requests",
".",
"post",
"(",
"\"https://kippt.com/api/clips/%s/likes\"",
"%",
"(",
"self",
".",
"id",
")",
",",
"headers",
"=",
"self",
".",
"kippt",
".",
"header",
")",
"return",
"(",
"r",
".",
"json",
"(",
")",
")"
] |
Like a clip.
|
[
"Like",
"a",
"clip",
"."
] |
dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267
|
https://github.com/thomasbiddle/Kippt-for-Python/blob/dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267/kippt/clips.py#L167-L175
|
240,393
|
thomasbiddle/Kippt-for-Python
|
kippt/clips.py
|
Clip.comment
|
def comment(self, body):
""" Comment on a clip.
Parameters:
- body (Required)
"""
# Merge our url as a parameter and JSONify it.
data = json.dumps({'body': body})
r = requests.post(
"https://kippt.com/api/clips/%s/comments" (self.id),
headers=self.kippt.header,
data=data
)
return (r.json())
|
python
|
def comment(self, body):
""" Comment on a clip.
Parameters:
- body (Required)
"""
# Merge our url as a parameter and JSONify it.
data = json.dumps({'body': body})
r = requests.post(
"https://kippt.com/api/clips/%s/comments" (self.id),
headers=self.kippt.header,
data=data
)
return (r.json())
|
[
"def",
"comment",
"(",
"self",
",",
"body",
")",
":",
"# Merge our url as a parameter and JSONify it.",
"data",
"=",
"json",
".",
"dumps",
"(",
"{",
"'body'",
":",
"body",
"}",
")",
"r",
"=",
"requests",
".",
"post",
"(",
"\"https://kippt.com/api/clips/%s/comments\"",
"(",
"self",
".",
"id",
")",
",",
"headers",
"=",
"self",
".",
"kippt",
".",
"header",
",",
"data",
"=",
"data",
")",
"return",
"(",
"r",
".",
"json",
"(",
")",
")"
] |
Comment on a clip.
Parameters:
- body (Required)
|
[
"Comment",
"on",
"a",
"clip",
"."
] |
dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267
|
https://github.com/thomasbiddle/Kippt-for-Python/blob/dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267/kippt/clips.py#L187-L200
|
240,394
|
thomasbiddle/Kippt-for-Python
|
kippt/clips.py
|
Clip.unlike
|
def unlike(self):
""" Unlike a clip.
"""
r = requests.delete(
"https://kippt.com/api/clips/%s/likes" % (self.id),
headers=self.kippt.header)
return (r.json())
|
python
|
def unlike(self):
""" Unlike a clip.
"""
r = requests.delete(
"https://kippt.com/api/clips/%s/likes" % (self.id),
headers=self.kippt.header)
return (r.json())
|
[
"def",
"unlike",
"(",
"self",
")",
":",
"r",
"=",
"requests",
".",
"delete",
"(",
"\"https://kippt.com/api/clips/%s/likes\"",
"%",
"(",
"self",
".",
"id",
")",
",",
"headers",
"=",
"self",
".",
"kippt",
".",
"header",
")",
"return",
"(",
"r",
".",
"json",
"(",
")",
")"
] |
Unlike a clip.
|
[
"Unlike",
"a",
"clip",
"."
] |
dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267
|
https://github.com/thomasbiddle/Kippt-for-Python/blob/dddd0ff84d70ccf2d84e50e3cff7aad89f9c1267/kippt/clips.py#L223-L230
|
240,395
|
jayclassless/basicserial
|
src/basicserial/__init__.py
|
to_json
|
def to_json(value, pretty=False):
"""
Serializes the given value to JSON.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
"""
options = {
'sort_keys': False,
'cls': BasicJSONEncoder,
}
if pretty:
options['indent'] = 2
options['separators'] = (',', ': ')
return json.dumps(value, **options)
|
python
|
def to_json(value, pretty=False):
"""
Serializes the given value to JSON.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
"""
options = {
'sort_keys': False,
'cls': BasicJSONEncoder,
}
if pretty:
options['indent'] = 2
options['separators'] = (',', ': ')
return json.dumps(value, **options)
|
[
"def",
"to_json",
"(",
"value",
",",
"pretty",
"=",
"False",
")",
":",
"options",
"=",
"{",
"'sort_keys'",
":",
"False",
",",
"'cls'",
":",
"BasicJSONEncoder",
",",
"}",
"if",
"pretty",
":",
"options",
"[",
"'indent'",
"]",
"=",
"2",
"options",
"[",
"'separators'",
"]",
"=",
"(",
"','",
",",
"': '",
")",
"return",
"json",
".",
"dumps",
"(",
"value",
",",
"*",
"*",
"options",
")"
] |
Serializes the given value to JSON.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
|
[
"Serializes",
"the",
"given",
"value",
"to",
"JSON",
"."
] |
da779edd955ba1009d14fae4e5926e29ad112b9d
|
https://github.com/jayclassless/basicserial/blob/da779edd955ba1009d14fae4e5926e29ad112b9d/src/basicserial/__init__.py#L80-L100
|
240,396
|
jayclassless/basicserial
|
src/basicserial/__init__.py
|
from_json
|
def from_json(value, native_datetimes=True):
"""
Deserializes the given value from JSON.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
"""
hook = BasicJsonDecoder(native_datetimes=native_datetimes)
result = json.loads(value, object_hook=hook)
if native_datetimes and isinstance(result, string_types):
return get_date_or_string(result)
return result
|
python
|
def from_json(value, native_datetimes=True):
"""
Deserializes the given value from JSON.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
"""
hook = BasicJsonDecoder(native_datetimes=native_datetimes)
result = json.loads(value, object_hook=hook)
if native_datetimes and isinstance(result, string_types):
return get_date_or_string(result)
return result
|
[
"def",
"from_json",
"(",
"value",
",",
"native_datetimes",
"=",
"True",
")",
":",
"hook",
"=",
"BasicJsonDecoder",
"(",
"native_datetimes",
"=",
"native_datetimes",
")",
"result",
"=",
"json",
".",
"loads",
"(",
"value",
",",
"object_hook",
"=",
"hook",
")",
"if",
"native_datetimes",
"and",
"isinstance",
"(",
"result",
",",
"string_types",
")",
":",
"return",
"get_date_or_string",
"(",
"result",
")",
"return",
"result"
] |
Deserializes the given value from JSON.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
|
[
"Deserializes",
"the",
"given",
"value",
"from",
"JSON",
"."
] |
da779edd955ba1009d14fae4e5926e29ad112b9d
|
https://github.com/jayclassless/basicserial/blob/da779edd955ba1009d14fae4e5926e29ad112b9d/src/basicserial/__init__.py#L173-L190
|
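A round-trip sketch using to_json and from_json together; it assumes BasicJSONEncoder serialises date objects to ISO strings, which is what the native_datetimes handling on the way back in suggests:

import datetime

doc = {'when': datetime.date(2020, 1, 1), 'count': 3}
text = to_json(doc, pretty=True)
print(text)
# With native_datetimes left at its default, the date-looking string should
# come back as a date object rather than a plain string.
print(from_json(text))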
240,397
|
jayclassless/basicserial
|
src/basicserial/__init__.py
|
to_yaml
|
def to_yaml(value, pretty=False):
"""
Serializes the given value to YAML.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
"""
if not yaml:
raise NotImplementedError('No supported YAML library available')
options = {
'Dumper': BasicYamlDumper,
'allow_unicode': True,
}
options['default_flow_style'] = not pretty
return yaml.dump(value, **options).rstrip()
|
python
|
def to_yaml(value, pretty=False):
"""
Serializes the given value to YAML.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
"""
if not yaml:
raise NotImplementedError('No supported YAML library available')
options = {
'Dumper': BasicYamlDumper,
'allow_unicode': True,
}
options['default_flow_style'] = not pretty
return yaml.dump(value, **options).rstrip()
|
[
"def",
"to_yaml",
"(",
"value",
",",
"pretty",
"=",
"False",
")",
":",
"if",
"not",
"yaml",
":",
"raise",
"NotImplementedError",
"(",
"'No supported YAML library available'",
")",
"options",
"=",
"{",
"'Dumper'",
":",
"BasicYamlDumper",
",",
"'allow_unicode'",
":",
"True",
",",
"}",
"options",
"[",
"'default_flow_style'",
"]",
"=",
"not",
"pretty",
"return",
"yaml",
".",
"dump",
"(",
"value",
",",
"*",
"*",
"options",
")",
".",
"rstrip",
"(",
")"
] |
Serializes the given value to YAML.
:param value: the value to serialize
:param pretty:
whether or not to format the output in a more human-readable way; if
not specified, defaults to ``False``
:type pretty: bool
:rtype: str
|
[
"Serializes",
"the",
"given",
"value",
"to",
"YAML",
"."
] |
da779edd955ba1009d14fae4e5926e29ad112b9d
|
https://github.com/jayclassless/basicserial/blob/da779edd955ba1009d14fae4e5926e29ad112b9d/src/basicserial/__init__.py#L244-L265
|
240,398
|
jayclassless/basicserial
|
src/basicserial/__init__.py
|
from_yaml
|
def from_yaml(value, native_datetimes=True):
"""
Deserializes the given value from YAML.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
"""
if not yaml:
raise NotImplementedError('No supported YAML library available')
if native_datetimes:
loader = NativeDatesYamlLoader
else:
loader = StringedDatesYamlLoader
return yaml.load(value, Loader=loader)
|
python
|
def from_yaml(value, native_datetimes=True):
"""
Deserializes the given value from YAML.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
"""
if not yaml:
raise NotImplementedError('No supported YAML library available')
if native_datetimes:
loader = NativeDatesYamlLoader
else:
loader = StringedDatesYamlLoader
return yaml.load(value, Loader=loader)
|
[
"def",
"from_yaml",
"(",
"value",
",",
"native_datetimes",
"=",
"True",
")",
":",
"if",
"not",
"yaml",
":",
"raise",
"NotImplementedError",
"(",
"'No supported YAML library available'",
")",
"if",
"native_datetimes",
":",
"loader",
"=",
"NativeDatesYamlLoader",
"else",
":",
"loader",
"=",
"StringedDatesYamlLoader",
"return",
"yaml",
".",
"load",
"(",
"value",
",",
"Loader",
"=",
"loader",
")"
] |
Deserializes the given value from YAML.
:param value: the value to deserialize
:type value: str
:param native_datetimes:
whether or not strings that look like dates/times should be
automatically cast to the native objects, or left as strings; if not
specified, defaults to ``True``
:type native_datetimes: bool
|
[
"Deserializes",
"the",
"given",
"value",
"from",
"YAML",
"."
] |
da779edd955ba1009d14fae4e5926e29ad112b9d
|
https://github.com/jayclassless/basicserial/blob/da779edd955ba1009d14fae4e5926e29ad112b9d/src/basicserial/__init__.py#L302-L323
|
240,399
|
jayclassless/basicserial
|
src/basicserial/__init__.py
|
to_toml
|
def to_toml(value, pretty=False): # noqa: unused-argument
"""
Serializes the given value to TOML.
:param value: the value to serialize
:param pretty:
this argument is ignored, as no TOML libraries support this type of
operation
:type pretty: bool
:rtype: str
"""
if not toml:
raise NotImplementedError('No supported TOML library available')
return toml.dumps(make_toml_friendly(value)).rstrip()
|
python
|
def to_toml(value, pretty=False): # noqa: unused-argument
"""
Serializes the given value to TOML.
:param value: the value to serialize
:param pretty:
this argument is ignored, as no TOML libraries support this type of
operation
:type pretty: bool
:rtype: str
"""
if not toml:
raise NotImplementedError('No supported TOML library available')
return toml.dumps(make_toml_friendly(value)).rstrip()
|
[
"def",
"to_toml",
"(",
"value",
",",
"pretty",
"=",
"False",
")",
":",
"# noqa: unused-argument",
"if",
"not",
"toml",
":",
"raise",
"NotImplementedError",
"(",
"'No supported TOML library available'",
")",
"return",
"toml",
".",
"dumps",
"(",
"make_toml_friendly",
"(",
"value",
")",
")",
".",
"rstrip",
"(",
")"
] |
Serializes the given value to TOML.
:param value: the value to serialize
:param pretty:
this argument is ignored, as no TOML libraries support this type of
operation
:type pretty: bool
:rtype: str
|
[
"Serializes",
"the",
"given",
"value",
"to",
"TOML",
"."
] |
da779edd955ba1009d14fae4e5926e29ad112b9d
|
https://github.com/jayclassless/basicserial/blob/da779edd955ba1009d14fae4e5926e29ad112b9d/src/basicserial/__init__.py#L354-L369
|
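A brief usage sketch; per the docstring, the pretty flag is accepted for interface symmetry but ignored:

# The nested dict is passed through make_toml_friendly before dumping.
print(to_toml({'tool': {'name': 'basicserial', 'debug': True}}, pretty=True))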