| id (int32, 0-252k) | repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (string, 1 class) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|
234,700
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pin_add
|
def pin_add(self, path, *paths, **kwargs):
"""Pins objects to local storage.
Stores IPFS object(s) from a given path locally to disk.
.. code-block:: python
>>> c.pin_add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d")
{'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}
Parameters
----------
path : str
Path to object(s) to be pinned
recursive : bool
Recursively pin the object linked to by the specified object(s)
Returns
-------
dict : List of IPFS objects that have been pinned
"""
#PY2: No support for kw-only parameters after glob parameters
if "recursive" in kwargs:
kwargs.setdefault("opts", {"recursive": kwargs.pop("recursive")})
args = (path,) + paths
return self._client.request('/pin/add', args, decoder='json', **kwargs)
|
python
|
def pin_add(self, path, *paths, **kwargs):
"""Pins objects to local storage.
Stores IPFS object(s) from a given path locally to disk.
.. code-block:: python
>>> c.pin_add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d")
{'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}
Parameters
----------
path : str
Path to object(s) to be pinned
recursive : bool
Recursively pin the object linked to by the specified object(s)
Returns
-------
dict : List of IPFS objects that have been pinned
"""
#PY2: No support for kw-only parameters after glob parameters
if "recursive" in kwargs:
kwargs.setdefault("opts", {"recursive": kwargs.pop("recursive")})
args = (path,) + paths
return self._client.request('/pin/add', args, decoder='json', **kwargs)
|
[
"def",
"pin_add",
"(",
"self",
",",
"path",
",",
"*",
"paths",
",",
"*",
"*",
"kwargs",
")",
":",
"#PY2: No support for kw-only parameters after glob parameters",
"if",
"\"recursive\"",
"in",
"kwargs",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"recursive\"",
":",
"kwargs",
".",
"pop",
"(",
"\"recursive\"",
")",
"}",
")",
"args",
"=",
"(",
"path",
",",
")",
"+",
"paths",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/pin/add'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Pins objects to local storage.
Stores IPFS object(s) from a given path locally to disk.
.. code-block:: python
>>> c.pin_add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d")
{'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}
Parameters
----------
path : str
Path to object(s) to be pinned
recursive : bool
Recursively pin the object linked to by the specified object(s)
Returns
-------
dict : List of IPFS objects that have been pinned
|
[
"Pins",
"objects",
"to",
"local",
"storage",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1027-L1053
|
234,701
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pin_rm
|
def pin_rm(self, path, *paths, **kwargs):
"""Removes a pinned object from local storage.
Removes the pin from the given object, allowing it to be garbage
collected if needed.
.. code-block:: python
>>> c.pin_rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d')
{'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}
Parameters
----------
path : str
Path to object(s) to be unpinned
recursive : bool
Recursively unpin the object linked to by the specified object(s)
Returns
-------
dict : List of IPFS objects that have been unpinned
"""
#PY2: No support for kw-only parameters after glob parameters
if "recursive" in kwargs:
kwargs.setdefault("opts", {"recursive": kwargs["recursive"]})
del kwargs["recursive"]
args = (path,) + paths
return self._client.request('/pin/rm', args, decoder='json', **kwargs)
|
python
|
def pin_rm(self, path, *paths, **kwargs):
"""Removes a pinned object from local storage.
Removes the pin from the given object, allowing it to be garbage
collected if needed.
.. code-block:: python
>>> c.pin_rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d')
{'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}
Parameters
----------
path : str
Path to object(s) to be unpinned
recursive : bool
Recursively unpin the object linked to by the specified object(s)
Returns
-------
dict : List of IPFS objects that have been unpinned
"""
#PY2: No support for kw-only parameters after glob parameters
if "recursive" in kwargs:
kwargs.setdefault("opts", {"recursive": kwargs["recursive"]})
del kwargs["recursive"]
args = (path,) + paths
return self._client.request('/pin/rm', args, decoder='json', **kwargs)
|
[
"def",
"pin_rm",
"(",
"self",
",",
"path",
",",
"*",
"paths",
",",
"*",
"*",
"kwargs",
")",
":",
"#PY2: No support for kw-only parameters after glob parameters",
"if",
"\"recursive\"",
"in",
"kwargs",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"recursive\"",
":",
"kwargs",
"[",
"\"recursive\"",
"]",
"}",
")",
"del",
"kwargs",
"[",
"\"recursive\"",
"]",
"args",
"=",
"(",
"path",
",",
")",
"+",
"paths",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/pin/rm'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Removes a pinned object from local storage.
Removes the pin from the given object, allowing it to be garbage
collected if needed.
.. code-block:: python
>>> c.pin_rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d')
{'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}
Parameters
----------
path : str
Path to object(s) to be unpinned
recursive : bool
Recursively unpin the object linked to by the specified object(s)
Returns
-------
dict : List of IPFS objects that have been unpinned
|
[
"Removes",
"a",
"pinned",
"object",
"from",
"local",
"storage",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1055-L1083
|
234,702
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pin_ls
|
def pin_ls(self, type="all", **kwargs):
"""Lists objects pinned to local storage.
By default, all pinned objects are returned, but the ``type`` flag or
arguments can restrict that to a specific pin type or to some specific
objects respectively.
.. code-block:: python
>>> c.pin_ls()
{'Keys': {
'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'},
'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'},
'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'},
…
'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'}}}
Parameters
----------
type : "str"
The type of pinned keys to list. Can be:
* ``"direct"``
* ``"indirect"``
* ``"recursive"``
* ``"all"``
Returns
-------
dict : Hashes of pinned IPFS objects and why they are pinned
"""
kwargs.setdefault("opts", {"type": type})
return self._client.request('/pin/ls', decoder='json', **kwargs)
|
python
|
def pin_ls(self, type="all", **kwargs):
"""Lists objects pinned to local storage.
By default, all pinned objects are returned, but the ``type`` flag or
arguments can restrict that to a specific pin type or to some specific
objects respectively.
.. code-block:: python
>>> c.pin_ls()
{'Keys': {
'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'},
'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'},
'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'},
…
'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'}}}
Parameters
----------
type : "str"
The type of pinned keys to list. Can be:
* ``"direct"``
* ``"indirect"``
* ``"recursive"``
* ``"all"``
Returns
-------
dict : Hashes of pinned IPFS objects and why they are pinned
"""
kwargs.setdefault("opts", {"type": type})
return self._client.request('/pin/ls', decoder='json', **kwargs)
|
[
"def",
"pin_ls",
"(",
"self",
",",
"type",
"=",
"\"all\"",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"type\"",
":",
"type",
"}",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/pin/ls'",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Lists objects pinned to local storage.
By default, all pinned objects are returned, but the ``type`` flag or
arguments can restrict that to a specific pin type or to some specific
objects respectively.
.. code-block:: python
>>> c.pin_ls()
{'Keys': {
'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'},
'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'},
'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'},
…
'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'}}}
Parameters
----------
type : "str"
The type of pinned keys to list. Can be:
* ``"direct"``
* ``"indirect"``
* ``"recursive"``
* ``"all"``
Returns
-------
dict : Hashes of pinned IPFS objects and why they are pinned
|
[
"Lists",
"objects",
"pinned",
"to",
"local",
"storage",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1085-L1118
|
234,703
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pin_update
|
def pin_update(self, from_path, to_path, **kwargs):
"""Replaces one pin with another.
Updates one pin to another, making sure that all objects in the new pin
are local. Then removes the old pin. This is an optimized version of
first using :meth:`~ipfsapi.Client.pin_add` to add a new pin
for an object and then using :meth:`~ipfsapi.Client.pin_rm` to remove
the pin for the old object.
.. code-block:: python
>>> c.pin_update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
... "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH")
{"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
"/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]}
Parameters
----------
from_path : str
Path to the old object
to_path : str
Path to the new object to be pinned
unpin : bool
Should the pin of the old object be removed? (Default: ``True``)
Returns
-------
dict : List of IPFS objects affected by the pinning operation
"""
#PY2: No support for kw-only parameters after glob parameters
if "unpin" in kwargs:
kwargs.setdefault("opts", {"unpin": kwargs["unpin"]})
del kwargs["unpin"]
args = (from_path, to_path)
return self._client.request('/pin/update', args, decoder='json',
**kwargs)
|
python
|
def pin_update(self, from_path, to_path, **kwargs):
"""Replaces one pin with another.
Updates one pin to another, making sure that all objects in the new pin
are local. Then removes the old pin. This is an optimized version of
first using :meth:`~ipfsapi.Client.pin_add` to add a new pin
for an object and then using :meth:`~ipfsapi.Client.pin_rm` to remove
the pin for the old object.
.. code-block:: python
>>> c.pin_update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
... "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH")
{"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
"/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]}
Parameters
----------
from_path : str
Path to the old object
to_path : str
Path to the new object to be pinned
unpin : bool
Should the pin of the old object be removed? (Default: ``True``)
Returns
-------
dict : List of IPFS objects affected by the pinning operation
"""
#PY2: No support for kw-only parameters after glob parameters
if "unpin" in kwargs:
kwargs.setdefault("opts", {"unpin": kwargs["unpin"]})
del kwargs["unpin"]
args = (from_path, to_path)
return self._client.request('/pin/update', args, decoder='json',
**kwargs)
|
[
"def",
"pin_update",
"(",
"self",
",",
"from_path",
",",
"to_path",
",",
"*",
"*",
"kwargs",
")",
":",
"#PY2: No support for kw-only parameters after glob parameters",
"if",
"\"unpin\"",
"in",
"kwargs",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"unpin\"",
":",
"kwargs",
"[",
"\"unpin\"",
"]",
"}",
")",
"del",
"kwargs",
"[",
"\"unpin\"",
"]",
"args",
"=",
"(",
"from_path",
",",
"to_path",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/pin/update'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Replaces one pin with another.
Updates one pin to another, making sure that all objects in the new pin
are local. Then removes the old pin. This is an optimized version of
first using :meth:`~ipfsapi.Client.pin_add` to add a new pin
for an object and then using :meth:`~ipfsapi.Client.pin_rm` to remove
the pin for the old object.
.. code-block:: python
>>> c.pin_update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
... "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH")
{"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
"/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]}
Parameters
----------
from_path : str
Path to the old object
to_path : str
Path to the new object to be pinned
unpin : bool
Should the pin of the old object be removed? (Default: ``True``)
Returns
-------
dict : List of IPFS objects affected by the pinning operation
|
[
"Replaces",
"one",
"pin",
"with",
"another",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1120-L1156
|
234,704
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pin_verify
|
def pin_verify(self, path, *paths, **kwargs):
"""Verify that recursive pins are complete.
Scan the repo for pinned object graphs and check their integrity.
Issues will be reported back with a helpful human-readable error
message to aid in error recovery. This is useful to help recover
from datastore corruptions (such as when accidentally deleting
files added using the filestore backend).
This function returns an iterator that needs to be closed using a context
manager (``with``-statement) or using the ``.close()`` method.
.. code-block:: python
>>> with c.pin_verify("QmN…TTZ", verbose=True) as pin_verify_iter:
... for item in pin_verify_iter:
... print(item)
...
{"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True}
{"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True}
{"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True}
…
Parameters
----------
path : str
Path to object(s) to be checked
verbose : bool
Also report status of items that were OK? (Default: ``False``)
Returns
-------
iterable
"""
#PY2: No support for kw-only parameters after glob parameters
if "verbose" in kwargs:
kwargs.setdefault("opts", {"verbose": kwargs["verbose"]})
del kwargs["verbose"]
args = (path,) + paths
return self._client.request('/pin/verify', args, decoder='json',
stream=True, **kwargs)
|
python
|
def pin_verify(self, path, *paths, **kwargs):
"""Verify that recursive pins are complete.
Scan the repo for pinned object graphs and check their integrity.
Issues will be reported back with a helpful human-readable error
message to aid in error recovery. This is useful to help recover
from datastore corruptions (such as when accidentally deleting
files added using the filestore backend).
This function returns an iterator that needs to be closed using a context
manager (``with``-statement) or using the ``.close()`` method.
.. code-block:: python
>>> with c.pin_verify("QmN…TTZ", verbose=True) as pin_verify_iter:
... for item in pin_verify_iter:
... print(item)
...
{"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True}
{"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True}
{"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True}
…
Parameters
----------
path : str
Path to object(s) to be checked
verbose : bool
Also report status of items that were OK? (Default: ``False``)
Returns
-------
iterable
"""
#PY2: No support for kw-only parameters after glob parameters
if "verbose" in kwargs:
kwargs.setdefault("opts", {"verbose": kwargs["verbose"]})
del kwargs["verbose"]
args = (path,) + paths
return self._client.request('/pin/verify', args, decoder='json',
stream=True, **kwargs)
|
[
"def",
"pin_verify",
"(",
"self",
",",
"path",
",",
"*",
"paths",
",",
"*",
"*",
"kwargs",
")",
":",
"#PY2: No support for kw-only parameters after glob parameters",
"if",
"\"verbose\"",
"in",
"kwargs",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"verbose\"",
":",
"kwargs",
"[",
"\"verbose\"",
"]",
"}",
")",
"del",
"kwargs",
"[",
"\"verbose\"",
"]",
"args",
"=",
"(",
"path",
",",
")",
"+",
"paths",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/pin/verify'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"stream",
"=",
"True",
",",
"*",
"*",
"kwargs",
")"
] |
Verify that recursive pins are complete.
Scan the repo for pinned object graphs and check their integrity.
Issues will be reported back with a helpful human-readable error
message to aid in error recovery. This is useful to help recover
from datastore corruptions (such as when accidentally deleting
files added using the filestore backend).
This function returns an iterator that needs to be closed using a context
manager (``with``-statement) or using the ``.close()`` method.
.. code-block:: python
>>> with c.pin_verify("QmN…TTZ", verbose=True) as pin_verify_iter:
... for item in pin_verify_iter:
... print(item)
...
{"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True}
{"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True}
{"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True}
…
Parameters
----------
path : str
Path to object(s) to be checked
verbose : bool
Also report status of items that were OK? (Default: ``False``)
Returns
-------
iterable
|
[
"Verify",
"that",
"recursive",
"pins",
"are",
"complete",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1158-L1199
|
234,705
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.id
|
def id(self, peer=None, **kwargs):
"""Shows IPFS Node ID info.
Returns the PublicKey, ProtocolVersion, ID, AgentVersion and
Addresses of the connected daemon or some other node.
.. code-block:: python
>>> c.id()
{'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc',
'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=',
'AgentVersion': 'go-libp2p/3.3.4',
'ProtocolVersion': 'ipfs/0.1.0',
'Addresses': [
'/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc',
'/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
'/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
'/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc',
'/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc',
'/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc',
'/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc',
'/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
'/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
'/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']}
Parameters
----------
peer : str
Peer.ID of the node to look up (local node if ``None``)
Returns
-------
dict : Information about the IPFS node
"""
args = (peer,) if peer is not None else ()
return self._client.request('/id', args, decoder='json', **kwargs)
|
python
|
def id(self, peer=None, **kwargs):
"""Shows IPFS Node ID info.
Returns the PublicKey, ProtocolVersion, ID, AgentVersion and
Addresses of the connected daemon or some other node.
.. code-block:: python
>>> c.id()
{'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc',
'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=',
'AgentVersion': 'go-libp2p/3.3.4',
'ProtocolVersion': 'ipfs/0.1.0',
'Addresses': [
'/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc',
'/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
'/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
'/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc',
'/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc',
'/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc',
'/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc',
'/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
'/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
'/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']}
Parameters
----------
peer : str
Peer.ID of the node to look up (local node if ``None``)
Returns
-------
dict : Information about the IPFS node
"""
args = (peer,) if peer is not None else ()
return self._client.request('/id', args, decoder='json', **kwargs)
|
[
"def",
"id",
"(",
"self",
",",
"peer",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"peer",
",",
")",
"if",
"peer",
"is",
"not",
"None",
"else",
"(",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/id'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Shows IPFS Node ID info.
Returns the PublicKey, ProtocolVersion, ID, AgentVersion and
Addresses of the connected daemon or some other node.
.. code-block:: python
>>> c.id()
{'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc',
'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=',
'AgentVersion': 'go-libp2p/3.3.4',
'ProtocolVersion': 'ipfs/0.1.0',
'Addresses': [
'/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc',
'/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
'/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
'/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc',
'/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc',
'/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc',
'/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc',
'/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
'/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
'/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']}
Parameters
----------
peer : str
Peer.ID of the node to look up (local node if ``None``)
Returns
-------
dict : Information about the IPFS node
|
[
"Shows",
"IPFS",
"Node",
"ID",
"info",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1254-L1289
|
234,706
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.bootstrap_add
|
def bootstrap_add(self, peer, *peers, **kwargs):
"""Adds peers to the bootstrap list.
Parameters
----------
peer : str
IPFS MultiAddr of a peer to add to the list
Returns
-------
dict
"""
args = (peer,) + peers
return self._client.request('/bootstrap/add', args,
decoder='json', **kwargs)
|
python
|
def bootstrap_add(self, peer, *peers, **kwargs):
"""Adds peers to the bootstrap list.
Parameters
----------
peer : str
IPFS MultiAddr of a peer to add to the list
Returns
-------
dict
"""
args = (peer,) + peers
return self._client.request('/bootstrap/add', args,
decoder='json', **kwargs)
|
[
"def",
"bootstrap_add",
"(",
"self",
",",
"peer",
",",
"*",
"peers",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"peer",
",",
")",
"+",
"peers",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/bootstrap/add'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Adds peers to the bootstrap list.
Parameters
----------
peer : str
IPFS MultiAddr of a peer to add to the list
Returns
-------
dict
|
[
"Adds",
"peers",
"to",
"the",
"bootstrap",
"list",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1317-L1331
|
234,707
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.swarm_filters_add
|
def swarm_filters_add(self, address, *addresses, **kwargs):
"""Adds a given multiaddr filter to the filter list.
This will add an address filter to the daemon's swarm. Filters applied
this way will not persist across daemon reboots; to achieve that, add your
filters to the configuration file.
.. code-block:: python
>>> c.swarm_filters_add("/ip4/192.168.0.0/ipcidr/16")
{'Strings': ['/ip4/192.168.0.0/ipcidr/16']}
Parameters
----------
address : str
Multiaddr to filter
Returns
-------
dict : List of swarm filters added
"""
args = (address,) + addresses
return self._client.request('/swarm/filters/add', args,
decoder='json', **kwargs)
|
python
|
def swarm_filters_add(self, address, *addresses, **kwargs):
"""Adds a given multiaddr filter to the filter list.
This will add an address filter to the daemon's swarm. Filters applied
this way will not persist across daemon reboots; to achieve that, add your
filters to the configuration file.
.. code-block:: python
>>> c.swarm_filters_add("/ip4/192.168.0.0/ipcidr/16")
{'Strings': ['/ip4/192.168.0.0/ipcidr/16']}
Parameters
----------
address : str
Multiaddr to filter
Returns
-------
dict : List of swarm filters added
"""
args = (address,) + addresses
return self._client.request('/swarm/filters/add', args,
decoder='json', **kwargs)
|
[
"def",
"swarm_filters_add",
"(",
"self",
",",
"address",
",",
"*",
"addresses",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"address",
",",
")",
"+",
"addresses",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/swarm/filters/add'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Adds a given multiaddr filter to the filter list.
This will add an address filter to the daemon's swarm. Filters applied
this way will not persist across daemon reboots; to achieve that, add your
filters to the configuration file.
.. code-block:: python
>>> c.swarm_filters_add("/ip4/192.168.0.0/ipcidr/16")
{'Strings': ['/ip4/192.168.0.0/ipcidr/16']}
Parameters
----------
address : str
Multiaddr to filter
Returns
-------
dict : List of swarm filters added
|
[
"Adds",
"a",
"given",
"multiaddr",
"filter",
"to",
"the",
"filter",
"list",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1456-L1479
|
234,708
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.dht_query
|
def dht_query(self, peer_id, *peer_ids, **kwargs):
"""Finds the closest Peer IDs to a given Peer ID by querying the DHT.
.. code-block:: python
>>> c.dht_query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ")
[{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF',
'Extra': '', 'Type': 2, 'Responses': None},
{'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f',
'Extra': '', 'Type': 2, 'Responses': None},
{'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs',
'Extra': '', 'Type': 2, 'Responses': None},
…
{'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv',
'Extra': '', 'Type': 1, 'Responses': []}]
Parameters
----------
peer_id : str
The peerID to run the query against
Returns
-------
dict : List of peer IDs
"""
args = (peer_id,) + peer_ids
return self._client.request('/dht/query', args,
decoder='json', **kwargs)
|
python
|
def dht_query(self, peer_id, *peer_ids, **kwargs):
"""Finds the closest Peer IDs to a given Peer ID by querying the DHT.
.. code-block:: python
>>> c.dht_query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ")
[{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF',
'Extra': '', 'Type': 2, 'Responses': None},
{'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f',
'Extra': '', 'Type': 2, 'Responses': None},
{'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs',
'Extra': '', 'Type': 2, 'Responses': None},
…
{'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv',
'Extra': '', 'Type': 1, 'Responses': []}]
Parameters
----------
peer_id : str
The peerID to run the query against
Returns
-------
dict : List of peer IDs
"""
args = (peer_id,) + peer_ids
return self._client.request('/dht/query', args,
decoder='json', **kwargs)
|
[
"def",
"dht_query",
"(",
"self",
",",
"peer_id",
",",
"*",
"peer_ids",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"peer_id",
",",
")",
"+",
"peer_ids",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/dht/query'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Finds the closest Peer IDs to a given Peer ID by querying the DHT.
.. code-block:: python
>>> c.dht_query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ")
[{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF',
'Extra': '', 'Type': 2, 'Responses': None},
{'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f',
'Extra': '', 'Type': 2, 'Responses': None},
{'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs',
'Extra': '', 'Type': 2, 'Responses': None},
…
{'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv',
'Extra': '', 'Type': 1, 'Responses': []}]
Parameters
----------
peer_id : str
The peerID to run the query against
Returns
-------
dict : List of peer IDs
|
[
"Finds",
"the",
"closest",
"Peer",
"IDs",
"to",
"a",
"given",
"Peer",
"ID",
"by",
"querying",
"the",
"DHT",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1506-L1533
|
234,709
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.dht_findprovs
|
def dht_findprovs(self, multihash, *multihashes, **kwargs):
"""Finds peers in the DHT that can provide a specific value.
.. code-block:: python
>>> c.dht_findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2")
[{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
'Extra': '', 'Type': 6, 'Responses': None},
{'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk',
'Extra': '', 'Type': 6, 'Responses': None},
{'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds',
'Extra': '', 'Type': 6, 'Responses': None},
…
{'ID': '', 'Extra': '', 'Type': 4, 'Responses': [
{'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None}
]},
{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
'Extra': '', 'Type': 1, 'Responses': [
{'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [
'/ip4/127.0.0.1/tcp/4001',
'/ip4/172.17.0.8/tcp/4001',
'/ip6/::1/tcp/4001',
'/ip4/52.32.109.74/tcp/1028'
]}
]}]
Parameters
----------
multihash : str
The DHT key to find providers for
Returns
-------
dict : List of provider Peer IDs
"""
args = (multihash,) + multihashes
return self._client.request('/dht/findprovs', args,
decoder='json', **kwargs)
|
python
|
def dht_findprovs(self, multihash, *multihashes, **kwargs):
"""Finds peers in the DHT that can provide a specific value.
.. code-block:: python
>>> c.dht_findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2")
[{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
'Extra': '', 'Type': 6, 'Responses': None},
{'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk',
'Extra': '', 'Type': 6, 'Responses': None},
{'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds',
'Extra': '', 'Type': 6, 'Responses': None},
…
{'ID': '', 'Extra': '', 'Type': 4, 'Responses': [
{'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None}
]},
{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
'Extra': '', 'Type': 1, 'Responses': [
{'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [
'/ip4/127.0.0.1/tcp/4001',
'/ip4/172.17.0.8/tcp/4001',
'/ip6/::1/tcp/4001',
'/ip4/52.32.109.74/tcp/1028'
]}
]}]
Parameters
----------
multihash : str
The DHT key to find providers for
Returns
-------
dict : List of provider Peer IDs
"""
args = (multihash,) + multihashes
return self._client.request('/dht/findprovs', args,
decoder='json', **kwargs)
|
[
"def",
"dht_findprovs",
"(",
"self",
",",
"multihash",
",",
"*",
"multihashes",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"multihash",
",",
")",
"+",
"multihashes",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/dht/findprovs'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Finds peers in the DHT that can provide a specific value.
.. code-block:: python
>>> c.dht_findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2")
[{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
'Extra': '', 'Type': 6, 'Responses': None},
{'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk',
'Extra': '', 'Type': 6, 'Responses': None},
{'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds',
'Extra': '', 'Type': 6, 'Responses': None},
…
{'ID': '', 'Extra': '', 'Type': 4, 'Responses': [
{'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None}
]},
{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
'Extra': '', 'Type': 1, 'Responses': [
{'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [
'/ip4/127.0.0.1/tcp/4001',
'/ip4/172.17.0.8/tcp/4001',
'/ip6/::1/tcp/4001',
'/ip4/52.32.109.74/tcp/1028'
]}
]}]
Parameters
----------
multihash : str
The DHT key to find providers for
Returns
-------
dict : List of provider Peer IDs
|
[
"Finds",
"peers",
"in",
"the",
"DHT",
"that",
"can",
"provide",
"a",
"specific",
"value",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1535-L1572
|
234,710
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.dht_get
|
def dht_get(self, key, *keys, **kwargs):
"""Queries the DHT for its best value related to given key.
There may be several different values for a given key stored in the
DHT; in this context *best* means the record that is most desirable.
There is no one metric for *best*: it depends entirely on the key type.
For IPNS, *best* is the record that is both valid and has the highest
sequence number (freshest). Different key types may specify other rules
for what they consider to be the *best*.
Parameters
----------
key : str
One or more keys whose values should be looked up
Returns
-------
str
"""
args = (key,) + keys
res = self._client.request('/dht/get', args, decoder='json', **kwargs)
if isinstance(res, dict) and "Extra" in res:
return res["Extra"]
else:
for r in res:
if "Extra" in r and len(r["Extra"]) > 0:
return r["Extra"]
raise exceptions.Error("empty response from DHT")
|
python
|
def dht_get(self, key, *keys, **kwargs):
"""Queries the DHT for its best value related to given key.
There may be several different values for a given key stored in the
DHT; in this context *best* means the record that is most desirable.
There is no one metric for *best*: it depends entirely on the key type.
For IPNS, *best* is the record that is both valid and has the highest
sequence number (freshest). Different key types may specify other rules
for what they consider to be the *best*.
Parameters
----------
key : str
One or more keys whose values should be looked up
Returns
-------
str
"""
args = (key,) + keys
res = self._client.request('/dht/get', args, decoder='json', **kwargs)
if isinstance(res, dict) and "Extra" in res:
return res["Extra"]
else:
for r in res:
if "Extra" in r and len(r["Extra"]) > 0:
return r["Extra"]
raise exceptions.Error("empty response from DHT")
|
[
"def",
"dht_get",
"(",
"self",
",",
"key",
",",
"*",
"keys",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"key",
",",
")",
"+",
"keys",
"res",
"=",
"self",
".",
"_client",
".",
"request",
"(",
"'/dht/get'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")",
"if",
"isinstance",
"(",
"res",
",",
"dict",
")",
"and",
"\"Extra\"",
"in",
"res",
":",
"return",
"res",
"[",
"\"Extra\"",
"]",
"else",
":",
"for",
"r",
"in",
"res",
":",
"if",
"\"Extra\"",
"in",
"r",
"and",
"len",
"(",
"r",
"[",
"\"Extra\"",
"]",
")",
">",
"0",
":",
"return",
"r",
"[",
"\"Extra\"",
"]",
"raise",
"exceptions",
".",
"Error",
"(",
"\"empty response from DHT\"",
")"
] |
Queries the DHT for its best value related to a given key.
There may be several different values for a given key stored in the
DHT; in this context *best* means the record that is most desirable.
There is no one metric for *best*: it depends entirely on the key type.
For IPNS, *best* is the record that is both valid and has the highest
sequence number (freshest). Different key types may specify other rules
for what they consider to be the *best*.
Parameters
----------
key : str
One or more keys whose values should be looked up
Returns
-------
str
|
[
"Queries",
"the",
"DHT",
"for",
"its",
"best",
"value",
"related",
"to",
"given",
"key",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1610-L1638
|
234,711
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.ping
|
def ping(self, peer, *peers, **kwargs):
"""Provides round-trip latency information for the routing system.
Finds nodes via the routing system, sends pings, waits for pongs,
and prints out round-trip latency information.
.. code-block:: python
>>> c.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n")
[{'Success': True, 'Time': 0,
'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'},
{'Success': False, 'Time': 0,
'Text': 'Peer lookup error: routing: not found'}]
Parameters
----------
peer : str
ID of peer to be pinged
count : int
Number of ping messages to send (Default: ``10``)
Returns
-------
list : Progress reports from the ping
"""
#PY2: No support for kw-only parameters after glob parameters
if "count" in kwargs:
kwargs.setdefault("opts", {"count": kwargs["count"]})
del kwargs["count"]
args = (peer,) + peers
return self._client.request('/ping', args, decoder='json', **kwargs)
|
python
|
def ping(self, peer, *peers, **kwargs):
"""Provides round-trip latency information for the routing system.
Finds nodes via the routing system, sends pings, waits for pongs,
and prints out round-trip latency information.
.. code-block:: python
>>> c.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n")
[{'Success': True, 'Time': 0,
'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'},
{'Success': False, 'Time': 0,
'Text': 'Peer lookup error: routing: not found'}]
Parameters
----------
peer : str
ID of peer to be pinged
count : int
Number of ping messages to send (Default: ``10``)
Returns
-------
list : Progress reports from the ping
"""
#PY2: No support for kw-only parameters after glob parameters
if "count" in kwargs:
kwargs.setdefault("opts", {"count": kwargs["count"]})
del kwargs["count"]
args = (peer,) + peers
return self._client.request('/ping', args, decoder='json', **kwargs)
|
[
"def",
"ping",
"(",
"self",
",",
"peer",
",",
"*",
"peers",
",",
"*",
"*",
"kwargs",
")",
":",
"#PY2: No support for kw-only parameters after glob parameters",
"if",
"\"count\"",
"in",
"kwargs",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"count\"",
":",
"kwargs",
"[",
"\"count\"",
"]",
"}",
")",
"del",
"kwargs",
"[",
"\"count\"",
"]",
"args",
"=",
"(",
"peer",
",",
")",
"+",
"peers",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/ping'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Provides round-trip latency information for the routing system.
Finds nodes via the routing system, sends pings, waits for pongs,
and prints out round-trip latency information.
.. code-block:: python
>>> c.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n")
[{'Success': True, 'Time': 0,
'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'},
{'Success': False, 'Time': 0,
'Text': 'Peer lookup error: routing: not found'}]
Parameters
----------
peer : str
ID of peer to be pinged
count : int
Number of ping messages to send (Default: ``10``)
Returns
-------
list : Progress reports from the ping
|
[
"Provides",
"round",
"-",
"trip",
"latency",
"information",
"for",
"the",
"routing",
"system",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1685-L1716
|
234,712
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.config
|
def config(self, key, value=None, **kwargs):
"""Controls configuration variables.
.. code-block:: python
>>> c.config("Addresses.Gateway")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'}
>>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'}
Parameters
----------
key : str
The key of the configuration entry (e.g. "Addresses.API")
value : dict
The value to set the configuration entry to
Returns
-------
dict : Requested/updated key and its (new) value
"""
args = (key, value)
return self._client.request('/config', args, decoder='json', **kwargs)
|
python
|
def config(self, key, value=None, **kwargs):
"""Controls configuration variables.
.. code-block:: python
>>> c.config("Addresses.Gateway")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'}
>>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'}
Parameters
----------
key : str
The key of the configuration entry (e.g. "Addresses.API")
value : dict
The value to set the configuration entry to
Returns
-------
dict : Requested/updated key and its (new) value
"""
args = (key, value)
return self._client.request('/config', args, decoder='json', **kwargs)
|
[
"def",
"config",
"(",
"self",
",",
"key",
",",
"value",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"key",
",",
"value",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/config'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Controls configuration variables.
.. code-block:: python
>>> c.config("Addresses.Gateway")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'}
>>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081")
{'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'}
Parameters
----------
key : str
The key of the configuration entry (e.g. "Addresses.API")
value : dict
The value to set the configuration entry to
Returns
-------
dict : Requested/updated key and its (new) value
|
[
"Controls",
"configuration",
"variables",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1718-L1740
|
234,713
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.config_replace
|
def config_replace(self, *args, **kwargs):
"""Replaces the existing config with a user-defined config.
Make sure to back up the config file first if necessary, as this
operation can't be undone.
"""
return self._client.request('/config/replace', args,
decoder='json', **kwargs)
|
python
|
def config_replace(self, *args, **kwargs):
"""Replaces the existing config with a user-defined config.
Make sure to back up the config file first if necessary, as this
operation can't be undone.
"""
return self._client.request('/config/replace', args,
decoder='json', **kwargs)
|
[
"def",
"config_replace",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/config/replace'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Replaces the existing config with a user-defined config.
Make sure to back up the config file first if necessary, as this
operation can't be undone.
|
[
"Replaces",
"the",
"existing",
"config",
"with",
"a",
"user",
"-",
"defined",
"config",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1766-L1773
|
234,714
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.log_level
|
def log_level(self, subsystem, level, **kwargs):
r"""Changes the logging output of a running daemon.
.. code-block:: python
>>> c.log_level("path", "info")
{'Message': "Changed log level of 'path' to 'info'\n"}
Parameters
----------
subsystem : str
The subsystem logging identifier (Use ``"all"`` for all subsystems)
level : str
The desired logging level. Must be one of:
* ``"debug"``
* ``"info"``
* ``"warning"``
* ``"error"``
* ``"fatal"``
* ``"panic"``
Returns
-------
dict : Status message
"""
args = (subsystem, level)
return self._client.request('/log/level', args,
decoder='json', **kwargs)
|
python
|
def log_level(self, subsystem, level, **kwargs):
r"""Changes the logging output of a running daemon.
.. code-block:: python
>>> c.log_level("path", "info")
{'Message': "Changed log level of 'path' to 'info'\n"}
Parameters
----------
subsystem : str
The subsystem logging identifier (Use ``"all"`` for all subsystems)
level : str
The desired logging level. Must be one of:
* ``"debug"``
* ``"info"``
* ``"warning"``
* ``"error"``
* ``"fatal"``
* ``"panic"``
Returns
-------
dict : Status message
"""
args = (subsystem, level)
return self._client.request('/log/level', args,
decoder='json', **kwargs)
|
[
"def",
"log_level",
"(",
"self",
",",
"subsystem",
",",
"level",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"subsystem",
",",
"level",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/log/level'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
r"""Changes the logging output of a running daemon.
.. code-block:: python
>>> c.log_level("path", "info")
{'Message': "Changed log level of 'path' to 'info'\n"}
Parameters
----------
subsystem : str
The subsystem logging identifier (Use ``"all"`` for all subsystems)
level : str
The desired logging level. Must be one of:
* ``"debug"``
* ``"info"``
* ``"warning"``
* ``"error"``
* ``"fatal"``
* ``"panic"``
Returns
-------
dict : Status message
|
[
"r",
"Changes",
"the",
"logging",
"output",
"of",
"a",
"running",
"daemon",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1775-L1803
|
234,715
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.log_tail
|
def log_tail(self, **kwargs):
r"""Reads log outputs as they are written.
This function returns an iterator that needs to be closed using a context
manager (``with``-statement) or using the ``.close()`` method.
.. code-block:: python
>>> with c.log_tail() as log_tail_iter:
... for item in log_tail_iter:
... print(item)
...
{"event":"updatePeer","system":"dht",
"peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.43353297Z"}
{"event":"handleAddProviderBegin","system":"dht",
"peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.433642581Z"}
{"event":"handleAddProvider","system":"dht","duration":91704,
"key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o",
"peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.433747513Z"}
{"event":"updatePeer","system":"dht",
"peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.435843012Z"}
…
Returns
-------
iterable
"""
return self._client.request('/log/tail', decoder='json',
stream=True, **kwargs)
|
python
|
def log_tail(self, **kwargs):
r"""Reads log outputs as they are written.
This function returns an iterator that needs to be closed using a context
manager (``with``-statement) or using the ``.close()`` method.
.. code-block:: python
>>> with c.log_tail() as log_tail_iter:
... for item in log_tail_iter:
... print(item)
...
{"event":"updatePeer","system":"dht",
"peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.43353297Z"}
{"event":"handleAddProviderBegin","system":"dht",
"peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.433642581Z"}
{"event":"handleAddProvider","system":"dht","duration":91704,
"key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o",
"peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.433747513Z"}
{"event":"updatePeer","system":"dht",
"peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.435843012Z"}
…
Returns
-------
iterable
"""
return self._client.request('/log/tail', decoder='json',
stream=True, **kwargs)
|
[
"def",
"log_tail",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/log/tail'",
",",
"decoder",
"=",
"'json'",
",",
"stream",
"=",
"True",
",",
"*",
"*",
"kwargs",
")"
] |
r"""Reads log outputs as they are written.
This function returns an iterator that needs to be closed using a context
manager (``with``-statement) or using the ``.close()`` method.
.. code-block:: python
>>> with c.log_tail() as log_tail_iter:
... for item in log_tail_iter:
... print(item)
...
{"event":"updatePeer","system":"dht",
"peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.43353297Z"}
{"event":"handleAddProviderBegin","system":"dht",
"peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.433642581Z"}
{"event":"handleAddProvider","system":"dht","duration":91704,
"key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o",
"peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.433747513Z"}
{"event":"updatePeer","system":"dht",
"peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
"session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
"time":"2016-08-22T13:25:27.435843012Z"}
…
Returns
-------
iterable
|
[
"r",
"Reads",
"log",
"outputs",
"as",
"they",
"are",
"written",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1836-L1872
|
234,716
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.files_cp
|
def files_cp(self, source, dest, **kwargs):
"""Copies files within the MFS.
Due to the nature of IPFS this will not actually involve any of the
file's content being copied.
.. code-block:: python
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
]}
>>> c.files_cp("/test", "/bla")
''
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
]}
Parameters
----------
source : str
Filepath within the MFS to copy from
dest : str
Destination filepath within the MFS to which the file will be
copied
"""
args = (source, dest)
return self._client.request('/files/cp', args, **kwargs)
|
python
|
def files_cp(self, source, dest, **kwargs):
"""Copies files within the MFS.
Due to the nature of IPFS this will not actually involve any of the
file's content being copied.
.. code-block:: python
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
]}
>>> c.files_cp("/test", "/bla")
''
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
]}
Parameters
----------
source : str
Filepath within the MFS to copy from
dest : str
Destination filepath within the MFS to which the file will be
copied
"""
args = (source, dest)
return self._client.request('/files/cp', args, **kwargs)
|
[
"def",
"files_cp",
"(",
"self",
",",
"source",
",",
"dest",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"source",
",",
"dest",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/files/cp'",
",",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Copies files within the MFS.
Due to the nature of IPFS this will not actually involve any of the
file's content being copied.
.. code-block:: python
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
]}
>>> c.files_cp("/test", "/bla")
''
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0},
{'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
]}
Parameters
----------
source : str
Filepath within the MFS to copy from
dest : str
Destination filepath within the MFS to which the file will be
copied
|
[
"Copies",
"files",
"within",
"the",
"MFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1889-L1920
|
234,717
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.files_ls
|
def files_ls(self, path, **kwargs):
"""Lists contents of a directory in the MFS.
.. code-block:: python
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}
]}
Parameters
----------
path : str
Filepath within the MFS
Returns
-------
dict : Directory entries
"""
args = (path,)
return self._client.request('/files/ls', args,
decoder='json', **kwargs)
|
python
|
def files_ls(self, path, **kwargs):
"""Lists contents of a directory in the MFS.
.. code-block:: python
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}
]}
Parameters
----------
path : str
Filepath within the MFS
Returns
-------
dict : Directory entries
"""
args = (path,)
return self._client.request('/files/ls', args,
decoder='json', **kwargs)
|
[
"def",
"files_ls",
"(",
"self",
",",
"path",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"path",
",",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/files/ls'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Lists contents of a directory in the MFS.
.. code-block:: python
>>> c.files_ls("/")
{'Entries': [
{'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}
]}
Parameters
----------
path : str
Filepath within the MFS
Returns
-------
dict : Directory entries
|
[
"Lists",
"contents",
"of",
"a",
"directory",
"in",
"the",
"MFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1922-L1943
|
234,718
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.files_mkdir
|
def files_mkdir(self, path, parents=False, **kwargs):
"""Creates a directory within the MFS.
.. code-block:: python
>>> c.files_mkdir("/test")
b''
Parameters
----------
path : str
Filepath within the MFS
parents : bool
Create parent directories as needed and do not raise an exception
if the requested directory already exists
"""
kwargs.setdefault("opts", {"parents": parents})
args = (path,)
return self._client.request('/files/mkdir', args, **kwargs)
|
python
|
def files_mkdir(self, path, parents=False, **kwargs):
"""Creates a directory within the MFS.
.. code-block:: python
>>> c.files_mkdir("/test")
b''
Parameters
----------
path : str
Filepath within the MFS
parents : bool
Create parent directories as needed and do not raise an exception
if the requested directory already exists
"""
kwargs.setdefault("opts", {"parents": parents})
args = (path,)
return self._client.request('/files/mkdir', args, **kwargs)
|
[
"def",
"files_mkdir",
"(",
"self",
",",
"path",
",",
"parents",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"parents\"",
":",
"parents",
"}",
")",
"args",
"=",
"(",
"path",
",",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/files/mkdir'",
",",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Creates a directory within the MFS.
.. code-block:: python
>>> c.files_mkdir("/test")
b''
Parameters
----------
path : str
Filepath within the MFS
parents : bool
Create parent directories as needed and do not raise an exception
if the requested directory already exists
|
[
"Creates",
"a",
"directory",
"within",
"the",
"MFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1945-L1964
|
234,719
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.files_rm
|
def files_rm(self, path, recursive=False, **kwargs):
"""Removes a file from the MFS.
.. code-block:: python
>>> c.files_rm("/bla/file")
b''
Parameters
----------
path : str
Filepath within the MFS
recursive : bool
Recursively remove directories?
"""
kwargs.setdefault("opts", {"recursive": recursive})
args = (path,)
return self._client.request('/files/rm', args, **kwargs)
|
python
|
def files_rm(self, path, recursive=False, **kwargs):
"""Removes a file from the MFS.
.. code-block:: python
>>> c.files_rm("/bla/file")
b''
Parameters
----------
path : str
Filepath within the MFS
recursive : bool
Recursively remove directories?
"""
kwargs.setdefault("opts", {"recursive": recursive})
args = (path,)
return self._client.request('/files/rm', args, **kwargs)
|
[
"def",
"files_rm",
"(",
"self",
",",
"path",
",",
"recursive",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"{",
"\"recursive\"",
":",
"recursive",
"}",
")",
"args",
"=",
"(",
"path",
",",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/files/rm'",
",",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Removes a file from the MFS.
.. code-block:: python
>>> c.files_rm("/bla/file")
b''
Parameters
----------
path : str
Filepath within the MFS
recursive : bool
Recursively remove directories?
|
[
"Removes",
"a",
"file",
"from",
"the",
"MFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L1989-L2007
|
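A short sketch of the ``recursive`` flag, which is needed when the target is a directory (paths are assumptions):
.. code-block:: python
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
c.files_rm("/bla/file")                  # plain file
c.files_rm("/projects", recursive=True)  # directory trees need recursive=True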
234,720
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.files_read
|
def files_read(self, path, offset=0, count=None, **kwargs):
"""Reads a file stored in the MFS.
.. code-block:: python
>>> c.files_read("/bla/file")
b'hi'
Parameters
----------
path : str
Filepath within the MFS
offset : int
Byte offset at which to begin reading
count : int
Maximum number of bytes to read
Returns
-------
str : MFS file contents
"""
opts = {"offset": offset}
if count is not None:
opts["count"] = count
kwargs.setdefault("opts", opts)
args = (path,)
return self._client.request('/files/read', args, **kwargs)
|
python
|
def files_read(self, path, offset=0, count=None, **kwargs):
"""Reads a file stored in the MFS.
.. code-block:: python
>>> c.files_read("/bla/file")
b'hi'
Parameters
----------
path : str
Filepath within the MFS
offset : int
Byte offset at which to begin reading
count : int
Maximum number of bytes to read
Returns
-------
str : MFS file contents
"""
opts = {"offset": offset}
if count is not None:
opts["count"] = count
kwargs.setdefault("opts", opts)
args = (path,)
return self._client.request('/files/read', args, **kwargs)
|
[
"def",
"files_read",
"(",
"self",
",",
"path",
",",
"offset",
"=",
"0",
",",
"count",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"opts",
"=",
"{",
"\"offset\"",
":",
"offset",
"}",
"if",
"count",
"is",
"not",
"None",
":",
"opts",
"[",
"\"count\"",
"]",
"=",
"count",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"opts",
")",
"args",
"=",
"(",
"path",
",",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/files/read'",
",",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Reads a file stored in the MFS.
.. code-block:: python
>>> c.files_read("/bla/file")
b'hi'
Parameters
----------
path : str
Filepath within the MFS
offset : int
Byte offset at which to begin reading at
count : int
Maximum number of bytes to read
Returns
-------
str : MFS file contents
|
[
"Reads",
"a",
"file",
"stored",
"in",
"the",
"MFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2009-L2036
|
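``offset`` and ``count`` together give ranged reads; a sketch reading a 16-byte window from an assumed file:
.. code-block:: python
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
chunk = c.files_read("/bla/file", offset=4, count=16)  # bytes 4..19, returned as bytes
print(chunk)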
234,721
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.files_write
|
def files_write(self, path, file, offset=0, create=False, truncate=False,
count=None, **kwargs):
"""Writes to a mutable file in the MFS.
.. code-block:: python
>>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True)
b''
Parameters
----------
path : str
Filepath within the MFS
file : io.RawIOBase
IO stream object with data that should be written
offset : int
Byte offset at which to begin writing
create : bool
Create the file if it does not exist
truncate : bool
Truncate the file to size zero before writing
count : int
Maximum number of bytes to read from the source ``file``
"""
opts = {"offset": offset, "create": create, "truncate": truncate}
if count is not None:
opts["count"] = count
kwargs.setdefault("opts", opts)
args = (path,)
body, headers = multipart.stream_files(file, self.chunk_size)
return self._client.request('/files/write', args,
data=body, headers=headers, **kwargs)
|
python
|
def files_write(self, path, file, offset=0, create=False, truncate=False,
count=None, **kwargs):
"""Writes to a mutable file in the MFS.
.. code-block:: python
>>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True)
b''
Parameters
----------
path : str
Filepath within the MFS
file : io.RawIOBase
IO stream object with data that should be written
offset : int
Byte offset at which to begin writing
create : bool
Create the file if it does not exist
truncate : bool
Truncate the file to size zero before writing
count : int
Maximum number of bytes to read from the source ``file``
"""
opts = {"offset": offset, "create": create, "truncate": truncate}
if count is not None:
opts["count"] = count
kwargs.setdefault("opts", opts)
args = (path,)
body, headers = multipart.stream_files(file, self.chunk_size)
return self._client.request('/files/write', args,
data=body, headers=headers, **kwargs)
|
[
"def",
"files_write",
"(",
"self",
",",
"path",
",",
"file",
",",
"offset",
"=",
"0",
",",
"create",
"=",
"False",
",",
"truncate",
"=",
"False",
",",
"count",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"opts",
"=",
"{",
"\"offset\"",
":",
"offset",
",",
"\"create\"",
":",
"create",
",",
"\"truncate\"",
":",
"truncate",
"}",
"if",
"count",
"is",
"not",
"None",
":",
"opts",
"[",
"\"count\"",
"]",
"=",
"count",
"kwargs",
".",
"setdefault",
"(",
"\"opts\"",
",",
"opts",
")",
"args",
"=",
"(",
"path",
",",
")",
"body",
",",
"headers",
"=",
"multipart",
".",
"stream_files",
"(",
"file",
",",
"self",
".",
"chunk_size",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/files/write'",
",",
"args",
",",
"data",
"=",
"body",
",",
"headers",
"=",
"headers",
",",
"*",
"*",
"kwargs",
")"
] |
Writes to a mutable file in the MFS.
.. code-block:: python
>>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True)
b''
Parameters
----------
path : str
Filepath within the MFS
file : io.RawIOBase
IO stream object with data that should be written
offset : int
Byte offset at which to begin writing
create : bool
Create the file if it does not exist
truncate : bool
Truncate the file to size zero before writing
count : int
Maximum number of bytes to read from the source ``file``
|
[
"Writes",
"to",
"a",
"mutable",
"file",
"in",
"the",
"MFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2038-L2070
|
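``files_write`` takes a stream rather than a string, so in-memory data must be wrapped in ``io.BytesIO``; a sketch that creates a file and then truncate-overwrites it:
.. code-block:: python
import io
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
c.files_write("/test/file", io.BytesIO(b"first version"), create=True)
# truncate=True discards the old contents instead of patching in place
c.files_write("/test/file", io.BytesIO(b"v2"), truncate=True)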
234,722
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.files_mv
|
def files_mv(self, source, dest, **kwargs):
"""Moves files and directories within the MFS.
.. code-block:: python
>>> c.files_mv("/test/file", "/bla/file")
b''
Parameters
----------
source : str
Existing filepath within the MFS
dest : str
Destination to which the file will be moved in the MFS
"""
args = (source, dest)
return self._client.request('/files/mv', args, **kwargs)
|
python
|
def files_mv(self, source, dest, **kwargs):
"""Moves files and directories within the MFS.
.. code-block:: python
>>> c.files_mv("/test/file", "/bla/file")
b''
Parameters
----------
source : str
Existing filepath within the MFS
dest : str
Destination to which the file will be moved in the MFS
"""
args = (source, dest)
return self._client.request('/files/mv', args, **kwargs)
|
[
"def",
"files_mv",
"(",
"self",
",",
"source",
",",
"dest",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"source",
",",
"dest",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/files/mv'",
",",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Moves files and directories within the MFS.
.. code-block:: python
>>> c.files_mv("/test/file", "/bla/file")
b''
Parameters
----------
source : str
Existing filepath within the MFS
dest : str
Destination to which the file will be moved in the MFS
|
[
"Moves",
"files",
"and",
"directories",
"within",
"the",
"MFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2072-L2088
|
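``files_mv`` also covers plain renames; a short sketch (paths are assumptions, and the destination directory must already exist):
.. code-block:: python
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
c.files_mv("/test/file", "/test/file.bak")     # rename in place
c.files_mv("/test/file.bak", "/bla/file.bak")  # move between directories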
234,723
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.add_bytes
|
def add_bytes(self, data, **kwargs):
"""Adds a set of bytes as a file to IPFS.
.. code-block:: python
>>> c.add_bytes(b"Mary had a little lamb")
'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
Also accepts and will stream generator objects.
Parameters
----------
data : bytes
Content to be added as a file
Returns
-------
str : Hash of the added IPFS object
"""
body, headers = multipart.stream_bytes(data, self.chunk_size)
return self._client.request('/add', decoder='json',
data=body, headers=headers, **kwargs)
|
python
|
def add_bytes(self, data, **kwargs):
"""Adds a set of bytes as a file to IPFS.
.. code-block:: python
>>> c.add_bytes(b"Mary had a little lamb")
'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
Also accepts and will stream generator objects.
Parameters
----------
data : bytes
Content to be added as a file
Returns
-------
str : Hash of the added IPFS object
"""
body, headers = multipart.stream_bytes(data, self.chunk_size)
return self._client.request('/add', decoder='json',
data=body, headers=headers, **kwargs)
|
[
"def",
"add_bytes",
"(",
"self",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"body",
",",
"headers",
"=",
"multipart",
".",
"stream_bytes",
"(",
"data",
",",
"self",
".",
"chunk_size",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/add'",
",",
"decoder",
"=",
"'json'",
",",
"data",
"=",
"body",
",",
"headers",
"=",
"headers",
",",
"*",
"*",
"kwargs",
")"
] |
Adds a set of bytes as a file to IPFS.
.. code-block:: python
>>> c.add_bytes(b"Mary had a little lamb")
'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
Also accepts and will stream generator objects.
Parameters
----------
data : bytes
Content to be added as a file
Returns
-------
str : Hash of the added IPFS object
|
[
"Adds",
"a",
"set",
"of",
"bytes",
"as",
"a",
"file",
"to",
"IPFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2109-L2130
|
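Because generators are accepted, large payloads can be streamed without first building one big bytes object; a hedged sketch:
.. code-block:: python
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
print(c.add_bytes(b"Mary had a little lamb"))  # prints the multihash
def chunks():
    for i in range(3):
        yield ("part %d\n" % i).encode()
print(c.add_bytes(chunks()))  # the generator is streamed chunk by chunk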
234,724
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.add_str
|
def add_str(self, string, **kwargs):
"""Adds a Python string as a file to IPFS.
.. code-block:: python
>>> c.add_str(u"Mary had a little lamb")
'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
Also accepts and will stream generator objects.
Parameters
----------
string : str
Content to be added as a file
Returns
-------
str : Hash of the added IPFS object
"""
body, headers = multipart.stream_text(string, self.chunk_size)
return self._client.request('/add', decoder='json',
data=body, headers=headers, **kwargs)
|
python
|
def add_str(self, string, **kwargs):
"""Adds a Python string as a file to IPFS.
.. code-block:: python
>>> c.add_str(u"Mary had a little lamb")
'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
Also accepts and will stream generator objects.
Parameters
----------
string : str
Content to be added as a file
Returns
-------
str : Hash of the added IPFS object
"""
body, headers = multipart.stream_text(string, self.chunk_size)
return self._client.request('/add', decoder='json',
data=body, headers=headers, **kwargs)
|
[
"def",
"add_str",
"(",
"self",
",",
"string",
",",
"*",
"*",
"kwargs",
")",
":",
"body",
",",
"headers",
"=",
"multipart",
".",
"stream_text",
"(",
"string",
",",
"self",
".",
"chunk_size",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/add'",
",",
"decoder",
"=",
"'json'",
",",
"data",
"=",
"body",
",",
"headers",
"=",
"headers",
",",
"*",
"*",
"kwargs",
")"
] |
Adds a Python string as a file to IPFS.
.. code-block:: python
>>> c.add_str(u"Mary had a little lamb")
'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab'
Also accepts and will stream generator objects.
Parameters
----------
string : str
Content to be added as a file
Returns
-------
str : Hash of the added IPFS object
|
[
"Adds",
"a",
"Python",
"string",
"as",
"a",
"file",
"to",
"IPFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2133-L2154
|
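``add_str`` is the text counterpart of ``add_bytes``: the string is encoded before upload (UTF-8 is an assumption here about the client's text streaming):
.. code-block:: python
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
print(c.add_str(u"Mary had a little lamb"))  # prints the multihash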
234,725
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.add_json
|
def add_json(self, json_obj, **kwargs):
"""Adds a json-serializable Python dict as a json file to IPFS.
.. code-block:: python
>>> c.add_json({'one': 1, 'two': 2, 'three': 3})
'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob'
Parameters
----------
json_obj : dict
A json-serializable Python dictionary
Returns
-------
str : Hash of the added IPFS object
"""
return self.add_bytes(encoding.Json().encode(json_obj), **kwargs)
|
python
|
def add_json(self, json_obj, **kwargs):
"""Adds a json-serializable Python dict as a json file to IPFS.
.. code-block:: python
>>> c.add_json({'one': 1, 'two': 2, 'three': 3})
'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob'
Parameters
----------
json_obj : dict
A json-serializable Python dictionary
Returns
-------
str : Hash of the added IPFS object
"""
return self.add_bytes(encoding.Json().encode(json_obj), **kwargs)
|
[
"def",
"add_json",
"(",
"self",
",",
"json_obj",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"add_bytes",
"(",
"encoding",
".",
"Json",
"(",
")",
".",
"encode",
"(",
"json_obj",
")",
",",
"*",
"*",
"kwargs",
")"
] |
Adds a json-serializable Python dict as a json file to IPFS.
.. code-block:: python
>>> c.add_json({'one': 1, 'two': 2, 'three': 3})
'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob'
Parameters
----------
json_obj : dict
A json-serializable Python dictionary
Returns
-------
str : Hash of the added IPFS object
|
[
"Adds",
"a",
"json",
"-",
"serializable",
"Python",
"dict",
"as",
"a",
"json",
"file",
"to",
"IPFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2156-L2173
|
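``add_json`` pairs naturally with the client's ``get_json`` for a round trip; a minimal sketch:
.. code-block:: python
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
h = c.add_json({'one': 1, 'two': 2, 'three': 3})
assert c.get_json(h) == {'one': 1, 'two': 2, 'three': 3}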
234,726
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.add_pyobj
|
def add_pyobj(self, py_obj, **kwargs):
"""Adds a picklable Python object as a file to IPFS.
.. deprecated:: 0.4.2
The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
Either switch to :meth:`~ipfsapi.Client.add_json` or use
``client.add_bytes(pickle.dumps(py_obj))`` instead.
Please see :meth:`~ipfsapi.Client.get_pyobj` for the
**security risks** of using these methods!
.. code-block:: python
>>> c.add_pyobj([0, 1.0, 2j, '3', 4e5])
'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji'
Parameters
----------
py_obj : object
A picklable Python object
Returns
-------
str : Hash of the added IPFS object
"""
warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
DeprecationWarning)
return self.add_bytes(encoding.Pickle().encode(py_obj), **kwargs)
|
python
|
def add_pyobj(self, py_obj, **kwargs):
"""Adds a picklable Python object as a file to IPFS.
.. deprecated:: 0.4.2
The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
Either switch to :meth:`~ipfsapi.Client.add_json` or use
``client.add_bytes(pickle.dumps(py_obj))`` instead.
Please see :meth:`~ipfsapi.Client.get_pyobj` for the
**security risks** of using these methods!
.. code-block:: python
>>> c.add_pyobj([0, 1.0, 2j, '3', 4e5])
'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji'
Parameters
----------
py_obj : object
A picklable Python object
Returns
-------
str : Hash of the added IPFS object
"""
warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
DeprecationWarning)
return self.add_bytes(encoding.Pickle().encode(py_obj), **kwargs)
|
[
"def",
"add_pyobj",
"(",
"self",
",",
"py_obj",
",",
"*",
"*",
"kwargs",
")",
":",
"warnings",
".",
"warn",
"(",
"\"Using `*_pyobj` on untrusted data is a security risk\"",
",",
"DeprecationWarning",
")",
"return",
"self",
".",
"add_bytes",
"(",
"encoding",
".",
"Pickle",
"(",
")",
".",
"encode",
"(",
"py_obj",
")",
",",
"*",
"*",
"kwargs",
")"
] |
Adds a picklable Python object as a file to IPFS.
.. deprecated:: 0.4.2
The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
Either switch to :meth:`~ipfsapi.Client.add_json` or use
``client.add_bytes(pickle.dumps(py_obj))`` instead.
Please see :meth:`~ipfsapi.Client.get_pyobj` for the
**security risks** of using these methods!
.. code-block:: python
>>> c.add_pyobj([0, 1.0, 2j, '3', 4e5])
'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji'
Parameters
----------
py_obj : object
A picklable Python object
Returns
-------
str : Hash of the added IPFS object
|
[
"Adds",
"a",
"picklable",
"Python",
"object",
"as",
"a",
"file",
"to",
"IPFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2194-L2221
|
234,727
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.get_pyobj
|
def get_pyobj(self, multihash, **kwargs):
"""Loads a pickled Python object from IPFS.
.. deprecated:: 0.4.2
The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
Either switch to :meth:`~ipfsapi.Client.get_json` or use
``pickle.loads(client.cat(multihash))`` instead.
.. caution::
The pickle module is not intended to be secure against erroneous or
maliciously constructed data. Never unpickle data received from an
untrusted or unauthenticated source.
Please **read**
`this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to
understand the security risks of using this method!
.. code-block:: python
>>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji')
[0, 1.0, 2j, '3', 400000.0]
Parameters
----------
multihash : str
Multihash of the IPFS object to load
Returns
-------
object : Deserialized IPFS Python object
"""
warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
DeprecationWarning)
return self.cat(multihash, decoder='pickle', **kwargs)
|
python
|
def get_pyobj(self, multihash, **kwargs):
"""Loads a pickled Python object from IPFS.
.. deprecated:: 0.4.2
The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
Either switch to :meth:`~ipfsapi.Client.get_json` or use
``pickle.loads(client.cat(multihash))`` instead.
.. caution::
The pickle module is not intended to be secure against erroneous or
maliciously constructed data. Never unpickle data received from an
untrusted or unauthenticated source.
Please **read**
`this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to
understand the security risks of using this method!
.. code-block:: python
>>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji')
[0, 1.0, 2j, '3', 400000.0]
Parameters
----------
multihash : str
Multihash of the IPFS object to load
Returns
-------
object : Deserialized IPFS Python object
"""
warnings.warn("Using `*_pyobj` on untrusted data is a security risk",
DeprecationWarning)
return self.cat(multihash, decoder='pickle', **kwargs)
|
[
"def",
"get_pyobj",
"(",
"self",
",",
"multihash",
",",
"*",
"*",
"kwargs",
")",
":",
"warnings",
".",
"warn",
"(",
"\"Using `*_pyobj` on untrusted data is a security risk\"",
",",
"DeprecationWarning",
")",
"return",
"self",
".",
"cat",
"(",
"multihash",
",",
"decoder",
"=",
"'pickle'",
",",
"*",
"*",
"kwargs",
")"
] |
Loads a pickled Python object from IPFS.
.. deprecated:: 0.4.2
The ``*_pyobj`` APIs allow for arbitrary code execution if abused.
Either switch to :meth:`~ipfsapi.Client.get_json` or use
``pickle.loads(client.cat(multihash))`` instead.
.. caution::
The pickle module is not intended to be secure against erroneous or
maliciously constructed data. Never unpickle data received from an
untrusted or unauthenticated source.
Please **read**
`this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to
understand the security risks of using this method!
.. code-block:: python
>>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji')
[0, 1.0, 2j, '3', 400000.0]
Parameters
----------
multihash : str
Multihash of the IPFS object to load
Returns
-------
object : Deserialized IPFS Python object
|
[
"Loads",
"a",
"pickled",
"Python",
"object",
"from",
"IPFS",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2223-L2257
|
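Given the deprecation notes above, the explicit pickle round trip the docstrings recommend looks like this; it is only safe for hashes whose content you produced yourself:
.. code-block:: python
import pickle
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
h = c.add_bytes(pickle.dumps([0, 1.0, 2j, '3', 4e5]))
# never unpickle content fetched from hashes you do not control
obj = pickle.loads(c.cat(h))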
234,728
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pubsub_peers
|
def pubsub_peers(self, topic=None, **kwargs):
"""List the peers we are pubsubbing with.
Lists the IDs of other IPFS users we are
connected to via some topic. Without specifying
a topic, IPFS peers from all subscribed topics
will be returned in the data. If a topic is specified,
only the IDs of the peers from the specified
topic will be returned in the data.
.. code-block:: python
>>> c.pubsub_peers()
{'Strings':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
...
'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
]
}
## with a topic
# subscribe to a channel
>>> with c.pubsub_sub('hello') as sub:
... c.pubsub_peers(topic='hello')
{'String':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
...
# other peers connected to the same channel
]
}
Parameters
----------
topic : str
The topic to list connected peers of
(defaults to None which lists peers for all topics)
Returns
-------
dict : Dictionary with the key "Strings" whose value is a list of
the IDs of the IPFS peers we're pubsubbing with
"""
args = (topic,) if topic is not None else ()
return self._client.request('/pubsub/peers', args,
decoder='json', **kwargs)
|
python
|
def pubsub_peers(self, topic=None, **kwargs):
"""List the peers we are pubsubbing with.
Lists the IDs of other IPFS users we are
connected to via some topic. Without specifying
a topic, IPFS peers from all subscribed topics
will be returned in the data. If a topic is specified,
only the IDs of the peers from the specified
topic will be returned in the data.
.. code-block:: python
>>> c.pubsub_peers()
{'Strings':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
...
'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
]
}
## with a topic
# subscribe to a channel
>>> with c.pubsub_sub('hello') as sub:
... c.pubsub_peers(topic='hello')
{'String':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
...
# other peers connected to the same channel
]
}
Parameters
----------
topic : str
The topic to list connected peers of
(defaults to None which lists peers for all topics)
Returns
-------
dict : Dictionary with the key "Strings" whose value is a list of
the IDs of the IPFS peers we're pubsubbing with
"""
args = (topic,) if topic is not None else ()
return self._client.request('/pubsub/peers', args,
decoder='json', **kwargs)
|
[
"def",
"pubsub_peers",
"(",
"self",
",",
"topic",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"topic",
",",
")",
"if",
"topic",
"is",
"not",
"None",
"else",
"(",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/pubsub/peers'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
List the peers we are pubsubbing with.
Lists the IDs of other IPFS users we are
connected to via some topic. Without specifying
a topic, IPFS peers from all subscribed topics
will be returned in the data. If a topic is specified,
only the IDs of the peers from the specified
topic will be returned in the data.
.. code-block:: python
>>> c.pubsub_peers()
{'Strings':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
...
'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
]
}
## with a topic
# subscribe to a channel
>>> with c.pubsub_sub('hello') as sub:
... c.pubsub_peers(topic='hello')
{'String':
[
'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
...
# other peers connected to the same channel
]
}
Parameters
----------
topic : str
The topic to list connected peers of
(defaults to None which lists peers for all topics)
Returns
-------
dict : Dictionary with the key "Strings" whose value is a list of
the IDs of the IPFS peers we're pubsubbing with
|
[
"List",
"the",
"peers",
"we",
"are",
"pubsubbing",
"with",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2282-L2330
|
234,729
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pubsub_pub
|
def pubsub_pub(self, topic, payload, **kwargs):
"""Publish a message to a given pubsub topic
Publishes the given payload (string) to
everyone currently subscribed to the given topic.
All data (including the id of the publisher) is automatically
base64 encoded when published.
.. code-block:: python
# publishes the message 'message' to the topic 'hello'
>>> c.pubsub_pub('hello', 'message')
[]
Parameters
----------
topic : str
Topic to publish to
payload : Data to be published to the given topic
Returns
-------
list : empty list
"""
args = (topic, payload)
return self._client.request('/pubsub/pub', args,
decoder='json', **kwargs)
|
python
|
def pubsub_pub(self, topic, payload, **kwargs):
"""Publish a message to a given pubsub topic
Publishes the given payload (string) to
everyone currently subscribed to the given topic.
All data (including the id of the publisher) is automatically
base64 encoded when published.
.. code-block:: python
# publishes the message 'message' to the topic 'hello'
>>> c.pubsub_pub('hello', 'message')
[]
Parameters
----------
topic : str
Topic to publish to
payload : Data to be published to the given topic
Returns
-------
list : empty list
"""
args = (topic, payload)
return self._client.request('/pubsub/pub', args,
decoder='json', **kwargs)
|
[
"def",
"pubsub_pub",
"(",
"self",
",",
"topic",
",",
"payload",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"topic",
",",
"payload",
")",
"return",
"self",
".",
"_client",
".",
"request",
"(",
"'/pubsub/pub'",
",",
"args",
",",
"decoder",
"=",
"'json'",
",",
"*",
"*",
"kwargs",
")"
] |
Publish a message to a given pubsub topic
Publishes the given payload (string) to
everyone currently subscribed to the given topic.
All data (including the id of the publisher) is automatically
base64 encoded when published.
.. code-block:: python
# publishes the message 'message' to the topic 'hello'
>>> c.pubsub_pub('hello', 'message')
[]
Parameters
----------
topic : str
Topic to publish to
payload : Data to be published to the given topic
Returns
-------
list : empty list
|
[
"Publish",
"a",
"message",
"to",
"a",
"given",
"pubsub",
"topic"
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2332-L2359
|
234,730
|
ipfs/py-ipfs-api
|
ipfsapi/client.py
|
Client.pubsub_sub
|
def pubsub_sub(self, topic, discover=False, **kwargs):
"""Subscribe to mesages on a given topic
Subscribing to a topic in IPFS means anytime
a message is published to a topic, the subscribers
will be notified of the publication.
The connection with the pubsub topic is opened and read.
The Subscription returned should be used inside a context
manager to ensure that it is closed properly and not left
hanging.
.. code-block:: python
>>> sub = c.pubsub_sub('testing')
>>> with c.pubsub_sub('testing') as sub:
# publish a message 'hello' to the topic 'testing'
... c.pubsub_pub('testing', 'hello')
... for message in sub:
... print(message)
... # Stop reading the subscription after
... # we receive one publication
... break
{'from': '<base64encoded IPFS id>',
'data': 'aGVsbG8=',
'topicIDs': ['testing']}
# NOTE: in order to receive published data
# you must already be subscribed to the topic at publication
# time.
Parameters
----------
topic : str
Name of a topic to subscribe to
discover : bool
Try to discover other peers subscribed to the same topic
(defaults to False)
Returns
-------
Generator wrapped in a context
manager that maintains a connection
stream to the given topic.
"""
args = (topic, discover)
return SubChannel(self._client.request('/pubsub/sub', args,
stream=True, decoder='json'))
|
python
|
def pubsub_sub(self, topic, discover=False, **kwargs):
"""Subscribe to mesages on a given topic
Subscribing to a topic in IPFS means anytime
a message is published to a topic, the subscribers
will be notified of the publication.
The connection with the pubsub topic is opened and read.
The Subscription returned should be used inside a context
manager to ensure that it is closed properly and not left
hanging.
.. code-block:: python
>>> sub = c.pubsub_sub('testing')
>>> with c.pubsub_sub('testing') as sub:
# publish a message 'hello' to the topic 'testing'
... c.pubsub_pub('testing', 'hello')
... for message in sub:
... print(message)
... # Stop reading the subscription after
... # we receive one publication
... break
{'from': '<base64encoded IPFS id>',
'data': 'aGVsbG8=',
'topicIDs': ['testing']}
# NOTE: in order to receive published data
# you must already be subscribed to the topic at publication
# time.
Parameters
----------
topic : str
Name of a topic to subscribe to
discover : bool
Try to discover other peers subscribed to the same topic
(defaults to False)
Returns
-------
Generator wrapped in a context
manager that maintains a connection
stream to the given topic.
"""
args = (topic, discover)
return SubChannel(self._client.request('/pubsub/sub', args,
stream=True, decoder='json'))
|
[
"def",
"pubsub_sub",
"(",
"self",
",",
"topic",
",",
"discover",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"topic",
",",
"discover",
")",
"return",
"SubChannel",
"(",
"self",
".",
"_client",
".",
"request",
"(",
"'/pubsub/sub'",
",",
"args",
",",
"stream",
"=",
"True",
",",
"decoder",
"=",
"'json'",
")",
")"
] |
Subscribe to messages on a given topic
Subscribing to a topic in IPFS means anytime
a message is published to a topic, the subscribers
will be notified of the publication.
The connection with the pubsub topic is opened and read.
The Subscription returned should be used inside a context
manager to ensure that it is closed properly and not left
hanging.
.. code-block:: python
>>> sub = c.pubsub_sub('testing')
>>> with c.pubsub_sub('testing') as sub:
# publish a message 'hello' to the topic 'testing'
... c.pubsub_pub('testing', 'hello')
... for message in sub:
... print(message)
... # Stop reading the subscription after
... # we receive one publication
... break
{'from': '<base64encoded IPFS id>',
'data': 'aGVsbG8=',
'topicIDs': ['testing']}
# NOTE: in order to receive published data
# you must already be subscribed to the topic at publication
# time.
Parameters
----------
topic : str
Name of a topic to subscribe to
discover : bool
Try to discover other peers subscribed to the same topic
(defaults to False)
Returns
-------
Generator wrapped in a context
manager that maintains a connection
stream to the given topic.
|
[
"Subscribe",
"to",
"mesages",
"on",
"a",
"given",
"topic"
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/client.py#L2361-L2409
|
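A round trip combining the three pubsub calls above; per the docstring's note, the subscription must exist before the publish (the topic name is an assumption):
.. code-block:: python
import ipfsapi
c = ipfsapi.connect('127.0.0.1', 5001)
with c.pubsub_sub('testing') as sub:    # subscribe first
    c.pubsub_pub('testing', 'hello')    # then publish
    print(c.pubsub_peers(topic='testing'))
    for message in sub:                 # blocks until a message arrives
        print(message['data'])
        break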
234,731
|
ipfs/py-ipfs-api
|
ipfsapi/utils.py
|
guess_mimetype
|
def guess_mimetype(filename):
"""Guesses the mimetype of a file based on the given ``filename``.
.. code-block:: python
>>> guess_mimetype('example.txt')
'text/plain'
>>> guess_mimetype('/foo/bar/example')
'application/octet-stream'
Parameters
----------
filename : str
The file name or path for which the mimetype is to be guessed
"""
fn = os.path.basename(filename)
return mimetypes.guess_type(fn)[0] or 'application/octet-stream'
|
python
|
def guess_mimetype(filename):
"""Guesses the mimetype of a file based on the given ``filename``.
.. code-block:: python
>>> guess_mimetype('example.txt')
'text/plain'
>>> guess_mimetype('/foo/bar/example')
'application/octet-stream'
Parameters
----------
filename : str
The file name or path for which the mimetype is to be guessed
"""
fn = os.path.basename(filename)
return mimetypes.guess_type(fn)[0] or 'application/octet-stream'
|
[
"def",
"guess_mimetype",
"(",
"filename",
")",
":",
"fn",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"return",
"mimetypes",
".",
"guess_type",
"(",
"fn",
")",
"[",
"0",
"]",
"or",
"'application/octet-stream'"
] |
Guesses the mimetype of a file based on the given ``filename``.
.. code-block:: python
>>> guess_mimetype('example.txt')
'text/plain'
>>> guess_mimetype('/foo/bar/example')
'application/octet-stream'
Parameters
----------
filename : str
The file name or path for which the mimetype is to be guessed
|
[
"Guesses",
"the",
"mimetype",
"of",
"a",
"file",
"based",
"on",
"the",
"given",
"filename",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/utils.py#L13-L29
|
234,732
|
ipfs/py-ipfs-api
|
ipfsapi/utils.py
|
ls_dir
|
def ls_dir(dirname):
"""Returns files and subdirectories within a given directory.
Returns a pair of lists, containing the names of the files and the
directories in ``dirname``, in that order.
Raises
------
OSError : Accessing the given directory path failed
Parameters
----------
dirname : str
The path of the directory to be listed
"""
ls = os.listdir(dirname)
files = [p for p in ls if os.path.isfile(os.path.join(dirname, p))]
dirs = [p for p in ls if os.path.isdir(os.path.join(dirname, p))]
return files, dirs
|
python
|
def ls_dir(dirname):
"""Returns files and subdirectories within a given directory.
Returns a pair of lists, containing the names of the files and the
directories in ``dirname``, in that order.
Raises
------
OSError : Accessing the given directory path failed
Parameters
----------
dirname : str
The path of the directory to be listed
"""
ls = os.listdir(dirname)
files = [p for p in ls if os.path.isfile(os.path.join(dirname, p))]
dirs = [p for p in ls if os.path.isdir(os.path.join(dirname, p))]
return files, dirs
|
[
"def",
"ls_dir",
"(",
"dirname",
")",
":",
"ls",
"=",
"os",
".",
"listdir",
"(",
"dirname",
")",
"files",
"=",
"[",
"p",
"for",
"p",
"in",
"ls",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"p",
")",
")",
"]",
"dirs",
"=",
"[",
"p",
"for",
"p",
"in",
"ls",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"p",
")",
")",
"]",
"return",
"files",
",",
"dirs"
] |
Returns files and subdirectories within a given directory.
Returns a pair of lists, containing the names of the files and the
directories in ``dirname``, in that order.
Raises
------
OSError : Accessing the given directory path failed
Parameters
----------
dirname : str
The path of the directory to be listed
|
[
"Returns",
"files",
"and",
"subdirectories",
"within",
"a",
"given",
"directory",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/utils.py#L32-L50
|
234,733
|
ipfs/py-ipfs-api
|
ipfsapi/utils.py
|
clean_files
|
def clean_files(files):
"""Generates tuples with a ``file``-like object and a close indicator.
This is a generator of tuples, where the first element is the file object
and the second element is a boolean which is True if this module opened the
file (and thus should close it).
Raises
------
OSError : Accessing the given file path failed
Parameters
----------
files : list | io.IOBase | str
Collection or single instance of a filepath or file-like object
"""
if isinstance(files, (list, tuple)):
for f in files:
yield clean_file(f)
else:
yield clean_file(files)
|
python
|
def clean_files(files):
"""Generates tuples with a ``file``-like object and a close indicator.
This is a generator of tuples, where the first element is the file object
and the second element is a boolean which is True if this module opened the
file (and thus should close it).
Raises
------
OSError : Accessing the given file path failed
Parameters
----------
files : list | io.IOBase | str
Collection or single instance of a filepath or file-like object
"""
if isinstance(files, (list, tuple)):
for f in files:
yield clean_file(f)
else:
yield clean_file(files)
|
[
"def",
"clean_files",
"(",
"files",
")",
":",
"if",
"isinstance",
"(",
"files",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"f",
"in",
"files",
":",
"yield",
"clean_file",
"(",
"f",
")",
"else",
":",
"yield",
"clean_file",
"(",
"files",
")"
] |
Generates tuples with a ``file``-like object and a close indicator.
This is a generator of tuples, where the first element is the file object
and the second element is a boolean which is True if this module opened the
file (and thus should close it).
Raises
------
OSError : Accessing the given file path failed
Parameters
----------
files : list | io.IOBase | str
Collection or single instance of a filepath or file-like object
|
[
"Generates",
"tuples",
"with",
"a",
"file",
"-",
"like",
"object",
"and",
"a",
"close",
"indicator",
"."
] |
7574dad04877b45dbe4ad321dcfa9e880eb2d90c
|
https://github.com/ipfs/py-ipfs-api/blob/7574dad04877b45dbe4ad321dcfa9e880eb2d90c/ipfsapi/utils.py#L75-L95
|
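A consumption sketch for the generator above: the boolean tells the caller whether it is responsible for closing the handle (the filenames are assumptions):
.. code-block:: python
from ipfsapi.utils import clean_files
for handle, opened_here in clean_files(["a.txt", "b.txt"]):
    try:
        data = handle.read()
    finally:
        if opened_here:  # only close handles this module opened
            handle.close()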
234,734
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/cli.py
|
merge
|
def merge(directory, message, branch_label, rev_id, revisions):
"""Merge two revisions together, creating a new revision file"""
_merge(directory, revisions, message, branch_label, rev_id)
|
python
|
def merge(directory, message, branch_label, rev_id, revisions):
"""Merge two revisions together, creating a new revision file"""
_merge(directory, revisions, message, branch_label, rev_id)
|
[
"def",
"merge",
"(",
"directory",
",",
"message",
",",
"branch_label",
",",
"rev_id",
",",
"revisions",
")",
":",
"_merge",
"(",
"directory",
",",
"revisions",
",",
"message",
",",
"branch_label",
",",
"rev_id",
")"
] |
Merge two revisions together, creating a new revision file
|
[
"Merge",
"two",
"revisions",
"together",
"creating",
"a",
"new",
"revision",
"file"
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/cli.py#L114-L116
|
234,735
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/cli.py
|
downgrade
|
def downgrade(directory, sql, tag, x_arg, revision):
"""Revert to a previous version"""
_downgrade(directory, revision, sql, tag, x_arg)
|
python
|
def downgrade(directory, sql, tag, x_arg, revision):
"""Revert to a previous version"""
_downgrade(directory, revision, sql, tag, x_arg)
|
[
"def",
"downgrade",
"(",
"directory",
",",
"sql",
",",
"tag",
",",
"x_arg",
",",
"revision",
")",
":",
"_downgrade",
"(",
"directory",
",",
"revision",
",",
"sql",
",",
"tag",
",",
"x_arg",
")"
] |
Revert to a previous version
|
[
"Revert",
"to",
"a",
"previous",
"version"
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/cli.py#L150-L152
|
234,736
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/templates/flask-multidb/env.py
|
get_metadata
|
def get_metadata(bind):
"""Return the metadata for a bind."""
if bind == '':
bind = None
m = MetaData()
for t in target_metadata.tables.values():
if t.info.get('bind_key') == bind:
t.tometadata(m)
return m
|
python
|
def get_metadata(bind):
"""Return the metadata for a bind."""
if bind == '':
bind = None
m = MetaData()
for t in target_metadata.tables.values():
if t.info.get('bind_key') == bind:
t.tometadata(m)
return m
|
[
"def",
"get_metadata",
"(",
"bind",
")",
":",
"if",
"bind",
"==",
"''",
":",
"bind",
"=",
"None",
"m",
"=",
"MetaData",
"(",
")",
"for",
"t",
"in",
"target_metadata",
".",
"tables",
".",
"values",
"(",
")",
":",
"if",
"t",
".",
"info",
".",
"get",
"(",
"'bind_key'",
")",
"==",
"bind",
":",
"t",
".",
"tometadata",
"(",
"m",
")",
"return",
"m"
] |
Return the metadata for a bind.
|
[
"Return",
"the",
"metadata",
"for",
"a",
"bind",
"."
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/templates/flask-multidb/env.py#L44-L52
|
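The ``bind_key`` filter relies on Flask-SQLAlchemy copying a model's ``__bind_key__`` into ``Table.info``; a hedged sketch of a model that ``get_metadata('users')`` would pick up (all names and URIs are assumptions):
.. code-block:: python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
app.config['SQLALCHEMY_BINDS'] = {'users': 'sqlite://'}
db = SQLAlchemy(app)
class User(db.Model):
    __bind_key__ = 'users'  # Flask-SQLAlchemy stores this in Table.info['bind_key']
    id = db.Column(db.Integer, primary_key=True)
# get_metadata('users') collects User's table; get_metadata('')
# matches tables on the default bind (bind_key of None).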
234,737
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/__init__.py
|
init
|
def init(directory=None, multidb=False):
"""Creates a new migration repository"""
if directory is None:
directory = current_app.extensions['migrate'].directory
config = Config()
config.set_main_option('script_location', directory)
config.config_file_name = os.path.join(directory, 'alembic.ini')
config = current_app.extensions['migrate'].\
migrate.call_configure_callbacks(config)
if multidb:
command.init(config, directory, 'flask-multidb')
else:
command.init(config, directory, 'flask')
|
python
|
def init(directory=None, multidb=False):
"""Creates a new migration repository"""
if directory is None:
directory = current_app.extensions['migrate'].directory
config = Config()
config.set_main_option('script_location', directory)
config.config_file_name = os.path.join(directory, 'alembic.ini')
config = current_app.extensions['migrate'].\
migrate.call_configure_callbacks(config)
if multidb:
command.init(config, directory, 'flask-multidb')
else:
command.init(config, directory, 'flask')
|
[
"def",
"init",
"(",
"directory",
"=",
"None",
",",
"multidb",
"=",
"False",
")",
":",
"if",
"directory",
"is",
"None",
":",
"directory",
"=",
"current_app",
".",
"extensions",
"[",
"'migrate'",
"]",
".",
"directory",
"config",
"=",
"Config",
"(",
")",
"config",
".",
"set_main_option",
"(",
"'script_location'",
",",
"directory",
")",
"config",
".",
"config_file_name",
"=",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"'alembic.ini'",
")",
"config",
"=",
"current_app",
".",
"extensions",
"[",
"'migrate'",
"]",
".",
"migrate",
".",
"call_configure_callbacks",
"(",
"config",
")",
"if",
"multidb",
":",
"command",
".",
"init",
"(",
"config",
",",
"directory",
",",
"'flask-multidb'",
")",
"else",
":",
"command",
".",
"init",
"(",
"config",
",",
"directory",
",",
"'flask'",
")"
] |
Creates a new migration repository
|
[
"Creates",
"a",
"new",
"migration",
"repository"
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/__init__.py#L122-L134
|
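The same function backs the ``flask db init`` CLI command; a programmatic sketch inside an application context (the database URI is an assumption):
.. code-block:: python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate, init
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
db = SQLAlchemy(app)
Migrate(app, db)  # registers the 'migrate' extension that init() reads
with app.app_context():
    init()                # creates ./migrations from the 'flask' template
    # init(multidb=True)  # would use the 'flask-multidb' template instead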
234,738
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/__init__.py
|
edit
|
def edit(directory=None, revision='current'):
"""Edit current revision."""
if alembic_version >= (0, 8, 0):
config = current_app.extensions['migrate'].migrate.get_config(
directory)
command.edit(config, revision)
else:
raise RuntimeError('Alembic 0.8.0 or greater is required')
|
python
|
def edit(directory=None, revision='current'):
"""Edit current revision."""
if alembic_version >= (0, 8, 0):
config = current_app.extensions['migrate'].migrate.get_config(
directory)
command.edit(config, revision)
else:
raise RuntimeError('Alembic 0.8.0 or greater is required')
|
[
"def",
"edit",
"(",
"directory",
"=",
"None",
",",
"revision",
"=",
"'current'",
")",
":",
"if",
"alembic_version",
">=",
"(",
"0",
",",
"8",
",",
"0",
")",
":",
"config",
"=",
"current_app",
".",
"extensions",
"[",
"'migrate'",
"]",
".",
"migrate",
".",
"get_config",
"(",
"directory",
")",
"command",
".",
"edit",
"(",
"config",
",",
"revision",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Alembic 0.8.0 or greater is required'",
")"
] |
Edit current revision.
|
[
"Edit",
"current",
"revision",
"."
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/__init__.py#L226-L233
|
234,739
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/__init__.py
|
merge
|
def merge(directory=None, revisions='', message=None, branch_label=None,
rev_id=None):
"""Merge two revisions together. Creates a new migration file"""
if alembic_version >= (0, 7, 0):
config = current_app.extensions['migrate'].migrate.get_config(
directory)
command.merge(config, revisions, message=message,
branch_label=branch_label, rev_id=rev_id)
else:
raise RuntimeError('Alembic 0.7.0 or greater is required')
|
python
|
def merge(directory=None, revisions='', message=None, branch_label=None,
rev_id=None):
"""Merge two revisions together. Creates a new migration file"""
if alembic_version >= (0, 7, 0):
config = current_app.extensions['migrate'].migrate.get_config(
directory)
command.merge(config, revisions, message=message,
branch_label=branch_label, rev_id=rev_id)
else:
raise RuntimeError('Alembic 0.7.0 or greater is required')
|
[
"def",
"merge",
"(",
"directory",
"=",
"None",
",",
"revisions",
"=",
"''",
",",
"message",
"=",
"None",
",",
"branch_label",
"=",
"None",
",",
"rev_id",
"=",
"None",
")",
":",
"if",
"alembic_version",
">=",
"(",
"0",
",",
"7",
",",
"0",
")",
":",
"config",
"=",
"current_app",
".",
"extensions",
"[",
"'migrate'",
"]",
".",
"migrate",
".",
"get_config",
"(",
"directory",
")",
"command",
".",
"merge",
"(",
"config",
",",
"revisions",
",",
"message",
"=",
"message",
",",
"branch_label",
"=",
"branch_label",
",",
"rev_id",
"=",
"rev_id",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Alembic 0.7.0 or greater is required'",
")"
] |
Merge two revisions together. Creates a new migration file
|
[
"Merge",
"two",
"revisions",
"together",
".",
"Creates",
"a",
"new",
"migration",
"file"
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/__init__.py#L249-L258
|
234,740
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/__init__.py
|
heads
|
def heads(directory=None, verbose=False, resolve_dependencies=False):
"""Show current available heads in the script directory"""
if alembic_version >= (0, 7, 0):
config = current_app.extensions['migrate'].migrate.get_config(
directory)
command.heads(config, verbose=verbose,
resolve_dependencies=resolve_dependencies)
else:
raise RuntimeError('Alembic 0.7.0 or greater is required')
|
python
|
def heads(directory=None, verbose=False, resolve_dependencies=False):
"""Show current available heads in the script directory"""
if alembic_version >= (0, 7, 0):
config = current_app.extensions['migrate'].migrate.get_config(
directory)
command.heads(config, verbose=verbose,
resolve_dependencies=resolve_dependencies)
else:
raise RuntimeError('Alembic 0.7.0 or greater is required')
|
[
"def",
"heads",
"(",
"directory",
"=",
"None",
",",
"verbose",
"=",
"False",
",",
"resolve_dependencies",
"=",
"False",
")",
":",
"if",
"alembic_version",
">=",
"(",
"0",
",",
"7",
",",
"0",
")",
":",
"config",
"=",
"current_app",
".",
"extensions",
"[",
"'migrate'",
"]",
".",
"migrate",
".",
"get_config",
"(",
"directory",
")",
"command",
".",
"heads",
"(",
"config",
",",
"verbose",
"=",
"verbose",
",",
"resolve_dependencies",
"=",
"resolve_dependencies",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Alembic 0.7.0 or greater is required'",
")"
] |
Show current available heads in the script directory
|
[
"Show",
"current",
"available",
"heads",
"in",
"the",
"script",
"directory"
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/__init__.py#L353-L361
|
234,741
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/__init__.py
|
branches
|
def branches(directory=None, verbose=False):
"""Show current branch points"""
config = current_app.extensions['migrate'].migrate.get_config(directory)
if alembic_version >= (0, 7, 0):
command.branches(config, verbose=verbose)
else:
command.branches(config)
|
python
|
def branches(directory=None, verbose=False):
"""Show current branch points"""
config = current_app.extensions['migrate'].migrate.get_config(directory)
if alembic_version >= (0, 7, 0):
command.branches(config, verbose=verbose)
else:
command.branches(config)
|
[
"def",
"branches",
"(",
"directory",
"=",
"None",
",",
"verbose",
"=",
"False",
")",
":",
"config",
"=",
"current_app",
".",
"extensions",
"[",
"'migrate'",
"]",
".",
"migrate",
".",
"get_config",
"(",
"directory",
")",
"if",
"alembic_version",
">=",
"(",
"0",
",",
"7",
",",
"0",
")",
":",
"command",
".",
"branches",
"(",
"config",
",",
"verbose",
"=",
"verbose",
")",
"else",
":",
"command",
".",
"branches",
"(",
"config",
")"
] |
Show current branch points
|
[
"Show",
"current",
"branch",
"points"
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/__init__.py#L370-L376
|
234,742
|
miguelgrinberg/Flask-Migrate
|
flask_migrate/__init__.py
|
current
|
def current(directory=None, verbose=False, head_only=False):
"""Display the current revision for each database."""
config = current_app.extensions['migrate'].migrate.get_config(directory)
if alembic_version >= (0, 7, 0):
command.current(config, verbose=verbose, head_only=head_only)
else:
command.current(config)
|
python
|
def current(directory=None, verbose=False, head_only=False):
"""Display the current revision for each database."""
config = current_app.extensions['migrate'].migrate.get_config(directory)
if alembic_version >= (0, 7, 0):
command.current(config, verbose=verbose, head_only=head_only)
else:
command.current(config)
|
[
"def",
"current",
"(",
"directory",
"=",
"None",
",",
"verbose",
"=",
"False",
",",
"head_only",
"=",
"False",
")",
":",
"config",
"=",
"current_app",
".",
"extensions",
"[",
"'migrate'",
"]",
".",
"migrate",
".",
"get_config",
"(",
"directory",
")",
"if",
"alembic_version",
">=",
"(",
"0",
",",
"7",
",",
"0",
")",
":",
"command",
".",
"current",
"(",
"config",
",",
"verbose",
"=",
"verbose",
",",
"head_only",
"=",
"head_only",
")",
"else",
":",
"command",
".",
"current",
"(",
"config",
")"
] |
Display the current revision for each database.
|
[
"Display",
"the",
"current",
"revision",
"for",
"each",
"database",
"."
] |
65fbd978681bdf2eddf8940edd04ed7272a94480
|
https://github.com/miguelgrinberg/Flask-Migrate/blob/65fbd978681bdf2eddf8940edd04ed7272a94480/flask_migrate/__init__.py#L388-L394
|
234,743
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.to_json
|
def to_json(self, content, pretty_print=False):
""" Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON in pretty print format
"""
if PY3:
if isinstance(content, bytes):
content = content.decode(encoding='utf-8')
if pretty_print:
json_ = self._json_pretty_print(content)
else:
json_ = json.loads(content)
logger.info('To JSON using : content=%s ' % (content))
logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
return json_
|
python
|
def to_json(self, content, pretty_print=False):
""" Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON in pretty print format
"""
if PY3:
if isinstance(content, bytes):
content = content.decode(encoding='utf-8')
if pretty_print:
json_ = self._json_pretty_print(content)
else:
json_ = json.loads(content)
logger.info('To JSON using : content=%s ' % (content))
logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
return json_
|
[
"def",
"to_json",
"(",
"self",
",",
"content",
",",
"pretty_print",
"=",
"False",
")",
":",
"if",
"PY3",
":",
"if",
"isinstance",
"(",
"content",
",",
"bytes",
")",
":",
"content",
"=",
"content",
".",
"decode",
"(",
"encoding",
"=",
"'utf-8'",
")",
"if",
"pretty_print",
":",
"json_",
"=",
"self",
".",
"_json_pretty_print",
"(",
"content",
")",
"else",
":",
"json_",
"=",
"json",
".",
"loads",
"(",
"content",
")",
"logger",
".",
"info",
"(",
"'To JSON using : content=%s '",
"%",
"(",
"content",
")",
")",
"logger",
".",
"info",
"(",
"'To JSON using : pretty_print=%s '",
"%",
"(",
"pretty_print",
")",
")",
"return",
"json_"
] |
Convert a string to a JSON object
``content`` String content to convert into JSON
``pretty_print`` If defined, will output JSON in pretty print format
|
[
"Convert",
"a",
"string",
"to",
"a",
"JSON",
"object"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L460-L477
|
234,744
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.get_request
|
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
""" Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
|
python
|
def get_request(
self,
alias,
uri,
headers=None,
json=None,
params=None,
allow_redirects=None,
timeout=None):
""" Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._get_request(
session, uri, params, headers, json, redir, timeout)
logger.info(
'Get Request using : alias=%s, uri=%s, headers=%s json=%s' %
(alias, uri, headers, json))
return response
|
[
"def",
"get_request",
"(",
"self",
",",
"alias",
",",
"uri",
",",
"headers",
"=",
"None",
",",
"json",
"=",
"None",
",",
"params",
"=",
"None",
",",
"allow_redirects",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"session",
"=",
"self",
".",
"_cache",
".",
"switch",
"(",
"alias",
")",
"redir",
"=",
"True",
"if",
"allow_redirects",
"is",
"None",
"else",
"allow_redirects",
"response",
"=",
"self",
".",
"_get_request",
"(",
"session",
",",
"uri",
",",
"params",
",",
"headers",
",",
"json",
",",
"redir",
",",
"timeout",
")",
"logger",
".",
"info",
"(",
"'Get Request using : alias=%s, uri=%s, headers=%s json=%s'",
"%",
"(",
"alias",
",",
"uri",
",",
"headers",
",",
"json",
")",
")",
"return",
"response"
] |
Send a GET request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the GET request to
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``json`` json data to send in the body of the :class:`Request`.
``allow_redirects`` Boolean. Set to True if redirect following is allowed.
``timeout`` connection timeout
|
[
"Send",
"a",
"GET",
"request",
"on",
"the",
"session",
"object",
"found",
"using",
"the",
"given",
"alias"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L479-L515
|
234,745
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.post_request
|
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary mapping file names to file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
|
python
|
def post_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
files=None,
allow_redirects=None,
timeout=None):
""" Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
if not files:
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._body_request(
"post",
session,
uri,
data,
json,
params,
files,
headers,
redir,
timeout)
dataStr = self._format_data_to_log_string_according_to_header(data, headers)
logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '
% (alias, uri, dataStr, headers, files, redir))
return response
|
[
"def",
"post_request",
"(",
"self",
",",
"alias",
",",
"uri",
",",
"data",
"=",
"None",
",",
"json",
"=",
"None",
",",
"params",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"files",
"=",
"None",
",",
"allow_redirects",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"session",
"=",
"self",
".",
"_cache",
".",
"switch",
"(",
"alias",
")",
"if",
"not",
"files",
":",
"data",
"=",
"self",
".",
"_format_data_according_to_header",
"(",
"session",
",",
"data",
",",
"headers",
")",
"redir",
"=",
"True",
"if",
"allow_redirects",
"is",
"None",
"else",
"allow_redirects",
"response",
"=",
"self",
".",
"_body_request",
"(",
"\"post\"",
",",
"session",
",",
"uri",
",",
"data",
",",
"json",
",",
"params",
",",
"files",
",",
"headers",
",",
"redir",
",",
"timeout",
")",
"dataStr",
"=",
"self",
".",
"_format_data_to_log_string_according_to_header",
"(",
"data",
",",
"headers",
")",
"logger",
".",
"info",
"(",
"'Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s '",
"%",
"(",
"alias",
",",
"uri",
",",
"dataStr",
",",
"headers",
",",
"files",
",",
"redir",
")",
")",
"return",
"response"
] |
Send a POST request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the POST request to
``data`` a dictionary of key-value pairs that will be urlencoded
and sent as POST data
or binary data that is sent as the raw body content
or passed as such for multipart form data if ``files`` is also
defined
``json`` a value that will be json encoded
and sent as POST data if files or data is not specified
``params`` url parameters to append to the uri
``headers`` a dictionary of headers to use with the request
``files`` a dictionary of file names containing file data to POST to the server
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
|
[
"Send",
"a",
"POST",
"request",
"on",
"the",
"session",
"object",
"found",
"using",
"the",
"given",
"alias"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L550-L607
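Continuing the same hedged sketch for the POST keyword: with `files` and `data` left unset, the `json` argument is encoded and sent as the request body.

# Sketch only, reusing the session created in the GET sketch above.
response = lib.post_request(
    'httpbin', '/post',
    json={'name': 'example'},
    headers={'Content-Type': 'application/json'})
print(response.status_code)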
|
234,746
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.delete_request
|
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
|
python
|
def delete_request(
self,
alias,
uri,
data=None,
json=None,
params=None,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
"""
session = self._cache.switch(alias)
data = self._format_data_according_to_header(session, data, headers)
redir = True if allow_redirects is None else allow_redirects
response = self._delete_request(
session, uri, data, json, params, headers, redir, timeout)
if isinstance(data, bytes):
data = data.decode('utf-8')
logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \
headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir))
return response
|
[
"def",
"delete_request",
"(",
"self",
",",
"alias",
",",
"uri",
",",
"data",
"=",
"None",
",",
"json",
"=",
"None",
",",
"params",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"allow_redirects",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"session",
"=",
"self",
".",
"_cache",
".",
"switch",
"(",
"alias",
")",
"data",
"=",
"self",
".",
"_format_data_according_to_header",
"(",
"session",
",",
"data",
",",
"headers",
")",
"redir",
"=",
"True",
"if",
"allow_redirects",
"is",
"None",
"else",
"allow_redirects",
"response",
"=",
"self",
".",
"_delete_request",
"(",
"session",
",",
"uri",
",",
"data",
",",
"json",
",",
"params",
",",
"headers",
",",
"redir",
",",
"timeout",
")",
"if",
"isinstance",
"(",
"data",
",",
"bytes",
")",
":",
"data",
"=",
"data",
".",
"decode",
"(",
"'utf-8'",
")",
"logger",
".",
"info",
"(",
"'Delete Request using : alias=%s, uri=%s, data=%s, \\\n headers=%s, allow_redirects=%s '",
"%",
"(",
"alias",
",",
"uri",
",",
"data",
",",
"headers",
",",
"redir",
")",
")",
"return",
"response"
] |
Send a DELETE request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the DELETE request to
``json`` a value that will be json encoded
and sent as request data if data is not specified
``headers`` a dictionary of headers to use with the request
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``timeout`` connection timeout
|
[
"Send",
"a",
"DELETE",
"request",
"on",
"the",
"session",
"object",
"found",
"using",
"the",
"given",
"alias"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L864-L902
|
234,747
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.head_request
|
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
|
python
|
def head_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = False if allow_redirects is None else allow_redirects
response = self._head_request(session, uri, headers, redir, timeout)
logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \
allow_redirects=%s ' % (alias, uri, headers, redir))
return response
|
[
"def",
"head_request",
"(",
"self",
",",
"alias",
",",
"uri",
",",
"headers",
"=",
"None",
",",
"allow_redirects",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"session",
"=",
"self",
".",
"_cache",
".",
"switch",
"(",
"alias",
")",
"redir",
"=",
"False",
"if",
"allow_redirects",
"is",
"None",
"else",
"allow_redirects",
"response",
"=",
"self",
".",
"_head_request",
"(",
"session",
",",
"uri",
",",
"headers",
",",
"redir",
",",
"timeout",
")",
"logger",
".",
"info",
"(",
"'Head Request using : alias=%s, uri=%s, headers=%s, \\\n allow_redirects=%s '",
"%",
"(",
"alias",
",",
"uri",
",",
"headers",
",",
"redir",
")",
")",
"return",
"response"
] |
Send a HEAD request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the HEAD request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
|
[
"Send",
"a",
"HEAD",
"request",
"on",
"the",
"session",
"object",
"found",
"using",
"the",
"given",
"alias"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L937-L961
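Note the one behavioural difference from the keywords above: for HEAD, `redir` defaults to False when `allow_redirects` is None, mirroring the default of `requests.head`. A hedged sketch against a hypothetical redirecting endpoint:

# Sketch only: HEAD does not follow redirects unless asked to.
response = lib.head_request('httpbin', '/redirect/1')
print(response.status_code)   # the 3xx response itself
response = lib.head_request('httpbin', '/redirect/1', allow_redirects=True)
print(response.status_code)   # final response after following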
|
234,748
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords.options_request
|
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
|
python
|
def options_request(
self,
alias,
uri,
headers=None,
allow_redirects=None,
timeout=None):
""" Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
"""
session = self._cache.switch(alias)
redir = True if allow_redirects is None else allow_redirects
response = self._options_request(session, uri, headers, redir, timeout)
logger.info(
'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' %
(alias, uri, headers, redir))
return response
|
[
"def",
"options_request",
"(",
"self",
",",
"alias",
",",
"uri",
",",
"headers",
"=",
"None",
",",
"allow_redirects",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"session",
"=",
"self",
".",
"_cache",
".",
"switch",
"(",
"alias",
")",
"redir",
"=",
"True",
"if",
"allow_redirects",
"is",
"None",
"else",
"allow_redirects",
"response",
"=",
"self",
".",
"_options_request",
"(",
"session",
",",
"uri",
",",
"headers",
",",
"redir",
",",
"timeout",
")",
"logger",
".",
"info",
"(",
"'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s '",
"%",
"(",
"alias",
",",
"uri",
",",
"headers",
",",
"redir",
")",
")",
"return",
"response"
] |
Send an OPTIONS request on the session object found using the
given `alias`
``alias`` that will be used to identify the Session object in the cache
``uri`` to send the OPTIONS request to
``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
``headers`` a dictionary of headers to use with the request
|
[
"Send",
"an",
"OPTIONS",
"request",
"on",
"the",
"session",
"object",
"found",
"using",
"the",
"given",
"alias"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L990-L1015
|
234,749
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords._get_url
|
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
|
python
|
def _get_url(self, session, uri):
"""
Helper method to get the full url
"""
url = session.url
if uri:
slash = '' if uri.startswith('/') else '/'
url = "%s%s%s" % (session.url, slash, uri)
return url
|
[
"def",
"_get_url",
"(",
"self",
",",
"session",
",",
"uri",
")",
":",
"url",
"=",
"session",
".",
"url",
"if",
"uri",
":",
"slash",
"=",
"''",
"if",
"uri",
".",
"startswith",
"(",
"'/'",
")",
"else",
"'/'",
"url",
"=",
"\"%s%s%s\"",
"%",
"(",
"session",
".",
"url",
",",
"slash",
",",
"uri",
")",
"return",
"url"
] |
Helper method to get the full url
|
[
"Helper",
"method",
"to",
"get",
"the",
"full",
"url"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L1174-L1182
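The slash rule in `_get_url` only ever inserts a separator; it does not deduplicate one when both the session URL and the uri supply it. A standalone restatement of the same rule (the `join_url` name is hypothetical):

def join_url(base, uri):
    """Join base and uri, adding a '/' only when uri doesn't start with one."""
    if not uri:
        return base
    slash = '' if uri.startswith('/') else '/'
    return "%s%s%s" % (base, slash, uri)

assert join_url('http://host/api', 'users') == 'http://host/api/users'
assert join_url('http://host/api', '/users') == 'http://host/api/users'
assert join_url('http://host/api', '') == 'http://host/api'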
|
234,750
|
bulkan/robotframework-requests
|
src/RequestsLibrary/RequestsKeywords.py
|
RequestsKeywords._json_pretty_print
|
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
|
python
|
def _json_pretty_print(self, content):
"""
Pretty print a JSON object
``content`` JSON object to pretty print
"""
temp = json.loads(content)
return json.dumps(
temp,
sort_keys=True,
indent=4,
separators=(
',',
': '))
|
[
"def",
"_json_pretty_print",
"(",
"self",
",",
"content",
")",
":",
"temp",
"=",
"json",
".",
"loads",
"(",
"content",
")",
"return",
"json",
".",
"dumps",
"(",
"temp",
",",
"sort_keys",
"=",
"True",
",",
"indent",
"=",
"4",
",",
"separators",
"=",
"(",
"','",
",",
"': '",
")",
")"
] |
Pretty print a JSON object
``content`` JSON object to pretty print
|
[
"Pretty",
"print",
"a",
"JSON",
"object"
] |
11baa3277f1cb728712e26d996200703c15254a8
|
https://github.com/bulkan/robotframework-requests/blob/11baa3277f1cb728712e26d996200703c15254a8/src/RequestsLibrary/RequestsKeywords.py#L1215-L1228
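The same four `json.dumps` arguments can be tried directly; this is just the standard library, nothing library-specific:

import json

raw = '{"b": 1, "a": [2, 3]}'
print(json.dumps(json.loads(raw), sort_keys=True, indent=4,
                 separators=(',', ': ')))
# {
#     "a": [
#         2,
#         3
#     ],
#     "b": 1
# }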
|
234,751
|
coddingtonbear/python-myfitnesspal
|
myfitnesspal/client.py
|
Client.get_measurements
|
def get_measurements(
self, measurement='Weight', lower_bound=None, upper_bound=None
):
""" Returns measurements of a given name between two dates."""
if upper_bound is None:
upper_bound = datetime.date.today()
if lower_bound is None:
lower_bound = upper_bound - datetime.timedelta(days=30)
# If they entered the dates in the opposite order, let's
# just flip them around for them as a convenience
if lower_bound > upper_bound:
lower_bound, upper_bound = upper_bound, lower_bound
# get the URL for the main check in page
document = self._get_document_for_url(
self._get_url_for_measurements()
)
# gather the IDs for all measurement types
measurement_ids = self._get_measurement_ids(document)
# select the measurement ID based on the input
if measurement in measurement_ids.keys():
measurement_id = measurement_ids[measurement]
else:
raise ValueError(
"Measurement '%s' does not exist." % measurement
)
page = 1
measurements = OrderedDict()
# retrieve entries until finished
while True:
# retrieve the HTML from MyFitnessPal
document = self._get_document_for_url(
self._get_url_for_measurements(page, measurement_id)
)
# parse the HTML for measurement entries and add to dictionary
results = self._get_measurements(document)
measurements.update(results)
# stop if there are no more entries
if len(results) == 0:
break
# continue if the lower bound has not been reached
elif list(results.keys())[-1] > lower_bound:
page += 1
continue
# otherwise stop
else:
break
# remove entries that are not within the dates specified
for date in list(measurements.keys()):
if not upper_bound >= date >= lower_bound:
del measurements[date]
return measurements
|
python
|
def get_measurements(
self, measurement='Weight', lower_bound=None, upper_bound=None
):
""" Returns measurements of a given name between two dates."""
if upper_bound is None:
upper_bound = datetime.date.today()
if lower_bound is None:
lower_bound = upper_bound - datetime.timedelta(days=30)
# If they entered the dates in the opposite order, let's
# just flip them around for them as a convenience
if lower_bound > upper_bound:
lower_bound, upper_bound = upper_bound, lower_bound
# get the URL for the main check in page
document = self._get_document_for_url(
self._get_url_for_measurements()
)
# gather the IDs for all measurement types
measurement_ids = self._get_measurement_ids(document)
# select the measurement ID based on the input
if measurement in measurement_ids.keys():
measurement_id = measurement_ids[measurement]
else:
raise ValueError(
"Measurement '%s' does not exist." % measurement
)
page = 1
measurements = OrderedDict()
# retrieve entries until finished
while True:
# retrieve the HTML from MyFitnessPal
document = self._get_document_for_url(
self._get_url_for_measurements(page, measurement_id)
)
# parse the HTML for measurement entries and add to dictionary
results = self._get_measurements(document)
measurements.update(results)
# stop if there are no more entries
if len(results) == 0:
break
# continue if the lower bound has not been reached
elif list(results.keys())[-1] > lower_bound:
page += 1
continue
# otherwise stop
else:
break
# remove entries that are not within the dates specified
for date in list(measurements.keys()):
if not upper_bound >= date >= lower_bound:
del measurements[date]
return measurements
|
[
"def",
"get_measurements",
"(",
"self",
",",
"measurement",
"=",
"'Weight'",
",",
"lower_bound",
"=",
"None",
",",
"upper_bound",
"=",
"None",
")",
":",
"if",
"upper_bound",
"is",
"None",
":",
"upper_bound",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"if",
"lower_bound",
"is",
"None",
":",
"lower_bound",
"=",
"upper_bound",
"-",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"30",
")",
"# If they entered the dates in the opposite order, let's",
"# just flip them around for them as a convenience",
"if",
"lower_bound",
">",
"upper_bound",
":",
"lower_bound",
",",
"upper_bound",
"=",
"upper_bound",
",",
"lower_bound",
"# get the URL for the main check in page",
"document",
"=",
"self",
".",
"_get_document_for_url",
"(",
"self",
".",
"_get_url_for_measurements",
"(",
")",
")",
"# gather the IDs for all measurement types",
"measurement_ids",
"=",
"self",
".",
"_get_measurement_ids",
"(",
"document",
")",
"# select the measurement ID based on the input",
"if",
"measurement",
"in",
"measurement_ids",
".",
"keys",
"(",
")",
":",
"measurement_id",
"=",
"measurement_ids",
"[",
"measurement",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"\"Measurement '%s' does not exist.\"",
"%",
"measurement",
")",
"page",
"=",
"1",
"measurements",
"=",
"OrderedDict",
"(",
")",
"# retrieve entries until finished",
"while",
"True",
":",
"# retrieve the HTML from MyFitnessPal",
"document",
"=",
"self",
".",
"_get_document_for_url",
"(",
"self",
".",
"_get_url_for_measurements",
"(",
"page",
",",
"measurement_id",
")",
")",
"# parse the HTML for measurement entries and add to dictionary",
"results",
"=",
"self",
".",
"_get_measurements",
"(",
"document",
")",
"measurements",
".",
"update",
"(",
"results",
")",
"# stop if there are no more entries",
"if",
"len",
"(",
"results",
")",
"==",
"0",
":",
"break",
"# continue if the lower bound has not been reached",
"elif",
"list",
"(",
"results",
".",
"keys",
"(",
")",
")",
"[",
"-",
"1",
"]",
">",
"lower_bound",
":",
"page",
"+=",
"1",
"continue",
"# otherwise stop",
"else",
":",
"break",
"# remove entries that are not within the dates specified",
"for",
"date",
"in",
"list",
"(",
"measurements",
".",
"keys",
"(",
")",
")",
":",
"if",
"not",
"upper_bound",
">=",
"date",
">=",
"lower_bound",
":",
"del",
"measurements",
"[",
"date",
"]",
"return",
"measurements"
] |
Returns measurements of a given name between two dates.
|
[
"Returns",
"measurements",
"of",
"a",
"given",
"name",
"between",
"two",
"dates",
"."
] |
29aad88d31adc025eacaddd3390cb521b6012b73
|
https://github.com/coddingtonbear/python-myfitnesspal/blob/29aad88d31adc025eacaddd3390cb521b6012b73/myfitnesspal/client.py#L524-L586
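A hedged usage sketch for the reader above. The `myfitnesspal.Client(username, password)` entry point is an assumption about the package's public API, not something this record shows; the credentials are placeholders.

import datetime
import myfitnesspal  # assumed top-level package exposing Client

client = myfitnesspal.Client('username', 'password')  # hypothetical credentials
weights = client.get_measurements(
    'Weight',
    lower_bound=datetime.date.today() - datetime.timedelta(days=7))
for day, value in weights.items():  # OrderedDict keyed by datetime.date
    print(day, value)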
|
234,752
|
coddingtonbear/python-myfitnesspal
|
myfitnesspal/client.py
|
Client.set_measurements
|
def set_measurements(
self, measurement='Weight', value=None
):
""" Sets measurement for today's date."""
if value is None:
raise ValueError(
"Cannot update blank value."
)
# get the URL for the main check in page
# this is left in because we need to parse
# the 'measurement' name to set the value.
document = self._get_document_for_url(
self._get_url_for_measurements()
)
# gather the IDs for all measurement types
measurement_ids = self._get_measurement_ids(document)
# check if the measurement exists before going too far
if measurement not in measurement_ids.keys():
raise ValueError(
"Measurement '%s' does not exist." % measurement
)
# build the update url.
update_url = parse.urljoin(
self.BASE_URL,
'measurements/save'
)
# setup a dict for the post
data = {}
# here's where we need that required element
data['authenticity_token'] = self._authenticity_token
        # Weight has its own key value pair
if measurement == 'Weight':
data['weight[display_value]'] = value
# the other measurements have generic names with
# an incrementing numeric index.
measurement_index = 0
# iterate all the measurement_ids
for measurement_id in measurement_ids.keys():
# create the measurement_type[n]
# key value pair
n = str(measurement_index)
meas_type = 'measurement_type[' + n + ']'
meas_val = 'measurement_value[' + n + ']'
data[meas_type] = measurement_ids[measurement_id]
# and if it corresponds to the value we want to update
if measurement == measurement_id:
# create the measurement_value[n]
# key value pair and assign it the value.
data[meas_val] = value
else:
# otherwise, create the key value pair and leave it blank
data[meas_val] = ""
measurement_index += 1
# now post it.
result = self.session.post(
update_url,
data=data
)
# throw an error if it failed.
if not result.ok:
raise RuntimeError(
"Unable to update measurement in MyFitnessPal: "
"status code: {status}".format(
status=result.status_code
)
)
|
python
|
def set_measurements(
self, measurement='Weight', value=None
):
""" Sets measurement for today's date."""
if value is None:
raise ValueError(
"Cannot update blank value."
)
# get the URL for the main check in page
# this is left in because we need to parse
# the 'measurement' name to set the value.
document = self._get_document_for_url(
self._get_url_for_measurements()
)
# gather the IDs for all measurement types
measurement_ids = self._get_measurement_ids(document)
# check if the measurement exists before going too far
if measurement not in measurement_ids.keys():
raise ValueError(
"Measurement '%s' does not exist." % measurement
)
# build the update url.
update_url = parse.urljoin(
self.BASE_URL,
'measurements/save'
)
# setup a dict for the post
data = {}
# here's where we need that required element
data['authenticity_token'] = self._authenticity_token
        # Weight has its own key value pair
if measurement == 'Weight':
data['weight[display_value]'] = value
# the other measurements have generic names with
# an incrementing numeric index.
measurement_index = 0
# iterate all the measurement_ids
for measurement_id in measurement_ids.keys():
# create the measurement_type[n]
# key value pair
n = str(measurement_index)
meas_type = 'measurement_type[' + n + ']'
meas_val = 'measurement_value[' + n + ']'
data[meas_type] = measurement_ids[measurement_id]
# and if it corresponds to the value we want to update
if measurement == measurement_id:
# create the measurement_value[n]
# key value pair and assign it the value.
data[meas_val] = value
else:
# otherwise, create the key value pair and leave it blank
data[meas_val] = ""
measurement_index += 1
# now post it.
result = self.session.post(
update_url,
data=data
)
# throw an error if it failed.
if not result.ok:
raise RuntimeError(
"Unable to update measurement in MyFitnessPal: "
"status code: {status}".format(
status=result.status_code
)
)
|
[
"def",
"set_measurements",
"(",
"self",
",",
"measurement",
"=",
"'Weight'",
",",
"value",
"=",
"None",
")",
":",
"if",
"value",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Cannot update blank value.\"",
")",
"# get the URL for the main check in page",
"# this is left in because we need to parse",
"# the 'measurement' name to set the value.",
"document",
"=",
"self",
".",
"_get_document_for_url",
"(",
"self",
".",
"_get_url_for_measurements",
"(",
")",
")",
"# gather the IDs for all measurement types",
"measurement_ids",
"=",
"self",
".",
"_get_measurement_ids",
"(",
"document",
")",
"# check if the measurement exists before going too far",
"if",
"measurement",
"not",
"in",
"measurement_ids",
".",
"keys",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"Measurement '%s' does not exist.\"",
"%",
"measurement",
")",
"# build the update url.",
"update_url",
"=",
"parse",
".",
"urljoin",
"(",
"self",
".",
"BASE_URL",
",",
"'measurements/save'",
")",
"# setup a dict for the post",
"data",
"=",
"{",
"}",
"# here's where we need that required element",
"data",
"[",
"'authenticity_token'",
"]",
"=",
"self",
".",
"_authenticity_token",
"# Weight has it's own key value pair",
"if",
"measurement",
"==",
"'Weight'",
":",
"data",
"[",
"'weight[display_value]'",
"]",
"=",
"value",
"# the other measurements have generic names with",
"# an incrementing numeric index.",
"measurement_index",
"=",
"0",
"# iterate all the measurement_ids",
"for",
"measurement_id",
"in",
"measurement_ids",
".",
"keys",
"(",
")",
":",
"# create the measurement_type[n]",
"# key value pair",
"n",
"=",
"str",
"(",
"measurement_index",
")",
"meas_type",
"=",
"'measurement_type['",
"+",
"n",
"+",
"']'",
"meas_val",
"=",
"'measurement_value['",
"+",
"n",
"+",
"']'",
"data",
"[",
"meas_type",
"]",
"=",
"measurement_ids",
"[",
"measurement_id",
"]",
"# and if it corresponds to the value we want to update",
"if",
"measurement",
"==",
"measurement_id",
":",
"# create the measurement_value[n]",
"# key value pair and assign it the value.",
"data",
"[",
"meas_val",
"]",
"=",
"value",
"else",
":",
"# otherwise, create the key value pair and leave it blank",
"data",
"[",
"meas_val",
"]",
"=",
"\"\"",
"measurement_index",
"+=",
"1",
"# now post it.",
"result",
"=",
"self",
".",
"session",
".",
"post",
"(",
"update_url",
",",
"data",
"=",
"data",
")",
"# throw an error if it failed.",
"if",
"not",
"result",
".",
"ok",
":",
"raise",
"RuntimeError",
"(",
"\"Unable to update measurement in MyFitnessPal: \"",
"\"status code: {status}\"",
".",
"format",
"(",
"status",
"=",
"result",
".",
"status_code",
")",
")"
] |
Sets measurement for today's date.
|
[
"Sets",
"measurement",
"for",
"today",
"s",
"date",
"."
] |
29aad88d31adc025eacaddd3390cb521b6012b73
|
https://github.com/coddingtonbear/python-myfitnesspal/blob/29aad88d31adc025eacaddd3390cb521b6012b73/myfitnesspal/client.py#L588-L667
|
234,753
|
coddingtonbear/python-myfitnesspal
|
myfitnesspal/client.py
|
Client.get_measurement_id_options
|
def get_measurement_id_options(self):
""" Returns list of measurement choices."""
# get the URL for the main check in page
document = self._get_document_for_url(
self._get_url_for_measurements()
)
# gather the IDs for all measurement types
measurement_ids = self._get_measurement_ids(document)
return measurement_ids
|
python
|
def get_measurement_id_options(self):
""" Returns list of measurement choices."""
# get the URL for the main check in page
document = self._get_document_for_url(
self._get_url_for_measurements()
)
# gather the IDs for all measurement types
measurement_ids = self._get_measurement_ids(document)
return measurement_ids
|
[
"def",
"get_measurement_id_options",
"(",
"self",
")",
":",
"# get the URL for the main check in page",
"document",
"=",
"self",
".",
"_get_document_for_url",
"(",
"self",
".",
"_get_url_for_measurements",
"(",
")",
")",
"# gather the IDs for all measurement types",
"measurement_ids",
"=",
"self",
".",
"_get_measurement_ids",
"(",
"document",
")",
"return",
"measurement_ids"
] |
Returns list of measurement choices.
|
[
"Returns",
"list",
"of",
"measurement",
"choices",
"."
] |
29aad88d31adc025eacaddd3390cb521b6012b73
|
https://github.com/coddingtonbear/python-myfitnesspal/blob/29aad88d31adc025eacaddd3390cb521b6012b73/myfitnesspal/client.py#L709-L718
|
234,754
|
joerick/pyinstrument
|
pyinstrument/__main__.py
|
file_supports_color
|
def file_supports_color(file_obj):
"""
Returns True if the running system's terminal supports color.
Borrowed from Django
https://github.com/django/django/blob/master/django/core/management/color.py
"""
plat = sys.platform
supported_platform = plat != 'Pocket PC' and (plat != 'win32' or
'ANSICON' in os.environ)
is_a_tty = file_is_a_tty(file_obj)
return (supported_platform and is_a_tty)
|
python
|
def file_supports_color(file_obj):
"""
Returns True if the running system's terminal supports color.
Borrowed from Django
https://github.com/django/django/blob/master/django/core/management/color.py
"""
plat = sys.platform
supported_platform = plat != 'Pocket PC' and (plat != 'win32' or
'ANSICON' in os.environ)
is_a_tty = file_is_a_tty(file_obj)
return (supported_platform and is_a_tty)
|
[
"def",
"file_supports_color",
"(",
"file_obj",
")",
":",
"plat",
"=",
"sys",
".",
"platform",
"supported_platform",
"=",
"plat",
"!=",
"'Pocket PC'",
"and",
"(",
"plat",
"!=",
"'win32'",
"or",
"'ANSICON'",
"in",
"os",
".",
"environ",
")",
"is_a_tty",
"=",
"file_is_a_tty",
"(",
"file_obj",
")",
"return",
"(",
"supported_platform",
"and",
"is_a_tty",
")"
] |
Returns True if the running system's terminal supports color.
Borrowed from Django
https://github.com/django/django/blob/master/django/core/management/color.py
|
[
"Returns",
"True",
"if",
"the",
"running",
"system",
"s",
"terminal",
"supports",
"color",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/__main__.py#L198-L211
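A sketch of calling the check on stdout. `file_is_a_tty` is a sibling helper in the same module; here it is reduced to the guarded `isatty()` test it presumably wraps (an assumption), so the snippet stands alone next to the function above.

import sys

def file_is_a_tty(file_obj):
    # assumed shape of the sibling helper: a guarded isatty() check
    return hasattr(file_obj, 'isatty') and file_obj.isatty()

if file_supports_color(sys.stdout):
    print('\x1b[32mcolour supported\x1b[0m')
else:
    print('plain output')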
|
234,755
|
joerick/pyinstrument
|
pyinstrument/__main__.py
|
load_report
|
def load_report(identifier=None):
'''
Returns the session referred to by identifier
'''
path = os.path.join(
report_dir(),
identifier + '.pyireport'
)
return ProfilerSession.load(path)
|
python
|
def load_report(identifier=None):
'''
Returns the session referred to by identifier
'''
path = os.path.join(
report_dir(),
identifier + '.pyireport'
)
return ProfilerSession.load(path)
|
[
"def",
"load_report",
"(",
"identifier",
"=",
"None",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"report_dir",
"(",
")",
",",
"identifier",
"+",
"'.pyireport'",
")",
"return",
"ProfilerSession",
".",
"load",
"(",
"path",
")"
] |
Returns the session referred to by identifier
|
[
"Returns",
"the",
"session",
"referred",
"to",
"by",
"identifier"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/__main__.py#L245-L253
|
234,756
|
joerick/pyinstrument
|
pyinstrument/__main__.py
|
save_report
|
def save_report(session):
'''
Saves the session to a temp file, and returns that path.
Also prunes the number of reports to 10 so there aren't loads building up.
'''
# prune this folder to contain the last 10 sessions
previous_reports = glob.glob(os.path.join(report_dir(), '*.pyireport'))
previous_reports.sort(reverse=True)
while len(previous_reports) > 10:
report_file = previous_reports.pop()
os.remove(report_file)
identifier = time.strftime('%Y-%m-%dT%H-%M-%S', time.localtime(session.start_time))
path = os.path.join(
report_dir(),
identifier + '.pyireport'
)
session.save(path)
return path, identifier
|
python
|
def save_report(session):
'''
Saves the session to a temp file, and returns that path.
Also prunes the number of reports to 10 so there aren't loads building up.
'''
# prune this folder to contain the last 10 sessions
previous_reports = glob.glob(os.path.join(report_dir(), '*.pyireport'))
previous_reports.sort(reverse=True)
while len(previous_reports) > 10:
report_file = previous_reports.pop()
os.remove(report_file)
identifier = time.strftime('%Y-%m-%dT%H-%M-%S', time.localtime(session.start_time))
path = os.path.join(
report_dir(),
identifier + '.pyireport'
)
session.save(path)
return path, identifier
|
[
"def",
"save_report",
"(",
"session",
")",
":",
"# prune this folder to contain the last 10 sessions",
"previous_reports",
"=",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"report_dir",
"(",
")",
",",
"'*.pyireport'",
")",
")",
"previous_reports",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"while",
"len",
"(",
"previous_reports",
")",
">",
"10",
":",
"report_file",
"=",
"previous_reports",
".",
"pop",
"(",
")",
"os",
".",
"remove",
"(",
"report_file",
")",
"identifier",
"=",
"time",
".",
"strftime",
"(",
"'%Y-%m-%dT%H-%M-%S'",
",",
"time",
".",
"localtime",
"(",
"session",
".",
"start_time",
")",
")",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"report_dir",
"(",
")",
",",
"identifier",
"+",
"'.pyireport'",
")",
"session",
".",
"save",
"(",
"path",
")",
"return",
"path",
",",
"identifier"
] |
Saves the session to a temp file, and returns that path.
Also prunes the number of reports to 10 so there aren't loads building up.
|
[
"Saves",
"the",
"session",
"to",
"a",
"temp",
"file",
"and",
"returns",
"that",
"path",
".",
"Also",
"prunes",
"the",
"number",
"of",
"reports",
"to",
"10",
"so",
"there",
"aren",
"t",
"loads",
"building",
"up",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/__main__.py#L255-L274
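A round-trip sketch through the two helpers above. It assumes `Profiler.stop()` returns the `ProfilerSession` (true in pyinstrument 3.x) and that `save_report`/`load_report` are in scope:

from pyinstrument import Profiler

profiler = Profiler()
profiler.start()
sum(i * i for i in range(100000))  # some work to sample
session = profiler.stop()          # assumed to return the ProfilerSession
path, identifier = save_report(session)
restored = load_report(identifier)
print(path, restored.start_time)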
|
234,757
|
joerick/pyinstrument
|
pyinstrument/session.py
|
ProfilerSession.root_frame
|
def root_frame(self, trim_stem=True):
'''
Parses the internal frame records and returns a tree of Frame objects
'''
root_frame = None
frame_stack = []
for frame_tuple in self.frame_records:
identifier_stack = frame_tuple[0]
time = frame_tuple[1]
# now we must create a stack of frame objects and assign this time to the leaf
for stack_depth, frame_identifier in enumerate(identifier_stack):
if stack_depth < len(frame_stack):
if frame_identifier != frame_stack[stack_depth].identifier:
# trim any frames after and including this one
del frame_stack[stack_depth:]
if stack_depth >= len(frame_stack):
frame = Frame(frame_identifier)
frame_stack.append(frame)
if stack_depth == 0:
# There should only be one root frame, as far as I know
assert root_frame is None, ASSERTION_MESSAGE
root_frame = frame
else:
parent = frame_stack[stack_depth-1]
parent.add_child(frame)
# trim any extra frames
del frame_stack[stack_depth+1:] # pylint: disable=W0631
# assign the time to the final frame
frame_stack[-1].add_child(SelfTimeFrame(self_time=time))
if root_frame is None:
return None
if trim_stem:
root_frame = self._trim_stem(root_frame)
return root_frame
|
python
|
def root_frame(self, trim_stem=True):
'''
Parses the internal frame records and returns a tree of Frame objects
'''
root_frame = None
frame_stack = []
for frame_tuple in self.frame_records:
identifier_stack = frame_tuple[0]
time = frame_tuple[1]
# now we must create a stack of frame objects and assign this time to the leaf
for stack_depth, frame_identifier in enumerate(identifier_stack):
if stack_depth < len(frame_stack):
if frame_identifier != frame_stack[stack_depth].identifier:
# trim any frames after and including this one
del frame_stack[stack_depth:]
if stack_depth >= len(frame_stack):
frame = Frame(frame_identifier)
frame_stack.append(frame)
if stack_depth == 0:
# There should only be one root frame, as far as I know
assert root_frame is None, ASSERTION_MESSAGE
root_frame = frame
else:
parent = frame_stack[stack_depth-1]
parent.add_child(frame)
# trim any extra frames
del frame_stack[stack_depth+1:] # pylint: disable=W0631
# assign the time to the final frame
frame_stack[-1].add_child(SelfTimeFrame(self_time=time))
if root_frame is None:
return None
if trim_stem:
root_frame = self._trim_stem(root_frame)
return root_frame
|
[
"def",
"root_frame",
"(",
"self",
",",
"trim_stem",
"=",
"True",
")",
":",
"root_frame",
"=",
"None",
"frame_stack",
"=",
"[",
"]",
"for",
"frame_tuple",
"in",
"self",
".",
"frame_records",
":",
"identifier_stack",
"=",
"frame_tuple",
"[",
"0",
"]",
"time",
"=",
"frame_tuple",
"[",
"1",
"]",
"# now we must create a stack of frame objects and assign this time to the leaf",
"for",
"stack_depth",
",",
"frame_identifier",
"in",
"enumerate",
"(",
"identifier_stack",
")",
":",
"if",
"stack_depth",
"<",
"len",
"(",
"frame_stack",
")",
":",
"if",
"frame_identifier",
"!=",
"frame_stack",
"[",
"stack_depth",
"]",
".",
"identifier",
":",
"# trim any frames after and including this one",
"del",
"frame_stack",
"[",
"stack_depth",
":",
"]",
"if",
"stack_depth",
">=",
"len",
"(",
"frame_stack",
")",
":",
"frame",
"=",
"Frame",
"(",
"frame_identifier",
")",
"frame_stack",
".",
"append",
"(",
"frame",
")",
"if",
"stack_depth",
"==",
"0",
":",
"# There should only be one root frame, as far as I know",
"assert",
"root_frame",
"is",
"None",
",",
"ASSERTION_MESSAGE",
"root_frame",
"=",
"frame",
"else",
":",
"parent",
"=",
"frame_stack",
"[",
"stack_depth",
"-",
"1",
"]",
"parent",
".",
"add_child",
"(",
"frame",
")",
"# trim any extra frames",
"del",
"frame_stack",
"[",
"stack_depth",
"+",
"1",
":",
"]",
"# pylint: disable=W0631",
"# assign the time to the final frame",
"frame_stack",
"[",
"-",
"1",
"]",
".",
"add_child",
"(",
"SelfTimeFrame",
"(",
"self_time",
"=",
"time",
")",
")",
"if",
"root_frame",
"is",
"None",
":",
"return",
"None",
"if",
"trim_stem",
":",
"root_frame",
"=",
"self",
".",
"_trim_stem",
"(",
"root_frame",
")",
"return",
"root_frame"
] |
Parses the internal frame records and returns a tree of Frame objects
|
[
"Parses",
"the",
"internal",
"frame",
"records",
"and",
"returns",
"a",
"tree",
"of",
"Frame",
"objects"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/session.py#L52-L95
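The `frame_records` input is a flat list of `(identifier_stack, self_time)` tuples; `root_frame` folds shared stack prefixes into a single tree. A hand-built illustration (identifiers simplified to bare strings):

# Three samples sharing the 'main' prefix:
frame_records = [
    (['main', 'work'], 0.5),
    (['main', 'work', 'inner'], 0.2),
    (['main', 'other'], 0.1),
]
# root_frame() would fold these into:
#   main
#     work            (0.5s self time)
#       inner         (0.2s self time)
#     other           (0.1s self time)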
|
234,758
|
joerick/pyinstrument
|
pyinstrument/frame.py
|
BaseFrame.remove_from_parent
|
def remove_from_parent(self):
'''
Removes this frame from its parent, and nulls the parent link
'''
if self.parent:
self.parent._children.remove(self)
self.parent._invalidate_time_caches()
self.parent = None
|
python
|
def remove_from_parent(self):
'''
Removes this frame from its parent, and nulls the parent link
'''
if self.parent:
self.parent._children.remove(self)
self.parent._invalidate_time_caches()
self.parent = None
|
[
"def",
"remove_from_parent",
"(",
"self",
")",
":",
"if",
"self",
".",
"parent",
":",
"self",
".",
"parent",
".",
"_children",
".",
"remove",
"(",
"self",
")",
"self",
".",
"parent",
".",
"_invalidate_time_caches",
"(",
")",
"self",
".",
"parent",
"=",
"None"
] |
Removes this frame from its parent, and nulls the parent link
|
[
"Removes",
"this",
"frame",
"from",
"its",
"parent",
"and",
"nulls",
"the",
"parent",
"link"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L11-L18
|
234,759
|
joerick/pyinstrument
|
pyinstrument/frame.py
|
Frame.add_child
|
def add_child(self, frame, after=None):
'''
Adds a child frame, updating the parent link.
Optionally, insert the frame in a specific position by passing the frame to insert
this one after.
'''
frame.remove_from_parent()
frame.parent = self
if after is None:
self._children.append(frame)
else:
index = self._children.index(after) + 1
self._children.insert(index, frame)
self._invalidate_time_caches()
|
python
|
def add_child(self, frame, after=None):
'''
Adds a child frame, updating the parent link.
Optionally, insert the frame in a specific position by passing the frame to insert
this one after.
'''
frame.remove_from_parent()
frame.parent = self
if after is None:
self._children.append(frame)
else:
index = self._children.index(after) + 1
self._children.insert(index, frame)
self._invalidate_time_caches()
|
[
"def",
"add_child",
"(",
"self",
",",
"frame",
",",
"after",
"=",
"None",
")",
":",
"frame",
".",
"remove_from_parent",
"(",
")",
"frame",
".",
"parent",
"=",
"self",
"if",
"after",
"is",
"None",
":",
"self",
".",
"_children",
".",
"append",
"(",
"frame",
")",
"else",
":",
"index",
"=",
"self",
".",
"_children",
".",
"index",
"(",
"after",
")",
"+",
"1",
"self",
".",
"_children",
".",
"insert",
"(",
"index",
",",
"frame",
")",
"self",
".",
"_invalidate_time_caches",
"(",
")"
] |
Adds a child frame, updating the parent link.
Optionally, insert the frame in a specific position by passing the frame to insert
this one after.
|
[
"Adds",
"a",
"child",
"frame",
"updating",
"the",
"parent",
"link",
".",
"Optionally",
"insert",
"the",
"frame",
"in",
"a",
"specific",
"position",
"by",
"passing",
"the",
"frame",
"to",
"insert",
"this",
"one",
"after",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L99-L113
|
234,760
|
joerick/pyinstrument
|
pyinstrument/frame.py
|
Frame.add_children
|
def add_children(self, frames, after=None):
'''
Convenience method to add multiple frames at once.
'''
if after is not None:
# if there's an 'after' parameter, add the frames in reverse so the order is
# preserved.
for frame in reversed(frames):
self.add_child(frame, after=after)
else:
for frame in frames:
self.add_child(frame)
|
python
|
def add_children(self, frames, after=None):
'''
Convenience method to add multiple frames at once.
'''
if after is not None:
# if there's an 'after' parameter, add the frames in reverse so the order is
# preserved.
for frame in reversed(frames):
self.add_child(frame, after=after)
else:
for frame in frames:
self.add_child(frame)
|
[
"def",
"add_children",
"(",
"self",
",",
"frames",
",",
"after",
"=",
"None",
")",
":",
"if",
"after",
"is",
"not",
"None",
":",
"# if there's an 'after' parameter, add the frames in reverse so the order is",
"# preserved.",
"for",
"frame",
"in",
"reversed",
"(",
"frames",
")",
":",
"self",
".",
"add_child",
"(",
"frame",
",",
"after",
"=",
"after",
")",
"else",
":",
"for",
"frame",
"in",
"frames",
":",
"self",
".",
"add_child",
"(",
"frame",
")"
] |
Convenience method to add multiple frames at once.
|
[
"Convenience",
"method",
"to",
"add",
"multiple",
"frames",
"at",
"once",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L115-L126
|
234,761
|
joerick/pyinstrument
|
pyinstrument/frame.py
|
Frame.file_path_short
|
def file_path_short(self):
""" Return the path resolved against the closest entry in sys.path """
if not hasattr(self, '_file_path_short'):
if self.file_path:
result = None
for path in sys.path:
# On Windows, if self.file_path and path are on different drives, relpath
                # will raise a ValueError, because it cannot compute a relpath in this case.
# The root cause is that on Windows, there is no root dir like '/' on Linux.
try:
candidate = os.path.relpath(self.file_path, path)
except ValueError:
continue
if not result or (len(candidate.split(os.sep)) < len(result.split(os.sep))):
result = candidate
self._file_path_short = result
else:
self._file_path_short = None
return self._file_path_short
|
python
|
def file_path_short(self):
""" Return the path resolved against the closest entry in sys.path """
if not hasattr(self, '_file_path_short'):
if self.file_path:
result = None
for path in sys.path:
# On Windows, if self.file_path and path are on different drives, relpath
                # will raise a ValueError, because it cannot compute a relpath in this case.
# The root cause is that on Windows, there is no root dir like '/' on Linux.
try:
candidate = os.path.relpath(self.file_path, path)
except ValueError:
continue
if not result or (len(candidate.split(os.sep)) < len(result.split(os.sep))):
result = candidate
self._file_path_short = result
else:
self._file_path_short = None
return self._file_path_short
|
[
"def",
"file_path_short",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_file_path_short'",
")",
":",
"if",
"self",
".",
"file_path",
":",
"result",
"=",
"None",
"for",
"path",
"in",
"sys",
".",
"path",
":",
"# On Windows, if self.file_path and path are on different drives, relpath",
"# will result in exception, because it cannot compute a relpath in this case.",
"# The root cause is that on Windows, there is no root dir like '/' on Linux.",
"try",
":",
"candidate",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"self",
".",
"file_path",
",",
"path",
")",
"except",
"ValueError",
":",
"continue",
"if",
"not",
"result",
"or",
"(",
"len",
"(",
"candidate",
".",
"split",
"(",
"os",
".",
"sep",
")",
")",
"<",
"len",
"(",
"result",
".",
"split",
"(",
"os",
".",
"sep",
")",
")",
")",
":",
"result",
"=",
"candidate",
"self",
".",
"_file_path_short",
"=",
"result",
"else",
":",
"self",
".",
"_file_path_short",
"=",
"None",
"return",
"self",
".",
"_file_path_short"
] |
Return the path resolved against the closest entry in sys.path
|
[
"Return",
"the",
"path",
"resolved",
"against",
"the",
"closest",
"entry",
"in",
"sys",
".",
"path"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L151-L173
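The selection rule, shown standalone: among `relpath` results against every `sys.path` entry, keep the candidate with the fewest path components. The `file_path` value is made up for the example:

import os
import sys

file_path = os.path.abspath('demo/mod.py')  # hypothetical module path
best = None
for path in sys.path:
    try:
        candidate = os.path.relpath(file_path, path)
    except ValueError:  # different drives on Windows
        continue
    if best is None or len(candidate.split(os.sep)) < len(best.split(os.sep)):
        best = candidate
print(best)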
|
234,762
|
joerick/pyinstrument
|
pyinstrument/frame.py
|
FrameGroup.exit_frames
|
def exit_frames(self):
'''
Returns a list of frames whose children include a frame outside of the group
'''
if self._exit_frames is None:
exit_frames = []
for frame in self.frames:
if any(c.group != self for c in frame.children):
exit_frames.append(frame)
self._exit_frames = exit_frames
return self._exit_frames
|
python
|
def exit_frames(self):
'''
Returns a list of frames whose children include a frame outside of the group
'''
if self._exit_frames is None:
exit_frames = []
for frame in self.frames:
if any(c.group != self for c in frame.children):
exit_frames.append(frame)
self._exit_frames = exit_frames
return self._exit_frames
|
[
"def",
"exit_frames",
"(",
"self",
")",
":",
"if",
"self",
".",
"_exit_frames",
"is",
"None",
":",
"exit_frames",
"=",
"[",
"]",
"for",
"frame",
"in",
"self",
".",
"frames",
":",
"if",
"any",
"(",
"c",
".",
"group",
"!=",
"self",
"for",
"c",
"in",
"frame",
".",
"children",
")",
":",
"exit_frames",
".",
"append",
"(",
"frame",
")",
"self",
".",
"_exit_frames",
"=",
"exit_frames",
"return",
"self",
".",
"_exit_frames"
] |
Returns a list of frames whose children include a frame outside of the group
|
[
"Returns",
"a",
"list",
"of",
"frames",
"whose",
"children",
"include",
"a",
"frame",
"outside",
"of",
"the",
"group"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/frame.py#L286-L297
|
234,763
|
joerick/pyinstrument
|
pyinstrument/profiler.py
|
Profiler.first_interesting_frame
|
def first_interesting_frame(self):
"""
Traverse down the frame hierarchy until a frame is found with more than one child
"""
root_frame = self.root_frame()
frame = root_frame
while len(frame.children) <= 1:
if frame.children:
frame = frame.children[0]
else:
# there are no branches
return root_frame
return frame
|
python
|
def first_interesting_frame(self):
"""
Traverse down the frame hierarchy until a frame is found with more than one child
"""
root_frame = self.root_frame()
frame = root_frame
while len(frame.children) <= 1:
if frame.children:
frame = frame.children[0]
else:
# there are no branches
return root_frame
return frame
|
[
"def",
"first_interesting_frame",
"(",
"self",
")",
":",
"root_frame",
"=",
"self",
".",
"root_frame",
"(",
")",
"frame",
"=",
"root_frame",
"while",
"len",
"(",
"frame",
".",
"children",
")",
"<=",
"1",
":",
"if",
"frame",
".",
"children",
":",
"frame",
"=",
"frame",
".",
"children",
"[",
"0",
"]",
"else",
":",
"# there are no branches",
"return",
"root_frame",
"return",
"frame"
] |
Traverse down the frame hierarchy until a frame is found with more than one child
|
[
"Traverse",
"down",
"the",
"frame",
"hierarchy",
"until",
"a",
"frame",
"is",
"found",
"with",
"more",
"than",
"one",
"child"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/profiler.py#L119-L133
|
234,764
|
joerick/pyinstrument
|
pyinstrument/processors.py
|
aggregate_repeated_calls
|
def aggregate_repeated_calls(frame, options):
'''
Converts a timeline into a time-aggregate summary.
Adds together calls along the same call stack, so that repeated calls appear as the same
frame. Removes time-linearity - frames are sorted according to total time spent.
Useful for outputs that display a summary of execution (e.g. text and html outputs)
'''
if frame is None:
return None
children_by_identifier = {}
# iterate over a copy of the children since it's going to mutate while we're iterating
for child in frame.children:
if child.identifier in children_by_identifier:
aggregate_frame = children_by_identifier[child.identifier]
# combine the two frames, putting the children and self_time into the aggregate frame.
aggregate_frame.self_time += child.self_time
if child.children:
aggregate_frame.add_children(child.children)
# remove this frame, it's been incorporated into aggregate_frame
child.remove_from_parent()
else:
# never seen this identifier before. It becomes the aggregate frame.
children_by_identifier[child.identifier] = child
# recurse into the children
for child in frame.children:
aggregate_repeated_calls(child, options=options)
# sort the children by time
    # it's okay to use the internal _children list, since we're not changing the tree
# structure.
frame._children.sort(key=methodcaller('time'), reverse=True) # pylint: disable=W0212
return frame
|
python
|
def aggregate_repeated_calls(frame, options):
'''
Converts a timeline into a time-aggregate summary.
Adds together calls along the same call stack, so that repeated calls appear as the same
frame. Removes time-linearity - frames are sorted according to total time spent.
Useful for outputs that display a summary of execution (e.g. text and html outputs)
'''
if frame is None:
return None
children_by_identifier = {}
# iterate over a copy of the children since it's going to mutate while we're iterating
for child in frame.children:
if child.identifier in children_by_identifier:
aggregate_frame = children_by_identifier[child.identifier]
# combine the two frames, putting the children and self_time into the aggregate frame.
aggregate_frame.self_time += child.self_time
if child.children:
aggregate_frame.add_children(child.children)
# remove this frame, it's been incorporated into aggregate_frame
child.remove_from_parent()
else:
# never seen this identifier before. It becomes the aggregate frame.
children_by_identifier[child.identifier] = child
# recurse into the children
for child in frame.children:
aggregate_repeated_calls(child, options=options)
# sort the children by time
    # it's okay to use the internal _children list, since we're not changing the tree
# structure.
frame._children.sort(key=methodcaller('time'), reverse=True) # pylint: disable=W0212
return frame
|
[
"def",
"aggregate_repeated_calls",
"(",
"frame",
",",
"options",
")",
":",
"if",
"frame",
"is",
"None",
":",
"return",
"None",
"children_by_identifier",
"=",
"{",
"}",
"# iterate over a copy of the children since it's going to mutate while we're iterating",
"for",
"child",
"in",
"frame",
".",
"children",
":",
"if",
"child",
".",
"identifier",
"in",
"children_by_identifier",
":",
"aggregate_frame",
"=",
"children_by_identifier",
"[",
"child",
".",
"identifier",
"]",
"# combine the two frames, putting the children and self_time into the aggregate frame.",
"aggregate_frame",
".",
"self_time",
"+=",
"child",
".",
"self_time",
"if",
"child",
".",
"children",
":",
"aggregate_frame",
".",
"add_children",
"(",
"child",
".",
"children",
")",
"# remove this frame, it's been incorporated into aggregate_frame",
"child",
".",
"remove_from_parent",
"(",
")",
"else",
":",
"# never seen this identifier before. It becomes the aggregate frame.",
"children_by_identifier",
"[",
"child",
".",
"identifier",
"]",
"=",
"child",
"# recurse into the children",
"for",
"child",
"in",
"frame",
".",
"children",
":",
"aggregate_repeated_calls",
"(",
"child",
",",
"options",
"=",
"options",
")",
"# sort the children by time",
"# it's okay to use the internal _children list, sinde we're not changing the tree",
"# structure.",
"frame",
".",
"_children",
".",
"sort",
"(",
"key",
"=",
"methodcaller",
"(",
"'time'",
")",
",",
"reverse",
"=",
"True",
")",
"# pylint: disable=W0212",
"return",
"frame"
] |
Converts a timeline into a time-aggregate summary.
Adds together calls along the same call stack, so that repeated calls appear as the same
frame. Removes time-linearity - frames are sorted according to total time spent.
Useful for outputs that display a summary of execution (e.g. text and html outputs)
|
[
"Converts",
"a",
"timeline",
"into",
"a",
"time",
"-",
"aggregate",
"summary",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/processors.py#L31-L70
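A before/after picture of the aggregation, with illustrative times:

# Timeline (repeated siblings):        After aggregation:
#   main                                 main
#     parse   0.1s                         write   0.4s
#     write   0.4s                         parse   0.3s  (0.1 + 0.2 merged)
#     parse   0.2s
# Children are re-sorted by total time, descending, once merged.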
|
234,765
|
joerick/pyinstrument
|
pyinstrument/processors.py
|
merge_consecutive_self_time
|
def merge_consecutive_self_time(frame, options):
'''
Combines consecutive 'self time' frames
'''
if frame is None:
return None
previous_self_time_frame = None
for child in frame.children:
if isinstance(child, SelfTimeFrame):
if previous_self_time_frame:
# merge
previous_self_time_frame.self_time += child.self_time
child.remove_from_parent()
else:
# keep a reference, maybe it'll be added to on the next loop
previous_self_time_frame = child
else:
previous_self_time_frame = None
for child in frame.children:
merge_consecutive_self_time(child, options=options)
return frame
|
python
|
def merge_consecutive_self_time(frame, options):
'''
Combines consecutive 'self time' frames
'''
if frame is None:
return None
previous_self_time_frame = None
for child in frame.children:
if isinstance(child, SelfTimeFrame):
if previous_self_time_frame:
# merge
previous_self_time_frame.self_time += child.self_time
child.remove_from_parent()
else:
# keep a reference, maybe it'll be added to on the next loop
previous_self_time_frame = child
else:
previous_self_time_frame = None
for child in frame.children:
merge_consecutive_self_time(child, options=options)
return frame
|
[
"def",
"merge_consecutive_self_time",
"(",
"frame",
",",
"options",
")",
":",
"if",
"frame",
"is",
"None",
":",
"return",
"None",
"previous_self_time_frame",
"=",
"None",
"for",
"child",
"in",
"frame",
".",
"children",
":",
"if",
"isinstance",
"(",
"child",
",",
"SelfTimeFrame",
")",
":",
"if",
"previous_self_time_frame",
":",
"# merge",
"previous_self_time_frame",
".",
"self_time",
"+=",
"child",
".",
"self_time",
"child",
".",
"remove_from_parent",
"(",
")",
"else",
":",
"# keep a reference, maybe it'll be added to on the next loop",
"previous_self_time_frame",
"=",
"child",
"else",
":",
"previous_self_time_frame",
"=",
"None",
"for",
"child",
"in",
"frame",
".",
"children",
":",
"merge_consecutive_self_time",
"(",
"child",
",",
"options",
"=",
"options",
")",
"return",
"frame"
] |
Combines consecutive 'self time' frames
|
[
"Combines",
"consecutive",
"self",
"time",
"frames"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/processors.py#L101-L125
|
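A self-contained sketch of the merge in the record above: consecutive "self time" entries collapse into one, while entries separated by a call stay apart. SelfTime is a hypothetical stand-in for pyinstrument's SelfTimeFrame, and plain strings stand in for call frames.

class SelfTime:
    def __init__(self, t):
        self.t = t

def merge_consecutive(items):
    merged, previous = [], None
    for item in items:
        if isinstance(item, SelfTime) and previous is not None:
            previous.t += item.t      # extend the current run of self-time
        else:
            merged.append(item)
            previous = item if isinstance(item, SelfTime) else None
    return merged

out = merge_consecutive([SelfTime(1), SelfTime(2), 'call', SelfTime(3)])
print([x.t if isinstance(x, SelfTime) else x for x in out])  # [3, 'call', 3]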
234,766
|
joerick/pyinstrument
|
pyinstrument/processors.py
|
remove_unnecessary_self_time_nodes
|
def remove_unnecessary_self_time_nodes(frame, options):
'''
When a frame has only one child, and that is a self-time frame, remove that node, since it's
unnecessary - it clutters the output and offers no additional information.
'''
if frame is None:
return None
if len(frame.children) == 1 and isinstance(frame.children[0], SelfTimeFrame):
child = frame.children[0]
frame.self_time += child.self_time
child.remove_from_parent()
for child in frame.children:
remove_unnecessary_self_time_nodes(child, options=options)
return frame
|
python
|
def remove_unnecessary_self_time_nodes(frame, options):
'''
When a frame has only one child, and that is a self-time frame, remove that node, since it's
unnecessary - it clutters the output and offers no additional information.
'''
if frame is None:
return None
if len(frame.children) == 1 and isinstance(frame.children[0], SelfTimeFrame):
child = frame.children[0]
frame.self_time += child.self_time
child.remove_from_parent()
for child in frame.children:
remove_unnecessary_self_time_nodes(child, options=options)
return frame
|
[
"def",
"remove_unnecessary_self_time_nodes",
"(",
"frame",
",",
"options",
")",
":",
"if",
"frame",
"is",
"None",
":",
"return",
"None",
"if",
"len",
"(",
"frame",
".",
"children",
")",
"==",
"1",
"and",
"isinstance",
"(",
"frame",
".",
"children",
"[",
"0",
"]",
",",
"SelfTimeFrame",
")",
":",
"child",
"=",
"frame",
".",
"children",
"[",
"0",
"]",
"frame",
".",
"self_time",
"+=",
"child",
".",
"self_time",
"child",
".",
"remove_from_parent",
"(",
")",
"for",
"child",
"in",
"frame",
".",
"children",
":",
"remove_unnecessary_self_time_nodes",
"(",
"child",
",",
"options",
"=",
"options",
")",
"return",
"frame"
] |
When a frame has only one child, and that is a self-time frame, remove that node, since it's
unnecessary - it clutters the output and offers no additional information.
|
[
"When",
"a",
"frame",
"has",
"only",
"one",
"child",
"and",
"that",
"is",
"a",
"self",
"-",
"time",
"frame",
"remove",
"that",
"node",
"since",
"it",
"s",
"unnecessary",
"-",
"it",
"clutters",
"the",
"output",
"and",
"offers",
"no",
"additional",
"information",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/processors.py#L128-L144
|
234,767
|
joerick/pyinstrument
|
pyinstrument/renderers/html.py
|
HTMLRenderer.open_in_browser
|
def open_in_browser(self, session, output_filename=None):
"""
Open the rendered HTML in a webbrowser.
If output_filename=None (the default), a tempfile is used.
The filename of the HTML file is returned.
"""
if output_filename is None:
output_file = tempfile.NamedTemporaryFile(suffix='.html', delete=False)
output_filename = output_file.name
with codecs.getwriter('utf-8')(output_file) as f:
f.write(self.render(session))
else:
with codecs.open(output_filename, 'w', 'utf-8') as f:
f.write(self.render(session))
from pyinstrument.vendor.six.moves import urllib
url = urllib.parse.urlunparse(('file', '', output_filename, '', '', ''))
webbrowser.open(url)
return output_filename
|
python
|
def open_in_browser(self, session, output_filename=None):
"""
Open the rendered HTML in a webbrowser.
If output_filename=None (the default), a tempfile is used.
The filename of the HTML file is returned.
"""
if output_filename is None:
output_file = tempfile.NamedTemporaryFile(suffix='.html', delete=False)
output_filename = output_file.name
with codecs.getwriter('utf-8')(output_file) as f:
f.write(self.render(session))
else:
with codecs.open(output_filename, 'w', 'utf-8') as f:
f.write(self.render(session))
from pyinstrument.vendor.six.moves import urllib
url = urllib.parse.urlunparse(('file', '', output_filename, '', '', ''))
webbrowser.open(url)
return output_filename
|
[
"def",
"open_in_browser",
"(",
"self",
",",
"session",
",",
"output_filename",
"=",
"None",
")",
":",
"if",
"output_filename",
"is",
"None",
":",
"output_file",
"=",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"suffix",
"=",
"'.html'",
",",
"delete",
"=",
"False",
")",
"output_filename",
"=",
"output_file",
".",
"name",
"with",
"codecs",
".",
"getwriter",
"(",
"'utf-8'",
")",
"(",
"output_file",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"self",
".",
"render",
"(",
"session",
")",
")",
"else",
":",
"with",
"codecs",
".",
"open",
"(",
"output_filename",
",",
"'w'",
",",
"'utf-8'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"self",
".",
"render",
"(",
"session",
")",
")",
"from",
"pyinstrument",
".",
"vendor",
".",
"six",
".",
"moves",
"import",
"urllib",
"url",
"=",
"urllib",
".",
"parse",
".",
"urlunparse",
"(",
"(",
"'file'",
",",
"''",
",",
"output_filename",
",",
"''",
",",
"''",
",",
"''",
")",
")",
"webbrowser",
".",
"open",
"(",
"url",
")",
"return",
"output_filename"
] |
Open the rendered HTML in a webbrowser.
If output_filename=None (the default), a tempfile is used.
The filename of the HTML file is returned.
|
[
"Open",
"the",
"rendered",
"HTML",
"in",
"a",
"webbrowser",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/renderers/html.py#L43-L64
|
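The tempfile + file:// URL pattern in the record above, isolated as a Python 3 sketch (the original routes urllib through the vendored six for Python 2 support); the HTML content here is a placeholder.

import tempfile
from urllib.parse import urlunparse

with tempfile.NamedTemporaryFile(suffix='.html', delete=False,
                                 mode='w', encoding='utf-8') as f:
    f.write('<h1>profile output</h1>')
    output_filename = f.name

url = urlunparse(('file', '', output_filename, '', '', ''))
print(url)  # e.g. file:///tmp/tmpXXXXXXXX.html
# webbrowser.open(url)  # the renderer's final step; left commented here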
234,768
|
joerick/pyinstrument
|
setup.py
|
BuildPyCommand.run
|
def run(self):
'''compile the JS, then run superclass implementation'''
if subprocess.call(['npm', '--version']) != 0:
raise RuntimeError('npm is required to build the HTML renderer.')
self.check_call(['npm', 'install'], cwd=HTML_RENDERER_DIR)
self.check_call(['npm', 'run', 'build'], cwd=HTML_RENDERER_DIR)
self.copy_file(HTML_RENDERER_DIR+'/dist/js/app.js', 'pyinstrument/renderers/html_resources/app.js')
setuptools.command.build_py.build_py.run(self)
|
python
|
def run(self):
'''compile the JS, then run superclass implementation'''
if subprocess.call(['npm', '--version']) != 0:
raise RuntimeError('npm is required to build the HTML renderer.')
self.check_call(['npm', 'install'], cwd=HTML_RENDERER_DIR)
self.check_call(['npm', 'run', 'build'], cwd=HTML_RENDERER_DIR)
self.copy_file(HTML_RENDERER_DIR+'/dist/js/app.js', 'pyinstrument/renderers/html_resources/app.js')
setuptools.command.build_py.build_py.run(self)
|
[
"def",
"run",
"(",
"self",
")",
":",
"if",
"subprocess",
".",
"call",
"(",
"[",
"'npm'",
",",
"'--version'",
"]",
")",
"!=",
"0",
":",
"raise",
"RuntimeError",
"(",
"'npm is required to build the HTML renderer.'",
")",
"self",
".",
"check_call",
"(",
"[",
"'npm'",
",",
"'install'",
"]",
",",
"cwd",
"=",
"HTML_RENDERER_DIR",
")",
"self",
".",
"check_call",
"(",
"[",
"'npm'",
",",
"'run'",
",",
"'build'",
"]",
",",
"cwd",
"=",
"HTML_RENDERER_DIR",
")",
"self",
".",
"copy_file",
"(",
"HTML_RENDERER_DIR",
"+",
"'/dist/js/app.js'",
",",
"'pyinstrument/renderers/html_resources/app.js'",
")",
"setuptools",
".",
"command",
".",
"build_py",
".",
"build_py",
".",
"run",
"(",
"self",
")"
] |
compile the JS, then run superclass implementation
|
[
"compile",
"the",
"JS",
"then",
"run",
"superclass",
"implementation"
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/setup.py#L19-L30
|
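The same fail-fast guard as the build command above, shown standalone. Probing with '--version' is the original's approach; catching FileNotFoundError is an extra safety net added here for when the binary is absent entirely.

import subprocess

def require_tool(cmd):
    try:
        returncode = subprocess.call([cmd, '--version'])
    except FileNotFoundError:
        returncode = 1
    if returncode != 0:
        raise RuntimeError('%s is required to build the HTML renderer.' % cmd)

require_tool('npm')  # raises RuntimeError when npm is unavailable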
234,769
|
joerick/pyinstrument
|
pyinstrument/util.py
|
deprecated
|
def deprecated(func, *args, **kwargs):
''' Marks a function as deprecated. '''
warnings.warn(
'{} is deprecated and should no longer be used.'.format(func),
DeprecationWarning,
stacklevel=3
)
return func(*args, **kwargs)
|
python
|
def deprecated(func, *args, **kwargs):
''' Marks a function as deprecated. '''
warnings.warn(
'{} is deprecated and should no longer be used.'.format(func),
DeprecationWarning,
stacklevel=3
)
return func(*args, **kwargs)
|
[
"def",
"deprecated",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"warnings",
".",
"warn",
"(",
"'{} is deprecated and should no longer be used.'",
".",
"format",
"(",
"func",
")",
",",
"DeprecationWarning",
",",
"stacklevel",
"=",
"3",
")",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Marks a function as deprecated.
|
[
"Marks",
"a",
"function",
"as",
"deprecated",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/util.py#L18-L25
|
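A usage sketch for the warning pattern above. The wrapper below is a minimal re-creation for illustration, not an import from pyinstrument.util; catch_warnings makes the emitted DeprecationWarning visible.

import warnings

def deprecated(func, *args, **kwargs):
    warnings.warn('{} is deprecated and should no longer be used.'.format(func),
                  DeprecationWarning, stacklevel=3)
    return func(*args, **kwargs)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    result = deprecated(len, [1, 2, 3])

print(result)                       # 3
print(caught[0].category.__name__)  # DeprecationWarning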
234,770
|
joerick/pyinstrument
|
pyinstrument/util.py
|
deprecated_option
|
def deprecated_option(option_name, message=''):
''' Marks an option as deprecated. '''
def caller(func, *args, **kwargs):
if option_name in kwargs:
warnings.warn(
'{} is deprecated. {}'.format(option_name, message),
DeprecationWarning,
stacklevel=3
)
return func(*args, **kwargs)
return decorator(caller)
|
python
|
def deprecated_option(option_name, message=''):
''' Marks an option as deprecated. '''
def caller(func, *args, **kwargs):
if option_name in kwargs:
warnings.warn(
'{} is deprecated. {}'.format(option_name, message),
DeprecationWarning,
stacklevel=3
)
return func(*args, **kwargs)
return decorator(caller)
|
[
"def",
"deprecated_option",
"(",
"option_name",
",",
"message",
"=",
"''",
")",
":",
"def",
"caller",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"option_name",
"in",
"kwargs",
":",
"warnings",
".",
"warn",
"(",
"'{} is deprecated. {}'",
".",
"format",
"(",
"option_name",
",",
"message",
")",
",",
"DeprecationWarning",
",",
"stacklevel",
"=",
"3",
")",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"decorator",
"(",
"caller",
")"
] |
Marks an option as deprecated.
|
[
"Marks",
"an",
"option",
"as",
"deprecated",
"."
] |
cc4f3f6fc1b493d7cd058ecf41ad012e0030a512
|
https://github.com/joerick/pyinstrument/blob/cc4f3f6fc1b493d7cd058ecf41ad012e0030a512/pyinstrument/util.py#L27-L38
|
234,771
|
jrief/django-angular
|
djng/app_settings.py
|
AppSettings.THUMBNAIL_OPTIONS
|
def THUMBNAIL_OPTIONS(self):
"""
Set the size as a 2-tuple for thumbnailed images after uploading them.
"""
from django.core.exceptions import ImproperlyConfigured
size = self._setting('DJNG_THUMBNAIL_SIZE', (200, 200))
if not (isinstance(size, (list, tuple)) and len(size) == 2 and isinstance(size[0], int) and isinstance(size[1], int)):
raise ImproperlyConfigured("'DJNG_THUMBNAIL_SIZE' must be a 2-tuple of integers.")
return {'crop': True, 'size': size}
|
python
|
def THUMBNAIL_OPTIONS(self):
"""
Set the size as a 2-tuple for thumbnailed images after uploading them.
"""
from django.core.exceptions import ImproperlyConfigured
size = self._setting('DJNG_THUMBNAIL_SIZE', (200, 200))
if not (isinstance(size, (list, tuple)) and len(size) == 2 and isinstance(size[0], int) and isinstance(size[1], int)):
raise ImproperlyConfigured("'DJNG_THUMBNAIL_SIZE' must be a 2-tuple of integers.")
return {'crop': True, 'size': size}
|
[
"def",
"THUMBNAIL_OPTIONS",
"(",
"self",
")",
":",
"from",
"django",
".",
"core",
".",
"exceptions",
"import",
"ImproperlyConfigured",
"size",
"=",
"self",
".",
"_setting",
"(",
"'DJNG_THUMBNAIL_SIZE'",
",",
"(",
"200",
",",
"200",
")",
")",
"if",
"not",
"(",
"isinstance",
"(",
"size",
",",
"(",
"list",
",",
"tuple",
")",
")",
"and",
"len",
"(",
"size",
")",
"==",
"2",
"and",
"isinstance",
"(",
"size",
"[",
"0",
"]",
",",
"int",
")",
"and",
"isinstance",
"(",
"size",
"[",
"1",
"]",
",",
"int",
")",
")",
":",
"raise",
"ImproperlyConfigured",
"(",
"\"'DJNG_THUMBNAIL_SIZE' must be a 2-tuple of integers.\"",
")",
"return",
"{",
"'crop'",
":",
"True",
",",
"'size'",
":",
"size",
"}"
] |
Set the size as a 2-tuple for thumbnailed images after uploading them.
|
[
"Set",
"the",
"size",
"as",
"a",
"2",
"-",
"tuple",
"for",
"thumbnailed",
"images",
"after",
"uploading",
"them",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/app_settings.py#L20-L29
|
234,772
|
jrief/django-angular
|
djng/forms/angular_base.py
|
NgWidgetMixin.get_context
|
def get_context(self, name, value, attrs):
"""
Some widgets require a modified rendering context, if they contain angular directives.
"""
context = super(NgWidgetMixin, self).get_context(name, value, attrs)
if callable(getattr(self._field, 'update_widget_rendering_context', None)):
self._field.update_widget_rendering_context(context)
return context
|
python
|
def get_context(self, name, value, attrs):
"""
Some widgets require a modified rendering context, if they contain angular directives.
"""
context = super(NgWidgetMixin, self).get_context(name, value, attrs)
if callable(getattr(self._field, 'update_widget_rendering_context', None)):
self._field.update_widget_rendering_context(context)
return context
|
[
"def",
"get_context",
"(",
"self",
",",
"name",
",",
"value",
",",
"attrs",
")",
":",
"context",
"=",
"super",
"(",
"NgWidgetMixin",
",",
"self",
")",
".",
"get_context",
"(",
"name",
",",
"value",
",",
"attrs",
")",
"if",
"callable",
"(",
"getattr",
"(",
"self",
".",
"_field",
",",
"'update_widget_rendering_context'",
",",
"None",
")",
")",
":",
"self",
".",
"_field",
".",
"update_widget_rendering_context",
"(",
"context",
")",
"return",
"context"
] |
Some widgets require a modified rendering context, if they contain angular directives.
|
[
"Some",
"widgets",
"require",
"a",
"modified",
"rendering",
"context",
"if",
"they",
"contain",
"angular",
"directives",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L156-L163
|
234,773
|
jrief/django-angular
|
djng/forms/angular_base.py
|
NgBoundField.errors
|
def errors(self):
"""
Returns a TupleErrorList for this field. This overloaded method adds additional error lists
to the errors as detected by the form validator.
"""
if not hasattr(self, '_errors_cache'):
self._errors_cache = self.form.get_field_errors(self)
return self._errors_cache
|
python
|
def errors(self):
"""
Returns a TupleErrorList for this field. This overloaded method adds additional error lists
to the errors as detected by the form validator.
"""
if not hasattr(self, '_errors_cache'):
self._errors_cache = self.form.get_field_errors(self)
return self._errors_cache
|
[
"def",
"errors",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_errors_cache'",
")",
":",
"self",
".",
"_errors_cache",
"=",
"self",
".",
"form",
".",
"get_field_errors",
"(",
"self",
")",
"return",
"self",
".",
"_errors_cache"
] |
Returns a TupleErrorList for this field. This overloaded method adds additional error lists
to the errors as detected by the form validator.
|
[
"Returns",
"a",
"TupleErrorList",
"for",
"this",
"field",
".",
"This",
"overloaded",
"method",
"adds",
"additional",
"error",
"lists",
"to",
"the",
"errors",
"as",
"detected",
"by",
"the",
"form",
"validator",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L168-L175
|
234,774
|
jrief/django-angular
|
djng/forms/angular_base.py
|
NgBoundField.css_classes
|
def css_classes(self, extra_classes=None):
"""
Returns a string of space-separated CSS classes for the wrapping element of this input field.
"""
if hasattr(extra_classes, 'split'):
extra_classes = extra_classes.split()
extra_classes = set(extra_classes or [])
# field_css_classes is an optional member of a Form optimized for django-angular
field_css_classes = getattr(self.form, 'field_css_classes', None)
if hasattr(field_css_classes, 'split'):
extra_classes.update(field_css_classes.split())
elif isinstance(field_css_classes, (list, tuple)):
extra_classes.update(field_css_classes)
elif isinstance(field_css_classes, dict):
extra_field_classes = []
for key in ('*', self.name):
css_classes = field_css_classes.get(key)
if hasattr(css_classes, 'split'):
extra_field_classes = css_classes.split()
elif isinstance(css_classes, (list, tuple)):
if '__default__' in css_classes:
css_classes.remove('__default__')
extra_field_classes.extend(css_classes)
else:
extra_field_classes = css_classes
extra_classes.update(extra_field_classes)
return super(NgBoundField, self).css_classes(extra_classes)
|
python
|
def css_classes(self, extra_classes=None):
"""
Returns a string of space-separated CSS classes for the wrapping element of this input field.
"""
if hasattr(extra_classes, 'split'):
extra_classes = extra_classes.split()
extra_classes = set(extra_classes or [])
# field_css_classes is an optional member of a Form optimized for django-angular
field_css_classes = getattr(self.form, 'field_css_classes', None)
if hasattr(field_css_classes, 'split'):
extra_classes.update(field_css_classes.split())
elif isinstance(field_css_classes, (list, tuple)):
extra_classes.update(field_css_classes)
elif isinstance(field_css_classes, dict):
extra_field_classes = []
for key in ('*', self.name):
css_classes = field_css_classes.get(key)
if hasattr(css_classes, 'split'):
extra_field_classes = css_classes.split()
elif isinstance(css_classes, (list, tuple)):
if '__default__' in css_classes:
css_classes.remove('__default__')
extra_field_classes.extend(css_classes)
else:
extra_field_classes = css_classes
extra_classes.update(extra_field_classes)
return super(NgBoundField, self).css_classes(extra_classes)
|
[
"def",
"css_classes",
"(",
"self",
",",
"extra_classes",
"=",
"None",
")",
":",
"if",
"hasattr",
"(",
"extra_classes",
",",
"'split'",
")",
":",
"extra_classes",
"=",
"extra_classes",
".",
"split",
"(",
")",
"extra_classes",
"=",
"set",
"(",
"extra_classes",
"or",
"[",
"]",
")",
"# field_css_classes is an optional member of a Form optimized for django-angular",
"field_css_classes",
"=",
"getattr",
"(",
"self",
".",
"form",
",",
"'field_css_classes'",
",",
"None",
")",
"if",
"hasattr",
"(",
"field_css_classes",
",",
"'split'",
")",
":",
"extra_classes",
".",
"update",
"(",
"field_css_classes",
".",
"split",
"(",
")",
")",
"elif",
"isinstance",
"(",
"field_css_classes",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"extra_classes",
".",
"update",
"(",
"field_css_classes",
")",
"elif",
"isinstance",
"(",
"field_css_classes",
",",
"dict",
")",
":",
"extra_field_classes",
"=",
"[",
"]",
"for",
"key",
"in",
"(",
"'*'",
",",
"self",
".",
"name",
")",
":",
"css_classes",
"=",
"field_css_classes",
".",
"get",
"(",
"key",
")",
"if",
"hasattr",
"(",
"css_classes",
",",
"'split'",
")",
":",
"extra_field_classes",
"=",
"css_classes",
".",
"split",
"(",
")",
"elif",
"isinstance",
"(",
"css_classes",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"if",
"'__default__'",
"in",
"css_classes",
":",
"css_classes",
".",
"remove",
"(",
"'__default__'",
")",
"extra_field_classes",
".",
"extend",
"(",
"css_classes",
")",
"else",
":",
"extra_field_classes",
"=",
"css_classes",
"extra_classes",
".",
"update",
"(",
"extra_field_classes",
")",
"return",
"super",
"(",
"NgBoundField",
",",
"self",
")",
".",
"css_classes",
"(",
"extra_classes",
")"
] |
Returns a string of space-separated CSS classes for the wrapping element of this input field.
|
[
"Returns",
"a",
"string",
"of",
"space",
"-",
"separated",
"CSS",
"classes",
"for",
"the",
"wrapping",
"element",
"of",
"this",
"input",
"field",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L177-L203
|
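A standalone trace of the dict branch in the record above: '*' supplies defaults for every field, a per-field key overrides them, and '__default__' in a per-field list means "keep the defaults and extend". Missing keys are simply skipped in this sketch, and the class names are illustrative.

def resolve_field_classes(field_css_classes, field_name):
    extra = []
    for key in ('*', field_name):
        classes = field_css_classes.get(key)
        if isinstance(classes, str):
            extra = classes.split()
        elif isinstance(classes, (list, tuple)):
            classes = list(classes)
            if '__default__' in classes:
                classes.remove('__default__')
                extra.extend(classes)   # keep the '*' defaults, add the rest
            else:
                extra = classes         # replace the defaults outright
    return set(extra)

classes = {'*': 'form-group', 'email': ['__default__', 'has-feedback']}
print(resolve_field_classes(classes, 'email'))  # {'form-group', 'has-feedback'}
print(resolve_field_classes(classes, 'name'))   # {'form-group'}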
234,775
|
jrief/django-angular
|
djng/forms/angular_base.py
|
NgFormBaseMixin.get_field_errors
|
def get_field_errors(self, field):
"""
Return server side errors. Shall be overridden by derived forms to add their
extra errors for AngularJS.
"""
identifier = format_html('{0}[\'{1}\']', self.form_name, field.name)
errors = self.errors.get(field.html_name, [])
return self.error_class([SafeTuple(
(identifier, self.field_error_css_classes, '$pristine', '$pristine', 'invalid', e)) for e in errors])
|
python
|
def get_field_errors(self, field):
"""
Return server side errors. Shall be overridden by derived forms to add their
extra errors for AngularJS.
"""
identifier = format_html('{0}[\'{1}\']', self.form_name, field.name)
errors = self.errors.get(field.html_name, [])
return self.error_class([SafeTuple(
(identifier, self.field_error_css_classes, '$pristine', '$pristine', 'invalid', e)) for e in errors])
|
[
"def",
"get_field_errors",
"(",
"self",
",",
"field",
")",
":",
"identifier",
"=",
"format_html",
"(",
"'{0}[\\'{1}\\']'",
",",
"self",
".",
"form_name",
",",
"field",
".",
"name",
")",
"errors",
"=",
"self",
".",
"errors",
".",
"get",
"(",
"field",
".",
"html_name",
",",
"[",
"]",
")",
"return",
"self",
".",
"error_class",
"(",
"[",
"SafeTuple",
"(",
"(",
"identifier",
",",
"self",
".",
"field_error_css_classes",
",",
"'$pristine'",
",",
"'$pristine'",
",",
"'invalid'",
",",
"e",
")",
")",
"for",
"e",
"in",
"errors",
"]",
")"
] |
Return server side errors. Shall be overridden by derived forms to add their
extra errors for AngularJS.
|
[
"Return",
"server",
"side",
"errors",
".",
"Shall",
"be",
"overridden",
"by",
"derived",
"forms",
"to",
"add",
"their",
"extra",
"errors",
"for",
"AngularJS",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L330-L338
|
234,776
|
jrief/django-angular
|
djng/forms/angular_base.py
|
NgFormBaseMixin.update_widget_attrs
|
def update_widget_attrs(self, bound_field, attrs):
"""
Update the widget attributes which shall be added to the widget when rendering this field.
"""
if bound_field.field.has_subwidgets() is False:
widget_classes = getattr(self, 'widget_css_classes', None)
if widget_classes:
if 'class' in attrs:
attrs['class'] += ' ' + widget_classes
else:
attrs.update({'class': widget_classes})
return attrs
|
python
|
def update_widget_attrs(self, bound_field, attrs):
"""
Update the widget attributes which shall be added to the widget when rendering this field.
"""
if bound_field.field.has_subwidgets() is False:
widget_classes = getattr(self, 'widget_css_classes', None)
if widget_classes:
if 'class' in attrs:
attrs['class'] += ' ' + widget_classes
else:
attrs.update({'class': widget_classes})
return attrs
|
[
"def",
"update_widget_attrs",
"(",
"self",
",",
"bound_field",
",",
"attrs",
")",
":",
"if",
"bound_field",
".",
"field",
".",
"has_subwidgets",
"(",
")",
"is",
"False",
":",
"widget_classes",
"=",
"getattr",
"(",
"self",
",",
"'widget_css_classes'",
",",
"None",
")",
"if",
"widget_classes",
":",
"if",
"'class'",
"in",
"attrs",
":",
"attrs",
"[",
"'class'",
"]",
"+=",
"' '",
"+",
"widget_classes",
"else",
":",
"attrs",
".",
"update",
"(",
"{",
"'class'",
":",
"widget_classes",
"}",
")",
"return",
"attrs"
] |
Update the widget attributes which shall be added to the widget when rendering this field.
|
[
"Updated",
"the",
"widget",
"attributes",
"which",
"shall",
"be",
"added",
"to",
"the",
"widget",
"when",
"rendering",
"this",
"field",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L354-L365
|
234,777
|
jrief/django-angular
|
djng/forms/angular_base.py
|
NgFormBaseMixin.rectify_multipart_form_data
|
def rectify_multipart_form_data(self, data):
"""
If a widget was converted and the Form data was submitted through a multipart request,
then these data fields must be converted to suit the Django Form validation
"""
for name, field in self.base_fields.items():
try:
field.implode_multi_values(name, data)
except AttributeError:
pass
return data
|
python
|
def rectify_multipart_form_data(self, data):
"""
If a widget was converted and the Form data was submitted through a multipart request,
then these data fields must be converted to suit the Django Form validation
"""
for name, field in self.base_fields.items():
try:
field.implode_multi_values(name, data)
except AttributeError:
pass
return data
|
[
"def",
"rectify_multipart_form_data",
"(",
"self",
",",
"data",
")",
":",
"for",
"name",
",",
"field",
"in",
"self",
".",
"base_fields",
".",
"items",
"(",
")",
":",
"try",
":",
"field",
".",
"implode_multi_values",
"(",
"name",
",",
"data",
")",
"except",
"AttributeError",
":",
"pass",
"return",
"data"
] |
If a widget was converted and the Form data was submitted through a multipart request,
then these data fields must be converted to suit the Django Form validation
|
[
"If",
"a",
"widget",
"was",
"converted",
"and",
"the",
"Form",
"data",
"was",
"submitted",
"through",
"a",
"multipart",
"request",
"then",
"these",
"data",
"fields",
"must",
"be",
"converted",
"to",
"suit",
"the",
"Django",
"Form",
"validation"
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L380-L390
|
234,778
|
jrief/django-angular
|
djng/forms/angular_base.py
|
NgFormBaseMixin.rectify_ajax_form_data
|
def rectify_ajax_form_data(self, data):
"""
If a widget was converted and the Form data was submitted through an Ajax request,
then these data fields must be converted to suit the Django Form validation
"""
for name, field in self.base_fields.items():
try:
data[name] = field.convert_ajax_data(data.get(name, {}))
except AttributeError:
pass
return data
|
python
|
def rectify_ajax_form_data(self, data):
"""
If a widget was converted and the Form data was submitted through an Ajax request,
then these data fields must be converted to suit the Django Form validation
"""
for name, field in self.base_fields.items():
try:
data[name] = field.convert_ajax_data(data.get(name, {}))
except AttributeError:
pass
return data
|
[
"def",
"rectify_ajax_form_data",
"(",
"self",
",",
"data",
")",
":",
"for",
"name",
",",
"field",
"in",
"self",
".",
"base_fields",
".",
"items",
"(",
")",
":",
"try",
":",
"data",
"[",
"name",
"]",
"=",
"field",
".",
"convert_ajax_data",
"(",
"data",
".",
"get",
"(",
"name",
",",
"{",
"}",
")",
")",
"except",
"AttributeError",
":",
"pass",
"return",
"data"
] |
If a widget was converted and the Form data was submitted through an Ajax request,
then these data fields must be converted to suit the Django Form validation
|
[
"If",
"a",
"widget",
"was",
"converted",
"and",
"the",
"Form",
"data",
"was",
"submitted",
"through",
"an",
"Ajax",
"request",
"then",
"these",
"data",
"fields",
"must",
"be",
"converted",
"to",
"suit",
"the",
"Django",
"Form",
"validation"
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_base.py#L392-L402
|
234,779
|
jrief/django-angular
|
djng/templatetags/djng_tags.py
|
djng_locale_script
|
def djng_locale_script(context, default_language='en'):
"""
Returns a script tag for including the proper locale script in any HTML page.
This tag determines the current language with its locale.
Usage:
<script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script %}"></script>
or, if used with a default language:
<script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script 'de' %}"></script>
"""
language = get_language_from_request(context['request'])
if not language:
language = default_language
return format_html('angular-locale_{}.js', language.lower())
|
python
|
def djng_locale_script(context, default_language='en'):
"""
Returns a script tag for including the proper locale script in any HTML page.
This tag determines the current language with its locale.
Usage:
<script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script %}"></script>
or, if used with a default language:
<script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script 'de' %}"></script>
"""
language = get_language_from_request(context['request'])
if not language:
language = default_language
return format_html('angular-locale_{}.js', language.lower())
|
[
"def",
"djng_locale_script",
"(",
"context",
",",
"default_language",
"=",
"'en'",
")",
":",
"language",
"=",
"get_language_from_request",
"(",
"context",
"[",
"'request'",
"]",
")",
"if",
"not",
"language",
":",
"language",
"=",
"default_language",
"return",
"format_html",
"(",
"'angular-locale_{}.js'",
",",
"language",
".",
"lower",
"(",
")",
")"
] |
Returns a script tag for including the proper locale script in any HTML page.
This tag determines the current language with its locale.
Usage:
<script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script %}"></script>
or, if used with a default language:
<script src="{% static 'node_modules/angular-i18n/' %}{% djng_locale_script 'de' %}"></script>
|
[
"Returns",
"a",
"script",
"tag",
"for",
"including",
"the",
"proper",
"locale",
"script",
"in",
"any",
"HTML",
"page",
".",
"This",
"tag",
"determines",
"the",
"current",
"language",
"with",
"its",
"locale",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/templatetags/djng_tags.py#L101-L114
|
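The filename construction above in isolation; in the real tag the language code comes from Django's get_language_from_request, the 'de-CH' value here is illustrative.

language = 'de-CH'
print('angular-locale_{}.js'.format(language.lower()))  # angular-locale_de-ch.js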
234,780
|
jrief/django-angular
|
djng/forms/fields.py
|
DefaultFieldMixin.update_widget_attrs
|
def update_widget_attrs(self, bound_field, attrs):
"""
Update the dictionary of attributes used while rendering the input widget
"""
bound_field.form.update_widget_attrs(bound_field, attrs)
widget_classes = self.widget.attrs.get('class', None)
if widget_classes:
if 'class' in attrs:
attrs['class'] += ' ' + widget_classes
else:
attrs.update({'class': widget_classes})
return attrs
|
python
|
def update_widget_attrs(self, bound_field, attrs):
"""
Update the dictionary of attributes used while rendering the input widget
"""
bound_field.form.update_widget_attrs(bound_field, attrs)
widget_classes = self.widget.attrs.get('class', None)
if widget_classes:
if 'class' in attrs:
attrs['class'] += ' ' + widget_classes
else:
attrs.update({'class': widget_classes})
return attrs
|
[
"def",
"update_widget_attrs",
"(",
"self",
",",
"bound_field",
",",
"attrs",
")",
":",
"bound_field",
".",
"form",
".",
"update_widget_attrs",
"(",
"bound_field",
",",
"attrs",
")",
"widget_classes",
"=",
"self",
".",
"widget",
".",
"attrs",
".",
"get",
"(",
"'class'",
",",
"None",
")",
"if",
"widget_classes",
":",
"if",
"'class'",
"in",
"attrs",
":",
"attrs",
"[",
"'class'",
"]",
"+=",
"' '",
"+",
"widget_classes",
"else",
":",
"attrs",
".",
"update",
"(",
"{",
"'class'",
":",
"widget_classes",
"}",
")",
"return",
"attrs"
] |
Update the dictionary of attributes used while rendering the input widget
|
[
"Update",
"the",
"dictionary",
"of",
"attributes",
"used",
"while",
"rendering",
"the",
"input",
"widget"
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/fields.py#L100-L111
|
234,781
|
jrief/django-angular
|
djng/forms/fields.py
|
MultipleChoiceField.implode_multi_values
|
def implode_multi_values(self, name, data):
"""
Due to the way Angular organizes its model, when Form data is sent via a POST request,
then for this kind of widget, the posted data must be converted into a format suitable
for Django's Form validation.
"""
mkeys = [k for k in data.keys() if k.startswith(name + '.')]
mvls = [data.pop(k)[0] for k in mkeys]
if mvls:
data.setlist(name, mvls)
|
python
|
def implode_multi_values(self, name, data):
"""
Due to the way Angular organizes its model, when Form data is sent via a POST request,
then for this kind of widget, the posted data must be converted into a format suitable
for Django's Form validation.
"""
mkeys = [k for k in data.keys() if k.startswith(name + '.')]
mvls = [data.pop(k)[0] for k in mkeys]
if mvls:
data.setlist(name, mvls)
|
[
"def",
"implode_multi_values",
"(",
"self",
",",
"name",
",",
"data",
")",
":",
"mkeys",
"=",
"[",
"k",
"for",
"k",
"in",
"data",
".",
"keys",
"(",
")",
"if",
"k",
".",
"startswith",
"(",
"name",
"+",
"'.'",
")",
"]",
"mvls",
"=",
"[",
"data",
".",
"pop",
"(",
"k",
")",
"[",
"0",
"]",
"for",
"k",
"in",
"mkeys",
"]",
"if",
"mvls",
":",
"data",
".",
"setlist",
"(",
"name",
",",
"mvls",
")"
] |
Due to the way Angular organizes its model, when Form data is sent via a POST request,
then for this kind of widget, the posted data must be converted into a format suitable
for Django's Form validation.
|
[
"Due",
"to",
"the",
"way",
"Angular",
"organizes",
"it",
"model",
"when",
"Form",
"data",
"is",
"sent",
"via",
"a",
"POST",
"request",
"then",
"for",
"this",
"kind",
"of",
"widget",
"the",
"posted",
"data",
"must",
"to",
"be",
"converted",
"into",
"a",
"format",
"suitable",
"for",
"Django",
"s",
"Form",
"validation",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/fields.py#L355-L364
|
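The implode step above, replayed on a plain dict of lists standing in for Django's QueryDict (the real type, whose pop/setlist have the semantics used here). Angular posts a checkbox group as separate 'name.choice' keys.

def implode_multi_values(name, data):
    mkeys = [k for k in data.keys() if k.startswith(name + '.')]
    mvls = [data.pop(k)[0] for k in mkeys]
    if mvls:
        data[name] = mvls  # the original uses data.setlist(name, mvls)

data = {'colors.red': ['red'], 'colors.blue': ['blue'], 'other': ['x']}
implode_multi_values('colors', data)
print(data)  # {'other': ['x'], 'colors': ['red', 'blue']}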
234,782
|
jrief/django-angular
|
djng/forms/fields.py
|
MultipleChoiceField.convert_ajax_data
|
def convert_ajax_data(self, field_data):
"""
Due to the way Angular organizes its model, when this Form data is sent using Ajax,
then for this kind of widget, the sent data has to be converted into a format suitable
for Django's Form validation.
"""
data = [key for key, val in field_data.items() if val]
return data
|
python
|
def convert_ajax_data(self, field_data):
"""
Due to the way Angular organizes its model, when this Form data is sent using Ajax,
then for this kind of widget, the sent data has to be converted into a format suitable
for Django's Form validation.
"""
data = [key for key, val in field_data.items() if val]
return data
|
[
"def",
"convert_ajax_data",
"(",
"self",
",",
"field_data",
")",
":",
"data",
"=",
"[",
"key",
"for",
"key",
",",
"val",
"in",
"field_data",
".",
"items",
"(",
")",
"if",
"val",
"]",
"return",
"data"
] |
Due to the way Angular organizes its model, when this Form data is sent using Ajax,
then for this kind of widget, the sent data has to be converted into a format suitable
for Django's Form validation.
|
[
"Due",
"to",
"the",
"way",
"Angular",
"organizes",
"it",
"model",
"when",
"this",
"Form",
"data",
"is",
"sent",
"using",
"Ajax",
"then",
"for",
"this",
"kind",
"of",
"widget",
"the",
"sent",
"data",
"has",
"to",
"be",
"converted",
"into",
"a",
"format",
"suitable",
"for",
"Django",
"s",
"Form",
"validation",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/fields.py#L366-L373
|
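The Ajax-side conversion above, in one expression: Angular submits the group as a {choice: bool} map, and Django validation wants the list of chosen keys.

field_data = {'red': True, 'blue': False, 'green': True}
print([key for key, val in field_data.items() if val])  # ['red', 'green']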
234,783
|
jrief/django-angular
|
djng/middleware.py
|
AngularUrlMiddleware.process_request
|
def process_request(self, request):
"""
Reads url name, args, kwargs from GET parameters, reverses the url and resolves view function
Returns the result of resolved view function, called with provided args and kwargs
Since the view function is called directly, it isn't run through middlewares, so the middlewares must
be added manually
The final result is exactly the same as if the request was for the resolved view.
Parametrized urls:
djangoUrl.reverse can be used with parametrized urls of $resource
In that case the reverse url is something like: /angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=:id
$resource can either replace the ':id' part with say 2 and we can proceed as usual,
reverse with reverse('orders', kwargs={'id': 2}).
If it's not replaced, we get a request to the url
'/angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=' which
gives a request.GET QueryDict {u'djng_url_name': [u'orders'], u'djng_url_kwarg_id': [u'']}
In that case we want to ignore the id param and only reverse to url with name 'orders' and no params.
So we ignore args and kwargs that are empty strings.
"""
if request.path == self.ANGULAR_REVERSE:
url_name = request.GET.get('djng_url_name')
url_args = request.GET.getlist('djng_url_args', [])
url_kwargs = {}
# Remove falsy values (empty strings)
url_args = filter(lambda x: x, url_args)
# Read kwargs
for param in request.GET:
if param.startswith('djng_url_kwarg_'):
# Ignore kwargs that are empty strings
if request.GET[param]:
url_kwargs[param[15:]] = request.GET[param] # [15:] to remove 'djng_url_kwarg' prefix
url = unquote(reverse(url_name, args=url_args, kwargs=url_kwargs))
assert not url.startswith(self.ANGULAR_REVERSE), "Prevent recursive requests"
# rebuild the request object with a different environ
request.path = request.path_info = url
request.environ['PATH_INFO'] = url
query = request.GET.copy()
for key in request.GET:
if key.startswith('djng_url'):
query.pop(key, None)
if six.PY3:
request.environ['QUERY_STRING'] = query.urlencode()
else:
request.environ['QUERY_STRING'] = query.urlencode().encode('utf-8')
# Reconstruct GET QueryList in the same way WSGIRequest.GET function works
request.GET = http.QueryDict(request.environ['QUERY_STRING'])
|
python
|
def process_request(self, request):
"""
Reads url name, args, kwargs from GET parameters, reverses the url and resolves view function
Returns the result of resolved view function, called with provided args and kwargs
Since the view function is called directly, it isn't run through middlewares, so the middlewares must
be added manually
The final result is exactly the same as if the request was for the resolved view.
Parametrized urls:
djangoUrl.reverse can be used with parametrized urls of $resource
In that case the reverse url is something like: /angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=:id
$resource can either replace the ':id' part with say 2 and we can proceed as usual,
reverse with reverse('orders', kwargs={'id': 2}).
If it's not replaced, we get a request to the url
'/angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=' which
gives a request.GET QueryDict {u'djng_url_name': [u'orders'], u'djng_url_kwarg_id': [u'']}
In that case we want to ignore the id param and only reverse to url with name 'orders' and no params.
So we ignore args and kwargs that are empty strings.
"""
if request.path == self.ANGULAR_REVERSE:
url_name = request.GET.get('djng_url_name')
url_args = request.GET.getlist('djng_url_args', [])
url_kwargs = {}
# Remove falsy values (empty strings)
url_args = filter(lambda x: x, url_args)
# Read kwargs
for param in request.GET:
if param.startswith('djng_url_kwarg_'):
# Ignore kwargs that are empty strings
if request.GET[param]:
url_kwargs[param[15:]] = request.GET[param] # [15:] to remove 'djng_url_kwarg' prefix
url = unquote(reverse(url_name, args=url_args, kwargs=url_kwargs))
assert not url.startswith(self.ANGULAR_REVERSE), "Prevent recursive requests"
# rebuild the request object with a different environ
request.path = request.path_info = url
request.environ['PATH_INFO'] = url
query = request.GET.copy()
for key in request.GET:
if key.startswith('djng_url'):
query.pop(key, None)
if six.PY3:
request.environ['QUERY_STRING'] = query.urlencode()
else:
request.environ['QUERY_STRING'] = query.urlencode().encode('utf-8')
# Reconstruct GET QueryList in the same way WSGIRequest.GET function works
request.GET = http.QueryDict(request.environ['QUERY_STRING'])
|
[
"def",
"process_request",
"(",
"self",
",",
"request",
")",
":",
"if",
"request",
".",
"path",
"==",
"self",
".",
"ANGULAR_REVERSE",
":",
"url_name",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'djng_url_name'",
")",
"url_args",
"=",
"request",
".",
"GET",
".",
"getlist",
"(",
"'djng_url_args'",
",",
"[",
"]",
")",
"url_kwargs",
"=",
"{",
"}",
"# Remove falsy values (empty strings)",
"url_args",
"=",
"filter",
"(",
"lambda",
"x",
":",
"x",
",",
"url_args",
")",
"# Read kwargs",
"for",
"param",
"in",
"request",
".",
"GET",
":",
"if",
"param",
".",
"startswith",
"(",
"'djng_url_kwarg_'",
")",
":",
"# Ignore kwargs that are empty strings",
"if",
"request",
".",
"GET",
"[",
"param",
"]",
":",
"url_kwargs",
"[",
"param",
"[",
"15",
":",
"]",
"]",
"=",
"request",
".",
"GET",
"[",
"param",
"]",
"# [15:] to remove 'djng_url_kwarg' prefix",
"url",
"=",
"unquote",
"(",
"reverse",
"(",
"url_name",
",",
"args",
"=",
"url_args",
",",
"kwargs",
"=",
"url_kwargs",
")",
")",
"assert",
"not",
"url",
".",
"startswith",
"(",
"self",
".",
"ANGULAR_REVERSE",
")",
",",
"\"Prevent recursive requests\"",
"# rebuild the request object with a different environ",
"request",
".",
"path",
"=",
"request",
".",
"path_info",
"=",
"url",
"request",
".",
"environ",
"[",
"'PATH_INFO'",
"]",
"=",
"url",
"query",
"=",
"request",
".",
"GET",
".",
"copy",
"(",
")",
"for",
"key",
"in",
"request",
".",
"GET",
":",
"if",
"key",
".",
"startswith",
"(",
"'djng_url'",
")",
":",
"query",
".",
"pop",
"(",
"key",
",",
"None",
")",
"if",
"six",
".",
"PY3",
":",
"request",
".",
"environ",
"[",
"'QUERY_STRING'",
"]",
"=",
"query",
".",
"urlencode",
"(",
")",
"else",
":",
"request",
".",
"environ",
"[",
"'QUERY_STRING'",
"]",
"=",
"query",
".",
"urlencode",
"(",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"# Reconstruct GET QueryList in the same way WSGIRequest.GET function works",
"request",
".",
"GET",
"=",
"http",
".",
"QueryDict",
"(",
"request",
".",
"environ",
"[",
"'QUERY_STRING'",
"]",
")"
] |
Reads url name, args, kwargs from GET parameters, reverses the url and resolves view function
Returns the result of resolved view function, called with provided args and kwargs
Since the view function is called directly, it isn't run through middlewares, so the middlewares must
be added manually
The final result is exactly the same as if the request was for the resolved view.
Parametrized urls:
djangoUrl.reverse can be used with parametrized urls of $resource
In that case the reverse url is something like: /angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=:id
$resource can either replace the ':id' part with say 2 and we can proceed as usual,
reverse with reverse('orders', kwargs={'id': 2}).
If it's not replaced, we get a request to the url
'/angular/reverse/?djng_url_name=orders&djng_url_kwarg_id=' which
gives a request.GET QueryDict {u'djng_url_name': [u'orders'], u'djng_url_kwarg_id': [u'']}
In that case we want to ignore the id param and only reverse to url with name 'orders' and no params.
So we ignore args and kwargs that are empty strings.
|
[
"Reads",
"url",
"name",
"args",
"kwargs",
"from",
"GET",
"parameters",
"reverses",
"the",
"url",
"and",
"resolves",
"view",
"function",
"Returns",
"the",
"result",
"of",
"resolved",
"view",
"function",
"called",
"with",
"provided",
"args",
"and",
"kwargs",
"Since",
"the",
"view",
"function",
"is",
"called",
"directly",
"it",
"isn",
"t",
"ran",
"through",
"middlewares",
"so",
"the",
"middlewares",
"must",
"be",
"added",
"manually",
"The",
"final",
"result",
"is",
"exactly",
"the",
"same",
"as",
"if",
"the",
"request",
"was",
"for",
"the",
"resolved",
"view",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/middleware.py#L21-L73
|
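The query rewrite in the record above, isolated: djng_url* parameters are dropped before the request is re-resolved, and the kwarg prefix is sliced off by its length (15 characters).

from urllib.parse import parse_qsl, urlencode

query = 'djng_url_name=orders&djng_url_kwarg_id=2&page=3'
kept = [(k, v) for k, v in parse_qsl(query, keep_blank_values=True)
        if not k.startswith('djng_url')]
print(urlencode(kept))                                # page=3
print('djng_url_kwarg_id'[len('djng_url_kwarg_'):])   # id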
234,784
|
jrief/django-angular
|
djng/views/crud.py
|
NgCRUDView.ng_delete
|
def ng_delete(self, request, *args, **kwargs):
"""
Delete object and return its data in JSON encoding
The response is built before the object is actually deleted
so that we can still retrieve a serialization in the response
even with a m2m relationship
"""
if 'pk' not in request.GET:
raise NgMissingParameterError("Object id is required to delete.")
obj = self.get_object()
response = self.build_json_response(obj)
obj.delete()
return response
|
python
|
def ng_delete(self, request, *args, **kwargs):
"""
Delete object and return its data in JSON encoding
The response is built before the object is actually deleted
so that we can still retrieve a serialization in the response
even with a m2m relationship
"""
if 'pk' not in request.GET:
raise NgMissingParameterError("Object id is required to delete.")
obj = self.get_object()
response = self.build_json_response(obj)
obj.delete()
return response
|
[
"def",
"ng_delete",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'pk'",
"not",
"in",
"request",
".",
"GET",
":",
"raise",
"NgMissingParameterError",
"(",
"\"Object id is required to delete.\"",
")",
"obj",
"=",
"self",
".",
"get_object",
"(",
")",
"response",
"=",
"self",
".",
"build_json_response",
"(",
"obj",
")",
"obj",
".",
"delete",
"(",
")",
"return",
"response"
] |
Delete object and return its data in JSON encoding
The response is built before the object is actually deleted
so that we can still retrieve a serialization in the response
even with a m2m relationship
|
[
"Delete",
"object",
"and",
"return",
"it",
"s",
"data",
"in",
"JSON",
"encoding",
"The",
"response",
"is",
"build",
"before",
"the",
"object",
"is",
"actually",
"deleted",
"so",
"that",
"we",
"can",
"still",
"retrieve",
"a",
"serialization",
"in",
"the",
"response",
"even",
"with",
"a",
"m2m",
"relationship"
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/views/crud.py#L170-L183
|
234,785
|
jrief/django-angular
|
djng/forms/angular_model.py
|
NgModelFormMixin._post_clean
|
def _post_clean(self):
"""
Rewrite the error dictionary, so that its keys correspond to the model fields.
"""
super(NgModelFormMixin, self)._post_clean()
if self._errors and self.prefix:
self._errors = ErrorDict((self.add_prefix(name), value) for name, value in self._errors.items())
|
python
|
def _post_clean(self):
"""
Rewrite the error dictionary, so that its keys correspond to the model fields.
"""
super(NgModelFormMixin, self)._post_clean()
if self._errors and self.prefix:
self._errors = ErrorDict((self.add_prefix(name), value) for name, value in self._errors.items())
|
[
"def",
"_post_clean",
"(",
"self",
")",
":",
"super",
"(",
"NgModelFormMixin",
",",
"self",
")",
".",
"_post_clean",
"(",
")",
"if",
"self",
".",
"_errors",
"and",
"self",
".",
"prefix",
":",
"self",
".",
"_errors",
"=",
"ErrorDict",
"(",
"(",
"self",
".",
"add_prefix",
"(",
"name",
")",
",",
"value",
")",
"for",
"name",
",",
"value",
"in",
"self",
".",
"_errors",
".",
"items",
"(",
")",
")"
] |
Rewrite the error dictionary, so that its keys correspond to the model fields.
|
[
"Rewrite",
"the",
"error",
"dictionary",
"so",
"that",
"its",
"keys",
"correspond",
"to",
"the",
"model",
"fields",
"."
] |
9f2f8247027173e3b3ad3b245ca299a9c9f31b40
|
https://github.com/jrief/django-angular/blob/9f2f8247027173e3b3ad3b245ca299a9c9f31b40/djng/forms/angular_model.py#L42-L48
|
234,786
|
WoLpH/python-progressbar
|
progressbar/bar.py
|
ProgressBar.percentage
|
def percentage(self):
'''Return current percentage, returns None if no max_value is given
>>> progress = ProgressBar()
>>> progress.max_value = 10
>>> progress.min_value = 0
>>> progress.value = 0
>>> progress.percentage
0.0
>>>
>>> progress.value = 1
>>> progress.percentage
10.0
>>> progress.value = 10
>>> progress.percentage
100.0
>>> progress.min_value = -10
>>> progress.percentage
100.0
>>> progress.value = 0
>>> progress.percentage
50.0
>>> progress.value = 5
>>> progress.percentage
75.0
>>> progress.value = -5
>>> progress.percentage
25.0
>>> progress.max_value = None
>>> progress.percentage
'''
if self.max_value is None or self.max_value is base.UnknownLength:
return None
elif self.max_value:
todo = self.value - self.min_value
total = self.max_value - self.min_value
percentage = todo / total
else:
percentage = 1
return percentage * 100
|
python
|
def percentage(self):
'''Return current percentage, returns None if no max_value is given
>>> progress = ProgressBar()
>>> progress.max_value = 10
>>> progress.min_value = 0
>>> progress.value = 0
>>> progress.percentage
0.0
>>>
>>> progress.value = 1
>>> progress.percentage
10.0
>>> progress.value = 10
>>> progress.percentage
100.0
>>> progress.min_value = -10
>>> progress.percentage
100.0
>>> progress.value = 0
>>> progress.percentage
50.0
>>> progress.value = 5
>>> progress.percentage
75.0
>>> progress.value = -5
>>> progress.percentage
25.0
>>> progress.max_value = None
>>> progress.percentage
'''
if self.max_value is None or self.max_value is base.UnknownLength:
return None
elif self.max_value:
todo = self.value - self.min_value
total = self.max_value - self.min_value
percentage = todo / total
else:
percentage = 1
return percentage * 100
|
[
"def",
"percentage",
"(",
"self",
")",
":",
"if",
"self",
".",
"max_value",
"is",
"None",
"or",
"self",
".",
"max_value",
"is",
"base",
".",
"UnknownLength",
":",
"return",
"None",
"elif",
"self",
".",
"max_value",
":",
"todo",
"=",
"self",
".",
"value",
"-",
"self",
".",
"min_value",
"total",
"=",
"self",
".",
"max_value",
"-",
"self",
".",
"min_value",
"percentage",
"=",
"todo",
"/",
"total",
"else",
":",
"percentage",
"=",
"1",
"return",
"percentage",
"*",
"100"
] |
Return current percentage, returns None if no max_value is given
>>> progress = ProgressBar()
>>> progress.max_value = 10
>>> progress.min_value = 0
>>> progress.value = 0
>>> progress.percentage
0.0
>>>
>>> progress.value = 1
>>> progress.percentage
10.0
>>> progress.value = 10
>>> progress.percentage
100.0
>>> progress.min_value = -10
>>> progress.percentage
100.0
>>> progress.value = 0
>>> progress.percentage
50.0
>>> progress.value = 5
>>> progress.percentage
75.0
>>> progress.value = -5
>>> progress.percentage
25.0
>>> progress.max_value = None
>>> progress.percentage
|
[
"Return",
"current",
"percentage",
"returns",
"None",
"if",
"no",
"max_value",
"is",
"given"
] |
963617a1bb9d81624ecf31f3457185992cd97bfa
|
https://github.com/WoLpH/python-progressbar/blob/963617a1bb9d81624ecf31f3457185992cd97bfa/progressbar/bar.py#L297-L337
|
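The arithmetic above as a plain function (UnknownLength handling omitted for brevity); the values mirror the record's doctest.

def percentage(value, min_value, max_value):
    if max_value is None:
        return None
    if max_value:
        return 100 * (value - min_value) / (max_value - min_value)
    return 100

print(percentage(5, -10, 10))  # 75.0
print(percentage(0, -10, 10))  # 50.0
print(percentage(0, 0, None))  # None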
234,787
|
WoLpH/python-progressbar
|
examples.py
|
example
|
def example(fn):
'''Wrap the examples so they generate readable output'''
@functools.wraps(fn)
def wrapped():
try:
sys.stdout.write('Running: %s\n' % fn.__name__)
fn()
sys.stdout.write('\n')
except KeyboardInterrupt:
sys.stdout.write('\nSkipping example.\n\n')
# Sleep a bit to make killing the script easier
time.sleep(0.2)
examples.append(wrapped)
return wrapped
|
python
|
def example(fn):
'''Wrap the examples so they generate readable output'''
@functools.wraps(fn)
def wrapped():
try:
sys.stdout.write('Running: %s\n' % fn.__name__)
fn()
sys.stdout.write('\n')
except KeyboardInterrupt:
sys.stdout.write('\nSkipping example.\n\n')
# Sleep a bit to make killing the script easier
time.sleep(0.2)
examples.append(wrapped)
return wrapped
|
[
"def",
"example",
"(",
"fn",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"fn",
")",
"def",
"wrapped",
"(",
")",
":",
"try",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'Running: %s\\n'",
"%",
"fn",
".",
"__name__",
")",
"fn",
"(",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\n'",
")",
"except",
"KeyboardInterrupt",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\nSkipping example.\\n\\n'",
")",
"# Sleep a bit to make killing the script easier",
"time",
".",
"sleep",
"(",
"0.2",
")",
"examples",
".",
"append",
"(",
"wrapped",
")",
"return",
"wrapped"
] |
Wrap the examples so they generate readable output
|
[
"Wrap",
"the",
"examples",
"so",
"they",
"generate",
"readable",
"output"
] |
963617a1bb9d81624ecf31f3457185992cd97bfa
|
https://github.com/WoLpH/python-progressbar/blob/963617a1bb9d81624ecf31f3457185992cd97bfa/examples.py#L16-L31
|
234,788
|
rigetti/quantumflow
|
quantumflow/datasets/__init__.py
|
load_stdgraphs
|
def load_stdgraphs(size: int) -> List[nx.Graph]:
"""Load standard graph validation sets
For each size (from 6 to 32 graph nodes) the dataset consists of
100 graphs drawn from the Erdős-Rényi ensemble with edge
probability 50%.
"""
from pkg_resources import resource_stream
if size < 6 or size > 32:
raise ValueError('Size out of range.')
filename = 'datasets/data/graph{}er100.g6'.format(size)
fdata = resource_stream('quantumflow', filename)
return nx.read_graph6(fdata)
|
python
|
def load_stdgraphs(size: int) -> List[nx.Graph]:
"""Load standard graph validation sets
For each size (from 6 to 32 graph nodes) the dataset consists of
100 graphs drawn from the Erdős-Rényi ensemble with edge
probability 50%.
"""
from pkg_resources import resource_stream
if size < 6 or size > 32:
raise ValueError('Size out of range.')
filename = 'datasets/data/graph{}er100.g6'.format(size)
fdata = resource_stream('quantumflow', filename)
return nx.read_graph6(fdata)
|
[
"def",
"load_stdgraphs",
"(",
"size",
":",
"int",
")",
"->",
"List",
"[",
"nx",
".",
"Graph",
"]",
":",
"from",
"pkg_resources",
"import",
"resource_stream",
"if",
"size",
"<",
"6",
"or",
"size",
">",
"32",
":",
"raise",
"ValueError",
"(",
"'Size out of range.'",
")",
"filename",
"=",
"'datasets/data/graph{}er100.g6'",
".",
"format",
"(",
"size",
")",
"fdata",
"=",
"resource_stream",
"(",
"'quantumflow'",
",",
"filename",
")",
"return",
"nx",
".",
"read_graph6",
"(",
"fdata",
")"
] |
Load standard graph validation sets
For each size (from 6 to 32 graph nodes) the dataset consists of
100 graphs drawn from the Erdős-Rényi ensemble with edge
probability 50%.
|
[
"Load",
"standard",
"graph",
"validation",
"sets"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/datasets/__init__.py#L23-L37
|
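The packaged validation sets above are Erdős-Rényi samples with edge probability 0.5; comparable graphs can be drawn directly with networkx instead of reading the bundled .g6 files. The size and count here mirror the description, not the exact packaged data.

import networkx as nx

graphs = [nx.gnp_random_graph(n=8, p=0.5, seed=seed) for seed in range(100)]
print(len(graphs), graphs[0].number_of_nodes())  # 100 8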
234,789
|
rigetti/quantumflow
|
quantumflow/datasets/__init__.py
|
load_mnist
|
def load_mnist(size: int = None,
border: int = _MNIST_BORDER,
blank_corners: bool = False,
nums: List[int] = None) \
-> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""Download and rescale the MNIST database of handwritten digits
MNIST is a dataset of 60,000 28x28 grayscale images of handwritten digits,
along with a test set of 10,000 images. We use Keras to download and
access the dataset. The first invocation of this method may take a while
as the dataset has to be downloaded and cached.
If size is None, then we return the original MNIST data.
For rescaled MNIST, we chop off the border, downsample to the
desired size with Lanczos resampling, and then (optionally) zero out the
corner pixels.
Returns (x_train, y_train, x_test, y_test)
x_train ndarray of shape (60000, size, size)
y_train ndarray of shape (60000,)
x_test ndarray of shape (10000, size, size)
y_test ndarray of shape (10000,)
"""
# DOCME: Fix up formatting above,
# DOCME: Explain nums argument
# JIT import since keras startup is slow
from keras.datasets import mnist
def _filter_mnist(x: np.ndarray, y: np.ndarray, nums: List[int] = None) \
-> Tuple[np.ndarray, np.ndarray]:
xt = []
yt = []
items = len(y)
for n in range(items):
if nums is not None and y[n] in nums:
xt.append(x[n])
yt.append(y[n])
xt = np.stack(xt)
yt = np.stack(yt)
return xt, yt
def _rescale(imgarray: np.ndarray, size: int) -> np.ndarray:
N = imgarray.shape[0]
# Chop off border
imgarray = imgarray[:, border:-border, border:-border]
rescaled = np.zeros(shape=(N, size, size), dtype=np.float)
for n in range(0, N):
img = Image.fromarray(imgarray[n])
img = img.resize((size, size), Image.LANCZOS)
rsc = np.asarray(img).reshape((size, size))
rsc = 256.*rsc/rsc.max()
rescaled[n] = rsc
return rescaled.astype(dtype=np.uint8)
def _blank_corners(imgarray: np.ndarray) -> None:
# Zero out corners
sz = imgarray.shape[1]
corner = (sz//2)-1
for x in range(0, corner):
for y in range(0, corner-x):
imgarray[:, x, y] = 0
imgarray[:, -(1+x), y] = 0
imgarray[:, -(1+x), -(1+y)] = 0
imgarray[:, x, -(1+y)] = 0
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if nums:
x_train, y_train = _filter_mnist(x_train, y_train, nums)
x_test, y_test = _filter_mnist(x_test, y_test, nums)
if size:
x_train = _rescale(x_train, size)
x_test = _rescale(x_test, size)
if blank_corners:
_blank_corners(x_train)
_blank_corners(x_test)
return x_train, y_train, x_test, y_test
|
python
|
def load_mnist(size: int = None,
border: int = _MNIST_BORDER,
blank_corners: bool = False,
nums: List[int] = None) \
-> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""Download and rescale the MNIST database of handwritten digits
MNIST is a dataset of 60,000 28x28 grayscale images of handwritten digits,
along with a test set of 10,000 images. We use Keras to download and
access the dataset. The first invocation of this method may take a while
as the dataset has to be downloaded and cached.
If size is None, then we return the original MNIST data.
For rescaled MNIST, we chop off the border, downsample to the
desired size with Lanczos resampling, and then (optionally) zero out the
corner pixels.
Returns (x_train, y_train, x_test, y_test)
x_train ndarray of shape (60000, size, size)
y_train ndarray of shape (60000,)
x_test ndarray of shape (10000, size, size)
y_test ndarray of shape (10000,)
"""
# DOCME: Fix up formatting above,
# DOCME: Explain nums argument
# JIT import since keras startup is slow
from keras.datasets import mnist
def _filter_mnist(x: np.ndarray, y: np.ndarray, nums: List[int] = None) \
-> Tuple[np.ndarray, np.ndarray]:
xt = []
yt = []
items = len(y)
for n in range(items):
if nums is not None and y[n] in nums:
xt.append(x[n])
yt.append(y[n])
xt = np.stack(xt)
yt = np.stack(yt)
return xt, yt
def _rescale(imgarray: np.ndarray, size: int) -> np.ndarray:
N = imgarray.shape[0]
# Chop off border
imgarray = imgarray[:, border:-border, border:-border]
rescaled = np.zeros(shape=(N, size, size), dtype=np.float)
for n in range(0, N):
img = Image.fromarray(imgarray[n])
img = img.resize((size, size), Image.LANCZOS)
rsc = np.asarray(img).reshape((size, size))
rsc = 256.*rsc/rsc.max()
rescaled[n] = rsc
return rescaled.astype(dtype=np.uint8)
def _blank_corners(imgarray: np.ndarray) -> None:
# Zero out corners
sz = imgarray.shape[1]
corner = (sz//2)-1
for x in range(0, corner):
for y in range(0, corner-x):
imgarray[:, x, y] = 0
imgarray[:, -(1+x), y] = 0
imgarray[:, -(1+x), -(1+y)] = 0
imgarray[:, x, -(1+y)] = 0
(x_train, y_train), (x_test, y_test) = mnist.load_data()
if nums:
x_train, y_train = _filter_mnist(x_train, y_train, nums)
x_test, y_test = _filter_mnist(x_test, y_test, nums)
if size:
x_train = _rescale(x_train, size)
x_test = _rescale(x_test, size)
if blank_corners:
_blank_corners(x_train)
_blank_corners(x_test)
return x_train, y_train, x_test, y_test
|
[
"def",
"load_mnist",
"(",
"size",
":",
"int",
"=",
"None",
",",
"border",
":",
"int",
"=",
"_MNIST_BORDER",
",",
"blank_corners",
":",
"bool",
"=",
"False",
",",
"nums",
":",
"List",
"[",
"int",
"]",
"=",
"None",
")",
"->",
"Tuple",
"[",
"np",
".",
"ndarray",
",",
"np",
".",
"ndarray",
",",
"np",
".",
"ndarray",
",",
"np",
".",
"ndarray",
"]",
":",
"# DOCME: Fix up formatting above,",
"# DOCME: Explain nums argument",
"# JIT import since keras startup is slow",
"from",
"keras",
".",
"datasets",
"import",
"mnist",
"def",
"_filter_mnist",
"(",
"x",
":",
"np",
".",
"ndarray",
",",
"y",
":",
"np",
".",
"ndarray",
",",
"nums",
":",
"List",
"[",
"int",
"]",
"=",
"None",
")",
"->",
"Tuple",
"[",
"np",
".",
"ndarray",
",",
"np",
".",
"ndarray",
"]",
":",
"xt",
"=",
"[",
"]",
"yt",
"=",
"[",
"]",
"items",
"=",
"len",
"(",
"y",
")",
"for",
"n",
"in",
"range",
"(",
"items",
")",
":",
"if",
"nums",
"is",
"not",
"None",
"and",
"y",
"[",
"n",
"]",
"in",
"nums",
":",
"xt",
".",
"append",
"(",
"x",
"[",
"n",
"]",
")",
"yt",
".",
"append",
"(",
"y",
"[",
"n",
"]",
")",
"xt",
"=",
"np",
".",
"stack",
"(",
"xt",
")",
"yt",
"=",
"np",
".",
"stack",
"(",
"yt",
")",
"return",
"xt",
",",
"yt",
"def",
"_rescale",
"(",
"imgarray",
":",
"np",
".",
"ndarray",
",",
"size",
":",
"int",
")",
"->",
"np",
".",
"ndarray",
":",
"N",
"=",
"imgarray",
".",
"shape",
"[",
"0",
"]",
"# Chop off border",
"imgarray",
"=",
"imgarray",
"[",
":",
",",
"border",
":",
"-",
"border",
",",
"border",
":",
"-",
"border",
"]",
"rescaled",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"(",
"N",
",",
"size",
",",
"size",
")",
",",
"dtype",
"=",
"np",
".",
"float",
")",
"for",
"n",
"in",
"range",
"(",
"0",
",",
"N",
")",
":",
"img",
"=",
"Image",
".",
"fromarray",
"(",
"imgarray",
"[",
"n",
"]",
")",
"img",
"=",
"img",
".",
"resize",
"(",
"(",
"size",
",",
"size",
")",
",",
"Image",
".",
"LANCZOS",
")",
"rsc",
"=",
"np",
".",
"asarray",
"(",
"img",
")",
".",
"reshape",
"(",
"(",
"size",
",",
"size",
")",
")",
"rsc",
"=",
"256.",
"*",
"rsc",
"/",
"rsc",
".",
"max",
"(",
")",
"rescaled",
"[",
"n",
"]",
"=",
"rsc",
"return",
"rescaled",
".",
"astype",
"(",
"dtype",
"=",
"np",
".",
"uint8",
")",
"def",
"_blank_corners",
"(",
"imgarray",
":",
"np",
".",
"ndarray",
")",
"->",
"None",
":",
"# Zero out corners",
"sz",
"=",
"imgarray",
".",
"shape",
"[",
"1",
"]",
"corner",
"=",
"(",
"sz",
"//",
"2",
")",
"-",
"1",
"for",
"x",
"in",
"range",
"(",
"0",
",",
"corner",
")",
":",
"for",
"y",
"in",
"range",
"(",
"0",
",",
"corner",
"-",
"x",
")",
":",
"imgarray",
"[",
":",
",",
"x",
",",
"y",
"]",
"=",
"0",
"imgarray",
"[",
":",
",",
"-",
"(",
"1",
"+",
"x",
")",
",",
"y",
"]",
"=",
"0",
"imgarray",
"[",
":",
",",
"-",
"(",
"1",
"+",
"x",
")",
",",
"-",
"(",
"1",
"+",
"y",
")",
"]",
"=",
"0",
"imgarray",
"[",
":",
",",
"x",
",",
"-",
"(",
"1",
"+",
"y",
")",
"]",
"=",
"0",
"(",
"x_train",
",",
"y_train",
")",
",",
"(",
"x_test",
",",
"y_test",
")",
"=",
"mnist",
".",
"load_data",
"(",
")",
"if",
"nums",
":",
"x_train",
",",
"y_train",
"=",
"_filter_mnist",
"(",
"x_train",
",",
"y_train",
",",
"nums",
")",
"x_test",
",",
"y_test",
"=",
"_filter_mnist",
"(",
"x_test",
",",
"y_test",
",",
"nums",
")",
"if",
"size",
":",
"x_train",
"=",
"_rescale",
"(",
"x_train",
",",
"size",
")",
"x_test",
"=",
"_rescale",
"(",
"x_test",
",",
"size",
")",
"if",
"blank_corners",
":",
"_blank_corners",
"(",
"x_train",
")",
"_blank_corners",
"(",
"x_test",
")",
"return",
"x_train",
",",
"y_train",
",",
"x_test",
",",
"y_test"
] |
Download and rescale the MNIST database of handwritten digits

MNIST is a dataset of 60,000 28x28 grayscale images of handwritten digits,
along with a test set of 10,000 images. We use Keras to download and
access the dataset. The first invocation of this method may take a while
as the dataset has to be downloaded and cached.

If size is None, then we return the original MNIST data.
For rescaled MNIST, we chop off the border, downsample to the
desired size with Lanczos resampling, and then (optionally) zero out the
corner pixels.

Returns (x_train, y_train, x_test, y_test)

x_train ndarray of shape (60000, size, size)
y_train ndarray of shape (60000,)
x_test ndarray of shape (10000, size, size)
y_test ndarray of shape (10000,)
|
[
"Download",
"and",
"rescale",
"the",
"MNIST",
"database",
"of",
"handwritten",
"digits"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/datasets/__init__.py#L43-L127
|
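A minimal usage sketch for the load_mnist row above. It assumes quantumflow is installed and exposes the function from quantumflow.datasets, as the path column suggests; the first call downloads MNIST through Keras.

import quantumflow.datasets as qfd  # assumed import path, per this row

# 8x8 images of the digits 0 and 1 only, with corner pixels zeroed.
x_train, y_train, x_test, y_test = qfd.load_mnist(size=8,
                                                  blank_corners=True,
                                                  nums=[0, 1])

print(x_train.shape)  # (n_train, 8, 8), dtype uint8 after rescaling
print(set(y_train))   # {0, 1}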
234,790
|
rigetti/quantumflow
|
quantumflow/backend/tensorflow2bk.py
|
astensor
|
def astensor(array: TensorLike) -> BKTensor:
    """Convert numpy array to tensorflow tensor"""
    tensor = tf.convert_to_tensor(value=array, dtype=CTYPE)
    return tensor
|
python
|
def astensor(array: TensorLike) -> BKTensor:
    """Convert numpy array to tensorflow tensor"""
    tensor = tf.convert_to_tensor(value=array, dtype=CTYPE)
    return tensor
|
[
"def",
"astensor",
"(",
"array",
":",
"TensorLike",
")",
"->",
"BKTensor",
":",
"tensor",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"array",
",",
"dtype",
"=",
"CTYPE",
")",
"return",
"tensor"
] |
Convert numpy array to tensorflow tensor
|
[
"Covert",
"numpy",
"array",
"to",
"tensorflow",
"tensor"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/tensorflow2bk.py#L74-L77
|
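A sketch of the conversion astensor wraps, in plain TensorFlow 2. The complex dtype below stands in for the backend's CTYPE constant, which is an assumption here, not read from the source.

import numpy as np
import tensorflow as tf

CTYPE = tf.complex128  # assumed stand-in for the backend's CTYPE

array = np.array([1.0, 0.0])  # the ket |0> as a numpy array
tensor = tf.convert_to_tensor(value=array, dtype=CTYPE)
print(tensor.dtype)  # complex128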
234,791
|
rigetti/quantumflow
|
quantumflow/backend/tensorflow2bk.py
|
inner
|
def inner(tensor0: BKTensor, tensor1: BKTensor) -> BKTensor:
    """Return the inner product between two states"""
    # Note: Relying on fact that vdot flattens arrays
    N = rank(tensor0)
    axes = list(range(N))
    return tf.tensordot(tf.math.conj(tensor0), tensor1, axes=(axes, axes))
|
python
|
def inner(tensor0: BKTensor, tensor1: BKTensor) -> BKTensor:
    """Return the inner product between two states"""
    # Note: Relying on fact that vdot flattens arrays
    N = rank(tensor0)
    axes = list(range(N))
    return tf.tensordot(tf.math.conj(tensor0), tensor1, axes=(axes, axes))
|
[
"def",
"inner",
"(",
"tensor0",
":",
"BKTensor",
",",
"tensor1",
":",
"BKTensor",
")",
"->",
"BKTensor",
":",
"# Note: Relying on fact that vdot flattens arrays",
"N",
"=",
"rank",
"(",
"tensor0",
")",
"axes",
"=",
"list",
"(",
"range",
"(",
"N",
")",
")",
"return",
"tf",
".",
"tensordot",
"(",
"tf",
".",
"math",
".",
"conj",
"(",
"tensor0",
")",
",",
"tensor1",
",",
"axes",
"=",
"(",
"axes",
",",
"axes",
")",
")"
] |
Return the inner product between two states
|
[
"Return",
"the",
"inner",
"product",
"between",
"two",
"states"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/backend/tensorflow2bk.py#L92-L97
|
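Because the tensordot above contracts every axis of the conjugated first argument with the matching axis of the second, it reduces to a flattened vdot; a small numpy cross-check, assuming rank() returns the number of tensor axes:

import numpy as np
import tensorflow as tf

t0 = tf.constant(np.random.randn(2, 2) + 1j * np.random.randn(2, 2))
t1 = tf.constant(np.random.randn(2, 2) + 1j * np.random.randn(2, 2))

axes = list(range(2))  # rank(t0) == 2 for a two-qubit state tensor
tf_inner = tf.tensordot(tf.math.conj(t0), t1, axes=(axes, axes))

# np.vdot conjugates its first argument and flattens both, so it agrees:
assert np.allclose(tf_inner.numpy(), np.vdot(t0.numpy(), t1.numpy()))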
234,792
|
rigetti/quantumflow
|
quantumflow/qaoa.py
|
graph_cuts
|
def graph_cuts(graph: nx.Graph) -> np.ndarray:
    """For the given graph, return the cut value for all binary assignments
    of the graph.
    """
    N = len(graph)
    diag_hamiltonian = np.zeros(shape=([2]*N), dtype=np.double)
    for q0, q1 in graph.edges():
        for index, _ in np.ndenumerate(diag_hamiltonian):
            if index[q0] != index[q1]:
                weight = graph[q0][q1].get('weight', 1)
                diag_hamiltonian[index] += weight

    return diag_hamiltonian
|
python
|
def graph_cuts(graph: nx.Graph) -> np.ndarray:
    """For the given graph, return the cut value for all binary assignments
    of the graph.
    """
    N = len(graph)
    diag_hamiltonian = np.zeros(shape=([2]*N), dtype=np.double)
    for q0, q1 in graph.edges():
        for index, _ in np.ndenumerate(diag_hamiltonian):
            if index[q0] != index[q1]:
                weight = graph[q0][q1].get('weight', 1)
                diag_hamiltonian[index] += weight

    return diag_hamiltonian
|
[
"def",
"graph_cuts",
"(",
"graph",
":",
"nx",
".",
"Graph",
")",
"->",
"np",
".",
"ndarray",
":",
"N",
"=",
"len",
"(",
"graph",
")",
"diag_hamiltonian",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"(",
"[",
"2",
"]",
"*",
"N",
")",
",",
"dtype",
"=",
"np",
".",
"double",
")",
"for",
"q0",
",",
"q1",
"in",
"graph",
".",
"edges",
"(",
")",
":",
"for",
"index",
",",
"_",
"in",
"np",
".",
"ndenumerate",
"(",
"diag_hamiltonian",
")",
":",
"if",
"index",
"[",
"q0",
"]",
"!=",
"index",
"[",
"q1",
"]",
":",
"weight",
"=",
"graph",
"[",
"q0",
"]",
"[",
"q1",
"]",
".",
"get",
"(",
"'weight'",
",",
"1",
")",
"diag_hamiltonian",
"[",
"index",
"]",
"+=",
"weight",
"return",
"diag_hamiltonian"
] |
For the given graph, return the cut value for all binary assignments
of the graph.
|
[
"For",
"the",
"given",
"graph",
"return",
"the",
"cut",
"value",
"for",
"all",
"binary",
"assignments",
"of",
"the",
"graph",
"."
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/qaoa.py#L68-L81
|
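A quick sanity check of graph_cuts on a triangle graph, assuming the function is importable from quantumflow.qaoa as the path column indicates. For a triangle, the uniform bitstrings cut nothing and every other assignment cuts exactly two edges.

import networkx as nx
from quantumflow.qaoa import graph_cuts  # assumed import path, per this row

graph = nx.Graph([(0, 1), (1, 2), (0, 2)])  # unweighted triangle
cuts = graph_cuts(graph)

print(cuts.shape)     # (2, 2, 2): one entry per binary assignment
print(cuts[0, 0, 0])  # 0.0 -- all-zeros cuts no edge
print(cuts[0, 1, 0])  # 2.0 -- flipping one vertex cuts two edges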
234,793
|
rigetti/quantumflow
|
quantumflow/dagcircuit.py
|
DAGCircuit.depth
|
def depth(self, local: bool = True) -> int:
    """Return the circuit depth.

    Args:
        local: If True include local one-qubit gates in depth
            calculation. Else return the multi-qubit gate depth.
    """
    G = self.graph
    if not local:
        def remove_local(dagc: DAGCircuit) \
                -> Generator[Operation, None, None]:
            for elem in dagc:
                if dagc.graph.degree[elem] > 2:
                    yield elem
        G = DAGCircuit(remove_local(self)).graph

    return nx.dag_longest_path_length(G) - 1
|
python
|
def depth(self, local: bool = True) -> int:
    """Return the circuit depth.

    Args:
        local: If True include local one-qubit gates in depth
            calculation. Else return the multi-qubit gate depth.
    """
    G = self.graph
    if not local:
        def remove_local(dagc: DAGCircuit) \
                -> Generator[Operation, None, None]:
            for elem in dagc:
                if dagc.graph.degree[elem] > 2:
                    yield elem
        G = DAGCircuit(remove_local(self)).graph

    return nx.dag_longest_path_length(G) - 1
|
[
"def",
"depth",
"(",
"self",
",",
"local",
":",
"bool",
"=",
"True",
")",
"->",
"int",
":",
"G",
"=",
"self",
".",
"graph",
"if",
"not",
"local",
":",
"def",
"remove_local",
"(",
"dagc",
":",
"DAGCircuit",
")",
"->",
"Generator",
"[",
"Operation",
",",
"None",
",",
"None",
"]",
":",
"for",
"elem",
"in",
"dagc",
":",
"if",
"dagc",
".",
"graph",
".",
"degree",
"[",
"elem",
"]",
">",
"2",
":",
"yield",
"elem",
"G",
"=",
"DAGCircuit",
"(",
"remove_local",
"(",
"self",
")",
")",
".",
"graph",
"return",
"nx",
".",
"dag_longest_path_length",
"(",
"G",
")",
"-",
"1"
] |
Return the circuit depth.

Args:
    local: If True include local one-qubit gates in depth
        calculation. Else return the multi-qubit gate depth.
|
[
"Return",
"the",
"circuit",
"depth",
"."
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/dagcircuit.py#L97-L113
|
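The depth above is the longest path through the gate DAG minus one. Here is a self-contained networkx sketch of the same count on a hand-built DAG; the in/out terminal nodes and gate names are illustrative assumptions, not quantumflow's internal representation.

import networkx as nx

# qubit 0: in0 -> H -> CNOT -> out0    qubit 1: in1 -> CNOT -> out1
G = nx.DiGraph([("in0", "H"), ("H", "CNOT"), ("CNOT", "out0"),
                ("in1", "CNOT"), ("CNOT", "out1")])

# The longest path has 3 edges (in0 -> H -> CNOT -> out0); subtracting 1
# removes the contribution of the terminal nodes, leaving the 2-gate depth.
assert nx.dag_longest_path_length(G) - 1 == 2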
234,794
|
rigetti/quantumflow
|
quantumflow/dagcircuit.py
|
DAGCircuit.components
|
def components(self) -> List['DAGCircuit']:
    """Split DAGCircuit into independent components"""
    comps = nx.weakly_connected_component_subgraphs(self.graph)
    return [DAGCircuit(comp) for comp in comps]
|
python
|
def components(self) -> List['DAGCircuit']:
    """Split DAGCircuit into independent components"""
    comps = nx.weakly_connected_component_subgraphs(self.graph)
    return [DAGCircuit(comp) for comp in comps]
|
[
"def",
"components",
"(",
"self",
")",
"->",
"List",
"[",
"'DAGCircuit'",
"]",
":",
"comps",
"=",
"nx",
".",
"weakly_connected_component_subgraphs",
"(",
"self",
".",
"graph",
")",
"return",
"[",
"DAGCircuit",
"(",
"comp",
")",
"for",
"comp",
"in",
"comps",
"]"
] |
Split DAGCircuit into independent components
|
[
"Split",
"DAGCircuit",
"into",
"independent",
"components"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/dagcircuit.py#L124-L127
|
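Note that nx.weakly_connected_component_subgraphs was removed in NetworkX 2.4. On current NetworkX the same split can be written as below; this is a sketch of the replacement idiom, not quantumflow's code.

import networkx as nx

def weakly_connected_subgraphs(G: nx.DiGraph):
    # Replacement for the removed nx.weakly_connected_component_subgraphs
    for nodes in nx.weakly_connected_components(G):
        yield G.subgraph(nodes).copy()

G = nx.DiGraph([(0, 1), (2, 3)])  # two independent components
assert len(list(weakly_connected_subgraphs(G))) == 2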
234,795
|
rigetti/quantumflow
|
quantumflow/states.py
|
zero_state
|
def zero_state(qubits: Union[int, Qubits]) -> State:
    """Return the all-zero state on N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.zeros(shape=[2] * N)
    ket[(0,) * N] = 1
    return State(ket, qubits)
|
python
|
def zero_state(qubits: Union[int, Qubits]) -> State:
    """Return the all-zero state on N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.zeros(shape=[2] * N)
    ket[(0,) * N] = 1
    return State(ket, qubits)
|
[
"def",
"zero_state",
"(",
"qubits",
":",
"Union",
"[",
"int",
",",
"Qubits",
"]",
")",
"->",
"State",
":",
"N",
",",
"qubits",
"=",
"qubits_count_tuple",
"(",
"qubits",
")",
"ket",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"[",
"2",
"]",
"*",
"N",
")",
"ket",
"[",
"(",
"0",
",",
")",
"*",
"N",
"]",
"=",
"1",
"return",
"State",
"(",
"ket",
",",
"qubits",
")"
] |
Return the all-zero state on N qubits
|
[
"Return",
"the",
"all",
"-",
"zero",
"state",
"on",
"N",
"qubits"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L186-L191
|
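The ket here is a rank-N tensor with a single unit amplitude at index (0, ..., 0); a plain numpy sketch of the construction for N = 2:

import numpy as np

N = 2
ket = np.zeros(shape=[2] * N)
ket[(0,) * N] = 1  # amplitude 1 on |00>

assert ket.shape == (2, 2)
assert np.isclose(np.sum(np.abs(ket) ** 2), 1.0)  # already normalized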
234,796
|
rigetti/quantumflow
|
quantumflow/states.py
|
w_state
|
def w_state(qubits: Union[int, Qubits]) -> State:
    """Return a W state on N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.zeros(shape=[2] * N)
    for n in range(N):
        idx = np.zeros(shape=N, dtype=int)
        idx[n] += 1
        ket[tuple(idx)] = 1 / sqrt(N)
    return State(ket, qubits)
|
python
|
def w_state(qubits: Union[int, Qubits]) -> State:
    """Return a W state on N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.zeros(shape=[2] * N)
    for n in range(N):
        idx = np.zeros(shape=N, dtype=int)
        idx[n] += 1
        ket[tuple(idx)] = 1 / sqrt(N)
    return State(ket, qubits)
|
[
"def",
"w_state",
"(",
"qubits",
":",
"Union",
"[",
"int",
",",
"Qubits",
"]",
")",
"->",
"State",
":",
"N",
",",
"qubits",
"=",
"qubits_count_tuple",
"(",
"qubits",
")",
"ket",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"[",
"2",
"]",
"*",
"N",
")",
"for",
"n",
"in",
"range",
"(",
"N",
")",
":",
"idx",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"N",
",",
"dtype",
"=",
"int",
")",
"idx",
"[",
"n",
"]",
"+=",
"1",
"ket",
"[",
"tuple",
"(",
"idx",
")",
"]",
"=",
"1",
"/",
"sqrt",
"(",
"N",
")",
"return",
"State",
"(",
"ket",
",",
"qubits",
")"
] |
Return a W state on N qubits
|
[
"Return",
"a",
"W",
"state",
"on",
"N",
"qubits"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L194-L202
|
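A numpy cross-check of the W-state amplitudes: the N single-excitation basis states each carry amplitude 1/sqrt(N), so the squared weights sum to one.

import numpy as np
from math import sqrt

N = 3
ket = np.zeros(shape=[2] * N)
for n in range(N):
    idx = np.zeros(shape=N, dtype=int)
    idx[n] += 1
    ket[tuple(idx)] = 1 / sqrt(N)

# |100>, |010>, |001> each with amplitude 1/sqrt(3)
assert np.isclose(ket[1, 0, 0], 1 / sqrt(3))
assert np.isclose(np.sum(np.abs(ket) ** 2), 1.0)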
234,797
|
rigetti/quantumflow
|
quantumflow/states.py
|
ghz_state
|
def ghz_state(qubits: Union[int, Qubits]) -> State:
    """Return a GHZ state on N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.zeros(shape=[2] * N)
    ket[(0, ) * N] = 1 / sqrt(2)
    ket[(1, ) * N] = 1 / sqrt(2)
    return State(ket, qubits)
|
python
|
def ghz_state(qubits: Union[int, Qubits]) -> State:
    """Return a GHZ state on N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.zeros(shape=[2] * N)
    ket[(0, ) * N] = 1 / sqrt(2)
    ket[(1, ) * N] = 1 / sqrt(2)
    return State(ket, qubits)
|
[
"def",
"ghz_state",
"(",
"qubits",
":",
"Union",
"[",
"int",
",",
"Qubits",
"]",
")",
"->",
"State",
":",
"N",
",",
"qubits",
"=",
"qubits_count_tuple",
"(",
"qubits",
")",
"ket",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"[",
"2",
"]",
"*",
"N",
")",
"ket",
"[",
"(",
"0",
",",
")",
"*",
"N",
"]",
"=",
"1",
"/",
"sqrt",
"(",
"2",
")",
"ket",
"[",
"(",
"1",
",",
")",
"*",
"N",
"]",
"=",
"1",
"/",
"sqrt",
"(",
"2",
")",
"return",
"State",
"(",
"ket",
",",
"qubits",
")"
] |
Return a GHZ state on N qubits
|
[
"Return",
"a",
"GHZ",
"state",
"on",
"N",
"qubits"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L205-L211
|
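Likewise for the GHZ construction: only the all-zeros and all-ones bitstrings carry weight, each with amplitude 1/sqrt(2).

import numpy as np
from math import sqrt

N = 3
ket = np.zeros(shape=[2] * N)
ket[(0,) * N] = 1 / sqrt(2)
ket[(1,) * N] = 1 / sqrt(2)

# (|000> + |111>) / sqrt(2)
assert np.count_nonzero(ket) == 2
assert np.isclose(np.sum(np.abs(ket) ** 2), 1.0)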
234,798
|
rigetti/quantumflow
|
quantumflow/states.py
|
random_state
|
def random_state(qubits: Union[int, Qubits]) -> State:
    """Return a random state from the space of N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.random.normal(size=([2] * N)) \
        + 1j * np.random.normal(size=([2] * N))
    return State(ket, qubits).normalize()
|
python
|
def random_state(qubits: Union[int, Qubits]) -> State:
    """Return a random state from the space of N qubits"""
    N, qubits = qubits_count_tuple(qubits)
    ket = np.random.normal(size=([2] * N)) \
        + 1j * np.random.normal(size=([2] * N))
    return State(ket, qubits).normalize()
|
[
"def",
"random_state",
"(",
"qubits",
":",
"Union",
"[",
"int",
",",
"Qubits",
"]",
")",
"->",
"State",
":",
"N",
",",
"qubits",
"=",
"qubits_count_tuple",
"(",
"qubits",
")",
"ket",
"=",
"np",
".",
"random",
".",
"normal",
"(",
"size",
"=",
"(",
"[",
"2",
"]",
"*",
"N",
")",
")",
"+",
"1j",
"*",
"np",
".",
"random",
".",
"normal",
"(",
"size",
"=",
"(",
"[",
"2",
"]",
"*",
"N",
")",
")",
"return",
"State",
"(",
"ket",
",",
"qubits",
")",
".",
"normalize",
"(",
")"
] |
Return a random state from the space of N qubits
|
[
"Return",
"a",
"random",
"state",
"from",
"the",
"space",
"of",
"N",
"qubits"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L214-L219
|
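Drawing i.i.d. complex Gaussian amplitudes and normalizing, as above, yields a Haar-random state vector. A numpy sketch of the step State.normalize() is assumed to perform (division by the 2-norm):

import numpy as np

N = 2
ket = np.random.normal(size=[2] * N) + 1j * np.random.normal(size=[2] * N)

ket = ket / np.sqrt(np.sum(np.abs(ket) ** 2))  # assumed normalize() step
assert np.isclose(np.sum(np.abs(ket) ** 2), 1.0)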
234,799
|
rigetti/quantumflow
|
quantumflow/states.py
|
join_states
|
def join_states(*states: State) -> State:
    """Join two state vectors into a larger qubit state"""
    vectors = [ket.vec for ket in states]
    vec = reduce(outer_product, vectors)
    return State(vec.tensor, vec.qubits)
|
python
|
def join_states(*states: State) -> State:
    """Join two state vectors into a larger qubit state"""
    vectors = [ket.vec for ket in states]
    vec = reduce(outer_product, vectors)
    return State(vec.tensor, vec.qubits)
|
[
"def",
"join_states",
"(",
"*",
"states",
":",
"State",
")",
"->",
"State",
":",
"vectors",
"=",
"[",
"ket",
".",
"vec",
"for",
"ket",
"in",
"states",
"]",
"vec",
"=",
"reduce",
"(",
"outer_product",
",",
"vectors",
")",
"return",
"State",
"(",
"vec",
".",
"tensor",
",",
"vec",
".",
"qubits",
")"
] |
Join two state vectors into a larger qubit state
|
[
"Join",
"two",
"state",
"vectors",
"into",
"a",
"larger",
"qubit",
"state"
] |
13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb
|
https://github.com/rigetti/quantumflow/blob/13a66cabbe8aabf6e023cc675f4a4ebe6ccda8fb/quantumflow/states.py#L225-L229
|
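The reduce over outer_product above composes the kets by tensor product; on flat state vectors the same composition is np.kron, sketched here with hypothetical inputs.

import numpy as np
from functools import reduce

ket0 = np.array([1.0, 0.0])                   # |0>
ket_plus = np.array([1.0, 1.0]) / np.sqrt(2)  # |+>

joined = reduce(np.kron, [ket0, ket_plus])    # |0> (x) |+>
assert joined.shape == (4,)
assert np.allclose(joined, [1 / np.sqrt(2), 1 / np.sqrt(2), 0, 0])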