| id (int32, 0-252k) | repo (string, 7-55) | path (string, 4-127) | func_name (string, 1-88) | original_string (string, 75-19.8k) | language (string, 1 class) | code (string, 75-19.8k) | code_tokens (list) | docstring (string, 3-17.3k) | docstring_tokens (list) | sha (string, 40) | url (string, 87-242) |
|---|---|---|---|---|---|---|---|---|---|---|---|
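The columns follow a CodeSearchNet-style layout: each record pairs a Python function (`code`, with `code_tokens`) and its docstring (`docstring`, `docstring_tokens`) with provenance fields (`repo`, `path`, `func_name`, `sha`, `url`). Below is a minimal sketch of loading and inspecting records with this schema via the Hugging Face `datasets` library; the dataset identifier used here is an assumption, a placeholder standing in for wherever this table is actually hosted.

```python
# Minimal sketch, assuming a CodeSearchNet-style dataset on the Hugging Face Hub.
# "code_search_net"/"python" is a placeholder identifier, not confirmed by this table.
from datasets import load_dataset

ds = load_dataset("code_search_net", "python", split="train")

row = ds[0]
print(row["repo"], row["path"], row["func_name"])   # provenance
print(row["docstring"])                              # natural-language summary
print(row["code"][:200])                             # paired function source (truncated)
print(len(row["code_tokens"]), "code tokens /",
      len(row["docstring_tokens"]), "docstring tokens")
print(row["url"])                                    # permalink pinned to `sha`
```

The `sha` and `url` fields pin each snippet to an exact commit and line range, so the `code` and `docstring` text in any row can be re-verified against the source repository.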
239,900
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/main.py
|
init_environment
|
def init_environment():
"""Set environment variables that are important for the pipeline.
:returns: None
:rtype: None
:raises: None
"""
os.environ['DJANGO_SETTINGS_MODULE'] = 'jukeboxcore.djsettings'
pluginpath = os.pathsep.join((os.environ.get('JUKEBOX_PLUGIN_PATH', ''), constants.BUILTIN_PLUGIN_PATH))
os.environ['JUKEBOX_PLUGIN_PATH'] = pluginpath
|
python
|
def init_environment():
"""Set environment variables that are important for the pipeline.
:returns: None
:rtype: None
:raises: None
"""
os.environ['DJANGO_SETTINGS_MODULE'] = 'jukeboxcore.djsettings'
pluginpath = os.pathsep.join((os.environ.get('JUKEBOX_PLUGIN_PATH', ''), constants.BUILTIN_PLUGIN_PATH))
os.environ['JUKEBOX_PLUGIN_PATH'] = pluginpath
|
[
"def",
"init_environment",
"(",
")",
":",
"os",
".",
"environ",
"[",
"'DJANGO_SETTINGS_MODULE'",
"]",
"=",
"'jukeboxcore.djsettings'",
"pluginpath",
"=",
"os",
".",
"pathsep",
".",
"join",
"(",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'JUKEBOX_PLUGIN_PATH'",
",",
"''",
")",
",",
"constants",
".",
"BUILTIN_PLUGIN_PATH",
")",
")",
"os",
".",
"environ",
"[",
"'JUKEBOX_PLUGIN_PATH'",
"]",
"=",
"pluginpath"
] |
Set environment variables that are important for the pipeline.
:returns: None
:rtype: None
:raises: None
|
[
"Set",
"environment",
"variables",
"that",
"are",
"important",
"for",
"the",
"pipeline",
"."
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/main.py#L13-L22
|
239,901
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/main.py
|
unload_modules
|
def unload_modules():
""" Unload all modules of the jukebox package and all plugin modules
Python provides the ``reload`` command for reloading modules. The major drawback is, that if this module is loaded in any other module
the source code will not be resourced!
If you want to reload the code because you changed the source file, you have to get rid of it completely first.
:returns: None
:rtype: None
:raises: None
"""
mods = set([])
for m in sys.modules:
if m.startswith('jukebox'):
mods.add(m)
pm = PluginManager.get()
for p in pm.get_all_plugins():
mods.add(p.__module__)
for m in mods:
del(sys.modules[m])
|
python
|
def unload_modules():
""" Unload all modules of the jukebox package and all plugin modules
Python provides the ``reload`` command for reloading modules. The major drawback is, that if this module is loaded in any other module
the source code will not be resourced!
If you want to reload the code because you changed the source file, you have to get rid of it completely first.
:returns: None
:rtype: None
:raises: None
"""
mods = set([])
for m in sys.modules:
if m.startswith('jukebox'):
mods.add(m)
pm = PluginManager.get()
for p in pm.get_all_plugins():
mods.add(p.__module__)
for m in mods:
del(sys.modules[m])
|
[
"def",
"unload_modules",
"(",
")",
":",
"mods",
"=",
"set",
"(",
"[",
"]",
")",
"for",
"m",
"in",
"sys",
".",
"modules",
":",
"if",
"m",
".",
"startswith",
"(",
"'jukebox'",
")",
":",
"mods",
".",
"add",
"(",
"m",
")",
"pm",
"=",
"PluginManager",
".",
"get",
"(",
")",
"for",
"p",
"in",
"pm",
".",
"get_all_plugins",
"(",
")",
":",
"mods",
".",
"add",
"(",
"p",
".",
"__module__",
")",
"for",
"m",
"in",
"mods",
":",
"del",
"(",
"sys",
".",
"modules",
"[",
"m",
"]",
")"
] |
Unload all modules of the jukebox package and all plugin modules
Python provides the ``reload`` command for reloading modules. The major drawback is, that if this module is loaded in any other module
the source code will not be resourced!
If you want to reload the code because you changed the source file, you have to get rid of it completely first.
:returns: None
:rtype: None
:raises: None
|
[
"Unload",
"all",
"modules",
"of",
"the",
"jukebox",
"package",
"and",
"all",
"plugin",
"modules"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/main.py#L36-L55
|
239,902
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
_get_LDAP_connection
|
def _get_LDAP_connection():
"""
Return a LDAP connection
"""
server = ldap3.Server('ldap://' + get_optional_env('EPFL_LDAP_SERVER_FOR_SEARCH'))
connection = ldap3.Connection(server)
connection.open()
return connection, get_optional_env('EPFL_LDAP_BASE_DN_FOR_SEARCH')
|
python
|
def _get_LDAP_connection():
"""
Return a LDAP connection
"""
server = ldap3.Server('ldap://' + get_optional_env('EPFL_LDAP_SERVER_FOR_SEARCH'))
connection = ldap3.Connection(server)
connection.open()
return connection, get_optional_env('EPFL_LDAP_BASE_DN_FOR_SEARCH')
|
[
"def",
"_get_LDAP_connection",
"(",
")",
":",
"server",
"=",
"ldap3",
".",
"Server",
"(",
"'ldap://'",
"+",
"get_optional_env",
"(",
"'EPFL_LDAP_SERVER_FOR_SEARCH'",
")",
")",
"connection",
"=",
"ldap3",
".",
"Connection",
"(",
"server",
")",
"connection",
".",
"open",
"(",
")",
"return",
"connection",
",",
"get_optional_env",
"(",
"'EPFL_LDAP_BASE_DN_FOR_SEARCH'",
")"
] |
Return a LDAP connection
|
[
"Return",
"a",
"LDAP",
"connection"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L7-L15
|
239,903
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
LDAP_search
|
def LDAP_search(pattern_search, attribute):
"""
Do a LDAP search
"""
connection, ldap_base = _get_LDAP_connection()
connection.search(
search_base=ldap_base,
search_filter=pattern_search,
attributes=[attribute]
)
return connection.response
|
python
|
def LDAP_search(pattern_search, attribute):
"""
Do a LDAP search
"""
connection, ldap_base = _get_LDAP_connection()
connection.search(
search_base=ldap_base,
search_filter=pattern_search,
attributes=[attribute]
)
return connection.response
|
[
"def",
"LDAP_search",
"(",
"pattern_search",
",",
"attribute",
")",
":",
"connection",
",",
"ldap_base",
"=",
"_get_LDAP_connection",
"(",
")",
"connection",
".",
"search",
"(",
"search_base",
"=",
"ldap_base",
",",
"search_filter",
"=",
"pattern_search",
",",
"attributes",
"=",
"[",
"attribute",
"]",
")",
"return",
"connection",
".",
"response"
] |
Do a LDAP search
|
[
"Do",
"a",
"LDAP",
"search"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L18-L29
|
239,904
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
is_unit_exist
|
def is_unit_exist(unit_id):
"""
Return True if the unit 'unid_id' exists.
Otherwise return False
"""
attribute = 'objectClass'
response = LDAP_search(
pattern_search="(uniqueidentifier={})".format(unit_id),
attribute=attribute
)
try:
unit_exist = 'EPFLorganizationalUnit' in response[0]['attributes'][attribute]
except Exception:
return False
return unit_exist
|
python
|
def is_unit_exist(unit_id):
"""
Return True if the unit 'unid_id' exists.
Otherwise return False
"""
attribute = 'objectClass'
response = LDAP_search(
pattern_search="(uniqueidentifier={})".format(unit_id),
attribute=attribute
)
try:
unit_exist = 'EPFLorganizationalUnit' in response[0]['attributes'][attribute]
except Exception:
return False
return unit_exist
|
[
"def",
"is_unit_exist",
"(",
"unit_id",
")",
":",
"attribute",
"=",
"'objectClass'",
"response",
"=",
"LDAP_search",
"(",
"pattern_search",
"=",
"\"(uniqueidentifier={})\"",
".",
"format",
"(",
"unit_id",
")",
",",
"attribute",
"=",
"attribute",
")",
"try",
":",
"unit_exist",
"=",
"'EPFLorganizationalUnit'",
"in",
"response",
"[",
"0",
"]",
"[",
"'attributes'",
"]",
"[",
"attribute",
"]",
"except",
"Exception",
":",
"return",
"False",
"return",
"unit_exist"
] |
Return True if the unit 'unid_id' exists.
Otherwise return False
|
[
"Return",
"True",
"if",
"the",
"unit",
"unid_id",
"exists",
".",
"Otherwise",
"return",
"False"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L36-L51
|
239,905
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
get_unit_name
|
def get_unit_name(unit_id):
"""
Return the unit name to the unit 'unit_id'
"""
attribute = 'cn'
response = LDAP_search(
pattern_search='(uniqueIdentifier={})'.format(unit_id),
attribute=attribute
)
try:
unit_name = get_attribute(response, attribute)
except Exception:
raise EpflLdapException("The unit with id '{}' was not found".format(unit_id))
return unit_name
|
python
|
def get_unit_name(unit_id):
"""
Return the unit name to the unit 'unit_id'
"""
attribute = 'cn'
response = LDAP_search(
pattern_search='(uniqueIdentifier={})'.format(unit_id),
attribute=attribute
)
try:
unit_name = get_attribute(response, attribute)
except Exception:
raise EpflLdapException("The unit with id '{}' was not found".format(unit_id))
return unit_name
|
[
"def",
"get_unit_name",
"(",
"unit_id",
")",
":",
"attribute",
"=",
"'cn'",
"response",
"=",
"LDAP_search",
"(",
"pattern_search",
"=",
"'(uniqueIdentifier={})'",
".",
"format",
"(",
"unit_id",
")",
",",
"attribute",
"=",
"attribute",
")",
"try",
":",
"unit_name",
"=",
"get_attribute",
"(",
"response",
",",
"attribute",
")",
"except",
"Exception",
":",
"raise",
"EpflLdapException",
"(",
"\"The unit with id '{}' was not found\"",
".",
"format",
"(",
"unit_id",
")",
")",
"return",
"unit_name"
] |
Return the unit name to the unit 'unit_id'
|
[
"Return",
"the",
"unit",
"name",
"to",
"the",
"unit",
"unit_id"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L54-L68
|
239,906
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
get_unit_id
|
def get_unit_id(unit_name):
"""
Return the unit id to the unit 'unit_name'
"""
unit_name = unit_name.lower()
attribute = 'uniqueIdentifier'
response = LDAP_search(
pattern_search='(cn={})'.format(unit_name),
attribute=attribute
)
unit_id = ""
try:
for element in response:
if 'dn' in element and element['dn'].startswith('ou={},'.format(unit_name)):
unit_id = element['attributes'][attribute][0]
except Exception:
raise EpflLdapException("The unit named '{}' was not found".format(unit_name))
finally:
if not unit_id:
raise EpflLdapException("The unit named '{}' was not found".format(unit_name))
return unit_id
|
python
|
def get_unit_id(unit_name):
"""
Return the unit id to the unit 'unit_name'
"""
unit_name = unit_name.lower()
attribute = 'uniqueIdentifier'
response = LDAP_search(
pattern_search='(cn={})'.format(unit_name),
attribute=attribute
)
unit_id = ""
try:
for element in response:
if 'dn' in element and element['dn'].startswith('ou={},'.format(unit_name)):
unit_id = element['attributes'][attribute][0]
except Exception:
raise EpflLdapException("The unit named '{}' was not found".format(unit_name))
finally:
if not unit_id:
raise EpflLdapException("The unit named '{}' was not found".format(unit_name))
return unit_id
|
[
"def",
"get_unit_id",
"(",
"unit_name",
")",
":",
"unit_name",
"=",
"unit_name",
".",
"lower",
"(",
")",
"attribute",
"=",
"'uniqueIdentifier'",
"response",
"=",
"LDAP_search",
"(",
"pattern_search",
"=",
"'(cn={})'",
".",
"format",
"(",
"unit_name",
")",
",",
"attribute",
"=",
"attribute",
")",
"unit_id",
"=",
"\"\"",
"try",
":",
"for",
"element",
"in",
"response",
":",
"if",
"'dn'",
"in",
"element",
"and",
"element",
"[",
"'dn'",
"]",
".",
"startswith",
"(",
"'ou={},'",
".",
"format",
"(",
"unit_name",
")",
")",
":",
"unit_id",
"=",
"element",
"[",
"'attributes'",
"]",
"[",
"attribute",
"]",
"[",
"0",
"]",
"except",
"Exception",
":",
"raise",
"EpflLdapException",
"(",
"\"The unit named '{}' was not found\"",
".",
"format",
"(",
"unit_name",
")",
")",
"finally",
":",
"if",
"not",
"unit_id",
":",
"raise",
"EpflLdapException",
"(",
"\"The unit named '{}' was not found\"",
".",
"format",
"(",
"unit_name",
")",
")",
"return",
"unit_id"
] |
Return the unit id to the unit 'unit_name'
|
[
"Return",
"the",
"unit",
"id",
"to",
"the",
"unit",
"unit_name"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L71-L93
|
239,907
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
get_units
|
def get_units(username):
"""
Return all units of user 'username'
"""
connection, ldap_base = _get_LDAP_connection()
# Search the user dn
connection.search(
search_base=ldap_base,
search_filter='(uid={}@*)'.format(username),
)
# For each user dn give me the unit
dn_list = [connection.response[index]['dn'] for index in range(len(connection.response))]
units = []
# For each unit search unit information and give me the unit id
for dn in dn_list:
unit = dn.split(",ou=")[1]
connection.search(search_base=ldap_base, search_filter='(ou={})'.format(unit), attributes=['uniqueidentifier'])
units.append(get_attribute(connection.response, 'uniqueIdentifier'))
return units
|
python
|
def get_units(username):
"""
Return all units of user 'username'
"""
connection, ldap_base = _get_LDAP_connection()
# Search the user dn
connection.search(
search_base=ldap_base,
search_filter='(uid={}@*)'.format(username),
)
# For each user dn give me the unit
dn_list = [connection.response[index]['dn'] for index in range(len(connection.response))]
units = []
# For each unit search unit information and give me the unit id
for dn in dn_list:
unit = dn.split(",ou=")[1]
connection.search(search_base=ldap_base, search_filter='(ou={})'.format(unit), attributes=['uniqueidentifier'])
units.append(get_attribute(connection.response, 'uniqueIdentifier'))
return units
|
[
"def",
"get_units",
"(",
"username",
")",
":",
"connection",
",",
"ldap_base",
"=",
"_get_LDAP_connection",
"(",
")",
"# Search the user dn",
"connection",
".",
"search",
"(",
"search_base",
"=",
"ldap_base",
",",
"search_filter",
"=",
"'(uid={}@*)'",
".",
"format",
"(",
"username",
")",
",",
")",
"# For each user dn give me the unit",
"dn_list",
"=",
"[",
"connection",
".",
"response",
"[",
"index",
"]",
"[",
"'dn'",
"]",
"for",
"index",
"in",
"range",
"(",
"len",
"(",
"connection",
".",
"response",
")",
")",
"]",
"units",
"=",
"[",
"]",
"# For each unit search unit information and give me the unit id",
"for",
"dn",
"in",
"dn_list",
":",
"unit",
"=",
"dn",
".",
"split",
"(",
"\",ou=\"",
")",
"[",
"1",
"]",
"connection",
".",
"search",
"(",
"search_base",
"=",
"ldap_base",
",",
"search_filter",
"=",
"'(ou={})'",
".",
"format",
"(",
"unit",
")",
",",
"attributes",
"=",
"[",
"'uniqueidentifier'",
"]",
")",
"units",
".",
"append",
"(",
"get_attribute",
"(",
"connection",
".",
"response",
",",
"'uniqueIdentifier'",
")",
")",
"return",
"units"
] |
Return all units of user 'username'
|
[
"Return",
"all",
"units",
"of",
"user",
"username"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L96-L118
|
239,908
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
get_username
|
def get_username(sciper):
"""
Return username of user
"""
attribute = 'uid'
response = LDAP_search(
pattern_search='(uniqueIdentifier={})'.format(sciper),
attribute=attribute
)
try:
username = get_attribute(response, attribute)
except Exception:
raise EpflLdapException("No username corresponds to sciper {}".format(sciper))
return username
|
python
|
def get_username(sciper):
"""
Return username of user
"""
attribute = 'uid'
response = LDAP_search(
pattern_search='(uniqueIdentifier={})'.format(sciper),
attribute=attribute
)
try:
username = get_attribute(response, attribute)
except Exception:
raise EpflLdapException("No username corresponds to sciper {}".format(sciper))
return username
|
[
"def",
"get_username",
"(",
"sciper",
")",
":",
"attribute",
"=",
"'uid'",
"response",
"=",
"LDAP_search",
"(",
"pattern_search",
"=",
"'(uniqueIdentifier={})'",
".",
"format",
"(",
"sciper",
")",
",",
"attribute",
"=",
"attribute",
")",
"try",
":",
"username",
"=",
"get_attribute",
"(",
"response",
",",
"attribute",
")",
"except",
"Exception",
":",
"raise",
"EpflLdapException",
"(",
"\"No username corresponds to sciper {}\"",
".",
"format",
"(",
"sciper",
")",
")",
"return",
"username"
] |
Return username of user
|
[
"Return",
"username",
"of",
"user"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L137-L150
|
239,909
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
get_email
|
def get_email(sciper):
"""
Return email of user
"""
attribute = 'mail'
response = LDAP_search(
pattern_search='(uniqueIdentifier={})'.format(sciper),
attribute=attribute
)
try:
email = get_attribute(response, attribute)
except Exception:
raise EpflLdapException("No email address corresponds to sciper {}".format(sciper))
return email
|
python
|
def get_email(sciper):
"""
Return email of user
"""
attribute = 'mail'
response = LDAP_search(
pattern_search='(uniqueIdentifier={})'.format(sciper),
attribute=attribute
)
try:
email = get_attribute(response, attribute)
except Exception:
raise EpflLdapException("No email address corresponds to sciper {}".format(sciper))
return email
|
[
"def",
"get_email",
"(",
"sciper",
")",
":",
"attribute",
"=",
"'mail'",
"response",
"=",
"LDAP_search",
"(",
"pattern_search",
"=",
"'(uniqueIdentifier={})'",
".",
"format",
"(",
"sciper",
")",
",",
"attribute",
"=",
"attribute",
")",
"try",
":",
"email",
"=",
"get_attribute",
"(",
"response",
",",
"attribute",
")",
"except",
"Exception",
":",
"raise",
"EpflLdapException",
"(",
"\"No email address corresponds to sciper {}\"",
".",
"format",
"(",
"sciper",
")",
")",
"return",
"email"
] |
Return email of user
|
[
"Return",
"email",
"of",
"user"
] |
bebb94da3609d358bd83f31672eeaddcda872c5d
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L153-L166
|
239,910
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/errors/ApplicationException.py
|
ApplicationException.with_code
|
def with_code(self, code):
"""
Sets a unique error code.
This method returns reference to this exception to implement Builder pattern to chain additional calls.
:param code: a unique error code
:return: this exception object
"""
self.code = code if code != None else 'UNKNOWN'
self.name = code
return self
|
python
|
def with_code(self, code):
"""
Sets a unique error code.
This method returns reference to this exception to implement Builder pattern to chain additional calls.
:param code: a unique error code
:return: this exception object
"""
self.code = code if code != None else 'UNKNOWN'
self.name = code
return self
|
[
"def",
"with_code",
"(",
"self",
",",
"code",
")",
":",
"self",
".",
"code",
"=",
"code",
"if",
"code",
"!=",
"None",
"else",
"'UNKNOWN'",
"self",
".",
"name",
"=",
"code",
"return",
"self"
] |
Sets a unique error code.
This method returns reference to this exception to implement Builder pattern to chain additional calls.
:param code: a unique error code
:return: this exception object
|
[
"Sets",
"a",
"unique",
"error",
"code",
".",
"This",
"method",
"returns",
"reference",
"to",
"this",
"exception",
"to",
"implement",
"Builder",
"pattern",
"to",
"chain",
"additional",
"calls",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/errors/ApplicationException.py#L131-L142
|
239,911
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/errors/ApplicationException.py
|
ApplicationException.with_details
|
def with_details(self, key, value):
"""
Sets a parameter for additional error details.
This details can be used to restore error description in other languages.
This method returns reference to this exception to implement Builder pattern to chain additional calls.
:param key: a details parameter name
:param value: a details parameter name
:return: this exception object
"""
self.details = self.details if self.details != None else {}
self.details[key] = value
return self
|
python
|
def with_details(self, key, value):
"""
Sets a parameter for additional error details.
This details can be used to restore error description in other languages.
This method returns reference to this exception to implement Builder pattern to chain additional calls.
:param key: a details parameter name
:param value: a details parameter name
:return: this exception object
"""
self.details = self.details if self.details != None else {}
self.details[key] = value
return self
|
[
"def",
"with_details",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"self",
".",
"details",
"=",
"self",
".",
"details",
"if",
"self",
".",
"details",
"!=",
"None",
"else",
"{",
"}",
"self",
".",
"details",
"[",
"key",
"]",
"=",
"value",
"return",
"self"
] |
Sets a parameter for additional error details.
This details can be used to restore error description in other languages.
This method returns reference to this exception to implement Builder pattern to chain additional calls.
:param key: a details parameter name
:param value: a details parameter name
:return: this exception object
|
[
"Sets",
"a",
"parameter",
"for",
"additional",
"error",
"details",
".",
"This",
"details",
"can",
"be",
"used",
"to",
"restore",
"error",
"description",
"in",
"other",
"languages",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/errors/ApplicationException.py#L156-L171
|
239,912
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/errors/ApplicationException.py
|
ApplicationException.wrap
|
def wrap(self, cause):
"""
Wraps another exception into an application exception object.
If original exception is of ApplicationException type it is returned without changes.
Otherwise a new ApplicationException is created and original error is set as its cause.
:param cause: an original error object
:return: an original or newly created ApplicationException
"""
if isinstance(cause, ApplicationException):
return cause
self.with_cause(cause)
return self
|
python
|
def wrap(self, cause):
"""
Wraps another exception into an application exception object.
If original exception is of ApplicationException type it is returned without changes.
Otherwise a new ApplicationException is created and original error is set as its cause.
:param cause: an original error object
:return: an original or newly created ApplicationException
"""
if isinstance(cause, ApplicationException):
return cause
self.with_cause(cause)
return self
|
[
"def",
"wrap",
"(",
"self",
",",
"cause",
")",
":",
"if",
"isinstance",
"(",
"cause",
",",
"ApplicationException",
")",
":",
"return",
"cause",
"self",
".",
"with_cause",
"(",
"cause",
")",
"return",
"self"
] |
Wraps another exception into an application exception object.
If original exception is of ApplicationException type it is returned without changes.
Otherwise a new ApplicationException is created and original error is set as its cause.
:param cause: an original error object
:return: an original or newly created ApplicationException
|
[
"Wraps",
"another",
"exception",
"into",
"an",
"application",
"exception",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/errors/ApplicationException.py#L199-L214
|
239,913
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/errors/ApplicationException.py
|
ApplicationException.wrap_exception
|
def wrap_exception(exception, cause):
"""
Wraps another exception into specified application exception object.
If original exception is of ApplicationException type it is returned without changes.
Otherwise the original error is set as a cause to specified ApplicationException object.
:param exception: an ApplicationException object to wrap the cause
:param cause: an original error object
:return: an original or newly created ApplicationException
"""
if isinstance(cause, ApplicationException):
return cause
exception.with_cause(cause)
return exception
|
python
|
def wrap_exception(exception, cause):
"""
Wraps another exception into specified application exception object.
If original exception is of ApplicationException type it is returned without changes.
Otherwise the original error is set as a cause to specified ApplicationException object.
:param exception: an ApplicationException object to wrap the cause
:param cause: an original error object
:return: an original or newly created ApplicationException
"""
if isinstance(cause, ApplicationException):
return cause
exception.with_cause(cause)
return exception
|
[
"def",
"wrap_exception",
"(",
"exception",
",",
"cause",
")",
":",
"if",
"isinstance",
"(",
"cause",
",",
"ApplicationException",
")",
":",
"return",
"cause",
"exception",
".",
"with_cause",
"(",
"cause",
")",
"return",
"exception"
] |
Wraps another exception into specified application exception object.
If original exception is of ApplicationException type it is returned without changes.
Otherwise the original error is set as a cause to specified ApplicationException object.
:param exception: an ApplicationException object to wrap the cause
:param cause: an original error object
:return: an original or newly created ApplicationException
|
[
"Wraps",
"another",
"exception",
"into",
"specified",
"application",
"exception",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/errors/ApplicationException.py#L217-L234
|
239,914
|
FujiMakoto/AgentML
|
agentml/parser/tags/var.py
|
Var.value
|
def value(self):
"""
Return the current value of a variable
"""
# Does the variable name have tags to parse?
if len(self._element):
var = ''.join(map(str, self.trigger.agentml.parse_tags(self._element, self.trigger)))
else:
var = self._element.text or attribute(self._element, 'name')
# Is there a default value defined?
default = attribute(self._element, 'default')
try:
self._log.debug('Retrieving {type} variable {var}'.format(type=self.type, var=var))
if self.type == 'user':
return self.trigger.user.get_var(var)
else:
return self.trigger.agentml.get_var(var)
except VarNotDefinedError:
# Do we have a default value?
if default:
self._log.info('{type} variable {var} not set, returning default: {default}'
.format(type=self.type.capitalize(), var=var, default=default))
self._log.info('{type} variable {var} not set and no default value has been specified'
.format(type=self.type.capitalize(), var=var))
return ''
|
python
|
def value(self):
"""
Return the current value of a variable
"""
# Does the variable name have tags to parse?
if len(self._element):
var = ''.join(map(str, self.trigger.agentml.parse_tags(self._element, self.trigger)))
else:
var = self._element.text or attribute(self._element, 'name')
# Is there a default value defined?
default = attribute(self._element, 'default')
try:
self._log.debug('Retrieving {type} variable {var}'.format(type=self.type, var=var))
if self.type == 'user':
return self.trigger.user.get_var(var)
else:
return self.trigger.agentml.get_var(var)
except VarNotDefinedError:
# Do we have a default value?
if default:
self._log.info('{type} variable {var} not set, returning default: {default}'
.format(type=self.type.capitalize(), var=var, default=default))
self._log.info('{type} variable {var} not set and no default value has been specified'
.format(type=self.type.capitalize(), var=var))
return ''
|
[
"def",
"value",
"(",
"self",
")",
":",
"# Does the variable name have tags to parse?",
"if",
"len",
"(",
"self",
".",
"_element",
")",
":",
"var",
"=",
"''",
".",
"join",
"(",
"map",
"(",
"str",
",",
"self",
".",
"trigger",
".",
"agentml",
".",
"parse_tags",
"(",
"self",
".",
"_element",
",",
"self",
".",
"trigger",
")",
")",
")",
"else",
":",
"var",
"=",
"self",
".",
"_element",
".",
"text",
"or",
"attribute",
"(",
"self",
".",
"_element",
",",
"'name'",
")",
"# Is there a default value defined?",
"default",
"=",
"attribute",
"(",
"self",
".",
"_element",
",",
"'default'",
")",
"try",
":",
"self",
".",
"_log",
".",
"debug",
"(",
"'Retrieving {type} variable {var}'",
".",
"format",
"(",
"type",
"=",
"self",
".",
"type",
",",
"var",
"=",
"var",
")",
")",
"if",
"self",
".",
"type",
"==",
"'user'",
":",
"return",
"self",
".",
"trigger",
".",
"user",
".",
"get_var",
"(",
"var",
")",
"else",
":",
"return",
"self",
".",
"trigger",
".",
"agentml",
".",
"get_var",
"(",
"var",
")",
"except",
"VarNotDefinedError",
":",
"# Do we have a default value?",
"if",
"default",
":",
"self",
".",
"_log",
".",
"info",
"(",
"'{type} variable {var} not set, returning default: {default}'",
".",
"format",
"(",
"type",
"=",
"self",
".",
"type",
".",
"capitalize",
"(",
")",
",",
"var",
"=",
"var",
",",
"default",
"=",
"default",
")",
")",
"self",
".",
"_log",
".",
"info",
"(",
"'{type} variable {var} not set and no default value has been specified'",
".",
"format",
"(",
"type",
"=",
"self",
".",
"type",
".",
"capitalize",
"(",
")",
",",
"var",
"=",
"var",
")",
")",
"return",
"''"
] |
Return the current value of a variable
|
[
"Return",
"the",
"current",
"value",
"of",
"a",
"variable"
] |
c8cb64b460d876666bf29ea2c682189874c7c403
|
https://github.com/FujiMakoto/AgentML/blob/c8cb64b460d876666bf29ea2c682189874c7c403/agentml/parser/tags/var.py#L28-L55
|
239,915
|
rbarrois/django-batchform
|
batchform/parsers/csv.py
|
BaseCsvParser.reopen
|
def reopen(self, file_obj):
"""Reopen the file-like object in a safe manner."""
file_obj.open('U')
if sys.version_info[0] <= 2:
return file_obj
else:
return codecs.getreader('utf-8')(file_obj)
|
python
|
def reopen(self, file_obj):
"""Reopen the file-like object in a safe manner."""
file_obj.open('U')
if sys.version_info[0] <= 2:
return file_obj
else:
return codecs.getreader('utf-8')(file_obj)
|
[
"def",
"reopen",
"(",
"self",
",",
"file_obj",
")",
":",
"file_obj",
".",
"open",
"(",
"'U'",
")",
"if",
"sys",
".",
"version_info",
"[",
"0",
"]",
"<=",
"2",
":",
"return",
"file_obj",
"else",
":",
"return",
"codecs",
".",
"getreader",
"(",
"'utf-8'",
")",
"(",
"file_obj",
")"
] |
Reopen the file-like object in a safe manner.
|
[
"Reopen",
"the",
"file",
"-",
"like",
"object",
"in",
"a",
"safe",
"manner",
"."
] |
f6b659a6790750285af248ccd1d4d178ecbad129
|
https://github.com/rbarrois/django-batchform/blob/f6b659a6790750285af248ccd1d4d178ecbad129/batchform/parsers/csv.py#L36-L42
|
239,916
|
caseyjlaw/sdmreader
|
sdmreader/sdmreader.py
|
BDFData.parse
|
def parse(self):
"""wrapper for original parse function. will read pkl with bdf info, if available."""
if os.path.exists(self.pklname): # check for pkl with binary info
logger.info('Found bdf pkl file %s. Loading...' % (self.pklname))
try:
with open(self.pklname,'rb') as pkl:
(self.mimemsg, self.headxml, self.sizeinfo, self.binarychunks, self.n_integrations, self.n_antennas, self.n_baselines, self.n_basebands, self.n_spws, self.n_channels, self.crosspols) = pickle.load(pkl)
except:
logger.warning('Something went wrong. Parsing bdf directly...')
self._parse()
else:
if self.pklname:
logger.info('Could not find bdf pkl file %s.' % (self.pklname))
self._parse()
self.headsize, self.intsize = self.calc_intsize()
return self
|
python
|
def parse(self):
"""wrapper for original parse function. will read pkl with bdf info, if available."""
if os.path.exists(self.pklname): # check for pkl with binary info
logger.info('Found bdf pkl file %s. Loading...' % (self.pklname))
try:
with open(self.pklname,'rb') as pkl:
(self.mimemsg, self.headxml, self.sizeinfo, self.binarychunks, self.n_integrations, self.n_antennas, self.n_baselines, self.n_basebands, self.n_spws, self.n_channels, self.crosspols) = pickle.load(pkl)
except:
logger.warning('Something went wrong. Parsing bdf directly...')
self._parse()
else:
if self.pklname:
logger.info('Could not find bdf pkl file %s.' % (self.pklname))
self._parse()
self.headsize, self.intsize = self.calc_intsize()
return self
|
[
"def",
"parse",
"(",
"self",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"pklname",
")",
":",
"# check for pkl with binary info\r",
"logger",
".",
"info",
"(",
"'Found bdf pkl file %s. Loading...'",
"%",
"(",
"self",
".",
"pklname",
")",
")",
"try",
":",
"with",
"open",
"(",
"self",
".",
"pklname",
",",
"'rb'",
")",
"as",
"pkl",
":",
"(",
"self",
".",
"mimemsg",
",",
"self",
".",
"headxml",
",",
"self",
".",
"sizeinfo",
",",
"self",
".",
"binarychunks",
",",
"self",
".",
"n_integrations",
",",
"self",
".",
"n_antennas",
",",
"self",
".",
"n_baselines",
",",
"self",
".",
"n_basebands",
",",
"self",
".",
"n_spws",
",",
"self",
".",
"n_channels",
",",
"self",
".",
"crosspols",
")",
"=",
"pickle",
".",
"load",
"(",
"pkl",
")",
"except",
":",
"logger",
".",
"warning",
"(",
"'Something went wrong. Parsing bdf directly...'",
")",
"self",
".",
"_parse",
"(",
")",
"else",
":",
"if",
"self",
".",
"pklname",
":",
"logger",
".",
"info",
"(",
"'Could not find bdf pkl file %s.'",
"%",
"(",
"self",
".",
"pklname",
")",
")",
"self",
".",
"_parse",
"(",
")",
"self",
".",
"headsize",
",",
"self",
".",
"intsize",
"=",
"self",
".",
"calc_intsize",
"(",
")",
"return",
"self"
] |
wrapper for original parse function. will read pkl with bdf info, if available.
|
[
"wrapper",
"for",
"original",
"parse",
"function",
".",
"will",
"read",
"pkl",
"with",
"bdf",
"info",
"if",
"available",
"."
] |
b6c3498f1915138727819715ee00d2c46353382d
|
https://github.com/caseyjlaw/sdmreader/blob/b6c3498f1915138727819715ee00d2c46353382d/sdmreader/sdmreader.py#L331-L349
|
239,917
|
Bystroushaak/pyDHTMLParser
|
src/dhtmlparser/htmlelement/html_element.py
|
HTMLElement.toString
|
def toString(self):
"""
Returns almost original string.
If you want prettified string, try :meth:`.prettify`.
Returns:
str: Complete representation of the element with childs, endtag \
and so on.
"""
output = ""
if self.childs or self.isOpeningTag():
output += self.tagToString()
for c in self.childs:
output += c.toString()
if self.endtag is not None:
output += self.endtag.tagToString()
elif not self.isEndTag():
output += self.tagToString()
return output
|
python
|
def toString(self):
"""
Returns almost original string.
If you want prettified string, try :meth:`.prettify`.
Returns:
str: Complete representation of the element with childs, endtag \
and so on.
"""
output = ""
if self.childs or self.isOpeningTag():
output += self.tagToString()
for c in self.childs:
output += c.toString()
if self.endtag is not None:
output += self.endtag.tagToString()
elif not self.isEndTag():
output += self.tagToString()
return output
|
[
"def",
"toString",
"(",
"self",
")",
":",
"output",
"=",
"\"\"",
"if",
"self",
".",
"childs",
"or",
"self",
".",
"isOpeningTag",
"(",
")",
":",
"output",
"+=",
"self",
".",
"tagToString",
"(",
")",
"for",
"c",
"in",
"self",
".",
"childs",
":",
"output",
"+=",
"c",
".",
"toString",
"(",
")",
"if",
"self",
".",
"endtag",
"is",
"not",
"None",
":",
"output",
"+=",
"self",
".",
"endtag",
".",
"tagToString",
"(",
")",
"elif",
"not",
"self",
".",
"isEndTag",
"(",
")",
":",
"output",
"+=",
"self",
".",
"tagToString",
"(",
")",
"return",
"output"
] |
Returns almost original string.
If you want prettified string, try :meth:`.prettify`.
Returns:
str: Complete representation of the element with childs, endtag \
and so on.
|
[
"Returns",
"almost",
"original",
"string",
"."
] |
4756f93dd048500b038ece2323fe26e46b6bfdea
|
https://github.com/Bystroushaak/pyDHTMLParser/blob/4756f93dd048500b038ece2323fe26e46b6bfdea/src/dhtmlparser/htmlelement/html_element.py#L20-L44
|
239,918
|
Bystroushaak/pyDHTMLParser
|
src/dhtmlparser/htmlelement/html_element.py
|
HTMLElement.replaceWith
|
def replaceWith(self, el):
"""
Replace value in this element with values from `el`.
This useful when you don't want change all references to object.
Args:
el (obj): :class:`HTMLElement` instance.
"""
self.childs = el.childs
self.params = el.params
self.endtag = el.endtag
self.openertag = el.openertag
self._tagname = el.getTagName()
self._element = el.tagToString()
self._istag = el.isTag()
self._isendtag = el.isEndTag()
self._iscomment = el.isComment()
self._isnonpairtag = el.isNonPairTag()
|
python
|
def replaceWith(self, el):
"""
Replace value in this element with values from `el`.
This useful when you don't want change all references to object.
Args:
el (obj): :class:`HTMLElement` instance.
"""
self.childs = el.childs
self.params = el.params
self.endtag = el.endtag
self.openertag = el.openertag
self._tagname = el.getTagName()
self._element = el.tagToString()
self._istag = el.isTag()
self._isendtag = el.isEndTag()
self._iscomment = el.isComment()
self._isnonpairtag = el.isNonPairTag()
|
[
"def",
"replaceWith",
"(",
"self",
",",
"el",
")",
":",
"self",
".",
"childs",
"=",
"el",
".",
"childs",
"self",
".",
"params",
"=",
"el",
".",
"params",
"self",
".",
"endtag",
"=",
"el",
".",
"endtag",
"self",
".",
"openertag",
"=",
"el",
".",
"openertag",
"self",
".",
"_tagname",
"=",
"el",
".",
"getTagName",
"(",
")",
"self",
".",
"_element",
"=",
"el",
".",
"tagToString",
"(",
")",
"self",
".",
"_istag",
"=",
"el",
".",
"isTag",
"(",
")",
"self",
".",
"_isendtag",
"=",
"el",
".",
"isEndTag",
"(",
")",
"self",
".",
"_iscomment",
"=",
"el",
".",
"isComment",
"(",
")",
"self",
".",
"_isnonpairtag",
"=",
"el",
".",
"isNonPairTag",
"(",
")"
] |
Replace value in this element with values from `el`.
This useful when you don't want change all references to object.
Args:
el (obj): :class:`HTMLElement` instance.
|
[
"Replace",
"value",
"in",
"this",
"element",
"with",
"values",
"from",
"el",
"."
] |
4756f93dd048500b038ece2323fe26e46b6bfdea
|
https://github.com/Bystroushaak/pyDHTMLParser/blob/4756f93dd048500b038ece2323fe26e46b6bfdea/src/dhtmlparser/htmlelement/html_element.py#L133-L153
|
239,919
|
renzon/gaepagseguro
|
gaepagseguro/save_commands.py
|
SaveItemCmd.handle_previous
|
def handle_previous(self, command):
"""
Method to generate item from a ItemForm. The form must be exposed on form attribute
@param command: a command tha expose data through form attributte
"""
self.result = command.items
self._to_commit = self.result
|
python
|
def handle_previous(self, command):
"""
Method to generate item from a ItemForm. The form must be exposed on form attribute
@param command: a command tha expose data through form attributte
"""
self.result = command.items
self._to_commit = self.result
|
[
"def",
"handle_previous",
"(",
"self",
",",
"command",
")",
":",
"self",
".",
"result",
"=",
"command",
".",
"items",
"self",
".",
"_to_commit",
"=",
"self",
".",
"result"
] |
Method to generate item from a ItemForm. The form must be exposed on form attribute
@param command: a command tha expose data through form attributte
|
[
"Method",
"to",
"generate",
"item",
"from",
"a",
"ItemForm",
".",
"The",
"form",
"must",
"be",
"exposed",
"on",
"form",
"attribute"
] |
c88f00580c380ff5b35d873311d6c786fa1f29d2
|
https://github.com/renzon/gaepagseguro/blob/c88f00580c380ff5b35d873311d6c786fa1f29d2/gaepagseguro/save_commands.py#L11-L17
|
239,920
|
ErikBjare/pyzenobase
|
examples/upload_lifelogger_spreadsheet/main.py
|
Lifelogger_to_Zenobase.get_dates
|
def get_dates(raw_table) -> "list of dates":
"""
Goes through the first column of input table and
returns the first sequence of dates it finds.
"""
dates = []
found_first = False
for i, dstr in enumerate([raw_table[i][0] for i in range(0, len(raw_table))]):
if dstr:
if len(dstr.split("/")) == 3:
d = datetime.datetime.strptime(dstr, '%m/%d/%Y')
elif len(dstr.split("-")) == 3:
d = datetime.datetime.strptime(dstr, '%Y-%m-%d')
else:
# Not necessarily an error, could just be a non-date cell
logging.debug("unknown date-format: {}".format(dstr))
continue
dates.append(d)
if not found_first:
found_first = True
logging.debug("Found first date: '{}' at i: {}".format(d.isoformat(), i))
elif found_first:
logging.debug("Last date: {}".format(d))
break
return dates
|
python
|
def get_dates(raw_table) -> "list of dates":
"""
Goes through the first column of input table and
returns the first sequence of dates it finds.
"""
dates = []
found_first = False
for i, dstr in enumerate([raw_table[i][0] for i in range(0, len(raw_table))]):
if dstr:
if len(dstr.split("/")) == 3:
d = datetime.datetime.strptime(dstr, '%m/%d/%Y')
elif len(dstr.split("-")) == 3:
d = datetime.datetime.strptime(dstr, '%Y-%m-%d')
else:
# Not necessarily an error, could just be a non-date cell
logging.debug("unknown date-format: {}".format(dstr))
continue
dates.append(d)
if not found_first:
found_first = True
logging.debug("Found first date: '{}' at i: {}".format(d.isoformat(), i))
elif found_first:
logging.debug("Last date: {}".format(d))
break
return dates
|
[
"def",
"get_dates",
"(",
"raw_table",
")",
"->",
"\"list of dates\"",
":",
"dates",
"=",
"[",
"]",
"found_first",
"=",
"False",
"for",
"i",
",",
"dstr",
"in",
"enumerate",
"(",
"[",
"raw_table",
"[",
"i",
"]",
"[",
"0",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"raw_table",
")",
")",
"]",
")",
":",
"if",
"dstr",
":",
"if",
"len",
"(",
"dstr",
".",
"split",
"(",
"\"/\"",
")",
")",
"==",
"3",
":",
"d",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"dstr",
",",
"'%m/%d/%Y'",
")",
"elif",
"len",
"(",
"dstr",
".",
"split",
"(",
"\"-\"",
")",
")",
"==",
"3",
":",
"d",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"dstr",
",",
"'%Y-%m-%d'",
")",
"else",
":",
"# Not necessarily an error, could just be a non-date cell",
"logging",
".",
"debug",
"(",
"\"unknown date-format: {}\"",
".",
"format",
"(",
"dstr",
")",
")",
"continue",
"dates",
".",
"append",
"(",
"d",
")",
"if",
"not",
"found_first",
":",
"found_first",
"=",
"True",
"logging",
".",
"debug",
"(",
"\"Found first date: '{}' at i: {}\"",
".",
"format",
"(",
"d",
".",
"isoformat",
"(",
")",
",",
"i",
")",
")",
"elif",
"found_first",
":",
"logging",
".",
"debug",
"(",
"\"Last date: {}\"",
".",
"format",
"(",
"d",
")",
")",
"break",
"return",
"dates"
] |
Goes through the first column of input table and
returns the first sequence of dates it finds.
|
[
"Goes",
"through",
"the",
"first",
"column",
"of",
"input",
"table",
"and",
"returns",
"the",
"first",
"sequence",
"of",
"dates",
"it",
"finds",
"."
] |
eb0572c7441a350bf5578bc5287f3be53d32ea19
|
https://github.com/ErikBjare/pyzenobase/blob/eb0572c7441a350bf5578bc5287f3be53d32ea19/examples/upload_lifelogger_spreadsheet/main.py#L63-L87
|
239,921
|
ErikBjare/pyzenobase
|
examples/upload_lifelogger_spreadsheet/main.py
|
Lifelogger_to_Zenobase.get_main
|
def get_main(self) -> 'table[category: str][label: str][date: date]':
"""
Returns a table with the above typesignature
"""
raw_table = self.get_raw_table("M")
categories = raw_table[0]
labels = raw_table[1]
dates = self.get_dates(raw_table)
def next_cat_col(i):
n = 1
while True:
if i+n > len(categories)-1:
return i
if categories[i+n]:
return i+n
n += 1
def get_category_labels(i):
end_col = next_cat_col(i)
return zip(range(i, end_col), labels[i:end_col])
def get_label_cells(category, label):
ci = categories.index(category)
i = labels.index(label, ci)
cells = {}
for j, d in enumerate(dates):
cell = raw_table[j+2][i]
if cell and cell != "#VALUE!":
cells[d] = cell
return cells
table = {}
for i, cat in enumerate(categories):
if not cat:
continue
table[cat] = {}
for i, label in get_category_labels(i):
table[cat][label] = get_label_cells(cat, label)
return table
|
python
|
def get_main(self) -> 'table[category: str][label: str][date: date]':
"""
Returns a table with the above typesignature
"""
raw_table = self.get_raw_table("M")
categories = raw_table[0]
labels = raw_table[1]
dates = self.get_dates(raw_table)
def next_cat_col(i):
n = 1
while True:
if i+n > len(categories)-1:
return i
if categories[i+n]:
return i+n
n += 1
def get_category_labels(i):
end_col = next_cat_col(i)
return zip(range(i, end_col), labels[i:end_col])
def get_label_cells(category, label):
ci = categories.index(category)
i = labels.index(label, ci)
cells = {}
for j, d in enumerate(dates):
cell = raw_table[j+2][i]
if cell and cell != "#VALUE!":
cells[d] = cell
return cells
table = {}
for i, cat in enumerate(categories):
if not cat:
continue
table[cat] = {}
for i, label in get_category_labels(i):
table[cat][label] = get_label_cells(cat, label)
return table
|
[
"def",
"get_main",
"(",
"self",
")",
"->",
"'table[category: str][label: str][date: date]'",
":",
"raw_table",
"=",
"self",
".",
"get_raw_table",
"(",
"\"M\"",
")",
"categories",
"=",
"raw_table",
"[",
"0",
"]",
"labels",
"=",
"raw_table",
"[",
"1",
"]",
"dates",
"=",
"self",
".",
"get_dates",
"(",
"raw_table",
")",
"def",
"next_cat_col",
"(",
"i",
")",
":",
"n",
"=",
"1",
"while",
"True",
":",
"if",
"i",
"+",
"n",
">",
"len",
"(",
"categories",
")",
"-",
"1",
":",
"return",
"i",
"if",
"categories",
"[",
"i",
"+",
"n",
"]",
":",
"return",
"i",
"+",
"n",
"n",
"+=",
"1",
"def",
"get_category_labels",
"(",
"i",
")",
":",
"end_col",
"=",
"next_cat_col",
"(",
"i",
")",
"return",
"zip",
"(",
"range",
"(",
"i",
",",
"end_col",
")",
",",
"labels",
"[",
"i",
":",
"end_col",
"]",
")",
"def",
"get_label_cells",
"(",
"category",
",",
"label",
")",
":",
"ci",
"=",
"categories",
".",
"index",
"(",
"category",
")",
"i",
"=",
"labels",
".",
"index",
"(",
"label",
",",
"ci",
")",
"cells",
"=",
"{",
"}",
"for",
"j",
",",
"d",
"in",
"enumerate",
"(",
"dates",
")",
":",
"cell",
"=",
"raw_table",
"[",
"j",
"+",
"2",
"]",
"[",
"i",
"]",
"if",
"cell",
"and",
"cell",
"!=",
"\"#VALUE!\"",
":",
"cells",
"[",
"d",
"]",
"=",
"cell",
"return",
"cells",
"table",
"=",
"{",
"}",
"for",
"i",
",",
"cat",
"in",
"enumerate",
"(",
"categories",
")",
":",
"if",
"not",
"cat",
":",
"continue",
"table",
"[",
"cat",
"]",
"=",
"{",
"}",
"for",
"i",
",",
"label",
"in",
"get_category_labels",
"(",
"i",
")",
":",
"table",
"[",
"cat",
"]",
"[",
"label",
"]",
"=",
"get_label_cells",
"(",
"cat",
",",
"label",
")",
"return",
"table"
] |
Returns a table with the above typesignature
|
[
"Returns",
"a",
"table",
"with",
"the",
"above",
"typesignature"
] |
eb0572c7441a350bf5578bc5287f3be53d32ea19
|
https://github.com/ErikBjare/pyzenobase/blob/eb0572c7441a350bf5578bc5287f3be53d32ea19/examples/upload_lifelogger_spreadsheet/main.py#L89-L130
|
239,922
|
funkybob/antfarm
|
antfarm/response.py
|
Response.build_headers
|
def build_headers(self):
'''
Return the list of headers as two-tuples
'''
if not 'Content-Type' in self.headers:
content_type = self.content_type
if self.encoding != DEFAULT_ENCODING:
content_type += '; charset=%s' % self.encoding
self.headers['Content-Type'] = content_type
headers = list(self.headers.items())
# Append cookies
headers += [
('Set-Cookie', cookie.OutputString())
for cookie in self.cookies.values()
]
return headers
|
python
|
def build_headers(self):
'''
Return the list of headers as two-tuples
'''
if not 'Content-Type' in self.headers:
content_type = self.content_type
if self.encoding != DEFAULT_ENCODING:
content_type += '; charset=%s' % self.encoding
self.headers['Content-Type'] = content_type
headers = list(self.headers.items())
# Append cookies
headers += [
('Set-Cookie', cookie.OutputString())
for cookie in self.cookies.values()
]
return headers
|
[
"def",
"build_headers",
"(",
"self",
")",
":",
"if",
"not",
"'Content-Type'",
"in",
"self",
".",
"headers",
":",
"content_type",
"=",
"self",
".",
"content_type",
"if",
"self",
".",
"encoding",
"!=",
"DEFAULT_ENCODING",
":",
"content_type",
"+=",
"'; charset=%s'",
"%",
"self",
".",
"encoding",
"self",
".",
"headers",
"[",
"'Content-Type'",
"]",
"=",
"content_type",
"headers",
"=",
"list",
"(",
"self",
".",
"headers",
".",
"items",
"(",
")",
")",
"# Append cookies",
"headers",
"+=",
"[",
"(",
"'Set-Cookie'",
",",
"cookie",
".",
"OutputString",
"(",
")",
")",
"for",
"cookie",
"in",
"self",
".",
"cookies",
".",
"values",
"(",
")",
"]",
"return",
"headers"
] |
Return the list of headers as two-tuples
|
[
"Return",
"the",
"list",
"of",
"headers",
"as",
"two",
"-",
"tuples"
] |
40a7cc450eba09a280b7bc8f7c68a807b0177c62
|
https://github.com/funkybob/antfarm/blob/40a7cc450eba09a280b7bc8f7c68a807b0177c62/antfarm/response.py#L90-L106
|
239,923
|
funkybob/antfarm
|
antfarm/response.py
|
Response.add_cookie
|
def add_cookie(self, key, value, **attrs):
'''
Finer control over cookies. Allow specifying an Morsel arguments.
'''
if attrs:
c = Morsel()
c.set(key, value, **attrs)
self.cookies[key] = c
else:
self.cookies[key] = value
|
python
|
def add_cookie(self, key, value, **attrs):
'''
Finer control over cookies. Allow specifying an Morsel arguments.
'''
if attrs:
c = Morsel()
c.set(key, value, **attrs)
self.cookies[key] = c
else:
self.cookies[key] = value
|
[
"def",
"add_cookie",
"(",
"self",
",",
"key",
",",
"value",
",",
"*",
"*",
"attrs",
")",
":",
"if",
"attrs",
":",
"c",
"=",
"Morsel",
"(",
")",
"c",
".",
"set",
"(",
"key",
",",
"value",
",",
"*",
"*",
"attrs",
")",
"self",
".",
"cookies",
"[",
"key",
"]",
"=",
"c",
"else",
":",
"self",
".",
"cookies",
"[",
"key",
"]",
"=",
"value"
] |
Finer control over cookies. Allow specifying an Morsel arguments.
|
[
"Finer",
"control",
"over",
"cookies",
".",
"Allow",
"specifying",
"an",
"Morsel",
"arguments",
"."
] |
40a7cc450eba09a280b7bc8f7c68a807b0177c62
|
https://github.com/funkybob/antfarm/blob/40a7cc450eba09a280b7bc8f7c68a807b0177c62/antfarm/response.py#L108-L117
|
239,924
|
funkybob/antfarm
|
antfarm/response.py
|
Response.status
|
def status(self):
'''Allow custom status messages'''
message = self.status_message
if message is None:
message = STATUS[self.status_code]
return '%s %s' % (self.status_code, message)
|
python
|
def status(self):
'''Allow custom status messages'''
message = self.status_message
if message is None:
message = STATUS[self.status_code]
return '%s %s' % (self.status_code, message)
|
[
"def",
"status",
"(",
"self",
")",
":",
"message",
"=",
"self",
".",
"status_message",
"if",
"message",
"is",
"None",
":",
"message",
"=",
"STATUS",
"[",
"self",
".",
"status_code",
"]",
"return",
"'%s %s'",
"%",
"(",
"self",
".",
"status_code",
",",
"message",
")"
] |
Allow custom status messages
|
[
"Allow",
"custom",
"status",
"messages"
] |
40a7cc450eba09a280b7bc8f7c68a807b0177c62
|
https://github.com/funkybob/antfarm/blob/40a7cc450eba09a280b7bc8f7c68a807b0177c62/antfarm/response.py#L120-L125
|
239,925
|
jambonrose/django-decorator-plus
|
decorator_plus/version.py
|
get_git_changeset
|
def get_git_changeset():
"""Get git identifier; taken from Django project."""
git_log = Popen(
'git log --pretty=format:%ct --quiet -1 HEAD',
stdout=PIPE, stderr=PIPE, shell=True, universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')
|
python
|
def get_git_changeset():
"""Get git identifier; taken from Django project."""
git_log = Popen(
'git log --pretty=format:%ct --quiet -1 HEAD',
stdout=PIPE, stderr=PIPE, shell=True, universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')
|
[
"def",
"get_git_changeset",
"(",
")",
":",
"git_log",
"=",
"Popen",
"(",
"'git log --pretty=format:%ct --quiet -1 HEAD'",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
",",
"shell",
"=",
"True",
",",
"universal_newlines",
"=",
"True",
")",
"timestamp",
"=",
"git_log",
".",
"communicate",
"(",
")",
"[",
"0",
"]",
"try",
":",
"timestamp",
"=",
"datetime",
".",
"utcfromtimestamp",
"(",
"int",
"(",
"timestamp",
")",
")",
"except",
"ValueError",
":",
"return",
"None",
"return",
"timestamp",
".",
"strftime",
"(",
"'%Y%m%d%H%M%S'",
")"
] |
Get git identifier; taken from Django project.
|
[
"Get",
"git",
"identifier",
";",
"taken",
"from",
"Django",
"project",
"."
] |
2039ef7a4b5f8abb29e455e8218c207fcae24e2c
|
https://github.com/jambonrose/django-decorator-plus/blob/2039ef7a4b5f8abb29e455e8218c207fcae24e2c/decorator_plus/version.py#L25-L35
|
239,926
|
jambonrose/django-decorator-plus
|
decorator_plus/version.py
|
get_version
|
def get_version(version_tuple):
"""Convert 4-tuple into a PEP 440 compliant string."""
if version_tuple[3] == FINAL and version_tuple[4] != 0:
raise Exception(
'Project version number misconfigured:\n'
' version may not be final and have segment number.')
if version_tuple[3] not in (DEV, FINAL) and version_tuple[4] == 0:
raise Exception(
'Project version number misconfigured:\n'
' version must have segment number.')
if version_tuple[3] == DEV:
segment_num = get_git_changeset()
else:
segment_num = str(abs(version_tuple[4]))
# X.X.X
sem_ver = ".".join([
str(abs(int(number)))
for number in version_tuple[:3]
])
if version_tuple[3] != FINAL:
if version_tuple[3] in (ALPHA, BETA, RC):
sem_ver = "%s%s%s" % (sem_ver, version_tuple[3], segment_num)
elif version_tuple[3] in (DEV, POST):
sem_ver = "%s%s%s%s" % (
sem_ver, SEPARATOR, version_tuple[3], segment_num)
else:
raise Exception(
'Project version number misconfigured:\n'
' Unrecognized release type')
return sem_ver
|
python
|
def get_version(version_tuple):
"""Convert 4-tuple into a PEP 440 compliant string."""
if version_tuple[3] == FINAL and version_tuple[4] != 0:
raise Exception(
'Project version number misconfigured:\n'
' version may not be final and have segment number.')
if version_tuple[3] not in (DEV, FINAL) and version_tuple[4] == 0:
raise Exception(
'Project version number misconfigured:\n'
' version must have segment number.')
if version_tuple[3] == DEV:
segment_num = get_git_changeset()
else:
segment_num = str(abs(version_tuple[4]))
# X.X.X
sem_ver = ".".join([
str(abs(int(number)))
for number in version_tuple[:3]
])
if version_tuple[3] != FINAL:
if version_tuple[3] in (ALPHA, BETA, RC):
sem_ver = "%s%s%s" % (sem_ver, version_tuple[3], segment_num)
elif version_tuple[3] in (DEV, POST):
sem_ver = "%s%s%s%s" % (
sem_ver, SEPARATOR, version_tuple[3], segment_num)
else:
raise Exception(
'Project version number misconfigured:\n'
' Unrecognized release type')
return sem_ver
|
[
"def",
"get_version",
"(",
"version_tuple",
")",
":",
"if",
"version_tuple",
"[",
"3",
"]",
"==",
"FINAL",
"and",
"version_tuple",
"[",
"4",
"]",
"!=",
"0",
":",
"raise",
"Exception",
"(",
"'Project version number misconfigured:\\n'",
"' version may not be final and have segment number.'",
")",
"if",
"version_tuple",
"[",
"3",
"]",
"not",
"in",
"(",
"DEV",
",",
"FINAL",
")",
"and",
"version_tuple",
"[",
"4",
"]",
"==",
"0",
":",
"raise",
"Exception",
"(",
"'Project version number misconfigured:\\n'",
"' version must have segment number.'",
")",
"if",
"version_tuple",
"[",
"3",
"]",
"==",
"DEV",
":",
"segment_num",
"=",
"get_git_changeset",
"(",
")",
"else",
":",
"segment_num",
"=",
"str",
"(",
"abs",
"(",
"version_tuple",
"[",
"4",
"]",
")",
")",
"# X.X.X",
"sem_ver",
"=",
"\".\"",
".",
"join",
"(",
"[",
"str",
"(",
"abs",
"(",
"int",
"(",
"number",
")",
")",
")",
"for",
"number",
"in",
"version_tuple",
"[",
":",
"3",
"]",
"]",
")",
"if",
"version_tuple",
"[",
"3",
"]",
"!=",
"FINAL",
":",
"if",
"version_tuple",
"[",
"3",
"]",
"in",
"(",
"ALPHA",
",",
"BETA",
",",
"RC",
")",
":",
"sem_ver",
"=",
"\"%s%s%s\"",
"%",
"(",
"sem_ver",
",",
"version_tuple",
"[",
"3",
"]",
",",
"segment_num",
")",
"elif",
"version_tuple",
"[",
"3",
"]",
"in",
"(",
"DEV",
",",
"POST",
")",
":",
"sem_ver",
"=",
"\"%s%s%s%s\"",
"%",
"(",
"sem_ver",
",",
"SEPARATOR",
",",
"version_tuple",
"[",
"3",
"]",
",",
"segment_num",
")",
"else",
":",
"raise",
"Exception",
"(",
"'Project version number misconfigured:\\n'",
"' Unrecognized release type'",
")",
"return",
"sem_ver"
] |
Convert 4-tuple into a PEP 440 compliant string.
|
[
"Convert",
"4",
"-",
"tuple",
"into",
"a",
"PEP",
"440",
"compliant",
"string",
"."
] |
2039ef7a4b5f8abb29e455e8218c207fcae24e2c
|
https://github.com/jambonrose/django-decorator-plus/blob/2039ef7a4b5f8abb29e455e8218c207fcae24e2c/decorator_plus/version.py#L38-L72
|
239,927
|
Meseira/subordinate
|
subordinate/idmap.py
|
IdMap.append
|
def append(self, name):
"""If name is not in the map, append it with an empty id range set."""
if not isinstance(name, str):
raise TypeError(
"argument 'name' must be a string, not {}".format(
name.__class__.__name__
)
)
if not name:
raise ValueError("argument 'name' cannot be empty")
if not name in self.__map:
self.__map[name] = IdRangeSet()
|
python
|
def append(self, name):
"""If name is not in the map, append it with an empty id range set."""
if not isinstance(name, str):
raise TypeError(
"argument 'name' must be a string, not {}".format(
name.__class__.__name__
)
)
if not name:
raise ValueError("argument 'name' cannot be empty")
if not name in self.__map:
self.__map[name] = IdRangeSet()
|
[
"def",
"append",
"(",
"self",
",",
"name",
")",
":",
"if",
"not",
"isinstance",
"(",
"name",
",",
"str",
")",
":",
"raise",
"TypeError",
"(",
"\"argument 'name' must be a string, not {}\"",
".",
"format",
"(",
"name",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"not",
"name",
":",
"raise",
"ValueError",
"(",
"\"argument 'name' cannot be empty\"",
")",
"if",
"not",
"name",
"in",
"self",
".",
"__map",
":",
"self",
".",
"__map",
"[",
"name",
"]",
"=",
"IdRangeSet",
"(",
")"
] |
If name is not in the map, append it with an empty id range set.
|
[
"If",
"name",
"is",
"not",
"in",
"the",
"map",
"append",
"it",
"with",
"an",
"empty",
"id",
"range",
"set",
"."
] |
3438df304af3dccc5bd1515231402afa708f1cc3
|
https://github.com/Meseira/subordinate/blob/3438df304af3dccc5bd1515231402afa708f1cc3/subordinate/idmap.py#L74-L88
|
239,928
|
Meseira/subordinate
|
subordinate/idmap.py
|
IdMap.who_has
|
def who_has(self, subid):
"""Return a list of names who own subid in their id range set."""
answer = []
for name in self.__map:
if subid in self.__map[name] and not name in answer:
answer.append(name)
return answer
|
python
|
def who_has(self, subid):
"""Return a list of names who own subid in their id range set."""
answer = []
for name in self.__map:
if subid in self.__map[name] and not name in answer:
answer.append(name)
return answer
|
[
"def",
"who_has",
"(",
"self",
",",
"subid",
")",
":",
"answer",
"=",
"[",
"]",
"for",
"name",
"in",
"self",
".",
"__map",
":",
"if",
"subid",
"in",
"self",
".",
"__map",
"[",
"name",
"]",
"and",
"not",
"name",
"in",
"answer",
":",
"answer",
".",
"append",
"(",
"name",
")",
"return",
"answer"
] |
Return a list of names who own subid in their id range set.
|
[
"Return",
"a",
"list",
"of",
"names",
"who",
"own",
"subid",
"in",
"their",
"id",
"range",
"set",
"."
] |
3438df304af3dccc5bd1515231402afa708f1cc3
|
https://github.com/Meseira/subordinate/blob/3438df304af3dccc5bd1515231402afa708f1cc3/subordinate/idmap.py#L177-L185
|
239,929
|
fdb/aufmachen
|
aufmachen/crawler.py
|
cache_path_for_url
|
def cache_path_for_url(url):
"""Return the path where the URL might be cached."""
m = hashlib.md5()
m.update(url)
digest = m.hexdigest()
return os.path.join(CACHE_DIRECTORY, '%s.html' % digest)
|
python
|
def cache_path_for_url(url):
"""Return the path where the URL might be cached."""
m = hashlib.md5()
m.update(url)
digest = m.hexdigest()
return os.path.join(CACHE_DIRECTORY, '%s.html' % digest)
|
[
"def",
"cache_path_for_url",
"(",
"url",
")",
":",
"m",
"=",
"hashlib",
".",
"md5",
"(",
")",
"m",
".",
"update",
"(",
"url",
")",
"digest",
"=",
"m",
".",
"hexdigest",
"(",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"CACHE_DIRECTORY",
",",
"'%s.html'",
"%",
"digest",
")"
] |
Return the path where the URL might be cached.
|
[
"Return",
"the",
"path",
"where",
"the",
"URL",
"might",
"be",
"cached",
"."
] |
f2986a0cf087ac53969f82b84d872e3f1c6986f4
|
https://github.com/fdb/aufmachen/blob/f2986a0cf087ac53969f82b84d872e3f1c6986f4/aufmachen/crawler.py#L71-L76
|
239,930
|
fdb/aufmachen
|
aufmachen/crawler.py
|
get_url
|
def get_url(url, data=None, cached=True, cache_key=None, crawler='urllib'):
"""Retrieves the HTML code for a given URL.
If a cached version is not available, uses phantom_retrieve to fetch the page.
data - Additional data that gets passed onto the crawler.
cached - If True, retrieves the URL from the cache if it is available. If False, will still store the page in cache.
cache_key - If set, will be used instead of the URL to lookup the cached version of the page.
crawler - A string referencing one of the builtin crawlers.
Returns the HTML as a unicode string.
Raises a HttpNotFound exception if the page could not be found.
"""
if cache_key is None:
cache_key = url
cache_path = cache_path_for_url(cache_key)
if cached and os.path.exists(cache_path):
with open(cache_path) as f:
html = f.read().decode('utf-8')
else:
if FAIL_IF_NOT_CACHED:
raise BaseException("URL is not in cache and FAIL_IF_NOT_CACHED is True: %s" % url)
crawler_fn = CRAWLERS[crawler]
status, html = crawler_fn(url, data)
if status != 200:
raise HttpNotFound(url)
_ensure_directory(CACHE_DIRECTORY)
with open(cache_path, 'w') as f:
f.write(html.encode('utf-8'))
return html
|
python
|
def get_url(url, data=None, cached=True, cache_key=None, crawler='urllib'):
"""Retrieves the HTML code for a given URL.
If a cached version is not available, uses phantom_retrieve to fetch the page.
data - Additional data that gets passed onto the crawler.
cached - If True, retrieves the URL from the cache if it is available. If False, will still store the page in cache.
cache_key - If set, will be used instead of the URL to lookup the cached version of the page.
crawler - A string referencing one of the builtin crawlers.
Returns the HTML as a unicode string.
Raises a HttpNotFound exception if the page could not be found.
"""
if cache_key is None:
cache_key = url
cache_path = cache_path_for_url(cache_key)
if cached and os.path.exists(cache_path):
with open(cache_path) as f:
html = f.read().decode('utf-8')
else:
if FAIL_IF_NOT_CACHED:
raise BaseException("URL is not in cache and FAIL_IF_NOT_CACHED is True: %s" % url)
crawler_fn = CRAWLERS[crawler]
status, html = crawler_fn(url, data)
if status != 200:
raise HttpNotFound(url)
_ensure_directory(CACHE_DIRECTORY)
with open(cache_path, 'w') as f:
f.write(html.encode('utf-8'))
return html
|
[
"def",
"get_url",
"(",
"url",
",",
"data",
"=",
"None",
",",
"cached",
"=",
"True",
",",
"cache_key",
"=",
"None",
",",
"crawler",
"=",
"'urllib'",
")",
":",
"if",
"cache_key",
"is",
"None",
":",
"cache_key",
"=",
"url",
"cache_path",
"=",
"cache_path_for_url",
"(",
"cache_key",
")",
"if",
"cached",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"cache_path",
")",
":",
"with",
"open",
"(",
"cache_path",
")",
"as",
"f",
":",
"html",
"=",
"f",
".",
"read",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"else",
":",
"if",
"FAIL_IF_NOT_CACHED",
":",
"raise",
"BaseException",
"(",
"\"URL is not in cache and FAIL_IF_NOT_CACHED is True: %s\"",
"%",
"url",
")",
"crawler_fn",
"=",
"CRAWLERS",
"[",
"crawler",
"]",
"status",
",",
"html",
"=",
"crawler_fn",
"(",
"url",
",",
"data",
")",
"if",
"status",
"!=",
"200",
":",
"raise",
"HttpNotFound",
"(",
"url",
")",
"_ensure_directory",
"(",
"CACHE_DIRECTORY",
")",
"with",
"open",
"(",
"cache_path",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"html",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"return",
"html"
] |
Retrieves the HTML code for a given URL.
If a cached version is not available, uses phantom_retrieve to fetch the page.
data - Additional data that gets passed onto the crawler.
cached - If True, retrieves the URL from the cache if it is available. If False, will still store the page in cache.
cache_key - If set, will be used instead of the URL to lookup the cached version of the page.
crawler - A string referencing one of the builtin crawlers.
Returns the HTML as a unicode string.
Raises a HttpNotFound exception if the page could not be found.
|
[
"Retrieves",
"the",
"HTML",
"code",
"for",
"a",
"given",
"URL",
".",
"If",
"a",
"cached",
"version",
"is",
"not",
"available",
"uses",
"phantom_retrieve",
"to",
"fetch",
"the",
"page",
"."
] |
f2986a0cf087ac53969f82b84d872e3f1c6986f4
|
https://github.com/fdb/aufmachen/blob/f2986a0cf087ac53969f82b84d872e3f1c6986f4/aufmachen/crawler.py#L89-L117
|
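A minimal sketch of the caching scheme the two crawler records above rely on: cache_path_for_url hashes the cache key (by default the URL) with MD5 and uses the hex digest as an .html filename under the cache directory, and get_url reads or writes that file around the crawl. CACHE_DIRECTORY is an assumed value here, and the key is encoded explicitly because hashlib.md5 in Python 3 requires bytes (the recorded code passes a str, Python 2 style).

import hashlib
import os

CACHE_DIRECTORY = "cache"  # assumed value; the real constant is defined elsewhere in crawler.py

def cache_path(key):
    # Same idea as cache_path_for_url above, with an explicit encode for Python 3.
    digest = hashlib.md5(key.encode("utf-8")).hexdigest()
    return os.path.join(CACHE_DIRECTORY, "%s.html" % digest)

print(cache_path("http://example.com/page"))  # cache/<32-char hex digest>.html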
239,931
|
ArabellaTech/aa-intercom
|
aa_intercom/models.py
|
IntercomEvent.get_intercom_data
|
def get_intercom_data(self):
"""Specify the data sent to Intercom API according to event type"""
data = {
"event_name": self.get_type_display(), # event type
"created_at": calendar.timegm(self.created.utctimetuple()), # date
"metadata": self.metadata
}
if self.user:
data["user_id"] = self.user.intercom_id
return data
|
python
|
def get_intercom_data(self):
"""Specify the data sent to Intercom API according to event type"""
data = {
"event_name": self.get_type_display(), # event type
"created_at": calendar.timegm(self.created.utctimetuple()), # date
"metadata": self.metadata
}
if self.user:
data["user_id"] = self.user.intercom_id
return data
|
[
"def",
"get_intercom_data",
"(",
"self",
")",
":",
"data",
"=",
"{",
"\"event_name\"",
":",
"self",
".",
"get_type_display",
"(",
")",
",",
"# event type",
"\"created_at\"",
":",
"calendar",
".",
"timegm",
"(",
"self",
".",
"created",
".",
"utctimetuple",
"(",
")",
")",
",",
"# date",
"\"metadata\"",
":",
"self",
".",
"metadata",
"}",
"if",
"self",
".",
"user",
":",
"data",
"[",
"\"user_id\"",
"]",
"=",
"self",
".",
"user",
".",
"intercom_id",
"return",
"data"
] |
Specify the data sent to Intercom API according to event type
|
[
"Specify",
"the",
"data",
"sent",
"to",
"Intercom",
"API",
"according",
"to",
"event",
"type"
] |
f7e2ab63967529660f9c2fe4f1d0bf3cec1502c2
|
https://github.com/ArabellaTech/aa-intercom/blob/f7e2ab63967529660f9c2fe4f1d0bf3cec1502c2/aa_intercom/models.py#L33-L42
|
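A small sketch of the created_at conversion used in the record above: calendar.timegm interprets a time tuple as UTC and returns a Unix timestamp, which is the integer form the event payload carries as created_at (time.mktime, by contrast, would assume local time). The datetime below is made up for illustration.

import calendar
from datetime import datetime, timezone

created = datetime(2019, 1, 1, 12, 0, 0, tzinfo=timezone.utc)  # stand-in for self.created
print(calendar.timegm(created.utctimetuple()))  # 1546344000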
239,932
|
openp2pdesign/makerlabs
|
makerlabs/makeinitaly_foundation.py
|
get_lab_text
|
def get_lab_text(lab_slug, language):
"""Gets text description in English or Italian from a single lab from makeinitaly.foundation."""
if language == "English" or language == "english" or language == "EN" or language == "En":
language = "en"
elif language == "Italian" or language == "italian" or language == "IT" or language == "It" or language == "it":
language = "it"
else:
language = "en"
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call(
{'action': 'query',
'titles': lab_slug + "/" + language,
'prop': 'revisions',
'rvprop': 'content'})
# If we don't know the pageid...
for i in wiki_response["query"]["pages"]:
if "revisions" in wiki_response["query"]["pages"][i]:
content = wiki_response["query"]["pages"][i]["revisions"][0]["*"]
else:
content = ""
# Clean the resulting string/list
newstr01 = content.replace("}}", "")
newstr02 = newstr01.replace("{{", "")
result = newstr02.rstrip("\n|").split("\n|")
return result[0]
|
python
|
def get_lab_text(lab_slug, language):
"""Gets text description in English or Italian from a single lab from makeinitaly.foundation."""
if language == "English" or language == "english" or language == "EN" or language == "En":
language = "en"
elif language == "Italian" or language == "italian" or language == "IT" or language == "It" or language == "it":
language = "it"
else:
language = "en"
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call(
{'action': 'query',
'titles': lab_slug + "/" + language,
'prop': 'revisions',
'rvprop': 'content'})
# If we don't know the pageid...
for i in wiki_response["query"]["pages"]:
if "revisions" in wiki_response["query"]["pages"][i]:
content = wiki_response["query"]["pages"][i]["revisions"][0]["*"]
else:
content = ""
# Clean the resulting string/list
newstr01 = content.replace("}}", "")
newstr02 = newstr01.replace("{{", "")
result = newstr02.rstrip("\n|").split("\n|")
return result[0]
|
[
"def",
"get_lab_text",
"(",
"lab_slug",
",",
"language",
")",
":",
"if",
"language",
"==",
"\"English\"",
"or",
"language",
"==",
"\"english\"",
"or",
"language",
"==",
"\"EN\"",
"or",
"language",
"==",
"\"En\"",
":",
"language",
"=",
"\"en\"",
"elif",
"language",
"==",
"\"Italian\"",
"or",
"language",
"==",
"\"italian\"",
"or",
"language",
"==",
"\"IT\"",
"or",
"language",
"==",
"\"It\"",
"or",
"language",
"==",
"\"it\"",
":",
"language",
"=",
"\"it\"",
"else",
":",
"language",
"=",
"\"en\"",
"wiki",
"=",
"MediaWiki",
"(",
"makeinitaly__foundation_api_url",
")",
"wiki_response",
"=",
"wiki",
".",
"call",
"(",
"{",
"'action'",
":",
"'query'",
",",
"'titles'",
":",
"lab_slug",
"+",
"\"/\"",
"+",
"language",
",",
"'prop'",
":",
"'revisions'",
",",
"'rvprop'",
":",
"'content'",
"}",
")",
"# If we don't know the pageid...",
"for",
"i",
"in",
"wiki_response",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
":",
"if",
"\"revisions\"",
"in",
"wiki_response",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
"[",
"i",
"]",
":",
"content",
"=",
"wiki_response",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
"[",
"i",
"]",
"[",
"\"revisions\"",
"]",
"[",
"0",
"]",
"[",
"\"*\"",
"]",
"else",
":",
"content",
"=",
"\"\"",
"# Clean the resulting string/list",
"newstr01",
"=",
"content",
".",
"replace",
"(",
"\"}}\"",
",",
"\"\"",
")",
"newstr02",
"=",
"newstr01",
".",
"replace",
"(",
"\"{{\"",
",",
"\"\"",
")",
"result",
"=",
"newstr02",
".",
"rstrip",
"(",
"\"\\n|\"",
")",
".",
"split",
"(",
"\"\\n|\"",
")",
"return",
"result",
"[",
"0",
"]"
] |
Gets text description in English or Italian from a single lab from makeinitaly.foundation.
|
[
"Gets",
"text",
"description",
"in",
"English",
"or",
"Italian",
"from",
"a",
"single",
"lab",
"from",
"makeinitaly",
".",
"foundation",
"."
] |
b5838440174f10d370abb671358db9a99d7739fd
|
https://github.com/openp2pdesign/makerlabs/blob/b5838440174f10d370abb671358db9a99d7739fd/makerlabs/makeinitaly_foundation.py#L34-L61
|
239,933
|
openp2pdesign/makerlabs
|
makerlabs/makeinitaly_foundation.py
|
get_single_lab
|
def get_single_lab(lab_slug):
"""Gets data from a single lab from makeinitaly.foundation."""
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call(
{'action': 'query',
'titles': lab_slug,
'prop': 'revisions',
'rvprop': 'content'})
# If we don't know the pageid...
for i in wiki_response["query"]["pages"]:
content = wiki_response["query"]["pages"][i]["revisions"][0]["*"]
# Clean the resulting string/list
newstr01 = content.replace("}}", "")
newstr02 = newstr01.replace("{{", "")
result = newstr02.rstrip("\n|").split("\n|")
# result.remove(u'FabLab')
# Transform the data into a Lab object
current_lab = MILab()
# Add existing data
for i in result:
if "coordinates=" in i:
value = i.replace("coordinates=", "")
current_lab.coordinates = value
latlong = []
if ", " in value:
latlong = value.rstrip(", ").split(", ")
elif " , " in value:
latlong = value.rstrip(" , ").split(" , ")
else:
latlong = ["", ""]
current_lab.latitude = latlong[0]
current_lab.longitude = latlong[1]
elif "province=" in i:
value = i.replace("province=", "")
current_lab.province = value.upper()
elif "region=" in i:
value = i.replace("region=", "")
current_lab.region = value
elif "address=" in i:
value = i.replace("address=", "")
current_lab.address = value
elif "city=" in i:
value = i.replace("city=", "")
current_lab.city = value
elif "fablabsio=" in i:
value = i.replace("fablabsio=", "")
current_lab.fablabsio = value
elif "website=" in i:
value = i.replace("website=", "")
current_lab.website = value
elif "facebook=" in i:
value = i.replace("facebook=", "")
current_lab.facebook = value
elif "twitter=" in i:
value = i.replace("twitter=", "")
current_lab.twitter = value
elif "email=" in i:
value = i.replace("email=", "")
current_lab.email = value
elif "manager=" in i:
value = i.replace("manager=", "")
current_lab.manager = value
elif "birthyear=" in i:
value = i.replace("birthyear=", "")
current_lab.birthyear = value
current_lab.text_en = get_lab_text(lab_slug=lab_slug, language="en")
current_lab.text_it = get_lab_text(lab_slug=lab_slug, language="it")
return current_lab
|
python
|
def get_single_lab(lab_slug):
"""Gets data from a single lab from makeinitaly.foundation."""
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call(
{'action': 'query',
'titles': lab_slug,
'prop': 'revisions',
'rvprop': 'content'})
# If we don't know the pageid...
for i in wiki_response["query"]["pages"]:
content = wiki_response["query"]["pages"][i]["revisions"][0]["*"]
# Clean the resulting string/list
newstr01 = content.replace("}}", "")
newstr02 = newstr01.replace("{{", "")
result = newstr02.rstrip("\n|").split("\n|")
# result.remove(u'FabLab')
# Transform the data into a Lab object
current_lab = MILab()
# Add existing data
for i in result:
if "coordinates=" in i:
value = i.replace("coordinates=", "")
current_lab.coordinates = value
latlong = []
if ", " in value:
latlong = value.rstrip(", ").split(", ")
elif " , " in value:
latlong = value.rstrip(" , ").split(" , ")
else:
latlong = ["", ""]
current_lab.latitude = latlong[0]
current_lab.longitude = latlong[1]
elif "province=" in i:
value = i.replace("province=", "")
current_lab.province = value.upper()
elif "region=" in i:
value = i.replace("region=", "")
current_lab.region = value
elif "address=" in i:
value = i.replace("address=", "")
current_lab.address = value
elif "city=" in i:
value = i.replace("city=", "")
current_lab.city = value
elif "fablabsio=" in i:
value = i.replace("fablabsio=", "")
current_lab.fablabsio = value
elif "website=" in i:
value = i.replace("website=", "")
current_lab.website = value
elif "facebook=" in i:
value = i.replace("facebook=", "")
current_lab.facebook = value
elif "twitter=" in i:
value = i.replace("twitter=", "")
current_lab.twitter = value
elif "email=" in i:
value = i.replace("email=", "")
current_lab.email = value
elif "manager=" in i:
value = i.replace("manager=", "")
current_lab.manager = value
elif "birthyear=" in i:
value = i.replace("birthyear=", "")
current_lab.birthyear = value
current_lab.text_en = get_lab_text(lab_slug=lab_slug, language="en")
current_lab.text_it = get_lab_text(lab_slug=lab_slug, language="it")
return current_lab
|
[
"def",
"get_single_lab",
"(",
"lab_slug",
")",
":",
"wiki",
"=",
"MediaWiki",
"(",
"makeinitaly__foundation_api_url",
")",
"wiki_response",
"=",
"wiki",
".",
"call",
"(",
"{",
"'action'",
":",
"'query'",
",",
"'titles'",
":",
"lab_slug",
",",
"'prop'",
":",
"'revisions'",
",",
"'rvprop'",
":",
"'content'",
"}",
")",
"# If we don't know the pageid...",
"for",
"i",
"in",
"wiki_response",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
":",
"content",
"=",
"wiki_response",
"[",
"\"query\"",
"]",
"[",
"\"pages\"",
"]",
"[",
"i",
"]",
"[",
"\"revisions\"",
"]",
"[",
"0",
"]",
"[",
"\"*\"",
"]",
"# Clean the resulting string/list",
"newstr01",
"=",
"content",
".",
"replace",
"(",
"\"}}\"",
",",
"\"\"",
")",
"newstr02",
"=",
"newstr01",
".",
"replace",
"(",
"\"{{\"",
",",
"\"\"",
")",
"result",
"=",
"newstr02",
".",
"rstrip",
"(",
"\"\\n|\"",
")",
".",
"split",
"(",
"\"\\n|\"",
")",
"# result.remove(u'FabLab')",
"# Transform the data into a Lab object",
"current_lab",
"=",
"MILab",
"(",
")",
"# Add existing data",
"for",
"i",
"in",
"result",
":",
"if",
"\"coordinates=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"coordinates=\"",
",",
"\"\"",
")",
"current_lab",
".",
"coordinates",
"=",
"value",
"latlong",
"=",
"[",
"]",
"if",
"\", \"",
"in",
"value",
":",
"latlong",
"=",
"value",
".",
"rstrip",
"(",
"\", \"",
")",
".",
"split",
"(",
"\", \"",
")",
"elif",
"\" , \"",
"in",
"value",
":",
"latlong",
"=",
"value",
".",
"rstrip",
"(",
"\" , \"",
")",
".",
"split",
"(",
"\" , \"",
")",
"else",
":",
"latlong",
"=",
"[",
"\"\"",
",",
"\"\"",
"]",
"current_lab",
".",
"latitude",
"=",
"latlong",
"[",
"0",
"]",
"current_lab",
".",
"longitude",
"=",
"latlong",
"[",
"1",
"]",
"elif",
"\"province=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"province=\"",
",",
"\"\"",
")",
"current_lab",
".",
"province",
"=",
"value",
".",
"upper",
"(",
")",
"elif",
"\"region=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"region=\"",
",",
"\"\"",
")",
"current_lab",
".",
"region",
"=",
"value",
"elif",
"\"address=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"address=\"",
",",
"\"\"",
")",
"current_lab",
".",
"address",
"=",
"value",
"elif",
"\"city=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"city=\"",
",",
"\"\"",
")",
"current_lab",
".",
"city",
"=",
"value",
"elif",
"\"fablabsio=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"fablabsio=\"",
",",
"\"\"",
")",
"current_lab",
".",
"fablabsio",
"=",
"value",
"elif",
"\"website=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"website=\"",
",",
"\"\"",
")",
"current_lab",
".",
"website",
"=",
"value",
"elif",
"\"facebook=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"facebook=\"",
",",
"\"\"",
")",
"current_lab",
".",
"facebook",
"=",
"value",
"elif",
"\"twitter=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"twitter=\"",
",",
"\"\"",
")",
"current_lab",
".",
"twitter",
"=",
"value",
"elif",
"\"email=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"email=\"",
",",
"\"\"",
")",
"current_lab",
".",
"email",
"=",
"value",
"elif",
"\"manager=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"manager=\"",
",",
"\"\"",
")",
"current_lab",
".",
"manager",
"=",
"value",
"elif",
"\"birthyear=\"",
"in",
"i",
":",
"value",
"=",
"i",
".",
"replace",
"(",
"\"birthyear=\"",
",",
"\"\"",
")",
"current_lab",
".",
"birthyear",
"=",
"value",
"current_lab",
".",
"text_en",
"=",
"get_lab_text",
"(",
"lab_slug",
"=",
"lab_slug",
",",
"language",
"=",
"\"en\"",
")",
"current_lab",
".",
"text_it",
"=",
"get_lab_text",
"(",
"lab_slug",
"=",
"lab_slug",
",",
"language",
"=",
"\"it\"",
")",
"return",
"current_lab"
] |
Gets data from a single lab from makeinitaly.foundation.
|
[
"Gets",
"data",
"from",
"a",
"single",
"lab",
"from",
"makeinitaly",
".",
"foundation",
"."
] |
b5838440174f10d370abb671358db9a99d7739fd
|
https://github.com/openp2pdesign/makerlabs/blob/b5838440174f10d370abb671358db9a99d7739fd/makerlabs/makeinitaly_foundation.py#L64-L137
|
239,934
|
openp2pdesign/makerlabs
|
makerlabs/makeinitaly_foundation.py
|
get_labs
|
def get_labs(format):
"""Gets data from all labs from makeinitaly.foundation."""
labs = []
# Get the first page of data
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call(
{'action': 'query',
'list': 'categorymembers',
'cmtitle': 'Category:Italian_FabLabs',
'cmlimit': '500'})
if "query-continue" in wiki_response:
nextpage = wiki_response[
"query-continue"]["categorymembers"]["cmcontinue"]
urls = []
for i in wiki_response["query"]["categorymembers"]:
urls.append(i["title"].replace(" ", "_"))
# Load all the Labs in the first page
for i in urls:
current_lab = get_single_lab(i)
labs.append(current_lab)
# Load all the Labs from the other pages
while "query-continue" in wiki_response:
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call({'action': 'query',
'list': 'categorymembers',
'cmtitle': 'Category:Hackerspace',
'cmlimit': '500',
"cmcontinue": nextpage})
urls = []
for i in wiki_response["query"]["categorymembers"]:
urls.append(i["title"].replace(" ", "_"))
# Load all the Labs
for i in urls:
current_lab = get_single_lab(i, data_format)
labs.append(current_lab)
if "query-continue" in wiki_response:
nextpage = wiki_response[
"query-continue"]["categorymembers"]["cmcontinue"]
else:
break
# Transform the list into a dictionary
labs_dict = {}
for j, k in enumerate(labs):
labs_dict[j] = k.__dict__
# Return a dictiornary / json
if format.lower() == "dict" or format.lower() == "json":
output = labs_dict
# Return a geojson
elif format.lower() == "geojson" or format.lower() == "geo":
labs_list = []
for l in labs_dict:
single = labs_dict[l].__dict__
single_lab = Feature(
type="Feature",
geometry=Point((single["latitude"], single["longitude"])),
properties=single)
labs_list.append(single_lab)
output = dumps(FeatureCollection(labs_list))
# Return a Pandas DataFrame
elif format.lower() == "pandas" or format.lower() == "dataframe":
output = {}
for j in labs_dict:
output[j] = labs_dict[j].__dict__
# Transform the dict into a Pandas DataFrame
output = pd.DataFrame.from_dict(output)
output = output.transpose()
# Return an object
elif format.lower() == "object" or format.lower() == "obj":
output = labs
# Default: return an object
else:
output = labs
# Return a proper json
if format.lower() == "json":
output = json.dumps(labs_dict)
return output
|
python
|
def get_labs(format):
"""Gets data from all labs from makeinitaly.foundation."""
labs = []
# Get the first page of data
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call(
{'action': 'query',
'list': 'categorymembers',
'cmtitle': 'Category:Italian_FabLabs',
'cmlimit': '500'})
if "query-continue" in wiki_response:
nextpage = wiki_response[
"query-continue"]["categorymembers"]["cmcontinue"]
urls = []
for i in wiki_response["query"]["categorymembers"]:
urls.append(i["title"].replace(" ", "_"))
# Load all the Labs in the first page
for i in urls:
current_lab = get_single_lab(i)
labs.append(current_lab)
# Load all the Labs from the other pages
while "query-continue" in wiki_response:
wiki = MediaWiki(makeinitaly__foundation_api_url)
wiki_response = wiki.call({'action': 'query',
'list': 'categorymembers',
'cmtitle': 'Category:Hackerspace',
'cmlimit': '500',
"cmcontinue": nextpage})
urls = []
for i in wiki_response["query"]["categorymembers"]:
urls.append(i["title"].replace(" ", "_"))
# Load all the Labs
for i in urls:
current_lab = get_single_lab(i, data_format)
labs.append(current_lab)
if "query-continue" in wiki_response:
nextpage = wiki_response[
"query-continue"]["categorymembers"]["cmcontinue"]
else:
break
# Transform the list into a dictionary
labs_dict = {}
for j, k in enumerate(labs):
labs_dict[j] = k.__dict__
# Return a dictiornary / json
if format.lower() == "dict" or format.lower() == "json":
output = labs_dict
# Return a geojson
elif format.lower() == "geojson" or format.lower() == "geo":
labs_list = []
for l in labs_dict:
single = labs_dict[l].__dict__
single_lab = Feature(
type="Feature",
geometry=Point((single["latitude"], single["longitude"])),
properties=single)
labs_list.append(single_lab)
output = dumps(FeatureCollection(labs_list))
# Return a Pandas DataFrame
elif format.lower() == "pandas" or format.lower() == "dataframe":
output = {}
for j in labs_dict:
output[j] = labs_dict[j].__dict__
# Transform the dict into a Pandas DataFrame
output = pd.DataFrame.from_dict(output)
output = output.transpose()
# Return an object
elif format.lower() == "object" or format.lower() == "obj":
output = labs
# Default: return an object
else:
output = labs
# Return a proper json
if format.lower() == "json":
output = json.dumps(labs_dict)
return output
|
[
"def",
"get_labs",
"(",
"format",
")",
":",
"labs",
"=",
"[",
"]",
"# Get the first page of data",
"wiki",
"=",
"MediaWiki",
"(",
"makeinitaly__foundation_api_url",
")",
"wiki_response",
"=",
"wiki",
".",
"call",
"(",
"{",
"'action'",
":",
"'query'",
",",
"'list'",
":",
"'categorymembers'",
",",
"'cmtitle'",
":",
"'Category:Italian_FabLabs'",
",",
"'cmlimit'",
":",
"'500'",
"}",
")",
"if",
"\"query-continue\"",
"in",
"wiki_response",
":",
"nextpage",
"=",
"wiki_response",
"[",
"\"query-continue\"",
"]",
"[",
"\"categorymembers\"",
"]",
"[",
"\"cmcontinue\"",
"]",
"urls",
"=",
"[",
"]",
"for",
"i",
"in",
"wiki_response",
"[",
"\"query\"",
"]",
"[",
"\"categorymembers\"",
"]",
":",
"urls",
".",
"append",
"(",
"i",
"[",
"\"title\"",
"]",
".",
"replace",
"(",
"\" \"",
",",
"\"_\"",
")",
")",
"# Load all the Labs in the first page",
"for",
"i",
"in",
"urls",
":",
"current_lab",
"=",
"get_single_lab",
"(",
"i",
")",
"labs",
".",
"append",
"(",
"current_lab",
")",
"# Load all the Labs from the other pages",
"while",
"\"query-continue\"",
"in",
"wiki_response",
":",
"wiki",
"=",
"MediaWiki",
"(",
"makeinitaly__foundation_api_url",
")",
"wiki_response",
"=",
"wiki",
".",
"call",
"(",
"{",
"'action'",
":",
"'query'",
",",
"'list'",
":",
"'categorymembers'",
",",
"'cmtitle'",
":",
"'Category:Hackerspace'",
",",
"'cmlimit'",
":",
"'500'",
",",
"\"cmcontinue\"",
":",
"nextpage",
"}",
")",
"urls",
"=",
"[",
"]",
"for",
"i",
"in",
"wiki_response",
"[",
"\"query\"",
"]",
"[",
"\"categorymembers\"",
"]",
":",
"urls",
".",
"append",
"(",
"i",
"[",
"\"title\"",
"]",
".",
"replace",
"(",
"\" \"",
",",
"\"_\"",
")",
")",
"# Load all the Labs",
"for",
"i",
"in",
"urls",
":",
"current_lab",
"=",
"get_single_lab",
"(",
"i",
",",
"data_format",
")",
"labs",
".",
"append",
"(",
"current_lab",
")",
"if",
"\"query-continue\"",
"in",
"wiki_response",
":",
"nextpage",
"=",
"wiki_response",
"[",
"\"query-continue\"",
"]",
"[",
"\"categorymembers\"",
"]",
"[",
"\"cmcontinue\"",
"]",
"else",
":",
"break",
"# Transform the list into a dictionary",
"labs_dict",
"=",
"{",
"}",
"for",
"j",
",",
"k",
"in",
"enumerate",
"(",
"labs",
")",
":",
"labs_dict",
"[",
"j",
"]",
"=",
"k",
".",
"__dict__",
"# Return a dictiornary / json",
"if",
"format",
".",
"lower",
"(",
")",
"==",
"\"dict\"",
"or",
"format",
".",
"lower",
"(",
")",
"==",
"\"json\"",
":",
"output",
"=",
"labs_dict",
"# Return a geojson",
"elif",
"format",
".",
"lower",
"(",
")",
"==",
"\"geojson\"",
"or",
"format",
".",
"lower",
"(",
")",
"==",
"\"geo\"",
":",
"labs_list",
"=",
"[",
"]",
"for",
"l",
"in",
"labs_dict",
":",
"single",
"=",
"labs_dict",
"[",
"l",
"]",
".",
"__dict__",
"single_lab",
"=",
"Feature",
"(",
"type",
"=",
"\"Feature\"",
",",
"geometry",
"=",
"Point",
"(",
"(",
"single",
"[",
"\"latitude\"",
"]",
",",
"single",
"[",
"\"longitude\"",
"]",
")",
")",
",",
"properties",
"=",
"single",
")",
"labs_list",
".",
"append",
"(",
"single_lab",
")",
"output",
"=",
"dumps",
"(",
"FeatureCollection",
"(",
"labs_list",
")",
")",
"# Return a Pandas DataFrame",
"elif",
"format",
".",
"lower",
"(",
")",
"==",
"\"pandas\"",
"or",
"format",
".",
"lower",
"(",
")",
"==",
"\"dataframe\"",
":",
"output",
"=",
"{",
"}",
"for",
"j",
"in",
"labs_dict",
":",
"output",
"[",
"j",
"]",
"=",
"labs_dict",
"[",
"j",
"]",
".",
"__dict__",
"# Transform the dict into a Pandas DataFrame",
"output",
"=",
"pd",
".",
"DataFrame",
".",
"from_dict",
"(",
"output",
")",
"output",
"=",
"output",
".",
"transpose",
"(",
")",
"# Return an object",
"elif",
"format",
".",
"lower",
"(",
")",
"==",
"\"object\"",
"or",
"format",
".",
"lower",
"(",
")",
"==",
"\"obj\"",
":",
"output",
"=",
"labs",
"# Default: return an object",
"else",
":",
"output",
"=",
"labs",
"# Return a proper json",
"if",
"format",
".",
"lower",
"(",
")",
"==",
"\"json\"",
":",
"output",
"=",
"json",
".",
"dumps",
"(",
"labs_dict",
")",
"return",
"output"
] |
Gets data from all labs from makeinitaly.foundation.
|
[
"Gets",
"data",
"from",
"all",
"labs",
"from",
"makeinitaly",
".",
"foundation",
"."
] |
b5838440174f10d370abb671358db9a99d7739fd
|
https://github.com/openp2pdesign/makerlabs/blob/b5838440174f10d370abb671358db9a99d7739fd/makerlabs/makeinitaly_foundation.py#L140-L225
|
239,935
|
twneale/uni
|
uni/checker.py
|
SpecChecker.path_eval
|
def path_eval(self, obj, keypath):
'''Given an object and a mongo-style dotted key path, return the
object value referenced by that key path.
'''
segs = keypath.split('.')
this = obj
for seg in segs:
if isinstance(this, dict):
try:
this = this[seg]
except KeyError:
raise self.InvalidPath()
elif isinstance(this, (list, tuple)):
if seg.isdigit():
this = this[int(seg)]
else:
try:
this = getattr(this, seg)
except AttributeError:
raise self.InvalidPath()
return this
|
python
|
def path_eval(self, obj, keypath):
'''Given an object and a mongo-style dotted key path, return the
object value referenced by that key path.
'''
segs = keypath.split('.')
this = obj
for seg in segs:
if isinstance(this, dict):
try:
this = this[seg]
except KeyError:
raise self.InvalidPath()
elif isinstance(this, (list, tuple)):
if seg.isdigit():
this = this[int(seg)]
else:
try:
this = getattr(this, seg)
except AttributeError:
raise self.InvalidPath()
return this
|
[
"def",
"path_eval",
"(",
"self",
",",
"obj",
",",
"keypath",
")",
":",
"segs",
"=",
"keypath",
".",
"split",
"(",
"'.'",
")",
"this",
"=",
"obj",
"for",
"seg",
"in",
"segs",
":",
"if",
"isinstance",
"(",
"this",
",",
"dict",
")",
":",
"try",
":",
"this",
"=",
"this",
"[",
"seg",
"]",
"except",
"KeyError",
":",
"raise",
"self",
".",
"InvalidPath",
"(",
")",
"elif",
"isinstance",
"(",
"this",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"if",
"seg",
".",
"isdigit",
"(",
")",
":",
"this",
"=",
"this",
"[",
"int",
"(",
"seg",
")",
"]",
"else",
":",
"try",
":",
"this",
"=",
"getattr",
"(",
"this",
",",
"seg",
")",
"except",
"AttributeError",
":",
"raise",
"self",
".",
"InvalidPath",
"(",
")",
"return",
"this"
] |
Given an object and a mongo-style dotted key path, return the
object value referenced by that key path.
|
[
"Given",
"an",
"object",
"and",
"a",
"mongo",
"-",
"style",
"dotted",
"key",
"path",
"return",
"the",
"object",
"value",
"referenced",
"by",
"that",
"key",
"path",
"."
] |
1d2f3ef2cb97f544e878b8a1cde37ca8420af4e5
|
https://github.com/twneale/uni/blob/1d2f3ef2cb97f544e878b8a1cde37ca8420af4e5/uni/checker.py#L43-L63
|
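A standalone walk in the spirit of path_eval above, to make the dotted-path semantics concrete: dict segments index by key, digit segments index into lists or tuples, and anything else falls back to attribute access. The walk helper is hypothetical (it omits the class's InvalidPath error handling) and the sample object is made up.

def walk(obj, keypath):
    # Simplified stand-in for SpecChecker.path_eval without the error wrapping.
    this = obj
    for seg in keypath.split("."):
        if isinstance(this, dict):
            this = this[seg]
        elif isinstance(this, (list, tuple)) and seg.isdigit():
            this = this[int(seg)]
        else:
            this = getattr(this, seg)
    return this

print(walk({"user": {"tags": ["admin", "ops"]}}, "user.tags.1"))  # -> "ops"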
239,936
|
twneale/uni
|
uni/checker.py
|
SpecChecker.check
|
def check(self, spec, data):
'''Given a mongo-style spec and some data or python object,
check whether the object complies with the spec. Fails eagerly.
'''
path_eval = self.path_eval
for keypath, specvalue in spec.items():
if keypath.startswith('$'):
optext = keypath
checkable = data
args = (optext, specvalue, checkable)
generator = self.dispatch_operator(*args)
else:
try:
checkable = path_eval(data, keypath)
except self.InvalidPath:
# The spec referenced an item or attribute that
# doesn't exist. Fail!
return False
generator = self.dispatch_literal(specvalue, checkable)
for result in generator:
if not result:
return False
return True
|
python
|
def check(self, spec, data):
'''Given a mongo-style spec and some data or python object,
check whether the object complies with the spec. Fails eagerly.
'''
path_eval = self.path_eval
for keypath, specvalue in spec.items():
if keypath.startswith('$'):
optext = keypath
checkable = data
args = (optext, specvalue, checkable)
generator = self.dispatch_operator(*args)
else:
try:
checkable = path_eval(data, keypath)
except self.InvalidPath:
# The spec referenced an item or attribute that
# doesn't exist. Fail!
return False
generator = self.dispatch_literal(specvalue, checkable)
for result in generator:
if not result:
return False
return True
|
[
"def",
"check",
"(",
"self",
",",
"spec",
",",
"data",
")",
":",
"path_eval",
"=",
"self",
".",
"path_eval",
"for",
"keypath",
",",
"specvalue",
"in",
"spec",
".",
"items",
"(",
")",
":",
"if",
"keypath",
".",
"startswith",
"(",
"'$'",
")",
":",
"optext",
"=",
"keypath",
"checkable",
"=",
"data",
"args",
"=",
"(",
"optext",
",",
"specvalue",
",",
"checkable",
")",
"generator",
"=",
"self",
".",
"dispatch_operator",
"(",
"*",
"args",
")",
"else",
":",
"try",
":",
"checkable",
"=",
"path_eval",
"(",
"data",
",",
"keypath",
")",
"except",
"self",
".",
"InvalidPath",
":",
"# The spec referenced an item or attribute that",
"# doesn't exist. Fail!",
"return",
"False",
"generator",
"=",
"self",
".",
"dispatch_literal",
"(",
"specvalue",
",",
"checkable",
")",
"for",
"result",
"in",
"generator",
":",
"if",
"not",
"result",
":",
"return",
"False",
"return",
"True"
] |
Given a mongo-style spec and some data or python object,
check whether the object complies with the spec. Fails eagerly.
|
[
"Given",
"a",
"mongo",
"-",
"style",
"spec",
"and",
"some",
"data",
"or",
"python",
"object",
"check",
"whether",
"the",
"object",
"complies",
"with",
"the",
"spec",
".",
"Fails",
"eagerly",
"."
] |
1d2f3ef2cb97f544e878b8a1cde37ca8420af4e5
|
https://github.com/twneale/uni/blob/1d2f3ef2cb97f544e878b8a1cde37ca8420af4e5/uni/checker.py#L65-L87
|
239,937
|
twneale/uni
|
uni/checker.py
|
SpecChecker.handle_literal
|
def handle_literal(self, val, checkable):
'''This one's tricky...check for equality,
then for contains.
'''
# I.e., spec: {'x': 1}, data: {'x': 1}
if val == checkable:
yield True
return
# I.e., spec: {'x': 1}, data: {'x': [1, 2, 3]}
else:
try:
yield val in checkable
return
except TypeError:
pass
yield False
|
python
|
def handle_literal(self, val, checkable):
'''This one's tricky...check for equality,
then for contains.
'''
# I.e., spec: {'x': 1}, data: {'x': 1}
if val == checkable:
yield True
return
# I.e., spec: {'x': 1}, data: {'x': [1, 2, 3]}
else:
try:
yield val in checkable
return
except TypeError:
pass
yield False
|
[
"def",
"handle_literal",
"(",
"self",
",",
"val",
",",
"checkable",
")",
":",
"# I.e., spec: {'x': 1}, data: {'x': 1}",
"if",
"val",
"==",
"checkable",
":",
"yield",
"True",
"return",
"# I.e., spec: {'x': 1}, data: {'x': [1, 2, 3]}",
"else",
":",
"try",
":",
"yield",
"val",
"in",
"checkable",
"return",
"except",
"TypeError",
":",
"pass",
"yield",
"False"
] |
This one's tricky...check for equality,
then for contains.
|
[
"This",
"one",
"s",
"tricky",
"...",
"check",
"for",
"equality",
"then",
"for",
"contains",
"."
] |
1d2f3ef2cb97f544e878b8a1cde37ca8420af4e5
|
https://github.com/twneale/uni/blob/1d2f3ef2cb97f544e878b8a1cde37ca8420af4e5/uni/checker.py#L95-L110
|
239,938
|
anjos/rrbob
|
rr/preprocessor.py
|
normalize
|
def normalize(X, norm):
'''Applies the given norm to the input data set
Parameters:
X (numpy.ndarray): A 3D numpy ndarray in which the rows represent examples
while the columns, features of the data set you want to normalize. Every
depth corresponds to data for a particular class
norm (tuple): A tuple containing two 1D numpy ndarrays corresponding to the
normalization parameters extracted with :py:func:`estimated_norm` above.
Returns:
numpy.ndarray: A 3D numpy ndarray with the same dimensions as the input
array ``X``, but with its values normalized according to the norm input.
'''
return numpy.array([(k - norm[0]) / norm[1] for k in X])
|
python
|
def normalize(X, norm):
'''Applies the given norm to the input data set
Parameters:
X (numpy.ndarray): A 3D numpy ndarray in which the rows represent examples
while the columns, features of the data set you want to normalize. Every
depth corresponds to data for a particular class
norm (tuple): A tuple containing two 1D numpy ndarrays corresponding to the
normalization parameters extracted with :py:func:`estimated_norm` above.
Returns:
numpy.ndarray: A 3D numpy ndarray with the same dimensions as the input
array ``X``, but with its values normalized according to the norm input.
'''
return numpy.array([(k - norm[0]) / norm[1] for k in X])
|
[
"def",
"normalize",
"(",
"X",
",",
"norm",
")",
":",
"return",
"numpy",
".",
"array",
"(",
"[",
"(",
"k",
"-",
"norm",
"[",
"0",
"]",
")",
"/",
"norm",
"[",
"1",
"]",
"for",
"k",
"in",
"X",
"]",
")"
] |
Applies the given norm to the input data set
Parameters:
X (numpy.ndarray): A 3D numpy ndarray in which the rows represent examples
while the columns, features of the data set you want to normalize. Every
depth corresponds to data for a particular class
norm (tuple): A tuple containing two 1D numpy ndarrays corresponding to the
normalization parameters extracted with :py:func:`estimated_norm` above.
Returns:
numpy.ndarray: A 3D numpy ndarray with the same dimensions as the input
array ``X``, but with its values normalized according to the norm input.
|
[
"Applies",
"the",
"given",
"norm",
"to",
"the",
"input",
"data",
"set"
] |
d32d35bab2aa2698d3caa923fd02afb6d67f3235
|
https://github.com/anjos/rrbob/blob/d32d35bab2aa2698d3caa923fd02afb6d67f3235/rr/preprocessor.py#L37-L58
|
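A small numpy sketch of how a (mean, std) pair is applied per class slice by normalize above. The mean/std computation here is only an assumed stand-in for the estimated_norm helper referenced in the docstring, and the array is made up (2 classes, 4 examples, 3 features).

import numpy

X = numpy.arange(24, dtype=float).reshape(2, 4, 3)
mean = X.reshape(-1, 3).mean(axis=0)  # assumed stand-in for estimated_norm's first output
std = X.reshape(-1, 3).std(axis=0)    # ...and its second output
normed = numpy.array([(k - mean) / std for k in X])  # same expression as normalize(X, (mean, std))
print(normed.mean(axis=(0, 1)))  # approximately zero for every feature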
239,939
|
krukas/Trionyx
|
trionyx/trionyx/search.py
|
auto_register_search_models
|
def auto_register_search_models():
"""Auto register all search models"""
for config in models_config.get_all_configs():
if config.disable_search_index:
continue
search.register(
config.model.objects.get_queryset(),
ModelSearchAdapter,
fields=config.search_fields,
exclude=config.search_exclude_fields,
)
|
python
|
def auto_register_search_models():
"""Auto register all search models"""
for config in models_config.get_all_configs():
if config.disable_search_index:
continue
search.register(
config.model.objects.get_queryset(),
ModelSearchAdapter,
fields=config.search_fields,
exclude=config.search_exclude_fields,
)
|
[
"def",
"auto_register_search_models",
"(",
")",
":",
"for",
"config",
"in",
"models_config",
".",
"get_all_configs",
"(",
")",
":",
"if",
"config",
".",
"disable_search_index",
":",
"continue",
"search",
".",
"register",
"(",
"config",
".",
"model",
".",
"objects",
".",
"get_queryset",
"(",
")",
",",
"ModelSearchAdapter",
",",
"fields",
"=",
"config",
".",
"search_fields",
",",
"exclude",
"=",
"config",
".",
"search_exclude_fields",
",",
")"
] |
Auto register all search models
|
[
"Auto",
"register",
"all",
"search",
"models"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/search.py#L42-L53
|
239,940
|
krukas/Trionyx
|
trionyx/trionyx/search.py
|
ModelSearchAdapter.get_title
|
def get_title(self, obj):
"""Set search entry title for object"""
search_title = self.get_model_config_value(obj, 'search_title')
if not search_title:
return super().get_title(obj)
return search_title.format(**obj.__dict__)
|
python
|
def get_title(self, obj):
"""Set search entry title for object"""
search_title = self.get_model_config_value(obj, 'search_title')
if not search_title:
return super().get_title(obj)
return search_title.format(**obj.__dict__)
|
[
"def",
"get_title",
"(",
"self",
",",
"obj",
")",
":",
"search_title",
"=",
"self",
".",
"get_model_config_value",
"(",
"obj",
",",
"'search_title'",
")",
"if",
"not",
"search_title",
":",
"return",
"super",
"(",
")",
".",
"get_title",
"(",
"obj",
")",
"return",
"search_title",
".",
"format",
"(",
"*",
"*",
"obj",
".",
"__dict__",
")"
] |
Set search entry title for object
|
[
"Set",
"search",
"entry",
"title",
"for",
"object"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/search.py#L18-L25
|
239,941
|
krukas/Trionyx
|
trionyx/trionyx/search.py
|
ModelSearchAdapter.get_description
|
def get_description(self, obj):
"""Set search entry description for object"""
search_description = self.get_model_config_value(obj, 'search_description')
if not search_description:
return super().get_description(obj)
return search_description.format(**obj.__dict__)
|
python
|
def get_description(self, obj):
"""Set search entry description for object"""
search_description = self.get_model_config_value(obj, 'search_description')
if not search_description:
return super().get_description(obj)
return search_description.format(**obj.__dict__)
|
[
"def",
"get_description",
"(",
"self",
",",
"obj",
")",
":",
"search_description",
"=",
"self",
".",
"get_model_config_value",
"(",
"obj",
",",
"'search_description'",
")",
"if",
"not",
"search_description",
":",
"return",
"super",
"(",
")",
".",
"get_description",
"(",
"obj",
")",
"return",
"search_description",
".",
"format",
"(",
"*",
"*",
"obj",
".",
"__dict__",
")"
] |
Set search entry description for object
|
[
"Set",
"search",
"entry",
"description",
"for",
"object"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/search.py#L27-L34
|
239,942
|
krukas/Trionyx
|
trionyx/trionyx/search.py
|
ModelSearchAdapter.get_model_config_value
|
def get_model_config_value(self, obj, name):
"""Get config value for given model"""
config = models_config.get_config(obj)
return getattr(config, name)
|
python
|
def get_model_config_value(self, obj, name):
"""Get config value for given model"""
config = models_config.get_config(obj)
return getattr(config, name)
|
[
"def",
"get_model_config_value",
"(",
"self",
",",
"obj",
",",
"name",
")",
":",
"config",
"=",
"models_config",
".",
"get_config",
"(",
"obj",
")",
"return",
"getattr",
"(",
"config",
",",
"name",
")"
] |
Get config value for given model
|
[
"Get",
"config",
"value",
"for",
"given",
"model"
] |
edac132cc0797190153f2e60bc7e88cb50e80da6
|
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/search.py#L36-L39
|
239,943
|
ereOn/pyslot
|
pyslot/signal.py
|
Signal.disconnect
|
def disconnect(self, callback):
"""
Disconnects a callback from this signal.
:param callback: The callback to disconnect.
:param weak: A flag that must have the same value than the one
specified during the call to `connect`.
.. warning::
If the callback is not connected at the time of call, a
:class:`ValueError` exception is thrown.
.. note::
You may call `disconnect` from a connected callback.
"""
try:
self._callbacks.remove(callback)
except ValueError:
self._callbacks.remove(ref(callback))
|
python
|
def disconnect(self, callback):
"""
Disconnects a callback from this signal.
:param callback: The callback to disconnect.
:param weak: A flag that must have the same value than the one
specified during the call to `connect`.
.. warning::
If the callback is not connected at the time of call, a
:class:`ValueError` exception is thrown.
.. note::
You may call `disconnect` from a connected callback.
"""
try:
self._callbacks.remove(callback)
except ValueError:
self._callbacks.remove(ref(callback))
|
[
"def",
"disconnect",
"(",
"self",
",",
"callback",
")",
":",
"try",
":",
"self",
".",
"_callbacks",
".",
"remove",
"(",
"callback",
")",
"except",
"ValueError",
":",
"self",
".",
"_callbacks",
".",
"remove",
"(",
"ref",
"(",
"callback",
")",
")"
] |
Disconnects a callback from this signal.
:param callback: The callback to disconnect.
:param weak: A flag that must have the same value than the one
specified during the call to `connect`.
.. warning::
If the callback is not connected at the time of call, a
:class:`ValueError` exception is thrown.
.. note::
You may call `disconnect` from a connected callback.
|
[
"Disconnects",
"a",
"callback",
"from",
"this",
"signal",
"."
] |
9201ce84449ca811afb65fde6cd46a3cb7029182
|
https://github.com/ereOn/pyslot/blob/9201ce84449ca811afb65fde6cd46a3cb7029182/pyslot/signal.py#L57-L75
|
239,944
|
Kopachris/seshet
|
seshet/bot.py
|
_add_channel_names
|
def _add_channel_names(client, e):
"""Add a new channel to self.channels and initialize its user list.
Called as event handler for RPL_NAMES events. Do not call directly.
"""
chan = IRCstr(e.channel)
names = set([IRCstr(n) for n in e.name_list])
client.channels[chan] = SeshetChannel(chan, names)
|
python
|
def _add_channel_names(client, e):
"""Add a new channel to self.channels and initialize its user list.
Called as event handler for RPL_NAMES events. Do not call directly.
"""
chan = IRCstr(e.channel)
names = set([IRCstr(n) for n in e.name_list])
client.channels[chan] = SeshetChannel(chan, names)
|
[
"def",
"_add_channel_names",
"(",
"client",
",",
"e",
")",
":",
"chan",
"=",
"IRCstr",
"(",
"e",
".",
"channel",
")",
"names",
"=",
"set",
"(",
"[",
"IRCstr",
"(",
"n",
")",
"for",
"n",
"in",
"e",
".",
"name_list",
"]",
")",
"client",
".",
"channels",
"[",
"chan",
"]",
"=",
"SeshetChannel",
"(",
"chan",
",",
"names",
")"
] |
Add a new channel to self.channels and initialize its user list.
Called as event handler for RPL_NAMES events. Do not call directly.
|
[
"Add",
"a",
"new",
"channel",
"to",
"self",
".",
"channels",
"and",
"initialize",
"its",
"user",
"list",
"."
] |
d55bae01cff56762c5467138474145a2c17d1932
|
https://github.com/Kopachris/seshet/blob/d55bae01cff56762c5467138474145a2c17d1932/seshet/bot.py#L517-L525
|
239,945
|
Kopachris/seshet
|
seshet/bot.py
|
SeshetUser.join
|
def join(self, channel):
"""Add this user to the channel's user list and add the channel to this
user's list of joined channels.
"""
if channel not in self.channels:
channel.users.add(self.nick)
self.channels.append(channel)
|
python
|
def join(self, channel):
"""Add this user to the channel's user list and add the channel to this
user's list of joined channels.
"""
if channel not in self.channels:
channel.users.add(self.nick)
self.channels.append(channel)
|
[
"def",
"join",
"(",
"self",
",",
"channel",
")",
":",
"if",
"channel",
"not",
"in",
"self",
".",
"channels",
":",
"channel",
".",
"users",
".",
"add",
"(",
"self",
".",
"nick",
")",
"self",
".",
"channels",
".",
"append",
"(",
"channel",
")"
] |
Add this user to the channel's user list and add the channel to this
user's list of joined channels.
|
[
"Add",
"this",
"user",
"to",
"the",
"channel",
"s",
"user",
"list",
"and",
"add",
"the",
"channel",
"to",
"this",
"user",
"s",
"list",
"of",
"joined",
"channels",
"."
] |
d55bae01cff56762c5467138474145a2c17d1932
|
https://github.com/Kopachris/seshet/blob/d55bae01cff56762c5467138474145a2c17d1932/seshet/bot.py#L23-L30
|
239,946
|
Kopachris/seshet
|
seshet/bot.py
|
SeshetUser.part
|
def part(self, channel):
"""Remove this user from the channel's user list and remove the channel
from this user's list of joined channels.
"""
if channel in self.channels:
channel.users.remove(self.nick)
self.channels.remove(channel)
|
python
|
def part(self, channel):
"""Remove this user from the channel's user list and remove the channel
from this user's list of joined channels.
"""
if channel in self.channels:
channel.users.remove(self.nick)
self.channels.remove(channel)
|
[
"def",
"part",
"(",
"self",
",",
"channel",
")",
":",
"if",
"channel",
"in",
"self",
".",
"channels",
":",
"channel",
".",
"users",
".",
"remove",
"(",
"self",
".",
"nick",
")",
"self",
".",
"channels",
".",
"remove",
"(",
"channel",
")"
] |
Remove this user from the channel's user list and remove the channel
from this user's list of joined channels.
|
[
"Remove",
"this",
"user",
"from",
"the",
"channel",
"s",
"user",
"list",
"and",
"remove",
"the",
"channel",
"from",
"this",
"user",
"s",
"list",
"of",
"joined",
"channels",
"."
] |
d55bae01cff56762c5467138474145a2c17d1932
|
https://github.com/Kopachris/seshet/blob/d55bae01cff56762c5467138474145a2c17d1932/seshet/bot.py#L32-L39
|
239,947
|
Kopachris/seshet
|
seshet/bot.py
|
SeshetUser.quit
|
def quit(self):
"""Remove this user from all channels and reinitialize the user's list
of joined channels.
"""
for c in self.channels:
c.users.remove(self.nick)
self.channels = []
|
python
|
def quit(self):
"""Remove this user from all channels and reinitialize the user's list
of joined channels.
"""
for c in self.channels:
c.users.remove(self.nick)
self.channels = []
|
[
"def",
"quit",
"(",
"self",
")",
":",
"for",
"c",
"in",
"self",
".",
"channels",
":",
"c",
".",
"users",
".",
"remove",
"(",
"self",
".",
"nick",
")",
"self",
".",
"channels",
"=",
"[",
"]"
] |
Remove this user from all channels and reinitialize the user's list
of joined channels.
|
[
"Remove",
"this",
"user",
"from",
"all",
"channels",
"and",
"reinitialize",
"the",
"user",
"s",
"list",
"of",
"joined",
"channels",
"."
] |
d55bae01cff56762c5467138474145a2c17d1932
|
https://github.com/Kopachris/seshet/blob/d55bae01cff56762c5467138474145a2c17d1932/seshet/bot.py#L41-L48
|
239,948
|
Kopachris/seshet
|
seshet/bot.py
|
SeshetUser.change_nick
|
def change_nick(self, nick):
"""Update this user's nick in all joined channels."""
old_nick = self.nick
self.nick = IRCstr(nick)
for c in self.channels:
c.users.remove(old_nick)
c.users.add(self.nick)
|
python
|
def change_nick(self, nick):
"""Update this user's nick in all joined channels."""
old_nick = self.nick
self.nick = IRCstr(nick)
for c in self.channels:
c.users.remove(old_nick)
c.users.add(self.nick)
|
[
"def",
"change_nick",
"(",
"self",
",",
"nick",
")",
":",
"old_nick",
"=",
"self",
".",
"nick",
"self",
".",
"nick",
"=",
"IRCstr",
"(",
"nick",
")",
"for",
"c",
"in",
"self",
".",
"channels",
":",
"c",
".",
"users",
".",
"remove",
"(",
"old_nick",
")",
"c",
".",
"users",
".",
"add",
"(",
"self",
".",
"nick",
")"
] |
Update this user's nick in all joined channels.
|
[
"Update",
"this",
"user",
"s",
"nick",
"in",
"all",
"joined",
"channels",
"."
] |
d55bae01cff56762c5467138474145a2c17d1932
|
https://github.com/Kopachris/seshet/blob/d55bae01cff56762c5467138474145a2c17d1932/seshet/bot.py#L50-L58
|
239,949
|
Kopachris/seshet
|
seshet/bot.py
|
SeshetChannel.log_message
|
def log_message(self, user, message):
"""Log a channel message.
This log acts as a sort of cache so that recent activity can be searched
by the bot and command modules without querying the database.
"""
if isinstance(user, SeshetUser):
user = user.nick
elif not isinstance(user, IRCstr):
user = IRCstr(user)
time = datetime.utcnow()
self.message_log.append((time, user, message))
while len(self.message_log) > self._log_size:
del self.message_log[0]
|
python
|
def log_message(self, user, message):
"""Log a channel message.
This log acts as a sort of cache so that recent activity can be searched
by the bot and command modules without querying the database.
"""
if isinstance(user, SeshetUser):
user = user.nick
elif not isinstance(user, IRCstr):
user = IRCstr(user)
time = datetime.utcnow()
self.message_log.append((time, user, message))
while len(self.message_log) > self._log_size:
del self.message_log[0]
|
[
"def",
"log_message",
"(",
"self",
",",
"user",
",",
"message",
")",
":",
"if",
"isinstance",
"(",
"user",
",",
"SeshetUser",
")",
":",
"user",
"=",
"user",
".",
"nick",
"elif",
"not",
"isinstance",
"(",
"user",
",",
"IRCstr",
")",
":",
"user",
"=",
"IRCstr",
"(",
"user",
")",
"time",
"=",
"datetime",
".",
"utcnow",
"(",
")",
"self",
".",
"message_log",
".",
"append",
"(",
"(",
"time",
",",
"user",
",",
"message",
")",
")",
"while",
"len",
"(",
"self",
".",
"message_log",
")",
">",
"self",
".",
"_log_size",
":",
"del",
"self",
".",
"message_log",
"[",
"0",
"]"
] |
Log a channel message.
This log acts as a sort of cache so that recent activity can be searched
by the bot and command modules without querying the database.
|
[
"Log",
"a",
"channel",
"message",
".",
"This",
"log",
"acts",
"as",
"a",
"sort",
"of",
"cache",
"so",
"that",
"recent",
"activity",
"can",
"be",
"searched",
"by",
"the",
"bot",
"and",
"command",
"modules",
"without",
"querying",
"the",
"database",
"."
] |
d55bae01cff56762c5467138474145a2c17d1932
|
https://github.com/Kopachris/seshet/blob/d55bae01cff56762c5467138474145a2c17d1932/seshet/bot.py#L77-L94
|
239,950
|
Archived-Object/ligament
|
ligament/buildcontext.py
|
Context.register_dependency
|
def register_dependency(self, data_src, data_sink):
""" registers a dependency of data_src -> data_sink
by placing appropriate entries in provides_for and depends_on
"""
pdebug("registering dependency %s -> %s" % (data_src, data_sink))
if (data_src not in self._gettask(data_sink).depends_on):
self._gettask(data_sink).depends_on.append(data_src)
if (data_sink not in self._gettask(data_src).provides_for):
self._gettask(data_src).provides_for.append(data_sink)
|
python
|
def register_dependency(self, data_src, data_sink):
""" registers a dependency of data_src -> data_sink
by placing appropriate entries in provides_for and depends_on
"""
pdebug("registering dependency %s -> %s" % (data_src, data_sink))
if (data_src not in self._gettask(data_sink).depends_on):
self._gettask(data_sink).depends_on.append(data_src)
if (data_sink not in self._gettask(data_src).provides_for):
self._gettask(data_src).provides_for.append(data_sink)
|
[
"def",
"register_dependency",
"(",
"self",
",",
"data_src",
",",
"data_sink",
")",
":",
"pdebug",
"(",
"\"registering dependency %s -> %s\"",
"%",
"(",
"data_src",
",",
"data_sink",
")",
")",
"if",
"(",
"data_src",
"not",
"in",
"self",
".",
"_gettask",
"(",
"data_sink",
")",
".",
"depends_on",
")",
":",
"self",
".",
"_gettask",
"(",
"data_sink",
")",
".",
"depends_on",
".",
"append",
"(",
"data_src",
")",
"if",
"(",
"data_sink",
"not",
"in",
"self",
".",
"_gettask",
"(",
"data_src",
")",
".",
"provides_for",
")",
":",
"self",
".",
"_gettask",
"(",
"data_src",
")",
".",
"provides_for",
".",
"append",
"(",
"data_sink",
")"
] |
registers a dependency of data_src -> data_sink
by placing appropriate entries in provides_for and depends_on
|
[
"registers",
"a",
"dependency",
"of",
"data_src",
"-",
">",
"data_sink",
"by",
"placing",
"appropriate",
"entries",
"in",
"provides_for",
"and",
"depends_on"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildcontext.py#L55-L66
|
239,951
|
Archived-Object/ligament
|
ligament/buildcontext.py
|
Context.build_task
|
def build_task(self, name):
""" Builds a task by name, resolving any dependencies on the way """
try:
self._gettask(name).value = (
self._gettask(name).task.resolve_and_build())
except TaskExecutionException as e:
perror(e.header, indent="+0")
perror(e.message, indent="+4")
self._gettask(name).value = e.payload
except Exception as e:
perror("error evaluating target '%s' %s" %
(name, type(self._gettask(name).task)))
perror(traceback.format_exc(e), indent='+4')
self._gettask(name).value = None
self._gettask(name).last_build_time = time.time()
|
python
|
def build_task(self, name):
""" Builds a task by name, resolving any dependencies on the way """
try:
self._gettask(name).value = (
self._gettask(name).task.resolve_and_build())
except TaskExecutionException as e:
perror(e.header, indent="+0")
perror(e.message, indent="+4")
self._gettask(name).value = e.payload
except Exception as e:
perror("error evaluating target '%s' %s" %
(name, type(self._gettask(name).task)))
perror(traceback.format_exc(e), indent='+4')
self._gettask(name).value = None
self._gettask(name).last_build_time = time.time()
|
[
"def",
"build_task",
"(",
"self",
",",
"name",
")",
":",
"try",
":",
"self",
".",
"_gettask",
"(",
"name",
")",
".",
"value",
"=",
"(",
"self",
".",
"_gettask",
"(",
"name",
")",
".",
"task",
".",
"resolve_and_build",
"(",
")",
")",
"except",
"TaskExecutionException",
"as",
"e",
":",
"perror",
"(",
"e",
".",
"header",
",",
"indent",
"=",
"\"+0\"",
")",
"perror",
"(",
"e",
".",
"message",
",",
"indent",
"=",
"\"+4\"",
")",
"self",
".",
"_gettask",
"(",
"name",
")",
".",
"value",
"=",
"e",
".",
"payload",
"except",
"Exception",
"as",
"e",
":",
"perror",
"(",
"\"error evaluating target '%s' %s\"",
"%",
"(",
"name",
",",
"type",
"(",
"self",
".",
"_gettask",
"(",
"name",
")",
".",
"task",
")",
")",
")",
"perror",
"(",
"traceback",
".",
"format_exc",
"(",
"e",
")",
",",
"indent",
"=",
"'+4'",
")",
"self",
".",
"_gettask",
"(",
"name",
")",
".",
"value",
"=",
"None",
"self",
".",
"_gettask",
"(",
"name",
")",
".",
"last_build_time",
"=",
"time",
".",
"time",
"(",
")"
] |
Builds a task by name, resolving any dependencies on the way
|
[
"Builds",
"a",
"task",
"by",
"name",
"resolving",
"any",
"dependencies",
"on",
"the",
"way"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildcontext.py#L68-L84
|
239,952
|
Archived-Object/ligament
|
ligament/buildcontext.py
|
Context.is_build_needed
|
def is_build_needed(self, data_sink, data_src):
""" returns true if data_src needs to be rebuilt, given that data_sink
has had a rebuild requested.
"""
return (self._gettask(data_src).last_build_time == 0 or
self._gettask(data_src).last_build_time <
self._gettask(data_sink).last_build_time)
|
python
|
def is_build_needed(self, data_sink, data_src):
""" returns true if data_src needs to be rebuilt, given that data_sink
has had a rebuild requested.
"""
return (self._gettask(data_src).last_build_time == 0 or
self._gettask(data_src).last_build_time <
self._gettask(data_sink).last_build_time)
|
[
"def",
"is_build_needed",
"(",
"self",
",",
"data_sink",
",",
"data_src",
")",
":",
"return",
"(",
"self",
".",
"_gettask",
"(",
"data_src",
")",
".",
"last_build_time",
"==",
"0",
"or",
"self",
".",
"_gettask",
"(",
"data_src",
")",
".",
"last_build_time",
"<",
"self",
".",
"_gettask",
"(",
"data_sink",
")",
".",
"last_build_time",
")"
] |
returns true if data_src needs to be rebuilt, given that data_sink
has had a rebuild requested.
|
[
"returns",
"true",
"if",
"data_src",
"needs",
"to",
"be",
"rebuilt",
"given",
"that",
"data_sink",
"has",
"had",
"a",
"rebuild",
"requested",
"."
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildcontext.py#L86-L92
|
239,953
|
Archived-Object/ligament
|
ligament/buildcontext.py
|
Context.deep_dependendants
|
def deep_dependendants(self, target):
""" Recursively finds the dependents of a given build target.
Assumes the dependency graph is noncyclic
"""
direct_dependents = self._gettask(target).provides_for
return (direct_dependents +
reduce(
lambda a, b: a + b,
[self.deep_dependendants(x) for x in direct_dependents],
[]))
|
python
|
def deep_dependendants(self, target):
""" Recursively finds the dependents of a given build target.
Assumes the dependency graph is noncyclic
"""
direct_dependents = self._gettask(target).provides_for
return (direct_dependents +
reduce(
lambda a, b: a + b,
[self.deep_dependendants(x) for x in direct_dependents],
[]))
|
[
"def",
"deep_dependendants",
"(",
"self",
",",
"target",
")",
":",
"direct_dependents",
"=",
"self",
".",
"_gettask",
"(",
"target",
")",
".",
"provides_for",
"return",
"(",
"direct_dependents",
"+",
"reduce",
"(",
"lambda",
"a",
",",
"b",
":",
"a",
"+",
"b",
",",
"[",
"self",
".",
"deep_dependendants",
"(",
"x",
")",
"for",
"x",
"in",
"direct_dependents",
"]",
",",
"[",
"]",
")",
")"
] |
Recursively finds the dependents of a given build target.
Assumes the dependency graph is noncyclic
|
[
"Recursively",
"finds",
"the",
"dependents",
"of",
"a",
"given",
"build",
"target",
".",
"Assumes",
"the",
"dependency",
"graph",
"is",
"noncyclic"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildcontext.py#L114-L124
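A worked example of the recursive flattening deep_dependendants relies on, with a plain dict standing in for the task graph (the graph is made up; functools.reduce is used because reduce is no longer a builtin on Python 3):

from functools import reduce

provides_for = {"a": ["b", "c"], "b": ["c"], "c": []}

def deep_dependants(target):
    direct = provides_for[target]
    return direct + reduce(lambda acc, xs: acc + xs,
                           [deep_dependants(x) for x in direct], [])

print(deep_dependants("a"))  # ['b', 'c', 'c'] -- duplicates are not filtered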
|
239,954
|
Archived-Object/ligament
|
ligament/buildcontext.py
|
Context.resolve_dependency_graph
|
def resolve_dependency_graph(self, target):
""" resolves the build order for interdependent build targets
Assumes no cyclic dependencies
"""
targets = self.deep_dependendants(target)
# print "deep dependants:", targets
return sorted(targets,
cmp=lambda a, b:
1 if b in self.deep_dependendants(a) else
-1 if a in self.deep_dependendants(b) else
0)
|
python
|
def resolve_dependency_graph(self, target):
""" resolves the build order for interdependent build targets
Assumes no cyclic dependencies
"""
targets = self.deep_dependendants(target)
# print "deep dependants:", targets
return sorted(targets,
cmp=lambda a, b:
1 if b in self.deep_dependendants(a) else
-1 if a in self.deep_dependendants(b) else
0)
|
[
"def",
"resolve_dependency_graph",
"(",
"self",
",",
"target",
")",
":",
"targets",
"=",
"self",
".",
"deep_dependendants",
"(",
"target",
")",
"# print \"deep dependants:\", targets",
"return",
"sorted",
"(",
"targets",
",",
"cmp",
"=",
"lambda",
"a",
",",
"b",
":",
"1",
"if",
"b",
"in",
"self",
".",
"deep_dependendants",
"(",
"a",
")",
"else",
"-",
"1",
"if",
"a",
"in",
"self",
".",
"deep_dependendants",
"(",
"b",
")",
"else",
"0",
")"
] |
resolves the build order for interdependent build targets
Assumes no cyclic dependencies
|
[
"resolves",
"the",
"build",
"order",
"for",
"interdependent",
"build",
"targets"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildcontext.py#L126-L137
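The cmp= keyword accepted by sorted() above exists only on Python 2; on Python 3 the same pairwise ordering can be expressed with functools.cmp_to_key. A sketch on the same made-up graph as the previous example:

from functools import cmp_to_key

provides_for = {"a": ["b", "c"], "b": ["c"], "c": []}  # made-up graph

def deep_dependants(t):
    out = []
    for d in provides_for[t]:
        out.append(d)
        out.extend(deep_dependants(d))
    return out

def resolve_dependency_graph(target):
    targets = deep_dependants(target)
    # cmp_to_key wraps the same comparator the Python 2 code passes as cmp=
    return sorted(targets,
                  key=cmp_to_key(lambda a, b:
                                 1 if b in deep_dependants(a) else
                                 -1 if a in deep_dependants(b) else 0))

print(resolve_dependency_graph("a"))  # ['c', 'c', 'b']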
|
239,955
|
Archived-Object/ligament
|
ligament/buildcontext.py
|
DeferredDependency.resolve
|
def resolve(self):
"""Builds all targets of this dependency and returns the result
of self.function on the resulting values
"""
values = {}
for target_name in self.target_names:
if self.context.is_build_needed(self.parent, target_name):
self.context.build_task(target_name)
if len(self.keyword_chain) == 0:
values[target_name] = self.context.tasks[target_name].value
else:
values[target_name] = reduce(
lambda task, name: getattr(task, name),
self.keyword_chain,
self.context.tasks[target_name].task)
return self.function(**values)
|
python
|
def resolve(self):
"""Builds all targets of this dependency and returns the result
of self.function on the resulting values
"""
values = {}
for target_name in self.target_names:
if self.context.is_build_needed(self.parent, target_name):
self.context.build_task(target_name)
if len(self.keyword_chain) == 0:
values[target_name] = self.context.tasks[target_name].value
else:
values[target_name] = reduce(
lambda task, name: getattr(task, name),
self.keyword_chain,
self.context.tasks[target_name].task)
return self.function(**values)
|
[
"def",
"resolve",
"(",
"self",
")",
":",
"values",
"=",
"{",
"}",
"for",
"target_name",
"in",
"self",
".",
"target_names",
":",
"if",
"self",
".",
"context",
".",
"is_build_needed",
"(",
"self",
".",
"parent",
",",
"target_name",
")",
":",
"self",
".",
"context",
".",
"build_task",
"(",
"target_name",
")",
"if",
"len",
"(",
"self",
".",
"keyword_chain",
")",
"==",
"0",
":",
"values",
"[",
"target_name",
"]",
"=",
"self",
".",
"context",
".",
"tasks",
"[",
"target_name",
"]",
".",
"value",
"else",
":",
"values",
"[",
"target_name",
"]",
"=",
"reduce",
"(",
"lambda",
"task",
",",
"name",
":",
"getattr",
"(",
"task",
",",
"name",
")",
",",
"self",
".",
"keyword_chain",
",",
"self",
".",
"context",
".",
"tasks",
"[",
"target_name",
"]",
".",
"task",
")",
"return",
"self",
".",
"function",
"(",
"*",
"*",
"values",
")"
] |
Builds all targets of this dependency and returns the result
of self.function on the resulting values
|
[
"Builds",
"all",
"targets",
"of",
"this",
"dependency",
"and",
"returns",
"the",
"result",
"of",
"self",
".",
"function",
"on",
"the",
"resulting",
"values"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildcontext.py#L188-L204
|
239,956
|
shreyaspotnis/rampage
|
rampage/widgets/KeyFrameWidgets.py
|
QKeyFrameList.updateAllKeys
|
def updateAllKeys(self):
"""Update times for all keys in the layout."""
for kf, key in zip(self.kf_list, self.sorted_key_list()):
kf.update(key, self.dct[key])
|
python
|
def updateAllKeys(self):
"""Update times for all keys in the layout."""
for kf, key in zip(self.kf_list, self.sorted_key_list()):
kf.update(key, self.dct[key])
|
[
"def",
"updateAllKeys",
"(",
"self",
")",
":",
"for",
"kf",
",",
"key",
"in",
"zip",
"(",
"self",
".",
"kf_list",
",",
"self",
".",
"sorted_key_list",
"(",
")",
")",
":",
"kf",
".",
"update",
"(",
"key",
",",
"self",
".",
"dct",
"[",
"key",
"]",
")"
] |
Update times for all keys in the layout.
|
[
"Update",
"times",
"for",
"all",
"keys",
"in",
"the",
"layout",
"."
] |
e2565aef7ee16ee06523de975e8aa41aca14e3b2
|
https://github.com/shreyaspotnis/rampage/blob/e2565aef7ee16ee06523de975e8aa41aca14e3b2/rampage/widgets/KeyFrameWidgets.py#L472-L475
|
239,957
|
unfoldingWord-dev/tx-shared-tools
|
general_tools/print_utils.py
|
print_with_header
|
def print_with_header(header, message, color, indent=0):
"""
Use one of the functions below for printing, not this one.
"""
print()
padding = ' ' * indent
print(padding + color + BOLD + header + ENDC + color + message + ENDC)
|
python
|
def print_with_header(header, message, color, indent=0):
"""
Use one of the functions below for printing, not this one.
"""
print()
padding = ' ' * indent
print(padding + color + BOLD + header + ENDC + color + message + ENDC)
|
[
"def",
"print_with_header",
"(",
"header",
",",
"message",
",",
"color",
",",
"indent",
"=",
"0",
")",
":",
"print",
"(",
")",
"padding",
"=",
"' '",
"*",
"indent",
"print",
"(",
"padding",
"+",
"color",
"+",
"BOLD",
"+",
"header",
"+",
"ENDC",
"+",
"color",
"+",
"message",
"+",
"ENDC",
")"
] |
Use one of the functions below for printing, not this one.
|
[
"Use",
"one",
"of",
"the",
"functions",
"below",
"for",
"printing",
"not",
"this",
"one",
"."
] |
6ff5cd024e1ab54c53dd1bc788bbc78e2358772e
|
https://github.com/unfoldingWord-dev/tx-shared-tools/blob/6ff5cd024e1ab54c53dd1bc788bbc78e2358772e/general_tools/print_utils.py#L22-L28
|
239,958
|
macbre/phantomas-python
|
phantomas/client.py
|
Phantomas.run
|
def run(self):
""" Perform phantomas run """
self._logger.info("running for <{url}>".format(url=self._url))
args = format_args(self._options)
self._logger.debug("command: `{cmd}` / args: {args}".
format(cmd=self._cmd, args=args))
# run the process
try:
process = Popen(
args=[self._cmd] + args,
stdin=PIPE,
stdout=PIPE,
stderr=PIPE
)
pid = process.pid
self._logger.debug("running as PID #{pid}".format(pid=pid))
except OSError as ex:
raise PhantomasRunError(
"Failed to run phantomas: {0}".format(ex), ex.errno)
# wait to complete
try:
stdout, stderr = process.communicate()
returncode = process.returncode
except Exception:
raise PhantomasRunError("Failed to complete the run")
# for Python 3.x - decode bytes to string
stdout = stdout.decode('utf8')
stderr = stderr.decode('utf8')
# check the response code
self._logger.debug("completed with return code #{returncode}".
format(returncode=returncode))
if stderr != '':
self._logger.debug("stderr: {stderr}".format(stderr=stderr))
raise PhantomasFailedError(stderr.strip(), returncode)
# try parsing the response
try:
results = json.loads(stdout)
except Exception:
raise PhantomasResponseParsingError("Unable to parse the response")
if self._options.get("runs", 0) > 1:
return Runs(self._url, results)
else:
return Results(self._url, results)
|
python
|
def run(self):
""" Perform phantomas run """
self._logger.info("running for <{url}>".format(url=self._url))
args = format_args(self._options)
self._logger.debug("command: `{cmd}` / args: {args}".
format(cmd=self._cmd, args=args))
# run the process
try:
process = Popen(
args=[self._cmd] + args,
stdin=PIPE,
stdout=PIPE,
stderr=PIPE
)
pid = process.pid
self._logger.debug("running as PID #{pid}".format(pid=pid))
except OSError as ex:
raise PhantomasRunError(
"Failed to run phantomas: {0}".format(ex), ex.errno)
# wait to complete
try:
stdout, stderr = process.communicate()
returncode = process.returncode
except Exception:
raise PhantomasRunError("Failed to complete the run")
# for Python 3.x - decode bytes to string
stdout = stdout.decode('utf8')
stderr = stderr.decode('utf8')
# check the response code
self._logger.debug("completed with return code #{returncode}".
format(returncode=returncode))
if stderr != '':
self._logger.debug("stderr: {stderr}".format(stderr=stderr))
raise PhantomasFailedError(stderr.strip(), returncode)
# try parsing the response
try:
results = json.loads(stdout)
except Exception:
raise PhantomasResponseParsingError("Unable to parse the response")
if self._options.get("runs", 0) > 1:
return Runs(self._url, results)
else:
return Results(self._url, results)
|
[
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"\"running for <{url}>\"",
".",
"format",
"(",
"url",
"=",
"self",
".",
"_url",
")",
")",
"args",
"=",
"format_args",
"(",
"self",
".",
"_options",
")",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"command: `{cmd}` / args: {args}\"",
".",
"format",
"(",
"cmd",
"=",
"self",
".",
"_cmd",
",",
"args",
"=",
"args",
")",
")",
"# run the process",
"try",
":",
"process",
"=",
"Popen",
"(",
"args",
"=",
"[",
"self",
".",
"_cmd",
"]",
"+",
"args",
",",
"stdin",
"=",
"PIPE",
",",
"stdout",
"=",
"PIPE",
",",
"stderr",
"=",
"PIPE",
")",
"pid",
"=",
"process",
".",
"pid",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"running as PID #{pid}\"",
".",
"format",
"(",
"pid",
"=",
"pid",
")",
")",
"except",
"OSError",
"as",
"ex",
":",
"raise",
"PhantomasRunError",
"(",
"\"Failed to run phantomas: {0}\"",
".",
"format",
"(",
"ex",
")",
",",
"ex",
".",
"errno",
")",
"# wait to complete",
"try",
":",
"stdout",
",",
"stderr",
"=",
"process",
".",
"communicate",
"(",
")",
"returncode",
"=",
"process",
".",
"returncode",
"except",
"Exception",
":",
"raise",
"PhantomasRunError",
"(",
"\"Failed to complete the run\"",
")",
"# for Python 3.x - decode bytes to string",
"stdout",
"=",
"stdout",
".",
"decode",
"(",
"'utf8'",
")",
"stderr",
"=",
"stderr",
".",
"decode",
"(",
"'utf8'",
")",
"# check the response code",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"completed with return code #{returncode}\"",
".",
"format",
"(",
"returncode",
"=",
"returncode",
")",
")",
"if",
"stderr",
"!=",
"''",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"stderr: {stderr}\"",
".",
"format",
"(",
"stderr",
"=",
"stderr",
")",
")",
"raise",
"PhantomasFailedError",
"(",
"stderr",
".",
"strip",
"(",
")",
",",
"returncode",
")",
"# try parsing the response",
"try",
":",
"results",
"=",
"json",
".",
"loads",
"(",
"stdout",
")",
"except",
"Exception",
":",
"raise",
"PhantomasResponseParsingError",
"(",
"\"Unable to parse the response\"",
")",
"if",
"self",
".",
"_options",
".",
"get",
"(",
"\"runs\"",
",",
"0",
")",
">",
"1",
":",
"return",
"Runs",
"(",
"self",
".",
"_url",
",",
"results",
")",
"else",
":",
"return",
"Results",
"(",
"self",
".",
"_url",
",",
"results",
")"
] |
Perform phantomas run
|
[
"Perform",
"phantomas",
"run"
] |
63b1b1bd3fc97feb460beb6ae509bfb5cccf04f5
|
https://github.com/macbre/phantomas-python/blob/63b1b1bd3fc97feb460beb6ae509bfb5cccf04f5/phantomas/client.py#L44-L96
|
239,959
|
EnTeQuAk/django-babel-underscore
|
src/django_babel_underscore/__init__.py
|
extract
|
def extract(fileobj, keywords, comment_tags, options):
"""Extracts translation messages from underscore template files.
This method does also extract django templates. If a template does not
contain any django translation tags we always fallback to underscore extraction.
This is a plugin to Babel, written according to
http://babel.pocoo.org/docs/messages/#writing-extraction-methods
:param fileobj: the file-like object the messages should be extracted
from
:param keywords: a list of keywords (i.e. function names) that should
be recognized as translation functions
:param comment_tags: a list of translator tags to search for and
include in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)``
tuples
:rtype: ``iterator``
"""
encoding = options.get('encoding', 'utf-8')
original_position = fileobj.tell()
text = fileobj.read().decode(encoding)
if django.VERSION[:2] >= (1, 9):
tokens = Lexer(text).tokenize()
else:
tokens = Lexer(text, None).tokenize()
vars = [token.token_type != TOKEN_TEXT for token in tokens]
could_be_django = any(list(vars))
if could_be_django:
fileobj.seek(original_position)
iterator = extract_django(fileobj, keywords, comment_tags, options)
for lineno, funcname, message, comments in iterator:
yield lineno, funcname, message, comments
else:
# Underscore template extraction
comments = []
fileobj.seek(original_position)
for lineno, line in enumerate(fileobj, 1):
funcname = None
stream = TokenStream.from_tuple_iter(tokenize(line, underscore.rules))
while not stream.eof:
if stream.current.type == 'gettext_begin':
stream.expect('gettext_begin')
funcname = stream.expect('func_name').value
args, kwargs = parse_arguments(stream, 'gettext_end')
strings = []
for arg, argtype in args:
if argtype == 'func_string_arg':
strings.append(force_text(arg))
else:
strings.append(None)
for arg in kwargs:
strings.append(None)
if len(strings) == 1:
strings = strings[0]
else:
strings = tuple(strings)
yield lineno, funcname, strings, []
stream.next()
|
python
|
def extract(fileobj, keywords, comment_tags, options):
"""Extracts translation messages from underscore template files.
This method does also extract django templates. If a template does not
contain any django translation tags we always fallback to underscore extraction.
This is a plugin to Babel, written according to
http://babel.pocoo.org/docs/messages/#writing-extraction-methods
:param fileobj: the file-like object the messages should be extracted
from
:param keywords: a list of keywords (i.e. function names) that should
be recognized as translation functions
:param comment_tags: a list of translator tags to search for and
include in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)``
tuples
:rtype: ``iterator``
"""
encoding = options.get('encoding', 'utf-8')
original_position = fileobj.tell()
text = fileobj.read().decode(encoding)
if django.VERSION[:2] >= (1, 9):
tokens = Lexer(text).tokenize()
else:
tokens = Lexer(text, None).tokenize()
vars = [token.token_type != TOKEN_TEXT for token in tokens]
could_be_django = any(list(vars))
if could_be_django:
fileobj.seek(original_position)
iterator = extract_django(fileobj, keywords, comment_tags, options)
for lineno, funcname, message, comments in iterator:
yield lineno, funcname, message, comments
else:
# Underscore template extraction
comments = []
fileobj.seek(original_position)
for lineno, line in enumerate(fileobj, 1):
funcname = None
stream = TokenStream.from_tuple_iter(tokenize(line, underscore.rules))
while not stream.eof:
if stream.current.type == 'gettext_begin':
stream.expect('gettext_begin')
funcname = stream.expect('func_name').value
args, kwargs = parse_arguments(stream, 'gettext_end')
strings = []
for arg, argtype in args:
if argtype == 'func_string_arg':
strings.append(force_text(arg))
else:
strings.append(None)
for arg in kwargs:
strings.append(None)
if len(strings) == 1:
strings = strings[0]
else:
strings = tuple(strings)
yield lineno, funcname, strings, []
stream.next()
|
[
"def",
"extract",
"(",
"fileobj",
",",
"keywords",
",",
"comment_tags",
",",
"options",
")",
":",
"encoding",
"=",
"options",
".",
"get",
"(",
"'encoding'",
",",
"'utf-8'",
")",
"original_position",
"=",
"fileobj",
".",
"tell",
"(",
")",
"text",
"=",
"fileobj",
".",
"read",
"(",
")",
".",
"decode",
"(",
"encoding",
")",
"if",
"django",
".",
"VERSION",
"[",
":",
"2",
"]",
">=",
"(",
"1",
",",
"9",
")",
":",
"tokens",
"=",
"Lexer",
"(",
"text",
")",
".",
"tokenize",
"(",
")",
"else",
":",
"tokens",
"=",
"Lexer",
"(",
"text",
",",
"None",
")",
".",
"tokenize",
"(",
")",
"vars",
"=",
"[",
"token",
".",
"token_type",
"!=",
"TOKEN_TEXT",
"for",
"token",
"in",
"tokens",
"]",
"could_be_django",
"=",
"any",
"(",
"list",
"(",
"vars",
")",
")",
"if",
"could_be_django",
":",
"fileobj",
".",
"seek",
"(",
"original_position",
")",
"iterator",
"=",
"extract_django",
"(",
"fileobj",
",",
"keywords",
",",
"comment_tags",
",",
"options",
")",
"for",
"lineno",
",",
"funcname",
",",
"message",
",",
"comments",
"in",
"iterator",
":",
"yield",
"lineno",
",",
"funcname",
",",
"message",
",",
"comments",
"else",
":",
"# Underscore template extraction",
"comments",
"=",
"[",
"]",
"fileobj",
".",
"seek",
"(",
"original_position",
")",
"for",
"lineno",
",",
"line",
"in",
"enumerate",
"(",
"fileobj",
",",
"1",
")",
":",
"funcname",
"=",
"None",
"stream",
"=",
"TokenStream",
".",
"from_tuple_iter",
"(",
"tokenize",
"(",
"line",
",",
"underscore",
".",
"rules",
")",
")",
"while",
"not",
"stream",
".",
"eof",
":",
"if",
"stream",
".",
"current",
".",
"type",
"==",
"'gettext_begin'",
":",
"stream",
".",
"expect",
"(",
"'gettext_begin'",
")",
"funcname",
"=",
"stream",
".",
"expect",
"(",
"'func_name'",
")",
".",
"value",
"args",
",",
"kwargs",
"=",
"parse_arguments",
"(",
"stream",
",",
"'gettext_end'",
")",
"strings",
"=",
"[",
"]",
"for",
"arg",
",",
"argtype",
"in",
"args",
":",
"if",
"argtype",
"==",
"'func_string_arg'",
":",
"strings",
".",
"append",
"(",
"force_text",
"(",
"arg",
")",
")",
"else",
":",
"strings",
".",
"append",
"(",
"None",
")",
"for",
"arg",
"in",
"kwargs",
":",
"strings",
".",
"append",
"(",
"None",
")",
"if",
"len",
"(",
"strings",
")",
"==",
"1",
":",
"strings",
"=",
"strings",
"[",
"0",
"]",
"else",
":",
"strings",
"=",
"tuple",
"(",
"strings",
")",
"yield",
"lineno",
",",
"funcname",
",",
"strings",
",",
"[",
"]",
"stream",
".",
"next",
"(",
")"
] |
Extracts translation messages from underscore template files.
This method does also extract django templates. If a template does not
contain any django translation tags we always fallback to underscore extraction.
This is a plugin to Babel, written according to
http://babel.pocoo.org/docs/messages/#writing-extraction-methods
:param fileobj: the file-like object the messages should be extracted
from
:param keywords: a list of keywords (i.e. function names) that should
be recognized as translation functions
:param comment_tags: a list of translator tags to search for and
include in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)``
tuples
:rtype: ``iterator``
|
[
"Extracts",
"translation",
"messages",
"from",
"underscore",
"template",
"files",
"."
] |
cba715691850c956a1ab97c9f2457c6a4016d877
|
https://github.com/EnTeQuAk/django-babel-underscore/blob/cba715691850c956a1ab97c9f2457c6a4016d877/src/django_babel_underscore/__init__.py#L16-L90
|
239,960
|
boatd/python-boatd
|
boatdclient/point.py
|
Point.from_radians
|
def from_radians(cls, lat_radians, long_radians):
'''
Return a new instance of Point from a pair of coordinates in radians.
'''
return cls(math.degrees(lat_radians), math.degrees(long_radians))
|
python
|
def from_radians(cls, lat_radians, long_radians):
'''
Return a new instance of Point from a pair of coordinates in radians.
'''
return cls(math.degrees(lat_radians), math.degrees(long_radians))
|
[
"def",
"from_radians",
"(",
"cls",
",",
"lat_radians",
",",
"long_radians",
")",
":",
"return",
"cls",
"(",
"math",
".",
"degrees",
"(",
"lat_radians",
")",
",",
"math",
".",
"degrees",
"(",
"long_radians",
")",
")"
] |
Return a new instance of Point from a pair of coordinates in radians.
|
[
"Return",
"a",
"new",
"instance",
"of",
"Point",
"from",
"a",
"pair",
"of",
"coordinates",
"in",
"radians",
"."
] |
404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4
|
https://github.com/boatd/python-boatd/blob/404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4/boatdclient/point.py#L17-L21
|
239,961
|
boatd/python-boatd
|
boatdclient/point.py
|
Point.distance_to
|
def distance_to(self, point):
'''
Return the distance between this point and another point in meters.
:param point: Point to measure distance to
:type point: Point
:returns: The distance to the other point
:rtype: float
'''
angle = math.acos(
sin(self.lat_radians) * sin(point.lat_radians) +
cos(self.lat_radians) * cos(point.lat_radians) *
cos(self.long_radians - point.long_radians)
)
return angle * EARTH_RADIUS
|
python
|
def distance_to(self, point):
'''
Return the distance between this point and another point in meters.
:param point: Point to measure distance to
:type point: Point
:returns: The distance to the other point
:rtype: float
'''
angle = math.acos(
sin(self.lat_radians) * sin(point.lat_radians) +
cos(self.lat_radians) * cos(point.lat_radians) *
cos(self.long_radians - point.long_radians)
)
return angle * EARTH_RADIUS
|
[
"def",
"distance_to",
"(",
"self",
",",
"point",
")",
":",
"angle",
"=",
"math",
".",
"acos",
"(",
"sin",
"(",
"self",
".",
"lat_radians",
")",
"*",
"sin",
"(",
"point",
".",
"lat_radians",
")",
"+",
"cos",
"(",
"self",
".",
"lat_radians",
")",
"*",
"cos",
"(",
"point",
".",
"lat_radians",
")",
"*",
"cos",
"(",
"self",
".",
"long_radians",
"-",
"point",
".",
"long_radians",
")",
")",
"return",
"angle",
"*",
"EARTH_RADIUS"
] |
Return the distance between this point and another point in meters.
:param point: Point to measure distance to
:type point: Point
:returns: The distance to the other point
:rtype: float
|
[
"Return",
"the",
"distance",
"between",
"this",
"point",
"and",
"another",
"point",
"in",
"meters",
"."
] |
404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4
|
https://github.com/boatd/python-boatd/blob/404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4/boatdclient/point.py#L63-L78
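A worked example of the spherical-law-of-cosines distance used by distance_to. The coordinates are illustrative, and EARTH_RADIUS is assumed to be the mean Earth radius in metres (the record does not show the constant):

import math

EARTH_RADIUS = 6371000.0  # metres (assumed)

def distance(lat1, lon1, lat2, lon2):
    p1, l1, p2, l2 = map(math.radians, (lat1, lon1, lat2, lon2))
    angle = math.acos(math.sin(p1) * math.sin(p2) +
                      math.cos(p1) * math.cos(p2) * math.cos(l1 - l2))
    return angle * EARTH_RADIUS

# London to Paris comes out at roughly 344 km with this formula
print(round(distance(51.5074, -0.1278, 48.8566, 2.3522) / 1000, 1))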
|
239,962
|
boatd/python-boatd
|
boatdclient/point.py
|
Point.bearing_to
|
def bearing_to(self, point):
'''
Return the bearing to another point.
:param point: Point to measure bearing to
:type point: Point
:returns: The bearing to the other point
:rtype: Bearing
'''
delta_long = point.long_radians - self.long_radians
y = sin(delta_long) * cos(point.lat_radians)
x = (
cos(self.lat_radians) * sin(point.lat_radians) -
sin(self.lat_radians) * cos(point.lat_radians) * cos(delta_long)
)
radians = math.atan2(y, x)
return Bearing.from_radians(radians)
|
python
|
def bearing_to(self, point):
'''
Return the bearing to another point.
:param point: Point to measure bearing to
:type point: Point
:returns: The bearing to the other point
:rtype: Bearing
'''
delta_long = point.long_radians - self.long_radians
y = sin(delta_long) * cos(point.lat_radians)
x = (
cos(self.lat_radians) * sin(point.lat_radians) -
sin(self.lat_radians) * cos(point.lat_radians) * cos(delta_long)
)
radians = math.atan2(y, x)
return Bearing.from_radians(radians)
|
[
"def",
"bearing_to",
"(",
"self",
",",
"point",
")",
":",
"delta_long",
"=",
"point",
".",
"long_radians",
"-",
"self",
".",
"long_radians",
"y",
"=",
"sin",
"(",
"delta_long",
")",
"*",
"cos",
"(",
"point",
".",
"lat_radians",
")",
"x",
"=",
"(",
"cos",
"(",
"self",
".",
"lat_radians",
")",
"*",
"sin",
"(",
"point",
".",
"lat_radians",
")",
"-",
"sin",
"(",
"self",
".",
"lat_radians",
")",
"*",
"cos",
"(",
"point",
".",
"lat_radians",
")",
"*",
"cos",
"(",
"delta_long",
")",
")",
"radians",
"=",
"math",
".",
"atan2",
"(",
"y",
",",
"x",
")",
"return",
"Bearing",
".",
"from_radians",
"(",
"radians",
")"
] |
Return the bearing to another point.
:param point: Point to measure bearing to
:type point: Point
:returns: The bearing to the other point
:rtype: Bearing
|
[
"Return",
"the",
"bearing",
"to",
"another",
"point",
"."
] |
404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4
|
https://github.com/boatd/python-boatd/blob/404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4/boatdclient/point.py#L80-L97
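A worked example of the same initial-bearing formula. The Bearing wrapper from the record is not available here, so the atan2 result is simply normalised to the 0-360 degree range; the coordinates are illustrative:

import math

def initial_bearing(lat1, lon1, lat2, lon2):
    p1, l1, p2, l2 = map(math.radians, (lat1, lon1, lat2, lon2))
    dlon = l2 - l1
    y = math.sin(dlon) * math.cos(p2)
    x = (math.cos(p1) * math.sin(p2) -
         math.sin(p1) * math.cos(p2) * math.cos(dlon))
    return (math.degrees(math.atan2(y, x)) + 360) % 360

# London towards Paris is roughly 148 degrees (south-east)
print(round(initial_bearing(51.5074, -0.1278, 48.8566, 2.3522), 1))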
|
239,963
|
boatd/python-boatd
|
boatdclient/point.py
|
Point.relative_point
|
def relative_point(self, bearing_to_point, distance):
'''
Return a waypoint at a location described relative to the current point
:param bearing_to_point: Relative bearing from the current waypoint
:type bearing_to_point: Bearing
:param distance: Distance from the current waypoint
:type distance: float
:return: The point described by the parameters
'''
bearing = math.radians(360 - bearing_to_point)
rad_distance = (distance / EARTH_RADIUS)
lat1 = (self.lat_radians)
lon1 = (self.long_radians)
lat3 = math.asin(math.sin(lat1) * math.cos(rad_distance) + math.cos(lat1) * math.sin(rad_distance) * math.cos(bearing))
lon3 = lon1 + math.atan2(math.sin(bearing) * math.sin(rad_distance) * math.cos(lat1) , math.cos(rad_distance) - math.sin(lat1) * math.sin(lat3))
return Point(math.degrees(lat3), math.degrees(lon3))
|
python
|
def relative_point(self, bearing_to_point, distance):
'''
Return a waypoint at a location described relative to the current point
:param bearing_to_point: Relative bearing from the current waypoint
:type bearing_to_point: Bearing
:param distance: Distance from the current waypoint
:type distance: float
:return: The point described by the parameters
'''
bearing = math.radians(360 - bearing_to_point)
rad_distance = (distance / EARTH_RADIUS)
lat1 = (self.lat_radians)
lon1 = (self.long_radians)
lat3 = math.asin(math.sin(lat1) * math.cos(rad_distance) + math.cos(lat1) * math.sin(rad_distance) * math.cos(bearing))
lon3 = lon1 + math.atan2(math.sin(bearing) * math.sin(rad_distance) * math.cos(lat1) , math.cos(rad_distance) - math.sin(lat1) * math.sin(lat3))
return Point(math.degrees(lat3), math.degrees(lon3))
|
[
"def",
"relative_point",
"(",
"self",
",",
"bearing_to_point",
",",
"distance",
")",
":",
"bearing",
"=",
"math",
".",
"radians",
"(",
"360",
"-",
"bearing_to_point",
")",
"rad_distance",
"=",
"(",
"distance",
"/",
"EARTH_RADIUS",
")",
"lat1",
"=",
"(",
"self",
".",
"lat_radians",
")",
"lon1",
"=",
"(",
"self",
".",
"long_radians",
")",
"lat3",
"=",
"math",
".",
"asin",
"(",
"math",
".",
"sin",
"(",
"lat1",
")",
"*",
"math",
".",
"cos",
"(",
"rad_distance",
")",
"+",
"math",
".",
"cos",
"(",
"lat1",
")",
"*",
"math",
".",
"sin",
"(",
"rad_distance",
")",
"*",
"math",
".",
"cos",
"(",
"bearing",
")",
")",
"lon3",
"=",
"lon1",
"+",
"math",
".",
"atan2",
"(",
"math",
".",
"sin",
"(",
"bearing",
")",
"*",
"math",
".",
"sin",
"(",
"rad_distance",
")",
"*",
"math",
".",
"cos",
"(",
"lat1",
")",
",",
"math",
".",
"cos",
"(",
"rad_distance",
")",
"-",
"math",
".",
"sin",
"(",
"lat1",
")",
"*",
"math",
".",
"sin",
"(",
"lat3",
")",
")",
"return",
"Point",
"(",
"math",
".",
"degrees",
"(",
"lat3",
")",
",",
"math",
".",
"degrees",
"(",
"lon3",
")",
")"
] |
Return a waypoint at a location described relative to the current point
:param bearing_to_point: Relative bearing from the current waypoint
:type bearing_to_point: Bearing
:param distance: Distance from the current waypoint
:type distance: float
:return: The point described by the parameters
|
[
"Return",
"a",
"waypoint",
"at",
"a",
"location",
"described",
"relative",
"to",
"the",
"current",
"point"
] |
404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4
|
https://github.com/boatd/python-boatd/blob/404ff0d0c389f6ed84ddbfea1c41db6569ad2ed4/boatdclient/point.py#L132-L150
|
239,964
|
rehandalal/buchner
|
buchner/project-template/PROJECTMODULE/main.py
|
create_app
|
def create_app(settings):
"""Create a new Flask application"""
app = Flask(__name__)
# Import settings from file
for name in dir(settings):
value = getattr(settings, name)
if not (name.startswith('_') or isinstance(value, ModuleType)
or isinstance(value, FunctionType)):
app.config[name] = value
# Bootstrapping
if 'INSTALLED_APPS' in app.config:
app.installed_apps = app.config.get('INSTALLED_APPS', [])
# Extensions
Funnel(app)
Mobility(app)
# Register blueprints
for app_path in app.installed_apps:
app.register_blueprint(
getattr(__import__('{0}.views'.format(app_path),
fromlist=['blueprint']),
'blueprint'))
# Register error handlers
register_error_handlers(app)
@app.context_processor
def context_processor():
return dict(config=app.config)
@app.teardown_request
def teardown_request(exception=None):
# Remove the database session if it exists
if hasattr(app, 'db_session'):
app.db_session.close()
return app
|
python
|
def create_app(settings):
"""Create a new Flask application"""
app = Flask(__name__)
# Import settings from file
for name in dir(settings):
value = getattr(settings, name)
if not (name.startswith('_') or isinstance(value, ModuleType)
or isinstance(value, FunctionType)):
app.config[name] = value
# Bootstrapping
if 'INSTALLED_APPS' in app.config:
app.installed_apps = app.config.get('INSTALLED_APPS', [])
# Extensions
Funnel(app)
Mobility(app)
# Register blueprints
for app_path in app.installed_apps:
app.register_blueprint(
getattr(__import__('{0}.views'.format(app_path),
fromlist=['blueprint']),
'blueprint'))
# Register error handlers
register_error_handlers(app)
@app.context_processor
def context_processor():
return dict(config=app.config)
@app.teardown_request
def teardown_request(exception=None):
# Remove the database session if it exists
if hasattr(app, 'db_session'):
app.db_session.close()
return app
|
[
"def",
"create_app",
"(",
"settings",
")",
":",
"app",
"=",
"Flask",
"(",
"__name__",
")",
"# Import settings from file",
"for",
"name",
"in",
"dir",
"(",
"settings",
")",
":",
"value",
"=",
"getattr",
"(",
"settings",
",",
"name",
")",
"if",
"not",
"(",
"name",
".",
"startswith",
"(",
"'_'",
")",
"or",
"isinstance",
"(",
"value",
",",
"ModuleType",
")",
"or",
"isinstance",
"(",
"value",
",",
"FunctionType",
")",
")",
":",
"app",
".",
"config",
"[",
"name",
"]",
"=",
"value",
"# Bootstrapping",
"if",
"'INSTALLED_APPS'",
"in",
"app",
".",
"config",
":",
"app",
".",
"installed_apps",
"=",
"app",
".",
"config",
".",
"get",
"(",
"'INSTALLED_APPS'",
",",
"[",
"]",
")",
"# Extensions",
"Funnel",
"(",
"app",
")",
"Mobility",
"(",
"app",
")",
"# Register blueprints",
"for",
"app_path",
"in",
"app",
".",
"installed_apps",
":",
"app",
".",
"register_blueprint",
"(",
"getattr",
"(",
"__import__",
"(",
"'{0}.views'",
".",
"format",
"(",
"app_path",
")",
",",
"fromlist",
"=",
"[",
"'blueprint'",
"]",
")",
",",
"'blueprint'",
")",
")",
"# Register error handlers",
"register_error_handlers",
"(",
"app",
")",
"@",
"app",
".",
"context_processor",
"def",
"context_processor",
"(",
")",
":",
"return",
"dict",
"(",
"config",
"=",
"app",
".",
"config",
")",
"@",
"app",
".",
"teardown_request",
"def",
"teardown_request",
"(",
"exception",
"=",
"None",
")",
":",
"# Remove the database session if it exists",
"if",
"hasattr",
"(",
"app",
",",
"'db_session'",
")",
":",
"app",
".",
"db_session",
".",
"close",
"(",
")",
"return",
"app"
] |
Create a new Flask application
|
[
"Create",
"a",
"new",
"Flask",
"application"
] |
dc22a61c493b9d4a74d76e8b42a319aa13e385f3
|
https://github.com/rehandalal/buchner/blob/dc22a61c493b9d4a74d76e8b42a319aa13e385f3/buchner/project-template/PROJECTMODULE/main.py#L10-L49
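A hypothetical usage sketch for the factory above: settings are passed in as a module object, so any importable module whose upper-case attributes hold the configuration works. The module and values below are assumptions, and the template's dependencies (Flask plus the Funnel and Mobility extensions) must be installed:

from types import ModuleType

settings = ModuleType("settings")
settings.DEBUG = True
settings.INSTALLED_APPS = []        # no blueprints in this sketch

app = create_app(settings)          # create_app as defined in the record
app.run(debug=app.config["DEBUG"])  # standard Flask development server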
|
239,965
|
vinu76jsr/pipsort
|
lib/pipsort/cli.py
|
main
|
def main(argv=None):
""" Execute the application CLI.
Arguments are taken from sys.argv by default.
"""
args = _cmdline(argv)
config.load(args.config)
results = get_package_list(args.search_term)
results = sorted(results, key=lambda a: sort_function(a[1]), reverse=True)
results_normalized = list()
last_result = None
for result in results:
if result[0] == last_result:
continue
results_normalized.append(result)
last_result = result[0]
print('\n'.join(["%s - %s" % (_[0], _[1]) for _ in results_normalized]))
return 0
|
python
|
def main(argv=None):
""" Execute the application CLI.
Arguments are taken from sys.argv by default.
"""
args = _cmdline(argv)
config.load(args.config)
results = get_package_list(args.search_term)
results = sorted(results, key=lambda a: sort_function(a[1]), reverse=True)
results_normalized = list()
last_result = None
for result in results:
if result[0] == last_result:
continue
results_normalized.append(result)
last_result = result[0]
print('\n'.join(["%s - %s" % (_[0], _[1]) for _ in results_normalized]))
return 0
|
[
"def",
"main",
"(",
"argv",
"=",
"None",
")",
":",
"args",
"=",
"_cmdline",
"(",
"argv",
")",
"config",
".",
"load",
"(",
"args",
".",
"config",
")",
"results",
"=",
"get_package_list",
"(",
"args",
".",
"search_term",
")",
"results",
"=",
"sorted",
"(",
"results",
",",
"key",
"=",
"lambda",
"a",
":",
"sort_function",
"(",
"a",
"[",
"1",
"]",
")",
",",
"reverse",
"=",
"True",
")",
"results_normalized",
"=",
"list",
"(",
")",
"last_result",
"=",
"None",
"for",
"result",
"in",
"results",
":",
"if",
"result",
"[",
"0",
"]",
"==",
"last_result",
":",
"continue",
"results_normalized",
".",
"append",
"(",
"result",
")",
"last_result",
"=",
"result",
"[",
"0",
"]",
"print",
"(",
"'\\n'",
".",
"join",
"(",
"[",
"\"%s - %s\"",
"%",
"(",
"_",
"[",
"0",
"]",
",",
"_",
"[",
"1",
"]",
")",
"for",
"_",
"in",
"results_normalized",
"]",
")",
")",
"return",
"0"
] |
Execute the application CLI.
Arguments are taken from sys.argv by default.
|
[
"Execute",
"the",
"application",
"CLI",
"."
] |
71ead1269de85ee0255741390bf1da85d81b7d16
|
https://github.com/vinu76jsr/pipsort/blob/71ead1269de85ee0255741390bf1da85d81b7d16/lib/pipsort/cli.py#L60-L78
|
239,966
|
Perfectial/django-view-acl
|
view_acl/utils.py
|
generate_permissions
|
def generate_permissions(urlpatterns, permissions={}):
"""Generate names for permissions."""
for pattern in urlpatterns:
if isinstance(pattern, urlresolvers.RegexURLPattern):
perm = generate_perm_name(pattern.callback)
if is_allowed_view(perm) and perm not in permissions:
permissions[ACL_CODE_PREFIX + perm] = ACL_NAME_PREFIX + perm
elif isinstance(pattern, urlresolvers.RegexURLResolver):
generate_permissions(pattern.url_patterns, permissions)
return permissions
|
python
|
def generate_permissions(urlpatterns, permissions={}):
"""Generate names for permissions."""
for pattern in urlpatterns:
if isinstance(pattern, urlresolvers.RegexURLPattern):
perm = generate_perm_name(pattern.callback)
if is_allowed_view(perm) and perm not in permissions:
permissions[ACL_CODE_PREFIX + perm] = ACL_NAME_PREFIX + perm
elif isinstance(pattern, urlresolvers.RegexURLResolver):
generate_permissions(pattern.url_patterns, permissions)
return permissions
|
[
"def",
"generate_permissions",
"(",
"urlpatterns",
",",
"permissions",
"=",
"{",
"}",
")",
":",
"for",
"pattern",
"in",
"urlpatterns",
":",
"if",
"isinstance",
"(",
"pattern",
",",
"urlresolvers",
".",
"RegexURLPattern",
")",
":",
"perm",
"=",
"generate_perm_name",
"(",
"pattern",
".",
"callback",
")",
"if",
"is_allowed_view",
"(",
"perm",
")",
"and",
"perm",
"not",
"in",
"permissions",
":",
"permissions",
"[",
"ACL_CODE_PREFIX",
"+",
"perm",
"]",
"=",
"ACL_NAME_PREFIX",
"+",
"perm",
"elif",
"isinstance",
"(",
"pattern",
",",
"urlresolvers",
".",
"RegexURLResolver",
")",
":",
"generate_permissions",
"(",
"pattern",
".",
"url_patterns",
",",
"permissions",
")",
"return",
"permissions"
] |
Generate names for permissions.
|
[
"Generate",
"names",
"for",
"permissions",
"."
] |
71f514f65761895bc64d5ca735997c5455c254fa
|
https://github.com/Perfectial/django-view-acl/blob/71f514f65761895bc64d5ca735997c5455c254fa/view_acl/utils.py#L16-L25
|
239,967
|
Perfectial/django-view-acl
|
view_acl/utils.py
|
is_allowed_view
|
def is_allowed_view(perm):
"""Check if permission is in acl list."""
# Check if permission is in excluded list
for view in ACL_EXCLUDED_VIEWS:
module, separator, view_name = view.partition('*')
if view and perm.startswith(module):
return False
# Check if permission is in acl list
for view in ACL_ALLOWED_VIEWS:
module, separator, view_name = view.partition('*')
if separator and not module and not view_name:
return True
elif separator and module and perm.startswith(module):
return True
elif separator and view_name and perm.endswith(view_name):
return True
elif not separator and view == perm:
return True
return False
|
python
|
def is_allowed_view(perm):
"""Check if permission is in acl list."""
# Check if permission is in excluded list
for view in ACL_EXCLUDED_VIEWS:
module, separator, view_name = view.partition('*')
if view and perm.startswith(module):
return False
# Check if permission is in acl list
for view in ACL_ALLOWED_VIEWS:
module, separator, view_name = view.partition('*')
if separator and not module and not view_name:
return True
elif separator and module and perm.startswith(module):
return True
elif separator and view_name and perm.endswith(view_name):
return True
elif not separator and view == perm:
return True
return False
|
[
"def",
"is_allowed_view",
"(",
"perm",
")",
":",
"# Check if permission is in excluded list",
"for",
"view",
"in",
"ACL_EXCLUDED_VIEWS",
":",
"module",
",",
"separator",
",",
"view_name",
"=",
"view",
".",
"partition",
"(",
"'*'",
")",
"if",
"view",
"and",
"perm",
".",
"startswith",
"(",
"module",
")",
":",
"return",
"False",
"# Check if permission is in acl list",
"for",
"view",
"in",
"ACL_ALLOWED_VIEWS",
":",
"module",
",",
"separator",
",",
"view_name",
"=",
"view",
".",
"partition",
"(",
"'*'",
")",
"if",
"separator",
"and",
"not",
"module",
"and",
"not",
"view_name",
":",
"return",
"True",
"elif",
"separator",
"and",
"module",
"and",
"perm",
".",
"startswith",
"(",
"module",
")",
":",
"return",
"True",
"elif",
"separator",
"and",
"view_name",
"and",
"perm",
".",
"endswith",
"(",
"view_name",
")",
":",
"return",
"True",
"elif",
"not",
"separator",
"and",
"view",
"==",
"perm",
":",
"return",
"True",
"return",
"False"
] |
Check if permission is in acl list.
|
[
"Check",
"if",
"permission",
"is",
"in",
"acl",
"list",
"."
] |
71f514f65761895bc64d5ca735997c5455c254fa
|
https://github.com/Perfectial/django-view-acl/blob/71f514f65761895bc64d5ca735997c5455c254fa/view_acl/utils.py#L34-L54
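A standalone sketch of the wildcard matching that is_allowed_view applies to each ACL_ALLOWED_VIEWS entry; the patterns and permission names are made up for illustration:

def matches(pattern, perm):
    module, separator, view_name = pattern.partition('*')
    if separator and not module and not view_name:             # bare '*'
        return True
    if separator and module and perm.startswith(module):       # 'shop.views.*'
        return True
    if separator and view_name and perm.endswith(view_name):   # '*.detail'
        return True
    return not separator and pattern == perm                   # exact name

print(matches('shop.views.*', 'shop.views.checkout'))    # True
print(matches('*.detail', 'blog.views.post.detail'))     # True
print(matches('blog.views.index', 'shop.views.index'))   # False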
|
239,968
|
openp2pdesign/makerlabs
|
makerlabs/utils.py
|
get_location
|
def get_location(query, format, api_key):
"""Get geographic data of a lab in a coherent way for all labs."""
# Play nice with the API...
sleep(1)
geolocator = OpenCage(api_key=api_key, timeout=10)
# Variables for storing the data
data = {"city": None,
"address_1": None,
"postal_code": None,
"country": None,
"county": None,
"state": None,
"country_code": None,
"latitude": None,
"longitude": None,
"continent": None}
road = ""
number = ""
# Default None values
location_data = {"city": None,
"road": None,
"house_number": None,
"postcode": None,
"country": None,
"county": None,
"state": None,
"ISO_3166-1_alpha-2": None,
"country_code": None,
"lat": None,
"lng": None}
# Reverse geocoding ... from coordinates to address
if format == "reverse":
# If the query (coordinates) is not empty
if query is None or len(query) < 3:
pass
else:
location = geolocator.reverse(query)
if location is not None:
location_data = location[0].raw[u'components']
location_data["lat"] = location[0].raw[u'geometry']["lat"]
location_data["lng"] = location[0].raw[u'geometry']["lng"]
# Direct geocoding ... from address to coordinates and full address
if format == "direct":
# If the query (address) is not empty
if query is None or len(query) < 3:
pass
else:
location = geolocator.geocode(query)
if location is not None:
location_data = location.raw[u'components']
location_data["lat"] = location.raw[u'geometry']["lat"]
location_data["lng"] = location.raw[u'geometry']["lng"]
# Extract the meaningful data
for component in location_data:
if component == "town" or component == "city":
data["city"] = location_data[component]
if component == "road":
road = location_data[component]
if component == "house_number":
number = location_data[component]
if component == "postcode":
data["postal_code"] = location_data[component]
if component == "country":
data["country"] = location_data[component]
if component == "county":
data["county"] = location_data[component]
if component == "state":
data["state"] = location_data[component]
if component == "ISO_3166-1_alpha-2":
data["country_code"] = location_data[component]
# The address need to be reconstructed
data["address_1"] = unicode(road) + " " + unicode(number)
data["latitude"] = location_data["lat"]
data["longitude"] = location_data["lng"]
# Format the country code to three letters
try:
country_data = transformations.cca2_to_ccn(data["country_code"])
data["country_code"] = transformations.ccn_to_cca3(country_data)
except:
data["country_code"] = None
# Get the continent
try:
country_data = transformations.cc_to_cn(data["country_code"])
data["continent"] = transformations.cn_to_ctn(country_data)
except:
data["continent"] = None
# Return the final data
return data
|
python
|
def get_location(query, format, api_key):
"""Get geographic data of a lab in a coherent way for all labs."""
# Play nice with the API...
sleep(1)
geolocator = OpenCage(api_key=api_key, timeout=10)
# Variables for storing the data
data = {"city": None,
"address_1": None,
"postal_code": None,
"country": None,
"county": None,
"state": None,
"country_code": None,
"latitude": None,
"longitude": None,
"continent": None}
road = ""
number = ""
# Default None values
location_data = {"city": None,
"road": None,
"house_number": None,
"postcode": None,
"country": None,
"county": None,
"state": None,
"ISO_3166-1_alpha-2": None,
"country_code": None,
"lat": None,
"lng": None}
# Reverse geocoding ... from coordinates to address
if format == "reverse":
# If the query (coordinates) is not empty
if query is None or len(query) < 3:
pass
else:
location = geolocator.reverse(query)
if location is not None:
location_data = location[0].raw[u'components']
location_data["lat"] = location[0].raw[u'geometry']["lat"]
location_data["lng"] = location[0].raw[u'geometry']["lng"]
# Direct geocoding ... from address to coordinates and full address
if format == "direct":
# If the query (address) is not empty
if query is None or len(query) < 3:
pass
else:
location = geolocator.geocode(query)
if location is not None:
location_data = location.raw[u'components']
location_data["lat"] = location.raw[u'geometry']["lat"]
location_data["lng"] = location.raw[u'geometry']["lng"]
# Extract the meaningful data
for component in location_data:
if component == "town" or component == "city":
data["city"] = location_data[component]
if component == "road":
road = location_data[component]
if component == "house_number":
number = location_data[component]
if component == "postcode":
data["postal_code"] = location_data[component]
if component == "country":
data["country"] = location_data[component]
if component == "county":
data["county"] = location_data[component]
if component == "state":
data["state"] = location_data[component]
if component == "ISO_3166-1_alpha-2":
data["country_code"] = location_data[component]
# The address need to be reconstructed
data["address_1"] = unicode(road) + " " + unicode(number)
data["latitude"] = location_data["lat"]
data["longitude"] = location_data["lng"]
# Format the country code to three letters
try:
country_data = transformations.cca2_to_ccn(data["country_code"])
data["country_code"] = transformations.ccn_to_cca3(country_data)
except:
data["country_code"] = None
# Get the continent
try:
country_data = transformations.cc_to_cn(data["country_code"])
data["continent"] = transformations.cn_to_ctn(country_data)
except:
data["continent"] = None
# Return the final data
return data
|
[
"def",
"get_location",
"(",
"query",
",",
"format",
",",
"api_key",
")",
":",
"# Play nice with the API...",
"sleep",
"(",
"1",
")",
"geolocator",
"=",
"OpenCage",
"(",
"api_key",
"=",
"api_key",
",",
"timeout",
"=",
"10",
")",
"# Variables for storing the data",
"data",
"=",
"{",
"\"city\"",
":",
"None",
",",
"\"address_1\"",
":",
"None",
",",
"\"postal_code\"",
":",
"None",
",",
"\"country\"",
":",
"None",
",",
"\"county\"",
":",
"None",
",",
"\"state\"",
":",
"None",
",",
"\"country_code\"",
":",
"None",
",",
"\"latitude\"",
":",
"None",
",",
"\"longitude\"",
":",
"None",
",",
"\"continent\"",
":",
"None",
"}",
"road",
"=",
"\"\"",
"number",
"=",
"\"\"",
"# Default None values",
"location_data",
"=",
"{",
"\"city\"",
":",
"None",
",",
"\"road\"",
":",
"None",
",",
"\"house_number\"",
":",
"None",
",",
"\"postcode\"",
":",
"None",
",",
"\"country\"",
":",
"None",
",",
"\"county\"",
":",
"None",
",",
"\"state\"",
":",
"None",
",",
"\"ISO_3166-1_alpha-2\"",
":",
"None",
",",
"\"country_code\"",
":",
"None",
",",
"\"lat\"",
":",
"None",
",",
"\"lng\"",
":",
"None",
"}",
"# Reverse geocoding ... from coordinates to address",
"if",
"format",
"==",
"\"reverse\"",
":",
"# If the query (coordinates) is not empty",
"if",
"query",
"is",
"None",
"or",
"len",
"(",
"query",
")",
"<",
"3",
":",
"pass",
"else",
":",
"location",
"=",
"geolocator",
".",
"reverse",
"(",
"query",
")",
"if",
"location",
"is",
"not",
"None",
":",
"location_data",
"=",
"location",
"[",
"0",
"]",
".",
"raw",
"[",
"u'components'",
"]",
"location_data",
"[",
"\"lat\"",
"]",
"=",
"location",
"[",
"0",
"]",
".",
"raw",
"[",
"u'geometry'",
"]",
"[",
"\"lat\"",
"]",
"location_data",
"[",
"\"lng\"",
"]",
"=",
"location",
"[",
"0",
"]",
".",
"raw",
"[",
"u'geometry'",
"]",
"[",
"\"lng\"",
"]",
"# Direct geocoding ... from address to coordinates and full address",
"if",
"format",
"==",
"\"direct\"",
":",
"# If the query (address) is not empty",
"if",
"query",
"is",
"None",
"or",
"len",
"(",
"query",
")",
"<",
"3",
":",
"pass",
"else",
":",
"location",
"=",
"geolocator",
".",
"geocode",
"(",
"query",
")",
"if",
"location",
"is",
"not",
"None",
":",
"location_data",
"=",
"location",
".",
"raw",
"[",
"u'components'",
"]",
"location_data",
"[",
"\"lat\"",
"]",
"=",
"location",
".",
"raw",
"[",
"u'geometry'",
"]",
"[",
"\"lat\"",
"]",
"location_data",
"[",
"\"lng\"",
"]",
"=",
"location",
".",
"raw",
"[",
"u'geometry'",
"]",
"[",
"\"lng\"",
"]",
"# Extract the meaningful data",
"for",
"component",
"in",
"location_data",
":",
"if",
"component",
"==",
"\"town\"",
"or",
"component",
"==",
"\"city\"",
":",
"data",
"[",
"\"city\"",
"]",
"=",
"location_data",
"[",
"component",
"]",
"if",
"component",
"==",
"\"road\"",
":",
"road",
"=",
"location_data",
"[",
"component",
"]",
"if",
"component",
"==",
"\"house_number\"",
":",
"number",
"=",
"location_data",
"[",
"component",
"]",
"if",
"component",
"==",
"\"postcode\"",
":",
"data",
"[",
"\"postal_code\"",
"]",
"=",
"location_data",
"[",
"component",
"]",
"if",
"component",
"==",
"\"country\"",
":",
"data",
"[",
"\"country\"",
"]",
"=",
"location_data",
"[",
"component",
"]",
"if",
"component",
"==",
"\"county\"",
":",
"data",
"[",
"\"county\"",
"]",
"=",
"location_data",
"[",
"component",
"]",
"if",
"component",
"==",
"\"state\"",
":",
"data",
"[",
"\"state\"",
"]",
"=",
"location_data",
"[",
"component",
"]",
"if",
"component",
"==",
"\"ISO_3166-1_alpha-2\"",
":",
"data",
"[",
"\"country_code\"",
"]",
"=",
"location_data",
"[",
"component",
"]",
"# The address need to be reconstructed",
"data",
"[",
"\"address_1\"",
"]",
"=",
"unicode",
"(",
"road",
")",
"+",
"\" \"",
"+",
"unicode",
"(",
"number",
")",
"data",
"[",
"\"latitude\"",
"]",
"=",
"location_data",
"[",
"\"lat\"",
"]",
"data",
"[",
"\"longitude\"",
"]",
"=",
"location_data",
"[",
"\"lng\"",
"]",
"# Format the country code to three letters",
"try",
":",
"country_data",
"=",
"transformations",
".",
"cca2_to_ccn",
"(",
"data",
"[",
"\"country_code\"",
"]",
")",
"data",
"[",
"\"country_code\"",
"]",
"=",
"transformations",
".",
"ccn_to_cca3",
"(",
"country_data",
")",
"except",
":",
"data",
"[",
"\"country_code\"",
"]",
"=",
"None",
"# Get the continent",
"try",
":",
"country_data",
"=",
"transformations",
".",
"cc_to_cn",
"(",
"data",
"[",
"\"country_code\"",
"]",
")",
"data",
"[",
"\"continent\"",
"]",
"=",
"transformations",
".",
"cn_to_ctn",
"(",
"country_data",
")",
"except",
":",
"data",
"[",
"\"continent\"",
"]",
"=",
"None",
"# Return the final data",
"return",
"data"
] |
Get geographic data of a lab in a coherent way for all labs.
|
[
"Get",
"geographic",
"data",
"of",
"a",
"lab",
"in",
"a",
"coherent",
"way",
"for",
"all",
"labs",
"."
] |
b5838440174f10d370abb671358db9a99d7739fd
|
https://github.com/openp2pdesign/makerlabs/blob/b5838440174f10d370abb671358db9a99d7739fd/makerlabs/utils.py#L18-L110
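A small porting note: the unicode() builtin used when rebuilding address_1 exists only on Python 2. On Python 3 the equivalent line would presumably be:

data["address_1"] = str(road) + " " + str(number)  # str() replaces unicode() on Python 3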
|
239,969
|
vilmibm/done
|
sql_interp/sql_interp.py
|
SQLInterp.interp
|
def interp(self, *args):
"""
This method takes a list of SQL snippets and returns a SQL statement and
a list of bind variables to be passed to the DB API's execute method.
"""
sql = ""
bind = ()
def _append_sql(sql, part):
"Handle whitespace when appending properly."
if len(sql) == 0:
return part
elif sql[-1] == ' ':
return sql + part
else:
return sql + ' ' + part
for arg in args:
if type(arg) is str:
# Strings are treated as raw SQL.
sql = _append_sql(sql, arg)
elif isinstance(arg, Esc):
# If this is an instance of Esc, ask the object
# how to represent the data given the context.
arg_sql, arg_bind = arg.to_string(sql)
sql = _append_sql(sql, arg_sql)
bind += arg_bind
else:
# Any argument given that is not a string or Esc
# is an error.
arg_sql, arg_bind = self.esc(arg).to_string(sql)
sql = _append_sql(sql, arg_sql)
bind += arg_bind
return (sql, bind)
|
python
|
def interp(self, *args):
"""
This method takes a list of SQL snippets and returns a SQL statement and
a list of bind variables to be passed to the DB API's execute method.
"""
sql = ""
bind = ()
def _append_sql(sql, part):
"Handle whitespace when appending properly."
if len(sql) == 0:
return part
elif sql[-1] == ' ':
return sql + part
else:
return sql + ' ' + part
for arg in args:
if type(arg) is str:
# Strings are treated as raw SQL.
sql = _append_sql(sql, arg)
elif isinstance(arg, Esc):
# If this is an instance of Esc, ask the object
# how to represent the data given the context.
arg_sql, arg_bind = arg.to_string(sql)
sql = _append_sql(sql, arg_sql)
bind += arg_bind
else:
# Any argument given that is not a string or Esc
# is an error.
arg_sql, arg_bind = self.esc(arg).to_string(sql)
sql = _append_sql(sql, arg_sql)
bind += arg_bind
return (sql, bind)
|
[
"def",
"interp",
"(",
"self",
",",
"*",
"args",
")",
":",
"sql",
"=",
"\"\"",
"bind",
"=",
"(",
")",
"def",
"_append_sql",
"(",
"sql",
",",
"part",
")",
":",
"\"Handle whitespace when appending properly.\"",
"if",
"len",
"(",
"sql",
")",
"==",
"0",
":",
"return",
"part",
"elif",
"sql",
"[",
"-",
"1",
"]",
"==",
"' '",
":",
"return",
"sql",
"+",
"part",
"else",
":",
"return",
"sql",
"+",
"' '",
"+",
"part",
"for",
"arg",
"in",
"args",
":",
"if",
"type",
"(",
"arg",
")",
"is",
"str",
":",
"# Strings are treated as raw SQL.",
"sql",
"=",
"_append_sql",
"(",
"sql",
",",
"arg",
")",
"elif",
"isinstance",
"(",
"arg",
",",
"Esc",
")",
":",
"# If this is an instance of Esc, ask the object",
"# how to represent the data given the context.",
"arg_sql",
",",
"arg_bind",
"=",
"arg",
".",
"to_string",
"(",
"sql",
")",
"sql",
"=",
"_append_sql",
"(",
"sql",
",",
"arg_sql",
")",
"bind",
"+=",
"arg_bind",
"else",
":",
"# Any argument given that is not a string or Esc",
"# is an error.",
"arg_sql",
",",
"arg_bind",
"=",
"self",
".",
"esc",
"(",
"arg",
")",
".",
"to_string",
"(",
"sql",
")",
"sql",
"=",
"_append_sql",
"(",
"sql",
",",
"arg_sql",
")",
"bind",
"+=",
"arg_bind",
"return",
"(",
"sql",
",",
"bind",
")"
] |
This method takes a list of SQL snippets and returns a SQL statement and
a list of bind variables to be passed to the DB API's execute method.
|
[
"This",
"method",
"takes",
"a",
"list",
"of",
"SQL",
"snippets",
"and",
"returns",
"a",
"SQL",
"statement",
"and",
"a",
"list",
"of",
"bind",
"variables",
"to",
"be",
"passed",
"to",
"the",
"DB",
"API",
"s",
"execute",
"method",
"."
] |
7e5b60d2900ceddefa49de352a19b794199b51a8
|
https://github.com/vilmibm/done/blob/7e5b60d2900ceddefa49de352a19b794199b51a8/sql_interp/sql_interp.py#L16-L50
|
239,970
|
vilmibm/done
|
sql_interp/sql_interp.py
|
SQLInterp.esc
|
def esc(self, val):
"""
Returns the given object in the appropriate wrapper class from esc_types.py.
In most cases, you will not need to call this directly. However, if you are
passing a string to the interp method that should be used as an SQL bind value
and not raw SQL, you must pass it to this method to avoid a SQL injection
vulnerability. For example:
>>> sqli = SQLInterp()
>>> first_name = 'John'
The following is wrong! This could lead to a SQL injection attack.
>>> sqli.interp("SELECT * FROM table WHERE first_name =", first_name)
('SELECT * FROM table WHERE first_name = John', ())
This is the correct way.
>>> sqli.interp("SELECT * FROM table WHERE first_name =", sqli.esc(first_name))
('SELECT * FROM table WHERE first_name = ?', ('John',))
"""
if type(val) in self.type_map:
return self.type_map[type(val)](val)
else:
return Esc(val)
|
python
|
def esc(self, val):
"""
Returns the given object in the appropriate wrapper class from esc_types.py.
In most cases, you will not need to call this directly. However, if you are
passing a string to the interp method that should be used as an SQL bind value
and not raw SQL, you must pass it to this method to avoid a SQL injection
vulnerability. For example:
>>> sqli = SQLInterp()
>>> first_name = 'John'
The following is wrong! This could lead to a SQL injection attack.
>>> sqli.interp("SELECT * FROM table WHERE first_name =", first_name)
('SELECT * FROM table WHERE first_name = John', ())
This is the correct way.
>>> sqli.interp("SELECT * FROM table WHERE first_name =", sqli.esc(first_name))
('SELECT * FROM table WHERE first_name = ?', ('John',))
"""
if type(val) in self.type_map:
return self.type_map[type(val)](val)
else:
return Esc(val)
|
[
"def",
"esc",
"(",
"self",
",",
"val",
")",
":",
"if",
"type",
"(",
"val",
")",
"in",
"self",
".",
"type_map",
":",
"return",
"self",
".",
"type_map",
"[",
"type",
"(",
"val",
")",
"]",
"(",
"val",
")",
"else",
":",
"return",
"Esc",
"(",
"val",
")"
] |
Returns the given object in the appropriate wrapper class from esc_types.py.
In most cases, you will not need to call this directly. However, if you are
passing a string to the interp method that should be used as an SQL bind value
and not raw SQL, you must pass it to this method to avoid a SQL injection
vulnerability. For example:
>>> sqli = SQLInterp()
>>> first_name = 'John'
The following is wrong! This could lead to a SQL injection attack.
>>> sqli.interp("SELECT * FROM table WHERE first_name =", first_name)
('SELECT * FROM table WHERE first_name = John', ())
This is the correct way.
>>> sqli.interp("SELECT * FROM table WHERE first_name =", sqli.esc(first_name))
('SELECT * FROM table WHERE first_name = ?', ('John',))
|
[
"Returns",
"the",
"given",
"object",
"in",
"the",
"appropriate",
"wrapper",
"class",
"from",
"esc_types",
".",
"py",
"."
] |
7e5b60d2900ceddefa49de352a19b794199b51a8
|
https://github.com/vilmibm/done/blob/7e5b60d2900ceddefa49de352a19b794199b51a8/sql_interp/sql_interp.py#L52-L77
|
239,971
|
arcus-io/puppetdb-python
|
puppetdb/utils.py
|
api_request
|
def api_request(api_base_url='http://localhost:8080/', path='', method='get',
data=None, params={}, verify=True, cert=list()):
"""
Wrapper function for requests
:param api_base_url: Base URL for requests
:param path: Path to request
:param method: HTTP method
:param data: Data for post (ignored for GETs)
:param params: Dict of key, value query params
:param verify: True/False/CA_File_Name to perform SSL Verification of CA Chain
:param cert: list of cert and key to use for client authentication
"""
method = method.lower()
headers = {
'Accept': 'application/json',
'Content-type': 'application/json',
}
methods = {
'get': requests.get,
'post': requests.post,
}
if path[0] != '/':
path = '/{0}'.format(path)
if params:
path += '?{0}'.format(urllib.urlencode(params))
url = '{0}{1}'.format(api_base_url, path)
resp = methods[method](url, data=json.dumps(data), headers=headers,
verify=verify, cert=cert)
return resp
|
python
|
def api_request(api_base_url='http://localhost:8080/', path='', method='get',
data=None, params={}, verify=True, cert=list()):
"""
Wrapper function for requests
:param api_base_url: Base URL for requests
:param path: Path to request
:param method: HTTP method
:param data: Data for post (ignored for GETs)
:param params: Dict of key, value query params
:param verify: True/False/CA_File_Name to perform SSL Verification of CA Chain
:param cert: list of cert and key to use for client authentication
"""
method = method.lower()
headers = {
'Accept': 'application/json',
'Content-type': 'application/json',
}
methods = {
'get': requests.get,
'post': requests.post,
}
if path[0] != '/':
path = '/{0}'.format(path)
if params:
path += '?{0}'.format(urllib.urlencode(params))
url = '{0}{1}'.format(api_base_url, path)
resp = methods[method](url, data=json.dumps(data), headers=headers,
verify=verify, cert=cert)
return resp
|
[
"def",
"api_request",
"(",
"api_base_url",
"=",
"'http://localhost:8080/'",
",",
"path",
"=",
"''",
",",
"method",
"=",
"'get'",
",",
"data",
"=",
"None",
",",
"params",
"=",
"{",
"}",
",",
"verify",
"=",
"True",
",",
"cert",
"=",
"list",
"(",
")",
")",
":",
"method",
"=",
"method",
".",
"lower",
"(",
")",
"headers",
"=",
"{",
"'Accept'",
":",
"'application/json'",
",",
"'Content-type'",
":",
"'application/json'",
",",
"}",
"methods",
"=",
"{",
"'get'",
":",
"requests",
".",
"get",
",",
"'post'",
":",
"requests",
".",
"post",
",",
"}",
"if",
"path",
"[",
"0",
"]",
"!=",
"'/'",
":",
"path",
"=",
"'/{0}'",
".",
"format",
"(",
"path",
")",
"if",
"params",
":",
"path",
"+=",
"'?{0}'",
".",
"format",
"(",
"urllib",
".",
"urlencode",
"(",
"params",
")",
")",
"url",
"=",
"'{0}{1}'",
".",
"format",
"(",
"api_base_url",
",",
"path",
")",
"resp",
"=",
"methods",
"[",
"method",
"]",
"(",
"url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
",",
"headers",
"=",
"headers",
",",
"verify",
"=",
"verify",
",",
"cert",
"=",
"cert",
")",
"return",
"resp"
] |
Wrapper function for requests
:param api_base_url: Base URL for requests
:param path: Path to request
:param method: HTTP method
:param data: Data for post (ignored for GETs)
:param params: Dict of key, value query params
:param verify: True/False/CA_File_Name to perform SSL Verification of CA Chain
:param cert: list of cert and key to use for client authentication
|
[
"Wrapper",
"function",
"for",
"requests"
] |
d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2
|
https://github.com/arcus-io/puppetdb-python/blob/d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2/puppetdb/utils.py#L29-L59
|
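
One observation on the record above: urllib.urlencode is the Python 2 spelling (Python 3 moved it to urllib.parse.urlencode), so the helper as written targets Python 2. A hedged usage sketch, with a made-up endpoint path and query parameters:

# Usage sketch only -- the path and params are hypothetical.
from puppetdb.utils import api_request

resp = api_request(
    path='v3/nodes',            # a leading '/' is added by the helper if missing
    method='get',
    params={'limit': 10},       # urlencoded onto the query string
)
if resp.ok:
    print(resp.json())
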
239,972
|
fr33jc/bang
|
bang/cmd_bang.py
|
set_ssh_creds
|
def set_ssh_creds(config, args):
"""
Set ssh credentials into config.
Note that these values might also be set in ~/.bangrc. If they are
specified both in ~/.bangrc and as command-line arguments to ``bang``, then
the command-line arguments win.
"""
creds = config.get(A.DEPLOYER_CREDS, {})
creds[A.creds.SSH_USER] = args.user if args.user else creds.get(
A.creds.SSH_USER,
DEFAULT_SSH_USER,
)
if args.ask_pass:
creds[A.creds.SSH_PASS] = getpass.getpass('SSH Password: ')
config[A.DEPLOYER_CREDS] = creds
|
python
|
def set_ssh_creds(config, args):
"""
Set ssh credentials into config.
Note that these values might also be set in ~/.bangrc. If they are
specified both in ~/.bangrc and as command-line arguments to ``bang``, then
the command-line arguments win.
"""
creds = config.get(A.DEPLOYER_CREDS, {})
creds[A.creds.SSH_USER] = args.user if args.user else creds.get(
A.creds.SSH_USER,
DEFAULT_SSH_USER,
)
if args.ask_pass:
creds[A.creds.SSH_PASS] = getpass.getpass('SSH Password: ')
config[A.DEPLOYER_CREDS] = creds
|
[
"def",
"set_ssh_creds",
"(",
"config",
",",
"args",
")",
":",
"creds",
"=",
"config",
".",
"get",
"(",
"A",
".",
"DEPLOYER_CREDS",
",",
"{",
"}",
")",
"creds",
"[",
"A",
".",
"creds",
".",
"SSH_USER",
"]",
"=",
"args",
".",
"user",
"if",
"args",
".",
"user",
"else",
"creds",
".",
"get",
"(",
"A",
".",
"creds",
".",
"SSH_USER",
",",
"DEFAULT_SSH_USER",
",",
")",
"if",
"args",
".",
"ask_pass",
":",
"creds",
"[",
"A",
".",
"creds",
".",
"SSH_PASS",
"]",
"=",
"getpass",
".",
"getpass",
"(",
"'SSH Password: '",
")",
"config",
"[",
"A",
".",
"DEPLOYER_CREDS",
"]",
"=",
"creds"
] |
Set ssh credentials into config.
Note that these values might also be set in ~/.bangrc. If they are
specified both in ~/.bangrc and as command-line arguments to ``bang``, then
the command-line arguments win.
|
[
"Set",
"ssh",
"credentials",
"into",
"config",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/cmd_bang.py#L39-L55
|
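
A small sketch of the precedence described in the docstring above, using an argparse-style namespace as a stand-in for bang's parsed arguments; the attribute names user and ask_pass come from the function body, everything else is illustrative.

# Sketch only -- `config` would normally be a bang Config object, and the A.*
# keys come from bang's attribute constants referenced in the record above.
from argparse import Namespace
from bang.cmd_bang import set_ssh_creds

config = {}                                      # ~/.bangrc values would already be merged in here
args = Namespace(user='deploy', ask_pass=False)  # stand-in for the parsed command line

set_ssh_creds(config, args)
# The command-line --user wins over any ~/.bangrc value; with ask_pass=True the
# helper would additionally prompt for a password via getpass.
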
239,973
|
fr33jc/bang
|
bang/cmd_bang.py
|
run_bang
|
def run_bang(alt_args=None):
"""
Runs bang with optional list of strings as command line options.
If ``alt_args`` is not specified, defaults to parsing ``sys.argv`` for
command line options.
"""
parser = get_parser()
args = parser.parse_args(alt_args)
source = args.config_specs or get_env_configs()
if not source:
return
config = Config.from_config_specs(source)
if args.playbooks:
config[A.PLAYBOOKS] = args.playbooks
if args.dump_config:
if args.dump_config in ('yaml', 'yml'):
import yaml
print yaml.safe_dump(dict(config))
elif args.dump_config == 'json':
import json
print json.dumps(config)
else:
print config
sys.exit()
set_ssh_creds(config, args)
annoy(config)
stack = Stack(config)
if args.ansible_list:
stack.show_inventory(
os.isatty(sys.stdout.fileno())
)
return
initialize_logging(config)
# TODO: config.validate()
if args.deploy:
stack.deploy()
if args.configure:
stack.configure()
config.autoinc()
|
python
|
def run_bang(alt_args=None):
"""
Runs bang with optional list of strings as command line options.
If ``alt_args`` is not specified, defaults to parsing ``sys.argv`` for
command line options.
"""
parser = get_parser()
args = parser.parse_args(alt_args)
source = args.config_specs or get_env_configs()
if not source:
return
config = Config.from_config_specs(source)
if args.playbooks:
config[A.PLAYBOOKS] = args.playbooks
if args.dump_config:
if args.dump_config in ('yaml', 'yml'):
import yaml
print yaml.safe_dump(dict(config))
elif args.dump_config == 'json':
import json
print json.dumps(config)
else:
print config
sys.exit()
set_ssh_creds(config, args)
annoy(config)
stack = Stack(config)
if args.ansible_list:
stack.show_inventory(
os.isatty(sys.stdout.fileno())
)
return
initialize_logging(config)
# TODO: config.validate()
if args.deploy:
stack.deploy()
if args.configure:
stack.configure()
config.autoinc()
|
[
"def",
"run_bang",
"(",
"alt_args",
"=",
"None",
")",
":",
"parser",
"=",
"get_parser",
"(",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"alt_args",
")",
"source",
"=",
"args",
".",
"config_specs",
"or",
"get_env_configs",
"(",
")",
"if",
"not",
"source",
":",
"return",
"config",
"=",
"Config",
".",
"from_config_specs",
"(",
"source",
")",
"if",
"args",
".",
"playbooks",
":",
"config",
"[",
"A",
".",
"PLAYBOOKS",
"]",
"=",
"args",
".",
"playbooks",
"if",
"args",
".",
"dump_config",
":",
"if",
"args",
".",
"dump_config",
"in",
"(",
"'yaml'",
",",
"'yml'",
")",
":",
"import",
"yaml",
"print",
"yaml",
".",
"safe_dump",
"(",
"dict",
"(",
"config",
")",
")",
"elif",
"args",
".",
"dump_config",
"==",
"'json'",
":",
"import",
"json",
"print",
"json",
".",
"dumps",
"(",
"config",
")",
"else",
":",
"print",
"config",
"sys",
".",
"exit",
"(",
")",
"set_ssh_creds",
"(",
"config",
",",
"args",
")",
"annoy",
"(",
"config",
")",
"stack",
"=",
"Stack",
"(",
"config",
")",
"if",
"args",
".",
"ansible_list",
":",
"stack",
".",
"show_inventory",
"(",
"os",
".",
"isatty",
"(",
"sys",
".",
"stdout",
".",
"fileno",
"(",
")",
")",
")",
"return",
"initialize_logging",
"(",
"config",
")",
"# TODO: config.validate()",
"if",
"args",
".",
"deploy",
":",
"stack",
".",
"deploy",
"(",
")",
"if",
"args",
".",
"configure",
":",
"stack",
".",
"configure",
"(",
")",
"config",
".",
"autoinc",
"(",
")"
] |
Runs bang with optional list of strings as command line options.
If ``alt_args`` is not specified, defaults to parsing ``sys.argv`` for
command line options.
|
[
"Runs",
"bang",
"with",
"optional",
"list",
"of",
"strings",
"as",
"command",
"line",
"options",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/cmd_bang.py#L209-L258
|
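
The bare print statements in run_bang are Python 2 syntax, so this record predates Python 3 support. The alt_args parameter lets callers (e.g. tests) drive the command without touching sys.argv; a heavily hedged sketch, since the actual option spellings live in get_parser(), which the record does not show:

# Sketch only -- '--deploy' and the trailing config spec are assumptions based on
# the attribute names (args.deploy, args.config_specs) used in the body above.
from bang.cmd_bang import run_bang

run_bang(['--deploy', 'mystack.yml'])   # parses these strings instead of sys.argv
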
239,974
|
listen-lavender/webcrawl
|
webcrawl/daemon.py
|
Daemon.monitor
|
def monitor(self, timeout):
"""
Monitor the process, check whether it runs out of time.
"""
def check(self, timeout):
time.sleep(timeout)
self.stop()
wather = threading.Thread(target=check)
wather.setDaemon(True)
wather.start()
|
python
|
def monitor(self, timeout):
"""
Monitor the process, check whether it runs out of time.
"""
def check(self, timeout):
time.sleep(timeout)
self.stop()
wather = threading.Thread(target=check)
wather.setDaemon(True)
wather.start()
|
[
"def",
"monitor",
"(",
"self",
",",
"timeout",
")",
":",
"def",
"check",
"(",
"self",
",",
"timeout",
")",
":",
"time",
".",
"sleep",
"(",
"timeout",
")",
"self",
".",
"stop",
"(",
")",
"wather",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"check",
")",
"wather",
".",
"setDaemon",
"(",
"True",
")",
"wather",
".",
"start",
"(",
")"
] |
Monitor the process, check whether it runs out of time.
|
[
"Monitor",
"the",
"process",
"check",
"whether",
"it",
"runs",
"out",
"of",
"time",
"."
] |
905dcfa6e6934aac764045660c0efcef28eae1e6
|
https://github.com/listen-lavender/webcrawl/blob/905dcfa6e6934aac764045660c0efcef28eae1e6/webcrawl/daemon.py#L164-L174
|
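
As written, the nested check function in the record above declares self and timeout parameters but is passed to threading.Thread with no args, so the thread would raise a TypeError when it starts (and the outer timeout is shadowed anyway); wather also looks like a typo for watcher. A corrected stand-alone sketch of the same watchdog idea:

# Stand-alone sketch of the watchdog pattern the record intends; `stop` here is
# a placeholder for Daemon.stop().
import threading
import time

def start_watchdog(timeout, stop):
    def check():
        time.sleep(timeout)   # wait out the allowed runtime
        stop()                # then ask the daemon to stop itself
    watcher = threading.Thread(target=check)
    watcher.daemon = True     # don't keep the process alive just for the watchdog
    watcher.start()
    return watcher

# Example: stop the daemon after 5 seconds.
# start_watchdog(5, daemon.stop)
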
239,975
|
DecBayComp/RWA-python
|
rwa/hdf5.py
|
hdf5_storable
|
def hdf5_storable(type_or_storable, *args, **kwargs):
'''Registers a `Storable` instance in the global service.'''
if not isinstance(type_or_storable, Storable):
type_or_storable = default_storable(type_or_storable)
hdf5_service.registerStorable(type_or_storable, *args, **kwargs)
|
python
|
def hdf5_storable(type_or_storable, *args, **kwargs):
'''Registers a `Storable` instance in the global service.'''
if not isinstance(type_or_storable, Storable):
type_or_storable = default_storable(type_or_storable)
hdf5_service.registerStorable(type_or_storable, *args, **kwargs)
|
[
"def",
"hdf5_storable",
"(",
"type_or_storable",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"isinstance",
"(",
"type_or_storable",
",",
"Storable",
")",
":",
"type_or_storable",
"=",
"default_storable",
"(",
"type_or_storable",
")",
"hdf5_service",
".",
"registerStorable",
"(",
"type_or_storable",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Registers a `Storable` instance in the global service.
|
[
"Registers",
"a",
"Storable",
"instance",
"in",
"the",
"global",
"service",
"."
] |
734a52e15a0e8c244d84d74acf3fd64721074732
|
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/hdf5.py#L266-L270
|
239,976
|
DecBayComp/RWA-python
|
rwa/hdf5.py
|
hdf5_not_storable
|
def hdf5_not_storable(_type, *args, **kwargs):
'''Tags a type as not serializable.'''
hdf5_service.registerStorable(not_storable(_type), *args, **kwargs)
|
python
|
def hdf5_not_storable(_type, *args, **kwargs):
'''Tags a type as not serializable.'''
hdf5_service.registerStorable(not_storable(_type), *args, **kwargs)
|
[
"def",
"hdf5_not_storable",
"(",
"_type",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"hdf5_service",
".",
"registerStorable",
"(",
"not_storable",
"(",
"_type",
")",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Tags a type as not serializable.
|
[
"Tags",
"a",
"type",
"as",
"not",
"serializable",
"."
] |
734a52e15a0e8c244d84d74acf3fd64721074732
|
https://github.com/DecBayComp/RWA-python/blob/734a52e15a0e8c244d84d74acf3fd64721074732/rwa/hdf5.py#L272-L274
|
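
A hedged registration sketch for the two helpers above; the custom classes are invented, and default_storable's exact behavior (building a Storable from the type's attributes) is assumed from its name.

# Sketch only -- MyResult and TemporaryHandle are made-up classes; the helpers
# come from rwa.hdf5 as in the records above.
from rwa.hdf5 import hdf5_storable, hdf5_not_storable

class MyResult(object):
    def __init__(self, values, label):
        self.values = values
        self.label = label

class TemporaryHandle(object):
    """Something that should never be serialized (e.g. wraps an open file)."""

hdf5_storable(MyResult)             # wrapped via default_storable() before registration
hdf5_not_storable(TemporaryHandle)  # explicitly tagged as not serializable
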
239,977
|
Archived-Object/ligament
|
ligament_precompiler_template/__init__.py
|
Precompiler.compile_and_process
|
def compile_and_process(self, in_path):
"""compile a file, save it to the ouput file if the inline flag true"""
out_path = self.path_mapping[in_path]
if not self.embed:
pdebug("[%s::%s] %s -> %s" % (
self.compiler_name,
self.name,
os.path.relpath(in_path),
os.path.relpath(out_path)),
groups=["build_task"],
autobreak=True)
else:
pdebug("[%s::%s] %s -> <cache>" % (
self.compiler_name,
self.name,
os.path.relpath(in_path)),
groups=["build_task"],
autobreak=True)
compiled_string = self.compile_file(in_path)
if not self.embed:
if compiled_string != "":
with open(out_path, "w") as f:
f.write(compiled_string)
return compiled_string
|
python
|
def compile_and_process(self, in_path):
"""compile a file, save it to the ouput file if the inline flag true"""
out_path = self.path_mapping[in_path]
if not self.embed:
pdebug("[%s::%s] %s -> %s" % (
self.compiler_name,
self.name,
os.path.relpath(in_path),
os.path.relpath(out_path)),
groups=["build_task"],
autobreak=True)
else:
pdebug("[%s::%s] %s -> <cache>" % (
self.compiler_name,
self.name,
os.path.relpath(in_path)),
groups=["build_task"],
autobreak=True)
compiled_string = self.compile_file(in_path)
if not self.embed:
if compiled_string != "":
with open(out_path, "w") as f:
f.write(compiled_string)
return compiled_string
|
[
"def",
"compile_and_process",
"(",
"self",
",",
"in_path",
")",
":",
"out_path",
"=",
"self",
".",
"path_mapping",
"[",
"in_path",
"]",
"if",
"not",
"self",
".",
"embed",
":",
"pdebug",
"(",
"\"[%s::%s] %s -> %s\"",
"%",
"(",
"self",
".",
"compiler_name",
",",
"self",
".",
"name",
",",
"os",
".",
"path",
".",
"relpath",
"(",
"in_path",
")",
",",
"os",
".",
"path",
".",
"relpath",
"(",
"out_path",
")",
")",
",",
"groups",
"=",
"[",
"\"build_task\"",
"]",
",",
"autobreak",
"=",
"True",
")",
"else",
":",
"pdebug",
"(",
"\"[%s::%s] %s -> <cache>\"",
"%",
"(",
"self",
".",
"compiler_name",
",",
"self",
".",
"name",
",",
"os",
".",
"path",
".",
"relpath",
"(",
"in_path",
")",
")",
",",
"groups",
"=",
"[",
"\"build_task\"",
"]",
",",
"autobreak",
"=",
"True",
")",
"compiled_string",
"=",
"self",
".",
"compile_file",
"(",
"in_path",
")",
"if",
"not",
"self",
".",
"embed",
":",
"if",
"compiled_string",
"!=",
"\"\"",
":",
"with",
"open",
"(",
"out_path",
",",
"\"w\"",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"compiled_string",
")",
"return",
"compiled_string"
] |
compile a file, save it to the ouput file if the inline flag true
|
[
"compile",
"a",
"file",
"save",
"it",
"to",
"the",
"ouput",
"file",
"if",
"the",
"inline",
"flag",
"true"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament_precompiler_template/__init__.py#L87-L114
|
239,978
|
Archived-Object/ligament
|
ligament_precompiler_template/__init__.py
|
Precompiler.collect_output
|
def collect_output(self):
""" helper function to gather the results of `compile_and_process` on
all target files
"""
if self.embed:
if self.concat:
concat_scripts = [self.compiled_scripts[path]
for path in self.build_order]
return [self.embed_template_string % '\n'.join(concat_scripts)]
else:
return [self.embed_template_string %
self.compiled_scripts[path]
for path in self.build_order]
else:
return [self.external_template_string %
os.path.join(
self.relative_directory,
os.path.relpath(
self.out_path_of(path),
self.output_directory))
for path in self.build_order
if self.compiled_scripts[path] != ""]
|
python
|
def collect_output(self):
""" helper function to gather the results of `compile_and_process` on
all target files
"""
if self.embed:
if self.concat:
concat_scripts = [self.compiled_scripts[path]
for path in self.build_order]
return [self.embed_template_string % '\n'.join(concat_scripts)]
else:
return [self.embed_template_string %
self.compiled_scripts[path]
for path in self.build_order]
else:
return [self.external_template_string %
os.path.join(
self.relative_directory,
os.path.relpath(
self.out_path_of(path),
self.output_directory))
for path in self.build_order
if self.compiled_scripts[path] != ""]
|
[
"def",
"collect_output",
"(",
"self",
")",
":",
"if",
"self",
".",
"embed",
":",
"if",
"self",
".",
"concat",
":",
"concat_scripts",
"=",
"[",
"self",
".",
"compiled_scripts",
"[",
"path",
"]",
"for",
"path",
"in",
"self",
".",
"build_order",
"]",
"return",
"[",
"self",
".",
"embed_template_string",
"%",
"'\\n'",
".",
"join",
"(",
"concat_scripts",
")",
"]",
"else",
":",
"return",
"[",
"self",
".",
"embed_template_string",
"%",
"self",
".",
"compiled_scripts",
"[",
"path",
"]",
"for",
"path",
"in",
"self",
".",
"build_order",
"]",
"else",
":",
"return",
"[",
"self",
".",
"external_template_string",
"%",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"relative_directory",
",",
"os",
".",
"path",
".",
"relpath",
"(",
"self",
".",
"out_path_of",
"(",
"path",
")",
",",
"self",
".",
"output_directory",
")",
")",
"for",
"path",
"in",
"self",
".",
"build_order",
"if",
"self",
".",
"compiled_scripts",
"[",
"path",
"]",
"!=",
"\"\"",
"]"
] |
helper function to gather the results of `compile_and_process` on
all target files
|
[
"helper",
"function",
"to",
"gather",
"the",
"results",
"of",
"compile_and_process",
"on",
"all",
"target",
"files"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament_precompiler_template/__init__.py#L116-L139
|
239,979
|
Archived-Object/ligament
|
ligament_precompiler_template/__init__.py
|
Precompiler.build
|
def build(self):
"""build the scripts and return a string"""
if not self.embed:
mkdir_recursive(self.output_directory)
# get list of script files in build order
self.build_order = remove_dups(
reduce(lambda a, b: a + glob.glob(b),
self.build_targets,
[]))
self.build_order_output = [self.out_path_of(t)
for (t) in self.build_order]
self.path_mapping = dict(zip(
self.build_order,
self.build_order_output))
self.compiled_scripts = {}
exceptions, values = partition(
lambda x: isinstance(x, Exception),
[self.compile_and_process(target)
for target in self.build_order])
self.compiled_scripts.update(dict(values))
saneExceptions, insaneExceptions = partition(
lambda x: isinstance(x, TaskExecutionException),
exceptions)
if len(insaneExceptions) != 0:
raise insaneExceptions[0]
if len(exceptions) != 0:
raise TaskExecutionException(
"Precompiler Errors (%s):" % type(self).__name__,
"\n".join([
x.header + "\n " +
x.message.replace("\n", "\n ")
for x in exceptions]))
return self.collect_output()
|
python
|
def build(self):
"""build the scripts and return a string"""
if not self.embed:
mkdir_recursive(self.output_directory)
# get list of script files in build order
self.build_order = remove_dups(
reduce(lambda a, b: a + glob.glob(b),
self.build_targets,
[]))
self.build_order_output = [self.out_path_of(t)
for (t) in self.build_order]
self.path_mapping = dict(zip(
self.build_order,
self.build_order_output))
self.compiled_scripts = {}
exceptions, values = partition(
lambda x: isinstance(x, Exception),
[self.compile_and_process(target)
for target in self.build_order])
self.compiled_scripts.update(dict(values))
saneExceptions, insaneExceptions = partition(
lambda x: isinstance(x, TaskExecutionException),
exceptions)
if len(insaneExceptions) != 0:
raise insaneExceptions[0]
if len(exceptions) != 0:
raise TaskExecutionException(
"Precompiler Errors (%s):" % type(self).__name__,
"\n".join([
x.header + "\n " +
x.message.replace("\n", "\n ")
for x in exceptions]))
return self.collect_output()
|
[
"def",
"build",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"embed",
":",
"mkdir_recursive",
"(",
"self",
".",
"output_directory",
")",
"# get list of script files in build order",
"self",
".",
"build_order",
"=",
"remove_dups",
"(",
"reduce",
"(",
"lambda",
"a",
",",
"b",
":",
"a",
"+",
"glob",
".",
"glob",
"(",
"b",
")",
",",
"self",
".",
"build_targets",
",",
"[",
"]",
")",
")",
"self",
".",
"build_order_output",
"=",
"[",
"self",
".",
"out_path_of",
"(",
"t",
")",
"for",
"(",
"t",
")",
"in",
"self",
".",
"build_order",
"]",
"self",
".",
"path_mapping",
"=",
"dict",
"(",
"zip",
"(",
"self",
".",
"build_order",
",",
"self",
".",
"build_order_output",
")",
")",
"self",
".",
"compiled_scripts",
"=",
"{",
"}",
"exceptions",
",",
"values",
"=",
"partition",
"(",
"lambda",
"x",
":",
"isinstance",
"(",
"x",
",",
"Exception",
")",
",",
"[",
"self",
".",
"compile_and_process",
"(",
"target",
")",
"for",
"target",
"in",
"self",
".",
"build_order",
"]",
")",
"self",
".",
"compiled_scripts",
".",
"update",
"(",
"dict",
"(",
"values",
")",
")",
"saneExceptions",
",",
"insaneExceptions",
"=",
"partition",
"(",
"lambda",
"x",
":",
"isinstance",
"(",
"x",
",",
"TaskExecutionException",
")",
",",
"exceptions",
")",
"if",
"len",
"(",
"insaneExceptions",
")",
"!=",
"0",
":",
"raise",
"insaneExceptions",
"[",
"0",
"]",
"if",
"len",
"(",
"exceptions",
")",
"!=",
"0",
":",
"raise",
"TaskExecutionException",
"(",
"\"Precompiler Errors (%s):\"",
"%",
"type",
"(",
"self",
")",
".",
"__name__",
",",
"\"\\n\"",
".",
"join",
"(",
"[",
"x",
".",
"header",
"+",
"\"\\n \"",
"+",
"x",
".",
"message",
".",
"replace",
"(",
"\"\\n\"",
",",
"\"\\n \"",
")",
"for",
"x",
"in",
"exceptions",
"]",
")",
")",
"return",
"self",
".",
"collect_output",
"(",
")"
] |
build the scripts and return a string
|
[
"build",
"the",
"scripts",
"and",
"return",
"a",
"string"
] |
ff3d78130522676a20dc64086dc8a27b197cc20f
|
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament_precompiler_template/__init__.py#L141-L181
|
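
build() above leans on a partition helper that the record does not show; from its two call sites it splits a sequence into (items matching the predicate, the rest). A minimal stand-in consistent with that usage, not the project's actual implementation:

# Minimal stand-in for the partition() helper assumed by build().
def partition(predicate, items):
    matching, rest = [], []
    for item in items:
        (matching if predicate(item) else rest).append(item)
    return matching, rest

# Mirrors the first call site in build():
results = ["compiled text", ValueError("boom")]
exceptions, values = partition(lambda x: isinstance(x, Exception), results)
# exceptions now holds the ValueError, values holds "compiled text".
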
239,980
|
bernoulli-metrics/bernoulli-python
|
bernoulli/client_api.py
|
get_experiments
|
def get_experiments(experiment_ids=None, user_id=None, client_id=None, bucket_if_necessary=True, user_data=None):
"""
Retrieve the experiments the user is a part of
@param experiment_ids : Either a single or list of experiment ids to retreive
@param user_id : An identifier for the user
@param client_id : Bernoulli Client ID - will default to BERNOULLI_CLIENT_ID ENV variable
@param bucket_if_necessary : Choose a variant for the user if one has not been chosen
@param user_data : Dictionary of user information to be used by the segment filters
"""
if not client_id:
client_id = os.environ.get('BERNOULLI_CLIENT_ID')
if not client_id:
raise Exception("client_id is required")
if type(experiment_ids) is dict:
experiment_ids = ','.join(experiment_ids)
params = {
'clientId': client_id,
'experimentIds': experiment_ids,
'userId': user_id,
'bucketIfNecessary': bucket_if_necessary,
}
if user_data is None:
user_data = {}
try:
response = requests.get(BASE_URL, params=dict(params.items() + user_data.items()))
except requests.ConnectionError:
raise Exception("Unable to access service")
val = response.json()
if val['status'] != 'ok':
raise Exception(val['message'])
return val['value']
|
python
|
def get_experiments(experiment_ids=None, user_id=None, client_id=None, bucket_if_necessary=True, user_data=None):
"""
Retrieve the experiments the user is a part of
@param experiment_ids : Either a single or list of experiment ids to retreive
@param user_id : An identifier for the user
@param client_id : Bernoulli Client ID - will default to BERNOULLI_CLIENT_ID ENV variable
@param bucket_if_necessary : Choose a variant for the user if one has not been chosen
@param user_data : Dictionary of user information to be used by the segment filters
"""
if not client_id:
client_id = os.environ.get('BERNOULLI_CLIENT_ID')
if not client_id:
raise Exception("client_id is required")
if type(experiment_ids) is dict:
experiment_ids = ','.join(experiment_ids)
params = {
'clientId': client_id,
'experimentIds': experiment_ids,
'userId': user_id,
'bucketIfNecessary': bucket_if_necessary,
}
if user_data is None:
user_data = {}
try:
response = requests.get(BASE_URL, params=dict(params.items() + user_data.items()))
except requests.ConnectionError:
raise Exception("Unable to access service")
val = response.json()
if val['status'] != 'ok':
raise Exception(val['message'])
return val['value']
|
[
"def",
"get_experiments",
"(",
"experiment_ids",
"=",
"None",
",",
"user_id",
"=",
"None",
",",
"client_id",
"=",
"None",
",",
"bucket_if_necessary",
"=",
"True",
",",
"user_data",
"=",
"None",
")",
":",
"if",
"not",
"client_id",
":",
"client_id",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'BERNOULLI_CLIENT_ID'",
")",
"if",
"not",
"client_id",
":",
"raise",
"Exception",
"(",
"\"client_id is required\"",
")",
"if",
"type",
"(",
"experiment_ids",
")",
"is",
"dict",
":",
"experiment_ids",
"=",
"','",
".",
"join",
"(",
"experiment_ids",
")",
"params",
"=",
"{",
"'clientId'",
":",
"client_id",
",",
"'experimentIds'",
":",
"experiment_ids",
",",
"'userId'",
":",
"user_id",
",",
"'bucketIfNecessary'",
":",
"bucket_if_necessary",
",",
"}",
"if",
"user_data",
"is",
"None",
":",
"user_data",
"=",
"{",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"BASE_URL",
",",
"params",
"=",
"dict",
"(",
"params",
".",
"items",
"(",
")",
"+",
"user_data",
".",
"items",
"(",
")",
")",
")",
"except",
"requests",
".",
"ConnectionError",
":",
"raise",
"Exception",
"(",
"\"Unable to access service\"",
")",
"val",
"=",
"response",
".",
"json",
"(",
")",
"if",
"val",
"[",
"'status'",
"]",
"!=",
"'ok'",
":",
"raise",
"Exception",
"(",
"val",
"[",
"'message'",
"]",
")",
"return",
"val",
"[",
"'value'",
"]"
] |
Retrieve the experiments the user is a part of
@param experiment_ids : Either a single or list of experiment ids to retreive
@param user_id : An identifier for the user
@param client_id : Bernoulli Client ID - will default to BERNOULLI_CLIENT_ID ENV variable
@param bucket_if_necessary : Choose a variant for the user if one has not been chosen
@param user_data : Dictionary of user information to be used by the segment filters
|
[
"Retrieve",
"the",
"experiments",
"the",
"user",
"is",
"a",
"part",
"of"
] |
572bd165ac354eb4b95ac3abdfe74b67f4fd703b
|
https://github.com/bernoulli-metrics/bernoulli-python/blob/572bd165ac354eb4b95ac3abdfe74b67f4fd703b/bernoulli/client_api.py#L6-L44
|
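
Two small observations on the record above: the experiment_ids check tests for dict where a list seems intended (','.join over a dict joins its keys), and dict(params.items() + user_data.items()) only runs on Python 2, since Python 3 items() views do not support +. A hedged usage sketch under those constraints:

# Usage sketch only -- the experiment id, user id and user data are invented.
from bernoulli.client_api import get_experiments

experiments = get_experiments(
    experiment_ids='exp-123',        # a single id; comma-joining is only attempted for dicts
    user_id='user-42',
    client_id='my-client-id',        # or set the BERNOULLI_CLIENT_ID env var instead
    user_data={'plan': 'pro'},       # consumed by the segment filters server-side
)
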
239,981
|
bernoulli-metrics/bernoulli-python
|
bernoulli/client_api.py
|
record_goal_attained
|
def record_goal_attained(experiment_id, user_id, client_id = None):
"""
Record that a variant was successful for a user
@param experiment_id : A single experiment id
@param user_id : An identifier for the user
@param client_id : Bernoulli Client ID
"""
if not client_id:
client_id = os.environ.get('BERNOULLI_CLIENT_ID')
if not client_id:
raise Exception("client_id is required")
try:
response = requests.post(BASE_URL, data={
'clientId': client_id,
'userId': user_id,
'experimentId': experiment_id,
})
except requests.ConnectionError:
raise Exception("Unable to access services")
val = response.json()
if val['status'] != 'ok':
raise Exception(val['message'])
return val['value']
|
python
|
def record_goal_attained(experiment_id, user_id, client_id = None):
"""
Record that a variant was successful for a user
@param experiment_id : A single experiment id
@param user_id : An identifier for the user
@param client_id : Bernoulli Client ID
"""
if not client_id:
client_id = os.environ.get('BERNOULLI_CLIENT_ID')
if not client_id:
raise Exception("client_id is required")
try:
response = requests.post(BASE_URL, data={
'clientId': client_id,
'userId': user_id,
'experimentId': experiment_id,
})
except requests.ConnectionError:
raise Exception("Unable to access services")
val = response.json()
if val['status'] != 'ok':
raise Exception(val['message'])
return val['value']
|
[
"def",
"record_goal_attained",
"(",
"experiment_id",
",",
"user_id",
",",
"client_id",
"=",
"None",
")",
":",
"if",
"not",
"client_id",
":",
"client_id",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'BERNOULLI_CLIENT_ID'",
")",
"if",
"not",
"client_id",
":",
"raise",
"Exception",
"(",
"\"client_id is required\"",
")",
"try",
":",
"response",
"=",
"requests",
".",
"post",
"(",
"BASE_URL",
",",
"data",
"=",
"{",
"'clientId'",
":",
"client_id",
",",
"'userId'",
":",
"user_id",
",",
"'experimentId'",
":",
"experiment_id",
",",
"}",
")",
"except",
"requests",
".",
"ConnectionError",
":",
"raise",
"Exception",
"(",
"\"Unable to access services\"",
")",
"val",
"=",
"response",
".",
"json",
"(",
")",
"if",
"val",
"[",
"'status'",
"]",
"!=",
"'ok'",
":",
"raise",
"Exception",
"(",
"val",
"[",
"'message'",
"]",
")",
"return",
"val",
"[",
"'value'",
"]"
] |
Record that a variant was successful for a user
@param experiment_id : A single experiment id
@param user_id : An identifier for the user
@param client_id : Bernoulli Client ID
|
[
"Record",
"that",
"a",
"variant",
"was",
"successful",
"for",
"a",
"user"
] |
572bd165ac354eb4b95ac3abdfe74b67f4fd703b
|
https://github.com/bernoulli-metrics/bernoulli-python/blob/572bd165ac354eb4b95ac3abdfe74b67f4fd703b/bernoulli/client_api.py#L46-L73
|
239,982
|
Fuyukai/ConfigMaster
|
configmaster/YAMLConfigFile.py
|
yaml_dump_hook
|
def yaml_dump_hook(cfg, text: bool=False):
"""
Dumps all the data into a YAML file.
"""
data = cfg.config.dump()
if not text:
yaml.dump(data, cfg.fd, Dumper=cfg.dumper, default_flow_style=False)
else:
return yaml.dump(data, Dumper=cfg.dumper, default_flow_style=False)
|
python
|
def yaml_dump_hook(cfg, text: bool=False):
"""
Dumps all the data into a YAML file.
"""
data = cfg.config.dump()
if not text:
yaml.dump(data, cfg.fd, Dumper=cfg.dumper, default_flow_style=False)
else:
return yaml.dump(data, Dumper=cfg.dumper, default_flow_style=False)
|
[
"def",
"yaml_dump_hook",
"(",
"cfg",
",",
"text",
":",
"bool",
"=",
"False",
")",
":",
"data",
"=",
"cfg",
".",
"config",
".",
"dump",
"(",
")",
"if",
"not",
"text",
":",
"yaml",
".",
"dump",
"(",
"data",
",",
"cfg",
".",
"fd",
",",
"Dumper",
"=",
"cfg",
".",
"dumper",
",",
"default_flow_style",
"=",
"False",
")",
"else",
":",
"return",
"yaml",
".",
"dump",
"(",
"data",
",",
"Dumper",
"=",
"cfg",
".",
"dumper",
",",
"default_flow_style",
"=",
"False",
")"
] |
Dumps all the data into a YAML file.
|
[
"Dumps",
"all",
"the",
"data",
"into",
"a",
"YAML",
"file",
"."
] |
8018aa415da55c84edaa8a49664f674758a14edd
|
https://github.com/Fuyukai/ConfigMaster/blob/8018aa415da55c84edaa8a49664f674758a14edd/configmaster/YAMLConfigFile.py#L94-L103
|
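
The two branches above mirror PyYAML's own convention: yaml.dump(data, stream) writes into the stream and returns None, while yaml.dump(data) with no stream returns the serialized string. A tiny illustration of that split, independent of ConfigMaster:

# Plain PyYAML illustration of the stream-vs-string split used by yaml_dump_hook.
import io
import yaml

data = {'name': 'example', 'retries': 3}

as_text = yaml.dump(data, default_flow_style=False)   # no stream -> returns a str
buf = io.StringIO()
yaml.dump(data, buf, default_flow_style=False)        # stream given -> writes, returns None
assert buf.getvalue() == as_text
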
239,983
|
bretth/djset
|
djset/commands.py
|
_create_djset
|
def _create_djset(args, cls):
""" Return a DjSecret object """
name = args.get('--name')
settings = args.get('--settings')
if name:
return cls(name=name)
elif settings:
return cls(name=settings)
else:
return cls()
|
python
|
def _create_djset(args, cls):
""" Return a DjSecret object """
name = args.get('--name')
settings = args.get('--settings')
if name:
return cls(name=name)
elif settings:
return cls(name=settings)
else:
return cls()
|
[
"def",
"_create_djset",
"(",
"args",
",",
"cls",
")",
":",
"name",
"=",
"args",
".",
"get",
"(",
"'--name'",
")",
"settings",
"=",
"args",
".",
"get",
"(",
"'--settings'",
")",
"if",
"name",
":",
"return",
"cls",
"(",
"name",
"=",
"name",
")",
"elif",
"settings",
":",
"return",
"cls",
"(",
"name",
"=",
"settings",
")",
"else",
":",
"return",
"cls",
"(",
")"
] |
Return a DjSecret object
|
[
"Return",
"a",
"DjSecret",
"object"
] |
e04cbcadc311f6edec50a718415d0004aa304034
|
https://github.com/bretth/djset/blob/e04cbcadc311f6edec50a718415d0004aa304034/djset/commands.py#L16-L25
|
239,984
|
bretth/djset
|
djset/commands.py
|
_parse_args
|
def _parse_args(args, cls):
""" Parse a docopt dictionary of arguments """
d = _create_djset(args, cls)
key_value_pair = args.get('<key>=<value>')
key = args.get('<key>')
func = None
if args.get('add') and key_value_pair:
fargs = tuple(args.get('<key>=<value>').split('='))
if fargs[1]:
func = d.set
elif args.get('remove') and key:
func = d.remove
fargs = (args.get('<key>'),)
kwargs = {'glob': args.get('--global')}
if func:
return func, fargs, kwargs
else:
return None, None, None
|
python
|
def _parse_args(args, cls):
""" Parse a docopt dictionary of arguments """
d = _create_djset(args, cls)
key_value_pair = args.get('<key>=<value>')
key = args.get('<key>')
func = None
if args.get('add') and key_value_pair:
fargs = tuple(args.get('<key>=<value>').split('='))
if fargs[1]:
func = d.set
elif args.get('remove') and key:
func = d.remove
fargs = (args.get('<key>'),)
kwargs = {'glob': args.get('--global')}
if func:
return func, fargs, kwargs
else:
return None, None, None
|
[
"def",
"_parse_args",
"(",
"args",
",",
"cls",
")",
":",
"d",
"=",
"_create_djset",
"(",
"args",
",",
"cls",
")",
"key_value_pair",
"=",
"args",
".",
"get",
"(",
"'<key>=<value>'",
")",
"key",
"=",
"args",
".",
"get",
"(",
"'<key>'",
")",
"func",
"=",
"None",
"if",
"args",
".",
"get",
"(",
"'add'",
")",
"and",
"key_value_pair",
":",
"fargs",
"=",
"tuple",
"(",
"args",
".",
"get",
"(",
"'<key>=<value>'",
")",
".",
"split",
"(",
"'='",
")",
")",
"if",
"fargs",
"[",
"1",
"]",
":",
"func",
"=",
"d",
".",
"set",
"elif",
"args",
".",
"get",
"(",
"'remove'",
")",
"and",
"key",
":",
"func",
"=",
"d",
".",
"remove",
"fargs",
"=",
"(",
"args",
".",
"get",
"(",
"'<key>'",
")",
",",
")",
"kwargs",
"=",
"{",
"'glob'",
":",
"args",
".",
"get",
"(",
"'--global'",
")",
"}",
"if",
"func",
":",
"return",
"func",
",",
"fargs",
",",
"kwargs",
"else",
":",
"return",
"None",
",",
"None",
",",
"None"
] |
Parse a docopt dictionary of arguments
|
[
"Parse",
"a",
"docopt",
"dictionary",
"of",
"arguments"
] |
e04cbcadc311f6edec50a718415d0004aa304034
|
https://github.com/bretth/djset/blob/e04cbcadc311f6edec50a718415d0004aa304034/djset/commands.py#L28-L47
|
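
A hedged sketch of the dispatch shape _parse_args produces, feeding it a docopt-style dictionary by hand; DjSecret is the class named in _create_djset's docstring, its import path is an assumption, and the dict keys match the docopt placeholders used in the body.

# Sketch only -- the args dict imitates what docopt would produce for
# something like `djset add SECRET_KEY=abc123 --global`.
from djset.commands import _parse_args
from djset.djset import DjSecret   # import path is an assumption

args = {
    'add': True,
    'remove': False,
    '<key>=<value>': 'SECRET_KEY=abc123',
    '<key>': None,
    '--name': None,
    '--settings': None,
    '--global': True,
}
func, fargs, kwargs = _parse_args(args, DjSecret)
if func:
    func(*fargs, **kwargs)   # roughly d.set('SECRET_KEY', 'abc123', glob=True)
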
239,985
|
ddorn/pyconfiglib
|
configlib/core.py
|
prompt_update_all
|
def prompt_update_all(config: 'Config'):
"""Prompt each field of the configuration to the user."""
click.echo()
click.echo('Welcome !')
click.echo('Press enter to keep the defaults or enter a new value to update the configuration.')
click.echo('Press Ctrl+C at any time to quit and save')
click.echo()
for field in config:
type_ = config.__type__(field)
hint = config.__hint__(field) + ' ({})'.format(type_.__name__)
if isinstance(type_, conftypes.SubConfigType):
continue
# we prompt the paths through prompt_file and not click
if type_ is conftypes.path:
config[field] = prompt_file(hint, default=config[field])
continue
if isinstance(type_, conftypes.ConfigType):
# config[field] is always real data, but we want to show something that is the closest
# possible to what the user needs to enter
# thus, we show what we would store in the json
default = type_.save(config[field])
else:
default = config[field]
# a too long hint is awful
if len(str(default)) > 14:
default = str(default)[:10] + '...'
# ask untill we have the right type
value = click.prompt(hint, default=default, type=type_)
# click doesnt convert() the default if nothing is entered, so it wont be valid
# however we don't care because default means that we don't have to update
if value == default:
LOGGER.debug('same value and default, skipping set. %r == %r', value, default)
continue
config[field] = value
|
python
|
def prompt_update_all(config: 'Config'):
"""Prompt each field of the configuration to the user."""
click.echo()
click.echo('Welcome !')
click.echo('Press enter to keep the defaults or enter a new value to update the configuration.')
click.echo('Press Ctrl+C at any time to quit and save')
click.echo()
for field in config:
type_ = config.__type__(field)
hint = config.__hint__(field) + ' ({})'.format(type_.__name__)
if isinstance(type_, conftypes.SubConfigType):
continue
# we prompt the paths through prompt_file and not click
if type_ is conftypes.path:
config[field] = prompt_file(hint, default=config[field])
continue
if isinstance(type_, conftypes.ConfigType):
# config[field] is always real data, but we want to show something that is the closest
# possible to what the user needs to enter
# thus, we show what we would store in the json
default = type_.save(config[field])
else:
default = config[field]
# a too long hint is awful
if len(str(default)) > 14:
default = str(default)[:10] + '...'
# ask untill we have the right type
value = click.prompt(hint, default=default, type=type_)
# click doesnt convert() the default if nothing is entered, so it wont be valid
# however we don't care because default means that we don't have to update
if value == default:
LOGGER.debug('same value and default, skipping set. %r == %r', value, default)
continue
config[field] = value
|
[
"def",
"prompt_update_all",
"(",
"config",
":",
"'Config'",
")",
":",
"click",
".",
"echo",
"(",
")",
"click",
".",
"echo",
"(",
"'Welcome !'",
")",
"click",
".",
"echo",
"(",
"'Press enter to keep the defaults or enter a new value to update the configuration.'",
")",
"click",
".",
"echo",
"(",
"'Press Ctrl+C at any time to quit and save'",
")",
"click",
".",
"echo",
"(",
")",
"for",
"field",
"in",
"config",
":",
"type_",
"=",
"config",
".",
"__type__",
"(",
"field",
")",
"hint",
"=",
"config",
".",
"__hint__",
"(",
"field",
")",
"+",
"' ({})'",
".",
"format",
"(",
"type_",
".",
"__name__",
")",
"if",
"isinstance",
"(",
"type_",
",",
"conftypes",
".",
"SubConfigType",
")",
":",
"continue",
"# we prompt the paths through prompt_file and not click",
"if",
"type_",
"is",
"conftypes",
".",
"path",
":",
"config",
"[",
"field",
"]",
"=",
"prompt_file",
"(",
"hint",
",",
"default",
"=",
"config",
"[",
"field",
"]",
")",
"continue",
"if",
"isinstance",
"(",
"type_",
",",
"conftypes",
".",
"ConfigType",
")",
":",
"# config[field] is always real data, but we want to show something that is the closest",
"# possible to what the user needs to enter",
"# thus, we show what we would store in the json",
"default",
"=",
"type_",
".",
"save",
"(",
"config",
"[",
"field",
"]",
")",
"else",
":",
"default",
"=",
"config",
"[",
"field",
"]",
"# a too long hint is awful",
"if",
"len",
"(",
"str",
"(",
"default",
")",
")",
">",
"14",
":",
"default",
"=",
"str",
"(",
"default",
")",
"[",
":",
"10",
"]",
"+",
"'...'",
"# ask untill we have the right type",
"value",
"=",
"click",
".",
"prompt",
"(",
"hint",
",",
"default",
"=",
"default",
",",
"type",
"=",
"type_",
")",
"# click doesnt convert() the default if nothing is entered, so it wont be valid",
"# however we don't care because default means that we don't have to update",
"if",
"value",
"==",
"default",
":",
"LOGGER",
".",
"debug",
"(",
"'same value and default, skipping set. %r == %r'",
",",
"value",
",",
"default",
")",
"continue",
"config",
"[",
"field",
"]",
"=",
"value"
] |
Prompt each field of the configuration to the user.
|
[
"Prompt",
"each",
"field",
"of",
"the",
"configuration",
"to",
"the",
"user",
"."
] |
3ad01d0bb9344e18719d82a5928b4d8e5fe726ac
|
https://github.com/ddorn/pyconfiglib/blob/3ad01d0bb9344e18719d82a5928b4d8e5fe726ac/configlib/core.py#L85-L128
|
239,986
|
ddorn/pyconfiglib
|
configlib/core.py
|
update_config
|
def update_config(configclass: type(Config)):
"""Command line function to update and the a config."""
# we build the real click command inside the function, because it needs to be done
# dynamically, depending on the config.
# we ignore the type errors, keeping the the defaults if needed
# everything will be updated anyway
config = configclass() # type: Config
def print_list(ctx, param, value):
# they do like that in the doc (http://click.pocoo.org/6/options/#callbacks-and-eager-options)
# so I do the same... but I don't now why.
# the only goal is to call __print_list__()
if not value or ctx.resilient_parsing:
return param
config.__print_list__()
ctx.exit()
def show_conf(ctx, param, value):
# see print_list
if not value or ctx.resilient_parsing:
return param
config.__show__()
ctx.exit()
def reset(ctx, param, value):
# see print_list
if not value or ctx.resilient_parsing:
return param
click.confirm('Are you sure you want to reset ALL fields to the defaults ? This action is not reversible.', abort=True)
# that doesn't exist
configclass.__config_path__, config_path = '', configclass.__config_path__
# So the file won't be opened and only the default will be loaded.
config = configclass()
# Thus we can save the defaults
# To the right place again
configclass.__config_path__ = config_path
config.__save__()
ctx.exit()
def clean(ctx, param, value):
# see print_list
if not value or ctx.resilient_parsing:
return param
config.__save__()
click.echo('Cleaned !')
ctx.exit()
@click.command(context_settings={'ignore_unknown_options': True})
@click.option('-c', '--clean', is_eager=True, is_flag=True, expose_value=False, callback=clean,
help='Clean the file where the configutation is stored.')
@click.option('-l', '--list', is_eager=True, is_flag=True, expose_value=False, callback=print_list,
help='List the availaible configuration fields.')
@click.option('--reset', is_flag=True, is_eager=True, expose_value=False, callback=reset,
help='Reset all the fields to their default value.')
@click.option('-s', '--show', is_eager=True, is_flag=True, expose_value=False, callback=show_conf,
help='View the configuration.')
@click.argument('fields-to-set', nargs=-1, type=click.UNPROCESSED)
def command(fields_to_set: 'Tuple[str]'):
"""
I manage your configuration.
If you call me with no argument, you will be able to set each field
in an interactive prompt. I can show your configuration with -s,
list the available field with -l and set them by --name-of-field=whatever.
"""
# with a context manager, the config is always saved at the end
with config:
if len(fields_to_set) == 1 and '=' not in fields_to_set[0]:
# we want to update a part of the config
sub = fields_to_set[0]
if sub in config:
if isinstance(config[sub], SubConfig):
# the part is a subconfig
prompt_update_all(config[sub])
else:
# TODO: dynamic prompt for one field
raise click.BadParameter('%s is not a SubConfig of the configuration')
else:
raise click.BadParameter('%s is not a field of the configuration')
elif fields_to_set:
dct = {}
for field in fields_to_set:
field, _, value = field.partition('=')
dct[field] = value
# save directly what is passed if something was passed whitout the interactive prompt
config.__update__(dct)
else:
# or update all
prompt_update_all(config)
# this is the real function for the CLI
LOGGER.debug('start command')
command()
LOGGER.debug('end command')
|
python
|
def update_config(configclass: type(Config)):
"""Command line function to update and the a config."""
# we build the real click command inside the function, because it needs to be done
# dynamically, depending on the config.
# we ignore the type errors, keeping the the defaults if needed
# everything will be updated anyway
config = configclass() # type: Config
def print_list(ctx, param, value):
# they do like that in the doc (http://click.pocoo.org/6/options/#callbacks-and-eager-options)
# so I do the same... but I don't now why.
# the only goal is to call __print_list__()
if not value or ctx.resilient_parsing:
return param
config.__print_list__()
ctx.exit()
def show_conf(ctx, param, value):
# see print_list
if not value or ctx.resilient_parsing:
return param
config.__show__()
ctx.exit()
def reset(ctx, param, value):
# see print_list
if not value or ctx.resilient_parsing:
return param
click.confirm('Are you sure you want to reset ALL fields to the defaults ? This action is not reversible.', abort=True)
# that doesn't exist
configclass.__config_path__, config_path = '', configclass.__config_path__
# So the file won't be opened and only the default will be loaded.
config = configclass()
# Thus we can save the defaults
# To the right place again
configclass.__config_path__ = config_path
config.__save__()
ctx.exit()
def clean(ctx, param, value):
# see print_list
if not value or ctx.resilient_parsing:
return param
config.__save__()
click.echo('Cleaned !')
ctx.exit()
@click.command(context_settings={'ignore_unknown_options': True})
@click.option('-c', '--clean', is_eager=True, is_flag=True, expose_value=False, callback=clean,
help='Clean the file where the configutation is stored.')
@click.option('-l', '--list', is_eager=True, is_flag=True, expose_value=False, callback=print_list,
help='List the availaible configuration fields.')
@click.option('--reset', is_flag=True, is_eager=True, expose_value=False, callback=reset,
help='Reset all the fields to their default value.')
@click.option('-s', '--show', is_eager=True, is_flag=True, expose_value=False, callback=show_conf,
help='View the configuration.')
@click.argument('fields-to-set', nargs=-1, type=click.UNPROCESSED)
def command(fields_to_set: 'Tuple[str]'):
"""
I manage your configuration.
If you call me with no argument, you will be able to set each field
in an interactive prompt. I can show your configuration with -s,
list the available field with -l and set them by --name-of-field=whatever.
"""
# with a context manager, the config is always saved at the end
with config:
if len(fields_to_set) == 1 and '=' not in fields_to_set[0]:
# we want to update a part of the config
sub = fields_to_set[0]
if sub in config:
if isinstance(config[sub], SubConfig):
# the part is a subconfig
prompt_update_all(config[sub])
else:
# TODO: dynamic prompt for one field
raise click.BadParameter('%s is not a SubConfig of the configuration')
else:
raise click.BadParameter('%s is not a field of the configuration')
elif fields_to_set:
dct = {}
for field in fields_to_set:
field, _, value = field.partition('=')
dct[field] = value
# save directly what is passed if something was passed whitout the interactive prompt
config.__update__(dct)
else:
# or update all
prompt_update_all(config)
# this is the real function for the CLI
LOGGER.debug('start command')
command()
LOGGER.debug('end command')
|
[
"def",
"update_config",
"(",
"configclass",
":",
"type",
"(",
"Config",
")",
")",
":",
"# we build the real click command inside the function, because it needs to be done",
"# dynamically, depending on the config.",
"# we ignore the type errors, keeping the the defaults if needed",
"# everything will be updated anyway",
"config",
"=",
"configclass",
"(",
")",
"# type: Config",
"def",
"print_list",
"(",
"ctx",
",",
"param",
",",
"value",
")",
":",
"# they do like that in the doc (http://click.pocoo.org/6/options/#callbacks-and-eager-options)",
"# so I do the same... but I don't now why.",
"# the only goal is to call __print_list__()",
"if",
"not",
"value",
"or",
"ctx",
".",
"resilient_parsing",
":",
"return",
"param",
"config",
".",
"__print_list__",
"(",
")",
"ctx",
".",
"exit",
"(",
")",
"def",
"show_conf",
"(",
"ctx",
",",
"param",
",",
"value",
")",
":",
"# see print_list",
"if",
"not",
"value",
"or",
"ctx",
".",
"resilient_parsing",
":",
"return",
"param",
"config",
".",
"__show__",
"(",
")",
"ctx",
".",
"exit",
"(",
")",
"def",
"reset",
"(",
"ctx",
",",
"param",
",",
"value",
")",
":",
"# see print_list",
"if",
"not",
"value",
"or",
"ctx",
".",
"resilient_parsing",
":",
"return",
"param",
"click",
".",
"confirm",
"(",
"'Are you sure you want to reset ALL fields to the defaults ? This action is not reversible.'",
",",
"abort",
"=",
"True",
")",
"# that doesn't exist",
"configclass",
".",
"__config_path__",
",",
"config_path",
"=",
"''",
",",
"configclass",
".",
"__config_path__",
"# So the file won't be opened and only the default will be loaded.",
"config",
"=",
"configclass",
"(",
")",
"# Thus we can save the defaults",
"# To the right place again",
"configclass",
".",
"__config_path__",
"=",
"config_path",
"config",
".",
"__save__",
"(",
")",
"ctx",
".",
"exit",
"(",
")",
"def",
"clean",
"(",
"ctx",
",",
"param",
",",
"value",
")",
":",
"# see print_list",
"if",
"not",
"value",
"or",
"ctx",
".",
"resilient_parsing",
":",
"return",
"param",
"config",
".",
"__save__",
"(",
")",
"click",
".",
"echo",
"(",
"'Cleaned !'",
")",
"ctx",
".",
"exit",
"(",
")",
"@",
"click",
".",
"command",
"(",
"context_settings",
"=",
"{",
"'ignore_unknown_options'",
":",
"True",
"}",
")",
"@",
"click",
".",
"option",
"(",
"'-c'",
",",
"'--clean'",
",",
"is_eager",
"=",
"True",
",",
"is_flag",
"=",
"True",
",",
"expose_value",
"=",
"False",
",",
"callback",
"=",
"clean",
",",
"help",
"=",
"'Clean the file where the configutation is stored.'",
")",
"@",
"click",
".",
"option",
"(",
"'-l'",
",",
"'--list'",
",",
"is_eager",
"=",
"True",
",",
"is_flag",
"=",
"True",
",",
"expose_value",
"=",
"False",
",",
"callback",
"=",
"print_list",
",",
"help",
"=",
"'List the availaible configuration fields.'",
")",
"@",
"click",
".",
"option",
"(",
"'--reset'",
",",
"is_flag",
"=",
"True",
",",
"is_eager",
"=",
"True",
",",
"expose_value",
"=",
"False",
",",
"callback",
"=",
"reset",
",",
"help",
"=",
"'Reset all the fields to their default value.'",
")",
"@",
"click",
".",
"option",
"(",
"'-s'",
",",
"'--show'",
",",
"is_eager",
"=",
"True",
",",
"is_flag",
"=",
"True",
",",
"expose_value",
"=",
"False",
",",
"callback",
"=",
"show_conf",
",",
"help",
"=",
"'View the configuration.'",
")",
"@",
"click",
".",
"argument",
"(",
"'fields-to-set'",
",",
"nargs",
"=",
"-",
"1",
",",
"type",
"=",
"click",
".",
"UNPROCESSED",
")",
"def",
"command",
"(",
"fields_to_set",
":",
"'Tuple[str]'",
")",
":",
"\"\"\"\n I manage your configuration.\n\n If you call me with no argument, you will be able to set each field\n in an interactive prompt. I can show your configuration with -s,\n list the available field with -l and set them by --name-of-field=whatever.\n \"\"\"",
"# with a context manager, the config is always saved at the end",
"with",
"config",
":",
"if",
"len",
"(",
"fields_to_set",
")",
"==",
"1",
"and",
"'='",
"not",
"in",
"fields_to_set",
"[",
"0",
"]",
":",
"# we want to update a part of the config",
"sub",
"=",
"fields_to_set",
"[",
"0",
"]",
"if",
"sub",
"in",
"config",
":",
"if",
"isinstance",
"(",
"config",
"[",
"sub",
"]",
",",
"SubConfig",
")",
":",
"# the part is a subconfig",
"prompt_update_all",
"(",
"config",
"[",
"sub",
"]",
")",
"else",
":",
"# TODO: dynamic prompt for one field",
"raise",
"click",
".",
"BadParameter",
"(",
"'%s is not a SubConfig of the configuration'",
")",
"else",
":",
"raise",
"click",
".",
"BadParameter",
"(",
"'%s is not a field of the configuration'",
")",
"elif",
"fields_to_set",
":",
"dct",
"=",
"{",
"}",
"for",
"field",
"in",
"fields_to_set",
":",
"field",
",",
"_",
",",
"value",
"=",
"field",
".",
"partition",
"(",
"'='",
")",
"dct",
"[",
"field",
"]",
"=",
"value",
"# save directly what is passed if something was passed whitout the interactive prompt",
"config",
".",
"__update__",
"(",
"dct",
")",
"else",
":",
"# or update all",
"prompt_update_all",
"(",
"config",
")",
"# this is the real function for the CLI",
"LOGGER",
".",
"debug",
"(",
"'start command'",
")",
"command",
"(",
")",
"LOGGER",
".",
"debug",
"(",
"'end command'",
")"
] |
Command line function to update and the a config.
|
[
"Command",
"line",
"function",
"to",
"update",
"and",
"the",
"a",
"config",
"."
] |
3ad01d0bb9344e18719d82a5928b4d8e5fe726ac
|
https://github.com/ddorn/pyconfiglib/blob/3ad01d0bb9344e18719d82a5928b4d8e5fe726ac/configlib/core.py#L495-L603
|
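
The --list/--show/--reset/--clean options above all follow the same click idiom: eager callbacks that run before normal argument processing, return early under ctx.resilient_parsing (shell completion), and call ctx.exit() when done. A stripped-down, stand-alone sketch of that idiom, not tied to configlib:

# Stand-alone sketch of the eager-callback idiom used by update_config().
import click

def print_version(ctx, param, value):
    if not value or ctx.resilient_parsing:   # flag absent, or shell-completion pass
        return
    click.echo('myapp 1.0')                  # hypothetical version string
    ctx.exit()                               # stop before the command body runs

@click.command()
@click.option('--version', is_flag=True, is_eager=True, expose_value=False,
              callback=print_version, help='Print the version and exit.')
def cli():
    click.echo('normal command body')

if __name__ == '__main__':
    cli()
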
239,987
|
Deisss/python-sockjsroom
|
examples/chat/server.py
|
configureLogger
|
def configureLogger(logFolder, logFile):
''' Start the logger instance and configure it '''
# Set debug level
logLevel = 'DEBUG'
logger = logging.getLogger()
logger.setLevel(logLevel)
# Format
formatter = logging.Formatter('%(asctime)s - %(levelname)s | %(name)s -> %(message)s', '%Y-%m-%d %H:%M:%S')
# Remove default handler to keep only clean one
for hdlr in logger.handlers:
logger.removeHandler(hdlr)
# Create missing folder if needed
if not os.path.exists(logFolder):
os.makedirs(logFolder, 0700)
#
# ----------------------------
# CREATE CONSOLE HANDLER
# ----------------------------
#
# Create console handler
consoleh = logging.StreamHandler()
consoleh.setLevel(logLevel)
consoleh.setFormatter(formatter)
# Set our custom handler
logger.addHandler(consoleh)
#
# ----------------------------
# CREATE FILE HANDLER
# ----------------------------
#
fileh = logging.FileHandler(logFile, 'a')
fileh.setLevel(logLevel)
fileh.setFormatter(formatter)
# Set our custom handler
logger.addHandler(fileh)
|
python
|
def configureLogger(logFolder, logFile):
''' Start the logger instance and configure it '''
# Set debug level
logLevel = 'DEBUG'
logger = logging.getLogger()
logger.setLevel(logLevel)
# Format
formatter = logging.Formatter('%(asctime)s - %(levelname)s | %(name)s -> %(message)s', '%Y-%m-%d %H:%M:%S')
# Remove default handler to keep only clean one
for hdlr in logger.handlers:
logger.removeHandler(hdlr)
# Create missing folder if needed
if not os.path.exists(logFolder):
os.makedirs(logFolder, 0700)
#
# ----------------------------
# CREATE CONSOLE HANDLER
# ----------------------------
#
# Create console handler
consoleh = logging.StreamHandler()
consoleh.setLevel(logLevel)
consoleh.setFormatter(formatter)
# Set our custom handler
logger.addHandler(consoleh)
#
# ----------------------------
# CREATE FILE HANDLER
# ----------------------------
#
fileh = logging.FileHandler(logFile, 'a')
fileh.setLevel(logLevel)
fileh.setFormatter(formatter)
# Set our custom handler
logger.addHandler(fileh)
|
[
"def",
"configureLogger",
"(",
"logFolder",
",",
"logFile",
")",
":",
"# Set debug level",
"logLevel",
"=",
"'DEBUG'",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
")",
"logger",
".",
"setLevel",
"(",
"logLevel",
")",
"# Format",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"'%(asctime)s - %(levelname)s | %(name)s -> %(message)s'",
",",
"'%Y-%m-%d %H:%M:%S'",
")",
"# Remove default handler to keep only clean one",
"for",
"hdlr",
"in",
"logger",
".",
"handlers",
":",
"logger",
".",
"removeHandler",
"(",
"hdlr",
")",
"# Create missing folder if needed",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"logFolder",
")",
":",
"os",
".",
"makedirs",
"(",
"logFolder",
",",
"0700",
")",
"#",
"# ----------------------------",
"# CREATE CONSOLE HANDLER",
"# ----------------------------",
"#",
"# Create console handler",
"consoleh",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"consoleh",
".",
"setLevel",
"(",
"logLevel",
")",
"consoleh",
".",
"setFormatter",
"(",
"formatter",
")",
"# Set our custom handler",
"logger",
".",
"addHandler",
"(",
"consoleh",
")",
"#",
"# ----------------------------",
"# CREATE FILE HANDLER",
"# ----------------------------",
"#",
"fileh",
"=",
"logging",
".",
"FileHandler",
"(",
"logFile",
",",
"'a'",
")",
"fileh",
".",
"setLevel",
"(",
"logLevel",
")",
"fileh",
".",
"setFormatter",
"(",
"formatter",
")",
"# Set our custom handler",
"logger",
".",
"addHandler",
"(",
"fileh",
")"
] |
Start the logger instance and configure it
|
[
"Start",
"the",
"logger",
"instance",
"and",
"configure",
"it"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/examples/chat/server.py#L111-L153
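A minimal usage sketch for the configureLogger record above (not part of the dataset record). The import path and file locations are illustrative assumptions, and the 0700 octal literal in the record implies a Python 2 environment:

from server import configureLogger   # hypothetical import; the record defines it in examples/chat/server.py
import logging

configureLogger('./logs', './logs/chat.log')   # illustrative paths; the log folder is created if missing
logging.getLogger('chat').debug('now logging to both console and file')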
|
239,988
|
Deisss/python-sockjsroom
|
examples/chat/server.py
|
printWelcomeMessage
|
def printWelcomeMessage(msg, place=10):
''' Print any welcome message '''
logging.debug('*' * 30)
welcome = ' ' * place
welcome+= msg
logging.debug(welcome)
logging.debug('*' * 30 + '\n')
|
python
|
def printWelcomeMessage(msg, place=10):
''' Print any welcome message '''
logging.debug('*' * 30)
welcome = ' ' * place
welcome+= msg
logging.debug(welcome)
logging.debug('*' * 30 + '\n')
|
[
"def",
"printWelcomeMessage",
"(",
"msg",
",",
"place",
"=",
"10",
")",
":",
"logging",
".",
"debug",
"(",
"'*'",
"*",
"30",
")",
"welcome",
"=",
"' '",
"*",
"place",
"welcome",
"+=",
"msg",
"logging",
".",
"debug",
"(",
"welcome",
")",
"logging",
".",
"debug",
"(",
"'*'",
"*",
"30",
"+",
"'\\n'",
")"
] |
Print any welcome message
|
[
"Print",
"any",
"welcome",
"message"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/examples/chat/server.py#L155-L162
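A brief usage sketch (assumed, not part of the record); the banner is emitted at DEBUG level, so a handler must already be configured, for example by configureLogger above:

from server import printWelcomeMessage   # hypothetical import path
printWelcomeMessage('Chat server started on port 8888')   # indented by the default 10 spaces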
|
239,989
|
Deisss/python-sockjsroom
|
examples/chat/server.py
|
ChatSocketHandler.on_chat
|
def on_chat(self, data):
''' Transfert a message to everybody '''
# XXX: we cannot use on_message as it's 'official' one already used
# by sockjsroom to create multiple on_* elements (like on_chat),
# so we use on_chat instead of on_message
# data => message
if self.roomId != '-1':
self.publishToRoom(self.roomId, 'chat', {
'username': self.username,
'time': datetime.now(),
'message': str(data['message'])
})
|
python
|
def on_chat(self, data):
''' Transfert a message to everybody '''
# XXX: we cannot use on_message as it's 'official' one already used
# by sockjsroom to create multiple on_* elements (like on_chat),
# so we use on_chat instead of on_message
# data => message
if self.roomId != '-1':
self.publishToRoom(self.roomId, 'chat', {
'username': self.username,
'time': datetime.now(),
'message': str(data['message'])
})
|
[
"def",
"on_chat",
"(",
"self",
",",
"data",
")",
":",
"# XXX: we cannot use on_message as it's 'official' one already used",
"# by sockjsroom to create multiple on_* elements (like on_chat),",
"# so we use on_chat instead of on_message",
"# data => message",
"if",
"self",
".",
"roomId",
"!=",
"'-1'",
":",
"self",
".",
"publishToRoom",
"(",
"self",
".",
"roomId",
",",
"'chat'",
",",
"{",
"'username'",
":",
"self",
".",
"username",
",",
"'time'",
":",
"datetime",
".",
"now",
"(",
")",
",",
"'message'",
":",
"str",
"(",
"data",
"[",
"'message'",
"]",
")",
"}",
")"
] |
Transfert a message to everybody
|
[
"Transfert",
"a",
"message",
"to",
"everybody"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/examples/chat/server.py#L61-L73
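A hedged illustration of the payload shape the on_chat handler expects, inferred only from the keys used in the record:

data = {'message': 'hello everyone'}   # the handler reads data['message']
# Given a joined room (roomId != '-1'), the handler broadcasts
# {'username', 'time', 'message'} to every room member via publishToRoom.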
|
239,990
|
Deisss/python-sockjsroom
|
examples/chat/server.py
|
ChatSocketHandler.on_leave
|
def on_leave(self):
''' Quit chat room '''
# Only if user has time to call self.initialize
# (sometimes it's not the case)
if self.roomId != '-1':
# Debug
logging.debug('chat: leave room (roomId: %s)' % self.roomId)
# Say to other users the current user leave room
self.publishToOther(self.roomId, 'leave', {
'username': self.username
})
# Remove sockjsroom link to this room
self.leave(self.roomId)
# Erasing data
self.initialize()
|
python
|
def on_leave(self):
''' Quit chat room '''
# Only if user has time to call self.initialize
# (sometimes it's not the case)
if self.roomId != '-1':
# Debug
logging.debug('chat: leave room (roomId: %s)' % self.roomId)
# Say to other users the current user leave room
self.publishToOther(self.roomId, 'leave', {
'username': self.username
})
# Remove sockjsroom link to this room
self.leave(self.roomId)
# Erasing data
self.initialize()
|
[
"def",
"on_leave",
"(",
"self",
")",
":",
"# Only if user has time to call self.initialize",
"# (sometimes it's not the case)",
"if",
"self",
".",
"roomId",
"!=",
"'-1'",
":",
"# Debug",
"logging",
".",
"debug",
"(",
"'chat: leave room (roomId: %s)'",
"%",
"self",
".",
"roomId",
")",
"# Say to other users the current user leave room",
"self",
".",
"publishToOther",
"(",
"self",
".",
"roomId",
",",
"'leave'",
",",
"{",
"'username'",
":",
"self",
".",
"username",
"}",
")",
"# Remove sockjsroom link to this room",
"self",
".",
"leave",
"(",
"self",
".",
"roomId",
")",
"# Erasing data",
"self",
".",
"initialize",
"(",
")"
] |
Quit chat room
|
[
"Quit",
"chat",
"room"
] |
7c20187571d39e7fede848dc98f954235ca77241
|
https://github.com/Deisss/python-sockjsroom/blob/7c20187571d39e7fede848dc98f954235ca77241/examples/chat/server.py#L76-L93
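A small sketch of when on_leave would run ('handler' below is an assumed connected ChatSocketHandler; sockjsroom normally invokes this for you on disconnect):

handler.on_leave()   # broadcasts a 'leave' event to the room, detaches from it, resets the handler state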
|
239,991
|
klmitch/tendril
|
tendril/udp.py
|
UDPTendrilManager.connect
|
def connect(self, target, acceptor):
"""
Initiate a connection from the tendril manager's endpoint.
Once the connection is completed, a UDPTendril object will be
created and passed to the given acceptor.
:param target: The target of the connection attempt.
:param acceptor: A callable which will initialize the state of
the new UDPTendril object.
"""
# Call some common sanity-checks
super(UDPTendrilManager, self).connect(target, acceptor, None)
# Construct the Tendril
tend = UDPTendril(self, self.local_addr, target)
try:
# Set up the application
tend.application = acceptor(tend)
except application.RejectConnection:
# The acceptor raised a RejectConnection
return None
# OK, let's track the tendril
self._track_tendril(tend)
# Might as well return the tendril, too
return tend
|
python
|
def connect(self, target, acceptor):
"""
Initiate a connection from the tendril manager's endpoint.
Once the connection is completed, a UDPTendril object will be
created and passed to the given acceptor.
:param target: The target of the connection attempt.
:param acceptor: A callable which will initialize the state of
the new UDPTendril object.
"""
# Call some common sanity-checks
super(UDPTendrilManager, self).connect(target, acceptor, None)
# Construct the Tendril
tend = UDPTendril(self, self.local_addr, target)
try:
# Set up the application
tend.application = acceptor(tend)
except application.RejectConnection:
# The acceptor raised a RejectConnection
return None
# OK, let's track the tendril
self._track_tendril(tend)
# Might as well return the tendril, too
return tend
|
[
"def",
"connect",
"(",
"self",
",",
"target",
",",
"acceptor",
")",
":",
"# Call some common sanity-checks",
"super",
"(",
"UDPTendrilManager",
",",
"self",
")",
".",
"connect",
"(",
"target",
",",
"acceptor",
",",
"None",
")",
"# Construct the Tendril",
"tend",
"=",
"UDPTendril",
"(",
"self",
",",
"self",
".",
"local_addr",
",",
"target",
")",
"try",
":",
"# Set up the application",
"tend",
".",
"application",
"=",
"acceptor",
"(",
"tend",
")",
"except",
"application",
".",
"RejectConnection",
":",
"# The acceptor raised a RejectConnection",
"return",
"None",
"# OK, let's track the tendril",
"self",
".",
"_track_tendril",
"(",
"tend",
")",
"# Might as well return the tendril, too",
"return",
"tend"
] |
Initiate a connection from the tendril manager's endpoint.
Once the connection is completed, a UDPTendril object will be
created and passed to the given acceptor.
:param target: The target of the connection attempt.
:param acceptor: A callable which will initialize the state of
the new UDPTendril object.
|
[
"Initiate",
"a",
"connection",
"from",
"the",
"tendril",
"manager",
"s",
"endpoint",
".",
"Once",
"the",
"connection",
"is",
"completed",
"a",
"UDPTendril",
"object",
"will",
"be",
"created",
"and",
"passed",
"to",
"the",
"given",
"acceptor",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/udp.py#L157-L185
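A hedged usage sketch of connect(); 'manager' is assumed to be an already-constructed UDPTendrilManager and the target address is illustrative:

def acceptor(tend):
    # Return the application object for the new tendril; raising
    # application.RejectConnection here would refuse the connection instead.
    return object()   # placeholder application for the sketch

tend = manager.connect(('192.0.2.10', 5000), acceptor)
if tend is None:
    print('acceptor rejected the connection')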
|
239,992
|
klmitch/tendril
|
tendril/udp.py
|
UDPTendrilManager.listener
|
def listener(self, acceptor, wrapper):
"""
Listens for new connections to the manager's endpoint. Once a
new connection is received, a UDPTendril object is generated
for it and it is passed to the acceptor, which must initialize
the state of the connection. If no acceptor is given, no new
connections can be initialized.
:param acceptor: If given, specifies a callable that will be
called with each newly received UDPTendril;
that callable is responsible for initial
acceptance of the connection and for setting
up the initial state of the connection. If
not given, no new connections will be
accepted by the UDPTendrilManager.
:param wrapper: A callable taking, as its first argument, a
socket.socket object. The callable must
return a valid proxy for the socket.socket
object, which will subsequently be used to
communicate on the connection.
"""
# OK, set up the socket
sock = socket.socket(self.addr_family, socket.SOCK_DGRAM)
with utils.SocketCloser(sock):
# Bind to our endpoint
sock.bind(self.endpoint)
# Get the assigned port number
self.local_addr = sock.getsockname()
# Call any wrappers
if wrapper:
sock = wrapper(sock)
# Senders need the socket, too...
self._sock = sock
self._sock_event.set()
# OK, now go into the listening loop with an error threshold
# of 10
closer = utils.SocketCloser(sock, 10,
ignore=[application.RejectConnection])
while True:
with closer:
data, addr = sock.recvfrom(self.recv_bufsize)
# Look up the tendril or create a new one
try:
tend = self[(self.local_addr, addr)]
except KeyError:
if not acceptor:
# Can't accept new connections
continue
# Construct a Tendril
tend = UDPTendril(self, self.local_addr, addr)
# Set up the application
tend.application = acceptor(tend)
# OK, let's track the tendril
self._track_tendril(tend)
# We now have a tendril; process the received data
try:
tend._recv_frameify(data)
except Exception as exc:
# Close the Tendril
tend.close()
# Notify the application what happened
tend.closed(exc)
|
python
|
def listener(self, acceptor, wrapper):
"""
Listens for new connections to the manager's endpoint. Once a
new connection is received, a UDPTendril object is generated
for it and it is passed to the acceptor, which must initialize
the state of the connection. If no acceptor is given, no new
connections can be initialized.
:param acceptor: If given, specifies a callable that will be
called with each newly received UDPTendril;
that callable is responsible for initial
acceptance of the connection and for setting
up the initial state of the connection. If
not given, no new connections will be
accepted by the UDPTendrilManager.
:param wrapper: A callable taking, as its first argument, a
socket.socket object. The callable must
return a valid proxy for the socket.socket
object, which will subsequently be used to
communicate on the connection.
"""
# OK, set up the socket
sock = socket.socket(self.addr_family, socket.SOCK_DGRAM)
with utils.SocketCloser(sock):
# Bind to our endpoint
sock.bind(self.endpoint)
# Get the assigned port number
self.local_addr = sock.getsockname()
# Call any wrappers
if wrapper:
sock = wrapper(sock)
# Senders need the socket, too...
self._sock = sock
self._sock_event.set()
# OK, now go into the listening loop with an error threshold
# of 10
closer = utils.SocketCloser(sock, 10,
ignore=[application.RejectConnection])
while True:
with closer:
data, addr = sock.recvfrom(self.recv_bufsize)
# Look up the tendril or create a new one
try:
tend = self[(self.local_addr, addr)]
except KeyError:
if not acceptor:
# Can't accept new connections
continue
# Construct a Tendril
tend = UDPTendril(self, self.local_addr, addr)
# Set up the application
tend.application = acceptor(tend)
# OK, let's track the tendril
self._track_tendril(tend)
# We now have a tendril; process the received data
try:
tend._recv_frameify(data)
except Exception as exc:
# Close the Tendril
tend.close()
# Notify the application what happened
tend.closed(exc)
|
[
"def",
"listener",
"(",
"self",
",",
"acceptor",
",",
"wrapper",
")",
":",
"# OK, set up the socket",
"sock",
"=",
"socket",
".",
"socket",
"(",
"self",
".",
"addr_family",
",",
"socket",
".",
"SOCK_DGRAM",
")",
"with",
"utils",
".",
"SocketCloser",
"(",
"sock",
")",
":",
"# Bind to our endpoint",
"sock",
".",
"bind",
"(",
"self",
".",
"endpoint",
")",
"# Get the assigned port number",
"self",
".",
"local_addr",
"=",
"sock",
".",
"getsockname",
"(",
")",
"# Call any wrappers",
"if",
"wrapper",
":",
"sock",
"=",
"wrapper",
"(",
"sock",
")",
"# Senders need the socket, too...",
"self",
".",
"_sock",
"=",
"sock",
"self",
".",
"_sock_event",
".",
"set",
"(",
")",
"# OK, now go into the listening loop with an error threshold",
"# of 10",
"closer",
"=",
"utils",
".",
"SocketCloser",
"(",
"sock",
",",
"10",
",",
"ignore",
"=",
"[",
"application",
".",
"RejectConnection",
"]",
")",
"while",
"True",
":",
"with",
"closer",
":",
"data",
",",
"addr",
"=",
"sock",
".",
"recvfrom",
"(",
"self",
".",
"recv_bufsize",
")",
"# Look up the tendril or create a new one",
"try",
":",
"tend",
"=",
"self",
"[",
"(",
"self",
".",
"local_addr",
",",
"addr",
")",
"]",
"except",
"KeyError",
":",
"if",
"not",
"acceptor",
":",
"# Can't accept new connections",
"continue",
"# Construct a Tendril",
"tend",
"=",
"UDPTendril",
"(",
"self",
",",
"self",
".",
"local_addr",
",",
"addr",
")",
"# Set up the application",
"tend",
".",
"application",
"=",
"acceptor",
"(",
"tend",
")",
"# OK, let's track the tendril",
"self",
".",
"_track_tendril",
"(",
"tend",
")",
"# We now have a tendril; process the received data",
"try",
":",
"tend",
".",
"_recv_frameify",
"(",
"data",
")",
"except",
"Exception",
"as",
"exc",
":",
"# Close the Tendril",
"tend",
".",
"close",
"(",
")",
"# Notify the application what happened",
"tend",
".",
"closed",
"(",
"exc",
")"
] |
Listens for new connections to the manager's endpoint. Once a
new connection is received, a UDPTendril object is generated
for it and it is passed to the acceptor, which must initialize
the state of the connection. If no acceptor is given, no new
connections can be initialized.
:param acceptor: If given, specifies a callable that will be
called with each newly received UDPTendril;
that callable is responsible for initial
acceptance of the connection and for setting
up the initial state of the connection. If
not given, no new connections will be
accepted by the UDPTendrilManager.
:param wrapper: A callable taking, as its first argument, a
socket.socket object. The callable must
return a valid proxy for the socket.socket
object, which will subsequently be used to
communicate on the connection.
|
[
"Listens",
"for",
"new",
"connections",
"to",
"the",
"manager",
"s",
"endpoint",
".",
"Once",
"a",
"new",
"connection",
"is",
"received",
"a",
"UDPTendril",
"object",
"is",
"generated",
"for",
"it",
"and",
"it",
"is",
"passed",
"to",
"the",
"acceptor",
"which",
"must",
"initialize",
"the",
"state",
"of",
"the",
"connection",
".",
"If",
"no",
"acceptor",
"is",
"given",
"no",
"new",
"connections",
"can",
"be",
"initialized",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/udp.py#L187-L260
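The wrapper argument is easiest to see with a sketch; this one only tweaks a socket option and returns the same socket, which satisfies the "valid proxy" requirement in the docstring (the manager calls listener() itself, so this is not invoked directly):

import socket

def reuse_addr_wrapper(sock):
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    return sock   # must return a socket-like object the listening loop can recvfrom() on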
|
239,993
|
fr33jc/bang
|
bang/providers/hpcloud/load_balancer.py
|
HPLoadBalancer.add_lb_nodes
|
def add_lb_nodes(self, lb_id, nodes):
"""
Adds nodes to an existing LBaaS instance
:param string lb_id: Balancer id
:param list nodes: Nodes to add. {address, port, [condition]}
:rtype :class:`list`
"""
log.info("Adding load balancer nodes %s" % nodes)
resp, body = self._request(
'post',
'/loadbalancers/%s/nodes' % lb_id,
data={'nodes': nodes})
return body
|
python
|
def add_lb_nodes(self, lb_id, nodes):
"""
Adds nodes to an existing LBaaS instance
:param string lb_id: Balancer id
:param list nodes: Nodes to add. {address, port, [condition]}
:rtype :class:`list`
"""
log.info("Adding load balancer nodes %s" % nodes)
resp, body = self._request(
'post',
'/loadbalancers/%s/nodes' % lb_id,
data={'nodes': nodes})
return body
|
[
"def",
"add_lb_nodes",
"(",
"self",
",",
"lb_id",
",",
"nodes",
")",
":",
"log",
".",
"info",
"(",
"\"Adding load balancer nodes %s\"",
"%",
"nodes",
")",
"resp",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'post'",
",",
"'/loadbalancers/%s/nodes'",
"%",
"lb_id",
",",
"data",
"=",
"{",
"'nodes'",
":",
"nodes",
"}",
")",
"return",
"body"
] |
Adds nodes to an existing LBaaS instance
:param string lb_id: Balancer id
:param list nodes: Nodes to add. {address, port, [condition]}
:rtype :class:`list`
|
[
"Adds",
"nodes",
"to",
"an",
"existing",
"LBaaS",
"instance"
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/providers/hpcloud/load_balancer.py#L126-L141
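An illustrative call (the balancer id, addresses, and the 'lb' instance are assumptions); the node dicts follow the {address, port, [condition]} shape from the docstring:

nodes = [
    {'address': '10.0.0.11', 'port': '80'},
    {'address': '10.0.0.12', 'port': '80', 'condition': 'ENABLED'},
]
body = lb.add_lb_nodes('12345', nodes)   # returns the parsed response body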
|
239,994
|
fr33jc/bang
|
bang/providers/hpcloud/load_balancer.py
|
HPLoadBalancer.match_lb_nodes
|
def match_lb_nodes(self, lb_id, existing_nodes, host_addresses, host_port):
"""
Add and remove nodes to match the host addresses
and port given, based on existing_nodes. HPCS doesn't
allow a load balancer with no backends, so we'll add
first, delete after.
:param string lb_id: Load balancer id
:param :class:`list` of :class:`dict` existing_nodes: Existing nodes
:param :class:`list` host_addresses: Node host addresses
:param string port: Node port
"""
delete_filter = lambda n: \
n['address'] not in host_addresses or \
str(n['port']) != str(host_port)
delete_nodes = filter(delete_filter, existing_nodes)
delete_node_ids = [n['id'] for n in delete_nodes]
delete_node_hosts = [n['address'] for n in delete_nodes]
current_nodes = set([n['address'] for n in existing_nodes])
current_nodes -= set(delete_node_hosts)
add_nodes = host_addresses - current_nodes
if add_nodes:
nodes_to_add = [
{'address': n, 'port': str(host_port)}
for n in add_nodes
]
args = (lb_id, nodes_to_add)
self.add_lb_nodes(*args)
if delete_node_ids:
args = (lb_id, delete_node_ids)
self.remove_lb_nodes(*args)
log.info("Were %d nodes. Added %d nodes; deleted %d nodes" %
(len(existing_nodes), len(add_nodes), len(delete_nodes)))
|
python
|
def match_lb_nodes(self, lb_id, existing_nodes, host_addresses, host_port):
"""
Add and remove nodes to match the host addresses
and port given, based on existing_nodes. HPCS doesn't
allow a load balancer with no backends, so we'll add
first, delete after.
:param string lb_id: Load balancer id
:param :class:`list` of :class:`dict` existing_nodes: Existing nodes
:param :class:`list` host_addresses: Node host addresses
:param string port: Node port
"""
delete_filter = lambda n: \
n['address'] not in host_addresses or \
str(n['port']) != str(host_port)
delete_nodes = filter(delete_filter, existing_nodes)
delete_node_ids = [n['id'] for n in delete_nodes]
delete_node_hosts = [n['address'] for n in delete_nodes]
current_nodes = set([n['address'] for n in existing_nodes])
current_nodes -= set(delete_node_hosts)
add_nodes = host_addresses - current_nodes
if add_nodes:
nodes_to_add = [
{'address': n, 'port': str(host_port)}
for n in add_nodes
]
args = (lb_id, nodes_to_add)
self.add_lb_nodes(*args)
if delete_node_ids:
args = (lb_id, delete_node_ids)
self.remove_lb_nodes(*args)
log.info("Were %d nodes. Added %d nodes; deleted %d nodes" %
(len(existing_nodes), len(add_nodes), len(delete_nodes)))
|
[
"def",
"match_lb_nodes",
"(",
"self",
",",
"lb_id",
",",
"existing_nodes",
",",
"host_addresses",
",",
"host_port",
")",
":",
"delete_filter",
"=",
"lambda",
"n",
":",
"n",
"[",
"'address'",
"]",
"not",
"in",
"host_addresses",
"or",
"str",
"(",
"n",
"[",
"'port'",
"]",
")",
"!=",
"str",
"(",
"host_port",
")",
"delete_nodes",
"=",
"filter",
"(",
"delete_filter",
",",
"existing_nodes",
")",
"delete_node_ids",
"=",
"[",
"n",
"[",
"'id'",
"]",
"for",
"n",
"in",
"delete_nodes",
"]",
"delete_node_hosts",
"=",
"[",
"n",
"[",
"'address'",
"]",
"for",
"n",
"in",
"delete_nodes",
"]",
"current_nodes",
"=",
"set",
"(",
"[",
"n",
"[",
"'address'",
"]",
"for",
"n",
"in",
"existing_nodes",
"]",
")",
"current_nodes",
"-=",
"set",
"(",
"delete_node_hosts",
")",
"add_nodes",
"=",
"host_addresses",
"-",
"current_nodes",
"if",
"add_nodes",
":",
"nodes_to_add",
"=",
"[",
"{",
"'address'",
":",
"n",
",",
"'port'",
":",
"str",
"(",
"host_port",
")",
"}",
"for",
"n",
"in",
"add_nodes",
"]",
"args",
"=",
"(",
"lb_id",
",",
"nodes_to_add",
")",
"self",
".",
"add_lb_nodes",
"(",
"*",
"args",
")",
"if",
"delete_node_ids",
":",
"args",
"=",
"(",
"lb_id",
",",
"delete_node_ids",
")",
"self",
".",
"remove_lb_nodes",
"(",
"*",
"args",
")",
"log",
".",
"info",
"(",
"\"Were %d nodes. Added %d nodes; deleted %d nodes\"",
"%",
"(",
"len",
"(",
"existing_nodes",
")",
",",
"len",
"(",
"add_nodes",
")",
",",
"len",
"(",
"delete_nodes",
")",
")",
")"
] |
Add and remove nodes to match the host addresses
and port given, based on existing_nodes. HPCS doesn't
allow a load balancer with no backends, so we'll add
first, delete after.
:param string lb_id: Load balancer id
:param :class:`list` of :class:`dict` existing_nodes: Existing nodes
:param :class:`list` host_addresses: Node host addresses
:param string port: Node port
|
[
"Add",
"and",
"remove",
"nodes",
"to",
"match",
"the",
"host",
"addresses",
"and",
"port",
"given",
"based",
"on",
"existing_nodes",
".",
"HPCS",
"doesn",
"t",
"allow",
"a",
"load",
"balancer",
"with",
"no",
"backends",
"so",
"we",
"ll",
"add",
"first",
"delete",
"after",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/providers/hpcloud/load_balancer.py#L143-L183
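A reconciliation sketch (ids and addresses are illustrative); note that host_addresses must support set subtraction, so pass a set:

existing = [{'id': '1', 'address': '10.0.0.11', 'port': 80},
            {'id': '2', 'address': '10.0.0.99', 'port': 80}]
lb.match_lb_nodes('12345', existing, {'10.0.0.11', '10.0.0.12'}, 80)
# keeps .11, adds .12 first, then removes .99 (add-before-delete, per the docstring)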
|
239,995
|
fr33jc/bang
|
bang/providers/hpcloud/load_balancer.py
|
HPLoadBalancer.remove_lb_nodes
|
def remove_lb_nodes(self, lb_id, node_ids):
"""
Remove one or more nodes
:param string lb_id: Balancer id
:param list node_ids: List of node ids
"""
log.info("Removing load balancer nodes %s" % node_ids)
for node_id in node_ids:
self._request('delete', '/loadbalancers/%s/nodes/%s' % (lb_id, node_id))
|
python
|
def remove_lb_nodes(self, lb_id, node_ids):
"""
Remove one or more nodes
:param string lb_id: Balancer id
:param list node_ids: List of node ids
"""
log.info("Removing load balancer nodes %s" % node_ids)
for node_id in node_ids:
self._request('delete', '/loadbalancers/%s/nodes/%s' % (lb_id, node_id))
|
[
"def",
"remove_lb_nodes",
"(",
"self",
",",
"lb_id",
",",
"node_ids",
")",
":",
"log",
".",
"info",
"(",
"\"Removing load balancer nodes %s\"",
"%",
"node_ids",
")",
"for",
"node_id",
"in",
"node_ids",
":",
"self",
".",
"_request",
"(",
"'delete'",
",",
"'/loadbalancers/%s/nodes/%s'",
"%",
"(",
"lb_id",
",",
"node_id",
")",
")"
] |
Remove one or more nodes
:param string lb_id: Balancer id
:param list node_ids: List of node ids
|
[
"Remove",
"one",
"or",
"more",
"nodes"
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/providers/hpcloud/load_balancer.py#L185-L195
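A one-line illustrative call; each node id is deleted with its own request:

lb.remove_lb_nodes('12345', ['1', '7'])   # balancer and node ids are assumptions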
|
239,996
|
estilen/simplebrowser
|
simplebrowser/browser.py
|
SimpleBrowser.soup
|
def soup(self, *args, **kwargs):
"""Parse the currently loaded website.
Optionally, SoupStrainer can be used to only parse relevant
parts of the page. This can be particularly useful if the website is
complex or perfomance is a factor.
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#soupstrainer>
Args:
*args: Optional positional arguments that `SoupStrainer` takes.
**kwargs: Optional keyword argument that `SoupStrainer` takes.
Returns:
A `BeautifulSoup` object.
Raises:
NoWebsiteLoadedError: If no website is currently loaded.
ParsingError: If the current response isn't supported by `bs4`
"""
if self._url is None:
raise NoWebsiteLoadedError('website parsing requires a loaded website')
content_type = self._response.headers.get('Content-Type', '')
if not any(markup in content_type for markup in ('html', 'xml')):
raise ParsingError('unsupported content type \'{}\''.format(content_type))
strainer = SoupStrainer(*args, **kwargs)
return BeautifulSoup(self._response.content, self.parser, parse_only=strainer)
|
python
|
def soup(self, *args, **kwargs):
"""Parse the currently loaded website.
Optionally, SoupStrainer can be used to only parse relevant
parts of the page. This can be particularly useful if the website is
complex or perfomance is a factor.
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#soupstrainer>
Args:
*args: Optional positional arguments that `SoupStrainer` takes.
**kwargs: Optional keyword argument that `SoupStrainer` takes.
Returns:
A `BeautifulSoup` object.
Raises:
NoWebsiteLoadedError: If no website is currently loaded.
ParsingError: If the current response isn't supported by `bs4`
"""
if self._url is None:
raise NoWebsiteLoadedError('website parsing requires a loaded website')
content_type = self._response.headers.get('Content-Type', '')
if not any(markup in content_type for markup in ('html', 'xml')):
raise ParsingError('unsupported content type \'{}\''.format(content_type))
strainer = SoupStrainer(*args, **kwargs)
return BeautifulSoup(self._response.content, self.parser, parse_only=strainer)
|
[
"def",
"soup",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"_url",
"is",
"None",
":",
"raise",
"NoWebsiteLoadedError",
"(",
"'website parsing requires a loaded website'",
")",
"content_type",
"=",
"self",
".",
"_response",
".",
"headers",
".",
"get",
"(",
"'Content-Type'",
",",
"''",
")",
"if",
"not",
"any",
"(",
"markup",
"in",
"content_type",
"for",
"markup",
"in",
"(",
"'html'",
",",
"'xml'",
")",
")",
":",
"raise",
"ParsingError",
"(",
"'unsupported content type \\'{}\\''",
".",
"format",
"(",
"content_type",
")",
")",
"strainer",
"=",
"SoupStrainer",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"BeautifulSoup",
"(",
"self",
".",
"_response",
".",
"content",
",",
"self",
".",
"parser",
",",
"parse_only",
"=",
"strainer",
")"
] |
Parse the currently loaded website.
Optionally, SoupStrainer can be used to only parse relevant
parts of the page. This can be particularly useful if the website is
complex or perfomance is a factor.
<https://www.crummy.com/software/BeautifulSoup/bs4/doc/#soupstrainer>
Args:
*args: Optional positional arguments that `SoupStrainer` takes.
**kwargs: Optional keyword argument that `SoupStrainer` takes.
Returns:
A `BeautifulSoup` object.
Raises:
NoWebsiteLoadedError: If no website is currently loaded.
ParsingError: If the current response isn't supported by `bs4`
|
[
"Parse",
"the",
"currently",
"loaded",
"website",
"."
] |
76c1c0d770f8a209a7a32fa65ed46f9d6e60f91b
|
https://github.com/estilen/simplebrowser/blob/76c1c0d770f8a209a7a32fa65ed46f9d6e60f91b/simplebrowser/browser.py#L49-L76
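A usage sketch: the positional/keyword arguments are handed to SoupStrainer, so the call below parses only <a> tags with an href ('browser' is an assumed SimpleBrowser with a page already loaded):

links = browser.soup('a', href=True)
for a in links.find_all('a'):
    print(a['href'])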
|
239,997
|
estilen/simplebrowser
|
simplebrowser/browser.py
|
SimpleBrowser.get
|
def get(self, url, **kwargs):
"""Send a GET request to the specified URL.
Method directly wraps around `Session.get` and updates browser
attributes.
<http://docs.python-requests.org/en/master/api/#requests.get>
Args:
url: URL for the new `Request` object.
**kwargs: Optional arguments that `Request` takes.
Returns:
`Response` object of a successful request.
"""
response = self.session.get(url, **kwargs)
self._url = response.url
self._response = response
return response
|
python
|
def get(self, url, **kwargs):
"""Send a GET request to the specified URL.
Method directly wraps around `Session.get` and updates browser
attributes.
<http://docs.python-requests.org/en/master/api/#requests.get>
Args:
url: URL for the new `Request` object.
**kwargs: Optional arguments that `Request` takes.
Returns:
`Response` object of a successful request.
"""
response = self.session.get(url, **kwargs)
self._url = response.url
self._response = response
return response
|
[
"def",
"get",
"(",
"self",
",",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"response",
"=",
"self",
".",
"session",
".",
"get",
"(",
"url",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"_url",
"=",
"response",
".",
"url",
"self",
".",
"_response",
"=",
"response",
"return",
"response"
] |
Send a GET request to the specified URL.
Method directly wraps around `Session.get` and updates browser
attributes.
<http://docs.python-requests.org/en/master/api/#requests.get>
Args:
url: URL for the new `Request` object.
**kwargs: Optional arguments that `Request` takes.
Returns:
`Response` object of a successful request.
|
[
"Send",
"a",
"GET",
"request",
"to",
"the",
"specified",
"URL",
"."
] |
76c1c0d770f8a209a7a32fa65ed46f9d6e60f91b
|
https://github.com/estilen/simplebrowser/blob/76c1c0d770f8a209a7a32fa65ed46f9d6e60f91b/simplebrowser/browser.py#L78-L95
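A minimal sketch; constructing SimpleBrowser with no arguments is an assumption (the record does not show __init__), and extra kwargs such as timeout pass straight through to requests:

browser = SimpleBrowser()
resp = browser.get('https://example.com', timeout=10)
print(resp.status_code, resp.url)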
|
239,998
|
estilen/simplebrowser
|
simplebrowser/browser.py
|
SimpleBrowser.post
|
def post(self, **kwargs):
"""Send a POST request to the currently loaded website's URL.
The browser will automatically fill out the form. If `data` dict has
been passed into ``kwargs``, the contained input values will override
the automatically filled out values.
Returns:
`Response` object of a successful request.
Raises:
NoWebsiteLoadedError: If no website is currently loaded.
"""
if self._url is None:
raise NoWebsiteLoadedError('request submission requires a loaded website')
data = kwargs.get('data', {})
for i in self.soup('form').select('input[name]'):
if i.get('name') not in data:
data[i.get('name')] = i.get('value', '')
kwargs['data'] = data
response = self.session.post(self._url, **kwargs)
self._url = response.url
self._response = response
return response
|
python
|
def post(self, **kwargs):
"""Send a POST request to the currently loaded website's URL.
The browser will automatically fill out the form. If `data` dict has
been passed into ``kwargs``, the contained input values will override
the automatically filled out values.
Returns:
`Response` object of a successful request.
Raises:
NoWebsiteLoadedError: If no website is currently loaded.
"""
if self._url is None:
raise NoWebsiteLoadedError('request submission requires a loaded website')
data = kwargs.get('data', {})
for i in self.soup('form').select('input[name]'):
if i.get('name') not in data:
data[i.get('name')] = i.get('value', '')
kwargs['data'] = data
response = self.session.post(self._url, **kwargs)
self._url = response.url
self._response = response
return response
|
[
"def",
"post",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"_url",
"is",
"None",
":",
"raise",
"NoWebsiteLoadedError",
"(",
"'request submission requires a loaded website'",
")",
"data",
"=",
"kwargs",
".",
"get",
"(",
"'data'",
",",
"{",
"}",
")",
"for",
"i",
"in",
"self",
".",
"soup",
"(",
"'form'",
")",
".",
"select",
"(",
"'input[name]'",
")",
":",
"if",
"i",
".",
"get",
"(",
"'name'",
")",
"not",
"in",
"data",
":",
"data",
"[",
"i",
".",
"get",
"(",
"'name'",
")",
"]",
"=",
"i",
".",
"get",
"(",
"'value'",
",",
"''",
")",
"kwargs",
"[",
"'data'",
"]",
"=",
"data",
"response",
"=",
"self",
".",
"session",
".",
"post",
"(",
"self",
".",
"_url",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"_url",
"=",
"response",
".",
"url",
"self",
".",
"_response",
"=",
"response",
"return",
"response"
] |
Send a POST request to the currently loaded website's URL.
The browser will automatically fill out the form. If `data` dict has
been passed into ``kwargs``, the contained input values will override
the automatically filled out values.
Returns:
`Response` object of a successful request.
Raises:
NoWebsiteLoadedError: If no website is currently loaded.
|
[
"Send",
"a",
"POST",
"request",
"to",
"the",
"currently",
"loaded",
"website",
"s",
"URL",
"."
] |
76c1c0d770f8a209a7a32fa65ed46f9d6e60f91b
|
https://github.com/estilen/simplebrowser/blob/76c1c0d770f8a209a7a32fa65ed46f9d6e60f91b/simplebrowser/browser.py#L97-L122
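A sketch of the auto-filled form flow; the URL and field names are illustrative. Values supplied in data override the input values scraped from the loaded page:

browser.get('https://example.com/login')
resp = browser.post(data={'username': 'alice', 'password': 'not-a-real-password'})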
|
239,999
|
tBaxter/tango-shared-core
|
build/lib/tango_shared/views.py
|
build_howto
|
def build_howto(request=None):
"""
Searches for "how_to.md" files in app directories.
Creates user-friendly admin how-to section from apps that have them.
"""
how_tos = {}
for app in settings.INSTALLED_APPS:
mod = import_module(app)
app_dir = os.path.dirname(mod.__file__)
how_to_file = os.path.join(app_dir, 'how_to.md')
if os.path.exists(how_to_file):
contents = open(how_to_file).read()
how_tos[app] = markdown.markdown(contents)
return render(request, 'admin/how-to/index.html', {'how_tos': how_tos})
|
python
|
def build_howto(request=None):
"""
Searches for "how_to.md" files in app directories.
Creates user-friendly admin how-to section from apps that have them.
"""
how_tos = {}
for app in settings.INSTALLED_APPS:
mod = import_module(app)
app_dir = os.path.dirname(mod.__file__)
how_to_file = os.path.join(app_dir, 'how_to.md')
if os.path.exists(how_to_file):
contents = open(how_to_file).read()
how_tos[app] = markdown.markdown(contents)
return render(request, 'admin/how-to/index.html', {'how_tos': how_tos})
|
[
"def",
"build_howto",
"(",
"request",
"=",
"None",
")",
":",
"how_tos",
"=",
"{",
"}",
"for",
"app",
"in",
"settings",
".",
"INSTALLED_APPS",
":",
"mod",
"=",
"import_module",
"(",
"app",
")",
"app_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"mod",
".",
"__file__",
")",
"how_to_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_dir",
",",
"'how_to.md'",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"how_to_file",
")",
":",
"contents",
"=",
"open",
"(",
"how_to_file",
")",
".",
"read",
"(",
")",
"how_tos",
"[",
"app",
"]",
"=",
"markdown",
".",
"markdown",
"(",
"contents",
")",
"return",
"render",
"(",
"request",
",",
"'admin/how-to/index.html'",
",",
"{",
"'how_tos'",
":",
"how_tos",
"}",
")"
] |
Searches for "how_to.md" files in app directories.
Creates user-friendly admin how-to section from apps that have them.
|
[
"Searches",
"for",
"how_to",
".",
"md",
"files",
"in",
"app",
"directories",
".",
"Creates",
"user",
"-",
"friendly",
"admin",
"how",
"-",
"to",
"section",
"from",
"apps",
"that",
"have",
"them",
"."
] |
35fc10aef1ceedcdb4d6d866d44a22efff718812
|
https://github.com/tBaxter/tango-shared-core/blob/35fc10aef1ceedcdb4d6d866d44a22efff718812/build/lib/tango_shared/views.py#L41-L57
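A hedged wiring sketch; the URL route and import path are assumptions, and django.urls.path requires Django 2.0+ (the view itself only needs installed apps to ship a how_to.md at their package root):

from django.urls import path
from tango_shared.views import build_howto   # assumed import path

urlpatterns = [
    path('admin/how-to/', build_howto),
]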
|