Dataset columns (type and observed value/length ranges):

| column | type | values |
|---|---|---|
| id | int32 | 0 to 252k |
| repo | string | 7 to 55 chars |
| path | string | 4 to 127 chars |
| func_name | string | 1 to 88 chars |
| original_string | string | 75 to 19.8k chars |
| language | string | 1 distinct value |
| code | string | 75 to 19.8k chars |
| code_tokens | list | |
| docstring | string | 3 to 17.3k chars |
| docstring_tokens | list | |
| sha | string | 40 chars |
| url | string | 87 to 242 chars |
240,800
|
mayfield/shellish
|
shellish/command/command.py
|
Command.get_commands_from
|
def get_commands_from(self, args):
""" We have to code the key names for each depth. This method scans
for each level and returns a list of the command arguments. """
commands = []
for i in itertools.count(0):
try:
commands.append(getattr(args, self.arg_label_fmt % i))
except AttributeError:
break
return commands
|
python
|
def get_commands_from(self, args):
""" We have to code the key names for each depth. This method scans
for each level and returns a list of the command arguments. """
commands = []
for i in itertools.count(0):
try:
commands.append(getattr(args, self.arg_label_fmt % i))
except AttributeError:
break
return commands
|
[
"def",
"get_commands_from",
"(",
"self",
",",
"args",
")",
":",
"commands",
"=",
"[",
"]",
"for",
"i",
"in",
"itertools",
".",
"count",
"(",
"0",
")",
":",
"try",
":",
"commands",
".",
"append",
"(",
"getattr",
"(",
"args",
",",
"self",
".",
"arg_label_fmt",
"%",
"i",
")",
")",
"except",
"AttributeError",
":",
"break",
"return",
"commands"
] |
We have to code the key names for each depth. This method scans
for each level and returns a list of the command arguments.
|
[
"We",
"have",
"to",
"code",
"the",
"key",
"names",
"for",
"each",
"depth",
".",
"This",
"method",
"scans",
"for",
"each",
"level",
"and",
"returns",
"a",
"list",
"of",
"the",
"command",
"arguments",
"."
] |
df0f0e4612d138c34d8cb99b66ab5b8e47f1414a
|
https://github.com/mayfield/shellish/blob/df0f0e4612d138c34d8cb99b66ab5b8e47f1414a/shellish/command/command.py#L458-L467
|
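A minimal usage sketch for the `get_commands_from` row above: the method walks numbered namespace attributes until one is missing. The format string `'command%d'` and the `SimpleNamespace` arguments are assumptions for illustration; the real `arg_label_fmt` is defined elsewhere in shellish.

```python
import itertools
from types import SimpleNamespace

arg_label_fmt = 'command%d'  # assumption: stand-in for shellish's real format string

def get_commands_from(args):
    """Collect subcommand names from numbered attributes until one is missing."""
    commands = []
    for i in itertools.count(0):
        try:
            commands.append(getattr(args, arg_label_fmt % i))
        except AttributeError:
            break
    return commands

# argparse-style namespace carrying one attribute per nesting depth
args = SimpleNamespace(command0='remote', command1='add')
print(get_commands_from(args))  # ['remote', 'add']
```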
240,801
|
Met48/inibin
|
inibin/util.py
|
_take_bits
|
def _take_bits(buf, count):
"""Return the booleans that were packed into bytes."""
# TODO: Verify output
bytes_count = (count + 7) // 8
bytes_mod = count % 8
data = _unpack_from(buf, 'B', bytes_count)
values = []
for i, byte in enumerate(data):
for _ in range(8 if i != bytes_count - 1 else bytes_mod):
# TODO: Convert to True / False
values.append(byte & 0b10000000)
byte <<= 1
return values
|
python
|
def _take_bits(buf, count):
"""Return the booleans that were packed into bytes."""
# TODO: Verify output
bytes_count = (count + 7) // 8
bytes_mod = count % 8
data = _unpack_from(buf, 'B', bytes_count)
values = []
for i, byte in enumerate(data):
for _ in range(8 if i != bytes_count - 1 else bytes_mod):
# TODO: Convert to True / False
values.append(byte & 0b10000000)
byte <<= 1
return values
|
[
"def",
"_take_bits",
"(",
"buf",
",",
"count",
")",
":",
"# TODO: Verify output",
"bytes_count",
"=",
"(",
"count",
"+",
"7",
")",
"//",
"8",
"bytes_mod",
"=",
"count",
"%",
"8",
"data",
"=",
"_unpack_from",
"(",
"buf",
",",
"'B'",
",",
"bytes_count",
")",
"values",
"=",
"[",
"]",
"for",
"i",
",",
"byte",
"in",
"enumerate",
"(",
"data",
")",
":",
"for",
"_",
"in",
"range",
"(",
"8",
"if",
"i",
"!=",
"bytes_count",
"-",
"1",
"else",
"bytes_mod",
")",
":",
"# TODO: Convert to True / False",
"values",
".",
"append",
"(",
"byte",
"&",
"0b10000000",
")",
"byte",
"<<=",
"1",
"return",
"values"
] |
Return the booleans that were packed into bytes.
|
[
"Return",
"the",
"booleans",
"that",
"were",
"packed",
"into",
"bytes",
"."
] |
6abfeb22cfc6750fc95abe395cf6bd024a04f394
|
https://github.com/Met48/inibin/blob/6abfeb22cfc6750fc95abe395cf6bd024a04f394/inibin/util.py#L3-L16
|
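The `_take_bits` row above unpacks boolean flags that were packed MSB-first into bytes. A self-contained sketch of the same idea, reading directly from a `bytes` object rather than through the repo's buffer helper:

```python
def take_bits(data: bytes, count: int) -> list:
    """Unpack `count` boolean flags stored MSB-first in `data`."""
    values = []
    for i, byte in enumerate(data):
        for bit in range(8):
            if i * 8 + bit >= count:
                break
            values.append(bool(byte & 0b10000000))
            byte <<= 1
    return values

# 0b10100000 holds the flags True, False, True in its top three bits
print(take_bits(bytes([0b10100000]), 3))  # [True, False, True]
```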
240,802
|
Met48/inibin
|
inibin/util.py
|
_fix_keys
|
def _fix_keys(key_mapping, inibin, string_table=None):
"""
Return a human-readable dictionary from the inibin.
Arguments:
key_mapping -- Dictionary used for conversion. Supports nesting. Every other
value should be a numeric inibin key, or a tuple of the key and a
function to apply to the result.
inibin -- The dictionary returned from reading an inibin.
string_table -- Used to translate strings. Any string with a key in
string_table will be replaced. Typically loaded from a fontconfig_*.txt.
"""
if string_table is None:
string_table = {}
def walk(node, out_node):
# Walk the nodes of the key mapping
for key, value in node.items():
if isinstance(value, dict):
if key not in out_node:
out_node[key] = {}
walk(value, out_node[key])
else:
# Can either be just the index, or the index plus a function to apply
func = None
if isinstance(value, tuple):
func = value[-1]
index = value[0]
else:
index = value
if index is None or index not in inibin:
out_node[key] = None
continue
val = inibin[index]
# Try numeric conversion
# Inibins often store numbers in strings
if isinstance(val, bytes):
try:
val = int(val)
except ValueError:
try:
val = float(val)
except ValueError:
val = val.decode('utf8')
# Check if value is a reference to a fontconfig key
if val in string_table:
val = string_table[val]
# Apply the function
if callable(func):
val = func(val)
out_node[key] = val
out = {}
walk(key_mapping, out)
return out
|
python
|
def _fix_keys(key_mapping, inibin, string_table=None):
"""
Return a human-readable dictionary from the inibin.
Arguments:
key_mapping -- Dictionary used for conversion. Supports nesting. Every other
value should be a numeric inibin key, or a tuple of the key and a
function to apply to the result.
inibin -- The dictionary returned from reading an inibin.
string_table -- Used to translate strings. Any string with a key in
string_table will be replaced. Typically loaded from a fontconfig_*.txt.
"""
if string_table is None:
string_table = {}
def walk(node, out_node):
# Walk the nodes of the key mapping
for key, value in node.items():
if isinstance(value, dict):
if key not in out_node:
out_node[key] = {}
walk(value, out_node[key])
else:
# Can either be just the index, or the index plus a function to apply
func = None
if isinstance(value, tuple):
func = value[-1]
index = value[0]
else:
index = value
if index is None or index not in inibin:
out_node[key] = None
continue
val = inibin[index]
# Try numeric conversion
# Inibins often store numbers in strings
if isinstance(val, bytes):
try:
val = int(val)
except ValueError:
try:
val = float(val)
except ValueError:
val = val.decode('utf8')
# Check if value is a reference to a fontconfig key
if val in string_table:
val = string_table[val]
# Apply the function
if callable(func):
val = func(val)
out_node[key] = val
out = {}
walk(key_mapping, out)
return out
|
[
"def",
"_fix_keys",
"(",
"key_mapping",
",",
"inibin",
",",
"string_table",
"=",
"None",
")",
":",
"if",
"string_table",
"is",
"None",
":",
"string_table",
"=",
"{",
"}",
"def",
"walk",
"(",
"node",
",",
"out_node",
")",
":",
"# Walk the nodes of the key mapping",
"for",
"key",
",",
"value",
"in",
"node",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"if",
"key",
"not",
"in",
"out_node",
":",
"out_node",
"[",
"key",
"]",
"=",
"{",
"}",
"walk",
"(",
"value",
",",
"out_node",
"[",
"key",
"]",
")",
"else",
":",
"# Can either be just the index, or the index plus a function to apply",
"func",
"=",
"None",
"if",
"isinstance",
"(",
"value",
",",
"tuple",
")",
":",
"func",
"=",
"value",
"[",
"-",
"1",
"]",
"index",
"=",
"value",
"[",
"0",
"]",
"else",
":",
"index",
"=",
"value",
"if",
"index",
"is",
"None",
"or",
"index",
"not",
"in",
"inibin",
":",
"out_node",
"[",
"key",
"]",
"=",
"None",
"continue",
"val",
"=",
"inibin",
"[",
"index",
"]",
"# Try numeric conversion",
"# Inibins often store numbers in strings",
"if",
"isinstance",
"(",
"val",
",",
"bytes",
")",
":",
"try",
":",
"val",
"=",
"int",
"(",
"val",
")",
"except",
"ValueError",
":",
"try",
":",
"val",
"=",
"float",
"(",
"val",
")",
"except",
"ValueError",
":",
"val",
"=",
"val",
".",
"decode",
"(",
"'utf8'",
")",
"# Check if value is a reference to a fontconfig key",
"if",
"val",
"in",
"string_table",
":",
"val",
"=",
"string_table",
"[",
"val",
"]",
"# Apply the function",
"if",
"callable",
"(",
"func",
")",
":",
"val",
"=",
"func",
"(",
"val",
")",
"out_node",
"[",
"key",
"]",
"=",
"val",
"out",
"=",
"{",
"}",
"walk",
"(",
"key_mapping",
",",
"out",
")",
"return",
"out"
] |
Return a human-readable dictionary from the inibin.
Arguments:
key_mapping -- Dictionary used for conversion. Supports nesting. Every other
value should be a numeric inibin key, or a tuple of the key and a
function to apply to the result.
inibin -- The dictionary returned from reading an inibin.
string_table -- Used to translate strings. Any string with a key in
string_table will be replaced. Typically loaded from a fontconfig_*.txt.
|
[
"Return",
"a",
"human",
"-",
"readable",
"dictionary",
"from",
"the",
"inibin",
"."
] |
6abfeb22cfc6750fc95abe395cf6bd024a04f394
|
https://github.com/Met48/inibin/blob/6abfeb22cfc6750fc95abe395cf6bd024a04f394/inibin/util.py#L19-L80
|
240,803
|
Met48/inibin
|
inibin/util.py
|
_unpack_from
|
def _unpack_from(buf, format_s, count=None, little_endian=True):
"""Read a binary format from the buffer."""
if count is not None:
assert count > 0
format_s = '%i%s' % (count, format_s)
if little_endian is True:
format_s = '<' + format_s
else:
format_s = '>' + format_s
size = struct.calcsize(format_s)
res = struct.unpack_from(format_s, buf.read(size))
if count is not None:
return res
else:
return res[0]
|
python
|
def _unpack_from(buf, format_s, count=None, little_endian=True):
"""Read a binary format from the buffer."""
if count is not None:
assert count > 0
format_s = '%i%s' % (count, format_s)
if little_endian is True:
format_s = '<' + format_s
else:
format_s = '>' + format_s
size = struct.calcsize(format_s)
res = struct.unpack_from(format_s, buf.read(size))
if count is not None:
return res
else:
return res[0]
|
[
"def",
"_unpack_from",
"(",
"buf",
",",
"format_s",
",",
"count",
"=",
"None",
",",
"little_endian",
"=",
"True",
")",
":",
"if",
"count",
"is",
"not",
"None",
":",
"assert",
"count",
">",
"0",
"format_s",
"=",
"'%i%s'",
"%",
"(",
"count",
",",
"format_s",
")",
"if",
"little_endian",
"is",
"True",
":",
"format_s",
"=",
"'<'",
"+",
"format_s",
"else",
":",
"format_s",
"=",
"'>'",
"+",
"format_s",
"size",
"=",
"struct",
".",
"calcsize",
"(",
"format_s",
")",
"res",
"=",
"struct",
".",
"unpack_from",
"(",
"format_s",
",",
"buf",
".",
"read",
"(",
"size",
")",
")",
"if",
"count",
"is",
"not",
"None",
":",
"return",
"res",
"else",
":",
"return",
"res",
"[",
"0",
"]"
] |
Read a binary format from the buffer.
|
[
"Read",
"a",
"binary",
"format",
"from",
"the",
"buffer",
"."
] |
6abfeb22cfc6750fc95abe395cf6bd024a04f394
|
https://github.com/Met48/inibin/blob/6abfeb22cfc6750fc95abe395cf6bd024a04f394/inibin/util.py#L83-L101
|
240,804
|
mrstephenneal/dirutility
|
dirutility/permissions.py
|
set_permissions_mode_from_octal
|
def set_permissions_mode_from_octal(file_path, code):
"""
Set permissions for a file or directory.
:param file_path: Path to a file or directory
:param code: Permission code in absolute notation (octal)
:return:
"""
# Unpack permissions tuple
user, group, other = tuple(str(code[-3:])) if len(str(code)) > 3 else tuple(str(code))
user, group, other = int(user), int(group), int(other)
mode = get_permissions_mode(user,
'user') & get_permissions_mode(group, 'group') & get_permissions_mode(other, 'other')
os.chmod(file_path, mode)
|
python
|
def set_permissions_mode_from_octal(file_path, code):
"""
Set permissions for a file or directory.
:param file_path: Path to a file or directory
:param code: Permission code in absolute notation (octal)
:return:
"""
# Unpack permissions tuple
user, group, other = tuple(str(code[-3:])) if len(str(code)) > 3 else tuple(str(code))
user, group, other = int(user), int(group), int(other)
mode = get_permissions_mode(user,
'user') & get_permissions_mode(group, 'group') & get_permissions_mode(other, 'other')
os.chmod(file_path, mode)
|
[
"def",
"set_permissions_mode_from_octal",
"(",
"file_path",
",",
"code",
")",
":",
"# Unpack permissions tuple",
"user",
",",
"group",
",",
"other",
"=",
"tuple",
"(",
"str",
"(",
"code",
"[",
"-",
"3",
":",
"]",
")",
")",
"if",
"len",
"(",
"str",
"(",
"code",
")",
")",
">",
"3",
"else",
"tuple",
"(",
"str",
"(",
"code",
")",
")",
"user",
",",
"group",
",",
"other",
"=",
"int",
"(",
"user",
")",
",",
"int",
"(",
"group",
")",
",",
"int",
"(",
"other",
")",
"mode",
"=",
"get_permissions_mode",
"(",
"user",
",",
"'user'",
")",
"&",
"get_permissions_mode",
"(",
"group",
",",
"'group'",
")",
"&",
"get_permissions_mode",
"(",
"other",
",",
"'other'",
")",
"os",
".",
"chmod",
"(",
"file_path",
",",
"mode",
")"
] |
Set permissions for a file or directory.
:param file_path: Path to a file or directory
:param code: Permission code in absolute notation (octal)
:return:
|
[
"Set",
"permissions",
"for",
"a",
"file",
"or",
"directory",
"."
] |
339378659e2d7e09c53acfc51c5df745bb0cd517
|
https://github.com/mrstephenneal/dirutility/blob/339378659e2d7e09c53acfc51c5df745bb0cd517/dirutility/permissions.py#L82-L95
|
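For context on the `set_permissions_mode_from_octal` row above: a three-digit absolute code can also be handed to `os.chmod` directly by parsing it as base 8. This is a sketch of that standard-library route, not the dirutility API, and the path in the comment is hypothetical.

```python
import os

def chmod_from_octal(file_path, code):
    """Apply a three-digit absolute permission code such as '755' or '644'."""
    mode = int(str(code)[-3:], 8)  # '755' -> 0o755
    os.chmod(file_path, mode)

# chmod_from_octal('/tmp/example.txt', '644')  # hypothetical usage
```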
240,805
|
mrstephenneal/dirutility
|
dirutility/permissions.py
|
get_permissions_mode
|
def get_permissions_mode(permission_octal, name):
"""Retrieve a user name group permissions bitwise code."""
read = PERMISSIONS[name]['read']
write = PERMISSIONS[name]['write']
execute = PERMISSIONS[name]['execute']
# Read
if permission_octal == 4:
return read & ~write & ~execute
# Write
elif permission_octal == 2:
return ~read & write & ~execute
# Execute
elif permission_octal == 1:
return ~read & ~write & execute
# Read & Write
elif permission_octal == 6:
return read & write & ~execute
# Read & Execute
elif permission_octal == 5:
return read & ~write & execute
# Write & Execute
elif permission_octal == 3:
return ~read & write & execute
# Read, Write & Execute
elif permission_octal == 7:
return read & write & execute
# No read, write or execute by default
else:
return ~read & ~write & ~execute
|
python
|
def get_permissions_mode(permission_octal, name):
"""Retrieve a user name group permissions bitwise code."""
read = PERMISSIONS[name]['read']
write = PERMISSIONS[name]['write']
execute = PERMISSIONS[name]['execute']
# Read
if permission_octal == 4:
return read & ~write & ~execute
# Write
elif permission_octal == 2:
return ~read & write & ~execute
# Execute
elif permission_octal == 1:
return ~read & ~write & execute
# Read & Write
elif permission_octal == 6:
return read & write & ~execute
# Read & Execute
elif permission_octal == 5:
return read & ~write & execute
# Write & Execute
elif permission_octal == 3:
return ~read & write & execute
# Read, Write & Execute
elif permission_octal == 7:
return read & write & execute
# No read, write or execute by default
else:
return ~read & ~write & ~execute
|
[
"def",
"get_permissions_mode",
"(",
"permission_octal",
",",
"name",
")",
":",
"read",
"=",
"PERMISSIONS",
"[",
"name",
"]",
"[",
"'read'",
"]",
"write",
"=",
"PERMISSIONS",
"[",
"name",
"]",
"[",
"'write'",
"]",
"execute",
"=",
"PERMISSIONS",
"[",
"name",
"]",
"[",
"'execute'",
"]",
"# Read",
"if",
"permission_octal",
"==",
"4",
":",
"return",
"read",
"&",
"~",
"write",
"&",
"~",
"execute",
"# Write",
"elif",
"permission_octal",
"==",
"2",
":",
"return",
"~",
"read",
"&",
"write",
"&",
"~",
"execute",
"# Execute",
"elif",
"permission_octal",
"==",
"1",
":",
"return",
"~",
"read",
"&",
"~",
"write",
"&",
"execute",
"# Read & Write",
"elif",
"permission_octal",
"==",
"6",
":",
"return",
"read",
"&",
"write",
"&",
"~",
"execute",
"# Read & Execute",
"elif",
"permission_octal",
"==",
"5",
":",
"return",
"read",
"&",
"~",
"write",
"&",
"execute",
"# Write & Execute",
"elif",
"permission_octal",
"==",
"3",
":",
"return",
"~",
"read",
"&",
"write",
"&",
"execute",
"# Read, Write & Execute",
"elif",
"permission_octal",
"==",
"7",
":",
"return",
"read",
"&",
"write",
"&",
"execute",
"# No read, write or execute by default",
"else",
":",
"return",
"~",
"read",
"&",
"~",
"write",
"&",
"~",
"execute"
] |
Retrieve a user name group permissions bitwise code.
|
[
"Retrieve",
"a",
"user",
"name",
"group",
"permissions",
"bitwise",
"code",
"."
] |
339378659e2d7e09c53acfc51c5df745bb0cd517
|
https://github.com/mrstephenneal/dirutility/blob/339378659e2d7e09c53acfc51c5df745bb0cd517/dirutility/permissions.py#L98-L134
|
240,806
|
mrstephenneal/dirutility
|
dirutility/permissions.py
|
Permissions.revoke_access
|
def revoke_access(self):
"""Revoke all access to this path."""
reading = PERMISSIONS['user']['execute'] + PERMISSIONS['group']['execute'] + PERMISSIONS['other']['execute']
os.chmod(self.file_path, reading)
|
python
|
def revoke_access(self):
"""Revoke all access to this path."""
reading = PERMISSIONS['user']['execute'] + PERMISSIONS['group']['execute'] + PERMISSIONS['other']['execute']
os.chmod(self.file_path, reading)
|
[
"def",
"revoke_access",
"(",
"self",
")",
":",
"reading",
"=",
"PERMISSIONS",
"[",
"'user'",
"]",
"[",
"'execute'",
"]",
"+",
"PERMISSIONS",
"[",
"'group'",
"]",
"[",
"'execute'",
"]",
"+",
"PERMISSIONS",
"[",
"'other'",
"]",
"[",
"'execute'",
"]",
"os",
".",
"chmod",
"(",
"self",
".",
"file_path",
",",
"reading",
")"
] |
Revoke all access to this path.
|
[
"Revoke",
"all",
"access",
"to",
"this",
"path",
"."
] |
339378659e2d7e09c53acfc51c5df745bb0cd517
|
https://github.com/mrstephenneal/dirutility/blob/339378659e2d7e09c53acfc51c5df745bb0cd517/dirutility/permissions.py#L161-L164
|
240,807
|
donovan-duplessis/pwnurl
|
pwnurl/common/helpers.py
|
module_functions
|
def module_functions(modulestr):
""" Return ordered dictionary of all functions declared in module """
funcs = dict(inspect.getmembers(import_module(modulestr),
inspect.isfunction))
return OrderedDict(sorted(funcs.items(), key=lambda f: f[0]))
|
python
|
def module_functions(modulestr):
""" Return ordered dictionary of all functions declared in module """
funcs = dict(inspect.getmembers(import_module(modulestr),
inspect.isfunction))
return OrderedDict(sorted(funcs.items(), key=lambda f: f[0]))
|
[
"def",
"module_functions",
"(",
"modulestr",
")",
":",
"funcs",
"=",
"dict",
"(",
"inspect",
".",
"getmembers",
"(",
"import_module",
"(",
"modulestr",
")",
",",
"inspect",
".",
"isfunction",
")",
")",
"return",
"OrderedDict",
"(",
"sorted",
"(",
"funcs",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"f",
":",
"f",
"[",
"0",
"]",
")",
")"
] |
Return ordered dictionary of all functions declared in module
|
[
"Return",
"ordered",
"dictionary",
"of",
"all",
"functions",
"declared",
"in",
"module"
] |
a13e27694f738228d186ea437b4d15ef5a925a87
|
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/common/helpers.py#L32-L38
|
240,808
|
donovan-duplessis/pwnurl
|
pwnurl/common/helpers.py
|
flash_errors
|
def flash_errors(form, category='warning'):
""" Flash all form error messages """
for (field, errors) in form.errors.items():
for error in errors:
flash('{0} - {1}'.format(getattr(form, field).label.text, error),
category)
|
python
|
def flash_errors(form, category='warning'):
""" Flash all form error messages """
for (field, errors) in form.errors.items():
for error in errors:
flash('{0} - {1}'.format(getattr(form, field).label.text, error),
category)
|
[
"def",
"flash_errors",
"(",
"form",
",",
"category",
"=",
"'warning'",
")",
":",
"for",
"(",
"field",
",",
"errors",
")",
"in",
"form",
".",
"errors",
".",
"items",
"(",
")",
":",
"for",
"error",
"in",
"errors",
":",
"flash",
"(",
"'{0} - {1}'",
".",
"format",
"(",
"getattr",
"(",
"form",
",",
"field",
")",
".",
"label",
".",
"text",
",",
"error",
")",
",",
"category",
")"
] |
Flash all form error messages
|
[
"Flash",
"all",
"form",
"error",
"messages"
] |
a13e27694f738228d186ea437b4d15ef5a925a87
|
https://github.com/donovan-duplessis/pwnurl/blob/a13e27694f738228d186ea437b4d15ef5a925a87/pwnurl/common/helpers.py#L41-L47
|
240,809
|
BlackEarth/bf
|
bf/scss.py
|
SCSS.render_css
|
def render_css(self, fn=None, text=None, margin='', indent='\t'):
"""output css using the Sass processor"""
fn = fn or os.path.splitext(self.fn)[0]+'.css'
if not os.path.exists(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
curdir = os.path.abspath(os.curdir)
os.chdir(os.path.dirname(fn)) # needed in order for scss to relative @import
text = text or self.render_styles()
if text != '': text = sass.compile(string=text)
os.chdir(curdir)
return CSS(fn=fn, text=text)
|
python
|
def render_css(self, fn=None, text=None, margin='', indent='\t'):
"""output css using the Sass processor"""
fn = fn or os.path.splitext(self.fn)[0]+'.css'
if not os.path.exists(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
curdir = os.path.abspath(os.curdir)
os.chdir(os.path.dirname(fn)) # needed in order for scss to relative @import
text = text or self.render_styles()
if text != '': text = sass.compile(string=text)
os.chdir(curdir)
return CSS(fn=fn, text=text)
|
[
"def",
"render_css",
"(",
"self",
",",
"fn",
"=",
"None",
",",
"text",
"=",
"None",
",",
"margin",
"=",
"''",
",",
"indent",
"=",
"'\\t'",
")",
":",
"fn",
"=",
"fn",
"or",
"os",
".",
"path",
".",
"splitext",
"(",
"self",
".",
"fn",
")",
"[",
"0",
"]",
"+",
"'.css'",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"fn",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"fn",
")",
")",
"curdir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"curdir",
")",
"os",
".",
"chdir",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"fn",
")",
")",
"# needed in order for scss to relative @import",
"text",
"=",
"text",
"or",
"self",
".",
"render_styles",
"(",
")",
"if",
"text",
"!=",
"''",
":",
"text",
"=",
"sass",
".",
"compile",
"(",
"string",
"=",
"text",
")",
"os",
".",
"chdir",
"(",
"curdir",
")",
"return",
"CSS",
"(",
"fn",
"=",
"fn",
",",
"text",
"=",
"text",
")"
] |
output css using the Sass processor
|
[
"output",
"css",
"using",
"the",
"Sass",
"processor"
] |
376041168874bbd6dee5ccfeece4a9e553223316
|
https://github.com/BlackEarth/bf/blob/376041168874bbd6dee5ccfeece4a9e553223316/bf/scss.py#L13-L23
|
240,810
|
universalcore/unicore.hub.client
|
unicore/hub/client/userclient.py
|
User.get
|
def get(self, field):
"""
Returns the value of a user field.
:param str field:
The name of the user field.
:returns: str -- the value
"""
if field in ('username', 'uuid', 'app_data'):
return self.data[field]
else:
return self.data.get('app_data', {})[field]
|
python
|
def get(self, field):
"""
Returns the value of a user field.
:param str field:
The name of the user field.
:returns: str -- the value
"""
if field in ('username', 'uuid', 'app_data'):
return self.data[field]
else:
return self.data.get('app_data', {})[field]
|
[
"def",
"get",
"(",
"self",
",",
"field",
")",
":",
"if",
"field",
"in",
"(",
"'username'",
",",
"'uuid'",
",",
"'app_data'",
")",
":",
"return",
"self",
".",
"data",
"[",
"field",
"]",
"else",
":",
"return",
"self",
".",
"data",
".",
"get",
"(",
"'app_data'",
",",
"{",
"}",
")",
"[",
"field",
"]"
] |
Returns the value of a user field.
:param str field:
The name of the user field.
:returns: str -- the value
|
[
"Returns",
"the",
"value",
"of",
"a",
"user",
"field",
"."
] |
c706f4d31e493bd4e7ea8236780a9b271b850b8b
|
https://github.com/universalcore/unicore.hub.client/blob/c706f4d31e493bd4e7ea8236780a9b271b850b8b/unicore/hub/client/userclient.py#L79-L90
|
240,811
|
csaez/wishlib
|
wishlib/si/siwrapper.py
|
SIWrapper.auto_update
|
def auto_update(cls, function):
"""
This class method could be used as decorator on subclasses, it ensures
update method is called after function execution.
"""
def wrapper(self, *args, **kwargs):
f = function(self, *args, **kwargs)
self.update()
return f
return wrapper
|
python
|
def auto_update(cls, function):
"""
This class method could be used as decorator on subclasses, it ensures
update method is called after function execution.
"""
def wrapper(self, *args, **kwargs):
f = function(self, *args, **kwargs)
self.update()
return f
return wrapper
|
[
"def",
"auto_update",
"(",
"cls",
",",
"function",
")",
":",
"def",
"wrapper",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"f",
"=",
"function",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"update",
"(",
")",
"return",
"f",
"return",
"wrapper"
] |
This class method could be used as decorator on subclasses, it ensures
update method is called after function execution.
|
[
"This",
"class",
"method",
"could",
"be",
"used",
"as",
"decorator",
"on",
"subclasses",
"it",
"ensures",
"update",
"method",
"is",
"called",
"after",
"function",
"execution",
"."
] |
c212fa7875006a332a4cefbf69885ced9647bc2f
|
https://github.com/csaez/wishlib/blob/c212fa7875006a332a4cefbf69885ced9647bc2f/wishlib/si/siwrapper.py#L69-L79
|
240,812
|
csaez/wishlib
|
wishlib/si/siwrapper.py
|
SIWrapper._validate_key
|
def _validate_key(self, key):
"""Returns a boolean indicating if the attribute name is valid or not"""
return not any([key.startswith(i) for i in self.EXCEPTIONS])
|
python
|
def _validate_key(self, key):
"""Returns a boolean indicating if the attribute name is valid or not"""
return not any([key.startswith(i) for i in self.EXCEPTIONS])
|
[
"def",
"_validate_key",
"(",
"self",
",",
"key",
")",
":",
"return",
"not",
"any",
"(",
"[",
"key",
".",
"startswith",
"(",
"i",
")",
"for",
"i",
"in",
"self",
".",
"EXCEPTIONS",
"]",
")"
] |
Returns a boolean indicating if the attribute name is valid or not
|
[
"Returns",
"a",
"boolean",
"indicating",
"if",
"the",
"attribute",
"name",
"is",
"valid",
"or",
"not"
] |
c212fa7875006a332a4cefbf69885ced9647bc2f
|
https://github.com/csaez/wishlib/blob/c212fa7875006a332a4cefbf69885ced9647bc2f/wishlib/si/siwrapper.py#L128-L130
|
240,813
|
jalanb/pysyte
|
pysyte/colours/colour_numbers.py
|
_extract_html_hex
|
def _extract_html_hex(string):
"""Get the first 3 or 6 hex digits in the string"""
try:
hex_string = string and _hex_regexp().search(string).group(0) or ''
except AttributeError:
return None
if len(hex_string) == 3:
hex_string = hex_string[0] * 2 + hex_string[1] * 2 + hex_string[2] * 2
return hex_string
|
python
|
def _extract_html_hex(string):
"""Get the first 3 or 6 hex digits in the string"""
try:
hex_string = string and _hex_regexp().search(string).group(0) or ''
except AttributeError:
return None
if len(hex_string) == 3:
hex_string = hex_string[0] * 2 + hex_string[1] * 2 + hex_string[2] * 2
return hex_string
|
[
"def",
"_extract_html_hex",
"(",
"string",
")",
":",
"try",
":",
"hex_string",
"=",
"string",
"and",
"_hex_regexp",
"(",
")",
".",
"search",
"(",
"string",
")",
".",
"group",
"(",
"0",
")",
"or",
"''",
"except",
"AttributeError",
":",
"return",
"None",
"if",
"len",
"(",
"hex_string",
")",
"==",
"3",
":",
"hex_string",
"=",
"hex_string",
"[",
"0",
"]",
"*",
"2",
"+",
"hex_string",
"[",
"1",
"]",
"*",
"2",
"+",
"hex_string",
"[",
"2",
"]",
"*",
"2",
"return",
"hex_string"
] |
Get the first 3 or 6 hex digits in the string
|
[
"Get",
"the",
"first",
"3",
"or",
"6",
"hex",
"digits",
"in",
"the",
"string"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/colours/colour_numbers.py#L71-L79
|
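A sketch of the behaviour described in the `_extract_html_hex` row above: take the first 3- or 6-digit hex group and expand the 3-digit shorthand. The regular expression is an assumption, since the library's `_hex_regexp` helper is not shown in this row.

```python
import re

HEX_RE = re.compile(r'[0-9A-Fa-f]{6}|[0-9A-Fa-f]{3}')  # assumed pattern

def extract_html_hex(string):
    """Return the first 3- or 6-digit hex group, expanding 'abc' to 'aabbcc'."""
    match = HEX_RE.search(string) if string else None
    if not match:
        return None
    hex_string = match.group(0)
    if len(hex_string) == 3:
        hex_string = ''.join(ch * 2 for ch in hex_string)
    return hex_string

print(extract_html_hex('#fa0'))     # ffaa00
print(extract_html_hex('#00ff88'))  # 00ff88
```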
240,814
|
jalanb/pysyte
|
pysyte/colours/colour_numbers.py
|
name_to_int
|
def name_to_int(name):
"""Get a number for that colour name
if not a name, then not a number
"""
if not name:
return float('nan')
lower = name.lower()
cga_names = {s: i for i, s in enumerate(colour_names.cga())}
return cga_names.get(lower) or html_to_small_ansi(lower)
|
python
|
def name_to_int(name):
"""Get a number for that colour name
if not a name, then not a number
"""
if not name:
return float('nan')
lower = name.lower()
cga_names = {s: i for i, s in enumerate(colour_names.cga())}
return cga_names.get(lower) or html_to_small_ansi(lower)
|
[
"def",
"name_to_int",
"(",
"name",
")",
":",
"if",
"not",
"name",
":",
"return",
"float",
"(",
"'nan'",
")",
"lower",
"=",
"name",
".",
"lower",
"(",
")",
"cga_names",
"=",
"{",
"s",
":",
"i",
"for",
"i",
",",
"s",
"in",
"enumerate",
"(",
"colour_names",
".",
"cga",
"(",
")",
")",
"}",
"return",
"cga_names",
".",
"get",
"(",
"lower",
")",
"or",
"html_to_small_ansi",
"(",
"lower",
")"
] |
Get a number for that colour name
if not a name, then not a number
|
[
"Get",
"a",
"number",
"for",
"that",
"colour",
"name"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/colours/colour_numbers.py#L180-L189
|
240,815
|
antonev/authorizeme
|
authorizeme.py
|
Authorization.add_rule
|
def add_rule(self, rule_class, target_class=_Nothing):
"""Adds an authorization rule.
:param rule_class: a class of authorization rule.
:param target_class: (optional) a class
or an iterable with classes to associate the rule with.
"""
if isinstance(target_class, Iterable):
for cls in target_class:
self._rules[cls] = rule_class
else:
self._rules[target_class] = rule_class
|
python
|
def add_rule(self, rule_class, target_class=_Nothing):
"""Adds an authorization rule.
:param rule_class: a class of authorization rule.
:param target_class: (optional) a class
or an iterable with classes to associate the rule with.
"""
if isinstance(target_class, Iterable):
for cls in target_class:
self._rules[cls] = rule_class
else:
self._rules[target_class] = rule_class
|
[
"def",
"add_rule",
"(",
"self",
",",
"rule_class",
",",
"target_class",
"=",
"_Nothing",
")",
":",
"if",
"isinstance",
"(",
"target_class",
",",
"Iterable",
")",
":",
"for",
"cls",
"in",
"target_class",
":",
"self",
".",
"_rules",
"[",
"cls",
"]",
"=",
"rule_class",
"else",
":",
"self",
".",
"_rules",
"[",
"target_class",
"]",
"=",
"rule_class"
] |
Adds an authorization rule.
:param rule_class: a class of authorization rule.
:param target_class: (optional) a class
or an iterable with classes to associate the rule with.
|
[
"Adds",
"an",
"authorization",
"rule",
"."
] |
aede202b6d58d19632175ad0c159064737b2b8d9
|
https://github.com/antonev/authorizeme/blob/aede202b6d58d19632175ad0c159064737b2b8d9/authorizeme.py#L37-L48
|
240,816
|
antonev/authorizeme
|
authorizeme.py
|
Authorization.check
|
def check(self, user, permission, obj=_nothing):
"""Raises AuthorizationError when a user has no permission.
:param user: a user.
:param permission: permission to check.
:param obj: (optional) an object to check permission for.
"""
if not self.allows(user, permission, obj):
raise AuthorizationError(
'Can\'t {} object of class {}'.format(
permission, type(obj)))
|
python
|
def check(self, user, permission, obj=_nothing):
"""Raises AuthorizationError when a user has no permission.
:param user: a user.
:param permission: permission to check.
:param obj: (optional) an object to check permission for.
"""
if not self.allows(user, permission, obj):
raise AuthorizationError(
'Can\'t {} object of class {}'.format(
permission, type(obj)))
|
[
"def",
"check",
"(",
"self",
",",
"user",
",",
"permission",
",",
"obj",
"=",
"_nothing",
")",
":",
"if",
"not",
"self",
".",
"allows",
"(",
"user",
",",
"permission",
",",
"obj",
")",
":",
"raise",
"AuthorizationError",
"(",
"'Can\\'t {} object of class {}'",
".",
"format",
"(",
"permission",
",",
"type",
"(",
"obj",
")",
")",
")"
] |
Raises AuthorizationError when a user has no permission.
:param user: a user.
:param permission: permission to check.
:param obj: (optional) an object to check permission for.
|
[
"Raises",
"AuthorizationError",
"when",
"a",
"user",
"has",
"no",
"permission",
"."
] |
aede202b6d58d19632175ad0c159064737b2b8d9
|
https://github.com/antonev/authorizeme/blob/aede202b6d58d19632175ad0c159064737b2b8d9/authorizeme.py#L67-L77
|
240,817
|
antonev/authorizeme
|
authorizeme.py
|
Authorization.allows
|
def allows(self, user, permission, obj=_nothing):
"""Checks that a user has permission. Returns True or False.
:param user: a user.
:param permission: permission to check.
:param obj: (optional) an object to check permission for.
"""
rule = self._get_rule(obj)
if not isinstance(permission, basestring):
return all(
self._use_rule(rule, user, perm, obj)
for perm in permission
)
return self._use_rule(rule, user, permission, obj)
|
python
|
def allows(self, user, permission, obj=_nothing):
"""Checks that a user has permission. Returns True or False.
:param user: a user.
:param permission: permission to check.
:param obj: (optional) an object to check permission for.
"""
rule = self._get_rule(obj)
if not isinstance(permission, basestring):
return all(
self._use_rule(rule, user, perm, obj)
for perm in permission
)
return self._use_rule(rule, user, permission, obj)
|
[
"def",
"allows",
"(",
"self",
",",
"user",
",",
"permission",
",",
"obj",
"=",
"_nothing",
")",
":",
"rule",
"=",
"self",
".",
"_get_rule",
"(",
"obj",
")",
"if",
"not",
"isinstance",
"(",
"permission",
",",
"basestring",
")",
":",
"return",
"all",
"(",
"self",
".",
"_use_rule",
"(",
"rule",
",",
"user",
",",
"perm",
",",
"obj",
")",
"for",
"perm",
"in",
"permission",
")",
"return",
"self",
".",
"_use_rule",
"(",
"rule",
",",
"user",
",",
"permission",
",",
"obj",
")"
] |
Checks that a user has permission. Returns True or False.
:param user: a user.
:param permission: permission to check.
:param obj: (optional) an object to check permission for.
|
[
"Checks",
"that",
"a",
"user",
"has",
"permission",
".",
"Returns",
"True",
"or",
"False",
"."
] |
aede202b6d58d19632175ad0c159064737b2b8d9
|
https://github.com/antonev/authorizeme/blob/aede202b6d58d19632175ad0c159064737b2b8d9/authorizeme.py#L79-L94
|
240,818
|
antonev/authorizeme
|
authorizeme.py
|
Authorization.get_permissions
|
def get_permissions(self, user, obj=_nothing):
"""Returns permissions of a user.
:param user: a user.
:param obj: (optional) an object to get permissions for.
"""
rule = self._get_rule(obj)
all_permissions = (
attr[len('can_'):] for attr in dir(rule)
if attr.startswith('can_')
)
return set(
permission for permission in all_permissions
if self.allows(user, permission, obj)
)
|
python
|
def get_permissions(self, user, obj=_nothing):
"""Returns permissions of a user.
:param user: a user.
:param obj: (optional) an object to get permissions for.
"""
rule = self._get_rule(obj)
all_permissions = (
attr[len('can_'):] for attr in dir(rule)
if attr.startswith('can_')
)
return set(
permission for permission in all_permissions
if self.allows(user, permission, obj)
)
|
[
"def",
"get_permissions",
"(",
"self",
",",
"user",
",",
"obj",
"=",
"_nothing",
")",
":",
"rule",
"=",
"self",
".",
"_get_rule",
"(",
"obj",
")",
"all_permissions",
"=",
"(",
"attr",
"[",
"len",
"(",
"'can_'",
")",
":",
"]",
"for",
"attr",
"in",
"dir",
"(",
"rule",
")",
"if",
"attr",
".",
"startswith",
"(",
"'can_'",
")",
")",
"return",
"set",
"(",
"permission",
"for",
"permission",
"in",
"all_permissions",
"if",
"self",
".",
"allows",
"(",
"user",
",",
"permission",
",",
"obj",
")",
")"
] |
Returns permissions of a user.
:param user: a user.
:param obj: (optional) an object to get permissions for.
|
[
"Returns",
"permissions",
"of",
"a",
"user",
"."
] |
aede202b6d58d19632175ad0c159064737b2b8d9
|
https://github.com/antonev/authorizeme/blob/aede202b6d58d19632175ad0c159064737b2b8d9/authorizeme.py#L125-L141
|
240,819
|
marccarre/py_sak
|
py_sak/functions.py
|
log_debug
|
def log_debug(func, *args, **kwargs):
''' Wrap call of provided function with debug log statements. '''
logging.debug('Starting "%s" in thread %s...', func.__name__, current_thread())
results = func(*args, **kwargs)
logging.debug('Successfully finished "%s" in thread %s.', func.__name__, current_thread())
return results
|
python
|
def log_debug(func, *args, **kwargs):
''' Wrap call of provided function with debug log statements. '''
logging.debug('Starting "%s" in thread %s...', func.__name__, current_thread())
results = func(*args, **kwargs)
logging.debug('Successfully finished "%s" in thread %s.', func.__name__, current_thread())
return results
|
[
"def",
"log_debug",
"(",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"logging",
".",
"debug",
"(",
"'Starting \"%s\" in thread %s...'",
",",
"func",
".",
"__name__",
",",
"current_thread",
"(",
")",
")",
"results",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"logging",
".",
"debug",
"(",
"'Successfully finished \"%s\" in thread %s.'",
",",
"func",
".",
"__name__",
",",
"current_thread",
"(",
")",
")",
"return",
"results"
] |
Wrap call of provided function with debug log statements.
|
[
"Wrap",
"call",
"of",
"provided",
"function",
"with",
"debug",
"log",
"statements",
"."
] |
8ad4cafbd725d2700a31b50526804c0330d828dd
|
https://github.com/marccarre/py_sak/blob/8ad4cafbd725d2700a31b50526804c0330d828dd/py_sak/functions.py#L11-L16
|
240,820
|
MacHu-GWU/angora-project
|
angora/timelib/timewrapper.py
|
TimeWrapper.std_datetimestr
|
def std_datetimestr(self, datetimestr):
"""Reformat a datetime string to standard format.
"""
return datetime.strftime(
self.str2datetime(datetimestr), self.std_datetimeformat)
|
python
|
def std_datetimestr(self, datetimestr):
"""Reformat a datetime string to standard format.
"""
return datetime.strftime(
self.str2datetime(datetimestr), self.std_datetimeformat)
|
[
"def",
"std_datetimestr",
"(",
"self",
",",
"datetimestr",
")",
":",
"return",
"datetime",
".",
"strftime",
"(",
"self",
".",
"str2datetime",
"(",
"datetimestr",
")",
",",
"self",
".",
"std_datetimeformat",
")"
] |
Reformat a datetime string to standard format.
|
[
"Reformat",
"a",
"datetime",
"string",
"to",
"standard",
"format",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/timelib/timewrapper.py#L271-L275
|
240,821
|
MacHu-GWU/angora-project
|
angora/timelib/timewrapper.py
|
TimeWrapper._freq_parser
|
def _freq_parser(self, freq):
"""
day, hour, min, sec,
"""
freq = freq.lower().strip()
try:
if "day" in freq:
freq = freq.replace("day", "")
return timedelta(days=int(freq))
elif "hour" in freq:
freq = freq.replace("hour", "")
return timedelta(hours=int(freq))
elif "min" in freq:
freq = freq.replace("min", "")
return timedelta(minutes=int(freq))
elif "sec" in freq:
freq = freq.replace("sec", "")
return timedelta(seconds=int(freq))
else:
raise Exception("%s is invalid format. use day, hour, min, sec." % freq)
except:
raise Exception("%s is invalid format. use day, hour, min, sec." % freq)
|
python
|
def _freq_parser(self, freq):
"""
day, hour, min, sec,
"""
freq = freq.lower().strip()
try:
if "day" in freq:
freq = freq.replace("day", "")
return timedelta(days=int(freq))
elif "hour" in freq:
freq = freq.replace("hour", "")
return timedelta(hours=int(freq))
elif "min" in freq:
freq = freq.replace("min", "")
return timedelta(minutes=int(freq))
elif "sec" in freq:
freq = freq.replace("sec", "")
return timedelta(seconds=int(freq))
else:
raise Exception("%s is invalid format. use day, hour, min, sec." % freq)
except:
raise Exception("%s is invalid format. use day, hour, min, sec." % freq)
|
[
"def",
"_freq_parser",
"(",
"self",
",",
"freq",
")",
":",
"freq",
"=",
"freq",
".",
"lower",
"(",
")",
".",
"strip",
"(",
")",
"try",
":",
"if",
"\"day\"",
"in",
"freq",
":",
"freq",
"=",
"freq",
".",
"replace",
"(",
"\"day\"",
",",
"\"\"",
")",
"return",
"timedelta",
"(",
"days",
"=",
"int",
"(",
"freq",
")",
")",
"elif",
"\"hour\"",
"in",
"freq",
":",
"freq",
"=",
"freq",
".",
"replace",
"(",
"\"hour\"",
",",
"\"\"",
")",
"return",
"timedelta",
"(",
"hours",
"=",
"int",
"(",
"freq",
")",
")",
"elif",
"\"min\"",
"in",
"freq",
":",
"freq",
"=",
"freq",
".",
"replace",
"(",
"\"min\"",
",",
"\"\"",
")",
"return",
"timedelta",
"(",
"minutes",
"=",
"int",
"(",
"freq",
")",
")",
"elif",
"\"sec\"",
"in",
"freq",
":",
"freq",
"=",
"freq",
".",
"replace",
"(",
"\"sec\"",
",",
"\"\"",
")",
"return",
"timedelta",
"(",
"seconds",
"=",
"int",
"(",
"freq",
")",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"%s is invalid format. use day, hour, min, sec.\"",
"%",
"freq",
")",
"except",
":",
"raise",
"Exception",
"(",
"\"%s is invalid format. use day, hour, min, sec.\"",
"%",
"freq",
")"
] |
day, hour, min, sec,
|
[
"day",
"hour",
"min",
"sec"
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/timelib/timewrapper.py#L368-L389
|
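The `_freq_parser` row above accepts strings such as '1day', '12 hour', '30min' or '10sec'. A compact standalone re-expression of that mapping onto `timedelta`, useful for trying inputs without constructing a TimeWrapper; it is a sketch, not the angora API.

```python
from datetime import timedelta

def freq_to_timedelta(freq):
    """Parse strings such as '1day', '12 hour', '30min' or '10sec'."""
    freq = freq.lower().strip()
    for unit, kwarg in (('day', 'days'), ('hour', 'hours'),
                        ('min', 'minutes'), ('sec', 'seconds')):
        if unit in freq:
            return timedelta(**{kwarg: int(freq.replace(unit, ''))})
    raise ValueError('%s is invalid format. use day, hour, min, sec.' % freq)

print(freq_to_timedelta('1day'))   # 1 day, 0:00:00
print(freq_to_timedelta('30min'))  # 0:30:00
```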
240,822
|
the01/python-paps
|
paps/si/app/sensorClient.py
|
SensorClient.unjoin
|
def unjoin(self):
"""
Leave the local audience
:rtype: None
:raises SensorJoinException: Failed to leave
"""
self.debug("()")
if self._joined.is_set():
packet = APPUnjoinMessage(device_id=Id.NOT_SET)
self._send_packet(self._server_ip, self._server_port, packet)
self._joined.clear()
self.info("Left the audience")
|
python
|
def unjoin(self):
"""
Leave the local audience
:rtype: None
:raises SensorJoinException: Failed to leave
"""
self.debug("()")
if self._joined.is_set():
packet = APPUnjoinMessage(device_id=Id.NOT_SET)
self._send_packet(self._server_ip, self._server_port, packet)
self._joined.clear()
self.info("Left the audience")
|
[
"def",
"unjoin",
"(",
"self",
")",
":",
"self",
".",
"debug",
"(",
"\"()\"",
")",
"if",
"self",
".",
"_joined",
".",
"is_set",
"(",
")",
":",
"packet",
"=",
"APPUnjoinMessage",
"(",
"device_id",
"=",
"Id",
".",
"NOT_SET",
")",
"self",
".",
"_send_packet",
"(",
"self",
".",
"_server_ip",
",",
"self",
".",
"_server_port",
",",
"packet",
")",
"self",
".",
"_joined",
".",
"clear",
"(",
")",
"self",
".",
"info",
"(",
"\"Left the audience\"",
")"
] |
Leave the local audience
:rtype: None
:raises SensorJoinException: Failed to leave
|
[
"Leave",
"the",
"local",
"audience"
] |
2dde5a71913e4c7b22901cf05c6ecedd890919c4
|
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/sensorClient.py#L104-L116
|
240,823
|
the01/python-paps
|
paps/si/app/sensorClient.py
|
SensorClient.person_update
|
def person_update(self, people):
"""
Update the status of people
:param people: All people of this sensor
:type people: list[paps.person.Person]
:rtype: None
:raises SensorUpdateException: Failed to update
"""
packet = APPUpdateMessage(device_id=Id.NOT_SET, people=people)
self._send_packet(
self._server_ip, self._server_port, packet,
acknowledge_packet=False
)
|
python
|
def person_update(self, people):
"""
Update the status of people
:param people: All people of this sensor
:type people: list[paps.person.Person]
:rtype: None
:raises SensorUpdateException: Failed to update
"""
packet = APPUpdateMessage(device_id=Id.NOT_SET, people=people)
self._send_packet(
self._server_ip, self._server_port, packet,
acknowledge_packet=False
)
|
[
"def",
"person_update",
"(",
"self",
",",
"people",
")",
":",
"packet",
"=",
"APPUpdateMessage",
"(",
"device_id",
"=",
"Id",
".",
"NOT_SET",
",",
"people",
"=",
"people",
")",
"self",
".",
"_send_packet",
"(",
"self",
".",
"_server_ip",
",",
"self",
".",
"_server_port",
",",
"packet",
",",
"acknowledge_packet",
"=",
"False",
")"
] |
Update the status of people
:param people: All people of this sensor
:type people: list[paps.person.Person]
:rtype: None
:raises SensorUpdateException: Failed to update
|
[
"Update",
"the",
"status",
"of",
"people"
] |
2dde5a71913e4c7b22901cf05c6ecedd890919c4
|
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/sensorClient.py#L149-L163
|
240,824
|
the01/python-paps
|
paps/si/app/sensorClient.py
|
SensorClient._do_config_packet
|
def _do_config_packet(self, packet, ip, port):
"""
Apply config to this instance
:param packet: Packet with config
:type packet: paps.si.app.message.APPMessage
:param ip: Ip of server
:type ip: str
:param port: Port of server
:type port: int
:rtype: None
"""
self.debug("()")
if packet.header.device_id != Id.SERVER:
# Only allow config packets from server
self.warning("Config packets only allowed from server")
return
try:
config = packet.payload
self.debug(u"{}".format(config))
if not isinstance(config, dict):
self.error("Wrong payload type")
raise RuntimeError("Wrong type")
config.setdefault("server_ip", ip)
config.setdefault("server_port", port)
self.config(config)
self._joined.set()
except:
self.exception("Failed to configure")
self.error(u"Faulty packet {}".format(format_data(packet.payload)))
return
|
python
|
def _do_config_packet(self, packet, ip, port):
"""
Apply config to this instance
:param packet: Packet with config
:type packet: paps.si.app.message.APPMessage
:param ip: Ip of server
:type ip: str
:param port: Port of server
:type port: int
:rtype: None
"""
self.debug("()")
if packet.header.device_id != Id.SERVER:
# Only allow config packets from server
self.warning("Config packets only allowed from server")
return
try:
config = packet.payload
self.debug(u"{}".format(config))
if not isinstance(config, dict):
self.error("Wrong payload type")
raise RuntimeError("Wrong type")
config.setdefault("server_ip", ip)
config.setdefault("server_port", port)
self.config(config)
self._joined.set()
except:
self.exception("Failed to configure")
self.error(u"Faulty packet {}".format(format_data(packet.payload)))
return
|
[
"def",
"_do_config_packet",
"(",
"self",
",",
"packet",
",",
"ip",
",",
"port",
")",
":",
"self",
".",
"debug",
"(",
"\"()\"",
")",
"if",
"packet",
".",
"header",
".",
"device_id",
"!=",
"Id",
".",
"SERVER",
":",
"# Only allow config packets from server",
"self",
".",
"warning",
"(",
"\"Config packets only allowed from server\"",
")",
"return",
"try",
":",
"config",
"=",
"packet",
".",
"payload",
"self",
".",
"debug",
"(",
"u\"{}\"",
".",
"format",
"(",
"config",
")",
")",
"if",
"not",
"isinstance",
"(",
"config",
",",
"dict",
")",
":",
"self",
".",
"error",
"(",
"\"Wrong payload type\"",
")",
"raise",
"RuntimeError",
"(",
"\"Wrong type\"",
")",
"config",
".",
"setdefault",
"(",
"\"server_ip\"",
",",
"ip",
")",
"config",
".",
"setdefault",
"(",
"\"server_port\"",
",",
"port",
")",
"self",
".",
"config",
"(",
"config",
")",
"self",
".",
"_joined",
".",
"set",
"(",
")",
"except",
":",
"self",
".",
"exception",
"(",
"\"Failed to configure\"",
")",
"self",
".",
"error",
"(",
"u\"Faulty packet {}\"",
".",
"format",
"(",
"format_data",
"(",
"packet",
".",
"payload",
")",
")",
")",
"return"
] |
Apply config to this instance
:param packet: Packet with config
:type packet: paps.si.app.message.APPMessage
:param ip: Ip of server
:type ip: str
:param port: Port of server
:type port: int
:rtype: None
|
[
"Apply",
"config",
"to",
"this",
"instance"
] |
2dde5a71913e4c7b22901cf05c6ecedd890919c4
|
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/sensorClient.py#L184-L216
|
240,825
|
ArtoLabs/SimpleSteem
|
simplesteem/util.py
|
Util.goodnode
|
def goodnode(self, nodelist):
''' Goes through the provided list
and returns the first server node
that does not return an error.
'''
l = len(nodelist)
for n in range(self.current_node(l), l):
self.msg.message("Trying node " + str(n) + ": " + nodelist[n])
try:
req = urllib.request.Request(url=nodelist[n])
urllib.request.urlopen(req)
except HTTPError as e:
self.msg.error_message(e)
self.currentnode = int(self.currentnode) + 1
else:
self.msg.message("Using " + nodelist[n])
return nodelist[n]
|
python
|
def goodnode(self, nodelist):
''' Goes through the provided list
and returns the first server node
that does not return an error.
'''
l = len(nodelist)
for n in range(self.current_node(l), l):
self.msg.message("Trying node " + str(n) + ": " + nodelist[n])
try:
req = urllib.request.Request(url=nodelist[n])
urllib.request.urlopen(req)
except HTTPError as e:
self.msg.error_message(e)
self.currentnode = int(self.currentnode) + 1
else:
self.msg.message("Using " + nodelist[n])
return nodelist[n]
|
[
"def",
"goodnode",
"(",
"self",
",",
"nodelist",
")",
":",
"l",
"=",
"len",
"(",
"nodelist",
")",
"for",
"n",
"in",
"range",
"(",
"self",
".",
"current_node",
"(",
"l",
")",
",",
"l",
")",
":",
"self",
".",
"msg",
".",
"message",
"(",
"\"Trying node \"",
"+",
"str",
"(",
"n",
")",
"+",
"\": \"",
"+",
"nodelist",
"[",
"n",
"]",
")",
"try",
":",
"req",
"=",
"urllib",
".",
"request",
".",
"Request",
"(",
"url",
"=",
"nodelist",
"[",
"n",
"]",
")",
"urllib",
".",
"request",
".",
"urlopen",
"(",
"req",
")",
"except",
"HTTPError",
"as",
"e",
":",
"self",
".",
"msg",
".",
"error_message",
"(",
"e",
")",
"self",
".",
"currentnode",
"=",
"int",
"(",
"self",
".",
"currentnode",
")",
"+",
"1",
"else",
":",
"self",
".",
"msg",
".",
"message",
"(",
"\"Using \"",
"+",
"nodelist",
"[",
"n",
"]",
")",
"return",
"nodelist",
"[",
"n",
"]"
] |
Goes through the provided list
and returns the first server node
that does not return an error.
|
[
"Goes",
"through",
"the",
"provided",
"list",
"and",
"returns",
"the",
"first",
"server",
"node",
"that",
"does",
"not",
"return",
"an",
"error",
"."
] |
ce8be0ae81f8878b460bc156693f1957f7dd34a3
|
https://github.com/ArtoLabs/SimpleSteem/blob/ce8be0ae81f8878b460bc156693f1957f7dd34a3/simplesteem/util.py#L52-L68
|
240,826
|
ArtoLabs/SimpleSteem
|
simplesteem/util.py
|
Util.permlink
|
def permlink(self, identifier):
''' Deconstructs an identifier into
an account name and permlink
'''
temp = identifier.split("@")
temp2 = temp[1].split("/")
return [temp2[0], temp2[1]]
|
python
|
def permlink(self, identifier):
''' Deconstructs an identifier into
an account name and permlink
'''
temp = identifier.split("@")
temp2 = temp[1].split("/")
return [temp2[0], temp2[1]]
|
[
"def",
"permlink",
"(",
"self",
",",
"identifier",
")",
":",
"temp",
"=",
"identifier",
".",
"split",
"(",
"\"@\"",
")",
"temp2",
"=",
"temp",
"[",
"1",
"]",
".",
"split",
"(",
"\"/\"",
")",
"return",
"[",
"temp2",
"[",
"0",
"]",
",",
"temp2",
"[",
"1",
"]",
"]"
] |
Deconstructs an identifier into
an account name and permlink
|
[
"Deconstructs",
"an",
"identifier",
"into",
"an",
"account",
"name",
"and",
"permlink"
] |
ce8be0ae81f8878b460bc156693f1957f7dd34a3
|
https://github.com/ArtoLabs/SimpleSteem/blob/ce8be0ae81f8878b460bc156693f1957f7dd34a3/simplesteem/util.py#L78-L84
|
240,827
|
ArtoLabs/SimpleSteem
|
simplesteem/util.py
|
Util.scale_vote
|
def scale_vote(self, value):
''' Scales a vote value between 1 and 100
to 150 to 10000 as required by Steem-Python
for certain method calls
'''
value = int(value) * 100
if value < 100:
value = 100
if value > 10000:
value = 10000
return value
|
python
|
def scale_vote(self, value):
''' Scales a vote value between 1 and 100
to 150 to 10000 as required by Steem-Python
for certain method calls
'''
value = int(value) * 100
if value < 100:
value = 100
if value > 10000:
value = 10000
return value
|
[
"def",
"scale_vote",
"(",
"self",
",",
"value",
")",
":",
"value",
"=",
"int",
"(",
"value",
")",
"*",
"100",
"if",
"value",
"<",
"100",
":",
"value",
"=",
"100",
"if",
"value",
">",
"10000",
":",
"value",
"=",
"10000",
"return",
"value"
] |
Scales a vote value between 1 and 100
to 150 to 10000 as required by Steem-Python
for certain method calls
|
[
"Scales",
"a",
"vote",
"value",
"between",
"1",
"and",
"100",
"to",
"150",
"to",
"10000",
"as",
"required",
"by",
"Steem",
"-",
"Python",
"for",
"certain",
"method",
"calls"
] |
ce8be0ae81f8878b460bc156693f1957f7dd34a3
|
https://github.com/ArtoLabs/SimpleSteem/blob/ce8be0ae81f8878b460bc156693f1957f7dd34a3/simplesteem/util.py#L100-L110
|
240,828
|
ArtoLabs/SimpleSteem
|
simplesteem/util.py
|
Util.calc_regenerated
|
def calc_regenerated(self, lastvotetime):
''' Uses math formula to calculate the amount
of steem power that would have been regenerated
given a certain datetime object
'''
delta = datetime.utcnow() - datetime.strptime(lastvotetime,'%Y-%m-%dT%H:%M:%S')
td = delta.days
ts = delta.seconds
tt = (td * 86400) + ts
return tt * 10000 / 86400 / 5
|
python
|
def calc_regenerated(self, lastvotetime):
''' Uses math formula to calculate the amount
of steem power that would have been regenerated
given a certain datetime object
'''
delta = datetime.utcnow() - datetime.strptime(lastvotetime,'%Y-%m-%dT%H:%M:%S')
td = delta.days
ts = delta.seconds
tt = (td * 86400) + ts
return tt * 10000 / 86400 / 5
|
[
"def",
"calc_regenerated",
"(",
"self",
",",
"lastvotetime",
")",
":",
"delta",
"=",
"datetime",
".",
"utcnow",
"(",
")",
"-",
"datetime",
".",
"strptime",
"(",
"lastvotetime",
",",
"'%Y-%m-%dT%H:%M:%S'",
")",
"td",
"=",
"delta",
".",
"days",
"ts",
"=",
"delta",
".",
"seconds",
"tt",
"=",
"(",
"td",
"*",
"86400",
")",
"+",
"ts",
"return",
"tt",
"*",
"10000",
"/",
"86400",
"/",
"5"
] |
Uses math formula to calculate the amount
of steem power that would have been regenerated
given a certain datetime object
|
[
"Uses",
"math",
"formula",
"to",
"calculate",
"the",
"amount",
"of",
"steem",
"power",
"that",
"would",
"have",
"been",
"regenerated",
"given",
"a",
"certain",
"datetime",
"object"
] |
ce8be0ae81f8878b460bc156693f1957f7dd34a3
|
https://github.com/ArtoLabs/SimpleSteem/blob/ce8be0ae81f8878b460bc156693f1957f7dd34a3/simplesteem/util.py#L113-L122
|
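The arithmetic in the `calc_regenerated` row above follows the Steem convention that full voting power (10000 on its internal scale) regenerates linearly over 5 days, so elapsed seconds are scaled by 10000 / 86400 / 5. A small worked sketch with a hypothetical helper name:

```python
from datetime import datetime, timedelta

def regenerated_power(last_vote_time, now):
    """Voting power (0-10000 scale) regained since the last vote,
    assuming full power regenerates over 5 days."""
    elapsed = (now - last_vote_time).total_seconds()
    return elapsed * 10000 / 86400 / 5

last = datetime(2024, 1, 1)
print(regenerated_power(last, last + timedelta(days=1)))  # 2000.0, i.e. 20% per day
```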
240,829
|
ArtoLabs/SimpleSteem
|
simplesteem/util.py
|
Util.retry
|
def retry(self, msg, e, retry_num, waittime):
''' Creates the retry message and waits the
given default time when a method call fails
or a server does not respond appropriately.
'''
self.msg.error_message(msg)
self.msg.error_message(e)
self.msg.error_message("Attempt number " + str(retry_num) + ". Retrying in " + str(waittime) + " seconds.")
time.sleep(waittime)
|
python
|
def retry(self, msg, e, retry_num, waittime):
''' Creates the retry message and waits the
given default time when a method call fails
or a server does not respond appropriately.
'''
self.msg.error_message(msg)
self.msg.error_message(e)
self.msg.error_message("Attempt number " + str(retry_num) + ". Retrying in " + str(waittime) + " seconds.")
time.sleep(waittime)
|
[
"def",
"retry",
"(",
"self",
",",
"msg",
",",
"e",
",",
"retry_num",
",",
"waittime",
")",
":",
"self",
".",
"msg",
".",
"error_message",
"(",
"msg",
")",
"self",
".",
"msg",
".",
"error_message",
"(",
"e",
")",
"self",
".",
"msg",
".",
"error_message",
"(",
"\"Attempt number \"",
"+",
"str",
"(",
"retry_num",
")",
"+",
"\". Retrying in \"",
"+",
"str",
"(",
"waittime",
")",
"+",
"\" seconds.\"",
")",
"time",
".",
"sleep",
"(",
"waittime",
")"
] |
Creates the retry message and waits the
given default time when a method call fails
or a server does not respond appropriately.
|
[
"Creates",
"the",
"retry",
"message",
"and",
"waits",
"the",
"given",
"default",
"time",
"when",
"a",
"method",
"call",
"fails",
"or",
"a",
"server",
"does",
"not",
"respond",
"appropriately",
"."
] |
ce8be0ae81f8878b460bc156693f1957f7dd34a3
|
https://github.com/ArtoLabs/SimpleSteem/blob/ce8be0ae81f8878b460bc156693f1957f7dd34a3/simplesteem/util.py#L125-L133
|
240,830
|
sunjinopensource/asynmsg
|
asynmsg.py
|
_Session.send_message
|
def send_message(self, msg_id, msg_data):
"""msg_data can be None"""
if self._error.has_error() or self._force_close_time > 0:
return False
byte_msg = self.message_packer.pack(msg_id, msg_data)
length = struct.calcsize(self.__class__.message_packer.size_fmt) + len(byte_msg)
if length > self.__class__.max_message_size:
raise MessageSizeOverflowError(msg_id, length, self.__class__.max_message_size)
self._out_buffer += struct.pack(self.__class__.message_packer.size_fmt, length)
self._out_buffer += byte_msg
return True
|
python
|
def send_message(self, msg_id, msg_data):
"""msg_data can be None"""
if self._error.has_error() or self._force_close_time > 0:
return False
byte_msg = self.message_packer.pack(msg_id, msg_data)
length = struct.calcsize(self.__class__.message_packer.size_fmt) + len(byte_msg)
if length > self.__class__.max_message_size:
raise MessageSizeOverflowError(msg_id, length, self.__class__.max_message_size)
self._out_buffer += struct.pack(self.__class__.message_packer.size_fmt, length)
self._out_buffer += byte_msg
return True
|
[
"def",
"send_message",
"(",
"self",
",",
"msg_id",
",",
"msg_data",
")",
":",
"if",
"self",
".",
"_error",
".",
"has_error",
"(",
")",
"or",
"self",
".",
"_force_close_time",
">",
"0",
":",
"return",
"False",
"byte_msg",
"=",
"self",
".",
"message_packer",
".",
"pack",
"(",
"msg_id",
",",
"msg_data",
")",
"length",
"=",
"struct",
".",
"calcsize",
"(",
"self",
".",
"__class__",
".",
"message_packer",
".",
"size_fmt",
")",
"+",
"len",
"(",
"byte_msg",
")",
"if",
"length",
">",
"self",
".",
"__class__",
".",
"max_message_size",
":",
"raise",
"MessageSizeOverflowError",
"(",
"msg_id",
",",
"length",
",",
"self",
".",
"__class__",
".",
"max_message_size",
")",
"self",
".",
"_out_buffer",
"+=",
"struct",
".",
"pack",
"(",
"self",
".",
"__class__",
".",
"message_packer",
".",
"size_fmt",
",",
"length",
")",
"self",
".",
"_out_buffer",
"+=",
"byte_msg",
"return",
"True"
] |
msg_data can be None
|
[
"msg_data",
"can",
"be",
"None"
] |
9c1d14f859cc6702446c3bb30b9916280429bd1d
|
https://github.com/sunjinopensource/asynmsg/blob/9c1d14f859cc6702446c3bb30b9916280429bd1d/asynmsg.py#L495-L508
|
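The `send_message` row above writes a size prefix with `struct.pack(size_fmt, length)`, where the length counts the prefix itself plus the packed message. A minimal sketch of that framing, assuming a 4-byte network-order prefix (`'!I'`) since the real `message_packer.size_fmt` is not shown:

```python
import struct

SIZE_FMT = '!I'  # assumption: 4-byte unsigned, network byte order

def frame(payload: bytes) -> bytes:
    """Prefix a payload with the total frame length (prefix included)."""
    length = struct.calcsize(SIZE_FMT) + len(payload)
    return struct.pack(SIZE_FMT, length) + payload

def unframe(buffer: bytes) -> bytes:
    """Strip the length prefix written by frame()."""
    prefix = struct.calcsize(SIZE_FMT)
    (length,) = struct.unpack(SIZE_FMT, buffer[:prefix])
    return buffer[prefix:length]

print(unframe(frame(b'hello')))  # b'hello'
```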
240,831
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
abspath
|
def abspath(relpath, root=None):
"""Returns an absolute path based on the given root and relative path."""
root = root or cwd()
if op.isfile(root):
root = op.dirname(root)
return op.abspath(op.join(root, relpath))
|
python
|
def abspath(relpath, root=None):
"""Returns an absolute path based on the given root and relative path."""
root = root or cwd()
if op.isfile(root):
root = op.dirname(root)
return op.abspath(op.join(root, relpath))
|
[
"def",
"abspath",
"(",
"relpath",
",",
"root",
"=",
"None",
")",
":",
"root",
"=",
"root",
"or",
"cwd",
"(",
")",
"if",
"op",
".",
"isfile",
"(",
"root",
")",
":",
"root",
"=",
"op",
".",
"dirname",
"(",
"root",
")",
"return",
"op",
".",
"abspath",
"(",
"op",
".",
"join",
"(",
"root",
",",
"relpath",
")",
")"
] |
Returns an absolute path based on the given root and relative path.
|
[
"Returns",
"an",
"absolute",
"path",
"based",
"on",
"the",
"given",
"root",
"and",
"relative",
"path",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L203-L208
|
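A quick usage sketch for the abspath helper above. The import path auxly.filesys is assumed from the repository layout (lib/auxly/filesys.py), the paths are purely illustrative, and the output shown is for a POSIX system.

from auxly.filesys import abspath

# Resolve a relative path against an explicit root directory.
print(abspath("data/config.yml", root="/tmp/project"))  # -> /tmp/project/data/config.yml

# Without a root, the current working directory is used as the base.
print(abspath("data/config.yml"))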
240,832
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
makedirs
|
def makedirs(path, ignore_extsep=False):
"""Makes all directories required for given path; returns true if successful
and false otherwise.
**Examples**:
::
auxly.filesys.makedirs("bar/baz")
"""
if not ignore_extsep and op.basename(path).find(os.extsep) > -1:
path = op.dirname(path)
try:
os.makedirs(path)
except:
return False
return True
|
python
|
def makedirs(path, ignore_extsep=False):
"""Makes all directories required for given path; returns true if successful
and false otherwise.
**Examples**:
::
auxly.filesys.makedirs("bar/baz")
"""
if not ignore_extsep and op.basename(path).find(os.extsep) > -1:
path = op.dirname(path)
try:
os.makedirs(path)
except:
return False
return True
|
[
"def",
"makedirs",
"(",
"path",
",",
"ignore_extsep",
"=",
"False",
")",
":",
"if",
"not",
"ignore_extsep",
"and",
"op",
".",
"basename",
"(",
"path",
")",
".",
"find",
"(",
"os",
".",
"extsep",
")",
">",
"-",
"1",
":",
"path",
"=",
"op",
".",
"dirname",
"(",
"path",
")",
"try",
":",
"os",
".",
"makedirs",
"(",
"path",
")",
"except",
":",
"return",
"False",
"return",
"True"
] |
Makes all directories required for given path; returns true if successful
and false otherwise.
**Examples**:
::
auxly.filesys.makedirs("bar/baz")
|
[
"Makes",
"all",
"directories",
"required",
"for",
"given",
"path",
";",
"returns",
"true",
"if",
"successful",
"and",
"false",
"otherwise",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L223-L237
|
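A small sketch expanding the docstring example for makedirs above; it assumes the same auxly.filesys import path and a writable working directory.

import os.path as op
from auxly.filesys import makedirs

print(makedirs("bar/baz"))   # True on the first run; False if creation fails
print(op.isdir("bar/baz"))   # True

# A basename containing os.extsep is treated as a file name, so only the
# parent directories are created (they already exist here, so this returns False).
print(makedirs("bar/baz/report.txt"))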
240,833
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
delete
|
def delete(path, regex=None, recurse=False, test=False):
"""Deletes the file or directory at `path`. If `path` is a directory and
`regex` is provided, matching files will be deleted; `recurse` controls
whether subdirectories are recursed. A list of deleted items is returned.
If `test` is true, nothing will be deleted and a list of items that would
have been deleted is returned.
"""
deleted = []
if op.isfile(path):
if not test: os.remove(path)
else: return [path]
return [] if op.exists(path) else [path]
elif op.isdir(path):
if regex:
for r,ds,fs in os.walk(path):
for i in fs:
if _is_match(regex, i):
deleted += delete(op.join(r,i), test=test)
if not recurse:
break
else:
if not test: shutil.rmtree(path)
else: return [path]
return [] if op.exists(path) else [path]
return deleted
|
python
|
def delete(path, regex=None, recurse=False, test=False):
"""Deletes the file or directory at `path`. If `path` is a directory and
`regex` is provided, matching files will be deleted; `recurse` controls
whether subdirectories are recursed. A list of deleted items is returned.
If `test` is true, nothing will be deleted and a list of items that would
have been deleted is returned.
"""
deleted = []
if op.isfile(path):
if not test: os.remove(path)
else: return [path]
return [] if op.exists(path) else [path]
elif op.isdir(path):
if regex:
for r,ds,fs in os.walk(path):
for i in fs:
if _is_match(regex, i):
deleted += delete(op.join(r,i), test=test)
if not recurse:
break
else:
if not test: shutil.rmtree(path)
else: return [path]
return [] if op.exists(path) else [path]
return deleted
|
[
"def",
"delete",
"(",
"path",
",",
"regex",
"=",
"None",
",",
"recurse",
"=",
"False",
",",
"test",
"=",
"False",
")",
":",
"deleted",
"=",
"[",
"]",
"if",
"op",
".",
"isfile",
"(",
"path",
")",
":",
"if",
"not",
"test",
":",
"os",
".",
"remove",
"(",
"path",
")",
"else",
":",
"return",
"[",
"path",
"]",
"return",
"[",
"]",
"if",
"op",
".",
"exists",
"(",
"path",
")",
"else",
"[",
"path",
"]",
"elif",
"op",
".",
"isdir",
"(",
"path",
")",
":",
"if",
"regex",
":",
"for",
"r",
",",
"ds",
",",
"fs",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"for",
"i",
"in",
"fs",
":",
"if",
"_is_match",
"(",
"regex",
",",
"i",
")",
":",
"deleted",
"+=",
"delete",
"(",
"op",
".",
"join",
"(",
"r",
",",
"i",
")",
",",
"test",
"=",
"test",
")",
"if",
"not",
"recurse",
":",
"break",
"else",
":",
"if",
"not",
"test",
":",
"shutil",
".",
"rmtree",
"(",
"path",
")",
"else",
":",
"return",
"[",
"path",
"]",
"return",
"[",
"]",
"if",
"op",
".",
"exists",
"(",
"path",
")",
"else",
"[",
"path",
"]",
"return",
"deleted"
] |
Deletes the file or directory at `path`. If `path` is a directory and
`regex` is provided, matching files will be deleted; `recurse` controls
whether subdirectories are recursed. A list of deleted items is returned.
If `test` is true, nothing will be deleted and a list of items that would
have been deleted is returned.
|
[
"Deletes",
"the",
"file",
"or",
"directory",
"at",
"path",
".",
"If",
"path",
"is",
"a",
"directory",
"and",
"regex",
"is",
"provided",
"matching",
"files",
"will",
"be",
"deleted",
";",
"recurse",
"controls",
"whether",
"subdirectories",
"are",
"recursed",
".",
"A",
"list",
"of",
"deleted",
"items",
"is",
"returned",
".",
"If",
"test",
"is",
"true",
"nothing",
"will",
"be",
"deleted",
"and",
"a",
"list",
"of",
"items",
"that",
"would",
"have",
"been",
"deleted",
"is",
"returned",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L239-L263
|
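A hedged usage sketch for delete above, assuming a logs/ directory with some *.log files exists. The exact regex semantics depend on the module's _is_match helper (not shown), but a pattern like the one below works whether it is matched or searched against the name.

from auxly.filesys import delete

# Dry run: list the .log files that would be removed from ./logs (top level only).
print(delete("logs", regex=r".*\.log$", test=True))

# Actually remove them, recursing into subdirectories as well.
print(delete("logs", regex=r".*\.log$", recurse=True))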
240,834
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
walkfiles
|
def walkfiles(startdir, regex=None, recurse=True):
"""Yields the absolute paths of files found within the given start
directory. Can optionally filter paths using a regex pattern."""
for r,_,fs in os.walk(startdir):
if not recurse and startdir != r:
return
for f in fs:
path = op.abspath(op.join(r,f))
if regex and not _is_match(regex, path):
continue
if op.isfile(path):
yield path
|
python
|
def walkfiles(startdir, regex=None, recurse=True):
"""Yields the absolute paths of files found within the given start
directory. Can optionally filter paths using a regex pattern."""
for r,_,fs in os.walk(startdir):
if not recurse and startdir != r:
return
for f in fs:
path = op.abspath(op.join(r,f))
if regex and not _is_match(regex, path):
continue
if op.isfile(path):
yield path
|
[
"def",
"walkfiles",
"(",
"startdir",
",",
"regex",
"=",
"None",
",",
"recurse",
"=",
"True",
")",
":",
"for",
"r",
",",
"_",
",",
"fs",
"in",
"os",
".",
"walk",
"(",
"startdir",
")",
":",
"if",
"not",
"recurse",
"and",
"startdir",
"!=",
"r",
":",
"return",
"for",
"f",
"in",
"fs",
":",
"path",
"=",
"op",
".",
"abspath",
"(",
"op",
".",
"join",
"(",
"r",
",",
"f",
")",
")",
"if",
"regex",
"and",
"not",
"_is_match",
"(",
"regex",
",",
"path",
")",
":",
"continue",
"if",
"op",
".",
"isfile",
"(",
"path",
")",
":",
"yield",
"path"
] |
Yields the absolute paths of files found within the given start
directory. Can optionally filter paths using a regex pattern.
|
[
"Yields",
"the",
"absolute",
"paths",
"of",
"files",
"found",
"within",
"the",
"given",
"start",
"directory",
".",
"Can",
"optionally",
"filter",
"paths",
"using",
"a",
"regex",
"pattern",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L265-L276
|
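A short sketch for walkfiles above, under the same assumptions (auxly.filesys import path, _is_match applied to the absolute path); the src/ directory is illustrative.

from auxly.filesys import walkfiles

# Yield every .py file under ./src, including subdirectories.
for path in walkfiles("src", regex=r".*\.py$"):
    print(path)

# Restrict the walk to the top level only.
top_level = list(walkfiles("src", recurse=False))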
240,835
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
countfiles
|
def countfiles(path, recurse=False):
"""Returns the number of files under the given directory path."""
if not op.isdir(path):
return 0
count = 0
for r,ds,fs in os.walk(path):
count += len(fs)
if not recurse:
break
return count
|
python
|
def countfiles(path, recurse=False):
"""Returns the number of files under the given directory path."""
if not op.isdir(path):
return 0
count = 0
for r,ds,fs in os.walk(path):
count += len(fs)
if not recurse:
break
return count
|
[
"def",
"countfiles",
"(",
"path",
",",
"recurse",
"=",
"False",
")",
":",
"if",
"not",
"op",
".",
"isdir",
"(",
"path",
")",
":",
"return",
"0",
"count",
"=",
"0",
"for",
"r",
",",
"ds",
",",
"fs",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"count",
"+=",
"len",
"(",
"fs",
")",
"if",
"not",
"recurse",
":",
"break",
"return",
"count"
] |
Returns the number of files under the given directory path.
|
[
"Returns",
"the",
"number",
"of",
"files",
"under",
"the",
"given",
"directory",
"path",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L278-L287
|
240,836
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
isempty
|
def isempty(path):
"""Returns True if the given file or directory path is empty.
**Examples**:
::
auxly.filesys.isempty("foo.txt") # Works on files...
auxly.filesys.isempty("bar") # ...or directories!
"""
if op.isdir(path):
return [] == os.listdir(path)
elif op.isfile(path):
return 0 == os.stat(path).st_size
return None
|
python
|
def isempty(path):
"""Returns True if the given file or directory path is empty.
**Examples**:
::
auxly.filesys.isempty("foo.txt") # Works on files...
auxly.filesys.isempty("bar") # ...or directories!
"""
if op.isdir(path):
return [] == os.listdir(path)
elif op.isfile(path):
return 0 == os.stat(path).st_size
return None
|
[
"def",
"isempty",
"(",
"path",
")",
":",
"if",
"op",
".",
"isdir",
"(",
"path",
")",
":",
"return",
"[",
"]",
"==",
"os",
".",
"listdir",
"(",
"path",
")",
"elif",
"op",
".",
"isfile",
"(",
"path",
")",
":",
"return",
"0",
"==",
"os",
".",
"stat",
"(",
"path",
")",
".",
"st_size",
"return",
"None"
] |
Returns True if the given file or directory path is empty.
**Examples**:
::
auxly.filesys.isempty("foo.txt") # Works on files...
auxly.filesys.isempty("bar") # ...or directories!
|
[
"Returns",
"True",
"if",
"the",
"given",
"file",
"or",
"directory",
"path",
"is",
"empty",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L300-L312
|
240,837
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
getsize
|
def getsize(path, recurse=False):
"""Returns the size of the file or directory in bytes."""
if not op.isdir(path):
return op.getsize(path)
size = 0
for r,_,fs in os.walk(path):
for f in fs:
size += getsize(op.join(r,f))
if not recurse:
break
return size
|
python
|
def getsize(path, recurse=False):
"""Returns the size of the file or directory in bytes."""
if not op.isdir(path):
return op.getsize(path)
size = 0
for r,_,fs in os.walk(path):
for f in fs:
size += getsize(op.join(r,f))
if not recurse:
break
return size
|
[
"def",
"getsize",
"(",
"path",
",",
"recurse",
"=",
"False",
")",
":",
"if",
"not",
"op",
".",
"isdir",
"(",
"path",
")",
":",
"return",
"op",
".",
"getsize",
"(",
"path",
")",
"size",
"=",
"0",
"for",
"r",
",",
"_",
",",
"fs",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"for",
"f",
"in",
"fs",
":",
"size",
"+=",
"getsize",
"(",
"op",
".",
"join",
"(",
"r",
",",
"f",
")",
")",
"if",
"not",
"recurse",
":",
"break",
"return",
"size"
] |
Returns the size of the file or directory in bytes.
|
[
"Returns",
"the",
"size",
"of",
"the",
"file",
"or",
"directory",
"in",
"bytes",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L314-L324
|
240,838
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
copy
|
def copy(srcpath, dstpath, overwrite=True):
"""Copies the file or directory at `srcpath` to `dstpath`. Returns True if
successful, False otherwise."""
# Handle bail conditions.
if not op.exists(srcpath):
return False
if not overwrite:
if op.isfile(dstpath):
return False
if op.isdir(dstpath):
chkpath = op.join(dstpath, op.basename(srcpath))
if op.isdir(chkpath) or op.isfile(chkpath):
return False
srcpath = op.abspath(srcpath)
dstpath = op.abspath(dstpath)
# Handle copying.
if op.isdir(srcpath):
dstdir = dstpath
if op.isfile(dstpath):
dstdir = op.dirname(dstpath)
elif op.isdir(dstpath):
# Make sure srcdir is copied INTO dstdir.
dstdir = op.join(dstpath, op.basename(srcpath))
makedirs(dstdir)
for r,ds,fs in os.walk(srcpath):
basedir = r.replace(srcpath, "").rstrip(os.sep).strip(os.sep)
curdir = op.join(dstdir, basedir)
makedirs(curdir)
for f in fs:
if not copy(op.join(r,f), op.join(curdir, f), overwrite=overwrite):
return False
elif op.isfile(srcpath):
makedirs(dstpath)
shutil.copy2(srcpath, dstpath)
return op.exists(dstpath)
|
python
|
def copy(srcpath, dstpath, overwrite=True):
"""Copies the file or directory at `srcpath` to `dstpath`. Returns True if
successful, False otherwise."""
# Handle bail conditions.
if not op.exists(srcpath):
return False
if not overwrite:
if op.isfile(dstpath):
return False
if op.isdir(dstpath):
chkpath = op.join(dstpath, op.basename(srcpath))
if op.isdir(chkpath) or op.isfile(chkpath):
return False
srcpath = op.abspath(srcpath)
dstpath = op.abspath(dstpath)
# Handle copying.
if op.isdir(srcpath):
dstdir = dstpath
if op.isfile(dstpath):
dstdir = op.dirname(dstpath)
elif op.isdir(dstpath):
# Make sure srcdir is copied INTO dstdir.
dstdir = op.join(dstpath, op.basename(srcpath))
makedirs(dstdir)
for r,ds,fs in os.walk(srcpath):
basedir = r.replace(srcpath, "").rstrip(os.sep).strip(os.sep)
curdir = op.join(dstdir, basedir)
makedirs(curdir)
for f in fs:
if not copy(op.join(r,f), op.join(curdir, f), overwrite=overwrite):
return False
elif op.isfile(srcpath):
makedirs(dstpath)
shutil.copy2(srcpath, dstpath)
return op.exists(dstpath)
|
[
"def",
"copy",
"(",
"srcpath",
",",
"dstpath",
",",
"overwrite",
"=",
"True",
")",
":",
"# Handle bail conditions.",
"if",
"not",
"op",
".",
"exists",
"(",
"srcpath",
")",
":",
"return",
"False",
"if",
"not",
"overwrite",
":",
"if",
"op",
".",
"isfile",
"(",
"dstpath",
")",
":",
"return",
"False",
"if",
"op",
".",
"isdir",
"(",
"dstpath",
")",
":",
"chkpath",
"=",
"op",
".",
"join",
"(",
"dstpath",
",",
"op",
".",
"basename",
"(",
"srcpath",
")",
")",
"if",
"op",
".",
"isdir",
"(",
"chkpath",
")",
"or",
"op",
".",
"isfile",
"(",
"chkpath",
")",
":",
"return",
"False",
"srcpath",
"=",
"op",
".",
"abspath",
"(",
"srcpath",
")",
"dstpath",
"=",
"op",
".",
"abspath",
"(",
"dstpath",
")",
"# Handle copying.",
"if",
"op",
".",
"isdir",
"(",
"srcpath",
")",
":",
"dstdir",
"=",
"dstpath",
"if",
"op",
".",
"isfile",
"(",
"dstpath",
")",
":",
"dstdir",
"=",
"op",
".",
"dirname",
"(",
"dstpath",
")",
"elif",
"op",
".",
"isdir",
"(",
"dstpath",
")",
":",
"# Make sure srcdir is copied INTO dstdir.",
"dstdir",
"=",
"op",
".",
"join",
"(",
"dstpath",
",",
"op",
".",
"basename",
"(",
"srcpath",
")",
")",
"makedirs",
"(",
"dstdir",
")",
"for",
"r",
",",
"ds",
",",
"fs",
"in",
"os",
".",
"walk",
"(",
"srcpath",
")",
":",
"basedir",
"=",
"r",
".",
"replace",
"(",
"srcpath",
",",
"\"\"",
")",
".",
"rstrip",
"(",
"os",
".",
"sep",
")",
".",
"strip",
"(",
"os",
".",
"sep",
")",
"curdir",
"=",
"op",
".",
"join",
"(",
"dstdir",
",",
"basedir",
")",
"makedirs",
"(",
"curdir",
")",
"for",
"f",
"in",
"fs",
":",
"if",
"not",
"copy",
"(",
"op",
".",
"join",
"(",
"r",
",",
"f",
")",
",",
"op",
".",
"join",
"(",
"curdir",
",",
"f",
")",
",",
"overwrite",
"=",
"overwrite",
")",
":",
"return",
"False",
"elif",
"op",
".",
"isfile",
"(",
"srcpath",
")",
":",
"makedirs",
"(",
"dstpath",
")",
"shutil",
".",
"copy2",
"(",
"srcpath",
",",
"dstpath",
")",
"return",
"op",
".",
"exists",
"(",
"dstpath",
")"
] |
Copies the file or directory at `srcpath` to `dstpath`. Returns True if
successful, False otherwise.
|
[
"Copies",
"the",
"file",
"or",
"directory",
"at",
"srcpath",
"to",
"dstpath",
".",
"Returns",
"True",
"if",
"successful",
"False",
"otherwise",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L326-L362
|
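A usage sketch for copy above; file and directory names are illustrative and assumed to exist where noted.

import os.path as op
from auxly.filesys import copy

# Copy a single file; missing destination directories are created first.
copy("report.txt", "backup/report.txt")
print(op.isfile("backup/report.txt"))  # True if the copy succeeded

# Copy a directory INTO an existing directory (result: backup/data/...).
copy("data", "backup")

# Refuse to clobber an existing destination.
print(copy("report.txt", "backup/report.txt", overwrite=False))  # False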
240,839
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
move
|
def move(srcpath, dstpath, overwrite=True):
"""Moves the file or directory at `srcpath` to `dstpath`. Returns True if
successful, False otherwise."""
# TODO: (JRR@201612230924) Consider adding smarter checks to prevent files ending up with directory names; e.g. if dstpath directory does not exist.
if not op.exists(srcpath):
return False
if srcpath == dstpath:
return True
if op.isfile(srcpath) and op.isdir(dstpath):
verfunc = op.isfile
verpath = op.join(dstpath, op.basename(srcpath))
elif op.isfile(srcpath):
verfunc = op.isfile
verpath = dstpath
makedirs(dstpath)
elif op.isdir(srcpath) and op.isdir(dstpath):
verfunc = op.isdir
verpath = op.join(dstpath, op.basename(srcpath))
elif op.isdir(srcpath):
verfunc = op.isdir
verpath = dstpath
else:
return False
if op.isfile(verpath):
if not overwrite:
return False
else:
# On Windows, filename case is ignored so the following check will
# prevent unintentionally deleting the srcpath before moving.
if "nt" == os.name and srcpath.lower() == dstpath.lower():
pass
elif not delete(verpath):
return False
try:
shutil.move(srcpath, dstpath)
except:
return False
return verfunc(verpath)
|
python
|
def move(srcpath, dstpath, overwrite=True):
"""Moves the file or directory at `srcpath` to `dstpath`. Returns True if
successful, False otherwise."""
# TODO: (JRR@201612230924) Consider adding smarter checks to prevent files ending up with directory names; e.g. if dstpath directory does not exist.
if not op.exists(srcpath):
return False
if srcpath == dstpath:
return True
if op.isfile(srcpath) and op.isdir(dstpath):
verfunc = op.isfile
verpath = op.join(dstpath, op.basename(srcpath))
elif op.isfile(srcpath):
verfunc = op.isfile
verpath = dstpath
makedirs(dstpath)
elif op.isdir(srcpath) and op.isdir(dstpath):
verfunc = op.isdir
verpath = op.join(dstpath, op.basename(srcpath))
elif op.isdir(srcpath):
verfunc = op.isdir
verpath = dstpath
else:
return False
if op.isfile(verpath):
if not overwrite:
return False
else:
# On Windows, filename case is ignored so the following check will
# prevent unintentionally deleting the srcpath before moving.
if "nt" == os.name and srcpath.lower() == dstpath.lower():
pass
elif not delete(verpath):
return False
try:
shutil.move(srcpath, dstpath)
except:
return False
return verfunc(verpath)
|
[
"def",
"move",
"(",
"srcpath",
",",
"dstpath",
",",
"overwrite",
"=",
"True",
")",
":",
"# TODO: (JRR@201612230924) Consider adding smarter checks to prevent files ending up with directory names; e.g. if dstpath directory does not exist.",
"if",
"not",
"op",
".",
"exists",
"(",
"srcpath",
")",
":",
"return",
"False",
"if",
"srcpath",
"==",
"dstpath",
":",
"return",
"True",
"if",
"op",
".",
"isfile",
"(",
"srcpath",
")",
"and",
"op",
".",
"isdir",
"(",
"dstpath",
")",
":",
"verfunc",
"=",
"op",
".",
"isfile",
"verpath",
"=",
"op",
".",
"join",
"(",
"dstpath",
",",
"op",
".",
"basename",
"(",
"srcpath",
")",
")",
"elif",
"op",
".",
"isfile",
"(",
"srcpath",
")",
":",
"verfunc",
"=",
"op",
".",
"isfile",
"verpath",
"=",
"dstpath",
"makedirs",
"(",
"dstpath",
")",
"elif",
"op",
".",
"isdir",
"(",
"srcpath",
")",
"and",
"op",
".",
"isdir",
"(",
"dstpath",
")",
":",
"verfunc",
"=",
"op",
".",
"isdir",
"verpath",
"=",
"op",
".",
"join",
"(",
"dstpath",
",",
"op",
".",
"basename",
"(",
"srcpath",
")",
")",
"elif",
"op",
".",
"isdir",
"(",
"srcpath",
")",
":",
"verfunc",
"=",
"op",
".",
"isdir",
"verpath",
"=",
"dstpath",
"else",
":",
"return",
"False",
"if",
"op",
".",
"isfile",
"(",
"verpath",
")",
":",
"if",
"not",
"overwrite",
":",
"return",
"False",
"else",
":",
"# On Windows, filename case is ignored so the following check will",
"# prevent unintentionally deleting the srcpath before moving.",
"if",
"\"nt\"",
"==",
"os",
".",
"name",
"and",
"srcpath",
".",
"lower",
"(",
")",
"==",
"dstpath",
".",
"lower",
"(",
")",
":",
"pass",
"elif",
"not",
"delete",
"(",
"verpath",
")",
":",
"return",
"False",
"try",
":",
"shutil",
".",
"move",
"(",
"srcpath",
",",
"dstpath",
")",
"except",
":",
"return",
"False",
"return",
"verfunc",
"(",
"verpath",
")"
] |
Moves the file or directory at `srcpath` to `dstpath`. Returns True if
successful, False otherwise.
|
[
"Moves",
"the",
"file",
"or",
"directory",
"at",
"srcpath",
"to",
"dstpath",
".",
"Returns",
"True",
"if",
"successful",
"False",
"otherwise",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L364-L401
|
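A usage sketch for move above, with illustrative paths that are assumed to exist; the return value simply reports whether the destination is present afterwards.

from auxly.filesys import move

# Rename a file in place.
print(move("draft.txt", "final.txt"))            # True on success

# Move a file into an existing directory (result: archive/final.txt).
print(move("final.txt", "archive"))

# Keep an existing destination intact.
print(move("other.txt", "archive/final.txt", overwrite=False))  # False if it exists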
240,840
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
_FileSysObject.dirpath
|
def dirpath(self):
"""Returns a Path object for the directory associated with this object."""
if self.isfile():
return Path(op.dirname(self._fspath))
else:
return Path(self)
|
python
|
def dirpath(self):
"""Returns a Path object for the directory associated with this object."""
if self.isfile():
return Path(op.dirname(self._fspath))
else:
return Path(self)
|
[
"def",
"dirpath",
"(",
"self",
")",
":",
"if",
"self",
".",
"isfile",
"(",
")",
":",
"return",
"Path",
"(",
"op",
".",
"dirname",
"(",
"self",
".",
"_fspath",
")",
")",
"else",
":",
"return",
"Path",
"(",
"self",
")"
] |
Returns a Path object for the directory associated with this object.
|
[
"Returns",
"a",
"Path",
"object",
"for",
"the",
"directory",
"associated",
"with",
"this",
"object",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L65-L70
|
240,841
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
File.read
|
def read(self, encoding=None):
"""Reads from the file and returns result as a string."""
encoding = encoding or ENCODING
try:
with codecs.open(self.path, encoding=encoding) as fi:
return fi.read()
except:
return None
|
python
|
def read(self, encoding=None):
"""Reads from the file and returns result as a string."""
encoding = encoding or ENCODING
try:
with codecs.open(self.path, encoding=encoding) as fi:
return fi.read()
except:
return None
|
[
"def",
"read",
"(",
"self",
",",
"encoding",
"=",
"None",
")",
":",
"encoding",
"=",
"encoding",
"or",
"ENCODING",
"try",
":",
"with",
"codecs",
".",
"open",
"(",
"self",
".",
"path",
",",
"encoding",
"=",
"encoding",
")",
"as",
"fi",
":",
"return",
"fi",
".",
"read",
"(",
")",
"except",
":",
"return",
"None"
] |
Reads from the file and returns result as a string.
|
[
"Reads",
"from",
"the",
"file",
"and",
"returns",
"result",
"as",
"a",
"string",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L131-L138
|
240,842
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
File.readlines
|
def readlines(self, encoding=None):
"""Reads from the file and returns result as a list of lines."""
try:
encoding = encoding or ENCODING
with codecs.open(self.path, encoding=None) as fi:
return fi.readlines()
except:
return []
|
python
|
def readlines(self, encoding=None):
"""Reads from the file and returns result as a list of lines."""
try:
encoding = encoding or ENCODING
with codecs.open(self.path, encoding=None) as fi:
return fi.readlines()
except:
return []
|
[
"def",
"readlines",
"(",
"self",
",",
"encoding",
"=",
"None",
")",
":",
"try",
":",
"encoding",
"=",
"encoding",
"or",
"ENCODING",
"with",
"codecs",
".",
"open",
"(",
"self",
".",
"path",
",",
"encoding",
"=",
"None",
")",
"as",
"fi",
":",
"return",
"fi",
".",
"readlines",
"(",
")",
"except",
":",
"return",
"[",
"]"
] |
Reads from the file and returns result as a list of lines.
|
[
"Reads",
"from",
"the",
"file",
"and",
"returns",
"result",
"as",
"a",
"list",
"of",
"lines",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L139-L146
|
240,843
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
File._write
|
def _write(self, content, mode, encoding=None, linesep=False):
"""Handles file writes."""
makedirs(self.path)
try:
encoding = encoding or ENCODING
if "b" not in mode:
try:
content = str(content)
except:
pass
if linesep:
content += os.linesep
with codecs.open(self.path, mode, encoding=encoding) as fo:
fo.write(content)
return True
except:
return False
|
python
|
def _write(self, content, mode, encoding=None, linesep=False):
"""Handles file writes."""
makedirs(self.path)
try:
encoding = encoding or ENCODING
if "b" not in mode:
try:
content = str(content)
except:
pass
if linesep:
content += os.linesep
with codecs.open(self.path, mode, encoding=encoding) as fo:
fo.write(content)
return True
except:
return False
|
[
"def",
"_write",
"(",
"self",
",",
"content",
",",
"mode",
",",
"encoding",
"=",
"None",
",",
"linesep",
"=",
"False",
")",
":",
"makedirs",
"(",
"self",
".",
"path",
")",
"try",
":",
"encoding",
"=",
"encoding",
"or",
"ENCODING",
"if",
"\"b\"",
"not",
"in",
"mode",
":",
"try",
":",
"content",
"=",
"str",
"(",
"content",
")",
"except",
":",
"pass",
"if",
"linesep",
":",
"content",
"+=",
"os",
".",
"linesep",
"with",
"codecs",
".",
"open",
"(",
"self",
".",
"path",
",",
"mode",
",",
"encoding",
"=",
"encoding",
")",
"as",
"fo",
":",
"fo",
".",
"write",
"(",
"content",
")",
"return",
"True",
"except",
":",
"return",
"False"
] |
Handles file writes.
|
[
"Handles",
"file",
"writes",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L147-L163
|
240,844
|
jeffrimko/Auxly
|
lib/auxly/filesys.py
|
File.append
|
def append(self, content, binary=False, encoding=None):
"""Appends the given content to the file. Existing content is
preserved. Returns true if successful, false otherwise."""
mode = "ab" if binary else "a"
return self._write(content, mode, encoding=encoding, linesep=False)
|
python
|
def append(self, content, binary=False, encoding=None):
"""Appends the given content to the file. Existing content is
preserved. Returns true if successful, false otherwise."""
mode = "ab" if binary else "a"
return self._write(content, mode, encoding=encoding, linesep=False)
|
[
"def",
"append",
"(",
"self",
",",
"content",
",",
"binary",
"=",
"False",
",",
"encoding",
"=",
"None",
")",
":",
"mode",
"=",
"\"ab\"",
"if",
"binary",
"else",
"\"a\"",
"return",
"self",
".",
"_write",
"(",
"content",
",",
"mode",
",",
"encoding",
"=",
"encoding",
",",
"linesep",
"=",
"False",
")"
] |
Appends the given content to the file. Existing content is
preserved. Returns true if successful, false otherwise.
|
[
"Appends",
"the",
"given",
"content",
"to",
"the",
"file",
".",
"Existing",
"content",
"is",
"preserved",
".",
"Returns",
"true",
"if",
"successful",
"false",
"otherwise",
"."
] |
5aae876bcb6ca117c81d904f9455764cdc78cd48
|
https://github.com/jeffrimko/Auxly/blob/5aae876bcb6ca117c81d904f9455764cdc78cd48/lib/auxly/filesys.py#L164-L168
|
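A combined sketch for the File.read, File.readlines and File.append entries above. The File constructor is not shown in these records, so wrapping a single path is an assumption; note that append writes with linesep=False, i.e. without a trailing newline.

from auxly.filesys import File

f = File("notes.txt")        # assumed constructor signature

f.append("hello ")           # no newline is added (linesep=False)
f.append("world")
print(f.read())              # -> "hello world", or None if the read fails
print(f.readlines())         # -> list of lines, [] on failure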
240,845
|
pyvec/pyvodb
|
pyvodb/cli/cliutil.py
|
handle_raw_output
|
def handle_raw_output(ctx, data):
"""If a raw output format is set, dump data and exit"""
if ctx.obj['format'] == 'json':
print(json_dump(data))
exit(0)
if ctx.obj['format'] == 'yaml':
print(yaml_dump(data), end='')
exit(0)
|
python
|
def handle_raw_output(ctx, data):
"""If a raw output format is set, dump data and exit"""
if ctx.obj['format'] == 'json':
print(json_dump(data))
exit(0)
if ctx.obj['format'] == 'yaml':
print(yaml_dump(data), end='')
exit(0)
|
[
"def",
"handle_raw_output",
"(",
"ctx",
",",
"data",
")",
":",
"if",
"ctx",
".",
"obj",
"[",
"'format'",
"]",
"==",
"'json'",
":",
"print",
"(",
"json_dump",
"(",
"data",
")",
")",
"exit",
"(",
"0",
")",
"if",
"ctx",
".",
"obj",
"[",
"'format'",
"]",
"==",
"'yaml'",
":",
"print",
"(",
"yaml_dump",
"(",
"data",
")",
",",
"end",
"=",
"''",
")",
"exit",
"(",
"0",
")"
] |
If a raw output format is set, dump data and exit
|
[
"If",
"a",
"raw",
"output",
"format",
"is",
"set",
"dump",
"data",
"and",
"exit"
] |
07183333df26eb12c5c2b98802cde3fb3a6c1339
|
https://github.com/pyvec/pyvodb/blob/07183333df26eb12c5c2b98802cde3fb3a6c1339/pyvodb/cli/cliutil.py#L10-L17
|
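A minimal sketch for handle_raw_output above. Only ctx.obj['format'] is consulted, so a SimpleNamespace stands in for the click context; the import path follows the repository layout, and json_dump/yaml_dump are the module-level helpers implied by the code.

from types import SimpleNamespace
from pyvodb.cli.cliutil import handle_raw_output

ctx = SimpleNamespace(obj={"format": "json"})

# Prints the data as JSON and calls exit(0); 'yaml' dumps YAML instead,
# and any other format value falls through so the caller keeps rendering.
handle_raw_output(ctx, {"city": "Brno", "events": 3})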
240,846
|
mrstephenneal/dirutility
|
dirutility/walk/multiprocess.py
|
Sprinter._get_root_files
|
def _get_root_files(self, directory):
"""Retrieve files within the root directory"""
if len(self.filepaths) is 0:
if self.filters:
root_files = [(directory, f) for f in os.listdir(directory)
if os.path.isfile(os.path.join(directory, f))
and self.filters.validate(f)
and self.filters.get_level(f) == self.filters.max_level]
else:
root_files = [(directory, f) for f in os.listdir(directory)
if os.path.isfile(os.path.join(directory, f))]
self.add_path(root_files)
|
python
|
def _get_root_files(self, directory):
"""Retrieve files within the root directory"""
if len(self.filepaths) is 0:
if self.filters:
root_files = [(directory, f) for f in os.listdir(directory)
if os.path.isfile(os.path.join(directory, f))
and self.filters.validate(f)
and self.filters.get_level(f) == self.filters.max_level]
else:
root_files = [(directory, f) for f in os.listdir(directory)
if os.path.isfile(os.path.join(directory, f))]
self.add_path(root_files)
|
[
"def",
"_get_root_files",
"(",
"self",
",",
"directory",
")",
":",
"if",
"len",
"(",
"self",
".",
"filepaths",
")",
"is",
"0",
":",
"if",
"self",
".",
"filters",
":",
"root_files",
"=",
"[",
"(",
"directory",
",",
"f",
")",
"for",
"f",
"in",
"os",
".",
"listdir",
"(",
"directory",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"f",
")",
")",
"and",
"self",
".",
"filters",
".",
"validate",
"(",
"f",
")",
"and",
"self",
".",
"filters",
".",
"get_level",
"(",
"f",
")",
"==",
"self",
".",
"filters",
".",
"max_level",
"]",
"else",
":",
"root_files",
"=",
"[",
"(",
"directory",
",",
"f",
")",
"for",
"f",
"in",
"os",
".",
"listdir",
"(",
"directory",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"f",
")",
")",
"]",
"self",
".",
"add_path",
"(",
"root_files",
")"
] |
Retrieve files within the root directory
|
[
"Retrieve",
"files",
"within",
"the",
"root",
"directory"
] |
339378659e2d7e09c53acfc51c5df745bb0cd517
|
https://github.com/mrstephenneal/dirutility/blob/339378659e2d7e09c53acfc51c5df745bb0cd517/dirutility/walk/multiprocess.py#L45-L56
|
240,847
|
mrstephenneal/dirutility
|
dirutility/walk/multiprocess.py
|
Sprinter.sprinter
|
def sprinter(self):
"""
Called when parallelize is True.
This function will generate the file names in a directory tree by adding directories to a Queue and
continuously exploring directories in the Queue until Queue is emptied.
Significantly faster than crawler method for larger directory trees.
"""
self._printer('Multiprocess Walk')
# Loop through directories in case there is more than one (1)
for directory in self.directory:
self._get_root_files(directory) # Add file within root directory if filepaths is empty
# acquire the list of paths
first_level_dirs = next(os.walk(directory))[1]
for path in first_level_dirs:
self.unsearched.put((directory, path))
self._printer('Pool Processing STARTED')
pool = Pool(self.pool_size)
pool.map_async(self.parallel_worker, range(self.pool_size))
pool.close()
self.unsearched.join()
self._printer('Pool Processing ENDED')
return self.filepaths
|
python
|
def sprinter(self):
"""
Called when parallelize is True.
This function will generate the file names in a directory tree by adding directories to a Queue and
continuously exploring directories in the Queue until Queue is emptied.
Significantly faster than crawler method for larger directory trees.
"""
self._printer('Multiprocess Walk')
# Loop through directories in case there is more than one (1)
for directory in self.directory:
self._get_root_files(directory) # Add file within root directory if filepaths is empty
# acquire the list of paths
first_level_dirs = next(os.walk(directory))[1]
for path in first_level_dirs:
self.unsearched.put((directory, path))
self._printer('Pool Processing STARTED')
pool = Pool(self.pool_size)
pool.map_async(self.parallel_worker, range(self.pool_size))
pool.close()
self.unsearched.join()
self._printer('Pool Processing ENDED')
return self.filepaths
|
[
"def",
"sprinter",
"(",
"self",
")",
":",
"self",
".",
"_printer",
"(",
"'Multiprocess Walk'",
")",
"# Loop through directories in case there is more than one (1)",
"for",
"directory",
"in",
"self",
".",
"directory",
":",
"self",
".",
"_get_root_files",
"(",
"directory",
")",
"# Add file within root directory if filepaths is empty",
"# acquire the list of paths",
"first_level_dirs",
"=",
"next",
"(",
"os",
".",
"walk",
"(",
"directory",
")",
")",
"[",
"1",
"]",
"for",
"path",
"in",
"first_level_dirs",
":",
"self",
".",
"unsearched",
".",
"put",
"(",
"(",
"directory",
",",
"path",
")",
")",
"self",
".",
"_printer",
"(",
"'Pool Processing STARTED'",
")",
"pool",
"=",
"Pool",
"(",
"self",
".",
"pool_size",
")",
"pool",
".",
"map_async",
"(",
"self",
".",
"parallel_worker",
",",
"range",
"(",
"self",
".",
"pool_size",
")",
")",
"pool",
".",
"close",
"(",
")",
"self",
".",
"unsearched",
".",
"join",
"(",
")",
"self",
".",
"_printer",
"(",
"'Pool Processing ENDED'",
")",
"return",
"self",
".",
"filepaths"
] |
Called when parallelize is True.
This function will generate the file names in a directory tree by adding directories to a Queue and
continuously exploring directories in the Queue until Queue is emptied.
Significantly faster than crawler method for larger directory trees.
|
[
"Called",
"when",
"parallelize",
"is",
"True",
".",
"This",
"function",
"will",
"generate",
"the",
"file",
"names",
"in",
"a",
"directory",
"tree",
"by",
"adding",
"directories",
"to",
"a",
"Queue",
"and",
"continuously",
"exploring",
"directories",
"in",
"the",
"Queue",
"until",
"Queue",
"is",
"emptied",
".",
"Significantly",
"faster",
"than",
"crawler",
"method",
"for",
"larger",
"directory",
"trees",
"."
] |
339378659e2d7e09c53acfc51c5df745bb0cd517
|
https://github.com/mrstephenneal/dirutility/blob/339378659e2d7e09c53acfc51c5df745bb0cd517/dirutility/walk/multiprocess.py#L131-L152
|
240,848
|
humilis/humilis-firehose
|
scripts/empty-bucket.py
|
empty_bucket
|
def empty_bucket(outputs_file):
"""Empty the bucket associated to the test deployment."""
with open(outputs_file, "r") as f:
outputs = yaml.load(f)
bucket = outputs["storage"]["BucketName"]
print("Emptying bucket {} ...".format(bucket))
os.system("aws s3 rm s3://{} --recursive".format(bucket))
print("Bucket {} has been emptied".format(bucket))
|
python
|
def empty_bucket(outputs_file):
"""Empty the bucket associated to the test deployment."""
with open(outputs_file, "r") as f:
outputs = yaml.load(f)
bucket = outputs["storage"]["BucketName"]
print("Emptying bucket {} ...".format(bucket))
os.system("aws s3 rm s3://{} --recursive".format(bucket))
print("Bucket {} has been emptied".format(bucket))
|
[
"def",
"empty_bucket",
"(",
"outputs_file",
")",
":",
"with",
"open",
"(",
"outputs_file",
",",
"\"r\"",
")",
"as",
"f",
":",
"outputs",
"=",
"yaml",
".",
"load",
"(",
"f",
")",
"bucket",
"=",
"outputs",
"[",
"\"storage\"",
"]",
"[",
"\"BucketName\"",
"]",
"print",
"(",
"\"Emptying bucket {} ...\"",
".",
"format",
"(",
"bucket",
")",
")",
"os",
".",
"system",
"(",
"\"aws s3 rm s3://{} --recursive\"",
".",
"format",
"(",
"bucket",
")",
")",
"print",
"(",
"\"Bucket {} has been emptied\"",
".",
"format",
"(",
"bucket",
")",
")"
] |
Empty the bucket associated to the test deployment.
|
[
"Empty",
"the",
"bucket",
"associated",
"to",
"the",
"test",
"deployment",
"."
] |
8611e4b18d534bbafb638597da1304fc87be62b5
|
https://github.com/humilis/humilis-firehose/blob/8611e4b18d534bbafb638597da1304fc87be62b5/scripts/empty-bucket.py#L9-L18
|
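The function above only reads storage.BucketName from a YAML outputs file before shelling out to the AWS CLI. A sketch of the minimal input it expects (file name and bucket name are illustrative):

import yaml

outputs = {"storage": {"BucketName": "my-test-deployment-bucket"}}
with open("outputs.yaml", "w") as f:
    yaml.dump(outputs, f)

# empty_bucket("outputs.yaml") would then run:
#   aws s3 rm s3://my-test-deployment-bucket --recursive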
240,849
|
MacHu-GWU/angora-project
|
angora/text/formatter.py
|
fmt_title
|
def fmt_title(text):
"""Article title formatter.
Except functional words, first letter uppercase. Example:
"Google Killing Annoying Browsing Feature"
**中文文档**
文章标题的格式, 除了虚词, 每个英文单词的第一个字母大写。
"""
text = text.strip()
if len(text) == 0: # if empty string, return it
return text
else:
text = text.lower() # lower all char
# delete redundant empty space
chunks = [chunk for chunk in text.split(" ") if len(chunk) >= 1]
new_chunks = list()
for chunk in chunks:
if chunk not in _function_words:
chunk = chunk[0].upper() + chunk[1:]
new_chunks.append(chunk)
new_chunks[0] = new_chunks[0][0].upper() + new_chunks[0][1:]
return " ".join(new_chunks)
|
python
|
def fmt_title(text):
"""Article title formatter.
Except functional words, first letter uppercase. Example:
"Google Killing Annoying Browsing Feature"
**中文文档**
文章标题的格式, 除了虚词, 每个英文单词的第一个字母大写。
"""
text = text.strip()
if len(text) == 0: # if empty string, return it
return text
else:
text = text.lower() # lower all char
# delete redundant empty space
chunks = [chunk for chunk in text.split(" ") if len(chunk) >= 1]
new_chunks = list()
for chunk in chunks:
if chunk not in _function_words:
chunk = chunk[0].upper() + chunk[1:]
new_chunks.append(chunk)
new_chunks[0] = new_chunks[0][0].upper() + new_chunks[0][1:]
return " ".join(new_chunks)
|
[
"def",
"fmt_title",
"(",
"text",
")",
":",
"text",
"=",
"text",
".",
"strip",
"(",
")",
"if",
"len",
"(",
"text",
")",
"==",
"0",
":",
"# if empty string, return it",
"return",
"text",
"else",
":",
"text",
"=",
"text",
".",
"lower",
"(",
")",
"# lower all char",
"# delete redundant empty space",
"chunks",
"=",
"[",
"chunk",
"for",
"chunk",
"in",
"text",
".",
"split",
"(",
"\" \"",
")",
"if",
"len",
"(",
"chunk",
")",
">=",
"1",
"]",
"new_chunks",
"=",
"list",
"(",
")",
"for",
"chunk",
"in",
"chunks",
":",
"if",
"chunk",
"not",
"in",
"_function_words",
":",
"chunk",
"=",
"chunk",
"[",
"0",
"]",
".",
"upper",
"(",
")",
"+",
"chunk",
"[",
"1",
":",
"]",
"new_chunks",
".",
"append",
"(",
"chunk",
")",
"new_chunks",
"[",
"0",
"]",
"=",
"new_chunks",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"upper",
"(",
")",
"+",
"new_chunks",
"[",
"0",
"]",
"[",
"1",
":",
"]",
"return",
"\" \"",
".",
"join",
"(",
"new_chunks",
")"
] |
Article title formatter.
Except functional words, first letter uppercase. Example:
"Google Killing Annoying Browsing Feature"
**中文文档**
文章标题的格式, 除了虚词, 每个英文单词的第一个字母大写。
|
[
"Article",
"title",
"formatter",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/text/formatter.py#L36-L62
|
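A usage sketch for fmt_title above, reusing the docstring's own example. The import path angora.text.formatter is assumed from the repository layout, and which words stay lower-case depends on the module's _function_words list (not shown).

from angora.text.formatter import fmt_title

print(fmt_title("  google killing annoying browsing feature "))
# -> "Google Killing Annoying Browsing Feature"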
240,850
|
MacHu-GWU/angora-project
|
angora/text/formatter.py
|
fmt_sentence
|
def fmt_sentence(text):
"""English sentence formatter.
First letter is always upper case. Example:
"Do you want to build a snow man?"
**中文文档**
句子格式。每句话的第一个单词第一个字母大写。
"""
text = text.strip()
if len(text) == 0: # if empty string, return it
return text
else:
text = text.lower() # lower all char
# delete redundant empty space
chunks = [chunk for chunk in text.split(" ") if len(chunk) >= 1]
chunks[0] = chunks[0][0].upper() + chunks[0][1:]
return " ".join(chunks)
|
python
|
def fmt_sentence(text):
"""English sentence formatter.
First letter is always upper case. Example:
"Do you want to build a snow man?"
**中文文档**
句子格式。每句话的第一个单词第一个字母大写。
"""
text = text.strip()
if len(text) == 0: # if empty string, return it
return text
else:
text = text.lower() # lower all char
# delete redundant empty space
chunks = [chunk for chunk in text.split(" ") if len(chunk) >= 1]
chunks[0] = chunks[0][0].upper() + chunks[0][1:]
return " ".join(chunks)
|
[
"def",
"fmt_sentence",
"(",
"text",
")",
":",
"text",
"=",
"text",
".",
"strip",
"(",
")",
"if",
"len",
"(",
"text",
")",
"==",
"0",
":",
"# if empty string, return it",
"return",
"text",
"else",
":",
"text",
"=",
"text",
".",
"lower",
"(",
")",
"# lower all char",
"# delete redundant empty space",
"chunks",
"=",
"[",
"chunk",
"for",
"chunk",
"in",
"text",
".",
"split",
"(",
"\" \"",
")",
"if",
"len",
"(",
"chunk",
")",
">=",
"1",
"]",
"chunks",
"[",
"0",
"]",
"=",
"chunks",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"upper",
"(",
")",
"+",
"chunks",
"[",
"0",
"]",
"[",
"1",
":",
"]",
"return",
"\" \"",
".",
"join",
"(",
"chunks",
")"
] |
English sentence formatter.
First letter is always upper case. Example:
"Do you want to build a snow man?"
**中文文档**
句子格式。每句话的第一个单词第一个字母大写。
|
[
"English",
"sentence",
"formatter",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/text/formatter.py#L65-L83
|
240,851
|
MacHu-GWU/angora-project
|
angora/text/formatter.py
|
fmt_filename
|
def fmt_filename(text):
"""File name formatter.
Remove all file system forbidden char from text.
**中文文档**
移除文件系统中不允许的字符。
"""
forbidden_char = ["\\", "/", ":", "*", "?", "|", "<", ">", '"']
for char in forbidden_char:
text = text.replace(char, "")
return text
|
python
|
def fmt_filename(text):
"""File name formatter.
Remove all file system forbidden char from text.
**中文文档**
移除文件系统中不允许的字符。
"""
forbidden_char = ["\\", "/", ":", "*", "?", "|", "<", ">", '"']
for char in forbidden_char:
text = text.replace(char, "")
return text
|
[
"def",
"fmt_filename",
"(",
"text",
")",
":",
"forbidden_char",
"=",
"[",
"\"\\\\\"",
",",
"\"/\"",
",",
"\":\"",
",",
"\"*\"",
",",
"\"?\"",
",",
"\"|\"",
",",
"\"<\"",
",",
"\">\"",
",",
"'\"'",
"]",
"for",
"char",
"in",
"forbidden_char",
":",
"text",
"=",
"text",
".",
"replace",
"(",
"char",
",",
"\"\"",
")",
"return",
"text"
] |
File name formatter.
Remove all file system forbidden char from text.
**中文文档**
移除文件系统中不允许的字符。
|
[
"File",
"name",
"formatter",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/text/formatter.py#L107-L119
|
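A usage sketch for fmt_filename above, with the same assumed import path; the expected output follows directly from the forbidden-character list in the code.

from angora.text.formatter import fmt_filename

print(fmt_filename('report: "Q1/Q2" results?.txt'))
# -> 'report Q1Q2 results.txt'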
240,852
|
saltonmassally/Eve-Statsd
|
eve_statsd/__init__.py
|
StatsD._init_request_hooks
|
def _init_request_hooks(self):
""" initialize pre request hooks"""
for method_type in ('pre', 'post'):
for method in _METHODS:
event = getattr(self.app, 'on_' + method_type + '_' + method)
event_hook = getattr(hooks, method_type + '_' + method)
event += event_hook
|
python
|
def _init_request_hooks(self):
""" initialize pre request hooks"""
for method_type in ('pre', 'post'):
for method in _METHODS:
event = getattr(self.app, 'on_' + method_type + '_' + method)
event_hook = getattr(hooks, method_type + '_' + method)
event += event_hook
|
[
"def",
"_init_request_hooks",
"(",
"self",
")",
":",
"for",
"method_type",
"in",
"(",
"'pre'",
",",
"'post'",
")",
":",
"for",
"method",
"in",
"_METHODS",
":",
"event",
"=",
"getattr",
"(",
"self",
".",
"app",
",",
"'on_'",
"+",
"method_type",
"+",
"'_'",
"+",
"method",
")",
"event_hook",
"=",
"getattr",
"(",
"hooks",
",",
"method_type",
"+",
"'_'",
"+",
"method",
")",
"event",
"+=",
"event_hook"
] |
initialize pre request hooks
|
[
"initialize",
"pre",
"request",
"hooks"
] |
bd7cf83db4ca70da47e130ac7600739260179d79
|
https://github.com/saltonmassally/Eve-Statsd/blob/bd7cf83db4ca70da47e130ac7600739260179d79/eve_statsd/__init__.py#L50-L56
|
240,853
|
sbuss/pypercube
|
pypercube/metric.py
|
Metric.from_json
|
def from_json(cls, json_obj):
"""Build a MetricResponse from JSON.
:param json_obj: JSON data representing a Cube Metric.
:type json_obj: `String` or `json`
:throws: `InvalidMetricError` when any of {type,time,data} fields are
not present in json_obj.
"""
if isinstance(json_obj, str):
json_obj = json.loads(json_obj)
time = None
value = None
if cls.TIME_FIELD_NAME in json_obj:
time = json_obj[cls.TIME_FIELD_NAME]
else:
raise InvalidMetricError("{field} must be present!".format(
field=cls.TIME_FIELD_NAME))
if cls.VALUE_FIELD_NAME in json_obj:
value = json_obj[cls.VALUE_FIELD_NAME]
return cls(time, value)
|
python
|
def from_json(cls, json_obj):
"""Build a MetricResponse from JSON.
:param json_obj: JSON data representing a Cube Metric.
:type json_obj: `String` or `json`
:throws: `InvalidMetricError` when any of {type,time,data} fields are
not present in json_obj.
"""
if isinstance(json_obj, str):
json_obj = json.loads(json_obj)
time = None
value = None
if cls.TIME_FIELD_NAME in json_obj:
time = json_obj[cls.TIME_FIELD_NAME]
else:
raise InvalidMetricError("{field} must be present!".format(
field=cls.TIME_FIELD_NAME))
if cls.VALUE_FIELD_NAME in json_obj:
value = json_obj[cls.VALUE_FIELD_NAME]
return cls(time, value)
|
[
"def",
"from_json",
"(",
"cls",
",",
"json_obj",
")",
":",
"if",
"isinstance",
"(",
"json_obj",
",",
"str",
")",
":",
"json_obj",
"=",
"json",
".",
"loads",
"(",
"json_obj",
")",
"time",
"=",
"None",
"value",
"=",
"None",
"if",
"cls",
".",
"TIME_FIELD_NAME",
"in",
"json_obj",
":",
"time",
"=",
"json_obj",
"[",
"cls",
".",
"TIME_FIELD_NAME",
"]",
"else",
":",
"raise",
"InvalidMetricError",
"(",
"\"{field} must be present!\"",
".",
"format",
"(",
"field",
"=",
"cls",
".",
"TIME_FIELD_NAME",
")",
")",
"if",
"cls",
".",
"VALUE_FIELD_NAME",
"in",
"json_obj",
":",
"value",
"=",
"json_obj",
"[",
"cls",
".",
"VALUE_FIELD_NAME",
"]",
"return",
"cls",
"(",
"time",
",",
"value",
")"
] |
Build a MetricResponse from JSON.
:param json_obj: JSON data representing a Cube Metric.
:type json_obj: `String` or `json`
:throws: `InvalidMetricError` when any of {type,time,data} fields are
not present in json_obj.
|
[
"Build",
"a",
"MetricResponse",
"from",
"JSON",
"."
] |
e9d2cca9c004b8bad6d1e0b68b080f887a186a22
|
https://github.com/sbuss/pypercube/blob/e9d2cca9c004b8bad6d1e0b68b080f887a186a22/pypercube/metric.py#L25-L48
|
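A hedged sketch for Metric.from_json above. It is assumed to be a classmethod (any decorator sits outside the quoted range), and the field names are referenced through the class constants rather than guessing their literal values; the time field is mandatory, the value field optional.

from pypercube.metric import Metric

m = Metric.from_json({Metric.TIME_FIELD_NAME: "2015-01-01T00:00:00.000Z",
                      Metric.VALUE_FIELD_NAME: 42})

# A JSON string is also accepted; omitting the time field raises InvalidMetricError.
m = Metric.from_json('{"%s": "2015-01-01T00:00:00.000Z"}' % Metric.TIME_FIELD_NAME)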
240,854
|
mdeous/fatbotslim
|
fatbotslim/cli.py
|
make_parser
|
def make_parser():
"""
Creates an argument parser configured with options to run a bot
from the command line.
:return: configured argument parser
:rtype: :class:`argparse.ArgumentParser`
"""
parser = ArgumentParser(
description='Start an IRC bot instance from the command line.',
formatter_class=ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'-v', '--version',
action='version',
version='{0} v{1}'.format(NAME, VERSION)
)
parser.add_argument(
'-s', '--server',
metavar='HOST',
required=True,
help='the host to connect to'
)
parser.add_argument(
'-p', '--port',
metavar='PORT',
type=int,
default=6667,
help='the port the server is listening on'
)
parser.add_argument(
'-n', '--nick',
metavar='NAME',
required=True,
help="the bot's nickname"
)
parser.add_argument(
'-N', '--name',
metavar='NAME',
default=NAME,
help="the bot's real name"
)
parser.add_argument(
'-c', '--channels',
metavar='CHAN',
nargs='*',
help='join this channel upon connection'
)
parser.add_argument(
'-l', '--log',
metavar='LEVEL',
default='INFO',
help='minimal level for displayed logging messages'
)
parser.add_argument(
'-S', '--ssl',
action='store_true',
help='connect to the server using SSL'
)
return parser
|
python
|
def make_parser():
"""
Creates an argument parser configured with options to run a bot
from the command line.
:return: configured argument parser
:rtype: :class:`argparse.ArgumentParser`
"""
parser = ArgumentParser(
description='Start an IRC bot instance from the command line.',
formatter_class=ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'-v', '--version',
action='version',
version='{0} v{1}'.format(NAME, VERSION)
)
parser.add_argument(
'-s', '--server',
metavar='HOST',
required=True,
help='the host to connect to'
)
parser.add_argument(
'-p', '--port',
metavar='PORT',
type=int,
default=6667,
help='the port the server is listening on'
)
parser.add_argument(
'-n', '--nick',
metavar='NAME',
required=True,
help="the bot's nickname"
)
parser.add_argument(
'-N', '--name',
metavar='NAME',
default=NAME,
help="the bot's real name"
)
parser.add_argument(
'-c', '--channels',
metavar='CHAN',
nargs='*',
help='join this channel upon connection'
)
parser.add_argument(
'-l', '--log',
metavar='LEVEL',
default='INFO',
help='minimal level for displayed logging messages'
)
parser.add_argument(
'-S', '--ssl',
action='store_true',
help='connect to the server using SSL'
)
return parser
|
[
"def",
"make_parser",
"(",
")",
":",
"parser",
"=",
"ArgumentParser",
"(",
"description",
"=",
"'Start an IRC bot instance from the command line.'",
",",
"formatter_class",
"=",
"ArgumentDefaultsHelpFormatter",
",",
")",
"parser",
".",
"add_argument",
"(",
"'-v'",
",",
"'--version'",
",",
"action",
"=",
"'version'",
",",
"version",
"=",
"'{0} v{1}'",
".",
"format",
"(",
"NAME",
",",
"VERSION",
")",
")",
"parser",
".",
"add_argument",
"(",
"'-s'",
",",
"'--server'",
",",
"metavar",
"=",
"'HOST'",
",",
"required",
"=",
"True",
",",
"help",
"=",
"'the host to connect to'",
")",
"parser",
".",
"add_argument",
"(",
"'-p'",
",",
"'--port'",
",",
"metavar",
"=",
"'PORT'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"6667",
",",
"help",
"=",
"'the port the server is listening on'",
")",
"parser",
".",
"add_argument",
"(",
"'-n'",
",",
"'--nick'",
",",
"metavar",
"=",
"'NAME'",
",",
"required",
"=",
"True",
",",
"help",
"=",
"\"the bot's nickname\"",
")",
"parser",
".",
"add_argument",
"(",
"'-N'",
",",
"'--name'",
",",
"metavar",
"=",
"'NAME'",
",",
"default",
"=",
"NAME",
",",
"help",
"=",
"\"the bot's real name\"",
")",
"parser",
".",
"add_argument",
"(",
"'-c'",
",",
"'--channels'",
",",
"metavar",
"=",
"'CHAN'",
",",
"nargs",
"=",
"'*'",
",",
"help",
"=",
"'join this channel upon connection'",
")",
"parser",
".",
"add_argument",
"(",
"'-l'",
",",
"'--log'",
",",
"metavar",
"=",
"'LEVEL'",
",",
"default",
"=",
"'INFO'",
",",
"help",
"=",
"'minimal level for displayed logging messages'",
")",
"parser",
".",
"add_argument",
"(",
"'-S'",
",",
"'--ssl'",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"'connect to the server using SSL'",
")",
"return",
"parser"
] |
Creates an argument parser configured with options to run a bot
from the command line.
:return: configured argument parser
:rtype: :class:`argparse.ArgumentParser`
|
[
"Creates",
"an",
"argument",
"parser",
"configured",
"with",
"options",
"to",
"run",
"a",
"bot",
"from",
"the",
"command",
"line",
"."
] |
341595d24454a79caee23750eac271f9d0626c88
|
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/cli.py#L37-L96
|
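A quick sketch for make_parser above. Note that the surrounding module targets Python 2 (the main entry uses a print statement), so the import is assumed to run under that interpreter; host, nick and channel values are illustrative.

from fatbotslim.cli import make_parser

parser = make_parser()
# --server and --nick are required; everything else has defaults.
args = parser.parse_args(["-s", "irc.example.org", "-n", "mybot", "-c", "#chan1", "#chan2"])
assert args.port == 6667                       # default port
assert args.channels == ["#chan1", "#chan2"]   # nargs='*' collects a list
assert args.ssl is False                       # store_true default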
240,855
|
mdeous/fatbotslim
|
fatbotslim/cli.py
|
make_bot
|
def make_bot():
"""
Creates a new bot instance ready to be launched.
"""
parser = make_parser()
args = parser.parse_args()
settings = {
'server': args.server,
'port': args.port,
'ssl': args.ssl,
'nick': args.nick,
'realname': args.name,
'channels': args.channels or [],
'loglevel': args.log,
}
return IRC(settings)
|
python
|
def make_bot():
"""
Creates a new bot instance ready to be launched.
"""
parser = make_parser()
args = parser.parse_args()
settings = {
'server': args.server,
'port': args.port,
'ssl': args.ssl,
'nick': args.nick,
'realname': args.name,
'channels': args.channels or [],
'loglevel': args.log,
}
return IRC(settings)
|
[
"def",
"make_bot",
"(",
")",
":",
"parser",
"=",
"make_parser",
"(",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"settings",
"=",
"{",
"'server'",
":",
"args",
".",
"server",
",",
"'port'",
":",
"args",
".",
"port",
",",
"'ssl'",
":",
"args",
".",
"ssl",
",",
"'nick'",
":",
"args",
".",
"nick",
",",
"'realname'",
":",
"args",
".",
"name",
",",
"'channels'",
":",
"args",
".",
"channels",
"or",
"[",
"]",
",",
"'loglevel'",
":",
"args",
".",
"log",
",",
"}",
"return",
"IRC",
"(",
"settings",
")"
] |
Creates a new bot instance ready to be launched.
|
[
"Creates",
"a",
"new",
"bot",
"instance",
"ready",
"to",
"be",
"launched",
"."
] |
341595d24454a79caee23750eac271f9d0626c88
|
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/cli.py#L99-L114
|
240,856
|
mdeous/fatbotslim
|
fatbotslim/cli.py
|
main
|
def main(bot):
"""
Entry point for the command line launcher.
:param bot: the IRC bot to run
:type bot: :class:`fatbotslim.irc.bot.IRC`
"""
greenlet = spawn(bot.run)
try:
greenlet.join()
except KeyboardInterrupt:
print '' # cosmetics matters
log.info("Killed by user, disconnecting...")
bot.disconnect()
finally:
greenlet.kill()
|
python
|
def main(bot):
"""
Entry point for the command line launcher.
:param bot: the IRC bot to run
:type bot: :class:`fatbotslim.irc.bot.IRC`
"""
greenlet = spawn(bot.run)
try:
greenlet.join()
except KeyboardInterrupt:
print '' # cosmetics matters
log.info("Killed by user, disconnecting...")
bot.disconnect()
finally:
greenlet.kill()
|
[
"def",
"main",
"(",
"bot",
")",
":",
"greenlet",
"=",
"spawn",
"(",
"bot",
".",
"run",
")",
"try",
":",
"greenlet",
".",
"join",
"(",
")",
"except",
"KeyboardInterrupt",
":",
"print",
"''",
"# cosmetics matters",
"log",
".",
"info",
"(",
"\"Killed by user, disconnecting...\"",
")",
"bot",
".",
"disconnect",
"(",
")",
"finally",
":",
"greenlet",
".",
"kill",
"(",
")"
] |
Entry point for the command line launcher.
:param bot: the IRC bot to run
:type bot: :class:`fatbotslim.irc.bot.IRC`
|
[
"Entry",
"point",
"for",
"the",
"command",
"line",
"launcher",
"."
] |
341595d24454a79caee23750eac271f9d0626c88
|
https://github.com/mdeous/fatbotslim/blob/341595d24454a79caee23750eac271f9d0626c88/fatbotslim/cli.py#L117-L132
|
240,857
|
fr33jc/bang
|
bang/stack.py
|
Stack.find_first
|
def find_first(self, attr_name, resources, extra_prefix=''):
"""
Returns the boto object for the first resource in ``resources`` that
belongs to this stack. Uses the attribute specified by ``attr_name``
to match the stack name.
E.g. An RDS instance for a stack named ``foo`` might be named
``foo-mydb-fis8932ifs``. This call::
find_first('id', conn.get_all_dbinstances())
would return the boto.rds.dbinstance.DBInstance object whose ``id`` is
``foo-mydb-fis8932ifs``.
Returns None if a matching resource is not found.
If specified, ``extra_prefix`` is appended to the stack name prefix
before matching.
"""
prefix = self.name + '-' + (extra_prefix + '-' if extra_prefix else '')
for res in resources:
attr = getattr(res, attr_name)
if attr.startswith(prefix):
return res
|
python
|
def find_first(self, attr_name, resources, extra_prefix=''):
"""
Returns the boto object for the first resource in ``resources`` that
belongs to this stack. Uses the attribute specified by ``attr_name``
to match the stack name.
E.g. An RDS instance for a stack named ``foo`` might be named
``foo-mydb-fis8932ifs``. This call::
find_first('id', conn.get_all_dbinstances())
would return the boto.rds.dbinstance.DBInstance object whose ``id`` is
``foo-mydb-fis8932ifs``.
Returns None if a matching resource is not found.
If specified, ``extra_prefix`` is appended to the stack name prefix
before matching.
"""
prefix = self.name + '-' + (extra_prefix + '-' if extra_prefix else '')
for res in resources:
attr = getattr(res, attr_name)
if attr.startswith(prefix):
return res
|
[
"def",
"find_first",
"(",
"self",
",",
"attr_name",
",",
"resources",
",",
"extra_prefix",
"=",
"''",
")",
":",
"prefix",
"=",
"self",
".",
"name",
"+",
"'-'",
"+",
"(",
"extra_prefix",
"+",
"'-'",
"if",
"extra_prefix",
"else",
"''",
")",
"for",
"res",
"in",
"resources",
":",
"attr",
"=",
"getattr",
"(",
"res",
",",
"attr_name",
")",
"if",
"attr",
".",
"startswith",
"(",
"prefix",
")",
":",
"return",
"res"
] |
Returns the boto object for the first resource in ``resources`` that
belongs to this stack. Uses the attribute specified by ``attr_name``
to match the stack name.
E.g. An RDS instance for a stack named ``foo`` might be named
``foo-mydb-fis8932ifs``. This call::
find_first('id', conn.get_all_dbinstances())
would return the boto.rds.dbinstance.DBInstance object whose ``id`` is
``foo-mydb-fis8932ifs``.
Returns None if a matching resource is not found.
If specified, ``extra_prefix`` is appended to the stack name prefix
before matching.
|
[
"Returns",
"the",
"boto",
"object",
"for",
"the",
"first",
"resource",
"in",
"resources",
"that",
"belongs",
"to",
"this",
"stack",
".",
"Uses",
"the",
"attribute",
"specified",
"by",
"attr_name",
"to",
"match",
"the",
"stack",
"name",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/stack.py#L113-L136
|
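A standalone sketch of the prefix matching that find_first above performs. It deliberately avoids constructing a Stack or touching boto: the namedtuple stands in for a boto resource object, and the matching line mirrors the method body for a stack named "foo" with no extra_prefix.

from collections import namedtuple

DBInstance = namedtuple("DBInstance", "id")
resources = [DBInstance("bar-mydb-111"), DBInstance("foo-mydb-fis8932ifs")]

prefix = "foo" + "-"   # stack name plus separator, as built in find_first
match = next((r for r in resources if getattr(r, "id").startswith(prefix)), None)
print(match)           # DBInstance(id='foo-mydb-fis8932ifs')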
240,858
|
fr33jc/bang
|
bang/stack.py
|
Stack.add_lb_secgroup
|
def add_lb_secgroup(self, lb_name, hosts, port):
"""
Used by the load balancer deployer to register a hostname
for a load balancer, in order that security group rules can be
applied later. This is multiprocess-safe, but since keys are
accessed only be a single load balancer deployer there should be
no conflicts.
:param str lb_name: The load balancer name (as per the config file)
:param :class:`list` hosts: The load balancer host[s], once known
:param port: The backend port that the LB will connect on
"""
self.lb_sec_groups.merge(lb_name, {'hosts': hosts, 'port': port})
|
python
|
def add_lb_secgroup(self, lb_name, hosts, port):
"""
Used by the load balancer deployer to register a hostname
for a load balancer, in order that security group rules can be
applied later. This is multiprocess-safe, but since keys are
        accessed only by a single load balancer deployer there should be
no conflicts.
:param str lb_name: The load balancer name (as per the config file)
:param :class:`list` hosts: The load balancer host[s], once known
:param port: The backend port that the LB will connect on
"""
self.lb_sec_groups.merge(lb_name, {'hosts': hosts, 'port': port})
|
[
"def",
"add_lb_secgroup",
"(",
"self",
",",
"lb_name",
",",
"hosts",
",",
"port",
")",
":",
"self",
".",
"lb_sec_groups",
".",
"merge",
"(",
"lb_name",
",",
"{",
"'hosts'",
":",
"hosts",
",",
"'port'",
":",
"port",
"}",
")"
] |
Used by the load balancer deployer to register a hostname
for a load balancer, in order that security group rules can be
applied later. This is multiprocess-safe, but since keys are
accessed only by a single load balancer deployer there should be
no conflicts.
:param str lb_name: The load balancer name (as per the config file)
:param :class:`list` hosts: The load balancer host[s], once known
:param port: The backend port that the LB will connect on
|
[
"Used",
"by",
"the",
"load",
"balancer",
"deployer",
"to",
"register",
"a",
"hostname",
"for",
"a",
"load",
"balancer",
"in",
"order",
"that",
"security",
"group",
"rules",
"can",
"be",
"applied",
"later",
".",
"This",
"is",
"multiprocess",
"-",
"safe",
"but",
"since",
"keys",
"are",
"accessed",
"only",
"be",
"a",
"single",
"load",
"balancer",
"deployer",
"there",
"should",
"be",
"no",
"conflicts",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/stack.py#L138-L152
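A short sketch of how a deployer might call add_lb_secgroup above; the load balancer name, host list, and port are made-up values.

# Hypothetical call made by a load balancer deployer once the LB host is known.
stack.add_lb_secgroup('frontend', ['lb-1.example.com'], 8080)
# The stack now holds {'hosts': ['lb-1.example.com'], 'port': 8080} under the
# key 'frontend' for security group rules to consume later.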
|
240,859
|
fr33jc/bang
|
bang/stack.py
|
Stack.add_host
|
def add_host(self, host, group_names=None, host_vars=None):
"""
Used by deployers to add hosts to the inventory.
:param str host: The host identifier (e.g. hostname, IP address) to
use in the inventory.
:param list group_names: A list of group names to which the host
belongs. **Note: This list will be sorted in-place.**
:param dict host_vars: A mapping object of host *variables*. This can
be a nested structure, and is used as the source of all the
variables provided to the ansible playbooks. **Note: Additional
key-value pairs (e.g. dynamic ansible values like
``inventory_hostname``) will be inserted into this mapping
object.**
"""
gnames = group_names if group_names else []
hvars = host_vars if host_vars else {}
# Add in ansible's magic variables. Assign them here because this is
# just about the earliest point we can calculate them before anything
# ansible-related (e.g. Stack.configure(), ``bang --host``) executes.
gnames.sort()
hvars[A.server.GROUP_NAMES] = gnames
hvars[A.server.INV_NAME] = host
hvars[A.server.INV_NAME_SHORT] = host.split('.')[0]
self.groups_and_vars.merge(host, hvars)
for gname in group_names:
self.groups_and_vars.append(gname, host)
|
python
|
def add_host(self, host, group_names=None, host_vars=None):
"""
Used by deployers to add hosts to the inventory.
:param str host: The host identifier (e.g. hostname, IP address) to
use in the inventory.
:param list group_names: A list of group names to which the host
belongs. **Note: This list will be sorted in-place.**
:param dict host_vars: A mapping object of host *variables*. This can
be a nested structure, and is used as the source of all the
variables provided to the ansible playbooks. **Note: Additional
key-value pairs (e.g. dynamic ansible values like
``inventory_hostname``) will be inserted into this mapping
object.**
"""
gnames = group_names if group_names else []
hvars = host_vars if host_vars else {}
# Add in ansible's magic variables. Assign them here because this is
# just about the earliest point we can calculate them before anything
# ansible-related (e.g. Stack.configure(), ``bang --host``) executes.
gnames.sort()
hvars[A.server.GROUP_NAMES] = gnames
hvars[A.server.INV_NAME] = host
hvars[A.server.INV_NAME_SHORT] = host.split('.')[0]
self.groups_and_vars.merge(host, hvars)
for gname in group_names:
self.groups_and_vars.append(gname, host)
|
[
"def",
"add_host",
"(",
"self",
",",
"host",
",",
"group_names",
"=",
"None",
",",
"host_vars",
"=",
"None",
")",
":",
"gnames",
"=",
"group_names",
"if",
"group_names",
"else",
"[",
"]",
"hvars",
"=",
"host_vars",
"if",
"host_vars",
"else",
"{",
"}",
"# Add in ansible's magic variables. Assign them here because this is",
"# just about the earliest point we can calculate them before anything",
"# ansible-related (e.g. Stack.configure(), ``bang --host``) executes.",
"gnames",
".",
"sort",
"(",
")",
"hvars",
"[",
"A",
".",
"server",
".",
"GROUP_NAMES",
"]",
"=",
"gnames",
"hvars",
"[",
"A",
".",
"server",
".",
"INV_NAME",
"]",
"=",
"host",
"hvars",
"[",
"A",
".",
"server",
".",
"INV_NAME_SHORT",
"]",
"=",
"host",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"self",
".",
"groups_and_vars",
".",
"merge",
"(",
"host",
",",
"hvars",
")",
"for",
"gname",
"in",
"group_names",
":",
"self",
".",
"groups_and_vars",
".",
"append",
"(",
"gname",
",",
"host",
")"
] |
Used by deployers to add hosts to the inventory.
:param str host: The host identifier (e.g. hostname, IP address) to
use in the inventory.
:param list group_names: A list of group names to which the host
belongs. **Note: This list will be sorted in-place.**
:param dict host_vars: A mapping object of host *variables*. This can
be a nested structure, and is used as the source of all the
variables provided to the ansible playbooks. **Note: Additional
key-value pairs (e.g. dynamic ansible values like
``inventory_hostname``) will be inserted into this mapping
object.**
|
[
"Used",
"by",
"deployers",
"to",
"add",
"hosts",
"to",
"the",
"inventory",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/stack.py#L154-L186
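An illustrative call to add_host; the host name, groups, and variables are assumptions chosen only to show the in-place mutations the docstring warns about.

# Hypothetical inventory registration by a server deployer.
host_vars = {'app_port': 8080}
stack.add_host(
    'web-1.example.com',
    group_names=['web', 'production'],   # will be sorted in place
    host_vars=host_vars,                 # magic ansible values are inserted here
)
# host_vars now also carries the sorted group list, the inventory name
# 'web-1.example.com', and the short name 'web-1' under the A.server.* keys.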
|
240,860
|
fr33jc/bang
|
bang/stack.py
|
Stack.describe
|
def describe(self):
"""Iterates through the deployers but doesn't run anything"""
for stage, corunners in self.get_deployers():
print self.name, "STAGE ", stage
for d in corunners:
print d.__class__.__name__, ",".join(
[p[1].__name__ for p in d.phases]
)
|
python
|
def describe(self):
"""Iterates through the deployers but doesn't run anything"""
for stage, corunners in self.get_deployers():
print self.name, "STAGE ", stage
for d in corunners:
print d.__class__.__name__, ",".join(
[p[1].__name__ for p in d.phases]
)
|
[
"def",
"describe",
"(",
"self",
")",
":",
"for",
"stage",
",",
"corunners",
"in",
"self",
".",
"get_deployers",
"(",
")",
":",
"print",
"self",
".",
"name",
",",
"\"STAGE \"",
",",
"stage",
"for",
"d",
"in",
"corunners",
":",
"print",
"d",
".",
"__class__",
".",
"__name__",
",",
"\",\"",
".",
"join",
"(",
"[",
"p",
"[",
"1",
"]",
".",
"__name__",
"for",
"p",
"in",
"d",
".",
"phases",
"]",
")"
] |
Iterates through the deployers but doesn't run anything
|
[
"Iterates",
"through",
"the",
"deployers",
"but",
"doesn",
"t",
"run",
"anything"
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/stack.py#L188-L195
|
240,861
|
fr33jc/bang
|
bang/stack.py
|
Stack.configure
|
def configure(self):
"""
Executes the ansible playbooks that configure the servers in the stack.
Assumes that the root playbook directory is ``./playbooks/`` relative
to the stack configuration file. Also sets the ansible *module_path*
to be ``./common_modules/`` relative to the stack configuration file.
E.g. If the stack configuration file is::
$HOME/bang-stacks/my_web_service.yml
then the root playbook directory is::
$HOME/bang-stacks/playbooks/
and the ansible module path is::
$HOME/bang-stacks/common_modules/
"""
cfg = self.config
bang_config_dir = os.path.abspath(
os.path.dirname(cfg.filepath)
)
playbook_dir = os.path.join(bang_config_dir, 'playbooks')
creds = cfg.get(A.DEPLOYER_CREDS, {})
pb_kwargs = {
# this allows connection reuse using "ControlPersist":
'transport': 'ssh',
'module_path': os.path.join(bang_config_dir, 'common_modules'),
'remote_pass': creds.get(A.creds.SSH_PASS),
# TODO: determine forks
# 'forks': options.forks,
}
# only add the 'remote_user' kwarg if it's in the config, otherwise use
# ansible's default behaviour.
ssh_user = creds.get(A.creds.SSH_USER)
if ssh_user:
pb_kwargs['remote_user'] = ssh_user
ansible_cfg = cfg.get(A.ANSIBLE, {})
ansible_verbosity = ansible_cfg.get(A.ansible.VERBOSITY, 1)
ansible.utils.VERBOSITY = ansible_verbosity
for playbook in cfg.get(A.PLAYBOOKS, []):
playbook_path = os.path.join(playbook_dir, playbook)
# gratuitously stolen from main() in ``ansible-playbook``
stats = callbacks.AggregateStats()
playbook_cb = callbacks.PlaybookCallbacks(
verbose=ansible_verbosity
)
runner_cb = callbacks.PlaybookRunnerCallbacks(
stats,
verbose=ansible_verbosity
)
vault_password = ansible_cfg.get(A.ansible.VAULT_PASS)
extra_kwargs = {
'playbook': playbook_path,
# TODO: do we really need new instances of the following
# for each playbook?
'callbacks': playbook_cb,
'runner_callbacks': runner_cb,
'stats': stats,
# ``host_list`` is used to generate the inventory, but
# don't worry, we override the inventory later
'host_list': [],
'vault_password': vault_password,
}
pb_kwargs.update(extra_kwargs)
pb = PlayBook(**pb_kwargs)
inventory = BangsibleInventory(
copy.deepcopy(self.groups_and_vars.lists),
copy.deepcopy(self.groups_and_vars.dicts),
vault_password=vault_password
)
inventory.set_playbook_basedir(playbook_dir)
pb.inventory = inventory
pb.run()
hosts = sorted(pb.stats.processed.keys())
playbook_cb.on_stats(pb.stats)
failed = False
for h in hosts:
hsum = pb.stats.summarize(h)
if hsum['failures'] or hsum['unreachable']:
failed = True
print "%-30s : %s" % (h, hsum)
# TODO: sort this out
# print "%-30s : %s %s %s %s " % (
# hostcolor(h, hsum),
# colorize('ok', hsum['ok'], 'green'),
# colorize('changed', hsum['changed'], 'yellow'),
# colorize('unreachable', hsum['unreachable'], 'red'),
# colorize('failed', hsum['failures'], 'red'))
if failed:
raise BangError("Server configuration failed!")
|
python
|
def configure(self):
"""
Executes the ansible playbooks that configure the servers in the stack.
Assumes that the root playbook directory is ``./playbooks/`` relative
to the stack configuration file. Also sets the ansible *module_path*
to be ``./common_modules/`` relative to the stack configuration file.
E.g. If the stack configuration file is::
$HOME/bang-stacks/my_web_service.yml
then the root playbook directory is::
$HOME/bang-stacks/playbooks/
and the ansible module path is::
$HOME/bang-stacks/common_modules/
"""
cfg = self.config
bang_config_dir = os.path.abspath(
os.path.dirname(cfg.filepath)
)
playbook_dir = os.path.join(bang_config_dir, 'playbooks')
creds = cfg.get(A.DEPLOYER_CREDS, {})
pb_kwargs = {
# this allows connection reuse using "ControlPersist":
'transport': 'ssh',
'module_path': os.path.join(bang_config_dir, 'common_modules'),
'remote_pass': creds.get(A.creds.SSH_PASS),
# TODO: determine forks
# 'forks': options.forks,
}
# only add the 'remote_user' kwarg if it's in the config, otherwise use
# ansible's default behaviour.
ssh_user = creds.get(A.creds.SSH_USER)
if ssh_user:
pb_kwargs['remote_user'] = ssh_user
ansible_cfg = cfg.get(A.ANSIBLE, {})
ansible_verbosity = ansible_cfg.get(A.ansible.VERBOSITY, 1)
ansible.utils.VERBOSITY = ansible_verbosity
for playbook in cfg.get(A.PLAYBOOKS, []):
playbook_path = os.path.join(playbook_dir, playbook)
# gratuitously stolen from main() in ``ansible-playbook``
stats = callbacks.AggregateStats()
playbook_cb = callbacks.PlaybookCallbacks(
verbose=ansible_verbosity
)
runner_cb = callbacks.PlaybookRunnerCallbacks(
stats,
verbose=ansible_verbosity
)
vault_password = ansible_cfg.get(A.ansible.VAULT_PASS)
extra_kwargs = {
'playbook': playbook_path,
# TODO: do we really need new instances of the following
# for each playbook?
'callbacks': playbook_cb,
'runner_callbacks': runner_cb,
'stats': stats,
# ``host_list`` is used to generate the inventory, but
# don't worry, we override the inventory later
'host_list': [],
'vault_password': vault_password,
}
pb_kwargs.update(extra_kwargs)
pb = PlayBook(**pb_kwargs)
inventory = BangsibleInventory(
copy.deepcopy(self.groups_and_vars.lists),
copy.deepcopy(self.groups_and_vars.dicts),
vault_password=vault_password
)
inventory.set_playbook_basedir(playbook_dir)
pb.inventory = inventory
pb.run()
hosts = sorted(pb.stats.processed.keys())
playbook_cb.on_stats(pb.stats)
failed = False
for h in hosts:
hsum = pb.stats.summarize(h)
if hsum['failures'] or hsum['unreachable']:
failed = True
print "%-30s : %s" % (h, hsum)
# TODO: sort this out
# print "%-30s : %s %s %s %s " % (
# hostcolor(h, hsum),
# colorize('ok', hsum['ok'], 'green'),
# colorize('changed', hsum['changed'], 'yellow'),
# colorize('unreachable', hsum['unreachable'], 'red'),
# colorize('failed', hsum['failures'], 'red'))
if failed:
raise BangError("Server configuration failed!")
|
[
"def",
"configure",
"(",
"self",
")",
":",
"cfg",
"=",
"self",
".",
"config",
"bang_config_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"cfg",
".",
"filepath",
")",
")",
"playbook_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"bang_config_dir",
",",
"'playbooks'",
")",
"creds",
"=",
"cfg",
".",
"get",
"(",
"A",
".",
"DEPLOYER_CREDS",
",",
"{",
"}",
")",
"pb_kwargs",
"=",
"{",
"# this allows connection reuse using \"ControlPersist\":",
"'transport'",
":",
"'ssh'",
",",
"'module_path'",
":",
"os",
".",
"path",
".",
"join",
"(",
"bang_config_dir",
",",
"'common_modules'",
")",
",",
"'remote_pass'",
":",
"creds",
".",
"get",
"(",
"A",
".",
"creds",
".",
"SSH_PASS",
")",
",",
"# TODO: determine forks",
"# 'forks': options.forks,",
"}",
"# only add the 'remote_user' kwarg if it's in the config, otherwise use",
"# ansible's default behaviour.",
"ssh_user",
"=",
"creds",
".",
"get",
"(",
"A",
".",
"creds",
".",
"SSH_USER",
")",
"if",
"ssh_user",
":",
"pb_kwargs",
"[",
"'remote_user'",
"]",
"=",
"ssh_user",
"ansible_cfg",
"=",
"cfg",
".",
"get",
"(",
"A",
".",
"ANSIBLE",
",",
"{",
"}",
")",
"ansible_verbosity",
"=",
"ansible_cfg",
".",
"get",
"(",
"A",
".",
"ansible",
".",
"VERBOSITY",
",",
"1",
")",
"ansible",
".",
"utils",
".",
"VERBOSITY",
"=",
"ansible_verbosity",
"for",
"playbook",
"in",
"cfg",
".",
"get",
"(",
"A",
".",
"PLAYBOOKS",
",",
"[",
"]",
")",
":",
"playbook_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"playbook_dir",
",",
"playbook",
")",
"# gratuitously stolen from main() in ``ansible-playbook``",
"stats",
"=",
"callbacks",
".",
"AggregateStats",
"(",
")",
"playbook_cb",
"=",
"callbacks",
".",
"PlaybookCallbacks",
"(",
"verbose",
"=",
"ansible_verbosity",
")",
"runner_cb",
"=",
"callbacks",
".",
"PlaybookRunnerCallbacks",
"(",
"stats",
",",
"verbose",
"=",
"ansible_verbosity",
")",
"vault_password",
"=",
"ansible_cfg",
".",
"get",
"(",
"A",
".",
"ansible",
".",
"VAULT_PASS",
")",
"extra_kwargs",
"=",
"{",
"'playbook'",
":",
"playbook_path",
",",
"# TODO: do we really need new instances of the following",
"# for each playbook?",
"'callbacks'",
":",
"playbook_cb",
",",
"'runner_callbacks'",
":",
"runner_cb",
",",
"'stats'",
":",
"stats",
",",
"# ``host_list`` is used to generate the inventory, but",
"# don't worry, we override the inventory later",
"'host_list'",
":",
"[",
"]",
",",
"'vault_password'",
":",
"vault_password",
",",
"}",
"pb_kwargs",
".",
"update",
"(",
"extra_kwargs",
")",
"pb",
"=",
"PlayBook",
"(",
"*",
"*",
"pb_kwargs",
")",
"inventory",
"=",
"BangsibleInventory",
"(",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"groups_and_vars",
".",
"lists",
")",
",",
"copy",
".",
"deepcopy",
"(",
"self",
".",
"groups_and_vars",
".",
"dicts",
")",
",",
"vault_password",
"=",
"vault_password",
")",
"inventory",
".",
"set_playbook_basedir",
"(",
"playbook_dir",
")",
"pb",
".",
"inventory",
"=",
"inventory",
"pb",
".",
"run",
"(",
")",
"hosts",
"=",
"sorted",
"(",
"pb",
".",
"stats",
".",
"processed",
".",
"keys",
"(",
")",
")",
"playbook_cb",
".",
"on_stats",
"(",
"pb",
".",
"stats",
")",
"failed",
"=",
"False",
"for",
"h",
"in",
"hosts",
":",
"hsum",
"=",
"pb",
".",
"stats",
".",
"summarize",
"(",
"h",
")",
"if",
"hsum",
"[",
"'failures'",
"]",
"or",
"hsum",
"[",
"'unreachable'",
"]",
":",
"failed",
"=",
"True",
"print",
"\"%-30s : %s\"",
"%",
"(",
"h",
",",
"hsum",
")",
"# TODO: sort this out",
"# print \"%-30s : %s %s %s %s \" % (",
"# hostcolor(h, hsum),",
"# colorize('ok', hsum['ok'], 'green'),",
"# colorize('changed', hsum['changed'], 'yellow'),",
"# colorize('unreachable', hsum['unreachable'], 'red'),",
"# colorize('failed', hsum['failures'], 'red'))",
"if",
"failed",
":",
"raise",
"BangError",
"(",
"\"Server configuration failed!\"",
")"
] |
Executes the ansible playbooks that configure the servers in the stack.
Assumes that the root playbook directory is ``./playbooks/`` relative
to the stack configuration file. Also sets the ansible *module_path*
to be ``./common_modules/`` relative to the stack configuration file.
E.g. If the stack configuration file is::
$HOME/bang-stacks/my_web_service.yml
then the root playbook directory is::
$HOME/bang-stacks/playbooks/
and the ansible module path is::
$HOME/bang-stacks/common_modules/
|
[
"Executes",
"the",
"ansible",
"playbooks",
"that",
"configure",
"the",
"servers",
"in",
"the",
"stack",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/stack.py#L234-L336
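A sketch of the layout and call sequence the configure docstring describes; the file names are illustrative, and the Stack construction is not shown in this record.

# Assumed layout relative to the stack configuration file (cfg.filepath):
#   bang-stacks/my_web_service.yml     <- stack config
#   bang-stacks/playbooks/site.yml     <- listed under the playbooks config key
#   bang-stacks/common_modules/        <- extra ansible modules (module_path)
#
# Given a Stack already built from that file:
stack.configure()   # runs each listed playbook; raises BangError if any host
                    # reports failures or is unreachable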
|
240,862
|
krinj/k-util
|
k_util/core.py
|
interpolate_color
|
def interpolate_color(c1, c2, factor: float) -> list:
""" Linear interpolate two 3-channel colors, using channel based interpolation. """
assert(len(c1) == len(c2))
new_color = []
for i in range(len(c1)):
new_color.append(int(interpolate(c1[i], c2[i], factor)))
return new_color
|
python
|
def interpolate_color(c1, c2, factor: float) -> list:
""" Linear interpolate two 3-channel colors, using channel based interpolation. """
assert(len(c1) == len(c2))
new_color = []
for i in range(len(c1)):
new_color.append(int(interpolate(c1[i], c2[i], factor)))
return new_color
|
[
"def",
"interpolate_color",
"(",
"c1",
",",
"c2",
",",
"factor",
":",
"float",
")",
"->",
"list",
":",
"assert",
"(",
"len",
"(",
"c1",
")",
"==",
"len",
"(",
"c2",
")",
")",
"new_color",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"c1",
")",
")",
":",
"new_color",
".",
"append",
"(",
"int",
"(",
"interpolate",
"(",
"c1",
"[",
"i",
"]",
",",
"c2",
"[",
"i",
"]",
",",
"factor",
")",
")",
")",
"return",
"new_color"
] |
Linear interpolate two 3-channel colors, using channel based interpolation.
|
[
"Linear",
"interpolate",
"two",
"3",
"-",
"channel",
"colors",
"using",
"channel",
"based",
"interpolation",
"."
] |
b118826b1d6f49ca4e1ca7327d5b171db332ac23
|
https://github.com/krinj/k-util/blob/b118826b1d6f49ca4e1ca7327d5b171db332ac23/k_util/core.py#L11-L18
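A worked example for the record above, assuming interpolate(a, b, factor) is the usual linear blend a + (b - a) * factor (its definition is not included in this record).

# Halfway between pure red and pure blue; each channel is truncated to int:
interpolate_color([255, 0, 0], [0, 0, 255], 0.5)    # -> [127, 0, 127]
# factor 0.0 keeps the first color, factor 1.0 yields the second:
interpolate_color([10, 20, 30], [40, 50, 60], 1.0)  # -> [40, 50, 60]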
|
240,863
|
JNRowe/jnrbase
|
jnrbase/human_time.py
|
human_timestamp
|
def human_timestamp(__timestamp: datetime.datetime) -> str:
"""Format a relative time.
Args:
__timestamp: Event to generate relative timestamp against
Returns:
Human readable date and time offset
"""
numstr = '. a two three four five six seven eight nine ten'.split()
matches = [
60 * 60 * 24 * 365,
60 * 60 * 24 * 28,
60 * 60 * 24 * 7,
60 * 60 * 24,
60 * 60,
60,
1,
]
match_names = ['year', 'month', 'week', 'day', 'hour', 'minute', 'second']
if __timestamp.tzinfo is None:
__timestamp = __timestamp.replace(tzinfo=datetime.timezone.utc)
now = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
delta = int((now - __timestamp).total_seconds())
for scale in matches:
i = delta // scale
if i:
name = match_names[matches.index(scale)]
break
else:
i = 0 # Too small
if i == 0:
result = 'right now'
elif i == 1 and name in ('year', 'month', 'week'):
result = 'last {}'.format(name)
elif i == 1 and name == 'day':
result = 'yesterday'
elif i == 1 and name == 'hour':
result = 'about an hour ago'
else:
result = 'about {} {}{} ago'.format(i if i > 10 else numstr[i], name,
's' if i > 1 else '')
return result
|
python
|
def human_timestamp(__timestamp: datetime.datetime) -> str:
"""Format a relative time.
Args:
__timestamp: Event to generate relative timestamp against
Returns:
Human readable date and time offset
"""
numstr = '. a two three four five six seven eight nine ten'.split()
matches = [
60 * 60 * 24 * 365,
60 * 60 * 24 * 28,
60 * 60 * 24 * 7,
60 * 60 * 24,
60 * 60,
60,
1,
]
match_names = ['year', 'month', 'week', 'day', 'hour', 'minute', 'second']
if __timestamp.tzinfo is None:
__timestamp = __timestamp.replace(tzinfo=datetime.timezone.utc)
now = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc)
delta = int((now - __timestamp).total_seconds())
for scale in matches:
i = delta // scale
if i:
name = match_names[matches.index(scale)]
break
else:
i = 0 # Too small
if i == 0:
result = 'right now'
elif i == 1 and name in ('year', 'month', 'week'):
result = 'last {}'.format(name)
elif i == 1 and name == 'day':
result = 'yesterday'
elif i == 1 and name == 'hour':
result = 'about an hour ago'
else:
result = 'about {} {}{} ago'.format(i if i > 10 else numstr[i], name,
's' if i > 1 else '')
return result
|
[
"def",
"human_timestamp",
"(",
"__timestamp",
":",
"datetime",
".",
"datetime",
")",
"->",
"str",
":",
"numstr",
"=",
"'. a two three four five six seven eight nine ten'",
".",
"split",
"(",
")",
"matches",
"=",
"[",
"60",
"*",
"60",
"*",
"24",
"*",
"365",
",",
"60",
"*",
"60",
"*",
"24",
"*",
"28",
",",
"60",
"*",
"60",
"*",
"24",
"*",
"7",
",",
"60",
"*",
"60",
"*",
"24",
",",
"60",
"*",
"60",
",",
"60",
",",
"1",
",",
"]",
"match_names",
"=",
"[",
"'year'",
",",
"'month'",
",",
"'week'",
",",
"'day'",
",",
"'hour'",
",",
"'minute'",
",",
"'second'",
"]",
"if",
"__timestamp",
".",
"tzinfo",
"is",
"None",
":",
"__timestamp",
"=",
"__timestamp",
".",
"replace",
"(",
"tzinfo",
"=",
"datetime",
".",
"timezone",
".",
"utc",
")",
"now",
"=",
"datetime",
".",
"datetime",
".",
"utcnow",
"(",
")",
".",
"replace",
"(",
"tzinfo",
"=",
"datetime",
".",
"timezone",
".",
"utc",
")",
"delta",
"=",
"int",
"(",
"(",
"now",
"-",
"__timestamp",
")",
".",
"total_seconds",
"(",
")",
")",
"for",
"scale",
"in",
"matches",
":",
"i",
"=",
"delta",
"//",
"scale",
"if",
"i",
":",
"name",
"=",
"match_names",
"[",
"matches",
".",
"index",
"(",
"scale",
")",
"]",
"break",
"else",
":",
"i",
"=",
"0",
"# Too small",
"if",
"i",
"==",
"0",
":",
"result",
"=",
"'right now'",
"elif",
"i",
"==",
"1",
"and",
"name",
"in",
"(",
"'year'",
",",
"'month'",
",",
"'week'",
")",
":",
"result",
"=",
"'last {}'",
".",
"format",
"(",
"name",
")",
"elif",
"i",
"==",
"1",
"and",
"name",
"==",
"'day'",
":",
"result",
"=",
"'yesterday'",
"elif",
"i",
"==",
"1",
"and",
"name",
"==",
"'hour'",
":",
"result",
"=",
"'about an hour ago'",
"else",
":",
"result",
"=",
"'about {} {}{} ago'",
".",
"format",
"(",
"i",
"if",
"i",
">",
"10",
"else",
"numstr",
"[",
"i",
"]",
",",
"name",
",",
"'s'",
"if",
"i",
">",
"1",
"else",
"''",
")",
"return",
"result"
] |
Format a relative time.
Args:
__timestamp: Event to generate relative timestamp against
Returns:
Human readable date and time offset
|
[
"Format",
"a",
"relative",
"time",
"."
] |
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
|
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/human_time.py#L25-L71
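A few illustrative calls; the expected strings follow directly from the branches in the code above.

from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc)
human_timestamp(now)                          # 'right now'
human_timestamp(now - timedelta(minutes=5))   # 'about five minutes ago'
human_timestamp(now - timedelta(hours=1))     # 'about an hour ago'
human_timestamp(now - timedelta(days=1))      # 'yesterday'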
|
240,864
|
bluecap-se/yarr.client
|
yarr_client/app.py
|
configurate_app
|
def configurate_app(config_file=''):
"""
Configures Flask app
:param config_file: Absolute path to Py config file, optional
:returns: App object, host and port
"""
# Load config
app.config.from_pyfile('defaults.py')
app.config.from_pyfile(config_file, silent=True)
if app.config.get('MINIFY_HTML', False):
app.jinja_env.add_extension('flask_utils.jinja2htmlcompress.HTMLCompress')
# Setup web assets
assets = Environment(app)
js = Bundle('common.js', filters='jsmin', output='gen/main.%(version)s.js')
css = Bundle('common.css', filters='cssmin', output='gen/main.%(version)s.css')
assets.register('js_all', js)
assets.register('css_all', css)
# Set host and port
port = app.config.get('PORT', 5000)
host = app.config.get('HOST', '127.0.0.1')
return app, host, port
|
python
|
def configurate_app(config_file=''):
"""
Configures Flask app
:param config_file: Absolute path to Py config file, optional
:returns: App object, host and port
"""
# Load config
app.config.from_pyfile('defaults.py')
app.config.from_pyfile(config_file, silent=True)
if app.config.get('MINIFY_HTML', False):
app.jinja_env.add_extension('flask_utils.jinja2htmlcompress.HTMLCompress')
# Setup web assets
assets = Environment(app)
js = Bundle('common.js', filters='jsmin', output='gen/main.%(version)s.js')
css = Bundle('common.css', filters='cssmin', output='gen/main.%(version)s.css')
assets.register('js_all', js)
assets.register('css_all', css)
# Set host and port
port = app.config.get('PORT', 5000)
host = app.config.get('HOST', '127.0.0.1')
return app, host, port
|
[
"def",
"configurate_app",
"(",
"config_file",
"=",
"''",
")",
":",
"# Load config",
"app",
".",
"config",
".",
"from_pyfile",
"(",
"'defaults.py'",
")",
"app",
".",
"config",
".",
"from_pyfile",
"(",
"config_file",
",",
"silent",
"=",
"True",
")",
"if",
"app",
".",
"config",
".",
"get",
"(",
"'MINIFY_HTML'",
",",
"False",
")",
":",
"app",
".",
"jinja_env",
".",
"add_extension",
"(",
"'flask_utils.jinja2htmlcompress.HTMLCompress'",
")",
"# Setup web assets",
"assets",
"=",
"Environment",
"(",
"app",
")",
"js",
"=",
"Bundle",
"(",
"'common.js'",
",",
"filters",
"=",
"'jsmin'",
",",
"output",
"=",
"'gen/main.%(version)s.js'",
")",
"css",
"=",
"Bundle",
"(",
"'common.css'",
",",
"filters",
"=",
"'cssmin'",
",",
"output",
"=",
"'gen/main.%(version)s.css'",
")",
"assets",
".",
"register",
"(",
"'js_all'",
",",
"js",
")",
"assets",
".",
"register",
"(",
"'css_all'",
",",
"css",
")",
"# Set host and port",
"port",
"=",
"app",
".",
"config",
".",
"get",
"(",
"'PORT'",
",",
"5000",
")",
"host",
"=",
"app",
".",
"config",
".",
"get",
"(",
"'HOST'",
",",
"'127.0.0.1'",
")",
"return",
"app",
",",
"host",
",",
"port"
] |
Configures Flask app
:param config_file: Absolute path to Py config file, optional
:returns: App object, host and port
|
[
"Configures",
"Flask",
"app"
] |
ec0c053fe8a7e92eb27b68b7a135caecde30c81d
|
https://github.com/bluecap-se/yarr.client/blob/ec0c053fe8a7e92eb27b68b7a135caecde30c81d/yarr_client/app.py#L16-L43
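A sketch of how the returned triple might be used; the override file and its settings are hypothetical, and `app` is the module-level Flask application the record relies on.

# my_config.py (hypothetical overrides applied on top of defaults.py):
#   HOST = '0.0.0.0'
#   PORT = 8000
#   MINIFY_HTML = True

app, host, port = configurate_app('/abs/path/to/my_config.py')
app.run(host=host, port=port)   # falls back to 127.0.0.1:5000 without overrides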
|
240,865
|
bluecap-se/yarr.client
|
yarr_client/app.py
|
create_request
|
def create_request(query):
"""
Creates a GET request to Yarr! server
:param query: Free-text search query
:returns: Requests object
"""
yarr_url = app.config.get('YARR_URL', False)
if not yarr_url:
raise('No URL to Yarr! server specified in config.')
api_token = app.config.get('YARR_API_TOKEN', False)
headers = {'X-API-KEY': api_token} if api_token else {}
payload = {'q': query}
url = '%s/search' % yarr_url
return requests.get(url, params=payload, headers=headers)
|
python
|
def create_request(query):
"""
Creates a GET request to Yarr! server
:param query: Free-text search query
:returns: Requests object
"""
yarr_url = app.config.get('YARR_URL', False)
if not yarr_url:
raise('No URL to Yarr! server specified in config.')
api_token = app.config.get('YARR_API_TOKEN', False)
headers = {'X-API-KEY': api_token} if api_token else {}
payload = {'q': query}
url = '%s/search' % yarr_url
return requests.get(url, params=payload, headers=headers)
|
[
"def",
"create_request",
"(",
"query",
")",
":",
"yarr_url",
"=",
"app",
".",
"config",
".",
"get",
"(",
"'YARR_URL'",
",",
"False",
")",
"if",
"not",
"yarr_url",
":",
"raise",
"(",
"'No URL to Yarr! server specified in config.'",
")",
"api_token",
"=",
"app",
".",
"config",
".",
"get",
"(",
"'YARR_API_TOKEN'",
",",
"False",
")",
"headers",
"=",
"{",
"'X-API-KEY'",
":",
"api_token",
"}",
"if",
"api_token",
"else",
"{",
"}",
"payload",
"=",
"{",
"'q'",
":",
"query",
"}",
"url",
"=",
"'%s/search'",
"%",
"yarr_url",
"return",
"requests",
".",
"get",
"(",
"url",
",",
"params",
"=",
"payload",
",",
"headers",
"=",
"headers",
")"
] |
Creates a GET request to Yarr! server
:param query: Free-text search query
:returns: Requests object
|
[
"Creates",
"a",
"GET",
"request",
"to",
"Yarr!",
"server"
] |
ec0c053fe8a7e92eb27b68b7a135caecde30c81d
|
https://github.com/bluecap-se/yarr.client/blob/ec0c053fe8a7e92eb27b68b7a135caecde30c81d/yarr_client/app.py#L46-L63
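An assumed call sequence; the query is made up and the shape of the server's JSON reply is not described by this record. Note that if YARR_URL is missing, the bare raise('...') in the function would itself fail with a TypeError, so the config value is effectively mandatory.

# Requires YARR_URL (and optionally YARR_API_TOKEN) in the Flask config.
response = create_request('python packaging')
response.raise_for_status()
results = response.json()   # structure depends on the Yarr! server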
|
240,866
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.requires_authentication
|
def requires_authentication(func):
"""
Function decorator that throws an exception if the user
is not authenticated, and executes the function normally
if the user is authenticated.
"""
def _auth(self, *args, **kwargs):
if not self._authenticated:
raise NotAuthenticatedException('Function {} requires'
.format(func.__name__)
+ ' authentication')
else:
return func(self, *args, **kwargs)
return _auth
|
python
|
def requires_authentication(func):
"""
Function decorator that throws an exception if the user
is not authenticated, and executes the function normally
if the user is authenticated.
"""
def _auth(self, *args, **kwargs):
if not self._authenticated:
raise NotAuthenticatedException('Function {} requires'
.format(func.__name__)
+ ' authentication')
else:
return func(self, *args, **kwargs)
return _auth
|
[
"def",
"requires_authentication",
"(",
"func",
")",
":",
"def",
"_auth",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"_authenticated",
":",
"raise",
"NotAuthenticatedException",
"(",
"'Function {} requires'",
".",
"format",
"(",
"func",
".",
"__name__",
")",
"+",
"' authentication'",
")",
"else",
":",
"return",
"func",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"_auth"
] |
Function decorator that throws an exception if the user
is not authenticated, and executes the function normally
if the user is authenticated.
|
[
"Function",
"decorator",
"that",
"throws",
"an",
"exception",
"if",
"the",
"user",
"is",
"not",
"authenticated",
"and",
"executes",
"the",
"function",
"normally",
"if",
"the",
"user",
"is",
"authenticated",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L31-L44
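A self-contained illustration of the decorator on a hypothetical class; the wrapper only needs an `_authenticated` attribute, and NotAuthenticatedException is the exception class used by this module.

class DemoClient(object):
    def __init__(self, authenticated=False):
        self._authenticated = authenticated

    @requires_authentication
    def fetch(self):
        return 'payload'

DemoClient(authenticated=True).fetch()   # returns 'payload'
DemoClient().fetch()                     # raises NotAuthenticatedException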
|
240,867
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.get_user_info
|
def get_user_info(self):
"""
Returns a TSquareUser object representing the currently logged in user.
Throws a NotAuthenticatedException if the user is not authenticated.
"""
response = self._session.get(BASE_URL_TSQUARE + '/user/current.json')
response.raise_for_status() # raises an exception if not 200: OK
user_data = response.json()
del user_data['password'] # tsquare doesn't store passwords
return TSquareUser(**user_data)
|
python
|
def get_user_info(self):
"""
Returns a TSquareUser object representing the currently logged in user.
Throws a NotAuthenticatedException if the user is not authenticated.
"""
response = self._session.get(BASE_URL_TSQUARE + '/user/current.json')
response.raise_for_status() # raises an exception if not 200: OK
user_data = response.json()
del user_data['password'] # tsquare doesn't store passwords
return TSquareUser(**user_data)
|
[
"def",
"get_user_info",
"(",
"self",
")",
":",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"BASE_URL_TSQUARE",
"+",
"'/user/current.json'",
")",
"response",
".",
"raise_for_status",
"(",
")",
"# raises an exception if not 200: OK",
"user_data",
"=",
"response",
".",
"json",
"(",
")",
"del",
"user_data",
"[",
"'password'",
"]",
"# tsquare doesn't store passwords",
"return",
"TSquareUser",
"(",
"*",
"*",
"user_data",
")"
] |
Returns a TSquareUser object representing the currently logged in user.
Throws a NotAuthenticatedException if the user is not authenticated.
|
[
"Returns",
"a",
"TSquareUser",
"object",
"representing",
"the",
"currently",
"logged",
"in",
"user",
".",
"Throws",
"a",
"NotAuthenticatedException",
"if",
"the",
"user",
"is",
"not",
"authenticated",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L75-L84
|
240,868
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.get_site_by_id
|
def get_site_by_id(self, id):
"""
Looks up a site by ID and returns a TSquareSite representing that
object, or throws an exception if no such site is found.
@param id - The entityID of the site to look up
@returns A TSquareSite object
"""
response = self._session.get(BASE_URL_TSQUARE + '/site/{}.json'.format(id))
response.raise_for_status()
site_data = response.json()
return TSquareSite(**site_data)
|
python
|
def get_site_by_id(self, id):
"""
Looks up a site by ID and returns a TSquareSite representing that
object, or throws an exception if no such site is found.
@param id - The entityID of the site to look up
@returns A TSquareSite object
"""
response = self._session.get(BASE_URL_TSQUARE + '/site/{}.json'.format(id))
response.raise_for_status()
site_data = response.json()
return TSquareSite(**site_data)
|
[
"def",
"get_site_by_id",
"(",
"self",
",",
"id",
")",
":",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"BASE_URL_TSQUARE",
"+",
"'/site/{}.json'",
".",
"format",
"(",
"id",
")",
")",
"response",
".",
"raise_for_status",
"(",
")",
"site_data",
"=",
"response",
".",
"json",
"(",
")",
"return",
"TSquareSite",
"(",
"*",
"*",
"site_data",
")"
] |
Looks up a site by ID and returns a TSquareSite representing that
object, or throws an exception if no such site is found.
@param id - The entityID of the site to look up
@returns A TSquareSite object
|
[
"Looks",
"up",
"a",
"site",
"by",
"ID",
"and",
"returns",
"a",
"TSquareSite",
"representing",
"that",
"object",
"or",
"throws",
"an",
"exception",
"if",
"no",
"such",
"site",
"is",
"found",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L87-L97
|
240,869
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.get_sites
|
def get_sites(self, filter_func=lambda x: True):
"""
Returns a list of TSquareSite objects that represent the sites available
to a user.
@param filter_func - A function taking in a Site object as a parameter
that returns a True or False, depending on whether
or not that site should be returned by this
function. Filter_func should be used to create
filters on the list of sites (i.e. user's
preferences on what sites to display by default).
If not specified, no filter is applied.
@returns - A list of TSquareSite objects encapsulating t-square's JSON
response.
"""
response = self._session.get(BASE_URL_TSQUARE + 'site.json')
response.raise_for_status() # raise an exception if not 200: OK
site_list = response.json()['site_collection']
if not site_list:
# this means that this t-square session expired. It's up
# to the user to re-authenticate.
self._authenticated = False
raise SessionExpiredException('The session has expired')
result_list = []
for site in site_list:
t_site = TSquareSite(**site)
if not hasattr(t_site, "props"):
t_site.props = {}
if not 'banner-crn' in t_site.props:
t_site.props['banner-crn'] = None
if not 'term' in t_site.props:
t_site.props['term'] = None
if not 'term_eid' in t_site.props:
t_site.props['term_eid'] = None
if filter_func(t_site):
result_list.append(t_site)
return result_list
|
python
|
def get_sites(self, filter_func=lambda x: True):
"""
Returns a list of TSquareSite objects that represent the sites available
to a user.
@param filter_func - A function taking in a Site object as a parameter
that returns a True or False, depending on whether
or not that site should be returned by this
function. Filter_func should be used to create
filters on the list of sites (i.e. user's
preferences on what sites to display by default).
If not specified, no filter is applied.
@returns - A list of TSquareSite objects encapsulating t-square's JSON
response.
"""
response = self._session.get(BASE_URL_TSQUARE + 'site.json')
response.raise_for_status() # raise an exception if not 200: OK
site_list = response.json()['site_collection']
if not site_list:
# this means that this t-square session expired. It's up
# to the user to re-authenticate.
self._authenticated = False
raise SessionExpiredException('The session has expired')
result_list = []
for site in site_list:
t_site = TSquareSite(**site)
if not hasattr(t_site, "props"):
t_site.props = {}
if not 'banner-crn' in t_site.props:
t_site.props['banner-crn'] = None
if not 'term' in t_site.props:
t_site.props['term'] = None
if not 'term_eid' in t_site.props:
t_site.props['term_eid'] = None
if filter_func(t_site):
result_list.append(t_site)
return result_list
|
[
"def",
"get_sites",
"(",
"self",
",",
"filter_func",
"=",
"lambda",
"x",
":",
"True",
")",
":",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"BASE_URL_TSQUARE",
"+",
"'site.json'",
")",
"response",
".",
"raise_for_status",
"(",
")",
"# raise an exception if not 200: OK",
"site_list",
"=",
"response",
".",
"json",
"(",
")",
"[",
"'site_collection'",
"]",
"if",
"not",
"site_list",
":",
"# this means that this t-square session expired. It's up",
"# to the user to re-authenticate.",
"self",
".",
"_authenticated",
"=",
"False",
"raise",
"SessionExpiredException",
"(",
"'The session has expired'",
")",
"result_list",
"=",
"[",
"]",
"for",
"site",
"in",
"site_list",
":",
"t_site",
"=",
"TSquareSite",
"(",
"*",
"*",
"site",
")",
"if",
"not",
"hasattr",
"(",
"t_site",
",",
"\"props\"",
")",
":",
"t_site",
".",
"props",
"=",
"{",
"}",
"if",
"not",
"'banner-crn'",
"in",
"t_site",
".",
"props",
":",
"t_site",
".",
"props",
"[",
"'banner-crn'",
"]",
"=",
"None",
"if",
"not",
"'term'",
"in",
"t_site",
".",
"props",
":",
"t_site",
".",
"props",
"[",
"'term'",
"]",
"=",
"None",
"if",
"not",
"'term_eid'",
"in",
"t_site",
".",
"props",
":",
"t_site",
".",
"props",
"[",
"'term_eid'",
"]",
"=",
"None",
"if",
"filter_func",
"(",
"t_site",
")",
":",
"result_list",
".",
"append",
"(",
"t_site",
")",
"return",
"result_list"
] |
Returns a list of TSquareSite objects that represent the sites available
to a user.
@param filter_func - A function taking in a Site object as a parameter
that returns a True or False, depending on whether
or not that site should be returned by this
function. Filter_func should be used to create
filters on the list of sites (i.e. user's
preferences on what sites to display by default).
If not specified, no filter is applied.
@returns - A list of TSquareSite objects encapsulating t-square's JSON
response.
|
[
"Returns",
"a",
"list",
"of",
"TSquareSite",
"objects",
"that",
"represent",
"the",
"sites",
"available",
"to",
"a",
"user",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L100-L135
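An example filter for get_sites, using one of the props keys the method guarantees is present; the term value and the `api` object are assumptions.

# Only keep sites from a particular term (value is illustrative):
spring_sites = api.get_sites(filter_func=lambda s: s.props['term_eid'] == '201802')

# No filter: every site visible to the authenticated user.
all_sites = api.get_sites()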
|
240,870
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.get_announcements
|
def get_announcements(self, site=None, num=10, age=20):
"""
Gets announcements from a site if site is not None, or from every
site otherwise. Returns a list of TSquareAnnouncement objects.
@param site_obj (TSquareSite) If non-None, gets only the announcements
                                      from that site. If none, get announcements
from all sites.
@param num - The number of announcements to fetch. Default is 10.
        @param age - 'How far back' to go to retrieve announcements. Default
is 20, which means that only announcements that are
less than 20 days old will be returned, even if there
less than 'num' of them.
@returns - A list of TSquareAnnouncement objects. The length will be
at most num, and it may be less than num depending on
the number of announcements whose age is less than age.
"""
url = BASE_URL_TSQUARE + 'announcement/'
if site:
url += 'site/{}.json?n={}&d={}'.format(site.id, num, age)
else:
url += 'user.json?n={}&d={}'.format(num, age)
request = self._session.get(url)
request.raise_for_status()
announcement_list = request.json()['announcement_collection']
return map(lambda x: TSquareAnnouncement(**x), announcement_list)
|
python
|
def get_announcements(self, site=None, num=10, age=20):
"""
Gets announcements from a site if site is not None, or from every
site otherwise. Returns a list of TSquareAnnouncement objects.
@param site_obj (TSquareSite) If non-None, gets only the announcements
                                      from that site. If none, get announcements
from all sites.
@param num - The number of announcements to fetch. Default is 10.
        @param age - 'How far back' to go to retrieve announcements. Default
is 20, which means that only announcements that are
less than 20 days old will be returned, even if there
less than 'num' of them.
@returns - A list of TSquareAnnouncement objects. The length will be
at most num, and it may be less than num depending on
the number of announcements whose age is less than age.
"""
url = BASE_URL_TSQUARE + 'announcement/'
if site:
url += 'site/{}.json?n={}&d={}'.format(site.id, num, age)
else:
url += 'user.json?n={}&d={}'.format(num, age)
request = self._session.get(url)
request.raise_for_status()
announcement_list = request.json()['announcement_collection']
return map(lambda x: TSquareAnnouncement(**x), announcement_list)
|
[
"def",
"get_announcements",
"(",
"self",
",",
"site",
"=",
"None",
",",
"num",
"=",
"10",
",",
"age",
"=",
"20",
")",
":",
"url",
"=",
"BASE_URL_TSQUARE",
"+",
"'announcement/'",
"if",
"site",
":",
"url",
"+=",
"'site/{}.json?n={}&d={}'",
".",
"format",
"(",
"site",
".",
"id",
",",
"num",
",",
"age",
")",
"else",
":",
"url",
"+=",
"'user.json?n={}&d={}'",
".",
"format",
"(",
"num",
",",
"age",
")",
"request",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"url",
")",
"request",
".",
"raise_for_status",
"(",
")",
"announcement_list",
"=",
"request",
".",
"json",
"(",
")",
"[",
"'announcement_collection'",
"]",
"return",
"map",
"(",
"lambda",
"x",
":",
"TSquareAnnouncement",
"(",
"*",
"*",
"x",
")",
",",
"announcement_list",
")"
] |
Gets announcements from a site if site is not None, or from every
site otherwise. Returns a list of TSquareAnnouncement objects.
@param site_obj (TSquareSite) If non-None, gets only the announcements
                              from that site. If none, get announcements
from all sites.
@param num - The number of announcements to fetch. Default is 10.
@param age - 'How far back' to go to retrieve announcements. Default
is 20, which means that only announcements that are
less than 20 days old will be returned, even if there
less than 'num' of them.
@returns - A list of TSquareAnnouncement objects. The length will be
at most num, and it may be less than num depending on
the number of announcements whose age is less than age.
|
[
"Gets",
"announcements",
"from",
"a",
"site",
"if",
"site",
"is",
"not",
"None",
"or",
"from",
"every",
"site",
"otherwise",
".",
"Returns",
"a",
"list",
"of",
"TSquareAnnouncement",
"objects",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L138-L162
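Illustrative calls; `api` and `site` are assumed to be an authenticated TSquareAPI and one of its TSquareSite objects.

# Up to 5 announcements from one site, none older than a week:
recent = api.get_announcements(site=site, num=5, age=7)

# Defaults: at most 10 announcements across all sites, at most 20 days old.
everything = list(api.get_announcements())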
|
240,871
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.get_tools
|
def get_tools(self, site):
"""
Gets all tools associated with a site.
@param site (TSquareSite) - The site to search for tools
@returns A list of dictionaries representing Tsquare tools.
"""
# hack - gotta bypass the tsquare REST api because it kinda sucks with tools
url = site.entityURL.replace('direct', 'portal')
response = self._session.get(url)
response.raise_for_status()
# scrape the resulting html
tools_dict_list = self._html_iface.get_tools(response.text)
return [TSquareTool(**x) for x in tools_dict_list]
|
python
|
def get_tools(self, site):
"""
Gets all tools associated with a site.
@param site (TSquareSite) - The site to search for tools
@returns A list of dictionaries representing Tsquare tools.
"""
# hack - gotta bypass the tsquare REST api because it kinda sucks with tools
url = site.entityURL.replace('direct', 'portal')
response = self._session.get(url)
response.raise_for_status()
# scrape the resulting html
tools_dict_list = self._html_iface.get_tools(response.text)
return [TSquareTool(**x) for x in tools_dict_list]
|
[
"def",
"get_tools",
"(",
"self",
",",
"site",
")",
":",
"# hack - gotta bypass the tsquare REST api because it kinda sucks with tools",
"url",
"=",
"site",
".",
"entityURL",
".",
"replace",
"(",
"'direct'",
",",
"'portal'",
")",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"url",
")",
"response",
".",
"raise_for_status",
"(",
")",
"# scrape the resulting html",
"tools_dict_list",
"=",
"self",
".",
"_html_iface",
".",
"get_tools",
"(",
"response",
".",
"text",
")",
"return",
"[",
"TSquareTool",
"(",
"*",
"*",
"x",
")",
"for",
"x",
"in",
"tools_dict_list",
"]"
] |
Gets all tools associated with a site.
@param site (TSquareSite) - The site to search for tools
@returns A list of dictionaries representing Tsquare tools.
|
[
"Gets",
"all",
"tools",
"associated",
"with",
"a",
"site",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L165-L177
|
240,872
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.get_grades
|
def get_grades(self, site):
"""
Gets a list of grades associated with a site. The return type is a dictionary
whose keys are assignment categories, similar to how the page is laid out
in TSquare.
"""
tools = self.get_tools(site)
grade_tool_filter = [x.href for x in tools if x.name == 'gradebook-tool']
if not grade_tool_filter:
return []
response = self._session.get(grade_tool_filter[0])
response.raise_for_status()
iframes = self._html_iface.get_iframes(response.text)
iframe_url = ''
for frame in iframes:
if frame['title'] == 'Gradebook ':
iframe_url = frame['src']
if iframe_url == '':
print "WARNING: NO GRADEBOOK IFRAMES FOUND"
response = self._session.get(iframe_url)
response.raise_for_status()
grade_dict_list = self._html_iface.get_grades(response.text)
return grade_dict_list
|
python
|
def get_grades(self, site):
"""
Gets a list of grades associated with a site. The return type is a dictionary
whose keys are assignment categories, similar to how the page is laid out
in TSquare.
"""
tools = self.get_tools(site)
grade_tool_filter = [x.href for x in tools if x.name == 'gradebook-tool']
if not grade_tool_filter:
return []
response = self._session.get(grade_tool_filter[0])
response.raise_for_status()
iframes = self._html_iface.get_iframes(response.text)
iframe_url = ''
for frame in iframes:
if frame['title'] == 'Gradebook ':
iframe_url = frame['src']
if iframe_url == '':
print "WARNING: NO GRADEBOOK IFRAMES FOUND"
response = self._session.get(iframe_url)
response.raise_for_status()
grade_dict_list = self._html_iface.get_grades(response.text)
return grade_dict_list
|
[
"def",
"get_grades",
"(",
"self",
",",
"site",
")",
":",
"tools",
"=",
"self",
".",
"get_tools",
"(",
"site",
")",
"grade_tool_filter",
"=",
"[",
"x",
".",
"href",
"for",
"x",
"in",
"tools",
"if",
"x",
".",
"name",
"==",
"'gradebook-tool'",
"]",
"if",
"not",
"grade_tool_filter",
":",
"return",
"[",
"]",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"grade_tool_filter",
"[",
"0",
"]",
")",
"response",
".",
"raise_for_status",
"(",
")",
"iframes",
"=",
"self",
".",
"_html_iface",
".",
"get_iframes",
"(",
"response",
".",
"text",
")",
"iframe_url",
"=",
"''",
"for",
"frame",
"in",
"iframes",
":",
"if",
"frame",
"[",
"'title'",
"]",
"==",
"'Gradebook '",
":",
"iframe_url",
"=",
"frame",
"[",
"'src'",
"]",
"if",
"iframe_url",
"==",
"''",
":",
"print",
"\"WARNING: NO GRADEBOOK IFRAMES FOUND\"",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"iframe_url",
")",
"response",
".",
"raise_for_status",
"(",
")",
"grade_dict_list",
"=",
"self",
".",
"_html_iface",
".",
"get_grades",
"(",
"response",
".",
"text",
")",
"return",
"grade_dict_list"
] |
Gets a list of grades associated with a site. The return type is a dictionary
whose keys are assignment categories, similar to how the page is laid out
in TSquare.
|
[
"Gets",
"a",
"list",
"of",
"grades",
"associated",
"with",
"a",
"site",
".",
"The",
"return",
"type",
"is",
"a",
"dictionary",
"whose",
"keys",
"are",
"assignment",
"categories",
"similar",
"to",
"how",
"the",
"page",
"is",
"laid",
"out",
"in",
"TSquare",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L209-L231
|
240,873
|
swgillespie/tsquare
|
tsquare/core.py
|
TSquareAPI.get_syllabus
|
def get_syllabus(self, site):
"""
Gets the syllabus for a course. The syllabus may or may not
contain HTML, depending on the site. TSquare does not enforce
whether or not pages are allowed to have HTML, so it is impossible
to tell.
"""
tools = self.get_tools(site)
syllabus_filter = [x.href for x in tools if x.name == 'syllabus']
if not syllabus_filter:
return ''
response = self._session.get(syllabus_filter[0])
response.raise_for_status()
iframes = self._html_iface.get_iframes(response.text)
iframe_url = ''
for frame in iframes:
if frame['title'] == 'Syllabus ':
iframe_url = frame['src']
if iframe_url == '':
print "WARHING: NO SYLLABUS IFRAME FOUND"
response = self._session.get(iframe_url)
response.raise_for_status()
syllabus_html = self._html_iface.get_syllabus(response.text)
return syllabus_html
|
python
|
def get_syllabus(self, site):
"""
Gets the syllabus for a course. The syllabus may or may not
contain HTML, depending on the site. TSquare does not enforce
whether or not pages are allowed to have HTML, so it is impossible
to tell.
"""
tools = self.get_tools(site)
syllabus_filter = [x.href for x in tools if x.name == 'syllabus']
if not syllabus_filter:
return ''
response = self._session.get(syllabus_filter[0])
response.raise_for_status()
iframes = self._html_iface.get_iframes(response.text)
iframe_url = ''
for frame in iframes:
if frame['title'] == 'Syllabus ':
iframe_url = frame['src']
if iframe_url == '':
print "WARHING: NO SYLLABUS IFRAME FOUND"
response = self._session.get(iframe_url)
response.raise_for_status()
syllabus_html = self._html_iface.get_syllabus(response.text)
return syllabus_html
|
[
"def",
"get_syllabus",
"(",
"self",
",",
"site",
")",
":",
"tools",
"=",
"self",
".",
"get_tools",
"(",
"site",
")",
"syllabus_filter",
"=",
"[",
"x",
".",
"href",
"for",
"x",
"in",
"tools",
"if",
"x",
".",
"name",
"==",
"'syllabus'",
"]",
"if",
"not",
"syllabus_filter",
":",
"return",
"''",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"syllabus_filter",
"[",
"0",
"]",
")",
"response",
".",
"raise_for_status",
"(",
")",
"iframes",
"=",
"self",
".",
"_html_iface",
".",
"get_iframes",
"(",
"response",
".",
"text",
")",
"iframe_url",
"=",
"''",
"for",
"frame",
"in",
"iframes",
":",
"if",
"frame",
"[",
"'title'",
"]",
"==",
"'Syllabus '",
":",
"iframe_url",
"=",
"frame",
"[",
"'src'",
"]",
"if",
"iframe_url",
"==",
"''",
":",
"print",
"\"WARHING: NO SYLLABUS IFRAME FOUND\"",
"response",
"=",
"self",
".",
"_session",
".",
"get",
"(",
"iframe_url",
")",
"response",
".",
"raise_for_status",
"(",
")",
"syllabus_html",
"=",
"self",
".",
"_html_iface",
".",
"get_syllabus",
"(",
"response",
".",
"text",
")",
"return",
"syllabus_html"
] |
Gets the syllabus for a course. The syllabus may or may not
contain HTML, depending on the site. TSquare does not enforce
whether or not pages are allowed to have HTML, so it is impossible
to tell.
|
[
"Gets",
"the",
"syllabus",
"for",
"a",
"course",
".",
"The",
"syllabus",
"may",
"or",
"may",
"not",
"contain",
"HTML",
"depending",
"on",
"the",
"site",
".",
"TSquare",
"does",
"not",
"enforce",
"whether",
"or",
"not",
"pages",
"are",
"allowed",
"to",
"have",
"HTML",
"so",
"it",
"is",
"impossible",
"to",
"tell",
"."
] |
242adb2c27e6c65a1f75db32a4636ea3f1d22a3a
|
https://github.com/swgillespie/tsquare/blob/242adb2c27e6c65a1f75db32a4636ea3f1d22a3a/tsquare/core.py#L234-L257
|
240,874
|
FlorianLudwig/rueckenwind
|
rw/scope.py
|
setup_app_scope
|
def setup_app_scope(name, scope):
"""activate plugins accordingly to config"""
# load plugins
plugins = []
for plugin_name, active in get('settings').get('rw.plugins', {}).items():
plugin = __import__(plugin_name)
plugin_path = plugin_name.split('.')[1:] + ['plugin']
for sub in plugin_path:
plugin = getattr(plugin, sub)
plugins.append(scope.activate(plugin))
yield plugins
raise rw.gen.Return(scope['settings'])
|
python
|
def setup_app_scope(name, scope):
"""activate plugins accordingly to config"""
# load plugins
plugins = []
for plugin_name, active in get('settings').get('rw.plugins', {}).items():
plugin = __import__(plugin_name)
plugin_path = plugin_name.split('.')[1:] + ['plugin']
for sub in plugin_path:
plugin = getattr(plugin, sub)
plugins.append(scope.activate(plugin))
yield plugins
raise rw.gen.Return(scope['settings'])
|
[
"def",
"setup_app_scope",
"(",
"name",
",",
"scope",
")",
":",
"# load plugins",
"plugins",
"=",
"[",
"]",
"for",
"plugin_name",
",",
"active",
"in",
"get",
"(",
"'settings'",
")",
".",
"get",
"(",
"'rw.plugins'",
",",
"{",
"}",
")",
".",
"items",
"(",
")",
":",
"plugin",
"=",
"__import__",
"(",
"plugin_name",
")",
"plugin_path",
"=",
"plugin_name",
".",
"split",
"(",
"'.'",
")",
"[",
"1",
":",
"]",
"+",
"[",
"'plugin'",
"]",
"for",
"sub",
"in",
"plugin_path",
":",
"plugin",
"=",
"getattr",
"(",
"plugin",
",",
"sub",
")",
"plugins",
".",
"append",
"(",
"scope",
".",
"activate",
"(",
"plugin",
")",
")",
"yield",
"plugins",
"raise",
"rw",
".",
"gen",
".",
"Return",
"(",
"scope",
"[",
"'settings'",
"]",
")"
] |
activate plugins according to config
|
[
"activate",
"plugins",
"accordingly",
"to",
"config"
] |
47fec7af05ea10b3cf6d59b9f7bf4d12c02dddea
|
https://github.com/FlorianLudwig/rueckenwind/blob/47fec7af05ea10b3cf6d59b9f7bf4d12c02dddea/rw/scope.py#L181-L194
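A sketch of the attribute walk the loop above performs for one dotted plugin name; the plugin name and settings fragment are hypothetical.

# For a settings entry such as {'rw.plugins': {'myapp.metrics': True}}:
plugin = __import__('myapp.metrics')    # returns the top-level package `myapp`
for sub in ['metrics', 'plugin']:       # 'myapp.metrics'.split('.')[1:] + ['plugin']
    plugin = getattr(plugin, sub)       # ends at myapp.metrics.plugin
# That module object is what scope.activate(plugin) receives.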
|
240,875
|
MacHu-GWU/angora-project
|
angora/gadget/configuration.py
|
Configuration.add_section
|
def add_section(self, section_name):
"""Add an empty section.
"""
if section_name == "DEFAULT":
raise Exception("'DEFAULT' is reserved section name.")
if section_name in self._sections:
raise Exception(
"Error! %s is already one of the sections" % section_name)
else:
self._sections[section_name] = Section(section_name)
|
python
|
def add_section(self, section_name):
"""Add an empty section.
"""
if section_name == "DEFAULT":
raise Exception("'DEFAULT' is reserved section name.")
if section_name in self._sections:
raise Exception(
"Error! %s is already one of the sections" % section_name)
else:
self._sections[section_name] = Section(section_name)
|
[
"def",
"add_section",
"(",
"self",
",",
"section_name",
")",
":",
"if",
"section_name",
"==",
"\"DEFAULT\"",
":",
"raise",
"Exception",
"(",
"\"'DEFAULT' is reserved section name.\"",
")",
"if",
"section_name",
"in",
"self",
".",
"_sections",
":",
"raise",
"Exception",
"(",
"\"Error! %s is already one of the sections\"",
"%",
"section_name",
")",
"else",
":",
"self",
".",
"_sections",
"[",
"section_name",
"]",
"=",
"Section",
"(",
"section_name",
")"
] |
Add an empty section.
|
[
"Add",
"an",
"empty",
"section",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/gadget/configuration.py#L253-L263
|
240,876
|
MacHu-GWU/angora-project
|
angora/gadget/configuration.py
|
Configuration.remove_section
|
def remove_section(self, section_name):
"""Remove a section, it cannot be the DEFAULT section.
"""
if section_name == "DEFAULT":
raise Exception("'DEFAULT' is reserved section name.")
if section_name in self._sections:
del self._sections[section_name]
else:
raise Exception("Error! cannot find section '%s'.")
|
python
|
def remove_section(self, section_name):
"""Remove a section, it cannot be the DEFAULT section.
"""
if section_name == "DEFAULT":
raise Exception("'DEFAULT' is reserved section name.")
if section_name in self._sections:
del self._sections[section_name]
else:
raise Exception("Error! cannot find section '%s'.")
|
[
"def",
"remove_section",
"(",
"self",
",",
"section_name",
")",
":",
"if",
"section_name",
"==",
"\"DEFAULT\"",
":",
"raise",
"Exception",
"(",
"\"'DEFAULT' is reserved section name.\"",
")",
"if",
"section_name",
"in",
"self",
".",
"_sections",
":",
"del",
"self",
".",
"_sections",
"[",
"section_name",
"]",
"else",
":",
"raise",
"Exception",
"(",
"\"Error! cannot find section '%s'.\"",
")"
] |
Remove a section, it cannot be the DEFAULT section.
|
[
"Remove",
"a",
"section",
"it",
"cannot",
"be",
"the",
"DEFAULT",
"section",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/gadget/configuration.py#L265-L274
|
240,877
|
MacHu-GWU/angora-project
|
angora/gadget/configuration.py
|
Configuration.set_section
|
def set_section(self, section):
"""Set a section. If section already exists, overwrite the old one.
"""
if not isinstance(section, Section):
raise Exception("You")
try:
self.remove_section(section.name)
except:
pass
self._sections[section.name] = copy.deepcopy(section)
|
python
|
def set_section(self, section):
"""Set a section. If section already exists, overwrite the old one.
"""
if not isinstance(section, Section):
raise Exception("You")
try:
self.remove_section(section.name)
except:
pass
self._sections[section.name] = copy.deepcopy(section)
|
[
"def",
"set_section",
"(",
"self",
",",
"section",
")",
":",
"if",
"not",
"isinstance",
"(",
"section",
",",
"Section",
")",
":",
"raise",
"Exception",
"(",
"\"You\"",
")",
"try",
":",
"self",
".",
"remove_section",
"(",
"section",
".",
"name",
")",
"except",
":",
"pass",
"self",
".",
"_sections",
"[",
"section",
".",
"name",
"]",
"=",
"copy",
".",
"deepcopy",
"(",
"section",
")"
] |
Set a section. If section already exists, overwrite the old one.
|
[
"Set",
"a",
"section",
".",
"If",
"section",
"already",
"exists",
"overwrite",
"the",
"old",
"one",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/gadget/configuration.py#L276-L287
|
240,878
|
timeyyy/apptools
|
peasoup/peasoup.py
|
setup_logger
|
def setup_logger(log_file, level=logging.DEBUG):
'''One function call to set up logging with some nice logs about the machine'''
cfg = AppBuilder.get_pcfg()
logger = cfg['log_module']
# todo make sure structlog is compliant and that logbook is also the correct name???
assert logger in ("logging", "logbook", "structlog"), 'bad logger specified'
exec("import {0};logging = {0}".format(logger))
AppBuilder.logger = logging
logging.basicConfig(
filename=log_file,
filemode='w',
level=level,
format='%(asctime)s:%(levelname)s: %(message)s') # one run
logging.debug('System is: %s' % platform.platform())
logging.debug('Python archetecture is: %s' % platform.architecture()[0])
logging.debug('Machine archetecture is: %s' % platform.machine())
set_windows_permissions(log_file)
|
python
|
def setup_logger(log_file, level=logging.DEBUG):
'''One function call to set up logging with some nice logs about the machine'''
cfg = AppBuilder.get_pcfg()
logger = cfg['log_module']
# todo make sure structlog is compliant and that logbook is also the correct name???
assert logger in ("logging", "logbook", "structlog"), 'bad logger specified'
exec("import {0};logging = {0}".format(logger))
AppBuilder.logger = logging
logging.basicConfig(
filename=log_file,
filemode='w',
level=level,
format='%(asctime)s:%(levelname)s: %(message)s') # one run
logging.debug('System is: %s' % platform.platform())
logging.debug('Python archetecture is: %s' % platform.architecture()[0])
logging.debug('Machine archetecture is: %s' % platform.machine())
set_windows_permissions(log_file)
|
[
"def",
"setup_logger",
"(",
"log_file",
",",
"level",
"=",
"logging",
".",
"DEBUG",
")",
":",
"cfg",
"=",
"AppBuilder",
".",
"get_pcfg",
"(",
")",
"logger",
"=",
"cfg",
"[",
"'log_module'",
"]",
"# todo make sure structlog is compliant and that logbook is also the correct name???",
"assert",
"logger",
"in",
"(",
"\"logging\"",
",",
"\"logbook\"",
",",
"\"structlog\"",
")",
",",
"'bad logger specified'",
"exec",
"(",
"\"import {0};logging = {0}\"",
".",
"format",
"(",
"logger",
")",
")",
"AppBuilder",
".",
"logger",
"=",
"logging",
"logging",
".",
"basicConfig",
"(",
"filename",
"=",
"log_file",
",",
"filemode",
"=",
"'w'",
",",
"level",
"=",
"level",
",",
"format",
"=",
"'%(asctime)s:%(levelname)s: %(message)s'",
")",
"# one run",
"logging",
".",
"debug",
"(",
"'System is: %s'",
"%",
"platform",
".",
"platform",
"(",
")",
")",
"logging",
".",
"debug",
"(",
"'Python archetecture is: %s'",
"%",
"platform",
".",
"architecture",
"(",
")",
"[",
"0",
"]",
")",
"logging",
".",
"debug",
"(",
"'Machine archetecture is: %s'",
"%",
"platform",
".",
"machine",
"(",
")",
")",
"set_windows_permissions",
"(",
"log_file",
")"
] |
One function call to set up logging with some nice logs about the machine
|
[
"One",
"function",
"call",
"to",
"set",
"up",
"logging",
"with",
"some",
"nice",
"logs",
"about",
"the",
"machine"
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L424-L443
|
240,879
|
timeyyy/apptools
|
peasoup/peasoup.py
|
set_windows_permissions
|
def set_windows_permissions(filename):
'''
At least on windows 7 if a file is created on an Admin account,
Other users will not be given execute or full control.
However if a user creates the file himself it will work...
So just always change permissions after creating a file on windows
Change the permissions for Allusers of the application
The Everyone Group
Full access
http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html
'''
#Todo rename this to allow_all, also make international not just for english..
if os.name == 'nt':
try:
everyone, domain, type = win32security.LookupAccountName(
"", "Everyone")
except Exception:
# Todo fails on non english langauge systesm ... FU WINDOWS
# Just allow permission for the current user then...
everyone, domain, type = win32security.LookupAccountName ("", win32api.GetUserName())
# ~ user, domain, type = win32security.LookupAccountName ("", win32api.GetUserName())
#~ userx, domain, type = win32security.LookupAccountName ("", "User")
#~ usery, domain, type = win32security.LookupAccountName ("", "User Y")
sd = win32security.GetFileSecurity(
filename,
win32security.DACL_SECURITY_INFORMATION)
# instead of dacl = win32security.ACL()
dacl = sd.GetSecurityDescriptorDacl()
#~ dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_GENERIC_READ | con.FILE_GENERIC_WRITE, everyone)
#~ dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, user)
dacl.AddAccessAllowedAce(
win32security.ACL_REVISION,
con.FILE_ALL_ACCESS,
everyone)
sd.SetSecurityDescriptorDacl(1, dacl, 0) # may not be necessary
win32security.SetFileSecurity(
filename,
win32security.DACL_SECURITY_INFORMATION,
sd)
|
python
|
def set_windows_permissions(filename):
'''
At least on windows 7 if a file is created on an Admin account,
Other users will not be given execute or full control.
However if a user creates the file himself it will work...
So just always change permissions after creating a file on windows
Change the permissions for Allusers of the application
The Everyone Group
Full access
http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html
'''
#Todo rename this to allow_all, also make international not just for english..
if os.name == 'nt':
try:
everyone, domain, type = win32security.LookupAccountName(
"", "Everyone")
except Exception:
# Todo fails on non english langauge systesm ... FU WINDOWS
# Just allow permission for the current user then...
everyone, domain, type = win32security.LookupAccountName ("", win32api.GetUserName())
# ~ user, domain, type = win32security.LookupAccountName ("", win32api.GetUserName())
#~ userx, domain, type = win32security.LookupAccountName ("", "User")
#~ usery, domain, type = win32security.LookupAccountName ("", "User Y")
sd = win32security.GetFileSecurity(
filename,
win32security.DACL_SECURITY_INFORMATION)
# instead of dacl = win32security.ACL()
dacl = sd.GetSecurityDescriptorDacl()
#~ dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_GENERIC_READ | con.FILE_GENERIC_WRITE, everyone)
#~ dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, user)
dacl.AddAccessAllowedAce(
win32security.ACL_REVISION,
con.FILE_ALL_ACCESS,
everyone)
sd.SetSecurityDescriptorDacl(1, dacl, 0) # may not be necessary
win32security.SetFileSecurity(
filename,
win32security.DACL_SECURITY_INFORMATION,
sd)
|
[
"def",
"set_windows_permissions",
"(",
"filename",
")",
":",
"#Todo rename this to allow_all, also make international not just for english..",
"if",
"os",
".",
"name",
"==",
"'nt'",
":",
"try",
":",
"everyone",
",",
"domain",
",",
"type",
"=",
"win32security",
".",
"LookupAccountName",
"(",
"\"\"",
",",
"\"Everyone\"",
")",
"except",
"Exception",
":",
"# Todo fails on non english langauge systesm ... FU WINDOWS",
"# Just allow permission for the current user then...",
"everyone",
",",
"domain",
",",
"type",
"=",
"win32security",
".",
"LookupAccountName",
"(",
"\"\"",
",",
"win32api",
".",
"GetUserName",
"(",
")",
")",
"# ~ user, domain, type = win32security.LookupAccountName (\"\", win32api.GetUserName())",
"#~ userx, domain, type = win32security.LookupAccountName (\"\", \"User\")",
"#~ usery, domain, type = win32security.LookupAccountName (\"\", \"User Y\")",
"sd",
"=",
"win32security",
".",
"GetFileSecurity",
"(",
"filename",
",",
"win32security",
".",
"DACL_SECURITY_INFORMATION",
")",
"# instead of dacl = win32security.ACL()",
"dacl",
"=",
"sd",
".",
"GetSecurityDescriptorDacl",
"(",
")",
"#~ dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_GENERIC_READ | con.FILE_GENERIC_WRITE, everyone)",
"#~ dacl.AddAccessAllowedAce(win32security.ACL_REVISION, con.FILE_ALL_ACCESS, user)",
"dacl",
".",
"AddAccessAllowedAce",
"(",
"win32security",
".",
"ACL_REVISION",
",",
"con",
".",
"FILE_ALL_ACCESS",
",",
"everyone",
")",
"sd",
".",
"SetSecurityDescriptorDacl",
"(",
"1",
",",
"dacl",
",",
"0",
")",
"# may not be necessary",
"win32security",
".",
"SetFileSecurity",
"(",
"filename",
",",
"win32security",
".",
"DACL_SECURITY_INFORMATION",
",",
"sd",
")"
] |
At least on windows 7 if a file is created on an Admin account,
Other users will not be given execute or full control.
However if a user creates the file himself it will work...
So just always change permissions after creating a file on windows
Change the permissions for Allusers of the application
The Everyone Group
Full access
http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html
|
[
"At",
"least",
"on",
"windows",
"7",
"if",
"a",
"file",
"is",
"created",
"on",
"an",
"Admin",
"account",
"Other",
"users",
"will",
"not",
"be",
"given",
"execute",
"or",
"full",
"control",
".",
"However",
"if",
"a",
"user",
"creates",
"the",
"file",
"himself",
"it",
"will",
"work",
"...",
"So",
"just",
"always",
"change",
"permissions",
"after",
"creating",
"a",
"file",
"on",
"windows"
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L446-L489
|
240,880
|
timeyyy/apptools
|
peasoup/peasoup.py
|
setup_raven
|
def setup_raven():
'''we setup sentry to get all stuff from our logs'''
pcfg = AppBuilder.get_pcfg()
from raven.handlers.logging import SentryHandler
from raven import Client
from raven.conf import setup_logging
client = Client(pcfg['raven_dsn'])
handler = SentryHandler(client)
# TODO VERIFY THIS -> This is the way to do it if you have a paid account, each log call is an event so this isn't going to work for free accounts...
handler.setLevel(pcfg["raven_loglevel"])
setup_logging(handler)
return client
|
python
|
def setup_raven():
'''we setup sentry to get all stuff from our logs'''
pcfg = AppBuilder.get_pcfg()
from raven.handlers.logging import SentryHandler
from raven import Client
from raven.conf import setup_logging
client = Client(pcfg['raven_dsn'])
handler = SentryHandler(client)
# TODO VERIFY THIS -> This is the way to do it if you have a paid account, each log call is an event so this isn't going to work for free accounts...
handler.setLevel(pcfg["raven_loglevel"])
setup_logging(handler)
return client
|
[
"def",
"setup_raven",
"(",
")",
":",
"pcfg",
"=",
"AppBuilder",
".",
"get_pcfg",
"(",
")",
"from",
"raven",
".",
"handlers",
".",
"logging",
"import",
"SentryHandler",
"from",
"raven",
"import",
"Client",
"from",
"raven",
".",
"conf",
"import",
"setup_logging",
"client",
"=",
"Client",
"(",
"pcfg",
"[",
"'raven_dsn'",
"]",
")",
"handler",
"=",
"SentryHandler",
"(",
"client",
")",
"# TODO VERIFY THIS -> This is the way to do it if you have a paid account, each log call is an event so this isn't going to work for free accounts...",
"handler",
".",
"setLevel",
"(",
"pcfg",
"[",
"\"raven_loglevel\"",
"]",
")",
"setup_logging",
"(",
"handler",
")",
"return",
"client"
] |
we setup sentry to get all stuff from our logs
|
[
"we",
"setup",
"sentry",
"to",
"get",
"all",
"stuff",
"from",
"our",
"logs"
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L492-L503
|
240,881
|
timeyyy/apptools
|
peasoup/peasoup.py
|
CfgDict.save
|
def save(self):
'''saves our config objet to file'''
if self.app.cfg_mode == 'json':
with open(self.app.cfg_file, 'w') as opened_file:
json.dump(self.app.cfg, opened_file)
else:
with open(self.app.cfg_file, 'w')as opened_file:
yaml.dump(self.app.cfg, opened_file)
|
python
|
def save(self):
'''saves our config objet to file'''
if self.app.cfg_mode == 'json':
with open(self.app.cfg_file, 'w') as opened_file:
json.dump(self.app.cfg, opened_file)
else:
with open(self.app.cfg_file, 'w')as opened_file:
yaml.dump(self.app.cfg, opened_file)
|
[
"def",
"save",
"(",
"self",
")",
":",
"if",
"self",
".",
"app",
".",
"cfg_mode",
"==",
"'json'",
":",
"with",
"open",
"(",
"self",
".",
"app",
".",
"cfg_file",
",",
"'w'",
")",
"as",
"opened_file",
":",
"json",
".",
"dump",
"(",
"self",
".",
"app",
".",
"cfg",
",",
"opened_file",
")",
"else",
":",
"with",
"open",
"(",
"self",
".",
"app",
".",
"cfg_file",
",",
"'w'",
")",
"as",
"opened_file",
":",
"yaml",
".",
"dump",
"(",
"self",
".",
"app",
".",
"cfg",
",",
"opened_file",
")"
] |
saves our config objet to file
|
[
"saves",
"our",
"config",
"objet",
"to",
"file"
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L64-L71
|
240,882
|
timeyyy/apptools
|
peasoup/peasoup.py
|
AppBuilder.create_cfg
|
def create_cfg(self, cfg_file, defaults=None, mode='json'):
'''
set mode to json or yaml? probably remove this option..Todo
Creates the config file for your app with default values
The file will only be created if it doesn't exits
also sets up the first_run attribute.
also sets correct windows permissions
you can add custom stuff to the config by doing
app.cfg['fkdsfa'] = 'fdsaf'
# todo auto save on change
remember to call cfg.save()
'''
assert mode in ('json', 'yaml')
self.cfg_mode = mode
self.cfg_file = cfg_file
try:
self.cfg = CfgDict(app=self, cfg=self.load_cfg())
logging.info('cfg file found : %s' % self.cfg_file)
except FileNotFoundError:
self.cfg = CfgDict(app=self, cfg={'first_run': True})
with suppress(TypeError):
self.cfg.update(defaults)
self.cfg.save()
set_windows_permissions(self.cfg_file)
logging.info(
'Created cfg file for first time!: %s' %
self.cfg_file)
if self._check_first_run():
self.first_run = True
else:
self.first_run = False
|
python
|
def create_cfg(self, cfg_file, defaults=None, mode='json'):
'''
set mode to json or yaml? probably remove this option..Todo
Creates the config file for your app with default values
The file will only be created if it doesn't exits
also sets up the first_run attribute.
also sets correct windows permissions
you can add custom stuff to the config by doing
app.cfg['fkdsfa'] = 'fdsaf'
# todo auto save on change
remember to call cfg.save()
'''
assert mode in ('json', 'yaml')
self.cfg_mode = mode
self.cfg_file = cfg_file
try:
self.cfg = CfgDict(app=self, cfg=self.load_cfg())
logging.info('cfg file found : %s' % self.cfg_file)
except FileNotFoundError:
self.cfg = CfgDict(app=self, cfg={'first_run': True})
with suppress(TypeError):
self.cfg.update(defaults)
self.cfg.save()
set_windows_permissions(self.cfg_file)
logging.info(
'Created cfg file for first time!: %s' %
self.cfg_file)
if self._check_first_run():
self.first_run = True
else:
self.first_run = False
|
[
"def",
"create_cfg",
"(",
"self",
",",
"cfg_file",
",",
"defaults",
"=",
"None",
",",
"mode",
"=",
"'json'",
")",
":",
"assert",
"mode",
"in",
"(",
"'json'",
",",
"'yaml'",
")",
"self",
".",
"cfg_mode",
"=",
"mode",
"self",
".",
"cfg_file",
"=",
"cfg_file",
"try",
":",
"self",
".",
"cfg",
"=",
"CfgDict",
"(",
"app",
"=",
"self",
",",
"cfg",
"=",
"self",
".",
"load_cfg",
"(",
")",
")",
"logging",
".",
"info",
"(",
"'cfg file found : %s'",
"%",
"self",
".",
"cfg_file",
")",
"except",
"FileNotFoundError",
":",
"self",
".",
"cfg",
"=",
"CfgDict",
"(",
"app",
"=",
"self",
",",
"cfg",
"=",
"{",
"'first_run'",
":",
"True",
"}",
")",
"with",
"suppress",
"(",
"TypeError",
")",
":",
"self",
".",
"cfg",
".",
"update",
"(",
"defaults",
")",
"self",
".",
"cfg",
".",
"save",
"(",
")",
"set_windows_permissions",
"(",
"self",
".",
"cfg_file",
")",
"logging",
".",
"info",
"(",
"'Created cfg file for first time!: %s'",
"%",
"self",
".",
"cfg_file",
")",
"if",
"self",
".",
"_check_first_run",
"(",
")",
":",
"self",
".",
"first_run",
"=",
"True",
"else",
":",
"self",
".",
"first_run",
"=",
"False"
] |
set mode to json or yaml? probably remove this option..Todo
Creates the config file for your app with default values
The file will only be created if it doesn't exits
also sets up the first_run attribute.
also sets correct windows permissions
you can add custom stuff to the config by doing
app.cfg['fkdsfa'] = 'fdsaf'
# todo auto save on change
remember to call cfg.save()
|
[
"set",
"mode",
"to",
"json",
"or",
"yaml?",
"probably",
"remove",
"this",
"option",
"..",
"Todo"
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L106-L142
|
240,883
|
timeyyy/apptools
|
peasoup/peasoup.py
|
AppBuilder.load_cfg
|
def load_cfg(self):
'''loads our config object accessible via self.cfg'''
if self.cfg_mode == 'json':
with open(self.cfg_file) as opened_file:
return json.load(opened_file)
else:
with open(self.cfg_file) as ymlfile:
return yaml.safe_load(ymlfile)
|
python
|
def load_cfg(self):
'''loads our config object accessible via self.cfg'''
if self.cfg_mode == 'json':
with open(self.cfg_file) as opened_file:
return json.load(opened_file)
else:
with open(self.cfg_file) as ymlfile:
return yaml.safe_load(ymlfile)
|
[
"def",
"load_cfg",
"(",
"self",
")",
":",
"if",
"self",
".",
"cfg_mode",
"==",
"'json'",
":",
"with",
"open",
"(",
"self",
".",
"cfg_file",
")",
"as",
"opened_file",
":",
"return",
"json",
".",
"load",
"(",
"opened_file",
")",
"else",
":",
"with",
"open",
"(",
"self",
".",
"cfg_file",
")",
"as",
"ymlfile",
":",
"return",
"yaml",
".",
"safe_load",
"(",
"ymlfile",
")"
] |
loads our config object accessible via self.cfg
|
[
"loads",
"our",
"config",
"object",
"accessible",
"via",
"self",
".",
"cfg"
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L144-L151
|
240,884
|
timeyyy/apptools
|
peasoup/peasoup.py
|
AppBuilder.check_if_open
|
def check_if_open(self, path=None, appdata=False, verbose=False):
'''
Allows only one version of the app to be open at a time.
If you are calling create_cfg() before calling this,
you don't need to give a path. Otherwise a file path must be
given so we can save our file there.
Set appdata to True to run uac_bypass on the path, otherwise
leave it as False
'''
#~ To know if the system crashed, or if the prgram was exited smoothly
#~ turn verbose to True and the function will return a named tuple # TBD
#~ if os.name == 'nt':
#~ hwnd = int(self.root.wm_frame(),0)
#~ #saving a hwnd reference so we can check if we still open later on
#~ with open (self.check_file,'a') as f:
#~ f.write(str(hwnd))
#~ logging.info('adding hwnd to running info :'+str(hwnd))
#~
logging.info('Checking if our app is already Open')
if not path and self.cfg:
self._check_if_open_using_config()
elif path:
if appdata:
file = path.split(os.sep)[-1]
self.check_file = self.uac_bypass(file=file)
else:
self.check_file = path
self._check_if_open_using_path()
self.shutdown_cleanup['release_singleton'] = self.release_singleton
|
python
|
def check_if_open(self, path=None, appdata=False, verbose=False):
'''
Allows only one version of the app to be open at a time.
If you are calling create_cfg() before calling this,
you don't need to give a path. Otherwise a file path must be
given so we can save our file there.
Set appdata to True to run uac_bypass on the path, otherwise
leave it as False
'''
#~ To know if the system crashed, or if the prgram was exited smoothly
#~ turn verbose to True and the function will return a named tuple # TBD
#~ if os.name == 'nt':
#~ hwnd = int(self.root.wm_frame(),0)
#~ #saving a hwnd reference so we can check if we still open later on
#~ with open (self.check_file,'a') as f:
#~ f.write(str(hwnd))
#~ logging.info('adding hwnd to running info :'+str(hwnd))
#~
logging.info('Checking if our app is already Open')
if not path and self.cfg:
self._check_if_open_using_config()
elif path:
if appdata:
file = path.split(os.sep)[-1]
self.check_file = self.uac_bypass(file=file)
else:
self.check_file = path
self._check_if_open_using_path()
self.shutdown_cleanup['release_singleton'] = self.release_singleton
|
[
"def",
"check_if_open",
"(",
"self",
",",
"path",
"=",
"None",
",",
"appdata",
"=",
"False",
",",
"verbose",
"=",
"False",
")",
":",
"#~ To know if the system crashed, or if the prgram was exited smoothly",
"#~ turn verbose to True and the function will return a named tuple # TBD",
"#~ if os.name == 'nt':",
"#~ hwnd = int(self.root.wm_frame(),0)",
"#~ #saving a hwnd reference so we can check if we still open later on",
"#~ with open (self.check_file,'a') as f:",
"#~ f.write(str(hwnd))",
"#~ logging.info('adding hwnd to running info :'+str(hwnd))",
"#~",
"logging",
".",
"info",
"(",
"'Checking if our app is already Open'",
")",
"if",
"not",
"path",
"and",
"self",
".",
"cfg",
":",
"self",
".",
"_check_if_open_using_config",
"(",
")",
"elif",
"path",
":",
"if",
"appdata",
":",
"file",
"=",
"path",
".",
"split",
"(",
"os",
".",
"sep",
")",
"[",
"-",
"1",
"]",
"self",
".",
"check_file",
"=",
"self",
".",
"uac_bypass",
"(",
"file",
"=",
"file",
")",
"else",
":",
"self",
".",
"check_file",
"=",
"path",
"self",
".",
"_check_if_open_using_path",
"(",
")",
"self",
".",
"shutdown_cleanup",
"[",
"'release_singleton'",
"]",
"=",
"self",
".",
"release_singleton"
] |
Allows only one version of the app to be open at a time.
If you are calling create_cfg() before calling this,
you don't need to give a path. Otherwise a file path must be
given so we can save our file there.
Set appdata to True to run uac_bypass on the path, otherwise
leave it as False
|
[
"Allows",
"only",
"one",
"version",
"of",
"the",
"app",
"to",
"be",
"open",
"at",
"a",
"time",
"."
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L256-L287
|
240,885
|
timeyyy/apptools
|
peasoup/peasoup.py
|
AppBuilder.release_singleton
|
def release_singleton(self):
'''deletes the data that lets our program know if it is
running as singleton when calling check_if_open,
i.e check_if_open will return fals after calling this
'''
with suppress(KeyError):
del self.cfg['is_programming_running_info']
with suppress(FileNotFoundError, AttributeError):
os.remove(self.check_file)
|
python
|
def release_singleton(self):
'''deletes the data that lets our program know if it is
running as singleton when calling check_if_open,
i.e check_if_open will return fals after calling this
'''
with suppress(KeyError):
del self.cfg['is_programming_running_info']
with suppress(FileNotFoundError, AttributeError):
os.remove(self.check_file)
|
[
"def",
"release_singleton",
"(",
"self",
")",
":",
"with",
"suppress",
"(",
"KeyError",
")",
":",
"del",
"self",
".",
"cfg",
"[",
"'is_programming_running_info'",
"]",
"with",
"suppress",
"(",
"FileNotFoundError",
",",
"AttributeError",
")",
":",
"os",
".",
"remove",
"(",
"self",
".",
"check_file",
")"
] |
deletes the data that lets our program know if it is
running as singleton when calling check_if_open,
i.e check_if_open will return fals after calling this
|
[
"deletes",
"the",
"data",
"that",
"lets",
"our",
"program",
"know",
"if",
"it",
"is",
"running",
"as",
"singleton",
"when",
"calling",
"check_if_open",
"i",
".",
"e",
"check_if_open",
"will",
"return",
"fals",
"after",
"calling",
"this"
] |
d3c0f324b0c2689c35f5601348276f4efd6cb240
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L289-L297
|
240,886
|
zlobspb/txtarantool
|
txtarantool.py
|
Response._unpack_int_base128
|
def _unpack_int_base128(varint, offset):
"""Implement Perl unpack's 'w' option, aka base 128 decoding."""
res = ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
return res, offset + 1
|
python
|
def _unpack_int_base128(varint, offset):
"""Implement Perl unpack's 'w' option, aka base 128 decoding."""
res = ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
if ord(varint[offset]) >= 0x80:
offset += 1
res = ((res - 0x80) << 7) + ord(varint[offset])
return res, offset + 1
|
[
"def",
"_unpack_int_base128",
"(",
"varint",
",",
"offset",
")",
":",
"res",
"=",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
"if",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
">=",
"0x80",
":",
"offset",
"+=",
"1",
"res",
"=",
"(",
"(",
"res",
"-",
"0x80",
")",
"<<",
"7",
")",
"+",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
"if",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
">=",
"0x80",
":",
"offset",
"+=",
"1",
"res",
"=",
"(",
"(",
"res",
"-",
"0x80",
")",
"<<",
"7",
")",
"+",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
"if",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
">=",
"0x80",
":",
"offset",
"+=",
"1",
"res",
"=",
"(",
"(",
"res",
"-",
"0x80",
")",
"<<",
"7",
")",
"+",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
"if",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
">=",
"0x80",
":",
"offset",
"+=",
"1",
"res",
"=",
"(",
"(",
"res",
"-",
"0x80",
")",
"<<",
"7",
")",
"+",
"ord",
"(",
"varint",
"[",
"offset",
"]",
")",
"return",
"res",
",",
"offset",
"+",
"1"
] |
Implement Perl unpack's 'w' option, aka base 128 decoding.
|
[
"Implement",
"Perl",
"unpack",
"s",
"w",
"option",
"aka",
"base",
"128",
"decoding",
"."
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L548-L563
|
240,887
|
zlobspb/txtarantool
|
txtarantool.py
|
Response._unpack_body
|
def _unpack_body(self, buff):
"""
Parse the response body.
After body unpacking its data available as python list of tuples
For each request type the response body has the same format:
<insert_response_body> ::= <count> | <count><fq_tuple>
<update_response_body> ::= <count> | <count><fq_tuple>
<delete_response_body> ::= <count> | <count><fq_tuple>
<select_response_body> ::= <count><fq_tuple>*
<call_response_body> ::= <count><fq_tuple>
:param buff: buffer containing request body
:type byff: ctypes buffer
"""
# Unpack <return_code> and <count> (how many records affected or selected)
self._return_code = struct_L.unpack_from(buff, offset=0)[0]
# Separate return_code and completion_code
self._completion_status = self._return_code & 0x00ff
self._return_code >>= 8
# In case of an error unpack the body as an error message
if self._return_code != 0:
self._return_message = unicode(buff[4:-1], self.charset, self.errors)
if self._completion_status == 2:
raise TarantoolError(self._return_code, self._return_message)
# Unpack <count> (how many records affected or selected)
self._rowcount = struct_L.unpack_from(buff, offset=4)[0]
# If the response doesn't contain any tuple - there is nothing to unpack
if self._body_length == 8:
return
# Parse response tuples (<fq_tuple>)
if self._rowcount > 0:
offset = 8 # The first 4 bytes in the response body is the <count> we have already read
while offset < self._body_length:
# In resonse tuples have the form <size><tuple> (<fq_tuple> ::= <size><tuple>).
# Attribute <size> takes into account only size of tuple's <field> payload,
# but does not include 4-byte of <cardinality> field.
#Therefore the actual size of the <tuple> is greater to 4 bytes.
tuple_size = struct.unpack_from("<L", buff, offset)[0] + 4
tuple_data = struct.unpack_from("<%ds" % (tuple_size), buff, offset+4)[0]
tuple_value = self._unpack_tuple(tuple_data)
if self.field_types:
self.append(self._cast_tuple(tuple_value))
else:
self.append(tuple_value)
offset = offset + tuple_size + 4
|
python
|
def _unpack_body(self, buff):
"""
Parse the response body.
After body unpacking its data available as python list of tuples
For each request type the response body has the same format:
<insert_response_body> ::= <count> | <count><fq_tuple>
<update_response_body> ::= <count> | <count><fq_tuple>
<delete_response_body> ::= <count> | <count><fq_tuple>
<select_response_body> ::= <count><fq_tuple>*
<call_response_body> ::= <count><fq_tuple>
:param buff: buffer containing request body
:type byff: ctypes buffer
"""
# Unpack <return_code> and <count> (how many records affected or selected)
self._return_code = struct_L.unpack_from(buff, offset=0)[0]
# Separate return_code and completion_code
self._completion_status = self._return_code & 0x00ff
self._return_code >>= 8
# In case of an error unpack the body as an error message
if self._return_code != 0:
self._return_message = unicode(buff[4:-1], self.charset, self.errors)
if self._completion_status == 2:
raise TarantoolError(self._return_code, self._return_message)
# Unpack <count> (how many records affected or selected)
self._rowcount = struct_L.unpack_from(buff, offset=4)[0]
# If the response doesn't contain any tuple - there is nothing to unpack
if self._body_length == 8:
return
# Parse response tuples (<fq_tuple>)
if self._rowcount > 0:
offset = 8 # The first 4 bytes in the response body is the <count> we have already read
while offset < self._body_length:
# In resonse tuples have the form <size><tuple> (<fq_tuple> ::= <size><tuple>).
# Attribute <size> takes into account only size of tuple's <field> payload,
# but does not include 4-byte of <cardinality> field.
#Therefore the actual size of the <tuple> is greater to 4 bytes.
tuple_size = struct.unpack_from("<L", buff, offset)[0] + 4
tuple_data = struct.unpack_from("<%ds" % (tuple_size), buff, offset+4)[0]
tuple_value = self._unpack_tuple(tuple_data)
if self.field_types:
self.append(self._cast_tuple(tuple_value))
else:
self.append(tuple_value)
offset = offset + tuple_size + 4
|
[
"def",
"_unpack_body",
"(",
"self",
",",
"buff",
")",
":",
"# Unpack <return_code> and <count> (how many records affected or selected)",
"self",
".",
"_return_code",
"=",
"struct_L",
".",
"unpack_from",
"(",
"buff",
",",
"offset",
"=",
"0",
")",
"[",
"0",
"]",
"# Separate return_code and completion_code",
"self",
".",
"_completion_status",
"=",
"self",
".",
"_return_code",
"&",
"0x00ff",
"self",
".",
"_return_code",
">>=",
"8",
"# In case of an error unpack the body as an error message",
"if",
"self",
".",
"_return_code",
"!=",
"0",
":",
"self",
".",
"_return_message",
"=",
"unicode",
"(",
"buff",
"[",
"4",
":",
"-",
"1",
"]",
",",
"self",
".",
"charset",
",",
"self",
".",
"errors",
")",
"if",
"self",
".",
"_completion_status",
"==",
"2",
":",
"raise",
"TarantoolError",
"(",
"self",
".",
"_return_code",
",",
"self",
".",
"_return_message",
")",
"# Unpack <count> (how many records affected or selected)",
"self",
".",
"_rowcount",
"=",
"struct_L",
".",
"unpack_from",
"(",
"buff",
",",
"offset",
"=",
"4",
")",
"[",
"0",
"]",
"# If the response doesn't contain any tuple - there is nothing to unpack",
"if",
"self",
".",
"_body_length",
"==",
"8",
":",
"return",
"# Parse response tuples (<fq_tuple>)",
"if",
"self",
".",
"_rowcount",
">",
"0",
":",
"offset",
"=",
"8",
"# The first 4 bytes in the response body is the <count> we have already read",
"while",
"offset",
"<",
"self",
".",
"_body_length",
":",
"# In resonse tuples have the form <size><tuple> (<fq_tuple> ::= <size><tuple>).",
"# Attribute <size> takes into account only size of tuple's <field> payload,",
"# but does not include 4-byte of <cardinality> field.",
"#Therefore the actual size of the <tuple> is greater to 4 bytes.",
"tuple_size",
"=",
"struct",
".",
"unpack_from",
"(",
"\"<L\"",
",",
"buff",
",",
"offset",
")",
"[",
"0",
"]",
"+",
"4",
"tuple_data",
"=",
"struct",
".",
"unpack_from",
"(",
"\"<%ds\"",
"%",
"(",
"tuple_size",
")",
",",
"buff",
",",
"offset",
"+",
"4",
")",
"[",
"0",
"]",
"tuple_value",
"=",
"self",
".",
"_unpack_tuple",
"(",
"tuple_data",
")",
"if",
"self",
".",
"field_types",
":",
"self",
".",
"append",
"(",
"self",
".",
"_cast_tuple",
"(",
"tuple_value",
")",
")",
"else",
":",
"self",
".",
"append",
"(",
"tuple_value",
")",
"offset",
"=",
"offset",
"+",
"tuple_size",
"+",
"4"
] |
Parse the response body.
After body unpacking its data available as python list of tuples
For each request type the response body has the same format:
<insert_response_body> ::= <count> | <count><fq_tuple>
<update_response_body> ::= <count> | <count><fq_tuple>
<delete_response_body> ::= <count> | <count><fq_tuple>
<select_response_body> ::= <count><fq_tuple>*
<call_response_body> ::= <count><fq_tuple>
:param buff: buffer containing request body
:type byff: ctypes buffer
|
[
"Parse",
"the",
"response",
"body",
".",
"After",
"body",
"unpacking",
"its",
"data",
"available",
"as",
"python",
"list",
"of",
"tuples"
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L587-L639
|
240,888
|
zlobspb/txtarantool
|
txtarantool.py
|
Response._cast_field
|
def _cast_field(self, cast_to, value):
"""
Convert field type from raw bytes to native python type
:param cast_to: native python type to cast to
:type cast_to: a type object (one of bytes, int, unicode (str for py3k))
:param value: raw value from the database
:type value: bytes
:return: converted value
:rtype: value of native python type (one of bytes, int, unicode (str for py3k))
"""
if cast_to in (int, long, str):
return cast_to(value)
elif cast_to == unicode:
try:
value = value.decode(self.charset, self.errors)
except UnicodeEncodeError, e:
raise InvalidData("Error encoding unicode value '%s': %s" % (repr(value), e))
return value
elif cast_to in (any, bytes):
return value
else:
raise TypeError("Invalid field type %s" % (cast_to))
|
python
|
def _cast_field(self, cast_to, value):
"""
Convert field type from raw bytes to native python type
:param cast_to: native python type to cast to
:type cast_to: a type object (one of bytes, int, unicode (str for py3k))
:param value: raw value from the database
:type value: bytes
:return: converted value
:rtype: value of native python type (one of bytes, int, unicode (str for py3k))
"""
if cast_to in (int, long, str):
return cast_to(value)
elif cast_to == unicode:
try:
value = value.decode(self.charset, self.errors)
except UnicodeEncodeError, e:
raise InvalidData("Error encoding unicode value '%s': %s" % (repr(value), e))
return value
elif cast_to in (any, bytes):
return value
else:
raise TypeError("Invalid field type %s" % (cast_to))
|
[
"def",
"_cast_field",
"(",
"self",
",",
"cast_to",
",",
"value",
")",
":",
"if",
"cast_to",
"in",
"(",
"int",
",",
"long",
",",
"str",
")",
":",
"return",
"cast_to",
"(",
"value",
")",
"elif",
"cast_to",
"==",
"unicode",
":",
"try",
":",
"value",
"=",
"value",
".",
"decode",
"(",
"self",
".",
"charset",
",",
"self",
".",
"errors",
")",
"except",
"UnicodeEncodeError",
",",
"e",
":",
"raise",
"InvalidData",
"(",
"\"Error encoding unicode value '%s': %s\"",
"%",
"(",
"repr",
"(",
"value",
")",
",",
"e",
")",
")",
"return",
"value",
"elif",
"cast_to",
"in",
"(",
"any",
",",
"bytes",
")",
":",
"return",
"value",
"else",
":",
"raise",
"TypeError",
"(",
"\"Invalid field type %s\"",
"%",
"(",
"cast_to",
")",
")"
] |
Convert field type from raw bytes to native python type
:param cast_to: native python type to cast to
:type cast_to: a type object (one of bytes, int, unicode (str for py3k))
:param value: raw value from the database
:type value: bytes
:return: converted value
:rtype: value of native python type (one of bytes, int, unicode (str for py3k))
|
[
"Convert",
"field",
"type",
"from",
"raw",
"bytes",
"to",
"native",
"python",
"type"
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L686-L710
|
240,889
|
zlobspb/txtarantool
|
txtarantool.py
|
Response._cast_tuple
|
def _cast_tuple(self, values):
"""
Convert values of the tuple from raw bytes to native python types
:param values: tuple of the raw database values
:type value: tuple of bytes
:return: converted tuple value
:rtype: value of native python types (bytes, int, unicode (or str for py3k))
"""
result = []
for i, value in enumerate(values):
if i < len(self.field_types):
result.append(self._cast_field(self.field_types[i], value))
else:
result.append(self._cast_field(self.field_types[-1], value))
return tuple(result)
|
python
|
def _cast_tuple(self, values):
"""
Convert values of the tuple from raw bytes to native python types
:param values: tuple of the raw database values
:type value: tuple of bytes
:return: converted tuple value
:rtype: value of native python types (bytes, int, unicode (or str for py3k))
"""
result = []
for i, value in enumerate(values):
if i < len(self.field_types):
result.append(self._cast_field(self.field_types[i], value))
else:
result.append(self._cast_field(self.field_types[-1], value))
return tuple(result)
|
[
"def",
"_cast_tuple",
"(",
"self",
",",
"values",
")",
":",
"result",
"=",
"[",
"]",
"for",
"i",
",",
"value",
"in",
"enumerate",
"(",
"values",
")",
":",
"if",
"i",
"<",
"len",
"(",
"self",
".",
"field_types",
")",
":",
"result",
".",
"append",
"(",
"self",
".",
"_cast_field",
"(",
"self",
".",
"field_types",
"[",
"i",
"]",
",",
"value",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"self",
".",
"_cast_field",
"(",
"self",
".",
"field_types",
"[",
"-",
"1",
"]",
",",
"value",
")",
")",
"return",
"tuple",
"(",
"result",
")"
] |
Convert values of the tuple from raw bytes to native python types
:param values: tuple of the raw database values
:type value: tuple of bytes
:return: converted tuple value
:rtype: value of native python types (bytes, int, unicode (or str for py3k))
|
[
"Convert",
"values",
"of",
"the",
"tuple",
"from",
"raw",
"bytes",
"to",
"native",
"python",
"types"
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L712-L729
|
240,890
|
zlobspb/txtarantool
|
txtarantool.py
|
TarantoolProtocol.ping
|
def ping(self):
"""
send ping packet to tarantool server and receive response with empty body
"""
d = self.replyQueue.get_ping()
packet = RequestPing(self.charset, self.errors)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, None)
|
python
|
def ping(self):
"""
send ping packet to tarantool server and receive response with empty body
"""
d = self.replyQueue.get_ping()
packet = RequestPing(self.charset, self.errors)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, None)
|
[
"def",
"ping",
"(",
"self",
")",
":",
"d",
"=",
"self",
".",
"replyQueue",
".",
"get_ping",
"(",
")",
"packet",
"=",
"RequestPing",
"(",
"self",
".",
"charset",
",",
"self",
".",
"errors",
")",
"self",
".",
"transport",
".",
"write",
"(",
"bytes",
"(",
"packet",
")",
")",
"return",
"d",
".",
"addCallback",
"(",
"self",
".",
"handle_reply",
",",
"self",
".",
"charset",
",",
"self",
".",
"errors",
",",
"None",
")"
] |
send ping packet to tarantool server and receive response with empty body
|
[
"send",
"ping",
"packet",
"to",
"tarantool",
"server",
"and",
"receive",
"response",
"with",
"empty",
"body"
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L865-L872
|
240,891
|
zlobspb/txtarantool
|
txtarantool.py
|
TarantoolProtocol.insert
|
def insert(self, space_no, *args):
"""
insert tuple, if primary key exists server will return error
"""
d = self.replyQueue.get()
packet = RequestInsert(self.charset, self.errors, d._ipro_request_id, space_no, Request.TNT_FLAG_ADD, *args)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, None)
|
python
|
def insert(self, space_no, *args):
"""
insert tuple, if primary key exists server will return error
"""
d = self.replyQueue.get()
packet = RequestInsert(self.charset, self.errors, d._ipro_request_id, space_no, Request.TNT_FLAG_ADD, *args)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, None)
|
[
"def",
"insert",
"(",
"self",
",",
"space_no",
",",
"*",
"args",
")",
":",
"d",
"=",
"self",
".",
"replyQueue",
".",
"get",
"(",
")",
"packet",
"=",
"RequestInsert",
"(",
"self",
".",
"charset",
",",
"self",
".",
"errors",
",",
"d",
".",
"_ipro_request_id",
",",
"space_no",
",",
"Request",
".",
"TNT_FLAG_ADD",
",",
"*",
"args",
")",
"self",
".",
"transport",
".",
"write",
"(",
"bytes",
"(",
"packet",
")",
")",
"return",
"d",
".",
"addCallback",
"(",
"self",
".",
"handle_reply",
",",
"self",
".",
"charset",
",",
"self",
".",
"errors",
",",
"None",
")"
] |
insert tuple, if primary key exists server will return error
|
[
"insert",
"tuple",
"if",
"primary",
"key",
"exists",
"server",
"will",
"return",
"error"
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L874-L881
|
240,892
|
zlobspb/txtarantool
|
txtarantool.py
|
TarantoolProtocol.delete
|
def delete(self, space_no, *args):
"""
delete tuple by primary key
"""
d = self.replyQueue.get()
packet = RequestDelete(self.charset, self.errors, d._ipro_request_id, space_no, 0, *args)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, None)
|
python
|
def delete(self, space_no, *args):
"""
delete tuple by primary key
"""
d = self.replyQueue.get()
packet = RequestDelete(self.charset, self.errors, d._ipro_request_id, space_no, 0, *args)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, None)
|
[
"def",
"delete",
"(",
"self",
",",
"space_no",
",",
"*",
"args",
")",
":",
"d",
"=",
"self",
".",
"replyQueue",
".",
"get",
"(",
")",
"packet",
"=",
"RequestDelete",
"(",
"self",
".",
"charset",
",",
"self",
".",
"errors",
",",
"d",
".",
"_ipro_request_id",
",",
"space_no",
",",
"0",
",",
"*",
"args",
")",
"self",
".",
"transport",
".",
"write",
"(",
"bytes",
"(",
"packet",
")",
")",
"return",
"d",
".",
"addCallback",
"(",
"self",
".",
"handle_reply",
",",
"self",
".",
"charset",
",",
"self",
".",
"errors",
",",
"None",
")"
] |
delete tuple by primary key
|
[
"delete",
"tuple",
"by",
"primary",
"key"
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L930-L937
|
240,893
|
zlobspb/txtarantool
|
txtarantool.py
|
TarantoolProtocol.call
|
def call(self, proc_name, field_types, *args):
"""
call server procedure
"""
d = self.replyQueue.get()
packet = RequestCall(self.charset, self.errors, d._ipro_request_id, proc_name, 0, *args)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, field_types)
|
python
|
def call(self, proc_name, field_types, *args):
"""
call server procedure
"""
d = self.replyQueue.get()
packet = RequestCall(self.charset, self.errors, d._ipro_request_id, proc_name, 0, *args)
self.transport.write(bytes(packet))
return d.addCallback(self.handle_reply, self.charset, self.errors, field_types)
|
[
"def",
"call",
"(",
"self",
",",
"proc_name",
",",
"field_types",
",",
"*",
"args",
")",
":",
"d",
"=",
"self",
".",
"replyQueue",
".",
"get",
"(",
")",
"packet",
"=",
"RequestCall",
"(",
"self",
".",
"charset",
",",
"self",
".",
"errors",
",",
"d",
".",
"_ipro_request_id",
",",
"proc_name",
",",
"0",
",",
"*",
"args",
")",
"self",
".",
"transport",
".",
"write",
"(",
"bytes",
"(",
"packet",
")",
")",
"return",
"d",
".",
"addCallback",
"(",
"self",
".",
"handle_reply",
",",
"self",
".",
"charset",
",",
"self",
".",
"errors",
",",
"field_types",
")"
] |
call server procedure
|
[
"call",
"server",
"procedure"
] |
e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350
|
https://github.com/zlobspb/txtarantool/blob/e8d451d53e1c99ccf1f23ce36a9c589fa2ed0350/txtarantool.py#L985-L992
|
240,894
|
lukaszb/porunga
|
examples/fibs/fibs.py
|
fibs
|
def fibs(n, m):
"""
Yields Fibonacci numbers starting from ``n`` and ending at ``m``.
"""
a = b = 1
for x in range(3, m + 1):
a, b = b, a + b
if x >= n:
yield b
|
python
|
def fibs(n, m):
"""
Yields Fibonacci numbers starting from ``n`` and ending at ``m``.
"""
a = b = 1
for x in range(3, m + 1):
a, b = b, a + b
if x >= n:
yield b
|
[
"def",
"fibs",
"(",
"n",
",",
"m",
")",
":",
"a",
"=",
"b",
"=",
"1",
"for",
"x",
"in",
"range",
"(",
"3",
",",
"m",
"+",
"1",
")",
":",
"a",
",",
"b",
"=",
"b",
",",
"a",
"+",
"b",
"if",
"x",
">=",
"n",
":",
"yield",
"b"
] |
Yields Fibonacci numbers starting from ``n`` and ending at ``m``.
|
[
"Yields",
"Fibonacci",
"numbers",
"starting",
"from",
"n",
"and",
"ending",
"at",
"m",
"."
] |
13177ff9bc654ac25cf09def6b526eb38e40e483
|
https://github.com/lukaszb/porunga/blob/13177ff9bc654ac25cf09def6b526eb38e40e483/examples/fibs/fibs.py#L6-L14
|
240,895
|
emencia/emencia-django-countries
|
emencia/django/countries/migrations/0002_auto_load_initial_data.py
|
unload_fixture
|
def unload_fixture(apps, schema_editor):
"""
Brutally deleting all 'Country' model entries for reversing operation
"""
appmodel = apps.get_model(APP_LABEL, COUNTRY_MODELNAME)
appmodel.objects.all().delete()
|
python
|
def unload_fixture(apps, schema_editor):
"""
Brutally deleting all 'Country' model entries for reversing operation
"""
appmodel = apps.get_model(APP_LABEL, COUNTRY_MODELNAME)
appmodel.objects.all().delete()
|
[
"def",
"unload_fixture",
"(",
"apps",
",",
"schema_editor",
")",
":",
"appmodel",
"=",
"apps",
".",
"get_model",
"(",
"APP_LABEL",
",",
"COUNTRY_MODELNAME",
")",
"appmodel",
".",
"objects",
".",
"all",
"(",
")",
".",
"delete",
"(",
")"
] |
Brutally deleting all 'Country' model entries for reversing operation
|
[
"Brutally",
"deleting",
"all",
"Country",
"model",
"entries",
"for",
"reversing",
"operation"
] |
5ae8719f4b43caeca2c69c9e37e6d6bc5d7b0290
|
https://github.com/emencia/emencia-django-countries/blob/5ae8719f4b43caeca2c69c9e37e6d6bc5d7b0290/emencia/django/countries/migrations/0002_auto_load_initial_data.py#L20-L25
|
240,896
|
picleslivre/plumber
|
plumber.py
|
precondition
|
def precondition(precond):
"""
Runs the callable responsible for making some assertions
about the data structure expected for the transformation.
If the precondition is not achieved, a UnmetPrecondition
exception must be raised, and then the transformation pipe
is bypassed.
"""
def decorator(f):
"""`f` can be a reference to a method or function. In
both cases the `data` is expected to be passed as the
first positional argument (obviously respecting the
`self` argument when it is a method).
"""
def decorated(*args):
if len(args) > 2:
raise TypeError('%s takes only 1 argument (or 2 for instance methods)' % f.__name__)
try:
instance, data = args
if not isinstance(instance, Pipe):
raise TypeError('%s is not a valid pipe instance' % instance)
except ValueError: # tuple unpacking error
data = args[0]
try:
precond(data)
except UnmetPrecondition:
# bypass the pipe
return data
else:
return f(*args)
return decorated
return decorator
|
python
|
def precondition(precond):
"""
Runs the callable responsible for making some assertions
about the data structure expected for the transformation.
If the precondition is not achieved, a UnmetPrecondition
exception must be raised, and then the transformation pipe
is bypassed.
"""
def decorator(f):
"""`f` can be a reference to a method or function. In
both cases the `data` is expected to be passed as the
first positional argument (obviously respecting the
`self` argument when it is a method).
"""
def decorated(*args):
if len(args) > 2:
raise TypeError('%s takes only 1 argument (or 2 for instance methods)' % f.__name__)
try:
instance, data = args
if not isinstance(instance, Pipe):
raise TypeError('%s is not a valid pipe instance' % instance)
except ValueError: # tuple unpacking error
data = args[0]
try:
precond(data)
except UnmetPrecondition:
# bypass the pipe
return data
else:
return f(*args)
return decorated
return decorator
|
[
"def",
"precondition",
"(",
"precond",
")",
":",
"def",
"decorator",
"(",
"f",
")",
":",
"\"\"\"`f` can be a reference to a method or function. In\n both cases the `data` is expected to be passed as the\n first positional argument (obviously respecting the\n `self` argument when it is a method).\n \"\"\"",
"def",
"decorated",
"(",
"*",
"args",
")",
":",
"if",
"len",
"(",
"args",
")",
">",
"2",
":",
"raise",
"TypeError",
"(",
"'%s takes only 1 argument (or 2 for instance methods)'",
"%",
"f",
".",
"__name__",
")",
"try",
":",
"instance",
",",
"data",
"=",
"args",
"if",
"not",
"isinstance",
"(",
"instance",
",",
"Pipe",
")",
":",
"raise",
"TypeError",
"(",
"'%s is not a valid pipe instance'",
"%",
"instance",
")",
"except",
"ValueError",
":",
"# tuple unpacking error",
"data",
"=",
"args",
"[",
"0",
"]",
"try",
":",
"precond",
"(",
"data",
")",
"except",
"UnmetPrecondition",
":",
"# bypass the pipe",
"return",
"data",
"else",
":",
"return",
"f",
"(",
"*",
"args",
")",
"return",
"decorated",
"return",
"decorator"
] |
Runs the callable responsible for making some assertions
about the data structure expected for the transformation.
If the precondition is not achieved, a UnmetPrecondition
exception must be raised, and then the transformation pipe
is bypassed.
|
[
"Runs",
"the",
"callable",
"responsible",
"for",
"making",
"some",
"assertions",
"about",
"the",
"data",
"structure",
"expected",
"for",
"the",
"transformation",
"."
] |
f5019ef20679b3e9a31c6c84e4f4529d72dc8db9
|
https://github.com/picleslivre/plumber/blob/f5019ef20679b3e9a31c6c84e4f4529d72dc8db9/plumber.py#L76-L111
|
240,897
|
picleslivre/plumber
|
plumber.py
|
Pipeline.run
|
def run(self, data, rewrap=False, prefetch=0):
"""
Wires the pipeline and returns a lazy object of
the transformed data.
:param data: must be an iterable, where a full document
must be returned for each loop
:param rewrap: (optional) is a bool that indicates the need to rewrap
data in cases where iterating over it produces undesired results,
for instance ``dict`` instances.
:param prefetch: (optional) is an int defining the number of items to
be prefetched once the pipeline starts yielding data. The
default prefetching mechanism is based on threads, so be
careful with CPU-bound processing pipelines.
"""
if rewrap:
data = [data]
for _filter in self._filters:
_filter.feed(data)
data = _filter
else:
iterable = self._prefetch_callable(data, prefetch) if prefetch else data
for out_data in iterable:
yield out_data
|
python
|
def run(self, data, rewrap=False, prefetch=0):
"""
Wires the pipeline and returns a lazy object of
the transformed data.
:param data: must be an iterable, where a full document
must be returned for each loop
:param rewrap: (optional) is a bool that indicates the need to rewrap
data in cases where iterating over it produces undesired results,
for instance ``dict`` instances.
:param prefetch: (optional) is an int defining the number of items to
be prefetched once the pipeline starts yielding data. The
default prefetching mechanism is based on threads, so be
careful with CPU-bound processing pipelines.
"""
if rewrap:
data = [data]
for _filter in self._filters:
_filter.feed(data)
data = _filter
else:
iterable = self._prefetch_callable(data, prefetch) if prefetch else data
for out_data in iterable:
yield out_data
|
[
"def",
"run",
"(",
"self",
",",
"data",
",",
"rewrap",
"=",
"False",
",",
"prefetch",
"=",
"0",
")",
":",
"if",
"rewrap",
":",
"data",
"=",
"[",
"data",
"]",
"for",
"_filter",
"in",
"self",
".",
"_filters",
":",
"_filter",
".",
"feed",
"(",
"data",
")",
"data",
"=",
"_filter",
"else",
":",
"iterable",
"=",
"self",
".",
"_prefetch_callable",
"(",
"data",
",",
"prefetch",
")",
"if",
"prefetch",
"else",
"data",
"for",
"out_data",
"in",
"iterable",
":",
"yield",
"out_data"
] |
Wires the pipeline and returns a lazy object of
the transformed data.
:param data: must be an iterable, where a full document
must be returned for each loop
:param rewrap: (optional) is a bool that indicates the need to rewrap
data in cases where iterating over it produces undesired results,
for instance ``dict`` instances.
:param prefetch: (optional) is an int defining the number of items to
be prefetched once the pipeline starts yielding data. The
default prefetching mechanism is based on threads, so be
careful with CPU-bound processing pipelines.
|
[
"Wires",
"the",
"pipeline",
"and",
"returns",
"a",
"lazy",
"object",
"of",
"the",
"transformed",
"data",
"."
] |
f5019ef20679b3e9a31c6c84e4f4529d72dc8db9
|
https://github.com/picleslivre/plumber/blob/f5019ef20679b3e9a31c6c84e4f4529d72dc8db9/plumber.py#L203-L229
|
240,898
|
dillonhicks/rekt
|
rekt/utils.py
|
camel_case_to_snake_case
|
def camel_case_to_snake_case(name):
"""
HelloWorld -> hello_world
"""
s1 = _FIRST_CAP_RE.sub(r'\1_\2', name)
return _ALL_CAP_RE.sub(r'\1_\2', s1).lower()
|
python
|
def camel_case_to_snake_case(name):
"""
HelloWorld -> hello_world
"""
s1 = _FIRST_CAP_RE.sub(r'\1_\2', name)
return _ALL_CAP_RE.sub(r'\1_\2', s1).lower()
|
[
"def",
"camel_case_to_snake_case",
"(",
"name",
")",
":",
"s1",
"=",
"_FIRST_CAP_RE",
".",
"sub",
"(",
"r'\\1_\\2'",
",",
"name",
")",
"return",
"_ALL_CAP_RE",
".",
"sub",
"(",
"r'\\1_\\2'",
",",
"s1",
")",
".",
"lower",
"(",
")"
] |
HelloWorld -> hello_world
|
[
"HelloWorld",
"-",
">",
"hello_world"
] |
3848b272726c78214cb96b906f9b9f289497f27e
|
https://github.com/dillonhicks/rekt/blob/3848b272726c78214cb96b906f9b9f289497f27e/rekt/utils.py#L33-L38
|
240,899
|
dillonhicks/rekt
|
rekt/utils.py
|
load_builtin_config
|
def load_builtin_config(name, module_name=__name__, specs_path=specs.__path__):
"""
Uses package info magic to find the resource file located in the specs
submodule.
"""
config_path = Path(next(iter(specs_path)))
config_path = config_path / PurePath(resource_filename(module_name, name + '.yaml'))
return load_config(config_path)
|
python
|
def load_builtin_config(name, module_name=__name__, specs_path=specs.__path__):
"""
Uses package info magic to find the resource file located in the specs
submodule.
"""
config_path = Path(next(iter(specs_path)))
config_path = config_path / PurePath(resource_filename(module_name, name + '.yaml'))
return load_config(config_path)
|
[
"def",
"load_builtin_config",
"(",
"name",
",",
"module_name",
"=",
"__name__",
",",
"specs_path",
"=",
"specs",
".",
"__path__",
")",
":",
"config_path",
"=",
"Path",
"(",
"next",
"(",
"iter",
"(",
"specs_path",
")",
")",
")",
"config_path",
"=",
"config_path",
"/",
"PurePath",
"(",
"resource_filename",
"(",
"module_name",
",",
"name",
"+",
"'.yaml'",
")",
")",
"return",
"load_config",
"(",
"config_path",
")"
] |
Uses package info magic to find the resource file located in the specs
submodule.
|
[
"Uses",
"package",
"info",
"magic",
"to",
"find",
"the",
"resource",
"file",
"located",
"in",
"the",
"specs",
"submodule",
"."
] |
3848b272726c78214cb96b906f9b9f289497f27e
|
https://github.com/dillonhicks/rekt/blob/3848b272726c78214cb96b906f9b9f289497f27e/rekt/utils.py#L47-L54
|