partition
stringclasses 3
values | func_name
stringlengths 1
134
| docstring
stringlengths 1
46.9k
| path
stringlengths 4
223
| original_string
stringlengths 75
104k
| code
stringlengths 75
104k
| docstring_tokens
listlengths 1
1.97k
| repo
stringlengths 7
55
| language
stringclasses 1
value | url
stringlengths 87
315
| code_tokens
listlengths 19
28.4k
| sha
stringlengths 40
40
|
|---|---|---|---|---|---|---|---|---|---|---|---|
test
|
Qurl.inc
|
Increment value
|
qurl_templatetag/qurl.py
|
def inc(self, name, value=1):
""" Increment value """
clone = self._clone()
clone._qsl = [(q, v) if q != name else (q, int(v) + value)
for (q, v) in self._qsl]
if name not in dict(clone._qsl).keys():
clone._qsl.append((name, value))
return clone
|
def inc(self, name, value=1):
""" Increment value """
clone = self._clone()
clone._qsl = [(q, v) if q != name else (q, int(v) + value)
for (q, v) in self._qsl]
if name not in dict(clone._qsl).keys():
clone._qsl.append((name, value))
return clone
|
[
"Increment",
"value"
] |
sophilabs/django-qurl-templatetag
|
python
|
https://github.com/sophilabs/django-qurl-templatetag/blob/8a785b112437d05cb54846b79012967fee1cb534/qurl_templatetag/qurl.py#L53-L60
|
[
"def",
"inc",
"(",
"self",
",",
"name",
",",
"value",
"=",
"1",
")",
":",
"clone",
"=",
"self",
".",
"_clone",
"(",
")",
"clone",
".",
"_qsl",
"=",
"[",
"(",
"q",
",",
"v",
")",
"if",
"q",
"!=",
"name",
"else",
"(",
"q",
",",
"int",
"(",
"v",
")",
"+",
"value",
")",
"for",
"(",
"q",
",",
"v",
")",
"in",
"self",
".",
"_qsl",
"]",
"if",
"name",
"not",
"in",
"dict",
"(",
"clone",
".",
"_qsl",
")",
".",
"keys",
"(",
")",
":",
"clone",
".",
"_qsl",
".",
"append",
"(",
"(",
"name",
",",
"value",
")",
")",
"return",
"clone"
] |
8a785b112437d05cb54846b79012967fee1cb534
|
test
|
parse_options
|
Commandline options arguments parsing.
:return: parsed commandline arguments.
:rtype: optparse.Values.
|
check_supervisord.py
|
def parse_options():
"""
Commandline options arguments parsing.
:return: parsed commandline arguments.
:rtype: optparse.Values.
"""
version = "%%prog {version}".format(version=__version__)
parser = OptionParser(version=version)
parser.add_option(
"-s", "--server", action="store", dest="server",
type="string", default="", metavar="SERVER",
help="server name, IP address or unix socket path"
)
parser.add_option(
"-p", "--port", action="store", type="int", dest="port",
default=9001, metavar="PORT", help="port number"
)
parser.add_option(
"-P", "--programs", action="store", dest="programs", type="string", default="",
metavar="PROGRAMS", help="comma separated programs list, or empty for all programs in supervisord response"
)
parser.add_option(
"-u", "--username", action="store", dest="username", type="string", default="",
metavar="USERNAME", help="supervisord user"
)
parser.add_option(
"-S", "--password", action="store", dest="password", type="string", default="",
metavar="PASSWORD", help="supervisord user password"
)
parser.add_option(
"-q", "--quiet", metavar="QUIET", action="store_true", default=False, dest="quiet", help="be quiet"
)
parser.add_option(
"--stopped-state", action="store", dest="stopped_state", type="choice", choices=EXIT_CODES.keys(), default=EXIT_CODE_OK,
metavar="STOPPED_STATE", help="stopped state"
)
parser.add_option(
"--network-errors-exit-code", action="store", dest="network_errors_exit_code", type="choice", choices=EXIT_CODES.keys(), default=EXIT_CODE_UNKNOWN,
metavar="NETWORK_ERRORS_EXIT_CODE", help="network errors exit code"
)
options = parser.parse_args(sys.argv)[0]
STATE2TEMPLATE[STATE_STOPPED] = options.stopped_state # update stopped state value from command line argument
# check mandatory command line options supplied
if not options.server:
parser.error("Required server address option missing")
if options.username and not options.password:
parser.error("Required supervisord user password")
return options
|
def parse_options():
"""
Commandline options arguments parsing.
:return: parsed commandline arguments.
:rtype: optparse.Values.
"""
version = "%%prog {version}".format(version=__version__)
parser = OptionParser(version=version)
parser.add_option(
"-s", "--server", action="store", dest="server",
type="string", default="", metavar="SERVER",
help="server name, IP address or unix socket path"
)
parser.add_option(
"-p", "--port", action="store", type="int", dest="port",
default=9001, metavar="PORT", help="port number"
)
parser.add_option(
"-P", "--programs", action="store", dest="programs", type="string", default="",
metavar="PROGRAMS", help="comma separated programs list, or empty for all programs in supervisord response"
)
parser.add_option(
"-u", "--username", action="store", dest="username", type="string", default="",
metavar="USERNAME", help="supervisord user"
)
parser.add_option(
"-S", "--password", action="store", dest="password", type="string", default="",
metavar="PASSWORD", help="supervisord user password"
)
parser.add_option(
"-q", "--quiet", metavar="QUIET", action="store_true", default=False, dest="quiet", help="be quiet"
)
parser.add_option(
"--stopped-state", action="store", dest="stopped_state", type="choice", choices=EXIT_CODES.keys(), default=EXIT_CODE_OK,
metavar="STOPPED_STATE", help="stopped state"
)
parser.add_option(
"--network-errors-exit-code", action="store", dest="network_errors_exit_code", type="choice", choices=EXIT_CODES.keys(), default=EXIT_CODE_UNKNOWN,
metavar="NETWORK_ERRORS_EXIT_CODE", help="network errors exit code"
)
options = parser.parse_args(sys.argv)[0]
STATE2TEMPLATE[STATE_STOPPED] = options.stopped_state # update stopped state value from command line argument
# check mandatory command line options supplied
if not options.server:
parser.error("Required server address option missing")
if options.username and not options.password:
parser.error("Required supervisord user password")
return options
|
[
"Commandline",
"options",
"arguments",
"parsing",
"."
] |
vint21h/nagios-check-supervisord
|
python
|
https://github.com/vint21h/nagios-check-supervisord/blob/a40a542499197a4b5658bd6cc3b34326fe8d0ada/check_supervisord.py#L91-L143
|
[
"def",
"parse_options",
"(",
")",
":",
"version",
"=",
"\"%%prog {version}\"",
".",
"format",
"(",
"version",
"=",
"__version__",
")",
"parser",
"=",
"OptionParser",
"(",
"version",
"=",
"version",
")",
"parser",
".",
"add_option",
"(",
"\"-s\"",
",",
"\"--server\"",
",",
"action",
"=",
"\"store\"",
",",
"dest",
"=",
"\"server\"",
",",
"type",
"=",
"\"string\"",
",",
"default",
"=",
"\"\"",
",",
"metavar",
"=",
"\"SERVER\"",
",",
"help",
"=",
"\"server name, IP address or unix socket path\"",
")",
"parser",
".",
"add_option",
"(",
"\"-p\"",
",",
"\"--port\"",
",",
"action",
"=",
"\"store\"",
",",
"type",
"=",
"\"int\"",
",",
"dest",
"=",
"\"port\"",
",",
"default",
"=",
"9001",
",",
"metavar",
"=",
"\"PORT\"",
",",
"help",
"=",
"\"port number\"",
")",
"parser",
".",
"add_option",
"(",
"\"-P\"",
",",
"\"--programs\"",
",",
"action",
"=",
"\"store\"",
",",
"dest",
"=",
"\"programs\"",
",",
"type",
"=",
"\"string\"",
",",
"default",
"=",
"\"\"",
",",
"metavar",
"=",
"\"PROGRAMS\"",
",",
"help",
"=",
"\"comma separated programs list, or empty for all programs in supervisord response\"",
")",
"parser",
".",
"add_option",
"(",
"\"-u\"",
",",
"\"--username\"",
",",
"action",
"=",
"\"store\"",
",",
"dest",
"=",
"\"username\"",
",",
"type",
"=",
"\"string\"",
",",
"default",
"=",
"\"\"",
",",
"metavar",
"=",
"\"USERNAME\"",
",",
"help",
"=",
"\"supervisord user\"",
")",
"parser",
".",
"add_option",
"(",
"\"-S\"",
",",
"\"--password\"",
",",
"action",
"=",
"\"store\"",
",",
"dest",
"=",
"\"password\"",
",",
"type",
"=",
"\"string\"",
",",
"default",
"=",
"\"\"",
",",
"metavar",
"=",
"\"PASSWORD\"",
",",
"help",
"=",
"\"supervisord user password\"",
")",
"parser",
".",
"add_option",
"(",
"\"-q\"",
",",
"\"--quiet\"",
",",
"metavar",
"=",
"\"QUIET\"",
",",
"action",
"=",
"\"store_true\"",
",",
"default",
"=",
"False",
",",
"dest",
"=",
"\"quiet\"",
",",
"help",
"=",
"\"be quiet\"",
")",
"parser",
".",
"add_option",
"(",
"\"--stopped-state\"",
",",
"action",
"=",
"\"store\"",
",",
"dest",
"=",
"\"stopped_state\"",
",",
"type",
"=",
"\"choice\"",
",",
"choices",
"=",
"EXIT_CODES",
".",
"keys",
"(",
")",
",",
"default",
"=",
"EXIT_CODE_OK",
",",
"metavar",
"=",
"\"STOPPED_STATE\"",
",",
"help",
"=",
"\"stopped state\"",
")",
"parser",
".",
"add_option",
"(",
"\"--network-errors-exit-code\"",
",",
"action",
"=",
"\"store\"",
",",
"dest",
"=",
"\"network_errors_exit_code\"",
",",
"type",
"=",
"\"choice\"",
",",
"choices",
"=",
"EXIT_CODES",
".",
"keys",
"(",
")",
",",
"default",
"=",
"EXIT_CODE_UNKNOWN",
",",
"metavar",
"=",
"\"NETWORK_ERRORS_EXIT_CODE\"",
",",
"help",
"=",
"\"network errors exit code\"",
")",
"options",
"=",
"parser",
".",
"parse_args",
"(",
"sys",
".",
"argv",
")",
"[",
"0",
"]",
"STATE2TEMPLATE",
"[",
"STATE_STOPPED",
"]",
"=",
"options",
".",
"stopped_state",
"# update stopped state value from command line argument",
"# check mandatory command line options supplied",
"if",
"not",
"options",
".",
"server",
":",
"parser",
".",
"error",
"(",
"\"Required server address option missing\"",
")",
"if",
"options",
".",
"username",
"and",
"not",
"options",
".",
"password",
":",
"parser",
".",
"error",
"(",
"\"Required supervisord user password\"",
")",
"return",
"options"
] |
a40a542499197a4b5658bd6cc3b34326fe8d0ada
|
test
|
get_status
|
Get programs statuses.
:param options: parsed commandline arguments.
:type options: optparse.Values.
:return: supervisord XML-RPC call result.
:rtype: dict.
|
check_supervisord.py
|
def get_status(options):
"""
Get programs statuses.
:param options: parsed commandline arguments.
:type options: optparse.Values.
:return: supervisord XML-RPC call result.
:rtype: dict.
"""
payload = { # server connection URI formatted string payload
"username": options.username,
"password": options.password,
"server": options.server,
"port": options.port,
}
try:
if options.server.startswith("/") and stat.S_ISSOCK(os.stat(options.server).st_mode): # communicate with server via unix socket (simple check is server address is path and path is unix socket)
try:
import supervisor.xmlrpc
except ImportError as error:
sys.stderr.write("ERROR: Couldn't load module. {error}\n".format(error=error))
sys.stderr.write("ERROR: Unix socket support not available! Please install nagios-check-supervisord with unix socket support: 'nagios-check-supervisord[unix-socket-support]' or install 'supervisor' separately.\n")
sys.exit(-1)
if all([options.username, options.password, ]): # with auth
connection = xmlrpclib.ServerProxy("https://", transport=supervisor.xmlrpc.SupervisorTransport(options.username, options.password, serverurl=URI[URI_TPL_SOCKET].format(**payload)))
else:
connection = xmlrpclib.ServerProxy("https://", transport=supervisor.xmlrpc.SupervisorTransport(None, None, serverurl=URI[URI_TPL_SOCKET].format(**payload)))
else: # communicate with server via http
if all([options.username, options.password, ]): # with auth
connection = xmlrpclib.Server(URI[URI_TPL_HTTP_AUTH].format(**payload))
else:
connection = xmlrpclib.Server(URI[URI_TPL_HTTP].format(**payload))
return connection.supervisor.getAllProcessInfo()
except Exception as error:
if not options.quiet:
sys.stdout.write("ERROR: Server communication problem. {error}\n".format(error=error))
sys.exit(EXIT_CODES.get(options.network_errors_exit_code, EXIT_CODE_UNKNOWN))
|
def get_status(options):
"""
Get programs statuses.
:param options: parsed commandline arguments.
:type options: optparse.Values.
:return: supervisord XML-RPC call result.
:rtype: dict.
"""
payload = { # server connection URI formatted string payload
"username": options.username,
"password": options.password,
"server": options.server,
"port": options.port,
}
try:
if options.server.startswith("/") and stat.S_ISSOCK(os.stat(options.server).st_mode): # communicate with server via unix socket (simple check is server address is path and path is unix socket)
try:
import supervisor.xmlrpc
except ImportError as error:
sys.stderr.write("ERROR: Couldn't load module. {error}\n".format(error=error))
sys.stderr.write("ERROR: Unix socket support not available! Please install nagios-check-supervisord with unix socket support: 'nagios-check-supervisord[unix-socket-support]' or install 'supervisor' separately.\n")
sys.exit(-1)
if all([options.username, options.password, ]): # with auth
connection = xmlrpclib.ServerProxy("https://", transport=supervisor.xmlrpc.SupervisorTransport(options.username, options.password, serverurl=URI[URI_TPL_SOCKET].format(**payload)))
else:
connection = xmlrpclib.ServerProxy("https://", transport=supervisor.xmlrpc.SupervisorTransport(None, None, serverurl=URI[URI_TPL_SOCKET].format(**payload)))
else: # communicate with server via http
if all([options.username, options.password, ]): # with auth
connection = xmlrpclib.Server(URI[URI_TPL_HTTP_AUTH].format(**payload))
else:
connection = xmlrpclib.Server(URI[URI_TPL_HTTP].format(**payload))
return connection.supervisor.getAllProcessInfo()
except Exception as error:
if not options.quiet:
sys.stdout.write("ERROR: Server communication problem. {error}\n".format(error=error))
sys.exit(EXIT_CODES.get(options.network_errors_exit_code, EXIT_CODE_UNKNOWN))
|
[
"Get",
"programs",
"statuses",
"."
] |
vint21h/nagios-check-supervisord
|
python
|
https://github.com/vint21h/nagios-check-supervisord/blob/a40a542499197a4b5658bd6cc3b34326fe8d0ada/check_supervisord.py#L146-L188
|
[
"def",
"get_status",
"(",
"options",
")",
":",
"payload",
"=",
"{",
"# server connection URI formatted string payload",
"\"username\"",
":",
"options",
".",
"username",
",",
"\"password\"",
":",
"options",
".",
"password",
",",
"\"server\"",
":",
"options",
".",
"server",
",",
"\"port\"",
":",
"options",
".",
"port",
",",
"}",
"try",
":",
"if",
"options",
".",
"server",
".",
"startswith",
"(",
"\"/\"",
")",
"and",
"stat",
".",
"S_ISSOCK",
"(",
"os",
".",
"stat",
"(",
"options",
".",
"server",
")",
".",
"st_mode",
")",
":",
"# communicate with server via unix socket (simple check is server address is path and path is unix socket)",
"try",
":",
"import",
"supervisor",
".",
"xmlrpc",
"except",
"ImportError",
"as",
"error",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"ERROR: Couldn't load module. {error}\\n\"",
".",
"format",
"(",
"error",
"=",
"error",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"ERROR: Unix socket support not available! Please install nagios-check-supervisord with unix socket support: 'nagios-check-supervisord[unix-socket-support]' or install 'supervisor' separately.\\n\"",
")",
"sys",
".",
"exit",
"(",
"-",
"1",
")",
"if",
"all",
"(",
"[",
"options",
".",
"username",
",",
"options",
".",
"password",
",",
"]",
")",
":",
"# with auth",
"connection",
"=",
"xmlrpclib",
".",
"ServerProxy",
"(",
"\"https://\"",
",",
"transport",
"=",
"supervisor",
".",
"xmlrpc",
".",
"SupervisorTransport",
"(",
"options",
".",
"username",
",",
"options",
".",
"password",
",",
"serverurl",
"=",
"URI",
"[",
"URI_TPL_SOCKET",
"]",
".",
"format",
"(",
"*",
"*",
"payload",
")",
")",
")",
"else",
":",
"connection",
"=",
"xmlrpclib",
".",
"ServerProxy",
"(",
"\"https://\"",
",",
"transport",
"=",
"supervisor",
".",
"xmlrpc",
".",
"SupervisorTransport",
"(",
"None",
",",
"None",
",",
"serverurl",
"=",
"URI",
"[",
"URI_TPL_SOCKET",
"]",
".",
"format",
"(",
"*",
"*",
"payload",
")",
")",
")",
"else",
":",
"# communicate with server via http",
"if",
"all",
"(",
"[",
"options",
".",
"username",
",",
"options",
".",
"password",
",",
"]",
")",
":",
"# with auth",
"connection",
"=",
"xmlrpclib",
".",
"Server",
"(",
"URI",
"[",
"URI_TPL_HTTP_AUTH",
"]",
".",
"format",
"(",
"*",
"*",
"payload",
")",
")",
"else",
":",
"connection",
"=",
"xmlrpclib",
".",
"Server",
"(",
"URI",
"[",
"URI_TPL_HTTP",
"]",
".",
"format",
"(",
"*",
"*",
"payload",
")",
")",
"return",
"connection",
".",
"supervisor",
".",
"getAllProcessInfo",
"(",
")",
"except",
"Exception",
"as",
"error",
":",
"if",
"not",
"options",
".",
"quiet",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"\"ERROR: Server communication problem. {error}\\n\"",
".",
"format",
"(",
"error",
"=",
"error",
")",
")",
"sys",
".",
"exit",
"(",
"EXIT_CODES",
".",
"get",
"(",
"options",
".",
"network_errors_exit_code",
",",
"EXIT_CODE_UNKNOWN",
")",
")"
] |
a40a542499197a4b5658bd6cc3b34326fe8d0ada
|
test
|
create_output
|
Create Nagios and human readable supervisord statuses.
:param data: supervisord XML-RPC call result.
:type data: dict.
:param options: parsed commandline arguments.
:type options: optparse.Values.
:return: Nagios and human readable supervisord statuses and exit code.
:rtype: (str, int).
|
check_supervisord.py
|
def create_output(data, options):
"""
Create Nagios and human readable supervisord statuses.
:param data: supervisord XML-RPC call result.
:type data: dict.
:param options: parsed commandline arguments.
:type options: optparse.Values.
:return: Nagios and human readable supervisord statuses and exit code.
:rtype: (str, int).
"""
output = {}
programs = map(strip, options.programs.strip().split(",")) if options.programs else map(lambda x: x["name"], data)
for program in programs:
try:
program_data = filter(lambda x: x["name"] == program, data)[0]
output.update({
program: {
"name": program,
"template": STATE2TEMPLATE[program_data["statename"]],
"status": program_data["spawnerr"] if program_data["spawnerr"] else program_data["statename"],
}
})
except IndexError:
output.update({
program: {
"name": program,
"template": "unknown",
"status": "",
}
})
# getting main status for check (for multiple check need to get main status by priority)
statuses = [status[0] for status in sorted([(status, OUTPUT_TEMPLATES[status]["priority"]) for status in list(set([output[d]["template"] for d in output.keys()]))], key=lambda x: x[1])]
# if no programs found or configured by supervisord set status ok and custom message
status = statuses[0] if statuses else EXIT_CODE_OK
text = ", ".join([OUTPUT_TEMPLATES[output[program]["template"]]["text"].format(**output[program]) for program in sorted(output.keys(), key=lambda x: OUTPUT_TEMPLATES[output[x]["template"]]["priority"])]) if statuses else "No program configured/found"
# create exit code (unknown if something happened wrong)
code = EXIT_CODES.get(status, EXIT_CODE_UNKNOWN)
# return full status string with main status for multiple programs and all programs states
return "{status}: {output}\n".format(**{
"status": status.upper(),
"output": text,
}), code
|
def create_output(data, options):
"""
Create Nagios and human readable supervisord statuses.
:param data: supervisord XML-RPC call result.
:type data: dict.
:param options: parsed commandline arguments.
:type options: optparse.Values.
:return: Nagios and human readable supervisord statuses and exit code.
:rtype: (str, int).
"""
output = {}
programs = map(strip, options.programs.strip().split(",")) if options.programs else map(lambda x: x["name"], data)
for program in programs:
try:
program_data = filter(lambda x: x["name"] == program, data)[0]
output.update({
program: {
"name": program,
"template": STATE2TEMPLATE[program_data["statename"]],
"status": program_data["spawnerr"] if program_data["spawnerr"] else program_data["statename"],
}
})
except IndexError:
output.update({
program: {
"name": program,
"template": "unknown",
"status": "",
}
})
# getting main status for check (for multiple check need to get main status by priority)
statuses = [status[0] for status in sorted([(status, OUTPUT_TEMPLATES[status]["priority"]) for status in list(set([output[d]["template"] for d in output.keys()]))], key=lambda x: x[1])]
# if no programs found or configured by supervisord set status ok and custom message
status = statuses[0] if statuses else EXIT_CODE_OK
text = ", ".join([OUTPUT_TEMPLATES[output[program]["template"]]["text"].format(**output[program]) for program in sorted(output.keys(), key=lambda x: OUTPUT_TEMPLATES[output[x]["template"]]["priority"])]) if statuses else "No program configured/found"
# create exit code (unknown if something happened wrong)
code = EXIT_CODES.get(status, EXIT_CODE_UNKNOWN)
# return full status string with main status for multiple programs and all programs states
return "{status}: {output}\n".format(**{
"status": status.upper(),
"output": text,
}), code
|
[
"Create",
"Nagios",
"and",
"human",
"readable",
"supervisord",
"statuses",
"."
] |
vint21h/nagios-check-supervisord
|
python
|
https://github.com/vint21h/nagios-check-supervisord/blob/a40a542499197a4b5658bd6cc3b34326fe8d0ada/check_supervisord.py#L191-L238
|
[
"def",
"create_output",
"(",
"data",
",",
"options",
")",
":",
"output",
"=",
"{",
"}",
"programs",
"=",
"map",
"(",
"strip",
",",
"options",
".",
"programs",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\",\"",
")",
")",
"if",
"options",
".",
"programs",
"else",
"map",
"(",
"lambda",
"x",
":",
"x",
"[",
"\"name\"",
"]",
",",
"data",
")",
"for",
"program",
"in",
"programs",
":",
"try",
":",
"program_data",
"=",
"filter",
"(",
"lambda",
"x",
":",
"x",
"[",
"\"name\"",
"]",
"==",
"program",
",",
"data",
")",
"[",
"0",
"]",
"output",
".",
"update",
"(",
"{",
"program",
":",
"{",
"\"name\"",
":",
"program",
",",
"\"template\"",
":",
"STATE2TEMPLATE",
"[",
"program_data",
"[",
"\"statename\"",
"]",
"]",
",",
"\"status\"",
":",
"program_data",
"[",
"\"spawnerr\"",
"]",
"if",
"program_data",
"[",
"\"spawnerr\"",
"]",
"else",
"program_data",
"[",
"\"statename\"",
"]",
",",
"}",
"}",
")",
"except",
"IndexError",
":",
"output",
".",
"update",
"(",
"{",
"program",
":",
"{",
"\"name\"",
":",
"program",
",",
"\"template\"",
":",
"\"unknown\"",
",",
"\"status\"",
":",
"\"\"",
",",
"}",
"}",
")",
"# getting main status for check (for multiple check need to get main status by priority)",
"statuses",
"=",
"[",
"status",
"[",
"0",
"]",
"for",
"status",
"in",
"sorted",
"(",
"[",
"(",
"status",
",",
"OUTPUT_TEMPLATES",
"[",
"status",
"]",
"[",
"\"priority\"",
"]",
")",
"for",
"status",
"in",
"list",
"(",
"set",
"(",
"[",
"output",
"[",
"d",
"]",
"[",
"\"template\"",
"]",
"for",
"d",
"in",
"output",
".",
"keys",
"(",
")",
"]",
")",
")",
"]",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"1",
"]",
")",
"]",
"# if no programs found or configured by supervisord set status ok and custom message",
"status",
"=",
"statuses",
"[",
"0",
"]",
"if",
"statuses",
"else",
"EXIT_CODE_OK",
"text",
"=",
"\", \"",
".",
"join",
"(",
"[",
"OUTPUT_TEMPLATES",
"[",
"output",
"[",
"program",
"]",
"[",
"\"template\"",
"]",
"]",
"[",
"\"text\"",
"]",
".",
"format",
"(",
"*",
"*",
"output",
"[",
"program",
"]",
")",
"for",
"program",
"in",
"sorted",
"(",
"output",
".",
"keys",
"(",
")",
",",
"key",
"=",
"lambda",
"x",
":",
"OUTPUT_TEMPLATES",
"[",
"output",
"[",
"x",
"]",
"[",
"\"template\"",
"]",
"]",
"[",
"\"priority\"",
"]",
")",
"]",
")",
"if",
"statuses",
"else",
"\"No program configured/found\"",
"# create exit code (unknown if something happened wrong)",
"code",
"=",
"EXIT_CODES",
".",
"get",
"(",
"status",
",",
"EXIT_CODE_UNKNOWN",
")",
"# return full status string with main status for multiple programs and all programs states",
"return",
"\"{status}: {output}\\n\"",
".",
"format",
"(",
"*",
"*",
"{",
"\"status\"",
":",
"status",
".",
"upper",
"(",
")",
",",
"\"output\"",
":",
"text",
",",
"}",
")",
",",
"code"
] |
a40a542499197a4b5658bd6cc3b34326fe8d0ada
|
test
|
main
|
Program main.
|
check_supervisord.py
|
def main():
"""
Program main.
"""
options = parse_options()
output, code = create_output(get_status(options), options)
sys.stdout.write(output)
sys.exit(code)
|
def main():
"""
Program main.
"""
options = parse_options()
output, code = create_output(get_status(options), options)
sys.stdout.write(output)
sys.exit(code)
|
[
"Program",
"main",
"."
] |
vint21h/nagios-check-supervisord
|
python
|
https://github.com/vint21h/nagios-check-supervisord/blob/a40a542499197a4b5658bd6cc3b34326fe8d0ada/check_supervisord.py#L241-L249
|
[
"def",
"main",
"(",
")",
":",
"options",
"=",
"parse_options",
"(",
")",
"output",
",",
"code",
"=",
"create_output",
"(",
"get_status",
"(",
"options",
")",
",",
"options",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"output",
")",
"sys",
".",
"exit",
"(",
"code",
")"
] |
a40a542499197a4b5658bd6cc3b34326fe8d0ada
|
test
|
validate
|
Validate a decoded SNS message.
Parameters:
message:
Decoded SNS message.
get_certificate:
Function that receives a URL, and returns the certificate from that
URL as a string. The default doesn't implement caching.
certificate_url_regex:
Regex that validates the signing certificate URL. Default value
checks it's hosted on an AWS-controlled domain, in the format
"https://sns.<data-center>.amazonaws.com/"
max_age:
Maximum age of an SNS message before it fails validation, expressed
as a `datetime.timedelta`. Defaults to one hour, the max. lifetime
of an SNS message.
|
src/validatesns/__init__.py
|
def validate(
message,
get_certificate=lambda url: urlopen(url).read(),
certificate_url_regex=DEFAULT_CERTIFICATE_URL_REGEX,
max_age=DEFAULT_MAX_AGE
):
"""
Validate a decoded SNS message.
Parameters:
message:
Decoded SNS message.
get_certificate:
Function that receives a URL, and returns the certificate from that
URL as a string. The default doesn't implement caching.
certificate_url_regex:
Regex that validates the signing certificate URL. Default value
checks it's hosted on an AWS-controlled domain, in the format
"https://sns.<data-center>.amazonaws.com/"
max_age:
Maximum age of an SNS message before it fails validation, expressed
as a `datetime.timedelta`. Defaults to one hour, the max. lifetime
of an SNS message.
"""
# Check the signing certicate URL.
SigningCertURLValidator(certificate_url_regex).validate(message)
# Check the message age.
if not isinstance(max_age, datetime.timedelta):
raise ValueError("max_age must be None or a timedelta object")
MessageAgeValidator(max_age).validate(message)
# Passed the basic checks, let's download the cert.
# We've validated the URL, so aren't worried about a malicious server.
certificate = get_certificate(message["SigningCertURL"])
# Check the cryptographic signature.
SignatureValidator(certificate).validate(message)
|
def validate(
message,
get_certificate=lambda url: urlopen(url).read(),
certificate_url_regex=DEFAULT_CERTIFICATE_URL_REGEX,
max_age=DEFAULT_MAX_AGE
):
"""
Validate a decoded SNS message.
Parameters:
message:
Decoded SNS message.
get_certificate:
Function that receives a URL, and returns the certificate from that
URL as a string. The default doesn't implement caching.
certificate_url_regex:
Regex that validates the signing certificate URL. Default value
checks it's hosted on an AWS-controlled domain, in the format
"https://sns.<data-center>.amazonaws.com/"
max_age:
Maximum age of an SNS message before it fails validation, expressed
as a `datetime.timedelta`. Defaults to one hour, the max. lifetime
of an SNS message.
"""
# Check the signing certicate URL.
SigningCertURLValidator(certificate_url_regex).validate(message)
# Check the message age.
if not isinstance(max_age, datetime.timedelta):
raise ValueError("max_age must be None or a timedelta object")
MessageAgeValidator(max_age).validate(message)
# Passed the basic checks, let's download the cert.
# We've validated the URL, so aren't worried about a malicious server.
certificate = get_certificate(message["SigningCertURL"])
# Check the cryptographic signature.
SignatureValidator(certificate).validate(message)
|
[
"Validate",
"a",
"decoded",
"SNS",
"message",
"."
] |
nathforge/validatesns
|
python
|
https://github.com/nathforge/validatesns/blob/39f7f7d1fae215746bb9763856045b501fae05f4/src/validatesns/__init__.py#L28-L69
|
[
"def",
"validate",
"(",
"message",
",",
"get_certificate",
"=",
"lambda",
"url",
":",
"urlopen",
"(",
"url",
")",
".",
"read",
"(",
")",
",",
"certificate_url_regex",
"=",
"DEFAULT_CERTIFICATE_URL_REGEX",
",",
"max_age",
"=",
"DEFAULT_MAX_AGE",
")",
":",
"# Check the signing certicate URL.",
"SigningCertURLValidator",
"(",
"certificate_url_regex",
")",
".",
"validate",
"(",
"message",
")",
"# Check the message age.",
"if",
"not",
"isinstance",
"(",
"max_age",
",",
"datetime",
".",
"timedelta",
")",
":",
"raise",
"ValueError",
"(",
"\"max_age must be None or a timedelta object\"",
")",
"MessageAgeValidator",
"(",
"max_age",
")",
".",
"validate",
"(",
"message",
")",
"# Passed the basic checks, let's download the cert.",
"# We've validated the URL, so aren't worried about a malicious server.",
"certificate",
"=",
"get_certificate",
"(",
"message",
"[",
"\"SigningCertURL\"",
"]",
")",
"# Check the cryptographic signature.",
"SignatureValidator",
"(",
"certificate",
")",
".",
"validate",
"(",
"message",
")"
] |
39f7f7d1fae215746bb9763856045b501fae05f4
|
test
|
write_fcs
|
Write numpy data to an .fcs file (FCS3.0 file format)
Parameters
----------
filename: str or pathlib.Path
Path to the output .fcs file
ch_names: list of str, length C
Names of the output channels
data: 2d ndarray of shape (N,C)
The numpy array data to store as .fcs file format.
endianness: str
Set to "little" or "big" to define the byte order used.
compat_chn_names: bool
Compatibility mode for 3rd party flow analysis software:
The characters " ", "?", and "_" are removed in the output
channel names.
compat_copy: bool
Do not override the input array `data` when modified in
compatibility mode.
compat_negative: bool
Compatibliity mode for 3rd party flow analysis software:
Flip the sign of `data` if its mean is smaller than zero.
compat_percent: bool
Compatibliity mode for 3rd party flow analysis software:
If a column in `data` contains values only between 0 and 1,
they are multiplied by 100.
compat_max_int16: int
Compatibliity mode for 3rd party flow analysis software:
If a column in `data` has a maximum above this value,
then the display-maximum is set to 2**15.
Notes
-----
- These commonly used unicode characters are replaced: "µ", "²"
- If the input data contain NaN values, the corresponding rows
are excluded due to incompatibility with the FCS file format.
|
fcswrite/fcswrite.py
|
def write_fcs(filename, chn_names, data,
endianness="big",
compat_chn_names=True,
compat_copy=True,
compat_negative=True,
compat_percent=True,
compat_max_int16=10000):
"""Write numpy data to an .fcs file (FCS3.0 file format)
Parameters
----------
filename: str or pathlib.Path
Path to the output .fcs file
ch_names: list of str, length C
Names of the output channels
data: 2d ndarray of shape (N,C)
The numpy array data to store as .fcs file format.
endianness: str
Set to "little" or "big" to define the byte order used.
compat_chn_names: bool
Compatibility mode for 3rd party flow analysis software:
The characters " ", "?", and "_" are removed in the output
channel names.
compat_copy: bool
Do not override the input array `data` when modified in
compatibility mode.
compat_negative: bool
Compatibliity mode for 3rd party flow analysis software:
Flip the sign of `data` if its mean is smaller than zero.
compat_percent: bool
Compatibliity mode for 3rd party flow analysis software:
If a column in `data` contains values only between 0 and 1,
they are multiplied by 100.
compat_max_int16: int
Compatibliity mode for 3rd party flow analysis software:
If a column in `data` has a maximum above this value,
then the display-maximum is set to 2**15.
Notes
-----
- These commonly used unicode characters are replaced: "µ", "²"
- If the input data contain NaN values, the corresponding rows
are excluded due to incompatibility with the FCS file format.
"""
filename = pathlib.Path(filename)
if not isinstance(data, np.ndarray):
data = np.array(data, dtype=float)
# remove rows with nan values
nanrows = np.isnan(data).any(axis=1)
if np.sum(nanrows):
msg = "Rows containing NaNs are not written to {}!".format(filename)
warnings.warn(msg)
data = data[~nanrows]
if endianness not in ["little", "big"]:
raise ValueError("`endianness` must be 'little' or 'big'!")
msg = "length of `chn_names` must match length of 2nd axis of `data`"
assert len(chn_names) == data.shape[1], msg
rpl = [["µ", "u"],
["²", "2"],
]
if compat_chn_names:
# Compatibility mode: Clean up headers.
rpl += [[" ", ""],
["?", ""],
["_", ""],
]
for ii in range(len(chn_names)):
for (a, b) in rpl:
chn_names[ii] = chn_names[ii].replace(a, b)
# Data with values between 0 and 1
pcnt_cands = []
for ch in range(data.shape[1]):
if data[:, ch].min() >= 0 and data[:, ch].max() <= 1:
pcnt_cands.append(ch)
if compat_percent and pcnt_cands:
# Compatibility mode: Scale values b/w 0 and 1 to percent
if compat_copy:
# copy if requested
data = data.copy()
for ch in pcnt_cands:
data[:, ch] *= 100
if compat_negative:
toflip = []
for ch in range(data.shape[1]):
if np.mean(data[:, ch]) < 0:
toflip.append(ch)
if len(toflip):
if compat_copy:
# copy if requested
data = data.copy()
for ch in toflip:
data[:, ch] *= -1
# DATA segment
data1 = data.flatten().tolist()
DATA = struct.pack('>%sf' % len(data1), *data1)
# TEXT segment
header_size = 256
if endianness == "little":
# use little endian
byteord = '1,2,3,4'
else:
# use big endian
byteord = '4,3,2,1'
TEXT = '/$BEGINANALYSIS/0/$ENDANALYSIS/0'
TEXT += '/$BEGINSTEXT/0/$ENDSTEXT/0'
# Add placeholders for $BEGINDATA and $ENDDATA, because we don't
# know yet how long TEXT is.
TEXT += '/$BEGINDATA/{data_start_byte}/$ENDDATA/{data_end_byte}'
TEXT += '/$BYTEORD/{0}/$DATATYPE/F'.format(byteord)
TEXT += '/$MODE/L/$NEXTDATA/0/$TOT/{0}'.format(data.shape[0])
TEXT += '/$PAR/{0}'.format(data.shape[1])
# Check for content of data columns and set range
for jj in range(data.shape[1]):
# Set data maximum to that of int16
if (compat_max_int16 and
np.max(data[:, jj]) > compat_max_int16 and
np.max(data[:, jj]) < 2**15):
pnrange = int(2**15)
# Set range for data with values between 0 and 1
elif jj in pcnt_cands:
if compat_percent: # scaled to 100%
pnrange = 100
else: # not scaled
pnrange = 1
# default: set range to maxium value found in column
else:
pnrange = int(abs(np.max(data[:, jj])))
# TODO:
# - Set log/lin
fmt_str = '/$P{0}B/32/$P{0}E/0,0/$P{0}N/{1}/$P{0}R/{2}/$P{0}D/Linear'
TEXT += fmt_str.format(jj+1, chn_names[jj], pnrange)
TEXT += '/'
# SET $BEGINDATA and $ENDDATA using the current size of TEXT plus padding.
text_padding = 47 # for visual separation and safety
data_start_byte = header_size + len(TEXT) + text_padding
data_end_byte = data_start_byte + len(DATA) - 1
TEXT = TEXT.format(data_start_byte=data_start_byte,
data_end_byte=data_end_byte)
lentxt = len(TEXT)
# Pad TEXT segment with spaces until data_start_byte
TEXT = TEXT.ljust(data_start_byte - header_size, " ")
# HEADER segment
ver = 'FCS3.0'
textfirst = '{0: >8}'.format(header_size)
textlast = '{0: >8}'.format(lentxt + header_size - 1)
# Starting with FCS 3.0, data segment can end beyond byte 99,999,999,
# in which case a zero is written in each of the two header fields (the
# values are given in the text segment keywords $BEGINDATA and $ENDDATA)
if data_end_byte <= 99999999:
datafirst = '{0: >8}'.format(data_start_byte)
datalast = '{0: >8}'.format(data_end_byte)
else:
datafirst = '{0: >8}'.format(0)
datalast = '{0: >8}'.format(0)
anafirst = '{0: >8}'.format(0)
analast = '{0: >8}'.format(0)
HEADER = '{0: <256}'.format(ver + ' '
+ textfirst
+ textlast
+ datafirst
+ datalast
+ anafirst
+ analast)
# Write data
with filename.open("wb") as fd:
fd.write(HEADER.encode("ascii", "replace"))
fd.write(TEXT.encode("ascii", "replace"))
fd.write(DATA)
fd.write(b'00000000')
|
def write_fcs(filename, chn_names, data,
endianness="big",
compat_chn_names=True,
compat_copy=True,
compat_negative=True,
compat_percent=True,
compat_max_int16=10000):
"""Write numpy data to an .fcs file (FCS3.0 file format)
Parameters
----------
filename: str or pathlib.Path
Path to the output .fcs file
ch_names: list of str, length C
Names of the output channels
data: 2d ndarray of shape (N,C)
The numpy array data to store as .fcs file format.
endianness: str
Set to "little" or "big" to define the byte order used.
compat_chn_names: bool
Compatibility mode for 3rd party flow analysis software:
The characters " ", "?", and "_" are removed in the output
channel names.
compat_copy: bool
Do not override the input array `data` when modified in
compatibility mode.
compat_negative: bool
Compatibliity mode for 3rd party flow analysis software:
Flip the sign of `data` if its mean is smaller than zero.
compat_percent: bool
Compatibliity mode for 3rd party flow analysis software:
If a column in `data` contains values only between 0 and 1,
they are multiplied by 100.
compat_max_int16: int
Compatibliity mode for 3rd party flow analysis software:
If a column in `data` has a maximum above this value,
then the display-maximum is set to 2**15.
Notes
-----
- These commonly used unicode characters are replaced: "µ", "²"
- If the input data contain NaN values, the corresponding rows
are excluded due to incompatibility with the FCS file format.
"""
filename = pathlib.Path(filename)
if not isinstance(data, np.ndarray):
data = np.array(data, dtype=float)
# remove rows with nan values
nanrows = np.isnan(data).any(axis=1)
if np.sum(nanrows):
msg = "Rows containing NaNs are not written to {}!".format(filename)
warnings.warn(msg)
data = data[~nanrows]
if endianness not in ["little", "big"]:
raise ValueError("`endianness` must be 'little' or 'big'!")
msg = "length of `chn_names` must match length of 2nd axis of `data`"
assert len(chn_names) == data.shape[1], msg
rpl = [["µ", "u"],
["²", "2"],
]
if compat_chn_names:
# Compatibility mode: Clean up headers.
rpl += [[" ", ""],
["?", ""],
["_", ""],
]
for ii in range(len(chn_names)):
for (a, b) in rpl:
chn_names[ii] = chn_names[ii].replace(a, b)
# Data with values between 0 and 1
pcnt_cands = []
for ch in range(data.shape[1]):
if data[:, ch].min() >= 0 and data[:, ch].max() <= 1:
pcnt_cands.append(ch)
if compat_percent and pcnt_cands:
# Compatibility mode: Scale values b/w 0 and 1 to percent
if compat_copy:
# copy if requested
data = data.copy()
for ch in pcnt_cands:
data[:, ch] *= 100
if compat_negative:
toflip = []
for ch in range(data.shape[1]):
if np.mean(data[:, ch]) < 0:
toflip.append(ch)
if len(toflip):
if compat_copy:
# copy if requested
data = data.copy()
for ch in toflip:
data[:, ch] *= -1
# DATA segment
data1 = data.flatten().tolist()
DATA = struct.pack('>%sf' % len(data1), *data1)
# TEXT segment
header_size = 256
if endianness == "little":
# use little endian
byteord = '1,2,3,4'
else:
# use big endian
byteord = '4,3,2,1'
TEXT = '/$BEGINANALYSIS/0/$ENDANALYSIS/0'
TEXT += '/$BEGINSTEXT/0/$ENDSTEXT/0'
# Add placeholders for $BEGINDATA and $ENDDATA, because we don't
# know yet how long TEXT is.
TEXT += '/$BEGINDATA/{data_start_byte}/$ENDDATA/{data_end_byte}'
TEXT += '/$BYTEORD/{0}/$DATATYPE/F'.format(byteord)
TEXT += '/$MODE/L/$NEXTDATA/0/$TOT/{0}'.format(data.shape[0])
TEXT += '/$PAR/{0}'.format(data.shape[1])
# Check for content of data columns and set range
for jj in range(data.shape[1]):
# Set data maximum to that of int16
if (compat_max_int16 and
np.max(data[:, jj]) > compat_max_int16 and
np.max(data[:, jj]) < 2**15):
pnrange = int(2**15)
# Set range for data with values between 0 and 1
elif jj in pcnt_cands:
if compat_percent: # scaled to 100%
pnrange = 100
else: # not scaled
pnrange = 1
# default: set range to maxium value found in column
else:
pnrange = int(abs(np.max(data[:, jj])))
# TODO:
# - Set log/lin
fmt_str = '/$P{0}B/32/$P{0}E/0,0/$P{0}N/{1}/$P{0}R/{2}/$P{0}D/Linear'
TEXT += fmt_str.format(jj+1, chn_names[jj], pnrange)
TEXT += '/'
# SET $BEGINDATA and $ENDDATA using the current size of TEXT plus padding.
text_padding = 47 # for visual separation and safety
data_start_byte = header_size + len(TEXT) + text_padding
data_end_byte = data_start_byte + len(DATA) - 1
TEXT = TEXT.format(data_start_byte=data_start_byte,
data_end_byte=data_end_byte)
lentxt = len(TEXT)
# Pad TEXT segment with spaces until data_start_byte
TEXT = TEXT.ljust(data_start_byte - header_size, " ")
# HEADER segment
ver = 'FCS3.0'
textfirst = '{0: >8}'.format(header_size)
textlast = '{0: >8}'.format(lentxt + header_size - 1)
# Starting with FCS 3.0, data segment can end beyond byte 99,999,999,
# in which case a zero is written in each of the two header fields (the
# values are given in the text segment keywords $BEGINDATA and $ENDDATA)
if data_end_byte <= 99999999:
datafirst = '{0: >8}'.format(data_start_byte)
datalast = '{0: >8}'.format(data_end_byte)
else:
datafirst = '{0: >8}'.format(0)
datalast = '{0: >8}'.format(0)
anafirst = '{0: >8}'.format(0)
analast = '{0: >8}'.format(0)
HEADER = '{0: <256}'.format(ver + ' '
+ textfirst
+ textlast
+ datafirst
+ datalast
+ anafirst
+ analast)
# Write data
with filename.open("wb") as fd:
fd.write(HEADER.encode("ascii", "replace"))
fd.write(TEXT.encode("ascii", "replace"))
fd.write(DATA)
fd.write(b'00000000')
|
[
"Write",
"numpy",
"data",
"to",
"an",
".",
"fcs",
"file",
"(",
"FCS3",
".",
"0",
"file",
"format",
")"
] |
ZELLMECHANIK-DRESDEN/fcswrite
|
python
|
https://github.com/ZELLMECHANIK-DRESDEN/fcswrite/blob/5584983aa1eb927660183252039e73285c0724b3/fcswrite/fcswrite.py#L13-L201
|
[
"def",
"write_fcs",
"(",
"filename",
",",
"chn_names",
",",
"data",
",",
"endianness",
"=",
"\"big\"",
",",
"compat_chn_names",
"=",
"True",
",",
"compat_copy",
"=",
"True",
",",
"compat_negative",
"=",
"True",
",",
"compat_percent",
"=",
"True",
",",
"compat_max_int16",
"=",
"10000",
")",
":",
"filename",
"=",
"pathlib",
".",
"Path",
"(",
"filename",
")",
"if",
"not",
"isinstance",
"(",
"data",
",",
"np",
".",
"ndarray",
")",
":",
"data",
"=",
"np",
".",
"array",
"(",
"data",
",",
"dtype",
"=",
"float",
")",
"# remove rows with nan values",
"nanrows",
"=",
"np",
".",
"isnan",
"(",
"data",
")",
".",
"any",
"(",
"axis",
"=",
"1",
")",
"if",
"np",
".",
"sum",
"(",
"nanrows",
")",
":",
"msg",
"=",
"\"Rows containing NaNs are not written to {}!\"",
".",
"format",
"(",
"filename",
")",
"warnings",
".",
"warn",
"(",
"msg",
")",
"data",
"=",
"data",
"[",
"~",
"nanrows",
"]",
"if",
"endianness",
"not",
"in",
"[",
"\"little\"",
",",
"\"big\"",
"]",
":",
"raise",
"ValueError",
"(",
"\"`endianness` must be 'little' or 'big'!\"",
")",
"msg",
"=",
"\"length of `chn_names` must match length of 2nd axis of `data`\"",
"assert",
"len",
"(",
"chn_names",
")",
"==",
"data",
".",
"shape",
"[",
"1",
"]",
",",
"msg",
"rpl",
"=",
"[",
"[",
"\"µ\",",
" ",
"u\"]",
",",
"",
"[",
"\"²\",",
" ",
"2\"]",
",",
"",
"]",
"if",
"compat_chn_names",
":",
"# Compatibility mode: Clean up headers.",
"rpl",
"+=",
"[",
"[",
"\" \"",
",",
"\"\"",
"]",
",",
"[",
"\"?\"",
",",
"\"\"",
"]",
",",
"[",
"\"_\"",
",",
"\"\"",
"]",
",",
"]",
"for",
"ii",
"in",
"range",
"(",
"len",
"(",
"chn_names",
")",
")",
":",
"for",
"(",
"a",
",",
"b",
")",
"in",
"rpl",
":",
"chn_names",
"[",
"ii",
"]",
"=",
"chn_names",
"[",
"ii",
"]",
".",
"replace",
"(",
"a",
",",
"b",
")",
"# Data with values between 0 and 1",
"pcnt_cands",
"=",
"[",
"]",
"for",
"ch",
"in",
"range",
"(",
"data",
".",
"shape",
"[",
"1",
"]",
")",
":",
"if",
"data",
"[",
":",
",",
"ch",
"]",
".",
"min",
"(",
")",
">=",
"0",
"and",
"data",
"[",
":",
",",
"ch",
"]",
".",
"max",
"(",
")",
"<=",
"1",
":",
"pcnt_cands",
".",
"append",
"(",
"ch",
")",
"if",
"compat_percent",
"and",
"pcnt_cands",
":",
"# Compatibility mode: Scale values b/w 0 and 1 to percent",
"if",
"compat_copy",
":",
"# copy if requested",
"data",
"=",
"data",
".",
"copy",
"(",
")",
"for",
"ch",
"in",
"pcnt_cands",
":",
"data",
"[",
":",
",",
"ch",
"]",
"*=",
"100",
"if",
"compat_negative",
":",
"toflip",
"=",
"[",
"]",
"for",
"ch",
"in",
"range",
"(",
"data",
".",
"shape",
"[",
"1",
"]",
")",
":",
"if",
"np",
".",
"mean",
"(",
"data",
"[",
":",
",",
"ch",
"]",
")",
"<",
"0",
":",
"toflip",
".",
"append",
"(",
"ch",
")",
"if",
"len",
"(",
"toflip",
")",
":",
"if",
"compat_copy",
":",
"# copy if requested",
"data",
"=",
"data",
".",
"copy",
"(",
")",
"for",
"ch",
"in",
"toflip",
":",
"data",
"[",
":",
",",
"ch",
"]",
"*=",
"-",
"1",
"# DATA segment",
"data1",
"=",
"data",
".",
"flatten",
"(",
")",
".",
"tolist",
"(",
")",
"DATA",
"=",
"struct",
".",
"pack",
"(",
"'>%sf'",
"%",
"len",
"(",
"data1",
")",
",",
"*",
"data1",
")",
"# TEXT segment",
"header_size",
"=",
"256",
"if",
"endianness",
"==",
"\"little\"",
":",
"# use little endian",
"byteord",
"=",
"'1,2,3,4'",
"else",
":",
"# use big endian",
"byteord",
"=",
"'4,3,2,1'",
"TEXT",
"=",
"'/$BEGINANALYSIS/0/$ENDANALYSIS/0'",
"TEXT",
"+=",
"'/$BEGINSTEXT/0/$ENDSTEXT/0'",
"# Add placeholders for $BEGINDATA and $ENDDATA, because we don't",
"# know yet how long TEXT is.",
"TEXT",
"+=",
"'/$BEGINDATA/{data_start_byte}/$ENDDATA/{data_end_byte}'",
"TEXT",
"+=",
"'/$BYTEORD/{0}/$DATATYPE/F'",
".",
"format",
"(",
"byteord",
")",
"TEXT",
"+=",
"'/$MODE/L/$NEXTDATA/0/$TOT/{0}'",
".",
"format",
"(",
"data",
".",
"shape",
"[",
"0",
"]",
")",
"TEXT",
"+=",
"'/$PAR/{0}'",
".",
"format",
"(",
"data",
".",
"shape",
"[",
"1",
"]",
")",
"# Check for content of data columns and set range",
"for",
"jj",
"in",
"range",
"(",
"data",
".",
"shape",
"[",
"1",
"]",
")",
":",
"# Set data maximum to that of int16",
"if",
"(",
"compat_max_int16",
"and",
"np",
".",
"max",
"(",
"data",
"[",
":",
",",
"jj",
"]",
")",
">",
"compat_max_int16",
"and",
"np",
".",
"max",
"(",
"data",
"[",
":",
",",
"jj",
"]",
")",
"<",
"2",
"**",
"15",
")",
":",
"pnrange",
"=",
"int",
"(",
"2",
"**",
"15",
")",
"# Set range for data with values between 0 and 1",
"elif",
"jj",
"in",
"pcnt_cands",
":",
"if",
"compat_percent",
":",
"# scaled to 100%",
"pnrange",
"=",
"100",
"else",
":",
"# not scaled",
"pnrange",
"=",
"1",
"# default: set range to maxium value found in column",
"else",
":",
"pnrange",
"=",
"int",
"(",
"abs",
"(",
"np",
".",
"max",
"(",
"data",
"[",
":",
",",
"jj",
"]",
")",
")",
")",
"# TODO:",
"# - Set log/lin",
"fmt_str",
"=",
"'/$P{0}B/32/$P{0}E/0,0/$P{0}N/{1}/$P{0}R/{2}/$P{0}D/Linear'",
"TEXT",
"+=",
"fmt_str",
".",
"format",
"(",
"jj",
"+",
"1",
",",
"chn_names",
"[",
"jj",
"]",
",",
"pnrange",
")",
"TEXT",
"+=",
"'/'",
"# SET $BEGINDATA and $ENDDATA using the current size of TEXT plus padding.",
"text_padding",
"=",
"47",
"# for visual separation and safety",
"data_start_byte",
"=",
"header_size",
"+",
"len",
"(",
"TEXT",
")",
"+",
"text_padding",
"data_end_byte",
"=",
"data_start_byte",
"+",
"len",
"(",
"DATA",
")",
"-",
"1",
"TEXT",
"=",
"TEXT",
".",
"format",
"(",
"data_start_byte",
"=",
"data_start_byte",
",",
"data_end_byte",
"=",
"data_end_byte",
")",
"lentxt",
"=",
"len",
"(",
"TEXT",
")",
"# Pad TEXT segment with spaces until data_start_byte",
"TEXT",
"=",
"TEXT",
".",
"ljust",
"(",
"data_start_byte",
"-",
"header_size",
",",
"\" \"",
")",
"# HEADER segment",
"ver",
"=",
"'FCS3.0'",
"textfirst",
"=",
"'{0: >8}'",
".",
"format",
"(",
"header_size",
")",
"textlast",
"=",
"'{0: >8}'",
".",
"format",
"(",
"lentxt",
"+",
"header_size",
"-",
"1",
")",
"# Starting with FCS 3.0, data segment can end beyond byte 99,999,999,",
"# in which case a zero is written in each of the two header fields (the",
"# values are given in the text segment keywords $BEGINDATA and $ENDDATA)",
"if",
"data_end_byte",
"<=",
"99999999",
":",
"datafirst",
"=",
"'{0: >8}'",
".",
"format",
"(",
"data_start_byte",
")",
"datalast",
"=",
"'{0: >8}'",
".",
"format",
"(",
"data_end_byte",
")",
"else",
":",
"datafirst",
"=",
"'{0: >8}'",
".",
"format",
"(",
"0",
")",
"datalast",
"=",
"'{0: >8}'",
".",
"format",
"(",
"0",
")",
"anafirst",
"=",
"'{0: >8}'",
".",
"format",
"(",
"0",
")",
"analast",
"=",
"'{0: >8}'",
".",
"format",
"(",
"0",
")",
"HEADER",
"=",
"'{0: <256}'",
".",
"format",
"(",
"ver",
"+",
"' '",
"+",
"textfirst",
"+",
"textlast",
"+",
"datafirst",
"+",
"datalast",
"+",
"anafirst",
"+",
"analast",
")",
"# Write data",
"with",
"filename",
".",
"open",
"(",
"\"wb\"",
")",
"as",
"fd",
":",
"fd",
".",
"write",
"(",
"HEADER",
".",
"encode",
"(",
"\"ascii\"",
",",
"\"replace\"",
")",
")",
"fd",
".",
"write",
"(",
"TEXT",
".",
"encode",
"(",
"\"ascii\"",
",",
"\"replace\"",
")",
")",
"fd",
".",
"write",
"(",
"DATA",
")",
"fd",
".",
"write",
"(",
"b'00000000'",
")"
] |
5584983aa1eb927660183252039e73285c0724b3
|
test
|
read_tdms
|
Read tdms file and return channel names and data
|
examples/tdms2fcs.py
|
def read_tdms(tdms_file):
"""Read tdms file and return channel names and data"""
tdms_file = nptdms.TdmsFile(tdms_file)
ch_names = []
ch_data = []
for o in tdms_file.objects.values():
if o.data is not None and len(o.data):
chn = o.path.split('/')[-1].strip("'")
if "unit_string" in o.properties:
unit = o.properties["unit_string"]
ch_names.append("{} [{}]".format(chn, unit))
else:
ch_names.append(chn)
ch_data.append(o.data)
return ch_names, ch_data
|
def read_tdms(tdms_file):
"""Read tdms file and return channel names and data"""
tdms_file = nptdms.TdmsFile(tdms_file)
ch_names = []
ch_data = []
for o in tdms_file.objects.values():
if o.data is not None and len(o.data):
chn = o.path.split('/')[-1].strip("'")
if "unit_string" in o.properties:
unit = o.properties["unit_string"]
ch_names.append("{} [{}]".format(chn, unit))
else:
ch_names.append(chn)
ch_data.append(o.data)
return ch_names, ch_data
|
[
"Read",
"tdms",
"file",
"and",
"return",
"channel",
"names",
"and",
"data"
] |
ZELLMECHANIK-DRESDEN/fcswrite
|
python
|
https://github.com/ZELLMECHANIK-DRESDEN/fcswrite/blob/5584983aa1eb927660183252039e73285c0724b3/examples/tdms2fcs.py#L12-L29
|
[
"def",
"read_tdms",
"(",
"tdms_file",
")",
":",
"tdms_file",
"=",
"nptdms",
".",
"TdmsFile",
"(",
"tdms_file",
")",
"ch_names",
"=",
"[",
"]",
"ch_data",
"=",
"[",
"]",
"for",
"o",
"in",
"tdms_file",
".",
"objects",
".",
"values",
"(",
")",
":",
"if",
"o",
".",
"data",
"is",
"not",
"None",
"and",
"len",
"(",
"o",
".",
"data",
")",
":",
"chn",
"=",
"o",
".",
"path",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
".",
"strip",
"(",
"\"'\"",
")",
"if",
"\"unit_string\"",
"in",
"o",
".",
"properties",
":",
"unit",
"=",
"o",
".",
"properties",
"[",
"\"unit_string\"",
"]",
"ch_names",
".",
"append",
"(",
"\"{} [{}]\"",
".",
"format",
"(",
"chn",
",",
"unit",
")",
")",
"else",
":",
"ch_names",
".",
"append",
"(",
"chn",
")",
"ch_data",
".",
"append",
"(",
"o",
".",
"data",
")",
"return",
"ch_names",
",",
"ch_data"
] |
5584983aa1eb927660183252039e73285c0724b3
|
test
|
add_deformation
|
From circularity, compute the deformation
This method is useful for RT-DC data sets that contain
the circularity but not the deformation.
|
examples/tdms2fcs.py
|
def add_deformation(chn_names, data):
"""From circularity, compute the deformation
This method is useful for RT-DC data sets that contain
the circularity but not the deformation.
"""
if "deformation" not in chn_names:
for ii, ch in enumerate(chn_names):
if ch == "circularity":
chn_names.append("deformation")
data.append(1-data[ii])
return chn_names, data
|
def add_deformation(chn_names, data):
"""From circularity, compute the deformation
This method is useful for RT-DC data sets that contain
the circularity but not the deformation.
"""
if "deformation" not in chn_names:
for ii, ch in enumerate(chn_names):
if ch == "circularity":
chn_names.append("deformation")
data.append(1-data[ii])
return chn_names, data
|
[
"From",
"circularity",
"compute",
"the",
"deformation"
] |
ZELLMECHANIK-DRESDEN/fcswrite
|
python
|
https://github.com/ZELLMECHANIK-DRESDEN/fcswrite/blob/5584983aa1eb927660183252039e73285c0724b3/examples/tdms2fcs.py#L32-L44
|
[
"def",
"add_deformation",
"(",
"chn_names",
",",
"data",
")",
":",
"if",
"\"deformation\"",
"not",
"in",
"chn_names",
":",
"for",
"ii",
",",
"ch",
"in",
"enumerate",
"(",
"chn_names",
")",
":",
"if",
"ch",
"==",
"\"circularity\"",
":",
"chn_names",
".",
"append",
"(",
"\"deformation\"",
")",
"data",
".",
"append",
"(",
"1",
"-",
"data",
"[",
"ii",
"]",
")",
"return",
"chn_names",
",",
"data"
] |
5584983aa1eb927660183252039e73285c0724b3
|
test
|
tdms2fcs
|
Creates an fcs file for a given tdms file
|
examples/tdms2fcs.py
|
def tdms2fcs(tdms_file):
"""Creates an fcs file for a given tdms file"""
fcs_file = tdms_file[:-4]+"fcs"
chn_names, data = read_tdms(tdms_file)
chn_names, data = add_deformation(chn_names, data)
fcswrite.write_fcs(filename=fcs_file,
chn_names=chn_names,
data=np.array(data).transpose())
|
def tdms2fcs(tdms_file):
"""Creates an fcs file for a given tdms file"""
fcs_file = tdms_file[:-4]+"fcs"
chn_names, data = read_tdms(tdms_file)
chn_names, data = add_deformation(chn_names, data)
fcswrite.write_fcs(filename=fcs_file,
chn_names=chn_names,
data=np.array(data).transpose())
|
[
"Creates",
"an",
"fcs",
"file",
"for",
"a",
"given",
"tdms",
"file"
] |
ZELLMECHANIK-DRESDEN/fcswrite
|
python
|
https://github.com/ZELLMECHANIK-DRESDEN/fcswrite/blob/5584983aa1eb927660183252039e73285c0724b3/examples/tdms2fcs.py#L47-L54
|
[
"def",
"tdms2fcs",
"(",
"tdms_file",
")",
":",
"fcs_file",
"=",
"tdms_file",
"[",
":",
"-",
"4",
"]",
"+",
"\"fcs\"",
"chn_names",
",",
"data",
"=",
"read_tdms",
"(",
"tdms_file",
")",
"chn_names",
",",
"data",
"=",
"add_deformation",
"(",
"chn_names",
",",
"data",
")",
"fcswrite",
".",
"write_fcs",
"(",
"filename",
"=",
"fcs_file",
",",
"chn_names",
"=",
"chn_names",
",",
"data",
"=",
"np",
".",
"array",
"(",
"data",
")",
".",
"transpose",
"(",
")",
")"
] |
5584983aa1eb927660183252039e73285c0724b3
|
test
|
Patch.get_header
|
Returns bytes
|
quilt/patch.py
|
def get_header(self, patch_dir=None):
""" Returns bytes """
lines = []
if patch_dir:
file = patch_dir + File(self.get_name())
name = file.get_name()
else:
name = self.get_name()
with open(name, "rb") as f:
for line in f:
if line.startswith(b"---") or line.startswith(b"Index:"):
break
lines.append(line)
return b"".join(lines)
|
def get_header(self, patch_dir=None):
""" Returns bytes """
lines = []
if patch_dir:
file = patch_dir + File(self.get_name())
name = file.get_name()
else:
name = self.get_name()
with open(name, "rb") as f:
for line in f:
if line.startswith(b"---") or line.startswith(b"Index:"):
break
lines.append(line)
return b"".join(lines)
|
[
"Returns",
"bytes"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/patch.py#L79-L94
|
[
"def",
"get_header",
"(",
"self",
",",
"patch_dir",
"=",
"None",
")",
":",
"lines",
"=",
"[",
"]",
"if",
"patch_dir",
":",
"file",
"=",
"patch_dir",
"+",
"File",
"(",
"self",
".",
"get_name",
"(",
")",
")",
"name",
"=",
"file",
".",
"get_name",
"(",
")",
"else",
":",
"name",
"=",
"self",
".",
"get_name",
"(",
")",
"with",
"open",
"(",
"name",
",",
"\"rb\"",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
":",
"if",
"line",
".",
"startswith",
"(",
"b\"---\"",
")",
"or",
"line",
".",
"startswith",
"(",
"b\"Index:\"",
")",
":",
"break",
"lines",
".",
"append",
"(",
"line",
")",
"return",
"b\"\"",
".",
"join",
"(",
"lines",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Diff.equal
|
Returns True if left and right are equal
|
quilt/patch.py
|
def equal(self, cwd):
""" Returns True if left and right are equal
"""
cmd = ["diff"]
cmd.append("-q")
cmd.append(self.left.get_name())
cmd.append(self.right.get_name())
try:
Process(cmd).run(cwd=cwd, suppress_output=True)
except SubprocessError as e:
if e.get_returncode() == 1:
return False
else:
raise e
return True
|
def equal(self, cwd):
""" Returns True if left and right are equal
"""
cmd = ["diff"]
cmd.append("-q")
cmd.append(self.left.get_name())
cmd.append(self.right.get_name())
try:
Process(cmd).run(cwd=cwd, suppress_output=True)
except SubprocessError as e:
if e.get_returncode() == 1:
return False
else:
raise e
return True
|
[
"Returns",
"True",
"if",
"left",
"and",
"right",
"are",
"equal"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/patch.py#L183-L198
|
[
"def",
"equal",
"(",
"self",
",",
"cwd",
")",
":",
"cmd",
"=",
"[",
"\"diff\"",
"]",
"cmd",
".",
"append",
"(",
"\"-q\"",
")",
"cmd",
".",
"append",
"(",
"self",
".",
"left",
".",
"get_name",
"(",
")",
")",
"cmd",
".",
"append",
"(",
"self",
".",
"right",
".",
"get_name",
"(",
")",
")",
"try",
":",
"Process",
"(",
"cmd",
")",
".",
"run",
"(",
"cwd",
"=",
"cwd",
",",
"suppress_output",
"=",
"True",
")",
"except",
"SubprocessError",
"as",
"e",
":",
"if",
"e",
".",
"get_returncode",
"(",
")",
"==",
"1",
":",
"return",
"False",
"else",
":",
"raise",
"e",
"return",
"True"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
New.create
|
Adds a new patch with patchname to the queue
The new patch will be added as the topmost applied patch.
|
quilt/new.py
|
def create(self, patchname):
""" Adds a new patch with patchname to the queue
The new patch will be added as the topmost applied patch.
"""
patch = Patch(patchname)
if self.series.is_patch(patch):
raise PatchAlreadyExists(self.series, patchname)
patch_dir = self.quilt_patches
patch_dir.create()
patchfile = patch_dir + File(patchname)
patchfile.touch()
pc_dir = self.quilt_pc + patchname
if pc_dir.exists():
# be sure that the directory is clear
pc_dir.delete()
# create empty .pc/<patchname> directory as quilt does too
pc_dir.create()
top = self.db.top_patch()
# add new patch after the current topmost applied patch
self.series.add_patches([patch], top)
# "apply" patch
self.db.add_patch(patch)
# create patches/series files
self.series.save()
# create .pc/.version and .pc/applied-patches files
self.db.save()
self.patch_created(patch)
|
def create(self, patchname):
""" Adds a new patch with patchname to the queue
The new patch will be added as the topmost applied patch.
"""
patch = Patch(patchname)
if self.series.is_patch(patch):
raise PatchAlreadyExists(self.series, patchname)
patch_dir = self.quilt_patches
patch_dir.create()
patchfile = patch_dir + File(patchname)
patchfile.touch()
pc_dir = self.quilt_pc + patchname
if pc_dir.exists():
# be sure that the directory is clear
pc_dir.delete()
# create empty .pc/<patchname> directory as quilt does too
pc_dir.create()
top = self.db.top_patch()
# add new patch after the current topmost applied patch
self.series.add_patches([patch], top)
# "apply" patch
self.db.add_patch(patch)
# create patches/series files
self.series.save()
# create .pc/.version and .pc/applied-patches files
self.db.save()
self.patch_created(patch)
|
[
"Adds",
"a",
"new",
"patch",
"with",
"patchname",
"to",
"the",
"queue"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/new.py#L29-L62
|
[
"def",
"create",
"(",
"self",
",",
"patchname",
")",
":",
"patch",
"=",
"Patch",
"(",
"patchname",
")",
"if",
"self",
".",
"series",
".",
"is_patch",
"(",
"patch",
")",
":",
"raise",
"PatchAlreadyExists",
"(",
"self",
".",
"series",
",",
"patchname",
")",
"patch_dir",
"=",
"self",
".",
"quilt_patches",
"patch_dir",
".",
"create",
"(",
")",
"patchfile",
"=",
"patch_dir",
"+",
"File",
"(",
"patchname",
")",
"patchfile",
".",
"touch",
"(",
")",
"pc_dir",
"=",
"self",
".",
"quilt_pc",
"+",
"patchname",
"if",
"pc_dir",
".",
"exists",
"(",
")",
":",
"# be sure that the directory is clear",
"pc_dir",
".",
"delete",
"(",
")",
"# create empty .pc/<patchname> directory as quilt does too",
"pc_dir",
".",
"create",
"(",
")",
"top",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"# add new patch after the current topmost applied patch",
"self",
".",
"series",
".",
"add_patches",
"(",
"[",
"patch",
"]",
",",
"top",
")",
"# \"apply\" patch",
"self",
".",
"db",
".",
"add_patch",
"(",
"patch",
")",
"# create patches/series files",
"self",
".",
"series",
".",
"save",
"(",
")",
"# create .pc/.version and .pc/applied-patches files",
"self",
".",
"db",
".",
"save",
"(",
")",
"self",
".",
"patch_created",
"(",
"patch",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Delete.delete_next
|
Delete next unapplied patch
If remove is True the patch file will also be removed. If remove and
backup are True a copy of the deleted patch file will be made.
|
quilt/delete.py
|
def delete_next(self, remove=False, backup=False):
""" Delete next unapplied patch
If remove is True the patch file will also be removed. If remove and
backup are True a copy of the deleted patch file will be made.
"""
patch = self.db.top_patch()
if patch:
after = self.series.patch_after(patch)
else:
after = self.series.first_patch()
if not after:
raise QuiltError("No next patch")
self._delete_patch(after, remove=remove, backup=backup)
|
def delete_next(self, remove=False, backup=False):
""" Delete next unapplied patch
If remove is True the patch file will also be removed. If remove and
backup are True a copy of the deleted patch file will be made.
"""
patch = self.db.top_patch()
if patch:
after = self.series.patch_after(patch)
else:
after = self.series.first_patch()
if not after:
raise QuiltError("No next patch")
self._delete_patch(after, remove=remove, backup=backup)
|
[
"Delete",
"next",
"unapplied",
"patch",
"If",
"remove",
"is",
"True",
"the",
"patch",
"file",
"will",
"also",
"be",
"removed",
".",
"If",
"remove",
"and",
"backup",
"are",
"True",
"a",
"copy",
"of",
"the",
"deleted",
"patch",
"file",
"will",
"be",
"made",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/delete.py#L62-L75
|
[
"def",
"delete_next",
"(",
"self",
",",
"remove",
"=",
"False",
",",
"backup",
"=",
"False",
")",
":",
"patch",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"if",
"patch",
":",
"after",
"=",
"self",
".",
"series",
".",
"patch_after",
"(",
"patch",
")",
"else",
":",
"after",
"=",
"self",
".",
"series",
".",
"first_patch",
"(",
")",
"if",
"not",
"after",
":",
"raise",
"QuiltError",
"(",
"\"No next patch\"",
")",
"self",
".",
"_delete_patch",
"(",
"after",
",",
"remove",
"=",
"remove",
",",
"backup",
"=",
"backup",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Delete.delete_patch
|
Delete specified patch from the series
If remove is True the patch file will also be removed. If remove and
backup are True a copy of the deleted patch file will be made.
|
quilt/delete.py
|
def delete_patch(self, patch_name=None, remove=False, backup=False):
""" Delete specified patch from the series
If remove is True the patch file will also be removed. If remove and
backup are True a copy of the deleted patch file will be made.
"""
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise NoAppliedPatch(self.db)
self._delete_patch(patch, remove=remove, backup=backup)
|
def delete_patch(self, patch_name=None, remove=False, backup=False):
""" Delete specified patch from the series
If remove is True the patch file will also be removed. If remove and
backup are True a copy of the deleted patch file will be made.
"""
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise NoAppliedPatch(self.db)
self._delete_patch(patch, remove=remove, backup=backup)
|
[
"Delete",
"specified",
"patch",
"from",
"the",
"series",
"If",
"remove",
"is",
"True",
"the",
"patch",
"file",
"will",
"also",
"be",
"removed",
".",
"If",
"remove",
"and",
"backup",
"are",
"True",
"a",
"copy",
"of",
"the",
"deleted",
"patch",
"file",
"will",
"be",
"made",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/delete.py#L77-L89
|
[
"def",
"delete_patch",
"(",
"self",
",",
"patch_name",
"=",
"None",
",",
"remove",
"=",
"False",
",",
"backup",
"=",
"False",
")",
":",
"if",
"patch_name",
":",
"patch",
"=",
"Patch",
"(",
"patch_name",
")",
"else",
":",
"patch",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"if",
"not",
"patch",
":",
"raise",
"NoAppliedPatch",
"(",
"self",
".",
"db",
")",
"self",
".",
"_delete_patch",
"(",
"patch",
",",
"remove",
"=",
"remove",
",",
"backup",
"=",
"backup",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Add._file_in_patch
|
Checks if a backup file of the filename in the current patch
exists
|
quilt/add.py
|
def _file_in_patch(self, filename, patch, ignore):
""" Checks if a backup file of the filename in the current patch
exists """
file = self.quilt_pc + File(os.path.join(patch.get_name(), filename))
if file.exists():
if ignore:
return True
else:
raise QuiltError("File %s is already in patch %s" % (filename,
patch.get_name()))
return False
|
def _file_in_patch(self, filename, patch, ignore):
""" Checks if a backup file of the filename in the current patch
exists """
file = self.quilt_pc + File(os.path.join(patch.get_name(), filename))
if file.exists():
if ignore:
return True
else:
raise QuiltError("File %s is already in patch %s" % (filename,
patch.get_name()))
return False
|
[
"Checks",
"if",
"a",
"backup",
"file",
"of",
"the",
"filename",
"in",
"the",
"current",
"patch",
"exists"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/add.py#L34-L44
|
[
"def",
"_file_in_patch",
"(",
"self",
",",
"filename",
",",
"patch",
",",
"ignore",
")",
":",
"file",
"=",
"self",
".",
"quilt_pc",
"+",
"File",
"(",
"os",
".",
"path",
".",
"join",
"(",
"patch",
".",
"get_name",
"(",
")",
",",
"filename",
")",
")",
"if",
"file",
".",
"exists",
"(",
")",
":",
"if",
"ignore",
":",
"return",
"True",
"else",
":",
"raise",
"QuiltError",
"(",
"\"File %s is already in patch %s\"",
"%",
"(",
"filename",
",",
"patch",
".",
"get_name",
"(",
")",
")",
")",
"return",
"False"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Add._backup_file
|
Creates a backup of file
|
quilt/add.py
|
def _backup_file(self, file, patch):
""" Creates a backup of file """
dest_dir = self.quilt_pc + patch.get_name()
file_dir = file.get_directory()
if file_dir:
#TODO get relative path
dest_dir = dest_dir + file_dir
backup = Backup()
backup.backup_file(file, dest_dir, copy_empty=True)
|
def _backup_file(self, file, patch):
""" Creates a backup of file """
dest_dir = self.quilt_pc + patch.get_name()
file_dir = file.get_directory()
if file_dir:
#TODO get relative path
dest_dir = dest_dir + file_dir
backup = Backup()
backup.backup_file(file, dest_dir, copy_empty=True)
|
[
"Creates",
"a",
"backup",
"of",
"file"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/add.py#L62-L70
|
[
"def",
"_backup_file",
"(",
"self",
",",
"file",
",",
"patch",
")",
":",
"dest_dir",
"=",
"self",
".",
"quilt_pc",
"+",
"patch",
".",
"get_name",
"(",
")",
"file_dir",
"=",
"file",
".",
"get_directory",
"(",
")",
"if",
"file_dir",
":",
"#TODO get relative path",
"dest_dir",
"=",
"dest_dir",
"+",
"file_dir",
"backup",
"=",
"Backup",
"(",
")",
"backup",
".",
"backup_file",
"(",
"file",
",",
"dest_dir",
",",
"copy_empty",
"=",
"True",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Add.add_file
|
Add file to the patch with patch_name.
If patch_name is None or empty the topmost patch will be used.
Adding an already added patch will raise an QuiltError if ignore is
False.
|
quilt/add.py
|
def add_file(self, filename, patch_name=None, ignore=False):
""" Add file to the patch with patch_name.
If patch_name is None or empty the topmost patch will be used.
Adding an already added patch will raise an QuiltError if ignore is
False.
"""
file = File(filename)
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise NoAppliedPatch(self.db)
exists = self._file_in_patch(filename, patch, ignore)
if exists:
return
self._file_in_next_patches(filename, patch)
if file.is_link():
raise QuiltError("Cannot add symbolic link %s" % filename)
self._backup_file(file, patch)
if file.exists():
# be sure user can write original file
os.chmod(filename, file.get_mode() | stat.S_IWUSR | stat.S_IRUSR)
self.file_added(file, patch)
|
def add_file(self, filename, patch_name=None, ignore=False):
""" Add file to the patch with patch_name.
If patch_name is None or empty the topmost patch will be used.
Adding an already added patch will raise an QuiltError if ignore is
False.
"""
file = File(filename)
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise NoAppliedPatch(self.db)
exists = self._file_in_patch(filename, patch, ignore)
if exists:
return
self._file_in_next_patches(filename, patch)
if file.is_link():
raise QuiltError("Cannot add symbolic link %s" % filename)
self._backup_file(file, patch)
if file.exists():
# be sure user can write original file
os.chmod(filename, file.get_mode() | stat.S_IWUSR | stat.S_IRUSR)
self.file_added(file, patch)
|
[
"Add",
"file",
"to",
"the",
"patch",
"with",
"patch_name",
".",
"If",
"patch_name",
"is",
"None",
"or",
"empty",
"the",
"topmost",
"patch",
"will",
"be",
"used",
".",
"Adding",
"an",
"already",
"added",
"patch",
"will",
"raise",
"an",
"QuiltError",
"if",
"ignore",
"is",
"False",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/add.py#L72-L102
|
[
"def",
"add_file",
"(",
"self",
",",
"filename",
",",
"patch_name",
"=",
"None",
",",
"ignore",
"=",
"False",
")",
":",
"file",
"=",
"File",
"(",
"filename",
")",
"if",
"patch_name",
":",
"patch",
"=",
"Patch",
"(",
"patch_name",
")",
"else",
":",
"patch",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"if",
"not",
"patch",
":",
"raise",
"NoAppliedPatch",
"(",
"self",
".",
"db",
")",
"exists",
"=",
"self",
".",
"_file_in_patch",
"(",
"filename",
",",
"patch",
",",
"ignore",
")",
"if",
"exists",
":",
"return",
"self",
".",
"_file_in_next_patches",
"(",
"filename",
",",
"patch",
")",
"if",
"file",
".",
"is_link",
"(",
")",
":",
"raise",
"QuiltError",
"(",
"\"Cannot add symbolic link %s\"",
"%",
"filename",
")",
"self",
".",
"_backup_file",
"(",
"file",
",",
"patch",
")",
"if",
"file",
".",
"exists",
"(",
")",
":",
"# be sure user can write original file",
"os",
".",
"chmod",
"(",
"filename",
",",
"file",
".",
"get_mode",
"(",
")",
"|",
"stat",
".",
"S_IWUSR",
"|",
"stat",
".",
"S_IRUSR",
")",
"self",
".",
"file_added",
"(",
"file",
",",
"patch",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Process.run
|
Run command as a subprocess and wait until it is finished.
The command should be given as a list of strings to avoid problems
with shell quoting. If the command exits with a return code other
than 0, a SubprocessError is raised.
|
quilt/utils.py
|
def run(self, suppress_output=False, inputdata=None, **kw):
"""Run command as a subprocess and wait until it is finished.
The command should be given as a list of strings to avoid problems
with shell quoting. If the command exits with a return code other
than 0, a SubprocessError is raised.
"""
if inputdata is not None:
kw["stdin"] = subprocess.PIPE
if suppress_output:
kw["stdout"] = open(os.devnull, "w")
kw["stderr"] = kw["stdout"]
try:
try:
process = subprocess.Popen(self.cmd, **kw)
finally:
if suppress_output:
kw["stdout"].close()
except OSError as e:
msg = "Failed starting command {!r}: {}".format(self.cmd, e)
raise QuiltError(msg)
if inputdata is not None:
process.stdin.write(inputdata)
process.stdin.close()
ret = process.wait()
if ret != 0:
raise SubprocessError(self.cmd, ret)
|
def run(self, suppress_output=False, inputdata=None, **kw):
"""Run command as a subprocess and wait until it is finished.
The command should be given as a list of strings to avoid problems
with shell quoting. If the command exits with a return code other
than 0, a SubprocessError is raised.
"""
if inputdata is not None:
kw["stdin"] = subprocess.PIPE
if suppress_output:
kw["stdout"] = open(os.devnull, "w")
kw["stderr"] = kw["stdout"]
try:
try:
process = subprocess.Popen(self.cmd, **kw)
finally:
if suppress_output:
kw["stdout"].close()
except OSError as e:
msg = "Failed starting command {!r}: {}".format(self.cmd, e)
raise QuiltError(msg)
if inputdata is not None:
process.stdin.write(inputdata)
process.stdin.close()
ret = process.wait()
if ret != 0:
raise SubprocessError(self.cmd, ret)
|
[
"Run",
"command",
"as",
"a",
"subprocess",
"and",
"wait",
"until",
"it",
"is",
"finished",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/utils.py#L76-L103
|
[
"def",
"run",
"(",
"self",
",",
"suppress_output",
"=",
"False",
",",
"inputdata",
"=",
"None",
",",
"*",
"*",
"kw",
")",
":",
"if",
"inputdata",
"is",
"not",
"None",
":",
"kw",
"[",
"\"stdin\"",
"]",
"=",
"subprocess",
".",
"PIPE",
"if",
"suppress_output",
":",
"kw",
"[",
"\"stdout\"",
"]",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"\"w\"",
")",
"kw",
"[",
"\"stderr\"",
"]",
"=",
"kw",
"[",
"\"stdout\"",
"]",
"try",
":",
"try",
":",
"process",
"=",
"subprocess",
".",
"Popen",
"(",
"self",
".",
"cmd",
",",
"*",
"*",
"kw",
")",
"finally",
":",
"if",
"suppress_output",
":",
"kw",
"[",
"\"stdout\"",
"]",
".",
"close",
"(",
")",
"except",
"OSError",
"as",
"e",
":",
"msg",
"=",
"\"Failed starting command {!r}: {}\"",
".",
"format",
"(",
"self",
".",
"cmd",
",",
"e",
")",
"raise",
"QuiltError",
"(",
"msg",
")",
"if",
"inputdata",
"is",
"not",
"None",
":",
"process",
".",
"stdin",
".",
"write",
"(",
"inputdata",
")",
"process",
".",
"stdin",
".",
"close",
"(",
")",
"ret",
"=",
"process",
".",
"wait",
"(",
")",
"if",
"ret",
"!=",
"0",
":",
"raise",
"SubprocessError",
"(",
"self",
".",
"cmd",
",",
"ret",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Directory.create
|
Creates the directory and all its parent directories if it does not
exist yet
|
quilt/utils.py
|
def create(self):
""" Creates the directory and all its parent directories if it does not
exist yet
"""
if self.dirname and not os.path.exists(self.dirname):
os.makedirs(self.dirname)
|
def create(self):
""" Creates the directory and all its parent directories if it does not
exist yet
"""
if self.dirname and not os.path.exists(self.dirname):
os.makedirs(self.dirname)
|
[
"Creates",
"the",
"directory",
"and",
"all",
"its",
"parent",
"directories",
"if",
"it",
"does",
"not",
"exist",
"yet"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/utils.py#L118-L123
|
[
"def",
"create",
"(",
"self",
")",
":",
"if",
"self",
".",
"dirname",
"and",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"dirname",
")",
":",
"os",
".",
"makedirs",
"(",
"self",
".",
"dirname",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Directory.copy
|
Copy to destination directory recursively.
If symlinks is true, symbolic links in the source tree are represented
as symbolic links in the new tree, but the metadata of the original
links is NOT copied; if false or omitted, the contents and metadata of
the linked files are copied to the new tree.
|
quilt/utils.py
|
def copy(self, dest, symlinks=False):
""" Copy to destination directory recursively.
If symlinks is true, symbolic links in the source tree are represented
as symbolic links in the new tree, but the metadata of the original
links is NOT copied; if false or omitted, the contents and metadata of
the linked files are copied to the new tree.
"""
if isinstance(dest, Directory):
dest = dest.get_name()
shutil.copytree(self.dirname, dest)
|
def copy(self, dest, symlinks=False):
""" Copy to destination directory recursively.
If symlinks is true, symbolic links in the source tree are represented
as symbolic links in the new tree, but the metadata of the original
links is NOT copied; if false or omitted, the contents and metadata of
the linked files are copied to the new tree.
"""
if isinstance(dest, Directory):
dest = dest.get_name()
shutil.copytree(self.dirname, dest)
|
[
"Copy",
"to",
"destination",
"directory",
"recursively",
".",
"If",
"symlinks",
"is",
"true",
"symbolic",
"links",
"in",
"the",
"source",
"tree",
"are",
"represented",
"as",
"symbolic",
"links",
"in",
"the",
"new",
"tree",
"but",
"the",
"metadata",
"of",
"the",
"original",
"links",
"is",
"NOT",
"copied",
";",
"if",
"false",
"or",
"omitted",
"the",
"contents",
"and",
"metadata",
"of",
"the",
"linked",
"files",
"are",
"copied",
"to",
"the",
"new",
"tree",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/utils.py#L180-L190
|
[
"def",
"copy",
"(",
"self",
",",
"dest",
",",
"symlinks",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"dest",
",",
"Directory",
")",
":",
"dest",
"=",
"dest",
".",
"get_name",
"(",
")",
"shutil",
".",
"copytree",
"(",
"self",
".",
"dirname",
",",
"dest",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
File.link
|
Create hard link as link to this file
|
quilt/utils.py
|
def link(self, link):
""" Create hard link as link to this file """
if isinstance(link, File):
link = link.filename
os.link(self.filename, link)
|
def link(self, link):
""" Create hard link as link to this file """
if isinstance(link, File):
link = link.filename
os.link(self.filename, link)
|
[
"Create",
"hard",
"link",
"as",
"link",
"to",
"this",
"file"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/utils.py#L244-L248
|
[
"def",
"link",
"(",
"self",
",",
"link",
")",
":",
"if",
"isinstance",
"(",
"link",
",",
"File",
")",
":",
"link",
"=",
"link",
".",
"filename",
"os",
".",
"link",
"(",
"self",
".",
"filename",
",",
"link",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
File.copy
|
Copy file to destination
|
quilt/utils.py
|
def copy(self, dest):
""" Copy file to destination """
if isinstance(dest, File):
dest_dir = dest.get_directory()
dest_dir.create()
dest = dest.filename
elif isinstance(dest, Directory):
dest = dest.dirname
shutil.copy2(self.filename, dest)
|
def copy(self, dest):
""" Copy file to destination """
if isinstance(dest, File):
dest_dir = dest.get_directory()
dest_dir.create()
dest = dest.filename
elif isinstance(dest, Directory):
dest = dest.dirname
shutil.copy2(self.filename, dest)
|
[
"Copy",
"file",
"to",
"destination"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/utils.py#L250-L259
|
[
"def",
"copy",
"(",
"self",
",",
"dest",
")",
":",
"if",
"isinstance",
"(",
"dest",
",",
"File",
")",
":",
"dest_dir",
"=",
"dest",
".",
"get_directory",
"(",
")",
"dest_dir",
".",
"create",
"(",
")",
"dest",
"=",
"dest",
".",
"filename",
"elif",
"isinstance",
"(",
"dest",
",",
"Directory",
")",
":",
"dest",
"=",
"dest",
".",
"dirname",
"shutil",
".",
"copy2",
"(",
"self",
".",
"filename",
",",
"dest",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
File.get_directory
|
Returns the directory where the file is placed in or None if the
path to the file doesn't contain a directory
|
quilt/utils.py
|
def get_directory(self):
""" Returns the directory where the file is placed in or None if the
path to the file doesn't contain a directory
"""
dirname = os.path.dirname(self.filename)
if dirname:
return Directory(dirname)
else:
return None
|
def get_directory(self):
""" Returns the directory where the file is placed in or None if the
path to the file doesn't contain a directory
"""
dirname = os.path.dirname(self.filename)
if dirname:
return Directory(dirname)
else:
return None
|
[
"Returns",
"the",
"directory",
"where",
"the",
"file",
"is",
"placed",
"in",
"or",
"None",
"if",
"the",
"path",
"to",
"the",
"file",
"doesn",
"t",
"contain",
"a",
"directory"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/utils.py#L280-L288
|
[
"def",
"get_directory",
"(",
"self",
")",
":",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"self",
".",
"filename",
")",
"if",
"dirname",
":",
"return",
"Directory",
"(",
"dirname",
")",
"else",
":",
"return",
"None"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Backup.backup_file
|
Backup file in dest_dir Directory.
The return value is a File object pointing to the copied file in the
destination directory or None if no file is copied.
If file exists and it is not empty it is copied to dest_dir.
If file exists and it is empty the file is copied only if copy_empty is
True.
If file does not exist and copy_empty is True a new file in dest_dir
will be created.
In all other cases no file will be copied and None is returned.
|
quilt/backup.py
|
def backup_file(self, file, dest_dir, copy_empty=False):
""" Backup file in dest_dir Directory.
The return value is a File object pointing to the copied file in the
destination directory or None if no file is copied.
If file exists and it is not empty it is copied to dest_dir.
If file exists and it is empty the file is copied only if copy_empty is
True.
If file does not exist and copy_empty is True a new file in dest_dir
will be created.
In all other cases no file will be copied and None is returned.
"""
if file.exists():
if not copy_empty and file.is_empty():
return None
dest_dir.create()
file.copy(dest_dir)
return dest_dir + file.get_basefile()
elif copy_empty:
# create new file in dest_dir
dest_dir = dest_dir + file.get_directory()
dest_dir.create()
dest_file = dest_dir + file.get_basefile()
dest_file.touch()
return dest_file
else:
return None
|
def backup_file(self, file, dest_dir, copy_empty=False):
""" Backup file in dest_dir Directory.
The return value is a File object pointing to the copied file in the
destination directory or None if no file is copied.
If file exists and it is not empty it is copied to dest_dir.
If file exists and it is empty the file is copied only if copy_empty is
True.
If file does not exist and copy_empty is True a new file in dest_dir
will be created.
In all other cases no file will be copied and None is returned.
"""
if file.exists():
if not copy_empty and file.is_empty():
return None
dest_dir.create()
file.copy(dest_dir)
return dest_dir + file.get_basefile()
elif copy_empty:
# create new file in dest_dir
dest_dir = dest_dir + file.get_directory()
dest_dir.create()
dest_file = dest_dir + file.get_basefile()
dest_file.touch()
return dest_file
else:
return None
|
[
"Backup",
"file",
"in",
"dest_dir",
"Directory",
".",
"The",
"return",
"value",
"is",
"a",
"File",
"object",
"pointing",
"to",
"the",
"copied",
"file",
"in",
"the",
"destination",
"directory",
"or",
"None",
"if",
"no",
"file",
"is",
"copied",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/backup.py#L22-L48
|
[
"def",
"backup_file",
"(",
"self",
",",
"file",
",",
"dest_dir",
",",
"copy_empty",
"=",
"False",
")",
":",
"if",
"file",
".",
"exists",
"(",
")",
":",
"if",
"not",
"copy_empty",
"and",
"file",
".",
"is_empty",
"(",
")",
":",
"return",
"None",
"dest_dir",
".",
"create",
"(",
")",
"file",
".",
"copy",
"(",
"dest_dir",
")",
"return",
"dest_dir",
"+",
"file",
".",
"get_basefile",
"(",
")",
"elif",
"copy_empty",
":",
"# create new file in dest_dir",
"dest_dir",
"=",
"dest_dir",
"+",
"file",
".",
"get_directory",
"(",
")",
"dest_dir",
".",
"create",
"(",
")",
"dest_file",
"=",
"dest_dir",
"+",
"file",
".",
"get_basefile",
"(",
")",
"dest_file",
".",
"touch",
"(",
")",
"return",
"dest_file",
"else",
":",
"return",
"None"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Refresh.refresh
|
Refresh patch with patch_name or applied top patch if patch_name is
None
|
quilt/refresh.py
|
def refresh(self, patch_name=None, edit=False):
""" Refresh patch with patch_name or applied top patch if patch_name is
None
"""
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise QuiltError("No patch applied. Nothing to refresh.")
pc_dir = self.quilt_pc + patch.get_name()
patch_file = self.quilt_patches + File(patch.get_name())
files = pc_dir.content()[1]
with TmpFile(prefix="pquilt-") as tmpfile:
f = tmpfile.open()
if patch_file.exists():
header = patch.get_header(self.quilt_patches)
tmpfile.write(header)
for file_name in files:
if file_name == ".timestamp":
continue
orig_file = pc_dir + File(file_name)
new_file = File(file_name)
left_label, right_label, index = self._get_labels(file_name,
orig_file,
new_file)
self._write_index(tmpfile, index)
diff = Diff(orig_file, new_file)
diff.run(self.cwd, fd=f, left_label=left_label,
right_label=right_label)
if tmpfile.is_empty():
raise QuiltError("Nothing to refresh.")
if edit:
self.edit_patch(tmpfile)
tpatch = Patch(tmpfile.get_name())
tpatch.run(pc_dir.get_name(), dry_run=True, quiet=True)
if patch_file.exists():
diff = Diff(patch_file, tmpfile)
if diff.equal(self.cwd):
raise QuiltError("Nothing to refresh.")
tmpfile.copy(patch_file)
timestamp = pc_dir + File(".timestamp")
timestamp.touch()
refresh = self.quilt_pc + File(patch.get_name() + "~refresh")
refresh.delete_if_exists()
self.refreshed(patch)
|
def refresh(self, patch_name=None, edit=False):
""" Refresh patch with patch_name or applied top patch if patch_name is
None
"""
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise QuiltError("No patch applied. Nothing to refresh.")
pc_dir = self.quilt_pc + patch.get_name()
patch_file = self.quilt_patches + File(patch.get_name())
files = pc_dir.content()[1]
with TmpFile(prefix="pquilt-") as tmpfile:
f = tmpfile.open()
if patch_file.exists():
header = patch.get_header(self.quilt_patches)
tmpfile.write(header)
for file_name in files:
if file_name == ".timestamp":
continue
orig_file = pc_dir + File(file_name)
new_file = File(file_name)
left_label, right_label, index = self._get_labels(file_name,
orig_file,
new_file)
self._write_index(tmpfile, index)
diff = Diff(orig_file, new_file)
diff.run(self.cwd, fd=f, left_label=left_label,
right_label=right_label)
if tmpfile.is_empty():
raise QuiltError("Nothing to refresh.")
if edit:
self.edit_patch(tmpfile)
tpatch = Patch(tmpfile.get_name())
tpatch.run(pc_dir.get_name(), dry_run=True, quiet=True)
if patch_file.exists():
diff = Diff(patch_file, tmpfile)
if diff.equal(self.cwd):
raise QuiltError("Nothing to refresh.")
tmpfile.copy(patch_file)
timestamp = pc_dir + File(".timestamp")
timestamp.touch()
refresh = self.quilt_pc + File(patch.get_name() + "~refresh")
refresh.delete_if_exists()
self.refreshed(patch)
|
[
"Refresh",
"patch",
"with",
"patch_name",
"or",
"applied",
"top",
"patch",
"if",
"patch_name",
"is",
"None"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/refresh.py#L36-L94
|
[
"def",
"refresh",
"(",
"self",
",",
"patch_name",
"=",
"None",
",",
"edit",
"=",
"False",
")",
":",
"if",
"patch_name",
":",
"patch",
"=",
"Patch",
"(",
"patch_name",
")",
"else",
":",
"patch",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"if",
"not",
"patch",
":",
"raise",
"QuiltError",
"(",
"\"No patch applied. Nothing to refresh.\"",
")",
"pc_dir",
"=",
"self",
".",
"quilt_pc",
"+",
"patch",
".",
"get_name",
"(",
")",
"patch_file",
"=",
"self",
".",
"quilt_patches",
"+",
"File",
"(",
"patch",
".",
"get_name",
"(",
")",
")",
"files",
"=",
"pc_dir",
".",
"content",
"(",
")",
"[",
"1",
"]",
"with",
"TmpFile",
"(",
"prefix",
"=",
"\"pquilt-\"",
")",
"as",
"tmpfile",
":",
"f",
"=",
"tmpfile",
".",
"open",
"(",
")",
"if",
"patch_file",
".",
"exists",
"(",
")",
":",
"header",
"=",
"patch",
".",
"get_header",
"(",
"self",
".",
"quilt_patches",
")",
"tmpfile",
".",
"write",
"(",
"header",
")",
"for",
"file_name",
"in",
"files",
":",
"if",
"file_name",
"==",
"\".timestamp\"",
":",
"continue",
"orig_file",
"=",
"pc_dir",
"+",
"File",
"(",
"file_name",
")",
"new_file",
"=",
"File",
"(",
"file_name",
")",
"left_label",
",",
"right_label",
",",
"index",
"=",
"self",
".",
"_get_labels",
"(",
"file_name",
",",
"orig_file",
",",
"new_file",
")",
"self",
".",
"_write_index",
"(",
"tmpfile",
",",
"index",
")",
"diff",
"=",
"Diff",
"(",
"orig_file",
",",
"new_file",
")",
"diff",
".",
"run",
"(",
"self",
".",
"cwd",
",",
"fd",
"=",
"f",
",",
"left_label",
"=",
"left_label",
",",
"right_label",
"=",
"right_label",
")",
"if",
"tmpfile",
".",
"is_empty",
"(",
")",
":",
"raise",
"QuiltError",
"(",
"\"Nothing to refresh.\"",
")",
"if",
"edit",
":",
"self",
".",
"edit_patch",
"(",
"tmpfile",
")",
"tpatch",
"=",
"Patch",
"(",
"tmpfile",
".",
"get_name",
"(",
")",
")",
"tpatch",
".",
"run",
"(",
"pc_dir",
".",
"get_name",
"(",
")",
",",
"dry_run",
"=",
"True",
",",
"quiet",
"=",
"True",
")",
"if",
"patch_file",
".",
"exists",
"(",
")",
":",
"diff",
"=",
"Diff",
"(",
"patch_file",
",",
"tmpfile",
")",
"if",
"diff",
".",
"equal",
"(",
"self",
".",
"cwd",
")",
":",
"raise",
"QuiltError",
"(",
"\"Nothing to refresh.\"",
")",
"tmpfile",
".",
"copy",
"(",
"patch_file",
")",
"timestamp",
"=",
"pc_dir",
"+",
"File",
"(",
"\".timestamp\"",
")",
"timestamp",
".",
"touch",
"(",
")",
"refresh",
"=",
"self",
".",
"quilt_pc",
"+",
"File",
"(",
"patch",
".",
"get_name",
"(",
")",
"+",
"\"~refresh\"",
")",
"refresh",
".",
"delete_if_exists",
"(",
")",
"self",
".",
"refreshed",
"(",
"patch",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Pop.unapply_patch
|
Unapply patches up to patch_name. patch_name will end up as top
patch
|
quilt/pop.py
|
def unapply_patch(self, patch_name, force=False):
""" Unapply patches up to patch_name. patch_name will end up as top
patch """
self._check(force)
patches = self.db.patches_after(Patch(patch_name))
for patch in reversed(patches):
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch())
|
def unapply_patch(self, patch_name, force=False):
""" Unapply patches up to patch_name. patch_name will end up as top
patch """
self._check(force)
patches = self.db.patches_after(Patch(patch_name))
for patch in reversed(patches):
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch())
|
[
"Unapply",
"patches",
"up",
"to",
"patch_name",
".",
"patch_name",
"will",
"end",
"up",
"as",
"top",
"patch"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/pop.py#L63-L74
|
[
"def",
"unapply_patch",
"(",
"self",
",",
"patch_name",
",",
"force",
"=",
"False",
")",
":",
"self",
".",
"_check",
"(",
"force",
")",
"patches",
"=",
"self",
".",
"db",
".",
"patches_after",
"(",
"Patch",
"(",
"patch_name",
")",
")",
"for",
"patch",
"in",
"reversed",
"(",
"patches",
")",
":",
"self",
".",
"_unapply_patch",
"(",
"patch",
")",
"self",
".",
"db",
".",
"save",
"(",
")",
"self",
".",
"unapplied",
"(",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Pop.unapply_top_patch
|
Unapply top patch
|
quilt/pop.py
|
def unapply_top_patch(self, force=False):
""" Unapply top patch """
self._check(force)
patch = self.db.top_patch()
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch())
|
def unapply_top_patch(self, force=False):
""" Unapply top patch """
self._check(force)
patch = self.db.top_patch()
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch())
|
[
"Unapply",
"top",
"patch"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/pop.py#L76-L85
|
[
"def",
"unapply_top_patch",
"(",
"self",
",",
"force",
"=",
"False",
")",
":",
"self",
".",
"_check",
"(",
"force",
")",
"patch",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"self",
".",
"_unapply_patch",
"(",
"patch",
")",
"self",
".",
"db",
".",
"save",
"(",
")",
"self",
".",
"unapplied",
"(",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Pop.unapply_all
|
Unapply all patches
|
quilt/pop.py
|
def unapply_all(self, force=False):
""" Unapply all patches """
self._check(force)
for patch in reversed(self.db.applied_patches()):
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch())
|
def unapply_all(self, force=False):
""" Unapply all patches """
self._check(force)
for patch in reversed(self.db.applied_patches()):
self._unapply_patch(patch)
self.db.save()
self.unapplied(self.db.top_patch())
|
[
"Unapply",
"all",
"patches"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/pop.py#L87-L96
|
[
"def",
"unapply_all",
"(",
"self",
",",
"force",
"=",
"False",
")",
":",
"self",
".",
"_check",
"(",
"force",
")",
"for",
"patch",
"in",
"reversed",
"(",
"self",
".",
"db",
".",
"applied_patches",
"(",
")",
")",
":",
"self",
".",
"_unapply_patch",
"(",
"patch",
")",
"self",
".",
"db",
".",
"save",
"(",
")",
"self",
".",
"unapplied",
"(",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Push.apply_patch
|
Apply all patches up to patch_name
|
quilt/push.py
|
def apply_patch(self, patch_name, force=False, quiet=False):
""" Apply all patches up to patch_name """
self._check()
patch = Patch(patch_name)
patches = self.series.patches_until(patch)[:]
applied = self.db.applied_patches()
for patch in applied:
if patch in patches:
patches.remove(patch)
if not patches:
raise AllPatchesApplied(self.series, self.db.top_patch())
self.applying(patch)
try:
for cur_patch in patches:
self._apply_patch(cur_patch, force, quiet)
finally:
self.db.save()
self.applied(self.db.top_patch())
|
def apply_patch(self, patch_name, force=False, quiet=False):
""" Apply all patches up to patch_name """
self._check()
patch = Patch(patch_name)
patches = self.series.patches_until(patch)[:]
applied = self.db.applied_patches()
for patch in applied:
if patch in patches:
patches.remove(patch)
if not patches:
raise AllPatchesApplied(self.series, self.db.top_patch())
self.applying(patch)
try:
for cur_patch in patches:
self._apply_patch(cur_patch, force, quiet)
finally:
self.db.save()
self.applied(self.db.top_patch())
|
[
"Apply",
"all",
"patches",
"up",
"to",
"patch_name"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/push.py#L87-L109
|
[
"def",
"apply_patch",
"(",
"self",
",",
"patch_name",
",",
"force",
"=",
"False",
",",
"quiet",
"=",
"False",
")",
":",
"self",
".",
"_check",
"(",
")",
"patch",
"=",
"Patch",
"(",
"patch_name",
")",
"patches",
"=",
"self",
".",
"series",
".",
"patches_until",
"(",
"patch",
")",
"[",
":",
"]",
"applied",
"=",
"self",
".",
"db",
".",
"applied_patches",
"(",
")",
"for",
"patch",
"in",
"applied",
":",
"if",
"patch",
"in",
"patches",
":",
"patches",
".",
"remove",
"(",
"patch",
")",
"if",
"not",
"patches",
":",
"raise",
"AllPatchesApplied",
"(",
"self",
".",
"series",
",",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
")",
"self",
".",
"applying",
"(",
"patch",
")",
"try",
":",
"for",
"cur_patch",
"in",
"patches",
":",
"self",
".",
"_apply_patch",
"(",
"cur_patch",
",",
"force",
",",
"quiet",
")",
"finally",
":",
"self",
".",
"db",
".",
"save",
"(",
")",
"self",
".",
"applied",
"(",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Push.apply_next_patch
|
Apply next patch in series file
|
quilt/push.py
|
def apply_next_patch(self, force=False, quiet=False):
""" Apply next patch in series file """
self._check()
top = self.db.top_patch()
if not top:
patch = self.series.first_patch()
else:
patch = self.series.patch_after(top)
if not patch:
raise AllPatchesApplied(self.series, top)
self.applying(patch)
self._apply_patch(patch, force, quiet)
self.db.save()
self.applied(self.db.top_patch())
|
def apply_next_patch(self, force=False, quiet=False):
""" Apply next patch in series file """
self._check()
top = self.db.top_patch()
if not top:
patch = self.series.first_patch()
else:
patch = self.series.patch_after(top)
if not patch:
raise AllPatchesApplied(self.series, top)
self.applying(patch)
self._apply_patch(patch, force, quiet)
self.db.save()
self.applied(self.db.top_patch())
|
[
"Apply",
"next",
"patch",
"in",
"series",
"file"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/push.py#L111-L129
|
[
"def",
"apply_next_patch",
"(",
"self",
",",
"force",
"=",
"False",
",",
"quiet",
"=",
"False",
")",
":",
"self",
".",
"_check",
"(",
")",
"top",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"if",
"not",
"top",
":",
"patch",
"=",
"self",
".",
"series",
".",
"first_patch",
"(",
")",
"else",
":",
"patch",
"=",
"self",
".",
"series",
".",
"patch_after",
"(",
"top",
")",
"if",
"not",
"patch",
":",
"raise",
"AllPatchesApplied",
"(",
"self",
".",
"series",
",",
"top",
")",
"self",
".",
"applying",
"(",
"patch",
")",
"self",
".",
"_apply_patch",
"(",
"patch",
",",
"force",
",",
"quiet",
")",
"self",
".",
"db",
".",
"save",
"(",
")",
"self",
".",
"applied",
"(",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Push.apply_all
|
Apply all patches in series file
|
quilt/push.py
|
def apply_all(self, force=False, quiet=False):
""" Apply all patches in series file """
self._check()
top = self.db.top_patch()
if top:
patches = self.series.patches_after(top)
else:
patches = self.series.patches()
if not patches:
raise AllPatchesApplied(self.series, top)
try:
for patch in patches:
self.applying(patch)
self._apply_patch(patch, force, quiet)
finally:
self.db.save()
self.applied(self.db.top_patch())
|
def apply_all(self, force=False, quiet=False):
""" Apply all patches in series file """
self._check()
top = self.db.top_patch()
if top:
patches = self.series.patches_after(top)
else:
patches = self.series.patches()
if not patches:
raise AllPatchesApplied(self.series, top)
try:
for patch in patches:
self.applying(patch)
self._apply_patch(patch, force, quiet)
finally:
self.db.save()
self.applied(self.db.top_patch())
|
[
"Apply",
"all",
"patches",
"in",
"series",
"file"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/push.py#L131-L150
|
[
"def",
"apply_all",
"(",
"self",
",",
"force",
"=",
"False",
",",
"quiet",
"=",
"False",
")",
":",
"self",
".",
"_check",
"(",
")",
"top",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"if",
"top",
":",
"patches",
"=",
"self",
".",
"series",
".",
"patches_after",
"(",
"top",
")",
"else",
":",
"patches",
"=",
"self",
".",
"series",
".",
"patches",
"(",
")",
"if",
"not",
"patches",
":",
"raise",
"AllPatchesApplied",
"(",
"self",
".",
"series",
",",
"top",
")",
"try",
":",
"for",
"patch",
"in",
"patches",
":",
"self",
".",
"applying",
"(",
"patch",
")",
"self",
".",
"_apply_patch",
"(",
"patch",
",",
"force",
",",
"quiet",
")",
"finally",
":",
"self",
".",
"db",
".",
"save",
"(",
")",
"self",
".",
"applied",
"(",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.read
|
Reads all patches from the series file
|
quilt/db.py
|
def read(self):
""" Reads all patches from the series file """
self.patchlines = []
self.patch2line = dict()
if self.exists():
with open(self.series_file, "r") as f:
for line in f:
self.add_patch(line)
|
def read(self):
""" Reads all patches from the series file """
self.patchlines = []
self.patch2line = dict()
if self.exists():
with open(self.series_file, "r") as f:
for line in f:
self.add_patch(line)
|
[
"Reads",
"all",
"patches",
"from",
"the",
"series",
"file"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L116-L123
|
[
"def",
"read",
"(",
"self",
")",
":",
"self",
".",
"patchlines",
"=",
"[",
"]",
"self",
".",
"patch2line",
"=",
"dict",
"(",
")",
"if",
"self",
".",
"exists",
"(",
")",
":",
"with",
"open",
"(",
"self",
".",
"series_file",
",",
"\"r\"",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
":",
"self",
".",
"add_patch",
"(",
"line",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.save
|
Saves current patches list in the series file
|
quilt/db.py
|
def save(self):
""" Saves current patches list in the series file """
with open(self.series_file, "wb") as f:
for patchline in self.patchlines:
f.write(_encode_str(str(patchline)))
f.write(b"\n")
|
def save(self):
""" Saves current patches list in the series file """
with open(self.series_file, "wb") as f:
for patchline in self.patchlines:
f.write(_encode_str(str(patchline)))
f.write(b"\n")
|
[
"Saves",
"current",
"patches",
"list",
"in",
"the",
"series",
"file"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L125-L130
|
[
"def",
"save",
"(",
"self",
")",
":",
"with",
"open",
"(",
"self",
".",
"series_file",
",",
"\"wb\"",
")",
"as",
"f",
":",
"for",
"patchline",
"in",
"self",
".",
"patchlines",
":",
"f",
".",
"write",
"(",
"_encode_str",
"(",
"str",
"(",
"patchline",
")",
")",
")",
"f",
".",
"write",
"(",
"b\"\\n\"",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.add_patch
|
Add a patch to the patches list
|
quilt/db.py
|
def add_patch(self, patch):
""" Add a patch to the patches list """
patchline = PatchLine(patch)
patch = patchline.get_patch()
if patch:
self.patch2line[patch] = patchline
self.patchlines.append(patchline)
|
def add_patch(self, patch):
""" Add a patch to the patches list """
patchline = PatchLine(patch)
patch = patchline.get_patch()
if patch:
self.patch2line[patch] = patchline
self.patchlines.append(patchline)
|
[
"Add",
"a",
"patch",
"to",
"the",
"patches",
"list"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L132-L138
|
[
"def",
"add_patch",
"(",
"self",
",",
"patch",
")",
":",
"patchline",
"=",
"PatchLine",
"(",
"patch",
")",
"patch",
"=",
"patchline",
".",
"get_patch",
"(",
")",
"if",
"patch",
":",
"self",
".",
"patch2line",
"[",
"patch",
"]",
"=",
"patchline",
"self",
".",
"patchlines",
".",
"append",
"(",
"patchline",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.insert_patches
|
Insert list of patches at the front of the curent patches list
|
quilt/db.py
|
def insert_patches(self, patches):
""" Insert list of patches at the front of the curent patches list """
patchlines = []
for patch_name in patches:
patchline = PatchLine(patch_name)
patch = patchline.get_patch()
if patch:
self.patch2line[patch] = patchline
patchlines.append(patchline)
patchlines.extend(self.patchlines)
self.patchlines = patchlines
|
def insert_patches(self, patches):
""" Insert list of patches at the front of the curent patches list """
patchlines = []
for patch_name in patches:
patchline = PatchLine(patch_name)
patch = patchline.get_patch()
if patch:
self.patch2line[patch] = patchline
patchlines.append(patchline)
patchlines.extend(self.patchlines)
self.patchlines = patchlines
|
[
"Insert",
"list",
"of",
"patches",
"at",
"the",
"front",
"of",
"the",
"curent",
"patches",
"list"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L144-L154
|
[
"def",
"insert_patches",
"(",
"self",
",",
"patches",
")",
":",
"patchlines",
"=",
"[",
"]",
"for",
"patch_name",
"in",
"patches",
":",
"patchline",
"=",
"PatchLine",
"(",
"patch_name",
")",
"patch",
"=",
"patchline",
".",
"get_patch",
"(",
")",
"if",
"patch",
":",
"self",
".",
"patch2line",
"[",
"patch",
"]",
"=",
"patchline",
"patchlines",
".",
"append",
"(",
"patchline",
")",
"patchlines",
".",
"extend",
"(",
"self",
".",
"patchlines",
")",
"self",
".",
"patchlines",
"=",
"patchlines"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.add_patches
|
Add a list of patches to the patches list
|
quilt/db.py
|
def add_patches(self, patches, after=None):
""" Add a list of patches to the patches list """
if after is None:
self.insert_patches(patches)
else:
self._check_patch(after)
patchlines = self._patchlines_before(after)
patchlines.append(self.patch2line[after])
for patch in patches:
patchline = PatchLine(patch)
patchlines.append(patchline)
self.patch2line[patchline.get_patch()] = patchline
patchlines.extend(self._patchlines_after(after))
self.patchlines = patchlines
|
def add_patches(self, patches, after=None):
""" Add a list of patches to the patches list """
if after is None:
self.insert_patches(patches)
else:
self._check_patch(after)
patchlines = self._patchlines_before(after)
patchlines.append(self.patch2line[after])
for patch in patches:
patchline = PatchLine(patch)
patchlines.append(patchline)
self.patch2line[patchline.get_patch()] = patchline
patchlines.extend(self._patchlines_after(after))
self.patchlines = patchlines
|
[
"Add",
"a",
"list",
"of",
"patches",
"to",
"the",
"patches",
"list"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L156-L169
|
[
"def",
"add_patches",
"(",
"self",
",",
"patches",
",",
"after",
"=",
"None",
")",
":",
"if",
"after",
"is",
"None",
":",
"self",
".",
"insert_patches",
"(",
"patches",
")",
"else",
":",
"self",
".",
"_check_patch",
"(",
"after",
")",
"patchlines",
"=",
"self",
".",
"_patchlines_before",
"(",
"after",
")",
"patchlines",
".",
"append",
"(",
"self",
".",
"patch2line",
"[",
"after",
"]",
")",
"for",
"patch",
"in",
"patches",
":",
"patchline",
"=",
"PatchLine",
"(",
"patch",
")",
"patchlines",
".",
"append",
"(",
"patchline",
")",
"self",
".",
"patch2line",
"[",
"patchline",
".",
"get_patch",
"(",
")",
"]",
"=",
"patchline",
"patchlines",
".",
"extend",
"(",
"self",
".",
"_patchlines_after",
"(",
"after",
")",
")",
"self",
".",
"patchlines",
"=",
"patchlines"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.remove_patch
|
Remove a patch from the patches list
|
quilt/db.py
|
def remove_patch(self, patch):
""" Remove a patch from the patches list """
self._check_patch(patch)
patchline = self.patch2line[patch]
del self.patch2line[patch]
self.patchlines.remove(patchline)
|
def remove_patch(self, patch):
""" Remove a patch from the patches list """
self._check_patch(patch)
patchline = self.patch2line[patch]
del self.patch2line[patch]
self.patchlines.remove(patchline)
|
[
"Remove",
"a",
"patch",
"from",
"the",
"patches",
"list"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L171-L176
|
[
"def",
"remove_patch",
"(",
"self",
",",
"patch",
")",
":",
"self",
".",
"_check_patch",
"(",
"patch",
")",
"patchline",
"=",
"self",
".",
"patch2line",
"[",
"patch",
"]",
"del",
"self",
".",
"patch2line",
"[",
"patch",
"]",
"self",
".",
"patchlines",
".",
"remove",
"(",
"patchline",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.patches_after
|
Returns a list of patches after patch from the patches list
|
quilt/db.py
|
def patches_after(self, patch):
""" Returns a list of patches after patch from the patches list """
return [line.get_patch() for line in self._patchlines_after(patch) if
line.get_patch()]
|
def patches_after(self, patch):
""" Returns a list of patches after patch from the patches list """
return [line.get_patch() for line in self._patchlines_after(patch) if
line.get_patch()]
|
[
"Returns",
"a",
"list",
"of",
"patches",
"after",
"patch",
"from",
"the",
"patches",
"list"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L219-L222
|
[
"def",
"patches_after",
"(",
"self",
",",
"patch",
")",
":",
"return",
"[",
"line",
".",
"get_patch",
"(",
")",
"for",
"line",
"in",
"self",
".",
"_patchlines_after",
"(",
"patch",
")",
"if",
"line",
".",
"get_patch",
"(",
")",
"]"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.patches_before
|
Returns a list of patches before patch from the patches list
|
quilt/db.py
|
def patches_before(self, patch):
""" Returns a list of patches before patch from the patches list """
return [line.get_patch() for line in self._patchlines_before(patch)
if line.get_patch()]
|
def patches_before(self, patch):
""" Returns a list of patches before patch from the patches list """
return [line.get_patch() for line in self._patchlines_before(patch)
if line.get_patch()]
|
[
"Returns",
"a",
"list",
"of",
"patches",
"before",
"patch",
"from",
"the",
"patches",
"list"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L233-L236
|
[
"def",
"patches_before",
"(",
"self",
",",
"patch",
")",
":",
"return",
"[",
"line",
".",
"get_patch",
"(",
")",
"for",
"line",
"in",
"self",
".",
"_patchlines_before",
"(",
"patch",
")",
"if",
"line",
".",
"get_patch",
"(",
")",
"]"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.patches_until
|
Returns a list of patches before patch from the patches list
including the provided patch
|
quilt/db.py
|
def patches_until(self, patch):
""" Returns a list of patches before patch from the patches list
including the provided patch
"""
return [line.get_patch() for line in self._patchlines_until(patch) if
line.get_patch()]
|
def patches_until(self, patch):
""" Returns a list of patches before patch from the patches list
including the provided patch
"""
return [line.get_patch() for line in self._patchlines_until(patch) if
line.get_patch()]
|
[
"Returns",
"a",
"list",
"of",
"patches",
"before",
"patch",
"from",
"the",
"patches",
"list",
"including",
"the",
"provided",
"patch"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L247-L252
|
[
"def",
"patches_until",
"(",
"self",
",",
"patch",
")",
":",
"return",
"[",
"line",
".",
"get_patch",
"(",
")",
"for",
"line",
"in",
"self",
".",
"_patchlines_until",
"(",
"patch",
")",
"if",
"line",
".",
"get_patch",
"(",
")",
"]"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
PatchSeries.replace
|
Replace old_patch with new_patch
The method only replaces the patch and doesn't change any comments.
|
quilt/db.py
|
def replace(self, old_patch, new_patch):
""" Replace old_patch with new_patch
The method only replaces the patch and doesn't change any comments.
"""
self._check_patch(old_patch)
old_patchline = self.patch2line[old_patch]
index = self.patchlines.index(old_patchline)
self.patchlines.pop(index)
new_patchline = PatchLine(new_patch)
new_patchline.set_comment(old_patchline.get_comment())
self.patchlines.insert(index, new_patchline)
del self.patch2line[old_patch]
self.patch2line[new_patch] = new_patchline
|
def replace(self, old_patch, new_patch):
""" Replace old_patch with new_patch
The method only replaces the patch and doesn't change any comments.
"""
self._check_patch(old_patch)
old_patchline = self.patch2line[old_patch]
index = self.patchlines.index(old_patchline)
self.patchlines.pop(index)
new_patchline = PatchLine(new_patch)
new_patchline.set_comment(old_patchline.get_comment())
self.patchlines.insert(index, new_patchline)
del self.patch2line[old_patch]
self.patch2line[new_patch] = new_patchline
|
[
"Replace",
"old_patch",
"with",
"new_patch",
"The",
"method",
"only",
"replaces",
"the",
"patch",
"and",
"doesn",
"t",
"change",
"any",
"comments",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L265-L277
|
[
"def",
"replace",
"(",
"self",
",",
"old_patch",
",",
"new_patch",
")",
":",
"self",
".",
"_check_patch",
"(",
"old_patch",
")",
"old_patchline",
"=",
"self",
".",
"patch2line",
"[",
"old_patch",
"]",
"index",
"=",
"self",
".",
"patchlines",
".",
"index",
"(",
"old_patchline",
")",
"self",
".",
"patchlines",
".",
"pop",
"(",
"index",
")",
"new_patchline",
"=",
"PatchLine",
"(",
"new_patch",
")",
"new_patchline",
".",
"set_comment",
"(",
"old_patchline",
".",
"get_comment",
"(",
")",
")",
"self",
".",
"patchlines",
".",
"insert",
"(",
"index",
",",
"new_patchline",
")",
"del",
"self",
".",
"patch2line",
"[",
"old_patch",
"]",
"self",
".",
"patch2line",
"[",
"new_patch",
"]",
"=",
"new_patchline"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Db.create
|
Creates the dirname and inserts a .version file
|
quilt/db.py
|
def create(self):
""" Creates the dirname and inserts a .version file """
if not os.path.exists(self.dirname):
os.makedirs(self.dirname)
self._create_version(self.version_file)
|
def create(self):
""" Creates the dirname and inserts a .version file """
if not os.path.exists(self.dirname):
os.makedirs(self.dirname)
self._create_version(self.version_file)
|
[
"Creates",
"the",
"dirname",
"and",
"inserts",
"a",
".",
"version",
"file"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L296-L300
|
[
"def",
"create",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"dirname",
")",
":",
"os",
".",
"makedirs",
"(",
"self",
".",
"dirname",
")",
"self",
".",
"_create_version",
"(",
"self",
".",
"version_file",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Db.check_version
|
Checks if the .version file in dirname has the correct supported
version number
|
quilt/db.py
|
def check_version(self, version_file):
""" Checks if the .version file in dirname has the correct supported
version number """
# The file contains a version number as a decimal integer, optionally
# followed by a newline
with open(version_file, "r") as f:
version = f.read(10)
version = version.rstrip("\r\n")
if len(version) >= 10 or version != str(DB_VERSION):
raise DBError("The quilt meta-data version of %s is not supported "
"by python-quilt. python-quilt only supports "
"version %s." % (version, DB_VERSION))
|
def check_version(self, version_file):
""" Checks if the .version file in dirname has the correct supported
version number """
# The file contains a version number as a decimal integer, optionally
# followed by a newline
with open(version_file, "r") as f:
version = f.read(10)
version = version.rstrip("\r\n")
if len(version) >= 10 or version != str(DB_VERSION):
raise DBError("The quilt meta-data version of %s is not supported "
"by python-quilt. python-quilt only supports "
"version %s." % (version, DB_VERSION))
|
[
"Checks",
"if",
"the",
".",
"version",
"file",
"in",
"dirname",
"has",
"the",
"correct",
"supported",
"version",
"number"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/db.py#L311-L324
|
[
"def",
"check_version",
"(",
"self",
",",
"version_file",
")",
":",
"# The file contains a version number as a decimal integer, optionally",
"# followed by a newline",
"with",
"open",
"(",
"version_file",
",",
"\"r\"",
")",
"as",
"f",
":",
"version",
"=",
"f",
".",
"read",
"(",
"10",
")",
"version",
"=",
"version",
".",
"rstrip",
"(",
"\"\\r\\n\"",
")",
"if",
"len",
"(",
"version",
")",
">=",
"10",
"or",
"version",
"!=",
"str",
"(",
"DB_VERSION",
")",
":",
"raise",
"DBError",
"(",
"\"The quilt meta-data version of %s is not supported \"",
"\"by python-quilt. python-quilt only supports \"",
"\"version %s.\"",
"%",
"(",
"version",
",",
"DB_VERSION",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
ArgumentGroup.add_to_parser
|
Adds the group and its arguments to a argparse.ArgumentParser instance
@param parser A argparse.ArgumentParser instance
|
quilt/cli/parser.py
|
def add_to_parser(self, parser):
"""
Adds the group and its arguments to a argparse.ArgumentParser instance
@param parser A argparse.ArgumentParser instance
"""
self.group = parser.add_argument_group(self.title, self.description)
for arg in self.arguments:
arg.add_to_parser(self.group)
|
def add_to_parser(self, parser):
"""
Adds the group and its arguments to a argparse.ArgumentParser instance
@param parser A argparse.ArgumentParser instance
"""
self.group = parser.add_argument_group(self.title, self.description)
for arg in self.arguments:
arg.add_to_parser(self.group)
|
[
"Adds",
"the",
"group",
"and",
"its",
"arguments",
"to",
"a",
"argparse",
".",
"ArgumentParser",
"instance"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/cli/parser.py#L166-L174
|
[
"def",
"add_to_parser",
"(",
"self",
",",
"parser",
")",
":",
"self",
".",
"group",
"=",
"parser",
".",
"add_argument_group",
"(",
"self",
".",
"title",
",",
"self",
".",
"description",
")",
"for",
"arg",
"in",
"self",
".",
"arguments",
":",
"arg",
".",
"add_to_parser",
"(",
"self",
".",
"group",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Argument.add_to_parser
|
Adds the argument to an argparse.ArgumentParser instance
@param parser An argparse.ArgumentParser instance
|
quilt/cli/parser.py
|
def add_to_parser(self, parser):
"""
Adds the argument to an argparse.ArgumentParser instance
@param parser An argparse.ArgumentParser instance
"""
kwargs = self._get_kwargs()
args = self._get_args()
parser.add_argument(*args, **kwargs)
|
def add_to_parser(self, parser):
"""
Adds the argument to an argparse.ArgumentParser instance
@param parser An argparse.ArgumentParser instance
"""
kwargs = self._get_kwargs()
args = self._get_args()
parser.add_argument(*args, **kwargs)
|
[
"Adds",
"the",
"argument",
"to",
"an",
"argparse",
".",
"ArgumentParser",
"instance"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/cli/parser.py#L289-L297
|
[
"def",
"add_to_parser",
"(",
"self",
",",
"parser",
")",
":",
"kwargs",
"=",
"self",
".",
"_get_kwargs",
"(",
")",
"args",
"=",
"self",
".",
"_get_args",
"(",
")",
"parser",
".",
"add_argument",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
BaseSubParser.add_to_parser
|
Adds this SubParser to the subparsers created by
argparse.ArgumentParser.add_subparsers method.
@param subparsers Normally a _SubParsersAction instance created by
argparse.ArgumentParser.add_subparsers method
|
quilt/cli/parser.py
|
def add_to_parser(self, subparsers):
"""
Adds this SubParser to the subparsers created by
argparse.ArgumentParser.add_subparsers method.
@param subparsers Normally a _SubParsersAction instance created by
argparse.ArgumentParser.add_subparsers method
"""
parser = subparsers.add_parser(*self._get_args(), **self._get_kwargs())
parser.set_defaults(**self.get_defaults())
for name, group in self.base_argument_groups:
group.add_to_parser(parser)
for name, arg in self.base_arguments:
arg.add_to_parser(parser)
self.add_subparsers(parser)
|
def add_to_parser(self, subparsers):
"""
Adds this SubParser to the subparsers created by
argparse.ArgumentParser.add_subparsers method.
@param subparsers Normally a _SubParsersAction instance created by
argparse.ArgumentParser.add_subparsers method
"""
parser = subparsers.add_parser(*self._get_args(), **self._get_kwargs())
parser.set_defaults(**self.get_defaults())
for name, group in self.base_argument_groups:
group.add_to_parser(parser)
for name, arg in self.base_arguments:
arg.add_to_parser(parser)
self.add_subparsers(parser)
|
[
"Adds",
"this",
"SubParser",
"to",
"the",
"subparsers",
"created",
"by",
"argparse",
".",
"ArgumentParser",
".",
"add_subparsers",
"method",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/cli/parser.py#L372-L386
|
[
"def",
"add_to_parser",
"(",
"self",
",",
"subparsers",
")",
":",
"parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"*",
"self",
".",
"_get_args",
"(",
")",
",",
"*",
"*",
"self",
".",
"_get_kwargs",
"(",
")",
")",
"parser",
".",
"set_defaults",
"(",
"*",
"*",
"self",
".",
"get_defaults",
"(",
")",
")",
"for",
"name",
",",
"group",
"in",
"self",
".",
"base_argument_groups",
":",
"group",
".",
"add_to_parser",
"(",
"parser",
")",
"for",
"name",
",",
"arg",
"in",
"self",
".",
"base_arguments",
":",
"arg",
".",
"add_to_parser",
"(",
"parser",
")",
"self",
".",
"add_subparsers",
"(",
"parser",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
SubParsersMixin.set_subparsers_args
|
Sets args and kwargs that are passed when creating a subparsers group
in an argparse.ArgumentParser i.e. when calling
argparser.ArgumentParser.add_subparsers
|
quilt/cli/parser.py
|
def set_subparsers_args(self, *args, **kwargs):
"""
Sets args and kwargs that are passed when creating a subparsers group
in an argparse.ArgumentParser i.e. when calling
argparser.ArgumentParser.add_subparsers
"""
self.subparsers_args = args
self.subparsers_kwargs = kwargs
|
def set_subparsers_args(self, *args, **kwargs):
"""
Sets args and kwargs that are passed when creating a subparsers group
in an argparse.ArgumentParser i.e. when calling
argparser.ArgumentParser.add_subparsers
"""
self.subparsers_args = args
self.subparsers_kwargs = kwargs
|
[
"Sets",
"args",
"and",
"kwargs",
"that",
"are",
"passed",
"when",
"creating",
"a",
"subparsers",
"group",
"in",
"an",
"argparse",
".",
"ArgumentParser",
"i",
".",
"e",
".",
"when",
"calling",
"argparser",
".",
"ArgumentParser",
".",
"add_subparsers"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/cli/parser.py#L411-L418
|
[
"def",
"set_subparsers_args",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"subparsers_args",
"=",
"args",
"self",
".",
"subparsers_kwargs",
"=",
"kwargs"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
SubParsersMixin.add_subparsers
|
Adds the subparsers to an argparse.ArgumentParser
@param parser An argparse.ArgumentParser instance
|
quilt/cli/parser.py
|
def add_subparsers(self, parser):
"""
Adds the subparsers to an argparse.ArgumentParser
@param parser An argparse.ArgumentParser instance
"""
sgroup = getattr(self, "subparser_group", None)
if sgroup:
sgroup.add_to_parser(self)
if not self.subparsers:
return
args = self.subparsers_args or self.get_default_subparsers_args()
kwargs = self.subparsers_kwargs or self.get_default_subparsers_kwargs()
subs = parser.add_subparsers(*args, **kwargs)
for subparser in self.subparsers:
subparser.add_to_parser(subs)
|
def add_subparsers(self, parser):
"""
Adds the subparsers to an argparse.ArgumentParser
@param parser An argparse.ArgumentParser instance
"""
sgroup = getattr(self, "subparser_group", None)
if sgroup:
sgroup.add_to_parser(self)
if not self.subparsers:
return
args = self.subparsers_args or self.get_default_subparsers_args()
kwargs = self.subparsers_kwargs or self.get_default_subparsers_kwargs()
subs = parser.add_subparsers(*args, **kwargs)
for subparser in self.subparsers:
subparser.add_to_parser(subs)
|
[
"Adds",
"the",
"subparsers",
"to",
"an",
"argparse",
".",
"ArgumentParser"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/cli/parser.py#L442-L460
|
[
"def",
"add_subparsers",
"(",
"self",
",",
"parser",
")",
":",
"sgroup",
"=",
"getattr",
"(",
"self",
",",
"\"subparser_group\"",
",",
"None",
")",
"if",
"sgroup",
":",
"sgroup",
".",
"add_to_parser",
"(",
"self",
")",
"if",
"not",
"self",
".",
"subparsers",
":",
"return",
"args",
"=",
"self",
".",
"subparsers_args",
"or",
"self",
".",
"get_default_subparsers_args",
"(",
")",
"kwargs",
"=",
"self",
".",
"subparsers_kwargs",
"or",
"self",
".",
"get_default_subparsers_kwargs",
"(",
")",
"subs",
"=",
"parser",
".",
"add_subparsers",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"for",
"subparser",
"in",
"self",
".",
"subparsers",
":",
"subparser",
".",
"add_to_parser",
"(",
"subs",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Revert._file_in_patch
|
Checks if a backup file of the filename in the current patch
exists and raises a QuiltError if not.
|
quilt/revert.py
|
def _file_in_patch(self, filename, patch):
""" Checks if a backup file of the filename in the current patch
exists and raises a QuiltError if not.
"""
pc_dir = self.quilt_pc + patch.get_name()
file = pc_dir + File(filename)
if not file.exists():
raise QuiltError("File %s is not in patch %s" % (filename,
patch.get_name()))
|
def _file_in_patch(self, filename, patch):
""" Checks if a backup file of the filename in the current patch
exists and raises a QuiltError if not.
"""
pc_dir = self.quilt_pc + patch.get_name()
file = pc_dir + File(filename)
if not file.exists():
raise QuiltError("File %s is not in patch %s" % (filename,
patch.get_name()))
|
[
"Checks",
"if",
"a",
"backup",
"file",
"of",
"the",
"filename",
"in",
"the",
"current",
"patch",
"exists",
"and",
"raises",
"a",
"QuiltError",
"if",
"not",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/revert.py#L35-L43
|
[
"def",
"_file_in_patch",
"(",
"self",
",",
"filename",
",",
"patch",
")",
":",
"pc_dir",
"=",
"self",
".",
"quilt_pc",
"+",
"patch",
".",
"get_name",
"(",
")",
"file",
"=",
"pc_dir",
"+",
"File",
"(",
"filename",
")",
"if",
"not",
"file",
".",
"exists",
"(",
")",
":",
"raise",
"QuiltError",
"(",
"\"File %s is not in patch %s\"",
"%",
"(",
"filename",
",",
"patch",
".",
"get_name",
"(",
")",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Revert._file_in_next_patches
|
Checks if a backup file of the filename in the applied patches after
patch exists
|
quilt/revert.py
|
def _file_in_next_patches(self, filename, patch):
""" Checks if a backup file of the filename in the applied patches after
patch exists """
if not self.db.is_patch(patch):
# no patches applied
return
patches = self.db.patches_after(patch)
for patch in patches:
file = self.quilt_pc + File(os.path.join(patch.get_name(),
filename))
if file.exists():
raise QuiltError("File %s is modified by patch %s" %
(filename, patch.get_name()))
|
def _file_in_next_patches(self, filename, patch):
""" Checks if a backup file of the filename in the applied patches after
patch exists """
if not self.db.is_patch(patch):
# no patches applied
return
patches = self.db.patches_after(patch)
for patch in patches:
file = self.quilt_pc + File(os.path.join(patch.get_name(),
filename))
if file.exists():
raise QuiltError("File %s is modified by patch %s" %
(filename, patch.get_name()))
|
[
"Checks",
"if",
"a",
"backup",
"file",
"of",
"the",
"filename",
"in",
"the",
"applied",
"patches",
"after",
"patch",
"exists"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/revert.py#L45-L59
|
[
"def",
"_file_in_next_patches",
"(",
"self",
",",
"filename",
",",
"patch",
")",
":",
"if",
"not",
"self",
".",
"db",
".",
"is_patch",
"(",
"patch",
")",
":",
"# no patches applied",
"return",
"patches",
"=",
"self",
".",
"db",
".",
"patches_after",
"(",
"patch",
")",
"for",
"patch",
"in",
"patches",
":",
"file",
"=",
"self",
".",
"quilt_pc",
"+",
"File",
"(",
"os",
".",
"path",
".",
"join",
"(",
"patch",
".",
"get_name",
"(",
")",
",",
"filename",
")",
")",
"if",
"file",
".",
"exists",
"(",
")",
":",
"raise",
"QuiltError",
"(",
"\"File %s is modified by patch %s\"",
"%",
"(",
"filename",
",",
"patch",
".",
"get_name",
"(",
")",
")",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Revert.revert_file
|
Revert not added changes of filename.
If patch_name is None or empty the topmost patch will be used.
|
quilt/revert.py
|
def revert_file(self, filename, patch_name=None):
""" Revert not added changes of filename.
If patch_name is None or empty the topmost patch will be used.
"""
file = File(filename)
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise QuiltError("No patch available. Nothing to revert.")
self._file_in_patch(filename, patch)
self._file_in_next_patches(filename, patch)
pc_dir = self.quilt_pc + patch.get_name()
pc_file = pc_dir + file
if not file.exists() and pc_file.is_empty():
# new and empty file will be reverted
pc_file.delete()
self.file_reverted(file, patch)
return
with TmpDirectory(prefix="pquilt-") as tmpdir:
# apply current patch in temporary directory to revert changes of
# file that aren't committed in the patch
tmp_file = self._apply_patch_temporary(tmpdir, pc_file, patch)
if tmp_file and tmp_file.exists() and not tmp_file.is_empty():
diff = Diff(file, tmp_file)
if diff.equal(self.cwd):
self.file_unchanged(file, patch)
return
dir = file.get_directory()
if not dir:
dir = Directory(os.getcwd())
else:
dir.create()
tmp_file.copy(dir)
self.file_reverted(file, patch)
else:
self.file_unchanged(file, patch)
|
def revert_file(self, filename, patch_name=None):
""" Revert not added changes of filename.
If patch_name is None or empty the topmost patch will be used.
"""
file = File(filename)
if patch_name:
patch = Patch(patch_name)
else:
patch = self.db.top_patch()
if not patch:
raise QuiltError("No patch available. Nothing to revert.")
self._file_in_patch(filename, patch)
self._file_in_next_patches(filename, patch)
pc_dir = self.quilt_pc + patch.get_name()
pc_file = pc_dir + file
if not file.exists() and pc_file.is_empty():
# new and empty file will be reverted
pc_file.delete()
self.file_reverted(file, patch)
return
with TmpDirectory(prefix="pquilt-") as tmpdir:
# apply current patch in temporary directory to revert changes of
# file that aren't committed in the patch
tmp_file = self._apply_patch_temporary(tmpdir, pc_file, patch)
if tmp_file and tmp_file.exists() and not tmp_file.is_empty():
diff = Diff(file, tmp_file)
if diff.equal(self.cwd):
self.file_unchanged(file, patch)
return
dir = file.get_directory()
if not dir:
dir = Directory(os.getcwd())
else:
dir.create()
tmp_file.copy(dir)
self.file_reverted(file, patch)
else:
self.file_unchanged(file, patch)
|
[
"Revert",
"not",
"added",
"changes",
"of",
"filename",
".",
"If",
"patch_name",
"is",
"None",
"or",
"empty",
"the",
"topmost",
"patch",
"will",
"be",
"used",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/revert.py#L77-L121
|
[
"def",
"revert_file",
"(",
"self",
",",
"filename",
",",
"patch_name",
"=",
"None",
")",
":",
"file",
"=",
"File",
"(",
"filename",
")",
"if",
"patch_name",
":",
"patch",
"=",
"Patch",
"(",
"patch_name",
")",
"else",
":",
"patch",
"=",
"self",
".",
"db",
".",
"top_patch",
"(",
")",
"if",
"not",
"patch",
":",
"raise",
"QuiltError",
"(",
"\"No patch available. Nothing to revert.\"",
")",
"self",
".",
"_file_in_patch",
"(",
"filename",
",",
"patch",
")",
"self",
".",
"_file_in_next_patches",
"(",
"filename",
",",
"patch",
")",
"pc_dir",
"=",
"self",
".",
"quilt_pc",
"+",
"patch",
".",
"get_name",
"(",
")",
"pc_file",
"=",
"pc_dir",
"+",
"file",
"if",
"not",
"file",
".",
"exists",
"(",
")",
"and",
"pc_file",
".",
"is_empty",
"(",
")",
":",
"# new and empty file will be reverted",
"pc_file",
".",
"delete",
"(",
")",
"self",
".",
"file_reverted",
"(",
"file",
",",
"patch",
")",
"return",
"with",
"TmpDirectory",
"(",
"prefix",
"=",
"\"pquilt-\"",
")",
"as",
"tmpdir",
":",
"# apply current patch in temporary directory to revert changes of",
"# file that aren't committed in the patch",
"tmp_file",
"=",
"self",
".",
"_apply_patch_temporary",
"(",
"tmpdir",
",",
"pc_file",
",",
"patch",
")",
"if",
"tmp_file",
"and",
"tmp_file",
".",
"exists",
"(",
")",
"and",
"not",
"tmp_file",
".",
"is_empty",
"(",
")",
":",
"diff",
"=",
"Diff",
"(",
"file",
",",
"tmp_file",
")",
"if",
"diff",
".",
"equal",
"(",
"self",
".",
"cwd",
")",
":",
"self",
".",
"file_unchanged",
"(",
"file",
",",
"patch",
")",
"return",
"dir",
"=",
"file",
".",
"get_directory",
"(",
")",
"if",
"not",
"dir",
":",
"dir",
"=",
"Directory",
"(",
"os",
".",
"getcwd",
"(",
")",
")",
"else",
":",
"dir",
".",
"create",
"(",
")",
"tmp_file",
".",
"copy",
"(",
"dir",
")",
"self",
".",
"file_reverted",
"(",
"file",
",",
"patch",
")",
"else",
":",
"self",
".",
"file_unchanged",
"(",
"file",
",",
"patch",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Import.import_patch
|
Import patch into the patch queue
The patch is inserted as the next unapplied patch.
|
quilt/patchimport.py
|
def import_patch(self, patch_name, new_name=None):
""" Import patch into the patch queue
The patch is inserted as the next unapplied patch.
"""
if new_name:
dir_name = os.path.dirname(new_name)
name = os.path.basename(new_name)
dest_dir = self.quilt_patches + Directory(dir_name)
dest_dir.create()
else:
name = os.path.basename(patch_name)
dest_dir = self.quilt_patches
patch_file = File(patch_name)
dest_file = dest_dir + File(name)
patch_file.copy(dest_file)
self._import_patches([name])
|
def import_patch(self, patch_name, new_name=None):
""" Import patch into the patch queue
The patch is inserted as the next unapplied patch.
"""
if new_name:
dir_name = os.path.dirname(new_name)
name = os.path.basename(new_name)
dest_dir = self.quilt_patches + Directory(dir_name)
dest_dir.create()
else:
name = os.path.basename(patch_name)
dest_dir = self.quilt_patches
patch_file = File(patch_name)
dest_file = dest_dir + File(name)
patch_file.copy(dest_file)
self._import_patches([name])
|
[
"Import",
"patch",
"into",
"the",
"patch",
"queue",
"The",
"patch",
"is",
"inserted",
"as",
"the",
"next",
"unapplied",
"patch",
"."
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/patchimport.py#L35-L51
|
[
"def",
"import_patch",
"(",
"self",
",",
"patch_name",
",",
"new_name",
"=",
"None",
")",
":",
"if",
"new_name",
":",
"dir_name",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"new_name",
")",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"new_name",
")",
"dest_dir",
"=",
"self",
".",
"quilt_patches",
"+",
"Directory",
"(",
"dir_name",
")",
"dest_dir",
".",
"create",
"(",
")",
"else",
":",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"patch_name",
")",
"dest_dir",
"=",
"self",
".",
"quilt_patches",
"patch_file",
"=",
"File",
"(",
"patch_name",
")",
"dest_file",
"=",
"dest_dir",
"+",
"File",
"(",
"name",
")",
"patch_file",
".",
"copy",
"(",
"dest_file",
")",
"self",
".",
"_import_patches",
"(",
"[",
"name",
"]",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
Import.import_patches
|
Import several patches into the patch queue
|
quilt/patchimport.py
|
def import_patches(self, patches):
""" Import several patches into the patch queue """
dest_dir = self.quilt_patches
patch_names = []
for patch in patches:
patch_name = os.path.basename(patch)
patch_file = File(patch)
dest_file = dest_dir + File(patch_name)
patch_file.copy(dest_file)
patch_names.append(patch_name)
self._import_patches(patch_names)
|
def import_patches(self, patches):
""" Import several patches into the patch queue """
dest_dir = self.quilt_patches
patch_names = []
for patch in patches:
patch_name = os.path.basename(patch)
patch_file = File(patch)
dest_file = dest_dir + File(patch_name)
patch_file.copy(dest_file)
patch_names.append(patch_name)
self._import_patches(patch_names)
|
[
"Import",
"several",
"patches",
"into",
"the",
"patch",
"queue"
] |
bjoernricks/python-quilt
|
python
|
https://github.com/bjoernricks/python-quilt/blob/fae88237f601848cc34d073584d9dcb409f01777/quilt/patchimport.py#L53-L66
|
[
"def",
"import_patches",
"(",
"self",
",",
"patches",
")",
":",
"dest_dir",
"=",
"self",
".",
"quilt_patches",
"patch_names",
"=",
"[",
"]",
"for",
"patch",
"in",
"patches",
":",
"patch_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"patch",
")",
"patch_file",
"=",
"File",
"(",
"patch",
")",
"dest_file",
"=",
"dest_dir",
"+",
"File",
"(",
"patch_name",
")",
"patch_file",
".",
"copy",
"(",
"dest_file",
")",
"patch_names",
".",
"append",
"(",
"patch_name",
")",
"self",
".",
"_import_patches",
"(",
"patch_names",
")"
] |
fae88237f601848cc34d073584d9dcb409f01777
|
test
|
WayHandler.way
|
Process each way.
|
o2g/osm/handlers/way_handler.py
|
def way(self, w):
"""Process each way."""
if w.id not in self.way_ids:
return
way_points = []
for n in w.nodes:
try:
way_points.append(Point(n.location.lon, n.location.lat))
except o.InvalidLocationError:
logging.debug('InvalidLocationError at way %s node %s', w.id, n.ref)
self.ways[w.id] = Way(w.id, way_points)
|
def way(self, w):
"""Process each way."""
if w.id not in self.way_ids:
return
way_points = []
for n in w.nodes:
try:
way_points.append(Point(n.location.lon, n.location.lat))
except o.InvalidLocationError:
logging.debug('InvalidLocationError at way %s node %s', w.id, n.ref)
self.ways[w.id] = Way(w.id, way_points)
|
[
"Process",
"each",
"way",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/handlers/way_handler.py#L13-L25
|
[
"def",
"way",
"(",
"self",
",",
"w",
")",
":",
"if",
"w",
".",
"id",
"not",
"in",
"self",
".",
"way_ids",
":",
"return",
"way_points",
"=",
"[",
"]",
"for",
"n",
"in",
"w",
".",
"nodes",
":",
"try",
":",
"way_points",
".",
"append",
"(",
"Point",
"(",
"n",
".",
"location",
".",
"lon",
",",
"n",
".",
"location",
".",
"lat",
")",
")",
"except",
"o",
".",
"InvalidLocationError",
":",
"logging",
".",
"debug",
"(",
"'InvalidLocationError at way %s node %s'",
",",
"w",
".",
"id",
",",
"n",
".",
"ref",
")",
"self",
".",
"ways",
"[",
"w",
".",
"id",
"]",
"=",
"Way",
"(",
"w",
".",
"id",
",",
"way_points",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
NodeHandler.missing_node_ids
|
Get a list of nodes not found in OSM data.
|
o2g/osm/handlers/node_handler.py
|
def missing_node_ids(self):
"""Get a list of nodes not found in OSM data."""
present_node_ids = self.nodes.keys()
for nid in self.node_ids:
if nid not in present_node_ids:
yield nid
|
def missing_node_ids(self):
"""Get a list of nodes not found in OSM data."""
present_node_ids = self.nodes.keys()
for nid in self.node_ids:
if nid not in present_node_ids:
yield nid
|
[
"Get",
"a",
"list",
"of",
"nodes",
"not",
"found",
"in",
"OSM",
"data",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/handlers/node_handler.py#L15-L20
|
[
"def",
"missing_node_ids",
"(",
"self",
")",
":",
"present_node_ids",
"=",
"self",
".",
"nodes",
".",
"keys",
"(",
")",
"for",
"nid",
"in",
"self",
".",
"node_ids",
":",
"if",
"nid",
"not",
"in",
"present_node_ids",
":",
"yield",
"nid"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
NodeHandler.node
|
Process each node.
|
o2g/osm/handlers/node_handler.py
|
def node(self, n):
"""Process each node."""
if n.id not in self.node_ids:
return
try:
self.nodes[n.id] =\
Node(n.id,
n.location.lon,
n.location.lat,
{t.k: t.v for t in n.tags})
except o.InvalidLocationError:
logging.debug('InvalidLocationError at node %s', n.id)
|
def node(self, n):
"""Process each node."""
if n.id not in self.node_ids:
return
try:
self.nodes[n.id] =\
Node(n.id,
n.location.lon,
n.location.lat,
{t.k: t.v for t in n.tags})
except o.InvalidLocationError:
logging.debug('InvalidLocationError at node %s', n.id)
|
[
"Process",
"each",
"node",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/handlers/node_handler.py#L22-L34
|
[
"def",
"node",
"(",
"self",
",",
"n",
")",
":",
"if",
"n",
".",
"id",
"not",
"in",
"self",
".",
"node_ids",
":",
"return",
"try",
":",
"self",
".",
"nodes",
"[",
"n",
".",
"id",
"]",
"=",
"Node",
"(",
"n",
".",
"id",
",",
"n",
".",
"location",
".",
"lon",
",",
"n",
".",
"location",
".",
"lat",
",",
"{",
"t",
".",
"k",
":",
"t",
".",
"v",
"for",
"t",
"in",
"n",
".",
"tags",
"}",
")",
"except",
"o",
".",
"InvalidLocationError",
":",
"logging",
".",
"debug",
"(",
"'InvalidLocationError at node %s'",
",",
"n",
".",
"id",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
build_route
|
Extract information of one route.
|
o2g/osm/builders/route_builder.py
|
def build_route(relation):
"""Extract information of one route."""
if relation.tags.get('type') != 'route':
# Build route only for relations of type `route`
return
short_name = create_route_short_name(relation)
color = relation.tags.get('color')
return\
Route(relation.id,
short_name,
create_route_long_name(relation, short_name),
map_osm_route_type_to_gtfs(relation.tags.get('route')),
'https://www.openstreetmap.org/relation/{}'.format(relation.id),
color.strip('#') if color else '',
get_agency_id(relation))
|
def build_route(relation):
"""Extract information of one route."""
if relation.tags.get('type') != 'route':
# Build route only for relations of type `route`
return
short_name = create_route_short_name(relation)
color = relation.tags.get('color')
return\
Route(relation.id,
short_name,
create_route_long_name(relation, short_name),
map_osm_route_type_to_gtfs(relation.tags.get('route')),
'https://www.openstreetmap.org/relation/{}'.format(relation.id),
color.strip('#') if color else '',
get_agency_id(relation))
|
[
"Extract",
"information",
"of",
"one",
"route",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/builders/route_builder.py#L15-L30
|
[
"def",
"build_route",
"(",
"relation",
")",
":",
"if",
"relation",
".",
"tags",
".",
"get",
"(",
"'type'",
")",
"!=",
"'route'",
":",
"# Build route only for relations of type `route`",
"return",
"short_name",
"=",
"create_route_short_name",
"(",
"relation",
")",
"color",
"=",
"relation",
".",
"tags",
".",
"get",
"(",
"'color'",
")",
"return",
"Route",
"(",
"relation",
".",
"id",
",",
"short_name",
",",
"create_route_long_name",
"(",
"relation",
",",
"short_name",
")",
",",
"map_osm_route_type_to_gtfs",
"(",
"relation",
".",
"tags",
".",
"get",
"(",
"'route'",
")",
")",
",",
"'https://www.openstreetmap.org/relation/{}'",
".",
"format",
"(",
"relation",
".",
"id",
")",
",",
"color",
".",
"strip",
"(",
"'#'",
")",
"if",
"color",
"else",
"''",
",",
"get_agency_id",
"(",
"relation",
")",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
create_route_long_name
|
Create a meaningful route name.
|
o2g/osm/builders/route_builder.py
|
def create_route_long_name(relation, short_name):
"""Create a meaningful route name."""
if relation.tags.get('from') and relation.tags.get('to'):
return "{0}-to-{1}".format(relation.tags.get('from'),
relation.tags.get('to'))
name = relation.tags.get('name') or\
relation.tags.get('alt_name') or\
"OSM Route No. {}".format(relation.id)
# Drop route_short_name from this one if it contains it
if short_name and name.startswith(short_name):
# Drop it
return name[len(short_name):]
return name
|
def create_route_long_name(relation, short_name):
"""Create a meaningful route name."""
if relation.tags.get('from') and relation.tags.get('to'):
return "{0}-to-{1}".format(relation.tags.get('from'),
relation.tags.get('to'))
name = relation.tags.get('name') or\
relation.tags.get('alt_name') or\
"OSM Route No. {}".format(relation.id)
# Drop route_short_name from this one if it contains it
if short_name and name.startswith(short_name):
# Drop it
return name[len(short_name):]
return name
|
[
"Create",
"a",
"meaningful",
"route",
"name",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/builders/route_builder.py#L38-L51
|
[
"def",
"create_route_long_name",
"(",
"relation",
",",
"short_name",
")",
":",
"if",
"relation",
".",
"tags",
".",
"get",
"(",
"'from'",
")",
"and",
"relation",
".",
"tags",
".",
"get",
"(",
"'to'",
")",
":",
"return",
"\"{0}-to-{1}\"",
".",
"format",
"(",
"relation",
".",
"tags",
".",
"get",
"(",
"'from'",
")",
",",
"relation",
".",
"tags",
".",
"get",
"(",
"'to'",
")",
")",
"name",
"=",
"relation",
".",
"tags",
".",
"get",
"(",
"'name'",
")",
"or",
"relation",
".",
"tags",
".",
"get",
"(",
"'alt_name'",
")",
"or",
"\"OSM Route No. {}\"",
".",
"format",
"(",
"relation",
".",
"id",
")",
"# Drop route_short_name from this one if it contains it",
"if",
"short_name",
"and",
"name",
".",
"startswith",
"(",
"short_name",
")",
":",
"# Drop it",
"return",
"name",
"[",
"len",
"(",
"short_name",
")",
":",
"]",
"return",
"name"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
get_agency_id
|
Construct an id for agency using its tags.
|
o2g/osm/builders/route_builder.py
|
def get_agency_id(relation):
"""Construct an id for agency using its tags."""
op = relation.tags.get('operator')
if op:
return int(hashlib.sha256(op.encode('utf-8')).hexdigest(), 16) % 10**8
return -1
|
def get_agency_id(relation):
"""Construct an id for agency using its tags."""
op = relation.tags.get('operator')
if op:
return int(hashlib.sha256(op.encode('utf-8')).hexdigest(), 16) % 10**8
return -1
|
[
"Construct",
"an",
"id",
"for",
"agency",
"using",
"its",
"tags",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/builders/route_builder.py#L54-L59
|
[
"def",
"get_agency_id",
"(",
"relation",
")",
":",
"op",
"=",
"relation",
".",
"tags",
".",
"get",
"(",
"'operator'",
")",
"if",
"op",
":",
"return",
"int",
"(",
"hashlib",
".",
"sha256",
"(",
"op",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
",",
"16",
")",
"%",
"10",
"**",
"8",
"return",
"-",
"1"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
TransitDataExporter.process
|
Process the files and collect necessary data.
|
o2g/osm/exporter.py
|
def process(self):
"""Process the files and collect necessary data."""
# Extract relations
self.rh = RelationHandler()
self.rh.apply_file(self.filename)
logging.debug('Found %d public transport relations.', len(self.rh.relations))
# Collect ids of interest
node_ids, stop_node_ids, way_ids, reverse_map = self.__collect_ids()
# Extract nodes
self.nh = NodeHandler(node_ids)
self.nh.apply_file(self.filename, locations=True)
count = 0
for idx, missing_node_id in enumerate(self.nh.missing_node_ids):
count += 1
logging.warning(
'[no data] missing stop node. rel: https://osm.org/relation/%s node: https://osm.org/node/%s.',
reverse_map[missing_node_id], missing_node_id)
if count:
logging.warning(
'%d nodes that appear in relations are missing.',
count)
else:
logging.debug('Lucky you! All relation member nodes were found.')
# Extract ways
self.wh = WayHandler(way_ids)
self.wh.apply_file(self.filename, locations=True)
|
def process(self):
"""Process the files and collect necessary data."""
# Extract relations
self.rh = RelationHandler()
self.rh.apply_file(self.filename)
logging.debug('Found %d public transport relations.', len(self.rh.relations))
# Collect ids of interest
node_ids, stop_node_ids, way_ids, reverse_map = self.__collect_ids()
# Extract nodes
self.nh = NodeHandler(node_ids)
self.nh.apply_file(self.filename, locations=True)
count = 0
for idx, missing_node_id in enumerate(self.nh.missing_node_ids):
count += 1
logging.warning(
'[no data] missing stop node. rel: https://osm.org/relation/%s node: https://osm.org/node/%s.',
reverse_map[missing_node_id], missing_node_id)
if count:
logging.warning(
'%d nodes that appear in relations are missing.',
count)
else:
logging.debug('Lucky you! All relation member nodes were found.')
# Extract ways
self.wh = WayHandler(way_ids)
self.wh.apply_file(self.filename, locations=True)
|
[
"Process",
"the",
"files",
"and",
"collect",
"necessary",
"data",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/exporter.py#L31-L63
|
[
"def",
"process",
"(",
"self",
")",
":",
"# Extract relations",
"self",
".",
"rh",
"=",
"RelationHandler",
"(",
")",
"self",
".",
"rh",
".",
"apply_file",
"(",
"self",
".",
"filename",
")",
"logging",
".",
"debug",
"(",
"'Found %d public transport relations.'",
",",
"len",
"(",
"self",
".",
"rh",
".",
"relations",
")",
")",
"# Collect ids of interest",
"node_ids",
",",
"stop_node_ids",
",",
"way_ids",
",",
"reverse_map",
"=",
"self",
".",
"__collect_ids",
"(",
")",
"# Extract nodes",
"self",
".",
"nh",
"=",
"NodeHandler",
"(",
"node_ids",
")",
"self",
".",
"nh",
".",
"apply_file",
"(",
"self",
".",
"filename",
",",
"locations",
"=",
"True",
")",
"count",
"=",
"0",
"for",
"idx",
",",
"missing_node_id",
"in",
"enumerate",
"(",
"self",
".",
"nh",
".",
"missing_node_ids",
")",
":",
"count",
"+=",
"1",
"logging",
".",
"warning",
"(",
"'[no data] missing stop node. rel: https://osm.org/relation/%s node: https://osm.org/node/%s.'",
",",
"reverse_map",
"[",
"missing_node_id",
"]",
",",
"missing_node_id",
")",
"if",
"count",
":",
"logging",
".",
"warning",
"(",
"'%d nodes that appear in relations are missing.'",
",",
"count",
")",
"else",
":",
"logging",
".",
"debug",
"(",
"'Lucky you! All relation member nodes were found.'",
")",
"# Extract ways",
"self",
".",
"wh",
"=",
"WayHandler",
"(",
"way_ids",
")",
"self",
".",
"wh",
".",
"apply_file",
"(",
"self",
".",
"filename",
",",
"locations",
"=",
"True",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
RelationHandler.relation
|
Process each relation.
|
o2g/osm/handlers/relation_handler.py
|
def relation(self, rel):
"""Process each relation."""
rel_type = rel.tags.get('type')
if any([rel.deleted,
not rel.visible,
not self.is_new_version(rel),
rel_type not in ['route', 'public_transport']]):
return
route_tag = rel.tags.get('route')
if rel_type == 'route' and route_tag not in self.transit_route_types:
return
public_transport = rel.tags.get('public_transport')
if rel_type == 'public_transport' and public_transport != 'stop_area':
return
self.relations[rel.id] = \
Relation(rel.id, {
'type': rel_type,
'public_transport': public_transport,
'route': route_tag,
'operator': rel.tags.get('operator'),
'color': rel.tags.get('color'),
'ref': rel.tags.get('ref'),
'from': rel.tags.get('from'),
'to': rel.tags.get('to'),
'name': rel.tags.get('name'),
'alt_name': rel.tags.get('alt_name'),
'url': rel.tags.get('url'),
'contact_website': rel.tags.get('contact:website')},
[(member.type, member.ref, member.role) for member in rel.members])
self.versions[rel.id] = rel.version
|
def relation(self, rel):
"""Process each relation."""
rel_type = rel.tags.get('type')
if any([rel.deleted,
not rel.visible,
not self.is_new_version(rel),
rel_type not in ['route', 'public_transport']]):
return
route_tag = rel.tags.get('route')
if rel_type == 'route' and route_tag not in self.transit_route_types:
return
public_transport = rel.tags.get('public_transport')
if rel_type == 'public_transport' and public_transport != 'stop_area':
return
self.relations[rel.id] = \
Relation(rel.id, {
'type': rel_type,
'public_transport': public_transport,
'route': route_tag,
'operator': rel.tags.get('operator'),
'color': rel.tags.get('color'),
'ref': rel.tags.get('ref'),
'from': rel.tags.get('from'),
'to': rel.tags.get('to'),
'name': rel.tags.get('name'),
'alt_name': rel.tags.get('alt_name'),
'url': rel.tags.get('url'),
'contact_website': rel.tags.get('contact:website')},
[(member.type, member.ref, member.role) for member in rel.members])
self.versions[rel.id] = rel.version
|
[
"Process",
"each",
"relation",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/handlers/relation_handler.py#L23-L56
|
[
"def",
"relation",
"(",
"self",
",",
"rel",
")",
":",
"rel_type",
"=",
"rel",
".",
"tags",
".",
"get",
"(",
"'type'",
")",
"if",
"any",
"(",
"[",
"rel",
".",
"deleted",
",",
"not",
"rel",
".",
"visible",
",",
"not",
"self",
".",
"is_new_version",
"(",
"rel",
")",
",",
"rel_type",
"not",
"in",
"[",
"'route'",
",",
"'public_transport'",
"]",
"]",
")",
":",
"return",
"route_tag",
"=",
"rel",
".",
"tags",
".",
"get",
"(",
"'route'",
")",
"if",
"rel_type",
"==",
"'route'",
"and",
"route_tag",
"not",
"in",
"self",
".",
"transit_route_types",
":",
"return",
"public_transport",
"=",
"rel",
".",
"tags",
".",
"get",
"(",
"'public_transport'",
")",
"if",
"rel_type",
"==",
"'public_transport'",
"and",
"public_transport",
"!=",
"'stop_area'",
":",
"return",
"self",
".",
"relations",
"[",
"rel",
".",
"id",
"]",
"=",
"Relation",
"(",
"rel",
".",
"id",
",",
"{",
"'type'",
":",
"rel_type",
",",
"'public_transport'",
":",
"public_transport",
",",
"'route'",
":",
"route_tag",
",",
"'operator'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'operator'",
")",
",",
"'color'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'color'",
")",
",",
"'ref'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'ref'",
")",
",",
"'from'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'from'",
")",
",",
"'to'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'to'",
")",
",",
"'name'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'name'",
")",
",",
"'alt_name'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'alt_name'",
")",
",",
"'url'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'url'",
")",
",",
"'contact_website'",
":",
"rel",
".",
"tags",
".",
"get",
"(",
"'contact:website'",
")",
"}",
",",
"[",
"(",
"member",
".",
"type",
",",
"member",
".",
"ref",
",",
"member",
".",
"role",
")",
"for",
"member",
"in",
"rel",
".",
"members",
"]",
")",
"self",
".",
"versions",
"[",
"rel",
".",
"id",
"]",
"=",
"rel",
".",
"version"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
create_dummy_data
|
Create `calendar`, `stop_times`, `trips` and `shapes`.
:return: DummyData namedtuple
|
o2g/gtfs/gtfs_dummy.py
|
def create_dummy_data(routes, stops):
"""Create `calendar`, `stop_times`, `trips` and `shapes`.
:return: DummyData namedtuple
"""
# Build stops per route auxiliary map
stops_per_route = defaultdict(lambda: [])
stops_map = {}
for s in stops:
if not s.route_id:
continue
stops_per_route[s.route_id].append(s)
stops_map[s.stop_id] = s
calendar = _create_dummy_calendar()
trips = \
_create_dummy_trips(
routes,
stops_per_route,
calendar)
stop_times = _create_dummy_stoptimes(trips, stops_per_route)
frequencies = _create_dummy_frequencies(trips)
return DummyData(calendar, stop_times, trips, frequencies)
|
def create_dummy_data(routes, stops):
"""Create `calendar`, `stop_times`, `trips` and `shapes`.
:return: DummyData namedtuple
"""
# Build stops per route auxiliary map
stops_per_route = defaultdict(lambda: [])
stops_map = {}
for s in stops:
if not s.route_id:
continue
stops_per_route[s.route_id].append(s)
stops_map[s.stop_id] = s
calendar = _create_dummy_calendar()
trips = \
_create_dummy_trips(
routes,
stops_per_route,
calendar)
stop_times = _create_dummy_stoptimes(trips, stops_per_route)
frequencies = _create_dummy_frequencies(trips)
return DummyData(calendar, stop_times, trips, frequencies)
|
[
"Create",
"calendar",
"stop_times",
"trips",
"and",
"shapes",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/gtfs/gtfs_dummy.py#L14-L39
|
[
"def",
"create_dummy_data",
"(",
"routes",
",",
"stops",
")",
":",
"# Build stops per route auxiliary map",
"stops_per_route",
"=",
"defaultdict",
"(",
"lambda",
":",
"[",
"]",
")",
"stops_map",
"=",
"{",
"}",
"for",
"s",
"in",
"stops",
":",
"if",
"not",
"s",
".",
"route_id",
":",
"continue",
"stops_per_route",
"[",
"s",
".",
"route_id",
"]",
".",
"append",
"(",
"s",
")",
"stops_map",
"[",
"s",
".",
"stop_id",
"]",
"=",
"s",
"calendar",
"=",
"_create_dummy_calendar",
"(",
")",
"trips",
"=",
"_create_dummy_trips",
"(",
"routes",
",",
"stops_per_route",
",",
"calendar",
")",
"stop_times",
"=",
"_create_dummy_stoptimes",
"(",
"trips",
",",
"stops_per_route",
")",
"frequencies",
"=",
"_create_dummy_frequencies",
"(",
"trips",
")",
"return",
"DummyData",
"(",
"calendar",
",",
"stop_times",
",",
"trips",
",",
"frequencies",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
patch_agencies
|
Fill the fields that are necessary for passing transitfeed checks.
|
o2g/gtfs/gtfs_dummy.py
|
def patch_agencies(agencies):
"""Fill the fields that are necessary for passing transitfeed checks."""
# First return the unknown agency entry
yield Agency(-1, 'http://hiposfer.com', 'Unknown agency', 'Europe/Berlin')
# Then return the rest.
for agency_id, agency_url, agency_name, agency_timezone in agencies:
if not agency_url:
agency_url = 'http://hiposfer.com'
if not agency_timezone:
# Set everything to one time zone to get rid of transitfeeds error.
agency_timezone = 'Europe/Berlin'
yield Agency(agency_id, agency_url, agency_name, agency_timezone)
|
def patch_agencies(agencies):
"""Fill the fields that are necessary for passing transitfeed checks."""
# First return the unknown agency entry
yield Agency(-1, 'http://hiposfer.com', 'Unknown agency', 'Europe/Berlin')
# Then return the rest.
for agency_id, agency_url, agency_name, agency_timezone in agencies:
if not agency_url:
agency_url = 'http://hiposfer.com'
if not agency_timezone:
# Set everything to one time zone to get rid of transitfeeds error.
agency_timezone = 'Europe/Berlin'
yield Agency(agency_id, agency_url, agency_name, agency_timezone)
|
[
"Fill",
"the",
"fields",
"that",
"are",
"necessary",
"for",
"passing",
"transitfeed",
"checks",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/gtfs/gtfs_dummy.py#L42-L54
|
[
"def",
"patch_agencies",
"(",
"agencies",
")",
":",
"# First return the unknown agency entry",
"yield",
"Agency",
"(",
"-",
"1",
",",
"'http://hiposfer.com'",
",",
"'Unknown agency'",
",",
"'Europe/Berlin'",
")",
"# Then return the rest.",
"for",
"agency_id",
",",
"agency_url",
",",
"agency_name",
",",
"agency_timezone",
"in",
"agencies",
":",
"if",
"not",
"agency_url",
":",
"agency_url",
"=",
"'http://hiposfer.com'",
"if",
"not",
"agency_timezone",
":",
"# Set everything to one time zone to get rid of transitfeeds error.",
"agency_timezone",
"=",
"'Europe/Berlin'",
"yield",
"Agency",
"(",
"agency_id",
",",
"agency_url",
",",
"agency_name",
",",
"agency_timezone",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
_create_dummy_trip_stoptimes
|
Create station stop times for each trip.
|
o2g/gtfs/gtfs_dummy.py
|
def _create_dummy_trip_stoptimes(trip_id, stops, first_service_time):
"""Create station stop times for each trip."""
waiting = datetime.timedelta(seconds=30)
arrival = first_service_time
last_departure = first_service_time
last_departure_hour = (arrival + waiting).hour
last_stop = None
departure_hour = None
arrival_hour = None
for stop_sequence, stop in enumerate(stops):
# Avoid time travels
arrival = last_departure + get_time_from_last_stop(last_stop, stop)
departure = arrival + waiting
# Cover the case when the arrival time falls into the next day
if arrival.hour < last_departure_hour:
diff = last_departure_hour
arrival_hour = arrival.hour + diff
departure_hour = departure.hour + diff
last_departure_hour = departure.hour + diff
else:
arrival_hour = arrival.hour
departure_hour = departure.hour
last_departure_hour = departure.hour
# Cover the case when adding waiting time to the arrival time
# falls into the next day
if departure.hour < arrival.hour:
diff = last_departure_hour
departure_hour = departure.hour + diff
last_departure_hour = departure.hour + diff
yield {'trip_id': trip_id,
'arrival_time': '{:02}:{}'.format(
arrival_hour,
arrival.strftime('%M:%S')),
'departure_time': '{:02}:{}'.format(
departure_hour,
departure.strftime('%M:%S')),
'stop_id': stop.stop_id,
'stop_sequence': stop_sequence}
last_stop = stop
last_departure = departure
|
def _create_dummy_trip_stoptimes(trip_id, stops, first_service_time):
"""Create station stop times for each trip."""
waiting = datetime.timedelta(seconds=30)
arrival = first_service_time
last_departure = first_service_time
last_departure_hour = (arrival + waiting).hour
last_stop = None
departure_hour = None
arrival_hour = None
for stop_sequence, stop in enumerate(stops):
# Avoid time travels
arrival = last_departure + get_time_from_last_stop(last_stop, stop)
departure = arrival + waiting
# Cover the case when the arrival time falls into the next day
if arrival.hour < last_departure_hour:
diff = last_departure_hour
arrival_hour = arrival.hour + diff
departure_hour = departure.hour + diff
last_departure_hour = departure.hour + diff
else:
arrival_hour = arrival.hour
departure_hour = departure.hour
last_departure_hour = departure.hour
# Cover the case when adding waiting time to the arrival time
# falls into the next day
if departure.hour < arrival.hour:
diff = last_departure_hour
departure_hour = departure.hour + diff
last_departure_hour = departure.hour + diff
yield {'trip_id': trip_id,
'arrival_time': '{:02}:{}'.format(
arrival_hour,
arrival.strftime('%M:%S')),
'departure_time': '{:02}:{}'.format(
departure_hour,
departure.strftime('%M:%S')),
'stop_id': stop.stop_id,
'stop_sequence': stop_sequence}
last_stop = stop
last_departure = departure
|
[
"Create",
"station",
"stop",
"times",
"for",
"each",
"trip",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/gtfs/gtfs_dummy.py#L117-L162
|
[
"def",
"_create_dummy_trip_stoptimes",
"(",
"trip_id",
",",
"stops",
",",
"first_service_time",
")",
":",
"waiting",
"=",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"30",
")",
"arrival",
"=",
"first_service_time",
"last_departure",
"=",
"first_service_time",
"last_departure_hour",
"=",
"(",
"arrival",
"+",
"waiting",
")",
".",
"hour",
"last_stop",
"=",
"None",
"departure_hour",
"=",
"None",
"arrival_hour",
"=",
"None",
"for",
"stop_sequence",
",",
"stop",
"in",
"enumerate",
"(",
"stops",
")",
":",
"# Avoid time travels",
"arrival",
"=",
"last_departure",
"+",
"get_time_from_last_stop",
"(",
"last_stop",
",",
"stop",
")",
"departure",
"=",
"arrival",
"+",
"waiting",
"# Cover the case when the arrival time falls into the next day",
"if",
"arrival",
".",
"hour",
"<",
"last_departure_hour",
":",
"diff",
"=",
"last_departure_hour",
"arrival_hour",
"=",
"arrival",
".",
"hour",
"+",
"diff",
"departure_hour",
"=",
"departure",
".",
"hour",
"+",
"diff",
"last_departure_hour",
"=",
"departure",
".",
"hour",
"+",
"diff",
"else",
":",
"arrival_hour",
"=",
"arrival",
".",
"hour",
"departure_hour",
"=",
"departure",
".",
"hour",
"last_departure_hour",
"=",
"departure",
".",
"hour",
"# Cover the case when adding waiting time to the arrival time",
"# falls into the next day",
"if",
"departure",
".",
"hour",
"<",
"arrival",
".",
"hour",
":",
"diff",
"=",
"last_departure_hour",
"departure_hour",
"=",
"departure",
".",
"hour",
"+",
"diff",
"last_departure_hour",
"=",
"departure",
".",
"hour",
"+",
"diff",
"yield",
"{",
"'trip_id'",
":",
"trip_id",
",",
"'arrival_time'",
":",
"'{:02}:{}'",
".",
"format",
"(",
"arrival_hour",
",",
"arrival",
".",
"strftime",
"(",
"'%M:%S'",
")",
")",
",",
"'departure_time'",
":",
"'{:02}:{}'",
".",
"format",
"(",
"departure_hour",
",",
"departure",
".",
"strftime",
"(",
"'%M:%S'",
")",
")",
",",
"'stop_id'",
":",
"stop",
".",
"stop_id",
",",
"'stop_sequence'",
":",
"stop_sequence",
"}",
"last_stop",
"=",
"stop",
"last_departure",
"=",
"departure"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
GTFSWriter.write_zipped
|
Write the GTFS feed in the given file.
|
o2g/gtfs/gtfs_writer.py
|
def write_zipped(self, filepath):
"""Write the GTFS feed in the given file."""
with zipfile.ZipFile(filepath, mode='w', compression=zipfile.ZIP_DEFLATED) as zfile:
for name, buffer in self._buffers.items():
encoded_values = io.BytesIO(buffer.getvalue().encode('utf-8'))
zfile.writestr('{}.txt'.format(name),
encoded_values.getbuffer())
for name, path in self._files.items():
zfile.write(path, arcname=name)
|
def write_zipped(self, filepath):
"""Write the GTFS feed in the given file."""
with zipfile.ZipFile(filepath, mode='w', compression=zipfile.ZIP_DEFLATED) as zfile:
for name, buffer in self._buffers.items():
encoded_values = io.BytesIO(buffer.getvalue().encode('utf-8'))
zfile.writestr('{}.txt'.format(name),
encoded_values.getbuffer())
for name, path in self._files.items():
zfile.write(path, arcname=name)
|
[
"Write",
"the",
"GTFS",
"feed",
"in",
"the",
"given",
"file",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/gtfs/gtfs_writer.py#L109-L117
|
[
"def",
"write_zipped",
"(",
"self",
",",
"filepath",
")",
":",
"with",
"zipfile",
".",
"ZipFile",
"(",
"filepath",
",",
"mode",
"=",
"'w'",
",",
"compression",
"=",
"zipfile",
".",
"ZIP_DEFLATED",
")",
"as",
"zfile",
":",
"for",
"name",
",",
"buffer",
"in",
"self",
".",
"_buffers",
".",
"items",
"(",
")",
":",
"encoded_values",
"=",
"io",
".",
"BytesIO",
"(",
"buffer",
".",
"getvalue",
"(",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"zfile",
".",
"writestr",
"(",
"'{}.txt'",
".",
"format",
"(",
"name",
")",
",",
"encoded_values",
".",
"getbuffer",
"(",
")",
")",
"for",
"name",
",",
"path",
"in",
"self",
".",
"_files",
".",
"items",
"(",
")",
":",
"zfile",
".",
"write",
"(",
"path",
",",
"arcname",
"=",
"name",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
GTFSWriter.write_unzipped
|
Write GTFS text files in the given path.
|
o2g/gtfs/gtfs_writer.py
|
def write_unzipped(self, destination):
"""Write GTFS text files in the given path."""
for name, buffer in self._buffers.items():
with open(os.path.join(destination,
'{}.txt'.format(name)),
'w', encoding='utf-8') as file:
file.write(buffer.getvalue())
for name, path in self._files.items():
shutil.copy(path, os.path.join(destination, name))
|
def write_unzipped(self, destination):
"""Write GTFS text files in the given path."""
for name, buffer in self._buffers.items():
with open(os.path.join(destination,
'{}.txt'.format(name)),
'w', encoding='utf-8') as file:
file.write(buffer.getvalue())
for name, path in self._files.items():
shutil.copy(path, os.path.join(destination, name))
|
[
"Write",
"GTFS",
"text",
"files",
"in",
"the",
"given",
"path",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/gtfs/gtfs_writer.py#L119-L127
|
[
"def",
"write_unzipped",
"(",
"self",
",",
"destination",
")",
":",
"for",
"name",
",",
"buffer",
"in",
"self",
".",
"_buffers",
".",
"items",
"(",
")",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"destination",
",",
"'{}.txt'",
".",
"format",
"(",
"name",
")",
")",
",",
"'w'",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"buffer",
".",
"getvalue",
"(",
")",
")",
"for",
"name",
",",
"path",
"in",
"self",
".",
"_files",
".",
"items",
"(",
")",
":",
"shutil",
".",
"copy",
"(",
"path",
",",
"os",
".",
"path",
".",
"join",
"(",
"destination",
",",
"name",
")",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
build_agency
|
Extract agency information.
|
o2g/osm/builders/agency_builder.py
|
def build_agency(relation, nodes):
"""Extract agency information."""
# TODO: find out the operator for routes without operator tag.
# See: http://wiki.openstreetmap.org/wiki/Key:operator
# Quote from the above link:
#
# If the vast majority of a certain object in an area is operated by a certain
# organization and only very few by others then it may be sufficient to only tag the
# exceptions. For example, when nearly all roads in an area are managed by a local
# authority then it would be sufficient to only tag those that are not with an operator
# tag.
op = relation.tags.get('operator')
agency_url = relation.tags.get('url') or relation.tags.get('contact_website')
if not op:
return
agency_id = int(hashlib.sha256(op.encode('utf8')).hexdigest(), 16) % 10**8
return Agency(agency_id, agency_url, op, '')
|
def build_agency(relation, nodes):
"""Extract agency information."""
# TODO: find out the operator for routes without operator tag.
# See: http://wiki.openstreetmap.org/wiki/Key:operator
# Quote from the above link:
#
# If the vast majority of a certain object in an area is operated by a certain
# organization and only very few by others then it may be sufficient to only tag the
# exceptions. For example, when nearly all roads in an area are managed by a local
# authority then it would be sufficient to only tag those that are not with an operator
# tag.
op = relation.tags.get('operator')
agency_url = relation.tags.get('url') or relation.tags.get('contact_website')
if not op:
return
agency_id = int(hashlib.sha256(op.encode('utf8')).hexdigest(), 16) % 10**8
return Agency(agency_id, agency_url, op, '')
|
[
"Extract",
"agency",
"information",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/builders/agency_builder.py#L21-L40
|
[
"def",
"build_agency",
"(",
"relation",
",",
"nodes",
")",
":",
"# TODO: find out the operator for routes without operator tag.",
"# See: http://wiki.openstreetmap.org/wiki/Key:operator",
"# Quote from the above link:",
"#",
"# If the vast majority of a certain object in an area is operated by a certain",
"# organization and only very few by others then it may be sufficient to only tag the",
"# exceptions. For example, when nearly all roads in an area are managed by a local",
"# authority then it would be sufficient to only tag those that are not with an operator",
"# tag.",
"op",
"=",
"relation",
".",
"tags",
".",
"get",
"(",
"'operator'",
")",
"agency_url",
"=",
"relation",
".",
"tags",
".",
"get",
"(",
"'url'",
")",
"or",
"relation",
".",
"tags",
".",
"get",
"(",
"'contact_website'",
")",
"if",
"not",
"op",
":",
"return",
"agency_id",
"=",
"int",
"(",
"hashlib",
".",
"sha256",
"(",
"op",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"hexdigest",
"(",
")",
",",
"16",
")",
"%",
"10",
"**",
"8",
"return",
"Agency",
"(",
"agency_id",
",",
"agency_url",
",",
"op",
",",
"''",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
extract_stops
|
Extract stops in a relation.
|
o2g/osm/builders/stop_builder.py
|
def extract_stops(relation, nodes, visited_stop_ids, stop_to_station_map):
"""Extract stops in a relation."""
# member_role: stop, halt, platform, terminal, etc.
for member_type, member_id, member_role in relation.member_info:
if member_id not in visited_stop_ids and \
member_id in nodes and\
member_role in ('stop', 'halt'):
location_type = ''
visited_stop_ids.add(member_id)
yield Stop(
member_id,
nodes[member_id].tags.get('name') or
"Unnamed {} stop.".format(relation.tags.get('route')),
nodes[member_id].lon if member_id in nodes else '',
nodes[member_id].lat if member_id in nodes else '',
relation.id,
_map_wheelchair(nodes[member_id].tags.get('wheelchair')),
location_type,
stop_to_station_map.get(member_id, ''))
|
def extract_stops(relation, nodes, visited_stop_ids, stop_to_station_map):
"""Extract stops in a relation."""
# member_role: stop, halt, platform, terminal, etc.
for member_type, member_id, member_role in relation.member_info:
if member_id not in visited_stop_ids and \
member_id in nodes and\
member_role in ('stop', 'halt'):
location_type = ''
visited_stop_ids.add(member_id)
yield Stop(
member_id,
nodes[member_id].tags.get('name') or
"Unnamed {} stop.".format(relation.tags.get('route')),
nodes[member_id].lon if member_id in nodes else '',
nodes[member_id].lat if member_id in nodes else '',
relation.id,
_map_wheelchair(nodes[member_id].tags.get('wheelchair')),
location_type,
stop_to_station_map.get(member_id, ''))
|
[
"Extract",
"stops",
"in",
"a",
"relation",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/builders/stop_builder.py#L71-L92
|
[
"def",
"extract_stops",
"(",
"relation",
",",
"nodes",
",",
"visited_stop_ids",
",",
"stop_to_station_map",
")",
":",
"# member_role: stop, halt, platform, terminal, etc.",
"for",
"member_type",
",",
"member_id",
",",
"member_role",
"in",
"relation",
".",
"member_info",
":",
"if",
"member_id",
"not",
"in",
"visited_stop_ids",
"and",
"member_id",
"in",
"nodes",
"and",
"member_role",
"in",
"(",
"'stop'",
",",
"'halt'",
")",
":",
"location_type",
"=",
"''",
"visited_stop_ids",
".",
"add",
"(",
"member_id",
")",
"yield",
"Stop",
"(",
"member_id",
",",
"nodes",
"[",
"member_id",
"]",
".",
"tags",
".",
"get",
"(",
"'name'",
")",
"or",
"\"Unnamed {} stop.\"",
".",
"format",
"(",
"relation",
".",
"tags",
".",
"get",
"(",
"'route'",
")",
")",
",",
"nodes",
"[",
"member_id",
"]",
".",
"lon",
"if",
"member_id",
"in",
"nodes",
"else",
"''",
",",
"nodes",
"[",
"member_id",
"]",
".",
"lat",
"if",
"member_id",
"in",
"nodes",
"else",
"''",
",",
"relation",
".",
"id",
",",
"_map_wheelchair",
"(",
"nodes",
"[",
"member_id",
"]",
".",
"tags",
".",
"get",
"(",
"'wheelchair'",
")",
")",
",",
"location_type",
",",
"stop_to_station_map",
".",
"get",
"(",
"member_id",
",",
"''",
")",
")"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
build_shape
|
Extract shape of one route.
|
o2g/osm/builders/shape_builder.py
|
def build_shape(relation, nodes, ways):
"""Extract shape of one route."""
sequence_index = 0
for member_type, member_id, member_role in relation.member_info:
if member_id in nodes:
yield Shape(
relation.id,
nodes[member_id].lat,
nodes[member_id].lon,
sequence_index)
sequence_index += 1
# Do we need to consider ways too? It dramatically increases the number of shapes.
elif member_id in ways:
continue
# for point in ways[member_id].points:
# shape = Shape(
# relation.id,
# point.lat,
# point.lon,
# sequence_index)
# sequence_index += 1
else:
# Ignore excessive logging for now.
pass
|
def build_shape(relation, nodes, ways):
"""Extract shape of one route."""
sequence_index = 0
for member_type, member_id, member_role in relation.member_info:
if member_id in nodes:
yield Shape(
relation.id,
nodes[member_id].lat,
nodes[member_id].lon,
sequence_index)
sequence_index += 1
# Do we need to consider ways too? It dramatically increases the number of shapes.
elif member_id in ways:
continue
# for point in ways[member_id].points:
# shape = Shape(
# relation.id,
# point.lat,
# point.lon,
# sequence_index)
# sequence_index += 1
else:
# Ignore excessive logging for now.
pass
|
[
"Extract",
"shape",
"of",
"one",
"route",
"."
] |
hiposfer/o2g
|
python
|
https://github.com/hiposfer/o2g/blob/1165ba75a5eb64b3091e9b71ebd589507ae1ebf3/o2g/osm/builders/shape_builder.py#L14-L42
|
[
"def",
"build_shape",
"(",
"relation",
",",
"nodes",
",",
"ways",
")",
":",
"sequence_index",
"=",
"0",
"for",
"member_type",
",",
"member_id",
",",
"member_role",
"in",
"relation",
".",
"member_info",
":",
"if",
"member_id",
"in",
"nodes",
":",
"yield",
"Shape",
"(",
"relation",
".",
"id",
",",
"nodes",
"[",
"member_id",
"]",
".",
"lat",
",",
"nodes",
"[",
"member_id",
"]",
".",
"lon",
",",
"sequence_index",
")",
"sequence_index",
"+=",
"1",
"# Do we need to consider ways too? It dramatically increases the number of shapes.",
"elif",
"member_id",
"in",
"ways",
":",
"continue",
"# for point in ways[member_id].points:",
"# shape = Shape(",
"# relation.id,",
"# point.lat,",
"# point.lon,",
"# sequence_index)",
"# sequence_index += 1",
"else",
":",
"# Ignore excessive logging for now.",
"pass"
] |
1165ba75a5eb64b3091e9b71ebd589507ae1ebf3
|
test
|
U2FDevice.get_supported_versions
|
Gets a list of supported U2F versions from the device.
|
u2flib_host/device.py
|
def get_supported_versions(self):
"""
Gets a list of supported U2F versions from the device.
"""
if not hasattr(self, '_versions'):
try:
self._versions = [self.send_apdu(INS_GET_VERSION).decode()]
except exc.APDUError as e:
# v0 didn't support the instruction.
self._versions = ['v0'] if e.code == 0x6d00 else []
return self._versions
|
def get_supported_versions(self):
"""
Gets a list of supported U2F versions from the device.
"""
if not hasattr(self, '_versions'):
try:
self._versions = [self.send_apdu(INS_GET_VERSION).decode()]
except exc.APDUError as e:
# v0 didn't support the instruction.
self._versions = ['v0'] if e.code == 0x6d00 else []
return self._versions
|
[
"Gets",
"a",
"list",
"of",
"supported",
"U2F",
"versions",
"from",
"the",
"device",
"."
] |
Yubico/python-u2flib-host
|
python
|
https://github.com/Yubico/python-u2flib-host/blob/eadc4dbf3bf516e74ea00d2e5690742a535834cb/u2flib_host/device.py#L69-L80
|
[
"def",
"get_supported_versions",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_versions'",
")",
":",
"try",
":",
"self",
".",
"_versions",
"=",
"[",
"self",
".",
"send_apdu",
"(",
"INS_GET_VERSION",
")",
".",
"decode",
"(",
")",
"]",
"except",
"exc",
".",
"APDUError",
"as",
"e",
":",
"# v0 didn't support the instruction.",
"self",
".",
"_versions",
"=",
"[",
"'v0'",
"]",
"if",
"e",
".",
"code",
"==",
"0x6d00",
"else",
"[",
"]",
"return",
"self",
".",
"_versions"
] |
eadc4dbf3bf516e74ea00d2e5690742a535834cb
|
test
|
U2FDevice.send_apdu
|
Sends an APDU to the device, and waits for a response.
|
u2flib_host/device.py
|
def send_apdu(self, ins, p1=0, p2=0, data=b''):
"""
Sends an APDU to the device, and waits for a response.
"""
if data is None:
data = b''
elif isinstance(data, int):
data = int2byte(data)
size = len(data)
l0 = size >> 16 & 0xff
l1 = size >> 8 & 0xff
l2 = size & 0xff
apdu_data = struct.pack('B B B B B B B %is B B' % size,
0, ins, p1, p2, l0, l1, l2, data, 0x00, 0x00)
try:
resp = self._do_send_apdu(apdu_data)
except Exception as e:
# TODO Use six.reraise if/when Six becomes an agreed dependency.
raise exc.DeviceError(e)
status = struct.unpack('>H', resp[-2:])[0]
data = resp[:-2]
if status != APDU_OK:
raise exc.APDUError(status)
return data
|
def send_apdu(self, ins, p1=0, p2=0, data=b''):
"""
Sends an APDU to the device, and waits for a response.
"""
if data is None:
data = b''
elif isinstance(data, int):
data = int2byte(data)
size = len(data)
l0 = size >> 16 & 0xff
l1 = size >> 8 & 0xff
l2 = size & 0xff
apdu_data = struct.pack('B B B B B B B %is B B' % size,
0, ins, p1, p2, l0, l1, l2, data, 0x00, 0x00)
try:
resp = self._do_send_apdu(apdu_data)
except Exception as e:
# TODO Use six.reraise if/when Six becomes an agreed dependency.
raise exc.DeviceError(e)
status = struct.unpack('>H', resp[-2:])[0]
data = resp[:-2]
if status != APDU_OK:
raise exc.APDUError(status)
return data
|
[
"Sends",
"an",
"APDU",
"to",
"the",
"device",
"and",
"waits",
"for",
"a",
"response",
"."
] |
Yubico/python-u2flib-host
|
python
|
https://github.com/Yubico/python-u2flib-host/blob/eadc4dbf3bf516e74ea00d2e5690742a535834cb/u2flib_host/device.py#L89-L113
|
[
"def",
"send_apdu",
"(",
"self",
",",
"ins",
",",
"p1",
"=",
"0",
",",
"p2",
"=",
"0",
",",
"data",
"=",
"b''",
")",
":",
"if",
"data",
"is",
"None",
":",
"data",
"=",
"b''",
"elif",
"isinstance",
"(",
"data",
",",
"int",
")",
":",
"data",
"=",
"int2byte",
"(",
"data",
")",
"size",
"=",
"len",
"(",
"data",
")",
"l0",
"=",
"size",
">>",
"16",
"&",
"0xff",
"l1",
"=",
"size",
">>",
"8",
"&",
"0xff",
"l2",
"=",
"size",
"&",
"0xff",
"apdu_data",
"=",
"struct",
".",
"pack",
"(",
"'B B B B B B B %is B B'",
"%",
"size",
",",
"0",
",",
"ins",
",",
"p1",
",",
"p2",
",",
"l0",
",",
"l1",
",",
"l2",
",",
"data",
",",
"0x00",
",",
"0x00",
")",
"try",
":",
"resp",
"=",
"self",
".",
"_do_send_apdu",
"(",
"apdu_data",
")",
"except",
"Exception",
"as",
"e",
":",
"# TODO Use six.reraise if/when Six becomes an agreed dependency.",
"raise",
"exc",
".",
"DeviceError",
"(",
"e",
")",
"status",
"=",
"struct",
".",
"unpack",
"(",
"'>H'",
",",
"resp",
"[",
"-",
"2",
":",
"]",
")",
"[",
"0",
"]",
"data",
"=",
"resp",
"[",
":",
"-",
"2",
"]",
"if",
"status",
"!=",
"APDU_OK",
":",
"raise",
"exc",
".",
"APDUError",
"(",
"status",
")",
"return",
"data"
] |
eadc4dbf3bf516e74ea00d2e5690742a535834cb
|
test
|
authenticate
|
Interactively authenticates a AuthenticateRequest using an attached U2F
device.
|
u2flib_host/authenticate.py
|
def authenticate(devices, params, facet, check_only):
"""
Interactively authenticates a AuthenticateRequest using an attached U2F
device.
"""
for device in devices[:]:
try:
device.open()
except:
devices.remove(device)
try:
prompted = False
while devices:
removed = []
for device in devices:
try:
return u2f.authenticate(device, params, facet, check_only)
except exc.APDUError as e:
if e.code == APDU_USE_NOT_SATISFIED:
if check_only:
sys.stderr.write('\nCorrect U2F device present!\n')
sys.exit(0)
if not prompted:
sys.stderr.write('\nTouch the flashing U2F device '
'to authenticate...\n')
prompted = True
else:
removed.append(device)
except exc.DeviceError:
removed.append(device)
devices = [d for d in devices if d not in removed]
for d in removed:
d.close()
time.sleep(0.25)
finally:
for device in devices:
device.close()
sys.stderr.write('\nThe required U2F device is not present!\n')
sys.exit(1)
|
def authenticate(devices, params, facet, check_only):
"""
Interactively authenticates a AuthenticateRequest using an attached U2F
device.
"""
for device in devices[:]:
try:
device.open()
except:
devices.remove(device)
try:
prompted = False
while devices:
removed = []
for device in devices:
try:
return u2f.authenticate(device, params, facet, check_only)
except exc.APDUError as e:
if e.code == APDU_USE_NOT_SATISFIED:
if check_only:
sys.stderr.write('\nCorrect U2F device present!\n')
sys.exit(0)
if not prompted:
sys.stderr.write('\nTouch the flashing U2F device '
'to authenticate...\n')
prompted = True
else:
removed.append(device)
except exc.DeviceError:
removed.append(device)
devices = [d for d in devices if d not in removed]
for d in removed:
d.close()
time.sleep(0.25)
finally:
for device in devices:
device.close()
sys.stderr.write('\nThe required U2F device is not present!\n')
sys.exit(1)
|
[
"Interactively",
"authenticates",
"a",
"AuthenticateRequest",
"using",
"an",
"attached",
"U2F",
"device",
"."
] |
Yubico/python-u2flib-host
|
python
|
https://github.com/Yubico/python-u2flib-host/blob/eadc4dbf3bf516e74ea00d2e5690742a535834cb/u2flib_host/authenticate.py#L41-L80
|
[
"def",
"authenticate",
"(",
"devices",
",",
"params",
",",
"facet",
",",
"check_only",
")",
":",
"for",
"device",
"in",
"devices",
"[",
":",
"]",
":",
"try",
":",
"device",
".",
"open",
"(",
")",
"except",
":",
"devices",
".",
"remove",
"(",
"device",
")",
"try",
":",
"prompted",
"=",
"False",
"while",
"devices",
":",
"removed",
"=",
"[",
"]",
"for",
"device",
"in",
"devices",
":",
"try",
":",
"return",
"u2f",
".",
"authenticate",
"(",
"device",
",",
"params",
",",
"facet",
",",
"check_only",
")",
"except",
"exc",
".",
"APDUError",
"as",
"e",
":",
"if",
"e",
".",
"code",
"==",
"APDU_USE_NOT_SATISFIED",
":",
"if",
"check_only",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nCorrect U2F device present!\\n'",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"if",
"not",
"prompted",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nTouch the flashing U2F device '",
"'to authenticate...\\n'",
")",
"prompted",
"=",
"True",
"else",
":",
"removed",
".",
"append",
"(",
"device",
")",
"except",
"exc",
".",
"DeviceError",
":",
"removed",
".",
"append",
"(",
"device",
")",
"devices",
"=",
"[",
"d",
"for",
"d",
"in",
"devices",
"if",
"d",
"not",
"in",
"removed",
"]",
"for",
"d",
"in",
"removed",
":",
"d",
".",
"close",
"(",
")",
"time",
".",
"sleep",
"(",
"0.25",
")",
"finally",
":",
"for",
"device",
"in",
"devices",
":",
"device",
".",
"close",
"(",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nThe required U2F device is not present!\\n'",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] |
eadc4dbf3bf516e74ea00d2e5690742a535834cb
|
test
|
register
|
Register a U2F device
data = {
"version": "U2F_V2",
"challenge": string, //b64 encoded challenge
"appId": string, //app_id
}
|
u2flib_host/u2f_v2.py
|
def register(device, data, facet):
"""
Register a U2F device
data = {
"version": "U2F_V2",
"challenge": string, //b64 encoded challenge
"appId": string, //app_id
}
"""
if isinstance(data, string_types):
data = json.loads(data)
if data['version'] != VERSION:
raise ValueError('Unsupported U2F version: %s' % data['version'])
app_id = data.get('appId', facet)
verify_facet(app_id, facet)
app_param = sha256(app_id.encode('utf8')).digest()
client_data = {
'typ': 'navigator.id.finishEnrollment',
'challenge': data['challenge'],
'origin': facet
}
client_data = json.dumps(client_data)
client_param = sha256(client_data.encode('utf8')).digest()
request = client_param + app_param
p1 = 0x03
p2 = 0
response = device.send_apdu(INS_ENROLL, p1, p2, request)
return {
'registrationData': websafe_encode(response),
'clientData': websafe_encode(client_data)
}
|
def register(device, data, facet):
"""
Register a U2F device
data = {
"version": "U2F_V2",
"challenge": string, //b64 encoded challenge
"appId": string, //app_id
}
"""
if isinstance(data, string_types):
data = json.loads(data)
if data['version'] != VERSION:
raise ValueError('Unsupported U2F version: %s' % data['version'])
app_id = data.get('appId', facet)
verify_facet(app_id, facet)
app_param = sha256(app_id.encode('utf8')).digest()
client_data = {
'typ': 'navigator.id.finishEnrollment',
'challenge': data['challenge'],
'origin': facet
}
client_data = json.dumps(client_data)
client_param = sha256(client_data.encode('utf8')).digest()
request = client_param + app_param
p1 = 0x03
p2 = 0
response = device.send_apdu(INS_ENROLL, p1, p2, request)
return {
'registrationData': websafe_encode(response),
'clientData': websafe_encode(client_data)
}
|
[
"Register",
"a",
"U2F",
"device"
] |
Yubico/python-u2flib-host
|
python
|
https://github.com/Yubico/python-u2flib-host/blob/eadc4dbf3bf516e74ea00d2e5690742a535834cb/u2flib_host/u2f_v2.py#L39-L78
|
[
"def",
"register",
"(",
"device",
",",
"data",
",",
"facet",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"string_types",
")",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"if",
"data",
"[",
"'version'",
"]",
"!=",
"VERSION",
":",
"raise",
"ValueError",
"(",
"'Unsupported U2F version: %s'",
"%",
"data",
"[",
"'version'",
"]",
")",
"app_id",
"=",
"data",
".",
"get",
"(",
"'appId'",
",",
"facet",
")",
"verify_facet",
"(",
"app_id",
",",
"facet",
")",
"app_param",
"=",
"sha256",
"(",
"app_id",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"digest",
"(",
")",
"client_data",
"=",
"{",
"'typ'",
":",
"'navigator.id.finishEnrollment'",
",",
"'challenge'",
":",
"data",
"[",
"'challenge'",
"]",
",",
"'origin'",
":",
"facet",
"}",
"client_data",
"=",
"json",
".",
"dumps",
"(",
"client_data",
")",
"client_param",
"=",
"sha256",
"(",
"client_data",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"digest",
"(",
")",
"request",
"=",
"client_param",
"+",
"app_param",
"p1",
"=",
"0x03",
"p2",
"=",
"0",
"response",
"=",
"device",
".",
"send_apdu",
"(",
"INS_ENROLL",
",",
"p1",
",",
"p2",
",",
"request",
")",
"return",
"{",
"'registrationData'",
":",
"websafe_encode",
"(",
"response",
")",
",",
"'clientData'",
":",
"websafe_encode",
"(",
"client_data",
")",
"}"
] |
eadc4dbf3bf516e74ea00d2e5690742a535834cb
|
test
|
authenticate
|
Signs an authentication challenge
data = {
'version': "U2F_V2",
'challenge': websafe_encode(self.challenge),
'appId': self.binding.app_id,
'keyHandle': websafe_encode(self.binding.key_handle)
}
|
u2flib_host/u2f_v2.py
|
def authenticate(device, data, facet, check_only=False):
"""
Signs an authentication challenge
data = {
'version': "U2F_V2",
'challenge': websafe_encode(self.challenge),
'appId': self.binding.app_id,
'keyHandle': websafe_encode(self.binding.key_handle)
}
"""
if isinstance(data, string_types):
data = json.loads(data)
if data['version'] != VERSION:
raise ValueError('Unsupported U2F version: %s' % data['version'])
app_id = data.get('appId', facet)
verify_facet(app_id, facet)
app_param = sha256(app_id.encode('utf8')).digest()
key_handle = websafe_decode(data['keyHandle'])
# Client data
client_data = {
'typ': 'navigator.id.getAssertion',
'challenge': data['challenge'],
'origin': facet
}
client_data = json.dumps(client_data)
client_param = sha256(client_data.encode('utf8')).digest()
request = client_param + app_param + int2byte(
len(key_handle)) + key_handle
p1 = 0x07 if check_only else 0x03
p2 = 0
response = device.send_apdu(INS_SIGN, p1, p2, request)
return {
'clientData': websafe_encode(client_data),
'signatureData': websafe_encode(response),
'keyHandle': data['keyHandle']
}
|
def authenticate(device, data, facet, check_only=False):
"""
Signs an authentication challenge
data = {
'version': "U2F_V2",
'challenge': websafe_encode(self.challenge),
'appId': self.binding.app_id,
'keyHandle': websafe_encode(self.binding.key_handle)
}
"""
if isinstance(data, string_types):
data = json.loads(data)
if data['version'] != VERSION:
raise ValueError('Unsupported U2F version: %s' % data['version'])
app_id = data.get('appId', facet)
verify_facet(app_id, facet)
app_param = sha256(app_id.encode('utf8')).digest()
key_handle = websafe_decode(data['keyHandle'])
# Client data
client_data = {
'typ': 'navigator.id.getAssertion',
'challenge': data['challenge'],
'origin': facet
}
client_data = json.dumps(client_data)
client_param = sha256(client_data.encode('utf8')).digest()
request = client_param + app_param + int2byte(
len(key_handle)) + key_handle
p1 = 0x07 if check_only else 0x03
p2 = 0
response = device.send_apdu(INS_SIGN, p1, p2, request)
return {
'clientData': websafe_encode(client_data),
'signatureData': websafe_encode(response),
'keyHandle': data['keyHandle']
}
|
[
"Signs",
"an",
"authentication",
"challenge"
] |
Yubico/python-u2flib-host
|
python
|
https://github.com/Yubico/python-u2flib-host/blob/eadc4dbf3bf516e74ea00d2e5690742a535834cb/u2flib_host/u2f_v2.py#L81-L126
|
[
"def",
"authenticate",
"(",
"device",
",",
"data",
",",
"facet",
",",
"check_only",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"string_types",
")",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"if",
"data",
"[",
"'version'",
"]",
"!=",
"VERSION",
":",
"raise",
"ValueError",
"(",
"'Unsupported U2F version: %s'",
"%",
"data",
"[",
"'version'",
"]",
")",
"app_id",
"=",
"data",
".",
"get",
"(",
"'appId'",
",",
"facet",
")",
"verify_facet",
"(",
"app_id",
",",
"facet",
")",
"app_param",
"=",
"sha256",
"(",
"app_id",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"digest",
"(",
")",
"key_handle",
"=",
"websafe_decode",
"(",
"data",
"[",
"'keyHandle'",
"]",
")",
"# Client data",
"client_data",
"=",
"{",
"'typ'",
":",
"'navigator.id.getAssertion'",
",",
"'challenge'",
":",
"data",
"[",
"'challenge'",
"]",
",",
"'origin'",
":",
"facet",
"}",
"client_data",
"=",
"json",
".",
"dumps",
"(",
"client_data",
")",
"client_param",
"=",
"sha256",
"(",
"client_data",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"digest",
"(",
")",
"request",
"=",
"client_param",
"+",
"app_param",
"+",
"int2byte",
"(",
"len",
"(",
"key_handle",
")",
")",
"+",
"key_handle",
"p1",
"=",
"0x07",
"if",
"check_only",
"else",
"0x03",
"p2",
"=",
"0",
"response",
"=",
"device",
".",
"send_apdu",
"(",
"INS_SIGN",
",",
"p1",
",",
"p2",
",",
"request",
")",
"return",
"{",
"'clientData'",
":",
"websafe_encode",
"(",
"client_data",
")",
",",
"'signatureData'",
":",
"websafe_encode",
"(",
"response",
")",
",",
"'keyHandle'",
":",
"data",
"[",
"'keyHandle'",
"]",
"}"
] |
eadc4dbf3bf516e74ea00d2e5690742a535834cb
|
test
|
register
|
Interactively registers a single U2F device, given the RegistrationRequest.
|
u2flib_host/register.py
|
def register(devices, params, facet):
"""
Interactively registers a single U2F device, given the RegistrationRequest.
"""
for device in devices[:]:
try:
device.open()
except:
devices.remove(device)
sys.stderr.write('\nTouch the U2F device you wish to register...\n')
try:
while devices:
removed = []
for device in devices:
try:
return u2f.register(device, params, facet)
except exc.APDUError as e:
if e.code == APDU_USE_NOT_SATISFIED:
pass
else:
removed.append(device)
except exc.DeviceError:
removed.append(device)
devices = [d for d in devices if d not in removed]
for d in removed:
d.close()
time.sleep(0.25)
finally:
for device in devices:
device.close()
sys.stderr.write('\nUnable to register with any U2F device.\n')
sys.exit(1)
|
def register(devices, params, facet):
"""
Interactively registers a single U2F device, given the RegistrationRequest.
"""
for device in devices[:]:
try:
device.open()
except:
devices.remove(device)
sys.stderr.write('\nTouch the U2F device you wish to register...\n')
try:
while devices:
removed = []
for device in devices:
try:
return u2f.register(device, params, facet)
except exc.APDUError as e:
if e.code == APDU_USE_NOT_SATISFIED:
pass
else:
removed.append(device)
except exc.DeviceError:
removed.append(device)
devices = [d for d in devices if d not in removed]
for d in removed:
d.close()
time.sleep(0.25)
finally:
for device in devices:
device.close()
sys.stderr.write('\nUnable to register with any U2F device.\n')
sys.exit(1)
|
[
"Interactively",
"registers",
"a",
"single",
"U2F",
"device",
"given",
"the",
"RegistrationRequest",
"."
] |
Yubico/python-u2flib-host
|
python
|
https://github.com/Yubico/python-u2flib-host/blob/eadc4dbf3bf516e74ea00d2e5690742a535834cb/u2flib_host/register.py#L41-L73
|
[
"def",
"register",
"(",
"devices",
",",
"params",
",",
"facet",
")",
":",
"for",
"device",
"in",
"devices",
"[",
":",
"]",
":",
"try",
":",
"device",
".",
"open",
"(",
")",
"except",
":",
"devices",
".",
"remove",
"(",
"device",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nTouch the U2F device you wish to register...\\n'",
")",
"try",
":",
"while",
"devices",
":",
"removed",
"=",
"[",
"]",
"for",
"device",
"in",
"devices",
":",
"try",
":",
"return",
"u2f",
".",
"register",
"(",
"device",
",",
"params",
",",
"facet",
")",
"except",
"exc",
".",
"APDUError",
"as",
"e",
":",
"if",
"e",
".",
"code",
"==",
"APDU_USE_NOT_SATISFIED",
":",
"pass",
"else",
":",
"removed",
".",
"append",
"(",
"device",
")",
"except",
"exc",
".",
"DeviceError",
":",
"removed",
".",
"append",
"(",
"device",
")",
"devices",
"=",
"[",
"d",
"for",
"d",
"in",
"devices",
"if",
"d",
"not",
"in",
"removed",
"]",
"for",
"d",
"in",
"removed",
":",
"d",
".",
"close",
"(",
")",
"time",
".",
"sleep",
"(",
"0.25",
")",
"finally",
":",
"for",
"device",
"in",
"devices",
":",
"device",
".",
"close",
"(",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nUnable to register with any U2F device.\\n'",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] |
eadc4dbf3bf516e74ea00d2e5690742a535834cb
|
test
|
u2str
|
Recursively converts unicode objects to UTF-8 encoded byte strings.
|
u2flib_host/utils.py
|
def u2str(data):
"""Recursively converts unicode objects to UTF-8 encoded byte strings."""
if isinstance(data, dict):
return {u2str(k): u2str(v) for k, v in data.items()}
elif isinstance(data, list):
return [u2str(x) for x in data]
elif isinstance(data, text_type):
return data.encode('utf-8')
else:
return data
|
def u2str(data):
"""Recursively converts unicode objects to UTF-8 encoded byte strings."""
if isinstance(data, dict):
return {u2str(k): u2str(v) for k, v in data.items()}
elif isinstance(data, list):
return [u2str(x) for x in data]
elif isinstance(data, text_type):
return data.encode('utf-8')
else:
return data
|
[
"Recursively",
"converts",
"unicode",
"objects",
"to",
"UTF",
"-",
"8",
"encoded",
"byte",
"strings",
"."
] |
Yubico/python-u2flib-host
|
python
|
https://github.com/Yubico/python-u2flib-host/blob/eadc4dbf3bf516e74ea00d2e5690742a535834cb/u2flib_host/utils.py#L40-L49
|
[
"def",
"u2str",
"(",
"data",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"return",
"{",
"u2str",
"(",
"k",
")",
":",
"u2str",
"(",
"v",
")",
"for",
"k",
",",
"v",
"in",
"data",
".",
"items",
"(",
")",
"}",
"elif",
"isinstance",
"(",
"data",
",",
"list",
")",
":",
"return",
"[",
"u2str",
"(",
"x",
")",
"for",
"x",
"in",
"data",
"]",
"elif",
"isinstance",
"(",
"data",
",",
"text_type",
")",
":",
"return",
"data",
".",
"encode",
"(",
"'utf-8'",
")",
"else",
":",
"return",
"data"
] |
eadc4dbf3bf516e74ea00d2e5690742a535834cb
|
test
|
wrap_function
|
Wraps a function with reporting to errors backend
|
flawless/client/decorators.py
|
def wrap_function(func=None, error_threshold=None, reraise_exception=True, save_current_stack_trace=True):
''' Wraps a function with reporting to errors backend '''
# This if/else allows wrap_function to behave like a normal decorator when
# used like:
# @wrap_function
# def some_func():
#
# However, it also allows wrap_function to also be passed keyword arguments
# like the following:
# @wrap_function(error_threshold=3, reraise_exception=False)
# def some_func():
if func:
return flawless.client.client._wrap_function_with_error_decorator(
func=func,
error_threshold=error_threshold,
reraise_exception=reraise_exception,
save_current_stack_trace=save_current_stack_trace)
else:
return functools.partial(flawless.client.client._wrap_function_with_error_decorator,
error_threshold=error_threshold,
reraise_exception=reraise_exception,
save_current_stack_trace=save_current_stack_trace)
|
def wrap_function(func=None, error_threshold=None, reraise_exception=True, save_current_stack_trace=True):
''' Wraps a function with reporting to errors backend '''
# This if/else allows wrap_function to behave like a normal decorator when
# used like:
# @wrap_function
# def some_func():
#
# However, it also allows wrap_function to also be passed keyword arguments
# like the following:
# @wrap_function(error_threshold=3, reraise_exception=False)
# def some_func():
if func:
return flawless.client.client._wrap_function_with_error_decorator(
func=func,
error_threshold=error_threshold,
reraise_exception=reraise_exception,
save_current_stack_trace=save_current_stack_trace)
else:
return functools.partial(flawless.client.client._wrap_function_with_error_decorator,
error_threshold=error_threshold,
reraise_exception=reraise_exception,
save_current_stack_trace=save_current_stack_trace)
|
[
"Wraps",
"a",
"function",
"with",
"reporting",
"to",
"errors",
"backend"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/client/decorators.py#L20-L41
|
[
"def",
"wrap_function",
"(",
"func",
"=",
"None",
",",
"error_threshold",
"=",
"None",
",",
"reraise_exception",
"=",
"True",
",",
"save_current_stack_trace",
"=",
"True",
")",
":",
"# This if/else allows wrap_function to behave like a normal decorator when",
"# used like:",
"# @wrap_function",
"# def some_func():",
"#",
"# However, it also allows wrap_function to also be passed keyword arguments",
"# like the following:",
"# @wrap_function(error_threshold=3, reraise_exception=False)",
"# def some_func():",
"if",
"func",
":",
"return",
"flawless",
".",
"client",
".",
"client",
".",
"_wrap_function_with_error_decorator",
"(",
"func",
"=",
"func",
",",
"error_threshold",
"=",
"error_threshold",
",",
"reraise_exception",
"=",
"reraise_exception",
",",
"save_current_stack_trace",
"=",
"save_current_stack_trace",
")",
"else",
":",
"return",
"functools",
".",
"partial",
"(",
"flawless",
".",
"client",
".",
"client",
".",
"_wrap_function_with_error_decorator",
",",
"error_threshold",
"=",
"error_threshold",
",",
"reraise_exception",
"=",
"reraise_exception",
",",
"save_current_stack_trace",
"=",
"save_current_stack_trace",
")"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
wrap_class
|
Wraps a class with reporting to errors backend by decorating each function of the class.
Decorators are injected under the classmethod decorator if they exist.
|
flawless/client/decorators.py
|
def wrap_class(cls, error_threshold=None):
''' Wraps a class with reporting to errors backend by decorating each function of the class.
Decorators are injected under the classmethod decorator if they exist.
'''
methods = inspect.getmembers(cls, inspect.ismethod) + inspect.getmembers(cls, inspect.isfunction)
for method_name, method in methods:
wrapped_method = flawless.client.client._wrap_function_with_error_decorator(
method if not im_self(method) else im_func(method),
save_current_stack_trace=False,
error_threshold=error_threshold,
)
if im_self(method):
wrapped_method = classmethod(wrapped_method)
setattr(cls, method_name, wrapped_method)
return cls
|
def wrap_class(cls, error_threshold=None):
''' Wraps a class with reporting to errors backend by decorating each function of the class.
Decorators are injected under the classmethod decorator if they exist.
'''
methods = inspect.getmembers(cls, inspect.ismethod) + inspect.getmembers(cls, inspect.isfunction)
for method_name, method in methods:
wrapped_method = flawless.client.client._wrap_function_with_error_decorator(
method if not im_self(method) else im_func(method),
save_current_stack_trace=False,
error_threshold=error_threshold,
)
if im_self(method):
wrapped_method = classmethod(wrapped_method)
setattr(cls, method_name, wrapped_method)
return cls
|
[
"Wraps",
"a",
"class",
"with",
"reporting",
"to",
"errors",
"backend",
"by",
"decorating",
"each",
"function",
"of",
"the",
"class",
".",
"Decorators",
"are",
"injected",
"under",
"the",
"classmethod",
"decorator",
"if",
"they",
"exist",
"."
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/client/decorators.py#L44-L58
|
[
"def",
"wrap_class",
"(",
"cls",
",",
"error_threshold",
"=",
"None",
")",
":",
"methods",
"=",
"inspect",
".",
"getmembers",
"(",
"cls",
",",
"inspect",
".",
"ismethod",
")",
"+",
"inspect",
".",
"getmembers",
"(",
"cls",
",",
"inspect",
".",
"isfunction",
")",
"for",
"method_name",
",",
"method",
"in",
"methods",
":",
"wrapped_method",
"=",
"flawless",
".",
"client",
".",
"client",
".",
"_wrap_function_with_error_decorator",
"(",
"method",
"if",
"not",
"im_self",
"(",
"method",
")",
"else",
"im_func",
"(",
"method",
")",
",",
"save_current_stack_trace",
"=",
"False",
",",
"error_threshold",
"=",
"error_threshold",
",",
")",
"if",
"im_self",
"(",
"method",
")",
":",
"wrapped_method",
"=",
"classmethod",
"(",
"wrapped_method",
")",
"setattr",
"(",
"cls",
",",
"method_name",
",",
"wrapped_method",
")",
"return",
"cls"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
FlawlessServiceBaseClass._blame_line
|
Figures out which line in traceback is to blame for the error.
Returns a 3-tuple of (ErrorKey, StackTraceEntry, [email recipients])
|
flawless/server/service.py
|
def _blame_line(self, traceback):
'''Figures out which line in traceback is to blame for the error.
Returns a 3-tuple of (ErrorKey, StackTraceEntry, [email recipients])'''
key = None
blamed_entry = None
email_recipients = []
for stack_line in traceback:
line_type = self._get_line_type(stack_line)
if line_type == api_ttypes.LineType.THIRDPARTY_WHITELIST:
return None, None, None, True
elif line_type in [api_ttypes.LineType.DEFAULT, api_ttypes.LineType.KNOWN_ERROR]:
filepath = self._get_basepath(stack_line.filename)
entry = api_ttypes.CodeIdentifier(filepath, stack_line.function_name, stack_line.text)
blamed_entry = entry
key = api_ttypes.ErrorKey(filepath, stack_line.line_number, stack_line.function_name, stack_line.text)
if filepath in self.watch_all_errors:
email_recipients.extend(self.watch_all_errors[filepath])
return (key, blamed_entry, email_recipients, False)
|
def _blame_line(self, traceback):
'''Figures out which line in traceback is to blame for the error.
Returns a 3-tuple of (ErrorKey, StackTraceEntry, [email recipients])'''
key = None
blamed_entry = None
email_recipients = []
for stack_line in traceback:
line_type = self._get_line_type(stack_line)
if line_type == api_ttypes.LineType.THIRDPARTY_WHITELIST:
return None, None, None, True
elif line_type in [api_ttypes.LineType.DEFAULT, api_ttypes.LineType.KNOWN_ERROR]:
filepath = self._get_basepath(stack_line.filename)
entry = api_ttypes.CodeIdentifier(filepath, stack_line.function_name, stack_line.text)
blamed_entry = entry
key = api_ttypes.ErrorKey(filepath, stack_line.line_number, stack_line.function_name, stack_line.text)
if filepath in self.watch_all_errors:
email_recipients.extend(self.watch_all_errors[filepath])
return (key, blamed_entry, email_recipients, False)
|
[
"Figures",
"out",
"which",
"line",
"in",
"traceback",
"is",
"to",
"blame",
"for",
"the",
"error",
".",
"Returns",
"a",
"3",
"-",
"tuple",
"of",
"(",
"ErrorKey",
"StackTraceEntry",
"[",
"email",
"recipients",
"]",
")"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/server/service.py#L189-L206
|
[
"def",
"_blame_line",
"(",
"self",
",",
"traceback",
")",
":",
"key",
"=",
"None",
"blamed_entry",
"=",
"None",
"email_recipients",
"=",
"[",
"]",
"for",
"stack_line",
"in",
"traceback",
":",
"line_type",
"=",
"self",
".",
"_get_line_type",
"(",
"stack_line",
")",
"if",
"line_type",
"==",
"api_ttypes",
".",
"LineType",
".",
"THIRDPARTY_WHITELIST",
":",
"return",
"None",
",",
"None",
",",
"None",
",",
"True",
"elif",
"line_type",
"in",
"[",
"api_ttypes",
".",
"LineType",
".",
"DEFAULT",
",",
"api_ttypes",
".",
"LineType",
".",
"KNOWN_ERROR",
"]",
":",
"filepath",
"=",
"self",
".",
"_get_basepath",
"(",
"stack_line",
".",
"filename",
")",
"entry",
"=",
"api_ttypes",
".",
"CodeIdentifier",
"(",
"filepath",
",",
"stack_line",
".",
"function_name",
",",
"stack_line",
".",
"text",
")",
"blamed_entry",
"=",
"entry",
"key",
"=",
"api_ttypes",
".",
"ErrorKey",
"(",
"filepath",
",",
"stack_line",
".",
"line_number",
",",
"stack_line",
".",
"function_name",
",",
"stack_line",
".",
"text",
")",
"if",
"filepath",
"in",
"self",
".",
"watch_all_errors",
":",
"email_recipients",
".",
"extend",
"(",
"self",
".",
"watch_all_errors",
"[",
"filepath",
"]",
")",
"return",
"(",
"key",
",",
"blamed_entry",
",",
"email_recipients",
",",
"False",
")"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
FlawlessServiceBaseClass._matches_filepath_pattern
|
Given a filepath, and a list of regex patterns, this function returns true
if filepath matches any one of those patterns
|
flawless/server/service.py
|
def _matches_filepath_pattern(self, filepath):
'''Given a filepath, and a list of regex patterns, this function returns true
if filepath matches any one of those patterns'''
if not self.only_blame_patterns:
return True
for pattern in self.only_blame_patterns:
if pattern.match(filepath):
return True
return False
|
def _matches_filepath_pattern(self, filepath):
'''Given a filepath, and a list of regex patterns, this function returns true
if filepath matches any one of those patterns'''
if not self.only_blame_patterns:
return True
for pattern in self.only_blame_patterns:
if pattern.match(filepath):
return True
return False
|
[
"Given",
"a",
"filepath",
"and",
"a",
"list",
"of",
"regex",
"patterns",
"this",
"function",
"returns",
"true",
"if",
"filepath",
"matches",
"any",
"one",
"of",
"those",
"patterns"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/server/service.py#L227-L236
|
[
"def",
"_matches_filepath_pattern",
"(",
"self",
",",
"filepath",
")",
":",
"if",
"not",
"self",
".",
"only_blame_patterns",
":",
"return",
"True",
"for",
"pattern",
"in",
"self",
".",
"only_blame_patterns",
":",
"if",
"pattern",
".",
"match",
"(",
"filepath",
")",
":",
"return",
"True",
"return",
"False"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
FlawlessThriftServiceHandler._get_email
|
Given an email address, check the email_remapping table to see if the email
should be sent to a different address. This function also handles overriding
the email domain if ignore_vcs_email_domain is set or the domain was missing
|
flawless/server/service.py
|
def _get_email(self, email):
'''Given an email address, check the email_remapping table to see if the email
should be sent to a different address. This function also handles overriding
the email domain if ignore_vcs_email_domain is set or the domain was missing'''
if not email or "@" not in email:
return None
if email in self.email_remapping.remap:
return self.email_remapping.remap[email]
prefix, domain = email.split("@", 2)
if prefix in self.email_remapping.remap:
return self.email_remapping.remap[prefix]
if "." not in domain or config.ignore_vcs_email_domain:
return "%s@%s" % (prefix, config.email_domain_name)
return email
|
def _get_email(self, email):
'''Given an email address, check the email_remapping table to see if the email
should be sent to a different address. This function also handles overriding
the email domain if ignore_vcs_email_domain is set or the domain was missing'''
if not email or "@" not in email:
return None
if email in self.email_remapping.remap:
return self.email_remapping.remap[email]
prefix, domain = email.split("@", 2)
if prefix in self.email_remapping.remap:
return self.email_remapping.remap[prefix]
if "." not in domain or config.ignore_vcs_email_domain:
return "%s@%s" % (prefix, config.email_domain_name)
return email
|
[
"Given",
"an",
"email",
"address",
"check",
"the",
"email_remapping",
"table",
"to",
"see",
"if",
"the",
"email",
"should",
"be",
"sent",
"to",
"a",
"different",
"address",
".",
"This",
"function",
"also",
"handles",
"overriding",
"the",
"email",
"domain",
"if",
"ignore_vcs_email_domain",
"is",
"set",
"or",
"the",
"domain",
"was",
"missing"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/server/service.py#L403-L417
|
[
"def",
"_get_email",
"(",
"self",
",",
"email",
")",
":",
"if",
"not",
"email",
"or",
"\"@\"",
"not",
"in",
"email",
":",
"return",
"None",
"if",
"email",
"in",
"self",
".",
"email_remapping",
".",
"remap",
":",
"return",
"self",
".",
"email_remapping",
".",
"remap",
"[",
"email",
"]",
"prefix",
",",
"domain",
"=",
"email",
".",
"split",
"(",
"\"@\"",
",",
"2",
")",
"if",
"prefix",
"in",
"self",
".",
"email_remapping",
".",
"remap",
":",
"return",
"self",
".",
"email_remapping",
".",
"remap",
"[",
"prefix",
"]",
"if",
"\".\"",
"not",
"in",
"domain",
"or",
"config",
".",
"ignore_vcs_email_domain",
":",
"return",
"\"%s@%s\"",
"%",
"(",
"prefix",
",",
"config",
".",
"email_domain_name",
")",
"return",
"email"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
FlawlessThriftServiceHandler._get_entry
|
Helper function for retrieving a particular entry from the prefix trees
|
flawless/server/service.py
|
def _get_entry(self, entry, entry_tree):
'''Helper function for retrieving a particular entry from the prefix trees'''
for e in entry_tree[entry.filename]:
if entry == e:
return e
|
def _get_entry(self, entry, entry_tree):
'''Helper function for retrieving a particular entry from the prefix trees'''
for e in entry_tree[entry.filename]:
if entry == e:
return e
|
[
"Helper",
"function",
"for",
"retrieving",
"a",
"particular",
"entry",
"from",
"the",
"prefix",
"trees"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/server/service.py#L419-L423
|
[
"def",
"_get_entry",
"(",
"self",
",",
"entry",
",",
"entry_tree",
")",
":",
"for",
"e",
"in",
"entry_tree",
"[",
"entry",
".",
"filename",
"]",
":",
"if",
"entry",
"==",
"e",
":",
"return",
"e"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
markdown_to_reST
|
This is not a general purpose converter. Only converts this readme
|
setup.py
|
def markdown_to_reST(text):
'''This is not a general purpose converter. Only converts this readme'''
# Convert parameters to italics and prepend a newline
text = re.sub(pattern=r"\n (\w+) - (.+)\n",
repl=r"\n\n *\g<1>* - \g<2>\n",
string=text)
# Parse [http://url](text), and just leave the url
text = re.sub(pattern=r"\[([^\]]+)\]\([^)]+\)",
repl=r"\g<1>",
string=text)
# Disable formatting of numbered lists
text = re.sub(pattern=r"\n(\d+). ",
repl=r"\n\\\g<1>. ",
string=text)
return text
|
def markdown_to_reST(text):
'''This is not a general purpose converter. Only converts this readme'''
# Convert parameters to italics and prepend a newline
text = re.sub(pattern=r"\n (\w+) - (.+)\n",
repl=r"\n\n *\g<1>* - \g<2>\n",
string=text)
# Parse [http://url](text), and just leave the url
text = re.sub(pattern=r"\[([^\]]+)\]\([^)]+\)",
repl=r"\g<1>",
string=text)
# Disable formatting of numbered lists
text = re.sub(pattern=r"\n(\d+). ",
repl=r"\n\\\g<1>. ",
string=text)
return text
|
[
"This",
"is",
"not",
"a",
"general",
"purpose",
"converter",
".",
"Only",
"converts",
"this",
"readme"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/setup.py#L19-L35
|
[
"def",
"markdown_to_reST",
"(",
"text",
")",
":",
"# Convert parameters to italics and prepend a newline",
"text",
"=",
"re",
".",
"sub",
"(",
"pattern",
"=",
"r\"\\n (\\w+) - (.+)\\n\"",
",",
"repl",
"=",
"r\"\\n\\n *\\g<1>* - \\g<2>\\n\"",
",",
"string",
"=",
"text",
")",
"# Parse [http://url](text), and just leave the url",
"text",
"=",
"re",
".",
"sub",
"(",
"pattern",
"=",
"r\"\\[([^\\]]+)\\]\\([^)]+\\)\"",
",",
"repl",
"=",
"r\"\\g<1>\"",
",",
"string",
"=",
"text",
")",
"# Disable formatting of numbered lists",
"text",
"=",
"re",
".",
"sub",
"(",
"pattern",
"=",
"r\"\\n(\\d+). \"",
",",
"repl",
"=",
"r\"\\n\\\\\\g<1>. \"",
",",
"string",
"=",
"text",
")",
"return",
"text"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
serve
|
This method starts the server. There are two processes, one is an HTTP server that shows
and admin interface and the second is a Thrift server that the client code calls.
Arguments:
`conf_path` - The path to your flawless.cfg file
`storage_factory` - You can pass in your own storage class that implements StorageInterface. You must implement
storage_cls if you want Flawless to be horizontally scalable, since by default it will just
store everything on the local disk.
|
flawless/server/server.py
|
def serve(conf_path, storage_factory=None):
"""This method starts the server. There are two processes, one is an HTTP server that shows
and admin interface and the second is a Thrift server that the client code calls.
Arguments:
`conf_path` - The path to your flawless.cfg file
`storage_factory` - You can pass in your own storage class that implements StorageInterface. You must implement
storage_cls if you want Flawless to be horizontally scalable, since by default it will just
store everything on the local disk.
"""
flawless.lib.config.init_config(conf_path)
# Try and create datadir if it doesn't exist. For instance it might be in /tmp
if not os.path.exists(config.data_dir_path):
os.makedirs(config.data_dir_path)
storage_factory = storage_factory or (lambda partition: DiskStorage(partition=partition))
# Setup root logger
root_logger = logging.getLogger()
root_handler = logging.handlers.TimedRotatingFileHandler(
filename=config.log_file, when='d', interval=1, backupCount=config.log_days_to_keep)
root_logger.setLevel(getattr(logging, config.log_level))
root_logger.addHandler(root_handler)
child_pid = os.fork()
if child_pid == 0:
# Setup HTTP server
handler = FlawlessWebServiceHandler(storage_factory=storage_factory)
server = SimpleThreadedHTTPServer(('', config.http_port), SimpleRequestHTTPHandler)
server.attach_service(handler)
server.request_queue_size = 50
try:
server.serve_forever()
except (KeyboardInterrupt, SystemExit):
server.server_close()
else:
# Setup Thrift server
handler = FlawlessThriftServiceHandler(storage_factory=storage_factory)
processor = Flawless.Processor(handler)
transport = TSocket.TServerSocket(port=config.port)
tfactory = TTransport.TFramedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
try:
server.serve()
except (KeyboardInterrupt, SystemExit):
handler.errors_seen.sync()
transport.close()
os.kill(child_pid, signal.SIGINT)
|
def serve(conf_path, storage_factory=None):
"""This method starts the server. There are two processes, one is an HTTP server that shows
and admin interface and the second is a Thrift server that the client code calls.
Arguments:
`conf_path` - The path to your flawless.cfg file
`storage_factory` - You can pass in your own storage class that implements StorageInterface. You must implement
storage_cls if you want Flawless to be horizontally scalable, since by default it will just
store everything on the local disk.
"""
flawless.lib.config.init_config(conf_path)
# Try and create datadir if it doesn't exist. For instance it might be in /tmp
if not os.path.exists(config.data_dir_path):
os.makedirs(config.data_dir_path)
storage_factory = storage_factory or (lambda partition: DiskStorage(partition=partition))
# Setup root logger
root_logger = logging.getLogger()
root_handler = logging.handlers.TimedRotatingFileHandler(
filename=config.log_file, when='d', interval=1, backupCount=config.log_days_to_keep)
root_logger.setLevel(getattr(logging, config.log_level))
root_logger.addHandler(root_handler)
child_pid = os.fork()
if child_pid == 0:
# Setup HTTP server
handler = FlawlessWebServiceHandler(storage_factory=storage_factory)
server = SimpleThreadedHTTPServer(('', config.http_port), SimpleRequestHTTPHandler)
server.attach_service(handler)
server.request_queue_size = 50
try:
server.serve_forever()
except (KeyboardInterrupt, SystemExit):
server.server_close()
else:
# Setup Thrift server
handler = FlawlessThriftServiceHandler(storage_factory=storage_factory)
processor = Flawless.Processor(handler)
transport = TSocket.TServerSocket(port=config.port)
tfactory = TTransport.TFramedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
try:
server.serve()
except (KeyboardInterrupt, SystemExit):
handler.errors_seen.sync()
transport.close()
os.kill(child_pid, signal.SIGINT)
|
[
"This",
"method",
"starts",
"the",
"server",
".",
"There",
"are",
"two",
"processes",
"one",
"is",
"an",
"HTTP",
"server",
"that",
"shows",
"and",
"admin",
"interface",
"and",
"the",
"second",
"is",
"a",
"Thrift",
"server",
"that",
"the",
"client",
"code",
"calls",
"."
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/server/server.py#L101-L151
|
[
"def",
"serve",
"(",
"conf_path",
",",
"storage_factory",
"=",
"None",
")",
":",
"flawless",
".",
"lib",
".",
"config",
".",
"init_config",
"(",
"conf_path",
")",
"# Try and create datadir if it doesn't exist. For instance it might be in /tmp",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"config",
".",
"data_dir_path",
")",
":",
"os",
".",
"makedirs",
"(",
"config",
".",
"data_dir_path",
")",
"storage_factory",
"=",
"storage_factory",
"or",
"(",
"lambda",
"partition",
":",
"DiskStorage",
"(",
"partition",
"=",
"partition",
")",
")",
"# Setup root logger",
"root_logger",
"=",
"logging",
".",
"getLogger",
"(",
")",
"root_handler",
"=",
"logging",
".",
"handlers",
".",
"TimedRotatingFileHandler",
"(",
"filename",
"=",
"config",
".",
"log_file",
",",
"when",
"=",
"'d'",
",",
"interval",
"=",
"1",
",",
"backupCount",
"=",
"config",
".",
"log_days_to_keep",
")",
"root_logger",
".",
"setLevel",
"(",
"getattr",
"(",
"logging",
",",
"config",
".",
"log_level",
")",
")",
"root_logger",
".",
"addHandler",
"(",
"root_handler",
")",
"child_pid",
"=",
"os",
".",
"fork",
"(",
")",
"if",
"child_pid",
"==",
"0",
":",
"# Setup HTTP server",
"handler",
"=",
"FlawlessWebServiceHandler",
"(",
"storage_factory",
"=",
"storage_factory",
")",
"server",
"=",
"SimpleThreadedHTTPServer",
"(",
"(",
"''",
",",
"config",
".",
"http_port",
")",
",",
"SimpleRequestHTTPHandler",
")",
"server",
".",
"attach_service",
"(",
"handler",
")",
"server",
".",
"request_queue_size",
"=",
"50",
"try",
":",
"server",
".",
"serve_forever",
"(",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"server",
".",
"server_close",
"(",
")",
"else",
":",
"# Setup Thrift server",
"handler",
"=",
"FlawlessThriftServiceHandler",
"(",
"storage_factory",
"=",
"storage_factory",
")",
"processor",
"=",
"Flawless",
".",
"Processor",
"(",
"handler",
")",
"transport",
"=",
"TSocket",
".",
"TServerSocket",
"(",
"port",
"=",
"config",
".",
"port",
")",
"tfactory",
"=",
"TTransport",
".",
"TFramedTransportFactory",
"(",
")",
"pfactory",
"=",
"TBinaryProtocol",
".",
"TBinaryProtocolFactory",
"(",
")",
"server",
"=",
"TServer",
".",
"TThreadedServer",
"(",
"processor",
",",
"transport",
",",
"tfactory",
",",
"pfactory",
")",
"try",
":",
"server",
".",
"serve",
"(",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"handler",
".",
"errors_seen",
".",
"sync",
"(",
")",
"transport",
".",
"close",
"(",
")",
"os",
".",
"kill",
"(",
"child_pid",
",",
"signal",
".",
"SIGINT",
")"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
record_error
|
Helper function to record errors to the flawless backend
|
flawless/client/client.py
|
def record_error(hostname, exc_info, preceding_stack=None, error_threshold=None, additional_info=None):
''' Helper function to record errors to the flawless backend '''
stack = []
exc_type, exc_value, sys_traceback = exc_info
while sys_traceback is not None:
stack.append(sys_traceback)
sys_traceback = sys_traceback.tb_next
stack_lines = []
for row in preceding_stack or []:
stack_lines.append(
api_ttypes.StackLine(filename=os.path.abspath(row[0]), line_number=row[1],
function_name=row[2], text=row[3])
)
for index, tb in enumerate(stack):
filename = tb.tb_frame.f_code.co_filename
func_name = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno
line = linecache.getline(filename, lineno, tb.tb_frame.f_globals)
frame_locals = None
if index >= (len(stack) - NUM_FRAMES_TO_SAVE):
# Include some limits on max string length & number of variables to keep things from getting
# out of hand
frame_locals = dict((k, _myrepr(k, v)) for k, v in
list(tb.tb_frame.f_locals.items())[:MAX_LOCALS] if k != "self")
if "self" in tb.tb_frame.f_locals and hasattr(tb.tb_frame.f_locals["self"], "__dict__"):
frame_locals.update(dict(("self." + k, _myrepr(k, v)) for k, v in
list(tb.tb_frame.f_locals["self"].__dict__.items())[:MAX_LOCALS]
if k != "self"))
stack_lines.append(
api_ttypes.StackLine(filename=os.path.abspath(filename), line_number=lineno,
function_name=func_name, text=line, frame_locals=frame_locals)
)
# Check LRU cache & potentially do not send error report if this client has already reported this error
# several times.
key = CachedErrorInfo.get_hash_key(stack_lines)
info = ERROR_CACHE.get(key) or CachedErrorInfo()
info.increment()
ERROR_CACHE[key] = info
if info.should_report():
error_count = info.mark_reported()
_send_request(
api_ttypes.RecordErrorRequest(
traceback=stack_lines,
exception_message=repr(exc_value),
exception_type=exc_type.__module__ + "." + exc_type.__name__,
hostname=hostname,
error_threshold=error_threshold,
additional_info=additional_info,
error_count=error_count,
)
)
|
def record_error(hostname, exc_info, preceding_stack=None, error_threshold=None, additional_info=None):
    ''' Helper function to record errors to the flawless backend

    hostname: value forwarded as the report's hostname field.
    exc_info: an (exc_type, exc_value, traceback) triple, e.g. from sys.exc_info().
    preceding_stack: optional list of (filename, line_number, function_name, text)
        rows prepended to the reported traceback.
    error_threshold: optional threshold forwarded verbatim to the backend.
    additional_info: optional extra context string forwarded verbatim.
    '''
    # Flatten the traceback linked list into a list of frames (outermost first).
    stack = []
    exc_type, exc_value, sys_traceback = exc_info
    while sys_traceback is not None:
        stack.append(sys_traceback)
        sys_traceback = sys_traceback.tb_next
    stack_lines = []
    for row in preceding_stack or []:
        stack_lines.append(
            api_ttypes.StackLine(filename=os.path.abspath(row[0]), line_number=row[1],
                                 function_name=row[2], text=row[3])
        )
    for index, tb in enumerate(stack):
        filename = tb.tb_frame.f_code.co_filename
        func_name = tb.tb_frame.f_code.co_name
        lineno = tb.tb_lineno
        line = linecache.getline(filename, lineno, tb.tb_frame.f_globals)
        frame_locals = None
        # Only capture local variables for the innermost NUM_FRAMES_TO_SAVE frames.
        if index >= (len(stack) - NUM_FRAMES_TO_SAVE):
            # Include some limits on max string length & number of variables to keep things from getting
            # out of hand
            frame_locals = dict((k, _myrepr(k, v)) for k, v in
                                list(tb.tb_frame.f_locals.items())[:MAX_LOCALS] if k != "self")
            # Also capture instance attributes, namespaced as "self.<attr>".
            if "self" in tb.tb_frame.f_locals and hasattr(tb.tb_frame.f_locals["self"], "__dict__"):
                frame_locals.update(dict(("self." + k, _myrepr(k, v)) for k, v in
                                         list(tb.tb_frame.f_locals["self"].__dict__.items())[:MAX_LOCALS]
                                         if k != "self"))
        stack_lines.append(
            api_ttypes.StackLine(filename=os.path.abspath(filename), line_number=lineno,
                                 function_name=func_name, text=line, frame_locals=frame_locals)
        )
    # Check LRU cache & potentially do not send error report if this client has already reported this error
    # several times.
    key = CachedErrorInfo.get_hash_key(stack_lines)
    info = ERROR_CACHE.get(key) or CachedErrorInfo()
    info.increment()
    ERROR_CACHE[key] = info
    if info.should_report():
        error_count = info.mark_reported()
        _send_request(
            api_ttypes.RecordErrorRequest(
                traceback=stack_lines,
                exception_message=repr(exc_value),
                exception_type=exc_type.__module__ + "." + exc_type.__name__,
                hostname=hostname,
                error_threshold=error_threshold,
                additional_info=additional_info,
                error_count=error_count,
            )
        )
|
[
"Helper",
"function",
"to",
"record",
"errors",
"to",
"the",
"flawless",
"backend"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/client/client.py#L173-L228
|
[
"def",
"record_error",
"(",
"hostname",
",",
"exc_info",
",",
"preceding_stack",
"=",
"None",
",",
"error_threshold",
"=",
"None",
",",
"additional_info",
"=",
"None",
")",
":",
"stack",
"=",
"[",
"]",
"exc_type",
",",
"exc_value",
",",
"sys_traceback",
"=",
"exc_info",
"while",
"sys_traceback",
"is",
"not",
"None",
":",
"stack",
".",
"append",
"(",
"sys_traceback",
")",
"sys_traceback",
"=",
"sys_traceback",
".",
"tb_next",
"stack_lines",
"=",
"[",
"]",
"for",
"row",
"in",
"preceding_stack",
"or",
"[",
"]",
":",
"stack_lines",
".",
"append",
"(",
"api_ttypes",
".",
"StackLine",
"(",
"filename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"row",
"[",
"0",
"]",
")",
",",
"line_number",
"=",
"row",
"[",
"1",
"]",
",",
"function_name",
"=",
"row",
"[",
"2",
"]",
",",
"text",
"=",
"row",
"[",
"3",
"]",
")",
")",
"for",
"index",
",",
"tb",
"in",
"enumerate",
"(",
"stack",
")",
":",
"filename",
"=",
"tb",
".",
"tb_frame",
".",
"f_code",
".",
"co_filename",
"func_name",
"=",
"tb",
".",
"tb_frame",
".",
"f_code",
".",
"co_name",
"lineno",
"=",
"tb",
".",
"tb_lineno",
"line",
"=",
"linecache",
".",
"getline",
"(",
"filename",
",",
"lineno",
",",
"tb",
".",
"tb_frame",
".",
"f_globals",
")",
"frame_locals",
"=",
"None",
"if",
"index",
">=",
"(",
"len",
"(",
"stack",
")",
"-",
"NUM_FRAMES_TO_SAVE",
")",
":",
"# Include some limits on max string length & number of variables to keep things from getting",
"# out of hand",
"frame_locals",
"=",
"dict",
"(",
"(",
"k",
",",
"_myrepr",
"(",
"k",
",",
"v",
")",
")",
"for",
"k",
",",
"v",
"in",
"list",
"(",
"tb",
".",
"tb_frame",
".",
"f_locals",
".",
"items",
"(",
")",
")",
"[",
":",
"MAX_LOCALS",
"]",
"if",
"k",
"!=",
"\"self\"",
")",
"if",
"\"self\"",
"in",
"tb",
".",
"tb_frame",
".",
"f_locals",
"and",
"hasattr",
"(",
"tb",
".",
"tb_frame",
".",
"f_locals",
"[",
"\"self\"",
"]",
",",
"\"__dict__\"",
")",
":",
"frame_locals",
".",
"update",
"(",
"dict",
"(",
"(",
"\"self.\"",
"+",
"k",
",",
"_myrepr",
"(",
"k",
",",
"v",
")",
")",
"for",
"k",
",",
"v",
"in",
"list",
"(",
"tb",
".",
"tb_frame",
".",
"f_locals",
"[",
"\"self\"",
"]",
".",
"__dict__",
".",
"items",
"(",
")",
")",
"[",
":",
"MAX_LOCALS",
"]",
"if",
"k",
"!=",
"\"self\"",
")",
")",
"stack_lines",
".",
"append",
"(",
"api_ttypes",
".",
"StackLine",
"(",
"filename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
",",
"line_number",
"=",
"lineno",
",",
"function_name",
"=",
"func_name",
",",
"text",
"=",
"line",
",",
"frame_locals",
"=",
"frame_locals",
")",
")",
"# Check LRU cache & potentially do not send error report if this client has already reported this error",
"# several times.",
"key",
"=",
"CachedErrorInfo",
".",
"get_hash_key",
"(",
"stack_lines",
")",
"info",
"=",
"ERROR_CACHE",
".",
"get",
"(",
"key",
")",
"or",
"CachedErrorInfo",
"(",
")",
"info",
".",
"increment",
"(",
")",
"ERROR_CACHE",
"[",
"key",
"]",
"=",
"info",
"if",
"info",
".",
"should_report",
"(",
")",
":",
"error_count",
"=",
"info",
".",
"mark_reported",
"(",
")",
"_send_request",
"(",
"api_ttypes",
".",
"RecordErrorRequest",
"(",
"traceback",
"=",
"stack_lines",
",",
"exception_message",
"=",
"repr",
"(",
"exc_value",
")",
",",
"exception_type",
"=",
"exc_type",
".",
"__module__",
"+",
"\".\"",
"+",
"exc_type",
".",
"__name__",
",",
"hostname",
"=",
"hostname",
",",
"error_threshold",
"=",
"error_threshold",
",",
"additional_info",
"=",
"additional_info",
",",
"error_count",
"=",
"error_count",
",",
")",
")"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
StorageInterface.migrate_thrift_obj
|
Helper function that can be called when serializing/deserializing thrift objects whose definitions
have changed, we need to make sure we initialize the new attributes to their default value
|
flawless/lib/storage/base.py
|
def migrate_thrift_obj(self, obj):
    """Recursively backfill missing thrift fields on *obj* with their declared defaults.

    When a thrift struct definition gains new fields, previously serialized
    instances lack those attributes; this walks the object graph and assigns
    each missing attribute a shallow copy of its default from ``thrift_spec``.
    """
    if not hasattr(obj, "thrift_spec"):
        return
    # Map field name -> declared default (thrift_spec entries may contain None).
    defaults = {spec[2]: spec[4] for spec in obj.thrift_spec if spec}
    present = set(obj.__dict__.keys())
    for field in set(defaults.keys()) - present:
        obj.__dict__[field] = copy.copy(defaults[field])
    # Recurse into every attribute so nested thrift objects get migrated too.
    for child in obj.__dict__.values():
        self.migrate_thrift_obj(child)
|
def migrate_thrift_obj(self, obj):
    """Recursively backfill missing thrift fields on *obj* with their declared defaults.

    When a thrift struct definition gains new fields, previously serialized
    instances lack those attributes; this walks the object graph and assigns
    each missing attribute a shallow copy of its default from ``thrift_spec``.
    """
    if not hasattr(obj, "thrift_spec"):
        return
    # Map field name -> declared default (thrift_spec entries may contain None).
    defaults = {spec[2]: spec[4] for spec in obj.thrift_spec if spec}
    present = set(obj.__dict__.keys())
    for field in set(defaults.keys()) - present:
        obj.__dict__[field] = copy.copy(defaults[field])
    # Recurse into every attribute so nested thrift objects get migrated too.
    for child in obj.__dict__.values():
        self.migrate_thrift_obj(child)
|
[
"Helper",
"function",
"that",
"can",
"be",
"called",
"when",
"serializing",
"/",
"deserializing",
"thrift",
"objects",
"whose",
"definitions",
"have",
"changed",
"we",
"need",
"to",
"make",
"sure",
"we",
"initialize",
"the",
"new",
"attributes",
"to",
"their",
"default",
"value"
] |
shopkick/flawless
|
python
|
https://github.com/shopkick/flawless/blob/c54b63ca1991c153e6f75080536f6df445aacc64/flawless/lib/storage/base.py#L47-L57
|
[
"def",
"migrate_thrift_obj",
"(",
"self",
",",
"obj",
")",
":",
"if",
"not",
"hasattr",
"(",
"obj",
",",
"\"thrift_spec\"",
")",
":",
"return",
"obj_key_set",
"=",
"set",
"(",
"obj",
".",
"__dict__",
".",
"keys",
"(",
")",
")",
"thrift_field_map",
"=",
"{",
"t",
"[",
"2",
"]",
":",
"t",
"[",
"4",
"]",
"for",
"t",
"in",
"obj",
".",
"thrift_spec",
"if",
"t",
"}",
"obj",
".",
"__dict__",
".",
"update",
"(",
"{",
"f",
":",
"copy",
".",
"copy",
"(",
"thrift_field_map",
"[",
"f",
"]",
")",
"for",
"f",
"in",
"set",
"(",
"thrift_field_map",
".",
"keys",
"(",
")",
")",
"-",
"obj_key_set",
"}",
")",
"for",
"value",
"in",
"obj",
".",
"__dict__",
".",
"values",
"(",
")",
":",
"self",
".",
"migrate_thrift_obj",
"(",
"value",
")"
] |
c54b63ca1991c153e6f75080536f6df445aacc64
|
test
|
url_to_image
|
Fetch an image from url and convert it into a Pillow Image object
|
resizeimage/helpers.py
|
def url_to_image(url):
    """
    Fetch the resource at `url` and return it as an in-memory binary
    file-like object, ready to be passed to `PIL.Image.open`.

    url: the image URL to download.
    Returns an `io.BytesIO` positioned at the start of the downloaded bytes.
    """
    from io import BytesIO
    r = requests.get(url)
    # `r.content` is raw bytes; BytesIO (not StringIO) is the correct wrapper
    # on Python 3, and works identically on Python 2.
    image = BytesIO(r.content)
    return image
|
def url_to_image(url):
    """
    Fetch the resource at `url` and return it as an in-memory binary
    file-like object, ready to be passed to `PIL.Image.open`.

    url: the image URL to download.
    Returns an `io.BytesIO` positioned at the start of the downloaded bytes.
    """
    from io import BytesIO
    r = requests.get(url)
    # `r.content` is raw bytes; BytesIO (not StringIO) is the correct wrapper
    # on Python 3, and works identically on Python 2.
    image = BytesIO(r.content)
    return image
|
[
"Fetch",
"an",
"image",
"from",
"url",
"and",
"convert",
"it",
"into",
"a",
"Pillow",
"Image",
"object"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/helpers.py#L14-L20
|
[
"def",
"url_to_image",
"(",
"url",
")",
":",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"image",
"=",
"StringIO",
"(",
"r",
".",
"content",
")",
"return",
"image"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
string_to_image
|
Convert string data into a Pillow Image object
|
resizeimage/helpers.py
|
def string_to_image(image_string):
    """
    Build a Pillow Image from raw image data held in a string.
    """
    # Wrap the raw data in a file-like object so Pillow can parse it.
    stream = StringIO(image_string)
    return Image.open(stream)
|
def string_to_image(image_string):
    """
    Build a Pillow Image from raw image data held in a string.
    """
    # Wrap the raw data in a file-like object so Pillow can parse it.
    stream = StringIO(image_string)
    return Image.open(stream)
|
[
"Convert",
"string",
"datas",
"into",
"a",
"Pillow",
"Image",
"object"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/helpers.py#L23-L29
|
[
"def",
"string_to_image",
"(",
"image_string",
")",
":",
"image_filelike",
"=",
"StringIO",
"(",
"image_string",
")",
"image",
"=",
"Image",
".",
"open",
"(",
"image_filelike",
")",
"return",
"image"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
validate
|
Return a decorator that validates arguments with provided `validator`
function.
This will also store the validator function as `func.validate`.
The decorator returned by this function, can bypass the validator
if `validate=False` is passed as argument otherwise the function is
called directly.
The validator must raise an exception, if the function can not
be called.
|
resizeimage/resizeimage.py
|
def validate(validator):
    """
    Build a decorator that runs `validator(image, size)` before the wrapped
    resize function.

    The produced wrapper accepts an extra keyword argument `validate`
    (default True); passing `validate=False` bypasses the check and calls
    the function directly. `validator` must raise if the function cannot
    be applied to the given arguments.
    """
    def bind(func):
        """Bind the wrapper to one particular validator function."""
        @wraps(func)
        def guarded(image, size, validate=True):
            if not validate:
                return func(image, size)
            validator(image, size)
            return func(image, size)
        return guarded
    return bind
|
def validate(validator):
    """
    Build a decorator that runs `validator(image, size)` before the wrapped
    resize function.

    The produced wrapper accepts an extra keyword argument `validate`
    (default True); passing `validate=False` bypasses the check and calls
    the function directly. `validator` must raise if the function cannot
    be applied to the given arguments.
    """
    def bind(func):
        """Bind the wrapper to one particular validator function."""
        @wraps(func)
        def guarded(image, size, validate=True):
            if not validate:
                return func(image, size)
            validator(image, size)
            return func(image, size)
        return guarded
    return bind
|
[
"Return",
"a",
"decorator",
"that",
"validates",
"arguments",
"with",
"provided",
"validator",
"function",
"."
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L11-L35
|
[
"def",
"validate",
"(",
"validator",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"\"\"\"Bound decorator to a particular validator function\"\"\"",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"image",
",",
"size",
",",
"validate",
"=",
"True",
")",
":",
"if",
"validate",
":",
"validator",
"(",
"image",
",",
"size",
")",
"return",
"func",
"(",
"image",
",",
"size",
")",
"return",
"wrapper",
"return",
"decorator"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
_is_big_enough
|
Check that the image's size superior to `size`
|
resizeimage/resizeimage.py
|
def _is_big_enough(image, size):
"""Check that the image's size superior to `size`"""
if (size[0] > image.size[0]) and (size[1] > image.size[1]):
raise ImageSizeError(image.size, size)
|
def _is_big_enough(image, size):
"""Check that the image's size superior to `size`"""
if (size[0] > image.size[0]) and (size[1] > image.size[1]):
raise ImageSizeError(image.size, size)
|
[
"Check",
"that",
"the",
"image",
"s",
"size",
"superior",
"to",
"size"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L38-L41
|
[
"def",
"_is_big_enough",
"(",
"image",
",",
"size",
")",
":",
"if",
"(",
"size",
"[",
"0",
"]",
">",
"image",
".",
"size",
"[",
"0",
"]",
")",
"and",
"(",
"size",
"[",
"1",
"]",
">",
"image",
".",
"size",
"[",
"1",
"]",
")",
":",
"raise",
"ImageSizeError",
"(",
"image",
".",
"size",
",",
"size",
")"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
_width_is_big_enough
|
Check that the image width is superior to `width`
|
resizeimage/resizeimage.py
|
def _width_is_big_enough(image, width):
"""Check that the image width is superior to `width`"""
if width > image.size[0]:
raise ImageSizeError(image.size[0], width)
|
def _width_is_big_enough(image, width):
"""Check that the image width is superior to `width`"""
if width > image.size[0]:
raise ImageSizeError(image.size[0], width)
|
[
"Check",
"that",
"the",
"image",
"width",
"is",
"superior",
"to",
"width"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L44-L47
|
[
"def",
"_width_is_big_enough",
"(",
"image",
",",
"width",
")",
":",
"if",
"width",
">",
"image",
".",
"size",
"[",
"0",
"]",
":",
"raise",
"ImageSizeError",
"(",
"image",
".",
"size",
"[",
"0",
"]",
",",
"width",
")"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
_height_is_big_enough
|
Check that the image height is superior to `height`
|
resizeimage/resizeimage.py
|
def _height_is_big_enough(image, height):
"""Check that the image height is superior to `height`"""
if height > image.size[1]:
raise ImageSizeError(image.size[1], height)
|
def _height_is_big_enough(image, height):
"""Check that the image height is superior to `height`"""
if height > image.size[1]:
raise ImageSizeError(image.size[1], height)
|
[
"Check",
"that",
"the",
"image",
"height",
"is",
"superior",
"to",
"height"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L50-L53
|
[
"def",
"_height_is_big_enough",
"(",
"image",
",",
"height",
")",
":",
"if",
"height",
">",
"image",
".",
"size",
"[",
"1",
"]",
":",
"raise",
"ImageSizeError",
"(",
"image",
".",
"size",
"[",
"1",
"]",
",",
"height",
")"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
resize_crop
|
Crop the image with a centered rectangle of the specified size
image: a Pillow image instance
size: a list of two integers [width, height]
|
resizeimage/resizeimage.py
|
def resize_crop(image, size):
    """
    Return a copy of `image` cropped to a centered `size[0]` x `size[1]` box.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    """
    original_format = image.format
    working = image.copy()
    width, height = working.size
    # Half the excess on each axis; ceil'd so fractional margins round inward.
    margin_x = (width - size[0]) / 2
    margin_y = (height - size[1]) / 2
    box = (
        int(math.ceil(margin_x)),
        int(math.ceil(margin_y)),
        int(math.ceil(width - margin_x)),
        int(math.ceil(height - margin_y)),
    )
    cropped = working.crop(box)
    # crop() returns a new image with no format; restore the original's.
    cropped.format = original_format
    return cropped
|
def resize_crop(image, size):
    """
    Return a copy of `image` cropped to a centered `size[0]` x `size[1]` box.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    """
    original_format = image.format
    working = image.copy()
    width, height = working.size
    # Half the excess on each axis; ceil'd so fractional margins round inward.
    margin_x = (width - size[0]) / 2
    margin_y = (height - size[1]) / 2
    box = (
        int(math.ceil(margin_x)),
        int(math.ceil(margin_y)),
        int(math.ceil(width - margin_x)),
        int(math.ceil(height - margin_y)),
    )
    cropped = working.crop(box)
    # crop() returns a new image with no format; restore the original's.
    cropped.format = original_format
    return cropped
|
[
"Crop",
"the",
"image",
"with",
"a",
"centered",
"rectangle",
"of",
"the",
"specified",
"size",
"image",
":",
"a",
"Pillow",
"image",
"instance",
"size",
":",
"a",
"list",
"of",
"two",
"integers",
"[",
"width",
"height",
"]"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L57-L74
|
[
"def",
"resize_crop",
"(",
"image",
",",
"size",
")",
":",
"img_format",
"=",
"image",
".",
"format",
"image",
"=",
"image",
".",
"copy",
"(",
")",
"old_size",
"=",
"image",
".",
"size",
"left",
"=",
"(",
"old_size",
"[",
"0",
"]",
"-",
"size",
"[",
"0",
"]",
")",
"/",
"2",
"top",
"=",
"(",
"old_size",
"[",
"1",
"]",
"-",
"size",
"[",
"1",
"]",
")",
"/",
"2",
"right",
"=",
"old_size",
"[",
"0",
"]",
"-",
"left",
"bottom",
"=",
"old_size",
"[",
"1",
"]",
"-",
"top",
"rect",
"=",
"[",
"int",
"(",
"math",
".",
"ceil",
"(",
"x",
")",
")",
"for",
"x",
"in",
"(",
"left",
",",
"top",
",",
"right",
",",
"bottom",
")",
"]",
"left",
",",
"top",
",",
"right",
",",
"bottom",
"=",
"rect",
"crop",
"=",
"image",
".",
"crop",
"(",
"(",
"left",
",",
"top",
",",
"right",
",",
"bottom",
")",
")",
"crop",
".",
"format",
"=",
"img_format",
"return",
"crop"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
resize_cover
|
Resize image according to size.
image: a Pillow image instance
size: a list of two integers [width, height]
|
resizeimage/resizeimage.py
|
def resize_cover(image, size, resample=Image.LANCZOS):
    """
    Scale a copy of `image` so it completely covers `size`, then
    center-crop the result down to exactly `size`.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    resample: Pillow resampling filter used for the scaling step.
    """
    original_format = image.format
    working = image.copy()
    source_width, source_height = working.size
    # Use the larger of the two scale factors so both dimensions cover `size`.
    scale = max(size[0] / source_width, size[1] / source_height)
    covered = (
        int(math.ceil(source_width * scale)),
        int(math.ceil(source_height * scale)),
    )
    working = working.resize(covered, resample)
    working = resize_crop(working, size)
    working.format = original_format
    return working
|
def resize_cover(image, size, resample=Image.LANCZOS):
    """
    Scale a copy of `image` so it completely covers `size`, then
    center-crop the result down to exactly `size`.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    resample: Pillow resampling filter used for the scaling step.
    """
    original_format = image.format
    working = image.copy()
    source_width, source_height = working.size
    # Use the larger of the two scale factors so both dimensions cover `size`.
    scale = max(size[0] / source_width, size[1] / source_height)
    covered = (
        int(math.ceil(source_width * scale)),
        int(math.ceil(source_height * scale)),
    )
    working = working.resize(covered, resample)
    working = resize_crop(working, size)
    working.format = original_format
    return working
|
[
"Resize",
"image",
"according",
"to",
"size",
".",
"image",
":",
"a",
"Pillow",
"image",
"instance",
"size",
":",
"a",
"list",
"of",
"two",
"integers",
"[",
"width",
"height",
"]"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L78-L95
|
[
"def",
"resize_cover",
"(",
"image",
",",
"size",
",",
"resample",
"=",
"Image",
".",
"LANCZOS",
")",
":",
"img_format",
"=",
"image",
".",
"format",
"img",
"=",
"image",
".",
"copy",
"(",
")",
"img_size",
"=",
"img",
".",
"size",
"ratio",
"=",
"max",
"(",
"size",
"[",
"0",
"]",
"/",
"img_size",
"[",
"0",
"]",
",",
"size",
"[",
"1",
"]",
"/",
"img_size",
"[",
"1",
"]",
")",
"new_size",
"=",
"[",
"int",
"(",
"math",
".",
"ceil",
"(",
"img_size",
"[",
"0",
"]",
"*",
"ratio",
")",
")",
",",
"int",
"(",
"math",
".",
"ceil",
"(",
"img_size",
"[",
"1",
"]",
"*",
"ratio",
")",
")",
"]",
"img",
"=",
"img",
".",
"resize",
"(",
"(",
"new_size",
"[",
"0",
"]",
",",
"new_size",
"[",
"1",
"]",
")",
",",
"resample",
")",
"img",
"=",
"resize_crop",
"(",
"img",
",",
"size",
")",
"img",
".",
"format",
"=",
"img_format",
"return",
"img"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
resize_contain
|
Resize image according to size.
image: a Pillow image instance
size: a list of two integers [width, height]
|
resizeimage/resizeimage.py
|
def resize_contain(image, size, resample=Image.LANCZOS, bg_color=(255, 255, 255, 0)):
    """
    Shrink `image` to fit within `size` and center it on a background.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    resample: Pillow resampling filter used by thumbnail().
    bg_color: RGBA fill for the padding around the thumbnail.
    Returns an RGB image of exactly `size`, with the original image's
    format preserved on its `format` attribute.
    """
    img_format = image.format
    img = image.copy()
    img.thumbnail((size[0], size[1]), resample)
    background = Image.new('RGBA', (size[0], size[1]), bg_color)
    img_position = (
        int(math.ceil((size[0] - img.size[0]) / 2)),
        int(math.ceil((size[1] - img.size[1]) / 2))
    )
    background.paste(img, img_position)
    # Image.convert() returns a *new* image whose `format` is None. The old
    # code assigned `format` to the RGBA intermediate and then converted, so
    # the attribute was lost on the returned image; assign it after convert.
    result = background.convert('RGB')
    result.format = img_format
    return result
|
def resize_contain(image, size, resample=Image.LANCZOS, bg_color=(255, 255, 255, 0)):
    """
    Shrink `image` to fit within `size` and center it on a background.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    resample: Pillow resampling filter used by thumbnail().
    bg_color: RGBA fill for the padding around the thumbnail.
    Returns an RGB image of exactly `size`, with the original image's
    format preserved on its `format` attribute.
    """
    img_format = image.format
    img = image.copy()
    img.thumbnail((size[0], size[1]), resample)
    background = Image.new('RGBA', (size[0], size[1]), bg_color)
    img_position = (
        int(math.ceil((size[0] - img.size[0]) / 2)),
        int(math.ceil((size[1] - img.size[1]) / 2))
    )
    background.paste(img, img_position)
    # Image.convert() returns a *new* image whose `format` is None. The old
    # code assigned `format` to the RGBA intermediate and then converted, so
    # the attribute was lost on the returned image; assign it after convert.
    result = background.convert('RGB')
    result.format = img_format
    return result
|
[
"Resize",
"image",
"according",
"to",
"size",
".",
"image",
":",
"a",
"Pillow",
"image",
"instance",
"size",
":",
"a",
"list",
"of",
"two",
"integers",
"[",
"width",
"height",
"]"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L98-L114
|
[
"def",
"resize_contain",
"(",
"image",
",",
"size",
",",
"resample",
"=",
"Image",
".",
"LANCZOS",
",",
"bg_color",
"=",
"(",
"255",
",",
"255",
",",
"255",
",",
"0",
")",
")",
":",
"img_format",
"=",
"image",
".",
"format",
"img",
"=",
"image",
".",
"copy",
"(",
")",
"img",
".",
"thumbnail",
"(",
"(",
"size",
"[",
"0",
"]",
",",
"size",
"[",
"1",
"]",
")",
",",
"resample",
")",
"background",
"=",
"Image",
".",
"new",
"(",
"'RGBA'",
",",
"(",
"size",
"[",
"0",
"]",
",",
"size",
"[",
"1",
"]",
")",
",",
"bg_color",
")",
"img_position",
"=",
"(",
"int",
"(",
"math",
".",
"ceil",
"(",
"(",
"size",
"[",
"0",
"]",
"-",
"img",
".",
"size",
"[",
"0",
"]",
")",
"/",
"2",
")",
")",
",",
"int",
"(",
"math",
".",
"ceil",
"(",
"(",
"size",
"[",
"1",
"]",
"-",
"img",
".",
"size",
"[",
"1",
"]",
")",
"/",
"2",
")",
")",
")",
"background",
".",
"paste",
"(",
"img",
",",
"img_position",
")",
"background",
".",
"format",
"=",
"img_format",
"return",
"background",
".",
"convert",
"(",
"'RGB'",
")"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
resize_width
|
Resize image according to size.
image: a Pillow image instance
size: an integer or a list or tuple of two integers [width, height]
|
resizeimage/resizeimage.py
|
def resize_width(image, size, resample=Image.LANCZOS):
    """
    Resize `image` so its width equals the requested width, keeping aspect ratio.

    image: a Pillow image instance
    size: an integer width, or a list/tuple of two integers [width, height]
        (only the width is used).
    resample: Pillow resampling filter used by thumbnail().
    """
    try:
        width = size[0]
    except TypeError:
        # `size` is a bare integer rather than a [width, height] sequence.
        # (A bare `except:` here would also swallow KeyboardInterrupt etc.)
        width = size
    img_format = image.format
    img = image.copy()
    img_size = img.size
    # If the original image already has the requested width, return it
    # unchanged (fix for issue #16).
    if img_size[0] == width:
        return image
    # NOTE(review): relies on true division; under Python 2 this needs
    # `from __future__ import division` at module level — confirm.
    new_height = int(math.ceil((width / img_size[0]) * img_size[1]))
    img.thumbnail((width, new_height), resample)
    img.format = img_format
    return img
|
def resize_width(image, size, resample=Image.LANCZOS):
    """
    Resize `image` so its width equals the requested width, keeping aspect ratio.

    image: a Pillow image instance
    size: an integer width, or a list/tuple of two integers [width, height]
        (only the width is used).
    resample: Pillow resampling filter used by thumbnail().
    """
    try:
        width = size[0]
    except TypeError:
        # `size` is a bare integer rather than a [width, height] sequence.
        # (A bare `except:` here would also swallow KeyboardInterrupt etc.)
        width = size
    img_format = image.format
    img = image.copy()
    img_size = img.size
    # If the original image already has the requested width, return it
    # unchanged (fix for issue #16).
    if img_size[0] == width:
        return image
    # NOTE(review): relies on true division; under Python 2 this needs
    # `from __future__ import division` at module level — confirm.
    new_height = int(math.ceil((width / img_size[0]) * img_size[1]))
    img.thumbnail((width, new_height), resample)
    img.format = img_format
    return img
|
[
"Resize",
"image",
"according",
"to",
"size",
".",
"image",
":",
"a",
"Pillow",
"image",
"instance",
"size",
":",
"an",
"integer",
"or",
"a",
"list",
"or",
"tuple",
"of",
"two",
"integers",
"[",
"width",
"height",
"]"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L118-L138
|
[
"def",
"resize_width",
"(",
"image",
",",
"size",
",",
"resample",
"=",
"Image",
".",
"LANCZOS",
")",
":",
"try",
":",
"width",
"=",
"size",
"[",
"0",
"]",
"except",
":",
"width",
"=",
"size",
"img_format",
"=",
"image",
".",
"format",
"img",
"=",
"image",
".",
"copy",
"(",
")",
"img_size",
"=",
"img",
".",
"size",
"# If the origial image has already the good width, return it",
"# fix issue #16",
"if",
"img_size",
"[",
"0",
"]",
"==",
"width",
":",
"return",
"image",
"new_height",
"=",
"int",
"(",
"math",
".",
"ceil",
"(",
"(",
"width",
"/",
"img_size",
"[",
"0",
"]",
")",
"*",
"img_size",
"[",
"1",
"]",
")",
")",
"img",
".",
"thumbnail",
"(",
"(",
"width",
",",
"new_height",
")",
",",
"resample",
")",
"img",
".",
"format",
"=",
"img_format",
"return",
"img"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
resize_height
|
Resize image according to size.
image: a Pillow image instance
size: an integer or a list or tuple of two integers [width, height]
|
resizeimage/resizeimage.py
|
def resize_height(image, size, resample=Image.LANCZOS):
    """
    Resize `image` so its height equals the requested height, keeping aspect ratio.

    image: a Pillow image instance
    size: an integer height, or a list/tuple of two integers [width, height]
        (only the height is used).
    resample: Pillow resampling filter used by thumbnail().
    """
    try:
        height = size[1]
    except TypeError:
        # `size` is a bare integer rather than a [width, height] sequence.
        # (A bare `except:` here would also swallow KeyboardInterrupt etc.)
        height = size
    img_format = image.format
    img = image.copy()
    img_size = img.size
    # If the original image already has the requested height, return it
    # unchanged (fix for issue #16).
    if img_size[1] == height:
        return image
    # NOTE(review): relies on true division; under Python 2 this needs
    # `from __future__ import division` at module level — confirm.
    new_width = int(math.ceil((height / img_size[1]) * img_size[0]))
    img.thumbnail((new_width, height), resample)
    img.format = img_format
    return img
|
def resize_height(image, size, resample=Image.LANCZOS):
    """
    Resize `image` so its height equals the requested height, keeping aspect ratio.

    image: a Pillow image instance
    size: an integer height, or a list/tuple of two integers [width, height]
        (only the height is used).
    resample: Pillow resampling filter used by thumbnail().
    """
    try:
        height = size[1]
    except TypeError:
        # `size` is a bare integer rather than a [width, height] sequence.
        # (A bare `except:` here would also swallow KeyboardInterrupt etc.)
        height = size
    img_format = image.format
    img = image.copy()
    img_size = img.size
    # If the original image already has the requested height, return it
    # unchanged (fix for issue #16).
    if img_size[1] == height:
        return image
    # NOTE(review): relies on true division; under Python 2 this needs
    # `from __future__ import division` at module level — confirm.
    new_width = int(math.ceil((height / img_size[1]) * img_size[0]))
    img.thumbnail((new_width, height), resample)
    img.format = img_format
    return img
|
[
"Resize",
"image",
"according",
"to",
"size",
".",
"image",
":",
"a",
"Pillow",
"image",
"instance",
"size",
":",
"an",
"integer",
"or",
"a",
"list",
"or",
"tuple",
"of",
"two",
"integers",
"[",
"width",
"height",
"]"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L142-L162
|
[
"def",
"resize_height",
"(",
"image",
",",
"size",
",",
"resample",
"=",
"Image",
".",
"LANCZOS",
")",
":",
"try",
":",
"height",
"=",
"size",
"[",
"1",
"]",
"except",
":",
"height",
"=",
"size",
"img_format",
"=",
"image",
".",
"format",
"img",
"=",
"image",
".",
"copy",
"(",
")",
"img_size",
"=",
"img",
".",
"size",
"# If the origial image has already the good height, return it",
"# fix issue #16",
"if",
"img_size",
"[",
"1",
"]",
"==",
"height",
":",
"return",
"image",
"new_width",
"=",
"int",
"(",
"math",
".",
"ceil",
"(",
"(",
"height",
"/",
"img_size",
"[",
"1",
"]",
")",
"*",
"img_size",
"[",
"0",
"]",
")",
")",
"img",
".",
"thumbnail",
"(",
"(",
"new_width",
",",
"height",
")",
",",
"resample",
")",
"img",
".",
"format",
"=",
"img_format",
"return",
"img"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
resize_thumbnail
|
Resize image according to size.
image: a Pillow image instance
size: a list of two integers [width, height]
|
resizeimage/resizeimage.py
|
def resize_thumbnail(image, size, resample=Image.LANCZOS):
    """
    Shrink a copy of `image` to fit within `size`, preserving aspect ratio.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    resample: Pillow resampling filter used by thumbnail().
    """
    original_format = image.format
    thumb = image.copy()
    thumb.thumbnail((size[0], size[1]), resample)
    # thumbnail() modifies in place and does not touch `format`; keep it.
    thumb.format = original_format
    return thumb
|
def resize_thumbnail(image, size, resample=Image.LANCZOS):
    """
    Shrink a copy of `image` to fit within `size`, preserving aspect ratio.

    image: a Pillow image instance
    size: a list of two integers [width, height]
    resample: Pillow resampling filter used by thumbnail().
    """
    original_format = image.format
    thumb = image.copy()
    thumb.thumbnail((size[0], size[1]), resample)
    # thumbnail() modifies in place and does not touch `format`; keep it.
    thumb.format = original_format
    return thumb
|
[
"Resize",
"image",
"according",
"to",
"size",
".",
"image",
":",
"a",
"Pillow",
"image",
"instance",
"size",
":",
"a",
"list",
"of",
"two",
"integers",
"[",
"width",
"height",
"]"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L165-L176
|
[
"def",
"resize_thumbnail",
"(",
"image",
",",
"size",
",",
"resample",
"=",
"Image",
".",
"LANCZOS",
")",
":",
"img_format",
"=",
"image",
".",
"format",
"img",
"=",
"image",
".",
"copy",
"(",
")",
"img",
".",
"thumbnail",
"(",
"(",
"size",
"[",
"0",
"]",
",",
"size",
"[",
"1",
"]",
")",
",",
"resample",
")",
"img",
".",
"format",
"=",
"img_format",
"return",
"img"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
test
|
resize
|
Helper function to access one of the resize function.
method: one among 'crop', 'cover', 'contain', 'width', 'height' or 'thumbnail'
image: a Pillow image instance
size: a list or tuple of two integers [width, height]
|
resizeimage/resizeimage.py
|
def resize(method, *args, **kwargs):
    """
    Dispatch to one of the module's resize_* functions by name.

    method: one of 'crop', 'cover', 'contain', 'width', 'height', 'thumbnail'
    Remaining positional/keyword arguments are forwarded unchanged to the
    selected resize function.
    """
    valid_methods = ('crop', 'cover', 'contain', 'width', 'height', 'thumbnail')
    if method not in valid_methods:
        raise ValueError(u"method argument should be one of \
            'crop', 'cover', 'contain', 'width', 'height' or 'thumbnail'")
    # Look the target function up on this module so monkeypatching still works.
    target = getattr(sys.modules[__name__], 'resize_%s' % method)
    return target(*args, **kwargs)
|
def resize(method, *args, **kwargs):
    """
    Dispatch to one of the module's resize_* functions by name.

    method: one of 'crop', 'cover', 'contain', 'width', 'height', 'thumbnail'
    Remaining positional/keyword arguments are forwarded unchanged to the
    selected resize function.
    """
    valid_methods = ('crop', 'cover', 'contain', 'width', 'height', 'thumbnail')
    if method not in valid_methods:
        raise ValueError(u"method argument should be one of \
            'crop', 'cover', 'contain', 'width', 'height' or 'thumbnail'")
    # Look the target function up on this module so monkeypatching still works.
    target = getattr(sys.modules[__name__], 'resize_%s' % method)
    return target(*args, **kwargs)
|
[
"Helper",
"function",
"to",
"access",
"one",
"of",
"the",
"resize",
"function",
".",
"method",
":",
"one",
"among",
"crop",
"cover",
"contain",
"width",
"height",
"or",
"thumbnail",
"image",
":",
"a",
"Pillow",
"image",
"instance",
"size",
":",
"a",
"list",
"or",
"tuple",
"of",
"two",
"integers",
"[",
"width",
"height",
"]"
] |
VingtCinq/python-resize-image
|
python
|
https://github.com/VingtCinq/python-resize-image/blob/a4e645792ef30c5fcc558df6da6de18b1ecb95ea/resizeimage/resizeimage.py#L179-L194
|
[
"def",
"resize",
"(",
"method",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"method",
"not",
"in",
"[",
"'crop'",
",",
"'cover'",
",",
"'contain'",
",",
"'width'",
",",
"'height'",
",",
"'thumbnail'",
"]",
":",
"raise",
"ValueError",
"(",
"u\"method argument should be one of \\\n 'crop', 'cover', 'contain', 'width', 'height' or 'thumbnail'\"",
")",
"return",
"getattr",
"(",
"sys",
".",
"modules",
"[",
"__name__",
"]",
",",
"'resize_%s'",
"%",
"method",
")",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
a4e645792ef30c5fcc558df6da6de18b1ecb95ea
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.