| id (int32, 0–252k) | repo (string, 7–55 chars) | path (string, 4–127 chars) | func_name (string, 1–88 chars) | original_string (string, 75–19.8k chars) | language (1 class) | code (string, 75–19.8k chars) | code_tokens (list) | docstring (string, 3–17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87–242 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|
238,900
|
znerol/txexiftool
|
txexiftool/__init__.py
|
ExiftoolProtocol.dataReceived
|
def dataReceived(self, data):
"""
Parses chunks of bytes into responses.
Whenever a complete response is received, this method extracts its
payload and calls L{responseReceived} to process it.
@param data: A chunk of data representing a (possibly partial) response
@type data: C{bytes}
"""
size = len(self._buffer) + len(data)
if size > self.MAX_LENGTH:
self.lengthLimitExceeded(size)
self._buffer += data
start = 0
for match in self._pattern.finditer(self._buffer):
# The start of the sentinel marks the end of the response.
end = match.start()
tag = int(match.group(1))
self.responseReceived(self._buffer[start:end], tag)
# Advance start position to the beginning of the next line
start = match.end() + 1
if start:
self._buffer = self._buffer[start:]
|
python
|
def dataReceived(self, data):
"""
Parses chunks of bytes into responses.
Whenever a complete response is received, this method extracts its
payload and calls L{responseReceived} to process it.
@param data: A chunk of data representing a (possibly partial) response
@type data: C{bytes}
"""
size = len(self._buffer) + len(data)
if size > self.MAX_LENGTH:
self.lengthLimitExceeded(size)
self._buffer += data
start = 0
for match in self._pattern.finditer(self._buffer):
# The start of the sentinel marks the end of the response.
end = match.start()
tag = int(match.group(1))
self.responseReceived(self._buffer[start:end], tag)
# Advance start position to the beginning of the next line
start = match.end() + 1
if start:
self._buffer = self._buffer[start:]
|
[
"def",
"dataReceived",
"(",
"self",
",",
"data",
")",
":",
"size",
"=",
"len",
"(",
"self",
".",
"_buffer",
")",
"+",
"len",
"(",
"data",
")",
"if",
"size",
">",
"self",
".",
"MAX_LENGTH",
":",
"self",
".",
"lengthLimitExceeded",
"(",
"size",
")",
"self",
".",
"_buffer",
"+=",
"data",
"start",
"=",
"0",
"for",
"match",
"in",
"self",
".",
"_pattern",
".",
"finditer",
"(",
"self",
".",
"_buffer",
")",
":",
"# The start of the sentinel marks the end of the response.",
"end",
"=",
"match",
".",
"start",
"(",
")",
"tag",
"=",
"int",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
"self",
".",
"responseReceived",
"(",
"self",
".",
"_buffer",
"[",
"start",
":",
"end",
"]",
",",
"tag",
")",
"# Advance start position to the beginning of the next line",
"start",
"=",
"match",
".",
"end",
"(",
")",
"+",
"1",
"if",
"start",
":",
"self",
".",
"_buffer",
"=",
"self",
".",
"_buffer",
"[",
"start",
":",
"]"
] |
Parses chunks of bytes into responses.
Whenever a complete response is received, this method extracts its
payload and calls L{responseReceived} to process it.
@param data: A chunk of data representing a (possibly partial) response
@type data: C{bytes}
|
[
"Parses",
"chunks",
"of",
"bytes",
"into",
"responses",
"."
] |
a3d75a31262a492f81072840d4fc818f65bf3265
|
https://github.com/znerol/txexiftool/blob/a3d75a31262a492f81072840d4fc818f65bf3265/txexiftool/__init__.py#L42-L68
|
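The buffering logic in `dataReceived` above is easier to see outside of Twisted. The following is a minimal standalone sketch of the same split-on-sentinel idea; the `{ready<tag>}` sentinel regex is assumed for illustration (the real pattern is compiled elsewhere in the class), and `split_responses` is a hypothetical helper, not part of txexiftool.

```python
import re

# Assumed sentinel: exiftool's -stay_open mode ends each response with a
# "{ready<tag>}" line; the exact pattern used by ExiftoolProtocol may differ.
_READY = re.compile(rb'\{ready(\d+)\}\r?\n?')

def split_responses(buffer, data):
    """Append *data* to *buffer*, return (remaining_buffer, [(payload, tag), ...])."""
    buffer += data
    start = 0
    complete = []
    for match in _READY.finditer(buffer):
        # The start of the sentinel marks the end of the response payload.
        complete.append((buffer[start:match.start()], int(match.group(1))))
        start = match.end()
    return buffer[start:], complete

# Two responses delivered in one chunk:
rest, out = split_responses(b'', b'payload-1\n{ready1}\npayload-2\n{ready2}\n')
assert out == [(b'payload-1\n', 1), (b'payload-2\n', 2)]
assert rest == b''
```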
238,901
|
znerol/txexiftool
|
txexiftool/__init__.py
|
ExiftoolProtocol.responseReceived
|
def responseReceived(self, response, tag):
"""
Receives some characters of a netstring.
Whenever a complete response is received, this method calls the
deferred associated with it.
@param response: A complete response generated by exiftool.
@type response: C{bytes}
@param tag: The tag associated with the response
@type tag: C{int}
"""
self._queue.pop(tag).callback(response)
|
python
|
def responseReceived(self, response, tag):
"""
Receives some characters of a netstring.
Whenever a complete response is received, this method calls the
deferred associated with it.
@param response: A complete response generated by exiftool.
@type response: C{bytes}
@param tag: The tag associated with the response
@type tag: C{int}
"""
self._queue.pop(tag).callback(response)
|
[
"def",
"responseReceived",
"(",
"self",
",",
"response",
",",
"tag",
")",
":",
"self",
".",
"_queue",
".",
"pop",
"(",
"tag",
")",
".",
"callback",
"(",
"response",
")"
] |
Receives some characters of a netstring.
Whenever a complete response is received, this method calls the
deferred associated with it.
@param response: A complete response generated by exiftool.
@type response: C{bytes}
@param tag: The tag associated with the response
@type tag: C{int}
|
[
"Receives",
"some",
"characters",
"of",
"a",
"netstring",
"."
] |
a3d75a31262a492f81072840d4fc818f65bf3265
|
https://github.com/znerol/txexiftool/blob/a3d75a31262a492f81072840d4fc818f65bf3265/txexiftool/__init__.py#L70-L82
|
238,902
|
znerol/txexiftool
|
txexiftool/__init__.py
|
ExiftoolProtocol.execute
|
def execute(self, *args):
"""
Pass one command to exiftool and return a deferred which is fired as
soon as the command completes.
@param *args: Command line arguments passed to exiftool
@type *args: C{unicode}
@rtype: C{Deferred}
@return: A deferred whose callback will be invoked when the command
completed.
"""
result = defer.Deferred()
if self.connected and not self._stopped:
self._tag += 1
args = tuple(args) + ('-execute{:d}'.format(self._tag), '')
safe_args = [fsencode(arg) for arg in args]
self.transport.write(b'\n'.join(safe_args))
result = defer.Deferred()
self._queue[self._tag] = result
else:
result.errback(error.ConnectionClosed('Not connected to exiftool'))
return result
|
python
|
def execute(self, *args):
"""
Pass one command to exiftool and return a deferred which is fired as
soon as the command completes.
@param *args: Command line arguments passed to exiftool
@type *args: C{unicode}
@rtype: C{Deferred}
@return: A deferred whose callback will be invoked when the command
completed.
"""
result = defer.Deferred()
if self.connected and not self._stopped:
self._tag += 1
args = tuple(args) + ('-execute{:d}'.format(self._tag), '')
safe_args = [fsencode(arg) for arg in args]
self.transport.write(b'\n'.join(safe_args))
result = defer.Deferred()
self._queue[self._tag] = result
else:
result.errback(error.ConnectionClosed('Not connected to exiftool'))
return result
|
[
"def",
"execute",
"(",
"self",
",",
"*",
"args",
")",
":",
"result",
"=",
"defer",
".",
"Deferred",
"(",
")",
"if",
"self",
".",
"connected",
"and",
"not",
"self",
".",
"_stopped",
":",
"self",
".",
"_tag",
"+=",
"1",
"args",
"=",
"tuple",
"(",
"args",
")",
"+",
"(",
"'-execute{:d}'",
".",
"format",
"(",
"self",
".",
"_tag",
")",
",",
"''",
")",
"safe_args",
"=",
"[",
"fsencode",
"(",
"arg",
")",
"for",
"arg",
"in",
"args",
"]",
"self",
".",
"transport",
".",
"write",
"(",
"b'\\n'",
".",
"join",
"(",
"safe_args",
")",
")",
"result",
"=",
"defer",
".",
"Deferred",
"(",
")",
"self",
".",
"_queue",
"[",
"self",
".",
"_tag",
"]",
"=",
"result",
"else",
":",
"result",
".",
"errback",
"(",
"error",
".",
"ConnectionClosed",
"(",
"'Not connected to exiftool'",
")",
")",
"return",
"result"
] |
Pass one command to exiftool and return a deferred which is fired as
soon as the command completes.
@param *args: Command line arguments passed to exiftool
@type *args: C{unicode}
@rtype: C{Deferred}
@return: A deferred whose callback will be invoked when the command
completed.
|
[
"Pass",
"one",
"command",
"to",
"exiftool",
"and",
"return",
"a",
"deferred",
"which",
"is",
"fired",
"as",
"soon",
"as",
"the",
"command",
"completes",
"."
] |
a3d75a31262a492f81072840d4fc818f65bf3265
|
https://github.com/znerol/txexiftool/blob/a3d75a31262a492f81072840d4fc818f65bf3265/txexiftool/__init__.py#L95-L121
|
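A small sketch of the bytes that `execute()` above ends up writing to exiftool's stdin, assuming `fsencode` is `os.fsencode`; the tag value and argument list are invented for the example.

```python
import os

tag = 1
args = ('-j', '-G', 'photo.jpg')  # hypothetical exiftool arguments
wire = b'\n'.join(os.fsencode(a) for a in args + ('-execute{:d}'.format(tag), ''))
assert wire == b'-j\n-G\nphoto.jpg\n-execute1\n'
```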
238,903
|
znerol/txexiftool
|
txexiftool/__init__.py
|
ExiftoolProtocol.loseConnection
|
def loseConnection(self):
"""
Close the connection and terminate the exiftool process.
@rtype: C{Deferred}
@return: A deferred whose callback will be invoked when the connection
was closed.
"""
if self._stopped:
result = self._stopped
elif self.connected:
result = defer.Deferred()
self._stopped = result
self.transport.write(b'\n'.join((b'-stay_open', b'False', b'')))
else:
# Already disconnected.
result = defer.succeed(self)
return result
|
python
|
def loseConnection(self):
"""
Close the connection and terminate the exiftool process.
@rtype: C{Deferred}
@return: A deferred whose callback will be invoked when the connection
was closed.
"""
if self._stopped:
result = self._stopped
elif self.connected:
result = defer.Deferred()
self._stopped = result
self.transport.write(b'\n'.join((b'-stay_open', b'False', b'')))
else:
# Already disconnected.
result = defer.succeed(self)
return result
|
[
"def",
"loseConnection",
"(",
"self",
")",
":",
"if",
"self",
".",
"_stopped",
":",
"result",
"=",
"self",
".",
"_stopped",
"elif",
"self",
".",
"connected",
":",
"result",
"=",
"defer",
".",
"Deferred",
"(",
")",
"self",
".",
"_stopped",
"=",
"result",
"self",
".",
"transport",
".",
"write",
"(",
"b'\\n'",
".",
"join",
"(",
"(",
"b'-stay_open'",
",",
"b'False'",
",",
"b''",
")",
")",
")",
"else",
":",
"# Already disconnected.",
"result",
"=",
"defer",
".",
"succeed",
"(",
"self",
")",
"return",
"result"
] |
Close the connection and terminate the exiftool process.
@rtype: C{Deferred}
@return: A deferred whose callback will be invoked when the connection
was closed.
|
[
"Close",
"the",
"connection",
"and",
"terminate",
"the",
"exiftool",
"process",
"."
] |
a3d75a31262a492f81072840d4fc818f65bf3265
|
https://github.com/znerol/txexiftool/blob/a3d75a31262a492f81072840d4fc818f65bf3265/txexiftool/__init__.py#L124-L142
|
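For reference, the shutdown command that `loseConnection()` above writes evaluates to a two-line `-stay_open False` request terminated by a newline:

```python
assert b'\n'.join((b'-stay_open', b'False', b'')) == b'-stay_open\nFalse\n'
```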
238,904
|
znerol/txexiftool
|
txexiftool/__init__.py
|
ExiftoolProtocol.connectionLost
|
def connectionLost(self, reason=protocol.connectionDone):
"""
Check whether termination was intended and invoke the deferred.
If the connection terminated unexpectedly, reraise the failure.
@type reason: L{twisted.python.failure.Failure}
"""
self.connected = 0
for pending in self._queue.values():
pending.errback(reason)
self._queue.clear()
if self._stopped:
result = self if reason.check(error.ConnectionDone) else reason
self._stopped.callback(result)
self._stopped = None
else:
reason.raiseException()
|
python
|
def connectionLost(self, reason=protocol.connectionDone):
"""
Check whether termination was intended and invoke the deferred.
If the connection terminated unexpectedly, reraise the failure.
@type reason: L{twisted.python.failure.Failure}
"""
self.connected = 0
for pending in self._queue.values():
pending.errback(reason)
self._queue.clear()
if self._stopped:
result = self if reason.check(error.ConnectionDone) else reason
self._stopped.callback(result)
self._stopped = None
else:
reason.raiseException()
|
[
"def",
"connectionLost",
"(",
"self",
",",
"reason",
"=",
"protocol",
".",
"connectionDone",
")",
":",
"self",
".",
"connected",
"=",
"0",
"for",
"pending",
"in",
"self",
".",
"_queue",
".",
"values",
"(",
")",
":",
"pending",
".",
"errback",
"(",
"reason",
")",
"self",
".",
"_queue",
".",
"clear",
"(",
")",
"if",
"self",
".",
"_stopped",
":",
"result",
"=",
"self",
"if",
"reason",
".",
"check",
"(",
"error",
".",
"ConnectionDone",
")",
"else",
"reason",
"self",
".",
"_stopped",
".",
"callback",
"(",
"result",
")",
"self",
".",
"_stopped",
"=",
"None",
"else",
":",
"reason",
".",
"raiseException",
"(",
")"
] |
Check whether termination was intended and invoke the deferred.
If the connection terminated unexpectedly, reraise the failure.
@type reason: L{twisted.python.failure.Failure}
|
[
"Check",
"whether",
"termination",
"was",
"intended",
"and",
"invoke",
"the",
"deferred",
"."
] |
a3d75a31262a492f81072840d4fc818f65bf3265
|
https://github.com/znerol/txexiftool/blob/a3d75a31262a492f81072840d4fc818f65bf3265/txexiftool/__init__.py#L144-L163
|
238,905
|
JoaoFelipe/pyposast
|
pyposast/utils.py
|
find_next_character
|
def find_next_character(code, position, char):
"""Find next char and return its first and last positions"""
end = LineCol(code, *position)
while not end.eof and end.char() in WHITESPACE:
end.inc()
if not end.eof and end.char() == char:
return end.tuple(), inc_tuple(end.tuple())
return None, None
|
python
|
def find_next_character(code, position, char):
"""Find next char and return its first and last positions"""
end = LineCol(code, *position)
while not end.eof and end.char() in WHITESPACE:
end.inc()
if not end.eof and end.char() == char:
return end.tuple(), inc_tuple(end.tuple())
return None, None
|
[
"def",
"find_next_character",
"(",
"code",
",",
"position",
",",
"char",
")",
":",
"end",
"=",
"LineCol",
"(",
"code",
",",
"*",
"position",
")",
"while",
"not",
"end",
".",
"eof",
"and",
"end",
".",
"char",
"(",
")",
"in",
"WHITESPACE",
":",
"end",
".",
"inc",
"(",
")",
"if",
"not",
"end",
".",
"eof",
"and",
"end",
".",
"char",
"(",
")",
"==",
"char",
":",
"return",
"end",
".",
"tuple",
"(",
")",
",",
"inc_tuple",
"(",
"end",
".",
"tuple",
"(",
")",
")",
"return",
"None",
",",
"None"
] |
Find next char and return its first and last positions
|
[
"Find",
"next",
"char",
"and",
"return",
"its",
"first",
"and",
"last",
"positions"
] |
497c88c66b451ff2cd7354be1af070c92e119f41
|
https://github.com/JoaoFelipe/pyposast/blob/497c88c66b451ff2cd7354be1af070c92e119f41/pyposast/utils.py#L132-L140
|
238,906
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/ProjectionParams.py
|
ProjectionParams.from_value
|
def from_value(value = None):
"""
Converts specified value into ProjectionParams.
:param value: value to be converted
:return: a newly created ProjectionParams.
"""
if isinstance(value, ProjectionParams):
return value
array = AnyValueArray.from_value(value) if value != None else AnyValueArray()
return ProjectionParams(array)
|
python
|
def from_value(value = None):
"""
Converts specified value into ProjectionParams.
:param value: value to be converted
:return: a newly created ProjectionParams.
"""
if isinstance(value, ProjectionParams):
return value
array = AnyValueArray.from_value(value) if value != None else AnyValueArray()
return ProjectionParams(array)
|
[
"def",
"from_value",
"(",
"value",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"ProjectionParams",
")",
":",
"return",
"value",
"array",
"=",
"AnyValueArray",
".",
"from_value",
"(",
"value",
")",
"if",
"value",
"!=",
"None",
"else",
"AnyValueArray",
"(",
")",
"return",
"ProjectionParams",
"(",
"array",
")"
] |
Converts specified value into ProjectionParams.
:param value: value to be converted
:return: a newly created ProjectionParams.
|
[
"Converts",
"specified",
"value",
"into",
"ProjectionParams",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/ProjectionParams.py#L50-L61
|
238,907
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/ProjectionParams.py
|
ProjectionParams.to_string
|
def to_string(self):
"""
Gets a string representation of the object.
The result is a comma-separated list of projection fields
"field1,field2.field21,field2.field22.field221"
:return: a string representation of the object.
"""
builder = ""
index = 0
while index < self.__len__():
if index > 0:
builder = builder + ','
builder = builder + super(ProjectionParams, self).__getitem__(index)
index = index + 1
return builder
|
python
|
def to_string(self):
"""
Gets a string representation of the object.
The result is a comma-separated list of projection fields
"field1,field2.field21,field2.field22.field221"
:return: a string representation of the object.
"""
builder = ""
index = 0
while index < self.__len__():
if index > 0:
builder = builder + ','
builder = builder + super(ProjectionParams, self).__getitem__(index)
index = index + 1
return builder
|
[
"def",
"to_string",
"(",
"self",
")",
":",
"builder",
"=",
"\"\"",
"index",
"=",
"0",
"while",
"index",
"<",
"self",
".",
"__len__",
"(",
")",
":",
"if",
"index",
">",
"0",
":",
"builder",
"=",
"builder",
"+",
"','",
"builder",
"=",
"builder",
"+",
"super",
"(",
"ProjectionParams",
",",
"self",
")",
".",
"__getitem__",
"(",
"index",
")",
"index",
"=",
"index",
"+",
"1",
"return",
"builder"
] |
Gets a string representation of the object.
The result is a comma-separated list of projection fields
"field1,field2.field21,field2.field22.field221"
:return: a string representation of the object.
|
[
"Gets",
"a",
"string",
"representation",
"of",
"the",
"object",
".",
"The",
"result",
"is",
"a",
"comma",
"-",
"separated",
"list",
"of",
"projection",
"fields",
"field1",
"field2",
".",
"field21",
"field2",
".",
"field22",
".",
"field221"
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/ProjectionParams.py#L79-L96
|
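Since the loop in `to_string()` above walks the object's own items, it produces the same result as a plain comma join over those items; the field names below are taken from the docstring example.

```python
fields = ["field1", "field2.field21", "field2.field22.field221"]
assert ",".join(fields) == "field1,field2.field21,field2.field22.field221"
```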
238,908
|
mikicz/arca
|
arca/backend/base.py
|
BaseBackend.snake_case_backend_name
|
def snake_case_backend_name(self):
""" CamelCase -> camel_case
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', type(self).__name__)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
|
python
|
def snake_case_backend_name(self):
""" CamelCase -> camel_case
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', type(self).__name__)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
|
[
"def",
"snake_case_backend_name",
"(",
"self",
")",
":",
"s1",
"=",
"re",
".",
"sub",
"(",
"'(.)([A-Z][a-z]+)'",
",",
"r'\\1_\\2'",
",",
"type",
"(",
"self",
")",
".",
"__name__",
")",
"return",
"re",
".",
"sub",
"(",
"'([a-z0-9])([A-Z])'",
",",
"r'\\1_\\2'",
",",
"s1",
")",
".",
"lower",
"(",
")"
] |
CamelCase -> camel_case
|
[
"CamelCase",
"-",
">",
"camel_case"
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/base.py#L69-L73
|
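The two regex passes in `snake_case_backend_name` above can be checked in isolation by applying them to a literal class name instead of `type(self).__name__`; the backend names used below are illustrative.

```python
import re

def snake_case(name):
    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

assert snake_case('DockerBackend') == 'docker_backend'
assert snake_case('CurrentEnvironmentBackend') == 'current_environment_backend'
```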
238,909
|
mikicz/arca
|
arca/backend/base.py
|
BaseBackend.get_setting
|
def get_setting(self, key, default=NOT_SET):
""" Gets a setting for the key.
:raise KeyError: If the key is not set and default isn't provided.
"""
if self._arca is None:
raise LazySettingProperty.SettingsNotReady
return self._arca.settings.get(*self.get_settings_keys(key), default=default)
|
python
|
def get_setting(self, key, default=NOT_SET):
""" Gets a setting for the key.
:raise KeyError: If the key is not set and default isn't provided.
"""
if self._arca is None:
raise LazySettingProperty.SettingsNotReady
return self._arca.settings.get(*self.get_settings_keys(key), default=default)
|
[
"def",
"get_setting",
"(",
"self",
",",
"key",
",",
"default",
"=",
"NOT_SET",
")",
":",
"if",
"self",
".",
"_arca",
"is",
"None",
":",
"raise",
"LazySettingProperty",
".",
"SettingsNotReady",
"return",
"self",
".",
"_arca",
".",
"settings",
".",
"get",
"(",
"*",
"self",
".",
"get_settings_keys",
"(",
"key",
")",
",",
"default",
"=",
"default",
")"
] |
Gets a setting for the key.
:raise KeyError: If the key is not set and default isn't provided.
|
[
"Gets",
"a",
"setting",
"for",
"the",
"key",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/base.py#L82-L89
|
238,910
|
mikicz/arca
|
arca/backend/base.py
|
BaseBackend.hash_file_contents
|
def hash_file_contents(requirements_option: RequirementsOptions, path: Path) -> str:
""" Returns a SHA256 hash of the contents of ``path`` combined with the Arca version.
"""
return hashlib.sha256(path.read_bytes() + bytes(
requirements_option.name + arca.__version__, "utf-8"
)).hexdigest()
|
python
|
def hash_file_contents(requirements_option: RequirementsOptions, path: Path) -> str:
""" Returns a SHA256 hash of the contents of ``path`` combined with the Arca version.
"""
return hashlib.sha256(path.read_bytes() + bytes(
requirements_option.name + arca.__version__, "utf-8"
)).hexdigest()
|
[
"def",
"hash_file_contents",
"(",
"requirements_option",
":",
"RequirementsOptions",
",",
"path",
":",
"Path",
")",
"->",
"str",
":",
"return",
"hashlib",
".",
"sha256",
"(",
"path",
".",
"read_bytes",
"(",
")",
"+",
"bytes",
"(",
"requirements_option",
".",
"name",
"+",
"arca",
".",
"__version__",
",",
"\"utf-8\"",
")",
")",
".",
"hexdigest",
"(",
")"
] |
Returns a SHA256 hash of the contents of ``path`` combined with the Arca version.
|
[
"Returns",
"a",
"SHA256",
"hash",
"of",
"the",
"contents",
"of",
"path",
"combined",
"with",
"the",
"Arca",
"version",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/base.py#L92-L97
|
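A standalone illustration of the hashing scheme in `hash_file_contents()` above, with the file contents, requirements option name, and version string all invented for the example.

```python
import hashlib

contents = b"six==1.16.0\n"                              # pretend requirements.txt bytes
suffix = ("requirements_txt" + "0.3.3").encode("utf-8")  # option name + version
digest = hashlib.sha256(contents + suffix).hexdigest()
assert len(digest) == 64  # hex-encoded SHA-256
```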
238,911
|
mikicz/arca
|
arca/backend/base.py
|
BaseBackend.get_requirements_information
|
def get_requirements_information(self, path: Path) -> Tuple[RequirementsOptions, Optional[str]]:
"""
Returns the information needed to install requirements for a repository - what kind is used and the hash
of contents of the defining file.
"""
if self.pipfile_location is not None:
pipfile = path / self.pipfile_location / "Pipfile"
pipfile_lock = path / self.pipfile_location / "Pipfile.lock"
pipfile_exists = pipfile.exists()
pipfile_lock_exists = pipfile_lock.exists()
if pipfile_exists and pipfile_lock_exists:
option = RequirementsOptions.pipfile
return option, self.hash_file_contents(option, pipfile_lock)
elif pipfile_exists:
raise BuildError("Only the Pipfile is included in the repository, Arca does not support that.")
elif pipfile_lock_exists:
raise BuildError("Only the Pipfile.lock file is include in the repository, Arca does not support that.")
if self.requirements_location:
requirements_file = path / self.requirements_location
if requirements_file.exists():
option = RequirementsOptions.requirements_txt
return option, self.hash_file_contents(option, requirements_file)
return RequirementsOptions.no_requirements, None
|
python
|
def get_requirements_information(self, path: Path) -> Tuple[RequirementsOptions, Optional[str]]:
"""
Returns the information needed to install requirements for a repository - what kind is used and the hash
of contents of the defining file.
"""
if self.pipfile_location is not None:
pipfile = path / self.pipfile_location / "Pipfile"
pipfile_lock = path / self.pipfile_location / "Pipfile.lock"
pipfile_exists = pipfile.exists()
pipfile_lock_exists = pipfile_lock.exists()
if pipfile_exists and pipfile_lock_exists:
option = RequirementsOptions.pipfile
return option, self.hash_file_contents(option, pipfile_lock)
elif pipfile_exists:
raise BuildError("Only the Pipfile is included in the repository, Arca does not support that.")
elif pipfile_lock_exists:
raise BuildError("Only the Pipfile.lock file is include in the repository, Arca does not support that.")
if self.requirements_location:
requirements_file = path / self.requirements_location
if requirements_file.exists():
option = RequirementsOptions.requirements_txt
return option, self.hash_file_contents(option, requirements_file)
return RequirementsOptions.no_requirements, None
|
[
"def",
"get_requirements_information",
"(",
"self",
",",
"path",
":",
"Path",
")",
"->",
"Tuple",
"[",
"RequirementsOptions",
",",
"Optional",
"[",
"str",
"]",
"]",
":",
"if",
"self",
".",
"pipfile_location",
"is",
"not",
"None",
":",
"pipfile",
"=",
"path",
"/",
"self",
".",
"pipfile_location",
"/",
"\"Pipfile\"",
"pipfile_lock",
"=",
"path",
"/",
"self",
".",
"pipfile_location",
"/",
"\"Pipfile.lock\"",
"pipfile_exists",
"=",
"pipfile",
".",
"exists",
"(",
")",
"pipfile_lock_exists",
"=",
"pipfile_lock",
".",
"exists",
"(",
")",
"if",
"pipfile_exists",
"and",
"pipfile_lock_exists",
":",
"option",
"=",
"RequirementsOptions",
".",
"pipfile",
"return",
"option",
",",
"self",
".",
"hash_file_contents",
"(",
"option",
",",
"pipfile_lock",
")",
"elif",
"pipfile_exists",
":",
"raise",
"BuildError",
"(",
"\"Only the Pipfile is included in the repository, Arca does not support that.\"",
")",
"elif",
"pipfile_lock_exists",
":",
"raise",
"BuildError",
"(",
"\"Only the Pipfile.lock file is include in the repository, Arca does not support that.\"",
")",
"if",
"self",
".",
"requirements_location",
":",
"requirements_file",
"=",
"path",
"/",
"self",
".",
"requirements_location",
"if",
"requirements_file",
".",
"exists",
"(",
")",
":",
"option",
"=",
"RequirementsOptions",
".",
"requirements_txt",
"return",
"option",
",",
"self",
".",
"hash_file_contents",
"(",
"option",
",",
"requirements_file",
")",
"return",
"RequirementsOptions",
".",
"no_requirements",
",",
"None"
] |
Returns the information needed to install requirements for a repository - what kind is used and the hash
of contents of the defining file.
|
[
"Returns",
"the",
"information",
"needed",
"to",
"install",
"requirements",
"for",
"a",
"repository",
"-",
"what",
"kind",
"is",
"used",
"and",
"the",
"hash",
"of",
"contents",
"of",
"the",
"defining",
"file",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/base.py#L99-L126
|
238,912
|
mikicz/arca
|
arca/backend/base.py
|
BaseBackend.serialized_task
|
def serialized_task(self, task: Task) -> Tuple[str, str]:
""" Returns the name of the task definition file and its contents.
"""
return f"{task.hash}.json", task.json
|
python
|
def serialized_task(self, task: Task) -> Tuple[str, str]:
""" Returns the name of the task definition file and its contents.
"""
return f"{task.hash}.json", task.json
|
[
"def",
"serialized_task",
"(",
"self",
",",
"task",
":",
"Task",
")",
"->",
"Tuple",
"[",
"str",
",",
"str",
"]",
":",
"return",
"f\"{task.hash}.json\"",
",",
"task",
".",
"json"
] |
Returns the name of the task definition file and its contents.
|
[
"Returns",
"the",
"name",
"of",
"the",
"task",
"definition",
"file",
"and",
"its",
"contents",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/base.py#L128-L131
|
238,913
|
mikicz/arca
|
arca/backend/base.py
|
BaseBackend.run
|
def run(self, repo: str, branch: str, task: Task, git_repo: Repo, repo_path: Path) -> Result: # pragma: no cover
"""
Executes the script and returns the result.
Must be implemented by subclasses.
:param repo: Repo URL
:param branch: Branch name
:param task: The requested :class:`Task`
:param git_repo: A :class:`Repo <git.repo.base.Repo>` of the repo/branch
:param repo_path: :class:`Path <pathlib.Path>` to the location where the repo is stored.
:return: The output of the task in a :class:`Result` instance.
"""
raise NotImplementedError
|
python
|
def run(self, repo: str, branch: str, task: Task, git_repo: Repo, repo_path: Path) -> Result: # pragma: no cover
"""
Executes the script and returns the result.
Must be implemented by subclasses.
:param repo: Repo URL
:param branch: Branch name
:param task: The requested :class:`Task`
:param git_repo: A :class:`Repo <git.repo.base.Repo>` of the repo/branch
:param repo_path: :class:`Path <pathlib.Path>` to the location where the repo is stored.
:return: The output of the task in a :class:`Result` instance.
"""
raise NotImplementedError
|
[
"def",
"run",
"(",
"self",
",",
"repo",
":",
"str",
",",
"branch",
":",
"str",
",",
"task",
":",
"Task",
",",
"git_repo",
":",
"Repo",
",",
"repo_path",
":",
"Path",
")",
"->",
"Result",
":",
"# pragma: no cover",
"raise",
"NotImplementedError"
] |
Executes the script and returns the result.
Must be implemented by subclasses.
:param repo: Repo URL
:param branch: Branch name
:param task: The requested :class:`Task`
:param git_repo: A :class:`Repo <git.repo.base.Repo>` of the repo/branch
:param repo_path: :class:`Path <pathlib.Path>` to the location where the repo is stored.
:return: The output of the task in a :class:`Result` instance.
|
[
"Executes",
"the",
"script",
"and",
"returns",
"the",
"result",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/base.py#L133-L146
|
238,914
|
mikicz/arca
|
arca/backend/base.py
|
BaseRunInSubprocessBackend.get_or_create_environment
|
def get_or_create_environment(self, repo: str, branch: str,
git_repo: Repo, repo_path: Path) -> str: # pragma: no cover
"""
Abstract method which must be implemented in subclasses, which must return a str path to a Python executable
which will be used to run the script.
See :meth:`BaseBackend.run <arca.BaseBackend.run>` to see arguments description.
"""
raise NotImplementedError
|
python
|
def get_or_create_environment(self, repo: str, branch: str,
git_repo: Repo, repo_path: Path) -> str: # pragma: no cover
"""
Abstract method which must be implemented in subclasses, which must return a str path to a Python executable
which will be used to run the script.
See :meth:`BaseBackend.run <arca.BaseBackend.run>` to see arguments description.
"""
raise NotImplementedError
|
[
"def",
"get_or_create_environment",
"(",
"self",
",",
"repo",
":",
"str",
",",
"branch",
":",
"str",
",",
"git_repo",
":",
"Repo",
",",
"repo_path",
":",
"Path",
")",
"->",
"str",
":",
"# pragma: no cover",
"raise",
"NotImplementedError"
] |
Abstract method which must be implemented in subclasses, which must return a str path to a Python executable
which will be used to run the script.
See :meth:`BaseBackend.run <arca.BaseBackend.run>` to see arguments description.
|
[
"Abstract",
"method",
"which",
"must",
"be",
"implemented",
"in",
"subclasses",
"which",
"must",
"return",
"a",
"str",
"path",
"to",
"a",
"Python",
"executable",
"which",
"will",
"be",
"used",
"to",
"run",
"the",
"script",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/base.py#L153-L161
|
238,915
|
voicecom/pgtool
|
pgtool/pgtool.py
|
quote_names
|
def quote_names(db, names):
"""psycopg2 doesn't know how to quote identifier names, so we ask the server"""
c = db.cursor()
c.execute("SELECT pg_catalog.quote_ident(n) FROM pg_catalog.unnest(%s::text[]) n", [list(names)])
return [name for (name,) in c]
|
python
|
def quote_names(db, names):
"""psycopg2 doesn't know how to quote identifier names, so we ask the server"""
c = db.cursor()
c.execute("SELECT pg_catalog.quote_ident(n) FROM pg_catalog.unnest(%s::text[]) n", [list(names)])
return [name for (name,) in c]
|
[
"def",
"quote_names",
"(",
"db",
",",
"names",
")",
":",
"c",
"=",
"db",
".",
"cursor",
"(",
")",
"c",
".",
"execute",
"(",
"\"SELECT pg_catalog.quote_ident(n) FROM pg_catalog.unnest(%s::text[]) n\"",
",",
"[",
"list",
"(",
"names",
")",
"]",
")",
"return",
"[",
"name",
"for",
"(",
"name",
",",
")",
"in",
"c",
"]"
] |
psycopg2 doesn't know how to quote identifier names, so we ask the server
|
[
"psycopg2",
"doesn",
"t",
"know",
"how",
"to",
"quote",
"identifier",
"names",
"so",
"we",
"ask",
"the",
"server"
] |
36b8682bfca614d784fe58451e0cbc41315bc72e
|
https://github.com/voicecom/pgtool/blob/36b8682bfca614d784fe58451e0cbc41315bc72e/pgtool/pgtool.py#L63-L67
|
238,916
|
voicecom/pgtool
|
pgtool/pgtool.py
|
execute_catch
|
def execute_catch(c, sql, vars=None):
"""Run a query, but ignore any errors. For error recovery paths where the error handler should not raise another."""
try:
c.execute(sql, vars)
except Exception as err:
cmd = sql.split(' ', 1)[0]
log.error("Error executing %s: %s", cmd, err)
|
python
|
def execute_catch(c, sql, vars=None):
"""Run a query, but ignore any errors. For error recovery paths where the error handler should not raise another."""
try:
c.execute(sql, vars)
except Exception as err:
cmd = sql.split(' ', 1)[0]
log.error("Error executing %s: %s", cmd, err)
|
[
"def",
"execute_catch",
"(",
"c",
",",
"sql",
",",
"vars",
"=",
"None",
")",
":",
"try",
":",
"c",
".",
"execute",
"(",
"sql",
",",
"vars",
")",
"except",
"Exception",
"as",
"err",
":",
"cmd",
"=",
"sql",
".",
"split",
"(",
"' '",
",",
"1",
")",
"[",
"0",
"]",
"log",
".",
"error",
"(",
"\"Error executing %s: %s\"",
",",
"cmd",
",",
"err",
")"
] |
Run a query, but ignore any errors. For error recovery paths where the error handler should not raise another.
|
[
"Run",
"a",
"query",
"but",
"ignore",
"any",
"errors",
".",
"For",
"error",
"recovery",
"paths",
"where",
"the",
"error",
"handler",
"should",
"not",
"raise",
"another",
"."
] |
36b8682bfca614d784fe58451e0cbc41315bc72e
|
https://github.com/voicecom/pgtool/blob/36b8682bfca614d784fe58451e0cbc41315bc72e/pgtool/pgtool.py#L70-L76
|
238,917
|
voicecom/pgtool
|
pgtool/pgtool.py
|
cmd_copy
|
def cmd_copy():
"""Uses CREATE DATABASE ... TEMPLATE to create a duplicate of a database. Additionally copies over database-specific
settings.
When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of
place in the form DEST_old_YYYYMMDD (unless --no-backup is specified).
"""
db = connect()
if args.force and db_exists(db, args.dest):
tmp_db = generate_alt_dbname(db, args.dest, 'tmp')
pg_copy(db, args.src, tmp_db)
pg_move_extended(db, tmp_db, args.dest)
else:
pg_copy(db, args.src, args.dest)
|
python
|
def cmd_copy():
"""Uses CREATE DATABASE ... TEMPLATE to create a duplicate of a database. Additionally copies over database-specific
settings.
When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of
place in the form DEST_old_YYYYMMDD (unless --no-backup is specified).
"""
db = connect()
if args.force and db_exists(db, args.dest):
tmp_db = generate_alt_dbname(db, args.dest, 'tmp')
pg_copy(db, args.src, tmp_db)
pg_move_extended(db, tmp_db, args.dest)
else:
pg_copy(db, args.src, args.dest)
|
[
"def",
"cmd_copy",
"(",
")",
":",
"db",
"=",
"connect",
"(",
")",
"if",
"args",
".",
"force",
"and",
"db_exists",
"(",
"db",
",",
"args",
".",
"dest",
")",
":",
"tmp_db",
"=",
"generate_alt_dbname",
"(",
"db",
",",
"args",
".",
"dest",
",",
"'tmp'",
")",
"pg_copy",
"(",
"db",
",",
"args",
".",
"src",
",",
"tmp_db",
")",
"pg_move_extended",
"(",
"db",
",",
"tmp_db",
",",
"args",
".",
"dest",
")",
"else",
":",
"pg_copy",
"(",
"db",
",",
"args",
".",
"src",
",",
"args",
".",
"dest",
")"
] |
Uses CREATE DATABASE ... TEMPLATE to create a duplicate of a database. Additionally copies over database-specific
settings.
When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of
place in the form DEST_old_YYYYMMDD (unless --no-backup is specified).
|
[
"Uses",
"CREATE",
"DATABASE",
"...",
"TEMPLATE",
"to",
"create",
"a",
"duplicate",
"of",
"a",
"database",
".",
"Additionally",
"copies",
"over",
"database",
"-",
"specific",
"settings",
"."
] |
36b8682bfca614d784fe58451e0cbc41315bc72e
|
https://github.com/voicecom/pgtool/blob/36b8682bfca614d784fe58451e0cbc41315bc72e/pgtool/pgtool.py#L290-L306
|
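A hedged sketch of the statement that a TEMPLATE-based copy boils down to; the database names are placeholders, and the real code quotes identifiers server-side via `quote_names` above rather than interpolating them directly.

```python
src, dest = "prod", "prod_copy"  # placeholder database names
sql = 'CREATE DATABASE "{}" TEMPLATE "{}"'.format(dest, src)
assert sql == 'CREATE DATABASE "prod_copy" TEMPLATE "prod"'
```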
238,918
|
voicecom/pgtool
|
pgtool/pgtool.py
|
cmd_move
|
def cmd_move(db=None):
"""Rename a database within a server.
When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of
place in the form DEST_old_YYYYMMDD (unless --no-backup is specified).
"""
if db is None:
db = connect()
pg_move_extended(db, args.src, args.dest)
|
python
|
def cmd_move(db=None):
"""Rename a database within a server.
When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of
place in the form DEST_old_YYYYMMDD (unless --no-backup is specified).
"""
if db is None:
db = connect()
pg_move_extended(db, args.src, args.dest)
|
[
"def",
"cmd_move",
"(",
"db",
"=",
"None",
")",
":",
"if",
"db",
"is",
"None",
":",
"db",
"=",
"connect",
"(",
")",
"pg_move_extended",
"(",
"db",
",",
"args",
".",
"src",
",",
"args",
".",
"dest",
")"
] |
Rename a database within a server.
When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of
place in the form DEST_old_YYYYMMDD (unless --no-backup is specified).
|
[
"Rename",
"a",
"database",
"within",
"a",
"server",
"."
] |
36b8682bfca614d784fe58451e0cbc41315bc72e
|
https://github.com/voicecom/pgtool/blob/36b8682bfca614d784fe58451e0cbc41315bc72e/pgtool/pgtool.py#L309-L318
|
238,919
|
voicecom/pgtool
|
pgtool/pgtool.py
|
cmd_reindex
|
def cmd_reindex():
"""Uses CREATE INDEX CONCURRENTLY to create a duplicate index, then tries to swap the new index for the original.
The index swap is done using a short lock timeout to prevent it from interfering with running queries. Retries until
the rename succeeds.
"""
db = connect(args.database)
for idx in args.indexes:
pg_reindex(db, idx)
|
python
|
def cmd_reindex():
"""Uses CREATE INDEX CONCURRENTLY to create a duplicate index, then tries to swap the new index for the original.
The index swap is done using a short lock timeout to prevent it from interfering with running queries. Retries until
the rename succeeds.
"""
db = connect(args.database)
for idx in args.indexes:
pg_reindex(db, idx)
|
[
"def",
"cmd_reindex",
"(",
")",
":",
"db",
"=",
"connect",
"(",
"args",
".",
"database",
")",
"for",
"idx",
"in",
"args",
".",
"indexes",
":",
"pg_reindex",
"(",
"db",
",",
"idx",
")"
] |
Uses CREATE INDEX CONCURRENTLY to create a duplicate index, then tries to swap the new index for the original.
The index swap is done using a short lock timeout to prevent it from interfering with running queries. Retries until
the rename succeeds.
|
[
"Uses",
"CREATE",
"INDEX",
"CONCURRENTLY",
"to",
"create",
"a",
"duplicate",
"index",
"then",
"tries",
"to",
"swap",
"the",
"new",
"index",
"for",
"the",
"original",
"."
] |
36b8682bfca614d784fe58451e0cbc41315bc72e
|
https://github.com/voicecom/pgtool/blob/36b8682bfca614d784fe58451e0cbc41315bc72e/pgtool/pgtool.py#L331-L339
|
238,920
|
luismsgomes/stringology
|
src/stringology/align.py
|
align
|
def align(s1, s2, gap=' ', eq=operator.eq):
'''aligns two strings
>>> print(*align('pharmacy', 'farmácia', gap='_'), sep='\\n')
pharmac_y
_farmácia
>>> print(*align('advantage', 'vantagem', gap='_'), sep='\\n')
advantage_
__vantagem
'''
# first we compute the dynamic programming table
m, n = len(s1), len(s2)
table = [] # the table is extended lazily, one row at a time
row = list(range(n+1)) # the first row is 0, 1, 2, ..., n
table.append(list(row)) # copy row and insert into table
for i in range(m):
p = i
row[0] = i+1
for j in range(n):
t = 0 if eq(s1[i], s2[j]) else 1
p, row[j+1] = row[j+1], min(p+t, row[j]+1, row[j+1]+1)
table.append(list(row)) # copy row and insert into table
# now we trace the best alignment path from cell [m][n] to cell [0],[0]
s1_, s2_ = '', ''
i, j = m, n
while i != 0 and j != 0:
_, i, j, s1_, s2_ = min(
(table[i-1][j-1], i-1, j-1, s1[i-1]+s1_, s2[j-1]+s2_),
(table[i-1][j], i-1, j, s1[i-1]+s1_, gap+s2_),
(table[i][j-1], i, j-1, gap+s1_, s2[j-1]+s2_)
)
if i != 0:
s1_ = s1[:i]+s1_
s2_ = gap*i+s2_
if j != 0:
s1_ = gap*j+s1_
s2_ = s2[:j]+s2_
return s1_, s2_
|
python
|
def align(s1, s2, gap=' ', eq=operator.eq):
'''aligns two strings
>>> print(*align('pharmacy', 'farmácia', gap='_'), sep='\\n')
pharmac_y
_farmácia
>>> print(*align('advantage', 'vantagem', gap='_'), sep='\\n')
advantage_
__vantagem
'''
# first we compute the dynamic programming table
m, n = len(s1), len(s2)
table = [] # the table is extended lazily, one row at a time
row = list(range(n+1)) # the first row is 0, 1, 2, ..., n
table.append(list(row)) # copy row and insert into table
for i in range(m):
p = i
row[0] = i+1
for j in range(n):
t = 0 if eq(s1[i], s2[j]) else 1
p, row[j+1] = row[j+1], min(p+t, row[j]+1, row[j+1]+1)
table.append(list(row)) # copy row and insert into table
# now we trace the best alignment path from cell [m][n] to cell [0],[0]
s1_, s2_ = '', ''
i, j = m, n
while i != 0 and j != 0:
_, i, j, s1_, s2_ = min(
(table[i-1][j-1], i-1, j-1, s1[i-1]+s1_, s2[j-1]+s2_),
(table[i-1][j], i-1, j, s1[i-1]+s1_, gap+s2_),
(table[i][j-1], i, j-1, gap+s1_, s2[j-1]+s2_)
)
if i != 0:
s1_ = s1[:i]+s1_
s2_ = gap*i+s2_
if j != 0:
s1_ = gap*j+s1_
s2_ = s2[:j]+s2_
return s1_, s2_
|
[
"def",
"align",
"(",
"s1",
",",
"s2",
",",
"gap",
"=",
"' '",
",",
"eq",
"=",
"operator",
".",
"eq",
")",
":",
"# first we compute the dynamic programming table",
"m",
",",
"n",
"=",
"len",
"(",
"s1",
")",
",",
"len",
"(",
"s2",
")",
"table",
"=",
"[",
"]",
"# the table is extended lazily, one row at a time",
"row",
"=",
"list",
"(",
"range",
"(",
"n",
"+",
"1",
")",
")",
"# the first row is 0, 1, 2, ..., n",
"table",
".",
"append",
"(",
"list",
"(",
"row",
")",
")",
"# copy row and insert into table",
"for",
"i",
"in",
"range",
"(",
"m",
")",
":",
"p",
"=",
"i",
"row",
"[",
"0",
"]",
"=",
"i",
"+",
"1",
"for",
"j",
"in",
"range",
"(",
"n",
")",
":",
"t",
"=",
"0",
"if",
"eq",
"(",
"s1",
"[",
"i",
"]",
",",
"s2",
"[",
"j",
"]",
")",
"else",
"1",
"p",
",",
"row",
"[",
"j",
"+",
"1",
"]",
"=",
"row",
"[",
"j",
"+",
"1",
"]",
",",
"min",
"(",
"p",
"+",
"t",
",",
"row",
"[",
"j",
"]",
"+",
"1",
",",
"row",
"[",
"j",
"+",
"1",
"]",
"+",
"1",
")",
"table",
".",
"append",
"(",
"list",
"(",
"row",
")",
")",
"# copy row and insert into table",
"# now we trace the best alignment path from cell [m][n] to cell [0],[0]",
"s1_",
",",
"s2_",
"=",
"''",
",",
"''",
"i",
",",
"j",
"=",
"m",
",",
"n",
"while",
"i",
"!=",
"0",
"and",
"j",
"!=",
"0",
":",
"_",
",",
"i",
",",
"j",
",",
"s1_",
",",
"s2_",
"=",
"min",
"(",
"(",
"table",
"[",
"i",
"-",
"1",
"]",
"[",
"j",
"-",
"1",
"]",
",",
"i",
"-",
"1",
",",
"j",
"-",
"1",
",",
"s1",
"[",
"i",
"-",
"1",
"]",
"+",
"s1_",
",",
"s2",
"[",
"j",
"-",
"1",
"]",
"+",
"s2_",
")",
",",
"(",
"table",
"[",
"i",
"-",
"1",
"]",
"[",
"j",
"]",
",",
"i",
"-",
"1",
",",
"j",
",",
"s1",
"[",
"i",
"-",
"1",
"]",
"+",
"s1_",
",",
"gap",
"+",
"s2_",
")",
",",
"(",
"table",
"[",
"i",
"]",
"[",
"j",
"-",
"1",
"]",
",",
"i",
",",
"j",
"-",
"1",
",",
"gap",
"+",
"s1_",
",",
"s2",
"[",
"j",
"-",
"1",
"]",
"+",
"s2_",
")",
")",
"if",
"i",
"!=",
"0",
":",
"s1_",
"=",
"s1",
"[",
":",
"i",
"]",
"+",
"s1_",
"s2_",
"=",
"gap",
"*",
"i",
"+",
"s2_",
"if",
"j",
"!=",
"0",
":",
"s1_",
"=",
"gap",
"*",
"j",
"+",
"s1_",
"s2_",
"=",
"s2",
"[",
":",
"j",
"]",
"+",
"s2_",
"return",
"s1_",
",",
"s2_"
] |
aligns two strings
>>> print(*align('pharmacy', 'farmácia', gap='_'), sep='\\n')
pharmac_y
_farmácia
>>> print(*align('advantage', 'vantagem', gap='_'), sep='\\n')
advantage_
__vantagem
|
[
"aligns",
"two",
"strings"
] |
c627dc5a0d4c6af10946040a6463d5495d39d960
|
https://github.com/luismsgomes/stringology/blob/c627dc5a0d4c6af10946040a6463d5495d39d960/src/stringology/align.py#L4-L44
|
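Two quick property checks for `align()`, assuming the stringology package from the row above is installed: any alignment should be equal-length, and stripping the gap character should recover the original strings.

```python
from stringology.align import align

a, b = align('pharmacy', 'farmácia', gap='_')
assert len(a) == len(b)
assert a.replace('_', '') == 'pharmacy'
assert b.replace('_', '') == 'farmácia'
```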
238,921
|
luismsgomes/stringology
|
src/stringology/align.py
|
mismatches
|
def mismatches(s1, s2, context=0, eq=operator.eq):
'''extract mismatched segments from aligned strings
>>> list(mismatches(*align('pharmacy', 'farmácia'), context=1))
[('pha', ' fa'), ('mac', 'mác'), ('c y', 'cia')]
>>> list(mismatches(*align('constitution', 'constituição'), context=1))
[('ution', 'uição')]
>>> list(mismatches(*align('idea', 'ideia'), context=1))
[('e a', 'eia')]
>>> list(mismatches(*align('instructed', 'instruído'), context=1))
[('ucted', 'u ído')]
>>> list(mismatches(*align('concluded', 'concluído'), context=1))
[('uded', 'uído')]
'''
n = len(s1)
assert(len(s2) == n)
lct, rct = context, context if isinstance(context, int) else context
i = None
for j in range(n):
if eq(s1[j], s2[j]):
if i is not None:
# report mismatch segment [i:j] with lct chars of left context
# and rct chars of right context
p, q = max(0, i-lct), min(j+rct, n)
yield s1[p:q], s2[p:q]
i = None
elif i is None:
i = j
if i is not None:
p = max(i-lct, 0)
yield s1[p:], s2[p:]
|
python
|
def mismatches(s1, s2, context=0, eq=operator.eq):
'''extract mismatched segments from aligned strings
>>> list(mismatches(*align('pharmacy', 'farmácia'), context=1))
[('pha', ' fa'), ('mac', 'mác'), ('c y', 'cia')]
>>> list(mismatches(*align('constitution', 'constituição'), context=1))
[('ution', 'uição')]
>>> list(mismatches(*align('idea', 'ideia'), context=1))
[('e a', 'eia')]
>>> list(mismatches(*align('instructed', 'instruído'), context=1))
[('ucted', 'u ído')]
>>> list(mismatches(*align('concluded', 'concluído'), context=1))
[('uded', 'uído')]
'''
n = len(s1)
assert(len(s2) == n)
lct, rct = context, context if isinstance(context, int) else context
i = None
for j in range(n):
if eq(s1[j], s2[j]):
if i is not None:
# report mismatch segment [i:j] with lct chars of left context
# and rct chars of right context
p, q = max(0, i-lct), min(j+rct, n)
yield s1[p:q], s2[p:q]
i = None
elif i is None:
i = j
if i is not None:
p = max(i-lct, 0)
yield s1[p:], s2[p:]
|
[
"def",
"mismatches",
"(",
"s1",
",",
"s2",
",",
"context",
"=",
"0",
",",
"eq",
"=",
"operator",
".",
"eq",
")",
":",
"n",
"=",
"len",
"(",
"s1",
")",
"assert",
"(",
"len",
"(",
"s2",
")",
"==",
"n",
")",
"lct",
",",
"rct",
"=",
"context",
",",
"context",
"if",
"isinstance",
"(",
"context",
",",
"int",
")",
"else",
"context",
"i",
"=",
"None",
"for",
"j",
"in",
"range",
"(",
"n",
")",
":",
"if",
"eq",
"(",
"s1",
"[",
"j",
"]",
",",
"s2",
"[",
"j",
"]",
")",
":",
"if",
"i",
"is",
"not",
"None",
":",
"# report mismatch segment [i:j] with lct chars of left context",
"# and rct chars of right context",
"p",
",",
"q",
"=",
"max",
"(",
"0",
",",
"i",
"-",
"lct",
")",
",",
"min",
"(",
"j",
"+",
"rct",
",",
"n",
")",
"yield",
"s1",
"[",
"p",
":",
"q",
"]",
",",
"s2",
"[",
"p",
":",
"q",
"]",
"i",
"=",
"None",
"elif",
"i",
"is",
"None",
":",
"i",
"=",
"j",
"if",
"i",
"is",
"not",
"None",
":",
"p",
"=",
"max",
"(",
"i",
"-",
"lct",
",",
"0",
")",
"yield",
"s1",
"[",
"p",
":",
"]",
",",
"s2",
"[",
"p",
":",
"]"
] |
extract mismatched segments from aligned strings
>>> list(mismatches(*align('pharmacy', 'farmácia'), context=1))
[('pha', ' fa'), ('mac', 'mác'), ('c y', 'cia')]
>>> list(mismatches(*align('constitution', 'constituição'), context=1))
[('ution', 'uição')]
>>> list(mismatches(*align('idea', 'ideia'), context=1))
[('e a', 'eia')]
>>> list(mismatches(*align('instructed', 'instruído'), context=1))
[('ucted', 'u ído')]
>>> list(mismatches(*align('concluded', 'concluído'), context=1))
[('uded', 'uído')]
|
[
"extract",
"mismatched",
"segments",
"from",
"aligned",
"strings"
] |
c627dc5a0d4c6af10946040a6463d5495d39d960
|
https://github.com/luismsgomes/stringology/blob/c627dc5a0d4c6af10946040a6463d5495d39d960/src/stringology/align.py#L47-L81
|
238,922
|
williamfzc/ConnectionTracer
|
ConnectionTracer/__init__.py
|
_init
|
def _init():
""" build connection and init it"""
connection.connect()
# start track
# all services were provided here:
# https://android.googlesource.com/platform/system/core/+/jb-dev/adb/SERVICES.TXT
ready_data = utils.encode_data('host:track-devices')
connection.adb_socket.send(ready_data)
# get status
status = connection.adb_socket.recv(4)
# make sure track is ready
if status != b'OKAY':
raise RuntimeError('adb server return "{}", not OKAY'.format(str(status)))
|
python
|
def _init():
""" build connection and init it"""
connection.connect()
# start track
# all services were provided here:
# https://android.googlesource.com/platform/system/core/+/jb-dev/adb/SERVICES.TXT
ready_data = utils.encode_data('host:track-devices')
connection.adb_socket.send(ready_data)
# get status
status = connection.adb_socket.recv(4)
# make sure track is ready
if status != b'OKAY':
raise RuntimeError('adb server return "{}", not OKAY'.format(str(status)))
|
[
"def",
"_init",
"(",
")",
":",
"connection",
".",
"connect",
"(",
")",
"# start track",
"# all services were provided here:",
"# https://android.googlesource.com/platform/system/core/+/jb-dev/adb/SERVICES.TXT",
"ready_data",
"=",
"utils",
".",
"encode_data",
"(",
"'host:track-devices'",
")",
"connection",
".",
"adb_socket",
".",
"send",
"(",
"ready_data",
")",
"# get status",
"status",
"=",
"connection",
".",
"adb_socket",
".",
"recv",
"(",
"4",
")",
"# make sure track is ready",
"if",
"status",
"!=",
"b'OKAY'",
":",
"raise",
"RuntimeError",
"(",
"'adb server return \"{}\", not OKAY'",
".",
"format",
"(",
"str",
"(",
"status",
")",
")",
")"
] |
build connection and init it
|
[
"build",
"connection",
"and",
"init",
"it"
] |
190003e374d6903cb82d2d21a1378979dc419ed3
|
https://github.com/williamfzc/ConnectionTracer/blob/190003e374d6903cb82d2d21a1378979dc419ed3/ConnectionTracer/__init__.py#L25-L40
|
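`utils.encode_data` is not shown in this row; the sketch below is an assumption of what it most likely does, based on the ADB smart-socket protocol, which prefixes each request with its length as four hex digits.

```python
def encode_data(payload: str) -> bytes:
    body = payload.encode("utf-8")
    return "{:04x}".format(len(body)).encode("ascii") + body

assert encode_data("host:track-devices") == b"0012host:track-devices"
```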
238,923
|
gorakhargosh/pepe
|
pepe/content_types.py
|
ContentTypesDatabase.get_comment_group_for_path
|
def get_comment_group_for_path(self, pathname, default_content_type=None):
"""
Obtains the comment group for a specified pathname.
:param pathname:
The path for which the comment group will be obtained.
:return:
Returns the comment group for the specified pathname
or raises a ``ValueError`` if a content type is not found
or raises a ``KeyError`` if a comment group is not found.
Usage:
>>> db = ContentTypesDatabase()
>>> db.add_config(db._test_config, 'test_config.yaml')
>>> g = db.get_comment_group_for_path
>>> g("foobar.py")
[['#', '']]
>>> g("foobar.js")
[['/*', '*/'], ['//', '']]
>>> g('foobar.rst')
Traceback (most recent call last):
...
KeyError: 'No comment groups for content type `structured-text` for file `foobar.rst` found'
# If the content type cannot be determined, we assume the content
# type to be ``python`` in this case.
>>> g('foobar.f37993ajdha73', default_content_type='python')
[['#', '']]
>>> g("foobar.f37993ajdha73")
Traceback (most recent call last):
...
ValueError: No content type defined for file path: foobar.f37993ajdha73
>>> g("foobar.f37993ajdha73", default_content_type=None)
Traceback (most recent call last):
...
ValueError: No content type defined for file path: foobar.f37993ajdha73
"""
content_type = self.guess_content_type(pathname)
if not content_type:
# Content type is not found.
if default_content_type:
content_type = default_content_type
return self.get_comment_group(content_type)
else:
raise ValueError(
"No content type defined for file path: %s" % pathname)
else:
try:
return self.get_comment_group(content_type)
except KeyError:
raise KeyError(
"No comment groups for content type `%s` for file `%s` found" % (
content_type, pathname))
|
python
|
def get_comment_group_for_path(self, pathname, default_content_type=None):
"""
Obtains the comment group for a specified pathname.
:param pathname:
The path for which the comment group will be obtained.
:return:
Returns the comment group for the specified pathname
or raises a ``ValueError`` if a content type is not found
or raises a ``KeyError`` if a comment group is not found.
Usage:
>>> db = ContentTypesDatabase()
>>> db.add_config(db._test_config, 'test_config.yaml')
>>> g = db.get_comment_group_for_path
>>> g("foobar.py")
[['#', '']]
>>> g("foobar.js")
[['/*', '*/'], ['//', '']]
>>> g('foobar.rst')
Traceback (most recent call last):
...
KeyError: 'No comment groups for content type `structured-text` for file `foobar.rst` found'
# If the content type cannot be determined, we assume the content
# type to be ``python`` in this case.
>>> g('foobar.f37993ajdha73', default_content_type='python')
[['#', '']]
>>> g("foobar.f37993ajdha73")
Traceback (most recent call last):
...
ValueError: No content type defined for file path: foobar.f37993ajdha73
>>> g("foobar.f37993ajdha73", default_content_type=None)
Traceback (most recent call last):
...
ValueError: No content type defined for file path: foobar.f37993ajdha73
"""
content_type = self.guess_content_type(pathname)
if not content_type:
# Content type is not found.
if default_content_type:
content_type = default_content_type
return self.get_comment_group(content_type)
else:
raise ValueError(
"No content type defined for file path: %s" % pathname)
else:
try:
return self.get_comment_group(content_type)
except KeyError:
raise KeyError(
"No comment groups for content type `%s` for file `%s` found" % (
content_type, pathname))
|
[
"def",
"get_comment_group_for_path",
"(",
"self",
",",
"pathname",
",",
"default_content_type",
"=",
"None",
")",
":",
"content_type",
"=",
"self",
".",
"guess_content_type",
"(",
"pathname",
")",
"if",
"not",
"content_type",
":",
"# Content type is not found.",
"if",
"default_content_type",
":",
"content_type",
"=",
"default_content_type",
"return",
"self",
".",
"get_comment_group",
"(",
"content_type",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"No content type defined for file path: %s\"",
"%",
"pathname",
")",
"else",
":",
"try",
":",
"return",
"self",
".",
"get_comment_group",
"(",
"content_type",
")",
"except",
"KeyError",
":",
"raise",
"KeyError",
"(",
"\"No comment groups for content type `%s` for file `%s` found\"",
"%",
"(",
"content_type",
",",
"pathname",
")",
")"
] |
Obtains the comment group for a specified pathname.
:param pathname:
The path for which the comment group will be obtained.
:return:
Returns the comment group for the specified pathname
or raises a ``ValueError`` if a content type is not found
or raises a ``KeyError`` if a comment group is not found.
Usage:
>>> db = ContentTypesDatabase()
>>> db.add_config(db._test_config, 'test_config.yaml')
>>> g = db.get_comment_group_for_path
>>> g("foobar.py")
[['#', '']]
>>> g("foobar.js")
[['/*', '*/'], ['//', '']]
>>> g('foobar.rst')
Traceback (most recent call last):
...
KeyError: 'No comment groups for content type `structured-text` for file `foobar.rst` found'
# If the content type cannot be determined, we assume the content
# type to be ``python`` in this case.
>>> g('foobar.f37993ajdha73', default_content_type='python')
[['#', '']]
>>> g("foobar.f37993ajdha73")
Traceback (most recent call last):
...
ValueError: No content type defined for file path: foobar.f37993ajdha73
>>> g("foobar.f37993ajdha73", default_content_type=None)
Traceback (most recent call last):
...
ValueError: No content type defined for file path: foobar.f37993ajdha73
|
[
"Obtains",
"the",
"comment",
"group",
"for",
"a",
"specified",
"pathname",
"."
] |
1e40853378d515c99f03b3f59efa9b943d26eb62
|
https://github.com/gorakhargosh/pepe/blob/1e40853378d515c99f03b3f59efa9b943d26eb62/pepe/content_types.py#L97-L151
|
238,924
|
gorakhargosh/pepe
|
pepe/content_types.py
|
ContentTypesDatabase.add_config_file
|
def add_config_file(self, config_filename):
"""
Parses the content.types file and updates the content types database.
:param config_filename:
The path to the configuration file.
"""
with open(config_filename, 'rb') as f:
content = f.read()
config = yaml.load(content)
self.add_config(config, config_filename)
|
python
|
def add_config_file(self, config_filename):
"""
Parses the content.types file and updates the content types database.
:param config_filename:
The path to the configuration file.
"""
with open(config_filename, 'rb') as f:
content = f.read()
config = yaml.load(content)
self.add_config(config, config_filename)
|
[
"def",
"add_config_file",
"(",
"self",
",",
"config_filename",
")",
":",
"with",
"open",
"(",
"config_filename",
",",
"'rb'",
")",
"as",
"f",
":",
"content",
"=",
"f",
".",
"read",
"(",
")",
"config",
"=",
"yaml",
".",
"load",
"(",
"content",
")",
"self",
".",
"add_config",
"(",
"config",
",",
"config_filename",
")"
] |
Parses the content.types file and updates the content types database.
:param config_filename:
The path to the configuration file.
|
[
"Parses",
"the",
"content",
".",
"types",
"file",
"and",
"updates",
"the",
"content",
"types",
"database",
"."
] |
1e40853378d515c99f03b3f59efa9b943d26eb62
|
https://github.com/gorakhargosh/pepe/blob/1e40853378d515c99f03b3f59efa9b943d26eb62/pepe/content_types.py#L185-L195
|
238,925
|
gorakhargosh/pepe
|
pepe/content_types.py
|
ContentTypesDatabase.guess_content_type
|
def guess_content_type(self, pathname):
"""Guess the content type for the given path.
:param path:
The path of file for which to guess the content type.
:return:
Returns the content type or ``None`` if the content type
could not be determined.
Usage:
>>> db = ContentTypesDatabase()
>>> db.add_config_file('content-types.yaml')
>>> g = db.guess_content_type
>>> assert g("__init__.py") == "python"
>>> assert g("Makefile") == "Makefile"
>>> assert g("Makefile.gmake") == "Makefile"
>>> assert g("Makefile.py") == "python"
>>> assert g("foobar.rb") == "ruby"
>>> assert g("wscript") == "python"
>>> assert g("foo.coffee") == "coffee-script"
>>> assert g("Rakefile") == "ruby"
>>> assert g("foobar.xml") == "xml"
>>> assert g("foobar.html") == "html"
>>> assert g("foo7a738fg") == None
>>> assert g("foo.rst") == "structured-text"
>>> assert g("foo.md") == "structured-text"
>>> assert g("foo.markdown") == "structured-text"
"""
file_basename = os.path.basename(pathname)
content_type = None
# Try to determine from the path.
if not content_type and self._filename_map.has_key(file_basename):
content_type = self._filename_map[file_basename]
#logger.debug("Content type of '%s' is '%s' (determined from full "\
# "path).", pathname, content_type)
# Try to determine from the suffix.
if not content_type and '.' in file_basename:
extension = "." + file_basename.split(".")[-1]
extension = extension_case_transform_func(extension)
try:
content_type = self._extension_map[extension]
#logger.debug("Content type of '%s' is '%s' (determined from "\
# "suffix '%s').", pathname, content_type, extension)
except KeyError:
pass
# Try to determine from the registered set of regular expression patterns.
if not content_type:
for regexp, _content_type in self._regexp_map.iteritems():
if regexp.search(file_basename):
content_type = _content_type
#logger.debug(
# "Content type of '%s' is '%s' (matches regexp '%s')",
# pathname, content_type, regexp.pattern)
break
# Try to determine from the file contents.
if os.path.exists(pathname):
with open(pathname, 'rb') as f:
content = f.read()
if content.startswith("<?xml"): # cheap XML sniffing
content_type = "XML"
# TODO: Try to determine from mime-type.
return content_type
|
python
|
def guess_content_type(self, pathname):
"""Guess the content type for the given path.
:param path:
The path of file for which to guess the content type.
:return:
Returns the content type or ``None`` if the content type
could not be determined.
Usage:
>>> db = ContentTypesDatabase()
>>> db.add_config_file('content-types.yaml')
>>> g = db.guess_content_type
>>> assert g("__init__.py") == "python"
>>> assert g("Makefile") == "Makefile"
>>> assert g("Makefile.gmake") == "Makefile"
>>> assert g("Makefile.py") == "python"
>>> assert g("foobar.rb") == "ruby"
>>> assert g("wscript") == "python"
>>> assert g("foo.coffee") == "coffee-script"
>>> assert g("Rakefile") == "ruby"
>>> assert g("foobar.xml") == "xml"
>>> assert g("foobar.html") == "html"
>>> assert g("foo7a738fg") == None
>>> assert g("foo.rst") == "structured-text"
>>> assert g("foo.md") == "structured-text"
>>> assert g("foo.markdown") == "structured-text"
"""
file_basename = os.path.basename(pathname)
content_type = None
# Try to determine from the path.
if not content_type and self._filename_map.has_key(file_basename):
content_type = self._filename_map[file_basename]
#logger.debug("Content type of '%s' is '%s' (determined from full "\
# "path).", pathname, content_type)
# Try to determine from the suffix.
if not content_type and '.' in file_basename:
extension = "." + file_basename.split(".")[-1]
extension = extension_case_transform_func(extension)
try:
content_type = self._extension_map[extension]
#logger.debug("Content type of '%s' is '%s' (determined from "\
# "suffix '%s').", pathname, content_type, extension)
except KeyError:
pass
# Try to determine from the registered set of regular expression patterns.
if not content_type:
for regexp, _content_type in self._regexp_map.iteritems():
if regexp.search(file_basename):
content_type = _content_type
#logger.debug(
# "Content type of '%s' is '%s' (matches regexp '%s')",
# pathname, content_type, regexp.pattern)
break
# Try to determine from the file contents.
if os.path.exists(pathname):
with open(pathname, 'rb') as f:
content = f.read()
if content.startswith("<?xml"): # cheap XML sniffing
content_type = "XML"
# TODO: Try to determine from mime-type.
return content_type
|
[
"def",
"guess_content_type",
"(",
"self",
",",
"pathname",
")",
":",
"file_basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"pathname",
")",
"content_type",
"=",
"None",
"# Try to determine from the path.",
"if",
"not",
"content_type",
"and",
"self",
".",
"_filename_map",
".",
"has_key",
"(",
"file_basename",
")",
":",
"content_type",
"=",
"self",
".",
"_filename_map",
"[",
"file_basename",
"]",
"#logger.debug(\"Content type of '%s' is '%s' (determined from full \"\\",
"# \"path).\", pathname, content_type)",
"# Try to determine from the suffix.",
"if",
"not",
"content_type",
"and",
"'.'",
"in",
"file_basename",
":",
"extension",
"=",
"\".\"",
"+",
"file_basename",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"1",
"]",
"extension",
"=",
"extension_case_transform_func",
"(",
"extension",
")",
"try",
":",
"content_type",
"=",
"self",
".",
"_extension_map",
"[",
"extension",
"]",
"#logger.debug(\"Content type of '%s' is '%s' (determined from \"\\",
"# \"suffix '%s').\", pathname, content_type, extension)",
"except",
"KeyError",
":",
"pass",
"# Try to determine from the registered set of regular expression patterns.",
"if",
"not",
"content_type",
":",
"for",
"regexp",
",",
"_content_type",
"in",
"self",
".",
"_regexp_map",
".",
"iteritems",
"(",
")",
":",
"if",
"regexp",
".",
"search",
"(",
"file_basename",
")",
":",
"content_type",
"=",
"_content_type",
"#logger.debug(",
"# \"Content type of '%s' is '%s' (matches regexp '%s')\",",
"# pathname, content_type, regexp.pattern)",
"break",
"# Try to determine from the file contents.",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"pathname",
")",
":",
"with",
"open",
"(",
"pathname",
",",
"'rb'",
")",
"as",
"f",
":",
"content",
"=",
"f",
".",
"read",
"(",
")",
"if",
"content",
".",
"startswith",
"(",
"\"<?xml\"",
")",
":",
"# cheap XML sniffing",
"content_type",
"=",
"\"XML\"",
"# TODO: Try to determine from mime-type.",
"return",
"content_type"
] |
Guess the content type for the given path.
:param path:
The path of file for which to guess the content type.
:return:
Returns the content type or ``None`` if the content type
could not be determined.
Usage:
>>> db = ContentTypesDatabase()
>>> db.add_config_file('content-types.yaml')
>>> g = db.guess_content_type
>>> assert g("__init__.py") == "python"
>>> assert g("Makefile") == "Makefile"
>>> assert g("Makefile.gmake") == "Makefile"
>>> assert g("Makefile.py") == "python"
>>> assert g("foobar.rb") == "ruby"
>>> assert g("wscript") == "python"
>>> assert g("foo.coffee") == "coffee-script"
>>> assert g("Rakefile") == "ruby"
>>> assert g("foobar.xml") == "xml"
>>> assert g("foobar.html") == "html"
>>> assert g("foo7a738fg") == None
>>> assert g("foo.rst") == "structured-text"
>>> assert g("foo.md") == "structured-text"
>>> assert g("foo.markdown") == "structured-text"
|
[
"Guess",
"the",
"content",
"type",
"for",
"the",
"given",
"path",
"."
] |
1e40853378d515c99f03b3f59efa9b943d26eb62
|
https://github.com/gorakhargosh/pepe/blob/1e40853378d515c99f03b3f59efa9b943d26eb62/pepe/content_types.py#L232-L299
|
238,926
|
williamfzc/ConnectionTracer
|
ConnectionTracer/connection.py
|
connect
|
def connect():
""" create socket and connect to adb server """
global adb_socket
if adb_socket is not None:
raise RuntimeError('connection already existed')
host, port = config.HOST, config.PORT
connection = socket.socket()
try:
connection.connect((host, port))
except ConnectionError as _:
warn_msg = 'failed when connecting to adb server: {}:{}, retrying ...'.format(host, port)
warnings.warn(warn_msg)
reboot_adb_server()
connect()
return
adb_socket = connection
|
python
|
def connect():
""" create socket and connect to adb server """
global adb_socket
if adb_socket is not None:
raise RuntimeError('connection already existed')
host, port = config.HOST, config.PORT
connection = socket.socket()
try:
connection.connect((host, port))
except ConnectionError as _:
warn_msg = 'failed when connecting to adb server: {}:{}, retrying ...'.format(host, port)
warnings.warn(warn_msg)
reboot_adb_server()
connect()
return
adb_socket = connection
|
[
"def",
"connect",
"(",
")",
":",
"global",
"adb_socket",
"if",
"adb_socket",
"is",
"not",
"None",
":",
"raise",
"RuntimeError",
"(",
"'connection already existed'",
")",
"host",
",",
"port",
"=",
"config",
".",
"HOST",
",",
"config",
".",
"PORT",
"connection",
"=",
"socket",
".",
"socket",
"(",
")",
"try",
":",
"connection",
".",
"connect",
"(",
"(",
"host",
",",
"port",
")",
")",
"except",
"ConnectionError",
"as",
"_",
":",
"warn_msg",
"=",
"'failed when connecting to adb server: {}:{}, retrying ...'",
".",
"format",
"(",
"host",
",",
"port",
")",
"warnings",
".",
"warn",
"(",
"warn_msg",
")",
"reboot_adb_server",
"(",
")",
"connect",
"(",
")",
"return",
"adb_socket",
"=",
"connection"
] |
create socket and connect to adb server
|
[
"create",
"socket",
"and",
"connect",
"to",
"adb",
"server"
] |
190003e374d6903cb82d2d21a1378979dc419ed3
|
https://github.com/williamfzc/ConnectionTracer/blob/190003e374d6903cb82d2d21a1378979dc419ed3/ConnectionTracer/connection.py#L12-L30
|
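connect() above retries by calling reboot_adb_server() and then recursing into itself with no upper bound. A hedged sketch of the same connect-with-retry idea with an explicit loop and a retry cap; the host and port below are placeholders, not ConnectionTracer's config values.

import socket
import warnings

HOST, PORT = '127.0.0.1', 5037   # placeholder adb server address

def connect_with_retry(host=HOST, port=PORT, max_attempts=3):
    """Return a connected socket, retrying a bounded number of times."""
    last_error = None
    for attempt in range(1, max_attempts + 1):
        conn = socket.socket()
        try:
            conn.connect((host, port))
            return conn
        except OSError as exc:   # ConnectionError is a subclass of OSError
            conn.close()
            last_error = exc
            warnings.warn('connect attempt %d/%d to %s:%s failed: %s'
                          % (attempt, max_attempts, host, port, exc))
    raise RuntimeError('could not reach adb server') from last_error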
238,927
|
williamfzc/ConnectionTracer
|
ConnectionTracer/connection.py
|
reboot_adb_server
|
def reboot_adb_server():
""" execute 'adb devices' to start adb server """
_reboot_count = 0
_max_retry = 1
def _reboot():
nonlocal _reboot_count
if _reboot_count >= _max_retry:
raise RuntimeError('fail after retry {} times'.format(_max_retry))
_reboot_count += 1
return_code = subprocess.call(['adb', 'devices'], stdout=subprocess.DEVNULL)
if bool(return_code):
warnings.warn('return not zero, execute "adb version" failed')
raise EnvironmentError('adb did not work :(')
return _reboot
|
python
|
def reboot_adb_server():
""" execute 'adb devices' to start adb server """
_reboot_count = 0
_max_retry = 1
def _reboot():
nonlocal _reboot_count
if _reboot_count >= _max_retry:
raise RuntimeError('fail after retry {} times'.format(_max_retry))
_reboot_count += 1
return_code = subprocess.call(['adb', 'devices'], stdout=subprocess.DEVNULL)
if bool(return_code):
warnings.warn('return not zero, execute "adb version" failed')
raise EnvironmentError('adb did not work :(')
return _reboot
|
[
"def",
"reboot_adb_server",
"(",
")",
":",
"_reboot_count",
"=",
"0",
"_max_retry",
"=",
"1",
"def",
"_reboot",
"(",
")",
":",
"nonlocal",
"_reboot_count",
"if",
"_reboot_count",
">=",
"_max_retry",
":",
"raise",
"RuntimeError",
"(",
"'fail after retry {} times'",
".",
"format",
"(",
"_max_retry",
")",
")",
"_reboot_count",
"+=",
"1",
"return_code",
"=",
"subprocess",
".",
"call",
"(",
"[",
"'adb'",
",",
"'devices'",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"DEVNULL",
")",
"if",
"bool",
"(",
"return_code",
")",
":",
"warnings",
".",
"warn",
"(",
"'return not zero, execute \"adb version\" failed'",
")",
"raise",
"EnvironmentError",
"(",
"'adb did not work :('",
")",
"return",
"_reboot"
] |
execute 'adb devices' to start adb server
|
[
"execute",
"adb",
"devices",
"to",
"start",
"adb",
"server"
] |
190003e374d6903cb82d2d21a1378979dc419ed3
|
https://github.com/williamfzc/ConnectionTracer/blob/190003e374d6903cb82d2d21a1378979dc419ed3/ConnectionTracer/connection.py#L49-L65
|
238,928
|
koriakin/binflakes
|
binflakes/sexpr/string.py
|
escape_string
|
def escape_string(value):
"""Converts a string to its S-expression representation, adding quotes
and escaping funny characters.
"""
res = StringIO()
res.write('"')
for c in value:
if c in CHAR_TO_ESCAPE:
res.write(f'\\{CHAR_TO_ESCAPE[c]}')
elif c.isprintable():
res.write(c)
elif ord(c) < 0x100:
res.write(f'\\x{ord(c):02x}')
elif ord(c) < 0x10000:
res.write(f'\\u{ord(c):04x}')
else:
res.write(f'\\U{ord(c):06x}')
res.write('"')
return res.getvalue()
|
python
|
def escape_string(value):
"""Converts a string to its S-expression representation, adding quotes
and escaping funny characters.
"""
res = StringIO()
res.write('"')
for c in value:
if c in CHAR_TO_ESCAPE:
res.write(f'\\{CHAR_TO_ESCAPE[c]}')
elif c.isprintable():
res.write(c)
elif ord(c) < 0x100:
res.write(f'\\x{ord(c):02x}')
elif ord(c) < 0x10000:
res.write(f'\\u{ord(c):04x}')
else:
res.write(f'\\U{ord(c):06x}')
res.write('"')
return res.getvalue()
|
[
"def",
"escape_string",
"(",
"value",
")",
":",
"res",
"=",
"StringIO",
"(",
")",
"res",
".",
"write",
"(",
"'\"'",
")",
"for",
"c",
"in",
"value",
":",
"if",
"c",
"in",
"CHAR_TO_ESCAPE",
":",
"res",
".",
"write",
"(",
"f'\\\\{CHAR_TO_ESCAPE[c]}'",
")",
"elif",
"c",
".",
"isprintable",
"(",
")",
":",
"res",
".",
"write",
"(",
"c",
")",
"elif",
"ord",
"(",
"c",
")",
"<",
"0x100",
":",
"res",
".",
"write",
"(",
"f'\\\\x{ord(c):02x}'",
")",
"elif",
"ord",
"(",
"c",
")",
"<",
"0x10000",
":",
"res",
".",
"write",
"(",
"f'\\\\u{ord(c):04x}'",
")",
"else",
":",
"res",
".",
"write",
"(",
"f'\\\\U{ord(c):06x}'",
")",
"res",
".",
"write",
"(",
"'\"'",
")",
"return",
"res",
".",
"getvalue",
"(",
")"
] |
Converts a string to its S-expression representation, adding quotes
and escaping funny characters.
|
[
"Converts",
"a",
"string",
"to",
"its",
"S",
"-",
"expression",
"representation",
"adding",
"quotes",
"and",
"escaping",
"funny",
"characters",
"."
] |
f059cecadf1c605802a713c62375b5bd5606d53f
|
https://github.com/koriakin/binflakes/blob/f059cecadf1c605802a713c62375b5bd5606d53f/binflakes/sexpr/string.py#L18-L36
|
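escape_string depends on a CHAR_TO_ESCAPE table imported elsewhere in binflakes. The self-contained sketch below uses a stand-in table (an assumption, not the real escape set) so the scheme can be run end to end.

from io import StringIO

# Stand-in escape table; binflakes defines its own CHAR_TO_ESCAPE mapping.
CHAR_TO_ESCAPE = {'"': '"', '\\': '\\', '\n': 'n', '\t': 't'}

def escape_string(value):
    res = StringIO()
    res.write('"')
    for c in value:
        if c in CHAR_TO_ESCAPE:
            res.write('\\' + CHAR_TO_ESCAPE[c])
        elif c.isprintable():
            res.write(c)
        elif ord(c) < 0x100:
            res.write('\\x%02x' % ord(c))
        elif ord(c) < 0x10000:
            res.write('\\u%04x' % ord(c))
        else:
            res.write('\\U%06x' % ord(c))
    res.write('"')
    return res.getvalue()

print(escape_string('a"b\nc\x07'))   # "a\"b\nc\x07"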
238,929
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/validate/ObjectComparator.py
|
ObjectComparator.compare
|
def compare(value1, operation, value2):
"""
Perform comparison operation over two arguments.
The operation can be performed over values of any type.
:param value1: the first argument to compare
:param operation: the comparison operation: "==" ("=", "EQ"), "!= " ("<>", "NE"); "<"/">"
("LT"/"GT"), "<="/">=" ("LE"/"GE"); "LIKE".
:param value2: the second argument to compare
:return: result of the comparison operation
"""
if operation == None:
return False
operation = operation.upper()
if operation in ["=", "==", "EQ"]:
return ObjectComparator.are_equal(value1, value2)
if operation in ["!=", "<>", "NE"]:
return ObjectComparator.are_not_equal(value1, value2)
if operation in ["<", "LT"]:
return ObjectComparator.less(value1, value2)
if operation in ["<=", "LE"]:
return ObjectComparator.are_equal(value1, value2) or ObjectComparator.less(value1, value2)
if operation in [">", "GT"]:
return ObjectComparator.more(value1, value2)
if operation in [">=", "GE"]:
return ObjectComparator.are_equal(value1, value2) or ObjectComparator.more(value1, value2)
if operation == "LIKE":
return ObjectComparator.match(value1, value2)
return True
|
python
|
def compare(value1, operation, value2):
"""
Perform comparison operation over two arguments.
The operation can be performed over values of any type.
:param value1: the first argument to compare
:param operation: the comparison operation: "==" ("=", "EQ"), "!= " ("<>", "NE"); "<"/">"
("LT"/"GT"), "<="/">=" ("LE"/"GE"); "LIKE".
:param value2: the second argument to compare
:return: result of the comparison operation
"""
if operation == None:
return False
operation = operation.upper()
if operation in ["=", "==", "EQ"]:
return ObjectComparator.are_equal(value1, value2)
if operation in ["!=", "<>", "NE"]:
return ObjectComparator.are_not_equal(value1, value2)
if operation in ["<", "LT"]:
return ObjectComparator.less(value1, value2)
if operation in ["<=", "LE"]:
return ObjectComparator.are_equal(value1, value2) or ObjectComparator.less(value1, value2)
if operation in [">", "GT"]:
return ObjectComparator.more(value1, value2)
if operation in [">=", "GE"]:
return ObjectComparator.are_equal(value1, value2) or ObjectComparator.more(value1, value2)
if operation == "LIKE":
return ObjectComparator.match(value1, value2)
return True
|
[
"def",
"compare",
"(",
"value1",
",",
"operation",
",",
"value2",
")",
":",
"if",
"operation",
"==",
"None",
":",
"return",
"False",
"operation",
"=",
"operation",
".",
"upper",
"(",
")",
"if",
"operation",
"in",
"[",
"\"=\"",
",",
"\"==\"",
",",
"\"EQ\"",
"]",
":",
"return",
"ObjectComparator",
".",
"are_equal",
"(",
"value1",
",",
"value2",
")",
"if",
"operation",
"in",
"[",
"\"!=\"",
",",
"\"<>\"",
",",
"\"NE\"",
"]",
":",
"return",
"ObjectComparator",
".",
"are_not_equal",
"(",
"value1",
",",
"value2",
")",
"if",
"operation",
"in",
"[",
"\"<\"",
",",
"\"LT\"",
"]",
":",
"return",
"ObjectComparator",
".",
"less",
"(",
"value1",
",",
"value2",
")",
"if",
"operation",
"in",
"[",
"\"<=\"",
",",
"\"LE\"",
"]",
":",
"return",
"ObjectComparator",
".",
"are_equal",
"(",
"value1",
",",
"value2",
")",
"or",
"ObjectComparator",
".",
"less",
"(",
"value1",
",",
"value2",
")",
"if",
"operation",
"in",
"[",
"\">\"",
",",
"\"GT\"",
"]",
":",
"return",
"ObjectComparator",
".",
"more",
"(",
"value1",
",",
"value2",
")",
"if",
"operation",
"in",
"[",
"\">=\"",
",",
"\"GE\"",
"]",
":",
"return",
"ObjectComparator",
".",
"are_equal",
"(",
"value1",
",",
"value2",
")",
"or",
"ObjectComparator",
".",
"more",
"(",
"value1",
",",
"value2",
")",
"if",
"operation",
"==",
"\"LIKE\"",
":",
"return",
"ObjectComparator",
".",
"match",
"(",
"value1",
",",
"value2",
")",
"return",
"True"
] |
Perform comparison operation over two arguments.
The operation can be performed over values of any type.
:param value1: the first argument to compare
:param operation: the comparison operation: "==" ("=", "EQ"), "!= " ("<>", "NE"); "<"/">"
("LT"/"GT"), "<="/">=" ("LE"/"GE"); "LIKE".
:param value2: the second argument to compare
:return: result of the comparison operation
|
[
"Perform",
"comparison",
"operation",
"over",
"two",
"arguments",
".",
"The",
"operation",
"can",
"be",
"performed",
"over",
"values",
"of",
"any",
"type",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/validate/ObjectComparator.py#L25-L59
|
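A minimal standalone sketch of the operator-dispatch idea in ObjectComparator.compare, using plain Python comparisons instead of the are_equal/less/more helpers; like the snippet above, unknown operators fall through to True and LIKE is a re.match test.

import re

def compare(value1, operation, value2):
    """Dispatch a comparison by operator name."""
    if operation is None:
        return False
    op = operation.upper()
    if op in ('=', '==', 'EQ'):
        return value1 == value2
    if op in ('!=', '<>', 'NE'):
        return value1 != value2
    if op in ('<', 'LT'):
        return value1 < value2
    if op in ('<=', 'LE'):
        return value1 <= value2
    if op in ('>', 'GT'):
        return value1 > value2
    if op in ('>=', 'GE'):
        return value1 >= value2
    if op == 'LIKE':
        return re.match(str(value2), str(value1)) is not None
    return True   # unknown operators pass, as in the snippet above

print(compare(2, 'LT', 3), compare('abc', 'LIKE', 'a.c'))   # True True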
238,930
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/validate/ObjectComparator.py
|
ObjectComparator.are_equal
|
def are_equal(value1, value2):
"""
Checks if two values are equal. The operation can be performed over values of any type.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if values are equal and false otherwise
"""
if value1 == None or value2 == None:
return True
if value1 == None or value2 == None:
return False
return value1 == value2
|
python
|
def are_equal(value1, value2):
"""
Checks if two values are equal. The operation can be performed over values of any type.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if values are equal and false otherwise
"""
if value1 == None or value2 == None:
return True
if value1 == None or value2 == None:
return False
return value1 == value2
|
[
"def",
"are_equal",
"(",
"value1",
",",
"value2",
")",
":",
"if",
"value1",
"==",
"None",
"or",
"value2",
"==",
"None",
":",
"return",
"True",
"if",
"value1",
"==",
"None",
"or",
"value2",
"==",
"None",
":",
"return",
"False",
"return",
"value1",
"==",
"value2"
] |
Checks if two values are equal. The operation can be performed over values of any type.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if values are equal and false otherwise
|
[
"Checks",
"if",
"two",
"values",
"are",
"equal",
".",
"The",
"operation",
"can",
"be",
"performed",
"over",
"values",
"of",
"any",
"type",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/validate/ObjectComparator.py#L62-L76
|
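Note that the captured are_equal returns True whenever either argument is None, because its first branch tests `or` rather than `and`, so are_equal(None, 'x') is True. Assuming the intent was "both None compare equal, exactly one None does not", a corrected sketch looks like this:

def are_equal(value1, value2):
    # Equal when both are None; unequal when exactly one is None.
    if value1 is None and value2 is None:
        return True
    if value1 is None or value2 is None:
        return False
    return value1 == value2

print(are_equal(None, None), are_equal(None, 'x'), are_equal(3, 3))   # True False True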
238,931
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/validate/ObjectComparator.py
|
ObjectComparator.less
|
def less(value1, value2):
"""
Checks if first value is less than the second one.
The operation can be performed over numbers or strings.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if the first value is less than second and false otherwise.
"""
number1 = FloatConverter.to_nullable_float(value1)
number2 = FloatConverter.to_nullable_float(value2)
if number1 == None or number2 == None:
return False
return number1 < number2
|
python
|
def less(value1, value2):
"""
Checks if first value is less than the second one.
The operation can be performed over numbers or strings.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if the first value is less than second and false otherwise.
"""
number1 = FloatConverter.to_nullable_float(value1)
number2 = FloatConverter.to_nullable_float(value2)
if number1 == None or number2 == None:
return False
return number1 < number2
|
[
"def",
"less",
"(",
"value1",
",",
"value2",
")",
":",
"number1",
"=",
"FloatConverter",
".",
"to_nullable_float",
"(",
"value1",
")",
"number2",
"=",
"FloatConverter",
".",
"to_nullable_float",
"(",
"value2",
")",
"if",
"number1",
"==",
"None",
"or",
"number2",
"==",
"None",
":",
"return",
"False",
"return",
"number1",
"<",
"number2"
] |
Checks if first value is less than the second one.
The operation can be performed over numbers or strings.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if the first value is less than second and false otherwise.
|
[
"Checks",
"if",
"first",
"value",
"is",
"less",
"than",
"the",
"second",
"one",
".",
"The",
"operation",
"can",
"be",
"performed",
"over",
"numbers",
"or",
"strings",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/validate/ObjectComparator.py#L92-L109
|
238,932
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/validate/ObjectComparator.py
|
ObjectComparator.more
|
def more(value1, value2):
"""
Checks if first value is greater than the second one.
The operation can be performed over numbers or strings.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if the first value is greater than second and false otherwise.
"""
number1 = FloatConverter.to_nullable_float(value1)
number2 = FloatConverter.to_nullable_float(value2)
if number1 == None or number2 == None:
return False
return number1 > number2
|
python
|
def more(value1, value2):
"""
Checks if first value is greater than the second one.
The operation can be performed over numbers or strings.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if the first value is greater than second and false otherwise.
"""
number1 = FloatConverter.to_nullable_float(value1)
number2 = FloatConverter.to_nullable_float(value2)
if number1 == None or number2 == None:
return False
return number1 > number2
|
[
"def",
"more",
"(",
"value1",
",",
"value2",
")",
":",
"number1",
"=",
"FloatConverter",
".",
"to_nullable_float",
"(",
"value1",
")",
"number2",
"=",
"FloatConverter",
".",
"to_nullable_float",
"(",
"value2",
")",
"if",
"number1",
"==",
"None",
"or",
"number2",
"==",
"None",
":",
"return",
"False",
"return",
"number1",
">",
"number2"
] |
Checks if first value is greater than the second one.
The operation can be performed over numbers or strings.
:param value1: the first value to compare
:param value2: the second value to compare
:return: true if the first value is greater than second and false otherwise.
|
[
"Checks",
"if",
"first",
"value",
"is",
"greater",
"than",
"the",
"second",
"one",
".",
"The",
"operation",
"can",
"be",
"performed",
"over",
"numbers",
"or",
"strings",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/validate/ObjectComparator.py#L112-L129
|
238,933
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/validate/ObjectComparator.py
|
ObjectComparator.match
|
def match(value1, value2):
"""
Checks if string matches a regular expression
:param value1: a string value to match
:param value2: a regular expression string
:return: true if the value matches regular expression and false otherwise.
"""
if value1 == None and value2 == None:
return True
if value1 == None or value2 == None:
return False
string1 = str(value1)
string2 = str(value2)
return re.match(string2, string1) != None
|
python
|
def match(value1, value2):
"""
Checks if string matches a regular expression
:param value1: a string value to match
:param value2: a regular expression string
:return: true if the value matches regular expression and false otherwise.
"""
if value1 == None and value2 == None:
return True
if value1 == None or value2 == None:
return False
string1 = str(value1)
string2 = str(value2)
return re.match(string2, string1) != None
|
[
"def",
"match",
"(",
"value1",
",",
"value2",
")",
":",
"if",
"value1",
"==",
"None",
"and",
"value2",
"==",
"None",
":",
"return",
"True",
"if",
"value1",
"==",
"None",
"or",
"value2",
"==",
"None",
":",
"return",
"False",
"string1",
"=",
"str",
"(",
"value1",
")",
"string2",
"=",
"str",
"(",
"value2",
")",
"return",
"re",
".",
"match",
"(",
"string2",
",",
"string1",
")",
"!=",
"None"
] |
Checks if string matches a regular expression
:param value1: a string value to match
:param value2: a regular expression string
:return: true if the value matches regular expression and false otherwise.
|
[
"Checks",
"if",
"string",
"matches",
"a",
"regular",
"expression"
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/validate/ObjectComparator.py#L132-L149
|
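re.match, as used by ObjectComparator.match, anchors only at the start of the string; whether a full-string match is intended here is an assumption, but the difference is easy to see with re.fullmatch:

import re

value, pattern = 'production', 'prod'

print(re.match(pattern, value) is not None)      # True: matches at the start only
print(re.fullmatch(pattern, value) is not None)  # False: the whole string must match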
238,934
|
roboogle/gtkmvc3
|
gtkmvco/examples/converter/src/controllers/about.py
|
AboutCtrl.register_view
|
def register_view(self, view):
"""Loads the text taking it from the model, then starts a
timer to scroll it."""
self.view.set_text(self.model.credits)
gobject.timeout_add(1500, self.on_begin_scroll)
return
|
python
|
def register_view(self, view):
"""Loads the text taking it from the model, then starts a
timer to scroll it."""
self.view.set_text(self.model.credits)
gobject.timeout_add(1500, self.on_begin_scroll)
return
|
[
"def",
"register_view",
"(",
"self",
",",
"view",
")",
":",
"self",
".",
"view",
".",
"set_text",
"(",
"self",
".",
"model",
".",
"credits",
")",
"gobject",
".",
"timeout_add",
"(",
"1500",
",",
"self",
".",
"on_begin_scroll",
")",
"return"
] |
Loads the text taking it from the model, then starts a
timer to scroll it.
|
[
"Loads",
"the",
"text",
"taking",
"it",
"from",
"the",
"model",
"then",
"starts",
"a",
"timer",
"to",
"scroll",
"it",
"."
] |
63405fd8d2056be26af49103b13a8d5e57fe4dff
|
https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/examples/converter/src/controllers/about.py#L34-L39
|
238,935
|
roboogle/gtkmvc3
|
gtkmvco/examples/converter/src/controllers/about.py
|
AboutCtrl.on_scroll
|
def on_scroll(self):
"""Called to scroll text"""
try:
sw = self.view['sw_scroller']
except KeyError:
return False # destroyed!
vadj = sw.get_vadjustment()
if vadj is None: return False
val = vadj.get_value()
# is scrolling over?
if val >= vadj.upper - vadj.page_size:
self.view.show_vscrollbar()
return False
vadj.set_value(val+0.5)
return True
|
python
|
def on_scroll(self):
"""Called to scroll text"""
try:
sw = self.view['sw_scroller']
except KeyError:
return False # destroyed!
vadj = sw.get_vadjustment()
if vadj is None: return False
val = vadj.get_value()
# is scrolling over?
if val >= vadj.upper - vadj.page_size:
self.view.show_vscrollbar()
return False
vadj.set_value(val+0.5)
return True
|
[
"def",
"on_scroll",
"(",
"self",
")",
":",
"try",
":",
"sw",
"=",
"self",
".",
"view",
"[",
"'sw_scroller'",
"]",
"except",
"KeyError",
":",
"return",
"False",
"# destroyed! ",
"vadj",
"=",
"sw",
".",
"get_vadjustment",
"(",
")",
"if",
"vadj",
"is",
"None",
":",
"return",
"False",
"val",
"=",
"vadj",
".",
"get_value",
"(",
")",
"# is scrolling over?",
"if",
"val",
">=",
"vadj",
".",
"upper",
"-",
"vadj",
".",
"page_size",
":",
"self",
".",
"view",
".",
"show_vscrollbar",
"(",
")",
"return",
"False",
"vadj",
".",
"set_value",
"(",
"val",
"+",
"0.5",
")",
"return",
"True"
] |
Called to scroll text
|
[
"Called",
"to",
"scroll",
"text"
] |
63405fd8d2056be26af49103b13a8d5e57fe4dff
|
https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/examples/converter/src/controllers/about.py#L50-L66
|
238,936
|
GearPlug/paymentsos-python
|
paymentsos/tokens.py
|
Token.create_token
|
def create_token(self, *, holder_name, card_number, credit_card_cvv, expiration_date, token_type='credit_card',
identity_document=None, billing_address=None, additional_details=None):
"""
When creating a Token, remember to use the public-key header instead of the private-key header,
and do not include the app-id header.
Args:
holder_name: Name of the credit card holder.
card_number: Credit card number.
credit_card_cvv: The CVV number on the card (3 or 4 digits) to be encrypted.
expiration_date: Credit card expiration date. Possible formats: mm-yyyy, mm-yy, mm.yyyy,
mm.yy, mm/yy, mm/yyyy, mm yyyy, or mm yy.
token_type: The type of token
billing_address: Address.
identity_document: National identity document of the card holder.
additional_details: Optional additional data stored with your token in key/value pairs.
Returns:
"""
headers = self.client._get_public_headers()
payload = {
"token_type": token_type,
"credit_card_cvv": credit_card_cvv,
"card_number": card_number,
"expiration_date": expiration_date,
"holder_name": holder_name,
"identity_document": identity_document,
"billing_address": billing_address,
"additional_details": additional_details,
}
endpoint = '/tokens'
return self.client._post(self.client.URL_BASE + endpoint, json=payload, headers=headers)
|
python
|
def create_token(self, *, holder_name, card_number, credit_card_cvv, expiration_date, token_type='credit_card',
identity_document=None, billing_address=None, additional_details=None):
"""
When creating a Token, remember to use the public-key header instead of the private-key header,
and do not include the app-id header.
Args:
holder_name: Name of the credit card holder.
card_number: Credit card number.
credit_card_cvv: The CVV number on the card (3 or 4 digits) to be encrypted.
expiration_date: Credit card expiration date. Possible formats: mm-yyyy, mm-yy, mm.yyyy,
mm.yy, mm/yy, mm/yyyy, mm yyyy, or mm yy.
token_type: The type of token
billing_address: Address.
identity_document: National identity document of the card holder.
additional_details: Optional additional data stored with your token in key/value pairs.
Returns:
"""
headers = self.client._get_public_headers()
payload = {
"token_type": token_type,
"credit_card_cvv": credit_card_cvv,
"card_number": card_number,
"expiration_date": expiration_date,
"holder_name": holder_name,
"identity_document": identity_document,
"billing_address": billing_address,
"additional_details": additional_details,
}
endpoint = '/tokens'
return self.client._post(self.client.URL_BASE + endpoint, json=payload, headers=headers)
|
[
"def",
"create_token",
"(",
"self",
",",
"*",
",",
"holder_name",
",",
"card_number",
",",
"credit_card_cvv",
",",
"expiration_date",
",",
"token_type",
"=",
"'credit_card'",
",",
"identity_document",
"=",
"None",
",",
"billing_address",
"=",
"None",
",",
"additional_details",
"=",
"None",
")",
":",
"headers",
"=",
"self",
".",
"client",
".",
"_get_public_headers",
"(",
")",
"payload",
"=",
"{",
"\"token_type\"",
":",
"token_type",
",",
"\"credit_card_cvv\"",
":",
"credit_card_cvv",
",",
"\"card_number\"",
":",
"card_number",
",",
"\"expiration_date\"",
":",
"expiration_date",
",",
"\"holder_name\"",
":",
"holder_name",
",",
"\"identity_document\"",
":",
"identity_document",
",",
"\"billing_address\"",
":",
"billing_address",
",",
"\"additional_details\"",
":",
"additional_details",
",",
"}",
"endpoint",
"=",
"'/tokens'",
"return",
"self",
".",
"client",
".",
"_post",
"(",
"self",
".",
"client",
".",
"URL_BASE",
"+",
"endpoint",
",",
"json",
"=",
"payload",
",",
"headers",
"=",
"headers",
")"
] |
When creating a Token, remember to use the public-key header instead of the private-key header,
and do not include the app-id header.
Args:
holder_name: Name of the credit card holder.
card_number: Credit card number.
credit_card_cvv: The CVV number on the card (3 or 4 digits) to be encrypted.
expiration_date: Credit card expiration date. Possible formats: mm-yyyy, mm-yy, mm.yyyy,
mm.yy, mm/yy, mm/yyyy, mm yyyy, or mm yy.
token_type: The type of token
billing_address: Address.
identity_document: National identity document of the card holder.
additional_details: Optional additional data stored with your token in key/value pairs.
Returns:
|
[
"When",
"creating",
"a",
"Token",
"remember",
"to",
"use",
"the",
"public",
"-",
"key",
"header",
"instead",
"of",
"the",
"private",
"-",
"key",
"header",
"and",
"do",
"not",
"include",
"the",
"app",
"-",
"id",
"header",
"."
] |
2f32ba83ae890c96799b71d49fc6740bc1081f89
|
https://github.com/GearPlug/paymentsos-python/blob/2f32ba83ae890c96799b71d49fc6740bc1081f89/paymentsos/tokens.py#L6-L38
|
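A hedged sketch of the HTTP plumbing the client methods above wrap, written directly against requests; the base URL and header names here are placeholders, not the PaymentsOS API, and the library's _get_public_headers/_post helpers are only imitated.

import requests

URL_BASE = 'https://api.example.test/v1'   # placeholder, not the real PaymentsOS URL

def create_token(public_key, holder_name, card_number, credit_card_cvv,
                 expiration_date, token_type='credit_card'):
    # Header names are illustrative; consult the PaymentsOS docs for the real ones.
    headers = {'public_key': public_key, 'Content-Type': 'application/json'}
    payload = {
        'token_type': token_type,
        'holder_name': holder_name,
        'card_number': card_number,
        'credit_card_cvv': credit_card_cvv,
        'expiration_date': expiration_date,
    }
    return requests.post(URL_BASE + '/tokens', json=payload, headers=headers)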
238,937
|
GearPlug/paymentsos-python
|
paymentsos/tokens.py
|
Token.retrieve_token
|
def retrieve_token(self, token):
"""
Retrieve Token details for a specific Token.
Args:
token: The identifier of the token.
Returns:
"""
headers = self.client._get_private_headers()
endpoint = '/tokens/{}'.format(token)
return self.client._get(self.client.URL_BASE + endpoint, headers=headers)
|
python
|
def retrieve_token(self, token):
"""
Retrieve Token details for a specific Token.
Args:
token: The identifier of the token.
Returns:
"""
headers = self.client._get_private_headers()
endpoint = '/tokens/{}'.format(token)
return self.client._get(self.client.URL_BASE + endpoint, headers=headers)
|
[
"def",
"retrieve_token",
"(",
"self",
",",
"token",
")",
":",
"headers",
"=",
"self",
".",
"client",
".",
"_get_private_headers",
"(",
")",
"endpoint",
"=",
"'/tokens/{}'",
".",
"format",
"(",
"token",
")",
"return",
"self",
".",
"client",
".",
"_get",
"(",
"self",
".",
"client",
".",
"URL_BASE",
"+",
"endpoint",
",",
"headers",
"=",
"headers",
")"
] |
Retrieve Token details for a specific Token.
Args:
token: The identifier of the token.
Returns:
|
[
"Retrieve",
"Token",
"details",
"for",
"a",
"specific",
"Token",
"."
] |
2f32ba83ae890c96799b71d49fc6740bc1081f89
|
https://github.com/GearPlug/paymentsos-python/blob/2f32ba83ae890c96799b71d49fc6740bc1081f89/paymentsos/tokens.py#L40-L53
|
238,938
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/gui/djitemdata.py
|
prj_created_data
|
def prj_created_data(project, role):
"""Return the data for created
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the created
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return project.date_created.isoformat(' ')
|
python
|
def prj_created_data(project, role):
"""Return the data for created
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the created
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return project.date_created.isoformat(' ')
|
[
"def",
"prj_created_data",
"(",
"project",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
":",
"return",
"project",
".",
"date_created",
".",
"isoformat",
"(",
"' '",
")"
] |
Return the data for created
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the created
:rtype: depending on role
:raises: None
|
[
"Return",
"the",
"data",
"for",
"created"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/gui/djitemdata.py#L53-L65
|
238,939
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/gui/djitemdata.py
|
prj_fps_data
|
def prj_fps_data(project, role):
"""Return the data for fps
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the fps
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(project.framerate)
|
python
|
def prj_fps_data(project, role):
"""Return the data for fps
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the fps
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(project.framerate)
|
[
"def",
"prj_fps_data",
"(",
"project",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
":",
"return",
"str",
"(",
"project",
".",
"framerate",
")"
] |
Return the data for fps
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the fps
:rtype: depending on role
:raises: None
|
[
"Return",
"the",
"data",
"for",
"fps"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/gui/djitemdata.py#L83-L95
|
238,940
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/gui/djitemdata.py
|
prj_resolution_data
|
def prj_resolution_data(project, role):
"""Return the data for resolution
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the resolution
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return '%s x %s' % (project.resx, project.resy)
|
python
|
def prj_resolution_data(project, role):
"""Return the data for resolution
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the resolution
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return '%s x %s' % (project.resx, project.resy)
|
[
"def",
"prj_resolution_data",
"(",
"project",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
":",
"return",
"'%s x %s'",
"%",
"(",
"project",
".",
"resx",
",",
"project",
".",
"resy",
")"
] |
Return the data for resolution
:param project: the project that holds the data
:type project: :class:`jukeboxcore.djadapter.models.Project`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the resolution
:rtype: depending on role
:raises: None
|
[
"Return",
"the",
"data",
"for",
"resolution"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/gui/djitemdata.py#L98-L110
|
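The djitemdata helpers all repeat one pattern: answer only the display role with a formatted field. A standalone sketch of that pattern, using a plain Enum as a stand-in for QtCore.Qt.ItemDataRole so it runs without a Qt binding installed; the Project class here is a dummy.

from enum import Enum, auto

class Role(Enum):            # stand-in for QtCore.Qt.ItemDataRole
    DisplayRole = auto()
    EditRole = auto()

def make_display_getter(format_field):
    """Build a (obj, role) -> str accessor that only answers the display role."""
    def getter(obj, role):
        if role is Role.DisplayRole:
            return format_field(obj)
        return None
    return getter

class Project:
    framerate = 25
    resx, resy = 1920, 1080

prj_fps_data = make_display_getter(lambda p: str(p.framerate))
prj_resolution_data = make_display_getter(lambda p: '%s x %s' % (p.resx, p.resy))

print(prj_fps_data(Project(), Role.DisplayRole))        # 25
print(prj_resolution_data(Project(), Role.EditRole))    # None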
238,941
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/gui/djitemdata.py
|
shot_duration_data
|
def shot_duration_data(shot, role):
"""Return the data for duration
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the duration
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(shot.duration)
|
python
|
def shot_duration_data(shot, role):
"""Return the data for duration
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the duration
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(shot.duration)
|
[
"def",
"shot_duration_data",
"(",
"shot",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
":",
"return",
"str",
"(",
"shot",
".",
"duration",
")"
] |
Return the data for duration
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the duration
:rtype: depending on role
:raises: None
|
[
"Return",
"the",
"data",
"for",
"duration"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/gui/djitemdata.py#L312-L324
|
238,942
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/gui/djitemdata.py
|
shot_start_data
|
def shot_start_data(shot, role):
"""Return the data for startframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the start
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(shot.startframe)
|
python
|
def shot_start_data(shot, role):
"""Return the data for startframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the start
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(shot.startframe)
|
[
"def",
"shot_start_data",
"(",
"shot",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
":",
"return",
"str",
"(",
"shot",
".",
"startframe",
")"
] |
Return the data for startframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the start
:rtype: depending on role
:raises: None
|
[
"Return",
"the",
"data",
"for",
"startframe"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/gui/djitemdata.py#L327-L339
|
238,943
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/gui/djitemdata.py
|
shot_end_data
|
def shot_end_data(shot, role):
"""Return the data for endframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the end
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(shot.endframe)
|
python
|
def shot_end_data(shot, role):
"""Return the data for endframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the end
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole:
return str(shot.endframe)
|
[
"def",
"shot_end_data",
"(",
"shot",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
":",
"return",
"str",
"(",
"shot",
".",
"endframe",
")"
] |
Return the data for endframe
:param shot: the shot that holds the data
:type shot: :class:`jukeboxcore.djadapter.models.Shot`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the end
:rtype: depending on role
:raises: None
|
[
"Return",
"the",
"data",
"for",
"endframe"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/gui/djitemdata.py#L342-L354
|
238,944
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/gui/djitemdata.py
|
note_content_data
|
def note_content_data(note, role):
"""Return the data for content
:param note: the note that holds the data
:type note: :class:`jukeboxcore.djadapter.models.Note`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the created date
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
return note.content
|
python
|
def note_content_data(note, role):
"""Return the data for content
:param note: the note that holds the data
:type note: :class:`jukeboxcore.djadapter.models.Note`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the created date
:rtype: depending on role
:raises: None
"""
if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
return note.content
|
[
"def",
"note_content_data",
"(",
"note",
",",
"role",
")",
":",
"if",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"DisplayRole",
"or",
"role",
"==",
"QtCore",
".",
"Qt",
".",
"EditRole",
":",
"return",
"note",
".",
"content"
] |
Return the data for content
:param note: the note that holds the data
:type note: :class:`jukeboxcore.djadapter.models.Note`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the created date
:rtype: depending on role
:raises: None
|
[
"Return",
"the",
"data",
"for",
"content"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/gui/djitemdata.py#L801-L813
|
238,945
|
roboogle/gtkmvc3
|
gtkmvco/gtkmvc3/adapters/default.py
|
remove_adapter
|
def remove_adapter(widget_class, flavour=None):
"""Removes the given widget class information from the default set
of adapters.
If widget_class had been previously added by using add_adapter,
the added adapter will be removed, restoring possibly previusly
existing adapter(s). Notice that this function will remove only
*one* adapter about given wiget_class (the first found in order),
even if many are currently stored.
@param flavour has to be used when the entry was added with a
particular flavour.
Returns True if one adapter was removed, False if no adapter was
removed."""
for it,tu in enumerate(__def_adapter):
if (widget_class == tu[WIDGET] and flavour == tu[FLAVOUR]):
del __def_adapter[it]
return True
return False
|
python
|
def remove_adapter(widget_class, flavour=None):
"""Removes the given widget class information from the default set
of adapters.
If widget_class had been previously added by using add_adapter,
the added adapter will be removed, restoring possibly previusly
existing adapter(s). Notice that this function will remove only
*one* adapter about given wiget_class (the first found in order),
even if many are currently stored.
@param flavour has to be used when the entry was added with a
particular flavour.
Returns True if one adapter was removed, False if no adapter was
removed."""
for it,tu in enumerate(__def_adapter):
if (widget_class == tu[WIDGET] and flavour == tu[FLAVOUR]):
del __def_adapter[it]
return True
return False
|
[
"def",
"remove_adapter",
"(",
"widget_class",
",",
"flavour",
"=",
"None",
")",
":",
"for",
"it",
",",
"tu",
"in",
"enumerate",
"(",
"__def_adapter",
")",
":",
"if",
"(",
"widget_class",
"==",
"tu",
"[",
"WIDGET",
"]",
"and",
"flavour",
"==",
"tu",
"[",
"FLAVOUR",
"]",
")",
":",
"del",
"__def_adapter",
"[",
"it",
"]",
"return",
"True",
"return",
"False"
] |
Removes the given widget class information from the default set
of adapters.
If widget_class had been previously added by using add_adapter,
the added adapter will be removed, restoring possibly previusly
existing adapter(s). Notice that this function will remove only
*one* adapter about given wiget_class (the first found in order),
even if many are currently stored.
@param flavour has to be used when the entry was added with a
particular flavour.
Returns True if one adapter was removed, False if no adapter was
removed.
|
[
"Removes",
"the",
"given",
"widget",
"class",
"information",
"from",
"the",
"default",
"set",
"of",
"adapters",
"."
] |
63405fd8d2056be26af49103b13a8d5e57fe4dff
|
https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/gtkmvc3/adapters/default.py#L106-L126
|
238,946
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.get
|
def get(self, key):
"""
Gets a map element specified by its key.
The key can be defined using dot notation
and allows to recursively access elements of elements.
:param key: a key of the element to get.
:return: the value of the map element.
"""
if key == None or key == '':
return None
elif key.find('.') > 0:
return RecursiveObjectReader.get_property(self, key)
else:
return super(Parameters, self).get(key)
|
python
|
def get(self, key):
"""
Gets a map element specified by its key.
The key can be defined using dot notation
and allows to recursively access elements of elements.
:param key: a key of the element to get.
:return: the value of the map element.
"""
if key == None or key == '':
return None
elif key.find('.') > 0:
return RecursiveObjectReader.get_property(self, key)
else:
return super(Parameters, self).get(key)
|
[
"def",
"get",
"(",
"self",
",",
"key",
")",
":",
"if",
"key",
"==",
"None",
"or",
"key",
"==",
"''",
":",
"return",
"None",
"elif",
"key",
".",
"find",
"(",
"'.'",
")",
">",
"0",
":",
"return",
"RecursiveObjectReader",
".",
"get_property",
"(",
"self",
",",
"key",
")",
"else",
":",
"return",
"super",
"(",
"Parameters",
",",
"self",
")",
".",
"get",
"(",
"key",
")"
] |
Gets a map element specified by its key.
The key can be defined using dot notation
and allows to recursively access elements of elements.
:param key: a key of the element to get.
:return: the value of the map element.
|
[
"Gets",
"a",
"map",
"element",
"specified",
"by",
"its",
"key",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L37-L53
|
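Parameters.get delegates dotted keys to RecursiveObjectReader. A self-contained sketch of the same dot-notation lookup over nested dicts (not the pip-services implementation):

def get_by_path(data, key):
    """Resolve 'a.b.c' style keys against nested dictionaries."""
    if not key:
        return None
    current = data
    for part in key.split('.'):
        if not isinstance(current, dict) or part not in current:
            return None
        current = current[part]
    return current

params = {'server': {'host': 'localhost', 'port': 8080}}
print(get_by_path(params, 'server.port'))   # 8080
print(get_by_path(params, 'server.user'))   # None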
238,947
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.put
|
def put(self, key, value):
"""
Puts a new value into map element specified by its key.
The key can be defined using dot notation
and allows to recursively access elements of elements.
:param key: a key of the element to put.
:param value: a new value for map element.
"""
if key == None or key == '':
return None
elif key.find('.') > 0:
RecursiveObjectWriter.set_property(self, key, value)
return value
else:
self[key] = value
return value
|
python
|
def put(self, key, value):
"""
Puts a new value into map element specified by its key.
The key can be defined using dot notation
and allows to recursively access elements of elements.
:param key: a key of the element to put.
:param value: a new value for map element.
"""
if key == None or key == '':
return None
elif key.find('.') > 0:
RecursiveObjectWriter.set_property(self, key, value)
return value
else:
self[key] = value
return value
|
[
"def",
"put",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"if",
"key",
"==",
"None",
"or",
"key",
"==",
"''",
":",
"return",
"None",
"elif",
"key",
".",
"find",
"(",
"'.'",
")",
">",
"0",
":",
"RecursiveObjectWriter",
".",
"set_property",
"(",
"self",
",",
"key",
",",
"value",
")",
"return",
"value",
"else",
":",
"self",
"[",
"key",
"]",
"=",
"value",
"return",
"value"
] |
Puts a new value into map element specified by its key.
The key can be defined using dot notation
and allows to recursively access elements of elements.
:param key: a key of the element to put.
:param value: a new value for map element.
|
[
"Puts",
"a",
"new",
"value",
"into",
"map",
"element",
"specified",
"by",
"its",
"key",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L55-L73
|
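And the matching write path: a nested-dict sketch of what the dotted-key branch of Parameters.put does, creating intermediate maps as needed (whether RecursiveObjectWriter creates levels exactly this way is an assumption):

def put_by_path(data, key, value):
    """Set 'a.b.c' style keys on nested dictionaries, creating levels as needed."""
    if not key:
        return None
    parts = key.split('.')
    current = data
    for part in parts[:-1]:
        current = current.setdefault(part, {})
    current[parts[-1]] = value
    return value

params = {}
put_by_path(params, 'server.host', 'localhost')
print(params)   # {'server': {'host': 'localhost'}}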
238,948
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.get_as_nullable_parameters
|
def get_as_nullable_parameters(self, key):
"""
Converts map element into an Parameters or returns null if conversion is not possible.
:param key: a key of element to get.
:return: Parameters value of the element or null if conversion is not supported.
"""
value = self.get_as_nullable_map(key)
return Parameters(value) if value != None else None
|
python
|
def get_as_nullable_parameters(self, key):
"""
Converts map element into an Parameters or returns null if conversion is not possible.
:param key: a key of element to get.
:return: Parameters value of the element or null if conversion is not supported.
"""
value = self.get_as_nullable_map(key)
return Parameters(value) if value != None else None
|
[
"def",
"get_as_nullable_parameters",
"(",
"self",
",",
"key",
")",
":",
"value",
"=",
"self",
".",
"get_as_nullable_map",
"(",
"key",
")",
"return",
"Parameters",
"(",
"value",
")",
"if",
"value",
"!=",
"None",
"else",
"None"
] |
Converts map element into an Parameters or returns null if conversion is not possible.
:param key: a key of element to get.
:return: Parameters value of the element or null if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"an",
"Parameters",
"or",
"returns",
"null",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L75-L84
|
238,949
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.get_as_parameters_with_default
|
def get_as_parameters_with_default(self, key, default_value):
"""
Converts map element into an Parameters or returns default value if conversion is not possible.
:param key: a key of element to get.
:param default_value: the default value
:return: Parameters value of the element or default value if conversion is not supported.
"""
result = self.get_as_nullable_parameters(key)
return result if result != None else default_value
|
python
|
def get_as_parameters_with_default(self, key, default_value):
"""
Converts map element into an Parameters or returns default value if conversion is not possible.
:param key: a key of element to get.
:param default_value: the default value
:return: Parameters value of the element or default value if conversion is not supported.
"""
result = self.get_as_nullable_parameters(key)
return result if result != None else default_value
|
[
"def",
"get_as_parameters_with_default",
"(",
"self",
",",
"key",
",",
"default_value",
")",
":",
"result",
"=",
"self",
".",
"get_as_nullable_parameters",
"(",
"key",
")",
"return",
"result",
"if",
"result",
"!=",
"None",
"else",
"default_value"
] |
Converts map element into an Parameters or returns default value if conversion is not possible.
:param key: a key of element to get.
:param default_value: the default value
:return: Parameters value of the element or default value if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"an",
"Parameters",
"or",
"returns",
"default",
"value",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L97-L108
|
238,950
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.override
|
def override(self, parameters, recursive = False):
"""
Overrides parameters with new values from specified Parameters and returns a new Parameters object.
:param parameters: Parameters with parameters to override the current values.
:param recursive: (optional) true to perform deep copy, and false for shallow copy. Default: false
:return: a new Parameters object.
"""
result = Parameters()
if recursive:
RecursiveObjectWriter.copy_properties(result, self)
RecursiveObjectWriter.copy_properties(result, parameters)
else:
ObjectWriter.set_properties(result, self)
ObjectWriter.set_properties(result, parameters)
return result
|
python
|
def override(self, parameters, recursive = False):
"""
Overrides parameters with new values from specified Parameters and returns a new Parameters object.
:param parameters: Parameters with parameters to override the current values.
:param recursive: (optional) true to perform deep copy, and false for shallow copy. Default: false
:return: a new Parameters object.
"""
result = Parameters()
if recursive:
RecursiveObjectWriter.copy_properties(result, self)
RecursiveObjectWriter.copy_properties(result, parameters)
else:
ObjectWriter.set_properties(result, self)
ObjectWriter.set_properties(result, parameters)
return result
|
[
"def",
"override",
"(",
"self",
",",
"parameters",
",",
"recursive",
"=",
"False",
")",
":",
"result",
"=",
"Parameters",
"(",
")",
"if",
"recursive",
":",
"RecursiveObjectWriter",
".",
"copy_properties",
"(",
"result",
",",
"self",
")",
"RecursiveObjectWriter",
".",
"copy_properties",
"(",
"result",
",",
"parameters",
")",
"else",
":",
"ObjectWriter",
".",
"set_properties",
"(",
"result",
",",
"self",
")",
"ObjectWriter",
".",
"set_properties",
"(",
"result",
",",
"parameters",
")",
"return",
"result"
] |
Overrides parameters with new values from specified Parameters and returns a new Parameters object.
:param parameters: Parameters with parameters to override the current values.
:param recursive: (optional) true to perform deep copy, and false for shallow copy. Default: false
:return: a new Parameters object.
|
[
"Overrides",
"parameters",
"with",
"new",
"values",
"from",
"specified",
"Parameters",
"and",
"returns",
"a",
"new",
"Parameters",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L123-L142
|
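A short usage sketch for override. Assumptions: the import path follows the record's file path, and from_value builds a Parameters from a plain dict (see the from_value record further down).

from pip_services3_commons.run.Parameters import Parameters

base = Parameters.from_value({"timeout": 5, "retries": 3})
overrides = Parameters.from_value({"timeout": 30})

merged = base.override(overrides)   # shallow merge; pass recursive=True for a deep copy
print(merged.get("timeout"), merged.get("retries"))   # 30 3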
238,951
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.set_defaults
|
def set_defaults(self, default_values, recursive = False):
"""
Set default values from specified Parameters and returns a new Parameters object.
:param default_values: Parameters with default parameter values.
:param recursive: (optional) true to perform deep copy, and false for shallow copy. Default: false
:return: a new Parameters object.
"""
result = Parameters()
if recursive:
RecursiveObjectWriter.copy_properties(result, default_values)
RecursiveObjectWriter.copy_properties(result, self)
else:
ObjectWriter.set_properties(result, default_values)
ObjectWriter.set_properties(result, self)
return result
|
python
|
def set_defaults(self, default_values, recursive = False):
"""
Set default values from specified Parameters and returns a new Parameters object.
:param default_values: Parameters with default parameter values.
:param recursive: (optional) true to perform deep copy, and false for shallow copy. Default: false
:return: a new Parameters object.
"""
result = Parameters()
if recursive:
RecursiveObjectWriter.copy_properties(result, default_values)
RecursiveObjectWriter.copy_properties(result, self)
else:
ObjectWriter.set_properties(result, default_values)
ObjectWriter.set_properties(result, self)
return result
|
[
"def",
"set_defaults",
"(",
"self",
",",
"default_values",
",",
"recursive",
"=",
"False",
")",
":",
"result",
"=",
"Parameters",
"(",
")",
"if",
"recursive",
":",
"RecursiveObjectWriter",
".",
"copy_properties",
"(",
"result",
",",
"default_values",
")",
"RecursiveObjectWriter",
".",
"copy_properties",
"(",
"result",
",",
"self",
")",
"else",
":",
"ObjectWriter",
".",
"set_properties",
"(",
"result",
",",
"default_values",
")",
"ObjectWriter",
".",
"set_properties",
"(",
"result",
",",
"self",
")",
"return",
"result"
] |
Set default values from specified Parameters and returns a new Parameters object.
:param default_values: Parameters with default parameter values.
:param recursive: (optional) true to perform deep copy, and false for shallow copy. Default: false
:return: a new Parameters object.
|
[
"Set",
"default",
"values",
"from",
"specified",
"Parameters",
"and",
"returns",
"a",
"new",
"Parameters",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L144-L163
|
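The mirror-image sketch for set_defaults: values already present win, and the defaults only fill the gaps (same import assumption as in the override sketch above).

from pip_services3_commons.run.Parameters import Parameters

current = Parameters.from_value({"timeout": 30})
defaults = Parameters.from_value({"timeout": 5, "retries": 3})

settled = current.set_defaults(defaults)
print(settled.get("timeout"), settled.get("retries"))   # 30 3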
238,952
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.pick
|
def pick(self, *props):
"""
Picks select parameters from this Parameters and returns them as a new Parameters object.
:param props: keys to be picked and copied over to new Parameters.
:return: a new Parameters object.
"""
result = Parameters()
for prop in props:
if self.contains_key(prop):
result.put(prop, self.get(prop))
return result
|
python
|
def pick(self, *props):
"""
Picks select parameters from this Parameters and returns them as a new Parameters object.
:param props: keys to be picked and copied over to new Parameters.
:return: a new Parameters object.
"""
result = Parameters()
for prop in props:
if self.contains_key(prop):
result.put(prop, self.get(prop))
return result
|
[
"def",
"pick",
"(",
"self",
",",
"*",
"props",
")",
":",
"result",
"=",
"Parameters",
"(",
")",
"for",
"prop",
"in",
"props",
":",
"if",
"self",
".",
"contains_key",
"(",
"prop",
")",
":",
"result",
".",
"put",
"(",
"prop",
",",
"self",
".",
"get",
"(",
"prop",
")",
")",
"return",
"result"
] |
Picks select parameters from this Parameters and returns them as a new Parameters object.
:param props: keys to be picked and copied over to new Parameters.
:return: a new Parameters object.
|
[
"Picks",
"select",
"parameters",
"from",
"this",
"Parameters",
"and",
"returns",
"them",
"as",
"a",
"new",
"Parameters",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L176-L188
|
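A usage sketch for pick, again assuming the import path from the record; only keys that actually exist are copied over.

from pip_services3_commons.run.Parameters import Parameters

params = Parameters.from_value({"host": "localhost", "port": 8080, "password": "secret"})

public = params.pick("host", "port", "missing_key")   # "missing_key" is silently skipped
print(public.get("host"), public.get("port"), public.contains_key("password"))   # localhost 8080 False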
238,953
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.omit
|
def omit(self, *props):
"""
Omits selected parameters from this Parameters and returns the rest as a new Parameters object.
:param props: keys to be omitted from copying over to new Parameters.
:return: a new Parameters object.
"""
result = Parameters(self)
for prop in props:
del result[prop]
return result
|
python
|
def omit(self, *props):
"""
Omits selected parameters from this Parameters and returns the rest as a new Parameters object.
:param props: keys to be omitted from copying over to new Parameters.
:return: a new Parameters object.
"""
result = Parameters(self)
for prop in props:
del result[prop]
return result
|
[
"def",
"omit",
"(",
"self",
",",
"*",
"props",
")",
":",
"result",
"=",
"Parameters",
"(",
"self",
")",
"for",
"prop",
"in",
"props",
":",
"del",
"result",
"[",
"prop",
"]",
"return",
"result"
] |
Omits selected parameters from this Parameters and returns the rest as a new Parameters object.
:param props: keys to be omitted from copying over to new Parameters.
:return: a new Parameters object.
|
[
"Omits",
"selected",
"parameters",
"from",
"this",
"Parameters",
"and",
"returns",
"the",
"rest",
"as",
"a",
"new",
"Parameters",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L190-L201
|
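A usage sketch for omit, the complement of pick above. The record's implementation deletes each key with del, so this sketch only omits keys that are actually present.

from pip_services3_commons.run.Parameters import Parameters

params = Parameters.from_value({"host": "localhost", "port": 8080, "password": "secret"})

safe = params.omit("password")   # drop sensitive entries before logging
print(safe.contains_key("password"), safe.get("host"))   # False localhost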
238,954
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.from_value
|
def from_value(value):
"""
Creates a new Parameters object filled with key-value pairs from specified object.
:param value: an object with key-value pairs used to initialize a new Parameters.
:return: a new Parameters object.
"""
map = value if isinstance(value, dict) else RecursiveObjectReader.get_properties(value)
return Parameters(map)
|
python
|
def from_value(value):
"""
Creates a new Parameters object filled with key-value pairs from specified object.
:param value: an object with key-value pairs used to initialize a new Parameters.
:return: a new Parameters object.
"""
map = value if isinstance(value, dict) else RecursiveObjectReader.get_properties(value)
return Parameters(map)
|
[
"def",
"from_value",
"(",
"value",
")",
":",
"map",
"=",
"value",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
"else",
"RecursiveObjectReader",
".",
"get_properties",
"(",
"value",
")",
"return",
"Parameters",
"(",
"map",
")"
] |
Creates a new Parameters object filled with key-value pairs from specified object.
:param value: an object with key-value pairs used to initialize a new Parameters.
:return: a new Parameters object.
|
[
"Creates",
"a",
"new",
"Parameters",
"object",
"filled",
"with",
"key",
"-",
"value",
"pairs",
"from",
"specified",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L212-L221
|
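A sketch of the from_value factory. A plain dict is used verbatim; anything else is run through RecursiveObjectReader.get_properties, so an ordinary object with attributes would also work (not shown here to keep the example dependency-free).

from pip_services3_commons.run.Parameters import Parameters

params = Parameters.from_value({"level": "debug", "output": {"file": "app.log"}})
print(params.get("level"))   # debug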
238,955
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Parameters.py
|
Parameters.from_config
|
def from_config(config):
"""
Creates new Parameters from ConfigMap object.
:param config: a ConfigParams that contain parameters.
:return: a new Parameters object.
"""
result = Parameters()
if config == None or len(config) == 0:
return result
for (key, value) in config.items():
result.put(key, value)
return result
|
python
|
def from_config(config):
"""
Creates new Parameters from ConfigMap object.
:param config: a ConfigParams that contain parameters.
:return: a new Parameters object.
"""
result = Parameters()
if config == None or len(config) == 0:
return result
for (key, value) in config.items():
result.put(key, value)
return result
|
[
"def",
"from_config",
"(",
"config",
")",
":",
"result",
"=",
"Parameters",
"(",
")",
"if",
"config",
"==",
"None",
"or",
"len",
"(",
"config",
")",
"==",
"0",
":",
"return",
"result",
"for",
"(",
"key",
",",
"value",
")",
"in",
"config",
".",
"items",
"(",
")",
":",
"result",
".",
"put",
"(",
"key",
",",
"value",
")",
"return",
"result"
] |
Creates new Parameters from ConfigMap object.
:param config: a ConfigParams that contain parameters.
:return: a new Parameters object.
|
[
"Creates",
"new",
"Parameters",
"from",
"ConfigMap",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Parameters.py#L261-L277
|
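A sketch for from_config. The method only needs len() and items() from its argument, so a plain dict stands in for ConfigParams here; with a real ConfigParams instance the call is identical.

from pip_services3_commons.run.Parameters import Parameters

config = {"source": "local", "timeout": "30"}   # stand-in for a ConfigParams object
params = Parameters.from_config(config)
print(params.get("source"), params.get("timeout"))   # local 30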
238,956
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/run/Executor.py
|
Executor.execute
|
def execute(correlation_id, components, args = None):
"""
Executes multiple components.
To be executed components must implement [[IExecutable]] interface.
If they don't the call to this method has no effect.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param components: a list of components that are to be executed.
:param args: execution arguments.
:return: execution result
"""
results = []
if components == None:
return
args = args if args != None else Parameters()
for component in components:
result = Executor.execute_one(correlation_id, component, args)
results.append(result)
return results
|
python
|
def execute(correlation_id, components, args = None):
"""
Executes multiple components.
To be executed components must implement [[IExecutable]] interface.
If they don't the call to this method has no effect.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param components: a list of components that are to be executed.
:param args: execution arguments.
:return: execution result
"""
results = []
if components == None:
return
args = args if args != None else Parameters()
for component in components:
result = Executor.execute_one(correlation_id, component, args)
results.append(result)
return results
|
[
"def",
"execute",
"(",
"correlation_id",
",",
"components",
",",
"args",
"=",
"None",
")",
":",
"results",
"=",
"[",
"]",
"if",
"components",
"==",
"None",
":",
"return",
"args",
"=",
"args",
"if",
"args",
"!=",
"None",
"else",
"Parameters",
"(",
")",
"for",
"component",
"in",
"components",
":",
"result",
"=",
"Executor",
".",
"execute_one",
"(",
"correlation_id",
",",
"component",
",",
"args",
")",
"results",
".",
"append",
"(",
"result",
")",
"return",
"results"
] |
Executes multiple components.
To be executed components must implement [[IExecutable]] interface.
If they don't the call to this method has no effect.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param components: a list of components that are to be executed.
:param args: execution arguments.
:return: execution result
|
[
"Executes",
"multiple",
"components",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/run/Executor.py#L41-L66
|
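A hedged end-to-end sketch for Executor.execute. Assumptions: the import paths mirror the file paths in these records, and IExecutable components expose a single execute(correlation_id, args) method, which execute_one (not shown in this excerpt) is expected to call.

from pip_services3_commons.run.Executor import Executor
from pip_services3_commons.run.IExecutable import IExecutable
from pip_services3_commons.run.Parameters import Parameters

class EchoComponent(IExecutable):
    def execute(self, correlation_id, args):
        # Assumed IExecutable contract: receive the correlation id and the Parameters.
        return "echo: %s" % args.get("message")

components = [EchoComponent(), EchoComponent()]
args = Parameters.from_value({"message": "hello"})
results = Executor.execute("123", components, args)
print(results)   # one result per component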
238,957
|
mrstephenneal/looptools
|
looptools/timer.py
|
Timer.decorator
|
def decorator(func):
"""A function timer decorator."""
def function_timer(*args, **kwargs):
"""A nested function for timing other functions."""
# Capture start time
start = time.time()
# Execute function with arguments
value = func(*args, **kwargs)
# Capture end time
end = time.time()
# Calculate run time
runtime = end - start
if runtime < 60:
runtime = str('sec: ' + str('{:f}'.format(runtime)))
else:
runtime = str('min: ' + str('{:f}'.format(runtime / 60)))
print('{func:50} --> {time}'.format(func=func.__qualname__, time=runtime))
return value
return function_timer
|
python
|
def decorator(func):
"""A function timer decorator."""
def function_timer(*args, **kwargs):
"""A nested function for timing other functions."""
# Capture start time
start = time.time()
# Execute function with arguments
value = func(*args, **kwargs)
# Capture end time
end = time.time()
# Calculate run time
runtime = end - start
if runtime < 60:
runtime = str('sec: ' + str('{:f}'.format(runtime)))
else:
runtime = str('min: ' + str('{:f}'.format(runtime / 60)))
print('{func:50} --> {time}'.format(func=func.__qualname__, time=runtime))
return value
return function_timer
|
[
"def",
"decorator",
"(",
"func",
")",
":",
"def",
"function_timer",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"A nested function for timing other functions.\"\"\"",
"# Capture start time",
"start",
"=",
"time",
".",
"time",
"(",
")",
"# Execute function with arguments",
"value",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# Capture end time",
"end",
"=",
"time",
".",
"time",
"(",
")",
"# Calculate run time",
"runtime",
"=",
"end",
"-",
"start",
"if",
"runtime",
"<",
"60",
":",
"runtime",
"=",
"str",
"(",
"'sec: '",
"+",
"str",
"(",
"'{:f}'",
".",
"format",
"(",
"runtime",
")",
")",
")",
"else",
":",
"runtime",
"=",
"str",
"(",
"'min: '",
"+",
"str",
"(",
"'{:f}'",
".",
"format",
"(",
"runtime",
"/",
"60",
")",
")",
")",
"print",
"(",
"'{func:50} --> {time}'",
".",
"format",
"(",
"func",
"=",
"func",
".",
"__qualname__",
",",
"time",
"=",
"runtime",
")",
")",
"return",
"value",
"return",
"function_timer"
] |
A function timer decorator.
|
[
"A",
"function",
"timer",
"decorator",
"."
] |
c4ef88d78e0fb672d09a18de0aa0edd31fd4db72
|
https://github.com/mrstephenneal/looptools/blob/c4ef88d78e0fb672d09a18de0aa0edd31fd4db72/looptools/timer.py#L37-L60
|
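A usage sketch for Timer.decorator. Assumptions: the Timer class is importable from the looptools package top level (it lives in looptools/timer.py), and decorator is exposed at class level, as its self-less signature suggests.

import time
from looptools import Timer

@Timer.decorator
def slow_add(a, b):
    time.sleep(0.2)      # simulate work so the printed runtime is noticeable
    return a + b

print(slow_add(2, 3))    # the decorator prints the qualified name and runtime, then 5 is printed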
238,958
|
zkbt/the-friendly-stars
|
thefriendlystars/finders.py
|
Finder.populateImagesFromSurveys
|
def populateImagesFromSurveys(self, surveys=dss2 + twomass):
'''
Load images from archives.
'''
# what's the coordinate center?
coordinatetosearch = '{0.ra.deg} {0.dec.deg}'.format(self.center)
# query sky view for those images
paths = astroquery.skyview.SkyView.get_images(
position=coordinatetosearch,
radius=self.radius,
survey=surveys)
# populate the images for each of these
self.images = [Image(p[0], s) for p, s in zip(paths, surveys)]
|
python
|
def populateImagesFromSurveys(self, surveys=dss2 + twomass):
'''
Load images from archives.
'''
# what's the coordinate center?
coordinatetosearch = '{0.ra.deg} {0.dec.deg}'.format(self.center)
# query sky view for those images
paths = astroquery.skyview.SkyView.get_images(
position=coordinatetosearch,
radius=self.radius,
survey=surveys)
# populate the images for each of these
self.images = [Image(p[0], s) for p, s in zip(paths, surveys)]
|
[
"def",
"populateImagesFromSurveys",
"(",
"self",
",",
"surveys",
"=",
"dss2",
"+",
"twomass",
")",
":",
"# what's the coordinate center?",
"coordinatetosearch",
"=",
"'{0.ra.deg} {0.dec.deg}'",
".",
"format",
"(",
"self",
".",
"center",
")",
"# query sky view for those images",
"paths",
"=",
"astroquery",
".",
"skyview",
".",
"SkyView",
".",
"get_images",
"(",
"position",
"=",
"coordinatetosearch",
",",
"radius",
"=",
"self",
".",
"radius",
",",
"survey",
"=",
"surveys",
")",
"# populate the images for each of these",
"self",
".",
"images",
"=",
"[",
"Image",
"(",
"p",
"[",
"0",
"]",
",",
"s",
")",
"for",
"p",
",",
"s",
"in",
"zip",
"(",
"paths",
",",
"surveys",
")",
"]"
] |
Load images from archives.
|
[
"Load",
"images",
"from",
"archives",
"."
] |
50d3f979e79e63c66629065c75595696dc79802e
|
https://github.com/zkbt/the-friendly-stars/blob/50d3f979e79e63c66629065c75595696dc79802e/thefriendlystars/finders.py#L26-L41
|
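A standalone sketch of the SkyView query the method wraps, since Finder's constructor is not shown in this excerpt. Assumptions: astroquery and astropy are installed, network access is available, and the listed survey names are valid SkyView identifiers; the position string is formatted exactly as in the method above.

import astropy.units as u
from astropy.coordinates import SkyCoord
from astroquery.skyview import SkyView

center = SkyCoord(ra=10.68458 * u.deg, dec=41.26917 * u.deg)   # arbitrary example position
position = '{0.ra.deg} {0.dec.deg}'.format(center)             # same formatting as the method

paths = SkyView.get_images(position=position,
                           radius=6 * u.arcmin,
                           survey=['DSS2 Red', '2MASS-J'])
print(len(paths))   # one HDU list per requested survey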
238,959
|
genialis/django-priority-batch
|
src/django_priority_batch/prioritized_batcher.py
|
PrioritizedBatcher.global_instance
|
def global_instance(cls):
"""Return a per-thread global batcher instance."""
try:
return GLOBAL_BATCHER.instance
except AttributeError:
instance = PrioritizedBatcher(
**getattr(settings, 'PRIORITIZED_BATCHER', {})
)
GLOBAL_BATCHER.instance = instance
return instance
|
python
|
def global_instance(cls):
"""Return a per-thread global batcher instance."""
try:
return GLOBAL_BATCHER.instance
except AttributeError:
instance = PrioritizedBatcher(
**getattr(settings, 'PRIORITIZED_BATCHER', {})
)
GLOBAL_BATCHER.instance = instance
return instance
|
[
"def",
"global_instance",
"(",
"cls",
")",
":",
"try",
":",
"return",
"GLOBAL_BATCHER",
".",
"instance",
"except",
"AttributeError",
":",
"instance",
"=",
"PrioritizedBatcher",
"(",
"*",
"*",
"getattr",
"(",
"settings",
",",
"'PRIORITIZED_BATCHER'",
",",
"{",
"}",
")",
")",
"GLOBAL_BATCHER",
".",
"instance",
"=",
"instance",
"return",
"instance"
] |
Return a per-thread global batcher instance.
|
[
"Return",
"a",
"per",
"-",
"thread",
"global",
"batcher",
"instance",
"."
] |
63da74ef7348a67b7e31a131f295f51511495f30
|
https://github.com/genialis/django-priority-batch/blob/63da74ef7348a67b7e31a131f295f51511495f30/src/django_priority_batch/prioritized_batcher.py#L33-L43
|
238,960
|
genialis/django-priority-batch
|
src/django_priority_batch/prioritized_batcher.py
|
PrioritizedBatcher.commit
|
def commit(self):
"""Commit a batch."""
assert self.batch is not None, "No active batch, call start() first"
logger.debug("Comitting batch from %d sources...", len(self.batch))
# Determine item priority.
by_priority = []
for name in self.batch.keys():
priority = self.priorities.get(name, self.default_priority)
by_priority.append((priority, name))
for priority, name in sorted(by_priority, key=lambda key: key[0]):
logger.debug("Processing items from '%s' (priority=%d)...", name, priority)
items = self.batch[name]
for handlers in items.values():
for agg, handler in handlers:
try:
if agg is None:
handler()
else:
handler(agg)
except Exception as error:
# Log errors and proceed to evaluate the next handler.
logger.exception("Error while invoking handler.")
self.batch = None
logger.debug("Batch committed.")
|
python
|
def commit(self):
"""Commit a batch."""
assert self.batch is not None, "No active batch, call start() first"
logger.debug("Comitting batch from %d sources...", len(self.batch))
# Determine item priority.
by_priority = []
for name in self.batch.keys():
priority = self.priorities.get(name, self.default_priority)
by_priority.append((priority, name))
for priority, name in sorted(by_priority, key=lambda key: key[0]):
logger.debug("Processing items from '%s' (priority=%d)...", name, priority)
items = self.batch[name]
for handlers in items.values():
for agg, handler in handlers:
try:
if agg is None:
handler()
else:
handler(agg)
except Exception as error:
# Log errors and proceed to evaluate the next handler.
logger.exception("Error while invoking handler.")
self.batch = None
logger.debug("Batch committed.")
|
[
"def",
"commit",
"(",
"self",
")",
":",
"assert",
"self",
".",
"batch",
"is",
"not",
"None",
",",
"\"No active batch, call start() first\"",
"logger",
".",
"debug",
"(",
"\"Comitting batch from %d sources...\"",
",",
"len",
"(",
"self",
".",
"batch",
")",
")",
"# Determine item priority.",
"by_priority",
"=",
"[",
"]",
"for",
"name",
"in",
"self",
".",
"batch",
".",
"keys",
"(",
")",
":",
"priority",
"=",
"self",
".",
"priorities",
".",
"get",
"(",
"name",
",",
"self",
".",
"default_priority",
")",
"by_priority",
".",
"append",
"(",
"(",
"priority",
",",
"name",
")",
")",
"for",
"priority",
",",
"name",
"in",
"sorted",
"(",
"by_priority",
",",
"key",
"=",
"lambda",
"key",
":",
"key",
"[",
"0",
"]",
")",
":",
"logger",
".",
"debug",
"(",
"\"Processing items from '%s' (priority=%d)...\"",
",",
"name",
",",
"priority",
")",
"items",
"=",
"self",
".",
"batch",
"[",
"name",
"]",
"for",
"handlers",
"in",
"items",
".",
"values",
"(",
")",
":",
"for",
"agg",
",",
"handler",
"in",
"handlers",
":",
"try",
":",
"if",
"agg",
"is",
"None",
":",
"handler",
"(",
")",
"else",
":",
"handler",
"(",
"agg",
")",
"except",
"Exception",
"as",
"error",
":",
"# Log errors and proceed to evaluate the next handler.",
"logger",
".",
"exception",
"(",
"\"Error while invoking handler.\"",
")",
"self",
".",
"batch",
"=",
"None",
"logger",
".",
"debug",
"(",
"\"Batch committed.\"",
")"
] |
Commit a batch.
|
[
"Commit",
"a",
"batch",
"."
] |
63da74ef7348a67b7e31a131f295f51511495f30
|
https://github.com/genialis/django-priority-batch/blob/63da74ef7348a67b7e31a131f295f51511495f30/src/django_priority_batch/prioritized_batcher.py#L57-L85
|
238,961
|
genialis/django-priority-batch
|
src/django_priority_batch/prioritized_batcher.py
|
PrioritizedBatcher.add
|
def add(self, name, handler, group_by=None, aggregator=None):
"""Add a new handler to the current batch."""
assert self.batch is not None, "No active batch, call start() first"
items = self.batch.setdefault(name, collections.OrderedDict())
if group_by is None:
# None is special as it means no grouping. In this case we must store all
# the different handlers and call them all.
items.setdefault(group_by, []).append((None, handler))
elif aggregator is not None:
agg = items.get(group_by, [(None, None)])[0][0]
items[group_by] = [(aggregator(agg), handler)]
else:
items[group_by] = [(None, handler)]
|
python
|
def add(self, name, handler, group_by=None, aggregator=None):
"""Add a new handler to the current batch."""
assert self.batch is not None, "No active batch, call start() first"
items = self.batch.setdefault(name, collections.OrderedDict())
if group_by is None:
# None is special as it means no grouping. In this case we must store all
# the different handlers and call them all.
items.setdefault(group_by, []).append((None, handler))
elif aggregator is not None:
agg = items.get(group_by, [(None, None)])[0][0]
items[group_by] = [(aggregator(agg), handler)]
else:
items[group_by] = [(None, handler)]
|
[
"def",
"add",
"(",
"self",
",",
"name",
",",
"handler",
",",
"group_by",
"=",
"None",
",",
"aggregator",
"=",
"None",
")",
":",
"assert",
"self",
".",
"batch",
"is",
"not",
"None",
",",
"\"No active batch, call start() first\"",
"items",
"=",
"self",
".",
"batch",
".",
"setdefault",
"(",
"name",
",",
"collections",
".",
"OrderedDict",
"(",
")",
")",
"if",
"group_by",
"is",
"None",
":",
"# None is special as it means no grouping. In this case we must store all",
"# the different handlers and call them all.",
"items",
".",
"setdefault",
"(",
"group_by",
",",
"[",
"]",
")",
".",
"append",
"(",
"(",
"None",
",",
"handler",
")",
")",
"elif",
"aggregator",
"is",
"not",
"None",
":",
"agg",
"=",
"items",
".",
"get",
"(",
"group_by",
",",
"[",
"(",
"None",
",",
"None",
")",
"]",
")",
"[",
"0",
"]",
"[",
"0",
"]",
"items",
"[",
"group_by",
"]",
"=",
"[",
"(",
"aggregator",
"(",
"agg",
")",
",",
"handler",
")",
"]",
"else",
":",
"items",
"[",
"group_by",
"]",
"=",
"[",
"(",
"None",
",",
"handler",
")",
"]"
] |
Add a new handler to the current batch.
|
[
"Add",
"a",
"new",
"handler",
"to",
"the",
"current",
"batch",
"."
] |
63da74ef7348a67b7e31a131f295f51511495f30
|
https://github.com/genialis/django-priority-batch/blob/63da74ef7348a67b7e31a131f295f51511495f30/src/django_priority_batch/prioritized_batcher.py#L93-L106
|
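A minimal flow sketch tying together the global_instance, commit, and add records above. Assumptions: the module path mirrors the file path, a start() method exists (the asserts say "call start() first" but it is not shown in this excerpt), and the constructor accepts no arguments, as the empty-settings path in global_instance implies.

from django_priority_batch.prioritized_batcher import PrioritizedBatcher

batcher = PrioritizedBatcher()
batcher.start()

# Ungrouped handlers (group_by=None) are all kept and all invoked on commit.
batcher.add('emails', lambda: print('send welcome email'))
batcher.add('emails', lambda: print('send digest email'))

# Grouped handlers without an aggregator collapse: the last one per key wins.
batcher.add('cache', lambda: print('invalidate user:1'), group_by='user:1')
batcher.add('cache', lambda: print('invalidate user:1 (again)'), group_by='user:1')

batcher.commit()   # runs sources in priority order, then clears the batch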
238,962
|
realestate-com-au/dashmat
|
dashmat/core_modules/splunk/splunk-sdk-1.3.0/splunklib/searchcommands/logging.py
|
configure
|
def configure(name, path=None):
""" Configure logging and return a logger and the location of its logging
configuration file.
This function expects:
+ A Splunk app directory structure::
<app-root>
bin
...
default
...
local
...
+ The current working directory is *<app-root>***/bin**.
Splunk guarantees this. If you are running the app outside of Splunk, be
sure to set the current working directory to *<app-root>***/bin** before
calling.
This function looks for a logging configuration file at each of these
locations, loading the first, if any, logging configuration file that it
finds::
local/{name}.logging.conf
default/{name}.logging.conf
local/logging.conf
default/logging.conf
The current working directory is set to *<app-root>* before the logging
configuration file is loaded. Hence, paths in the logging configuration
file are relative to *<app-root>*. The current directory is reset before
return.
You may short circuit the search for a logging configuration file by
providing an alternative file location in `path`. Logging configuration
files must be in `ConfigParser format`_.
#Arguments:
:param name: Logger name
:type name: str
:param path: Location of an alternative logging configuration file or `None`
:type path: str or NoneType
:returns: A logger and the location of its logging configuration file
.. _ConfigParser format: http://goo.gl/K6edZ8
"""
app_directory = os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0])))
if path is None:
probing_path = [
'local/%s.logging.conf' % name,
'default/%s.logging.conf' % name,
'local/logging.conf',
'default/logging.conf']
for relative_path in probing_path:
configuration_file = os.path.join(app_directory, relative_path)
if os.path.exists(configuration_file):
path = configuration_file
break
elif not os.path.isabs(path):
found = False
for conf in 'local', 'default':
configuration_file = os.path.join(app_directory, conf, path)
if os.path.exists(configuration_file):
path = configuration_file
found = True
break
if not found:
raise ValueError(
'Logging configuration file "%s" not found in local or default '
'directory' % path)
elif not os.path.exists(path):
raise ValueError('Logging configuration file "%s" not found')
if path is not None:
working_directory = os.getcwd()
os.chdir(app_directory)
try:
splunk_home = os.path.normpath(os.path.join(working_directory, os.environ['SPLUNK_HOME']))
except KeyError:
splunk_home = working_directory # reasonable in debug scenarios
try:
path = os.path.abspath(path)
fileConfig(path, {'SPLUNK_HOME': splunk_home})
finally:
os.chdir(working_directory)
if len(root.handlers) == 0:
root.addHandler(StreamHandler())
logger = getLogger(name)
return logger, path
|
python
|
def configure(name, path=None):
""" Configure logging and return a logger and the location of its logging
configuration file.
This function expects:
+ A Splunk app directory structure::
<app-root>
bin
...
default
...
local
...
+ The current working directory is *<app-root>***/bin**.
Splunk guarantees this. If you are running the app outside of Splunk, be
sure to set the current working directory to *<app-root>***/bin** before
calling.
This function looks for a logging configuration file at each of these
locations, loading the first, if any, logging configuration file that it
finds::
local/{name}.logging.conf
default/{name}.logging.conf
local/logging.conf
default/logging.conf
The current working directory is set to *<app-root>* before the logging
configuration file is loaded. Hence, paths in the logging configuration
file are relative to *<app-root>*. The current directory is reset before
return.
You may short circuit the search for a logging configuration file by
providing an alternative file location in `path`. Logging configuration
files must be in `ConfigParser format`_.
#Arguments:
:param name: Logger name
:type name: str
:param path: Location of an alternative logging configuration file or `None`
:type path: str or NoneType
:returns: A logger and the location of its logging configuration file
.. _ConfigParser format: http://goo.gl/K6edZ8
"""
app_directory = os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0])))
if path is None:
probing_path = [
'local/%s.logging.conf' % name,
'default/%s.logging.conf' % name,
'local/logging.conf',
'default/logging.conf']
for relative_path in probing_path:
configuration_file = os.path.join(app_directory, relative_path)
if os.path.exists(configuration_file):
path = configuration_file
break
elif not os.path.isabs(path):
found = False
for conf in 'local', 'default':
configuration_file = os.path.join(app_directory, conf, path)
if os.path.exists(configuration_file):
path = configuration_file
found = True
break
if not found:
raise ValueError(
'Logging configuration file "%s" not found in local or default '
'directory' % path)
elif not os.path.exists(path):
raise ValueError('Logging configuration file "%s" not found')
if path is not None:
working_directory = os.getcwd()
os.chdir(app_directory)
try:
splunk_home = os.path.normpath(os.path.join(working_directory, os.environ['SPLUNK_HOME']))
except KeyError:
splunk_home = working_directory # reasonable in debug scenarios
try:
path = os.path.abspath(path)
fileConfig(path, {'SPLUNK_HOME': splunk_home})
finally:
os.chdir(working_directory)
if len(root.handlers) == 0:
root.addHandler(StreamHandler())
logger = getLogger(name)
return logger, path
|
[
"def",
"configure",
"(",
"name",
",",
"path",
"=",
"None",
")",
":",
"app_directory",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
")",
")",
"if",
"path",
"is",
"None",
":",
"probing_path",
"=",
"[",
"'local/%s.logging.conf'",
"%",
"name",
",",
"'default/%s.logging.conf'",
"%",
"name",
",",
"'local/logging.conf'",
",",
"'default/logging.conf'",
"]",
"for",
"relative_path",
"in",
"probing_path",
":",
"configuration_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_directory",
",",
"relative_path",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"configuration_file",
")",
":",
"path",
"=",
"configuration_file",
"break",
"elif",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"path",
")",
":",
"found",
"=",
"False",
"for",
"conf",
"in",
"'local'",
",",
"'default'",
":",
"configuration_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_directory",
",",
"conf",
",",
"path",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"configuration_file",
")",
":",
"path",
"=",
"configuration_file",
"found",
"=",
"True",
"break",
"if",
"not",
"found",
":",
"raise",
"ValueError",
"(",
"'Logging configuration file \"%s\" not found in local or default '",
"'directory'",
"%",
"path",
")",
"elif",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"raise",
"ValueError",
"(",
"'Logging configuration file \"%s\" not found'",
")",
"if",
"path",
"is",
"not",
"None",
":",
"working_directory",
"=",
"os",
".",
"getcwd",
"(",
")",
"os",
".",
"chdir",
"(",
"app_directory",
")",
"try",
":",
"splunk_home",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"working_directory",
",",
"os",
".",
"environ",
"[",
"'SPLUNK_HOME'",
"]",
")",
")",
"except",
"KeyError",
":",
"splunk_home",
"=",
"working_directory",
"# reasonable in debug scenarios",
"try",
":",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"path",
")",
"fileConfig",
"(",
"path",
",",
"{",
"'SPLUNK_HOME'",
":",
"splunk_home",
"}",
")",
"finally",
":",
"os",
".",
"chdir",
"(",
"working_directory",
")",
"if",
"len",
"(",
"root",
".",
"handlers",
")",
"==",
"0",
":",
"root",
".",
"addHandler",
"(",
"StreamHandler",
"(",
")",
")",
"logger",
"=",
"getLogger",
"(",
"name",
")",
"return",
"logger",
",",
"path"
] |
Configure logging and return a logger and the location of its logging
configuration file.
This function expects:
+ A Splunk app directory structure::
<app-root>
bin
...
default
...
local
...
+ The current working directory is *<app-root>***/bin**.
Splunk guarantees this. If you are running the app outside of Splunk, be
sure to set the current working directory to *<app-root>***/bin** before
calling.
This function looks for a logging configuration file at each of these
locations, loading the first, if any, logging configuration file that it
finds::
local/{name}.logging.conf
default/{name}.logging.conf
local/logging.conf
default/logging.conf
The current working directory is set to *<app-root>* before the logging
configuration file is loaded. Hence, paths in the logging configuration
file are relative to *<app-root>*. The current directory is reset before
return.
You may short circuit the search for a logging configuration file by
providing an alternative file location in `path`. Logging configuration
files must be in `ConfigParser format`_.
#Arguments:
:param name: Logger name
:type name: str
:param path: Location of an alternative logging configuration file or `None`
:type path: str or NoneType
:returns: A logger and the location of its logging configuration file
.. _ConfigParser format: http://goo.gl/K6edZ8
|
[
"Configure",
"logging",
"and",
"return",
"a",
"logger",
"and",
"the",
"location",
"of",
"its",
"logging",
"configuration",
"file",
"."
] |
433886e52698f0ddb9956f087b76041966c3bcd1
|
https://github.com/realestate-com-au/dashmat/blob/433886e52698f0ddb9956f087b76041966c3bcd1/dashmat/core_modules/splunk/splunk-sdk-1.3.0/splunklib/searchcommands/logging.py#L23-L119
|
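A hedged call sketch for configure(), assuming the vendored module path shown in the record and a current working directory of <app-root>/bin, as the docstring requires. When no configuration file is found, path comes back as None and a bare StreamHandler is attached to the root logger.

from splunklib.searchcommands.logging import configure

logger, config_path = configure('mycommand')   # probes local/ then default/ for mycommand.logging.conf
if config_path is None:
    logger.warning('no logging configuration file found; falling back to the root handler')
logger.info('search command initialised')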
238,963
|
mikeboers/sitetools
|
sitetools/utils.py
|
expand_user
|
def expand_user(path, user=None):
"""Roughly the same as os.path.expanduser, but you can pass a default user."""
def _replace(m):
m_user = m.group(1) or user
return pwd.getpwnam(m_user).pw_dir if m_user else pwd.getpwuid(os.getuid()).pw_dir
return re.sub(r'~(\w*)', _replace, path)
|
python
|
def expand_user(path, user=None):
"""Roughly the same as os.path.expanduser, but you can pass a default user."""
def _replace(m):
m_user = m.group(1) or user
return pwd.getpwnam(m_user).pw_dir if m_user else pwd.getpwuid(os.getuid()).pw_dir
return re.sub(r'~(\w*)', _replace, path)
|
[
"def",
"expand_user",
"(",
"path",
",",
"user",
"=",
"None",
")",
":",
"def",
"_replace",
"(",
"m",
")",
":",
"m_user",
"=",
"m",
".",
"group",
"(",
"1",
")",
"or",
"user",
"return",
"pwd",
".",
"getpwnam",
"(",
"m_user",
")",
".",
"pw_dir",
"if",
"m_user",
"else",
"pwd",
".",
"getpwuid",
"(",
"os",
".",
"getuid",
"(",
")",
")",
".",
"pw_dir",
"return",
"re",
".",
"sub",
"(",
"r'~(\\w*)'",
",",
"_replace",
",",
"path",
")"
] |
Roughly the same as os.path.expanduser, but you can pass a default user.
|
[
"Roughly",
"the",
"same",
"as",
"os",
".",
"path",
".",
"expanduser",
"but",
"you",
"can",
"pass",
"a",
"default",
"user",
"."
] |
1ec4eea6902b4a276f868a711b783dd965c123b7
|
https://github.com/mikeboers/sitetools/blob/1ec4eea6902b4a276f868a711b783dd965c123b7/sitetools/utils.py#L23-L30
|
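A usage sketch (POSIX-only, since the helper relies on the pwd module). The import path mirrors the record's file path, and 'root' stands in for an account that exists on virtually every POSIX system.

from sitetools.utils import expand_user

print(expand_user('~/projects'))                 # current user's home directory + /projects
print(expand_user('~root/projects'))             # an explicit account named in the path
print(expand_user('~/projects', user='root'))    # fallback account when the path names none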
238,964
|
mikeboers/sitetools
|
sitetools/utils.py
|
unique_list
|
def unique_list(input_, key=lambda x:x):
"""Return the unique elements from the input, in order."""
seen = set()
output = []
for x in input_:
keyx = key(x)
if keyx not in seen:
seen.add(keyx)
output.append(x)
return output
|
python
|
def unique_list(input_, key=lambda x:x):
"""Return the unique elements from the input, in order."""
seen = set()
output = []
for x in input_:
keyx = key(x)
if keyx not in seen:
seen.add(keyx)
output.append(x)
return output
|
[
"def",
"unique_list",
"(",
"input_",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
")",
":",
"seen",
"=",
"set",
"(",
")",
"output",
"=",
"[",
"]",
"for",
"x",
"in",
"input_",
":",
"keyx",
"=",
"key",
"(",
"x",
")",
"if",
"keyx",
"not",
"in",
"seen",
":",
"seen",
".",
"add",
"(",
"keyx",
")",
"output",
".",
"append",
"(",
"x",
")",
"return",
"output"
] |
Return the unique elements from the input, in order.
|
[
"Return",
"the",
"unique",
"elements",
"from",
"the",
"input",
"in",
"order",
"."
] |
1ec4eea6902b4a276f868a711b783dd965c123b7
|
https://github.com/mikeboers/sitetools/blob/1ec4eea6902b4a276f868a711b783dd965c123b7/sitetools/utils.py#L33-L42
|
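Two quick calls showing the order-preserving dedupe and the optional key function (import path assumed from the record's file path).

from sitetools.utils import unique_list

print(unique_list([3, 1, 3, 2, 1]))                            # [3, 1, 2]
print(unique_list(['Apple', 'apple', 'PEAR'], key=str.lower))  # ['Apple', 'PEAR']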
238,965
|
mikeboers/sitetools
|
sitetools/utils.py
|
get_environ_list
|
def get_environ_list(name, default=None):
"""Return the split colon-delimited list from an environment variable.
Returns an empty list if the variable didn't exist.
"""
packed = os.environ.get(name)
if packed is not None:
return packed.split(':')
elif default is not None:
return default
else:
return []
|
python
|
def get_environ_list(name, default=None):
"""Return the split colon-delimited list from an environment variable.
Returns an empty list if the variable didn't exist.
"""
packed = os.environ.get(name)
if packed is not None:
return packed.split(':')
elif default is not None:
return default
else:
return []
|
[
"def",
"get_environ_list",
"(",
"name",
",",
"default",
"=",
"None",
")",
":",
"packed",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"name",
")",
"if",
"packed",
"is",
"not",
"None",
":",
"return",
"packed",
".",
"split",
"(",
"':'",
")",
"elif",
"default",
"is",
"not",
"None",
":",
"return",
"default",
"else",
":",
"return",
"[",
"]"
] |
Return the split colon-delimited list from an environment variable.
Returns an empty list if the variable didn't exist.
|
[
"Return",
"the",
"split",
"colon",
"-",
"delimited",
"list",
"from",
"an",
"environment",
"variable",
"."
] |
1ec4eea6902b4a276f868a711b783dd965c123b7
|
https://github.com/mikeboers/sitetools/blob/1ec4eea6902b4a276f868a711b783dd965c123b7/sitetools/utils.py#L45-L57
|
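A usage sketch for get_environ_list; DEMO_PATHS is a hypothetical variable set just for the example.

import os
from sitetools.utils import get_environ_list

os.environ['DEMO_PATHS'] = '/usr/local/bin:/usr/bin'
print(get_environ_list('DEMO_PATHS'))                           # ['/usr/local/bin', '/usr/bin']
print(get_environ_list('SURELY_UNSET_VAR'))                     # []
print(get_environ_list('SURELY_UNSET_VAR', default=['/opt']))   # ['/opt']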
238,966
|
Phelimb/ga4gh-mongo
|
ga4ghmongo/schema/models/variants.py
|
is_indel
|
def is_indel(reference_bases, alternate_bases):
""" Return whether or not the variant is an INDEL """
if len(reference_bases) > 1:
return True
for alt in alternate_bases:
if alt is None:
return True
elif len(alt) != len(reference_bases):
return True
return False
|
python
|
def is_indel(reference_bases, alternate_bases):
""" Return whether or not the variant is an INDEL """
if len(reference_bases) > 1:
return True
for alt in alternate_bases:
if alt is None:
return True
elif len(alt) != len(reference_bases):
return True
return False
|
[
"def",
"is_indel",
"(",
"reference_bases",
",",
"alternate_bases",
")",
":",
"if",
"len",
"(",
"reference_bases",
")",
">",
"1",
":",
"return",
"True",
"for",
"alt",
"in",
"alternate_bases",
":",
"if",
"alt",
"is",
"None",
":",
"return",
"True",
"elif",
"len",
"(",
"alt",
")",
"!=",
"len",
"(",
"reference_bases",
")",
":",
"return",
"True",
"return",
"False"
] |
Return whether or not the variant is an INDEL
|
[
"Return",
"whether",
"or",
"not",
"the",
"variant",
"is",
"an",
"INDEL"
] |
5f5a3e1922be0e0d13af1874fad6eed5418ee761
|
https://github.com/Phelimb/ga4gh-mongo/blob/5f5a3e1922be0e0d13af1874fad6eed5418ee761/ga4ghmongo/schema/models/variants.py#L268-L277
|
238,967
|
Phelimb/ga4gh-mongo
|
ga4ghmongo/schema/models/variants.py
|
is_snp
|
def is_snp(reference_bases, alternate_bases):
""" Return whether or not the variant is a SNP """
if len(reference_bases) > 1:
return False
for alt in alternate_bases:
if alt is None:
return False
if alt not in ['A', 'C', 'G', 'T', 'N', '*']:
return False
return True
|
python
|
def is_snp(reference_bases, alternate_bases):
""" Return whether or not the variant is a SNP """
if len(reference_bases) > 1:
return False
for alt in alternate_bases:
if alt is None:
return False
if alt not in ['A', 'C', 'G', 'T', 'N', '*']:
return False
return True
|
[
"def",
"is_snp",
"(",
"reference_bases",
",",
"alternate_bases",
")",
":",
"if",
"len",
"(",
"reference_bases",
")",
">",
"1",
":",
"return",
"False",
"for",
"alt",
"in",
"alternate_bases",
":",
"if",
"alt",
"is",
"None",
":",
"return",
"False",
"if",
"alt",
"not",
"in",
"[",
"'A'",
",",
"'C'",
",",
"'G'",
",",
"'T'",
",",
"'N'",
",",
"'*'",
"]",
":",
"return",
"False",
"return",
"True"
] |
Return whether or not the variant is a SNP
|
[
"Return",
"whether",
"or",
"not",
"the",
"variant",
"is",
"a",
"SNP"
] |
5f5a3e1922be0e0d13af1874fad6eed5418ee761
|
https://github.com/Phelimb/ga4gh-mongo/blob/5f5a3e1922be0e0d13af1874fad6eed5418ee761/ga4ghmongo/schema/models/variants.py#L280-L289
|
238,968
|
Phelimb/ga4gh-mongo
|
ga4ghmongo/schema/models/variants.py
|
is_deletion
|
def is_deletion(reference_bases, alternate_bases):
""" Return whether or not the INDEL is a deletion """
# if multiple alts, it is unclear if we have a transition
if len(alternate_bases) > 1:
return False
if is_indel(reference_bases, alternate_bases):
# just one alt allele
alt_allele = alternate_bases[0]
if alt_allele is None:
return True
if len(reference_bases) > len(alt_allele):
return True
else:
return False
else:
return False
|
python
|
def is_deletion(reference_bases, alternate_bases):
""" Return whether or not the INDEL is a deletion """
# if multiple alts, it is unclear if we have a transition
if len(alternate_bases) > 1:
return False
if is_indel(reference_bases, alternate_bases):
# just one alt allele
alt_allele = alternate_bases[0]
if alt_allele is None:
return True
if len(reference_bases) > len(alt_allele):
return True
else:
return False
else:
return False
|
[
"def",
"is_deletion",
"(",
"reference_bases",
",",
"alternate_bases",
")",
":",
"# if multiple alts, it is unclear if we have a transition",
"if",
"len",
"(",
"alternate_bases",
")",
">",
"1",
":",
"return",
"False",
"if",
"is_indel",
"(",
"reference_bases",
",",
"alternate_bases",
")",
":",
"# just one alt allele",
"alt_allele",
"=",
"alternate_bases",
"[",
"0",
"]",
"if",
"alt_allele",
"is",
"None",
":",
"return",
"True",
"if",
"len",
"(",
"reference_bases",
")",
">",
"len",
"(",
"alt_allele",
")",
":",
"return",
"True",
"else",
":",
"return",
"False",
"else",
":",
"return",
"False"
] |
Return whether or not the INDEL is a deletion
|
[
"Return",
"whether",
"or",
"not",
"the",
"INDEL",
"is",
"a",
"deletion"
] |
5f5a3e1922be0e0d13af1874fad6eed5418ee761
|
https://github.com/Phelimb/ga4gh-mongo/blob/5f5a3e1922be0e0d13af1874fad6eed5418ee761/ga4ghmongo/schema/models/variants.py#L292-L308
|
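A worked classification sketch covering the three helpers above (is_indel, is_snp, is_deletion). They are plain module-level functions, so the import path simply mirrors the record's file path; alleles follow VCF conventions, with alternate_bases given as a list.

from ga4ghmongo.schema.models.variants import is_indel, is_snp, is_deletion

print(is_snp('A', ['T']))          # True: single-base substitution
print(is_snp('A', ['T', 'C']))     # True: every alternate is a single base
print(is_indel('AT', ['A']))       # True: allele lengths differ
print(is_deletion('AT', ['A']))    # True: the single alternate is shorter than the reference
print(is_deletion('A', ['AT']))    # False: that one is an insertion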
238,969
|
Phelimb/ga4gh-mongo
|
ga4ghmongo/schema/models/variants.py
|
Variant.overlapping
|
def overlapping(self, other):
"""Do these variants overlap in the reference"""
return (
other.start in self.ref_range) or (
self.start in other.ref_range)
|
python
|
def overlapping(self, other):
"""Do these variants overlap in the reference"""
return (
other.start in self.ref_range) or (
self.start in other.ref_range)
|
[
"def",
"overlapping",
"(",
"self",
",",
"other",
")",
":",
"return",
"(",
"other",
".",
"start",
"in",
"self",
".",
"ref_range",
")",
"or",
"(",
"self",
".",
"start",
"in",
"other",
".",
"ref_range",
")"
] |
Do these variants overlap in the reference
|
[
"Do",
"these",
"variants",
"overlap",
"in",
"the",
"reference"
] |
5f5a3e1922be0e0d13af1874fad6eed5418ee761
|
https://github.com/Phelimb/ga4gh-mongo/blob/5f5a3e1922be0e0d13af1874fad6eed5418ee761/ga4ghmongo/schema/models/variants.py#L468-L472
|
238,970
|
dmonroy/schema-migrations
|
schema_migrations/__init__.py
|
MigrationController.parse_pgurl
|
def parse_pgurl(self, url):
"""
Given a Postgres url, return a dict with keys for user, password,
host, port, and database.
"""
parsed = urlsplit(url)
return {
'user': parsed.username,
'password': parsed.password,
'database': parsed.path.lstrip('/'),
'host': parsed.hostname,
'port': parsed.port or 5432,
}
|
python
|
def parse_pgurl(self, url):
"""
Given a Postgres url, return a dict with keys for user, password,
host, port, and database.
"""
parsed = urlsplit(url)
return {
'user': parsed.username,
'password': parsed.password,
'database': parsed.path.lstrip('/'),
'host': parsed.hostname,
'port': parsed.port or 5432,
}
|
[
"def",
"parse_pgurl",
"(",
"self",
",",
"url",
")",
":",
"parsed",
"=",
"urlsplit",
"(",
"url",
")",
"return",
"{",
"'user'",
":",
"parsed",
".",
"username",
",",
"'password'",
":",
"parsed",
".",
"password",
",",
"'database'",
":",
"parsed",
".",
"path",
".",
"lstrip",
"(",
"'/'",
")",
",",
"'host'",
":",
"parsed",
".",
"hostname",
",",
"'port'",
":",
"parsed",
".",
"port",
"or",
"5432",
",",
"}"
] |
Given a Postgres url, return a dict with keys for user, password,
host, port, and database.
|
[
"Given",
"a",
"Postgres",
"url",
"return",
"a",
"dict",
"with",
"keys",
"for",
"user",
"password",
"host",
"port",
"and",
"database",
"."
] |
c70624818776029230bfe6438c121fcbcfde2f26
|
https://github.com/dmonroy/schema-migrations/blob/c70624818776029230bfe6438c121fcbcfde2f26/schema_migrations/__init__.py#L219-L232
|
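The method above never touches self, so its logic can be shown standalone; this sketch mirrors the body line for line, with urlsplit taken from urllib.parse (the record's own import is not visible in this excerpt).

from urllib.parse import urlsplit

def parse_pgurl(url):
    """Split a Postgres URL into its connection pieces (port defaults to 5432)."""
    parsed = urlsplit(url)
    return {
        'user': parsed.username,
        'password': parsed.password,
        'database': parsed.path.lstrip('/'),
        'host': parsed.hostname,
        'port': parsed.port or 5432,
    }

print(parse_pgurl('postgres://app:secret@db.example.com/appdb'))
# {'user': 'app', 'password': 'secret', 'database': 'appdb', 'host': 'db.example.com', 'port': 5432}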
238,971
|
roboogle/gtkmvc3
|
gtkmvco/examples/userman/ctrl.py
|
ExampleController.value_change
|
def value_change(self, model, name, info):
"""The model is changed and the view must be updated"""
msg = self.model.get_message(info.new)
self.view.set_msg(msg)
return
|
python
|
def value_change(self, model, name, info):
"""The model is changed and the view must be updated"""
msg = self.model.get_message(info.new)
self.view.set_msg(msg)
return
|
[
"def",
"value_change",
"(",
"self",
",",
"model",
",",
"name",
",",
"info",
")",
":",
"msg",
"=",
"self",
".",
"model",
".",
"get_message",
"(",
"info",
".",
"new",
")",
"self",
".",
"view",
".",
"set_msg",
"(",
"msg",
")",
"return"
] |
The model is changed and the view must be updated
|
[
"The",
"model",
"is",
"changed",
"and",
"the",
"view",
"must",
"be",
"updated"
] |
63405fd8d2056be26af49103b13a8d5e57fe4dff
|
https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/examples/userman/ctrl.py#L28-L33
|
238,972
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/addons/configer/configer.py
|
ConfigerWin.reset_current_row
|
def reset_current_row(self, *args, **kwargs):
"""Reset the selected rows value to its default value
:returns: None
:rtype: None
:raises: None
"""
i = self.configobj_treev.currentIndex()
m = self.configobj_treev.model()
m.restore_default(i)
|
python
|
def reset_current_row(self, *args, **kwargs):
"""Reset the selected rows value to its default value
:returns: None
:rtype: None
:raises: None
"""
i = self.configobj_treev.currentIndex()
m = self.configobj_treev.model()
m.restore_default(i)
|
[
"def",
"reset_current_row",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"i",
"=",
"self",
".",
"configobj_treev",
".",
"currentIndex",
"(",
")",
"m",
"=",
"self",
".",
"configobj_treev",
".",
"model",
"(",
")",
"m",
".",
"restore_default",
"(",
"i",
")"
] |
Reset the selected rows value to its default value
:returns: None
:rtype: None
:raises: None
|
[
"Reset",
"the",
"selected",
"rows",
"value",
"to",
"its",
"default",
"value"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/addons/configer/configer.py#L53-L62
|
238,973
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/addons/configer/configer.py
|
ConfigerWin.get_configs
|
def get_configs(self):
"""Load all config files and return the configobjs
:returns: a list of configobjs
:raises: None
It always loads the coreconfig.
Then it looks for all configs inside the PLUGIN_CONFIG_DIR.
It will find the corresponding spec of the plugins.
If there is a spec, but no ini, it will also be loaded!
"""
# all loaded configs are stored in confs
confs = []
# always load core config. it is not part of the plugin configs
try:
confs.append(iniconf.get_core_config())
except ConfigError, e:
log.error("Could not load Core config! Reason was: %s" % e)
# get config specs that lie in the plugin path
# we have to watch the order we gather the specs
# plugins can override each other, so can config specs
# it depends on the order of the JUKEBOX_PLUGIN_PATH
specs = {}
pathenv = os.environ.get('JUKEBOX_PLUGIN_PATH', '')
paths = pathenv.split(';')
paths.append(constants.BUILTIN_PLUGIN_PATH)
for p in reversed(paths):
if p:
files = self.find_inifiles(p)
for ini in files:
base = os.path.basename(ini)
specs[base] = ini
configs = {}
files = self.find_inifiles(PLUGIN_CONFIG_DIR)
for ini in files:
base = os.path.basename(ini)
configs[base] = ini
# find matching pairs of configs and specs
# and load them
for k in configs:
spec = specs.pop(k, None)
conf = configs[k]
try:
confs.append(iniconf.load_config(conf, spec))
except ConfigError, e:
log.error("Could not load config %s, Reason was: %s" % (k ,e))
# the remaining configspecs can be used to create
# empty configs
for k in specs:
spec = specs[k]
conf = os.path.join(PLUGIN_CONFIG_DIR, k)
try:
confs.append(iniconf.load_config(conf, spec))
except ConfigError, e:
log.error("Could not load config %s, Reason was: %s" % (k ,e))
return confs
|
python
|
def get_configs(self):
"""Load all config files and return the configobjs
:returns: a list of configobjs
:raises: None
It always loads the coreconfig.
Then it looks for all configs inside the PLUGIN_CONFIG_DIR.
It will find the corresponding spec of the plugins.
If there is a spec, but no ini, it will also be loaded!
"""
# all loaded configs are stored in confs
confs = []
# always load core config. it is not part of the plugin configs
try:
confs.append(iniconf.get_core_config())
except ConfigError, e:
log.error("Could not load Core config! Reason was: %s" % e)
# get config specs that lie in the plugin path
# we have to watch the order we gather the specs
# plugins can override each other, so can config specs
# it depends on the order of the JUKEBOX_PLUGIN_PATH
specs = {}
pathenv = os.environ.get('JUKEBOX_PLUGIN_PATH', '')
paths = pathenv.split(';')
paths.append(constants.BUILTIN_PLUGIN_PATH)
for p in reversed(paths):
if p:
files = self.find_inifiles(p)
for ini in files:
base = os.path.basename(ini)
specs[base] = ini
configs = {}
files = self.find_inifiles(PLUGIN_CONFIG_DIR)
for ini in files:
base = os.path.basename(ini)
configs[base] = ini
# find matching pairs of configs and specs
# and load them
for k in configs:
spec = specs.pop(k, None)
conf = configs[k]
try:
confs.append(iniconf.load_config(conf, spec))
except ConfigError, e:
log.error("Could not load config %s, Reason was: %s" % (k ,e))
# the remaining configspecs can be used to create
# empty configs
for k in specs:
spec = specs[k]
conf = os.path.join(PLUGIN_CONFIG_DIR, k)
try:
confs.append(iniconf.load_config(conf, spec))
except ConfigError, e:
log.error("Could not load config %s, Reason was: %s" % (k ,e))
return confs
|
[
"def",
"get_configs",
"(",
"self",
")",
":",
"# all loaded configs are stored in confs",
"confs",
"=",
"[",
"]",
"# always load core config. it is not part of the plugin configs",
"try",
":",
"confs",
".",
"append",
"(",
"iniconf",
".",
"get_core_config",
"(",
")",
")",
"except",
"ConfigError",
",",
"e",
":",
"log",
".",
"error",
"(",
"\"Could not load Core config! Reason was: %s\"",
"%",
"e",
")",
"# get config specs that lie in the plugin path",
"# we have to watch the order we gather the specs",
"# plugins can override each other, so can config specs",
"# it depends on the order of the JUKEBOX_PLUGIN_PATH",
"specs",
"=",
"{",
"}",
"pathenv",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'JUKEBOX_PLUGIN_PATH'",
",",
"''",
")",
"paths",
"=",
"pathenv",
".",
"split",
"(",
"';'",
")",
"paths",
".",
"append",
"(",
"constants",
".",
"BUILTIN_PLUGIN_PATH",
")",
"for",
"p",
"in",
"reversed",
"(",
"paths",
")",
":",
"if",
"p",
":",
"files",
"=",
"self",
".",
"find_inifiles",
"(",
"p",
")",
"for",
"ini",
"in",
"files",
":",
"base",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"ini",
")",
"specs",
"[",
"base",
"]",
"=",
"ini",
"configs",
"=",
"{",
"}",
"files",
"=",
"self",
".",
"find_inifiles",
"(",
"PLUGIN_CONFIG_DIR",
")",
"for",
"ini",
"in",
"files",
":",
"base",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"ini",
")",
"configs",
"[",
"base",
"]",
"=",
"ini",
"# find matching pairs of configs and specs",
"# and load them",
"for",
"k",
"in",
"configs",
":",
"spec",
"=",
"specs",
".",
"pop",
"(",
"k",
",",
"None",
")",
"conf",
"=",
"configs",
"[",
"k",
"]",
"try",
":",
"confs",
".",
"append",
"(",
"iniconf",
".",
"load_config",
"(",
"conf",
",",
"spec",
")",
")",
"except",
"ConfigError",
",",
"e",
":",
"log",
".",
"error",
"(",
"\"Could not load config %s, Reason was: %s\"",
"%",
"(",
"k",
",",
"e",
")",
")",
"# the remaining configspecs can be used to create",
"# empty configs",
"for",
"k",
"in",
"specs",
":",
"spec",
"=",
"specs",
"[",
"k",
"]",
"conf",
"=",
"os",
".",
"path",
".",
"join",
"(",
"PLUGIN_CONFIG_DIR",
",",
"k",
")",
"try",
":",
"confs",
".",
"append",
"(",
"iniconf",
".",
"load_config",
"(",
"conf",
",",
"spec",
")",
")",
"except",
"ConfigError",
",",
"e",
":",
"log",
".",
"error",
"(",
"\"Could not load config %s, Reason was: %s\"",
"%",
"(",
"k",
",",
"e",
")",
")",
"return",
"confs"
] |
Load all config files and return the configobjs
:returns: a list of configobjs
:raises: None
It always loads the coreconfig.
Then it looks for all configs inside the PLUGIN_CONFIG_DIR.
It will find the corresponding spec of the plugins.
If there is a spec, but no ini, it will also be loaded!
|
[
"Load",
"all",
"config",
"files",
"and",
"return",
"the",
"configobjs"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/addons/configer/configer.py#L64-L124
|
238,974
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/addons/configer/configer.py
|
ConfigerWin.set_inifile
|
def set_inifile(self, current, previous):
"""Set the configobj to the current index of the files_lv
This is a slot for the currentChanged signal
:param current: the modelindex of a inifilesmodel that should be set for the configobj_treev
:type current: QModelIndex
:param previous: the previous selected index
:type previous: QModelIndex
:returns: None
:raises: None
"""
c = self.inimodel.data(current, self.inimodel.confobjRole)
self.confobjmodel = ConfigObjModel(c)
self.configobj_treev.setModel(self.confobjmodel)
self.configobj_treev.expandAll()
self.confobjmodel.dataChanged.connect(self.iniedited)
|
python
|
def set_inifile(self, current, previous):
"""Set the configobj to the current index of the files_lv
This is a slot for the currentChanged signal
:param current: the modelindex of a inifilesmodel that should be set for the configobj_treev
:type current: QModelIndex
:param previous: the previous selected index
:type previous: QModelIndex
:returns: None
:raises: None
"""
c = self.inimodel.data(current, self.inimodel.confobjRole)
self.confobjmodel = ConfigObjModel(c)
self.configobj_treev.setModel(self.confobjmodel)
self.configobj_treev.expandAll()
self.confobjmodel.dataChanged.connect(self.iniedited)
|
[
"def",
"set_inifile",
"(",
"self",
",",
"current",
",",
"previous",
")",
":",
"c",
"=",
"self",
".",
"inimodel",
".",
"data",
"(",
"current",
",",
"self",
".",
"inimodel",
".",
"confobjRole",
")",
"self",
".",
"confobjmodel",
"=",
"ConfigObjModel",
"(",
"c",
")",
"self",
".",
"configobj_treev",
".",
"setModel",
"(",
"self",
".",
"confobjmodel",
")",
"self",
".",
"configobj_treev",
".",
"expandAll",
"(",
")",
"self",
".",
"confobjmodel",
".",
"dataChanged",
".",
"connect",
"(",
"self",
".",
"iniedited",
")"
] |
Set the configobj to the current index of the files_lv
This is a slot for the currentChanged signal
:param current: the modelindex of a inifilesmodel that should be set for the configobj_treev
:type current: QModelIndex
:param previous: the previous selected index
:type previous: QModelIndex
:returns: None
:raises: None
|
[
"Set",
"the",
"configobj",
"to",
"the",
"current",
"index",
"of",
"the",
"files_lv"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/addons/configer/configer.py#L141-L157
|
238,975
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/addons/configer/configer.py
|
ConfigerWin.iniedited
|
def iniedited(self, *args, **kwargs):
"""Set the current index of inimodel to modified
:returns: None
:rtype: None
:raises: None
"""
self.inimodel.set_index_edited(self.files_lv.currentIndex(), True)
|
python
|
def iniedited(self, *args, **kwargs):
"""Set the current index of inimodel to modified
:returns: None
:rtype: None
:raises: None
"""
self.inimodel.set_index_edited(self.files_lv.currentIndex(), True)
|
[
"def",
"iniedited",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"inimodel",
".",
"set_index_edited",
"(",
"self",
".",
"files_lv",
".",
"currentIndex",
"(",
")",
",",
"True",
")"
] |
Set the current index of inimodel to modified
:returns: None
:rtype: None
:raises: None
|
[
"Set",
"the",
"current",
"index",
"of",
"inimodel",
"to",
"modified"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/addons/configer/configer.py#L159-L166
|
238,976
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/addons/configer/configer.py
|
ConfigerWin.closeEvent
|
def closeEvent(self, event):
"""Handles closing of the window. If configs were edited, ask user to continue.
:param event: the close event
:type event: QCloseEvent
:returns: None
:rtype: None
:raises: None
"""
if self.inimodel.get_edited():
r = self.doc_modified_prompt()
if r == QtGui.QMessageBox.Yes:
event.accept()
else:
event.ignore()
else:
event.accept()
|
python
|
def closeEvent(self, event):
"""Handles closing of the window. If configs were edited, ask user to continue.
:param event: the close event
:type event: QCloseEvent
:returns: None
:rtype: None
:raises: None
"""
if self.inimodel.get_edited():
r = self.doc_modified_prompt()
if r == QtGui.QMessageBox.Yes:
event.accept()
else:
event.ignore()
else:
event.accept()
|
[
"def",
"closeEvent",
"(",
"self",
",",
"event",
")",
":",
"if",
"self",
".",
"inimodel",
".",
"get_edited",
"(",
")",
":",
"r",
"=",
"self",
".",
"doc_modified_prompt",
"(",
")",
"if",
"r",
"==",
"QtGui",
".",
"QMessageBox",
".",
"Yes",
":",
"event",
".",
"accept",
"(",
")",
"else",
":",
"event",
".",
"ignore",
"(",
")",
"else",
":",
"event",
".",
"accept",
"(",
")"
] |
Handles closing of the window. If configs were edited, ask user to continue.
:param event: the close event
:type event: QCloseEvent
:returns: None
:rtype: None
:raises: None
|
[
"Handles",
"closing",
"of",
"the",
"window",
".",
"If",
"configs",
"were",
"edited",
"ask",
"user",
"to",
"continue",
"."
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/addons/configer/configer.py#L168-L184
|
238,977
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/addons/configer/configer.py
|
ConfigerWin.doc_modified_prompt
|
def doc_modified_prompt(self, ):
"""Create a message box, that asks the user to continue although files have been modified
:returns: value of the standard button of qmessagebox that has been pressed. Either Yes or Cancel.
:rtype: QtGui.QMessageBox.StandardButton
:raises: None
"""
msgbox = QtGui.QMessageBox()
msgbox.setWindowTitle("Discard changes?")
msgbox.setText("Documents have been modified.")
msgbox.setInformativeText("Do you really want to exit? Changes will be lost!")
msgbox.setStandardButtons(msgbox.Yes | msgbox.Cancel)
msgbox.setDefaultButton(msgbox.Cancel)
msgbox.exec_()
return msgbox.result()
|
python
|
def doc_modified_prompt(self, ):
"""Create a message box, that asks the user to continue although files have been modified
:returns: value of the standard button of qmessagebox that has been pressed. Either Yes or Cancel.
:rtype: QtGui.QMessageBox.StandardButton
:raises: None
"""
msgbox = QtGui.QMessageBox()
msgbox.setWindowTitle("Discard changes?")
msgbox.setText("Documents have been modified.")
msgbox.setInformativeText("Do you really want to exit? Changes will be lost!")
msgbox.setStandardButtons(msgbox.Yes | msgbox.Cancel)
msgbox.setDefaultButton(msgbox.Cancel)
msgbox.exec_()
return msgbox.result()
|
[
"def",
"doc_modified_prompt",
"(",
"self",
",",
")",
":",
"msgbox",
"=",
"QtGui",
".",
"QMessageBox",
"(",
")",
"msgbox",
".",
"setWindowTitle",
"(",
"\"Discard changes?\"",
")",
"msgbox",
".",
"setText",
"(",
"\"Documents have been modified.\"",
")",
"msgbox",
".",
"setInformativeText",
"(",
"\"Do you really want to exit? Changes will be lost!\"",
")",
"msgbox",
".",
"setStandardButtons",
"(",
"msgbox",
".",
"Yes",
"|",
"msgbox",
".",
"Cancel",
")",
"msgbox",
".",
"setDefaultButton",
"(",
"msgbox",
".",
"Cancel",
")",
"msgbox",
".",
"exec_",
"(",
")",
"return",
"msgbox",
".",
"result",
"(",
")"
] |
Create a message box, that asks the user to continue although files have been modified
:returns: value of the standard button of qmessagebox that has been pressed. Either Yes or Cancel.
:rtype: QtGui.QMessageBox.StandardButton
:raises: None
|
[
"Create",
"a",
"message",
"box",
"that",
"asks",
"the",
"user",
"to",
"continue",
"although",
"files",
"have",
"been",
"modified"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/addons/configer/configer.py#L186-L200
|
238,978
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/addons/configer/configer.py
|
ConfigerWin.save_current_config
|
def save_current_config(self, ):
"""Saves the currently displayed config
:returns: None
:rtype: None
:raises: None
This resets the edited status of the file to False.
Also asks the user to continue if config is invalid.
"""
# check if all configs validate correctly
btn = None
for row in range(self.inimodel.rowCount()):
i = self.inimodel.index(row, 0)
r = self.inimodel.validate(i)
if r is not True:
btn = self.invalid_prompt()
break
if btn == QtGui.QMessageBox.Cancel:
return
current = self.files_lv.currentIndex()
c = self.inimodel.data(current, self.inimodel.confobjRole)
c.write()
self.inimodel.set_index_edited(current, False)
|
python
|
def save_current_config(self, ):
"""Saves the currently displayed config
:returns: None
:rtype: None
:raises: None
This resets the edited status of the file to False.
Also asks the user to continue if config is invalid.
"""
# check if all configs validate correctly
btn = None
for row in range(self.inimodel.rowCount()):
i = self.inimodel.index(row, 0)
r = self.inimodel.validate(i)
if r is not True:
btn = self.invalid_prompt()
break
if btn == QtGui.QMessageBox.Cancel:
return
current = self.files_lv.currentIndex()
c = self.inimodel.data(current, self.inimodel.confobjRole)
c.write()
self.inimodel.set_index_edited(current, False)
|
[
"def",
"save_current_config",
"(",
"self",
",",
")",
":",
"# check if all configs validate correctly",
"btn",
"=",
"None",
"for",
"row",
"in",
"range",
"(",
"self",
".",
"inimodel",
".",
"rowCount",
"(",
")",
")",
":",
"i",
"=",
"self",
".",
"inimodel",
".",
"index",
"(",
"row",
",",
"0",
")",
"r",
"=",
"self",
".",
"inimodel",
".",
"validate",
"(",
"i",
")",
"if",
"r",
"is",
"not",
"True",
":",
"btn",
"=",
"self",
".",
"invalid_prompt",
"(",
")",
"break",
"if",
"btn",
"==",
"QtGui",
".",
"QMessageBox",
".",
"Cancel",
":",
"return",
"current",
"=",
"self",
".",
"files_lv",
".",
"currentIndex",
"(",
")",
"c",
"=",
"self",
".",
"inimodel",
".",
"data",
"(",
"current",
",",
"self",
".",
"inimodel",
".",
"confobjRole",
")",
"c",
".",
"write",
"(",
")",
"self",
".",
"inimodel",
".",
"set_index_edited",
"(",
"current",
",",
"False",
")"
] |
Saves the currently displayed config
:returns: None
:rtype: None
:raises: None
This resets the edited status of the file to False.
Also asks the user to continue if config is invalid.
|
[
"Saves",
"the",
"currently",
"displayed",
"config"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/addons/configer/configer.py#L218-L243
|
238,979
|
mikicz/arca
|
arca/backend/venv.py
|
VenvBackend.get_virtualenv_path
|
def get_virtualenv_path(self, requirements_option: RequirementsOptions, requirements_hash: Optional[str]) -> Path:
"""
Returns the path to the virtualenv the current state of the repository.
"""
if requirements_option == RequirementsOptions.no_requirements:
venv_name = "no_requirements"
else:
venv_name = requirements_hash
return Path(self._arca.base_dir) / "venvs" / venv_name
|
python
|
def get_virtualenv_path(self, requirements_option: RequirementsOptions, requirements_hash: Optional[str]) -> Path:
"""
Returns the path to the virtualenv the current state of the repository.
"""
if requirements_option == RequirementsOptions.no_requirements:
venv_name = "no_requirements"
else:
venv_name = requirements_hash
return Path(self._arca.base_dir) / "venvs" / venv_name
|
[
"def",
"get_virtualenv_path",
"(",
"self",
",",
"requirements_option",
":",
"RequirementsOptions",
",",
"requirements_hash",
":",
"Optional",
"[",
"str",
"]",
")",
"->",
"Path",
":",
"if",
"requirements_option",
"==",
"RequirementsOptions",
".",
"no_requirements",
":",
"venv_name",
"=",
"\"no_requirements\"",
"else",
":",
"venv_name",
"=",
"requirements_hash",
"return",
"Path",
"(",
"self",
".",
"_arca",
".",
"base_dir",
")",
"/",
"\"venvs\"",
"/",
"venv_name"
] |
Returns the path to the virtualenv the current state of the repository.
|
[
"Returns",
"the",
"path",
"to",
"the",
"virtualenv",
"the",
"current",
"state",
"of",
"the",
"repository",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/venv.py#L25-L34
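The venv naming scheme in get_virtualenv_path is easy to reproduce in isolation. A minimal sketch, using a stand-in RequirementsOptions enum and a plain base_dir string instead of the real VenvBackend/Arca wiring (those names and values are illustrative assumptions, not arca's actual API surface; POSIX paths shown):

from enum import Enum, auto
from pathlib import Path
from typing import Optional

class RequirementsOptions(Enum):  # stand-in for arca's enum, illustration only
    no_requirements = auto()
    requirements_txt = auto()

def virtualenv_path(base_dir: str, option: RequirementsOptions,
                    requirements_hash: Optional[str]) -> Path:
    # Mirrors the record above: one shared venv when there are no requirements,
    # otherwise one venv per requirements hash.
    name = "no_requirements" if option is RequirementsOptions.no_requirements else requirements_hash
    return Path(base_dir) / "venvs" / name

print(virtualenv_path(".arca", RequirementsOptions.no_requirements, None))      # .arca/venvs/no_requirements
print(virtualenv_path(".arca", RequirementsOptions.requirements_txt, "abc123")) # .arca/venvs/abc123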
|
238,980
|
mikicz/arca
|
arca/backend/venv.py
|
VenvBackend.get_or_create_environment
|
def get_or_create_environment(self, repo: str, branch: str, git_repo: Repo, repo_path: Path) -> str:
""" Handles the requirements in the target repository, returns a path to a executable of the virtualenv.
"""
return str(self.get_or_create_venv(repo_path).resolve() / "bin" / "python")
|
python
|
def get_or_create_environment(self, repo: str, branch: str, git_repo: Repo, repo_path: Path) -> str:
""" Handles the requirements in the target repository, returns a path to a executable of the virtualenv.
"""
return str(self.get_or_create_venv(repo_path).resolve() / "bin" / "python")
|
[
"def",
"get_or_create_environment",
"(",
"self",
",",
"repo",
":",
"str",
",",
"branch",
":",
"str",
",",
"git_repo",
":",
"Repo",
",",
"repo_path",
":",
"Path",
")",
"->",
"str",
":",
"return",
"str",
"(",
"self",
".",
"get_or_create_venv",
"(",
"repo_path",
")",
".",
"resolve",
"(",
")",
"/",
"\"bin\"",
"/",
"\"python\"",
")"
] |
Handles the requirements in the target repository, returns a path to a executable of the virtualenv.
|
[
"Handles",
"the",
"requirements",
"in",
"the",
"target",
"repository",
"returns",
"a",
"path",
"to",
"a",
"executable",
"of",
"the",
"virtualenv",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/venv.py#L117-L120
|
238,981
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
annotate_mapper
|
def annotate_mapper(**decargs):
"""Add input and output watermarks to processed events."""
def decorator(func):
"""Annotate events with entry and/or exit timestamps."""
def wrapper(event, *args, **kwargs):
"""Add enter and exit annotations to the processed event."""
funcname = ":".join([func.__module__, func.__name__])
enter_ts = time.time()
out = func(event, *args, **kwargs)
enter_key = funcname + "|enter"
out = annotate_event(out, enter_key, ts=enter_ts, **decargs)
exit_key = funcname + "|exit"
out = annotate_event(out, exit_key, ts=time.time(), **decargs)
return out
return wrapper
return decorator
|
python
|
def annotate_mapper(**decargs):
"""Add input and output watermarks to processed events."""
def decorator(func):
"""Annotate events with entry and/or exit timestamps."""
def wrapper(event, *args, **kwargs):
"""Add enter and exit annotations to the processed event."""
funcname = ":".join([func.__module__, func.__name__])
enter_ts = time.time()
out = func(event, *args, **kwargs)
enter_key = funcname + "|enter"
out = annotate_event(out, enter_key, ts=enter_ts, **decargs)
exit_key = funcname + "|exit"
out = annotate_event(out, exit_key, ts=time.time(), **decargs)
return out
return wrapper
return decorator
|
[
"def",
"annotate_mapper",
"(",
"*",
"*",
"decargs",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"\"\"\"Annotate events with entry and/or exit timestamps.\"\"\"",
"def",
"wrapper",
"(",
"event",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Add enter and exit annotations to the processed event.\"\"\"",
"funcname",
"=",
"\":\"",
".",
"join",
"(",
"[",
"func",
".",
"__module__",
",",
"func",
".",
"__name__",
"]",
")",
"enter_ts",
"=",
"time",
".",
"time",
"(",
")",
"out",
"=",
"func",
"(",
"event",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"enter_key",
"=",
"funcname",
"+",
"\"|enter\"",
"out",
"=",
"annotate_event",
"(",
"out",
",",
"enter_key",
",",
"ts",
"=",
"enter_ts",
",",
"*",
"*",
"decargs",
")",
"exit_key",
"=",
"funcname",
"+",
"\"|exit\"",
"out",
"=",
"annotate_event",
"(",
"out",
",",
"exit_key",
",",
"ts",
"=",
"time",
".",
"time",
"(",
")",
",",
"*",
"*",
"decargs",
")",
"return",
"out",
"return",
"wrapper",
"return",
"decorator"
] |
Add input and output watermarks to processed events.
|
[
"Add",
"input",
"and",
"output",
"watermarks",
"to",
"processed",
"events",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L63-L79
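A hedged usage sketch for the annotate_mapper decorator, assuming the humilis-lambdautils package whose records appear here is installed and importable as lambdautils.utils, and that events are plain dicts (the shape the annotation helpers below expect):

from lambdautils.utils import annotate_mapper

@annotate_mapper()
def mapper(event):
    # A trivial mapper: the decorator only requires that it returns the event dict.
    event["seen"] = True
    return event

out = mapper({})
# The wrapper timestamps the call with two annotations keyed
# "<module>:mapper|enter" and "<module>:mapper|exit", each carrying a "ts" value.
for ann in out["_humilis"]["annotation"]:
    print(ann["key"], ann["ts"])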
|
238,982
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
annotate_filter
|
def annotate_filter(**decargs):
"""Add input and output watermarks to filtered events."""
def decorator(func):
"""Annotate events with entry and/or exit timestamps."""
def wrapper(event, *args, **kwargs):
"""Add enter and exit annotations to the processed event."""
funcname = ":".join([func.__module__, func.__name__])
enter_key = funcname + "|enter"
annotate_event(event, enter_key, **decargs)
out = func(event, *args, **kwargs)
exit_key = funcname + "|exit"
annotate_event(event, exit_key, **decargs)
return out
return wrapper
return decorator
|
python
|
def annotate_filter(**decargs):
"""Add input and output watermarks to filtered events."""
def decorator(func):
"""Annotate events with entry and/or exit timestamps."""
def wrapper(event, *args, **kwargs):
"""Add enter and exit annotations to the processed event."""
funcname = ":".join([func.__module__, func.__name__])
enter_key = funcname + "|enter"
annotate_event(event, enter_key, **decargs)
out = func(event, *args, **kwargs)
exit_key = funcname + "|exit"
annotate_event(event, exit_key, **decargs)
return out
return wrapper
return decorator
|
[
"def",
"annotate_filter",
"(",
"*",
"*",
"decargs",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"\"\"\"Annotate events with entry and/or exit timestamps.\"\"\"",
"def",
"wrapper",
"(",
"event",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Add enter and exit annotations to the processed event.\"\"\"",
"funcname",
"=",
"\":\"",
".",
"join",
"(",
"[",
"func",
".",
"__module__",
",",
"func",
".",
"__name__",
"]",
")",
"enter_key",
"=",
"funcname",
"+",
"\"|enter\"",
"annotate_event",
"(",
"event",
",",
"enter_key",
",",
"*",
"*",
"decargs",
")",
"out",
"=",
"func",
"(",
"event",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"exit_key",
"=",
"funcname",
"+",
"\"|exit\"",
"annotate_event",
"(",
"event",
",",
"exit_key",
",",
"*",
"*",
"decargs",
")",
"return",
"out",
"return",
"wrapper",
"return",
"decorator"
] |
Add input and output watermarks to filtered events.
|
[
"Add",
"input",
"and",
"output",
"watermarks",
"to",
"filtered",
"events",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L82-L97
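For contrast with annotate_mapper, a hedged sketch of annotate_filter under the same lambdautils.utils import assumption: the filter decorator annotates the incoming event dict in place and passes the function's boolean result through untouched:

from lambdautils.utils import annotate_filter

@annotate_filter()
def keep(event):
    return bool(event.get("payload"))

ev = {"payload": 1}
print(keep(ev))                           # True: the filter result is unchanged
print(len(ev["_humilis"]["annotation"]))  # 2: the enter and exit annotations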
|
238,983
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
_error_repr
|
def _error_repr(error):
"""A compact unique representation of an error."""
error_repr = repr(error)
if len(error_repr) > 200:
error_repr = hash(type(error))
return error_repr
|
python
|
def _error_repr(error):
"""A compact unique representation of an error."""
error_repr = repr(error)
if len(error_repr) > 200:
error_repr = hash(type(error))
return error_repr
|
[
"def",
"_error_repr",
"(",
"error",
")",
":",
"error_repr",
"=",
"repr",
"(",
"error",
")",
"if",
"len",
"(",
"error_repr",
")",
">",
"200",
":",
"error_repr",
"=",
"hash",
"(",
"type",
"(",
"error",
")",
")",
"return",
"error_repr"
] |
A compact unique representation of an error.
|
[
"A",
"compact",
"unique",
"representation",
"of",
"an",
"error",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L105-L110
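_error_repr is small enough to demonstrate standalone; the helper below is copied from the record above, so the snippet runs without installing anything:

def _error_repr(error):
    error_repr = repr(error)
    if len(error_repr) > 200:
        error_repr = hash(type(error))
    return error_repr

print(_error_repr(ValueError("boom")))     # short repr is kept: "ValueError('boom')"
print(_error_repr(ValueError("x" * 500)))  # over 200 chars: falls back to hash(type), an int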
|
238,984
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
annotation_has_expired
|
def annotation_has_expired(event, key, timeout):
"""Check if an event error has expired."""
anns = get_annotations(event, key)
if anns:
return (time.time() - anns[0]["ts"]) > timeout
else:
return False
|
python
|
def annotation_has_expired(event, key, timeout):
"""Check if an event error has expired."""
anns = get_annotations(event, key)
if anns:
return (time.time() - anns[0]["ts"]) > timeout
else:
return False
|
[
"def",
"annotation_has_expired",
"(",
"event",
",",
"key",
",",
"timeout",
")",
":",
"anns",
"=",
"get_annotations",
"(",
"event",
",",
"key",
")",
"if",
"anns",
":",
"return",
"(",
"time",
".",
"time",
"(",
")",
"-",
"anns",
"[",
"0",
"]",
"[",
"\"ts\"",
"]",
")",
">",
"timeout",
"else",
":",
"return",
"False"
] |
Check if an event error has expired.
|
[
"Check",
"if",
"an",
"event",
"error",
"has",
"expired",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L113-L119
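A hedged sketch for annotation_has_expired, assuming lambdautils.utils is importable and using the plain-dict "_humilis" event shape seen throughout these records:

import time
from lambdautils.utils import annotation_has_expired

event = {"_humilis": {"annotation": [{"key": "job|enter", "ts": time.time() - 120}]}}
print(annotation_has_expired(event, "job|enter", timeout=60))    # True: 120s old > 60s timeout
print(annotation_has_expired(event, "job|enter", timeout=600))   # False: still within the timeout
print(annotation_has_expired(event, "missing|key", timeout=60))  # False: no matching annotation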
|
238,985
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
replace_event_annotations
|
def replace_event_annotations(event, newanns):
"""Replace event annotations with the provided ones."""
_humilis = event.get("_humilis", {})
if not _humilis:
event["_humilis"] = {"annotation": newanns}
else:
event["_humilis"]["annotation"] = newanns
|
python
|
def replace_event_annotations(event, newanns):
"""Replace event annotations with the provided ones."""
_humilis = event.get("_humilis", {})
if not _humilis:
event["_humilis"] = {"annotation": newanns}
else:
event["_humilis"]["annotation"] = newanns
|
[
"def",
"replace_event_annotations",
"(",
"event",
",",
"newanns",
")",
":",
"_humilis",
"=",
"event",
".",
"get",
"(",
"\"_humilis\"",
",",
"{",
"}",
")",
"if",
"not",
"_humilis",
":",
"event",
"[",
"\"_humilis\"",
"]",
"=",
"{",
"\"annotation\"",
":",
"newanns",
"}",
"else",
":",
"event",
"[",
"\"_humilis\"",
"]",
"[",
"\"annotation\"",
"]",
"=",
"newanns"
] |
Replace event annotations with the provided ones.
|
[
"Replace",
"event",
"annotations",
"with",
"the",
"provided",
"ones",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L127-L133
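replace_event_annotations only touches a dict, so it can be shown self-contained; the helper below is copied from the record above:

def replace_event_annotations(event, newanns):
    _humilis = event.get("_humilis", {})
    if not _humilis:
        event["_humilis"] = {"annotation": newanns}
    else:
        event["_humilis"]["annotation"] = newanns

ev = {}
replace_event_annotations(ev, [{"key": "k", "ts": 0}])
print(ev)  # {'_humilis': {'annotation': [{'key': 'k', 'ts': 0}]}}
replace_event_annotations(ev, [])
print(ev)  # {'_humilis': {'annotation': []}}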
|
238,986
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
annotate_event
|
def annotate_event(ev, key, ts=None, namespace=None, **kwargs):
"""Add an annotation to an event."""
ann = {}
if ts is None:
ts = time.time()
ann["ts"] = ts
ann["key"] = key
if namespace is None and "HUMILIS_ENVIRONMENT" in os.environ:
namespace = "{}:{}:{}".format(
os.environ.get("HUMILIS_ENVIRONMENT"),
os.environ.get("HUMILIS_LAYER"),
os.environ.get("HUMILIS_STAGE"))
if namespace is not None:
ann["namespace"] = namespace
ann.update(kwargs)
_humilis = ev.get("_humilis", {})
if not _humilis:
ev["_humilis"] = {"annotation": [ann]}
else:
ev["_humilis"]["annotation"] = _humilis.get("annotation", [])
# Clean up previous annotations with the same key
delete_annotations(ev, key)
ev["_humilis"]["annotation"].append(ann)
return ev
|
python
|
def annotate_event(ev, key, ts=None, namespace=None, **kwargs):
"""Add an annotation to an event."""
ann = {}
if ts is None:
ts = time.time()
ann["ts"] = ts
ann["key"] = key
if namespace is None and "HUMILIS_ENVIRONMENT" in os.environ:
namespace = "{}:{}:{}".format(
os.environ.get("HUMILIS_ENVIRONMENT"),
os.environ.get("HUMILIS_LAYER"),
os.environ.get("HUMILIS_STAGE"))
if namespace is not None:
ann["namespace"] = namespace
ann.update(kwargs)
_humilis = ev.get("_humilis", {})
if not _humilis:
ev["_humilis"] = {"annotation": [ann]}
else:
ev["_humilis"]["annotation"] = _humilis.get("annotation", [])
# Clean up previous annotations with the same key
delete_annotations(ev, key)
ev["_humilis"]["annotation"].append(ann)
return ev
|
[
"def",
"annotate_event",
"(",
"ev",
",",
"key",
",",
"ts",
"=",
"None",
",",
"namespace",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"ann",
"=",
"{",
"}",
"if",
"ts",
"is",
"None",
":",
"ts",
"=",
"time",
".",
"time",
"(",
")",
"ann",
"[",
"\"ts\"",
"]",
"=",
"ts",
"ann",
"[",
"\"key\"",
"]",
"=",
"key",
"if",
"namespace",
"is",
"None",
"and",
"\"HUMILIS_ENVIRONMENT\"",
"in",
"os",
".",
"environ",
":",
"namespace",
"=",
"\"{}:{}:{}\"",
".",
"format",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"\"HUMILIS_ENVIRONMENT\"",
")",
",",
"os",
".",
"environ",
".",
"get",
"(",
"\"HUMILIS_LAYER\"",
")",
",",
"os",
".",
"environ",
".",
"get",
"(",
"\"HUMILIS_STAGE\"",
")",
")",
"if",
"namespace",
"is",
"not",
"None",
":",
"ann",
"[",
"\"namespace\"",
"]",
"=",
"namespace",
"ann",
".",
"update",
"(",
"kwargs",
")",
"_humilis",
"=",
"ev",
".",
"get",
"(",
"\"_humilis\"",
",",
"{",
"}",
")",
"if",
"not",
"_humilis",
":",
"ev",
"[",
"\"_humilis\"",
"]",
"=",
"{",
"\"annotation\"",
":",
"[",
"ann",
"]",
"}",
"else",
":",
"ev",
"[",
"\"_humilis\"",
"]",
"[",
"\"annotation\"",
"]",
"=",
"_humilis",
".",
"get",
"(",
"\"annotation\"",
",",
"[",
"]",
")",
"# Clean up previous annotations with the same key",
"delete_annotations",
"(",
"ev",
",",
"key",
")",
"ev",
"[",
"\"_humilis\"",
"]",
"[",
"\"annotation\"",
"]",
".",
"append",
"(",
"ann",
")",
"return",
"ev"
] |
Add an annotation to an event.
|
[
"Add",
"an",
"annotation",
"to",
"an",
"event",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L136-L161
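A hedged usage sketch for annotate_event, assuming lambdautils.utils is importable and that none of the HUMILIS_* environment variables are set (so no namespace field is attached to the annotation):

from lambdautils.utils import annotate_event

ev = annotate_event({}, "my-pipeline|enter", stage="ingest")
ann = ev["_humilis"]["annotation"][0]
print(sorted(ann))  # ['key', 'stage', 'ts'] -- extra keyword arguments are merged in

# Annotating again with the same key replaces the earlier entry instead of
# appending a duplicate, because delete_annotations() is called first.
ev = annotate_event(ev, "my-pipeline|enter", stage="ingest-retry")
print(len(ev["_humilis"]["annotation"]))  # 1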
|
238,987
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
get_annotations
|
def get_annotations(event, key, namespace=None, matchfunc=None):
"""Produce the list of annotations for a given key."""
if matchfunc is None:
matchfunc = _is_equal
if isinstance(key, Exception):
key = _error_repr(key)
return [ann for ann in event.get("_humilis", {}).get("annotation", [])
if (matchfunc(key, ann["key"]) and
(namespace is None or ann.get("namespace") == namespace))]
|
python
|
def get_annotations(event, key, namespace=None, matchfunc=None):
"""Produce the list of annotations for a given key."""
if matchfunc is None:
matchfunc = _is_equal
if isinstance(key, Exception):
key = _error_repr(key)
return [ann for ann in event.get("_humilis", {}).get("annotation", [])
if (matchfunc(key, ann["key"]) and
(namespace is None or ann.get("namespace") == namespace))]
|
[
"def",
"get_annotations",
"(",
"event",
",",
"key",
",",
"namespace",
"=",
"None",
",",
"matchfunc",
"=",
"None",
")",
":",
"if",
"matchfunc",
"is",
"None",
":",
"matchfunc",
"=",
"_is_equal",
"if",
"isinstance",
"(",
"key",
",",
"Exception",
")",
":",
"key",
"=",
"_error_repr",
"(",
"key",
")",
"return",
"[",
"ann",
"for",
"ann",
"in",
"event",
".",
"get",
"(",
"\"_humilis\"",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"annotation\"",
",",
"[",
"]",
")",
"if",
"(",
"matchfunc",
"(",
"key",
",",
"ann",
"[",
"\"key\"",
"]",
")",
"and",
"(",
"namespace",
"is",
"None",
"or",
"ann",
".",
"get",
"(",
"\"namespace\"",
")",
"==",
"namespace",
")",
")",
"]"
] |
Produce the list of annotations for a given key.
|
[
"Produce",
"the",
"list",
"of",
"annotations",
"for",
"a",
"given",
"key",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L176-L184
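A hedged sketch for get_annotations under the same lambdautils.utils import assumption, showing the default key match and the optional namespace filter:

from lambdautils.utils import get_annotations

event = {"_humilis": {"annotation": [
    {"key": "f|enter", "ts": 1.0, "namespace": "dev:layer:stage"},
    {"key": "f|exit", "ts": 2.0, "namespace": "dev:layer:stage"},
]}}
print(len(get_annotations(event, "f|enter")))                                 # 1
print(len(get_annotations(event, "f|enter", namespace="prod:layer:stage")))   # 0: namespace mismatch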
|
238,988
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
delete_annotations
|
def delete_annotations(event, key, namespace=None, matchfunc=None):
"""Delete all event annotations with a matching key."""
if matchfunc is None:
matchfunc = _is_equal
if isinstance(key, Exception):
key = _error_repr(key)
newanns = [ann for ann in event.get("_humilis", {}).get("annotation", [])
if not (matchfunc(key, ann["key"]) and
(namespace is None or ann.get("namespace") == namespace))]
replace_event_annotations(event, newanns)
|
python
|
def delete_annotations(event, key, namespace=None, matchfunc=None):
"""Delete all event annotations with a matching key."""
if matchfunc is None:
matchfunc = _is_equal
if isinstance(key, Exception):
key = _error_repr(key)
newanns = [ann for ann in event.get("_humilis", {}).get("annotation", [])
if not (matchfunc(key, ann["key"]) and
(namespace is None or ann.get("namespace") == namespace))]
replace_event_annotations(event, newanns)
|
[
"def",
"delete_annotations",
"(",
"event",
",",
"key",
",",
"namespace",
"=",
"None",
",",
"matchfunc",
"=",
"None",
")",
":",
"if",
"matchfunc",
"is",
"None",
":",
"matchfunc",
"=",
"_is_equal",
"if",
"isinstance",
"(",
"key",
",",
"Exception",
")",
":",
"key",
"=",
"_error_repr",
"(",
"key",
")",
"newanns",
"=",
"[",
"ann",
"for",
"ann",
"in",
"event",
".",
"get",
"(",
"\"_humilis\"",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"annotation\"",
",",
"[",
"]",
")",
"if",
"not",
"(",
"matchfunc",
"(",
"key",
",",
"ann",
"[",
"\"key\"",
"]",
")",
"and",
"(",
"namespace",
"is",
"None",
"or",
"ann",
".",
"get",
"(",
"\"namespace\"",
")",
"==",
"namespace",
")",
")",
"]",
"replace_event_annotations",
"(",
"event",
",",
"newanns",
")"
] |
Delete all event annotations with a matching key.
|
[
"Delete",
"all",
"event",
"annotations",
"with",
"a",
"matching",
"key",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L187-L196
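And the matching removal helper, under the same import assumption; delete_annotations mutates the event in place via replace_event_annotations:

from lambdautils.utils import delete_annotations

event = {"_humilis": {"annotation": [{"key": "f|enter", "ts": 1.0},
                                     {"key": "f|exit", "ts": 2.0}]}}
delete_annotations(event, "f|enter")
print([a["key"] for a in event["_humilis"]["annotation"]])  # ['f|exit']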
|
238,989
|
humilis/humilis-lambdautils
|
lambdautils/utils.py
|
get_function_annotations
|
def get_function_annotations(event, funcname, type=None, namespace=None):
"""Produce a list of function annotations in in this event."""
if type:
postfix = "|" + type
else:
postfix = "|.+"
def matchfunc(key, annkey):
"""Check if the provider regex matches an annotation key."""
return re.match(key, annkey) is not None
return get_annotations(event, funcname + postfix, namespace=namespace,
matchfunc=matchfunc)
|
python
|
def get_function_annotations(event, funcname, type=None, namespace=None):
"""Produce a list of function annotations in in this event."""
if type:
postfix = "|" + type
else:
postfix = "|.+"
def matchfunc(key, annkey):
"""Check if the provider regex matches an annotation key."""
return re.match(key, annkey) is not None
return get_annotations(event, funcname + postfix, namespace=namespace,
matchfunc=matchfunc)
|
[
"def",
"get_function_annotations",
"(",
"event",
",",
"funcname",
",",
"type",
"=",
"None",
",",
"namespace",
"=",
"None",
")",
":",
"if",
"type",
":",
"postfix",
"=",
"\"|\"",
"+",
"type",
"else",
":",
"postfix",
"=",
"\"|.+\"",
"def",
"matchfunc",
"(",
"key",
",",
"annkey",
")",
":",
"\"\"\"Check if the provider regex matches an annotation key.\"\"\"",
"return",
"re",
".",
"match",
"(",
"key",
",",
"annkey",
")",
"is",
"not",
"None",
"return",
"get_annotations",
"(",
"event",
",",
"funcname",
"+",
"postfix",
",",
"namespace",
"=",
"namespace",
",",
"matchfunc",
"=",
"matchfunc",
")"
] |
Produce a list of function annotations in in this event.
|
[
"Produce",
"a",
"list",
"of",
"function",
"annotations",
"in",
"in",
"this",
"event",
"."
] |
58f75eb5ace23523c283708d56a9193181ea7e8e
|
https://github.com/humilis/humilis-lambdautils/blob/58f75eb5ace23523c283708d56a9193181ea7e8e/lambdautils/utils.py#L199-L211
|
238,990
|
pri22296/botify
|
botify/botify.py
|
Botify.add_task
|
def add_task(self, keywords, context, rule):
"""Map a function to a list of keywords
Parameters
----------
keywords : iterable of str
sequence of strings which should trigger the given function
context : Context
A Context object created using desired function
rule : tuple
A tuple of integers, which act as relative indices using which data
is extracted to be passed to the function passed via context.
"""
for keyword in keywords:
self._tasks[keyword] = {'context': context, 'rule': rule}
|
python
|
def add_task(self, keywords, context, rule):
"""Map a function to a list of keywords
Parameters
----------
keywords : iterable of str
sequence of strings which should trigger the given function
context : Context
A Context object created using desired function
rule : tuple
A tuple of integers, which act as relative indices using which data
is extracted to be passed to the function passed via context.
"""
for keyword in keywords:
self._tasks[keyword] = {'context': context, 'rule': rule}
|
[
"def",
"add_task",
"(",
"self",
",",
"keywords",
",",
"context",
",",
"rule",
")",
":",
"for",
"keyword",
"in",
"keywords",
":",
"self",
".",
"_tasks",
"[",
"keyword",
"]",
"=",
"{",
"'context'",
":",
"context",
",",
"'rule'",
":",
"rule",
"}"
] |
Map a function to a list of keywords
Parameters
----------
keywords : iterable of str
sequence of strings which should trigger the given function
context : Context
A Context object created using desired function
rule : tuple
A tuple of integers, which act as relative indices using which data
is extracted to be passed to the function passed via context.
|
[
"Map",
"a",
"function",
"to",
"a",
"list",
"of",
"keywords"
] |
c3ff022f4c7314e508ffaa3ce1da1ef1e784afb2
|
https://github.com/pri22296/botify/blob/c3ff022f4c7314e508ffaa3ce1da1ef1e784afb2/botify/botify.py#L73-L87
|
238,991
|
pri22296/botify
|
botify/botify.py
|
Botify.add_modifier
|
def add_modifier(self, modifier, keywords, relative_pos,
action, parameter=None):
"""Modify existing tasks based on presence of a keyword.
Parameters
----------
modifier : str
A string value which would trigger the given Modifier.
keywords : iterable of str
sequence of strings which are keywords for some task,
which has to be modified.
relative_pos : int
Relative position of the task which should be modified
in the presence of `modifier`. It's value can never be 0. Data
fields should also be considered when calculating the relative
position.
action : str
String value representing the action which should be performed
on the task. Action represents calling a arbitrary function
to perform th emodification.
parameter : object
value required by the `action`.(Default None)
"""
if relative_pos == 0:
raise ValueError("relative_pos cannot be 0")
modifier_dict = self._modifiers.get(modifier, {})
value = (action, parameter, relative_pos)
for keyword in keywords:
action_list = list(modifier_dict.get(keyword, []))
action_list.append(value)
modifier_dict[keyword] = tuple(action_list)
self._modifiers[modifier] = modifier_dict
|
python
|
def add_modifier(self, modifier, keywords, relative_pos,
action, parameter=None):
"""Modify existing tasks based on presence of a keyword.
Parameters
----------
modifier : str
A string value which would trigger the given Modifier.
keywords : iterable of str
sequence of strings which are keywords for some task,
which has to be modified.
relative_pos : int
Relative position of the task which should be modified
in the presence of `modifier`. It's value can never be 0. Data
fields should also be considered when calculating the relative
position.
action : str
String value representing the action which should be performed
on the task. Action represents calling a arbitrary function
to perform th emodification.
parameter : object
value required by the `action`.(Default None)
"""
if relative_pos == 0:
raise ValueError("relative_pos cannot be 0")
modifier_dict = self._modifiers.get(modifier, {})
value = (action, parameter, relative_pos)
for keyword in keywords:
action_list = list(modifier_dict.get(keyword, []))
action_list.append(value)
modifier_dict[keyword] = tuple(action_list)
self._modifiers[modifier] = modifier_dict
|
[
"def",
"add_modifier",
"(",
"self",
",",
"modifier",
",",
"keywords",
",",
"relative_pos",
",",
"action",
",",
"parameter",
"=",
"None",
")",
":",
"if",
"relative_pos",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"relative_pos cannot be 0\"",
")",
"modifier_dict",
"=",
"self",
".",
"_modifiers",
".",
"get",
"(",
"modifier",
",",
"{",
"}",
")",
"value",
"=",
"(",
"action",
",",
"parameter",
",",
"relative_pos",
")",
"for",
"keyword",
"in",
"keywords",
":",
"action_list",
"=",
"list",
"(",
"modifier_dict",
".",
"get",
"(",
"keyword",
",",
"[",
"]",
")",
")",
"action_list",
".",
"append",
"(",
"value",
")",
"modifier_dict",
"[",
"keyword",
"]",
"=",
"tuple",
"(",
"action_list",
")",
"self",
".",
"_modifiers",
"[",
"modifier",
"]",
"=",
"modifier_dict"
] |
Modify existing tasks based on presence of a keyword.
Parameters
----------
modifier : str
A string value which would trigger the given Modifier.
keywords : iterable of str
sequence of strings which are keywords for some task,
which has to be modified.
relative_pos : int
Relative position of the task which should be modified
in the presence of `modifier`. It's value can never be 0. Data
fields should also be considered when calculating the relative
position.
action : str
String value representing the action which should be performed
on the task. Action represents calling a arbitrary function
to perform th emodification.
parameter : object
value required by the `action`.(Default None)
|
[
"Modify",
"existing",
"tasks",
"based",
"on",
"presence",
"of",
"a",
"keyword",
"."
] |
c3ff022f4c7314e508ffaa3ce1da1ef1e784afb2
|
https://github.com/pri22296/botify/blob/c3ff022f4c7314e508ffaa3ce1da1ef1e784afb2/botify/botify.py#L89-L120
|
238,992
|
pri22296/botify
|
botify/botify.py
|
Botify.parse
|
def parse(self, text):
"""Parse the string `text` and return a tuple of left over Data fields.
Parameters
----------
text : str
A string to be parsed
Returns
-------
result : tuple
A tuple of left over Data after processing
"""
self._parsed_list = []
self._most_recent_report = []
self._token_list = text.lower().split()
modifier_index_list = []
for item in self._token_list:
if(self._is_token_data_callback(item)):
self._parsed_list.append(self._clean_data_callback(item))
if item in self._tasks:
d = {}
d['context'] = self._tasks[item]['context']
d['rule'] = self._tasks[item]['rule']
d['task'] = item
self._parsed_list.append(d)
if item in self._modifiers:
modifier_index_list.append((len(self._parsed_list), item))
self._apply_modifiers(modifier_index_list)
return self._evaluate()
|
python
|
def parse(self, text):
"""Parse the string `text` and return a tuple of left over Data fields.
Parameters
----------
text : str
A string to be parsed
Returns
-------
result : tuple
A tuple of left over Data after processing
"""
self._parsed_list = []
self._most_recent_report = []
self._token_list = text.lower().split()
modifier_index_list = []
for item in self._token_list:
if(self._is_token_data_callback(item)):
self._parsed_list.append(self._clean_data_callback(item))
if item in self._tasks:
d = {}
d['context'] = self._tasks[item]['context']
d['rule'] = self._tasks[item]['rule']
d['task'] = item
self._parsed_list.append(d)
if item in self._modifiers:
modifier_index_list.append((len(self._parsed_list), item))
self._apply_modifiers(modifier_index_list)
return self._evaluate()
|
[
"def",
"parse",
"(",
"self",
",",
"text",
")",
":",
"self",
".",
"_parsed_list",
"=",
"[",
"]",
"self",
".",
"_most_recent_report",
"=",
"[",
"]",
"self",
".",
"_token_list",
"=",
"text",
".",
"lower",
"(",
")",
".",
"split",
"(",
")",
"modifier_index_list",
"=",
"[",
"]",
"for",
"item",
"in",
"self",
".",
"_token_list",
":",
"if",
"(",
"self",
".",
"_is_token_data_callback",
"(",
"item",
")",
")",
":",
"self",
".",
"_parsed_list",
".",
"append",
"(",
"self",
".",
"_clean_data_callback",
"(",
"item",
")",
")",
"if",
"item",
"in",
"self",
".",
"_tasks",
":",
"d",
"=",
"{",
"}",
"d",
"[",
"'context'",
"]",
"=",
"self",
".",
"_tasks",
"[",
"item",
"]",
"[",
"'context'",
"]",
"d",
"[",
"'rule'",
"]",
"=",
"self",
".",
"_tasks",
"[",
"item",
"]",
"[",
"'rule'",
"]",
"d",
"[",
"'task'",
"]",
"=",
"item",
"self",
".",
"_parsed_list",
".",
"append",
"(",
"d",
")",
"if",
"item",
"in",
"self",
".",
"_modifiers",
":",
"modifier_index_list",
".",
"append",
"(",
"(",
"len",
"(",
"self",
".",
"_parsed_list",
")",
",",
"item",
")",
")",
"self",
".",
"_apply_modifiers",
"(",
"modifier_index_list",
")",
"return",
"self",
".",
"_evaluate",
"(",
")"
] |
Parse the string `text` and return a tuple of left over Data fields.
Parameters
----------
text : str
A string to be parsed
Returns
-------
result : tuple
A tuple of left over Data after processing
|
[
"Parse",
"the",
"string",
"text",
"and",
"return",
"a",
"tuple",
"of",
"left",
"over",
"Data",
"fields",
"."
] |
c3ff022f4c7314e508ffaa3ce1da1ef1e784afb2
|
https://github.com/pri22296/botify/blob/c3ff022f4c7314e508ffaa3ce1da1ef1e784afb2/botify/botify.py#L122-L155
|
238,993
|
racker/python-twisted-service-registry-client
|
txServiceRegistry/client.py
|
ResponseReceiver.connectionLost
|
def connectionLost(self, reason):
"""
Called when the response body has been completely delivered.
@param reason: Either a twisted.web.client.ResponseDone exception or
a twisted.web.http.PotentialDataLoss exception.
"""
self.remaining.reset()
try:
result = json.load(self.remaining)
except Exception, e:
self.finished.errback(e)
return
returnValue = result
if self.heartbeater:
self.heartbeater.nextToken = result['token']
returnValue = (result, self.heartbeater)
self.finished.callback(returnValue)
|
python
|
def connectionLost(self, reason):
"""
Called when the response body has been completely delivered.
@param reason: Either a twisted.web.client.ResponseDone exception or
a twisted.web.http.PotentialDataLoss exception.
"""
self.remaining.reset()
try:
result = json.load(self.remaining)
except Exception, e:
self.finished.errback(e)
return
returnValue = result
if self.heartbeater:
self.heartbeater.nextToken = result['token']
returnValue = (result, self.heartbeater)
self.finished.callback(returnValue)
|
[
"def",
"connectionLost",
"(",
"self",
",",
"reason",
")",
":",
"self",
".",
"remaining",
".",
"reset",
"(",
")",
"try",
":",
"result",
"=",
"json",
".",
"load",
"(",
"self",
".",
"remaining",
")",
"except",
"Exception",
",",
"e",
":",
"self",
".",
"finished",
".",
"errback",
"(",
"e",
")",
"return",
"returnValue",
"=",
"result",
"if",
"self",
".",
"heartbeater",
":",
"self",
".",
"heartbeater",
".",
"nextToken",
"=",
"result",
"[",
"'token'",
"]",
"returnValue",
"=",
"(",
"result",
",",
"self",
".",
"heartbeater",
")",
"self",
".",
"finished",
".",
"callback",
"(",
"returnValue",
")"
] |
Called when the response body has been completely delivered.
@param reason: Either a twisted.web.client.ResponseDone exception or
a twisted.web.http.PotentialDataLoss exception.
|
[
"Called",
"when",
"the",
"response",
"body",
"has",
"been",
"completely",
"delivered",
"."
] |
72adfce04c609d72f09ee2f21e9d31be12aefd80
|
https://github.com/racker/python-twisted-service-registry-client/blob/72adfce04c609d72f09ee2f21e9d31be12aefd80/txServiceRegistry/client.py#L71-L90
|
238,994
|
racker/python-twisted-service-registry-client
|
txServiceRegistry/client.py
|
BaseClient.request
|
def request(self,
method,
path,
options=None,
payload=None,
heartbeater=None,
retry_count=0):
"""
Make a request to the Service Registry API.
@param method: HTTP method ('POST', 'GET', etc.).
@type method: C{str}
@param path: Path to be appended to base URL ('/sessions', etc.).
@type path: C{str}
@param options: Options to be encoded as query parameters in the URL.
@type options: C{dict}
@param payload: Optional body
@type payload: C{dict}
@param heartbeater: Optional heartbeater passed in when
creating a session.
@type heartbeater: L{HeartBeater}
"""
def _request(authHeaders, options, payload, heartbeater, retry_count):
tenantId = authHeaders['X-Tenant-Id']
requestUrl = self.baseUrl + tenantId + path
if options:
requestUrl += '?' + urlencode(options)
payload = StringProducer(json.dumps(payload)) if payload else None
d = self.agent.request(method=method,
uri=requestUrl,
headers=None,
bodyProducer=payload)
d.addCallback(self.cbRequest,
method,
path,
options,
payload,
heartbeater,
retry_count)
return d
d = self.agent.getAuthHeaders()
d.addCallback(_request, options, payload, heartbeater, retry_count)
return d
|
python
|
def request(self,
method,
path,
options=None,
payload=None,
heartbeater=None,
retry_count=0):
"""
Make a request to the Service Registry API.
@param method: HTTP method ('POST', 'GET', etc.).
@type method: C{str}
@param path: Path to be appended to base URL ('/sessions', etc.).
@type path: C{str}
@param options: Options to be encoded as query parameters in the URL.
@type options: C{dict}
@param payload: Optional body
@type payload: C{dict}
@param heartbeater: Optional heartbeater passed in when
creating a session.
@type heartbeater: L{HeartBeater}
"""
def _request(authHeaders, options, payload, heartbeater, retry_count):
tenantId = authHeaders['X-Tenant-Id']
requestUrl = self.baseUrl + tenantId + path
if options:
requestUrl += '?' + urlencode(options)
payload = StringProducer(json.dumps(payload)) if payload else None
d = self.agent.request(method=method,
uri=requestUrl,
headers=None,
bodyProducer=payload)
d.addCallback(self.cbRequest,
method,
path,
options,
payload,
heartbeater,
retry_count)
return d
d = self.agent.getAuthHeaders()
d.addCallback(_request, options, payload, heartbeater, retry_count)
return d
|
[
"def",
"request",
"(",
"self",
",",
"method",
",",
"path",
",",
"options",
"=",
"None",
",",
"payload",
"=",
"None",
",",
"heartbeater",
"=",
"None",
",",
"retry_count",
"=",
"0",
")",
":",
"def",
"_request",
"(",
"authHeaders",
",",
"options",
",",
"payload",
",",
"heartbeater",
",",
"retry_count",
")",
":",
"tenantId",
"=",
"authHeaders",
"[",
"'X-Tenant-Id'",
"]",
"requestUrl",
"=",
"self",
".",
"baseUrl",
"+",
"tenantId",
"+",
"path",
"if",
"options",
":",
"requestUrl",
"+=",
"'?'",
"+",
"urlencode",
"(",
"options",
")",
"payload",
"=",
"StringProducer",
"(",
"json",
".",
"dumps",
"(",
"payload",
")",
")",
"if",
"payload",
"else",
"None",
"d",
"=",
"self",
".",
"agent",
".",
"request",
"(",
"method",
"=",
"method",
",",
"uri",
"=",
"requestUrl",
",",
"headers",
"=",
"None",
",",
"bodyProducer",
"=",
"payload",
")",
"d",
".",
"addCallback",
"(",
"self",
".",
"cbRequest",
",",
"method",
",",
"path",
",",
"options",
",",
"payload",
",",
"heartbeater",
",",
"retry_count",
")",
"return",
"d",
"d",
"=",
"self",
".",
"agent",
".",
"getAuthHeaders",
"(",
")",
"d",
".",
"addCallback",
"(",
"_request",
",",
"options",
",",
"payload",
",",
"heartbeater",
",",
"retry_count",
")",
"return",
"d"
] |
Make a request to the Service Registry API.
@param method: HTTP method ('POST', 'GET', etc.).
@type method: C{str}
@param path: Path to be appended to base URL ('/sessions', etc.).
@type path: C{str}
@param options: Options to be encoded as query parameters in the URL.
@type options: C{dict}
@param payload: Optional body
@type payload: C{dict}
@param heartbeater: Optional heartbeater passed in when
creating a session.
@type heartbeater: L{HeartBeater}
|
[
"Make",
"a",
"request",
"to",
"the",
"Service",
"Registry",
"API",
"."
] |
72adfce04c609d72f09ee2f21e9d31be12aefd80
|
https://github.com/racker/python-twisted-service-registry-client/blob/72adfce04c609d72f09ee2f21e9d31be12aefd80/txServiceRegistry/client.py#L149-L194
|
238,995
|
rackerlabs/txkazoo
|
txkazoo/recipe/partitioner.py
|
_SetPartitionerWrapper._initialized
|
def _initialized(self, partitioner):
"""Store the partitioner and reset the internal state.
Now that we successfully got an actual
:class:`kazoo.recipe.partitioner.SetPartitioner` object, we
store it and reset our internal ``_state`` to ``None``,
causing the ``state`` property to defer to the partitioner's
state.
"""
self._partitioner = partitioner
self._thimble = Thimble(self.reactor, self.pool,
partitioner, _blocking_partitioner_methods)
self._state = None
|
python
|
def _initialized(self, partitioner):
"""Store the partitioner and reset the internal state.
Now that we successfully got an actual
:class:`kazoo.recipe.partitioner.SetPartitioner` object, we
store it and reset our internal ``_state`` to ``None``,
causing the ``state`` property to defer to the partitioner's
state.
"""
self._partitioner = partitioner
self._thimble = Thimble(self.reactor, self.pool,
partitioner, _blocking_partitioner_methods)
self._state = None
|
[
"def",
"_initialized",
"(",
"self",
",",
"partitioner",
")",
":",
"self",
".",
"_partitioner",
"=",
"partitioner",
"self",
".",
"_thimble",
"=",
"Thimble",
"(",
"self",
".",
"reactor",
",",
"self",
".",
"pool",
",",
"partitioner",
",",
"_blocking_partitioner_methods",
")",
"self",
".",
"_state",
"=",
"None"
] |
Store the partitioner and reset the internal state.
Now that we successfully got an actual
:class:`kazoo.recipe.partitioner.SetPartitioner` object, we
store it and reset our internal ``_state`` to ``None``,
causing the ``state`` property to defer to the partitioner's
state.
|
[
"Store",
"the",
"partitioner",
"and",
"reset",
"the",
"internal",
"state",
"."
] |
a0989138cc08df7acd1d410f7e48708553839f46
|
https://github.com/rackerlabs/txkazoo/blob/a0989138cc08df7acd1d410f7e48708553839f46/txkazoo/recipe/partitioner.py#L53-L66
|
238,996
|
mikicz/arca
|
arca/backend/vagrant.py
|
VagrantBackend.inject_arca
|
def inject_arca(self, arca):
""" Apart from the usual validation stuff it also creates log file for this instance.
"""
super().inject_arca(arca)
import vagrant
self.log_path = Path(self._arca.base_dir) / "logs" / (str(uuid4()) + ".log")
self.log_path.parent.mkdir(exist_ok=True, parents=True)
logger.info("Storing vagrant log in %s", self.log_path)
self.log_cm = vagrant.make_file_cm(self.log_path)
|
python
|
def inject_arca(self, arca):
""" Apart from the usual validation stuff it also creates log file for this instance.
"""
super().inject_arca(arca)
import vagrant
self.log_path = Path(self._arca.base_dir) / "logs" / (str(uuid4()) + ".log")
self.log_path.parent.mkdir(exist_ok=True, parents=True)
logger.info("Storing vagrant log in %s", self.log_path)
self.log_cm = vagrant.make_file_cm(self.log_path)
|
[
"def",
"inject_arca",
"(",
"self",
",",
"arca",
")",
":",
"super",
"(",
")",
".",
"inject_arca",
"(",
"arca",
")",
"import",
"vagrant",
"self",
".",
"log_path",
"=",
"Path",
"(",
"self",
".",
"_arca",
".",
"base_dir",
")",
"/",
"\"logs\"",
"/",
"(",
"str",
"(",
"uuid4",
"(",
")",
")",
"+",
"\".log\"",
")",
"self",
".",
"log_path",
".",
"parent",
".",
"mkdir",
"(",
"exist_ok",
"=",
"True",
",",
"parents",
"=",
"True",
")",
"logger",
".",
"info",
"(",
"\"Storing vagrant log in %s\"",
",",
"self",
".",
"log_path",
")",
"self",
".",
"log_cm",
"=",
"vagrant",
".",
"make_file_cm",
"(",
"self",
".",
"log_path",
")"
] |
Apart from the usual validation stuff it also creates log file for this instance.
|
[
"Apart",
"from",
"the",
"usual",
"validation",
"stuff",
"it",
"also",
"creates",
"log",
"file",
"for",
"this",
"instance",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/vagrant.py#L74-L85
|
238,997
|
mikicz/arca
|
arca/backend/vagrant.py
|
VagrantBackend.init_vagrant
|
def init_vagrant(self, vagrant_file):
""" Creates a Vagrantfile in the target dir, with only the base image pulled.
Copies the runner script to the directory so it's accessible from the VM.
"""
if self.inherit_image:
image_name, image_tag = str(self.inherit_image).split(":")
else:
image_name = self.get_arca_base_name()
image_tag = self.get_python_base_tag(self.get_python_version())
logger.info("Creating Vagrantfile located in %s, base image %s:%s", vagrant_file, image_name, image_tag)
repos_dir = (Path(self._arca.base_dir) / 'repos').resolve()
vagrant_file.parent.mkdir(exist_ok=True, parents=True)
vagrant_file.write_text(dedent(f"""
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure("2") do |config|
config.vm.box = "{self.box}"
config.ssh.insert_key = true
config.vm.provision "docker" do |d|
d.pull_images "{image_name}:{image_tag}"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.synced_folder "{repos_dir}", "/srv/repos"
config.vm.provider "{self.provider}"
end
"""))
(vagrant_file.parent / "runner.py").write_text(self.RUNNER.read_text())
|
python
|
def init_vagrant(self, vagrant_file):
""" Creates a Vagrantfile in the target dir, with only the base image pulled.
Copies the runner script to the directory so it's accessible from the VM.
"""
if self.inherit_image:
image_name, image_tag = str(self.inherit_image).split(":")
else:
image_name = self.get_arca_base_name()
image_tag = self.get_python_base_tag(self.get_python_version())
logger.info("Creating Vagrantfile located in %s, base image %s:%s", vagrant_file, image_name, image_tag)
repos_dir = (Path(self._arca.base_dir) / 'repos').resolve()
vagrant_file.parent.mkdir(exist_ok=True, parents=True)
vagrant_file.write_text(dedent(f"""
# -*- mode: ruby -*-
# vi: set ft=ruby :
Vagrant.configure("2") do |config|
config.vm.box = "{self.box}"
config.ssh.insert_key = true
config.vm.provision "docker" do |d|
d.pull_images "{image_name}:{image_tag}"
end
config.vm.synced_folder ".", "/vagrant"
config.vm.synced_folder "{repos_dir}", "/srv/repos"
config.vm.provider "{self.provider}"
end
"""))
(vagrant_file.parent / "runner.py").write_text(self.RUNNER.read_text())
|
[
"def",
"init_vagrant",
"(",
"self",
",",
"vagrant_file",
")",
":",
"if",
"self",
".",
"inherit_image",
":",
"image_name",
",",
"image_tag",
"=",
"str",
"(",
"self",
".",
"inherit_image",
")",
".",
"split",
"(",
"\":\"",
")",
"else",
":",
"image_name",
"=",
"self",
".",
"get_arca_base_name",
"(",
")",
"image_tag",
"=",
"self",
".",
"get_python_base_tag",
"(",
"self",
".",
"get_python_version",
"(",
")",
")",
"logger",
".",
"info",
"(",
"\"Creating Vagrantfile located in %s, base image %s:%s\"",
",",
"vagrant_file",
",",
"image_name",
",",
"image_tag",
")",
"repos_dir",
"=",
"(",
"Path",
"(",
"self",
".",
"_arca",
".",
"base_dir",
")",
"/",
"'repos'",
")",
".",
"resolve",
"(",
")",
"vagrant_file",
".",
"parent",
".",
"mkdir",
"(",
"exist_ok",
"=",
"True",
",",
"parents",
"=",
"True",
")",
"vagrant_file",
".",
"write_text",
"(",
"dedent",
"(",
"f\"\"\"\n # -*- mode: ruby -*-\n # vi: set ft=ruby :\n\n Vagrant.configure(\"2\") do |config|\n config.vm.box = \"{self.box}\"\n config.ssh.insert_key = true\n config.vm.provision \"docker\" do |d|\n d.pull_images \"{image_name}:{image_tag}\"\n end\n\n config.vm.synced_folder \".\", \"/vagrant\"\n config.vm.synced_folder \"{repos_dir}\", \"/srv/repos\"\n config.vm.provider \"{self.provider}\"\n\n end\n \"\"\"",
")",
")",
"(",
"vagrant_file",
".",
"parent",
"/",
"\"runner.py\"",
")",
".",
"write_text",
"(",
"self",
".",
"RUNNER",
".",
"read_text",
"(",
")",
")"
] |
Creates a Vagrantfile in the target dir, with only the base image pulled.
Copies the runner script to the directory so it's accessible from the VM.
|
[
"Creates",
"a",
"Vagrantfile",
"in",
"the",
"target",
"dir",
"with",
"only",
"the",
"base",
"image",
"pulled",
".",
"Copies",
"the",
"runner",
"script",
"to",
"the",
"directory",
"so",
"it",
"s",
"accessible",
"from",
"the",
"VM",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/vagrant.py#L113-L145
|
238,998
|
mikicz/arca
|
arca/backend/vagrant.py
|
VagrantBackend.fabric_task
|
def fabric_task(self):
""" Returns a fabric task which executes the script in the Vagrant VM
"""
from fabric import api
@api.task
def run_script(container_name, definition_filename, image_name, image_tag, repository, timeout):
""" Sequence to run inside the VM.
Starts up the container if the container is not running
(and copies over the data and the runner script)
Then the definition is copied over and the script launched.
If the VM is gonna be shut down then kills the container as well.
"""
workdir = str((Path("/srv/data") / self.cwd).resolve())
cmd = "sh" if self.inherit_image else "bash"
api.run(f"docker pull {image_name}:{image_tag}")
container_running = int(api.run(f"docker ps --format '{{.Names}}' -f name={container_name} | wc -l"))
container_stopped = int(api.run(f"docker ps -a --format '{{.Names}}' -f name={container_name} | wc -l"))
if container_running == 0:
if container_stopped:
api.run(f"docker rm -f {container_name}")
api.run(f"docker run "
f"--name {container_name} "
f"--workdir \"{workdir}\" "
f"-dt {image_name}:{image_tag} "
f"{cmd} -i")
api.run(f"docker exec {container_name} mkdir -p /srv/scripts")
api.run(f"docker cp /srv/repos/{repository} {container_name}:/srv/branch")
api.run(f"docker exec --user root {container_name} bash -c 'mv /srv/branch/* /srv/data'")
api.run(f"docker exec --user root {container_name} rm -rf /srv/branch")
api.run(f"docker cp /vagrant/runner.py {container_name}:/srv/scripts/")
api.run(f"docker cp /vagrant/{definition_filename} {container_name}:/srv/scripts/")
output = api.run(
" ".join([
"docker", "exec", container_name,
"python", "/srv/scripts/runner.py", f"/srv/scripts/{definition_filename}",
]),
timeout=math.ceil(timeout)
)
if not self.keep_container_running:
api.run(f"docker kill {container_name}")
return output
return run_script
|
python
|
def fabric_task(self):
""" Returns a fabric task which executes the script in the Vagrant VM
"""
from fabric import api
@api.task
def run_script(container_name, definition_filename, image_name, image_tag, repository, timeout):
""" Sequence to run inside the VM.
Starts up the container if the container is not running
(and copies over the data and the runner script)
Then the definition is copied over and the script launched.
If the VM is gonna be shut down then kills the container as well.
"""
workdir = str((Path("/srv/data") / self.cwd).resolve())
cmd = "sh" if self.inherit_image else "bash"
api.run(f"docker pull {image_name}:{image_tag}")
container_running = int(api.run(f"docker ps --format '{{.Names}}' -f name={container_name} | wc -l"))
container_stopped = int(api.run(f"docker ps -a --format '{{.Names}}' -f name={container_name} | wc -l"))
if container_running == 0:
if container_stopped:
api.run(f"docker rm -f {container_name}")
api.run(f"docker run "
f"--name {container_name} "
f"--workdir \"{workdir}\" "
f"-dt {image_name}:{image_tag} "
f"{cmd} -i")
api.run(f"docker exec {container_name} mkdir -p /srv/scripts")
api.run(f"docker cp /srv/repos/{repository} {container_name}:/srv/branch")
api.run(f"docker exec --user root {container_name} bash -c 'mv /srv/branch/* /srv/data'")
api.run(f"docker exec --user root {container_name} rm -rf /srv/branch")
api.run(f"docker cp /vagrant/runner.py {container_name}:/srv/scripts/")
api.run(f"docker cp /vagrant/{definition_filename} {container_name}:/srv/scripts/")
output = api.run(
" ".join([
"docker", "exec", container_name,
"python", "/srv/scripts/runner.py", f"/srv/scripts/{definition_filename}",
]),
timeout=math.ceil(timeout)
)
if not self.keep_container_running:
api.run(f"docker kill {container_name}")
return output
return run_script
|
[
"def",
"fabric_task",
"(",
"self",
")",
":",
"from",
"fabric",
"import",
"api",
"@",
"api",
".",
"task",
"def",
"run_script",
"(",
"container_name",
",",
"definition_filename",
",",
"image_name",
",",
"image_tag",
",",
"repository",
",",
"timeout",
")",
":",
"\"\"\" Sequence to run inside the VM.\n Starts up the container if the container is not running\n (and copies over the data and the runner script)\n Then the definition is copied over and the script launched.\n If the VM is gonna be shut down then kills the container as well.\n \"\"\"",
"workdir",
"=",
"str",
"(",
"(",
"Path",
"(",
"\"/srv/data\"",
")",
"/",
"self",
".",
"cwd",
")",
".",
"resolve",
"(",
")",
")",
"cmd",
"=",
"\"sh\"",
"if",
"self",
".",
"inherit_image",
"else",
"\"bash\"",
"api",
".",
"run",
"(",
"f\"docker pull {image_name}:{image_tag}\"",
")",
"container_running",
"=",
"int",
"(",
"api",
".",
"run",
"(",
"f\"docker ps --format '{{.Names}}' -f name={container_name} | wc -l\"",
")",
")",
"container_stopped",
"=",
"int",
"(",
"api",
".",
"run",
"(",
"f\"docker ps -a --format '{{.Names}}' -f name={container_name} | wc -l\"",
")",
")",
"if",
"container_running",
"==",
"0",
":",
"if",
"container_stopped",
":",
"api",
".",
"run",
"(",
"f\"docker rm -f {container_name}\"",
")",
"api",
".",
"run",
"(",
"f\"docker run \"",
"f\"--name {container_name} \"",
"f\"--workdir \\\"{workdir}\\\" \"",
"f\"-dt {image_name}:{image_tag} \"",
"f\"{cmd} -i\"",
")",
"api",
".",
"run",
"(",
"f\"docker exec {container_name} mkdir -p /srv/scripts\"",
")",
"api",
".",
"run",
"(",
"f\"docker cp /srv/repos/{repository} {container_name}:/srv/branch\"",
")",
"api",
".",
"run",
"(",
"f\"docker exec --user root {container_name} bash -c 'mv /srv/branch/* /srv/data'\"",
")",
"api",
".",
"run",
"(",
"f\"docker exec --user root {container_name} rm -rf /srv/branch\"",
")",
"api",
".",
"run",
"(",
"f\"docker cp /vagrant/runner.py {container_name}:/srv/scripts/\"",
")",
"api",
".",
"run",
"(",
"f\"docker cp /vagrant/{definition_filename} {container_name}:/srv/scripts/\"",
")",
"output",
"=",
"api",
".",
"run",
"(",
"\" \"",
".",
"join",
"(",
"[",
"\"docker\"",
",",
"\"exec\"",
",",
"container_name",
",",
"\"python\"",
",",
"\"/srv/scripts/runner.py\"",
",",
"f\"/srv/scripts/{definition_filename}\"",
",",
"]",
")",
",",
"timeout",
"=",
"math",
".",
"ceil",
"(",
"timeout",
")",
")",
"if",
"not",
"self",
".",
"keep_container_running",
":",
"api",
".",
"run",
"(",
"f\"docker kill {container_name}\"",
")",
"return",
"output",
"return",
"run_script"
] |
Returns a fabric task which executes the script in the Vagrant VM
|
[
"Returns",
"a",
"fabric",
"task",
"which",
"executes",
"the",
"script",
"in",
"the",
"Vagrant",
"VM"
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/vagrant.py#L148-L200
|
238,999
|
mikicz/arca
|
arca/backend/vagrant.py
|
VagrantBackend.ensure_vm_running
|
def ensure_vm_running(self, vm_location):
""" Gets or creates a Vagrantfile in ``vm_location`` and calls ``vagrant up`` if the VM is not running.
"""
import vagrant
if self.vagrant is None:
vagrant_file = vm_location / "Vagrantfile"
if not vagrant_file.exists():
self.init_vagrant(vagrant_file)
self.vagrant = vagrant.Vagrant(vm_location,
quiet_stdout=self.quiet,
quiet_stderr=self.quiet,
out_cm=self.log_cm,
err_cm=self.log_cm)
status = [x for x in self.vagrant.status() if x.name == "default"][0]
if status.state != "running":
try:
self.vagrant.up()
except subprocess.CalledProcessError:
raise BuildError("Vagrant VM couldn't up launched. See output for details.")
|
python
|
def ensure_vm_running(self, vm_location):
""" Gets or creates a Vagrantfile in ``vm_location`` and calls ``vagrant up`` if the VM is not running.
"""
import vagrant
if self.vagrant is None:
vagrant_file = vm_location / "Vagrantfile"
if not vagrant_file.exists():
self.init_vagrant(vagrant_file)
self.vagrant = vagrant.Vagrant(vm_location,
quiet_stdout=self.quiet,
quiet_stderr=self.quiet,
out_cm=self.log_cm,
err_cm=self.log_cm)
status = [x for x in self.vagrant.status() if x.name == "default"][0]
if status.state != "running":
try:
self.vagrant.up()
except subprocess.CalledProcessError:
raise BuildError("Vagrant VM couldn't up launched. See output for details.")
|
[
"def",
"ensure_vm_running",
"(",
"self",
",",
"vm_location",
")",
":",
"import",
"vagrant",
"if",
"self",
".",
"vagrant",
"is",
"None",
":",
"vagrant_file",
"=",
"vm_location",
"/",
"\"Vagrantfile\"",
"if",
"not",
"vagrant_file",
".",
"exists",
"(",
")",
":",
"self",
".",
"init_vagrant",
"(",
"vagrant_file",
")",
"self",
".",
"vagrant",
"=",
"vagrant",
".",
"Vagrant",
"(",
"vm_location",
",",
"quiet_stdout",
"=",
"self",
".",
"quiet",
",",
"quiet_stderr",
"=",
"self",
".",
"quiet",
",",
"out_cm",
"=",
"self",
".",
"log_cm",
",",
"err_cm",
"=",
"self",
".",
"log_cm",
")",
"status",
"=",
"[",
"x",
"for",
"x",
"in",
"self",
".",
"vagrant",
".",
"status",
"(",
")",
"if",
"x",
".",
"name",
"==",
"\"default\"",
"]",
"[",
"0",
"]",
"if",
"status",
".",
"state",
"!=",
"\"running\"",
":",
"try",
":",
"self",
".",
"vagrant",
".",
"up",
"(",
")",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"raise",
"BuildError",
"(",
"\"Vagrant VM couldn't up launched. See output for details.\"",
")"
] |
Gets or creates a Vagrantfile in ``vm_location`` and calls ``vagrant up`` if the VM is not running.
|
[
"Gets",
"or",
"creates",
"a",
"Vagrantfile",
"in",
"vm_location",
"and",
"calls",
"vagrant",
"up",
"if",
"the",
"VM",
"is",
"not",
"running",
"."
] |
e67fdc00be473ecf8ec16d024e1a3f2c47ca882c
|
https://github.com/mikicz/arca/blob/e67fdc00be473ecf8ec16d024e1a3f2c47ca882c/arca/backend/vagrant.py#L202-L224
|